Browse Source

merge 1.3.0-release branch code

1.3.2-release
gaojun2048 4 years ago
parent
commit
3cf9afc195
  1. 4
      ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-common.xml
  2. 40
      docker/docker-compose.yml
  3. 147
      docker/docker-swarm/docker-compose.yml
  4. 12
      docker/docker-swarm/dolphinscheduler_env.sh
  5. 6
      docker/kubernetes/dolphinscheduler/values.yaml
  6. 765
      docker/postgres/docker-entrypoint-initdb/init.sql
  7. 93
      dockerfile/Dockerfile
  8. 328
      dockerfile/README.md
  9. 328
      dockerfile/README_zh_CN.md
  10. 27
      dockerfile/checkpoint.sh
  11. 50
      dockerfile/conf/dolphinscheduler/alert.properties.tpl
  12. 45
      dockerfile/conf/dolphinscheduler/application-api.properties.tpl
  13. 78
      dockerfile/conf/dolphinscheduler/common.properties.tpl
  14. 71
      dockerfile/conf/dolphinscheduler/datasource.properties.tpl
  15. 26
      dockerfile/conf/dolphinscheduler/env/dolphinscheduler_env.sh
  16. 40
      dockerfile/conf/dolphinscheduler/master.properties.tpl
  17. 54
      dockerfile/conf/dolphinscheduler/quartz.properties.tpl
  18. 37
      dockerfile/conf/dolphinscheduler/worker.properties.tpl
  19. 29
      dockerfile/conf/dolphinscheduler/zookeeper.properties.tpl
  20. 48
      dockerfile/conf/nginx/dolphinscheduler.conf
  21. 45
      dockerfile/conf/zookeeper/zoo.cfg
  22. 53
      dockerfile/hooks/build
  23. 56
      dockerfile/hooks/build.bat
  24. 35
      dockerfile/hooks/check
  25. 24
      dockerfile/hooks/push
  26. 23
      dockerfile/hooks/push.bat
  27. 104
      dockerfile/startup-init-conf.sh
  28. 196
      dockerfile/startup.sh
  29. 2
      dolphinscheduler-alert/pom.xml
  30. 30
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java
  31. 5
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EmailManager.java
  32. 10
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EnterpriseWeChatManager.java
  33. 130
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/runner/AlertSender.java
  34. 17
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactory.java
  35. 3
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/impl/DefaultHTMLTemplate.java
  36. 23
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java
  37. 17
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java
  38. 27
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java
  39. 12
      dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/PropertyUtils.java
  40. 25
      dolphinscheduler-alert/src/main/resources/alert.properties
  41. 1
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactoryTest.java
  42. 37
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/template/impl/DefaultHTMLTemplateTest.java
  43. 27
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java
  44. 10
      dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java
  45. 67
      dolphinscheduler-alert/src/test/resources/alert.properties
  46. 2
      dolphinscheduler-api/pom.xml
  47. 8
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java
  48. 129
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java
  49. 165
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java
  50. 140
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java
  51. 367
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java
  52. 159
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java
  53. 58
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java
  54. 91
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java
  55. 63
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java
  56. 526
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java
  57. 361
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java
  58. 177
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java
  59. 137
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java
  60. 715
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java
  61. 246
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java
  62. 65
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java
  63. 90
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java
  64. 174
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java
  65. 359
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java
  66. 109
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java
  67. 16
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ProcessMeta.java
  68. 30
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java
  69. 3
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java
  70. 21
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java
  71. 59
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java
  72. 41
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java
  73. 34
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java
  74. 412
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java
  75. 82
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java
  76. 246
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
  77. 12
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java
  78. 130
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java
  79. 193
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java
  80. 53
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java
  81. 17
      dolphinscheduler-api/src/main/resources/i18n/messages.properties
  82. 17
      dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties
  83. 18
      dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties
  84. 17
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java
  85. 4
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java
  86. 509
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java
  87. 37
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java
  88. 65
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java
  89. 10
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java
  90. 13
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java
  91. 150
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java
  92. 14
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java
  93. 57
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
  94. 30
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java
  95. 117
      dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java
  96. 13
      dolphinscheduler-common/pom.xml
  97. 242
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
  98. 9
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java
  99. 10
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java
  100. 22
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java
  101. Some files were not shown because too many files have changed in this diff Show More

4
ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-common.xml

@ -43,7 +43,7 @@
</property>
<property>
<name>zookeeper.connection.timeout</name>
<value>300</value>
<value>30000</value>
<value-attributes>
<type>int</type>
</value-attributes>
@ -73,7 +73,7 @@
</property>
<property>
<name>zookeeper.retry.maxtime</name>
<value>5</value>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>

40
docker/docker-compose.yml

@ -1,40 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version: '2'
services:
zookeeper:
image: zookeeper
restart: always
container_name: zookeeper
ports:
- "2181:2181"
environment:
ZOO_MY_ID: 1
ZOO_4LW_COMMANDS_WHITELIST: srvr,ruok,wchs,cons
db:
image: postgres
container_name: postgres
environment:
- POSTGRES_USER=test
- POSTGRES_PASSWORD=test
- POSTGRES_DB=dolphinscheduler
ports:
- "5432:5432"
volumes:
- pgdata:/var/lib/postgresql/data
- ./postgres/docker-entrypoint-initdb:/docker-entrypoint-initdb.d
volumes:
pgdata:

147
docker/docker-swarm/docker-compose.yml

@ -16,36 +16,6 @@
version: "3.4"
networks:
dolphinscheduler-postgresql:
driver: bridge
dolphinscheduler-zookeeper:
driver: bridge
dolphinscheduler-api:
driver: bridge
dolphinscheduler-frontend:
driver: bridge
dolphinscheduler-alert:
driver: bridge
dolphinscheduler-master:
driver: bridge
dolphinscheduler-worker:
driver: bridge
volumes:
dolphinscheduler-postgresql:
dolphinscheduler-zookeeper:
dolphinscheduler-api:
dolphinscheduler-frontend:
dolphinscheduler-alert:
dolphinscheduler-master:
dolphinscheduler-worker-data:
dolphinscheduler-worker-logs:
configs:
dolphinscheduler-worker-task-env:
file: ./dolphinscheduler_env.sh
services:
dolphinscheduler-postgresql:
@ -58,16 +28,11 @@ services:
POSTGRESQL_USERNAME: root
POSTGRESQL_PASSWORD: root
POSTGRESQL_DATABASE: dolphinscheduler
healthcheck:
test: ["CMD", "pg_isready", "-U", "${POSTGRESQL_USERNAME}", "-d", "{POSTGRESQL_PASSWORD}", "-h", "localhost", "5432"]
interval: 30s
timeout: 5s
retries: 3
# start_period: 30s
volumes:
volumes:
- dolphinscheduler-postgresql:/bitnami/postgresql
- dolphinscheduler-postgresql-initdb:/docker-entrypoint-initdb.d
networks:
- dolphinscheduler-postgresql
- dolphinscheduler
dolphinscheduler-zookeeper:
image: bitnami/zookeeper:latest
@ -77,19 +42,14 @@ services:
environment:
TZ: Asia/Shanghai
ALLOW_ANONYMOUS_LOGIN: "yes"
healthcheck:
test: ["CMD-SHELL", "nc -z localhost 2181"]
interval: 30s
timeout: 5s
retries: 3
# start_period: 30s
ZOO_4LW_COMMANDS_WHITELIST: srvr,ruok,wchs,cons
volumes:
- dolphinscheduler-zookeeper:/bitnami/zookeeper
networks:
- dolphinscheduler-zookeeper
- dolphinscheduler
dolphinscheduler-api:
image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1
image: apache/dolphinscheduler:latest
container_name: dolphinscheduler-api
command: ["api-server"]
ports:
@ -103,23 +63,21 @@ services:
POSTGRESQL_DATABASE: dolphinscheduler
ZOOKEEPER_QUORUM: dolphinscheduler-zookeeper:2181
healthcheck:
test: ["CMD-SHELL", "curl -f http://localhost:12345"]
test: ["CMD", "/root/checkpoint.sh", "ApiApplicationServer"]
interval: 30s
timeout: 5s
retries: 3
# start_period: 30s
start_period: 30s
depends_on:
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
volumes:
- dolphinscheduler-api:/opt/dolphinscheduler/logs
networks:
- dolphinscheduler-api
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
- dolphinscheduler-logs:/opt/dolphinscheduler/logs
networks:
- dolphinscheduler
dolphinscheduler-frontend:
image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1
image: apache/dolphinscheduler:latest
container_name: dolphinscheduler-frontend
command: ["frontend"]
ports:
@ -129,21 +87,20 @@ services:
FRONTEND_API_SERVER_HOST: dolphinscheduler-api
FRONTEND_API_SERVER_PORT: 12345
healthcheck:
test: ["CMD-SHELL", "curl -f http://localhost:8888"]
test: ["CMD", "nc", "-z", "localhost", "8888"]
interval: 30s
timeout: 5s
retries: 3
# start_period: 30s
start_period: 30s
depends_on:
- dolphinscheduler-api
volumes:
- dolphinscheduler-frontend:/var/log/nginx
- dolphinscheduler-logs:/var/log/nginx
networks:
- dolphinscheduler-frontend
- dolphinscheduler-api
- dolphinscheduler
dolphinscheduler-alert:
image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1
image: apache/dolphinscheduler:latest
container_name: dolphinscheduler-alert
command: ["alert-server"]
environment:
@ -172,22 +129,21 @@ services:
interval: 30s
timeout: 5s
retries: 3
# start_period: 30s
start_period: 30s
depends_on:
- dolphinscheduler-postgresql
volumes:
- dolphinscheduler-alert:/opt/dolphinscheduler/logs
networks:
- dolphinscheduler-alert
- dolphinscheduler-postgresql
volumes:
- dolphinscheduler-logs:/opt/dolphinscheduler/logs
networks:
- dolphinscheduler
dolphinscheduler-master:
image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1
image: apache/dolphinscheduler:latest
container_name: dolphinscheduler-master
command: ["master-server"]
ports:
ports:
- 5678:5678
environment:
environment:
TZ: Asia/Shanghai
MASTER_EXEC_THREADS: "100"
MASTER_EXEC_TASK_NUM: "20"
@ -207,25 +163,23 @@ services:
interval: 30s
timeout: 5s
retries: 3
# start_period: 30s
depends_on:
start_period: 30s
depends_on:
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
volumes:
- dolphinscheduler-master:/opt/dolphinscheduler/logs
volumes:
- dolphinscheduler-logs:/opt/dolphinscheduler/logs
networks:
- dolphinscheduler-master
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
- dolphinscheduler
dolphinscheduler-worker:
image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1
image: apache/dolphinscheduler:latest
container_name: dolphinscheduler-worker
command: ["worker-server"]
ports:
ports:
- 1234:1234
- 50051:50051
environment:
environment:
TZ: Asia/Shanghai
WORKER_EXEC_THREADS: "100"
WORKER_HEARTBEAT_INTERVAL: "10"
@ -245,17 +199,34 @@ services:
interval: 30s
timeout: 5s
retries: 3
# start_period: 30s
start_period: 30s
depends_on:
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
volumes:
- dolphinscheduler-worker-data:/tmp/dolphinscheduler
- dolphinscheduler-worker-logs:/opt/dolphinscheduler/logs
configs:
- source: dolphinscheduler-worker-task-env
volumes:
- type: bind
source: ./dolphinscheduler_env.sh
target: /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh
- type: volume
source: dolphinscheduler-worker-data
target: /tmp/dolphinscheduler
- type: volume
source: dolphinscheduler-logs
target: /opt/dolphinscheduler/logs
networks:
- dolphinscheduler-worker
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
- dolphinscheduler
networks:
dolphinscheduler:
driver: bridge
volumes:
dolphinscheduler-postgresql:
dolphinscheduler-postgresql-initdb:
dolphinscheduler-zookeeper:
dolphinscheduler-worker-data:
dolphinscheduler-logs:
configs:
dolphinscheduler-worker-task-env:
file: ./dolphinscheduler_env.sh

12
docker/docker-swarm/dolphinscheduler_env.sh

@ -15,12 +15,6 @@
# limitations under the License.
#
export HADOOP_HOME=/opt/soft/hadoop
export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop
export SPARK_HOME1=/opt/soft/spark1
export SPARK_HOME2=/opt/soft/spark2
export PYTHON_HOME=/opt/soft/python
export JAVA_HOME=/opt/soft/java
export HIVE_HOME=/opt/soft/hive
export FLINK_HOME=/opt/soft/flink
export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$PATH
export PYTHON_HOME=/usr/bin/python2
export JAVA_HOME=/usr/lib/jvm/java-1.8-openjdk
export PATH=$PYTHON_HOME:$JAVA_HOME/bin:$PATH

6
docker/kubernetes/dolphinscheduler/values.yaml

@ -25,9 +25,9 @@ fullnameOverride: ""
timezone: "Asia/Shanghai"
image:
registry: "docker.io"
registry: "apache"
repository: "dolphinscheduler"
tag: "1.3.0"
tag: "latest"
pullPolicy: "IfNotPresent"
imagePullSecrets: []
@ -56,6 +56,8 @@ externalDatabase:
zookeeper:
enabled: true
taskQueue: "zookeeper"
config:
ZOO_4LW_COMMANDS_WHITELIST: srvr,ruok,wchs,cons
service:
port: "2181"
persistence:

765
docker/postgres/docker-entrypoint-initdb/init.sql

@ -1,765 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS;
DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE;
DROP TABLE IF EXISTS QRTZ_LOCKS;
DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_JOB_DETAILS;
DROP TABLE IF EXISTS QRTZ_CALENDARS;
CREATE TABLE QRTZ_JOB_DETAILS(
SCHED_NAME character varying(120) NOT NULL,
JOB_NAME character varying(200) NOT NULL,
JOB_GROUP character varying(200) NOT NULL,
DESCRIPTION character varying(250) NULL,
JOB_CLASS_NAME character varying(250) NOT NULL,
IS_DURABLE boolean NOT NULL,
IS_NONCONCURRENT boolean NOT NULL,
IS_UPDATE_DATA boolean NOT NULL,
REQUESTS_RECOVERY boolean NOT NULL,
JOB_DATA bytea NULL);
alter table QRTZ_JOB_DETAILS add primary key(SCHED_NAME,JOB_NAME,JOB_GROUP);
CREATE TABLE QRTZ_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
JOB_NAME character varying(200) NOT NULL,
JOB_GROUP character varying(200) NOT NULL,
DESCRIPTION character varying(250) NULL,
NEXT_FIRE_TIME BIGINT NULL,
PREV_FIRE_TIME BIGINT NULL,
PRIORITY INTEGER NULL,
TRIGGER_STATE character varying(16) NOT NULL,
TRIGGER_TYPE character varying(8) NOT NULL,
START_TIME BIGINT NOT NULL,
END_TIME BIGINT NULL,
CALENDAR_NAME character varying(200) NULL,
MISFIRE_INSTR SMALLINT NULL,
JOB_DATA bytea NULL) ;
alter table QRTZ_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_SIMPLE_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
REPEAT_COUNT BIGINT NOT NULL,
REPEAT_INTERVAL BIGINT NOT NULL,
TIMES_TRIGGERED BIGINT NOT NULL) ;
alter table QRTZ_SIMPLE_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_CRON_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
CRON_EXPRESSION character varying(120) NOT NULL,
TIME_ZONE_ID character varying(80)) ;
alter table QRTZ_CRON_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_SIMPROP_TRIGGERS
(
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
STR_PROP_1 character varying(512) NULL,
STR_PROP_2 character varying(512) NULL,
STR_PROP_3 character varying(512) NULL,
INT_PROP_1 INT NULL,
INT_PROP_2 INT NULL,
LONG_PROP_1 BIGINT NULL,
LONG_PROP_2 BIGINT NULL,
DEC_PROP_1 NUMERIC(13,4) NULL,
DEC_PROP_2 NUMERIC(13,4) NULL,
BOOL_PROP_1 boolean NULL,
BOOL_PROP_2 boolean NULL) ;
alter table QRTZ_SIMPROP_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_BLOB_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
BLOB_DATA bytea NULL) ;
alter table QRTZ_BLOB_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_CALENDARS (
SCHED_NAME character varying(120) NOT NULL,
CALENDAR_NAME character varying(200) NOT NULL,
CALENDAR bytea NOT NULL) ;
alter table QRTZ_CALENDARS add primary key(SCHED_NAME,CALENDAR_NAME);
CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS (
SCHED_NAME character varying(120) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL) ;
alter table QRTZ_PAUSED_TRIGGER_GRPS add primary key(SCHED_NAME,TRIGGER_GROUP);
CREATE TABLE QRTZ_FIRED_TRIGGERS (
SCHED_NAME character varying(120) NOT NULL,
ENTRY_ID character varying(95) NOT NULL,
TRIGGER_NAME character varying(200) NOT NULL,
TRIGGER_GROUP character varying(200) NOT NULL,
INSTANCE_NAME character varying(200) NOT NULL,
FIRED_TIME BIGINT NOT NULL,
SCHED_TIME BIGINT NOT NULL,
PRIORITY INTEGER NOT NULL,
STATE character varying(16) NOT NULL,
JOB_NAME character varying(200) NULL,
JOB_GROUP character varying(200) NULL,
IS_NONCONCURRENT boolean NULL,
REQUESTS_RECOVERY boolean NULL) ;
alter table QRTZ_FIRED_TRIGGERS add primary key(SCHED_NAME,ENTRY_ID);
CREATE TABLE QRTZ_SCHEDULER_STATE (
SCHED_NAME character varying(120) NOT NULL,
INSTANCE_NAME character varying(200) NOT NULL,
LAST_CHECKIN_TIME BIGINT NOT NULL,
CHECKIN_INTERVAL BIGINT NOT NULL) ;
alter table QRTZ_SCHEDULER_STATE add primary key(SCHED_NAME,INSTANCE_NAME);
CREATE TABLE QRTZ_LOCKS (
SCHED_NAME character varying(120) NOT NULL,
LOCK_NAME character varying(40) NOT NULL) ;
alter table QRTZ_LOCKS add primary key(SCHED_NAME,LOCK_NAME);
CREATE INDEX IDX_QRTZ_J_REQ_RECOVERY ON QRTZ_JOB_DETAILS(SCHED_NAME,REQUESTS_RECOVERY);
CREATE INDEX IDX_QRTZ_J_GRP ON QRTZ_JOB_DETAILS(SCHED_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_T_J ON QRTZ_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_T_JG ON QRTZ_TRIGGERS(SCHED_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_T_C ON QRTZ_TRIGGERS(SCHED_NAME,CALENDAR_NAME);
CREATE INDEX IDX_QRTZ_T_G ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP);
CREATE INDEX IDX_QRTZ_T_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_N_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_N_G_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_NEXT_FIRE_TIME ON QRTZ_TRIGGERS(SCHED_NAME,NEXT_FIRE_TIME);
CREATE INDEX IDX_QRTZ_T_NFT_ST ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE,NEXT_FIRE_TIME);
CREATE INDEX IDX_QRTZ_T_NFT_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME);
CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE_GRP ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_GROUP,TRIGGER_STATE);
CREATE INDEX IDX_QRTZ_FT_TRIG_INST_NAME ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME);
CREATE INDEX IDX_QRTZ_FT_INST_JOB_REQ_RCVRY ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME,REQUESTS_RECOVERY);
CREATE INDEX IDX_QRTZ_FT_J_G ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_FT_JG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_GROUP);
CREATE INDEX IDX_QRTZ_FT_T_G ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
CREATE INDEX IDX_QRTZ_FT_TG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_GROUP);
--
-- Table structure for table t_ds_access_token
--
DROP TABLE IF EXISTS t_ds_access_token;
CREATE TABLE t_ds_access_token (
id int NOT NULL ,
user_id int DEFAULT NULL ,
token varchar(64) DEFAULT NULL ,
expire_time timestamp DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_alert
--
DROP TABLE IF EXISTS t_ds_alert;
CREATE TABLE t_ds_alert (
id int NOT NULL ,
title varchar(64) DEFAULT NULL ,
show_type int DEFAULT NULL ,
content text ,
alert_type int DEFAULT NULL ,
alert_status int DEFAULT '0' ,
log text ,
alertgroup_id int DEFAULT NULL ,
receivers text ,
receivers_cc text ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_alertgroup
--
DROP TABLE IF EXISTS t_ds_alertgroup;
CREATE TABLE t_ds_alertgroup (
id int NOT NULL ,
group_name varchar(255) DEFAULT NULL ,
group_type int DEFAULT NULL ,
description varchar(255) DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_command
--
DROP TABLE IF EXISTS t_ds_command;
CREATE TABLE t_ds_command (
id int NOT NULL ,
command_type int DEFAULT NULL ,
process_definition_id int DEFAULT NULL ,
command_param text ,
task_depend_type int DEFAULT NULL ,
failure_strategy int DEFAULT '0' ,
warning_type int DEFAULT '0' ,
warning_group_id int DEFAULT NULL ,
schedule_time timestamp DEFAULT NULL ,
start_time timestamp DEFAULT NULL ,
executor_id int DEFAULT NULL ,
dependence varchar(255) DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
process_instance_priority int DEFAULT NULL ,
worker_group varchar(64),
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_datasource
--
DROP TABLE IF EXISTS t_ds_datasource;
CREATE TABLE t_ds_datasource (
id int NOT NULL ,
name varchar(64) NOT NULL ,
note varchar(256) DEFAULT NULL ,
type int NOT NULL ,
user_id int NOT NULL ,
connection_params text NOT NULL ,
create_time timestamp NOT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_error_command
--
DROP TABLE IF EXISTS t_ds_error_command;
CREATE TABLE t_ds_error_command (
id int NOT NULL ,
command_type int DEFAULT NULL ,
executor_id int DEFAULT NULL ,
process_definition_id int DEFAULT NULL ,
command_param text ,
task_depend_type int DEFAULT NULL ,
failure_strategy int DEFAULT '0' ,
warning_type int DEFAULT '0' ,
warning_group_id int DEFAULT NULL ,
schedule_time timestamp DEFAULT NULL ,
start_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
dependence text ,
process_instance_priority int DEFAULT NULL ,
worker_group varchar(64),
message text ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_master_server
--
--
-- Table structure for table t_ds_process_definition
--
DROP TABLE IF EXISTS t_ds_process_definition;
CREATE TABLE t_ds_process_definition (
id int NOT NULL ,
name varchar(255) DEFAULT NULL ,
version int DEFAULT NULL ,
release_state int DEFAULT NULL ,
project_id int DEFAULT NULL ,
user_id int DEFAULT NULL ,
process_definition_json text ,
description text ,
global_params text ,
flag int DEFAULT NULL ,
locations text ,
connects text ,
receivers text ,
receivers_cc text ,
create_time timestamp DEFAULT NULL ,
timeout int DEFAULT '0' ,
tenant_id int NOT NULL DEFAULT '-1' ,
update_time timestamp DEFAULT NULL ,
modify_by varchar(36) DEFAULT '' ,
resource_ids varchar(64)
PRIMARY KEY (id)
) ;
create index process_definition_index on t_ds_process_definition (project_id,id);
--
-- Table structure for table t_ds_process_instance
--
DROP TABLE IF EXISTS t_ds_process_instance;
CREATE TABLE t_ds_process_instance (
id int NOT NULL ,
name varchar(255) DEFAULT NULL ,
process_definition_id int DEFAULT NULL ,
state int DEFAULT NULL ,
recovery int DEFAULT NULL ,
start_time timestamp DEFAULT NULL ,
end_time timestamp DEFAULT NULL ,
run_times int DEFAULT NULL ,
host varchar(45) DEFAULT NULL ,
command_type int DEFAULT NULL ,
command_param text ,
task_depend_type int DEFAULT NULL ,
max_try_times int DEFAULT '0' ,
failure_strategy int DEFAULT '0' ,
warning_type int DEFAULT '0' ,
warning_group_id int DEFAULT NULL ,
schedule_time timestamp DEFAULT NULL ,
command_start_time timestamp DEFAULT NULL ,
global_params text ,
process_instance_json text ,
flag int DEFAULT '1' ,
update_time timestamp NULL ,
is_sub_process int DEFAULT '0' ,
executor_id int NOT NULL ,
locations text ,
connects text ,
history_cmd text ,
dependence_schedule_times text ,
process_instance_priority int DEFAULT NULL ,
worker_group varchar(64) ,
timeout int DEFAULT '0' ,
tenant_id int NOT NULL DEFAULT '-1' ,
PRIMARY KEY (id)
) ;
create index process_instance_index on t_ds_process_instance (process_definition_id,id);
create index start_time_index on t_ds_process_instance (start_time);
--
-- Table structure for table t_ds_project
--
DROP TABLE IF EXISTS t_ds_project;
CREATE TABLE t_ds_project (
id int NOT NULL ,
name varchar(100) DEFAULT NULL ,
description varchar(200) DEFAULT NULL ,
user_id int DEFAULT NULL ,
flag int DEFAULT '1' ,
create_time timestamp DEFAULT CURRENT_TIMESTAMP ,
update_time timestamp DEFAULT CURRENT_TIMESTAMP ,
PRIMARY KEY (id)
) ;
create index user_id_index on t_ds_project (user_id);
--
-- Table structure for table t_ds_queue
--
DROP TABLE IF EXISTS t_ds_queue;
CREATE TABLE t_ds_queue (
id int NOT NULL ,
queue_name varchar(64) DEFAULT NULL ,
queue varchar(64) DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_relation_datasource_user
--
DROP TABLE IF EXISTS t_ds_relation_datasource_user;
CREATE TABLE t_ds_relation_datasource_user (
id int NOT NULL ,
user_id int NOT NULL ,
datasource_id int DEFAULT NULL ,
perm int DEFAULT '1' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
;
--
-- Table structure for table t_ds_relation_process_instance
--
DROP TABLE IF EXISTS t_ds_relation_process_instance;
CREATE TABLE t_ds_relation_process_instance (
id int NOT NULL ,
parent_process_instance_id int DEFAULT NULL ,
parent_task_instance_id int DEFAULT NULL ,
process_instance_id int DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_relation_project_user
--
DROP TABLE IF EXISTS t_ds_relation_project_user;
CREATE TABLE t_ds_relation_project_user (
id int NOT NULL ,
user_id int NOT NULL ,
project_id int DEFAULT NULL ,
perm int DEFAULT '1' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
create index relation_project_user_id_index on t_ds_relation_project_user (user_id);
--
-- Table structure for table t_ds_relation_resources_user
--
DROP TABLE IF EXISTS t_ds_relation_resources_user;
CREATE TABLE t_ds_relation_resources_user (
id int NOT NULL ,
user_id int NOT NULL ,
resources_id int DEFAULT NULL ,
perm int DEFAULT '1' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_relation_udfs_user
--
DROP TABLE IF EXISTS t_ds_relation_udfs_user;
CREATE TABLE t_ds_relation_udfs_user (
id int NOT NULL ,
user_id int NOT NULL ,
udf_id int DEFAULT NULL ,
perm int DEFAULT '1' ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
;
--
-- Table structure for table t_ds_relation_user_alertgroup
--
DROP TABLE IF EXISTS t_ds_relation_user_alertgroup;
CREATE TABLE t_ds_relation_user_alertgroup (
id int NOT NULL,
alertgroup_id int DEFAULT NULL,
user_id int DEFAULT NULL,
create_time timestamp DEFAULT NULL,
update_time timestamp DEFAULT NULL,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_resources
--
DROP TABLE IF EXISTS t_ds_resources;
CREATE TABLE t_ds_resources (
id int NOT NULL ,
alias varchar(64) DEFAULT NULL ,
file_name varchar(64) DEFAULT NULL ,
description varchar(256) DEFAULT NULL ,
user_id int DEFAULT NULL ,
type int DEFAULT NULL ,
size bigint DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
pid int,
full_name varchar(64),
is_directory int
PRIMARY KEY (id)
) ;
;
--
-- Table structure for table t_ds_schedules
--
DROP TABLE IF EXISTS t_ds_schedules;
CREATE TABLE t_ds_schedules (
id int NOT NULL ,
process_definition_id int NOT NULL ,
start_time timestamp NOT NULL ,
end_time timestamp NOT NULL ,
crontab varchar(256) NOT NULL ,
failure_strategy int NOT NULL ,
user_id int NOT NULL ,
release_state int NOT NULL ,
warning_type int NOT NULL ,
warning_group_id int DEFAULT NULL ,
process_instance_priority int DEFAULT NULL ,
worker_group varchar(64),
create_time timestamp NOT NULL ,
update_time timestamp NOT NULL ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_session
--
DROP TABLE IF EXISTS t_ds_session;
CREATE TABLE t_ds_session (
id varchar(64) NOT NULL ,
user_id int DEFAULT NULL ,
ip varchar(45) DEFAULT NULL ,
last_login_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_task_instance
--
DROP TABLE IF EXISTS t_ds_task_instance;
CREATE TABLE t_ds_task_instance (
id int NOT NULL ,
name varchar(255) DEFAULT NULL ,
task_type varchar(64) DEFAULT NULL ,
process_definition_id int DEFAULT NULL ,
process_instance_id int DEFAULT NULL ,
task_json text ,
state int DEFAULT NULL ,
submit_time timestamp DEFAULT NULL ,
start_time timestamp DEFAULT NULL ,
end_time timestamp DEFAULT NULL ,
host varchar(45) DEFAULT NULL ,
execute_path varchar(200) DEFAULT NULL ,
log_path varchar(200) DEFAULT NULL ,
alert_flag int DEFAULT NULL ,
retry_times int DEFAULT '0' ,
pid int DEFAULT NULL ,
app_link varchar(255) DEFAULT NULL ,
flag int DEFAULT '1' ,
retry_interval int DEFAULT NULL ,
max_retry_times int DEFAULT NULL ,
task_instance_priority int DEFAULT NULL ,
worker_group varchar(64),
executor_id int DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_tenant
--
DROP TABLE IF EXISTS t_ds_tenant;
CREATE TABLE t_ds_tenant (
id int NOT NULL ,
tenant_code varchar(64) DEFAULT NULL ,
tenant_name varchar(64) DEFAULT NULL ,
description varchar(256) DEFAULT NULL ,
queue_id int DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_udfs
--
DROP TABLE IF EXISTS t_ds_udfs;
CREATE TABLE t_ds_udfs (
id int NOT NULL ,
user_id int NOT NULL ,
func_name varchar(100) NOT NULL ,
class_name varchar(255) NOT NULL ,
type int NOT NULL ,
arg_types varchar(255) DEFAULT NULL ,
database varchar(255) DEFAULT NULL ,
description varchar(255) DEFAULT NULL ,
resource_id int NOT NULL ,
resource_name varchar(255) NOT NULL ,
create_time timestamp NOT NULL ,
update_time timestamp NOT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_user
--
DROP TABLE IF EXISTS t_ds_user;
CREATE TABLE t_ds_user (
id int NOT NULL ,
user_name varchar(64) DEFAULT NULL ,
user_password varchar(64) DEFAULT NULL ,
user_type int DEFAULT NULL ,
email varchar(64) DEFAULT NULL ,
phone varchar(11) DEFAULT NULL ,
tenant_id int DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
queue varchar(64) DEFAULT NULL ,
PRIMARY KEY (id)
);
--
-- Table structure for table t_ds_version
--
DROP TABLE IF EXISTS t_ds_version;
CREATE TABLE t_ds_version (
id int NOT NULL ,
version varchar(200) NOT NULL,
PRIMARY KEY (id)
) ;
create index version_index on t_ds_version(version);
--
-- Table structure for table t_ds_worker_group
--
DROP TABLE IF EXISTS t_ds_worker_group;
CREATE TABLE t_ds_worker_group (
id bigint NOT NULL ,
name varchar(256) DEFAULT NULL ,
ip_list varchar(256) DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
update_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
--
-- Table structure for table t_ds_worker_server
--
DROP TABLE IF EXISTS t_ds_worker_server;
CREATE TABLE t_ds_worker_server (
id int NOT NULL ,
host varchar(45) DEFAULT NULL ,
port int DEFAULT NULL ,
zk_directory varchar(64) DEFAULT NULL ,
res_info varchar(255) DEFAULT NULL ,
create_time timestamp DEFAULT NULL ,
last_heartbeat_time timestamp DEFAULT NULL ,
PRIMARY KEY (id)
) ;
DROP SEQUENCE IF EXISTS t_ds_access_token_id_sequence;
CREATE SEQUENCE t_ds_access_token_id_sequence;
ALTER TABLE t_ds_access_token ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_access_token_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_alert_id_sequence;
CREATE SEQUENCE t_ds_alert_id_sequence;
ALTER TABLE t_ds_alert ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_alert_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_alertgroup_id_sequence;
CREATE SEQUENCE t_ds_alertgroup_id_sequence;
ALTER TABLE t_ds_alertgroup ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_alertgroup_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_command_id_sequence;
CREATE SEQUENCE t_ds_command_id_sequence;
ALTER TABLE t_ds_command ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_command_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_datasource_id_sequence;
CREATE SEQUENCE t_ds_datasource_id_sequence;
ALTER TABLE t_ds_datasource ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_datasource_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_master_server_id_sequence;
CREATE SEQUENCE t_ds_master_server_id_sequence;
ALTER TABLE t_ds_master_server ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_master_server_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_process_definition_id_sequence;
CREATE SEQUENCE t_ds_process_definition_id_sequence;
ALTER TABLE t_ds_process_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_process_instance_id_sequence;
CREATE SEQUENCE t_ds_process_instance_id_sequence;
ALTER TABLE t_ds_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_instance_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_project_id_sequence;
CREATE SEQUENCE t_ds_project_id_sequence;
ALTER TABLE t_ds_project ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_project_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_queue_id_sequence;
CREATE SEQUENCE t_ds_queue_id_sequence;
ALTER TABLE t_ds_queue ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_queue_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_datasource_user_id_sequence;
CREATE SEQUENCE t_ds_relation_datasource_user_id_sequence;
ALTER TABLE t_ds_relation_datasource_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_datasource_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_process_instance_id_sequence;
CREATE SEQUENCE t_ds_relation_process_instance_id_sequence;
ALTER TABLE t_ds_relation_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_process_instance_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_project_user_id_sequence;
CREATE SEQUENCE t_ds_relation_project_user_id_sequence;
ALTER TABLE t_ds_relation_project_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_project_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_resources_user_id_sequence;
CREATE SEQUENCE t_ds_relation_resources_user_id_sequence;
ALTER TABLE t_ds_relation_resources_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_resources_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_udfs_user_id_sequence;
CREATE SEQUENCE t_ds_relation_udfs_user_id_sequence;
ALTER TABLE t_ds_relation_udfs_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_udfs_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_relation_user_alertgroup_id_sequence;
CREATE SEQUENCE t_ds_relation_user_alertgroup_id_sequence;
ALTER TABLE t_ds_relation_user_alertgroup ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_user_alertgroup_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_resources_id_sequence;
CREATE SEQUENCE t_ds_resources_id_sequence;
ALTER TABLE t_ds_resources ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_resources_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_schedules_id_sequence;
CREATE SEQUENCE t_ds_schedules_id_sequence;
ALTER TABLE t_ds_schedules ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_schedules_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_task_instance_id_sequence;
CREATE SEQUENCE t_ds_task_instance_id_sequence;
ALTER TABLE t_ds_task_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_instance_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_tenant_id_sequence;
CREATE SEQUENCE t_ds_tenant_id_sequence;
ALTER TABLE t_ds_tenant ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_tenant_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_udfs_id_sequence;
CREATE SEQUENCE t_ds_udfs_id_sequence;
ALTER TABLE t_ds_udfs ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_udfs_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_user_id_sequence;
CREATE SEQUENCE t_ds_user_id_sequence;
ALTER TABLE t_ds_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_user_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_version_id_sequence;
CREATE SEQUENCE t_ds_version_id_sequence;
ALTER TABLE t_ds_version ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_version_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_worker_group_id_sequence;
CREATE SEQUENCE t_ds_worker_group_id_sequence;
ALTER TABLE t_ds_worker_group ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_worker_group_id_sequence');
DROP SEQUENCE IF EXISTS t_ds_worker_server_id_sequence;
CREATE SEQUENCE t_ds_worker_server_id_sequence;
ALTER TABLE t_ds_worker_server ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_worker_server_id_sequence');
-- Records of t_ds_user?user : admin , password : dolphinscheduler123
INSERT INTO t_ds_user(user_name,user_password,user_type,email,phone,tenant_id,create_time,update_time) VALUES ('admin', '7ad2410b2f4c074479a8937a28a22b8f', '0', 'xxx@qq.com', 'xx', '0', '2018-03-27 15:48:50', '2018-10-24 17:40:22');
-- Records of t_ds_alertgroup,dolphinscheduler warning group
INSERT INTO t_ds_alertgroup(group_name,group_type,description,create_time,update_time) VALUES ('dolphinscheduler warning group', '0', 'dolphinscheduler warning group','2018-11-29 10:20:39', '2018-11-29 10:20:39');
INSERT INTO t_ds_relation_user_alertgroup(alertgroup_id,user_id,create_time,update_time) VALUES ( '1', '1', '2018-11-29 10:22:33', '2018-11-29 10:22:33');
-- Records of t_ds_queue,default queue name : default
INSERT INTO t_ds_queue(queue_name,queue,create_time,update_time) VALUES ('default', 'default','2018-11-29 10:22:33', '2018-11-29 10:22:33');
-- Records of t_ds_queue,default queue name : default
INSERT INTO t_ds_version(version) VALUES ('2.0.0');

93
dockerfile/Dockerfile

@ -1,93 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
FROM nginx:alpine
ARG VERSION
ENV TZ Asia/Shanghai
ENV LANG C.UTF-8
ENV DEBIAN_FRONTEND noninteractive
#1. install dos2unix shadow bash openrc python sudo vim wget iputils net-tools ssh pip tini kazoo.
#If install slowly, you can replcae alpine's mirror with aliyun's mirror, Example:
#RUN sed -i "s/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g" /etc/apk/repositories
RUN apk update && \
apk add dos2unix shadow bash openrc python sudo vim wget iputils net-tools openssh-server py2-pip tini && \
apk add --update procps && \
openrc boot && \
pip install kazoo
#2. install jdk
RUN apk add openjdk8
ENV JAVA_HOME /usr/lib/jvm/java-1.8-openjdk
ENV PATH $JAVA_HOME/bin:$PATH
#3. install zk
RUN cd /opt && \
wget https://downloads.apache.org/zookeeper/zookeeper-3.5.7/apache-zookeeper-3.5.7-bin.tar.gz && \
tar -zxvf apache-zookeeper-3.5.7-bin.tar.gz && \
mv apache-zookeeper-3.5.7-bin zookeeper && \
mkdir -p /tmp/zookeeper && \
rm -rf ./zookeeper-*tar.gz && \
rm -rf /opt/zookeeper/conf/zoo_sample.cfg
ADD ./conf/zookeeper/zoo.cfg /opt/zookeeper/conf
ENV ZK_HOME /opt/zookeeper
ENV PATH $ZK_HOME/bin:$PATH
#4. install pg
RUN apk add postgresql postgresql-contrib
#5. add dolphinscheduler
ADD ./apache-dolphinscheduler-incubating-${VERSION}-SNAPSHOT-dolphinscheduler-bin.tar.gz /opt/
RUN mv /opt/apache-dolphinscheduler-incubating-${VERSION}-SNAPSHOT-dolphinscheduler-bin/ /opt/dolphinscheduler/
ENV DOLPHINSCHEDULER_HOME /opt/dolphinscheduler
#6. modify nginx
RUN echo "daemon off;" >> /etc/nginx/nginx.conf && \
rm -rf /etc/nginx/conf.d/*
ADD ./conf/nginx/dolphinscheduler.conf /etc/nginx/conf.d
#7. add configuration and modify permissions and set soft links
ADD ./checkpoint.sh /root/checkpoint.sh
ADD ./startup-init-conf.sh /root/startup-init-conf.sh
ADD ./startup.sh /root/startup.sh
ADD ./conf/dolphinscheduler/*.tpl /opt/dolphinscheduler/conf/
ADD conf/dolphinscheduler/env/dolphinscheduler_env.sh /opt/dolphinscheduler/conf/env/
RUN chmod +x /root/checkpoint.sh && \
chmod +x /root/startup-init-conf.sh && \
chmod +x /root/startup.sh && \
chmod +x /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh && \
chmod +x /opt/dolphinscheduler/script/*.sh && \
chmod +x /opt/dolphinscheduler/bin/*.sh && \
dos2unix /root/checkpoint.sh && \
dos2unix /root/startup-init-conf.sh && \
dos2unix /root/startup.sh && \
dos2unix /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh && \
dos2unix /opt/dolphinscheduler/script/*.sh && \
dos2unix /opt/dolphinscheduler/bin/*.sh && \
rm -rf /bin/sh && \
ln -s /bin/bash /bin/sh && \
mkdir -p /tmp/xls
#8. remove apk index cache
RUN rm -rf /var/cache/apk/*
#9. expose port
EXPOSE 2181 2888 3888 5432 5678 1234 12345 50051 8888
ENTRYPOINT ["/sbin/tini", "--", "/root/startup.sh"]

328
dockerfile/README.md

@ -1,328 +0,0 @@
## What is Dolphin Scheduler?
Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing.
Github URL: https://github.com/apache/incubator-dolphinscheduler
Official Website: https://dolphinscheduler.apache.org
![Dolphin Scheduler](https://dolphinscheduler.apache.org/img/hlogo_colorful.svg)
[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](README.md)
[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](README_zh_CN.md)
## How to use this docker image
#### You can start a dolphinscheduler instance
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test -e POSTGRESQL_DATABASE=dolphinscheduler \
-p 8888:8888 \
dolphinscheduler all
```
The default postgres user `root`, postgres password `root` and database `dolphinscheduler` are created in the `startup.sh`.
The default zookeeper is created in the `startup.sh`.
#### Or via Environment Variables **`POSTGRESQL_HOST`** **`POSTGRESQL_PORT`** **`POSTGRESQL_DATABASE`** **`ZOOKEEPER_QUORUM`**
You can specify **existing postgres service**. Example:
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-p 8888:8888 \
dolphinscheduler all
```
You can specify **existing zookeeper service**. Example:
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-p 8888:8888 \
dolphinscheduler all
```
#### Or start a standalone dolphinscheduler server
You can start a standalone dolphinscheduler server.
* Start a **master server**, For example:
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler master-server
```
* Start a **worker server**, For example:
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler worker-server
```
* Start a **api server**, For example:
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-p 12345:12345 \
dolphinscheduler api-server
```
* Start a **alert server**, For example:
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler alert-server
```
* Start a **frontend**, For example:
```
$ docker run -dit --name dolphinscheduler \
-e FRONTEND_API_SERVER_HOST="192.168.x.x" -e FRONTEND_API_SERVER_PORT="12345" \
-p 8888:8888 \
dolphinscheduler frontend
```
**Note**: You must be specify `POSTGRESQL_HOST` `POSTGRESQL_PORT` `POSTGRESQL_DATABASE` `POSTGRESQL_USERNAME` `POSTGRESQL_PASSWORD` `ZOOKEEPER_QUORUM` when start a standalone dolphinscheduler server.
## How to build a docker image
You can build a docker image in A Unix-like operating system, You can also build it in Windows operating system.
In Unix-Like, Example:
```bash
$ cd path/incubator-dolphinscheduler
$ sh ./dockerfile/hooks/build
```
In Windows, Example:
```bat
c:\incubator-dolphinscheduler>.\dockerfile\hooks\build.bat
```
Please read `./dockerfile/hooks/build` `./dockerfile/hooks/build.bat` script files if you don't understand
## Environment Variables
The Dolphin Scheduler image uses several environment variables which are easy to miss. While none of the variables are required, they may significantly aid you in using the image.
**`POSTGRESQL_HOST`**
This environment variable sets the host for PostgreSQL. The default value is `127.0.0.1`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`.
**`POSTGRESQL_PORT`**
This environment variable sets the port for PostgreSQL. The default value is `5432`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`.
**`POSTGRESQL_USERNAME`**
This environment variable sets the username for PostgreSQL. The default value is `root`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`.
**`POSTGRESQL_PASSWORD`**
This environment variable sets the password for PostgreSQL. The default value is `root`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`.
**`POSTGRESQL_DATABASE`**
This environment variable sets the database for PostgreSQL. The default value is `dolphinscheduler`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DOLPHINSCHEDULER_ENV_PATH`**
This environment variable sets the runtime environment for task. The default value is `/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh`.
**`DOLPHINSCHEDULER_DATA_BASEDIR_PATH`**
User data directory path, self configuration, please make sure the directory exists and have read write permissions. The default value is `/tmp/dolphinscheduler`
**`ZOOKEEPER_QUORUM`**
This environment variable sets zookeeper quorum for `master-server` and `worker-serverr`. The default value is `127.0.0.1:2181`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`.
**`MASTER_EXEC_THREADS`**
This environment variable sets exec thread num for `master-server`. The default value is `100`.
**`MASTER_EXEC_TASK_NUM`**
This environment variable sets exec task num for `master-server`. The default value is `20`.
**`MASTER_HEARTBEAT_INTERVAL`**
This environment variable sets heartbeat interval for `master-server`. The default value is `10`.
**`MASTER_TASK_COMMIT_RETRYTIMES`**
This environment variable sets task commit retry times for `master-server`. The default value is `5`.
**`MASTER_TASK_COMMIT_INTERVAL`**
This environment variable sets task commit interval for `master-server`. The default value is `1000`.
**`MASTER_MAX_CPULOAD_AVG`**
This environment variable sets max cpu load avg for `master-server`. The default value is `100`.
**`MASTER_RESERVED_MEMORY`**
This environment variable sets reserved memory for `master-server`. The default value is `0.1`.
**`MASTER_LISTEN_PORT`**
This environment variable sets port for `master-server`. The default value is `5678`.
**`WORKER_EXEC_THREADS`**
This environment variable sets exec thread num for `worker-server`. The default value is `100`.
**`WORKER_HEARTBEAT_INTERVAL`**
This environment variable sets heartbeat interval for `worker-server`. The default value is `10`.
**`WORKER_FETCH_TASK_NUM`**
This environment variable sets fetch task num for `worker-server`. The default value is `3`.
**`WORKER_MAX_CPULOAD_AVG`**
This environment variable sets max cpu load avg for `worker-server`. The default value is `100`.
**`WORKER_RESERVED_MEMORY`**
This environment variable sets reserved memory for `worker-server`. The default value is `0.1`.
**`WORKER_LISTEN_PORT`**
This environment variable sets port for `worker-server`. The default value is `1234`.
**`WORKER_GROUP`**
This environment variable sets group for `worker-server`. The default value is `default`.
**`XLS_FILE_PATH`**
This environment variable sets xls file path for `alert-server`. The default value is `/tmp/xls`.
**`MAIL_SERVER_HOST`**
This environment variable sets mail server host for `alert-server`. The default value is empty.
**`MAIL_SERVER_PORT`**
This environment variable sets mail server port for `alert-server`. The default value is empty.
**`MAIL_SENDER`**
This environment variable sets mail sender for `alert-server`. The default value is empty.
**`MAIL_USER=`**
This environment variable sets mail user for `alert-server`. The default value is empty.
**`MAIL_PASSWD`**
This environment variable sets mail password for `alert-server`. The default value is empty.
**`MAIL_SMTP_STARTTLS_ENABLE`**
This environment variable sets SMTP tls for `alert-server`. The default value is `true`.
**`MAIL_SMTP_SSL_ENABLE`**
This environment variable sets SMTP ssl for `alert-server`. The default value is `false`.
**`MAIL_SMTP_SSL_TRUST`**
This environment variable sets SMTP ssl truest for `alert-server`. The default value is empty.
**`ENTERPRISE_WECHAT_ENABLE`**
This environment variable sets enterprise wechat enable for `alert-server`. The default value is `false`.
**`ENTERPRISE_WECHAT_CORP_ID`**
This environment variable sets enterprise wechat corp id for `alert-server`. The default value is empty.
**`ENTERPRISE_WECHAT_SECRET`**
This environment variable sets enterprise wechat secret for `alert-server`. The default value is empty.
**`ENTERPRISE_WECHAT_AGENT_ID`**
This environment variable sets enterprise wechat agent id for `alert-server`. The default value is empty.
**`ENTERPRISE_WECHAT_USERS`**
This environment variable sets enterprise wechat users for `alert-server`. The default value is empty.
**`FRONTEND_API_SERVER_HOST`**
This environment variable sets api server host for `frontend`. The default value is `127.0.0.1`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `api-server`.
**`FRONTEND_API_SERVER_PORT`**
This environment variable sets api server port for `frontend`. The default value is `123451`.
**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `api-server`.
## Initialization scripts
If you would like to do additional initialization in an image derived from this one, add one or more environment variable under `/root/start-init-conf.sh`, and modify template files in `/opt/dolphinscheduler/conf/*.tpl`.
For example, to add an environment variable `API_SERVER_PORT` in `/root/start-init-conf.sh`:
```
export API_SERVER_PORT=5555
```
and to modify `/opt/dolphinscheduler/conf/application-api.properties.tpl` template file, add server port:
```
server.port=${API_SERVER_PORT}
```
`/root/start-init-conf.sh` will dynamically generate config file:
```sh
echo "generate app config"
ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do
eval "cat << EOF
$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line})
EOF
" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*}
done
echo "generate nginx config"
sed -i "s/FRONTEND_API_SERVER_HOST/${FRONTEND_API_SERVER_HOST}/g" /etc/nginx/conf.d/dolphinscheduler.conf
sed -i "s/FRONTEND_API_SERVER_PORT/${FRONTEND_API_SERVER_PORT}/g" /etc/nginx/conf.d/dolphinscheduler.conf
```

328
dockerfile/README_zh_CN.md

@ -1,328 +0,0 @@
## Dolphin Scheduler是什么?
一个分布式易扩展的可视化DAG工作流任务调度系统。致力于解决数据处理流程中错综复杂的依赖关系,使调度系统在数据处理流程中`开箱即用`。
Github URL: https://github.com/apache/incubator-dolphinscheduler
Official Website: https://dolphinscheduler.apache.org
![Dolphin Scheduler](https://dolphinscheduler.apache.org/img/hlogo_colorful.svg)
[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](README.md)
[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](README_zh_CN.md)
## 如何使用docker镜像
#### 你可以运行一个dolphinscheduler实例
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test -e POSTGRESQL_DATABASE=dolphinscheduler \
-p 8888:8888 \
dolphinscheduler all
```
在`startup.sh`脚本中,默认的创建`Postgres`的用户、密码和数据库,默认值分别为:`root`、`root`、`dolphinscheduler`。
同时,默认的`Zookeeper`也会在`startup.sh`脚本中被创建。
#### 或者通过环境变量 **`POSTGRESQL_HOST`** **`POSTGRESQL_PORT`** **`ZOOKEEPER_QUORUM`** 使用已存在的服务
你可以指定一个已经存在的 **`Postgres`** 服务. 如下:
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-p 8888:8888 \
dolphinscheduler all
```
你也可以指定一个已经存在的 **Zookeeper** 服务. 如下:
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-p 8888:8888 \
dolphinscheduler all
```
#### 或者运行dolphinscheduler中的部分服务
你能够运行dolphinscheduler中的部分服务。
* 启动一个 **master server**, 如下:
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler master-server
```
* 启动一个 **worker server**, 如下:
```
$ docker run -dit --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="l92.168.x.x:2181"
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler worker-server
```
* 启动一个 **api server**, 如下:
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
-p 12345:12345 \
dolphinscheduler api-server
```
* 启动一个 **alert server**, 如下:
```
$ docker run -dit --name dolphinscheduler \
-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" -e POSTGRESQL_DATABASE="dolphinscheduler" \
-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \
dolphinscheduler alert-server
```
* 启动一个 **frontend**, 如下:
```
$ docker run -dit --name dolphinscheduler \
-e FRONTEND_API_SERVER_HOST="192.168.x.x" -e FRONTEND_API_SERVER_PORT="12345" \
-p 8888:8888 \
dolphinscheduler frontend
```
**注意**: 当你运行dolphinscheduler中的部分服务时,你必须指定这些环境变量 `POSTGRESQL_HOST` `POSTGRESQL_PORT` `POSTGRESQL_DATABASE` `POSTGRESQL_USERNAME` `POSTGRESQL_PASSWORD` `ZOOKEEPER_QUORUM`
## 如何构建一个docker镜像
你能够在类Unix系统和Windows系统中构建一个docker镜像。
类Unix系统, 如下:
```bash
$ cd path/incubator-dolphinscheduler
$ sh ./dockerfile/hooks/build
```
Windows系统, 如下:
```bat
c:\incubator-dolphinscheduler>.\dockerfile\hooks\build.bat
```
如果你不理解这些脚本 `./dockerfile/hooks/build` `./dockerfile/hooks/build.bat`,请阅读里面的内容。
## 环境变量
Dolphin Scheduler映像使用了几个容易遗漏的环境变量。虽然这些变量不是必须的,但是可以帮助你更容易配置镜像并根据你的需求定义相应的服务配置。
**`POSTGRESQL_HOST`**
配置`PostgreSQL`的`HOST`, 默认值 `127.0.0.1`
**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。
**`POSTGRESQL_PORT`**
配置`PostgreSQL`的`PORT`, 默认值 `5432`
**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。
**`POSTGRESQL_USERNAME`**
配置`PostgreSQL`的`USERNAME`, 默认值 `root`
**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。
**`POSTGRESQL_PASSWORD`**
配置`PostgreSQL`的`PASSWORD`, 默认值 `root`
**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。
**`POSTGRESQL_DATABASE`**
配置`PostgreSQL`的`DATABASE`, 默认值 `dolphinscheduler`
**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。
**`DOLPHINSCHEDULER_ENV_PATH`**
任务执行时的环境变量配置文件, 默认值 `/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh`
**`DOLPHINSCHEDULER_DATA_BASEDIR_PATH`**
用户数据目录, 用户自己配置, 请确保这个目录存在并且用户读写权限, 默认值 `/tmp/dolphinscheduler`
**`ZOOKEEPER_QUORUM`**
配置`master-server`和`worker-serverr`的`Zookeeper`地址, 默认值 `127.0.0.1:2181`
**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。
**`MASTER_EXEC_THREADS`**
配置`master-server`中的执行线程数量,默认值 `100`
**`MASTER_EXEC_TASK_NUM`**
配置`master-server`中的执行任务数量,默认值 `20`
**`MASTER_HEARTBEAT_INTERVAL`**
配置`master-server`中的心跳交互时间,默认值 `10`
**`MASTER_TASK_COMMIT_RETRYTIMES`**
配置`master-server`中的任务提交重试次数,默认值 `5`
**`MASTER_TASK_COMMIT_INTERVAL`**
配置`master-server`中的任务提交交互时间,默认值 `1000`
**`MASTER_MAX_CPULOAD_AVG`**
配置`master-server`中的CPU中的`load average`值,默认值 `100`
**`MASTER_RESERVED_MEMORY`**
配置`master-server`的保留内存,默认值 `0.1`
**`MASTER_LISTEN_PORT`**
配置`master-server`的端口,默认值 `5678`
**`WORKER_EXEC_THREADS`**
配置`worker-server`中的执行线程数量,默认值 `100`
**`WORKER_HEARTBEAT_INTERVAL`**
配置`worker-server`中的心跳交互时间,默认值 `10`
**`WORKER_FETCH_TASK_NUM`**
配置`worker-server`中的获取任务的数量,默认值 `3`
**`WORKER_MAX_CPULOAD_AVG`**
配置`worker-server`中的CPU中的最大`load average`值,默认值 `100`
**`WORKER_RESERVED_MEMORY`**
配置`worker-server`的保留内存,默认值 `0.1`
**`WORKER_LISTEN_PORT`**
配置`worker-server`的端口,默认值 `1234`
**`WORKER_GROUP`**
配置`worker-server`的分组,默认值 `default`
**`XLS_FILE_PATH`**
配置`alert-server`的`XLS`文件的存储路径,默认值 `/tmp/xls`
**`MAIL_SERVER_HOST`**
配置`alert-server`的邮件服务地址,默认值 `空`
**`MAIL_SERVER_PORT`**
配置`alert-server`的邮件服务端口,默认值 `空`
**`MAIL_SENDER`**
配置`alert-server`的邮件发送人,默认值 `空`
**`MAIL_USER=`**
配置`alert-server`的邮件服务用户名,默认值 `空`
**`MAIL_PASSWD`**
配置`alert-server`的邮件服务用户密码,默认值 `空`
**`MAIL_SMTP_STARTTLS_ENABLE`**
配置`alert-server`的邮件服务是否启用TLS,默认值 `true`
**`MAIL_SMTP_SSL_ENABLE`**
配置`alert-server`的邮件服务是否启用SSL,默认值 `false`
**`MAIL_SMTP_SSL_TRUST`**
配置`alert-server`的邮件服务SSL的信任地址,默认值 `空`
**`ENTERPRISE_WECHAT_ENABLE`**
配置`alert-server`的邮件服务是否启用企业微信,默认值 `false`
**`ENTERPRISE_WECHAT_CORP_ID`**
配置`alert-server`的邮件服务企业微信`ID`,默认值 `空`
**`ENTERPRISE_WECHAT_SECRET`**
配置`alert-server`的邮件服务企业微信`SECRET`,默认值 `空`
**`ENTERPRISE_WECHAT_AGENT_ID`**
配置`alert-server`的邮件服务企业微信`AGENT_ID`,默认值 `空`
**`ENTERPRISE_WECHAT_USERS`**
配置`alert-server`的邮件服务企业微信`USERS`,默认值 `空`
**`FRONTEND_API_SERVER_HOST`**
配置`frontend`的连接`api-server`的地址,默认值 `127.0.0.1`
**Note**: 当单独运行`api-server`时,你应该指定`api-server`这个值。
**`FRONTEND_API_SERVER_PORT`**
配置`frontend`的连接`api-server`的端口,默认值 `12345`
**Note**: 当单独运行`api-server`时,你应该指定`api-server`这个值。
## 初始化脚本
如果你想在编译的时候或者运行的时候附加一些其它的操作及新增一些环境变量,你可以在`/root/start-init-conf.sh`文件中进行修改,同时如果涉及到配置文件的修改,请在`/opt/dolphinscheduler/conf/*.tpl`中修改相应的配置文件
例如,在`/root/start-init-conf.sh`添加一个环境变量`API_SERVER_PORT`:
```
export API_SERVER_PORT=5555
```
当添加以上环境变量后,你应该在相应的模板文件`/opt/dolphinscheduler/conf/application-api.properties.tpl`中添加这个环境变量配置:
```
server.port=${API_SERVER_PORT}
```
`/root/start-init-conf.sh`将根据模板文件动态的生成配置文件:
```sh
echo "generate app config"
ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do
eval "cat << EOF
$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line})
EOF
" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*}
done
echo "generate nginx config"
sed -i "s/FRONTEND_API_SERVER_HOST/${FRONTEND_API_SERVER_HOST}/g" /etc/nginx/conf.d/dolphinscheduler.conf
sed -i "s/FRONTEND_API_SERVER_PORT/${FRONTEND_API_SERVER_PORT}/g" /etc/nginx/conf.d/dolphinscheduler.conf
```

27
dockerfile/checkpoint.sh

@ -1,27 +0,0 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
if [ "$(ps -ef | grep java | grep -c $1)" -eq 0 ]; then
echo "[ERROR] $1 process not exits."
exit 1
else
echo "[INFO] $1 process exits."
exit 0
fi

50
dockerfile/conf/dolphinscheduler/alert.properties.tpl

@ -1,50 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#alert type is EMAIL/SMS
alert.type=EMAIL
# alter msg template, default is html template
#alert.template=html
# mail server configuration
mail.protocol=SMTP
mail.server.host=${MAIL_SERVER_HOST}
mail.server.port=${MAIL_SERVER_PORT}
mail.sender=${MAIL_SENDER}
mail.user=${MAIL_USER}
mail.passwd=${MAIL_PASSWD}
# TLS
mail.smtp.starttls.enable=${MAIL_SMTP_STARTTLS_ENABLE}
# SSL
mail.smtp.ssl.enable=${MAIL_SMTP_SSL_ENABLE}
mail.smtp.ssl.trust=${MAIL_SMTP_SSL_TRUST}
#xls file path,need create if not exist
xls.file.path=${XLS_FILE_PATH}
# Enterprise WeChat configuration
enterprise.wechat.enable=${ENTERPRISE_WECHAT_ENABLE}
enterprise.wechat.corp.id=${ENTERPRISE_WECHAT_CORP_ID}
enterprise.wechat.secret=${ENTERPRISE_WECHAT_SECRET}
enterprise.wechat.agent.id=${ENTERPRISE_WECHAT_AGENT_ID}
enterprise.wechat.users=${ENTERPRISE_WECHAT_USERS}
enterprise.wechat.token.url=https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret
enterprise.wechat.push.url=https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token
enterprise.wechat.team.send.msg={\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"}
enterprise.wechat.user.send.msg={\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}}

45
dockerfile/conf/dolphinscheduler/application-api.properties.tpl

@ -1,45 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# server port
server.port=12345
# session config
server.servlet.session.timeout=7200
# servlet config
server.servlet.context-path=/dolphinscheduler/
# file size limit for upload
spring.servlet.multipart.max-file-size=1024MB
spring.servlet.multipart.max-request-size=1024MB
# post content
server.jetty.max-http-post-size=5000000
# i18n
spring.messages.encoding=UTF-8
#i18n classpath folder , file prefix messages, if have many files, use "," seperator
spring.messages.basename=i18n/messages
# Authentication types (supported types: PASSWORD)
security.authentication.type=PASSWORD

78
dockerfile/conf/dolphinscheduler/common.properties.tpl

@ -1,78 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#============================================================================
# System
#============================================================================
# system env path. self configuration, please make sure the directory and file exists and have read write execute permissions
dolphinscheduler.env.path=${DOLPHINSCHEDULER_ENV_PATH}
# user data directory path, self configuration, please make sure the directory exists and have read write permissions
data.basedir.path=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH}
# resource upload startup type : HDFS,S3,NONE
resource.storage.type=NONE
#============================================================================
# HDFS
#============================================================================
# resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended
#resource.upload.path=/dolphinscheduler
# whether kerberos starts
#hadoop.security.authentication.startup.state=false
# java.security.krb5.conf path
#java.security.krb5.conf.path=/opt/krb5.conf
# loginUserFromKeytab user
#login.user.keytab.username=hdfs-mycluster@ESZ.COM
# loginUserFromKeytab path
#login.user.keytab.path=/opt/hdfs.headless.keytab
#resource.view.suffixs
#resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties
# if resource.storage.type=HDFS, the user need to have permission to create directories under the HDFS root path
hdfs.root.user=hdfs
# kerberos expire time
kerberos.expire.time=7
#============================================================================
# S3
#============================================================================
# if resource.storage.type=S3,the value like: s3a://dolphinscheduler ; if resource.storage.type=HDFS, When namenode HA is enabled, you need to copy core-site.xml and hdfs-site.xml to conf dir
fs.defaultFS=hdfs://mycluster:8020
# if resource.storage.type=S3,s3 endpoint
#fs.s3a.endpoint=http://192.168.199.91:9010
# if resource.storage.type=S3,s3 access key
#fs.s3a.access.key=A3DXS30FO22544RE
# if resource.storage.type=S3,s3 secret key
#fs.s3a.secret.key=OloCLq3n+8+sdPHUhJ21XrSxTC+JK
# if not use hadoop resourcemanager, please keep default value; if resourcemanager HA enable, please type the HA ips ; if resourcemanager is single, make this value empty TODO
yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx
# If resourcemanager HA enable or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ark1 to actual resourcemanager hostname.
yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s

71
dockerfile/conf/dolphinscheduler/datasource.properties.tpl

@ -1,71 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# mysql
#spring.datasource.driver-class-name=com.mysql.jdbc.Driver
#spring.datasource.url=jdbc:mysql://192.168.xx.xx:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8
# postgre
spring.datasource.driver-class-name=org.postgresql.Driver
spring.datasource.url=jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/${POSTGRESQL_DATABASE}?characterEncoding=utf8
spring.datasource.username=${POSTGRESQL_USERNAME}
spring.datasource.password=${POSTGRESQL_PASSWORD}
## base spring data source configuration todo need to remove
#spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
# connection configuration
#spring.datasource.initialSize=5
# min connection number
#spring.datasource.minIdle=5
# max connection number
#spring.datasource.maxActive=50
# max wait time for get a connection in milliseconds. if configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases.
# If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true.
#spring.datasource.maxWait=60000
# milliseconds for check to close free connections
#spring.datasource.timeBetweenEvictionRunsMillis=60000
# the Destroy thread detects the connection interval and closes the physical connection in milliseconds if the connection idle time is greater than or equal to minEvictableIdleTimeMillis.
#spring.datasource.timeBetweenConnectErrorMillis=60000
# the longest time a connection remains idle without being evicted, in milliseconds
#spring.datasource.minEvictableIdleTimeMillis=300000
#the SQL used to check whether the connection is valid requires a query statement. If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work.
#spring.datasource.validationQuery=SELECT 1
#check whether the connection is valid for timeout, in seconds
#spring.datasource.validationQueryTimeout=3
# when applying for a connection, if it is detected that the connection is idle longer than time Between Eviction Runs Millis,
# validation Query is performed to check whether the connection is valid
#spring.datasource.testWhileIdle=true
#execute validation to check if the connection is valid when applying for a connection
#spring.datasource.testOnBorrow=true
#execute validation to check if the connection is valid when the connection is returned
#spring.datasource.testOnReturn=false
#spring.datasource.defaultAutoCommit=true
#spring.datasource.keepAlive=true
# open PSCache, specify count PSCache for every connection
#spring.datasource.poolPreparedStatements=true
#spring.datasource.maxPoolPreparedStatementPerConnectionSize=20

26
dockerfile/conf/dolphinscheduler/env/dolphinscheduler_env.sh vendored

@ -1,26 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
export HADOOP_HOME=/opt/soft/hadoop
export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop
export SPARK_HOME1=/opt/soft/spark1
export SPARK_HOME2=/opt/soft/spark2
export PYTHON_HOME=/opt/soft/python
export JAVA_HOME=/opt/soft/java
export HIVE_HOME=/opt/soft/hive
export FLINK_HOME=/opt/soft/flink
export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$PATH

40
dockerfile/conf/dolphinscheduler/master.properties.tpl

@ -1,40 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# master execute thread num
master.exec.threads=${MASTER_EXEC_THREADS}
# master execute task number in parallel
master.exec.task.num=${MASTER_EXEC_TASK_NUM}
# master heartbeat interval
master.heartbeat.interval=${MASTER_HEARTBEAT_INTERVAL}
# master commit task retry times
master.task.commit.retryTimes=${MASTER_TASK_COMMIT_RETRYTIMES}
# master commit task interval
master.task.commit.interval=${MASTER_TASK_COMMIT_INTERVAL}
# only less than cpu avg load, master server can work. default value : the number of cpu cores * 2
master.max.cpuload.avg=${MASTER_MAX_CPULOAD_AVG}
# only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G.
master.reserved.memory=${MASTER_RESERVED_MEMORY}
# master listen port
#master.listen.port=${MASTER_LISTEN_PORT}

54
dockerfile/conf/dolphinscheduler/quartz.properties.tpl

@ -1,54 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#============================================================================
# Configure Main Scheduler Properties
#============================================================================
#org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.StdJDBCDelegate
#org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate
#org.quartz.scheduler.instanceName = DolphinScheduler
#org.quartz.scheduler.instanceId = AUTO
#org.quartz.scheduler.makeSchedulerThreadDaemon = true
#org.quartz.jobStore.useProperties = false
#============================================================================
# Configure ThreadPool
#============================================================================
#org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool
#org.quartz.threadPool.makeThreadsDaemons = true
#org.quartz.threadPool.threadCount = 25
#org.quartz.threadPool.threadPriority = 5
#============================================================================
# Configure JobStore
#============================================================================
#org.quartz.jobStore.class = org.quartz.impl.jdbcjobstore.JobStoreTX
#org.quartz.jobStore.tablePrefix = QRTZ_
#org.quartz.jobStore.isClustered = true
#org.quartz.jobStore.misfireThreshold = 60000
#org.quartz.jobStore.clusterCheckinInterval = 5000
#org.quartz.jobStore.acquireTriggersWithinLock=true
#org.quartz.jobStore.dataSource = myDs
#============================================================================
# Configure Datasources
#============================================================================
#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider

37
dockerfile/conf/dolphinscheduler/worker.properties.tpl

@ -1,37 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# worker execute thread num
worker.exec.threads=${WORKER_EXEC_THREADS}
# worker heartbeat interval
worker.heartbeat.interval=${WORKER_HEARTBEAT_INTERVAL}
# submit the number of tasks at a time
worker.fetch.task.num=${WORKER_FETCH_TASK_NUM}
# only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2
worker.max.cpuload.avg=${WORKER_MAX_CPULOAD_AVG}
# only larger than reserved memory, worker server can work. default value : physical memory * 1/6, unit is G.
worker.reserved.memory=${WORKER_RESERVED_MEMORY}
# worker listener port
#worker.listen.port=${WORKER_LISTEN_PORT}
# default worker group
#worker.group=${WORKER_GROUP}

29
dockerfile/conf/dolphinscheduler/zookeeper.properties.tpl

@ -1,29 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# zookeeper cluster. multiple are separated by commas. eg. 192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181
zookeeper.quorum=${ZOOKEEPER_QUORUM}
# dolphinscheduler root directory
#zookeeper.dolphinscheduler.root=/dolphinscheduler
# dolphinscheduler failover directory
#zookeeper.session.timeout=300
#zookeeper.connection.timeout=300
#zookeeper.retry.base.sleep=100
#zookeeper.retry.max.sleep=30000
#zookeeper.retry.maxtime=5

48
dockerfile/conf/nginx/dolphinscheduler.conf

@ -1,48 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
server {
listen 8888;
server_name localhost;
#charset koi8-r;
#access_log /var/log/nginx/host.access.log main;
location / {
root /opt/dolphinscheduler/ui;
index index.html index.html;
}
location /dolphinscheduler {
proxy_pass http://FRONTEND_API_SERVER_HOST:FRONTEND_API_SERVER_PORT;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header x_real_ipP $remote_addr;
proxy_set_header remote_addr $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_http_version 1.1;
proxy_connect_timeout 300s;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
}
#error_page 404 /404.html;
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}
}

45
dockerfile/conf/zookeeper/zoo.cfg

@ -1,45 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# The number of milliseconds of each tick
tickTime=2000
# The number of ticks that the initial
# synchronization phase can take
initLimit=10
# The number of ticks that can pass between
# sending a request and getting an acknowledgement
syncLimit=5
# the directory where the snapshot is stored.
# do not use /tmp for storage, /tmp here is just
# example sakes.
dataDir=/tmp/zookeeper
# the port at which the clients will connect
clientPort=2181
# the maximum number of client connections.
# increase this if you need to handle more clients
#maxClientCnxns=60
#
# Be sure to read the maintenance section of the
# administrator guide before turning on autopurge.
#
# http://zookeeper.apache.org/doc/current/zookeeperAdmin.html#sc_maintenance
#
# The number of snapshots to retain in dataDir
#autopurge.snapRetainCount=3
# Purge task interval in hours
# Set to "0" to disable auto purge feature
#autopurge.purgeInterval=1

53
dockerfile/hooks/build

@ -1,53 +0,0 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
echo "------ dolphinscheduler start - build -------"
printenv
if [ -z "${VERSION}" ]
then
echo "set default environment variable [VERSION]"
VERSION=$(cat $(pwd)/sql/soft_version)
fi
if [ "${DOCKER_REPO}x" = "x" ]
then
echo "set default environment variable [DOCKER_REPO]"
DOCKER_REPO='dolphinscheduler'
fi
echo "Version: $VERSION"
echo "Repo: $DOCKER_REPO"
echo -e "Current Directory is $(pwd)\n"
# maven package(Project Directory)
echo -e "mvn -B clean compile package -Prelease -Dmaven.test.skip=true"
mvn -B clean compile package -Prelease -Dmaven.test.skip=true
# mv dolphinscheduler-bin.tar.gz file to dockerfile directory
echo -e "mv $(pwd)/dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${VERSION}-SNAPSHOT-dolphinscheduler-bin.tar.gz $(pwd)/dockerfile/\n"
mv $(pwd)/dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${VERSION}-SNAPSHOT-dolphinscheduler-bin.tar.gz $(pwd)/dockerfile/
# docker build
echo -e "docker build --build-arg VERSION=${VERSION} -t $DOCKER_REPO:${VERSION} $(pwd)/dockerfile/\n"
docker build --build-arg VERSION=${VERSION} -t $DOCKER_REPO:${VERSION} $(pwd)/dockerfile/
echo "------ dolphinscheduler end - build -------"

56
dockerfile/hooks/build.bat

@ -1,56 +0,0 @@
:: Licensed to the Apache Software Foundation (ASF) under one or more
:: contributor license agreements. See the NOTICE file distributed with
:: this work for additional information regarding copyright ownership.
:: The ASF licenses this file to You under the Apache License, Version 2.0
:: (the "License"); you may not use this file except in compliance with
:: the License. You may obtain a copy of the License at
::
:: http://www.apache.org/licenses/LICENSE-2.0
::
:: Unless required by applicable law or agreed to in writing, software
:: distributed under the License is distributed on an "AS IS" BASIS,
:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
:: See the License for the specific language governing permissions and
:: limitations under the License.
::
@echo off
echo "------ dolphinscheduler start - build -------"
set
if not defined VERSION (
echo "set environment variable [VERSION]"
for /f %%l in (%cd%\sql\soft_version) do (set VERSION=%%l)
)
if not defined DOCKER_REPO (
echo "set environment variable [DOCKER_REPO]"
set DOCKER_REPO='dolphinscheduler'
)
echo "Version: %VERSION%"
echo "Repo: %DOCKER_REPO%"
echo "Current Directory is %cd%"
:: maven package(Project Directory)
echo "call mvn clean compile package -Prelease"
call mvn clean compile package -Prelease -DskipTests=true
if "%errorlevel%"=="1" goto :mvnFailed
:: move dolphinscheduler-bin.tar.gz file to dockerfile directory
echo "move %cd%\dolphinscheduler-dist\target\apache-dolphinscheduler-incubating-%VERSION%-SNAPSHOT-dolphinscheduler-bin.tar.gz %cd%\dockerfile\"
move %cd%\dolphinscheduler-dist\target\apache-dolphinscheduler-incubating-%VERSION%-SNAPSHOT-dolphinscheduler-bin.tar.gz %cd%\dockerfile\
:: docker build
echo "docker build --build-arg VERSION=%VERSION% -t %DOCKER_REPO%:%VERSION% %cd%\dockerfile\"
docker build --build-arg VERSION=%VERSION% -t %DOCKER_REPO%:%VERSION% %cd%\dockerfile\
if "%errorlevel%"=="1" goto :dockerBuildFailed
echo "------ dolphinscheduler end - build -------"
:mvnFailed
echo "MAVEN PACKAGE FAILED!"
:dockerBuildFailed
echo "DOCKER BUILD FAILED!"

35
dockerfile/hooks/check

@ -1,35 +0,0 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
echo "------ dolphinscheduler check - server - status -------"
sleep 60
server_num=$(docker top `docker container list | grep '/sbin/tini' | awk '{print $1}'`| grep java | grep "dolphinscheduler" | awk -F 'classpath ' '{print $2}' | awk '{print $2}' | sort | uniq -c | wc -l)
if [ $server_num -eq 5 ]
then
echo "Server all start successfully"
else
echo "Server start failed "$server_num
exit 1
fi
ready=`curl http://127.0.0.1:8888/dolphinscheduler/login -d 'userName=admin&userPassword=dolphinscheduler123' -v | grep "login success" | wc -l`
if [ $ready -eq 1 ]
then
echo "Servers is ready"
else
echo "Servers is not ready"
exit 1
fi

24
dockerfile/hooks/push

@ -1,24 +0,0 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
echo "------ push start -------"
printenv
docker push $DOCKER_REPO:${VERSION}
echo "------ push end -------"

23
dockerfile/hooks/push.bat

@ -1,23 +0,0 @@
:: Licensed to the Apache Software Foundation (ASF) under one or more
:: contributor license agreements. See the NOTICE file distributed with
:: this work for additional information regarding copyright ownership.
:: The ASF licenses this file to You under the Apache License, Version 2.0
:: (the "License"); you may not use this file except in compliance with
:: the License. You may obtain a copy of the License at
::
:: http://www.apache.org/licenses/LICENSE-2.0
::
:: Unless required by applicable law or agreed to in writing, software
:: distributed under the License is distributed on an "AS IS" BASIS,
:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
:: See the License for the specific language governing permissions and
:: limitations under the License.
::
@echo off
echo "------ push start -------"
set
docker push %DOCKER_REPO%:%VERSION%
echo "------ push end -------"

104
dockerfile/startup-init-conf.sh

@ -1,104 +0,0 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
echo "init env variables"
# Define parameters default value.
#============================================================================
# Database Source
#============================================================================
export POSTGRESQL_HOST=${POSTGRESQL_HOST:-"127.0.0.1"}
export POSTGRESQL_PORT=${POSTGRESQL_PORT:-"5432"}
export POSTGRESQL_USERNAME=${POSTGRESQL_USERNAME:-"root"}
export POSTGRESQL_PASSWORD=${POSTGRESQL_PASSWORD:-"root"}
export POSTGRESQL_DATABASE=${POSTGRESQL_DATABASE:-"dolphinscheduler"}
#============================================================================
# System
#============================================================================
export DOLPHINSCHEDULER_ENV_PATH=${DOLPHINSCHEDULER_ENV_PATH:-"/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh"}
export DOLPHINSCHEDULER_DATA_BASEDIR_PATH=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH:-"/tmp/dolphinscheduler"}
#============================================================================
# Zookeeper
#============================================================================
export ZOOKEEPER_QUORUM=${ZOOKEEPER_QUORUM:-"127.0.0.1:2181"}
#============================================================================
# Master Server
#============================================================================
export MASTER_EXEC_THREADS=${MASTER_EXEC_THREADS:-"100"}
export MASTER_EXEC_TASK_NUM=${MASTER_EXEC_TASK_NUM:-"20"}
export MASTER_HEARTBEAT_INTERVAL=${MASTER_HEARTBEAT_INTERVAL:-"10"}
export MASTER_TASK_COMMIT_RETRYTIMES=${MASTER_TASK_COMMIT_RETRYTIMES:-"5"}
export MASTER_TASK_COMMIT_INTERVAL=${MASTER_TASK_COMMIT_INTERVAL:-"1000"}
export MASTER_MAX_CPULOAD_AVG=${MASTER_MAX_CPULOAD_AVG:-"100"}
export MASTER_RESERVED_MEMORY=${MASTER_RESERVED_MEMORY:-"0.1"}
export MASTER_LISTEN_PORT=${MASTER_LISTEN_PORT:-"5678"}
#============================================================================
# Worker Server
#============================================================================
export WORKER_EXEC_THREADS=${WORKER_EXEC_THREADS:-"100"}
export WORKER_HEARTBEAT_INTERVAL=${WORKER_HEARTBEAT_INTERVAL:-"10"}
export WORKER_FETCH_TASK_NUM=${WORKER_FETCH_TASK_NUM:-"3"}
export WORKER_MAX_CPULOAD_AVG=${WORKER_MAX_CPULOAD_AVG:-"100"}
export WORKER_RESERVED_MEMORY=${WORKER_RESERVED_MEMORY:-"0.1"}
export WORKER_LISTEN_PORT=${WORKER_LISTEN_PORT:-"1234"}
export WORKER_GROUP=${WORKER_GROUP:-"default"}
#============================================================================
# Alert Server
#============================================================================
# XLS FILE
export XLS_FILE_PATH=${XLS_FILE_PATH:-"/tmp/xls"}
# mail
export MAIL_SERVER_HOST=${MAIL_SERVER_HOST:-""}
export MAIL_SERVER_PORT=${MAIL_SERVER_PORT:-""}
export MAIL_SENDER=${MAIL_SENDER:-""}
export MAIL_USER=${MAIL_USER:-""}
export MAIL_PASSWD=${MAIL_PASSWD:-""}
export MAIL_SMTP_STARTTLS_ENABLE=${MAIL_SMTP_STARTTLS_ENABLE:-"true"}
export MAIL_SMTP_SSL_ENABLE=${MAIL_SMTP_SSL_ENABLE:-"false"}
export MAIL_SMTP_SSL_TRUST=${MAIL_SMTP_SSL_TRUST:-""}
# wechat
export ENTERPRISE_WECHAT_ENABLE=${ENTERPRISE_WECHAT_ENABLE:-"false"}
export ENTERPRISE_WECHAT_CORP_ID=${ENTERPRISE_WECHAT_CORP_ID:-""}
export ENTERPRISE_WECHAT_SECRET=${ENTERPRISE_WECHAT_SECRET:-""}
export ENTERPRISE_WECHAT_AGENT_ID=${ENTERPRISE_WECHAT_AGENT_ID:-""}
export ENTERPRISE_WECHAT_USERS=${ENTERPRISE_WECHAT_USERS:-""}
#============================================================================
# Frontend
#============================================================================
export FRONTEND_API_SERVER_HOST=${FRONTEND_API_SERVER_HOST:-"127.0.0.1"}
export FRONTEND_API_SERVER_PORT=${FRONTEND_API_SERVER_PORT:-"12345"}
echo "generate app config"
ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do
eval "cat << EOF
$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line})
EOF
" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*}
done
echo "generate nginx config"
sed -i "s/FRONTEND_API_SERVER_HOST/${FRONTEND_API_SERVER_HOST}/g" /etc/nginx/conf.d/dolphinscheduler.conf
sed -i "s/FRONTEND_API_SERVER_PORT/${FRONTEND_API_SERVER_PORT}/g" /etc/nginx/conf.d/dolphinscheduler.conf

196
dockerfile/startup.sh

@ -1,196 +0,0 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
DOLPHINSCHEDULER_BIN=${DOLPHINSCHEDULER_HOME}/bin
DOLPHINSCHEDULER_SCRIPT=${DOLPHINSCHEDULER_HOME}/script
DOLPHINSCHEDULER_LOGS=${DOLPHINSCHEDULER_HOME}/logs
# start postgresql
initPostgreSQL() {
echo "checking postgresql"
if [ -n "$(ifconfig | grep ${POSTGRESQL_HOST})" ]; then
echo "start postgresql service"
rc-service postgresql restart
# role if not exists, create
flag=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='${POSTGRESQL_USERNAME}'")
if [ -z "${flag}" ]; then
echo "create user"
sudo -u postgres psql -tAc "create user ${POSTGRESQL_USERNAME} with password '${POSTGRESQL_PASSWORD}'"
fi
# database if not exists, create
flag=$(sudo -u postgres psql -tAc "select 1 from pg_database where datname='dolphinscheduler'")
if [ -z "${flag}" ]; then
echo "init db"
sudo -u postgres psql -tAc "create database dolphinscheduler owner ${POSTGRESQL_USERNAME}"
fi
# grant
sudo -u postgres psql -tAc "grant all privileges on database dolphinscheduler to ${POSTGRESQL_USERNAME}"
fi
echo "connect postgresql service"
v=$(sudo -u postgres PGPASSWORD=${POSTGRESQL_PASSWORD} psql -h ${POSTGRESQL_HOST} -U ${POSTGRESQL_USERNAME} -d dolphinscheduler -tAc "select 1")
if [ "$(echo '${v}' | grep 'FATAL' | wc -l)" -eq 1 ]; then
echo "Can't connect to database...${v}"
exit 1
fi
echo "import sql data"
${DOLPHINSCHEDULER_SCRIPT}/create-dolphinscheduler.sh
}
# start zk
initZK() {
echo -e "checking zookeeper"
if [[ "${ZOOKEEPER_QUORUM}" = "127.0.0.1:2181" || "${ZOOKEEPER_QUORUM}" = "localhost:2181" ]]; then
echo "start local zookeeper"
/opt/zookeeper/bin/zkServer.sh restart
else
echo "connect remote zookeeper"
echo "${ZOOKEEPER_QUORUM}" | awk -F ',' 'BEGIN{ i=1 }{ while( i <= NF ){ print $i; i++ } }' | while read line; do
while ! nc -z ${line%:*} ${line#*:}; do
counter=$((counter+1))
if [ $counter == 30 ]; then
echo "Error: Couldn't connect to zookeeper."
exit 1
fi
echo "Trying to connect to zookeeper at ${line}. Attempt $counter."
sleep 5
done
done
fi
}
# start nginx
initNginx() {
echo "start nginx"
nginx &
}
# start master-server
initMasterServer() {
echo "start master-server"
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop master-server
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start master-server
}
# start worker-server
initWorkerServer() {
echo "start worker-server"
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop worker-server
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start worker-server
}
# start api-server
initApiServer() {
echo "start api-server"
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop api-server
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start api-server
}
# start logger-server
initLoggerServer() {
echo "start logger-server"
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop logger-server
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start logger-server
}
# start alert-server
initAlertServer() {
echo "start alert-server"
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop alert-server
${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start alert-server
}
# print usage
printUsage() {
echo -e "Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system,"
echo -e "dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing.\n"
echo -e "Usage: [ all | master-server | worker-server | api-server | alert-server | frontend ]\n"
printf "%-13s: %s\n" "all" "Run master-server, worker-server, api-server, alert-server and frontend."
printf "%-13s: %s\n" "master-server" "MasterServer is mainly responsible for DAG task split, task submission monitoring."
printf "%-13s: %s\n" "worker-server" "WorkerServer is mainly responsible for task execution and providing log services.."
printf "%-13s: %s\n" "api-server" "ApiServer is mainly responsible for processing requests from the front-end UI layer."
printf "%-13s: %s\n" "alert-server" "AlertServer mainly include Alarms."
printf "%-13s: %s\n" "frontend" "Frontend mainly provides various visual operation interfaces of the system."
}
# init config file
source /root/startup-init-conf.sh
LOGFILE=/var/log/nginx/access.log
case "$1" in
(all)
initZK
initPostgreSQL
initMasterServer
initWorkerServer
initApiServer
initAlertServer
initLoggerServer
initNginx
LOGFILE=/var/log/nginx/access.log
;;
(master-server)
initZK
initPostgreSQL
initMasterServer
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-master.log
;;
(worker-server)
initZK
initPostgreSQL
initWorkerServer
initLoggerServer
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-worker.log
;;
(api-server)
initZK
initPostgreSQL
initApiServer
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-api-server.log
;;
(alert-server)
initPostgreSQL
initAlertServer
LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-alert.log
;;
(frontend)
initNginx
LOGFILE=/var/log/nginx/access.log
;;
(help)
printUsage
exit 1
;;
(*)
printUsage
exit 1
;;
esac
# init directories and log files
mkdir -p ${DOLPHINSCHEDULER_LOGS} && mkdir -p /var/log/nginx/ && cat /dev/null >> ${LOGFILE}
echo "tail begin"
exec bash -c "tail -n 1 -f ${LOGFILE}"

2
dolphinscheduler-alert/pom.xml

@ -21,7 +21,7 @@
<parent>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.2.1-SNAPSHOT</version>
<version>1.3.1-SNAPSHOT</version>
</parent>
<artifactId>dolphinscheduler-alert</artifactId>
<name>${project.artifactId}</name>

30
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java

@ -16,8 +16,11 @@
*/
package org.apache.dolphinscheduler.alert;
import org.apache.dolphinscheduler.alert.plugin.EmailAlertPlugin;
import org.apache.dolphinscheduler.alert.runner.AlertSender;
import org.apache.dolphinscheduler.alert.utils.Constants;
import org.apache.dolphinscheduler.alert.utils.PropertyUtils;
import org.apache.dolphinscheduler.common.plugin.FilePluginManager;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.DaoFactory;
@ -41,34 +44,47 @@ public class AlertServer {
private static AlertServer instance;
public AlertServer() {
private FilePluginManager alertPluginManager;
private static final String[] whitePrefixes = new String[]{"org.apache.dolphinscheduler.plugin.utils."};
private static final String[] excludePrefixes = new String[]{
"org.apache.dolphinscheduler.plugin.",
"ch.qos.logback.",
"org.slf4j."
};
public AlertServer() {
alertPluginManager =
new FilePluginManager(PropertyUtils.getString(Constants.PLUGIN_DIR), whitePrefixes, excludePrefixes);
// add default alert plugins
alertPluginManager.addPlugin(new EmailAlertPlugin());
}
public synchronized static AlertServer getInstance(){
public synchronized static AlertServer getInstance() {
if (null == instance) {
instance = new AlertServer();
}
return instance;
}
public void start(){
public void start() {
logger.info("alert server ready start ");
while (Stopper.isRunning()){
while (Stopper.isRunning()) {
try {
Thread.sleep(Constants.ALERT_SCAN_INTERVAL);
} catch (InterruptedException e) {
logger.error(e.getMessage(),e);
logger.error(e.getMessage(), e);
Thread.currentThread().interrupt();
}
List<Alert> alerts = alertDao.listWaitExecutionAlert();
alertSender = new AlertSender(alerts, alertDao);
alertSender = new AlertSender(alerts, alertDao, alertPluginManager);
alertSender.run();
}
}
public static void main(String[] args){
public static void main(String[] args) {
AlertServer alertServer = AlertServer.getInstance();
alertServer.start();
}

5
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EmailManager.java

@ -17,7 +17,6 @@
package org.apache.dolphinscheduler.alert.manager;
import org.apache.dolphinscheduler.alert.utils.MailUtils;
import org.apache.dolphinscheduler.common.enums.ShowType;
import java.util.List;
import java.util.Map;
@ -35,7 +34,7 @@ public class EmailManager {
* @param showType the showType
* @return the send result
*/
public Map<String,Object> send(List<String> receviersList,List<String> receviersCcList,String title,String content,ShowType showType){
public Map<String,Object> send(List<String> receviersList,List<String> receviersCcList,String title,String content,String showType){
return MailUtils.sendMails(receviersList, receviersCcList, title, content, showType);
}
@ -48,7 +47,7 @@ public class EmailManager {
* @param showType the showType
* @return the send result
*/
public Map<String,Object> send(List<String> receviersList,String title,String content,ShowType showType){
public Map<String,Object> send(List<String> receviersList,String title,String content,String showType){
return MailUtils.sendMails(receviersList,title, content, showType);
}

10
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/manager/EnterpriseWeChatManager.java

@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.alert.manager;
import org.apache.dolphinscheduler.alert.utils.Constants;
import org.apache.dolphinscheduler.alert.utils.EnterpriseWeChatUtils;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.plugin.model.AlertInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -35,18 +35,18 @@ public class EnterpriseWeChatManager {
private static final Logger logger = LoggerFactory.getLogger(EnterpriseWeChatManager.class);
/**
* Enterprise We Chat send
* @param alert the alert
* @param alertInfo the alert info
* @param token the token
* @return the send result
*/
public Map<String,Object> send(Alert alert, String token){
public Map<String,Object> send(AlertInfo alertInfo, String token){
Map<String,Object> retMap = new HashMap<>();
retMap.put(Constants.STATUS, false);
String agentId = EnterpriseWeChatUtils.ENTERPRISE_WE_CHAT_AGENT_ID;
String users = EnterpriseWeChatUtils.ENTERPRISE_WE_CHAT_USERS;
List<String> userList = Arrays.asList(users.split(","));
logger.info("send message {}",alert);
String msg = EnterpriseWeChatUtils.makeUserSendMsg(userList, agentId,EnterpriseWeChatUtils.markdownByAlert(alert));
logger.info("send message {}", alertInfo.getAlertData().getTitle());
String msg = EnterpriseWeChatUtils.makeUserSendMsg(userList, agentId,EnterpriseWeChatUtils.markdownByAlert(alertInfo.getAlertData()));
try {
EnterpriseWeChatUtils.sendEnterpriseWeChat(Constants.UTF_8, msg, token);
} catch (IOException e) {

130
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/runner/AlertSender.java

@ -16,139 +16,85 @@
*/
package org.apache.dolphinscheduler.alert.runner;
import org.apache.dolphinscheduler.alert.manager.EmailManager;
import org.apache.dolphinscheduler.alert.manager.EnterpriseWeChatManager;
import org.apache.dolphinscheduler.alert.utils.Constants;
import org.apache.dolphinscheduler.alert.utils.EnterpriseWeChatUtils;
import org.apache.dolphinscheduler.common.enums.AlertStatus;
import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.plugin.PluginManager;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.plugin.api.AlertPlugin;
import org.apache.dolphinscheduler.plugin.model.AlertData;
import org.apache.dolphinscheduler.plugin.model.AlertInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
* alert sender
*/
public class AlertSender{
public class AlertSender {
private static final Logger logger = LoggerFactory.getLogger(AlertSender.class);
private static final EmailManager emailManager= new EmailManager();
private static final EnterpriseWeChatManager weChatManager= new EnterpriseWeChatManager();
private List<Alert> alertList;
private AlertDao alertDao;
private PluginManager pluginManager;
public AlertSender() {
}
public AlertSender(){}
public AlertSender(List<Alert> alertList, AlertDao alertDao){
public AlertSender(List<Alert> alertList, AlertDao alertDao, PluginManager pluginManager) {
super();
this.alertList = alertList;
this.alertDao = alertDao;
this.pluginManager = pluginManager;
}
public void run() {
List<User> users;
Map<String, Object> retMaps = null;
for(Alert alert:alertList){
for (Alert alert : alertList) {
users = alertDao.listUserByAlertgroupId(alert.getAlertGroupId());
// receiving group list
List<String> receviersList = new ArrayList<>();
for(User user:users){
for (User user : users) {
receviersList.add(user.getEmail());
}
// custom receiver
String receivers = alert.getReceivers();
if (StringUtils.isNotEmpty(receivers)){
String[] splits = receivers.split(",");
receviersList.addAll(Arrays.asList(splits));
}
// copy list
List<String> receviersCcList = new ArrayList<>();
// Custom Copier
String receiversCc = alert.getReceiversCc();
if (StringUtils.isNotEmpty(receiversCc)){
String[] splits = receiversCc.split(",");
receviersCcList.addAll(Arrays.asList(splits));
}
if (CollectionUtils.isEmpty(receviersList) && CollectionUtils.isEmpty(receviersCcList)) {
logger.warn("alert send error : At least one receiver address required");
alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, "execution failure,At least one receiver address required.", alert.getId());
continue;
}
if (alert.getAlertType() == AlertType.EMAIL){
retMaps = emailManager.send(receviersList,receviersCcList, alert.getTitle(), alert.getContent(),alert.getShowType());
alert.setInfo(retMaps);
}else if (alert.getAlertType() == AlertType.SMS){
retMaps = emailManager.send(getReciversForSMS(users), alert.getTitle(), alert.getContent(),alert.getShowType());
alert.setInfo(retMaps);
AlertData alertData = new AlertData();
alertData.setId(alert.getId())
.setAlertGroupId(alert.getAlertGroupId())
.setContent(alert.getContent())
.setLog(alert.getLog())
.setReceivers(alert.getReceivers())
.setReceiversCc(alert.getReceiversCc())
.setShowType(alert.getShowType().getDescp())
.setTitle(alert.getTitle());
AlertInfo alertInfo = new AlertInfo();
alertInfo.setAlertData(alertData);
alertInfo.addProp("receivers", receviersList);
AlertPlugin emailPlugin = pluginManager.findOne(Constants.PLUGIN_DEFAULT_EMAIL);
retMaps = emailPlugin.process(alertInfo);
if (retMaps == null) {
alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, "alert send error", alert.getId());
logger.info("alert send error : return value is null");
} else if (!Boolean.parseBoolean(String.valueOf(retMaps.get(Constants.STATUS)))) {
alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, String.valueOf(retMaps.get(Constants.MESSAGE)), alert.getId());
logger.info("alert send error : {}", retMaps.get(Constants.MESSAGE));
} else {
logger.error("AlertType is not defined. code: {}, descp: {}",
alert.getAlertType().getCode(),
alert.getAlertType().getDescp());
return;
}
//send flag
boolean flag = false;
if (null != retMaps) {
flag = Boolean.parseBoolean(String.valueOf(retMaps.get(Constants.STATUS)));
}
if (flag) {
alertDao.updateAlert(AlertStatus.EXECUTION_SUCCESS, "execution success", alert.getId());
alertDao.updateAlert(AlertStatus.EXECUTION_SUCCESS, (String) retMaps.get(Constants.MESSAGE), alert.getId());
logger.info("alert send success");
if (EnterpriseWeChatUtils.isEnable()) {
logger.info("Enterprise WeChat is enable!");
try {
String token = EnterpriseWeChatUtils.getToken();
weChatManager.send(alert, token);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
} else {
if (null != retMaps) {
alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, String.valueOf(retMaps.get(Constants.MESSAGE)), alert.getId());
logger.info("alert send error : {}", retMaps.get(Constants.MESSAGE));
}
}
}
}
/**
* get a list of SMS users
* @param users
* @return
*/
private List<String> getReciversForSMS(List<User> users){
List<String> list = new ArrayList<>();
for (User user : users){
list.add(user.getPhone());
}
return list;
}
}

17
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactory.java

@ -17,9 +17,6 @@
package org.apache.dolphinscheduler.alert.template;
import org.apache.dolphinscheduler.alert.template.impl.DefaultHTMLTemplate;
import org.apache.dolphinscheduler.alert.utils.Constants;
import org.apache.dolphinscheduler.alert.utils.PropertyUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -30,8 +27,6 @@ public class AlertTemplateFactory {
private static final Logger logger = LoggerFactory.getLogger(AlertTemplateFactory.class);
private static final String alertTemplate = PropertyUtils.getString(Constants.ALERT_TEMPLATE);
private AlertTemplateFactory(){}
/**
@ -39,16 +34,6 @@ public class AlertTemplateFactory {
* @return a template, default is DefaultHTMLTemplate
*/
public static AlertTemplate getMessageTemplate() {
if(StringUtils.isEmpty(alertTemplate)){
return new DefaultHTMLTemplate();
}
switch (alertTemplate){
case "html":
return new DefaultHTMLTemplate();
default:
throw new IllegalArgumentException(String.format("not support alert template: %s",alertTemplate));
}
return new DefaultHTMLTemplate();
}
}

3
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/impl/DefaultHTMLTemplate.java

@ -35,6 +35,7 @@ public class DefaultHTMLTemplate implements AlertTemplate {
public static final Logger logger = LoggerFactory.getLogger(DefaultHTMLTemplate.class);
@Override
public String getMessageFromTemplate(String content, ShowType showType,boolean showAll) {
@ -140,7 +141,7 @@ public class DefaultHTMLTemplate implements AlertTemplate {
checkNotNull(content);
String htmlTableThead = StringUtils.isEmpty(title) ? "" : String.format("<thead>%s</thead>\n",title);
return "<!DOCTYPE HTML PUBLIC '-//W3C//DTD HTML 4.01 Transitional//EN' 'http://www.w3.org/TR/html4/loose.dtd'><html><head><title>dolphinscheduler</title><meta name='Keywords' content=''><meta name='Description' content=''><style type=\"text/css\">table {margin-top:0px;padding-top:0px;border:1px solid;font-size: 14px;color: #333333;border-width: 1px;border-color: #666666;border-collapse: collapse;}table th {border-width: 1px;padding: 8px;border-style: solid;border-color: #666666;background-color: #dedede;text-align: left;}table td {border-width: 1px;padding: 8px;border-style: solid;border-color: #666666;background-color: #ffffff;text-align: left;}</style></head><body style=\"margin:0;padding:0\"><table border=\"1px\" cellpadding=\"5px\" cellspacing=\"-10px\"> " +htmlTableThead + content +"</table></body></html>";
return Constants.HTML_HEADER_PREFIX +htmlTableThead + content + Constants.TABLE_BODY_HTML_TAIL;
}
}

23
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java

@ -77,8 +77,6 @@ public class Constants {
public static final int NUMBER_1000 = 1000;
public static final String ALERT_TEMPLATE = "alert.template";
public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name";
public static final String SPRING_DATASOURCE_URL = "spring.datasource.url";
@ -158,4 +156,25 @@ public class Constants {
public static final String ENTERPRISE_WECHAT_AGENT_ID = "enterprise.wechat.agent.id";
public static final String ENTERPRISE_WECHAT_USERS = "enterprise.wechat.users";
public static final String HTML_HEADER_PREFIX = "<!DOCTYPE HTML PUBLIC '-//W3C//DTD HTML 4.01 Transitional//EN' 'http://www.w3.org/TR/html4/loose.dtd'><html><head><title>dolphinscheduler</title><meta name='Keywords' content=''><meta name='Description' content=''><style type=\"text/css\">table {margin-top:0px;padding-top:0px;border:1px solid;font-size: 14px;color: #333333;border-width: 1px;border-color: #666666;border-collapse: collapse;}table th {border-width: 1px;padding: 8px;border-style: solid;border-color: #666666;background-color: #dedede;text-align: left;}table td {border-width: 1px;padding: 8px;border-style: solid;border-color: #666666;background-color: #ffffff;text-align: left;}</style></head><body style=\"margin:0;padding:0\"><table border=\"1px\" cellpadding=\"5px\" cellspacing=\"-10px\"> ";
public static final String TABLE_BODY_HTML_TAIL = "</table></body></html>";
/**
* plugin config
*/
public static final String PLUGIN_DIR = "plugin.dir";
public static final String PLUGIN_DEFAULT_EMAIL = "email";
public static final String PLUGIN_DEFAULT_EMAIL_CH = "邮件";
public static final String PLUGIN_DEFAULT_EMAIL_EN = "email";
public static final String PLUGIN_DEFAULT_EMAIL_RECEIVERS = "receivers";
public static final String PLUGIN_DEFAULT_EMAIL_RECEIVERCCS = "receiverCcs";
public static final String RETMAP_MSG = "msg";
}

17
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java

@ -18,10 +18,10 @@ package org.apache.dolphinscheduler.alert.utils;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Alert;
import com.alibaba.fastjson.JSON;
import com.google.common.reflect.TypeToken;
import org.apache.dolphinscheduler.plugin.model.AlertData;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
@ -66,15 +66,17 @@ public class EnterpriseWeChatUtils {
* get Enterprise WeChat is enable
* @return isEnable
*/
public static Boolean isEnable(){
Boolean isEnable = false;
public static boolean isEnable(){
Boolean isEnable = null;
try {
isEnable = PropertyUtils.getBoolean(Constants.ENTERPRISE_WECHAT_ENABLE);
} catch (Exception e) {
logger.error(e.getMessage(),e);
}
if (isEnable == null) {
return false;
}
return isEnable;
}
/**
@ -253,14 +255,13 @@ public class EnterpriseWeChatUtils {
/**
* Determine the mardown style based on the show type of the alert
* @param alert the alert
* @return the markdown alert table/text
*/
public static String markdownByAlert(Alert alert){
public static String markdownByAlert(AlertData alert){
String result = "";
if (alert.getShowType() == ShowType.TABLE) {
if (alert.getShowType().equals(ShowType.TABLE.getDescp())) {
result = markdownTable(alert.getTitle(),alert.getContent());
}else if(alert.getShowType() == ShowType.TEXT){
}else if(alert.getShowType().equals(ShowType.TEXT.getDescp())){
result = markdownText(alert.getTitle(),alert.getContent());
}
return result;

27
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java

@ -55,7 +55,7 @@ public class MailUtils {
public static final Boolean MAIL_USE_SSL = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE);
public static final String XLS_FILE_PATH = PropertyUtils.getString(Constants.XLS_FILE_PATH);
public static final String xlsFilePath = PropertyUtils.getString(Constants.XLS_FILE_PATH,"/tmp/xls");
public static final String STARTTLS_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE);
@ -74,7 +74,7 @@ public class MailUtils {
* @param showType the show type
* @return the result map
*/
public static Map<String,Object> sendMails(Collection<String> receivers, String title, String content,ShowType showType) {
public static Map<String,Object> sendMails(Collection<String> receivers, String title, String content,String showType) {
return sendMails(receivers, null, title, content, showType);
}
@ -87,7 +87,7 @@ public class MailUtils {
* @param showType the show type
* @return the send result
*/
public static Map<String,Object> sendMails(Collection<String> receivers, Collection<String> receiversCc, String title, String content, ShowType showType) {
public static Map<String,Object> sendMails(Collection<String> receivers, Collection<String> receiversCc, String title, String content, String showType) {
Map<String,Object> retMap = new HashMap<>();
retMap.put(Constants.STATUS, false);
@ -98,7 +98,7 @@ public class MailUtils {
receivers.removeIf(StringUtils::isEmpty);
if (showType == ShowType.TABLE || showType == ShowType.TEXT){
if (showType.equals(ShowType.TABLE.getDescp()) || showType.equals(ShowType.TEXT.getDescp())) {
// send email
HtmlEmail email = new HtmlEmail();
@ -125,10 +125,10 @@ public class MailUtils {
} catch (Exception e) {
handleException(receivers, retMap, e);
}
}else if (showType == ShowType.ATTACHMENT || showType == ShowType.TABLEATTACHMENT){
}else if (showType.equals(ShowType.ATTACHMENT.getDescp()) || showType.equals(ShowType.TABLEATTACHMENT.getDescp())) {
try {
String partContent = (showType == ShowType.ATTACHMENT ? "Please see the attachment " + title + Constants.EXCEL_SUFFIX_XLS : htmlTable(content,false));
String partContent = (showType.equals(ShowType.ATTACHMENT.getDescp()) ? "Please see the attachment " + title + Constants.EXCEL_SUFFIX_XLS : htmlTable(content,false));
attachment(receivers,receiversCc,title,content,partContent);
@ -260,9 +260,14 @@ public class MailUtils {
part1.setContent(partContent, Constants.TEXT_HTML_CHARSET_UTF_8);
// set attach file
MimeBodyPart part2 = new MimeBodyPart();
File file = new File(xlsFilePath + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS);
if (!file.getParentFile().exists()) {
file.getParentFile().mkdirs();
}
// make excel file
ExcelUtils.genExcelFile(content,title, XLS_FILE_PATH);
File file = new File(XLS_FILE_PATH + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS);
ExcelUtils.genExcelFile(content,title,xlsFilePath);
part2.attachFile(file);
part2.setFileName(MimeUtility.encodeText(title + Constants.EXCEL_SUFFIX_XLS,Constants.UTF_8,"B"));
// add components to collection
@ -285,7 +290,7 @@ public class MailUtils {
* @return the result map
* @throws EmailException
*/
private static Map<String, Object> getStringObjectMap(String title, String content, ShowType showType, Map<String, Object> retMap, HtmlEmail email) throws EmailException {
private static Map<String, Object> getStringObjectMap(String title, String content, String showType, Map<String, Object> retMap, HtmlEmail email) throws EmailException {
/**
* the subject of the message to be sent
@ -294,9 +299,9 @@ public class MailUtils {
/**
* to send information, you can use HTML tags in mail content because of the use of HtmlEmail
*/
if (showType == ShowType.TABLE) {
if (showType.equals(ShowType.TABLE.getDescp())) {
email.setMsg(htmlTable(content));
} else if (showType == ShowType.TEXT) {
} else if (showType.equals(ShowType.TEXT.getDescp())) {
email.setMsg(htmlText(content));
}

12
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/PropertyUtils.java

@ -79,6 +79,18 @@ public class PropertyUtils {
return properties.getProperty(key.trim());
}
/**
* get property value
*
* @param key property name
* @param defaultVal default value
* @return property value
*/
public static String getString(String key, String defaultVal) {
String val = properties.getProperty(key.trim());
return val == null ? defaultVal : val;
}
/**
* get property value
*

25
dolphinscheduler-alert/src/main/resources/alert.properties

@ -18,9 +18,6 @@
#alert type is EMAIL/SMS
alert.type=EMAIL
# alter msg template, default is html template
#alert.template=html
# mail server configuration
mail.protocol=SMTP
mail.server.host=xxx.xxx.com
@ -35,18 +32,18 @@ mail.smtp.ssl.enable=false
mail.smtp.ssl.trust=xxx.xxx.com
#xls file path,need create if not exist
xls.file.path=/tmp/xls
#xls.file.path=/tmp/xls
# Enterprise WeChat configuration
enterprise.wechat.enable=false
enterprise.wechat.corp.id=xxxxxxx
enterprise.wechat.secret=xxxxxxx
enterprise.wechat.agent.id=xxxxxxx
enterprise.wechat.users=xxxxxxx
enterprise.wechat.token.url=https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret
enterprise.wechat.push.url=https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token
enterprise.wechat.team.send.msg={\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"}
enterprise.wechat.user.send.msg={\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}}
#enterprise.wechat.corp.id=xxxxxxx
#enterprise.wechat.secret=xxxxxxx
#enterprise.wechat.agent.id=xxxxxxx
#enterprise.wechat.users=xxxxxxx
#enterprise.wechat.token.url=https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret
#enterprise.wechat.push.url=https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token
#enterprise.wechat.team.send.msg={\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"}
#enterprise.wechat.user.send.msg={\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}}
plugin.dir=/Users/xx/your/path/to/plugin/dir

1
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactoryTest.java

@ -47,7 +47,6 @@ public class AlertTemplateFactoryTest {
public void testGetMessageTemplate(){
PowerMockito.mockStatic(PropertyUtils.class);
when(PropertyUtils.getString(Constants.ALERT_TEMPLATE)).thenReturn("html");
AlertTemplate defaultTemplate = AlertTemplateFactory.getMessageTemplate();

37
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/template/impl/DefaultHTMLTemplateTest.java

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.alert.template.impl;
import org.apache.dolphinscheduler.alert.utils.Constants;
import org.apache.dolphinscheduler.alert.utils.JSONUtils;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.junit.Test;
@ -82,42 +83,14 @@ public class DefaultHTMLTemplateTest{
private String generateMockTableTypeResultByHand(){
return "<html>\n" +
" <head>\n" +
" <title>dolphinscheduler</title>\n" +
" <meta name='Keywords' content=''>\n" +
" <meta name='Description' content=''>\n" +
" <style type=\"text/css\">\n" +
" table {margin-top:0px;padding-top:0px;border:1px solid;font-size: 14px;color: #333333;border-width: 1px;border-color: #666666;border-collapse: collapse;}\n" +
" table th {border-width: 1px;padding: 8px;border-style: solid;border-color: #666666;background-color: #dedede;text-align: right;}\n" +
" table td {border-width: 1px;padding: 8px;border-style: solid;border-color: #666666;background-color: #ffffff;text-align: right;}\n" +
" </style>\n" +
" </head>\n" +
" <body style=\"margin:0;padding:0\">\n" +
" <table border=\"1px\" cellpadding=\"5px\" cellspacing=\"-10px\">\n" +
return Constants.HTML_HEADER_PREFIX +
"<thead><tr><th>mysql service name</th><th>mysql address</th><th>port</th><th>no index of number</th><th>database client connections</th></tr></thead>\n" +
"<tr><td>mysql200</td><td>192.168.xx.xx</td><td>3306</td><td>80</td><td>190</td></tr><tr><td>mysql210</td><td>192.168.xx.xx</td><td>3306</td><td>10</td><td>90</td></tr> </table>\n" +
" </body>\n" +
"</html>";
"<tr><td>mysql200</td><td>192.168.xx.xx</td><td>3306</td><td>80</td><td>190</td></tr><tr><td>mysql210</td><td>192.168.xx.xx</td><td>3306</td><td>10</td><td>90</td></tr>" + Constants.TABLE_BODY_HTML_TAIL;
}
private String generateMockTextTypeResultByHand(){
return "<html>\n" +
" <head>\n" +
" <title>dolphinscheduler</title>\n" +
" <meta name='Keywords' content=''>\n" +
" <meta name='Description' content=''>\n" +
" <style type=\"text/css\">\n" +
" table {margin-top:0px;padding-top:0px;border:1px solid;font-size: 14px;color: #333333;border-width: 1px;border-color: #666666;border-collapse: collapse;}\n" +
" table th {border-width: 1px;padding: 8px;border-style: solid;border-color: #666666;background-color: #dedede;text-align: right;}\n" +
" table td {border-width: 1px;padding: 8px;border-style: solid;border-color: #666666;background-color: #ffffff;text-align: right;}\n" +
" </style>\n" +
" </head>\n" +
" <body style=\"margin:0;padding:0\">\n" +
" <table border=\"1px\" cellpadding=\"5px\" cellspacing=\"-10px\">\n" +
"<tr><td>{\"mysql service name\":\"mysql200\",\"mysql address\":\"192.168.xx.xx\",\"database client connections\":\"190\",\"port\":\"3306\",\"no index of number\":\"80\"}</td></tr><tr><td>{\"mysql service name\":\"mysql210\",\"mysql address\":\"192.168.xx.xx\",\"database client connections\":\"90\",\"port\":\"3306\",\"no index of number\":\"10\"}</td></tr> </table>\n" +
" </body>\n" +
"</html>";
return Constants.HTML_HEADER_PREFIX + "<tr><td>{\"mysql service name\":\"mysql200\",\"mysql address\":\"192.168.xx.xx\",\"database client connections\":\"190\",\"port\":\"3306\",\"no index of number\":\"80\"}</td></tr><tr><td>{\"mysql service name\":\"mysql210\",\"mysql address\":\"192.168.xx.xx\",\"database client connections\":\"90\",\"port\":\"3306\",\"no index of number\":\"10\"}</td></tr>" + Constants.TABLE_BODY_HTML_TAIL;
}
}

27
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java

@ -20,7 +20,9 @@ import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.plugin.model.AlertData;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -54,11 +56,19 @@ public class EnterpriseWeChatUtilsTest {
private static final String enterpriseWechatUsers="LiGang,journey";
private static final String msg = "hello world";
private static final String enterpriseWechatTeamSendMsg = "{\\\"toparty\\\":\\\"$toParty\\\",\\\"agentid\\\":\\\"$agentId\\\",\\\"msgtype\\\":\\\"text\\\",\\\"text\\\":{\\\"content\\\":\\\"$msg\\\"},\\\"safe\\\":\\\"0\\\"}";
private static final String enterpriseWechatUserSendMsg = "{\\\"touser\\\":\\\"$toUser\\\",\\\"agentid\\\":\\\"$agentId\\\",\\\"msgtype\\\":\\\"markdown\\\",\\\"markdown\\\":{\\\"content\\\":\\\"$msg\\\"}}";
@Test
public void testIsEnable(){
@Before
public void init(){
PowerMockito.mockStatic(PropertyUtils.class);
Mockito.when(PropertyUtils.getBoolean(Constants.ENTERPRISE_WECHAT_ENABLE)).thenReturn(true);
Mockito.when(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG)).thenReturn(enterpriseWechatUserSendMsg);
Mockito.when(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TEAM_SEND_MSG)).thenReturn(enterpriseWechatTeamSendMsg);
}
@Test
public void testIsEnable(){
Boolean weChartEnable = EnterpriseWeChatUtils.isEnable();
Assert.assertTrue(weChartEnable);
}
@ -88,6 +98,7 @@ public class EnterpriseWeChatUtilsTest {
@Test
public void tesMakeUserSendMsg1(){
String sendMsg = EnterpriseWeChatUtils.makeUserSendMsg(enterpriseWechatUsers, enterpriseWechatAgentId, msg);
Assert.assertTrue(sendMsg.contains(enterpriseWechatUsers));
Assert.assertTrue(sendMsg.contains(enterpriseWechatAgentId));
@ -110,14 +121,22 @@ public class EnterpriseWeChatUtilsTest {
@Test
public void testMarkdownByAlertForText(){
Alert alertForText = createAlertForText();
String result = EnterpriseWeChatUtils.markdownByAlert(alertForText);
AlertData alertData = new AlertData();
alertData.setTitle(alertForText.getTitle())
.setShowType(alertForText.getShowType().getDescp())
.setContent(alertForText.getContent());
String result = EnterpriseWeChatUtils.markdownByAlert(alertData);
Assert.assertNotNull(result);
}
@Test
public void testMarkdownByAlertForTable(){
Alert alertForText = createAlertForTable();
String result = EnterpriseWeChatUtils.markdownByAlert(alertForText);
AlertData alertData = new AlertData();
alertData.setTitle(alertForText.getTitle())
.setShowType(alertForText.getShowType().getDescp())
.setContent(alertForText.getContent());
String result = EnterpriseWeChatUtils.markdownByAlert(alertData);
Assert.assertNotNull(result);
}

10
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java

@ -58,7 +58,7 @@ public class MailUtilsTest {
alert.setAlertType(AlertType.EMAIL);
alert.setAlertGroupId(4);
MailUtils.sendMails(Arrays.asList(receivers),Arrays.asList(receiversCc),alert.getTitle(),alert.getContent(), ShowType.TEXT);
MailUtils.sendMails(Arrays.asList(receivers),Arrays.asList(receiversCc),alert.getTitle(),alert.getContent(), ShowType.TEXT.getDescp());
}
@ -70,7 +70,7 @@ public class MailUtilsTest {
String[] mails = new String[]{"xx@xx.com"};
for(Alert alert : alerts){
MailUtils.sendMails(Arrays.asList(mails),"gaojing", alert.getContent(), alert.getShowType());
MailUtils.sendMails(Arrays.asList(mails),"gaojing", alert.getContent(), ShowType.TABLE.getDescp());
}
}
@ -111,7 +111,7 @@ public class MailUtilsTest {
alert.setContent(content);
alert.setAlertType(AlertType.EMAIL);
alert.setAlertGroupId(1);
MailUtils.sendMails(Arrays.asList(mails),"gaojing", alert.getContent(), ShowType.TABLE);
MailUtils.sendMails(Arrays.asList(mails),"gaojing", alert.getContent(), ShowType.TABLE.getDescp());
}
/**
@ -170,7 +170,7 @@ public class MailUtilsTest {
alert.setContent(content);
alert.setAlertType(AlertType.EMAIL);
alert.setAlertGroupId(1);
MailUtils.sendMails(Arrays.asList(mails),"gaojing",alert.getContent(),ShowType.ATTACHMENT);
MailUtils.sendMails(Arrays.asList(mails),"gaojing",alert.getContent(),ShowType.ATTACHMENT.getDescp());
}
@Test
@ -183,7 +183,7 @@ public class MailUtilsTest {
alert.setContent(content);
alert.setAlertType(AlertType.EMAIL);
alert.setAlertGroupId(1);
MailUtils.sendMails(Arrays.asList(mails),"gaojing",alert.getContent(),ShowType.TABLEATTACHMENT);
MailUtils.sendMails(Arrays.asList(mails),"gaojing",alert.getContent(),ShowType.TABLEATTACHMENT.getDescp());
}
}

67
dolphinscheduler-alert/src/test/resources/alert.properties

@ -1,67 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For unit test
#alert type is EMAIL/SMS
alert.type=EMAIL
# mail server configuration
mail.protocol=SMTP
mail.server.host=xxx.xxx.test
mail.server.port=25
mail.sender=xxx@xxx.com
mail.user=xxx@xxx.com
mail.passwd=111111
# Test double
test.server.factor=3.0
# Test NumberFormat
test.server.testnumber=abc
# Test array
test.server.list=xxx.xxx.test1,xxx.xxx.test2,xxx.xxx.test3
# Test enum
test.server.enum1=MASTER
test.server.enum2=DEAD_SERVER
test.server.enum3=abc
# TLS
mail.smtp.starttls.enable=true
# SSL
mail.smtp.ssl.enable=false
mail.smtp.ssl.trust=xxx.xxx.com
#xls file path,need create if not exist
xls.file.path=/tmp/xls
# Enterprise WeChat configuration
enterprise.wechat.enable=false
enterprise.wechat.corp.id=xxxxxxx
enterprise.wechat.secret=xxxxxxx
enterprise.wechat.agent.id=xxxxxxx
enterprise.wechat.users=xxxxxxx
enterprise.wechat.token.url=https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret
enterprise.wechat.push.url=https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token
enterprise.wechat.team.send.msg={\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"}
enterprise.wechat.user.send.msg={\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}}

2
dolphinscheduler-api/pom.xml

@ -21,7 +21,7 @@
<parent>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.2.1-SNAPSHOT</version>
<version>1.3.1-SNAPSHOT</version>
</parent>
<artifactId>dolphinscheduler-api</artifactId>
<name>${project.artifactId}</name>

8
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java

@ -21,13 +21,15 @@ import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@SpringBootApplication
@ServletComponentScan
@ComponentScan({"org.apache.dolphinscheduler.api",
"org.apache.dolphinscheduler.dao",
"org.apache.dolphinscheduler.service"})
@ComponentScan(basePackages = {"org.apache.dolphinscheduler"},
excludeFilters = @ComponentScan.Filter(type = FilterType.REGEX,
pattern = "org.apache.dolphinscheduler.server.*"))
public class ApiApplicationServer extends SpringBootServletInitializer {
public static void main(String[] args) {

129
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java

@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.AccessTokenService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -37,13 +38,14 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* access token controller
*/
@Api(tags = "ACCESS_TOKEN_TAG", position = 1)
@RestController
@RequestMapping("/access-token")
public class AccessTokenController extends BaseController{
public class AccessTokenController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenController.class);
@ -54,140 +56,125 @@ public class AccessTokenController extends BaseController{
/**
* create token
* @param loginUser login user
* @param userId token for user id
*
* @param loginUser login user
* @param userId token for user id
* @param expireTime expire time for the token
* @param token token
* @param token token
* @return create result state code
*/
@ApiIgnore
@PostMapping(value = "/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_ACCESS_TOKEN_ERROR)
public Result createToken(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "expireTime") String expireTime,
@RequestParam(value = "token") String token){
@RequestParam(value = "userId") int userId,
@RequestParam(value = "expireTime") String expireTime,
@RequestParam(value = "token") String token) {
logger.info("login user {}, create token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(),
userId,expireTime,token);
try {
Map<String, Object> result = accessTokenService.createToken(userId, expireTime, token);
return returnDataList(result);
}catch (Exception e){
logger.error(CREATE_ACCESS_TOKEN_ERROR.getMsg(),e);
return error(CREATE_ACCESS_TOKEN_ERROR.getCode(), CREATE_ACCESS_TOKEN_ERROR.getMsg());
}
userId, expireTime, token);
Map<String, Object> result = accessTokenService.createToken(userId, expireTime, token);
return returnDataList(result);
}
/**
* generate token string
* @param loginUser login user
* @param userId token for user
*
* @param loginUser login user
* @param userId token for user
* @param expireTime expire time
* @return token string
*/
@ApiIgnore
@PostMapping(value = "/generate")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(GENERATE_TOKEN_ERROR)
public Result generateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "expireTime") String expireTime){
logger.info("login user {}, generate token , userId : {} , token expire time : {}",loginUser,userId,expireTime);
try {
Map<String, Object> result = accessTokenService.generateToken(userId, expireTime);
return returnDataList(result);
}catch (Exception e){
logger.error(GENERATE_TOKEN_ERROR.getMsg(),e);
return error(GENERATE_TOKEN_ERROR.getCode(), GENERATE_TOKEN_ERROR.getMsg());
}
@RequestParam(value = "userId") int userId,
@RequestParam(value = "expireTime") String expireTime) {
logger.info("login user {}, generate token , userId : {} , token expire time : {}", loginUser, userId, expireTime);
Map<String, Object> result = accessTokenService.generateToken(userId, expireTime);
return returnDataList(result);
}
/**
* query access token list paging
*
* @param loginUser login user
* @param pageNo page number
* @param pageNo page number
* @param searchVal search value
* @param pageSize page size
* @param pageSize page size
* @return token list of page number and page size
*/
@ApiOperation(value = "queryAccessTokenList", notes= "QUERY_ACCESS_TOKEN_LIST_NOTES")
@ApiOperation(value = "queryAccessTokenList", notes = "QUERY_ACCESS_TOKEN_LIST_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20")
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20")
})
@GetMapping(value="/list-paging")
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR)
public Result queryAccessTokenList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize){
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize) {
logger.info("login user {}, list access token paging, pageNo: {}, searchVal: {}, pageSize: {}",
loginUser.getUserName(),pageNo,searchVal,pageSize);
try{
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if(result.get(Constants.STATUS) != Status.SUCCESS){
return returnDataListPaging(result);
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = accessTokenService.queryAccessTokenList(loginUser, searchVal, pageNo, pageSize);
loginUser.getUserName(), pageNo, searchVal, pageSize);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}catch (Exception e){
logger.error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg(),e);
return error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getCode(),QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg());
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = accessTokenService.queryAccessTokenList(loginUser, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* delete access token by id
*
* @param loginUser login user
* @param id token id
* @param id token id
* @return delete result code
*/
@ApiIgnore
@PostMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_ACCESS_TOKEN_ERROR)
public Result delAccessTokenById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id) {
@RequestParam(value = "id") int id) {
logger.info("login user {}, delete access token, id: {},", loginUser.getUserName(), id);
try {
Map<String, Object> result = accessTokenService.delAccessTokenById(loginUser, id);
return returnDataList(result);
}catch (Exception e){
logger.error(DELETE_ACCESS_TOKEN_ERROR.getMsg(),e);
return error(Status.DELETE_ACCESS_TOKEN_ERROR.getCode(), Status.DELETE_ACCESS_TOKEN_ERROR.getMsg());
}
Map<String, Object> result = accessTokenService.delAccessTokenById(loginUser, id);
return returnDataList(result);
}
/**
* update token
* @param loginUser login user
* @param id token id
* @param userId token for user
*
* @param loginUser login user
* @param id token id
* @param userId token for user
* @param expireTime token expire time
* @param token token string
* @param token token string
* @return update result code
*/
@ApiIgnore
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_ACCESS_TOKEN_ERROR)
public Result updateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "expireTime") String expireTime,
@RequestParam(value = "token") String token){
@RequestParam(value = "token") String token) {
logger.info("login user {}, update token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(),
userId,expireTime,token);
try {
Map<String, Object> result = accessTokenService.updateToken(id,userId, expireTime, token);
return returnDataList(result);
}catch (Exception e){
logger.error(UPDATE_ACCESS_TOKEN_ERROR.getMsg(),e);
return error(UPDATE_ACCESS_TOKEN_ERROR.getCode(), UPDATE_ACCESS_TOKEN_ERROR.getMsg());
}
userId, expireTime, token);
Map<String, Object> result = accessTokenService.updateToken(id, userId, expireTime, token);
return returnDataList(result);
}
}

165
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java

@ -16,6 +16,7 @@
*/
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.AlertGroupService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -37,13 +38,15 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.HashMap;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* alert group controller
*/
@Api(tags = "ALERT_GROUP_TAG", position = 1)
@RestController
@RequestMapping("alert-group")
public class AlertGroupController extends BaseController{
public class AlertGroupController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(AlertGroupController.class);
@ -53,171 +56,154 @@ public class AlertGroupController extends BaseController{
/**
* create alert group
* @param loginUser login user
* @param groupName group name
* @param groupType group type
*
* @param loginUser login user
* @param groupName group name
* @param groupType group type
* @param description description
* @return create result code
*/
@ApiOperation(value = "createAlertgroup", notes= "CREATE_ALERT_GROUP_NOTES")
@ApiOperation(value = "createAlertgroup", notes = "CREATE_ALERT_GROUP_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType ="AlertType"),
@ApiImplicitParam(name = "description", value = "DESC", dataType ="String")
@ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType = "AlertType"),
@ApiImplicitParam(name = "description", value = "DESC", dataType = "String")
})
@PostMapping(value = "/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_ALERT_GROUP_ERROR)
public Result createAlertgroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "groupName") String groupName,
@RequestParam(value = "groupType") AlertType groupType,
@RequestParam(value = "description",required = false) String description) {
@RequestParam(value = "groupName") String groupName,
@RequestParam(value = "groupType") AlertType groupType,
@RequestParam(value = "description", required = false) String description) {
logger.info("loginUser user {}, create alertgroup, groupName: {}, groupType: {}, desc: {}",
loginUser.getUserName(), groupName, groupType,description);
try {
Map<String, Object> result = alertGroupService.createAlertgroup(loginUser, groupName, groupType,description);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.CREATE_ALERT_GROUP_ERROR.getMsg(),e);
return error(Status.CREATE_ALERT_GROUP_ERROR.getCode(), Status.CREATE_ALERT_GROUP_ERROR.getMsg());
}
loginUser.getUserName(), groupName, groupType, description);
Map<String, Object> result = alertGroupService.createAlertgroup(loginUser, groupName, groupType, description);
return returnDataList(result);
}
/**
* alert group list
*
* @param loginUser login user
* @return alert group list
*/
@ApiOperation(value = "list", notes= "QUERY_ALERT_GROUP_LIST_NOTES")
@ApiOperation(value = "list", notes = "QUERY_ALERT_GROUP_LIST_NOTES")
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_ALL_ALERTGROUP_ERROR)
public Result list(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {}, query all alertGroup",
loginUser.getUserName());
try {
HashMap<String, Object> result = alertGroupService.queryAlertgroup();
return returnDataList(result);
} catch (Exception e) {
logger.error(Status.QUERY_ALL_ALERTGROUP_ERROR.getMsg(), e);
return error(Status.QUERY_ALL_ALERTGROUP_ERROR.getCode(), Status.QUERY_ALL_ALERTGROUP_ERROR.getMsg());
}
HashMap<String, Object> result = alertGroupService.queryAlertgroup();
return returnDataList(result);
}
/**
* paging query alarm group list
*
* @param loginUser login user
* @param pageNo page number
* @param pageNo page number
* @param searchVal search value
* @param pageSize page size
* @param pageSize page size
* @return alert group list page
*/
@ApiOperation(value = "queryAlertGroupListPaging", notes= "QUERY_ALERT_GROUP_LIST_PAGING_NOTES")
@ApiOperation(value = "queryAlertGroupListPaging", notes = "QUERY_ALERT_GROUP_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20")
})
@GetMapping(value="/list-paging")
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(LIST_PAGING_ALERT_GROUP_ERROR)
public Result listPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize){
@RequestParam("pageSize") Integer pageSize) {
logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}",
loginUser.getUserName(),pageNo,searchVal,pageSize);
try{
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if(result.get(Constants.STATUS) != Status.SUCCESS){
return returnDataListPaging(result);
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = alertGroupService.listPaging(loginUser, searchVal, pageNo, pageSize);
loginUser.getUserName(), pageNo, searchVal, pageSize);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.LIST_PAGING_ALERT_GROUP_ERROR.getMsg(),e);
return error(Status.LIST_PAGING_ALERT_GROUP_ERROR.getCode(), Status.LIST_PAGING_ALERT_GROUP_ERROR.getMsg());
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = alertGroupService.listPaging(loginUser, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* updateProcessInstance alert group
* @param loginUser login user
* @param id alert group id
* @param groupName group name
* @param groupType group type
*
* @param loginUser login user
* @param id alert group id
* @param groupName group name
* @param groupType group type
* @param description description
* @return update result code
*/
@ApiOperation(value = "updateAlertgroup", notes= "UPDATE_ALERT_GROUP_NOTES")
@ApiOperation(value = "updateAlertgroup", notes = "UPDATE_ALERT_GROUP_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int",example = "100"),
@ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType ="AlertType"),
@ApiImplicitParam(name = "description", value = "DESC", dataType ="String")
@ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType = "AlertType"),
@ApiImplicitParam(name = "description", value = "DESC", dataType = "String")
})
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_ALERT_GROUP_ERROR)
public Result updateAlertgroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id,
@RequestParam(value = "groupName") String groupName,
@RequestParam(value = "groupType") AlertType groupType,
@RequestParam(value = "description",required = false) String description) {
@RequestParam(value = "description", required = false) String description) {
logger.info("login user {}, updateProcessInstance alertgroup, groupName: {}, groupType: {}, desc: {}",
loginUser.getUserName(), groupName, groupType,description);
try {
Map<String, Object> result = alertGroupService.updateAlertgroup(loginUser, id, groupName, groupType, description);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.UPDATE_ALERT_GROUP_ERROR.getMsg(),e);
return error(Status.UPDATE_ALERT_GROUP_ERROR.getCode(), Status.UPDATE_ALERT_GROUP_ERROR.getMsg());
}
loginUser.getUserName(), groupName, groupType, description);
Map<String, Object> result = alertGroupService.updateAlertgroup(loginUser, id, groupName, groupType, description);
return returnDataList(result);
}
/**
* delete alert group by id
*
* @param loginUser login user
* @param id alert group id
* @param id alert group id
* @return delete result code
*/
@ApiOperation(value = "delAlertgroupById", notes= "DELETE_ALERT_GROUP_BY_ID_NOTES")
@ApiOperation(value = "delAlertgroupById", notes = "DELETE_ALERT_GROUP_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int",example = "100")
@ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_ALERT_GROUP_ERROR)
public Result delAlertgroupById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id) {
@RequestParam(value = "id") int id) {
logger.info("login user {}, delete AlertGroup, id: {},", loginUser.getUserName(), id);
try {
Map<String, Object> result = alertGroupService.delAlertgroupById(loginUser, id);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.DELETE_ALERT_GROUP_ERROR.getMsg(),e);
return error(Status.DELETE_ALERT_GROUP_ERROR.getCode(), Status.DELETE_ALERT_GROUP_ERROR.getMsg());
}
Map<String, Object> result = alertGroupService.delAlertgroupById(loginUser, id);
return returnDataList(result);
}
/**
* check alert group exist
*
* @param loginUser login user
* @param groupName group name
* @return check result code
*/
@ApiOperation(value = "verifyGroupName", notes= "VERIFY_ALERT_GROUP_NAME_NOTES")
@ApiOperation(value = "verifyGroupName", notes = "VERIFY_ALERT_GROUP_NAME_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"),
})
@GetMapping(value = "/verify-group-name")
@ResponseStatus(HttpStatus.OK)
public Result verifyGroupName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value ="groupName") String groupName) {
@RequestParam(value = "groupName") String groupName) {
logger.info("login user {}, verify group name: {}", loginUser.getUserName(), groupName);
boolean exist= alertGroupService.existGroupName(groupName);
boolean exist = alertGroupService.existGroupName(groupName);
Result result = new Result();
if (exist) {
logger.error("group {} has exist, can't create again.", groupName);
@ -233,29 +219,24 @@ public class AlertGroupController extends BaseController{
/**
* grant user
*
* @param loginUser login user
* @param userIds user ids in the group
* @param loginUser login user
* @param userIds user ids in the group
* @param alertgroupId alert group id
* @return grant result code
*/
@ApiOperation(value = "grantUser", notes= "GRANT_ALERT_GROUP_NOTES")
@ApiOperation(value = "grantUser", notes = "GRANT_ALERT_GROUP_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int",example = "100"),
@ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "userIds", value = "USER_IDS", required = true, dataType = "String")
})
@PostMapping(value = "/grant-user")
@ResponseStatus(HttpStatus.OK)
@ApiException(ALERT_GROUP_GRANT_USER_ERROR)
public Result grantUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "alertgroupId") int alertgroupId,
@RequestParam(value = "userIds") String userIds) {
logger.info("login user {}, grant user, alertGroupId: {},userIds : {}", loginUser.getUserName(), alertgroupId,userIds);
try {
Map<String, Object> result = alertGroupService.grantUser(loginUser, alertgroupId, userIds);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.ALERT_GROUP_GRANT_USER_ERROR.getMsg(),e);
return error(Status.ALERT_GROUP_GRANT_USER_ERROR.getCode(), Status.ALERT_GROUP_GRANT_USER_ERROR.getMsg());
}
@RequestParam(value = "alertgroupId") int alertgroupId,
@RequestParam(value = "userIds") String userIds) {
logger.info("login user {}, grant user, alertGroupId: {},userIds : {}", loginUser.getUserName(), alertgroupId, userIds);
Map<String, Object> result = alertGroupService.grantUser(loginUser, alertgroupId, userIds);
return returnDataList(result);
}
}

140
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.DataAnalysisService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -25,7 +26,6 @@ import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.apache.dolphinscheduler.api.enums.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -35,13 +35,15 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* data analysis controller
*/
@Api(tags = "DATA_ANALYSIS_TAG", position = 1)
@RestController
@RequestMapping("projects/analysis")
public class DataAnalysisController extends BaseController{
public class DataAnalysisController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisController.class);
@ -54,31 +56,27 @@ public class DataAnalysisController extends BaseController{
*
* @param loginUser login user
* @param startDate count start date
* @param endDate count end date
* @param endDate count end date
* @param projectId project id
* @return task instance count data
*/
@ApiOperation(value = "countTaskState", notes= "COUNT_TASK_STATE_NOTES")
@ApiOperation(value = "countTaskState", notes = "COUNT_TASK_STATE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"),
@ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"),
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100")
@ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"),
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/task-state-count")
@GetMapping(value = "/task-state-count")
@ResponseStatus(HttpStatus.OK)
@ApiException(TASK_INSTANCE_STATE_COUNT_ERROR)
public Result countTaskState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value="startDate", required=false) String startDate,
@RequestParam(value="endDate", required=false) String endDate,
@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
try{
logger.info("count task state, user:{}, start date: {}, end date:{}, project id {}",
loginUser.getUserName(), startDate, endDate, projectId);
Map<String, Object> result = dataAnalysisService.countTaskStateByProject(loginUser,projectId, startDate, endDate);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.TASK_INSTANCE_STATE_COUNT_ERROR.getMsg(),e);
return error(Status.TASK_INSTANCE_STATE_COUNT_ERROR.getCode(), Status.TASK_INSTANCE_STATE_COUNT_ERROR.getMsg());
}
@RequestParam(value = "startDate", required = false) String startDate,
@RequestParam(value = "endDate", required = false) String endDate,
@RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
logger.info("count task state, user:{}, start date: {}, end date:{}, project id {}",
loginUser.getUserName(), startDate, endDate, projectId);
Map<String, Object> result = dataAnalysisService.countTaskStateByProject(loginUser, projectId, startDate, endDate);
return returnDataList(result);
}
/**
@ -86,31 +84,27 @@ public class DataAnalysisController extends BaseController{
*
* @param loginUser login user
* @param startDate start date
* @param endDate end date
* @param endDate end date
* @param projectId project id
* @return process instance data
*/
@ApiOperation(value = "countProcessInstanceState", notes= "COUNT_PROCESS_INSTANCE_NOTES")
@ApiOperation(value = "countProcessInstanceState", notes = "COUNT_PROCESS_INSTANCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"),
@ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"),
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100")
@ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"),
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/process-state-count")
@GetMapping(value = "/process-state-count")
@ResponseStatus(HttpStatus.OK)
@ApiException(COUNT_PROCESS_INSTANCE_STATE_ERROR)
public Result countProcessInstanceState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value="startDate", required=false) String startDate,
@RequestParam(value="endDate", required=false) String endDate,
@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
try{
logger.info("count process instance state, user:{}, start date: {}, end date:{}, project id:{}",
loginUser.getUserName(), startDate, endDate, projectId);
Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(loginUser, projectId, startDate, endDate);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg(),e);
return error(Status.COUNT_PROCESS_INSTANCE_STATE_ERROR.getCode(), Status.COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg());
}
@RequestParam(value = "startDate", required = false) String startDate,
@RequestParam(value = "endDate", required = false) String endDate,
@RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
logger.info("count process instance state, user:{}, start date: {}, end date:{}, project id:{}",
loginUser.getUserName(), startDate, endDate, projectId);
Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(loginUser, projectId, startDate, endDate);
return returnDataList(result);
}
/**
@ -120,23 +114,19 @@ public class DataAnalysisController extends BaseController{
* @param projectId project id
* @return definition count in project id
*/
@ApiOperation(value = "countDefinitionByUser", notes= "COUNT_PROCESS_DEFINITION_BY_USER_NOTES")
@ApiOperation(value = "countDefinitionByUser", notes = "COUNT_PROCESS_DEFINITION_BY_USER_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100")
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/define-user-count")
@GetMapping(value = "/define-user-count")
@ResponseStatus(HttpStatus.OK)
@ApiException(COUNT_PROCESS_DEFINITION_USER_ERROR)
public Result countDefinitionByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
try{
logger.info("count process definition , user:{}, project id:{}",
loginUser.getUserName(), projectId);
Map<String, Object> result = dataAnalysisService.countDefinitionByUser(loginUser, projectId);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg(),e);
return error(Status.COUNT_PROCESS_DEFINITION_USER_ERROR.getCode(), Status.COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg());
}
@RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
logger.info("count process definition , user:{}, project id:{}",
loginUser.getUserName(), projectId);
Map<String, Object> result = dataAnalysisService.countDefinitionByUser(loginUser, projectId);
return returnDataList(result);
}
@ -145,31 +135,27 @@ public class DataAnalysisController extends BaseController{
*
* @param loginUser login user
* @param startDate start date
* @param endDate end date
* @param endDate end date
* @param projectId project id
* @return command state in project id
*/
@ApiOperation(value = "countCommandState", notes= "COUNT_COMMAND_STATE_NOTES")
@ApiOperation(value = "countCommandState", notes = "COUNT_COMMAND_STATE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"),
@ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"),
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100")
@ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"),
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/command-state-count")
@GetMapping(value = "/command-state-count")
@ResponseStatus(HttpStatus.OK)
@ApiException(COMMAND_STATE_COUNT_ERROR)
public Result countCommandState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value="startDate", required=false) String startDate,
@RequestParam(value="endDate", required=false) String endDate,
@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
try{
logger.info("count command state, user:{}, start date: {}, end date:{}, project id {}",
loginUser.getUserName(), startDate, endDate, projectId);
Map<String, Object> result = dataAnalysisService.countCommandState(loginUser, projectId, startDate, endDate);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.COMMAND_STATE_COUNT_ERROR.getMsg(),e);
return error(Status.COMMAND_STATE_COUNT_ERROR.getCode(), Status.COMMAND_STATE_COUNT_ERROR.getMsg());
}
@RequestParam(value = "startDate", required = false) String startDate,
@RequestParam(value = "endDate", required = false) String endDate,
@RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
logger.info("count command state, user:{}, start date: {}, end date:{}, project id {}",
loginUser.getUserName(), startDate, endDate, projectId);
Map<String, Object> result = dataAnalysisService.countCommandState(loginUser, projectId, startDate, endDate);
return returnDataList(result);
}
/**
@ -179,23 +165,19 @@ public class DataAnalysisController extends BaseController{
* @param projectId project id
* @return queue state count
*/
@ApiOperation(value = "countQueueState", notes= "COUNT_QUEUE_STATE_NOTES")
@ApiOperation(value = "countQueueState", notes = "COUNT_QUEUE_STATE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100")
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/queue-count")
@GetMapping(value = "/queue-count")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUEUE_COUNT_ERROR)
public Result countQueueState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){
try{
logger.info("count command state, user:{}, project id {}",
loginUser.getUserName(), projectId);
Map<String, Object> result = dataAnalysisService.countQueueState(loginUser, projectId);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.QUEUE_COUNT_ERROR.getMsg(),e);
return error(Status.QUEUE_COUNT_ERROR.getCode(), Status.QUEUE_COUNT_ERROR.getMsg());
}
@RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
logger.info("count command state, user:{}, project id {}",
loginUser.getUserName(), projectId);
Map<String, Object> result = dataAnalysisService.countQueueState(loginUser, projectId);
return returnDataList(result);
}

367
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java

@ -16,18 +16,20 @@
*/
package org.apache.dolphinscheduler.api.controller;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.DataSourceService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -38,6 +40,7 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* data source controller
*/
@ -53,33 +56,36 @@ public class DataSourceController extends BaseController {
/**
* create data source
*
* @param loginUser login user
* @param name data source name
* @param note data source description
* @param type data source type
* @param host host
* @param port port
* @param database data base
* @param name data source name
* @param note data source description
* @param type data source type
* @param host host
* @param port port
* @param database data base
* @param principal principal
* @param userName user name
* @param password password
* @param other other arguments
* @param userName user name
* @param password password
* @param other other arguments
* @return create result code
*/
@ApiOperation(value = "createDataSource", notes= "CREATE_DATA_SOURCE_NOTES")
@ApiOperation(value = "createDataSource", notes = "CREATE_DATA_SOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"),
@ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"),
@ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"),
@ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"),
@ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"),
@ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"),
@ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"),
@ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"),
@ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String")
@ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType"),
@ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST", required = true, dataType = "String"),
@ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT", required = true, dataType = "String"),
@ApiImplicitParam(name = "database", value = "DATABASE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "password", value = "PASSWORD", dataType = "String"),
@ApiImplicitParam(name = "connectType", value = "CONNECT_TYPE", dataType = "DbConnectType"),
@ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType = "String")
})
@PostMapping(value = "/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_DATASOURCE_ERROR)
public Result createDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("name") String name,
@RequestParam(value = "note", required = false) String note,
@ -90,18 +96,13 @@ public class DataSourceController extends BaseController {
@RequestParam(value = "principal") String principal,
@RequestParam(value = "userName") String userName,
@RequestParam(value = "password") String password,
@RequestParam(value = "connectType") DbConnectType connectType,
@RequestParam(value = "other") String other) {
logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {},port: {},database : {},principal: {},userName : {} other: {}",
loginUser.getUserName(), name, note, type, host,port,database,principal,userName,other);
try {
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other);
Map<String, Object> result = dataSourceService.createDataSource(loginUser, name, note, type, parameter);
return returnDataList(result);
} catch (Exception e) {
logger.error(CREATE_DATASOURCE_ERROR.getMsg(),e);
return error(Status.CREATE_DATASOURCE_ERROR.getCode(), Status.CREATE_DATASOURCE_ERROR.getMsg());
}
logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {}, port: {}, database : {}, principal: {}, userName : {}, connectType: {}, other: {}",
loginUser.getUserName(), name, note, type, host, port, database, principal, userName, connectType, other);
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other);
Map<String, Object> result = dataSourceService.createDataSource(loginUser, name, note, type, parameter);
return returnDataList(result);
}
@ -109,34 +110,36 @@ public class DataSourceController extends BaseController {
* updateProcessInstance data source
*
* @param loginUser login user
* @param name data source name
* @param note description
* @param type data source type
* @param other other arguments
* @param id data source di
* @param host host
* @param port port
* @param database database
* @param name data source name
* @param note description
* @param type data source type
* @param other other arguments
* @param id data source di
* @param host host
* @param port port
* @param database database
* @param principal principal
* @param userName user name
* @param password password
* @param userName user name
* @param password password
* @return update result code
*/
@ApiOperation(value = "updateDataSource", notes= "UPDATE_DATA_SOURCE_NOTES")
@ApiOperation(value = "updateDataSource", notes = "UPDATE_DATA_SOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100"),
@ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"),
@ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"),
@ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"),
@ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"),
@ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"),
@ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"),
@ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"),
@ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"),
@ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String")
@ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType"),
@ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST", required = true, dataType = "String"),
@ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT", required = true, dataType = "String"),
@ApiImplicitParam(name = "database", value = "DATABASE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "password", value = "PASSWORD", dataType = "String"),
@ApiImplicitParam(name = "connectType", value = "CONNECT_TYPE", dataType = "DbConnectType"),
@ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType = "String")
})
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_DATASOURCE_ERROR)
public Result updateDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("id") int id,
@RequestParam("name") String name,
@ -148,72 +151,56 @@ public class DataSourceController extends BaseController {
@RequestParam(value = "principal") String principal,
@RequestParam(value = "userName") String userName,
@RequestParam(value = "password") String password,
@RequestParam(value = "connectType") DbConnectType connectType,
@RequestParam(value = "other") String other) {
logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, other: {}",
loginUser.getUserName(), name, note, type, other);
try {
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal, userName, password, other);
Map<String, Object> dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter);
return returnDataList(dataSource);
} catch (Exception e) {
logger.error(UPDATE_DATASOURCE_ERROR.getMsg(),e);
return error(UPDATE_DATASOURCE_ERROR.getCode(), UPDATE_DATASOURCE_ERROR.getMsg());
}
logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, connectType: {}, other: {}",
loginUser.getUserName(), name, note, type, connectType, other);
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other);
Map<String, Object> dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter);
return returnDataList(dataSource);
}
/**
* query data source detail
*
* @param loginUser login user
* @param id datasource id
* @param id datasource id
* @return data source detail
*/
@ApiOperation(value = "queryDataSource", notes= "QUERY_DATA_SOURCE_NOTES")
@ApiOperation(value = "queryDataSource", notes = "QUERY_DATA_SOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100")
@ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping(value = "/update-ui")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DATASOURCE_ERROR)
public Result queryDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("id") int id) {
logger.info("login user {}, query datasource: {}",
loginUser.getUserName(), id);
try {
Map<String, Object> result = dataSourceService.queryDataSource(id);
return returnDataList(result);
} catch (Exception e) {
logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e);
return error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg());
}
Map<String, Object> result = dataSourceService.queryDataSource(id);
return returnDataList(result);
}
/**
* query datasouce by type
*
* @param loginUser login user
* @param type data source type
* @param type data source type
* @return data source list page
*/
@ApiOperation(value = "queryDataSourceList", notes= "QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES")
@ApiOperation(value = "queryDataSourceList", notes = "QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType")
@ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType")
})
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DATASOURCE_ERROR)
public Result queryDataSourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("type") DbType type) {
try {
Map<String, Object> result = dataSourceService.queryDataSourceList(loginUser, type.ordinal());
return returnDataList(result);
} catch (Exception e) {
logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e);
return error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg());
}
Map<String, Object> result = dataSourceService.queryDataSourceList(loginUser, type.ordinal());
return returnDataList(result);
}
/**
@ -221,66 +208,64 @@ public class DataSourceController extends BaseController {
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param pageNo page number
* @param pageSize page size
* @return data source list page
*/
@ApiOperation(value = "queryDataSourceListPaging", notes= "QUERY_DATA_SOURCE_LIST_PAGING_NOTES")
@ApiOperation(value = "queryDataSourceListPaging", notes = "QUERY_DATA_SOURCE_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20")
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20")
})
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DATASOURCE_ERROR)
public Result queryDataSourceListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize) {
try {
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
} catch (Exception e) {
logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e);
return error(QUERY_DATASOURCE_ERROR.getCode(), QUERY_DATASOURCE_ERROR.getMsg());
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* connect datasource
*
* @param loginUser login user
* @param name data source name
* @param note data soruce description
* @param type data source type
* @param other other parameters
* @param host host
* @param port port
* @param database data base
* @param name data source name
* @param note data soruce description
* @param type data source type
* @param other other parameters
* @param host host
* @param port port
* @param database data base
* @param principal principal
* @param userName user name
* @param password password
* @param userName user name
* @param password password
* @return connect result code
*/
@ApiOperation(value = "connectDataSource", notes= "CONNECT_DATA_SOURCE_NOTES")
@ApiOperation(value = "connectDataSource", notes = "CONNECT_DATA_SOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"),
@ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"),
@ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"),
@ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"),
@ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"),
@ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"),
@ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"),
@ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"),
@ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String")
@ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType"),
@ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST", required = true, dataType = "String"),
@ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT", required = true, dataType = "String"),
@ApiImplicitParam(name = "database", value = "DATABASE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "password", value = "PASSWORD", dataType = "String"),
@ApiImplicitParam(name = "connectType", value = "CONNECT_TYPE", dataType = "DbConnectType"),
@ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType = "String")
})
@PostMapping(value = "/connect")
@ResponseStatus(HttpStatus.OK)
@ApiException(CONNECT_DATASOURCE_FAILURE)
public Result connectDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("name") String name,
@RequestParam(value = "note", required = false) String note,
@ -291,137 +276,115 @@ public class DataSourceController extends BaseController {
@RequestParam(value = "principal") String principal,
@RequestParam(value = "userName") String userName,
@RequestParam(value = "password") String password,
@RequestParam(value = "connectType") DbConnectType connectType,
@RequestParam(value = "other") String other) {
logger.info("login user {}, connect datasource: {} failure, note: {}, type: {}, other: {}",
loginUser.getUserName(), name, note, type, other);
try {
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other);
Boolean isConnection = dataSourceService.checkConnection(type, parameter);
Result result = new Result();
if (isConnection) {
putMsg(result, SUCCESS);
} else {
putMsg(result, CONNECT_DATASOURCE_FAILURE);
}
return result;
} catch (Exception e) {
logger.error(CONNECT_DATASOURCE_FAILURE.getMsg(),e);
return error(CONNECT_DATASOURCE_FAILURE.getCode(), CONNECT_DATASOURCE_FAILURE.getMsg());
logger.info("login user {}, connect datasource: {}, note: {}, type: {}, connectType: {}, other: {}",
loginUser.getUserName(), name, note, type, connectType, other);
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other);
Boolean isConnection = dataSourceService.checkConnection(type, parameter);
Result result = new Result();
if (isConnection) {
putMsg(result, SUCCESS);
} else {
putMsg(result, CONNECT_DATASOURCE_FAILURE);
}
return result;
}
/**
* connection test
*
* @param loginUser login user
* @param id data source id
* @param id data source id
* @return connect result code
*/
@ApiOperation(value = "connectionTest", notes= "CONNECT_DATA_SOURCE_TEST_NOTES")
@ApiOperation(value = "connectionTest", notes = "CONNECT_DATA_SOURCE_TEST_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100")
@ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/connect-by-id")
@ResponseStatus(HttpStatus.OK)
@ApiException(CONNECTION_TEST_FAILURE)
public Result connectionTest(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("id") int id) {
logger.info("connection test, login user:{}, id:{}", loginUser.getUserName(), id);
try {
Boolean isConnection = dataSourceService.connectionTest(loginUser, id);
Result result = new Result();
Boolean isConnection = dataSourceService.connectionTest(loginUser, id);
Result result = new Result();
if (isConnection) {
putMsg(result, SUCCESS);
} else {
putMsg(result, CONNECTION_TEST_FAILURE);
}
return result;
} catch (Exception e) {
logger.error(CONNECTION_TEST_FAILURE.getMsg(),e);
return error(CONNECTION_TEST_FAILURE.getCode(), CONNECTION_TEST_FAILURE.getMsg());
if (isConnection) {
putMsg(result, SUCCESS);
} else {
putMsg(result, CONNECTION_TEST_FAILURE);
}
return result;
}
/**
* delete datasource by id
*
* @param loginUser login user
* @param id datasource id
* @param id datasource id
* @return delete result
*/
@ApiOperation(value = "delete", notes= "DELETE_DATA_SOURCE_NOTES")
@ApiOperation(value = "delete", notes = "DELETE_DATA_SOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100")
@ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_DATA_SOURCE_FAILURE)
public Result delete(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("id") int id) {
try {
logger.info("delete datasource,login user:{}, id:{}", loginUser.getUserName(), id);
return dataSourceService.delete(loginUser, id);
} catch (Exception e) {
logger.error(DELETE_DATA_SOURCE_FAILURE.getMsg(),e);
return error(DELETE_DATA_SOURCE_FAILURE.getCode(), DELETE_DATA_SOURCE_FAILURE.getMsg());
}
logger.info("delete datasource,login user:{}, id:{}", loginUser.getUserName(), id);
return dataSourceService.delete(loginUser, id);
}
/**
* verify datasource name
*
* @param loginUser login user
* @param name data source name
* @param name data source name
* @return true if data source name not exists.otherwise return false
*/
@ApiOperation(value = "verifyDataSourceName", notes= "VERIFY_DATA_SOURCE_NOTES")
@ApiOperation(value = "verifyDataSourceName", notes = "VERIFY_DATA_SOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String")
@ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String")
})
@GetMapping(value = "/verify-name")
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_DATASOURCE_NAME_FAILURE)
public Result verifyDataSourceName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "name") String name
) {
logger.info("login user {}, verfiy datasource name: {}",
loginUser.getUserName(), name);
try {
return dataSourceService.verifyDataSourceName(loginUser, name);
} catch (Exception e) {
logger.error(VERIFY_DATASOURCE_NAME_FAILURE.getMsg(), e);
return error(VERIFY_DATASOURCE_NAME_FAILURE.getCode(), VERIFY_DATASOURCE_NAME_FAILURE.getMsg());
}
return dataSourceService.verifyDataSourceName(loginUser, name);
}
/**
* unauthorized datasource
*
* @param loginUser login user
* @param userId user id
* @param userId user id
* @return unauthed data source result code
*/
@ApiOperation(value = "unauthDatasource", notes= "UNAUTHORIZED_DATA_SOURCE_NOTES")
@ApiOperation(value = "unauthDatasource", notes = "UNAUTHORIZED_DATA_SOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100")
@ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/unauth-datasource")
@ResponseStatus(HttpStatus.OK)
@ApiException(UNAUTHORIZED_DATASOURCE)
public Result unauthDatasource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("userId") Integer userId) {
try {
logger.info("unauthorized datasource, login user:{}, unauthorized userId:{}",
loginUser.getUserName(), userId);
Map<String, Object> result = dataSourceService.unauthDatasource(loginUser, userId);
return returnDataList(result);
} catch (Exception e) {
logger.error(UNAUTHORIZED_DATASOURCE.getMsg(),e);
return error(UNAUTHORIZED_DATASOURCE.getCode(), UNAUTHORIZED_DATASOURCE.getMsg());
}
logger.info("unauthorized datasource, login user:{}, unauthorized userId:{}",
loginUser.getUserName(), userId);
Map<String, Object> result = dataSourceService.unauthDatasource(loginUser, userId);
return returnDataList(result);
}
@ -429,26 +392,22 @@ public class DataSourceController extends BaseController {
* authorized datasource
*
* @param loginUser login user
* @param userId user id
* @param userId user id
* @return authorized result code
*/
@ApiOperation(value = "authedDatasource", notes= "AUTHORIZED_DATA_SOURCE_NOTES")
@ApiOperation(value = "authedDatasource", notes = "AUTHORIZED_DATA_SOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100")
@ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/authed-datasource")
@ResponseStatus(HttpStatus.OK)
@ApiException(AUTHORIZED_DATA_SOURCE)
public Result authedDatasource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("userId") Integer userId) {
try {
logger.info("authorized data source, login user:{}, authorized useId:{}",
loginUser.getUserName(), userId);
Map<String, Object> result = dataSourceService.authedDatasource(loginUser, userId);
return returnDataList(result);
} catch (Exception e) {
logger.error(AUTHORIZED_DATA_SOURCE.getMsg(),e);
return error(AUTHORIZED_DATA_SOURCE.getCode(), AUTHORIZED_DATA_SOURCE.getMsg());
}
logger.info("authorized data source, login user:{}, authorized useId:{}",
loginUser.getUserName(), userId);
Map<String, Object> result = dataSourceService.authedDatasource(loginUser, userId);
return returnDataList(result);
}
/**
@ -457,17 +416,13 @@ public class DataSourceController extends BaseController {
* @param loginUser login user
* @return user info data
*/
@ApiOperation(value = "getKerberosStartupState", notes= "GET_USER_INFO_NOTES")
@GetMapping(value="/kerberos-startup-state")
@ApiOperation(value = "getKerberosStartupState", notes = "GET_USER_INFO_NOTES")
@GetMapping(value = "/kerberos-startup-state")
@ResponseStatus(HttpStatus.OK)
public Result getKerberosStartupState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){
@ApiException(KERBEROS_STARTUP_STATE)
public Result getKerberosStartupState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {}", loginUser.getUserName());
try{
// if upload resource is HDFS and kerberos startup is true , else false
return success(Status.SUCCESS.getMsg(), CommonUtils.getKerberosStartupState());
}catch (Exception e){
logger.error(KERBEROS_STARTUP_STATE.getMsg(),e);
return error(Status.KERBEROS_STARTUP_STATE.getCode(), Status.KERBEROS_STARTUP_STATE.getMsg());
}
// if upload resource is HDFS and kerberos startup is true , else false
return success(Status.SUCCESS.getMsg(), CommonUtils.getKerberosStartupState());
}
}

159
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java

@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.ExecuteType;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -32,8 +32,11 @@ import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.text.ParseException;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* execute process controller
@ -50,43 +53,45 @@ public class ExecutorController extends BaseController {
/**
* execute process instance
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param scheduleTime schedule time
* @param failureStrategy failure strategy
* @param startNodeList start nodes list
* @param taskDependType task depend type
* @param execType execute type
* @param warningType warning type
* @param warningGroupId warning group id
* @param receivers receivers
* @param receiversCc receivers cc
* @param runMode run mode
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param scheduleTime schedule time
* @param failureStrategy failure strategy
* @param startNodeList start nodes list
* @param taskDependType task depend type
* @param execType execute type
* @param warningType warning type
* @param warningGroupId warning group id
* @param receivers receivers
* @param receiversCc receivers cc
* @param runMode run mode
* @param processInstancePriority process instance priority
* @param workerGroupId worker group id
* @param timeout timeout
* @param workerGroup worker group
* @param timeout timeout
* @return start process result code
*/
@ApiOperation(value = "startProcessInstance", notes= "RUN_PROCESS_INSTANCE_NOTES")
@ApiOperation(value = "startProcessInstance", notes = "RUN_PROCESS_INSTANCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", required = true, dataType = "String"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType ="FailureStrategy"),
@ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType ="String"),
@ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType ="TaskDependType"),
@ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType ="CommandType"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE",required = true, dataType ="WarningType"),
@ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID",required = true, dataType ="Int", example = "100"),
@ApiImplicitParam(name = "receivers", value = "RECEIVERS",dataType ="String" ),
@ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC",dataType ="String" ),
@ApiImplicitParam(name = "runMode", value = "RUN_MODE",dataType ="RunMode" ),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority" ),
@ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int",example = "100"),
@ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int",example = "100"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType = "FailureStrategy"),
@ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType = "String"),
@ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType = "TaskDependType"),
@ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType = "CommandType"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", required = true, dataType = "WarningType"),
@ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "receivers", value = "RECEIVERS", dataType = "String"),
@ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", dataType = "String"),
@ApiImplicitParam(name = "runMode", value = "RUN_MODE", dataType = "RunMode"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority"),
@ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"),
@ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int", example = "100"),
})
@PostMapping(value = "start-process-instance")
@ResponseStatus(HttpStatus.OK)
@ApiException(START_PROCESS_INSTANCE_ERROR)
public Result startProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") int processDefinitionId,
@ -101,99 +106,85 @@ public class ExecutorController extends BaseController {
@RequestParam(value = "receiversCc", required = false) String receiversCc,
@RequestParam(value = "runMode", required = false) RunMode runMode,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority,
@RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId,
@RequestParam(value = "timeout", required = false) Integer timeout) {
try {
logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, "
+ "failure policy: {}, node name: {}, node dep: {}, notify type: {}, "
+ "notify group id: {},receivers:{},receiversCc:{}, run mode: {},process instance priority:{}, workerGroupId: {}, timeout: {}",
loginUser.getUserName(), projectName, processDefinitionId, scheduleTime,
failureStrategy, startNodeList, taskDependType, warningType, warningGroupId,receivers,receiversCc,runMode,processInstancePriority,
workerGroupId, timeout);
if (timeout == null) {
timeout = Constants.MAX_TASK_TIMEOUT;
}
Map<String, Object> result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy,
startNodeList, taskDependType, warningType,
warningGroupId,receivers,receiversCc, runMode,processInstancePriority, workerGroupId, timeout);
return returnDataList(result);
} catch (Exception e) {
logger.error(Status.START_PROCESS_INSTANCE_ERROR.getMsg(),e);
return error(Status.START_PROCESS_INSTANCE_ERROR.getCode(), Status.START_PROCESS_INSTANCE_ERROR.getMsg());
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "timeout", required = false) Integer timeout) throws ParseException {
logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, "
+ "failure policy: {}, node name: {}, node dep: {}, notify type: {}, "
+ "notify group id: {},receivers:{},receiversCc:{}, run mode: {},process instance priority:{}, workerGroup: {}, timeout: {}",
loginUser.getUserName(), projectName, processDefinitionId, scheduleTime,
failureStrategy, startNodeList, taskDependType, warningType, workerGroup, receivers, receiversCc, runMode, processInstancePriority,
workerGroup, timeout);
if (timeout == null) {
timeout = Constants.MAX_TASK_TIMEOUT;
}
Map<String, Object> result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy,
startNodeList, taskDependType, warningType,
warningGroupId, receivers, receiversCc, runMode, processInstancePriority, workerGroup, timeout);
return returnDataList(result);
}
/**
* do action to process instancepause, stop, repeat, recover from pause, recover from stop
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @param executeType execute type
* @param executeType execute type
* @return execute result code
*/
@ApiOperation(value = "execute", notes= "EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES")
@ApiOperation(value = "execute", notes = "EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "executeType", value = "EXECUTE_TYPE", required = true, dataType = "ExecuteType")
})
@PostMapping(value = "/execute")
@ResponseStatus(HttpStatus.OK)
@ApiException(EXECUTE_PROCESS_INSTANCE_ERROR)
public Result execute(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processInstanceId") Integer processInstanceId,
@RequestParam("executeType") ExecuteType executeType
) {
try {
logger.info("execute command, login user: {}, project:{}, process instance id:{}, execute type:{}",
loginUser.getUserName(), projectName, processInstanceId, executeType);
Map<String, Object> result = execService.execute(loginUser, projectName, processInstanceId, executeType);
return returnDataList(result);
} catch (Exception e) {
logger.error(Status.EXECUTE_PROCESS_INSTANCE_ERROR.getMsg(),e);
return error(Status.EXECUTE_PROCESS_INSTANCE_ERROR.getCode(), Status.EXECUTE_PROCESS_INSTANCE_ERROR.getMsg());
}
logger.info("execute command, login user: {}, project:{}, process instance id:{}, execute type:{}",
loginUser.getUserName(), projectName, processInstanceId, executeType);
Map<String, Object> result = execService.execute(loginUser, projectName, processInstanceId, executeType);
return returnDataList(result);
}
/**
* check process definition and all of the son process definitions is on line.
*
* @param loginUser login user
* @param loginUser login user
* @param processDefinitionId process definition id
* @return check result code
*/
@ApiOperation(value = "startCheckProcessDefinition", notes= "START_CHECK_PROCESS_DEFINITION_NOTES")
@ApiOperation(value = "startCheckProcessDefinition", notes = "START_CHECK_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping(value = "/start-check")
@ResponseStatus(HttpStatus.OK)
@ApiException(CHECK_PROCESS_DEFINITION_ERROR)
public Result startCheckProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "processDefinitionId") int processDefinitionId) {
@RequestParam(value = "processDefinitionId") int processDefinitionId) {
logger.info("login user {}, check process definition {}", loginUser.getUserName(), processDefinitionId);
try {
Map<String, Object> result = execService.startCheckByProcessDefinedId(processDefinitionId);
return returnDataList(result);
} catch (Exception e) {
logger.error(Status.CHECK_PROCESS_DEFINITION_ERROR.getMsg(),e);
return error(Status.CHECK_PROCESS_DEFINITION_ERROR.getCode(), Status.CHECK_PROCESS_DEFINITION_ERROR.getMsg());
}
Map<String, Object> result = execService.startCheckByProcessDefinedId(processDefinitionId);
return returnDataList(result);
}
/**
* query recipients and copyers by process definition ID
*
* @param loginUser login user
* @param loginUser login user
* @param processDefinitionId process definition id
* @param processInstanceId process instance id
* @param processInstanceId process instance id
* @return receivers cc list
*/
@ApiIgnore
@ApiOperation(value = "getReceiverCc", notes= "GET_RECEIVER_CC_NOTES")
@ApiOperation(value = "getReceiverCc", notes = "GET_RECEIVER_CC_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100")
@ -201,17 +192,13 @@ public class ExecutorController extends BaseController {
})
@GetMapping(value = "/get-receiver-cc")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR)
public Result getReceiverCc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "processDefinitionId",required = false) Integer processDefinitionId,
@RequestParam(value = "processInstanceId",required = false) Integer processInstanceId) {
@RequestParam(value = "processDefinitionId", required = false) Integer processDefinitionId,
@RequestParam(value = "processInstanceId", required = false) Integer processInstanceId) {
logger.info("login user {}, get process definition receiver and cc", loginUser.getUserName());
try {
Map<String, Object> result = execService.getReceiverCc(processDefinitionId,processInstanceId);
return returnDataList(result);
} catch (Exception e) {
logger.error(Status.QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg(),e);
return error(Status.QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getCode(), Status.QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg());
}
Map<String, Object> result = execService.getReceiverCc(processDefinitionId, processInstanceId);
return returnDataList(result);
}

58
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.LoggerService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -25,7 +26,6 @@ import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.apache.dolphinscheduler.api.enums.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -35,6 +35,8 @@ import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* log controller
@ -52,61 +54,53 @@ public class LoggerController extends BaseController {
/**
* query task log
* @param loginUser login user
*
* @param loginUser login user
* @param taskInstanceId task instance id
* @param skipNum skip number
* @param limit limit
* @param skipNum skip number
* @param limit limit
* @return task log content
*/
@ApiOperation(value = "queryLog", notes= "QUERY_TASK_INSTANCE_LOG_NOTES")
@ApiOperation(value = "queryLog", notes = "QUERY_TASK_INSTANCE_LOG_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "taskInstId", value = "TASK_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", dataType ="Int", example = "100"),
@ApiImplicitParam(name = "limit", value = "LIMIT", dataType ="Int", example = "100")
@ApiImplicitParam(name = "taskInstanceId", value = "TASK_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "limit", value = "LIMIT", dataType = "Int", example = "100")
})
@GetMapping(value = "/detail")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TASK_INSTANCE_LOG_ERROR)
public Result queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "taskInstId") int taskInstanceId,
@RequestParam(value = "taskInstanceId") int taskInstanceId,
@RequestParam(value = "skipLineNum") int skipNum,
@RequestParam(value = "limit") int limit) {
try {
logger.info(
"login user {}, view {} task instance log ,skipLineNum {} , limit {}", loginUser.getUserName(), taskInstanceId, skipNum, limit);
return loggerService.queryLog(taskInstanceId, skipNum, limit);
} catch (Exception e) {
logger.error(Status.QUERY_TASK_INSTANCE_LOG_ERROR.getMsg(), e);
return error(Status.QUERY_TASK_INSTANCE_LOG_ERROR.getCode(), Status.QUERY_TASK_INSTANCE_LOG_ERROR.getMsg());
}
logger.info(
"login user {}, view {} task instance log ,skipLineNum {} , limit {}", loginUser.getUserName(), taskInstanceId, skipNum, limit);
return loggerService.queryLog(taskInstanceId, skipNum, limit);
}
/**
* download log file
*
* @param loginUser login user
* @param loginUser login user
* @param taskInstanceId task instance id
* @return log file content
*/
@ApiOperation(value = "downloadTaskLog", notes= "DOWNLOAD_TASK_INSTANCE_LOG_NOTES")
@ApiOperation(value = "downloadTaskLog", notes = "DOWNLOAD_TASK_INSTANCE_LOG_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "taskInstId", value = "TASK_ID",dataType = "Int", example = "100")
@ApiImplicitParam(name = "taskInstanceId", value = "TASK_ID", dataType = "Int", example = "100")
})
@GetMapping(value = "/download-log")
@ResponseBody
@ApiException(DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR)
public ResponseEntity downloadTaskLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "taskInstId") int taskInstanceId) {
try {
byte[] logBytes = loggerService.getLogBytes(taskInstanceId);
return ResponseEntity
.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + System.currentTimeMillis() + ".log" + "\"")
.body(logBytes);
} catch (Exception e) {
logger.error(Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg(), e);
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg());
}
@RequestParam(value = "taskInstanceId") int taskInstanceId) {
byte[] logBytes = loggerService.getLogBytes(taskInstanceId);
return ResponseEntity
.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + System.currentTimeMillis() + ".log" + "\"")
.body(logBytes);
}
}

91
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java

@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.security.Authenticator;
import org.apache.dolphinscheduler.api.service.SessionService;
import org.apache.dolphinscheduler.api.utils.Result;
@ -42,7 +43,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* user login controller
*
* <p>
* swagger bootstrap ui docs refer : https://doc.xiaominfo.com/guide/enh-func.html
*/
@Api(tags = "LOGIN_TAG", position = 1)
@ -63,81 +64,71 @@ public class LoginController extends BaseController {
/**
* login
*
* @param userName user name
* @param userName user name
* @param userPassword user password
* @param request request
* @param response response
* @param request request
* @param response response
* @return login result
*/
@ApiOperation(value = "login", notes= "LOGIN_NOTES")
@ApiOperation(value = "login", notes = "LOGIN_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", required = true, dataType ="String")
@ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", required = true, dataType = "String")
})
@PostMapping(value = "/login")
@ApiException(USER_LOGIN_FAILURE)
public Result login(@RequestParam(value = "userName") String userName,
@RequestParam(value = "userPassword") String userPassword,
HttpServletRequest request,
HttpServletResponse response) {
logger.info("login user name: {} ", userName);
try {
logger.info("login user name: {} ", userName);
//user name check
if (StringUtils.isEmpty(userName)) {
return error(Status.USER_NAME_NULL.getCode(),
Status.USER_NAME_NULL.getMsg());
}
// user ip check
String ip = getClientIpAddress(request);
if (StringUtils.isEmpty(ip)) {
return error(IP_IS_EMPTY.getCode(), IP_IS_EMPTY.getMsg());
}
// verify username and password
Result<Map<String, String>> result = authenticator.authenticate(userName, userPassword, ip);
if (result.getCode() != Status.SUCCESS.getCode()) {
return result;
}
response.setStatus(HttpStatus.SC_OK);
Map<String, String> cookieMap = result.getData();
for (Map.Entry<String, String> cookieEntry : cookieMap.entrySet()) {
Cookie cookie = new Cookie(cookieEntry.getKey(), cookieEntry.getValue());
cookie.setHttpOnly(true);
response.addCookie(cookie);
}
//user name check
if (StringUtils.isEmpty(userName)) {
return error(Status.USER_NAME_NULL.getCode(),
Status.USER_NAME_NULL.getMsg());
}
// user ip check
String ip = getClientIpAddress(request);
if (StringUtils.isEmpty(ip)) {
return error(IP_IS_EMPTY.getCode(), IP_IS_EMPTY.getMsg());
}
// verify username and password
Result<Map<String, String>> result = authenticator.authenticate(userName, userPassword, ip);
if (result.getCode() != Status.SUCCESS.getCode()) {
return result;
} catch (Exception e) {
logger.error(USER_LOGIN_FAILURE.getMsg(),e);
return error(USER_LOGIN_FAILURE.getCode(), USER_LOGIN_FAILURE.getMsg());
}
response.setStatus(HttpStatus.SC_OK);
Map<String, String> cookieMap = result.getData();
for (Map.Entry<String, String> cookieEntry : cookieMap.entrySet()) {
Cookie cookie = new Cookie(cookieEntry.getKey(), cookieEntry.getValue());
cookie.setHttpOnly(true);
response.addCookie(cookie);
}
return result;
}
/**
* sign out
*
* @param loginUser login user
* @param request request
* @param request request
* @return sign out result
*/
@ApiOperation(value = "signOut", notes = "SIGNOUT_NOTES")
@PostMapping(value = "/signOut")
@ApiException(SIGN_OUT_ERROR)
public Result signOut(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
HttpServletRequest request) {
try {
logger.info("login user:{} sign out", loginUser.getUserName());
String ip = getClientIpAddress(request);
sessionService.signOut(ip, loginUser);
//clear session
request.removeAttribute(Constants.SESSION_USER);
return success();
} catch (Exception e) {
logger.error(SIGN_OUT_ERROR.getMsg(),e);
return error(SIGN_OUT_ERROR.getCode(), SIGN_OUT_ERROR.getMsg());
}
logger.info("login user:{} sign out", loginUser.getUserName());
String ip = getClientIpAddress(request);
sessionService.signOut(ip, loginUser);
//clear session
request.removeAttribute(Constants.SESSION_USER);
return success();
}
}

63
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.MonitorService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -33,13 +34,14 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* monitor controller
*/
@Api(tags = "MONITOR_TAG", position = 1)
@RestController
@RequestMapping("/monitor")
public class MonitorController extends BaseController{
public class MonitorController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(MonitorController.class);
@ -48,84 +50,67 @@ public class MonitorController extends BaseController{
/**
* master list
*
* @param loginUser login user
* @return master list
*/
@ApiOperation(value = "listMaster", notes= "MASTER_LIST_NOTES")
@ApiOperation(value = "listMaster", notes = "MASTER_LIST_NOTES")
@GetMapping(value = "/master/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(LIST_MASTERS_ERROR)
public Result listMaster(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user: {}, query all master", loginUser.getUserName());
try{
logger.info("list master, user:{}", loginUser.getUserName());
Map<String, Object> result = monitorService.queryMaster(loginUser);
return returnDataList(result);
}catch (Exception e){
logger.error(LIST_MASTERS_ERROR.getMsg(),e);
return error(LIST_MASTERS_ERROR.getCode(),
LIST_MASTERS_ERROR.getMsg());
}
logger.info("list master, user:{}", loginUser.getUserName());
Map<String, Object> result = monitorService.queryMaster(loginUser);
return returnDataList(result);
}
/**
* worker list
*
* @param loginUser login user
* @return worker information list
*/
@ApiOperation(value = "listWorker", notes= "WORKER_LIST_NOTES")
@ApiOperation(value = "listWorker", notes = "WORKER_LIST_NOTES")
@GetMapping(value = "/worker/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(LIST_WORKERS_ERROR)
public Result listWorker(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user: {}, query all workers", loginUser.getUserName());
try{
Map<String, Object> result = monitorService.queryWorker(loginUser);
return returnDataList(result);
}catch (Exception e){
logger.error(LIST_WORKERS_ERROR.getMsg(),e);
return error(LIST_WORKERS_ERROR.getCode(),
LIST_WORKERS_ERROR.getMsg());
}
Map<String, Object> result = monitorService.queryWorker(loginUser);
return returnDataList(result);
}
/**
* query database state
*
* @param loginUser login user
* @return data base state
*/
@ApiOperation(value = "queryDatabaseState", notes= "QUERY_DATABASE_STATE_NOTES")
@ApiOperation(value = "queryDatabaseState", notes = "QUERY_DATABASE_STATE_NOTES")
@GetMapping(value = "/database")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DATABASE_STATE_ERROR)
public Result queryDatabaseState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user: {}, query database state", loginUser.getUserName());
try{
Map<String, Object> result = monitorService.queryDatabaseState(loginUser);
return returnDataList(result);
}catch (Exception e){
logger.error(QUERY_DATABASE_STATE_ERROR.getMsg(),e);
return error(QUERY_DATABASE_STATE_ERROR.getCode(),
QUERY_DATABASE_STATE_ERROR.getMsg());
}
Map<String, Object> result = monitorService.queryDatabaseState(loginUser);
return returnDataList(result);
}
/**
* query zookeeper state
*
* @param loginUser login user
* @return zookeeper information list
*/
@ApiOperation(value = "queryZookeeperState", notes= "QUERY_ZOOKEEPER_STATE_NOTES")
@ApiOperation(value = "queryZookeeperState", notes = "QUERY_ZOOKEEPER_STATE_NOTES")
@GetMapping(value = "/zookeeper/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_ZOOKEEPER_STATE_ERROR)
public Result queryZookeeperState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user: {}, query zookeeper state", loginUser.getUserName());
try{
Map<String, Object> result = monitorService.queryZookeeperState(loginUser);
return returnDataList(result);
}catch (Exception e){
logger.error(QUERY_ZOOKEEPER_STATE_ERROR.getMsg(),e);
return error(QUERY_ZOOKEEPER_STATE_ERROR.getCode(),
QUERY_ZOOKEEPER_STATE_ERROR.getMsg());
}
Map<String, Object> result = monitorService.queryZookeeperState(loginUser);
return returnDataList(result);
}
}

526
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java

@ -16,7 +16,9 @@
*/
package org.apache.dolphinscheduler.api.controller;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -37,6 +39,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* process definition controller
@ -44,7 +48,7 @@ import java.util.Map;
@Api(tags = "PROCESS_DEFINITION_TAG", position = 2)
@RestController
@RequestMapping("projects/{projectName}/process")
public class ProcessDefinitionController extends BaseController{
public class ProcessDefinitionController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionController.class);
@ -54,130 +58,144 @@ public class ProcessDefinitionController extends BaseController{
/**
* create process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param json process definition json
* @param name process definition name
* @param json process definition json
* @param description description
* @param locations locations for nodes
* @param connects connects for nodes
* @param locations locations for nodes
* @param connects connects for nodes
* @return create result code
*/
@ApiOperation(value = "save", notes= "CREATE_PROCESS_DEFINITION_NOTES")
@ApiOperation(value = "save", notes = "CREATE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type ="String"),
@ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type ="String"),
@ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type ="String"),
@ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type ="String"),
@ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type = "String"),
@ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type = "String"),
@ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type = "String"),
@ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String"),
})
@PostMapping(value = "/save")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_PROCESS_DEFINITION)
public Result createProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name,
@RequestParam(value = "processDefinitionJson", required = true) String json,
@RequestParam(value = "locations", required = true) String locations,
@RequestParam(value = "connects", required = true) String connects,
@RequestParam(value = "description", required = false) String description) {
@RequestParam(value = "description", required = false) String description) throws JsonProcessingException {
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " +
"process_definition_json: {}, desc: {} locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, json, description, locations, connects);
Map<String, Object> result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json,
description, locations, connects);
return returnDataList(result);
}
try {
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " +
"process_definition_json: {}, desc: {} locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, json, description, locations, connects);
Map<String, Object> result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json,
description, locations, connects);
return returnDataList(result);
} catch (Exception e) {
logger.error(Status.CREATE_PROCESS_DEFINITION.getMsg(), e);
return error(Status.CREATE_PROCESS_DEFINITION.getCode(), Status.CREATE_PROCESS_DEFINITION.getMsg());
}
/**
* copy process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @return copy result code
*/
@ApiOperation(value = "copyProcessDefinition", notes= "COPY_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping(value = "/copy")
@ResponseStatus(HttpStatus.OK)
@ApiException(COPY_PROCESS_DEFINITION_ERROR)
public Result copyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processId", required = true) int processId) throws JsonProcessingException {
logger.info("copy process definition, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processId);
Map<String, Object> result = processDefinitionService.copyProcessDefinition(loginUser, projectName, processId);
return returnDataList(result);
}
/**
* verify process definition name unique
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param name name
* @param name name
* @return true if process definition name not exists, otherwise false
*/
@ApiOperation(value = "verify-name", notes = "VERIFY_PROCCESS_DEFINITION_NAME_NOTES")
@ApiOperation(value = "verify-name", notes = "VERIFY_PROCESS_DEFINITION_NAME_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String")
})
@GetMapping(value = "/verify-name")
@ResponseStatus(HttpStatus.OK)
public Result verifyProccessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name){
try {
logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}",
loginUser.getUserName(), projectName, name);
Map<String, Object> result = processDefinitionService.verifyProccessDefinitionName(loginUser, projectName, name);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg(),e);
return error(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getCode(), Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg());
}
@ApiException(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR)
public Result verifyProcessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name) {
logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}",
loginUser.getUserName(), projectName, name);
Map<String, Object> result = processDefinitionService.verifyProcessDefinitionName(loginUser, projectName, name);
return returnDataList(result);
}
/**
* update process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param processDefinitionJson process definition json
* @param description description
* @param locations locations for nodes
* @param connects connects for nodes
* @param description description
* @param locations locations for nodes
* @param connects connects for nodes
* @return update result code
*/
@ApiOperation(value = "updateProccessDefinition", notes= "UPDATE_PROCCESS_DEFINITION_NOTES")
@ApiOperation(value = "updateProcessDefinition", notes= "UPDATE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "id", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type ="String"),
@ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type ="String"),
@ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type ="String"),
@ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type ="String"),
@ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type = "String"),
@ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type = "String"),
@ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type = "String"),
@ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String"),
})
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.OK)
public Result updateProccessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name,
@RequestParam(value = "id", required = true) int id,
@RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson,
@RequestParam(value = "locations", required = false) String locations,
@RequestParam(value = "connects", required = false) String connects,
@RequestParam(value = "description", required = false) String description) {
try {
logger.info("login user {}, update process define, project name: {}, process define name: {}, " +
"process_definition_json: {}, desc: {}, locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, processDefinitionJson,description, locations, connects);
Map<String, Object> result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name,
processDefinitionJson, description, locations, connects);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.UPDATE_PROCESS_DEFINITION_ERROR.getMsg(),e);
return error(Status.UPDATE_PROCESS_DEFINITION_ERROR.getCode(), Status.UPDATE_PROCESS_DEFINITION_ERROR.getMsg());
}
@ApiException(UPDATE_PROCESS_DEFINITION_ERROR)
public Result updateProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name,
@RequestParam(value = "id", required = true) int id,
@RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson,
@RequestParam(value = "locations", required = false) String locations,
@RequestParam(value = "connects", required = false) String connects,
@RequestParam(value = "description", required = false) String description) {
logger.info("login user {}, update process define, project name: {}, process define name: {}, " +
"process_definition_json: {}, desc: {}, locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, processDefinitionJson, description, locations, connects);
Map<String, Object> result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name,
processDefinitionJson, description, locations, connects);
return returnDataList(result);
}
/**
* release process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param releaseState release state
* @return release result code
*/
@ApiOperation(value = "releaseProccessDefinition", notes= "RELEASE_PROCCESS_DEFINITION_NOTES")
@ApiOperation(value = "releaseProcessDefinition", notes= "RELEASE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ -185,344 +203,296 @@ public class ProcessDefinitionController extends BaseController{
})
@PostMapping(value = "/release")
@ResponseStatus(HttpStatus.OK)
public Result releaseProccessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName,
@RequestParam(value = "processId", required = true) int processId,
@RequestParam(value = "releaseState", required = true) int releaseState) {
try {
logger.info("login user {}, release process definition, project name: {}, release state: {}",
loginUser.getUserName(), projectName, releaseState);
Map<String, Object> result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, processId, releaseState);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.RELEASE_PROCESS_DEFINITION_ERROR.getMsg(),e);
return error(Status.RELEASE_PROCESS_DEFINITION_ERROR.getCode(), Status.RELEASE_PROCESS_DEFINITION_ERROR.getMsg());
}
@ApiException(RELEASE_PROCESS_DEFINITION_ERROR)
public Result releaseProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processId", required = true) int processId,
@RequestParam(value = "releaseState", required = true) int releaseState) {
logger.info("login user {}, release process definition, project name: {}, release state: {}",
loginUser.getUserName(), projectName, releaseState);
Map<String, Object> result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, processId, releaseState);
return returnDataList(result);
}
/**
* query datail of process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param processId process definition id
* @return process definition detail
*/
@ApiOperation(value = "queryProccessDefinitionById", notes= "QUERY_PROCCESS_DEFINITION_BY_ID_NOTES")
@ApiOperation(value = "queryProcessDefinitionById", notes= "QUERY_PROCESS_DEFINITION_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value="/select-by-id")
@GetMapping(value = "/select-by-id")
@ResponseStatus(HttpStatus.OK)
public Result queryProccessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName,
@ApiException(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR)
public Result queryProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processId") Integer processId
){
try{
logger.info("query datail of process definition, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processId);
Map<String, Object> result = processDefinitionService.queryProccessDefinitionById(loginUser, projectName, processId);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg(),e);
return error(Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getCode(), Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg());
}
) {
logger.info("query detail of process definition, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processId);
Map<String, Object> result = processDefinitionService.queryProcessDefinitionById(loginUser, projectName, processId);
return returnDataList(result);
}
/**
* query proccess definition list
* query Process definition list
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @return process definition list
*/
@ApiOperation(value = "queryProccessDefinitionList", notes= "QUERY_PROCCESS_DEFINITION_LIST_NOTES")
@GetMapping(value="/list")
@ApiOperation(value = "queryProcessDefinitionList", notes = "QUERY_PROCESS_DEFINITION_LIST_NOTES")
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
public Result queryProccessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName
){
try{
logger.info("query proccess definition list, login user:{}, project name:{}",
loginUser.getUserName(), projectName);
Map<String, Object> result = processDefinitionService.queryProccessDefinitionList(loginUser, projectName);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg(),e);
return error(Status.QUERY_PROCCESS_DEFINITION_LIST.getCode(), Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg());
}
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
public Result queryProcessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName
) {
logger.info("query process definition list, login user:{}, project name:{}",
loginUser.getUserName(), projectName);
Map<String, Object> result = processDefinitionService.queryProcessDefinitionList(loginUser, projectName);
return returnDataList(result);
}
/**
* query proccess definition list paging
* @param loginUser login user
* query process definition list paging
*
* @param loginUser login user
* @param projectName project name
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
@ApiOperation(value = "queryProcessDefinitionListPaging", notes= "QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES")
@ApiOperation(value = "queryProcessDefinitionListPaging", notes= "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, type = "String"),
@ApiImplicitParam(name = "userId", value = "USER_ID", required = false, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100")
})
@GetMapping(value="/list-paging")
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR)
public Result queryProcessDefinitionListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam(value = "userId", required = false, defaultValue = "0") Integer userId,
@RequestParam("pageSize") Integer pageSize){
try{
logger.info("query proccess definition list paging, login user:{}, project name:{}", loginUser.getUserName(), projectName);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if(result.get(Constants.STATUS) != Status.SUCCESS){
return returnDataListPaging(result);
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = processDefinitionService.queryProcessDefinitionListPaging(loginUser, projectName, searchVal, pageNo, pageSize, userId);
@RequestParam("pageSize") Integer pageSize) {
logger.info("query process definition list paging, login user:{}, project name:{}", loginUser.getUserName(), projectName);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg(),e);
return error(Status.QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getCode(), Status.QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg());
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = processDefinitionService.queryProcessDefinitionListPaging(loginUser, projectName, searchVal, pageNo, pageSize, userId);
return returnDataListPaging(result);
}
/**
* encapsulation treeview structure
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param limit limit
* @param id process definition id
* @param limit limit
* @return tree view json data
*/
@ApiOperation(value = "viewTree", notes= "VIEW_TREE_NOTES")
@ApiOperation(value = "viewTree", notes = "VIEW_TREE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType = "Int", example = "100")
})
@GetMapping(value="/view-tree")
@GetMapping(value = "/view-tree")
@ResponseStatus(HttpStatus.OK)
@ApiException(ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR)
public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName,
@RequestParam("processId") Integer id,
@RequestParam("limit") Integer limit){
try{
Map<String, Object> result = processDefinitionService.viewTree(id, limit);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg(),e);
return error(Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getCode(), Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg());
}
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processId") Integer id,
@RequestParam("limit") Integer limit) throws Exception {
Map<String, Object> result = processDefinitionService.viewTree(id, limit);
return returnDataList(result);
}
/**
*
* get tasks list by process definition id
*
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return task list
*/
@ApiOperation(value = "getNodeListByDefinitionId", notes= "GET_NODE_LIST_BY_DEFINITION_ID_NOTES")
@ApiOperation(value = "getNodeListByDefinitionId", notes = "GET_NODE_LIST_BY_DEFINITION_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value="gen-task-list")
@GetMapping(value = "gen-task-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR)
public Result getNodeListByDefinitionId(
@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName,
@RequestParam("processDefinitionId") Integer processDefinitionId){
try {
logger.info("query task node name list by definitionId, login user:{}, project name:{}, id : {}",
loginUser.getUserName(), projectName, processDefinitionId);
Map<String, Object> result = processDefinitionService.getTaskNodeListByDefinitionId(processDefinitionId);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e);
return error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg());
}
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionId") Integer processDefinitionId) throws Exception {
logger.info("query task node name list by definitionId, login user:{}, project name:{}, id : {}",
loginUser.getUserName(), projectName, processDefinitionId);
Map<String, Object> result = processDefinitionService.getTaskNodeListByDefinitionId(processDefinitionId);
return returnDataList(result);
}
/**
*
* get tasks list by process definition id
*
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIdList process definition id list
* @return node list data
*/
@ApiOperation(value = "getNodeListByDefinitionIdList", notes= "GET_NODE_LIST_BY_DEFINITION_ID_NOTES")
@ApiOperation(value = "getNodeListByDefinitionIdList", notes = "GET_NODE_LIST_BY_DEFINITION_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIdList", value = "PROCESS_DEFINITION_ID_LIST", required = true, type = "String")
})
@GetMapping(value="get-task-list")
@GetMapping(value = "get-task-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR)
public Result getNodeListByDefinitionIdList(
@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName,
@RequestParam("processDefinitionIdList") String processDefinitionIdList){
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionIdList") String processDefinitionIdList) throws Exception {
try {
logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}",
loginUser.getUserName(), projectName, processDefinitionIdList);
Map<String, Object> result = processDefinitionService.getTaskNodeListByDefinitionIdList(processDefinitionIdList);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e);
return error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg());
}
logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}",
loginUser.getUserName(), projectName, processDefinitionIdList);
Map<String, Object> result = processDefinitionService.getTaskNodeListByDefinitionIdList(processDefinitionIdList);
return returnDataList(result);
}
/**
* delete process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return delete result code
*/
@ApiOperation(value = "deleteProcessDefinitionById", notes= "DELETE_PROCESS_DEFINITION_BY_ID_NOTES")
@ApiOperation(value = "deleteProcessDefinitionById", notes = "DELETE_PROCESS_DEFINITION_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/delete")
@GetMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_PROCESS_DEFINE_BY_ID_ERROR)
public Result deleteProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionId") Integer processDefinitionId
){
try{
logger.info("delete process definition by id, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processDefinitionId);
Map<String, Object> result = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getMsg(),e);
return error(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getCode(), Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getMsg());
}
@RequestParam("processDefinitionId") Integer processDefinitionId
) {
logger.info("delete process definition by id, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processDefinitionId);
Map<String, Object> result = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId);
return returnDataList(result);
}
/**
* batch delete process definition by ids
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition id list
* @return delete result code
*/
@ApiOperation(value = "batchDeleteProcessDefinitionByIds", notes= "BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES")
@ApiOperation(value = "batchDeleteProcessDefinitionByIds", notes = "BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", type = "String")
})
@GetMapping(value="/batch-delete")
@GetMapping(value = "/batch-delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR)
public Result batchDeleteProcessDefinitionByIds(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionIds") String processDefinitionIds
){
try{
logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}",
loginUser.getUserName(), projectName, processDefinitionIds);
Map<String, Object> result = new HashMap<>(5);
List<String> deleteFailedIdList = new ArrayList<>();
if(StringUtils.isNotEmpty(processDefinitionIds)){
String[] processDefinitionIdArray = processDefinitionIds.split(",");
for (String strProcessDefinitionId:processDefinitionIdArray) {
int processDefinitionId = Integer.parseInt(strProcessDefinitionId);
try {
Map<String, Object> deleteResult = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId);
if(!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))){
deleteFailedIdList.add(strProcessDefinitionId);
logger.error((String)deleteResult.get(Constants.MSG));
}
} catch (Exception e) {
@RequestParam("processDefinitionIds") String processDefinitionIds
) {
logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}",
loginUser.getUserName(), projectName, processDefinitionIds);
Map<String, Object> result = new HashMap<>(5);
List<String> deleteFailedIdList = new ArrayList<>();
if (StringUtils.isNotEmpty(processDefinitionIds)) {
String[] processDefinitionIdArray = processDefinitionIds.split(",");
for (String strProcessDefinitionId : processDefinitionIdArray) {
int processDefinitionId = Integer.parseInt(strProcessDefinitionId);
try {
Map<String, Object> deleteResult = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId);
if (!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))) {
deleteFailedIdList.add(strProcessDefinitionId);
logger.error((String) deleteResult.get(Constants.MSG));
}
} catch (Exception e) {
deleteFailedIdList.add(strProcessDefinitionId);
}
}
}
if(!deleteFailedIdList.isEmpty()){
putMsg(result, Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR, String.join(",", deleteFailedIdList));
}else{
putMsg(result, Status.SUCCESS);
}
return returnDataList(result);
}catch (Exception e){
logger.error(Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(),e);
return error(Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg());
if (!deleteFailedIdList.isEmpty()) {
putMsg(result, Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR, String.join(",", deleteFailedIdList));
} else {
putMsg(result, Status.SUCCESS);
}
return returnDataList(result);
}
/**
* export process definition by id
* batch export process definition by ids
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param response response
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param response response
*/
@ApiOperation(value = "exportProcessDefinitionById", notes= "EXPORT_PROCCESS_DEFINITION_BY_ID_NOTES")
@ApiOperation(value = "batchExportProcessDefinitionByIds", notes= "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_ID", required = true, dataType = "String")
})
@GetMapping(value="/export")
@GetMapping(value = "/export")
@ResponseBody
public void exportProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable String projectName,
@RequestParam("processDefinitionId") Integer processDefinitionId,
HttpServletResponse response){
try{
logger.info("export process definition by id, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processDefinitionId);
processDefinitionService.exportProcessDefinitionById(loginUser, projectName, processDefinitionId,response);
}catch (Exception e){
logger.error(Status.EXPORT_PROCESS_DEFINE_BY_ID_ERROR.getMsg(),e);
public void batchExportProcessDefinitionByIds(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionIds") String processDefinitionIds,
HttpServletResponse response) {
try {
logger.info("batch export process definition by ids, login user:{}, project name:{}, process definition ids:{}",
loginUser.getUserName(), projectName, processDefinitionIds);
processDefinitionService.batchExportProcessDefinitionByIds(loginUser, projectName, processDefinitionIds, response);
} catch (Exception e) {
logger.error(Status.BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(), e);
}
}
/**
* query proccess definition all by project id
* query process definition all by project id
*
* @param loginUser login user
* @param projectId project id
* @param projectId project id
* @return process definition list
*/
@ApiOperation(value = "queryProccessDefinitionAllByProjectId", notes= "QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES")
@GetMapping(value="/queryProccessDefinitionAllByProjectId")
@ApiOperation(value = "queryProcessDefinitionAllByProjectId", notes = "QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES")
@GetMapping(value = "/queryProcessDefinitionAllByProjectId")
@ResponseStatus(HttpStatus.OK)
public Result queryProccessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("projectId") Integer projectId){
try{
logger.info("query proccess definition list, login user:{}, project id:{}",
loginUser.getUserName(),projectId);
Map<String, Object> result = processDefinitionService.queryProccessDefinitionAllByProjectId(projectId);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg(),e);
return error(Status.QUERY_PROCCESS_DEFINITION_LIST.getCode(), Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg());
}
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
public Result queryProcessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("projectId") Integer projectId) {
logger.info("query process definition list, login user:{}, project id:{}",
loginUser.getUserName(), projectId);
Map<String, Object> result = processDefinitionService.queryProcessDefinitionAllByProjectId(projectId);
return returnDataList(result);
}
}

361
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -26,8 +27,6 @@ import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.*;
import org.apache.dolphinscheduler.service.queue.ITaskQueue;
import org.apache.dolphinscheduler.service.queue.TaskQueueFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -35,6 +34,8 @@ import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -48,7 +49,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.*;
@Api(tags = "PROCESS_INSTANCE_TAG", position = 10)
@RestController
@RequestMapping("projects/{projectName}/instance")
public class ProcessInstanceController extends BaseController{
public class ProcessInstanceController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceController.class);
@ -59,102 +60,94 @@ public class ProcessInstanceController extends BaseController{
/**
* query process instance list paging
*
* @param loginUser login user
* @param projectName project name
* @param pageNo page number
* @param pageSize page size
* @param loginUser login user
* @param projectName project name
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId process definition id
* @param searchVal search value
* @param stateType state type
* @param host host
* @param startTime start time
* @param endTime end time
* @param searchVal search value
* @param stateType state type
* @param host host
* @param startTime start time
* @param endTime end time
* @return process instance list
*/
@ApiOperation(value = "queryProcessInstanceList", notes= "QUERY_PROCESS_INSTANCE_LIST_NOTES")
@ApiOperation(value = "queryProcessInstanceList", notes = "QUERY_PROCESS_INSTANCE_LIST_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"),
@ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type ="String"),
@ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"),
@ApiImplicitParam(name = "host", value = "HOST", type ="String"),
@ApiImplicitParam(name = "startDate", value = "START_DATE", type ="String"),
@ApiImplicitParam(name = "endDate", value = "END_DATE", type ="String"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"),
@ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type = "String"),
@ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type = "ExecutionStatus"),
@ApiImplicitParam(name = "host", value = "HOST", type = "String"),
@ApiImplicitParam(name = "startDate", value = "START_DATE", type = "String"),
@ApiImplicitParam(name = "endDate", value = "END_DATE", type = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100")
})
@GetMapping(value="list-paging")
@GetMapping(value = "list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR)
public Result queryProcessInstanceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId", required = false, defaultValue = "0") Integer processDefinitionId,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam(value = "executorName", required = false) String executorName,
@RequestParam(value = "stateType", required = false) ExecutionStatus stateType,
@RequestParam(value = "host", required = false) String host,
@RequestParam(value = "startDate", required = false) String startTime,
@RequestParam(value = "endDate", required = false) String endTime,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize){
try{
logger.info("query all process instance list, login user:{},project name:{}, define id:{}," +
"search value:{},executor name:{},state type:{},host:{},start time:{}, end time:{},page number:{}, page size:{}",
loginUser.getUserName(), projectName, processDefinitionId, searchVal, executorName,stateType,host,
startTime, endTime, pageNo, pageSize);
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = processInstanceService.queryProcessInstanceList(
loginUser, projectName, processDefinitionId, startTime, endTime, searchVal, executorName, stateType, host, pageNo, pageSize);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg(),e);
return error(Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg());
}
@RequestParam(value = "processDefinitionId", required = false, defaultValue = "0") Integer processDefinitionId,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam(value = "executorName", required = false) String executorName,
@RequestParam(value = "stateType", required = false) ExecutionStatus stateType,
@RequestParam(value = "host", required = false) String host,
@RequestParam(value = "startDate", required = false) String startTime,
@RequestParam(value = "endDate", required = false) String endTime,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize) {
logger.info("query all process instance list, login user:{},project name:{}, define id:{}," +
"search value:{},executor name:{},state type:{},host:{},start time:{}, end time:{},page number:{}, page size:{}",
loginUser.getUserName(), projectName, processDefinitionId, searchVal, executorName, stateType, host,
startTime, endTime, pageNo, pageSize);
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = processInstanceService.queryProcessInstanceList(
loginUser, projectName, processDefinitionId, startTime, endTime, searchVal, executorName, stateType, host, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* query task list by process instance id
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @return task list for the process instance
*/
@ApiOperation(value = "queryTaskListByProcessId", notes= "QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES")
@ApiOperation(value = "queryTaskListByProcessId", notes = "QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/task-list-by-process-id")
@GetMapping(value = "/task-list-by-process-id")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR)
public Result queryTaskListByProcessId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processInstanceId") Integer processInstanceId
) {
try{
logger.info("query task instance list by process instance id, login user:{}, project name:{}, process instance id:{}",
loginUser.getUserName(), projectName, processInstanceId);
Map<String, Object> result = processInstanceService.queryTaskListByProcessId(loginUser, projectName, processInstanceId);
return returnDataList(result);
}catch (Exception e){
logger.error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg(),e);
return error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getCode(), QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg());
}
) throws IOException {
logger.info("query task instance list by process instance id, login user:{}, project name:{}, process instance id:{}",
loginUser.getUserName(), projectName, processInstanceId);
Map<String, Object> result = processInstanceService.queryTaskListByProcessId(loginUser, projectName, processInstanceId);
return returnDataList(result);
}
/**
* update process instance
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processInstanceJson process instance json
* @param processInstanceId process instance id
* @param scheduleTime schedule time
* @param syncDefine sync define
* @param flag flag
* @param locations locations
* @param connects connects
* @param processInstanceId process instance id
* @param scheduleTime schedule time
* @param syncDefine sync define
* @param flag flag
* @param locations locations
* @param connects connects
* @return update result code
*/
@ApiOperation(value = "updateProcessInstance", notes= "UPDATE_PROCESS_INSTANCE_NOTES")
@ApiOperation(value = "updateProcessInstance", notes = "UPDATE_PROCESS_INSTANCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processInstanceJson", value = "PROCESS_INSTANCE_JSON", type = "String"),
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100"),
@ -164,243 +157,209 @@ public class ProcessInstanceController extends BaseController{
@ApiImplicitParam(name = "connects", value = "PROCESS_INSTANCE_CONNECTS", type = "String"),
@ApiImplicitParam(name = "flag", value = "RECOVERY_PROCESS_INSTANCE_FLAG", type = "Flag"),
})
@PostMapping(value="/update")
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_PROCESS_INSTANCE_ERROR)
public Result updateProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam( value = "processInstanceJson", required = false) String processInstanceJson,
@RequestParam( value = "processInstanceId") Integer processInstanceId,
@RequestParam( value = "scheduleTime", required = false) String scheduleTime,
@RequestParam( value = "syncDefine", required = true) Boolean syncDefine,
@RequestParam(value = "processInstanceJson", required = false) String processInstanceJson,
@RequestParam(value = "processInstanceId") Integer processInstanceId,
@RequestParam(value = "scheduleTime", required = false) String scheduleTime,
@RequestParam(value = "syncDefine", required = true) Boolean syncDefine,
@RequestParam(value = "locations", required = false) String locations,
@RequestParam(value = "connects", required = false) String connects,
@RequestParam( value = "flag", required = false) Flag flag
){
try{
logger.info("updateProcessInstance process instance, login user:{}, project name:{}, process instance json:{}," +
"process instance id:{}, schedule time:{}, sync define:{}, flag:{}, locations:{}, connects:{}",
loginUser.getUserName(), projectName, processInstanceJson, processInstanceId, scheduleTime,
syncDefine, flag, locations, connects);
Map<String, Object> result = processInstanceService.updateProcessInstance(loginUser, projectName,
processInstanceId, processInstanceJson, scheduleTime, syncDefine, flag, locations, connects);
return returnDataList(result);
}catch (Exception e){
logger.error(UPDATE_PROCESS_INSTANCE_ERROR.getMsg(),e);
return error(Status.UPDATE_PROCESS_INSTANCE_ERROR.getCode(), Status.UPDATE_PROCESS_INSTANCE_ERROR.getMsg());
}
@RequestParam(value = "flag", required = false) Flag flag
) throws ParseException {
logger.info("updateProcessInstance process instance, login user:{}, project name:{}, process instance json:{}," +
"process instance id:{}, schedule time:{}, sync define:{}, flag:{}, locations:{}, connects:{}",
loginUser.getUserName(), projectName, processInstanceJson, processInstanceId, scheduleTime,
syncDefine, flag, locations, connects);
Map<String, Object> result = processInstanceService.updateProcessInstance(loginUser, projectName,
processInstanceId, processInstanceJson, scheduleTime, syncDefine, flag, locations, connects);
return returnDataList(result);
}
/**
* query process instance by id
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @return process instance detail
*/
@ApiOperation(value = "queryProcessInstanceById", notes= "QUERY_PROCESS_INSTANCE_BY_ID_NOTES")
@ApiOperation(value = "queryProcessInstanceById", notes = "QUERY_PROCESS_INSTANCE_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/select-by-id")
@GetMapping(value = "/select-by-id")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_INSTANCE_BY_ID_ERROR)
public Result queryProcessInstanceById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processInstanceId") Integer processInstanceId
){
try{
logger.info("query process instance detail by id, login user:{},project name:{}, process instance id:{}",
loginUser.getUserName(), projectName, processInstanceId);
Map<String, Object> result = processInstanceService.queryProcessInstanceById(loginUser, projectName, processInstanceId);
return returnDataList(result);
}catch (Exception e){
logger.error(QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e);
return error(Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg());
}
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processInstanceId") Integer processInstanceId
) {
logger.info("query process instance detail by id, login user:{},project name:{}, process instance id:{}",
loginUser.getUserName(), projectName, processInstanceId);
Map<String, Object> result = processInstanceService.queryProcessInstanceById(loginUser, projectName, processInstanceId);
return returnDataList(result);
}
/**
* delete process instance by id, at the same time,
* delete task instance and their mapping relation data
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @return delete result code
*/
@ApiOperation(value = "deleteProcessInstanceById", notes= "DELETE_PROCESS_INSTANCE_BY_ID_NOTES")
@ApiOperation(value = "deleteProcessInstanceById", notes = "DELETE_PROCESS_INSTANCE_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/delete")
@GetMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_PROCESS_INSTANCE_BY_ID_ERROR)
public Result deleteProcessInstanceById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processInstanceId") Integer processInstanceId
){
try{
logger.info("delete process instance by id, login user:{}, project name:{}, process instance id:{}",
loginUser.getUserName(), projectName, processInstanceId);
// task queue
ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance();
Map<String, Object> result = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue);
return returnDataList(result);
}catch (Exception e){
logger.error(DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e);
return error(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg());
}
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processInstanceId") Integer processInstanceId
) {
logger.info("delete process instance by id, login user:{}, project name:{}, process instance id:{}",
loginUser.getUserName(), projectName, processInstanceId);
// task queue
Map<String, Object> result = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId);
return returnDataList(result);
}
/**
* query sub process instance detail info by task id
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param taskId task id
* @param taskId task id
* @return sub process instance detail
*/
@ApiOperation(value = "querySubProcessInstanceByTaskId", notes= "QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES")
@ApiOperation(value = "querySubProcessInstanceByTaskId", notes = "QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "taskId", value = "TASK_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/select-sub-process")
@GetMapping(value = "/select-sub-process")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR)
public Result querySubProcessInstanceByTaskId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("taskId") Integer taskId){
try{
Map<String, Object> result = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, taskId);
return returnDataList(result);
}catch (Exception e){
logger.error(QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg(),e);
return error(Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getCode(), Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg());
}
@RequestParam("taskId") Integer taskId) {
Map<String, Object> result = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, taskId);
return returnDataList(result);
}
/**
* query parent process instance detail info by sub process instance id
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param subId sub process id
* @param subId sub process id
* @return parent instance detail
*/
@ApiOperation(value = "queryParentInstanceBySubId", notes= "QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES")
@ApiOperation(value = "queryParentInstanceBySubId", notes = "QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "subId", value = "SUB_PROCESS_INSTANCE_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/select-parent-process")
@GetMapping(value = "/select-parent-process")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR)
public Result queryParentInstanceBySubId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("subId") Integer subId){
try{
Map<String, Object> result = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, subId);
return returnDataList(result);
}catch (Exception e){
logger.error(QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg(),e);
return error(Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getCode(), Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg());
}
@RequestParam("subId") Integer subId) {
Map<String, Object> result = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, subId);
return returnDataList(result);
}
/**
* query process instance global variables and local variables
*
* @param loginUser login user
* @param loginUser login user
* @param processInstanceId process instance id
* @return variables data
*/
@ApiOperation(value = "viewVariables", notes= "QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES")
@ApiOperation(value = "viewVariables", notes = "QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/view-variables")
@GetMapping(value = "/view-variables")
@ResponseStatus(HttpStatus.OK)
public Result viewVariables(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser
, @RequestParam("processInstanceId") Integer processInstanceId){
try{
Map<String, Object> result = processInstanceService.viewVariables(processInstanceId);
return returnDataList(result);
}catch (Exception e){
logger.error(QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg(),e);
return error(Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg());
}
@ApiException(QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR)
public Result viewVariables(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("processInstanceId") Integer processInstanceId) throws Exception {
Map<String, Object> result = processInstanceService.viewVariables(processInstanceId);
return returnDataList(result);
}
/**
* encapsulation gantt structure
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @return gantt tree data
*/
@ApiOperation(value = "vieGanttTree", notes= "VIEW_GANTT_NOTES")
@ApiOperation(value = "vieGanttTree", notes = "VIEW_GANTT_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100")
})
@GetMapping(value="/view-gantt")
@GetMapping(value = "/view-gantt")
@ResponseStatus(HttpStatus.OK)
@ApiException(ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR)
public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processInstanceId") Integer processInstanceId){
try{
Map<String, Object> result = processInstanceService.viewGantt(processInstanceId);
return returnDataList(result);
}catch (Exception e){
logger.error(ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg(),e);
return error(Status.ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getCode(),ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg());
}
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processInstanceId") Integer processInstanceId) throws Exception {
Map<String, Object> result = processInstanceService.viewGantt(processInstanceId);
return returnDataList(result);
}
/**
* batch delete process instance by ids, at the same time,
* delete task instance and their mapping relation data
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processInstanceIds process instance id
* @return delete result code
*/
@GetMapping(value="/batch-delete")
@GetMapping(value = "/batch-delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR)
public Result batchDeleteProcessInstanceByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable String projectName,
@RequestParam("processInstanceIds") String processInstanceIds
){
try{
logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}",
loginUser.getUserName(), projectName, processInstanceIds);
// task queue
ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance();
Map<String, Object> result = new HashMap<>(5);
List<String> deleteFailedIdList = new ArrayList<>();
if(StringUtils.isNotEmpty(processInstanceIds)){
String[] processInstanceIdArray = processInstanceIds.split(",");
@PathVariable String projectName,
@RequestParam("processInstanceIds") String processInstanceIds
) {
logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}",
loginUser.getUserName(), projectName, processInstanceIds);
// task queue
Map<String, Object> result = new HashMap<>(5);
List<String> deleteFailedIdList = new ArrayList<>();
if (StringUtils.isNotEmpty(processInstanceIds)) {
String[] processInstanceIdArray = processInstanceIds.split(",");
for (String strProcessInstanceId:processInstanceIdArray) {
int processInstanceId = Integer.parseInt(strProcessInstanceId);
try {
Map<String, Object> deleteResult = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue);
if(!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))){
deleteFailedIdList.add(strProcessInstanceId);
logger.error((String)deleteResult.get(Constants.MSG));
}
} catch (Exception e) {
for (String strProcessInstanceId : processInstanceIdArray) {
int processInstanceId = Integer.parseInt(strProcessInstanceId);
try {
Map<String, Object> deleteResult = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId);
if (!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))) {
deleteFailedIdList.add(strProcessInstanceId);
logger.error((String) deleteResult.get(Constants.MSG));
}
} catch (Exception e) {
deleteFailedIdList.add(strProcessInstanceId);
}
}
if(deleteFailedIdList.size() > 0){
putMsg(result, Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR, String.join(",", deleteFailedIdList));
}else{
putMsg(result, Status.SUCCESS);
}
return returnDataList(result);
}catch (Exception e){
logger.error(BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg(),e);
return error(Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg());
}
if (!deleteFailedIdList.isEmpty()) {
putMsg(result, Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR, String.join(",", deleteFailedIdList));
} else {
putMsg(result, Status.SUCCESS);
}
return returnDataList(result);
}
}

177
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java

@ -17,7 +17,7 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.Result;
@ -59,61 +59,53 @@ public class ProjectController extends BaseController {
/**
* create project
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param description description
* @return returns an error if it exists
*/
@ApiOperation(value = "createProject", notes= "CREATE_PROJECT_NOTES")
@ApiOperation(value = "createProject", notes = "CREATE_PROJECT_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType ="String"),
@ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType = "String"),
@ApiImplicitParam(name = "description", value = "PROJECT_DESC", dataType = "String")
})
@PostMapping(value = "/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_PROJECT_ERROR)
public Result createProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("projectName") String projectName,
@RequestParam(value = "description", required = false) String description) {
try {
logger.info("login user {}, create project name: {}, desc: {}", loginUser.getUserName(), projectName, description);
Map<String, Object> result = projectService.createProject(loginUser, projectName, description);
return returnDataList(result);
} catch (Exception e) {
logger.error(CREATE_PROJECT_ERROR.getMsg(), e);
return error(CREATE_PROJECT_ERROR.getCode(), CREATE_PROJECT_ERROR.getMsg());
}
logger.info("login user {}, create project name: {}, desc: {}", loginUser.getUserName(), projectName, description);
Map<String, Object> result = projectService.createProject(loginUser, projectName, description);
return returnDataList(result);
}
/**
* updateProcessInstance project
*
* @param loginUser login user
* @param projectId project id
* @param loginUser login user
* @param projectId project id
* @param projectName project name
* @param description description
* @return update result code
*/
@ApiOperation(value = "updateProject", notes= "UPDATE_PROJECT_NOTES")
@ApiOperation(value = "updateProject", notes = "UPDATE_PROJECT_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100"),
@ApiImplicitParam(name = "projectName",value = "PROJECT_NAME",dataType = "String"),
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType = "String"),
@ApiImplicitParam(name = "description", value = "PROJECT_DESC", dataType = "String")
})
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_PROJECT_ERROR)
public Result updateProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("projectId") Integer projectId,
@RequestParam("projectName") String projectName,
@RequestParam(value = "description", required = false) String description) {
try {
logger.info("login user {} , updateProcessInstance project name: {}, desc: {}", loginUser.getUserName(), projectName, description);
Map<String, Object> result = projectService.update(loginUser, projectId, projectName, description);
return returnDataList(result);
} catch (Exception e) {
logger.error(UPDATE_PROJECT_ERROR.getMsg(), e);
return error(UPDATE_PROJECT_ERROR.getCode(), UPDATE_PROJECT_ERROR.getMsg());
}
logger.info("login user {} , updateProcessInstance project name: {}, desc: {}", loginUser.getUserName(), projectName, description);
Map<String, Object> result = projectService.update(loginUser, projectId, projectName, description);
return returnDataList(result);
}
/**
@ -123,23 +115,19 @@ public class ProjectController extends BaseController {
* @param projectId project id
* @return project detail information
*/
@ApiOperation(value = "queryProjectById", notes= "QUERY_PROJECT_BY_ID_NOTES")
@ApiOperation(value = "queryProjectById", notes = "QUERY_PROJECT_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100")
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
})
@GetMapping(value = "/query-by-id")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROJECT_DETAILS_BY_ID_ERROR)
public Result queryProjectById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("projectId") Integer projectId) {
logger.info("login user {}, query project by id: {}", loginUser.getUserName(), projectId);
try {
Map<String, Object> result = projectService.queryById(projectId);
return returnDataList(result);
} catch (Exception e) {
logger.error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg(), e);
return error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getCode(), QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg());
}
Map<String, Object> result = projectService.queryById(projectId);
return returnDataList(result);
}
/**
@ -147,33 +135,29 @@ public class ProjectController extends BaseController {
*
* @param loginUser login user
* @param searchVal search value
* @param pageSize page size
* @param pageNo page number
* @param pageSize page size
* @param pageNo page number
* @return project list which the login user have permission to see
*/
@ApiOperation(value = "queryProjectListPaging", notes= "QUERY_PROJECT_LIST_PAGING_NOTES")
@ApiOperation(value = "queryProjectListPaging", notes = "QUERY_PROJECT_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"),
@ApiImplicitParam(name = "projectId", value = "PAGE_SIZE", dataType ="Int", example = "20"),
@ApiImplicitParam(name = "projectId", value = "PAGE_NO", dataType ="Int", example = "1")
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"),
@ApiImplicitParam(name = "projectId", value = "PAGE_SIZE", dataType = "Int", example = "20"),
@ApiImplicitParam(name = "projectId", value = "PAGE_NO", dataType = "Int", example = "1")
})
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR)
public Result queryProjectListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize,
@RequestParam("pageNo") Integer pageNo
) {
try {
logger.info("login user {}, query project list paging", loginUser.getUserName());
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = projectService.queryProjectListPaging(loginUser, pageSize, pageNo, searchVal);
return returnDataListPaging(result);
} catch (Exception e) {
logger.error(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg(), e);
return error(Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getCode(), Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg());
}
logger.info("login user {}, query project list paging", loginUser.getUserName());
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = projectService.queryProjectListPaging(loginUser, pageSize, pageNo, searchVal);
return returnDataListPaging(result);
}
/**
@ -183,49 +167,41 @@ public class ProjectController extends BaseController {
* @param projectId project id
* @return delete result code
*/
@ApiOperation(value = "deleteProjectById", notes= "DELETE_PROJECT_BY_ID_NOTES")
@ApiOperation(value = "deleteProjectById", notes = "DELETE_PROJECT_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100")
@ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100")
})
@GetMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_PROJECT_ERROR)
public Result deleteProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("projectId") Integer projectId
) {
try {
logger.info("login user {}, delete project: {}.", loginUser.getUserName(), projectId);
Map<String, Object> result = projectService.deleteProject(loginUser, projectId);
return returnDataList(result);
} catch (Exception e) {
logger.error(DELETE_PROJECT_ERROR.getMsg(), e);
return error(DELETE_PROJECT_ERROR.getCode(), DELETE_PROJECT_ERROR.getMsg());
}
logger.info("login user {}, delete project: {}.", loginUser.getUserName(), projectId);
Map<String, Object> result = projectService.deleteProject(loginUser, projectId);
return returnDataList(result);
}
/**
* query unauthorized project
*
* @param loginUser login user
* @param userId user id
* @param userId user id
* @return the projects which user have not permission to see
*/
@ApiOperation(value = "queryUnauthorizedProject", notes= "QUERY_UNAUTHORIZED_PROJECT_NOTES")
@ApiOperation(value = "queryUnauthorizedProject", notes = "QUERY_UNAUTHORIZED_PROJECT_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID", dataType ="Int", example = "100")
@ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100")
})
@GetMapping(value = "/unauth-project")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_UNAUTHORIZED_PROJECT_ERROR)
public Result queryUnauthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("userId") Integer userId) {
try {
logger.info("login user {}, query unauthorized project by user id: {}.", loginUser.getUserName(), userId);
Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser, userId);
return returnDataList(result);
} catch (Exception e) {
logger.error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg(), e);
return error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getCode(), QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg());
}
logger.info("login user {}, query unauthorized project by user id: {}.", loginUser.getUserName(), userId);
Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser, userId);
return returnDataList(result);
}
@ -233,73 +209,62 @@ public class ProjectController extends BaseController {
* query authorized project
*
* @param loginUser login user
* @param userId user id
* @param userId user id
* @return projects which the user have permission to see, Except for items created by this user
*/
@ApiOperation(value = "queryAuthorizedProject", notes= "QUERY_AUTHORIZED_PROJECT_NOTES")
@ApiOperation(value = "queryAuthorizedProject", notes = "QUERY_AUTHORIZED_PROJECT_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID", dataType ="Int", example = "100")
@ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100")
})
@GetMapping(value = "/authed-project")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_AUTHORIZED_PROJECT)
public Result queryAuthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("userId") Integer userId) {
try {
logger.info("login user {}, query authorized project by user id: {}.", loginUser.getUserName(), userId);
Map<String, Object> result = projectService.queryAuthorizedProject(loginUser, userId);
return returnDataList(result);
} catch (Exception e) {
logger.error(QUERY_AUTHORIZED_PROJECT.getMsg(), e);
return error(QUERY_AUTHORIZED_PROJECT.getCode(), QUERY_AUTHORIZED_PROJECT.getMsg());
}
logger.info("login user {}, query authorized project by user id: {}.", loginUser.getUserName(), userId);
Map<String, Object> result = projectService.queryAuthorizedProject(loginUser, userId);
return returnDataList(result);
}
/**
* import process definition
* @param loginUser login user
* @param file resource file
*
* @param loginUser login user
* @param file resource file
* @param projectName project name
* @return import result code
*/
@ApiOperation(value = "importProcessDefinition", notes= "EXPORT_PROCCESS_DEFINITION_NOTES")
@ApiOperation(value = "importProcessDefinition", notes= "EXPORT_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile")
})
@PostMapping(value="/import-definition")
@PostMapping(value = "/import-definition")
@ApiException(IMPORT_PROCESS_DEFINE_ERROR)
public Result importProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("file") MultipartFile file,
@RequestParam("projectName") String projectName){
try{
logger.info("import process definition by id, login user:{}, project: {}",
loginUser.getUserName(), projectName);
Map<String, Object> result = processDefinitionService.importProcessDefinition(loginUser, file, projectName);
return returnDataList(result);
}catch (Exception e){
logger.error(IMPORT_PROCESS_DEFINE_ERROR.getMsg(),e);
return error(IMPORT_PROCESS_DEFINE_ERROR.getCode(), IMPORT_PROCESS_DEFINE_ERROR.getMsg());
}
@RequestParam("projectName") String projectName) {
logger.info("import process definition by id, login user:{}, project: {}",
loginUser.getUserName(), projectName);
Map<String, Object> result = processDefinitionService.importProcessDefinition(loginUser, file, projectName);
return returnDataList(result);
}
/**
* query all project list
*
* @param loginUser login user
* @return all project list
*/
@ApiOperation(value = "queryAllProjectList", notes= "QUERY_ALL_PROJECT_LIST_NOTES")
@ApiOperation(value = "queryAllProjectList", notes = "QUERY_ALL_PROJECT_LIST_NOTES")
@GetMapping(value = "/query-project-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR)
public Result queryAllProjectList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
try {
logger.info("login user {}, query all project list", loginUser.getUserName());
Map<String, Object> result = projectService.queryAllProjectList();
return returnDataList(result);
} catch (Exception e) {
logger.error(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg(), e);
return error(Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getCode(), Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg());
}
logger.info("login user {}, query all project list", loginUser.getUserName());
Map<String, Object> result = projectService.queryAllProjectList();
return returnDataList(result);
}
}

137
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java

@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.QueueService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -36,6 +37,8 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* queue controller
@ -43,7 +46,7 @@ import java.util.Map;
@Api(tags = "QUEUE_TAG", position = 1)
@RestController
@RequestMapping("/queue")
public class QueueController extends BaseController{
public class QueueController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(QueueController.class);
@ -53,151 +56,131 @@ public class QueueController extends BaseController{
/**
* query queue list
*
* @param loginUser login user
* @return queue list
*/
@ApiOperation(value = "queryList", notes= "QUERY_QUEUE_LIST_NOTES")
@GetMapping(value="/list")
@ApiOperation(value = "queryList", notes = "QUERY_QUEUE_LIST_NOTES")
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
public Result queryList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){
try{
logger.info("login user {}, query queue list", loginUser.getUserName());
Map<String, Object> result = queueService.queryList(loginUser);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.QUERY_QUEUE_LIST_ERROR.getMsg(),e);
return error(Status.QUERY_QUEUE_LIST_ERROR.getCode(), Status.QUERY_QUEUE_LIST_ERROR.getMsg());
}
@ApiException(QUERY_QUEUE_LIST_ERROR)
public Result queryList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {}, query queue list", loginUser.getUserName());
Map<String, Object> result = queueService.queryList(loginUser);
return returnDataList(result);
}
/**
* query queue list paging
*
* @param loginUser login user
* @param pageNo page number
* @param pageNo page number
* @param searchVal search value
* @param pageSize page size
* @param pageSize page size
* @return queue list
*/
@ApiOperation(value = "queryQueueListPaging", notes= "QUERY_QUEUE_LIST_PAGING_NOTES")
@ApiOperation(value = "queryQueueListPaging", notes = "QUERY_QUEUE_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20")
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20")
})
@GetMapping(value="/list-paging")
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_QUEUE_LIST_ERROR)
public Result queryQueueListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize){
try{
logger.info("login user {}, query queue list,search value:{}", loginUser.getUserName(),searchVal);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if(result.get(Constants.STATUS) != Status.SUCCESS){
return returnDataListPaging(result);
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = queueService.queryList(loginUser,searchVal,pageNo,pageSize);
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize) {
logger.info("login user {}, query queue list,search value:{}", loginUser.getUserName(), searchVal);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.QUERY_QUEUE_LIST_ERROR.getMsg(),e);
return error(Status.QUERY_QUEUE_LIST_ERROR.getCode(), Status.QUERY_QUEUE_LIST_ERROR.getMsg());
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = queueService.queryList(loginUser, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* create queue
*
* @param loginUser login user
* @param queue queue
* @param queue queue
* @param queueName queue name
* @return create result
*/
@ApiOperation(value = "createQueue", notes= "CREATE_QUEUE_NOTES")
@ApiOperation(value = "createQueue", notes = "CREATE_QUEUE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true,dataType ="String"),
@ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String")
@ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "queueName", value = "QUEUE_NAME", required = true, dataType = "String")
})
@PostMapping(value = "/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_QUEUE_ERROR)
public Result createQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "queue") String queue,
@RequestParam(value = "queueName") String queueName) {
@RequestParam(value = "queue") String queue,
@RequestParam(value = "queueName") String queueName) {
logger.info("login user {}, create queue, queue: {}, queueName: {}",
loginUser.getUserName(), queue, queueName);
try {
Map<String, Object> result = queueService.createQueue(loginUser,queue,queueName);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.CREATE_QUEUE_ERROR.getMsg(),e);
return error(Status.CREATE_QUEUE_ERROR.getCode(), Status.CREATE_QUEUE_ERROR.getMsg());
}
Map<String, Object> result = queueService.createQueue(loginUser, queue, queueName);
return returnDataList(result);
}
/**
* update queue
*
* @param loginUser login user
* @param queue queue
* @param id queue id
* @param queue queue
* @param id queue id
* @param queueName queue name
* @return update result code
*/
@ApiOperation(value = "updateQueue", notes= "UPDATE_QUEUE_NOTES")
@ApiOperation(value = "updateQueue", notes = "UPDATE_QUEUE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"),
@ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME",required = true, dataType ="String"),
@ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String")
@ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "queueName", value = "QUEUE_NAME", required = true, dataType = "String")
})
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(UPDATE_QUEUE_ERROR)
public Result updateQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id,
@RequestParam(value = "queue") String queue,
@RequestParam(value = "queueName") String queueName) {
logger.info("login user {}, update queue, id: {}, queue: {}, queueName: {}",
loginUser.getUserName(), id,queue, queueName);
try {
Map<String, Object> result = queueService.updateQueue(loginUser,id,queue,queueName);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.UPDATE_QUEUE_ERROR.getMsg(),e);
return error(Status.UPDATE_QUEUE_ERROR.getCode(), Status.UPDATE_QUEUE_ERROR.getMsg());
}
loginUser.getUserName(), id, queue, queueName);
Map<String, Object> result = queueService.updateQueue(loginUser, id, queue, queueName);
return returnDataList(result);
}
/**
* verify queue and queue name
*
* @param loginUser login user
* @param queue queue
* @param queue queue
* @param queueName queue name
* @return true if the queue name not exists, otherwise return false
*/
@ApiOperation(value = "verifyQueue", notes= "VERIFY_QUEUE_NOTES")
@ApiOperation(value = "verifyQueue", notes = "VERIFY_QUEUE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"),
@ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME",required = true, dataType ="String"),
@ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String")
@ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "queueName", value = "QUEUE_NAME", required = true, dataType = "String")
})
@PostMapping(value = "/verify-queue")
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_QUEUE_ERROR)
public Result verifyQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value ="queue") String queue,
@RequestParam(value ="queueName") String queueName
@RequestParam(value = "queue") String queue,
@RequestParam(value = "queueName") String queueName
) {
try{
logger.info("login user {}, verfiy queue: {} queue name: {}",
loginUser.getUserName(),queue,queueName);
return queueService.verifyQueue(queue,queueName);
}catch (Exception e){
logger.error(Status.VERIFY_QUEUE_ERROR.getMsg(),e);
return error(Status.VERIFY_QUEUE_ERROR.getCode(), Status.VERIFY_QUEUE_ERROR.getMsg());
}
logger.info("login user {}, verfiy queue: {} queue name: {}",
loginUser.getUserName(), queue, queueName);
return queueService.verifyQueue(queue, queueName);
}

715
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java

File diff suppressed because it is too large Load Diff

246
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java

@ -17,7 +17,7 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
@ -34,6 +34,7 @@ import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.io.IOException;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
@ -60,33 +61,34 @@ public class SchedulerController extends BaseController {
/**
* create schedule
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param processInstancePriority process instance priority
* @param receivers receivers
* @param receiversCc receivers cc
* @param workerGroupId worker group id
* @param receivers receivers
* @param receiversCc receivers cc
* @param workerGroup worker group
* @return create result code
*/
@ApiOperation(value = "createSchedule", notes= "CREATE_SCHEDULE_NOTES")
@ApiOperation(value = "createSchedule", notes = "CREATE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type ="WarningType"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"),
@ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type ="FailureStrategy"),
@ApiImplicitParam(name = "receivers", value = "RECEIVERS", type ="String"),
@ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type ="String"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"),
@ApiImplicitParam(name = "receivers", value = "RECEIVERS", type = "String"),
@ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type = "String"),
@ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type ="Priority"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"),
})
@PostMapping("/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_SCHEDULE_ERROR)
public Result createSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") Integer processDefinitionId,
@ -96,52 +98,48 @@ public class SchedulerController extends BaseController {
@RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy,
@RequestParam(value = "receivers", required = false) String receivers,
@RequestParam(value = "receiversCc", required = false) String receiversCc,
@RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) {
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) throws IOException {
logger.info("login user {}, project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," +
"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}, workGroupId:{}",
loginUser.getUserName(), projectName, processDefinitionId, schedule, warningType, warningGroupId,
failureStrategy, receivers, receiversCc, processInstancePriority, workerGroupId);
try {
Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule,
warningType, warningGroupId, failureStrategy, receivers, receiversCc, processInstancePriority, workerGroupId);
failureStrategy, receivers, receiversCc, processInstancePriority, workerGroup);
Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule,
warningType, warningGroupId, failureStrategy, receivers, receiversCc, processInstancePriority, workerGroup);
return returnDataList(result);
} catch (Exception e) {
logger.error(CREATE_SCHEDULE_ERROR.getMsg(), e);
return error(CREATE_SCHEDULE_ERROR.getCode(), CREATE_SCHEDULE_ERROR.getMsg());
}
return returnDataList(result);
}
/**
* updateProcessInstance schedule
*
* @param loginUser login user
* @param projectName project name
* @param id scheduler id
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param receivers receivers
* @param workerGroupId worker group id
* @param loginUser login user
* @param projectName project name
* @param id scheduler id
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param receivers receivers
* @param workerGroup worker group
* @param processInstancePriority process instance priority
* @param receiversCc receivers cc
* @param receiversCc receivers cc
* @return update result code
*/
@ApiOperation(value = "updateSchedule", notes= "UPDATE_SCHEDULE_NOTES")
@ApiOperation(value = "updateSchedule", notes = "UPDATE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type ="WarningType"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"),
@ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type ="FailureStrategy"),
@ApiImplicitParam(name = "receivers", value = "RECEIVERS", type ="String"),
@ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type ="String"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"),
@ApiImplicitParam(name = "receivers", value = "RECEIVERS", type = "String"),
@ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type = "String"),
@ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type ="Priority"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"),
})
@PostMapping("/update")
@ApiException(UPDATE_SCHEDULE_ERROR)
public Result updateSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "id") Integer id,
@ -151,196 +149,164 @@ public class SchedulerController extends BaseController {
@RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy,
@RequestParam(value = "receivers", required = false) String receivers,
@RequestParam(value = "receiversCc", required = false) String receiversCc,
@RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) {
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) throws IOException {
logger.info("login user {}, project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " +
"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {},workerGroupId:{}",
loginUser.getUserName(), projectName, id, schedule, warningType, warningGroupId, failureStrategy,
receivers, receiversCc, processInstancePriority, workerGroupId);
receivers, receiversCc, processInstancePriority, workerGroup);
try {
Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectName, id, schedule,
warningType, warningGroupId, failureStrategy, receivers, receiversCc, null, processInstancePriority, workerGroupId);
return returnDataList(result);
} catch (Exception e) {
logger.error(UPDATE_SCHEDULE_ERROR.getMsg(), e);
return error(Status.UPDATE_SCHEDULE_ERROR.getCode(), Status.UPDATE_SCHEDULE_ERROR.getMsg());
}
Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectName, id, schedule,
warningType, warningGroupId, failureStrategy, receivers, receiversCc, null, processInstancePriority, workerGroup);
return returnDataList(result);
}
/**
* publish schedule setScheduleState
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param id scheduler id
* @param id scheduler id
* @return publish result code
*/
@ApiOperation(value = "online", notes= "ONLINE_SCHEDULE_NOTES")
@ApiOperation(value = "online", notes = "ONLINE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping("/online")
@ApiException(PUBLISH_SCHEDULE_ONLINE_ERROR)
public Result online(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName,
@RequestParam("id") Integer id) {
logger.info("login user {}, schedule setScheduleState, project name: {}, id: {}",
loginUser.getUserName(), projectName, id);
try {
Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.ONLINE);
return returnDataList(result);
} catch (Exception e) {
logger.error(PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg(), e);
return error(Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getCode(), Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg());
}
Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.ONLINE);
return returnDataList(result);
}
/**
* offline schedule
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param id schedule id
* @param id schedule id
* @return operation result code
*/
@ApiOperation(value = "offline", notes= "OFFLINE_SCHEDULE_NOTES")
@ApiOperation(value = "offline", notes = "OFFLINE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping("/offline")
@ApiException(OFFLINE_SCHEDULE_ERROR)
public Result offline(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName,
@RequestParam("id") Integer id) {
logger.info("login user {}, schedule offline, project name: {}, process definition id: {}",
loginUser.getUserName(), projectName, id);
try {
Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.OFFLINE);
return returnDataList(result);
} catch (Exception e) {
logger.error(OFFLINE_SCHEDULE_ERROR.getMsg(), e);
return error(Status.OFFLINE_SCHEDULE_ERROR.getCode(), Status.OFFLINE_SCHEDULE_ERROR.getMsg());
}
Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.OFFLINE);
return returnDataList(result);
}
/**
* query schedule list paging
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @return schedule list page
*/
@ApiOperation(value = "queryScheduleListPaging", notes= "QUERY_SCHEDULE_LIST_PAGING_NOTES")
@ApiOperation(value = "queryScheduleListPaging", notes = "QUERY_SCHEDULE_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true,dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100")
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100")
})
@GetMapping("/list-paging")
@GetMapping("/list-paging")
@ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR)
public Result queryScheduleListPaging(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam Integer processDefinitionId,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize) {
logger.info("login user {}, query schedule, project name: {}, process definition id: {}",
loginUser.getUserName(), projectName, processDefinitionId);
try {
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg(),e);
return error(Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg());
}
logger.info("login user {}, query schedule, project name: {}, process definition id: {}",
loginUser.getUserName(), projectName, processDefinitionId);
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* delete schedule by id
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param scheduleId scheule id
* @param scheduleId scheule id
* @return delete result code
*/
@ApiOperation(value = "deleteScheduleById", notes= "OFFLINE_SCHEDULE_NOTES")
@ApiOperation(value = "deleteScheduleById", notes = "OFFLINE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "scheduleId", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value="/delete")
@GetMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_SCHEDULE_CRON_BY_ID_ERROR)
public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser,
@PathVariable String projectName,
@RequestParam("scheduleId") Integer scheduleId
){
try{
logger.info("delete schedule by id, login user:{}, project name:{}, schedule id:{}",
loginUser.getUserName(), projectName, scheduleId);
Map<String, Object> result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId);
return returnDataList(result);
}catch (Exception e){
logger.error(DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg(),e);
return error(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getCode(), Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg());
}
@PathVariable String projectName,
@RequestParam("scheduleId") Integer scheduleId
) {
logger.info("delete schedule by id, login user:{}, project name:{}, schedule id:{}",
loginUser.getUserName(), projectName, scheduleId);
Map<String, Object> result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId);
return returnDataList(result);
}
/**
* query schedule list
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @return schedule list
*/
@ApiOperation(value = "queryScheduleList", notes= "QUERY_SCHEDULE_LIST_NOTES")
@ApiOperation(value = "queryScheduleList", notes = "QUERY_SCHEDULE_LIST_NOTES")
@PostMapping("/list")
@ApiException(QUERY_SCHEDULE_LIST_ERROR)
public Result queryScheduleList(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName) {
try {
logger.info("login user {}, query schedule list, project name: {}",
loginUser.getUserName(), projectName);
Map<String, Object> result = schedulerService.queryScheduleList(loginUser, projectName);
return returnDataList(result);
} catch (Exception e) {
logger.error(QUERY_SCHEDULE_LIST_ERROR.getMsg(), e);
return error(Status.QUERY_SCHEDULE_LIST_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_ERROR.getMsg());
}
logger.info("login user {}, query schedule list, project name: {}",
loginUser.getUserName(), projectName);
Map<String, Object> result = schedulerService.queryScheduleList(loginUser, projectName);
return returnDataList(result);
}
/**
* preview schedule
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param schedule schedule expression
* @param schedule schedule expression
* @return the next five fire time
*/
@ApiOperation(value = "previewSchedule", notes= "PREVIEW_SCHEDULE_NOTES")
@ApiOperation(value = "previewSchedule", notes = "PREVIEW_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"),
})
@PostMapping("/preview")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(PREVIEW_SCHEDULE_ERROR)
public Result previewSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "schedule") String schedule
){
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "schedule") String schedule
) {
logger.info("login user {}, project name: {}, preview schedule: {}",
loginUser.getUserName(), projectName, schedule);
try {
Map<String, Object> result = schedulerService.previewSchedule(loginUser, projectName, schedule);
return returnDataList(result);
} catch (Exception e) {
logger.error(PREVIEW_SCHEDULE_ERROR.getMsg(), e);
return error(PREVIEW_SCHEDULE_ERROR.getCode(), PREVIEW_SCHEDULE_ERROR.getMsg());
}
Map<String, Object> result = schedulerService.previewSchedule(loginUser, projectName, schedule);
return returnDataList(result);
}
}

65
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.TaskInstanceService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -24,7 +25,6 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.*;
import org.apache.dolphinscheduler.api.enums.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -34,13 +34,15 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_LIST_PAGING_ERROR;
/**
* task instance controller
*/
@Api(tags = "TASK_INSTANCE_TAG", position = 11)
@RestController
@RequestMapping("/projects/{projectName}/task-instance")
public class TaskInstanceController extends BaseController{
public class TaskInstanceController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(TaskInstanceController.class);
@ -51,34 +53,35 @@ public class TaskInstanceController extends BaseController{
/**
* query task list paging
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @param searchVal search value
* @param taskName task name
* @param stateType state type
* @param host host
* @param startTime start time
* @param endTime end time
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @param taskName task name
* @param stateType state type
* @param host host
* @param startTime start time
* @param endTime end time
* @param pageNo page number
* @param pageSize page size
* @return task list page
*/
@ApiOperation(value = "queryTaskListPaging", notes= "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES")
@ApiOperation(value = "queryTaskListPaging", notes = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID",required = false, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"),
@ApiImplicitParam(name = "taskName", value = "TASK_NAME", type ="String"),
@ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type ="String"),
@ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"),
@ApiImplicitParam(name = "host", value = "HOST", type ="String"),
@ApiImplicitParam(name = "startDate", value = "START_DATE", type ="String"),
@ApiImplicitParam(name = "endDate", value = "END_DATE", type ="String"),
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = false, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"),
@ApiImplicitParam(name = "taskName", value = "TASK_NAME", type = "String"),
@ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type = "String"),
@ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type = "ExecutionStatus"),
@ApiImplicitParam(name = "host", value = "HOST", type = "String"),
@ApiImplicitParam(name = "startDate", value = "START_DATE", type = "String"),
@ApiImplicitParam(name = "endDate", value = "END_DATE", type = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20")
})
@GetMapping("/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TASK_LIST_PAGING_ERROR)
public Result queryTaskListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId,
@ -90,20 +93,14 @@ public class TaskInstanceController extends BaseController{
@RequestParam(value = "startDate", required = false) String startTime,
@RequestParam(value = "endDate", required = false) String endTime,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize){
try{
logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, executor name: {},state type:{}, host:{}, start:{}, end:{}",
projectName, processInstanceId, searchVal, taskName, executorName, stateType, host, startTime, endTime);
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = taskInstanceService.queryTaskListPaging(
loginUser, projectName, processInstanceId, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.QUERY_TASK_LIST_PAGING_ERROR.getMsg(),e);
return error(Status.QUERY_TASK_LIST_PAGING_ERROR.getCode(), Status.QUERY_TASK_LIST_PAGING_ERROR.getMsg());
}
@RequestParam("pageSize") Integer pageSize) {
logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, executor name: {},state type:{}, host:{}, start:{}, end:{}",
projectName, processInstanceId, searchVal, taskName, executorName, stateType, host, startTime, endTime);
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = taskInstanceService.queryTaskListPaging(
loginUser, projectName, processInstanceId, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize);
return returnDataListPaging(result);
}
}

90
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java

@ -17,11 +17,11 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.TaskRecordService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.api.enums.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -31,13 +31,15 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* data quality controller
*/
@ApiIgnore
@RestController
@RequestMapping("/projects/task-record")
public class TaskRecordController extends BaseController{
public class TaskRecordController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(TaskRecordController.class);
@ -49,20 +51,21 @@ public class TaskRecordController extends BaseController{
/**
* query task record list page
*
* @param loginUser login user
* @param taskName task name
* @param state state
* @param loginUser login user
* @param taskName task name
* @param state state
* @param sourceTable source table
* @param destTable destination table
* @param taskDate task date
* @param startTime start time
* @param endTime end time
* @param pageNo page numbere
* @param pageSize page size
* @param destTable destination table
* @param taskDate task date
* @param startTime start time
* @param endTime end time
* @param pageNo page numbere
* @param pageSize page size
* @return task record list
*/
@GetMapping("/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TASK_RECORD_LIST_PAGING_ERROR)
public Result queryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "taskName", required = false) String taskName,
@RequestParam(value = "state", required = false) String state,
@ -73,59 +76,48 @@ public class TaskRecordController extends BaseController{
@RequestParam(value = "endDate", required = false) String endTime,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize
){
) {
try{
logger.info("query task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}",
taskName, state, taskDate, startTime, endTime);
Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(false, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize);
taskName, state, taskDate, startTime, endTime);
Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(false, taskName, startTime, taskDate, sourceTable, destTable, endTime, state, pageNo, pageSize);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e);
return error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg());
}
}
/**
* query history task record list paging
*
* @param loginUser login user
* @param taskName task name
* @param state state
* @param loginUser login user
* @param taskName task name
* @param state state
* @param sourceTable source table
* @param destTable destination table
* @param taskDate task date
* @param startTime start time
* @param endTime end time
* @param pageNo page number
* @param pageSize page size
* @param destTable destination table
* @param taskDate task date
* @param startTime start time
* @param endTime end time
* @param pageNo page number
* @param pageSize page size
* @return history task record list
*/
@GetMapping("/history-list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TASK_RECORD_LIST_PAGING_ERROR)
public Result queryHistoryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "taskName", required = false) String taskName,
@RequestParam(value = "state", required = false) String state,
@RequestParam(value = "sourceTable", required = false) String sourceTable,
@RequestParam(value = "destTable", required = false) String destTable,
@RequestParam(value = "taskDate", required = false) String taskDate,
@RequestParam(value = "startDate", required = false) String startTime,
@RequestParam(value = "endDate", required = false) String endTime,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize
){
try{
logger.info("query hisotry task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}",
taskName, state, taskDate, startTime, endTime);
Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(true, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e);
return error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg());
}
@RequestParam(value = "taskName", required = false) String taskName,
@RequestParam(value = "state", required = false) String state,
@RequestParam(value = "sourceTable", required = false) String sourceTable,
@RequestParam(value = "destTable", required = false) String destTable,
@RequestParam(value = "taskDate", required = false) String taskDate,
@RequestParam(value = "startDate", required = false) String startTime,
@RequestParam(value = "endDate", required = false) String endTime,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize
) {
logger.info("query hisotry task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}",
taskName, state, taskDate, startTime, endTime);
Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(true, taskName, startTime, taskDate, sourceTable, destTable, endTime, state, pageNo, pageSize);
return returnDataListPaging(result);
}
}

174
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java

@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.TenantService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -36,6 +37,8 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* tenant controller
@ -43,7 +46,7 @@ import java.util.Map;
@Api(tags = "TENANT_TAG", position = 1)
@RestController
@RequestMapping("/tenant")
public class TenantController extends BaseController{
public class TenantController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(TenantController.class);
@ -54,38 +57,33 @@ public class TenantController extends BaseController{
/**
* create tenant
*
* @param loginUser login user
* @param tenantCode tenant code
* @param tenantName tenant name
* @param queueId queue id
* @param loginUser login user
* @param tenantCode tenant code
* @param tenantName tenant name
* @param queueId queue id
* @param description description
* @return create result code
*/
@ApiOperation(value = "createTenant", notes= "CREATE_TENANT_NOTES")
@ApiOperation(value = "createTenant", notes = "CREATE_TENANT_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"),
@ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType ="String"),
@ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType ="Int",example = "100"),
@ApiImplicitParam(name = "description", value = "TENANT_DESC", dataType ="String")
@ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "description", value = "TENANT_DESC", dataType = "String")
})
@PostMapping(value = "/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_TENANT_ERROR)
public Result createTenant(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "tenantCode") String tenantCode,
@RequestParam(value = "tenantName") String tenantName,
@RequestParam(value = "queueId") int queueId,
@RequestParam(value = "description",required = false) String description) {
@RequestParam(value = "tenantCode") String tenantCode,
@RequestParam(value = "tenantName") String tenantName,
@RequestParam(value = "queueId") int queueId,
@RequestParam(value = "description", required = false) String description) throws Exception {
logger.info("login user {}, create tenant, tenantCode: {}, tenantName: {}, queueId: {}, desc: {}",
loginUser.getUserName(), tenantCode, tenantName, queueId,description);
try {
Map<String, Object> result = tenantService.createTenant(loginUser,tenantCode,tenantName,queueId,description);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.CREATE_TENANT_ERROR.getMsg(),e);
return error(Status.CREATE_TENANT_ERROR.getCode(), Status.CREATE_TENANT_ERROR.getMsg());
}
loginUser.getUserName(), tenantCode, tenantName, queueId, description);
Map<String, Object> result = tenantService.createTenant(loginUser, tenantCode, tenantName, queueId, description);
return returnDataList(result);
}
@ -94,36 +92,32 @@ public class TenantController extends BaseController{
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param pageNo page number
* @param pageSize page size
* @return tenant list page
*/
@ApiOperation(value = "queryTenantlistPaging", notes= "QUERY_TENANT_LIST_PAGING_NOTES")
@ApiOperation(value = "queryTenantlistPaging", notes = "QUERY_TENANT_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20")
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20")
})
@GetMapping(value="/list-paging")
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TENANT_LIST_PAGING_ERROR)
public Result queryTenantlistPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize){
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize) {
logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}",
loginUser.getUserName(),pageNo,searchVal,pageSize);
try{
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if(result.get(Constants.STATUS) != Status.SUCCESS){
return returnDataListPaging(result);
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = tenantService.queryTenantList(loginUser, searchVal, pageNo, pageSize);
loginUser.getUserName(), pageNo, searchVal, pageSize);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.QUERY_TENANT_LIST_PAGING_ERROR.getMsg(),e);
return error(Status.QUERY_TENANT_LIST_PAGING_ERROR.getCode(), Status.QUERY_TENANT_LIST_PAGING_ERROR.getMsg());
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = tenantService.queryTenantList(loginUser, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
@ -133,113 +127,95 @@ public class TenantController extends BaseController{
* @param loginUser login user
* @return tenant list
*/
@ApiOperation(value = "queryTenantlist", notes= "QUERY_TENANT_LIST_NOTES")
@GetMapping(value="/list")
@ApiOperation(value = "queryTenantlist", notes = "QUERY_TENANT_LIST_NOTES")
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
public Result queryTenantlist(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){
@ApiException(QUERY_TENANT_LIST_ERROR)
public Result queryTenantlist(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {}, query tenant list", loginUser.getUserName());
try{
Map<String, Object> result = tenantService.queryTenantList(loginUser);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.QUERY_TENANT_LIST_ERROR.getMsg(),e);
return error(Status.QUERY_TENANT_LIST_ERROR.getCode(), Status.QUERY_TENANT_LIST_ERROR.getMsg());
}
Map<String, Object> result = tenantService.queryTenantList(loginUser);
return returnDataList(result);
}
/**
* udpate tenant
*
* @param loginUser login user
* @param id tennat id
* @param tenantCode tennat code
* @param tenantName tennat name
* @param queueId queue id
* @param loginUser login user
* @param id tennat id
* @param tenantCode tennat code
* @param tenantName tennat name
* @param queueId queue id
* @param description description
* @return update result code
*/
@ApiOperation(value = "updateTenant", notes= "UPDATE_TENANT_NOTES")
@ApiOperation(value = "updateTenant", notes = "UPDATE_TENANT_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType ="Int", example = "100"),
@ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"),
@ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType ="String"),
@ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"),
@ApiImplicitParam(name = "description", value = "TENANT_DESC", type ="String")
@ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "description", value = "TENANT_DESC", type = "String")
})
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_TENANT_ERROR)
public Result updateTenant(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id,
@RequestParam(value = "tenantCode") String tenantCode,
@RequestParam(value = "tenantName") String tenantName,
@RequestParam(value = "queueId") int queueId,
@RequestParam(value = "description",required = false) String description) {
@RequestParam(value = "id") int id,
@RequestParam(value = "tenantCode") String tenantCode,
@RequestParam(value = "tenantName") String tenantName,
@RequestParam(value = "queueId") int queueId,
@RequestParam(value = "description", required = false) String description) throws Exception {
logger.info("login user {}, updateProcessInstance tenant, tenantCode: {}, tenantName: {}, queueId: {}, description: {}",
loginUser.getUserName(), tenantCode, tenantName, queueId,description);
try {
Map<String, Object> result = tenantService.updateTenant(loginUser,id,tenantCode, tenantName, queueId, description);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.UPDATE_TENANT_ERROR.getMsg(),e);
return error(Status.UPDATE_TENANT_ERROR.getCode(), Status.UPDATE_TENANT_ERROR.getMsg());
}
loginUser.getUserName(), tenantCode, tenantName, queueId, description);
Map<String, Object> result = tenantService.updateTenant(loginUser, id, tenantCode, tenantName, queueId, description);
return returnDataList(result);
}
/**
* delete tenant by id
*
* @param loginUser login user
* @param id tenant id
* @param id tenant id
* @return delete result code
*/
@ApiOperation(value = "deleteTenantById", notes= "DELETE_TENANT_NOTES")
@ApiOperation(value = "deleteTenantById", notes = "DELETE_TENANT_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType ="Int", example = "100")
@ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_TENANT_BY_ID_ERROR)
public Result deleteTenantById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id) {
@RequestParam(value = "id") int id) throws Exception {
logger.info("login user {}, delete tenant, tenantId: {},", loginUser.getUserName(), id);
try {
Map<String, Object> result = tenantService.deleteTenantById(loginUser,id);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.DELETE_TENANT_BY_ID_ERROR.getMsg(),e);
return error(Status.DELETE_TENANT_BY_ID_ERROR.getCode(), Status.DELETE_TENANT_BY_ID_ERROR.getMsg());
}
Map<String, Object> result = tenantService.deleteTenantById(loginUser, id);
return returnDataList(result);
}
/**
* verify tenant code
*
* @param loginUser login user
* @param loginUser login user
* @param tenantCode tenant code
* @return true if tenant code can user, otherwise return false
*/
@ApiOperation(value = "verifyTenantCode", notes= "VERIFY_TENANT_CODE_NOTES")
@ApiOperation(value = "verifyTenantCode", notes = "VERIFY_TENANT_CODE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String")
})
@GetMapping(value = "/verify-tenant-code")
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_TENANT_CODE_ERROR)
public Result verifyTenantCode(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value ="tenantCode") String tenantCode
@RequestParam(value = "tenantCode") String tenantCode
) {
try{
logger.info("login user {}, verfiy tenant code: {}",
loginUser.getUserName(),tenantCode);
return tenantService.verifyTenantCode(tenantCode);
}catch (Exception e){
logger.error(Status.VERIFY_TENANT_CODE_ERROR.getMsg(),e);
return error(Status.VERIFY_TENANT_CODE_ERROR.getCode(), Status.VERIFY_TENANT_CODE_ERROR.getMsg());
}
logger.info("login user {}, verfiy tenant code: {}",
loginUser.getUserName(), tenantCode);
return tenantService.verifyTenantCode(tenantCode);
}

359
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java

@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -36,14 +37,16 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* user controller
*/
@Api(tags = "USERS_TAG" , position = 14)
@Api(tags = "USERS_TAG", position = 14)
@RestController
@RequestMapping("/users")
public class UsersController extends BaseController{
public class UsersController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(UsersController.class);
@ -52,20 +55,20 @@ public class UsersController extends BaseController{
/**
* create user
*
* @param loginUser login user
* @param userName user name
*
* @param loginUser login user
* @param userName user name
* @param userPassword user password
* @param email email
* @param tenantId tenant id
* @param phone phone
* @param queue queue
* @param email email
* @param tenantId tenant id
* @param phone phone
* @param queue queue
* @return create result code
*/
@ApiOperation(value = "createUser", notes= "CREATE_USER_NOTES")
@ApiOperation(value = "createUser", notes = "CREATE_USER_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String"),
@ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type ="String"),
@ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String"),
@ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type = "String"),
@ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"),
@ -73,81 +76,73 @@ public class UsersController extends BaseController{
})
@PostMapping(value = "/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_USER_ERROR)
public Result createUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userName") String userName,
@RequestParam(value = "userPassword") String userPassword,
@RequestParam(value = "tenantId") int tenantId,
@RequestParam(value = "queue",required = false,defaultValue = "") String queue,
@RequestParam(value = "email") String email,
@RequestParam(value = "phone", required = false) String phone) {
@RequestParam(value = "userName") String userName,
@RequestParam(value = "userPassword") String userPassword,
@RequestParam(value = "tenantId") int tenantId,
@RequestParam(value = "queue", required = false, defaultValue = "") String queue,
@RequestParam(value = "email") String email,
@RequestParam(value = "phone", required = false) String phone) throws Exception {
logger.info("login user {}, create user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}",
loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue);
loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone, queue);
try {
Map<String, Object> result = usersService.createUser(loginUser, userName, userPassword,email,tenantId, phone,queue);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.CREATE_USER_ERROR.getMsg(),e);
return error(Status.CREATE_USER_ERROR.getCode(), Status.CREATE_USER_ERROR.getMsg());
}
Map<String, Object> result = usersService.createUser(loginUser, userName, userPassword, email, tenantId, phone, queue);
return returnDataList(result);
}
/**
* query user list paging
*
* @param loginUser login user
* @param pageNo page number
* @param pageNo page number
* @param searchVal search avlue
* @param pageSize page size
* @param pageSize page size
* @return user list page
*/
@ApiOperation(value = "queryUserList", notes= "QUERY_USER_LIST_NOTES")
@ApiOperation(value = "queryUserList", notes = "QUERY_USER_LIST_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO",dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", type ="String"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String")
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", type = "String"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String")
})
@GetMapping(value="/list-paging")
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_USER_LIST_PAGING_ERROR)
public Result queryUserList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize){
@RequestParam("pageSize") Integer pageSize) {
logger.info("login user {}, list user paging, pageNo: {}, searchVal: {}, pageSize: {}",
loginUser.getUserName(),pageNo,searchVal,pageSize);
try{
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if(result.get(Constants.STATUS) != Status.SUCCESS){
return returnDataListPaging(result);
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = usersService.queryUserList(loginUser, searchVal, pageNo, pageSize);
loginUser.getUserName(), pageNo, searchVal, pageSize);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.QUERY_USER_LIST_PAGING_ERROR.getMsg(),e);
return error(Status.QUERY_USER_LIST_PAGING_ERROR.getCode(), Status.QUERY_USER_LIST_PAGING_ERROR.getMsg());
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = usersService.queryUserList(loginUser, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* update user
*
* @param loginUser login user
* @param id user id
* @param userName user name
* @param loginUser login user
* @param id user id
* @param userName user name
* @param userPassword user password
* @param email email
* @param tenantId tennat id
* @param phone phone
* @param queue queue
* @param email email
* @param tenantId tennat id
* @param phone phone
* @param queue queue
* @return update result code
*/
@ApiOperation(value = "updateUser", notes= "UPDATE_USER_NOTES")
@ApiOperation(value = "updateUser", notes = "UPDATE_USER_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "USER_ID",dataType = "Int", example = "100"),
@ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String"),
@ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type ="String"),
@ApiImplicitParam(name = "id", value = "USER_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String"),
@ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type = "String"),
@ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"),
@ -155,103 +150,88 @@ public class UsersController extends BaseController{
})
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_USER_ERROR)
public Result updateUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id,
@RequestParam(value = "userName") String userName,
@RequestParam(value = "userPassword") String userPassword,
@RequestParam(value = "queue",required = false,defaultValue = "") String queue,
@RequestParam(value = "email") String email,
@RequestParam(value = "tenantId") int tenantId,
@RequestParam(value = "phone", required = false) String phone) {
@RequestParam(value = "id") int id,
@RequestParam(value = "userName") String userName,
@RequestParam(value = "userPassword") String userPassword,
@RequestParam(value = "queue", required = false, defaultValue = "") String queue,
@RequestParam(value = "email") String email,
@RequestParam(value = "tenantId") int tenantId,
@RequestParam(value = "phone", required = false) String phone) throws Exception {
logger.info("login user {}, updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}",
loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue);
try {
Map<String, Object> result = usersService.updateUser(id, userName, userPassword, email, tenantId, phone, queue);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.UPDATE_USER_ERROR.getMsg(),e);
return error(Status.UPDATE_USER_ERROR.getCode(), Status.UPDATE_USER_ERROR.getMsg());
}
loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone, queue);
Map<String, Object> result = usersService.updateUser(id, userName, userPassword, email, tenantId, phone, queue);
return returnDataList(result);
}
/**
* delete user by id
*
* @param loginUser login user
* @param id user id
* @param id user id
* @return delete result code
*/
@ApiOperation(value = "delUserById", notes= "DELETE_USER_BY_ID_NOTES")
@ApiOperation(value = "delUserById", notes = "DELETE_USER_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "USER_ID",dataType = "Int", example = "100")
@ApiImplicitParam(name = "id", value = "USER_ID", dataType = "Int", example = "100")
})
@PostMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_USER_BY_ID_ERROR)
public Result delUserById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int id) {
@RequestParam(value = "id") int id) throws Exception {
logger.info("login user {}, delete user, userId: {},", loginUser.getUserName(), id);
try {
Map<String, Object> result = usersService.deleteUserById(loginUser, id);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.DELETE_USER_BY_ID_ERROR.getMsg(),e);
return error(Status.DELETE_USER_BY_ID_ERROR.getCode(), Status.DELETE_USER_BY_ID_ERROR.getMsg());
}
Map<String, Object> result = usersService.deleteUserById(loginUser, id);
return returnDataList(result);
}
/**
* grant project
*
* @param loginUser login user
* @param userId user id
* @param loginUser login user
* @param userId user id
* @param projectIds project id array
* @return grant result code
*/
@ApiOperation(value = "grantProject", notes= "GRANT_PROJECT_NOTES")
@ApiOperation(value = "grantProject", notes = "GRANT_PROJECT_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"),
@ApiImplicitParam(name = "projectIds", value = "PROJECT_IDS",type = "String")
@ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "projectIds", value = "PROJECT_IDS", type = "String")
})
@PostMapping(value = "/grant-project")
@ResponseStatus(HttpStatus.OK)
@ApiException(GRANT_PROJECT_ERROR)
public Result grantProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "projectIds") String projectIds) {
logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(), userId,projectIds);
try {
Map<String, Object> result = usersService.grantProject(loginUser, userId, projectIds);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.GRANT_PROJECT_ERROR.getMsg(),e);
return error(Status.GRANT_PROJECT_ERROR.getCode(), Status.GRANT_PROJECT_ERROR.getMsg());
}
@RequestParam(value = "userId") int userId,
@RequestParam(value = "projectIds") String projectIds) {
logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(), userId, projectIds);
Map<String, Object> result = usersService.grantProject(loginUser, userId, projectIds);
return returnDataList(result);
}
/**
* grant resource
*
* @param loginUser login user
* @param userId user id
* @param loginUser login user
* @param userId user id
* @param resourceIds resource id array
* @return grant result code
*/
@ApiOperation(value = "grantResource", notes= "GRANT_RESOURCE_NOTES")
@ApiOperation(value = "grantResource", notes = "GRANT_RESOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"),
@ApiImplicitParam(name = "resourceIds", value = "RESOURCE_IDS",type = "String")
@ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "resourceIds", value = "RESOURCE_IDS", type = "String")
})
@PostMapping(value = "/grant-file")
@ResponseStatus(HttpStatus.OK)
@ApiException(GRANT_RESOURCE_ERROR)
public Result grantResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "resourceIds") String resourceIds) {
logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,resourceIds);
try {
Map<String, Object> result = usersService.grantResources(loginUser, userId, resourceIds);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.GRANT_RESOURCE_ERROR.getMsg(),e);
return error(Status.GRANT_RESOURCE_ERROR.getCode(), Status.GRANT_RESOURCE_ERROR.getMsg());
}
@RequestParam(value = "userId") int userId,
@RequestParam(value = "resourceIds") String resourceIds) {
logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId, resourceIds);
Map<String, Object> result = usersService.grantResources(loginUser, userId, resourceIds);
return returnDataList(result);
}
@ -259,58 +239,49 @@ public class UsersController extends BaseController{
* grant udf function
*
* @param loginUser login user
* @param userId user id
* @param udfIds udf id array
* @param userId user id
* @param udfIds udf id array
* @return grant result code
*/
@ApiOperation(value = "grantUDFFunc", notes= "GRANT_UDF_FUNC_NOTES")
@ApiOperation(value = "grantUDFFunc", notes = "GRANT_UDF_FUNC_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"),
@ApiImplicitParam(name = "udfIds", value = "UDF_IDS",type = "String")
@ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "udfIds", value = "UDF_IDS", type = "String")
})
@PostMapping(value = "/grant-udf-func")
@ResponseStatus(HttpStatus.OK)
@ApiException(GRANT_UDF_FUNCTION_ERROR)
public Result grantUDFFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "udfIds") String udfIds) {
logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,udfIds);
try {
Map<String, Object> result = usersService.grantUDFFunction(loginUser, userId, udfIds);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.GRANT_UDF_FUNCTION_ERROR.getMsg(),e);
return error(Status.GRANT_UDF_FUNCTION_ERROR.getCode(), Status.GRANT_UDF_FUNCTION_ERROR.getMsg());
}
@RequestParam(value = "userId") int userId,
@RequestParam(value = "udfIds") String udfIds) {
logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId, udfIds);
Map<String, Object> result = usersService.grantUDFFunction(loginUser, userId, udfIds);
return returnDataList(result);
}
/**
* grant datasource
*
* @param loginUser login user
* @param userId user id
* @param datasourceIds data source id array
* @param loginUser login user
* @param userId user id
* @param datasourceIds data source id array
* @return grant result code
*/
@ApiOperation(value = "grantDataSource", notes= "GRANT_DATASOURCE_NOTES")
@ApiOperation(value = "grantDataSource", notes = "GRANT_DATASOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"),
@ApiImplicitParam(name = "datasourceIds", value = "DATASOURCE_IDS",type = "String")
@ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "datasourceIds", value = "DATASOURCE_IDS", type = "String")
})
@PostMapping(value = "/grant-datasource")
@ResponseStatus(HttpStatus.OK)
@ApiException(GRANT_DATASOURCE_ERROR)
public Result grantDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "datasourceIds") String datasourceIds) {
logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(),userId,datasourceIds);
try {
Map<String, Object> result = usersService.grantDataSource(loginUser, userId, datasourceIds);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.GRANT_DATASOURCE_ERROR.getMsg(),e);
return error(Status.GRANT_DATASOURCE_ERROR.getCode(), Status.GRANT_DATASOURCE_ERROR.getMsg());
}
@RequestParam(value = "userId") int userId,
@RequestParam(value = "datasourceIds") String datasourceIds) {
logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(), userId, datasourceIds);
Map<String, Object> result = usersService.grantDataSource(loginUser, userId, datasourceIds);
return returnDataList(result);
}
@ -320,18 +291,14 @@ public class UsersController extends BaseController{
* @param loginUser login user
* @return user info
*/
@ApiOperation(value = "getUserInfo", notes= "GET_USER_INFO_NOTES")
@GetMapping(value="/get-user-info")
@ApiOperation(value = "getUserInfo", notes = "GET_USER_INFO_NOTES")
@GetMapping(value = "/get-user-info")
@ResponseStatus(HttpStatus.OK)
public Result getUserInfo(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){
@ApiException(GET_USER_INFO_ERROR)
public Result getUserInfo(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {},get user info", loginUser.getUserName());
try{
Map<String, Object> result = usersService.getUserInfo(loginUser);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.GET_USER_INFO_ERROR.getMsg(),e);
return error(Status.GET_USER_INFO_ERROR.getCode(), Status.GET_USER_INFO_ERROR.getMsg());
}
Map<String, Object> result = usersService.getUserInfo(loginUser);
return returnDataList(result);
}
/**
@ -340,18 +307,14 @@ public class UsersController extends BaseController{
* @param loginUser login user
* @return user list
*/
@ApiOperation(value = "listUser", notes= "LIST_USER_NOTES")
@GetMapping(value="/list")
@ApiOperation(value = "listUser", notes = "LIST_USER_NOTES")
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
public Result listUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){
@ApiException(USER_LIST_ERROR)
public Result listUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {}, user list", loginUser.getUserName());
try{
Map<String, Object> result = usersService.queryAllGeneralUsers(loginUser);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.USER_LIST_ERROR.getMsg(),e);
return error(Status.USER_LIST_ERROR.getCode(), Status.USER_LIST_ERROR.getMsg());
}
Map<String, Object> result = usersService.queryAllGeneralUsers(loginUser);
return returnDataList(result);
}
@ -361,17 +324,13 @@ public class UsersController extends BaseController{
* @param loginUser login user
* @return user list
*/
@GetMapping(value="/list-all")
@GetMapping(value = "/list-all")
@ResponseStatus(HttpStatus.OK)
public Result listAll(@RequestAttribute(value = Constants.SESSION_USER) User loginUser){
@ApiException(USER_LIST_ERROR)
public Result listAll(@RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {}, user list", loginUser.getUserName());
try{
Map<String, Object> result = usersService.queryUserList(loginUser);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.USER_LIST_ERROR.getMsg(),e);
return error(Status.USER_LIST_ERROR.getCode(), Status.USER_LIST_ERROR.getMsg());
}
Map<String, Object> result = usersService.queryUserList(loginUser);
return returnDataList(result);
}
@ -379,79 +338,71 @@ public class UsersController extends BaseController{
* verify username
*
* @param loginUser login user
* @param userName user name
* @param userName user name
* @return true if user name not exists, otherwise return false
*/
@ApiOperation(value = "verifyUserName", notes= "VERIFY_USER_NAME_NOTES")
@ApiOperation(value = "verifyUserName", notes = "VERIFY_USER_NAME_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String")
@ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String")
})
@GetMapping(value = "/verify-user-name")
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_USERNAME_ERROR)
public Result verifyUserName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value ="userName") String userName
@RequestParam(value = "userName") String userName
) {
try{
logger.info("login user {}, verfiy user name: {}",
loginUser.getUserName(),userName);
return usersService.verifyUserName(userName);
}catch (Exception e){
logger.error(Status.VERIFY_USERNAME_ERROR.getMsg(),e);
return error(Status.VERIFY_USERNAME_ERROR.getCode(), Status.VERIFY_USERNAME_ERROR.getMsg());
}
logger.info("login user {}, verfiy user name: {}",
loginUser.getUserName(), userName);
return usersService.verifyUserName(userName);
}
/**
* unauthorized user
*
* @param loginUser login user
* @param loginUser login user
* @param alertgroupId alert group id
* @return unauthorize result code
*/
@ApiOperation(value = "unauthorizedUser", notes= "UNAUTHORIZED_USER_NOTES")
@ApiOperation(value = "unauthorizedUser", notes = "UNAUTHORIZED_USER_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID",type = "String")
@ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID", type = "String")
})
@GetMapping(value = "/unauth-user")
@ResponseStatus(HttpStatus.OK)
@ApiException(UNAUTHORIZED_USER_ERROR)
public Result unauthorizedUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("alertgroupId") Integer alertgroupId) {
try{
logger.info("unauthorized user, login user:{}, alert group id:{}",
loginUser.getUserName(), alertgroupId);
Map<String, Object> result = usersService.unauthorizedUser(loginUser, alertgroupId);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.UNAUTHORIZED_USER_ERROR.getMsg(),e);
return error(Status.UNAUTHORIZED_USER_ERROR.getCode(), Status.UNAUTHORIZED_USER_ERROR.getMsg());
}
logger.info("unauthorized user, login user:{}, alert group id:{}",
loginUser.getUserName(), alertgroupId);
Map<String, Object> result = usersService.unauthorizedUser(loginUser, alertgroupId);
return returnDataList(result);
}
/**
* authorized user
*
* @param loginUser login user
* @param loginUser login user
* @param alertgroupId alert group id
* @return authorized result code
*/
@ApiOperation(value = "authorizedUser", notes= "AUTHORIZED_USER_NOTES")
@ApiOperation(value = "authorizedUser", notes = "AUTHORIZED_USER_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID",type = "String")
@ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID", type = "String")
})
@GetMapping(value = "/authed-user")
@ResponseStatus(HttpStatus.OK)
@ApiException(AUTHORIZED_USER_ERROR)
public Result authorizedUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("alertgroupId") Integer alertgroupId) {
try{
try {
logger.info("authorized user , login user:{}, alert group id:{}",
loginUser.getUserName(), alertgroupId);
Map<String, Object> result = usersService.authorizedUser(loginUser, alertgroupId);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.AUTHORIZED_USER_ERROR.getMsg(),e);
} catch (Exception e) {
logger.error(Status.AUTHORIZED_USER_ERROR.getMsg(), e);
return error(Status.AUTHORIZED_USER_ERROR.getCode(), Status.AUTHORIZED_USER_ERROR.getMsg());
}
}

109
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java

@ -17,7 +17,7 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.WorkerGroupService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@ -36,88 +36,53 @@ import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
/**
* worker group controller
*/
@Api(tags = "WORKER_GROUP_TAG", position = 1)
@RestController
@RequestMapping("/worker-group")
public class WorkerGroupController extends BaseController{
public class WorkerGroupController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(WorkerGroupController.class);
@Autowired
WorkerGroupService workerGroupService;
/**
* create or update a worker group
*
* @param loginUser login user
* @param id worker group id
* @param name worker group name
* @param ipList ip list
* @return create or update result code
*/
@ApiOperation(value = "saveWorkerGroup", notes= "CREATE_WORKER_GROUP_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", dataType = "Int", example = "10", defaultValue = "0"),
@ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType ="String"),
@ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType ="String")
})
@PostMapping(value = "/save")
@ResponseStatus(HttpStatus.OK)
public Result saveWorkerGroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id", required = false, defaultValue = "0") int id,
@RequestParam(value = "name") String name,
@RequestParam(value = "ipList") String ipList
) {
logger.info("save worker group: login user {}, id:{}, name: {}, ipList: {} ",
loginUser.getUserName(), id, name, ipList);
try {
Map<String, Object> result = workerGroupService.saveWorkerGroup(loginUser,id, name, ipList);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.SAVE_ERROR.getMsg(),e);
return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg());
}
}
/**
* query worker groups paging
*
* @param loginUser login user
* @param pageNo page number
* @param pageNo page number
* @param searchVal search value
* @param pageSize page size
* @param pageSize page size
* @return worker group list page
*/
@ApiOperation(value = "queryAllWorkerGroupsPaging", notes= "QUERY_WORKER_GROUP_PAGING_NOTES")
@ApiOperation(value = "queryAllWorkerGroupsPaging", notes = "QUERY_WORKER_GROUP_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", dataType = "Int", example = "10", defaultValue = "0"),
@ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType ="String"),
@ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType ="String")
@ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType = "String")
})
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_WORKER_GROUP_FAIL)
public Result queryAllWorkerGroupsPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize
) {
logger.info("query all worker group paging: login user {}, pageNo:{}, pageSize:{}, searchVal:{}",
loginUser.getUserName() , pageNo, pageSize, searchVal);
try {
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = workerGroupService.queryAllGroupPaging(loginUser,pageNo, pageSize, searchVal);
return returnDataListPaging(result);
}catch (Exception e){
logger.error(Status.QUERY_WORKER_GROUP_FAIL.getMsg(),e);
return error(Status.QUERY_WORKER_GROUP_FAIL.getCode(), Status.QUERY_WORKER_GROUP_FAIL.getMsg());
}
loginUser.getUserName(), pageNo, pageSize, searchVal);
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = workerGroupService.queryAllGroupPaging(loginUser, pageNo, pageSize, searchVal);
return returnDataListPaging(result);
}
/**
@ -126,48 +91,18 @@ public class WorkerGroupController extends BaseController{
* @param loginUser login user
* @return all worker group list
*/
@ApiOperation(value = "queryAllWorkerGroups", notes= "QUERY_WORKER_GROUP_LIST_NOTES")
@ApiOperation(value = "queryAllWorkerGroups", notes = "QUERY_WORKER_GROUP_LIST_NOTES")
@GetMapping(value = "/all-groups")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_WORKER_GROUP_FAIL)
public Result queryAllWorkerGroups(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser
) {
logger.info("query all worker group: login user {}",
loginUser.getUserName() );
try {
Map<String, Object> result = workerGroupService.queryAllGroup();
return returnDataList(result);
}catch (Exception e){
logger.error(Status.QUERY_WORKER_GROUP_FAIL.getMsg(),e);
return error(Status.QUERY_WORKER_GROUP_FAIL.getCode(), Status.QUERY_WORKER_GROUP_FAIL.getMsg());
}
loginUser.getUserName());
Map<String, Object> result = workerGroupService.queryAllGroup();
return returnDataList(result);
}
/**
* delete worker group by id
* @param loginUser login user
* @param id group id
* @return delete result code
*/
@ApiOperation(value = "deleteById", notes= "DELETE_WORKER_GROUP_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", required = true, dataType = "Int", example = "10"),
})
@GetMapping(value = "/delete-by-id")
@ResponseStatus(HttpStatus.OK)
public Result deleteById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("id") Integer id
) {
logger.info("delete worker group: login user {}, id:{} ",
loginUser.getUserName() , id);
try {
Map<String, Object> result = workerGroupService.deleteWorkerGroupById(id);
return returnDataList(result);
}catch (Exception e){
logger.error(Status.DELETE_WORKER_GROUP_FAIL.getMsg(),e);
return error(Status.DELETE_WORKER_GROUP_FAIL.getCode(), Status.DELETE_WORKER_GROUP_FAIL.getMsg());
}
}
}

16
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ProcessMeta.java

@ -96,19 +96,11 @@ public class ProcessMeta {
*/
private String scheduleProcessInstancePriority;
/**
* worker group id
*/
private Integer scheduleWorkerGroupId;
/**
* worker group name
*/
private String scheduleWorkerGroupName;
public ProcessMeta() {
}
public String getProjectName() {
return projectName;
}
@ -229,14 +221,6 @@ public class ProcessMeta {
this.scheduleProcessInstancePriority = scheduleProcessInstancePriority;
}
public Integer getScheduleWorkerGroupId() {
return scheduleWorkerGroupId;
}
public void setScheduleWorkerGroupId(int scheduleWorkerGroupId) {
this.scheduleWorkerGroupId = scheduleWorkerGroupId;
}
public String getScheduleWorkerGroupName() {
return scheduleWorkerGroupName;
}

30
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java

@ -27,6 +27,8 @@ public enum Status {
SUCCESS(0, "success", "成功"),
INTERNAL_SERVER_ERROR_ARGS(10000, "Internal Server Error: {0}", "服务端异常: {0}"),
REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"),
TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"),
USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"),
@ -134,7 +136,7 @@ public enum Status {
UPDATE_PROCESS_DEFINITION_ERROR(10107,"update process definition error", "更新工作流定义错误"),
RELEASE_PROCESS_DEFINITION_ERROR(10108,"release process definition error", "上线工作流错误"),
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109,"query datail of process definition error", "查询工作流详细信息错误"),
QUERY_PROCCESS_DEFINITION_LIST(10110,"query proccess definition list", "查询工作流列表错误"),
QUERY_PROCESS_DEFINITION_LIST(10110,"query process definition list", "查询工作流列表错误"),
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111,"encapsulation treeview structure error", "查询工作流树形图数据错误"),
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112,"get tasks list by process definition id error", "查询工作流定义节点信息错误"),
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113,"query process instance list paging error", "分页查询工作流实例列表错误"),
@ -146,7 +148,7 @@ public enum Status {
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,"query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120,"query process instance all variables error", "查询工作流自定义变量信息错误"),
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121,"encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"),
QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query proccess definition list paging error", "分页查询工作流定义列表错误"),
QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query process definition list paging error", "分页查询工作流定义列表错误"),
SIGN_OUT_ERROR(10123,"sign out error", "退出错误"),
TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code has already exists", "租户编码已存在"),
IP_IS_EMPTY(10125,"ip is empty", "IP地址不能为空"),
@ -166,15 +168,13 @@ public enum Status {
PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error", "预览调度配置错误"),
PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error", "解析调度表达式错误"),
SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end", "开始时间不能和结束时间一样"),
DELETE_TENANT_BY_ID_FAIL(100142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(100143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(100144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(100145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(100146,"query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(100147,"delete worker group fail ", "删除worker分组失败"),
DELETE_TENANT_BY_ID_FAIL(10142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(10143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(10144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(10145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(10146,"query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(10147,"delete worker group fail ", "删除worker分组失败"),
COPY_PROCESS_DEFINITION_ERROR(10148,"copy process definition error", "复制工作流错误"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
@ -190,7 +190,8 @@ public enum Status {
UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}","udf函数绑定了资源文件[{0}]"),
RESOURCE_IS_USED(20014, "resource file is used by process definition","资源文件被上线的流程定义使用了"),
PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist","父资源文件不存在"),
RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource","请检查任务节点并移除无权限或者已删除的资源"),
RESOURCE_IS_AUTHORIZED(20017, "resource is authorized to user {0},suffix not allowed to be modified", "资源文件已授权其他用户[{0}],后缀不允许修改"),
USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"),
USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission", "当前用户[{0}]没有[{1}]项目的操作权限"),
@ -213,8 +214,8 @@ public enum Status {
EXECUTE_PROCESS_INSTANCE_ERROR(50015,"execute process instance error", "操作工作流实例错误"),
CHECK_PROCESS_DEFINITION_ERROR(50016,"check process definition error", "检查工作流实例错误"),
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,"query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
DATA_IS_NOT_VALID(50017,"data %s not valid", "数据[%s]无效"),
DATA_IS_NULL(50018,"data %s is null", "数据[%s]不能为空"),
DATA_IS_NOT_VALID(50017,"data {0} not valid", "数据[{0}]无效"),
DATA_IS_NULL(50018,"data {0} is null", "数据[{0}]不能为空"),
PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle", "流程节点间存在循环依赖"),
PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node %s parameter invalid", "流程节点[%s]参数无效"),
PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"),
@ -225,6 +226,7 @@ public enum Status {
BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026,"batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"),
TENANT_NOT_SUITABLE(50027,"there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"),
EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028,"export process definition by id error", "导出工作流定义错误"),
BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028,"batch export process definition by ids error", "批量导出工作流定义错误"),
IMPORT_PROCESS_DEFINE_ERROR(50029,"import process definition error", "导入工作流定义错误"),
HDFS_NOT_STARTUP(60001,"hdfs not startup", "hdfs未启用"),

3
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java

@ -83,6 +83,9 @@ public class AccessTokenService extends BaseService {
public Map<String, Object> createToken(int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
if (userId <= 0) {
throw new IllegalArgumentException("User id should not less than or equals to 0.");
}
AccessToken accessToken = new AccessToken();
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));

21
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java

@ -29,8 +29,6 @@ import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.ITaskQueue;
import org.apache.dolphinscheduler.service.queue.TaskQueueFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -108,14 +106,12 @@ public class DataAnalysisService extends BaseService{
List<ExecuteStatusCount> taskInstanceStateCounts =
taskInstanceMapper.countTaskInstanceStateByUser(start, end, projectIds);
if (taskInstanceStateCounts != null && !taskInstanceStateCounts.isEmpty()) {
if (taskInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.TASK_INSTANCE_STATE_COUNT_ERROR);
}
return result;
return result;
}
private void putErrorRequestParamsMsg(Map<String, Object> result) {
@ -155,14 +151,12 @@ public class DataAnalysisService extends BaseService{
processInstanceMapper.countInstanceStateByUser(start, end,
projectIdArray);
if (processInstanceStateCounts != null && !processInstanceStateCounts.isEmpty()) {
if (processInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.COUNT_PROCESS_INSTANCE_STATE_ERROR);
}
return result;
return result;
}
@ -236,7 +230,7 @@ public class DataAnalysisService extends BaseService{
// count error command state
List<CommandCount> errorCommandStateCounts =
errorCommandMapper.countCommandState(
start, end, projectIdArray);
start, end, projectIdArray);
//
Map<CommandType,Map<String,Integer>> dataMap = new HashMap<>();
@ -318,9 +312,8 @@ public class DataAnalysisService extends BaseService{
return result;
}
ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance();
List<String> tasksQueueList = tasksQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_QUEUE);
List<String> tasksKillList = tasksQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_KILL);
List<String> tasksQueueList = new ArrayList<>();
List<String> tasksKillList = new ArrayList<>();
Map<String,Integer> dataMap = new HashMap<>();
if (loginUser.getUserType() == UserType.ADMIN_USER){

59
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java

@ -17,10 +17,15 @@
package org.apache.dolphinscheduler.api.service;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
@ -30,10 +35,6 @@ import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
@ -210,12 +211,20 @@ public class DataSourceService extends BaseService{
String parameter = dataSource.getConnectionParams();
BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter);
DbConnectType connectType = null;
String hostSeperator = Constants.DOUBLE_SLASH;
if(DbType.ORACLE.equals(dataSource.getType())){
connectType = ((OracleDataSource) datasourceForm).getConnectType();
if(DbConnectType.ORACLE_SID.equals(connectType)){
hostSeperator = Constants.AT_SIGN;
}
}
String database = datasourceForm.getDatabase();
// jdbc connection params
String other = datasourceForm.getOther();
String address = datasourceForm.getAddress();
String[] hostsPorts = getHostsAndPort(address);
String[] hostsPorts = getHostsAndPort(address,hostSeperator);
// ip host
String host = hostsPorts[0];
// prot
@ -251,6 +260,10 @@ public class DataSourceService extends BaseService{
map.put(NAME, dataSourceName);
map.put(NOTE, desc);
map.put(TYPE, dataSourceType);
if (connectType != null) {
map.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
}
map.put(HOST, host);
map.put(PORT, port);
map.put(PRINCIPAL, datasourceForm.getPrincipal());
@ -473,12 +486,16 @@ public class DataSourceService extends BaseService{
* @return datasource parameter
*/
public String buildParameter(String name, String desc, DbType type, String host,
String port, String database,String principal,String userName,
String password, String other) {
String address = buildAddress(type, host, port);
String port, String database, String principal, String userName,
String password, DbConnectType connectType, String other) {
String address = buildAddress(type, host, port, connectType);
Map<String, Object> parameterMap = new LinkedHashMap<String, Object>(6);
String jdbcUrl = address + "/" + database;
if (Constants.ORACLE.equals(type.name())) {
parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
}
if (CommonUtils.getKerberosStartupState() &&
(type == DbType.HIVE || type == DbType.SPARK)){
jdbcUrl += ";principal=" + principal;
@ -497,7 +514,6 @@ public class DataSourceService extends BaseService{
separator = ";";
}
Map<String, Object> parameterMap = new LinkedHashMap<String, Object>(6);
parameterMap.put(Constants.ADDRESS, address);
parameterMap.put(Constants.DATABASE, database);
parameterMap.put(Constants.JDBC_URL, jdbcUrl);
@ -531,7 +547,7 @@ public class DataSourceService extends BaseService{
}
private String buildAddress(DbType type, String host, String port) {
private String buildAddress(DbType type, String host, String port, DbConnectType connectType) {
StringBuilder sb = new StringBuilder();
if (Constants.MYSQL.equals(type.name())) {
sb.append(Constants.JDBC_MYSQL);
@ -552,7 +568,11 @@ public class DataSourceService extends BaseService{
sb.append(Constants.JDBC_CLICKHOUSE);
sb.append(host).append(":").append(port);
} else if (Constants.ORACLE.equals(type.name())) {
sb.append(Constants.JDBC_ORACLE);
if (connectType == DbConnectType.ORACLE_SID) {
sb.append(Constants.JDBC_ORACLE_SID);
} else {
sb.append(Constants.JDBC_ORACLE_SERVICE_NAME);
}
sb.append(host).append(":").append(port);
} else if (Constants.SQLSERVER.equals(type.name())) {
sb.append(Constants.JDBC_SQLSERVER);
@ -663,12 +683,23 @@ public class DataSourceService extends BaseService{
/**
* get host and port by address
*
* @param address
* @param address address
* @return sting array: [host,port]
*/
private String[] getHostsAndPort(String address) {
return getHostsAndPort(address,Constants.DOUBLE_SLASH);
}
/**
* get host and port by address
*
* @param address address
* @param separator separator
* @return sting array: [host,port]
*/
private String[] getHostsAndPort(String address,String separator) {
String[] result = new String[2];
String[] tmpArray = address.split(Constants.DOUBLE_SLASH);
String[] tmpArray = address.split(separator);
String hostsAndPorts = tmpArray[tmpArray.length - 1];
StringBuilder hosts = new StringBuilder();
String[] hostPortArray = hostsAndPorts.split(Constants.COMMA);

41
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java

@ -86,7 +86,7 @@ public class ExecutorService extends BaseService{
* @param receivers receivers
* @param receiversCc receivers cc
* @param processInstancePriority process instance priority
* @param workerGroupId worker group id
* @param workerGroup worker group name
* @param runMode run mode
* @param timeout timeout
* @return execute process instance code
@ -97,7 +97,7 @@ public class ExecutorService extends BaseService{
FailureStrategy failureStrategy, String startNodeList,
TaskDependType taskDependType, WarningType warningType, int warningGroupId,
String receivers, String receiversCc, RunMode runMode,
Priority processInstancePriority, int workerGroupId, Integer timeout) throws ParseException {
Priority processInstancePriority, String workerGroup, Integer timeout) throws ParseException {
Map<String, Object> result = new HashMap<>(5);
// timeout is invalid
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) {
@ -135,7 +135,7 @@ public class ExecutorService extends BaseService{
*/
int create = this.createCommand(commandType, processDefinitionId,
taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(),
warningGroupId, runMode,processInstancePriority, workerGroupId);
warningGroupId, runMode,processInstancePriority, workerGroup);
if(create > 0 ){
/**
* according to the process definition ID updateProcessInstance and CC recipient
@ -463,25 +463,26 @@ public class ExecutorService extends BaseService{
/**
* create command
*
* @param commandType
* @param processDefineId
* @param nodeDep
* @param failureStrategy
* @param startNodeList
* @param schedule
* @param warningType
* @param excutorId
* @param warningGroupId
* @param runMode
* @return
* @param commandType commandType
* @param processDefineId processDefineId
* @param nodeDep nodeDep
* @param failureStrategy failureStrategy
* @param startNodeList startNodeList
* @param schedule schedule
* @param warningType warningType
* @param executorId executorId
* @param warningGroupId warningGroupId
* @param runMode runMode
* @param processInstancePriority processInstancePriority
* @param workerGroup workerGroup
* @return command id
* @throws ParseException
*/
private int createCommand(CommandType commandType, int processDefineId,
TaskDependType nodeDep, FailureStrategy failureStrategy,
String startNodeList, String schedule, WarningType warningType,
int excutorId, int warningGroupId,
RunMode runMode,Priority processInstancePriority, int workerGroupId){
int executorId, int warningGroupId,
RunMode runMode,Priority processInstancePriority, String workerGroup) throws ParseException {
/**
* instantiate command schedule instance
@ -509,10 +510,10 @@ public class ExecutorService extends BaseService{
command.setWarningType(warningType);
}
command.setCommandParam(JSONUtils.toJson(cmdParam));
command.setExecutorId(excutorId);
command.setExecutorId(executorId);
command.setWarningGroupId(warningGroupId);
command.setProcessInstancePriority(processInstancePriority);
command.setWorkerGroupId(workerGroupId);
command.setWorkerGroup(workerGroup);
Date start = null;
Date end = null;
@ -569,7 +570,7 @@ public class ExecutorService extends BaseService{
processDefineId, schedule);
}
}else{
command.setCommandParam(JSONUtils.toJson(cmdParam));
return processService.createCommand(command);
}

34
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java

@ -21,6 +21,7 @@ import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
@ -64,25 +65,24 @@ public class LoggerService {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null){
return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg());
}
String host = taskInstance.getHost();
if(StringUtils.isEmpty(host)){
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())){
return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg());
}
String host = getHost(taskInstance.getHost());
Result result = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
logger.info("log host : {} , logPath : {} , logServer port : {}",host,taskInstance.getLogPath(),Constants.RPC_PORT);
String log = logClient.rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(),skipLineNum,limit);
result.setData(log);
logger.info(log);
return result;
}
/**
* get log size
*
@ -91,10 +91,24 @@ public class LoggerService {
*/
public byte[] getLogBytes(int taskInstId) {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null){
throw new RuntimeException("task instance is null");
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())){
throw new RuntimeException("task instance is null or host is null");
}
String host = taskInstance.getHost();
String host = getHost(taskInstance.getHost());
return logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath());
}
/**
* get host
* @param address address
* @return old version return true ,otherwise return false
*/
private String getHost(String address){
if (Host.isOldVersion(address)){
return address;
}
return Host.of(address).getIp();
}
}

412
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java

@ -44,6 +44,7 @@ import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.permission.PermissionCheck;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -95,9 +96,6 @@ public class ProcessDefinitionService extends BaseDAGService {
@Autowired
private ProcessService processService;
@Autowired
private WorkerGroupMapper workerGroupMapper;
/**
* create process definition
*
@ -111,8 +109,13 @@ public class ProcessDefinitionService extends BaseDAGService {
* @return create result code
* @throws JsonProcessingException JsonProcessingException
*/
public Map<String, Object> createProcessDefinition(User loginUser, String projectName, String name,
String processDefinitionJson, String desc, String locations, String connects) throws JsonProcessingException {
public Map<String, Object> createProcessDefinition(User loginUser,
String projectName,
String name,
String processDefinitionJson,
String desc,
String locations,
String connects) throws JsonProcessingException {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
@ -143,10 +146,11 @@ public class ProcessDefinitionService extends BaseDAGService {
processDefine.setTimeout(processData.getTimeout());
processDefine.setTenantId(processData.getTenantId());
processDefine.setModifyBy(loginUser.getUserName());
processDefine.setResourceIds(getResourceIds(processData));
//custom global params
List<Property> globalParamsList = processData.getGlobalParams();
if (globalParamsList != null && globalParamsList.size() > 0) {
if (CollectionUtils.isNotEmpty(globalParamsList)) {
Set<Property> globalParamsSet = new HashSet<>(globalParamsList);
globalParamsList = new ArrayList<>(globalParamsSet);
processDefine.setGlobalParamList(globalParamsList);
@ -171,8 +175,10 @@ public class ProcessDefinitionService extends BaseDAGService {
for(TaskNode taskNode : tasks){
String taskParameter = taskNode.getParams();
AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(),taskParameter);
Set<Integer> tempSet = params.getResourceFilesList().stream().map(t->t.getId()).collect(Collectors.toSet());
resourceIds.addAll(tempSet);
if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) {
Set<Integer> tempSet = params.getResourceFilesList().stream().map(t->t.getId()).collect(Collectors.toSet());
resourceIds.addAll(tempSet);
}
}
StringBuilder sb = new StringBuilder();
@ -187,13 +193,13 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* query proccess definition list
* query process definition list
*
* @param loginUser login user
* @param projectName project name
* @return definition list
*/
public Map<String, Object> queryProccessDefinitionList(User loginUser, String projectName) {
public Map<String, Object> queryProcessDefinitionList(User loginUser, String projectName) {
HashMap<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
@ -213,7 +219,7 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* query proccess definition list paging
* query process definition list paging
*
* @param loginUser login user
* @param projectName project name
@ -255,7 +261,7 @@ public class ProcessDefinitionService extends BaseDAGService {
* @param processId process definition id
* @return process definition detail
*/
public Map<String, Object> queryProccessDefinitionById(User loginUser, String projectName, Integer processId) {
public Map<String, Object> queryProcessDefinitionById(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>(5);
@ -277,6 +283,41 @@ public class ProcessDefinitionService extends BaseDAGService {
return result;
}
/**
* copy process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @return copy result code
*/
public Map<String, Object> copyProcessDefinition(User loginUser, String projectName, Integer processId) throws JsonProcessingException{
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
return result;
} else {
return createProcessDefinition(
loginUser,
projectName,
processDefinition.getName()+"_copy_"+System.currentTimeMillis(),
processDefinition.getProcessDefinitionJson(),
processDefinition.getDescription(),
processDefinition.getLocations(),
processDefinition.getConnects());
}
}
/**
* update process definition
*
@ -333,10 +374,11 @@ public class ProcessDefinitionService extends BaseDAGService {
processDefine.setTimeout(processData.getTimeout());
processDefine.setTenantId(processData.getTenantId());
processDefine.setModifyBy(loginUser.getUserName());
processDefine.setResourceIds(getResourceIds(processData));
//custom global params
List<Property> globalParamsList = new ArrayList<>();
if (processData.getGlobalParams() != null && processData.getGlobalParams().size() > 0) {
if (CollectionUtils.isNotEmpty(processData.getGlobalParams())) {
Set<Property> userDefParamsSet = new HashSet<>(processData.getGlobalParams());
globalParamsList = new ArrayList<>(userDefParamsSet);
}
@ -360,22 +402,22 @@ public class ProcessDefinitionService extends BaseDAGService {
* @param name name
* @return true if process definition name not exists, otherwise false
*/
public Map<String, Object> verifyProccessDefinitionName(User loginUser, String projectName, String name) {
public Map<String, Object> verifyProcessDefinitionName(User loginUser, String projectName, String name) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(), name);
if (processDefinition == null) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.PROCESS_INSTANCE_EXIST, name);
}
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(), name);
if (processDefinition == null) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.PROCESS_INSTANCE_EXIST, name);
}
return result;
}
@ -475,12 +517,25 @@ public class ProcessDefinitionService extends BaseDAGService {
ProcessDefinition processDefinition = processDefineMapper.selectById(id);
switch (state) {
case ONLINE: {
case ONLINE:
// To check resources whether they are already cancel authorized or deleted
String resourceIds = processDefinition.getResourceIds();
if (StringUtils.isNotBlank(resourceIds)) {
Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new);
PermissionCheck<Integer> permissionCheck = new PermissionCheck(AuthorizationType.RESOURCE_FILE_ID,processService,resourceIdArray,loginUser.getId(),logger);
try {
permissionCheck.checkPermission();
} catch (Exception e) {
logger.error(e.getMessage(),e);
putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION, "releaseState");
return result;
}
}
processDefinition.setReleaseState(state);
processDefineMapper.updateById(processDefinition);
break;
}
case OFFLINE: {
case OFFLINE:
processDefinition.setReleaseState(state);
processDefineMapper.updateById(processDefinition);
List<Schedule> scheduleList = scheduleMapper.selectAllByProcessDefineArray(
@ -495,11 +550,9 @@ public class ProcessDefinitionService extends BaseDAGService {
SchedulerService.deleteSchedule(project.getId(), schedule.getId());
}
break;
}
default: {
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "releaseState");
return result;
}
}
putMsg(result, Status.SUCCESS);
@ -507,14 +560,18 @@ public class ProcessDefinitionService extends BaseDAGService {
}
/**
* export process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param response response
* batch export process definition by ids
* @param loginUser
* @param projectName
* @param processDefinitionIds
* @param response
*/
public void exportProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId, HttpServletResponse response) {
public void batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response){
if(StringUtils.isEmpty(processDefinitionIds)){
return;
}
//export project info
Project project = projectMapper.queryByName(projectName);
@ -522,39 +579,68 @@ public class ProcessDefinitionService extends BaseDAGService {
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus == Status.SUCCESS) {
if(resultStatus != Status.SUCCESS){
return;
}
List<ProcessMeta> processDefinitionList =
getProcessDefinitionList(processDefinitionIds);
if(CollectionUtils.isNotEmpty(processDefinitionList)){
downloadProcessDefinitionFile(response, processDefinitionList);
}
}
/**
* get process definition list by ids
* @param processDefinitionIds
* @return
*/
private List<ProcessMeta> getProcessDefinitionList(String processDefinitionIds){
List<ProcessMeta> processDefinitionList = new ArrayList<>();
String[] processDefinitionIdArray = processDefinitionIds.split(",");
for (String strProcessDefinitionId : processDefinitionIdArray) {
//get workflow info
int processDefinitionId = Integer.parseInt(strProcessDefinitionId);
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
if (null != processDefinition) {
String exportProcessJson = exportProcessMetaDataStr(processDefinitionId, processDefinition);
response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
response.setHeader("Content-Disposition", "attachment;filename="+processDefinition.getName()+".json");
BufferedOutputStream buff = null;
ServletOutputStream out = null;
processDefinitionList.add(exportProcessMetaData(processDefinitionId, processDefinition));
}
}
return processDefinitionList;
}
/**
* download the process definition file
* @param response
* @param processDefinitionList
*/
private void downloadProcessDefinitionFile(HttpServletResponse response, List<ProcessMeta> processDefinitionList) {
response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
BufferedOutputStream buff = null;
ServletOutputStream out = null;
try {
out = response.getOutputStream();
buff = new BufferedOutputStream(out);
buff.write(JSON.toJSONString(processDefinitionList).getBytes(StandardCharsets.UTF_8));
buff.flush();
buff.close();
} catch (IOException e) {
logger.warn("export process fail", e);
}finally {
if (null != buff) {
try {
out = response.getOutputStream();
buff = new BufferedOutputStream(out);
buff.write(exportProcessJson.getBytes(StandardCharsets.UTF_8));
buff.flush();
buff.close();
} catch (IOException e) {
logger.warn("export process fail", e);
}finally {
if (null != buff) {
try {
buff.close();
} catch (Exception e) {
logger.warn("export process buffer not close", e);
}
}
if (null != out) {
try {
out.close();
} catch (Exception e) {
logger.warn("export process output stream not close", e);
}
}
} catch (Exception e) {
logger.warn("export process buffer not close", e);
}
}
if (null != out) {
try {
out.close();
} catch (Exception e) {
logger.warn("export process output stream not close", e);
}
}
}
@ -567,6 +653,17 @@ public class ProcessDefinitionService extends BaseDAGService {
* @return export process metadata string
*/
public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) {
//create workflow json file
return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId,processDefinition));
}
/**
* get export process metadata string
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @return export process metadata string
*/
public ProcessMeta exportProcessMetaData(Integer processDefinitionId, ProcessDefinition processDefinition) {
//correct task param which has data source or dependent param
String correctProcessDefinitionJson = addExportTaskNodeSpecialParam(processDefinition.getProcessDefinitionJson());
processDefinition.setProcessDefinitionJson(correctProcessDefinitionJson);
@ -583,14 +680,6 @@ public class ProcessDefinitionService extends BaseDAGService {
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId);
if (!schedules.isEmpty()) {
Schedule schedule = schedules.get(0);
WorkerGroup workerGroup = workerGroupMapper.selectById(schedule.getWorkerGroupId());
if (null == workerGroup && schedule.getWorkerGroupId() == -1) {
workerGroup = new WorkerGroup();
workerGroup.setId(-1);
workerGroup.setName("");
}
exportProcessMeta.setScheduleWarningType(schedule.getWarningType().toString());
exportProcessMeta.setScheduleWarningGroupId(schedule.getWarningGroupId());
exportProcessMeta.setScheduleStartTime(DateUtils.dateToString(schedule.getStartTime()));
@ -599,14 +688,10 @@ public class ProcessDefinitionService extends BaseDAGService {
exportProcessMeta.setScheduleFailureStrategy(String.valueOf(schedule.getFailureStrategy()));
exportProcessMeta.setScheduleReleaseState(String.valueOf(ReleaseState.OFFLINE));
exportProcessMeta.setScheduleProcessInstancePriority(String.valueOf(schedule.getProcessInstancePriority()));
if (null != workerGroup) {
exportProcessMeta.setScheduleWorkerGroupId(workerGroup.getId());
exportProcessMeta.setScheduleWorkerGroupName(workerGroup.getName());
}
exportProcessMeta.setScheduleWorkerGroupName(schedule.getWorkerGroup());
}
//create workflow json file
return JSONUtils.toJsonString(exportProcessMeta);
return exportProcessMeta;
}
/**
@ -653,24 +738,36 @@ public class ProcessDefinitionService extends BaseDAGService {
public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
Map<String, Object> result = new HashMap<>(5);
String processMetaJson = FileUtils.file2String(file);
ProcessMeta processMeta = JSONUtils.parseObject(processMetaJson, ProcessMeta.class);
List<ProcessMeta> processMetaList = JSON.parseArray(processMetaJson,ProcessMeta.class);
//check file content
if (null == processMeta) {
if (CollectionUtils.isEmpty(processMetaList)) {
putMsg(result, Status.DATA_IS_NULL, "fileContent");
return result;
}
if (StringUtils.isEmpty(processMeta.getProjectName())) {
putMsg(result, Status.DATA_IS_NULL, "projectName");
return result;
}
if (StringUtils.isEmpty(processMeta.getProcessDefinitionName())) {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
return result;
for(ProcessMeta processMeta:processMetaList){
if (!checkAndImportProcessDefinition(loginUser, currentProjectName, result, processMeta)){
return result;
}
}
if (StringUtils.isEmpty(processMeta.getProcessDefinitionJson())) {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson");
return result;
return result;
}
/**
* check and import process definition
* @param loginUser
* @param currentProjectName
* @param result
* @param processMeta
* @return
*/
private boolean checkAndImportProcessDefinition(User loginUser, String currentProjectName, Map<String, Object> result, ProcessMeta processMeta) {
if(!checkImportanceParams(processMeta,result)){
return false;
}
//deal with process name
@ -682,31 +779,94 @@ public class ProcessDefinitionService extends BaseDAGService {
processDefinitionName, 1);
}
//add special task param
String importProcessParam = addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject);
//unique check
Map<String, Object> checkResult = verifyProcessDefinitionName(loginUser, currentProjectName, processDefinitionName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (Status.SUCCESS.equals(status)) {
putMsg(result, Status.SUCCESS);
} else {
result.putAll(checkResult);
return false;
}
// get create process result
Map<String, Object> createProcessResult =
getCreateProcessResult(loginUser,
currentProjectName,
result,
processMeta,
processDefinitionName,
addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject));
if(createProcessResult == null){
return false;
}
Map<String, Object> createProcessResult;
//create process definition
Integer processDefinitionId =
Objects.isNull(createProcessResult.get("processDefinitionId"))?
null:Integer.parseInt(createProcessResult.get("processDefinitionId").toString());
//scheduler param
return getImportProcessScheduleResult(loginUser,
currentProjectName,
result,
processMeta,
processDefinitionName,
processDefinitionId);
}
/**
* get create process result
* @param loginUser
* @param currentProjectName
* @param result
* @param processMeta
* @param processDefinitionName
* @param importProcessParam
* @return
*/
private Map<String, Object> getCreateProcessResult(User loginUser,
String currentProjectName,
Map<String, Object> result,
ProcessMeta processMeta,
String processDefinitionName,
String importProcessParam){
Map<String, Object> createProcessResult = null;
try {
createProcessResult = createProcessDefinition(loginUser
,currentProjectName,
processDefinitionName,
processDefinitionName+"_import_"+System.currentTimeMillis(),
importProcessParam,
processMeta.getProcessDefinitionDescription(),
processMeta.getProcessDefinitionLocations(),
processMeta.getProcessDefinitionConnects());
putMsg(result, Status.SUCCESS);
} catch (JsonProcessingException e) {
logger.error("import process meta json data: {}", e.getMessage(), e);
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
return result;
}
putMsg(result, Status.SUCCESS);
//create process definition
Integer processDefinitionId = null;
if (null != createProcessResult && Objects.nonNull(createProcessResult.get("processDefinitionId"))) {
processDefinitionId = Integer.parseInt(createProcessResult.get("processDefinitionId").toString());
}
//scheduler param
return createProcessResult;
}
/**
* get import process schedule result
* @param loginUser
* @param currentProjectName
* @param result
* @param processMeta
* @param processDefinitionName
* @param processDefinitionId
* @return
*/
private boolean getImportProcessScheduleResult(User loginUser,
String currentProjectName,
Map<String, Object> result,
ProcessMeta processMeta,
String processDefinitionName,
Integer processDefinitionId) {
if (null != processMeta.getScheduleCrontab() && null != processDefinitionId) {
int scheduleInsert = importProcessSchedule(loginUser,
currentProjectName,
@ -716,11 +876,33 @@ public class ProcessDefinitionService extends BaseDAGService {
if (0 == scheduleInsert) {
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
return result;
return false;
}
}
return true;
}
return result;
/**
* check importance params
* @param processMeta
* @param result
* @return
*/
private boolean checkImportanceParams(ProcessMeta processMeta,Map<String, Object> result){
if (StringUtils.isEmpty(processMeta.getProjectName())) {
putMsg(result, Status.DATA_IS_NULL, "projectName");
return false;
}
if (StringUtils.isEmpty(processMeta.getProcessDefinitionName())) {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
return false;
}
if (StringUtils.isEmpty(processMeta.getProcessDefinitionJson())) {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson");
return false;
}
return true;
}
/**
@ -802,15 +984,9 @@ public class ProcessDefinitionService extends BaseDAGService {
if (null != processMeta.getScheduleProcessInstancePriority()) {
scheduleObj.setProcessInstancePriority(Priority.valueOf(processMeta.getScheduleProcessInstancePriority()));
}
if (null != processMeta.getScheduleWorkerGroupId()) {
scheduleObj.setWorkerGroupId(processMeta.getScheduleWorkerGroupId());
} else {
if (null != processMeta.getScheduleWorkerGroupName()) {
List<WorkerGroup> workerGroups = workerGroupMapper.queryWorkerGroupByName(processMeta.getScheduleWorkerGroupName());
if(CollectionUtils.isNotEmpty(workerGroups)){
scheduleObj.setWorkerGroupId(workerGroups.get(0).getId());
}
}
if (null != processMeta.getScheduleWorkerGroupName()) {
scheduleObj.setWorkerGroup(processMeta.getScheduleWorkerGroupName());
}
return scheduleMapper.insert(scheduleObj);
@ -1032,12 +1208,12 @@ public class ProcessDefinitionService extends BaseDAGService {
/**
* query proccess definition all by project id
* query process definition all by project id
*
* @param projectId project id
* @return process definitions in the project
*/
public Map<String, Object> queryProccessDefinitionAllByProjectId(Integer projectId) {
public Map<String, Object> queryProcessDefinitionAllByProjectId(Integer projectId) {
HashMap<String, Object> result = new HashMap<>(5);

82
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java

@ -39,7 +39,6 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.ITaskQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -92,8 +91,7 @@ public class ProcessInstanceService extends BaseDAGService {
@Autowired
LoggerService loggerService;
@Autowired
WorkerGroupMapper workerGroupMapper;
@Autowired
UsersService usersService;
@ -116,18 +114,7 @@ public class ProcessInstanceService extends BaseDAGService {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
String workerGroupName = "";
if(processInstance.getWorkerGroupId() == -1){
workerGroupName = DEFAULT;
}else{
WorkerGroup workerGroup = workerGroupMapper.selectById(processInstance.getWorkerGroupId());
if(workerGroup != null){
workerGroupName = workerGroup.getName();
}else{
workerGroupName = DEFAULT;
}
}
processInstance.setWorkerGroupName(workerGroupName);
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
processInstance.setReceivers(processDefinition.getReceivers());
processInstance.setReceiversCc(processDefinition.getReceiversCc());
@ -233,7 +220,7 @@ public class ProcessInstanceService extends BaseDAGService {
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processId);
AddDependResultForTaskList(taskInstanceList);
addDependResultForTaskList(taskInstanceList);
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString());
resultMap.put(TASK_LIST, taskInstanceList);
@ -247,9 +234,9 @@ public class ProcessInstanceService extends BaseDAGService {
* add dependent result for dependent task
* @param taskInstanceList
*/
private void AddDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
for(TaskInstance taskInstance: taskInstanceList){
if(taskInstance.getTaskType().toUpperCase().equals(TaskType.DEPENDENT.toString())){
if(taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())){
Result logResult = loggerService.queryLog(
taskInstance.getId(), 0, 4098);
if(logResult.getCode() == Status.SUCCESS.ordinal()){
@ -408,11 +395,10 @@ public class ProcessInstanceService extends BaseDAGService {
processInstance.setProcessInstanceJson(processInstanceJson);
processInstance.setGlobalParams(globalParams);
}
// int update = processDao.updateProcessInstance(processInstanceId, processInstanceJson,
// globalParams, schedule, flag, locations, connects);
int update = processService.updateProcessInstance(processInstance);
int updateDefine = 1;
if (syncDefine && StringUtils.isNotEmpty(processInstanceJson)) {
if (Boolean.TRUE.equals(syncDefine) && StringUtils.isNotEmpty(processInstanceJson)) {
processDefinition.setProcessDefinitionJson(processInstanceJson);
processDefinition.setGlobalParams(originDefParams);
processDefinition.setLocations(locations);
@ -476,11 +462,10 @@ public class ProcessInstanceService extends BaseDAGService {
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @param tasksQueue task queue
* @return delete result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId, ITaskQueue tasksQueue) {
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
@ -491,61 +476,18 @@ public class ProcessInstanceService extends BaseDAGService {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstanceId);
if (null == processInstance) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
//process instance priority
int processInstancePriority = processInstance.getProcessInstancePriority().ordinal();
// delete zk queue
if (CollectionUtils.isNotEmpty(taskInstanceList)){
for (TaskInstance taskInstance : taskInstanceList){
// task instance priority
int taskInstancePriority = taskInstance.getTaskInstancePriority().ordinal();
StringBuilder nodeValueSb = new StringBuilder(100);
nodeValueSb.append(processInstancePriority)
.append(UNDERLINE)
.append(processInstanceId)
.append(UNDERLINE)
.append(taskInstancePriority)
.append(UNDERLINE)
.append(taskInstance.getId())
.append(UNDERLINE);
int taskWorkerGroupId = processService.getTaskWorkerGroupId(taskInstance);
WorkerGroup workerGroup = workerGroupMapper.selectById(taskWorkerGroupId);
if(workerGroup == null){
nodeValueSb.append(DEFAULT_WORKER_ID);
}else {
String ips = workerGroup.getIpList();
StringBuilder ipSb = new StringBuilder(100);
String[] ipArray = ips.split(COMMA);
for (String ip : ipArray) {
long ipLong = IpUtils.ipToLong(ip);
ipSb.append(ipLong).append(COMMA);
}
if(ipSb.length() > 0) {
ipSb.deleteCharAt(ipSb.length() - 1);
}
nodeValueSb.append(ipSb);
}
logger.info("delete task queue node : {}",nodeValueSb.toString());
tasksQueue.removeNode(org.apache.dolphinscheduler.common.Constants.DOLPHINSCHEDULER_TASKS_QUEUE, nodeValueSb.toString());
}
}
processService.removeTaskLogFile(processInstanceId);
// delete database cascade
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
processService.deleteWorkProcessMapByParentId(processInstanceId);
@ -615,7 +557,7 @@ public class ProcessInstanceService extends BaseDAGService {
Map<String,Object> localParamsMap = new HashMap<>();
localParamsMap.put("taskType",taskNode.getType());
localParamsMap.put("localParamsList",localParamsList);
if (localParamsList.size() > 0) {
if (CollectionUtils.isNotEmpty(localParamsList)) {
localUserDefParams.put(taskNode.getName(), localParamsMap);
}
}

246
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java

@ -26,16 +26,15 @@ import org.apache.dolphinscheduler.api.dto.resources.filter.ResourceFilter;
import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor;
import org.apache.dolphinscheduler.api.dto.resources.visitor.Visitor;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -43,8 +42,10 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.stream.Collectors;
import static org.apache.dolphinscheduler.common.Constants.*;
@ -176,6 +177,21 @@ public class ResourcesService extends BaseService {
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
}
if (pid != -1) {
Resource parentResource = resourcesMapper.selectById(pid);
if (parentResource == null) {
putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, parentResource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
}
// file is empty
if (file.isEmpty()) {
logger.error("file is empty: {}", file.getOriginalFilename());
@ -218,9 +234,6 @@ public class ResourcesService extends BaseService {
}
Date now = new Date();
Resource resource = new Resource(pid,name,fullName,false,desc,file.getOriginalFilename(),loginUser.getId(),type,file.getSize(),now,now);
try {
@ -301,7 +314,6 @@ public class ResourcesService extends BaseService {
return result;
}
if (name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) {
putMsg(result, Status.SUCCESS);
return result;
@ -309,9 +321,10 @@ public class ResourcesService extends BaseService {
//check resource aleady exists
String originFullName = resource.getFullName();
String originResourceName = resource.getAlias();
String fullName = String.format("%s%s",originFullName.substring(0,originFullName.lastIndexOf("/")+1),name);
if (!resource.getAlias().equals(name) && checkResourceExists(fullName, 0, type.ordinal())) {
if (!originResourceName.equals(name) && checkResourceExists(fullName, 0, type.ordinal())) {
logger.error("resource {} already exists, can't recreate", name);
putMsg(result, Status.RESOURCE_EXIST);
return result;
@ -322,25 +335,54 @@ public class ResourcesService extends BaseService {
if (StringUtils.isEmpty(tenantCode)){
return result;
}
String nameWithSuffix = name;
String originResourceName = resource.getAlias();
if (!resource.isDirectory()) {
//get the file suffix
String suffix = originResourceName.substring(originResourceName.lastIndexOf("."));
// verify whether the resource exists in storage
// get the path of origin file in storage
String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName);
try {
if (!HadoopUtils.getInstance().exists(originHdfsFileName)) {
logger.error("{} not exist", originHdfsFileName);
putMsg(result,Status.RESOURCE_NOT_EXIST);
return result;
}
} catch (IOException e) {
logger.error(e.getMessage(),e);
throw new ServiceException(Status.HDFS_OPERATION_ERROR);
}
//if the name without suffix then add it ,else use the origin name
if(!name.endsWith(suffix)){
nameWithSuffix = nameWithSuffix + suffix;
if (!resource.isDirectory()) {
//get the origin file suffix
String originSuffix = FileUtils.suffix(originFullName);
String suffix = FileUtils.suffix(fullName);
boolean suffixIsChanged = false;
if (StringUtils.isBlank(suffix) && StringUtils.isNotBlank(originSuffix)) {
suffixIsChanged = true;
}
if (StringUtils.isNotBlank(suffix) && !suffix.equals(originSuffix)) {
suffixIsChanged = true;
}
//verify whether suffix is changed
if (suffixIsChanged) {
//need verify whether this resource is authorized to other users
Map<String, Object> columnMap = new HashMap<>();
columnMap.put("resources_id", resourceId);
List<ResourcesUser> resourcesUsers = resourceUserMapper.selectByMap(columnMap);
if (CollectionUtils.isNotEmpty(resourcesUsers)) {
List<Integer> userIds = resourcesUsers.stream().map(ResourcesUser::getUserId).collect(Collectors.toList());
List<User> users = userMapper.selectBatchIds(userIds);
String userNames = users.stream().map(User::getUserName).collect(Collectors.toList()).toString();
logger.error("resource is authorized to user {},suffix not allowed to be modified", userNames);
putMsg(result,Status.RESOURCE_IS_AUTHORIZED,userNames);
return result;
}
}
}
// updateResource data
List<Integer> childrenResource = listAllChildren(resource);
String oldFullName = resource.getFullName();
List<Integer> childrenResource = listAllChildren(resource,false);
Date now = new Date();
resource.setAlias(nameWithSuffix);
resource.setAlias(name);
resource.setFullName(fullName);
resource.setDescription(desc);
resource.setUpdateTime(now);
@ -348,10 +390,11 @@ public class ResourcesService extends BaseService {
try {
resourcesMapper.updateById(resource);
if (resource.isDirectory() && CollectionUtils.isNotEmpty(childrenResource)) {
String matcherFullName = Matcher.quoteReplacement(fullName);
List<Resource> childResourceList = new ArrayList<>();
List<Resource> resourceList = resourcesMapper.listResourceByIds(childrenResource.toArray(new Integer[childrenResource.size()]));
childResourceList = resourceList.stream().map(t -> {
t.setFullName(t.getFullName().replaceFirst(oldFullName, fullName));
t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName));
t.setUpdateTime(now);
return t;
}).collect(Collectors.toList());
@ -369,29 +412,24 @@ public class ResourcesService extends BaseService {
result.setData(resultMap);
} catch (Exception e) {
logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e);
throw new RuntimeException(Status.UPDATE_RESOURCE_ERROR.getMsg());
throw new ServiceException(Status.UPDATE_RESOURCE_ERROR);
}
// if name unchanged, return directly without moving on HDFS
if (originResourceName.equals(name)) {
return result;
}
// get file hdfs path
// delete hdfs file by type
String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName);
// get the path of dest file in hdfs
String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName);
try {
if (HadoopUtils.getInstance().exists(originHdfsFileName)) {
logger.info("hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName);
HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true);
} else {
logger.error("{} not exist", originHdfsFileName);
putMsg(result,Status.RESOURCE_NOT_EXIST);
}
logger.info("start hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName);
HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true);
} catch (Exception e) {
logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e);
putMsg(result,Status.HDFS_COPY_FAIL);
throw new ServiceException(Status.HDFS_COPY_FAIL);
}
return result;
@ -416,6 +454,14 @@ public class ResourcesService extends BaseService {
if (isAdmin(loginUser)) {
userId= 0;
}
if (direcotryId != -1) {
Resource directory = resourcesMapper.selectById(direcotryId);
if (directory == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
}
IPage<Resource> resourceIPage = resourcesMapper.queryResourcePaging(page,
userId,direcotryId, type.ordinal(), searchVal);
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
@ -505,8 +551,12 @@ public class ResourcesService extends BaseService {
Map<String, Object> result = new HashMap<>(5);
Set<Resource> allResourceList = getAllResources(loginUser, type);
Visitor resourceTreeVisitor = new ResourceTreeVisitor(new ArrayList<>(allResourceList));
int userId = loginUser.getId();
if(isAdmin(loginUser)){
userId = 0;
}
List<Resource> allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0);
Visitor resourceTreeVisitor = new ResourceTreeVisitor(allResourceList);
//JSONArray jsonArray = JSON.parseArray(JSON.toJSONString(resourceTreeVisitor.visit().getChildren(), SerializerFeature.SortField));
result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren());
putMsg(result,Status.SUCCESS);
@ -514,34 +564,6 @@ public class ResourcesService extends BaseService {
return result;
}
/**
* get all resources
* @param loginUser login user
* @return all resource set
*/
private Set<Resource> getAllResources(User loginUser, ResourceType type) {
int userId = loginUser.getId();
boolean listChildren = true;
if(isAdmin(loginUser)){
userId = 0;
listChildren = false;
}
List<Resource> resourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal());
Set<Resource> allResourceList = new HashSet<>(resourceList);
if (listChildren) {
Set<Integer> authorizedIds = new HashSet<>();
List<Resource> authorizedDirecoty = resourceList.stream().filter(t->t.getUserId() != loginUser.getId() && t.isDirectory()).collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(authorizedDirecoty)) {
for(Resource resource : authorizedDirecoty){
authorizedIds.addAll(listAllChildren(resource));
}
List<Resource> childrenResources = resourcesMapper.listResourceByIds(authorizedIds.toArray(new Integer[authorizedIds.size()]));
allResourceList.addAll(childrenResources);
}
}
return allResourceList;
}
/**
* query resource list
*
@ -552,8 +574,11 @@ public class ResourcesService extends BaseService {
public Map<String, Object> queryResourceJarList(User loginUser, ResourceType type) {
Map<String, Object> result = new HashMap<>(5);
Set<Resource> allResourceList = getAllResources(loginUser, type);
int userId = loginUser.getId();
if(isAdmin(loginUser)){
userId = 0;
}
List<Resource> allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0);
List<Resource> resources = new ResourceFilter(".jar",new ArrayList<>(allResourceList)).filter();
Visitor resourceTreeVisitor = new ResourceTreeVisitor(resources);
result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren());
@ -592,15 +617,6 @@ public class ResourcesService extends BaseService {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
//if resource type is UDF,need check whether it is bound by UDF functon
if (resource.getType() == (ResourceType.UDF)) {
List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(new int[]{resourceId});
if (CollectionUtils.isNotEmpty(udfFuncs)) {
logger.error("can't be deleted,because it is bound by UDF functions:{}",udfFuncs.toString());
putMsg(result,Status.UDF_RESOURCE_IS_BOUND,udfFuncs.get(0).getFuncName());
return result;
}
}
String tenantCode = getTenantCode(resource.getUserId(),result);
if (StringUtils.isEmpty(tenantCode)){
@ -608,10 +624,22 @@ public class ResourcesService extends BaseService {
}
// get all resource id of process definitions those is released
Map<Integer, Set<Integer>> resourceProcessMap = getResourceProcessMap();
List<Map<String, Object>> list = processDefinitionMapper.listResources();
Map<Integer, Set<Integer>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list);
Set<Integer> resourceIdSet = resourceProcessMap.keySet();
// get all children of the resource
List<Integer> allChildren = listAllChildren(resource);
List<Integer> allChildren = listAllChildren(resource,true);
Integer[] needDeleteResourceIdArray = allChildren.toArray(new Integer[allChildren.size()]);
//if resource type is UDF,need check whether it is bound by UDF functon
if (resource.getType() == (ResourceType.UDF)) {
List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(needDeleteResourceIdArray);
if (CollectionUtils.isNotEmpty(udfFuncs)) {
logger.error("can't be deleted,because it is bound by UDF functions:{}",udfFuncs.toString());
putMsg(result,Status.UDF_RESOURCE_IS_BOUND,udfFuncs.get(0).getFuncName());
return result;
}
}
if (resourceIdSet.contains(resource.getPid())) {
logger.error("can't be deleted,because it is used of process definition");
@ -632,8 +660,8 @@ public class ResourcesService extends BaseService {
String hdfsFilename = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName());
//delete data in database
resourcesMapper.deleteIds(allChildren.toArray(new Integer[allChildren.size()]));
resourceUserMapper.deleteResourceUser(0, resourceId);
resourcesMapper.deleteIds(needDeleteResourceIdArray);
resourceUserMapper.deleteResourceUserArray(0, needDeleteResourceIdArray);
//delete file on hdfs
HadoopUtils.getInstance().delete(hdfsFilename, true);
@ -977,8 +1005,21 @@ public class ResourcesService extends BaseService {
logger.error("resource id {} is directory,can't download it", resourceId);
throw new RuntimeException("cant't download directory");
}
User user = userMapper.queryDetailsById(resource.getUserId());
String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode();
int userId = resource.getUserId();
User user = userMapper.selectById(userId);
if(user == null){
logger.error("user id {} not exists", userId);
throw new RuntimeException(String.format("resource owner id %d not exist",userId));
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if(tenant == null){
logger.error("tenant id {} not exists", user.getTenantId());
throw new RuntimeException(String.format("The tenant id %d of resource owner not exist",user.getTenantId()));
}
String tenantCode = tenant.getTenantCode();
String hdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName());
@ -1144,8 +1185,8 @@ public class ResourcesService extends BaseService {
*/
private String getTenantCode(int userId,Result result){
User user = userMapper.queryDetailsById(userId);
if(user == null){
User user = userMapper.selectById(userId);
if (user == null) {
logger.error("user {} not exists", userId);
putMsg(result, Status.USER_NOT_EXIST,userId);
return null;
@ -1162,12 +1203,13 @@ public class ResourcesService extends BaseService {
/**
* list all children id
* @param resource resource
* @param resource resource
* @param containSelf whether add self to children list
* @return all children id
*/
List<Integer> listAllChildren(Resource resource){
List<Integer> listAllChildren(Resource resource,boolean containSelf){
List<Integer> childList = new ArrayList<>();
if (resource.getId() != -1) {
if (resource.getId() != -1 && containSelf) {
childList.add(resource.getId());
}
@ -1191,38 +1233,4 @@ public class ResourcesService extends BaseService {
}
}
/**
* get resource process map key is resource id,value is the set of process definition
* @return resource process definition map
*/
private Map<Integer,Set<Integer>> getResourceProcessMap(){
Map<Integer, String> map = new HashMap<>();
Map<Integer, Set<Integer>> result = new HashMap<>();
List<Map<String, Object>> list = processDefinitionMapper.listResources();
if (CollectionUtils.isNotEmpty(list)) {
for (Map<String, Object> tempMap : list) {
map.put((Integer) tempMap.get("id"), (String)tempMap.get("resource_ids"));
}
}
for (Map.Entry<Integer, String> entry : map.entrySet()) {
Integer mapKey = entry.getKey();
String[] arr = entry.getValue().split(",");
Set<Integer> mapValues = Arrays.stream(arr).map(Integer::parseInt).collect(Collectors.toSet());
for (Integer value : mapValues) {
if (result.containsKey(value)) {
Set<Integer> set = result.get(value);
set.add(mapKey);
result.put(value, set);
} else {
Set<Integer> set = new HashSet<>();
set.add(mapKey);
result.put(value, set);
}
}
}
return result;
}
}

12
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java

@ -92,7 +92,7 @@ public class SchedulerService extends BaseService {
* @param processInstancePriority process instance priority
* @param receivers receivers
* @param receiversCc receivers cc
* @param workerGroupId worker group id
* @param workerGroup worker group
* @return create result code
* @throws IOException ioexception
*/
@ -106,7 +106,7 @@ public class SchedulerService extends BaseService {
String receivers,
String receiversCc,
Priority processInstancePriority,
int workerGroupId) throws IOException {
String workerGroup) throws IOException {
Map<String, Object> result = new HashMap<String, Object>(5);
@ -156,7 +156,7 @@ public class SchedulerService extends BaseService {
scheduleObj.setUserName(loginUser.getUserName());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setProcessInstancePriority(processInstancePriority);
scheduleObj.setWorkerGroupId(workerGroupId);
scheduleObj.setWorkerGroup(workerGroup);
scheduleMapper.insert(scheduleObj);
/**
@ -182,7 +182,7 @@ public class SchedulerService extends BaseService {
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param workerGroupId worker group id
* @param workerGroup worker group
* @param processInstancePriority process instance priority
* @param receiversCc receiver cc
* @param receivers receivers
@ -202,7 +202,7 @@ public class SchedulerService extends BaseService {
String receiversCc,
ReleaseState scheduleStatus,
Priority processInstancePriority,
int workerGroupId) throws IOException {
String workerGroup) throws IOException {
Map<String, Object> result = new HashMap<String, Object>(5);
Project project = projectMapper.queryByName(projectName);
@ -266,7 +266,7 @@ public class SchedulerService extends BaseService {
if (scheduleStatus != null) {
schedule.setReleaseState(scheduleStatus);
}
schedule.setWorkerGroupId(workerGroupId);
schedule.setWorkerGroup(workerGroup);
schedule.setUpdateTime(now);
schedule.setProcessInstancePriority(processInstancePriority);
scheduleMapper.updateById(schedule);

130
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java

@ -16,29 +16,31 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent;
import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
/**
* user service
@ -72,6 +74,9 @@ public class UsersService extends BaseService {
@Autowired
private AlertGroupMapper alertGroupMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
* create user, only system admin have permission
@ -331,18 +336,18 @@ public class UsersService extends BaseService {
List<Resource> fileResourcesList = resourceMapper.queryResourceList(
null, userId, ResourceType.FILE.ordinal());
if (CollectionUtils.isNotEmpty(fileResourcesList)) {
for (Resource resource : fileResourcesList) {
HadoopUtils.getInstance().copy(oldResourcePath + "/" + resource.getAlias(), newResourcePath, false, true);
}
ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(fileResourcesList);
ResourceComponent resourceComponent = resourceTreeVisitor.visit();
copyResourceFiles(resourceComponent, oldResourcePath, newResourcePath);
}
//udf resources
List<Resource> udfResourceList = resourceMapper.queryResourceList(
null, userId, ResourceType.UDF.ordinal());
if (CollectionUtils.isNotEmpty(udfResourceList)) {
for (Resource resource : udfResourceList) {
HadoopUtils.getInstance().copy(oldUdfsPath + "/" + resource.getAlias(), newUdfsPath, false, true);
}
ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(udfResourceList);
ResourceComponent resourceComponent = resourceTreeVisitor.visit();
copyResourceFiles(resourceComponent, oldUdfsPath, newUdfsPath);
}
//Delete the user from the old tenant directory
@ -420,6 +425,7 @@ public class UsersService extends BaseService {
* @param projectIds project id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
@ -469,6 +475,7 @@ public class UsersService extends BaseService {
* @param resourceIds resource id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
@ -481,23 +488,74 @@ public class UsersService extends BaseService {
return result;
}
Set<Integer> needAuthorizeResIds = new HashSet();
if (StringUtils.isNotBlank(resourceIds)) {
String[] resourceFullIdArr = resourceIds.split(",");
// need authorize resource id set
for (String resourceFullId : resourceFullIdArr) {
String[] resourceIdArr = resourceFullId.split("-");
for (int i=0;i<=resourceIdArr.length-1;i++) {
int resourceIdValue = Integer.parseInt(resourceIdArr[i]);
needAuthorizeResIds.add(resourceIdValue);
}
}
}
//get the authorized resource id list by user id
List<Resource> oldAuthorizedRes = resourceMapper.queryAuthorizedResourceList(userId);
//if resource type is UDF,need check whether it is bound by UDF functon
Set<Integer> oldAuthorizedResIds = oldAuthorizedRes.stream().map(t -> t.getId()).collect(Collectors.toSet());
//get the unauthorized resource id list
oldAuthorizedResIds.removeAll(needAuthorizeResIds);
if (CollectionUtils.isNotEmpty(oldAuthorizedResIds)) {
// get all resource id of process definitions those is released
List<Map<String, Object>> list = processDefinitionMapper.listResourcesByUser(userId);
Map<Integer, Set<Integer>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list);
Set<Integer> resourceIdSet = resourceProcessMap.keySet();
resourceIdSet.retainAll(oldAuthorizedResIds);
if (CollectionUtils.isNotEmpty(resourceIdSet)) {
logger.error("can't be deleted,because it is used of process definition");
for (Integer resId : resourceIdSet) {
logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId));
}
putMsg(result, Status.RESOURCE_IS_USED);
return result;
}
}
resourcesUserMapper.deleteResourceUser(userId, 0);
if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) {
return result;
}
String[] resourcesIdArr = resourceIds.split(",");
for (int resourceIdValue : needAuthorizeResIds) {
Resource resource = resourceMapper.selectById(resourceIdValue);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
for (String resourceId : resourcesIdArr) {
Date now = new Date();
ResourcesUser resourcesUser = new ResourcesUser();
resourcesUser.setUserId(userId);
resourcesUser.setResourcesId(Integer.parseInt(resourceId));
resourcesUser.setPerm(7);
resourcesUser.setResourcesId(resourceIdValue);
if (resource.isDirectory()) {
resourcesUser.setPerm(Constants.AUTHORIZE_READABLE_PERM);
}else{
resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM);
}
resourcesUser.setCreateTime(now);
resourcesUser.setUpdateTime(now);
resourcesUserMapper.insert(resourcesUser);
}
putMsg(result, Status.SUCCESS);
@ -514,6 +572,7 @@ public class UsersService extends BaseService {
* @param udfIds udf id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
Map<String, Object> result = new HashMap<>(5);
@ -560,6 +619,7 @@ public class UsersService extends BaseService {
* @param datasourceIds data source id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
@ -807,4 +867,40 @@ public class UsersService extends BaseService {
return msg;
}
/**
* copy resource files
* @param resourceComponent resource component
* @param srcBasePath src base path
* @param dstBasePath dst base path
* @throws IOException io exception
*/
private void copyResourceFiles(ResourceComponent resourceComponent, String srcBasePath, String dstBasePath) throws IOException {
List<ResourceComponent> components = resourceComponent.getChildren();
if (CollectionUtils.isNotEmpty(components)) {
for (ResourceComponent component:components) {
// verify whether exist
if (!HadoopUtils.getInstance().exists(String.format("%s/%s",srcBasePath,component.getFullName()))){
logger.error("resource file: {} not exist,copy error",component.getFullName());
throw new ServiceException(Status.RESOURCE_NOT_EXIST);
}
if (!component.isDirctory()) {
// copy it to dst
HadoopUtils.getInstance().copy(String.format("%s/%s",srcBasePath,component.getFullName()),String.format("%s/%s",dstBasePath,component.getFullName()),false,true);
continue;
}
if(CollectionUtils.isEmpty(component.getChildren())) {
// if not exist,need create it
if (!HadoopUtils.getInstance().exists(String.format("%s/%s",dstBasePath,component.getFullName()))) {
HadoopUtils.getInstance().mkdir(String.format("%s/%s",dstBasePath,component.getFullName()));
}
}else{
copyResourceFiles(component,srcBasePath,dstBasePath);
}
}
}
}
}

193
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java

@ -16,26 +16,24 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.AccessToken;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;
/**
* work group service
@ -44,87 +42,13 @@ import java.util.Map;
public class WorkerGroupService extends BaseService {
@Autowired
WorkerGroupMapper workerGroupMapper;
@Autowired
ProcessInstanceMapper processInstanceMapper;
/**
* create or update a worker group
*
* @param loginUser login user
* @param id worker group id
* @param name worker group name
* @param ipList ip list
* @return create or update result code
*/
public Map<String, Object> saveWorkerGroup(User loginUser,int id, String name, String ipList){
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (checkAdmin(loginUser, result)){
return result;
}
if(StringUtils.isEmpty(name)){
putMsg(result, Status.NAME_NULL);
return result;
}
Date now = new Date();
WorkerGroup workerGroup = null;
if(id != 0){
workerGroup = workerGroupMapper.selectById(id);
//check exist
if (workerGroup == null){
workerGroup = new WorkerGroup();
workerGroup.setCreateTime(now);
}
}else{
workerGroup = new WorkerGroup();
workerGroup.setCreateTime(now);
}
workerGroup.setName(name);
workerGroup.setIpList(ipList);
workerGroup.setUpdateTime(now);
if(checkWorkerGroupNameExists(workerGroup)){
putMsg(result, Status.NAME_EXIST, workerGroup.getName());
return result;
}
if(workerGroup.getId() != 0 ){
workerGroupMapper.updateById(workerGroup);
}else{
workerGroupMapper.insert(workerGroup);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check worker group name exists
* @param workerGroup
* @return
*/
private boolean checkWorkerGroupNameExists(WorkerGroup workerGroup) {
@Autowired
protected ZookeeperCachedOperator zookeeperCachedOperator;
List<WorkerGroup> workerGroupList = workerGroupMapper.queryWorkerGroupByName(workerGroup.getName());
if(CollectionUtils.isNotEmpty(workerGroupList)){
// new group has same name..
if(workerGroup.getId() == 0){
return true;
}
// update group...
for(WorkerGroup group : workerGroupList){
if(group.getId() != workerGroup.getId()){
return true;
}
}
}
return false;
}
/**
* query worker group paging
@ -137,53 +61,100 @@ public class WorkerGroupService extends BaseService {
*/
public Map<String,Object> queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal) {
// list from index
Integer fromIndex = (pageNo - 1) * pageSize;
// list to index
Integer toIndex = (pageNo - 1) * pageSize + pageSize;
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
Page<WorkerGroup> page = new Page(pageNo, pageSize);
IPage<WorkerGroup> workerGroupIPage = workerGroupMapper.queryListPaging(
page, searchVal);
List<WorkerGroup> workerGroups = getWorkerGroups(true);
List<WorkerGroup> resultDataList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(workerGroups)){
List<WorkerGroup> searchValDataList = new ArrayList<>();
if (StringUtils.isNotEmpty(searchVal)){
for (WorkerGroup workerGroup : workerGroups){
if (workerGroup.getName().contains(searchVal)){
searchValDataList.add(workerGroup);
}
}
}else {
searchValDataList = workerGroups;
}
if (searchValDataList.size() < pageSize){
toIndex = (pageNo - 1) * pageSize + searchValDataList.size();
}
resultDataList = searchValDataList.subList(fromIndex, toIndex);
}
PageInfo<WorkerGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int)workerGroupIPage.getTotal());
pageInfo.setLists(workerGroupIPage.getRecords());
pageInfo.setTotalCount(resultDataList.size());
pageInfo.setLists(resultDataList);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete worker group by id
* @param id worker group id
* @return delete result code
* query all worker group
*
* @return all worker group list
*/
@Transactional(rollbackFor = Exception.class)
public Map<String,Object> deleteWorkerGroupById(Integer id) {
public Map<String,Object> queryAllGroup() {
Map<String, Object> result = new HashMap<>();
Map<String, Object> result = new HashMap<>(5);
List<WorkerGroup> workerGroups = getWorkerGroups(false);
List<ProcessInstance> processInstances = processInstanceMapper.queryByWorkerGroupIdAndStatus(id, Constants.NOT_TERMINATED_STATES);
if(CollectionUtils.isNotEmpty(processInstances)){
putMsg(result, Status.DELETE_WORKER_GROUP_BY_ID_FAIL, processInstances.size());
return result;
}
workerGroupMapper.deleteById(id);
processInstanceMapper.updateProcessInstanceByWorkerGroupId(id, Constants.DEFAULT_WORKER_ID);
Set<String> availableWorkerGroupSet = workerGroups.stream()
.map(workerGroup -> workerGroup.getName())
.collect(Collectors.toSet());
result.put(Constants.DATA_LIST, availableWorkerGroupSet);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query all worker group
* get worker groups
*
* @return all worker group list
* @param isPaging whether paging
* @return WorkerGroup list
*/
public Map<String,Object> queryAllGroup() {
Map<String, Object> result = new HashMap<>(5);
List<WorkerGroup> workerGroupList = workerGroupMapper.queryAllWorkerGroup();
result.put(Constants.DATA_LIST, workerGroupList);
putMsg(result, Status.SUCCESS);
return result;
private List<WorkerGroup> getWorkerGroups(boolean isPaging) {
String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot()+"/nodes" +"/worker";
List<String> workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath);
// available workerGroup list
List<String> availableWorkerGroupList = new ArrayList<>();
List<WorkerGroup> workerGroups = new ArrayList<>();
for (String workerGroup : workerGroupList){
String workerGroupPath= workerPath + "/" + workerGroup;
List<String> childrenNodes = zookeeperCachedOperator.getChildrenKeys(workerGroupPath);
if (CollectionUtils.isNotEmpty(childrenNodes)){
availableWorkerGroupList.add(workerGroup);
WorkerGroup wg = new WorkerGroup();
wg.setName(workerGroup);
if (isPaging){
wg.setIpList(childrenNodes);
String registeredIpValue = zookeeperCachedOperator.get(workerGroupPath + "/" + childrenNodes.get(0));
wg.setCreateTime(DateUtils.stringToDate(registeredIpValue.split(",")[3]));
wg.setUpdateTime(DateUtils.stringToDate(registeredIpValue.split(",")[4]));
}
workerGroups.add(wg);
}
}
return workerGroups;
}
}

53
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java

@ -16,6 +16,10 @@
*/
package org.apache.dolphinscheduler.api.utils;
import org.apache.dolphinscheduler.api.enums.Status;
import java.text.MessageFormat;
/**
* result
*
@ -37,13 +41,58 @@ public class Result<T> {
*/
private T data;
public Result(){}
public Result() {
}
public Result(Integer code , String msg){
public Result(Integer code, String msg) {
this.code = code;
this.msg = msg;
}
private Result(T data) {
this.code = 0;
this.data = data;
}
private Result(Status status) {
if (status != null) {
this.code = status.getCode();
this.msg = status.getMsg();
}
}
/**
* Call this function if there is success
*
* @param data data
* @param <T> type
* @return resule
*/
public static <T> Result<T> success(T data) {
return new Result<>(data);
}
/**
* Call this function if there is any error
*
* @param status status
* @return result
*/
public static Result error(Status status) {
return new Result(status);
}
/**
* Call this function if there is any error
*
* @param status status
* @param args args
* @return result
*/
public static Result errorWithArgs(Status status, Object... args) {
return new Result(status.getCode(), MessageFormat.format(status.getMsg(), args));
}
public Integer getCode() {
return code;
}

17
dolphinscheduler-api/src/main/resources/i18n/messages.properties

@ -166,15 +166,16 @@ SIGNOUT_NOTES=logout
USER_PASSWORD=user password
UPDATE_PROCESS_INSTANCE_NOTES=update process instance
QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list
VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify proccess definition name
VERIFY_PROCESS_DEFINITION_NAME_NOTES=verify process definition name
LOGIN_NOTES=user login
UPDATE_PROCCESS_DEFINITION_NOTES=update proccess definition
UPDATE_PROCESS_DEFINITION_NOTES=update process definition
PROCESS_DEFINITION_ID=process definition id
PROCESS_DEFINITION_IDS=process definition ids
RELEASE_PROCCESS_DEFINITION_NOTES=release proccess definition
QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query proccess definition by id
QUERY_PROCCESS_DEFINITION_LIST_NOTES=query proccess definition list
QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query proccess definition list paging
RELEASE_PROCESS_DEFINITION_NOTES=release process definition
QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id
COPY_PROCESS_DEFINITION_NOTES=copy process definition
QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging
QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list
PAGE_NO=page no
PROCESS_INSTANCE_ID=process instance id
@ -190,7 +191,7 @@ LIMIT=limit
VIEW_TREE_NOTES=view tree
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id
PROCESS_DEFINITION_ID_LIST=process definition id list
QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query proccess definition all by project id
QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query process definition all by project id
DELETE_PROCESS_DEFINITION_BY_ID_NOTES=delete process definition by process definition id
BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=batch delete process definition by process definition ids
QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id
@ -251,3 +252,5 @@ UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source
AUTHORIZED_DATA_SOURCE_NOTES=authorized data source
DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids

17
dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties

@ -166,15 +166,16 @@ SIGNOUT_NOTES=logout
USER_PASSWORD=user password
UPDATE_PROCESS_INSTANCE_NOTES=update process instance
QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list
VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify proccess definition name
VERIFY_PROCESS_DEFINITION_NAME_NOTES=verify process definition name
LOGIN_NOTES=user login
UPDATE_PROCCESS_DEFINITION_NOTES=update proccess definition
UPDATE_PROCESS_DEFINITION_NOTES=update process definition
PROCESS_DEFINITION_ID=process definition id
PROCESS_DEFINITION_IDS=process definition ids
RELEASE_PROCCESS_DEFINITION_NOTES=release proccess definition
QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query proccess definition by id
QUERY_PROCCESS_DEFINITION_LIST_NOTES=query proccess definition list
QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query proccess definition list paging
RELEASE_PROCESS_DEFINITION_NOTES=release process definition
QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id
COPY_PROCESS_DEFINITION_NOTES=copy process definition
QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging
QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list
PAGE_NO=page no
PROCESS_INSTANCE_ID=process instance id
@ -190,7 +191,7 @@ LIMIT=limit
VIEW_TREE_NOTES=view tree
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id
PROCESS_DEFINITION_ID_LIST=process definition id list
QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query proccess definition all by project id
QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query process definition all by project id
DELETE_PROCESS_DEFINITION_BY_ID_NOTES=delete process definition by process definition id
BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=batch delete process definition by process definition ids
QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id
@ -251,3 +252,5 @@ UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source
AUTHORIZED_DATA_SOURCE_NOTES=authorized data source
DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids

18
dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties

@ -165,14 +165,15 @@ SIGNOUT_NOTES=退出登录
USER_PASSWORD=用户密码
UPDATE_PROCESS_INSTANCE_NOTES=更新流程实例
QUERY_PROCESS_INSTANCE_LIST_NOTES=查询流程实例列表
VERIFY_PROCCESS_DEFINITION_NAME_NOTES=验证流程定义名字
VERIFY_PROCESS_DEFINITION_NAME_NOTES=验证流程定义名字
LOGIN_NOTES=用户登录
UPDATE_PROCCESS_DEFINITION_NOTES=更新流程定义
UPDATE_PROCESS_DEFINITION_NOTES=更新流程定义
PROCESS_DEFINITION_ID=流程定义ID
RELEASE_PROCCESS_DEFINITION_NOTES=发布流程定义
QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID
QUERY_PROCCESS_DEFINITION_LIST_NOTES=查询流程定义列表
QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表
RELEASE_PROCESS_DEFINITION_NOTES=发布流程定义
QUERY_PROCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID
COPY_PROCESS_DEFINITION_NOTES=复制流程定义
QUERY_PROCESS_DEFINITION_LIST_NOTES=查询流程定义列表
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表
QUERY_ALL_DEFINITION_LIST_NOTES=查询所有流程定义
PAGE_NO=页码号
PROCESS_INSTANCE_ID=流程实例ID
@ -188,7 +189,7 @@ LIMIT=显示多少条
VIEW_TREE_NOTES=树状图
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=获得任务节点列表通过流程定义ID
PROCESS_DEFINITION_ID_LIST=流程定义id列表
QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=查询流程定义通过项目ID
QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=查询流程定义通过项目ID
BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=批量删除流程定义通过流程定义ID集合
DELETE_PROCESS_DEFINITION_BY_ID_NOTES=删除流程定义通过流程定义ID
QUERY_PROCESS_INSTANCE_BY_ID_NOTES=查询流程实例通过流程实例ID
@ -249,3 +250,6 @@ UNAUTHORIZED_DATA_SOURCE_NOTES=未授权的数据源
AUTHORIZED_DATA_SOURCE_NOTES=授权的数据源
DELETE_SCHEDULER_BY_ID_NOTES=根据定时id删除定时数据
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=分页查询告警组列表
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=通过工作流ID导出工作流定义
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=批量导出工作流定义

17
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java

@ -56,6 +56,23 @@ public class AccessTokenControllerTest extends AbstractControllerTest{
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testExceptionHandler() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("userId","-1");
paramsMap.add("expireTime","2019-12-18 00:00:00");
paramsMap.add("token","507f5aeaaa2093dbdff5d5522ce00510");
MvcResult mvcResult = mockMvc.perform(post("/access-token/create")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.CREATE_ACCESS_TOKEN_ERROR.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testGenerateToken() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();

4
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java

@ -39,6 +39,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
* data source controller test
*/
public class DataSourceControllerTest extends AbstractControllerTest{
private static Logger logger = LoggerFactory.getLogger(DataSourceControllerTest.class);
@Ignore
@ -95,6 +96,7 @@ public class DataSourceControllerTest extends AbstractControllerTest{
@Ignore
@Test
public void testQueryDataSource() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
@ -169,6 +171,7 @@ public class DataSourceControllerTest extends AbstractControllerTest{
}
@Ignore
@Test
public void testConnectionTest() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
@ -248,6 +251,7 @@ public class DataSourceControllerTest extends AbstractControllerTest{
@Ignore
@Test
public void testDelete() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();

509
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java

@ -17,314 +17,335 @@
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.junit.*;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.springframework.mock.web.MockHttpServletResponse;
import javax.servlet.http.HttpServletResponse;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* process definition controller test
*/
public class ProcessDefinitionControllerTest extends AbstractControllerTest{
@RunWith(MockitoJUnitRunner.Silent.class)
public class ProcessDefinitionControllerTest{
private static Logger logger = LoggerFactory.getLogger(ProcessDefinitionControllerTest.class);
@InjectMocks
private ProcessDefinitionController processDefinitionController;
@Mock
private ProcessDefinitionService processDefinitionService;
protected User user;
@Before
public void before(){
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
loginUser.setUserName("admin");
user = loginUser;
}
@Test
public void testCreateProcessDefinition() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("name","dag_test");
paramsMap.add("processDefinitionJson",json);
paramsMap.add("locations", locations);
paramsMap.add("connects", "[]");
paramsMap.add("description", "desc test");
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/process/save","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isCreated())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",1);
Mockito.when(processDefinitionService.createProcessDefinition(user, projectName, name, json,
description, locations, connects)).thenReturn(result);
@Test
public void testVerifyProccessDefinitionName() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("name","dag_test");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/verify-name","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Result response = processDefinitionController.createProcessDefinition(user, projectName, name, json,
locations, connects, description);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testVerifyProccessDefinitionNameNotExit() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("name","dag_test_1");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/verify-name","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
@Test
public void testVerifyProcessDefinitionName() throws Exception {
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.PROCESS_INSTANCE_EXIST);
String projectName = "test";
String name = "dag_test";
Mockito.when(processDefinitionService.verifyProcessDefinitionName(user,projectName,name)).thenReturn(result);
Result response = processDefinitionController.verifyProcessDefinitionName(user,projectName,name);
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(),response.getCode().intValue());
}
@Test
public void UpdateProccessDefinition() throws Exception {
public void updateProcessDefinition() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("name","dag_test_update");
paramsMap.add("id","91");
paramsMap.add("processDefinitionJson",json);
paramsMap.add("locations", locations);
paramsMap.add("connects", "[]");
paramsMap.add("description", "desc test update");
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/process/update","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",1);
Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id,name, json,
description, locations, connects)).thenReturn(result);
Result response = processDefinitionController.updateProcessDefinition(user, projectName, name,id, json,
locations, connects, description);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testReleaseProccessDefinition() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processId","91");
paramsMap.add("releaseState",String.valueOf(ReleaseState.OFFLINE));
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/process/release","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
public void testReleaseProcessDefinition() throws Exception {
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal())).thenReturn(result);
Result response = processDefinitionController.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal());
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionById() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
int id = 1;
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setProjectName(projectName);
processDefinition.setConnects(connects);
processDefinition.setDescription(description);
processDefinition.setId(id);
processDefinition.setLocations(locations);
processDefinition.setName(name);
processDefinition.setProcessDefinitionJson(json);
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName,id)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionById(user, projectName,id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testQueryProccessDefinitionById() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processId","91");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/select-by-id","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
public void testCopyProcessDefinition() throws Exception {
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.copyProcessDefinition(user, projectName,id)).thenReturn(result);
Result response = processDefinitionController.copyProcessDefinition(user, projectName,id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testQueryProccessDefinitionList() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/list","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
public void testQueryProcessDefinitionList() throws Exception {
String projectName = "test";
List<ProcessDefinition> resourceList = getDefinitionList();
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, resourceList);
Mockito.when(processDefinitionService.queryProcessDefinitionList(user, projectName)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionList(user, projectName);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
public List<ProcessDefinition> getDefinitionList(){
List<ProcessDefinition> resourceList = new ArrayList<>();
@Test
public void testQueryProcessDefinitionListPaging() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("pageNo","1");
paramsMap.add("searchVal","test");
paramsMap.add("userId","");
paramsMap.add("pageSize", "1");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/list-paging","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
int id = 1;
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setProjectName(projectName);
processDefinition.setConnects(connects);
processDefinition.setDescription(description);
processDefinition.setId(id);
processDefinition.setLocations(locations);
processDefinition.setName(name);
processDefinition.setProcessDefinitionJson(json);
String name2 = "dag_test";
int id2 = 2;
ProcessDefinition processDefinition2 = new ProcessDefinition();
processDefinition2.setProjectName(projectName);
processDefinition2.setConnects(connects);
processDefinition2.setDescription(description);
processDefinition2.setId(id2);
processDefinition2.setLocations(locations);
processDefinition2.setName(name2);
processDefinition2.setProcessDefinitionJson(json);
resourceList.add(processDefinition);
resourceList.add(processDefinition2);
return resourceList;
}
@Test
public void testViewTree() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processId","91");
paramsMap.add("limit","30");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/view-tree","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
public void testDeleteProcessDefinitionById() throws Exception {
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName,id)).thenReturn(result);
Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName,id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
@Test
public void testGetNodeListByDefinitionId() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionId","40");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/gen-task-list","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionId(id)).thenReturn(result);
Result response = processDefinitionController.getNodeListByDefinitionId(user,projectName,id);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testGetNodeListByDefinitionIdList() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionIdList","40,90,91");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/get-task-list","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
String projectName = "test";
String idList = "1,2,3";
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionIdList(idList)).thenReturn(result);
Result response = processDefinitionController.getNodeListByDefinitionIdList(user,projectName,idList);
@Ignore
@Test
public void testExportProcessDefinitionById() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionId","91");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/export","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
// .andExpect(status().isOk())
// .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testQueryProccessDefinitionAllByProjectId() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("projectId","9");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/queryProccessDefinitionAllByProjectId","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
public void testQueryProcessDefinitionAllByProjectId() throws Exception{
int projectId = 1;
Map<String,Object> result = new HashMap<>();
putMsg(result,Status.SUCCESS);
Mockito.when(processDefinitionService.queryProcessDefinitionAllByProjectId(projectId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user,projectId);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testViewTree() throws Exception{
String projectName = "test";
int processId = 1;
int limit = 2;
Map<String,Object> result = new HashMap<>();
putMsg(result,Status.SUCCESS);
Mockito.when(processDefinitionService.viewTree(processId,limit)).thenReturn(result);
Result response = processDefinitionController.viewTree(user,projectName,processId,limit);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testDeleteProcessDefinitionById() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionId","73");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/delete","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
public void testQueryProcessDefinitionListPaging() throws Exception{
String projectName = "test";
int pageNo = 1;
int pageSize = 10;
String searchVal = "";
int userId = 1;
Map<String,Object> result = new HashMap<>();
putMsg(result,Status.SUCCESS);
result.put(Constants.DATA_LIST,new PageInfo<Resource>(1,10));
Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user,projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionListPaging(user,projectName,pageNo,searchVal,userId,pageSize);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testBatchDeleteProcessDefinitionByIds() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionIds","54,62");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/batch-delete","cxc_1113")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
public void testBatchExportProcessDefinitionByIds() throws Exception{
String processDefinitionIds = "1,2";
String projectName = "test";
HttpServletResponse response = new MockHttpServletResponse();
ProcessDefinitionService service = new ProcessDefinitionService();
ProcessDefinitionService spy = Mockito.spy(service);
Mockito.doNothing().when(spy).batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
}
}

37
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java

@ -28,8 +28,6 @@ import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.ITaskQueue;
import org.apache.dolphinscheduler.service.queue.TaskQueueFactory;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@ -47,7 +45,6 @@ import java.util.List;
import java.util.Map;
@RunWith(PowerMockRunner.class)
@PrepareForTest({TaskQueueFactory.class})
public class DataAnalysisServiceTest {
@InjectMocks
@ -74,8 +71,7 @@ public class DataAnalysisServiceTest {
@Mock
TaskInstanceMapper taskInstanceMapper;
@Mock
ITaskQueue taskQueue;
@Mock
ProcessService processService;
@ -118,9 +114,6 @@ public class DataAnalysisServiceTest {
Map<String, Object> result = dataAnalysisService.countTaskStateByProject(user, 2, startDate, endDate);
Assert.assertTrue(result.isEmpty());
// task instance state count error
result = dataAnalysisService.countTaskStateByProject(user, 1, startDate, endDate);
Assert.assertEquals(Status.TASK_INSTANCE_STATE_COUNT_ERROR,result.get(Constants.STATUS));
//SUCCESS
Mockito.when(taskInstanceMapper.countTaskInstanceStateByUser(DateUtils.getScheduleDate(startDate),
@ -141,10 +134,6 @@ public class DataAnalysisServiceTest {
Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(user,2,startDate,endDate);
Assert.assertTrue(result.isEmpty());
//COUNT_PROCESS_INSTANCE_STATE_ERROR
result = dataAnalysisService.countProcessInstanceStateByProject(user,1,startDate,endDate);
Assert.assertEquals(Status.COUNT_PROCESS_INSTANCE_STATE_ERROR,result.get(Constants.STATUS));
//SUCCESS
Mockito.when(processInstanceMapper.countInstanceStateByUser(DateUtils.getScheduleDate(startDate),
DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(getTaskInstanceStateCounts());
@ -183,30 +172,6 @@ public class DataAnalysisServiceTest {
}
@Test
public void testCountQueueState(){
PowerMockito.mockStatic(TaskQueueFactory.class);
List<String> taskQueueList = new ArrayList<>(1);
taskQueueList.add("1_0_1_1_-1");
List<String> taskKillList = new ArrayList<>(1);
taskKillList.add("1-0");
PowerMockito.when(taskQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_QUEUE)).thenReturn(taskQueueList);
PowerMockito.when(taskQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_KILL)).thenReturn(taskKillList);
PowerMockito.when(TaskQueueFactory.getTaskQueueInstance()).thenReturn(taskQueue);
//checkProject false
Map<String, Object> result = dataAnalysisService.countQueueState(user,2);
Assert.assertTrue(result.isEmpty());
result = dataAnalysisService.countQueueState(user,1);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
//admin
user.setUserType(UserType.ADMIN_USER);
result = dataAnalysisService.countQueueState(user,1);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
}
/**
* get list
* @return

65
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java

@ -16,38 +16,85 @@
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = ApiApplicationServer.class)
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({"sun.security.*", "javax.net.*"})
public class DataSourceServiceTest {
private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceTest.class);
@Autowired
@InjectMocks
private DataSourceService dataSourceService;
@Mock
private DataSourceMapper dataSourceMapper;
@Test
public void queryDataSourceList(){
public void queryDataSourceListTest(){
User loginUser = new User();
loginUser.setId(27);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> map = dataSourceService.queryDataSourceList(loginUser, DbType.MYSQL.ordinal());
Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
}
@Test
public void verifyDataSourceNameTest(){
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
String dataSourceName = "dataSource1";
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(getDataSourceList());
Result result = dataSourceService.verifyDataSourceName(loginUser, dataSourceName);
Assert.assertEquals(Status.DATASOURCE_EXIST.getMsg(),result.getMsg());
}
@Test
public void queryDataSourceTest(){
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(null);
Map<String, Object> result = dataSourceService.queryDataSource(Mockito.anyInt());
Assert.assertEquals(((Status)result.get(Constants.STATUS)).getCode(),Status.RESOURCE_NOT_EXIST.getCode());
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(getOracleDataSource());
result = dataSourceService.queryDataSource(Mockito.anyInt());
Assert.assertEquals(((Status)result.get(Constants.STATUS)).getCode(),Status.SUCCESS.getCode());
}
private List<DataSource> getDataSourceList(){
List<DataSource> dataSources = new ArrayList<>();
dataSources.add(getOracleDataSource());
return dataSources;
}
private DataSource getOracleDataSource(){
DataSource dataSource = new DataSource();
dataSource.setName("test");
dataSource.setNote("Note");
dataSource.setType(DbType.ORACLE);
dataSource.setConnectionParams("{\"connectType\":\"ORACLE_SID\",\"address\":\"jdbc:oracle:thin:@192.168.xx.xx:49161\",\"database\":\"XE\",\"jdbcUrl\":\"jdbc:oracle:thin:@192.168.xx.xx:49161/XE\",\"user\":\"system\",\"password\":\"oracle\"}");
return dataSource;
}
}

10
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java

@ -122,7 +122,7 @@ public class ExecutorService2Test {
null, null,
null, null, 0,
"", "", RunMode.RUN_MODE_SERIAL,
Priority.LOW, 0, 110);
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(1)).createCommand(any(Command.class));
}catch (Exception e){
@ -142,7 +142,7 @@ public class ExecutorService2Test {
null, null,
null, null, 0,
"", "", RunMode.RUN_MODE_SERIAL,
Priority.LOW, 0, 110);
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS));
verify(processService, times(0)).createCommand(any(Command.class));
}catch (Exception e){
@ -162,7 +162,7 @@ public class ExecutorService2Test {
null, null,
null, null, 0,
"", "", RunMode.RUN_MODE_SERIAL,
Priority.LOW, 0, 110);
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(1)).createCommand(any(Command.class));
}catch (Exception e){
@ -182,7 +182,7 @@ public class ExecutorService2Test {
null, null,
null, null, 0,
"", "", RunMode.RUN_MODE_PARALLEL,
Priority.LOW, 0, 110);
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(31)).createCommand(any(Command.class));
}catch (Exception e){
@ -202,7 +202,7 @@ public class ExecutorService2Test {
null, null,
null, null, 0,
"", "", RunMode.RUN_MODE_PARALLEL,
Priority.LOW, 0, 110);
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(15)).createCommand(any(Command.class));
}catch (Exception e){

13
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java

@ -52,12 +52,17 @@ public class LoggerServiceTest {
//TASK_INSTANCE_NOT_FOUND
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue());
//HOST NOT FOUND
result = loggerService.queryLog(1,1,1);
try {
//HOST NOT FOUND OR ILLEGAL
result = loggerService.queryLog(1, 1, 1);
} catch (RuntimeException e) {
Assert.assertTrue(true);
logger.error("testQueryDataSourceList error {}", e.getMessage());
}
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue());
//SUCCESS
taskInstance.setHost("127.0.0.1");
taskInstance.setHost("127.0.0.1:8080");
taskInstance.setLogPath("/temp/log");
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
result = loggerService.queryLog(1,1,1);
@ -87,7 +92,7 @@ public class LoggerServiceTest {
}
//success
taskInstance.setHost("127.0.0.1");
taskInstance.setHost("127.0.0.1:8080");
taskInstance.setLogPath("/temp/log");
//if use @RunWith(PowerMockRunner.class) mock object,sonarcloud will not calculate the coverage,
// so no assert will be added here

150
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java

@ -39,8 +39,6 @@ import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.skyscreamer.jsonassert.JSONAssert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.web.multipart.MultipartFile;
@ -54,7 +52,6 @@ import java.util.*;
@RunWith(MockitoJUnitRunner.Silent.class)
@SpringBootTest(classes = ApiApplicationServer.class)
public class ProcessDefinitionServiceTest {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceTest.class);
@InjectMocks
ProcessDefinitionService processDefinitionService;
@ -74,8 +71,7 @@ public class ProcessDefinitionServiceTest {
@Mock
private ScheduleMapper scheduleMapper;
@Mock
private WorkerGroupMapper workerGroupMapper;
@Mock
private ProcessService processService;
@ -110,7 +106,7 @@ public class ProcessDefinitionServiceTest {
"\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
@Test
public void testQueryProccessDefinitionList() {
public void testQueryProcessDefinitionList() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
@ -124,7 +120,7 @@ public class ProcessDefinitionServiceTest {
//project not found
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProccessDefinitionList(loginUser,"project_test1");
Map<String, Object> map = processDefinitionService.queryProcessDefinitionList(loginUser,"project_test1");
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
//project check auth success
@ -133,7 +129,7 @@ public class ProcessDefinitionServiceTest {
List<ProcessDefinition> resourceList = new ArrayList<>();
resourceList.add(getProcessDefinition());
Mockito.when(processDefineMapper.queryAllDefinitionList(project.getId())).thenReturn(resourceList);
Map<String, Object> checkSuccessRes = processDefinitionService.queryProccessDefinitionList(loginUser,"project_test1");
Map<String, Object> checkSuccessRes = processDefinitionService.queryProcessDefinitionList(loginUser,"project_test1");
Assert.assertEquals(Status.SUCCESS, checkSuccessRes.get(Constants.STATUS));
}
@ -174,7 +170,7 @@ public class ProcessDefinitionServiceTest {
//project check auth fail
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProccessDefinitionById(loginUser,
Map<String, Object> map = processDefinitionService.queryProcessDefinitionById(loginUser,
"project_test1", 1);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
@ -182,17 +178,58 @@ public class ProcessDefinitionServiceTest {
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Mockito.when(processDefineMapper.selectById(1)).thenReturn(null);
Map<String, Object> instanceNotexitRes = processDefinitionService.queryProccessDefinitionById(loginUser,
Map<String, Object> instanceNotexitRes = processDefinitionService.queryProcessDefinitionById(loginUser,
"project_test1", 1);
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, instanceNotexitRes.get(Constants.STATUS));
//instance exit
Mockito.when(processDefineMapper.selectById(46)).thenReturn(getProcessDefinition());
Map<String, Object> successRes = processDefinitionService.queryProccessDefinitionById(loginUser,
Map<String, Object> successRes = processDefinitionService.queryProcessDefinitionById(loginUser,
"project_test1", 46);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testCopyProcessDefinition() throws Exception{
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>(5);
//project check auth success, instance not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
ProcessDefinition definition = getProcessDefinition();
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setConnects("[]");
//instance exit
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
Map<String, Object> createProcessResult = new HashMap<>(5);
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.createProcessDefinition(
loginUser,
definition.getProjectName(),
definition.getName(),
definition.getProcessDefinitionJson(),
definition.getDescription(),
definition.getLocations(),
definition.getConnects())).thenReturn(createProcessResult);
Map<String, Object> successRes = processDefinitionService.copyProcessDefinition(loginUser,
"project_test1", 46);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void deleteProcessDefinitionByIdTest() throws Exception {
String projectName = "project_test1";
@ -274,6 +311,7 @@ public class ProcessDefinitionServiceTest {
@Test
public void testReleaseProcessDefinition() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
@ -298,20 +336,21 @@ public class ProcessDefinitionServiceTest {
46, ReleaseState.ONLINE.getCode());
Assert.assertEquals(Status.SUCCESS, onlineRes.get(Constants.STATUS));
//process definition offline
List<Schedule> schedules = new ArrayList<>();
Schedule schedule = getSchedule();
schedules.add(schedule);
Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules);
Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1);
Map<String, Object> offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
46, ReleaseState.OFFLINE.getCode());
Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS));
//release error code
Map<String, Object> failRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
46, 2);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failRes.get(Constants.STATUS));
//FIXME has function exit code 1 when exception
//process definition offline
// List<Schedule> schedules = new ArrayList<>();
// Schedule schedule = getSchedule();
// schedules.add(schedule);
// Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules);
// Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1);
// Map<String, Object> offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
// 46, ReleaseState.OFFLINE.getCode());
// Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS));
}
@Test
@ -328,20 +367,20 @@ public class ProcessDefinitionServiceTest {
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.verifyProccessDefinitionName(loginUser,
Map<String, Object> map = processDefinitionService.verifyProcessDefinitionName(loginUser,
"project_test1", "test_pdf");
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
//project check auth success, process not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(null);
Map<String, Object> processNotExistRes = processDefinitionService.verifyProccessDefinitionName(loginUser,
Map<String, Object> processNotExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser,
"project_test1", "test_pdf");
Assert.assertEquals(Status.SUCCESS, processNotExistRes.get(Constants.STATUS));
//process exist
Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(getProcessDefinition());
Map<String, Object> processExistRes = processDefinitionService.verifyProccessDefinitionName(loginUser,
Map<String, Object> processExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser,
"project_test1", "test_pdf");
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST, processExistRes.get(Constants.STATUS));
}
@ -411,14 +450,14 @@ public class ProcessDefinitionServiceTest {
}
@Test
public void testQueryProccessDefinitionAllByProjectId() {
public void testQueryProcessDefinitionAllByProjectId() {
int projectId = 1;
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(shellJson);
List<ProcessDefinition> processDefinitionList = new ArrayList<>();
processDefinitionList.add(processDefinition);
Mockito.when(processDefineMapper.queryAllDefinitionList(projectId)).thenReturn(processDefinitionList);
Map<String, Object> successRes = processDefinitionService.queryProccessDefinitionAllByProjectId(projectId);
Map<String, Object> successRes = processDefinitionService.queryProcessDefinitionAllByProjectId(projectId);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@ -482,7 +521,6 @@ public class ProcessDefinitionServiceTest {
@Test
public void testExportProcessMetaDataStr() {
Mockito.when(scheduleMapper.queryByProcessDefinitionId(46)).thenReturn(getSchedulerList());
Mockito.when(workerGroupMapper.selectById(-1)).thenReturn(null);
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(sqlDependentJson);
@ -525,17 +563,14 @@ public class ProcessDefinitionServiceTest {
WorkerGroup workerGroup = new WorkerGroup();
workerGroup.setName("ds-test-workergroup");
workerGroup.setId(2);
List<WorkerGroup> workerGroups = new ArrayList<>();
workerGroups.add(workerGroup);
Mockito.when(workerGroupMapper.queryWorkerGroupByName("ds-test")).thenReturn(workerGroups);
processMetaCron.setScheduleWorkerGroupName("ds-test");
int insertFlagWorker = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMetaCron,
processDefinitionName, processDefinitionId);
Assert.assertEquals(0, insertFlagWorker);
Mockito.when(workerGroupMapper.queryWorkerGroupByName("ds-test")).thenReturn(null);
int workerNullFlag = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMetaCron,
processDefinitionName, processDefinitionId);
Assert.assertEquals(0, workerNullFlag);
@ -611,7 +646,7 @@ public class ProcessDefinitionServiceTest {
Mockito.when(processDefineMapper.queryByDefineName(testProject.getId(), "shell-4")).thenReturn(null);
Mockito.when(processDefineMapper.queryByDefineName(testProject.getId(), "testProject")).thenReturn(shellDefinition2);
processDefinitionService.importSubProcess(loginUser,testProject,jsonArray,subProcessIdMap);
processDefinitionService.importSubProcess(loginUser,testProject, jsonArray, subProcessIdMap);
String correctSubJson = jsonArray.toString();
@ -622,14 +657,14 @@ public class ProcessDefinitionServiceTest {
@Test
public void testImportProcessDefinitionById() throws IOException {
String processJson = "{\"projectName\":\"testProject\",\"processDefinitionName\":\"shell-4\"," +
String processJson = "[{\"projectName\":\"testProject\",\"processDefinitionName\":\"shell-4\"," +
"\"processDefinitionJson\":\"{\\\"tenantId\\\":1,\\\"globalParams\\\":[]," +
"\\\"tasks\\\":[{\\\"workerGroupId\\\":-1,\\\"description\\\":\\\"\\\",\\\"runFlag\\\":\\\"NORMAL\\\"," +
"\\\"tasks\\\":[{\\\"workerGroupId\\\":\\\"default\\\",\\\"description\\\":\\\"\\\",\\\"runFlag\\\":\\\"NORMAL\\\"," +
"\\\"type\\\":\\\"SHELL\\\",\\\"params\\\":{\\\"rawScript\\\":\\\"#!/bin/bash\\\\necho \\\\\\\"shell-4\\\\\\\"\\\"," +
"\\\"localParams\\\":[],\\\"resourceList\\\":[]},\\\"timeout\\\":{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}," +
"\\\"maxRetryTimes\\\":\\\"0\\\",\\\"taskInstancePriority\\\":\\\"MEDIUM\\\",\\\"name\\\":\\\"shell-4\\\"," +
"\\\"dependence\\\":{},\\\"retryInterval\\\":\\\"1\\\",\\\"preTasks\\\":[],\\\"id\\\":\\\"tasks-84090\\\"}," +
"{\\\"taskInstancePriority\\\":\\\"MEDIUM\\\",\\\"name\\\":\\\"shell-5\\\",\\\"workerGroupId\\\":-1," +
"{\\\"taskInstancePriority\\\":\\\"MEDIUM\\\",\\\"name\\\":\\\"shell-5\\\",\\\"workerGroupId\\\":\\\"default\\\\," +
"\\\"description\\\":\\\"\\\",\\\"dependence\\\":{},\\\"preTasks\\\":[\\\"shell-4\\\"],\\\"id\\\":\\\"tasks-87364\\\"," +
"\\\"runFlag\\\":\\\"NORMAL\\\",\\\"type\\\":\\\"SUB_PROCESS\\\",\\\"params\\\":{\\\"processDefinitionId\\\":46}," +
"\\\"timeout\\\":{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}}],\\\"timeout\\\":0}\"," +
@ -637,13 +672,13 @@ public class ProcessDefinitionServiceTest {
"\\\"targetarr\\\":\\\"\\\",\\\"x\\\":128,\\\"y\\\":114},\\\"tasks-87364\\\":{\\\"name\\\":\\\"shell-5\\\"," +
"\\\"targetarr\\\":\\\"tasks-84090\\\",\\\"x\\\":266,\\\"y\\\":115}}\"," +
"\"processDefinitionConnects\":\"[{\\\"endPointSourceId\\\":\\\"tasks-84090\\\"," +
"\\\"endPointTargetId\\\":\\\"tasks-87364\\\"}]\"}";
"\\\"endPointTargetId\\\":\\\"tasks-87364\\\"}]\"}]";
String subProcessJson = "{\"globalParams\":[]," +
"\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-52423\",\"name\":\"shell-5\"," +
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo \\\"shell-5\\\"\"},\"description\":\"\"," +
"\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1," +
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":\\\"default\\\\," +
"\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
FileUtils.writeStringToFile(new File("/tmp/task.json"),processJson);
@ -674,25 +709,25 @@ public class ProcessDefinitionServiceTest {
Mockito.when(processDefineMapper.queryByDefineId(46)).thenReturn(shellDefinition2);
//import process
Map<String, Object> importProcessResult = processDefinitionService.importProcessDefinition(loginUser, multipartFile, currentProjectName);
Assert.assertEquals(Status.SUCCESS, importProcessResult.get(Constants.STATUS));
boolean delete = file.delete();
Assert.assertTrue(delete);
String processMetaJson = "";
improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
processMetaJson = "{\"scheduleWorkerGroupId\":-1}";
improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
processMetaJson = "{\"scheduleWorkerGroupId\":-1,\"projectName\":\"test\"}";
improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
processMetaJson = "{\"scheduleWorkerGroupId\":-1,\"projectName\":\"test\",\"processDefinitionName\":\"test_definition\"}";
improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
// Map<String, Object> importProcessResult = processDefinitionService.importProcessDefinition(loginUser, multipartFile, currentProjectName);
//
// Assert.assertEquals(Status.SUCCESS, importProcessResult.get(Constants.STATUS));
//
// boolean delete = file.delete();
//
// Assert.assertTrue(delete);
// String processMetaJson = "";
// improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
//
// processMetaJson = "{\"scheduleWorkerGroupId\":-1}";
// improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
//
// processMetaJson = "{\"scheduleWorkerGroupId\":-1,\"projectName\":\"test\"}";
// improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
//
// processMetaJson = "{\"scheduleWorkerGroupId\":-1,\"projectName\":\"test\",\"processDefinitionName\":\"test_definition\"}";
// improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
}
@ -763,12 +798,14 @@ public class ProcessDefinitionServiceTest {
* @return ProcessDefinition
*/
private ProcessDefinition getProcessDefinition(){
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(46);
processDefinition.setName("test_pdf");
processDefinition.setProjectId(2);
processDefinition.setTenantId(1);
processDefinition.setDescription("");
return processDefinition;
}
@ -803,7 +840,7 @@ public class ProcessDefinitionServiceTest {
schedule.setProcessInstancePriority(Priority.MEDIUM);
schedule.setWarningType(WarningType.NONE);
schedule.setWarningGroupId(1);
schedule.setWorkerGroupId(-1);
schedule.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP);
return schedule;
}
@ -822,7 +859,6 @@ public class ProcessDefinitionServiceTest {
processMeta.setScheduleFailureStrategy(String.valueOf(schedule.getFailureStrategy()));
processMeta.setScheduleReleaseState(String.valueOf(schedule.getReleaseState()));
processMeta.setScheduleProcessInstancePriority(String.valueOf(schedule.getProcessInstancePriority()));
processMeta.setScheduleWorkerGroupId(schedule.getWorkerGroupId());
processMeta.setScheduleWorkerGroupName("workgroup1");
return processMeta;
}

14
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java

@ -80,8 +80,7 @@ public class ProcessInstanceServiceTest {
@Mock
LoggerService loggerService;
@Mock
WorkerGroupMapper workerGroupMapper;
@Mock
UsersService usersService;
@ -163,7 +162,6 @@ public class ProcessInstanceServiceTest {
//project auth success
ProcessInstance processInstance = getProcessInstance();
processInstance.setWorkerGroupId(-1);
processInstance.setReceivers("xxx@qq.com");
processInstance.setReceiversCc("xxx@qq.com");
processInstance.setProcessDefinitionId(46);
@ -178,16 +176,11 @@ public class ProcessInstanceServiceTest {
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
//worker group null
processInstance.setWorkerGroupId(1);
when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(null);
Map<String, Object> workerNullRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1);
Assert.assertEquals(Status.SUCCESS, workerNullRes.get(Constants.STATUS));
//worker group exist
WorkerGroup workerGroup = getWorkGroup();
when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(workerGroup);
processInstance.setWorkerGroupId(1);
when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(null);
Map<String, Object> workerExistRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1);
Assert.assertEquals(Status.SUCCESS, workerExistRes.get(Constants.STATUS));
}
@ -394,8 +387,6 @@ public class ProcessInstanceServiceTest {
//project auth fail
when(projectMapper.queryByName(projectName)).thenReturn(null);
when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result);
Map<String, Object> proejctAuthFailRes = processInstanceService.deleteProcessInstanceById(loginUser, projectName, 1, Mockito.any());
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS));
//process instance null
Project project = getProject(projectName);
@ -403,8 +394,6 @@ public class ProcessInstanceServiceTest {
when(projectMapper.queryByName(projectName)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
when(processService.findProcessInstanceDetailById(1)).thenReturn(null);
Map<String, Object> processInstanceNullRes = processInstanceService.deleteProcessInstanceById(loginUser, projectName, 1, Mockito.any());
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS));
}
@Test
@ -496,7 +485,6 @@ public class ProcessInstanceServiceTest {
*/
private WorkerGroup getWorkGroup() {
WorkerGroup workerGroup = new WorkerGroup();
workerGroup.setId(1);
workerGroup.setName("test_workergroup");
return workerGroup;
}

57
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java

@ -19,12 +19,16 @@ package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
@ -37,7 +41,6 @@ import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.omg.CORBA.Any;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
@ -172,10 +175,29 @@ public class ResourcesServiceTest {
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(),result.getMsg());
//RESOURCE_NOT_EXIST
user.setId(1);
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
PowerMockito.when(HadoopUtils.getHdfsFileName(Mockito.any(), Mockito.any(),Mockito.anyString())).thenReturn("test1");
try {
Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(false);
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF);
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg());
//SUCCESS
user.setId(1);
Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser());
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
try {
Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(true);
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
result = resourcesService.updateResource(user,1,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.FILE);
logger.info(result.toString());
@ -187,33 +209,28 @@ public class ResourcesServiceTest {
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(),result.getMsg());
//USER_NOT_EXIST
Mockito.when(userMapper.queryDetailsById(Mockito.anyInt())).thenReturn(null);
Mockito.when(userMapper.selectById(Mockito.anyInt())).thenReturn(null);
result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF);
logger.info(result.toString());
Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode());
//TENANT_NOT_EXIST
Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser());
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
Mockito.when(tenantMapper.queryById(Mockito.anyInt())).thenReturn(null);
result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF);
logger.info(result.toString());
Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(),result.getMsg());
//RESOURCE_NOT_EXIST
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test1");
//SUCCESS
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test");
try {
Mockito.when(hadoopUtils.exists("test")).thenReturn(true);
} catch (IOException e) {
e.printStackTrace();
PowerMockito.when(HadoopUtils.getInstance().copy(Mockito.anyString(),Mockito.anyString(),true,true)).thenReturn(true);
} catch (Exception e) {
logger.error(e.getMessage(),e);
}
result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF);
logger.info(result.toString());
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg());
//SUCCESS
PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test");
result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest1.jar",ResourceType.UDF);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg());
@ -242,7 +259,7 @@ public class ResourcesServiceTest {
User loginUser = new User();
loginUser.setId(0);
loginUser.setUserType(UserType.ADMIN_USER);
Mockito.when(resourcesMapper.queryResourceListAuthored(0, 0)).thenReturn(getResourceList());
Mockito.when(resourcesMapper.queryResourceListAuthored(0, 0,0)).thenReturn(getResourceList());
Map<String, Object> result = resourcesService.queryResourceList(loginUser, ResourceType.FILE);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
@ -279,7 +296,7 @@ public class ResourcesServiceTest {
//TENANT_NOT_EXIST
loginUser.setUserType(UserType.ADMIN_USER);
loginUser.setTenantId(2);
Mockito.when(userMapper.queryDetailsById(Mockito.anyInt())).thenReturn(loginUser);
Mockito.when(userMapper.selectById(Mockito.anyInt())).thenReturn(loginUser);
result = resourcesService.delete(loginUser,1);
logger.info(result.toString());
Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg());
@ -373,7 +390,7 @@ public class ResourcesServiceTest {
//TENANT_NOT_EXIST
Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser());
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
result = resourcesService.readResource(1,1,10);
logger.info(result.toString());
Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(),result.getMsg());
@ -478,7 +495,7 @@ public class ResourcesServiceTest {
//TENANT_NOT_EXIST
Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser());
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
result = resourcesService.updateResourceContent(1,"content");
logger.info(result.toString());
Assert.assertTrue(Status.TENANT_NOT_EXIST.getCode() == result.getCode());
@ -497,7 +514,7 @@ public class ResourcesServiceTest {
PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true);
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser());
Mockito.when(userMapper.selectById(1)).thenReturn(getUser());
org.springframework.core.io.Resource resourceMock = Mockito.mock(org.springframework.core.io.Resource.class);
try {
//resource null

30
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java

@ -18,13 +18,16 @@ package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.avro.generic.GenericData;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.*;
@ -68,6 +71,8 @@ public class UsersServiceTest {
private DataSourceUserMapper datasourceUserMapper;
@Mock
private AlertGroupMapper alertGroupMapper;
@Mock
private ResourceMapper resourceMapper;
private String queueName ="UsersServiceTestQueue";
@ -301,9 +306,13 @@ public class UsersServiceTest {
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
when(resourceMapper.queryAuthorizedResourceList(1)).thenReturn(new ArrayList<Resource>());
when(resourceMapper.selectById(Mockito.anyInt())).thenReturn(getResource());
result = usersService.grantResources(loginUser, 1, resourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@ -476,11 +485,30 @@ public class UsersServiceTest {
return user;
}
/**
* get tenant
* @return tenant
*/
private Tenant getTenant(){
Tenant tenant = new Tenant();
tenant.setId(1);
return tenant;
}
/**
* get resource
* @return resource
*/
private Resource getResource(){
Resource resource = new Resource();
resource.setPid(-1);
resource.setUserId(1);
resource.setDescription("ResourcesServiceTest.jar");
resource.setAlias("ResourcesServiceTest.jar");
resource.setFullName("/ResourcesServiceTest.jar");
resource.setType(ResourceType.FILE);
return resource;
}
}

117
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java

@ -26,13 +26,16 @@ import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.internal.matchers.Any;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -40,6 +43,7 @@ import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@RunWith(MockitoJUnitRunner.class)
public class WorkerGroupServiceTest {
@ -48,94 +52,55 @@ public class WorkerGroupServiceTest {
@InjectMocks
private WorkerGroupService workerGroupService;
@Mock
private WorkerGroupMapper workerGroupMapper;
@Mock
private ProcessInstanceMapper processInstanceMapper;
@Mock
private ZookeeperCachedOperator zookeeperCachedOperator;
private String groupName="groupName000001";
@Before
public void init(){
ZookeeperConfig zookeeperConfig = new ZookeeperConfig();
zookeeperConfig.setDsRoot("/dolphinscheduler_qzw");
Mockito.when(zookeeperCachedOperator.getZookeeperConfig()).thenReturn(zookeeperConfig);
/**
* create or update a worker group
*/
@Test
public void testSaveWorkerGroup(){
String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot()+"/nodes" +"/worker";
User user = new User();
// general user add
user.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = workerGroupService.saveWorkerGroup(user, 0, groupName, "127.0.0.1");
logger.info(result.toString());
Assert.assertEquals( Status.USER_NO_OPERATION_PERM.getMsg(),(String) result.get(Constants.MSG));
List<String> workerGroupStrList = new ArrayList<>();
workerGroupStrList.add("default");
workerGroupStrList.add("test");
Mockito.when(zookeeperCachedOperator.getChildrenKeys(workerPath)).thenReturn(workerGroupStrList);
//success
user.setUserType(UserType.ADMIN_USER);
result = workerGroupService.saveWorkerGroup(user, 0, groupName, "127.0.0.1");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(),(String)result.get(Constants.MSG));
// group name exist
Mockito.when(workerGroupMapper.selectById(2)).thenReturn(getWorkerGroup(2));
Mockito.when(workerGroupMapper.queryWorkerGroupByName(groupName)).thenReturn(getList());
result = workerGroupService.saveWorkerGroup(user, 2, groupName, "127.0.0.1");
logger.info(result.toString());
Assert.assertEquals(Status.NAME_EXIST,result.get(Constants.STATUS));
List<String> defaultIpList = new ArrayList<>();
defaultIpList.add("192.168.220.188:1234");
defaultIpList.add("192.168.220.189:1234");
Mockito.when(zookeeperCachedOperator.getChildrenKeys(workerPath + "/default")).thenReturn(defaultIpList);
Mockito.when(zookeeperCachedOperator.get(workerPath + "/default" + "/" + defaultIpList.get(0))).thenReturn("0.02,0.23,0.03,2020-05-08 11:24:14,2020-05-08 14:22:24");
}
/**
* query worker group paging
*/
@Test
public void testQueryAllGroupPaging(){
public void testQueryAllGroupPaging(){
User user = new User();
// general user add
user.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = workerGroupService.queryAllGroupPaging(user, 1, 10, groupName);
logger.info(result.toString());
Assert.assertEquals((String) result.get(Constants.MSG), Status.USER_NO_OPERATION_PERM.getMsg());
//success
user.setUserType(UserType.ADMIN_USER);
Page<WorkerGroup> page = new Page<>(1,10);
page.setRecords(getList());
page.setSize(1L);
Mockito.when(workerGroupMapper.queryListPaging(Mockito.any(Page.class), Mockito.eq(groupName))).thenReturn(page);
result = workerGroupService.queryAllGroupPaging(user, 1, 10, groupName);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(),(String)result.get(Constants.MSG));
PageInfo<WorkerGroup> pageInfo = (PageInfo<WorkerGroup>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists()));
Map<String, Object> result = workerGroupService.queryAllGroupPaging(user, 1, 10, null);
PageInfo<WorkerGroup> pageInfo = (PageInfo) result.get(Constants.DATA_LIST);
Assert.assertEquals(pageInfo.getLists().size(),1);
}
/**
* delete group by id
*/
@Test
public void testDeleteWorkerGroupById(){
//DELETE_WORKER_GROUP_BY_ID_FAIL
Mockito.when(processInstanceMapper.queryByWorkerGroupIdAndStatus(1, Constants.NOT_TERMINATED_STATES)).thenReturn(getProcessInstanceList());
Map<String, Object> result = workerGroupService.deleteWorkerGroupById(1);
logger.info(result.toString());
Assert.assertEquals(Status.DELETE_WORKER_GROUP_BY_ID_FAIL.getCode(),((Status) result.get(Constants.STATUS)).getCode());
//correct
result = workerGroupService.deleteWorkerGroupById(2);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(),(String)result.get(Constants.MSG));
}
@Test
public void testQueryAllGroup(){
Mockito.when(workerGroupMapper.queryAllWorkerGroup()).thenReturn(getList());
public void testQueryAllGroup() throws Exception {
Map<String, Object> result = workerGroupService.queryAllGroup();
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(),(String)result.get(Constants.MSG));
List<WorkerGroup> workerGroupList = (List<WorkerGroup>) result.get(Constants.DATA_LIST);
Assert.assertTrue(workerGroupList.size()>0);
Set<String> workerGroups = (Set<String>) result.get(Constants.DATA_LIST);
Assert.assertEquals(workerGroups.size(), 1);
}
@ -149,25 +114,5 @@ public class WorkerGroupServiceTest {
processInstances.add(new ProcessInstance());
return processInstances;
}
/**
* get Group
* @return
*/
private WorkerGroup getWorkerGroup(int id){
WorkerGroup workerGroup = new WorkerGroup();
workerGroup.setName(groupName);
workerGroup.setId(id);
return workerGroup;
}
private WorkerGroup getWorkerGroup(){
return getWorkerGroup(1);
}
private List<WorkerGroup> getList(){
List<WorkerGroup> list = new ArrayList<>();
list.add(getWorkerGroup());
return list;
}
}

13
dolphinscheduler-common/pom.xml

@ -21,7 +21,7 @@
<parent>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.2.1-SNAPSHOT</version>
<version>1.3.1-SNAPSHOT</version>
</parent>
<artifactId>dolphinscheduler-common</artifactId>
<name>dolphinscheduler-common</name>
@ -29,15 +29,17 @@
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<codehaus.janino.version>3.1.0</codehaus.janino.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-plugin-api</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
@ -586,10 +588,5 @@
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.codehaus.janino</groupId>
<artifactId>janino</artifactId>
<version>${codehaus.janino.version}</version>
</dependency>
</dependencies>
</project>

242
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java

@ -25,9 +25,45 @@ import java.util.regex.Pattern;
* Constants
*/
public final class Constants {
private Constants() {
throw new IllegalStateException("Constants class");
}
/**
* quartz config
*/
public static final String ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS = "org.quartz.jobStore.driverDelegateClass";
public static final String ORG_QUARTZ_SCHEDULER_INSTANCENAME = "org.quartz.scheduler.instanceName";
public static final String ORG_QUARTZ_SCHEDULER_INSTANCEID = "org.quartz.scheduler.instanceId";
public static final String ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON = "org.quartz.scheduler.makeSchedulerThreadDaemon";
public static final String ORG_QUARTZ_JOBSTORE_USEPROPERTIES = "org.quartz.jobStore.useProperties";
public static final String ORG_QUARTZ_THREADPOOL_CLASS = "org.quartz.threadPool.class";
public static final String ORG_QUARTZ_THREADPOOL_THREADCOUNT = "org.quartz.threadPool.threadCount";
public static final String ORG_QUARTZ_THREADPOOL_MAKETHREADSDAEMONS = "org.quartz.threadPool.makeThreadsDaemons";
public static final String ORG_QUARTZ_THREADPOOL_THREADPRIORITY = "org.quartz.threadPool.threadPriority";
public static final String ORG_QUARTZ_JOBSTORE_CLASS = "org.quartz.jobStore.class";
public static final String ORG_QUARTZ_JOBSTORE_TABLEPREFIX = "org.quartz.jobStore.tablePrefix";
public static final String ORG_QUARTZ_JOBSTORE_ISCLUSTERED = "org.quartz.jobStore.isClustered";
public static final String ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD = "org.quartz.jobStore.misfireThreshold";
public static final String ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL = "org.quartz.jobStore.clusterCheckinInterval";
public static final String ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK = "org.quartz.jobStore.acquireTriggersWithinLock";
public static final String ORG_QUARTZ_JOBSTORE_DATASOURCE = "org.quartz.jobStore.dataSource";
public static final String ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS = "org.quartz.dataSource.myDs.connectionProvider.class";
/**
* quartz config default value
*/
public static final String QUARTZ_TABLE_PREFIX = "QRTZ_";
public static final String QUARTZ_MISFIRETHRESHOLD = "60000";
public static final String QUARTZ_CLUSTERCHECKININTERVAL = "5000";
public static final String QUARTZ_DATASOURCE = "myDs";
public static final String QUARTZ_THREADCOUNT = "25";
public static final String QUARTZ_THREADPRIORITY = "5";
public static final String QUARTZ_INSTANCENAME = "DolphinScheduler";
public static final String QUARTZ_INSTANCEID = "AUTO";
public static final String QUARTZ_ACQUIRETRIGGERSWITHINLOCK = "true";
/**
* common properties path
*/
@ -56,9 +92,11 @@ public final class Constants {
/**
* yarn.resourcemanager.ha.rm.idsfs.defaultFS
* yarn.resourcemanager.ha.rm.ids
*/
public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids";
public static final String YARN_RESOURCEMANAGER_HA_XX = "xx";
/**
* yarn.application.status.address
@ -72,31 +110,25 @@ public final class Constants {
public static final String HDFS_ROOT_USER = "hdfs.root.user";
/**
* hdfs configuration
* data.store2hdfs.basepath
* hdfs/s3 configuration
* resource.upload.path
*/
public static final String DATA_STORE_2_HDFS_BASEPATH = "data.store2hdfs.basepath";
public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path";
/**
* data.basedir.path
* data basedir path
*/
public static final String DATA_BASEDIR_PATH = "data.basedir.path";
/**
* data.download.basedir.path
*/
public static final String DATA_DOWNLOAD_BASEDIR_PATH = "data.download.basedir.path";
/**
* process.exec.basepath
*/
public static final String PROCESS_EXEC_BASEPATH = "process.exec.basepath";
/**
* dolphinscheduler.env.path
*/
public static final String DOLPHINSCHEDULER_ENV_PATH = "dolphinscheduler.env.path";
/**
* environment properties default path
*/
public static final String ENV_PATH = "env/dolphinscheduler_env.sh";
/**
* python home
@ -108,30 +140,38 @@ public final class Constants {
*/
public static final String RESOURCE_VIEW_SUFFIXS = "resource.view.suffixs";
public static final String RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE = "txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties";
/**
* development.state
*/
public static final String DEVELOPMENT_STATE = "development.state";
public static final String DEVELOPMENT_STATE_DEFAULT_VALUE = "true";
/**
* string true
*/
public static final String STRING_TRUE = "true";
/**
* res.upload.startup.type
* string false
*/
public static final String RES_UPLOAD_STARTUP_TYPE = "res.upload.startup.type";
public static final String STRING_FALSE = "false";
/**
* zookeeper quorum
* resource storage type
*/
public static final String ZOOKEEPER_QUORUM = "zookeeper.quorum";
public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type";
/**
* MasterServer directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "/masters";
public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "/nodes/master";
/**
* WorkerServer directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "/workers";
public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "/nodes/worker";
/**
* all servers directory registered in zookeeper
@ -143,10 +183,6 @@ public final class Constants {
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS = "/lock/masters";
/**
* WorkerServer lock directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_WORKERS = "/lock/workers";
/**
* MasterServer failover directory registered in zookeeper
@ -163,16 +199,17 @@ public final class Constants {
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "/lock/failover/startup-masters";
/**
* need send warn times when master server or worker server failover
*/
public static final int DOLPHINSCHEDULER_WARN_TIMES_FAILOVER = 3;
/**
* comma ,
*/
public static final String COMMA = ",";
/**
* slash /
*/
public static final String SLASH = "/";
/**
* COLON :
*/
@ -197,38 +234,11 @@ public final class Constants {
* EQUAL SIGN
*/
public static final String EQUAL_SIGN = "=";
/**
* ZOOKEEPER_SESSION_TIMEOUT
* AT SIGN
*/
public static final String ZOOKEEPER_SESSION_TIMEOUT = "zookeeper.session.timeout";
public static final String ZOOKEEPER_CONNECTION_TIMEOUT = "zookeeper.connection.timeout";
public static final String ZOOKEEPER_RETRY_SLEEP = "zookeeper.retry.sleep";
public static final String ZOOKEEPER_RETRY_BASE_SLEEP = "zookeeper.retry.base.sleep";
public static final String ZOOKEEPER_RETRY_MAX_SLEEP = "zookeeper.retry.max.sleep";
public static final String ZOOKEEPER_RETRY_MAXTIME = "zookeeper.retry.maxtime";
public static final String MASTER_HEARTBEAT_INTERVAL = "master.heartbeat.interval";
public static final String MASTER_EXEC_THREADS = "master.exec.threads";
public static final String MASTER_EXEC_TASK_THREADS = "master.exec.task.number";
public static final String MASTER_COMMIT_RETRY_TIMES = "master.task.commit.retryTimes";
public static final String MASTER_COMMIT_RETRY_INTERVAL = "master.task.commit.interval";
public static final String AT_SIGN = "@";
public static final String WORKER_EXEC_THREADS = "worker.exec.threads";
public static final String WORKER_HEARTBEAT_INTERVAL = "worker.heartbeat.interval";
public static final String WORKER_FETCH_TASK_NUM = "worker.fetch.task.num";
public static final String WORKER_MAX_CPULOAD_AVG = "worker.max.cpuload.avg";
@ -239,21 +249,6 @@ public final class Constants {
public static final String MASTER_RESERVED_MEMORY = "master.reserved.memory";
/**
* dolphinscheduler tasks queue
*/
public static final String DOLPHINSCHEDULER_TASKS_QUEUE = "tasks_queue";
/**
* dolphinscheduler need kill tasks queue
*/
public static final String DOLPHINSCHEDULER_TASKS_KILL = "tasks_kill";
public static final String ZOOKEEPER_DOLPHINSCHEDULER_ROOT = "zookeeper.dolphinscheduler.root";
public static final String SCHEDULER_QUEUE_IMPL = "dolphinscheduler.queue.impl";
/**
* date format of yyyy-MM-dd HH:mm:ss
*/
@ -345,26 +340,6 @@ public final class Constants {
public static final int MAX_TASK_TIMEOUT = 24 * 3600;
/**
* heartbeat threads number
*/
public static final int DEFAUL_WORKER_HEARTBEAT_THREAD_NUM = 1;
/**
* heartbeat interval
*/
public static final int DEFAULT_WORKER_HEARTBEAT_INTERVAL = 60;
/**
* worker fetch task number
*/
public static final int DEFAULT_WORKER_FETCH_TASK_NUM = 1;
/**
* worker execute threads number
*/
public static final int DEFAULT_WORKER_EXEC_THREAD_NUM = 10;
/**
* master cpu load
*/
@ -386,16 +361,6 @@ public final class Constants {
public static final double DEFAULT_WORKER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
/**
* master execute threads number
*/
public static final int DEFAULT_MASTER_EXEC_THREAD_NUM = 100;
/**
* default master concurrent task execute num
*/
public static final int DEFAULT_MASTER_TASK_EXEC_NUM = 20;
/**
* default log cache rows num,output when reach the number
@ -403,33 +368,11 @@ public final class Constants {
public static final int DEFAULT_LOG_ROWS_NUM = 4 * 16;
/**
* log flush intervaloutput when reach the interval
* log flush interval?output when reach the interval
*/
public static final int DEFAULT_LOG_FLUSH_INTERVAL = 1000;
/**
* default master heartbeat thread number
*/
public static final int DEFAULT_MASTER_HEARTBEAT_THREAD_NUM = 1;
/**
* default master heartbeat interval
*/
public static final int DEFAULT_MASTER_HEARTBEAT_INTERVAL = 60;
/**
* default master commit retry times
*/
public static final int DEFAULT_MASTER_COMMIT_RETRY_TIMES = 5;
/**
* default master commit retry interval
*/
public static final int DEFAULT_MASTER_COMMIT_RETRY_INTERVAL = 3000;
/**
* time unit secong to minutes
*/
@ -448,9 +391,9 @@ public final class Constants {
public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN";
/**
* task record configuration path
* datasource configuration path
*/
public static final String APPLICATION_PROPERTIES = "application.properties";
public static final String DATASOURCE_PROPERTIES = "/datasource.properties";
public static final String TASK_RECORD_URL = "task.record.datasource.url";
@ -568,7 +511,7 @@ public final class Constants {
/**
* heartbeat for zk info length
*/
public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 5;
public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 10;
/**
@ -746,7 +689,7 @@ public final class Constants {
* application regex
*/
public static final String APPLICATION_REGEX = "application_\\d+_\\d+";
public static final String PID = "pid";
public static final String PID = OSUtils.isWindows() ? "handle" : "pid";
/**
* month_begin
*/
@ -864,7 +807,7 @@ public final class Constants {
*/
public static final String HIVE_CONF = "hiveconf:";
//flink 任务
//flink ??
public static final String FLINK_YARN_CLUSTER = "yarn-cluster";
public static final String FLINK_RUN_MODE = "-m";
public static final String FLINK_YARN_SLOT = "-ys";
@ -899,26 +842,20 @@ public final class Constants {
/**
* data total
* 数据总数
*/
public static final String COUNT = "count";
/**
* page size
* 每页数据条数
*/
public static final String PAGE_SIZE = "pageSize";
/**
* current page no
* 当前页码
*/
public static final String PAGE_NUMBER = "pageNo";
/**
* result
*/
public static final String RESULT = "result";
/**
*
@ -972,7 +909,8 @@ public final class Constants {
public static final String JDBC_POSTGRESQL = "jdbc:postgresql://";
public static final String JDBC_HIVE_2 = "jdbc:hive2://";
public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://";
public static final String JDBC_ORACLE = "jdbc:oracle:thin:@//";
public static final String JDBC_ORACLE_SID = "jdbc:oracle:thin:@";
public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//";
public static final String JDBC_SQLSERVER = "jdbc:sqlserver://";
public static final String JDBC_DB2 = "jdbc:db2://";
@ -982,6 +920,7 @@ public final class Constants {
public static final String JDBC_URL = "jdbcUrl";
public static final String PRINCIPAL = "principal";
public static final String OTHER = "other";
public static final String ORACLE_DB_CONNECT_TYPE = "connectType";
/**
@ -1000,10 +939,35 @@ public final class Constants {
*/
public static final String DATASOURCE_PASSWORD_REGEX = "(?<=(\"password\":\")).*?(?=(\"))";
/**
* default worker group
*/
public static final String DEFAULT_WORKER_GROUP = "default";
public static final Integer TASK_INFO_LENGTH = 5;
/**
* new
* schedule time
*/
public static final String PARAMETER_SHECDULE_TIME = "schedule.time";
/**
* authorize writable perm
*/
public static final int AUTHORIZE_WRITABLE_PERM=7;
/**
* authorize readable perm
*/
public static final int AUTHORIZE_READABLE_PERM=4;
/**
* plugin configurations
*/
public static final String PLUGIN_JAR_SUFFIX = ".jar";
public static final int NORAML_NODE_STATUS = 0;
public static final int ABNORMAL_NODE_STATUS = 1;
}

9
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java

@ -65,4 +65,13 @@ public enum CommandType {
public String getDescp() {
return descp;
}
public static CommandType of(Integer status){
for(CommandType cmdType : values()){
if(cmdType.getCode() == status){
return cmdType;
}
}
throw new IllegalArgumentException("invalid status : " + status);
}
}

10
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java

@ -57,4 +57,14 @@ public enum DbType {
public String getDescp() {
return descp;
}
public static DbType of(int type){
for(DbType ty : values()){
if(ty.getCode() == type){
return ty;
}
}
throw new IllegalArgumentException("invalid type : " + type);
}
}

22
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java

@ -76,7 +76,7 @@ public enum ExecutionStatus {
* @return status
*/
public boolean typeIsFailure(){
return this == FAILURE || this == NEED_FAULT_TOLERANCE || this == KILL;
return this == FAILURE || this == NEED_FAULT_TOLERANCE;
}
/**
@ -86,14 +86,14 @@ public enum ExecutionStatus {
public boolean typeIsFinished(){
return typeIsSuccess() || typeIsFailure() || typeIsCancel() || typeIsPause()
|| typeIsWaittingThread();
|| typeIsStop();
}
/**
* status is waiting thread
* @return status
*/
public boolean typeIsWaittingThread(){
public boolean typeIsWaitingThread(){
return this == WAITTING_THREAD;
}
@ -104,6 +104,13 @@ public enum ExecutionStatus {
public boolean typeIsPause(){
return this == PAUSE;
}
/**
* status is pause
* @return status
*/
public boolean typeIsStop(){
return this == STOP;
}
/**
* status is running
@ -128,4 +135,13 @@ public enum ExecutionStatus {
public String getDescp() {
return descp;
}
public static ExecutionStatus of(int status){
for(ExecutionStatus es : values()){
if(es.getCode() == status){
return es;
}
}
throw new IllegalArgumentException("invalid status : " + status);
}
}

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save