easyscheduler
5 years ago
committed by
GitHub
42 changed files with 1291 additions and 1812 deletions
@ -1,31 +0,0 @@ |
|||||||
server { |
|
||||||
listen 8888;# 访问端口 |
|
||||||
server_name localhost; |
|
||||||
#charset koi8-r; |
|
||||||
#access_log /var/log/nginx/host.access.log main; |
|
||||||
location / { |
|
||||||
root /opt/escheduler/front/server; # 静态文件目录 |
|
||||||
index index.html index.html; |
|
||||||
} |
|
||||||
location /escheduler { |
|
||||||
proxy_pass http://127.0.0.1:12345; # 接口地址 |
|
||||||
proxy_set_header Host $host; |
|
||||||
proxy_set_header X-Real-IP $remote_addr; |
|
||||||
proxy_set_header x_real_ipP $remote_addr; |
|
||||||
proxy_set_header remote_addr $remote_addr; |
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; |
|
||||||
proxy_http_version 1.1; |
|
||||||
proxy_connect_timeout 4s; |
|
||||||
proxy_read_timeout 30s; |
|
||||||
proxy_send_timeout 12s; |
|
||||||
proxy_set_header Upgrade $http_upgrade; |
|
||||||
proxy_set_header Connection "upgrade"; |
|
||||||
} |
|
||||||
#error_page 404 /404.html; |
|
||||||
# redirect server error pages to the static page /50x.html |
|
||||||
# |
|
||||||
error_page 500 502 503 504 /50x.html; |
|
||||||
location = /50x.html { |
|
||||||
root /usr/share/nginx/html; |
|
||||||
} |
|
||||||
} |
|
@ -1,310 +0,0 @@ |
|||||||
#!/bin/sh |
|
||||||
|
|
||||||
workDir=`/opt/easyscheduler` |
|
||||||
workDir=`cd ${workDir};pwd` |
|
||||||
|
|
||||||
#To be compatible with MacOS and Linux |
|
||||||
txt="" |
|
||||||
if [[ "$OSTYPE" == "darwin"* ]]; then |
|
||||||
# Mac OSX |
|
||||||
txt="''" |
|
||||||
elif [[ "$OSTYPE" == "linux-gnu" ]]; then |
|
||||||
# linux |
|
||||||
txt="" |
|
||||||
elif [[ "$OSTYPE" == "cygwin" ]]; then |
|
||||||
# POSIX compatibility layer and Linux environment emulation for Windows |
|
||||||
echo "Easy Scheduler not support Windows operating system" |
|
||||||
exit 1 |
|
||||||
elif [[ "$OSTYPE" == "msys" ]]; then |
|
||||||
# Lightweight shell and GNU utilities compiled for Windows (part of MinGW) |
|
||||||
echo "Easy Scheduler not support Windows operating system" |
|
||||||
exit 1 |
|
||||||
elif [[ "$OSTYPE" == "win32" ]]; then |
|
||||||
echo "Easy Scheduler not support Windows operating system" |
|
||||||
exit 1 |
|
||||||
elif [[ "$OSTYPE" == "freebsd"* ]]; then |
|
||||||
# ... |
|
||||||
txt="" |
|
||||||
else |
|
||||||
# Unknown. |
|
||||||
echo "Operating system unknown, please tell us(submit issue) for better service" |
|
||||||
exit 1 |
|
||||||
fi |
|
||||||
|
|
||||||
source ${workDir}/conf/config/run_config.conf |
|
||||||
source ${workDir}/conf/config/install_config.conf |
|
||||||
|
|
||||||
# mysql配置 |
|
||||||
# mysql 地址,端口 |
|
||||||
mysqlHost="127.0.0.1:3306" |
|
||||||
|
|
||||||
# mysql 数据库名称 |
|
||||||
mysqlDb="easyscheduler" |
|
||||||
|
|
||||||
# mysql 用户名 |
|
||||||
mysqlUserName="easyscheduler" |
|
||||||
|
|
||||||
# mysql 密码 |
|
||||||
mysqlPassword="easyschedulereasyscheduler" |
|
||||||
|
|
||||||
# conf/config/install_config.conf配置 |
|
||||||
# 安装路径,不要当前路径(pwd)一样 |
|
||||||
installPath="/opt/easyscheduler" |
|
||||||
|
|
||||||
# 部署用户 |
|
||||||
deployUser="escheduler" |
|
||||||
|
|
||||||
# zk集群 |
|
||||||
zkQuorum="192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181" |
|
||||||
|
|
||||||
# 安装hosts |
|
||||||
ips="ark0,ark1,ark2,ark3,ark4" |
|
||||||
|
|
||||||
# conf/config/run_config.conf配置 |
|
||||||
# 运行Master的机器 |
|
||||||
masters="ark0,ark1" |
|
||||||
|
|
||||||
# 运行Worker的机器 |
|
||||||
workers="ark2,ark3,ark4" |
|
||||||
|
|
||||||
# 运行Alert的机器 |
|
||||||
alertServer="ark3" |
|
||||||
|
|
||||||
# 运行Api的机器 |
|
||||||
apiServers="ark1" |
|
||||||
|
|
||||||
# alert配置 |
|
||||||
# 邮件协议 |
|
||||||
mailProtocol="SMTP" |
|
||||||
|
|
||||||
# 邮件服务host |
|
||||||
mailServerHost="smtp.exmail.qq.com" |
|
||||||
|
|
||||||
# 邮件服务端口 |
|
||||||
mailServerPort="25" |
|
||||||
|
|
||||||
# 发送人 |
|
||||||
mailSender="xxxxxxxxxx" |
|
||||||
|
|
||||||
# 发送人密码 |
|
||||||
mailPassword="xxxxxxxxxx" |
|
||||||
|
|
||||||
# 下载Excel路径 |
|
||||||
xlsFilePath="/tmp/xls" |
|
||||||
|
|
||||||
|
|
||||||
# hadoop 配置 |
|
||||||
# 是否启动hdfs,如果启动则为true,需要配置以下hadoop相关参数; |
|
||||||
# 不启动设置为false,如果为false,以下配置不需要修改 |
|
||||||
hdfsStartupSate="false" |
|
||||||
|
|
||||||
# namenode地址,支持HA,需要将core-site.xml和hdfs-site.xml放到conf目录下 |
|
||||||
namenodeFs="hdfs://mycluster:8020" |
|
||||||
|
|
||||||
# resourcemanager HA配置,如果是单resourcemanager,这里为空即可 |
|
||||||
yarnHaIps="192.168.xx.xx,192.168.xx.xx" |
|
||||||
|
|
||||||
# 如果是单 resourcemanager,只需要配置一个主机名称,如果是resourcemanager HA,则默认配置就好 |
|
||||||
singleYarnIp="ark1" |
|
||||||
|
|
||||||
# hdfs根路径,根路径的owner必须是部署用户 |
|
||||||
hdfsPath="/escheduler" |
|
||||||
|
|
||||||
# common 配置 |
|
||||||
# 程序路径 |
|
||||||
programPath="/tmp/escheduler" |
|
||||||
|
|
||||||
#下载路径 |
|
||||||
downloadPath="/tmp/escheduler/download" |
|
||||||
|
|
||||||
# 任务执行路径 |
|
||||||
execPath="/tmp/escheduler/exec" |
|
||||||
|
|
||||||
# SHELL环境变量路径 |
|
||||||
shellEnvPath="$installPath/conf/env/.escheduler_env.sh" |
|
||||||
|
|
||||||
# Python换将变量路径 |
|
||||||
pythonEnvPath="$installPath/conf/env/escheduler_env.py" |
|
||||||
|
|
||||||
# 资源文件的后缀 |
|
||||||
resSuffixs="txt,log,sh,conf,cfg,py,java,sql,hql,xml" |
|
||||||
|
|
||||||
# 开发状态,如果是true,对于SHELL脚本可以在execPath目录下查看封装后的SHELL脚本,如果是false则执行完成直接删除 |
|
||||||
devState="true" |
|
||||||
|
|
||||||
# zk 配置 |
|
||||||
# zk根目录 |
|
||||||
zkRoot="/escheduler" |
|
||||||
|
|
||||||
# 用来记录挂掉机器的zk目录 |
|
||||||
zkDeadServers="/escheduler/dead-servers" |
|
||||||
|
|
||||||
# masters目录 |
|
||||||
zkMasters="/escheduler/masters" |
|
||||||
|
|
||||||
# workers目录 |
|
||||||
zkWorkers="/escheduler/workers" |
|
||||||
|
|
||||||
# zk master分布式锁 |
|
||||||
mastersLock="/escheduler/lock/masters" |
|
||||||
|
|
||||||
# zk worker分布式锁 |
|
||||||
workersLock="/escheduler/lock/workers" |
|
||||||
|
|
||||||
# zk master容错分布式锁 |
|
||||||
mastersFailover="/escheduler/lock/failover/masters" |
|
||||||
|
|
||||||
# zk worker容错分布式锁 |
|
||||||
workersFailover="/escheduler/lock/failover/masters" |
|
||||||
|
|
||||||
# zk session 超时 |
|
||||||
zkSessionTimeout="300" |
|
||||||
|
|
||||||
# zk 连接超时 |
|
||||||
zkConnectionTimeout="300" |
|
||||||
|
|
||||||
# zk 重试间隔 |
|
||||||
zkRetrySleep="100" |
|
||||||
|
|
||||||
# zk重试最大次数 |
|
||||||
zkRetryMaxtime="5" |
|
||||||
|
|
||||||
|
|
||||||
# master 配置 |
|
||||||
# master执行线程最大数,流程实例的最大并行度 |
|
||||||
masterExecThreads="100" |
|
||||||
|
|
||||||
# master任务执行线程最大数,每一个流程实例的最大并行度 |
|
||||||
masterExecTaskNum="20" |
|
||||||
|
|
||||||
# master心跳间隔 |
|
||||||
masterHeartbeatInterval="10" |
|
||||||
|
|
||||||
# master任务提交重试次数 |
|
||||||
masterTaskCommitRetryTimes="5" |
|
||||||
|
|
||||||
# master任务提交重试时间间隔 |
|
||||||
masterTaskCommitInterval="100" |
|
||||||
|
|
||||||
# master最大cpu平均负载,用来判断master是否还有执行能力 |
|
||||||
masterMaxCupLoadAvg="10" |
|
||||||
|
|
||||||
# master预留内存,用来判断master是否还有执行能力 |
|
||||||
masterReservedMemory="1" |
|
||||||
|
|
||||||
|
|
||||||
# worker 配置 |
|
||||||
# worker执行线程 |
|
||||||
workerExecThreads="100" |
|
||||||
|
|
||||||
# worker心跳间隔 |
|
||||||
workerHeartbeatInterval="10" |
|
||||||
|
|
||||||
# worker一次抓取任务数 |
|
||||||
workerFetchTaskNum="10" |
|
||||||
|
|
||||||
# worker最大cpu平均负载,用来判断master是否还有执行能力 |
|
||||||
workerMaxCupLoadAvg="10" |
|
||||||
|
|
||||||
# worker预留内存,用来判断master是否还有执行能力 |
|
||||||
workerReservedMemory="1" |
|
||||||
|
|
||||||
# api 配置 |
|
||||||
# api 服务端口 |
|
||||||
apiServerPort="12345" |
|
||||||
|
|
||||||
# api session 超时 |
|
||||||
apiServerSessionTimeout="7200" |
|
||||||
|
|
||||||
# api 上下文路径 |
|
||||||
apiServerContextPath="/escheduler/" |
|
||||||
|
|
||||||
# spring 最大文件大小 |
|
||||||
springMaxFileSize="1024MB" |
|
||||||
|
|
||||||
# spring 最大请求文件大小 |
|
||||||
springMaxRequestSize="1024MB" |
|
||||||
|
|
||||||
# api 最大post请求大小 |
|
||||||
apiMaxHttpPostSize="5000000" |
|
||||||
|
|
||||||
# 1,替换文件 |
|
||||||
echo "1,替换文件" |
|
||||||
sed -i ${txt} "s#spring.datasource.url.*#spring.datasource.url=jdbc:mysql://${mysqlHost}/${mysqlDb}?characterEncoding=UTF-8#g" conf/dao/data_source.properties |
|
||||||
sed -i ${txt} "s#spring.datasource.username.*#spring.datasource.username=${mysqlUserName}#g" conf/dao/data_source.properties |
|
||||||
sed -i ${txt} "s#spring.datasource.password.*#spring.datasource.password=${mysqlPassword}#g" conf/dao/data_source.properties |
|
||||||
|
|
||||||
sed -i ${txt} "s#org.quartz.dataSource.myDs.URL.*#org.quartz.dataSource.myDs.URL=jdbc:mysql://${mysqlHost}/${mysqlDb}?characterEncoding=UTF-8#g" conf/quartz.properties |
|
||||||
sed -i ${txt} "s#org.quartz.dataSource.myDs.user.*#org.quartz.dataSource.myDs.user=${mysqlUserName}#g" conf/quartz.properties |
|
||||||
sed -i ${txt} "s#org.quartz.dataSource.myDs.password.*#org.quartz.dataSource.myDs.password=${mysqlPassword}#g" conf/quartz.properties |
|
||||||
|
|
||||||
|
|
||||||
sed -i ${txt} "s#fs.defaultFS.*#fs.defaultFS=${namenodeFs}#g" conf/common/hadoop/hadoop.properties |
|
||||||
sed -i ${txt} "s#yarn.resourcemanager.ha.rm.ids.*#yarn.resourcemanager.ha.rm.ids=${yarnHaIps}#g" conf/common/hadoop/hadoop.properties |
|
||||||
sed -i ${txt} "s#yarn.application.status.address.*#yarn.application.status.address=http://${singleYarnIp}:8088/ws/v1/cluster/apps/%s#g" conf/common/hadoop/hadoop.properties |
|
||||||
|
|
||||||
sed -i ${txt} "s#data.basedir.path.*#data.basedir.path=${programPath}#g" conf/common/common.properties |
|
||||||
sed -i ${txt} "s#data.download.basedir.path.*#data.download.basedir.path=${downloadPath}#g" conf/common/common.properties |
|
||||||
sed -i ${txt} "s#process.exec.basepath.*#process.exec.basepath=${execPath}#g" conf/common/common.properties |
|
||||||
sed -i ${txt} "s#data.store2hdfs.basepath.*#data.store2hdfs.basepath=${hdfsPath}#g" conf/common/common.properties |
|
||||||
sed -i ${txt} "s#hdfs.startup.state.*#hdfs.startup.state=${hdfsStartupSate}#g" conf/common/common.properties |
|
||||||
sed -i ${txt} "s#escheduler.env.path.*#escheduler.env.path=${shellEnvPath}#g" conf/common/common.properties |
|
||||||
sed -i ${txt} "s#escheduler.env.py.*#escheduler.env.py=${pythonEnvPath}#g" conf/common/common.properties |
|
||||||
sed -i ${txt} "s#resource.view.suffixs.*#resource.view.suffixs=${resSuffixs}#g" conf/common/common.properties |
|
||||||
sed -i ${txt} "s#development.state.*#development.state=${devState}#g" conf/common/common.properties |
|
||||||
|
|
||||||
sed -i ${txt} "s#zookeeper.quorum.*#zookeeper.quorum=${zkQuorum}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.escheduler.root.*#zookeeper.escheduler.root=${zkRoot}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.escheduler.dead.servers.*#zookeeper.escheduler.dead.servers=${zkDeadServers}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.escheduler.masters.*#zookeeper.escheduler.masters=${zkMasters}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.escheduler.workers.*#zookeeper.escheduler.workers=${zkWorkers}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.escheduler.lock.masters.*#zookeeper.escheduler.lock.masters=${mastersLock}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.escheduler.lock.workers.*#zookeeper.escheduler.lock.workers=${workersLock}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.escheduler.lock.failover.masters.*#zookeeper.escheduler.lock.failover.masters=${mastersFailover}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.escheduler.lock.failover.workers.*#zookeeper.escheduler.lock.failover.workers=${workersFailover}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.session.timeout.*#zookeeper.session.timeout=${zkSessionTimeout}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.connection.timeout.*#zookeeper.connection.timeout=${zkConnectionTimeout}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.retry.sleep.*#zookeeper.retry.sleep=${zkRetrySleep}#g" conf/zookeeper.properties |
|
||||||
sed -i ${txt} "s#zookeeper.retry.maxtime.*#zookeeper.retry.maxtime=${zkRetryMaxtime}#g" conf/zookeeper.properties |
|
||||||
|
|
||||||
sed -i ${txt} "s#master.exec.threads.*#master.exec.threads=${masterExecThreads}#g" conf/master.properties |
|
||||||
sed -i ${txt} "s#master.exec.task.number.*#master.exec.task.number=${masterExecTaskNum}#g" conf/master.properties |
|
||||||
sed -i ${txt} "s#master.heartbeat.interval.*#master.heartbeat.interval=${masterHeartbeatInterval}#g" conf/master.properties |
|
||||||
sed -i ${txt} "s#master.task.commit.retryTimes.*#master.task.commit.retryTimes=${masterTaskCommitRetryTimes}#g" conf/master.properties |
|
||||||
sed -i ${txt} "s#master.task.commit.interval.*#master.task.commit.interval=${masterTaskCommitInterval}#g" conf/master.properties |
|
||||||
sed -i ${txt} "s#master.max.cpuload.avg.*#master.max.cpuload.avg=${masterMaxCupLoadAvg}#g" conf/master.properties |
|
||||||
sed -i ${txt} "s#master.reserved.memory.*#master.reserved.memory=${masterReservedMemory}#g" conf/master.properties |
|
||||||
|
|
||||||
|
|
||||||
sed -i ${txt} "s#worker.exec.threads.*#worker.exec.threads=${workerExecThreads}#g" conf/worker.properties |
|
||||||
sed -i ${txt} "s#worker.heartbeat.interval.*#worker.heartbeat.interval=${workerHeartbeatInterval}#g" conf/worker.properties |
|
||||||
sed -i ${txt} "s#worker.fetch.task.num.*#worker.fetch.task.num=${workerFetchTaskNum}#g" conf/worker.properties |
|
||||||
sed -i ${txt} "s#worker.max.cpuload.avg.*#worker.max.cpuload.avg=${workerMaxCupLoadAvg}#g" conf/worker.properties |
|
||||||
sed -i ${txt} "s#worker.reserved.memory.*#worker.reserved.memory=${workerReservedMemory}#g" conf/worker.properties |
|
||||||
|
|
||||||
|
|
||||||
sed -i ${txt} "s#server.port.*#server.port=${apiServerPort}#g" conf/application.properties |
|
||||||
sed -i ${txt} "s#server.session.timeout.*#server.session.timeout=${apiServerSessionTimeout}#g" conf/application.properties |
|
||||||
sed -i ${txt} "s#server.context-path.*#server.context-path=${apiServerContextPath}#g" conf/application.properties |
|
||||||
sed -i ${txt} "s#spring.http.multipart.max-file-size.*#spring.http.multipart.max-file-size=${springMaxFileSize}#g" conf/application.properties |
|
||||||
sed -i ${txt} "s#spring.http.multipart.max-request-size.*#spring.http.multipart.max-request-size=${springMaxRequestSize}#g" conf/application.properties |
|
||||||
sed -i ${txt} "s#server.max-http-post-size.*#server.max-http-post-size=${apiMaxHttpPostSize}#g" conf/application.properties |
|
||||||
|
|
||||||
|
|
||||||
sed -i ${txt} "s#mail.protocol.*#mail.protocol=${mailProtocol}#g" conf/alert.properties |
|
||||||
sed -i ${txt} "s#mail.server.host.*#mail.server.host=${mailServerHost}#g" conf/alert.properties |
|
||||||
sed -i ${txt} "s#mail.server.port.*#mail.server.port=${mailServerPort}#g" conf/alert.properties |
|
||||||
sed -i ${txt} "s#mail.sender.*#mail.sender=${mailSender}#g" conf/alert.properties |
|
||||||
sed -i ${txt} "s#mail.passwd.*#mail.passwd=${mailPassword}#g" conf/alert.properties |
|
||||||
sed -i ${txt} "s#xls.file.path.*#xls.file.path=${xlsFilePath}#g" conf/alert.properties |
|
||||||
|
|
||||||
|
|
||||||
sed -i ${txt} "s#installPath.*#installPath=${installPath}#g" conf/config/install_config.conf |
|
||||||
sed -i ${txt} "s#deployUser.*#deployUser=${deployUser}#g" conf/config/install_config.conf |
|
||||||
sed -i ${txt} "s#ips.*#ips=${ips}#g" conf/config/install_config.conf |
|
||||||
|
|
||||||
|
|
||||||
sed -i ${txt} "s#masters.*#masters=${masters}#g" conf/config/run_config.conf |
|
||||||
sed -i ${txt} "s#workers.*#workers=${workers}#g" conf/config/run_config.conf |
|
||||||
sed -i ${txt} "s#alertServer.*#alertServer=${alertServer}#g" conf/config/run_config.conf |
|
||||||
sed -i ${txt} "s#apiServers.*#apiServers=${apiServers}#g" conf/config/run_config.conf |
|
@ -1,105 +0,0 @@ |
|||||||
#!/bin/sh |
|
||||||
|
|
||||||
# execute any pre-init scripts |
|
||||||
for i in /scripts/pre-init.d/*sh |
|
||||||
do |
|
||||||
if [ -e "${i}" ]; then |
|
||||||
echo "[i] pre-init.d - processing $i" |
|
||||||
. "${i}" |
|
||||||
fi |
|
||||||
done |
|
||||||
|
|
||||||
if [ -d "/run/mysqld" ]; then |
|
||||||
echo "[i] mysqld already present, skipping creation" |
|
||||||
chown -R mysql:mysql /run/mysqld |
|
||||||
else |
|
||||||
echo "[i] mysqld not found, creating...." |
|
||||||
mkdir -p /run/mysqld |
|
||||||
chown -R mysql:mysql /run/mysqld |
|
||||||
fi |
|
||||||
|
|
||||||
if [ -d /var/lib/mysql/mysql ]; then |
|
||||||
echo "[i] MySQL directory already present, skipping creation" |
|
||||||
chown -R mysql:mysql /var/lib/mysql |
|
||||||
else |
|
||||||
echo "[i] MySQL data directory not found, creating initial DBs" |
|
||||||
|
|
||||||
chown -R mysql:mysql /var/lib/mysql |
|
||||||
|
|
||||||
mysql_install_db --user=mysql --ldata=/var/lib/mysql > /dev/null |
|
||||||
|
|
||||||
if [ "$MYSQL_ROOT_PASSWORD" = "" ]; then |
|
||||||
MYSQL_ROOT_PASSWORD=`pwgen 16 1` |
|
||||||
echo "[i] MySQL root Password: $MYSQL_ROOT_PASSWORD" |
|
||||||
fi |
|
||||||
|
|
||||||
MYSQL_DATABASE="easyscheduler" |
|
||||||
MYSQL_USER="easyscheduler" |
|
||||||
MYSQL_PASSWORD="easyschedulereasyscheduler" |
|
||||||
|
|
||||||
tfile=`mktemp` |
|
||||||
if [ ! -f "$tfile" ]; then |
|
||||||
return 1 |
|
||||||
fi |
|
||||||
|
|
||||||
cat << EOF > $tfile |
|
||||||
USE mysql; |
|
||||||
FLUSH PRIVILEGES ; |
|
||||||
GRANT ALL ON *.* TO 'root'@'%' identified by '$MYSQL_ROOT_PASSWORD' WITH GRANT OPTION ; |
|
||||||
GRANT ALL ON *.* TO 'root'@'localhost' identified by '$MYSQL_ROOT_PASSWORD' WITH GRANT OPTION ; |
|
||||||
SET PASSWORD FOR 'root'@'localhost'=PASSWORD('${MYSQL_ROOT_PASSWORD}') ; |
|
||||||
DROP DATABASE IF EXISTS test ; |
|
||||||
FLUSH PRIVILEGES ; |
|
||||||
EOF |
|
||||||
|
|
||||||
if [ "$MYSQL_DATABASE" != "" ]; then |
|
||||||
echo "[i] Creating database: $MYSQL_DATABASE" |
|
||||||
echo "CREATE DATABASE IF NOT EXISTS \`$MYSQL_DATABASE\` CHARACTER SET utf8 COLLATE utf8_general_ci;" >> $tfile |
|
||||||
|
|
||||||
if [ "$MYSQL_USER" != "" ]; then |
|
||||||
echo "[i] Creating user: $MYSQL_USER with password $MYSQL_PASSWORD" |
|
||||||
echo "GRANT ALL ON \`$MYSQL_DATABASE\`.* to '$MYSQL_USER'@'%' IDENTIFIED BY '$MYSQL_PASSWORD';" >> $tfile |
|
||||||
fi |
|
||||||
fi |
|
||||||
|
|
||||||
/usr/bin/mysqld --user=mysql --bootstrap --verbose=0 --skip-name-resolve --skip-networking=0 < $tfile |
|
||||||
rm -f $tfile |
|
||||||
|
|
||||||
for f in /docker-entrypoint-initdb.d/*; do |
|
||||||
case "$f" in |
|
||||||
*.sql) echo "$0: running $f"; /usr/bin/mysqld --user=mysql --bootstrap --verbose=0 --skip-name-resolve --skip-networking=0 < "$f"; echo ;; |
|
||||||
*.sql.gz) echo "$0: running $f"; gunzip -c "$f" | /usr/bin/mysqld --user=mysql --bootstrap --verbose=0 --skip-name-resolve --skip-networking=0 < "$f"; echo ;; |
|
||||||
*) echo "$0: ignoring or entrypoint initdb empty $f" ;; |
|
||||||
esac |
|
||||||
echo |
|
||||||
done |
|
||||||
|
|
||||||
echo |
|
||||||
echo 'MySQL init process done. Ready for start up.' |
|
||||||
echo |
|
||||||
|
|
||||||
echo "exec /usr/bin/mysqld --user=mysql --console --skip-name-resolve --skip-networking=0" "$@" |
|
||||||
fi |
|
||||||
|
|
||||||
# execute any pre-exec scripts |
|
||||||
for i in /scripts/pre-exec.d/*sh |
|
||||||
do |
|
||||||
if [ -e "${i}" ]; then |
|
||||||
echo "[i] pre-exec.d - processing $i" |
|
||||||
. ${i} |
|
||||||
fi |
|
||||||
done |
|
||||||
|
|
||||||
mysql -ueasyscheduler -peasyschedulereasyscheduler --one-database easyscheduler -h127.0.0.1 < /opt/easyscheduler/sql/escheduler.sql |
|
||||||
mysql -ueasyscheduler -peasyschedulereasyscheduler --one-database easyscheduler -h127.0.0.1 < /opt/easyscheduler/sql/quartz.sql |
|
||||||
source /etc/profile |
|
||||||
zkServer.sh start |
|
||||||
cd /opt/easyscheduler |
|
||||||
rm -rf /etc/nginx/conf.d/default.conf |
|
||||||
sh ./bin/escheduler-daemon.sh start master-server |
|
||||||
sh ./bin/escheduler-daemon.sh start worker-server |
|
||||||
sh ./bin/escheduler-daemon.sh start api-server |
|
||||||
sh ./bin/escheduler-daemon.sh start logger-server |
|
||||||
sh ./bin/escheduler-daemon.sh start alert-server |
|
||||||
nginx -c /etc/nginx/nginx.conf |
|
||||||
exec /usr/bin/mysqld --user=mysql --console --skip-name-resolve --skip-networking=0 $@ |
|
@ -1,30 +0,0 @@ |
|||||||
# The number of milliseconds of each tick |
|
||||||
tickTime=2000 |
|
||||||
# The number of ticks that the initial |
|
||||||
# synchronization phase can take |
|
||||||
initLimit=10 |
|
||||||
# The number of ticks that can pass between |
|
||||||
# sending a request and getting an acknowledgement |
|
||||||
syncLimit=5 |
|
||||||
# the directory where the snapshot is stored. |
|
||||||
# do not use /tmp for storage, /tmp here is just |
|
||||||
# example sakes. |
|
||||||
dataDir=/tmp/zookeeper |
|
||||||
# the port at which the clients will connect |
|
||||||
clientPort=2181 |
|
||||||
# the maximum number of client connections. |
|
||||||
# increase this if you need to handle more clients |
|
||||||
#maxClientCnxns=60 |
|
||||||
# |
|
||||||
# Be sure to read the maintenance section of the |
|
||||||
# administrator guide before turning on autopurge. |
|
||||||
# |
|
||||||
# http://zookeeper.apache.org/doc/current/zookeeperAdmin.html#sc_maintenance |
|
||||||
# |
|
||||||
# The number of snapshots to retain in dataDir |
|
||||||
#autopurge.snapRetainCount=3 |
|
||||||
# Purge task interval in hours |
|
||||||
# Set to "0" to disable auto purge feature |
|
||||||
#autopurge.purgeInterval=1 |
|
||||||
dataDir=/opt/zookeeper/data |
|
||||||
dataLogDir=/opt/zookeeper/logs |
|
@ -1,109 +0,0 @@ |
|||||||
/* |
|
||||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
|
||||||
* contributor license agreements. See the NOTICE file distributed with |
|
||||||
* this work for additional information regarding copyright ownership. |
|
||||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
|
||||||
* (the "License"); you may not use this file except in compliance with |
|
||||||
* the License. You may obtain a copy of the License at |
|
||||||
* |
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
* |
|
||||||
* Unless required by applicable law or agreed to in writing, software |
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS, |
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
||||||
* See the License for the specific language governing permissions and |
|
||||||
* limitations under the License. |
|
||||||
*/ |
|
||||||
package cn.escheduler.api.configuration; |
|
||||||
|
|
||||||
import com.google.common.base.MoreObjects; |
|
||||||
import com.google.common.base.Optional; |
|
||||||
import com.google.common.collect.Lists; |
|
||||||
import io.swagger.annotations.ApiImplicitParam; |
|
||||||
import org.springframework.beans.factory.annotation.Autowired; |
|
||||||
import org.springframework.context.MessageSource; |
|
||||||
import org.springframework.context.i18n.LocaleContextHolder; |
|
||||||
import org.springframework.core.Ordered; |
|
||||||
import org.springframework.core.annotation.Order; |
|
||||||
import org.springframework.stereotype.Component; |
|
||||||
import springfox.documentation.builders.ParameterBuilder; |
|
||||||
import springfox.documentation.schema.ModelRef; |
|
||||||
import springfox.documentation.service.AllowableValues; |
|
||||||
import springfox.documentation.service.Parameter; |
|
||||||
import springfox.documentation.spi.DocumentationType; |
|
||||||
import springfox.documentation.spi.service.OperationBuilderPlugin; |
|
||||||
import springfox.documentation.spi.service.contexts.OperationContext; |
|
||||||
import springfox.documentation.spring.web.DescriptionResolver; |
|
||||||
import springfox.documentation.swagger.common.SwaggerPluginSupport; |
|
||||||
|
|
||||||
import java.util.List; |
|
||||||
import java.util.Locale; |
|
||||||
|
|
||||||
import static com.google.common.base.Strings.emptyToNull; |
|
||||||
import static springfox.documentation.schema.Types.isBaseType; |
|
||||||
import static springfox.documentation.swagger.common.SwaggerPluginSupport.SWAGGER_PLUGIN_ORDER; |
|
||||||
import static springfox.documentation.swagger.readers.parameter.Examples.examples; |
|
||||||
import static springfox.documentation.swagger.schema.ApiModelProperties.allowableValueFromString; |
|
||||||
|
|
||||||
@Component |
|
||||||
@Order(Ordered.HIGHEST_PRECEDENCE - 10) |
|
||||||
public class SwaggerApiImplicitParamPlugin implements OperationBuilderPlugin { |
|
||||||
|
|
||||||
@Autowired |
|
||||||
private DescriptionResolver descriptions; |
|
||||||
|
|
||||||
@Autowired |
|
||||||
private MessageSource messageSource; |
|
||||||
|
|
||||||
static Parameter implicitParameter(MessageSource messageSource, DescriptionResolver descriptions, ApiImplicitParam param) { |
|
||||||
Locale locale = LocaleContextHolder.getLocale(); |
|
||||||
|
|
||||||
ModelRef modelRef = maybeGetModelRef(param); |
|
||||||
return new ParameterBuilder() |
|
||||||
.name(param.name()) |
|
||||||
.description(descriptions.resolve(messageSource.getMessage(param.value(), null, locale))) |
|
||||||
.defaultValue(param.defaultValue()) |
|
||||||
.required(param.required()) |
|
||||||
.allowMultiple(param.allowMultiple()) |
|
||||||
.modelRef(modelRef) |
|
||||||
.allowableValues(allowableValueFromString(param.allowableValues())) |
|
||||||
.parameterType(emptyToNull(param.paramType())) |
|
||||||
.parameterAccess(param.access()) |
|
||||||
.order(SWAGGER_PLUGIN_ORDER) |
|
||||||
.scalarExample(param.example()) |
|
||||||
.complexExamples(examples(param.examples())) |
|
||||||
.build(); |
|
||||||
} |
|
||||||
|
|
||||||
private static ModelRef maybeGetModelRef(ApiImplicitParam param) { |
|
||||||
String dataType = MoreObjects.firstNonNull(emptyToNull(param.dataType()), "string"); |
|
||||||
AllowableValues allowableValues = null; |
|
||||||
if (isBaseType(dataType)) { |
|
||||||
allowableValues = allowableValueFromString(param.allowableValues()); |
|
||||||
} |
|
||||||
if (param.allowMultiple()) { |
|
||||||
return new ModelRef("", new ModelRef(dataType, allowableValues)); |
|
||||||
} |
|
||||||
return new ModelRef(dataType, allowableValues); |
|
||||||
} |
|
||||||
|
|
||||||
@Override |
|
||||||
public void apply(OperationContext context) { |
|
||||||
context.operationBuilder().parameters(readParameters(context)); |
|
||||||
} |
|
||||||
|
|
||||||
@Override |
|
||||||
public boolean supports(DocumentationType delimiter) { |
|
||||||
return SwaggerPluginSupport.pluginDoesApply(delimiter); |
|
||||||
} |
|
||||||
|
|
||||||
private List<Parameter> readParameters(OperationContext context) { |
|
||||||
Optional<ApiImplicitParam> annotation = context.findAnnotation(ApiImplicitParam.class); |
|
||||||
List<Parameter> parameters = Lists.newArrayList(); |
|
||||||
if (annotation.isPresent()) { |
|
||||||
parameters.add(implicitParameter(messageSource, descriptions, annotation.get())); |
|
||||||
} |
|
||||||
return parameters; |
|
||||||
} |
|
||||||
|
|
||||||
} |
|
@ -1,71 +0,0 @@ |
|||||||
/* |
|
||||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
|
||||||
* contributor license agreements. See the NOTICE file distributed with |
|
||||||
* this work for additional information regarding copyright ownership. |
|
||||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
|
||||||
* (the "License"); you may not use this file except in compliance with |
|
||||||
* the License. You may obtain a copy of the License at |
|
||||||
* |
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
* |
|
||||||
* Unless required by applicable law or agreed to in writing, software |
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS, |
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
||||||
* See the License for the specific language governing permissions and |
|
||||||
* limitations under the License. |
|
||||||
*/ |
|
||||||
package cn.escheduler.api.configuration; |
|
||||||
|
|
||||||
import com.google.common.base.Optional; |
|
||||||
import com.google.common.collect.Lists; |
|
||||||
import io.swagger.annotations.ApiImplicitParam; |
|
||||||
import io.swagger.annotations.ApiImplicitParams; |
|
||||||
import org.springframework.beans.factory.annotation.Autowired; |
|
||||||
import org.springframework.context.MessageSource; |
|
||||||
import org.springframework.core.Ordered; |
|
||||||
import org.springframework.core.annotation.Order; |
|
||||||
import org.springframework.stereotype.Component; |
|
||||||
import springfox.documentation.service.Parameter; |
|
||||||
import springfox.documentation.spi.DocumentationType; |
|
||||||
import springfox.documentation.spi.service.OperationBuilderPlugin; |
|
||||||
import springfox.documentation.spi.service.contexts.OperationContext; |
|
||||||
import springfox.documentation.spring.web.DescriptionResolver; |
|
||||||
|
|
||||||
import java.util.List; |
|
||||||
|
|
||||||
import static springfox.documentation.swagger.common.SwaggerPluginSupport.pluginDoesApply; |
|
||||||
|
|
||||||
@Component |
|
||||||
@Order(Ordered.HIGHEST_PRECEDENCE - 10) |
|
||||||
public class SwaggerApiImplicitParamsPlugin implements OperationBuilderPlugin { |
|
||||||
|
|
||||||
@Autowired |
|
||||||
private DescriptionResolver descriptions; |
|
||||||
@Autowired |
|
||||||
private MessageSource messageSource; |
|
||||||
|
|
||||||
@Override |
|
||||||
public void apply(OperationContext context) { |
|
||||||
context.operationBuilder().parameters(readParameters(context)); |
|
||||||
} |
|
||||||
|
|
||||||
@Override |
|
||||||
public boolean supports(DocumentationType delimiter) { |
|
||||||
return pluginDoesApply(delimiter); |
|
||||||
} |
|
||||||
|
|
||||||
private List<Parameter> readParameters(OperationContext context) { |
|
||||||
Optional<ApiImplicitParams> annotation = context.findAnnotation(ApiImplicitParams.class); |
|
||||||
|
|
||||||
List<Parameter> parameters = Lists.newArrayList(); |
|
||||||
if (annotation.isPresent()) { |
|
||||||
for (ApiImplicitParam param : annotation.get().value()) { |
|
||||||
parameters.add(SwaggerApiImplicitParamPlugin.implicitParameter(messageSource, descriptions, param)); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
return parameters; |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
} |
|
@ -1,74 +0,0 @@ |
|||||||
package cn.escheduler.api.configuration; |
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
import com.fasterxml.classmate.TypeResolver; |
|
||||||
import io.swagger.annotations.ApiModel; |
|
||||||
import org.springframework.beans.factory.annotation.Autowired; |
|
||||||
import org.springframework.context.MessageSource; |
|
||||||
import org.springframework.context.i18n.LocaleContextHolder; |
|
||||||
import org.springframework.core.Ordered; |
|
||||||
import org.springframework.core.annotation.AnnotationUtils; |
|
||||||
import org.springframework.core.annotation.Order; |
|
||||||
import org.springframework.stereotype.Component; |
|
||||||
import springfox.documentation.schema.ModelReference; |
|
||||||
import springfox.documentation.schema.TypeNameExtractor; |
|
||||||
import springfox.documentation.spi.DocumentationType; |
|
||||||
import springfox.documentation.spi.schema.ModelBuilderPlugin; |
|
||||||
import springfox.documentation.spi.schema.contexts.ModelContext; |
|
||||||
|
|
||||||
import java.util.ArrayList; |
|
||||||
import java.util.List; |
|
||||||
import java.util.Locale; |
|
||||||
|
|
||||||
import static springfox.documentation.schema.ResolvedTypes.*; |
|
||||||
import static springfox.documentation.swagger.common.SwaggerPluginSupport.*; |
|
||||||
|
|
||||||
/** |
|
||||||
* NOTE : not useful |
|
||||||
*/ |
|
||||||
@Component |
|
||||||
@Order(Ordered.HIGHEST_PRECEDENCE - 10) |
|
||||||
public class SwaggerApiModelPlugin implements ModelBuilderPlugin { |
|
||||||
|
|
||||||
@Autowired |
|
||||||
private TypeResolver typeResolver; |
|
||||||
@Autowired |
|
||||||
private TypeNameExtractor typeNameExtractor; |
|
||||||
@Autowired |
|
||||||
private MessageSource messageSource; |
|
||||||
|
|
||||||
@Override |
|
||||||
public void apply(ModelContext context) { |
|
||||||
ApiModel annotation = AnnotationUtils.findAnnotation(forClass(context), ApiModel.class); |
|
||||||
if (annotation != null) { |
|
||||||
List<ModelReference> modelRefs = new ArrayList<ModelReference>(); |
|
||||||
for (Class<?> each : annotation.subTypes()) { |
|
||||||
modelRefs.add(modelRefFactory(context, typeNameExtractor) |
|
||||||
.apply(typeResolver.resolve(each))); |
|
||||||
} |
|
||||||
Locale locale = LocaleContextHolder.getLocale(); |
|
||||||
|
|
||||||
context.getBuilder() |
|
||||||
.description(messageSource.getMessage(annotation.description(), null, locale)) |
|
||||||
.discriminator(annotation.discriminator()) |
|
||||||
.subTypes(modelRefs); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
private Class<?> forClass(ModelContext context) { |
|
||||||
return typeResolver.resolve(context.getType()).getErasedType(); |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
// @Override
|
|
||||||
// public boolean supports(DocumentationType delimiter) {
|
|
||||||
// return pluginDoesApply(delimiter);
|
|
||||||
// }
|
|
||||||
|
|
||||||
@Override |
|
||||||
public boolean supports(DocumentationType delimiter) { |
|
||||||
return true; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
@ -1,219 +0,0 @@ |
|||||||
package cn.escheduler.api.configuration; |
|
||||||
|
|
||||||
|
|
||||||
import com.fasterxml.classmate.ResolvedType; |
|
||||||
import com.fasterxml.classmate.TypeResolver; |
|
||||||
import com.google.common.base.Function; |
|
||||||
import com.google.common.base.Optional; |
|
||||||
import com.google.common.base.Splitter; |
|
||||||
import com.google.common.base.Strings; |
|
||||||
import com.google.common.collect.Lists; |
|
||||||
import io.swagger.annotations.ApiModelProperty; |
|
||||||
import org.slf4j.Logger; |
|
||||||
import org.slf4j.LoggerFactory; |
|
||||||
import org.springframework.beans.factory.annotation.Autowired; |
|
||||||
import org.springframework.context.MessageSource; |
|
||||||
import org.springframework.context.i18n.LocaleContextHolder; |
|
||||||
import org.springframework.core.Ordered; |
|
||||||
import org.springframework.core.annotation.AnnotationUtils; |
|
||||||
import org.springframework.core.annotation.Order; |
|
||||||
import org.springframework.stereotype.Component; |
|
||||||
import springfox.documentation.service.AllowableListValues; |
|
||||||
import springfox.documentation.service.AllowableRangeValues; |
|
||||||
import springfox.documentation.service.AllowableValues; |
|
||||||
import springfox.documentation.spi.DocumentationType; |
|
||||||
import springfox.documentation.spi.schema.ModelPropertyBuilderPlugin; |
|
||||||
import springfox.documentation.spi.schema.contexts.ModelPropertyContext; |
|
||||||
import springfox.documentation.spring.web.DescriptionResolver; |
|
||||||
import springfox.documentation.swagger.common.SwaggerPluginSupport; |
|
||||||
import springfox.documentation.swagger.schema.ApiModelProperties; |
|
||||||
|
|
||||||
import java.lang.reflect.AnnotatedElement; |
|
||||||
import java.lang.reflect.Method; |
|
||||||
import java.util.Collections; |
|
||||||
import java.util.List; |
|
||||||
import java.util.Locale; |
|
||||||
import java.util.regex.Matcher; |
|
||||||
import java.util.regex.Pattern; |
|
||||||
|
|
||||||
import static com.google.common.collect.Lists.newArrayList; |
|
||||||
import static org.springframework.util.StringUtils.hasText; |
|
||||||
import static springfox.documentation.schema.Annotations.*; |
|
||||||
import static springfox.documentation.swagger.schema.ApiModelProperties.*; |
|
||||||
|
|
||||||
@Component |
|
||||||
@Order(Ordered.HIGHEST_PRECEDENCE - 10) |
|
||||||
public class SwaggerApiModelPropertyPlugin implements ModelPropertyBuilderPlugin { |
|
||||||
private static final Logger LOGGER = LoggerFactory.getLogger(ApiModelProperties.class); |
|
||||||
private static final Pattern RANGE_PATTERN = Pattern.compile("range([\\[(])(.*),(.*)([])])$"); |
|
||||||
|
|
||||||
@Autowired |
|
||||||
private DescriptionResolver descriptions; |
|
||||||
@Autowired |
|
||||||
private MessageSource messageSource; |
|
||||||
|
|
||||||
|
|
||||||
@Override |
|
||||||
public void apply(ModelPropertyContext context) { |
|
||||||
Optional<ApiModelProperty> annotation = Optional.absent(); |
|
||||||
|
|
||||||
if (context.getAnnotatedElement().isPresent()) { |
|
||||||
annotation = annotation.or(findApiModePropertyAnnotation(context.getAnnotatedElement().get())); |
|
||||||
} |
|
||||||
if (context.getBeanPropertyDefinition().isPresent()) { |
|
||||||
annotation = annotation.or(findPropertyAnnotation( |
|
||||||
context.getBeanPropertyDefinition().get(), |
|
||||||
ApiModelProperty.class)); |
|
||||||
} |
|
||||||
if (annotation.isPresent()) { |
|
||||||
context.getBuilder() |
|
||||||
.allowableValues(annotation.transform(toAllowableValues()).orNull()) |
|
||||||
.required(annotation.transform(toIsRequired()).or(false)) |
|
||||||
.readOnly(annotation.transform(toIsReadOnly()).or(false)) |
|
||||||
.description(annotation.transform(toDescription(descriptions)).orNull()) |
|
||||||
.isHidden(annotation.transform(toHidden()).or(false)) |
|
||||||
.type(annotation.transform(toType(context.getResolver())).orNull()) |
|
||||||
.position(annotation.transform(toPosition()).or(0)) |
|
||||||
.example(annotation.transform(toExample()).orNull()); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
static Function<ApiModelProperty, AllowableValues> toAllowableValues() { |
|
||||||
return new Function<ApiModelProperty, AllowableValues>() { |
|
||||||
@Override |
|
||||||
public AllowableValues apply(ApiModelProperty annotation) { |
|
||||||
return allowableValueFromString(annotation.allowableValues()); |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
public static AllowableValues allowableValueFromString(String allowableValueString) { |
|
||||||
AllowableValues allowableValues = new AllowableListValues(Lists.<String>newArrayList(), "LIST"); |
|
||||||
String trimmed = allowableValueString.trim(); |
|
||||||
Matcher matcher = RANGE_PATTERN.matcher(trimmed.replaceAll(" ", "")); |
|
||||||
if (matcher.matches()) { |
|
||||||
if (matcher.groupCount() != 4) { |
|
||||||
LOGGER.warn("Unable to parse range specified {} correctly", trimmed); |
|
||||||
} else { |
|
||||||
allowableValues = new AllowableRangeValues( |
|
||||||
matcher.group(2).contains("infinity") ? null : matcher.group(2), |
|
||||||
matcher.group(1).equals("("), |
|
||||||
matcher.group(3).contains("infinity") ? null : matcher.group(3), |
|
||||||
matcher.group(4).equals(")")); |
|
||||||
} |
|
||||||
} else if (trimmed.contains(",")) { |
|
||||||
Iterable<String> split = Splitter.on(',').trimResults().omitEmptyStrings().split(trimmed); |
|
||||||
allowableValues = new AllowableListValues(newArrayList(split), "LIST"); |
|
||||||
} else if (hasText(trimmed)) { |
|
||||||
List<String> singleVal = Collections.singletonList(trimmed); |
|
||||||
allowableValues = new AllowableListValues(singleVal, "LIST"); |
|
||||||
} |
|
||||||
return allowableValues; |
|
||||||
} |
|
||||||
|
|
||||||
static Function<ApiModelProperty, Boolean> toIsRequired() { |
|
||||||
return new Function<ApiModelProperty, Boolean>() { |
|
||||||
@Override |
|
||||||
public Boolean apply(ApiModelProperty annotation) { |
|
||||||
return annotation.required(); |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
static Function<ApiModelProperty, Integer> toPosition() { |
|
||||||
return new Function<ApiModelProperty, Integer>() { |
|
||||||
@Override |
|
||||||
public Integer apply(ApiModelProperty annotation) { |
|
||||||
return annotation.position(); |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
static Function<ApiModelProperty, Boolean> toIsReadOnly() { |
|
||||||
return new Function<ApiModelProperty, Boolean>() { |
|
||||||
@Override |
|
||||||
public Boolean apply(ApiModelProperty annotation) { |
|
||||||
return annotation.readOnly(); |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
static Function<ApiModelProperty, Boolean> toAllowEmptyValue() { |
|
||||||
return new Function<ApiModelProperty, Boolean>() { |
|
||||||
@Override |
|
||||||
public Boolean apply(ApiModelProperty annotation) { |
|
||||||
return annotation.allowEmptyValue(); |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
Function<ApiModelProperty, String> toDescription( |
|
||||||
final DescriptionResolver descriptions) { |
|
||||||
Locale locale = LocaleContextHolder.getLocale(); |
|
||||||
|
|
||||||
return new Function<ApiModelProperty, String>() { |
|
||||||
@Override |
|
||||||
public String apply(ApiModelProperty annotation) { |
|
||||||
String description = ""; |
|
||||||
if (!Strings.isNullOrEmpty(annotation.value())) { |
|
||||||
description = messageSource.getMessage(annotation.value(), null, "" ,locale); |
|
||||||
} else if (!Strings.isNullOrEmpty(annotation.notes())) { |
|
||||||
description = messageSource.getMessage(annotation.notes(), null, "" ,locale); |
|
||||||
} |
|
||||||
return descriptions.resolve(description); |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
static Function<ApiModelProperty, ResolvedType> toType(final TypeResolver resolver) { |
|
||||||
return new Function<ApiModelProperty, ResolvedType>() { |
|
||||||
@Override |
|
||||||
public ResolvedType apply(ApiModelProperty annotation) { |
|
||||||
try { |
|
||||||
return resolver.resolve(Class.forName(annotation.dataType())); |
|
||||||
} catch (ClassNotFoundException e) { |
|
||||||
return resolver.resolve(Object.class); |
|
||||||
} |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
public static Optional<ApiModelProperty> findApiModePropertyAnnotation(AnnotatedElement annotated) { |
|
||||||
Optional<ApiModelProperty> annotation = Optional.absent(); |
|
||||||
|
|
||||||
if (annotated instanceof Method) { |
|
||||||
// If the annotated element is a method we can use this information to check superclasses as well
|
|
||||||
annotation = Optional.fromNullable(AnnotationUtils.findAnnotation(((Method) annotated), ApiModelProperty.class)); |
|
||||||
} |
|
||||||
|
|
||||||
return annotation.or(Optional.fromNullable(AnnotationUtils.getAnnotation(annotated, ApiModelProperty.class))); |
|
||||||
} |
|
||||||
|
|
||||||
static Function<ApiModelProperty, Boolean> toHidden() { |
|
||||||
return new Function<ApiModelProperty, Boolean>() { |
|
||||||
@Override |
|
||||||
public Boolean apply(ApiModelProperty annotation) { |
|
||||||
return annotation.hidden(); |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
static Function<ApiModelProperty, String> toExample() { |
|
||||||
return new Function<ApiModelProperty, String>() { |
|
||||||
@Override |
|
||||||
public String apply(ApiModelProperty annotation) { |
|
||||||
String example = ""; |
|
||||||
if (!Strings.isNullOrEmpty(annotation.example())) { |
|
||||||
example = annotation.example(); |
|
||||||
} |
|
||||||
return example; |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
@Override |
|
||||||
public boolean supports(DocumentationType delimiter) { |
|
||||||
return SwaggerPluginSupport.pluginDoesApply(delimiter); |
|
||||||
} |
|
||||||
} |
|
@ -1,141 +0,0 @@ |
|||||||
package cn.escheduler.api.configuration; |
|
||||||
|
|
||||||
import java.util.List; |
|
||||||
import java.util.Locale; |
|
||||||
import java.util.Set; |
|
||||||
|
|
||||||
import com.google.common.base.Function; |
|
||||||
import com.google.common.base.Optional; |
|
||||||
import com.google.common.base.Splitter; |
|
||||||
import com.google.common.collect.Sets; |
|
||||||
import io.swagger.annotations.Api; |
|
||||||
import org.slf4j.Logger; |
|
||||||
import org.slf4j.LoggerFactory; |
|
||||||
import org.springframework.beans.factory.annotation.Autowired; |
|
||||||
import org.springframework.context.MessageSource; |
|
||||||
import org.springframework.context.i18n.LocaleContextHolder; |
|
||||||
import org.springframework.core.Ordered; |
|
||||||
import org.springframework.core.annotation.Order; |
|
||||||
import org.springframework.stereotype.Component; |
|
||||||
|
|
||||||
import io.swagger.annotations.ApiOperation; |
|
||||||
import org.springframework.util.StringUtils; |
|
||||||
import springfox.documentation.spi.DocumentationType; |
|
||||||
import springfox.documentation.spi.service.OperationBuilderPlugin; |
|
||||||
import springfox.documentation.spi.service.contexts.OperationContext; |
|
||||||
import springfox.documentation.spring.web.DescriptionResolver; |
|
||||||
import springfox.documentation.spring.web.readers.operation.DefaultTagsProvider; |
|
||||||
import springfox.documentation.swagger.common.SwaggerPluginSupport; |
|
||||||
|
|
||||||
import static com.google.common.base.Strings.nullToEmpty; |
|
||||||
import static com.google.common.collect.FluentIterable.from; |
|
||||||
import static com.google.common.collect.Lists.newArrayList; |
|
||||||
import static com.google.common.collect.Sets.*; |
|
||||||
import static springfox.documentation.service.Tags.emptyTags; |
|
||||||
|
|
||||||
|
|
||||||
@Component |
|
||||||
@Order(Ordered.HIGHEST_PRECEDENCE - 10) |
|
||||||
public class SwaggerApiOperationPlugin implements OperationBuilderPlugin { |
|
||||||
|
|
||||||
private static final Logger logger = LoggerFactory.getLogger(SwaggerApiOperationPlugin.class); |
|
||||||
|
|
||||||
@Autowired |
|
||||||
private DescriptionResolver descriptions; |
|
||||||
@Autowired |
|
||||||
private DefaultTagsProvider tagsProvider; |
|
||||||
|
|
||||||
@Autowired |
|
||||||
private MessageSource messageSource; |
|
||||||
|
|
||||||
@Override |
|
||||||
public void apply(OperationContext context) { |
|
||||||
|
|
||||||
Locale locale = LocaleContextHolder.getLocale(); |
|
||||||
|
|
||||||
Set<String> defaultTags = tagsProvider.tags(context); |
|
||||||
Sets.SetView<String> tags = union(operationTags(context), controllerTags(context)); |
|
||||||
if (tags.isEmpty()) { |
|
||||||
context.operationBuilder().tags(defaultTags); |
|
||||||
} else { |
|
||||||
context.operationBuilder().tags(tags); |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
Optional<ApiOperation> apiOperationAnnotation = context.findAnnotation(ApiOperation.class); |
|
||||||
if (apiOperationAnnotation.isPresent()) { |
|
||||||
ApiOperation operation = apiOperationAnnotation.get(); |
|
||||||
|
|
||||||
if (StringUtils.hasText(operation.nickname())) { |
|
||||||
// Populate the value of nickname annotation into uniqueId
|
|
||||||
context.operationBuilder().uniqueId(operation.nickname()); |
|
||||||
context.operationBuilder().codegenMethodNameStem(operation.nickname()); |
|
||||||
} |
|
||||||
|
|
||||||
if (StringUtils.hasText(apiOperationAnnotation.get().notes())) { |
|
||||||
context.operationBuilder().notes(descriptions.resolve(messageSource.getMessage(apiOperationAnnotation.get().notes(), null, "", locale))); |
|
||||||
} |
|
||||||
|
|
||||||
if (apiOperationAnnotation.get().position() > 0) { |
|
||||||
context.operationBuilder().position(apiOperationAnnotation.get().position()); |
|
||||||
} |
|
||||||
|
|
||||||
if (StringUtils.hasText(apiOperationAnnotation.get().value())) { |
|
||||||
context.operationBuilder().summary(descriptions.resolve(apiOperationAnnotation.get().value())); |
|
||||||
} |
|
||||||
|
|
||||||
context.operationBuilder().consumes(asSet(nullToEmpty(apiOperationAnnotation.get().consumes()))); |
|
||||||
context.operationBuilder().produces(asSet(nullToEmpty(apiOperationAnnotation.get().produces()))); |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
private Set<String> controllerTags(OperationContext context) { |
|
||||||
Optional<Api> controllerAnnotation = context.findControllerAnnotation(Api.class); |
|
||||||
return controllerAnnotation.transform(tagsFromController()).or(Sets.<String>newHashSet()); |
|
||||||
} |
|
||||||
|
|
||||||
private Set<String> operationTags(OperationContext context) { |
|
||||||
Optional<ApiOperation> annotation = context.findAnnotation(ApiOperation.class); |
|
||||||
return annotation.transform(tagsFromOperation()).or(Sets.<String>newHashSet()); |
|
||||||
} |
|
||||||
|
|
||||||
private Function<ApiOperation, Set<String>> tagsFromOperation() { |
|
||||||
return new Function<ApiOperation, Set<String>>() { |
|
||||||
@Override |
|
||||||
public Set<String> apply(ApiOperation input) { |
|
||||||
Set<String> tags = newTreeSet(); |
|
||||||
tags.addAll(from(newArrayList(input.tags())).filter(emptyTags()).toSet()); |
|
||||||
return tags; |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
private Function<Api, Set<String>> tagsFromController() { |
|
||||||
return new Function<Api, Set<String>>() { |
|
||||||
@Override |
|
||||||
public Set<String> apply(Api input) { |
|
||||||
Set<String> tags = newTreeSet(); |
|
||||||
tags.addAll(from(newArrayList(input.tags())).filter(emptyTags()).toSet()); |
|
||||||
return tags; |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
private Set<String> asSet(String mediaTypes) { |
|
||||||
return newHashSet(Splitter.on(',') |
|
||||||
.trimResults() |
|
||||||
.omitEmptyStrings() |
|
||||||
.splitToList(mediaTypes)); |
|
||||||
} |
|
||||||
|
|
||||||
@Override |
|
||||||
public boolean supports(DocumentationType delimiter) { |
|
||||||
return SwaggerPluginSupport.pluginDoesApply(delimiter); |
|
||||||
// return true;
|
|
||||||
} |
|
||||||
|
|
||||||
} |
|
@ -1,115 +0,0 @@ |
|||||||
/* |
|
||||||
* |
|
||||||
* Copyright 2015-2019 the original author or authors. |
|
||||||
* |
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License"); |
|
||||||
* you may not use this file except in compliance with the License. |
|
||||||
* You may obtain a copy of the License at |
|
||||||
* |
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
* |
|
||||||
* Unless required by applicable law or agreed to in writing, software |
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS, |
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
||||||
* See the License for the specific language governing permissions and |
|
||||||
* limitations under the License. |
|
||||||
* |
|
||||||
* |
|
||||||
*/ |
|
||||||
package cn.escheduler.api.configuration; |
|
||||||
|
|
||||||
import com.fasterxml.classmate.ResolvedType; |
|
||||||
import com.google.common.base.Function; |
|
||||||
import com.google.common.base.Optional; |
|
||||||
import io.swagger.annotations.ApiParam; |
|
||||||
import org.springframework.beans.factory.annotation.Autowired; |
|
||||||
import org.springframework.context.MessageSource; |
|
||||||
import org.springframework.context.i18n.LocaleContextHolder; |
|
||||||
import org.springframework.core.Ordered; |
|
||||||
import org.springframework.core.annotation.Order; |
|
||||||
import org.springframework.stereotype.Component; |
|
||||||
import springfox.documentation.schema.Collections; |
|
||||||
import springfox.documentation.schema.Enums; |
|
||||||
import springfox.documentation.schema.Example; |
|
||||||
import springfox.documentation.service.AllowableValues; |
|
||||||
import springfox.documentation.spi.DocumentationType; |
|
||||||
import springfox.documentation.spi.schema.EnumTypeDeterminer; |
|
||||||
import springfox.documentation.spi.service.ParameterBuilderPlugin; |
|
||||||
import springfox.documentation.spi.service.contexts.ParameterContext; |
|
||||||
import springfox.documentation.spring.web.DescriptionResolver; |
|
||||||
import springfox.documentation.swagger.schema.ApiModelProperties; |
|
||||||
|
|
||||||
import java.util.Locale; |
|
||||||
|
|
||||||
import static com.google.common.base.Strings.emptyToNull; |
|
||||||
import static com.google.common.base.Strings.isNullOrEmpty; |
|
||||||
import static springfox.documentation.swagger.common.SwaggerPluginSupport.SWAGGER_PLUGIN_ORDER; |
|
||||||
import static springfox.documentation.swagger.common.SwaggerPluginSupport.pluginDoesApply; |
|
||||||
import static springfox.documentation.swagger.readers.parameter.Examples.examples; |
|
||||||
|
|
||||||
@Component |
|
||||||
@Order(Ordered.HIGHEST_PRECEDENCE - 10) |
|
||||||
public class SwaggerApiParamPlugin implements ParameterBuilderPlugin { |
|
||||||
@Autowired |
|
||||||
private DescriptionResolver descriptions; |
|
||||||
@Autowired |
|
||||||
private EnumTypeDeterminer enumTypeDeterminer; |
|
||||||
@Autowired |
|
||||||
private MessageSource messageSource; |
|
||||||
|
|
||||||
@Override |
|
||||||
public void apply(ParameterContext context) { |
|
||||||
Optional<ApiParam> apiParam = context.resolvedMethodParameter().findAnnotation(ApiParam.class); |
|
||||||
context.parameterBuilder() |
|
||||||
.allowableValues(allowableValues( |
|
||||||
context.alternateFor(context.resolvedMethodParameter().getParameterType()), |
|
||||||
apiParam.transform(toAllowableValue()).or(""))); |
|
||||||
if (apiParam.isPresent()) { |
|
||||||
Locale locale = LocaleContextHolder.getLocale(); |
|
||||||
|
|
||||||
ApiParam annotation = apiParam.get(); |
|
||||||
context.parameterBuilder().name(emptyToNull(annotation.name())) |
|
||||||
.description(emptyToNull(descriptions.resolve(messageSource.getMessage(annotation.value(), null, "",locale)))) |
|
||||||
.parameterAccess(emptyToNull(annotation.access())) |
|
||||||
.defaultValue(emptyToNull(annotation.defaultValue())) |
|
||||||
.allowMultiple(annotation.allowMultiple()) |
|
||||||
.allowEmptyValue(annotation.allowEmptyValue()) |
|
||||||
.required(annotation.required()) |
|
||||||
.scalarExample(new Example(annotation.example())) |
|
||||||
.complexExamples(examples(annotation.examples())) |
|
||||||
.hidden(annotation.hidden()) |
|
||||||
.collectionFormat(annotation.collectionFormat()) |
|
||||||
.order(SWAGGER_PLUGIN_ORDER); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
private Function<ApiParam, String> toAllowableValue() { |
|
||||||
return new Function<ApiParam, String>() { |
|
||||||
@Override |
|
||||||
public String apply(ApiParam input) { |
|
||||||
return input.allowableValues(); |
|
||||||
} |
|
||||||
}; |
|
||||||
} |
|
||||||
|
|
||||||
private AllowableValues allowableValues(ResolvedType parameterType, String allowableValueString) { |
|
||||||
AllowableValues allowableValues = null; |
|
||||||
if (!isNullOrEmpty(allowableValueString)) { |
|
||||||
allowableValues = ApiModelProperties.allowableValueFromString(allowableValueString); |
|
||||||
} else { |
|
||||||
if (enumTypeDeterminer.isEnum(parameterType.getErasedType())) { |
|
||||||
allowableValues = Enums.allowableValues(parameterType.getErasedType()); |
|
||||||
} |
|
||||||
if (Collections.isContainerType(parameterType)) { |
|
||||||
allowableValues = Enums.allowableValues(Collections.collectionElementType(parameterType).getErasedType()); |
|
||||||
} |
|
||||||
} |
|
||||||
return allowableValues; |
|
||||||
} |
|
||||||
|
|
||||||
@Override |
|
||||||
public boolean supports(DocumentationType delimiter) { |
|
||||||
return pluginDoesApply(delimiter); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
@ -1,74 +0,0 @@ |
|||||||
/* |
|
||||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
|
||||||
* contributor license agreements. See the NOTICE file distributed with |
|
||||||
* this work for additional information regarding copyright ownership. |
|
||||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
|
||||||
* (the "License"); you may not use this file except in compliance with |
|
||||||
* the License. You may obtain a copy of the License at |
|
||||||
* |
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
* |
|
||||||
* Unless required by applicable law or agreed to in writing, software |
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS, |
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
||||||
* See the License for the specific language governing permissions and |
|
||||||
* limitations under the License. |
|
||||||
*/ |
|
||||||
package cn.escheduler.api.configuration; |
|
||||||
|
|
||||||
import io.swagger.annotations.Api; |
|
||||||
import org.apache.commons.lang.StringUtils; |
|
||||||
import org.slf4j.Logger; |
|
||||||
import org.slf4j.LoggerFactory; |
|
||||||
import org.springframework.beans.factory.annotation.Autowired; |
|
||||||
import org.springframework.context.MessageSource; |
|
||||||
import org.springframework.context.i18n.LocaleContextHolder; |
|
||||||
import org.springframework.core.Ordered; |
|
||||||
import org.springframework.core.annotation.Order; |
|
||||||
import org.springframework.stereotype.Component; |
|
||||||
import springfox.documentation.spi.DocumentationType; |
|
||||||
import springfox.documentation.spi.service.OperationBuilderPlugin; |
|
||||||
import springfox.documentation.spi.service.contexts.OperationContext; |
|
||||||
|
|
||||||
import java.util.HashSet; |
|
||||||
import java.util.List; |
|
||||||
import java.util.Locale; |
|
||||||
import java.util.Set; |
|
||||||
|
|
||||||
@Component |
|
||||||
@Order(Ordered.HIGHEST_PRECEDENCE - 10) |
|
||||||
public class SwaggerApiPlugin implements OperationBuilderPlugin { |
|
||||||
|
|
||||||
private static final Logger logger = LoggerFactory.getLogger(SwaggerApiPlugin.class); |
|
||||||
|
|
||||||
@Autowired |
|
||||||
private MessageSource messageSource; |
|
||||||
|
|
||||||
@Override |
|
||||||
public void apply(OperationContext context) { |
|
||||||
Locale locale = LocaleContextHolder.getLocale(); |
|
||||||
|
|
||||||
List<Api> list = context.findAllAnnotations(Api.class); |
|
||||||
if (list.size() > 0) { |
|
||||||
Api api = list.get(0); |
|
||||||
|
|
||||||
Set<String> tagsSet = new HashSet<>(1); |
|
||||||
|
|
||||||
if(api.tags() != null && api.tags().length > 0){ |
|
||||||
tagsSet.add(StringUtils.isNotBlank(api.tags()[0]) ? messageSource.getMessage(api.tags()[0], null, locale) : " "); |
|
||||||
} |
|
||||||
|
|
||||||
context.operationBuilder().hidden(api.hidden()) |
|
||||||
.tags(tagsSet).build(); |
|
||||||
|
|
||||||
} |
|
||||||
|
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
@Override |
|
||||||
public boolean supports(DocumentationType delimiter) { |
|
||||||
return true; |
|
||||||
} |
|
||||||
|
|
||||||
} |
|
@ -1,85 +0,0 @@ |
|||||||
/* |
|
||||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
|
||||||
* contributor license agreements. See the NOTICE file distributed with |
|
||||||
* this work for additional information regarding copyright ownership. |
|
||||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
|
||||||
* (the "License"); you may not use this file except in compliance with |
|
||||||
* the License. You may obtain a copy of the License at |
|
||||||
* |
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
* |
|
||||||
* Unless required by applicable law or agreed to in writing, software |
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS, |
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
||||||
* See the License for the specific language governing permissions and |
|
||||||
* limitations under the License. |
|
||||||
*/ |
|
||||||
package cn.escheduler.api.controller; |
|
||||||
|
|
||||||
import cn.escheduler.api.service.ServerService; |
|
||||||
import cn.escheduler.api.utils.Constants; |
|
||||||
import cn.escheduler.api.utils.Result; |
|
||||||
import cn.escheduler.dao.model.User; |
|
||||||
import org.slf4j.Logger; |
|
||||||
import org.slf4j.LoggerFactory; |
|
||||||
import org.springframework.beans.factory.annotation.Autowired; |
|
||||||
import org.springframework.http.HttpStatus; |
|
||||||
import org.springframework.web.bind.annotation.*; |
|
||||||
|
|
||||||
import java.util.Map; |
|
||||||
|
|
||||||
import static cn.escheduler.api.enums.Status.LIST_MASTERS_ERROR; |
|
||||||
import static cn.escheduler.api.enums.Status.LIST_WORKERS_ERROR; |
|
||||||
|
|
||||||
/** |
|
||||||
* server controller |
|
||||||
*/ |
|
||||||
@RestController |
|
||||||
@RequestMapping("process") |
|
||||||
public class ServerController extends BaseController{ |
|
||||||
|
|
||||||
private static final Logger logger = LoggerFactory.getLogger(ExecutorController.class); |
|
||||||
|
|
||||||
@Autowired |
|
||||||
private ServerService serverService; |
|
||||||
|
|
||||||
/** |
|
||||||
* master list |
|
||||||
* @param loginUser |
|
||||||
* @return |
|
||||||
*/ |
|
||||||
@GetMapping(value = "/master/list") |
|
||||||
@ResponseStatus(HttpStatus.OK) |
|
||||||
public Result listMaster(@RequestAttribute(value = Constants.SESSION_USER) User loginUser) { |
|
||||||
logger.info("login user: {}, query all master", loginUser.getUserName()); |
|
||||||
try{ |
|
||||||
logger.info("list master, user:{}", loginUser.getUserName()); |
|
||||||
Map<String, Object> result = serverService.queryMaster(loginUser); |
|
||||||
return returnDataList(result); |
|
||||||
}catch (Exception e){ |
|
||||||
logger.error(LIST_MASTERS_ERROR.getMsg(),e); |
|
||||||
return error(LIST_MASTERS_ERROR.getCode(), |
|
||||||
LIST_MASTERS_ERROR.getMsg()); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
/** |
|
||||||
* worker list |
|
||||||
* @param loginUser |
|
||||||
* @return |
|
||||||
*/ |
|
||||||
@GetMapping(value = "/worker/list") |
|
||||||
@ResponseStatus(HttpStatus.OK) |
|
||||||
public Result listWorker(@RequestAttribute(value = Constants.SESSION_USER) User loginUser) { |
|
||||||
logger.info("login user: {}, query all workers", loginUser.getUserName()); |
|
||||||
try{ |
|
||||||
Map<String, Object> result = serverService.queryWorker(loginUser); |
|
||||||
return returnDataList(result); |
|
||||||
}catch (Exception e){ |
|
||||||
logger.error(LIST_WORKERS_ERROR.getMsg(),e); |
|
||||||
return error(LIST_WORKERS_ERROR.getCode(), |
|
||||||
LIST_WORKERS_ERROR.getMsg()); |
|
||||||
} |
|
||||||
} |
|
||||||
} |
|
@ -0,0 +1,216 @@ |
|||||||
|
QUERY_SCHEDULE_LIST_NOTES=query schedule list |
||||||
|
DESC=description |
||||||
|
GROUP_NAME=group name |
||||||
|
GROUP_TYPE=group type |
||||||
|
QUERY_ALERT_GROUP_LIST_NOTES=query alert group list |
||||||
|
UPDATE_ALERT_GROUP_NOTES=update alert group |
||||||
|
DELETE_ALERT_GROUP_BY_ID_NOTES=delete alert group by id |
||||||
|
VERIFY_ALERT_GROUP_NAME_NOTES=verify alert group name, check alert group exist or not |
||||||
|
GRANT_ALERT_GROUP_NOTES=grant alert group |
||||||
|
USER_IDS=user id list |
||||||
|
ALERT_GROUP_TAG=alert group related operation |
||||||
|
CREATE_ALERT_GROUP_NOTES=create alert group |
||||||
|
WORKER_GROUP_TAG=worker group related operation |
||||||
|
SAVE_WORKER_GROUP_NOTES=create worker group |
||||||
|
WORKER_GROUP_NAME=worker group name |
||||||
|
WORKER_IP_LIST=worker ip list, eg. 192.168.1.1,192.168.1.2 |
||||||
|
QUERY_WORKER_GROUP_PAGING_NOTES=query worker group paging |
||||||
|
QUERY_WORKER_GROUP_LIST_NOTES=query worker group list |
||||||
|
DELETE_WORKER_GROUP_BY_ID_NOTES=delete worker group by id |
||||||
|
DATA_ANALYSIS_TAG=analysis related operation of task state |
||||||
|
COUNT_TASK_STATE_NOTES=count task state |
||||||
|
COUNT_PROCESS_INSTANCE_NOTES=count process instance state |
||||||
|
COUNT_PROCESS_DEFINITION_BY_USER_NOTES=count process definition by user |
||||||
|
COUNT_COMMAND_STATE_NOTES=count command state |
||||||
|
COUNT_QUEUE_STATE_NOTES=count the running status of the task in the queue
||||||
|
|
||||||
|
ACCESS_TOKEN_TAG=access token related operation |
||||||
|
MONITOR_TAG=monitor related operation |
||||||
|
MASTER_LIST_NOTES=master server list |
||||||
|
WORKER_LIST_NOTES=worker server list |
||||||
|
QUERY_DATABASE_STATE_NOTES=query database state |
||||||
|
QUERY_ZOOKEEPER_STATE_NOTES=QUERY ZOOKEEPER STATE |
||||||
|
TASK_STATE=task instance state |
||||||
|
SOURCE_TABLE=SOURCE TABLE |
||||||
|
DEST_TABLE=dest table |
||||||
|
TASK_DATE=task date |
||||||
|
QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=query history task record list paging |
||||||
|
DATA_SOURCE_TAG=data source related operation |
||||||
|
CREATE_DATA_SOURCE_NOTES=create data source |
||||||
|
DATA_SOURCE_NAME=data source name |
||||||
|
DATA_SOURCE_NOTE=data source desc |
||||||
|
DB_TYPE=database type |
||||||
|
DATA_SOURCE_HOST=DATA SOURCE HOST |
||||||
|
DATA_SOURCE_PORT=data source port |
||||||
|
DATABASE_NAME=database name |
||||||
|
QUEUE_TAG=queue related operation |
||||||
|
QUERY_QUEUE_LIST_NOTES=query queue list |
||||||
|
QUERY_QUEUE_LIST_PAGING_NOTES=query queue list paging |
||||||
|
CREATE_QUEUE_NOTES=create queue |
||||||
|
YARN_QUEUE_NAME=yarn(hadoop) queue name |
||||||
|
QUEUE_ID=queue id |
||||||
|
TENANT_DESC=tenant desc |
||||||
|
QUERY_TENANT_LIST_PAGING_NOTES=query tenant list paging |
||||||
|
QUERY_TENANT_LIST_NOTES=query tenant list |
||||||
|
UPDATE_TENANT_NOTES=update tenant |
||||||
|
DELETE_TENANT_NOTES=delete tenant |
||||||
|
RESOURCES_TAG=resource center related operation |
||||||
|
CREATE_RESOURCE_NOTES=create resource |
||||||
|
RESOURCE_TYPE=resource file type |
||||||
|
RESOURCE_NAME=resource name |
||||||
|
RESOURCE_DESC=resource file desc |
||||||
|
RESOURCE_FILE=resource file |
||||||
|
RESOURCE_ID=resource id |
||||||
|
QUERY_RESOURCE_LIST_NOTES=query resource list |
||||||
|
DELETE_RESOURCE_BY_ID_NOTES=delete resource by id |
||||||
|
VIEW_RESOURCE_BY_ID_NOTES=view resource by id |
||||||
|
ONLINE_CREATE_RESOURCE_NOTES=online create resource |
||||||
|
SUFFIX=resource file suffix |
||||||
|
CONTENT=resource file content |
||||||
|
UPDATE_RESOURCE_NOTES=edit resource file online |
||||||
|
DOWNLOAD_RESOURCE_NOTES=download resource file |
||||||
|
CREATE_UDF_FUNCTION_NOTES=create udf function |
||||||
|
UDF_TYPE=UDF type |
||||||
|
FUNC_NAME=function name |
||||||
|
CLASS_NAME=package and class name |
||||||
|
ARG_TYPES=arguments |
||||||
|
UDF_DESC=udf desc |
||||||
|
VIEW_UDF_FUNCTION_NOTES=view udf function |
||||||
|
UPDATE_UDF_FUNCTION_NOTES=update udf function |
||||||
|
QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=query udf function list paging |
||||||
|
VERIFY_UDF_FUNCTION_NAME_NOTES=verify udf function name |
||||||
|
DELETE_UDF_FUNCTION_NOTES=delete udf function |
||||||
|
AUTHORIZED_FILE_NOTES=authorized file |
||||||
|
UNAUTHORIZED_FILE_NOTES=unauthorized file |
||||||
|
AUTHORIZED_UDF_FUNC_NOTES=authorized udf func |
||||||
|
UNAUTHORIZED_UDF_FUNC_NOTES=unauthorized udf func |
||||||
|
VERIFY_QUEUE_NOTES=verify queue |
||||||
|
TENANT_TAG=tenant related operation |
||||||
|
CREATE_TENANT_NOTES=create tenant |
||||||
|
TENANT_CODE=tenant code |
||||||
|
TENANT_NAME=tenant name |
||||||
|
QUEUE_NAME=queue name |
||||||
|
PASSWORD=password |
||||||
|
DATA_SOURCE_OTHER=jdbc connection params, format:{"key1":"value1",...} |
||||||
|
PROJECT_TAG=project related operation |
||||||
|
CREATE_PROJECT_NOTES=create project |
||||||
|
PROJECT_DESC=project description |
||||||
|
UPDATE_PROJECT_NOTES=update project |
||||||
|
PROJECT_ID=project id |
||||||
|
QUERY_PROJECT_BY_ID_NOTES=query project info by project id |
||||||
|
QUERY_PROJECT_LIST_PAGING_NOTES=QUERY PROJECT LIST PAGING |
||||||
|
DELETE_PROJECT_BY_ID_NOTES=delete project by id |
||||||
|
QUERY_UNAUTHORIZED_PROJECT_NOTES=query unauthorized project |
||||||
|
QUERY_AUTHORIZED_PROJECT_NOTES=query authorized project |
||||||
|
TASK_RECORD_TAG=task record related operation |
||||||
|
QUERY_TASK_RECORD_LIST_PAGING_NOTES=query task record list paging |
||||||
|
CREATE_TOKEN_NOTES=create token ,note: please login first |
||||||
|
QUERY_ACCESS_TOKEN_LIST_NOTES=query access token list paging |
||||||
|
SCHEDULE=schedule |
||||||
|
WARNING_TYPE=warning type(sending strategy) |
||||||
|
WARNING_GROUP_ID=warning group id |
||||||
|
FAILURE_STRATEGY=failure strategy |
||||||
|
RECEIVERS=receivers |
||||||
|
RECEIVERS_CC=receivers cc |
||||||
|
WORKER_GROUP_ID=worker server group id |
||||||
|
PROCESS_INSTANCE_PRIORITY=process instance priority |
||||||
|
UPDATE_SCHEDULE_NOTES=update schedule |
||||||
|
SCHEDULE_ID=schedule id |
||||||
|
ONLINE_SCHEDULE_NOTES=online schedule |
||||||
|
OFFLINE_SCHEDULE_NOTES=offline schedule |
||||||
|
QUERY_SCHEDULE_NOTES=query schedule |
||||||
|
QUERY_SCHEDULE_LIST_PAGING_NOTES=query schedule list paging |
||||||
|
LOGIN_TAG=User login related operations |
||||||
|
USER_NAME=user name |
||||||
|
PROJECT_NAME=project name |
||||||
|
CREATE_PROCESS_DEFINITION_NOTES=create process definition |
||||||
|
PROCESS_DEFINITION_NAME=process definition name |
||||||
|
PROCESS_DEFINITION_JSON=process definition detail info (json format) |
||||||
|
PROCESS_DEFINITION_LOCATIONS=process definition node locations info (json format) |
||||||
|
PROCESS_INSTANCE_LOCATIONS=process instance node locations info (json format) |
||||||
|
PROCESS_DEFINITION_CONNECTS=process definition node connects info (json format) |
||||||
|
PROCESS_INSTANCE_CONNECTS=process instance node connects info (json format) |
||||||
|
PROCESS_DEFINITION_DESC=process definition desc |
||||||
|
PROCESS_DEFINITION_TAG=process definition related operation
||||||
|
SIGNOUT_NOTES=logout |
||||||
|
USER_PASSWORD=user password |
||||||
|
UPDATE_PROCESS_INSTANCE_NOTES=update process instance |
||||||
|
QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list |
||||||
|
VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify process definition name
||||||
|
LOGIN_NOTES=user login |
||||||
|
UPDATE_PROCCESS_DEFINITION_NOTES=update process definition
||||||
|
PROCESS_DEFINITION_ID=process definition id |
||||||
|
RELEASE_PROCCESS_DEFINITION_NOTES=release process definition
||||||
|
QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query process definition by id
||||||
|
QUERY_PROCCESS_DEFINITION_LIST_NOTES=query process definition list
||||||
|
QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging
||||||
|
PAGE_NO=page no |
||||||
|
PROCESS_INSTANCE_ID=process instance id |
||||||
|
PROCESS_INSTANCE_JSON=process instance info(json format) |
||||||
|
SCHEDULE_TIME=schedule time |
||||||
|
SYNC_DEFINE=update the information of the process instance to the process definition
||||||
|
|
||||||
|
RECOVERY_PROCESS_INSTANCE_FLAG=whether to recovery process instance |
||||||
|
SEARCH_VAL=search val |
||||||
|
USER_ID=user id |
||||||
|
PAGE_SIZE=page size |
||||||
|
LIMIT=limit |
||||||
|
VIEW_TREE_NOTES=view tree |
||||||
|
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id |
||||||
|
PROCESS_DEFINITION_ID_LIST=process definition id list |
||||||
|
QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id |
||||||
|
DELETE_PROCESS_INSTANCE_BY_ID_NOTES=delete process instance by process instance id |
||||||
|
TASK_ID=task instance id |
||||||
|
SKIP_LINE_NUM=skip line num |
||||||
|
QUERY_TASK_INSTANCE_LOG_NOTES=query task instance log |
||||||
|
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log |
||||||
|
USERS_TAG=users related operation |
||||||
|
SCHEDULER_TAG=scheduler related operation |
||||||
|
CREATE_SCHEDULE_NOTES=create schedule |
||||||
|
CREATE_USER_NOTES=create user |
||||||
|
TENANT_ID=tenant id |
||||||
|
QUEUE=queue |
||||||
|
EMAIL=email |
||||||
|
PHONE=phone |
||||||
|
QUERY_USER_LIST_NOTES=query user list |
||||||
|
UPDATE_USER_NOTES=update user |
||||||
|
DELETE_USER_BY_ID_NOTES=delete user by id |
||||||
|
GRANT_PROJECT_NOTES=GRANT PROJECT |
||||||
|
PROJECT_IDS=project ids(string format, multiple projects separated by ",") |
||||||
|
GRANT_RESOURCE_NOTES=grant resource file |
||||||
|
RESOURCE_IDS=resource ids(string format, multiple resources separated by ",") |
||||||
|
GET_USER_INFO_NOTES=get user info |
||||||
|
LIST_USER_NOTES=list user |
||||||
|
VERIFY_USER_NAME_NOTES=verify user name |
||||||
|
UNAUTHORIZED_USER_NOTES=cancel authorization |
||||||
|
ALERT_GROUP_ID=alert group id |
||||||
|
AUTHORIZED_USER_NOTES=authorized user |
||||||
|
GRANT_UDF_FUNC_NOTES=grant udf function |
||||||
|
UDF_IDS=udf ids(string format, multiple udf functions separated by ",") |
||||||
|
GRANT_DATASOURCE_NOTES=grant datasource |
||||||
|
DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",") |
||||||
|
QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id |
||||||
|
QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=query parent process instance info by sub process instance id |
||||||
|
QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=query process instance global variables and local variables |
||||||
|
VIEW_GANTT_NOTES=view gantt |
||||||
|
SUB_PROCESS_INSTANCE_ID=sub process instance id |
||||||
|
TASK_NAME=task instance name |
||||||
|
TASK_INSTANCE_TAG=task instance related operation |
||||||
|
LOGGER_TAG=log related operation |
||||||
|
PROCESS_INSTANCE_TAG=process instance related operation |
||||||
|
EXECUTION_STATUS=running status for workflow and task nodes
||||||
|
HOST=ip address of running task |
||||||
|
START_DATE=start date |
||||||
|
END_DATE=end date |
||||||
|
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=query task list by process instance id |
||||||
|
UPDATE_DATA_SOURCE_NOTES=update data source |
||||||
|
DATA_SOURCE_ID=DATA SOURCE ID |
||||||
|
QUERY_DATA_SOURCE_NOTES=query data source by id |
||||||
|
QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=query data source list by database type |
||||||
|
QUERY_DATA_SOURCE_LIST_PAGING_NOTES=query data source list paging |
||||||
|
CONNECT_DATA_SOURCE_NOTES=CONNECT DATA SOURCE |
||||||
|
CONNECT_DATA_SOURCE_TEST_NOTES=connect data source test |
||||||
|
DELETE_DATA_SOURCE_NOTES=delete data source |
||||||
|
VERIFY_DATA_SOURCE_NOTES=verify data source |
||||||
|
UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source |
||||||
|
AUTHORIZED_DATA_SOURCE_NOTES=authorized data source |
@ -0,0 +1,216 @@ |
|||||||
|
QUERY_SCHEDULE_LIST_NOTES=query schedule list |
||||||
|
DESC=description |
||||||
|
GROUP_NAME=group name |
||||||
|
GROUP_TYPE=group type |
||||||
|
QUERY_ALERT_GROUP_LIST_NOTES=query alert group list |
||||||
|
UPDATE_ALERT_GROUP_NOTES=update alert group |
||||||
|
DELETE_ALERT_GROUP_BY_ID_NOTES=delete alert group by id |
||||||
|
VERIFY_ALERT_GROUP_NAME_NOTES=verify alert group name, check alert group exist or not |
||||||
|
GRANT_ALERT_GROUP_NOTES=grant alert group |
||||||
|
USER_IDS=user id list |
||||||
|
ALERT_GROUP_TAG=alert group related operation |
||||||
|
CREATE_ALERT_GROUP_NOTES=create alert group |
||||||
|
WORKER_GROUP_TAG=worker group related operation |
||||||
|
SAVE_WORKER_GROUP_NOTES=create worker group |
||||||
|
WORKER_GROUP_NAME=worker group name |
||||||
|
WORKER_IP_LIST=worker ip list, eg. 192.168.1.1,192.168.1.2 |
||||||
|
QUERY_WORKER_GROUP_PAGING_NOTES=query worker group paging |
||||||
|
QUERY_WORKER_GROUP_LIST_NOTES=query worker group list |
||||||
|
DELETE_WORKER_GROUP_BY_ID_NOTES=delete worker group by id |
||||||
|
DATA_ANALYSIS_TAG=analysis related operation of task state |
||||||
|
COUNT_TASK_STATE_NOTES=count task state |
||||||
|
COUNT_PROCESS_INSTANCE_NOTES=count process instance state |
||||||
|
COUNT_PROCESS_DEFINITION_BY_USER_NOTES=count process definition by user |
||||||
|
COUNT_COMMAND_STATE_NOTES=count command state |
||||||
|
COUNT_QUEUE_STATE_NOTES=count the running status of the task in the queue
||||||
|
|
||||||
|
ACCESS_TOKEN_TAG=access token related operation |
||||||
|
MONITOR_TAG=monitor related operation |
||||||
|
MASTER_LIST_NOTES=master server list |
||||||
|
WORKER_LIST_NOTES=worker server list |
||||||
|
QUERY_DATABASE_STATE_NOTES=query database state |
||||||
|
QUERY_ZOOKEEPER_STATE_NOTES=QUERY ZOOKEEPER STATE |
||||||
|
TASK_STATE=task instance state |
||||||
|
SOURCE_TABLE=SOURCE TABLE |
||||||
|
DEST_TABLE=dest table |
||||||
|
TASK_DATE=task date |
||||||
|
QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=query history task record list paging |
||||||
|
DATA_SOURCE_TAG=data source related operation |
||||||
|
CREATE_DATA_SOURCE_NOTES=create data source |
||||||
|
DATA_SOURCE_NAME=data source name |
||||||
|
DATA_SOURCE_NOTE=data source desc |
||||||
|
DB_TYPE=database type |
||||||
|
DATA_SOURCE_HOST=DATA SOURCE HOST |
||||||
|
DATA_SOURCE_PORT=data source port |
||||||
|
DATABASE_NAME=database name |
||||||
|
QUEUE_TAG=queue related operation |
||||||
|
QUERY_QUEUE_LIST_NOTES=query queue list |
||||||
|
QUERY_QUEUE_LIST_PAGING_NOTES=query queue list paging |
||||||
|
CREATE_QUEUE_NOTES=create queue |
||||||
|
YARN_QUEUE_NAME=yarn(hadoop) queue name |
||||||
|
QUEUE_ID=queue id |
||||||
|
TENANT_DESC=tenant desc |
||||||
|
QUERY_TENANT_LIST_PAGING_NOTES=query tenant list paging |
||||||
|
QUERY_TENANT_LIST_NOTES=query tenant list |
||||||
|
UPDATE_TENANT_NOTES=update tenant |
||||||
|
DELETE_TENANT_NOTES=delete tenant |
||||||
|
RESOURCES_TAG=resource center related operation |
||||||
|
CREATE_RESOURCE_NOTES=create resource |
||||||
|
RESOURCE_TYPE=resource file type |
||||||
|
RESOURCE_NAME=resource name |
||||||
|
RESOURCE_DESC=resource file desc |
||||||
|
RESOURCE_FILE=resource file |
||||||
|
RESOURCE_ID=resource id |
||||||
|
QUERY_RESOURCE_LIST_NOTES=query resource list |
||||||
|
DELETE_RESOURCE_BY_ID_NOTES=delete resource by id |
||||||
|
VIEW_RESOURCE_BY_ID_NOTES=view resource by id |
||||||
|
ONLINE_CREATE_RESOURCE_NOTES=online create resource |
||||||
|
SUFFIX=resource file suffix |
||||||
|
CONTENT=resource file content |
||||||
|
UPDATE_RESOURCE_NOTES=edit resource file online |
||||||
|
DOWNLOAD_RESOURCE_NOTES=download resource file |
||||||
|
CREATE_UDF_FUNCTION_NOTES=create udf function |
||||||
|
UDF_TYPE=UDF type |
||||||
|
FUNC_NAME=function name |
||||||
|
CLASS_NAME=package and class name |
||||||
|
ARG_TYPES=arguments |
||||||
|
UDF_DESC=udf desc |
||||||
|
VIEW_UDF_FUNCTION_NOTES=view udf function |
||||||
|
UPDATE_UDF_FUNCTION_NOTES=update udf function |
||||||
|
QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=query udf function list paging |
||||||
|
VERIFY_UDF_FUNCTION_NAME_NOTES=verify udf function name |
||||||
|
DELETE_UDF_FUNCTION_NOTES=delete udf function |
||||||
|
AUTHORIZED_FILE_NOTES=authorized file |
||||||
|
UNAUTHORIZED_FILE_NOTES=unauthorized file |
||||||
|
AUTHORIZED_UDF_FUNC_NOTES=authorized udf func |
||||||
|
UNAUTHORIZED_UDF_FUNC_NOTES=unauthorized udf func |
||||||
|
VERIFY_QUEUE_NOTES=verify queue |
||||||
|
TENANT_TAG=tenant related operation |
||||||
|
CREATE_TENANT_NOTES=create tenant |
||||||
|
TENANT_CODE=tenant code |
||||||
|
TENANT_NAME=tenant name |
||||||
|
QUEUE_NAME=queue name |
||||||
|
PASSWORD=password |
||||||
|
DATA_SOURCE_OTHER=jdbc connection params, format:{"key1":"value1",...} |
||||||
|
PROJECT_TAG=project related operation |
||||||
|
CREATE_PROJECT_NOTES=create project |
||||||
|
PROJECT_DESC=project description |
||||||
|
UPDATE_PROJECT_NOTES=update project |
||||||
|
PROJECT_ID=project id |
||||||
|
QUERY_PROJECT_BY_ID_NOTES=query project info by project id |
||||||
|
QUERY_PROJECT_LIST_PAGING_NOTES=QUERY PROJECT LIST PAGING |
||||||
|
DELETE_PROJECT_BY_ID_NOTES=delete project by id |
||||||
|
QUERY_UNAUTHORIZED_PROJECT_NOTES=query unauthorized project |
||||||
|
QUERY_AUTHORIZED_PROJECT_NOTES=query authorized project |
||||||
|
TASK_RECORD_TAG=task record related operation |
||||||
|
QUERY_TASK_RECORD_LIST_PAGING_NOTES=query task record list paging |
||||||
|
CREATE_TOKEN_NOTES=create token ,note: please login first |
||||||
|
QUERY_ACCESS_TOKEN_LIST_NOTES=query access token list paging |
||||||
|
SCHEDULE=schedule |
||||||
|
WARNING_TYPE=warning type(sending strategy) |
||||||
|
WARNING_GROUP_ID=warning group id |
||||||
|
FAILURE_STRATEGY=failure strategy |
||||||
|
RECEIVERS=receivers |
||||||
|
RECEIVERS_CC=receivers cc |
||||||
|
WORKER_GROUP_ID=worker server group id |
||||||
|
PROCESS_INSTANCE_PRIORITY=process instance priority |
||||||
|
UPDATE_SCHEDULE_NOTES=update schedule |
||||||
|
SCHEDULE_ID=schedule id |
||||||
|
ONLINE_SCHEDULE_NOTES=online schedule |
||||||
|
OFFLINE_SCHEDULE_NOTES=offline schedule |
||||||
|
QUERY_SCHEDULE_NOTES=query schedule |
||||||
|
QUERY_SCHEDULE_LIST_PAGING_NOTES=query schedule list paging |
||||||
|
LOGIN_TAG=User login related operations |
||||||
|
USER_NAME=user name |
||||||
|
PROJECT_NAME=project name |
||||||
|
CREATE_PROCESS_DEFINITION_NOTES=create process definition |
||||||
|
PROCESS_DEFINITION_NAME=process definition name |
||||||
|
PROCESS_DEFINITION_JSON=process definition detail info (json format) |
||||||
|
PROCESS_DEFINITION_LOCATIONS=process definition node locations info (json format) |
||||||
|
PROCESS_INSTANCE_LOCATIONS=process instance node locations info (json format) |
||||||
|
PROCESS_DEFINITION_CONNECTS=process definition node connects info (json format) |
||||||
|
PROCESS_INSTANCE_CONNECTS=process instance node connects info (json format) |
||||||
|
PROCESS_DEFINITION_DESC=process definition desc |
||||||
|
PROCESS_DEFINITION_TAG=process definition related operation
||||||
|
SIGNOUT_NOTES=logout |
||||||
|
USER_PASSWORD=user password |
||||||
|
UPDATE_PROCESS_INSTANCE_NOTES=update process instance |
||||||
|
QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list |
||||||
|
VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify process definition name
||||||
|
LOGIN_NOTES=user login |
||||||
|
UPDATE_PROCCESS_DEFINITION_NOTES=update process definition
||||||
|
PROCESS_DEFINITION_ID=process definition id |
||||||
|
RELEASE_PROCCESS_DEFINITION_NOTES=release process definition
||||||
|
QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query process definition by id
||||||
|
QUERY_PROCCESS_DEFINITION_LIST_NOTES=query process definition list
||||||
|
QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging
||||||
|
PAGE_NO=page no |
||||||
|
PROCESS_INSTANCE_ID=process instance id |
||||||
|
PROCESS_INSTANCE_JSON=process instance info(json format) |
||||||
|
SCHEDULE_TIME=schedule time |
||||||
|
SYNC_DEFINE=update the information of the process instance to the process definition
||||||
|
|
||||||
|
RECOVERY_PROCESS_INSTANCE_FLAG=whether to recovery process instance |
||||||
|
SEARCH_VAL=search val |
||||||
|
USER_ID=user id |
||||||
|
PAGE_SIZE=page size |
||||||
|
LIMIT=limit |
||||||
|
VIEW_TREE_NOTES=view tree |
||||||
|
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id |
||||||
|
PROCESS_DEFINITION_ID_LIST=process definition id list |
||||||
|
QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id |
||||||
|
DELETE_PROCESS_INSTANCE_BY_ID_NOTES=delete process instance by process instance id |
||||||
|
TASK_ID=task instance id |
||||||
|
SKIP_LINE_NUM=skip line num |
||||||
|
QUERY_TASK_INSTANCE_LOG_NOTES=query task instance log |
||||||
|
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log |
||||||
|
USERS_TAG=users related operation |
||||||
|
SCHEDULER_TAG=scheduler related operation |
||||||
|
CREATE_SCHEDULE_NOTES=create schedule |
||||||
|
CREATE_USER_NOTES=create user |
||||||
|
TENANT_ID=tenant id |
||||||
|
QUEUE=queue |
||||||
|
EMAIL=email |
||||||
|
PHONE=phone |
||||||
|
QUERY_USER_LIST_NOTES=query user list |
||||||
|
UPDATE_USER_NOTES=update user |
||||||
|
DELETE_USER_BY_ID_NOTES=delete user by id |
||||||
|
GRANT_PROJECT_NOTES=GRANT PROJECT |
||||||
|
PROJECT_IDS=project ids(string format, multiple projects separated by ",") |
||||||
|
GRANT_RESOURCE_NOTES=grant resource file |
||||||
|
RESOURCE_IDS=resource ids(string format, multiple resources separated by ",") |
||||||
|
GET_USER_INFO_NOTES=get user info |
||||||
|
LIST_USER_NOTES=list user |
||||||
|
VERIFY_USER_NAME_NOTES=verify user name |
||||||
|
UNAUTHORIZED_USER_NOTES=cancel authorization |
||||||
|
ALERT_GROUP_ID=alert group id |
||||||
|
AUTHORIZED_USER_NOTES=authorized user |
||||||
|
GRANT_UDF_FUNC_NOTES=grant udf function |
||||||
|
UDF_IDS=udf ids(string format, multiple udf functions separated by ",") |
||||||
|
GRANT_DATASOURCE_NOTES=grant datasource |
||||||
|
DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",") |
||||||
|
QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id |
||||||
|
QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=query parent process instance info by sub process instance id |
||||||
|
QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=query process instance global variables and local variables |
||||||
|
VIEW_GANTT_NOTES=view gantt |
||||||
|
SUB_PROCESS_INSTANCE_ID=sub process instance id |
||||||
|
TASK_NAME=task instance name |
||||||
|
TASK_INSTANCE_TAG=task instance related operation |
||||||
|
LOGGER_TAG=log related operation |
||||||
|
PROCESS_INSTANCE_TAG=process instance related operation |
||||||
|
EXECUTION_STATUS=running status for workflow and task nodes
||||||
|
HOST=ip address of running task |
||||||
|
START_DATE=start date |
||||||
|
END_DATE=end date |
||||||
|
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=query task list by process instance id |
||||||
|
UPDATE_DATA_SOURCE_NOTES=update data source |
||||||
|
DATA_SOURCE_ID=DATA SOURCE ID |
||||||
|
QUERY_DATA_SOURCE_NOTES=query data source by id |
||||||
|
QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=query data source list by database type |
||||||
|
QUERY_DATA_SOURCE_LIST_PAGING_NOTES=query data source list paging |
||||||
|
CONNECT_DATA_SOURCE_NOTES=CONNECT DATA SOURCE |
||||||
|
CONNECT_DATA_SOURCE_TEST_NOTES=connect data source test |
||||||
|
DELETE_DATA_SOURCE_NOTES=delete data source |
||||||
|
VERIFY_DATA_SOURCE_NOTES=verify data source |
||||||
|
UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source |
||||||
|
AUTHORIZED_DATA_SOURCE_NOTES=authorized data source |
@ -0,0 +1,216 @@ |
|||||||
|
QUERY_SCHEDULE_LIST_NOTES=查询定时列表 |
||||||
|
DESC=备注(描述) |
||||||
|
GROUP_NAME=组名称 |
||||||
|
GROUP_TYPE=组类型 |
||||||
|
QUERY_ALERT_GROUP_LIST_NOTES=告警组列表
||||||
|
|
||||||
|
UPDATE_ALERT_GROUP_NOTES=编辑(更新)告警组 |
||||||
|
DELETE_ALERT_GROUP_BY_ID_NOTES=删除告警组通过ID |
||||||
|
VERIFY_ALERT_GROUP_NAME_NOTES=检查告警组是否存在 |
||||||
|
GRANT_ALERT_GROUP_NOTES=授权告警组 |
||||||
|
USER_IDS=用户ID列表 |
||||||
|
ALERT_GROUP_TAG=告警组相关操作 |
||||||
|
WORKER_GROUP_TAG=Worker分组管理 |
||||||
|
SAVE_WORKER_GROUP_NOTES=创建Worker分组
||||||
|
|
||||||
|
WORKER_GROUP_NAME=Worker分组名称 |
||||||
|
WORKER_IP_LIST=Worker ip列表,注意:多个IP地址以逗号分割
||||||
|
|
||||||
|
QUERY_WORKER_GROUP_PAGING_NOTES=Worker分组管理 |
||||||
|
QUERY_WORKER_GROUP_LIST_NOTES=查询worker group分组 |
||||||
|
DELETE_WORKER_GROUP_BY_ID_NOTES=删除worker group通过ID |
||||||
|
DATA_ANALYSIS_TAG=任务状态分析相关操作 |
||||||
|
COUNT_TASK_STATE_NOTES=任务状态统计 |
||||||
|
COUNT_PROCESS_INSTANCE_NOTES=统计流程实例状态 |
||||||
|
COUNT_PROCESS_DEFINITION_BY_USER_NOTES=统计用户创建的流程定义 |
||||||
|
COUNT_COMMAND_STATE_NOTES=统计命令状态 |
||||||
|
COUNT_QUEUE_STATE_NOTES=统计队列里任务状态 |
||||||
|
ACCESS_TOKEN_TAG=access token相关操作,需要先登录 |
||||||
|
MONITOR_TAG=监控相关操作 |
||||||
|
MASTER_LIST_NOTES=master服务列表 |
||||||
|
WORKER_LIST_NOTES=worker服务列表 |
||||||
|
QUERY_DATABASE_STATE_NOTES=查询数据库状态 |
||||||
|
QUERY_ZOOKEEPER_STATE_NOTES=查询Zookeeper状态 |
||||||
|
TASK_STATE=任务实例状态 |
||||||
|
SOURCE_TABLE=源表 |
||||||
|
DEST_TABLE=目标表 |
||||||
|
TASK_DATE=任务时间 |
||||||
|
QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=分页查询历史任务记录列表 |
||||||
|
DATA_SOURCE_TAG=数据源相关操作 |
||||||
|
CREATE_DATA_SOURCE_NOTES=创建数据源 |
||||||
|
DATA_SOURCE_NAME=数据源名称 |
||||||
|
DATA_SOURCE_NOTE=数据源描述 |
||||||
|
DB_TYPE=数据源类型 |
||||||
|
DATA_SOURCE_HOST=IP主机名 |
||||||
|
DATA_SOURCE_PORT=数据源端口 |
||||||
|
DATABASE_NAME=数据库名 |
||||||
|
QUEUE_TAG=队列相关操作 |
||||||
|
QUERY_QUEUE_LIST_NOTES=查询队列列表 |
||||||
|
QUERY_QUEUE_LIST_PAGING_NOTES=分页查询队列列表 |
||||||
|
CREATE_QUEUE_NOTES=创建队列 |
||||||
|
YARN_QUEUE_NAME=hadoop yarn队列名 |
||||||
|
QUEUE_ID=队列ID |
||||||
|
TENANT_DESC=租户描述 |
||||||
|
QUERY_TENANT_LIST_PAGING_NOTES=分页查询租户列表 |
||||||
|
QUERY_TENANT_LIST_NOTES=查询租户列表 |
||||||
|
UPDATE_TENANT_NOTES=更新租户 |
||||||
|
DELETE_TENANT_NOTES=删除租户 |
||||||
|
RESOURCES_TAG=资源中心相关操作 |
||||||
|
CREATE_RESOURCE_NOTES=创建资源 |
||||||
|
RESOURCE_TYPE=资源文件类型 |
||||||
|
RESOURCE_NAME=资源文件名称 |
||||||
|
RESOURCE_DESC=资源文件描述 |
||||||
|
RESOURCE_FILE=资源文件 |
||||||
|
RESOURCE_ID=资源ID |
||||||
|
QUERY_RESOURCE_LIST_NOTES=查询资源列表 |
||||||
|
DELETE_RESOURCE_BY_ID_NOTES=删除资源通过ID |
||||||
|
VIEW_RESOURCE_BY_ID_NOTES=浏览资源通过ID
||||||
|
ONLINE_CREATE_RESOURCE_NOTES=在线创建资源 |
||||||
|
SUFFIX=资源文件后缀 |
||||||
|
CONTENT=资源文件内容 |
||||||
|
UPDATE_RESOURCE_NOTES=在线更新资源文件 |
||||||
|
DOWNLOAD_RESOURCE_NOTES=下载资源文件 |
||||||
|
CREATE_UDF_FUNCTION_NOTES=创建UDF函数 |
||||||
|
UDF_TYPE=UDF类型 |
||||||
|
FUNC_NAME=函数名称 |
||||||
|
CLASS_NAME=包名类名 |
||||||
|
ARG_TYPES=参数 |
||||||
|
UDF_DESC=udf描述,使用说明 |
||||||
|
VIEW_UDF_FUNCTION_NOTES=查看udf函数 |
||||||
|
UPDATE_UDF_FUNCTION_NOTES=更新udf函数 |
||||||
|
QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=分页查询udf函数列表 |
||||||
|
VERIFY_UDF_FUNCTION_NAME_NOTES=验证udf函数名 |
||||||
|
DELETE_UDF_FUNCTION_NOTES=删除UDF函数 |
||||||
|
AUTHORIZED_FILE_NOTES=授权文件 |
||||||
|
UNAUTHORIZED_FILE_NOTES=取消授权文件 |
||||||
|
AUTHORIZED_UDF_FUNC_NOTES=授权udf函数 |
||||||
|
UNAUTHORIZED_UDF_FUNC_NOTES=取消udf函数授权 |
||||||
|
VERIFY_QUEUE_NOTES=验证队列 |
||||||
|
TENANT_TAG=租户相关操作 |
||||||
|
CREATE_TENANT_NOTES=创建租户 |
||||||
|
TENANT_CODE=租户编码 |
||||||
|
TENANT_NAME=租户名称 |
||||||
|
QUEUE_NAME=队列名 |
||||||
|
PASSWORD=密码 |
||||||
|
DATA_SOURCE_OTHER=jdbc连接参数,格式为:{"key1":"value1",...} |
||||||
|
PROJECT_TAG=项目相关操作 |
||||||
|
CREATE_PROJECT_NOTES=创建项目 |
||||||
|
PROJECT_DESC=项目描述 |
||||||
|
UPDATE_PROJECT_NOTES=更新项目 |
||||||
|
PROJECT_ID=项目ID |
||||||
|
QUERY_PROJECT_BY_ID_NOTES=通过项目ID查询项目信息 |
||||||
|
QUERY_PROJECT_LIST_PAGING_NOTES=分页查询项目列表 |
||||||
|
DELETE_PROJECT_BY_ID_NOTES=删除项目通过ID |
||||||
|
QUERY_UNAUTHORIZED_PROJECT_NOTES=查询未授权的项目 |
||||||
|
QUERY_AUTHORIZED_PROJECT_NOTES=查询授权项目 |
||||||
|
TASK_RECORD_TAG=任务记录相关操作 |
||||||
|
QUERY_TASK_RECORD_LIST_PAGING_NOTES=分页查询任务记录列表 |
||||||
|
CREATE_TOKEN_NOTES=创建token,注意需要先登录 |
||||||
|
QUERY_ACCESS_TOKEN_LIST_NOTES=分页查询access token列表 |
||||||
|
SCHEDULE=定时 |
||||||
|
WARNING_TYPE=发送策略 |
||||||
|
WARNING_GROUP_ID=发送组ID |
||||||
|
FAILURE_STRATEGY=失败策略 |
||||||
|
RECEIVERS=收件人 |
||||||
|
RECEIVERS_CC=收件人(抄送) |
||||||
|
WORKER_GROUP_ID=Worker Server分组ID |
||||||
|
PROCESS_INSTANCE_PRIORITY=流程实例优先级 |
||||||
|
UPDATE_SCHEDULE_NOTES=更新定时 |
||||||
|
SCHEDULE_ID=定时ID |
||||||
|
ONLINE_SCHEDULE_NOTES=定时上线 |
||||||
|
OFFLINE_SCHEDULE_NOTES=定时下线 |
||||||
|
QUERY_SCHEDULE_NOTES=查询定时 |
||||||
|
QUERY_SCHEDULE_LIST_PAGING_NOTES=分页查询定时 |
||||||
|
LOGIN_TAG=用户登录相关操作 |
||||||
|
USER_NAME=用户名 |
||||||
|
PROJECT_NAME=项目名称 |
||||||
|
CREATE_PROCESS_DEFINITION_NOTES=创建流程定义 |
||||||
|
PROCESS_DEFINITION_NAME=流程定义名称 |
||||||
|
PROCESS_DEFINITION_JSON=流程定义详细信息(json格式) |
||||||
|
PROCESS_DEFINITION_LOCATIONS=流程定义节点坐标位置信息(json格式) |
||||||
|
PROCESS_INSTANCE_LOCATIONS=流程实例节点坐标位置信息(json格式) |
||||||
|
PROCESS_DEFINITION_CONNECTS=流程定义节点图标连接信息(json格式) |
||||||
|
PROCESS_INSTANCE_CONNECTS=流程实例节点图标连接信息(json格式) |
||||||
|
PROCESS_DEFINITION_DESC=流程定义描述信息 |
||||||
|
PROCESS_DEFINITION_TAG=流程定义相关操作 |
||||||
|
SIGNOUT_NOTES=退出登录 |
||||||
|
USER_PASSWORD=用户密码 |
||||||
|
UPDATE_PROCESS_INSTANCE_NOTES=更新流程实例 |
||||||
|
QUERY_PROCESS_INSTANCE_LIST_NOTES=查询流程实例列表 |
||||||
|
VERIFY_PROCCESS_DEFINITION_NAME_NOTES=验证流程定义名字 |
||||||
|
LOGIN_NOTES=用户登录 |
||||||
|
UPDATE_PROCCESS_DEFINITION_NOTES=更新流程定义 |
||||||
|
PROCESS_DEFINITION_ID=流程定义ID |
||||||
|
RELEASE_PROCCESS_DEFINITION_NOTES=发布流程定义 |
||||||
|
QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID |
||||||
|
QUERY_PROCCESS_DEFINITION_LIST_NOTES=查询流程定义列表 |
||||||
|
QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表 |
||||||
|
PAGE_NO=页码号 |
||||||
|
PROCESS_INSTANCE_ID=流程实例ID |
||||||
|
PROCESS_INSTANCE_JSON=流程实例信息(json格式) |
||||||
|
SCHEDULE_TIME=定时时间 |
||||||
|
SYNC_DEFINE=更新流程实例的信息是否同步到流程定义 |
||||||
|
RECOVERY_PROCESS_INSTANCE_FLAG=是否恢复流程实例 |
||||||
|
SEARCH_VAL=搜索值 |
||||||
|
USER_ID=用户ID |
||||||
|
PAGE_SIZE=页大小 |
||||||
|
LIMIT=显示多少条 |
||||||
|
VIEW_TREE_NOTES=树状图 |
||||||
|
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=获得任务节点列表通过流程定义ID |
||||||
|
PROCESS_DEFINITION_ID_LIST=流程定义id列表 |
||||||
|
QUERY_PROCESS_INSTANCE_BY_ID_NOTES=查询流程实例通过流程实例ID |
||||||
|
DELETE_PROCESS_INSTANCE_BY_ID_NOTES=删除流程实例通过流程实例ID |
||||||
|
TASK_ID=任务实例ID |
||||||
|
SKIP_LINE_NUM=忽略行数 |
||||||
|
QUERY_TASK_INSTANCE_LOG_NOTES=查询任务实例日志 |
||||||
|
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=下载任务实例日志 |
||||||
|
USERS_TAG=用户相关操作 |
||||||
|
SCHEDULER_TAG=定时相关操作 |
||||||
|
CREATE_SCHEDULE_NOTES=创建定时 |
||||||
|
CREATE_USER_NOTES=创建用户 |
||||||
|
TENANT_ID=租户ID |
||||||
|
QUEUE=使用的队列 |
||||||
|
EMAIL=邮箱 |
||||||
|
PHONE=手机号 |
||||||
|
QUERY_USER_LIST_NOTES=查询用户列表 |
||||||
|
UPDATE_USER_NOTES=更新用户 |
||||||
|
DELETE_USER_BY_ID_NOTES=删除用户通过ID |
||||||
|
GRANT_PROJECT_NOTES=授权项目 |
||||||
|
PROJECT_IDS=项目IDS(字符串格式,多个项目以","分割) |
||||||
|
GRANT_RESOURCE_NOTES=授权资源文件 |
||||||
|
RESOURCE_IDS=资源ID列表(字符串格式,多个资源ID以","分割) |
||||||
|
GET_USER_INFO_NOTES=获取用户信息 |
||||||
|
LIST_USER_NOTES=用户列表 |
||||||
|
VERIFY_USER_NAME_NOTES=验证用户名 |
||||||
|
UNAUTHORIZED_USER_NOTES=取消授权 |
||||||
|
ALERT_GROUP_ID=报警组ID |
||||||
|
AUTHORIZED_USER_NOTES=授权用户 |
||||||
|
GRANT_UDF_FUNC_NOTES=授权udf函数 |
||||||
|
UDF_IDS=udf函数id列表(字符串格式,多个udf函数ID以","分割) |
||||||
|
GRANT_DATASOURCE_NOTES=授权数据源 |
||||||
|
DATASOURCE_IDS=数据源ID列表(字符串格式,多个数据源ID以","分割) |
||||||
|
QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=查询子流程实例通过任务实例ID |
||||||
|
QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=查询父流程实例信息通过子流程实例ID |
||||||
|
QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=查询流程实例全局变量和局部变量 |
||||||
|
VIEW_GANTT_NOTES=浏览Gantt图 |
||||||
|
SUB_PROCESS_INSTANCE_ID=子流程实例ID
||||||
|
TASK_NAME=任务实例名 |
||||||
|
TASK_INSTANCE_TAG=任务实例相关操作 |
||||||
|
LOGGER_TAG=日志相关操作 |
||||||
|
PROCESS_INSTANCE_TAG=流程实例相关操作 |
||||||
|
EXECUTION_STATUS=工作流和任务节点的运行状态 |
||||||
|
HOST=运行任务的主机IP地址 |
||||||
|
START_DATE=开始时间 |
||||||
|
END_DATE=结束时间 |
||||||
|
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=通过流程实例ID查询任务列表 |
||||||
|
UPDATE_DATA_SOURCE_NOTES=更新数据源 |
||||||
|
DATA_SOURCE_ID=数据源ID |
||||||
|
QUERY_DATA_SOURCE_NOTES=查询数据源通过ID |
||||||
|
QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=查询数据源列表通过数据源类型 |
||||||
|
QUERY_DATA_SOURCE_LIST_PAGING_NOTES=分页查询数据源列表 |
||||||
|
CONNECT_DATA_SOURCE_NOTES=连接数据源 |
||||||
|
CONNECT_DATA_SOURCE_TEST_NOTES=连接数据源测试 |
||||||
|
DELETE_DATA_SOURCE_NOTES=删除数据源 |
||||||
|
VERIFY_DATA_SOURCE_NOTES=验证数据源 |
||||||
|
UNAUTHORIZED_DATA_SOURCE_NOTES=未授权的数据源 |
||||||
|
AUTHORIZED_DATA_SOURCE_NOTES=授权的数据源 |
@ -0,0 +1,42 @@ |
|||||||
|
<!-- Logback configuration. See http://logback.qos.ch/manual/index.html --> |
||||||
|
<configuration scan="true" scanPeriod="120 seconds"> |
||||||
|
<logger name="org.apache.zookeeper" level="WARN"/> |
||||||
|
<logger name="org.apache.hbase" level="WARN"/> |
||||||
|
<logger name="org.apache.hadoop" level="WARN"/> |
||||||
|
|
||||||
|
<property name="log.base" value="logs" /> |
||||||
|
|
||||||
|
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> |
||||||
|
<encoder> |
||||||
|
<pattern> |
||||||
|
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||||
|
</pattern> |
||||||
|
<charset>UTF-8</charset> |
||||||
|
</encoder> |
||||||
|
</appender> |
||||||
|
|
||||||
|
<appender name="APISERVERLOGFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||||
|
<!-- Log level filter --> |
||||||
|
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"> |
||||||
|
<level>INFO</level> |
||||||
|
</filter> |
||||||
|
<file>${log.base}/escheduler-api-server.log</file> |
||||||
|
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
||||||
|
<fileNamePattern>${log.base}/escheduler-api-server.%d{yyyy-MM-dd_HH}.%i.log</fileNamePattern> |
||||||
|
<maxHistory>168</maxHistory> |
||||||
|
<maxFileSize>64MB</maxFileSize> |
||||||
|
</rollingPolicy> |
||||||
|
|
||||||
|
<encoder> |
||||||
|
<pattern> |
||||||
|
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||||
|
</pattern> |
||||||
|
<charset>UTF-8</charset> |
||||||
|
</encoder> |
||||||
|
|
||||||
|
</appender> |
||||||
|
|
||||||
|
<root level="INFO"> |
||||||
|
<appender-ref ref="STDOUT" /> |
||||||
|
</root> |
||||||
|
</configuration> |
@ -1,98 +0,0 @@ |
|||||||
QUERY_SCHEDULE_LIST_NOTES=query schedule list |
|
||||||
SCHEDULE=schedule |
|
||||||
WARNING_TYPE=warning type(sending strategy) |
|
||||||
WARNING_GROUP_ID=warning group id |
|
||||||
FAILURE_STRATEGY=failure strategy |
|
||||||
RECEIVERS=receivers |
|
||||||
RECEIVERS_CC=receivers cc |
|
||||||
WORKER_GROUP_ID=worker server group id |
|
||||||
PROCESS_INSTANCE_PRIORITY=process instance priority |
|
||||||
UPDATE_SCHEDULE_NOTES=update schedule |
|
||||||
SCHEDULE_ID=schedule id |
|
||||||
ONLINE_SCHEDULE_NOTES=online schedule |
|
||||||
OFFLINE_SCHEDULE_NOTES=offline schedule |
|
||||||
QUERY_SCHEDULE_NOTES=query schedule |
|
||||||
QUERY_SCHEDULE_LIST_PAGING_NOTES=query schedule list paging |
|
||||||
LOGIN_TAG=User login related operations |
|
||||||
USER_NAME=user name |
|
||||||
PROJECT_NAME=project name |
|
||||||
CREATE_PROCESS_DEFINITION_NOTES=create process definition |
|
||||||
PROCESS_DEFINITION_NAME=process definition name |
|
||||||
PROCESS_DEFINITION_JSON=process definition detail info (json format) |
|
||||||
PROCESS_DEFINITION_LOCATIONS=process definition node locations info (json format) |
|
||||||
PROCESS_INSTANCE_LOCATIONS=process instance node locations info (json format) |
|
||||||
PROCESS_DEFINITION_CONNECTS=process definition node connects info (json format) |
|
||||||
PROCESS_INSTANCE_CONNECTS=process instance node connects info (json format) |
|
||||||
PROCESS_DEFINITION_DESC=process definition desc |
|
||||||
PROCESS_DEFINITION_TAG=process definition related operation |
|
||||||
SIGNOUT_NOTES=logout |
|
||||||
USER_PASSWORD=user password |
|
||||||
UPDATE_PROCESS_INSTANCE_NOTES=update process instance |
|
||||||
QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list |
|
||||||
VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify process definition name |
|
||||||
LOGIN_NOTES=user login |
|
||||||
UPDATE_PROCCESS_DEFINITION_NOTES=update process definition |
|
||||||
PROCESS_DEFINITION_ID=process definition id |
|
||||||
RELEASE_PROCCESS_DEFINITION_NOTES=release process definition |
|
||||||
QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query process definition by id |
|
||||||
QUERY_PROCCESS_DEFINITION_LIST_NOTES=query process definition list |
|
||||||
QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging |
|
||||||
PAGE_NO=page no |
|
||||||
PROCESS_INSTANCE_ID=process instance id |
|
||||||
PROCESS_INSTANCE_JSON=process instance info(json format) |
|
||||||
SCHEDULE_TIME=schedule time |
|
||||||
SYNC_DEFINE=update the information of the process instance to the process definition\ |
|
||||||
|
|
||||||
RECOVERY_PROCESS_INSTANCE_FLAG=whether to recover the process instance |
|
||||||
SEARCH_VAL=search val |
|
||||||
USER_ID=user id |
|
||||||
PAGE_SIZE=page size |
|
||||||
LIMIT=limit |
|
||||||
VIEW_TREE_NOTES=view tree |
|
||||||
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id |
|
||||||
PROCESS_DEFINITION_ID_LIST=PROCESS DEFINITION ID LIST |
|
||||||
QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id |
|
||||||
DELETE_PROCESS_INSTANCE_BY_ID_NOTES=delete process instance by process instance id |
|
||||||
TASK_ID=TASK INSTANCE ID |
|
||||||
SKIP_LINE_NUM=skip line num |
|
||||||
QUERY_TASK_INSTANCE_LOG_NOTES=QUERY TASK INSTANCE LOG |
|
||||||
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log |
|
||||||
USERS_TAG=users related operation |
|
||||||
SCHEDULER_TAG=scheduler related operation |
|
||||||
CREATE_SCHEDULE_NOTES=CREATE SCHEDULE |
|
||||||
CREATE_USER_NOTES=create user |
|
||||||
TENANT_ID=tenant id |
|
||||||
QUEUE=queue |
|
||||||
EMAIL=email |
|
||||||
PHONE=phone |
|
||||||
QUERY_USER_LIST_NOTES=query user list |
|
||||||
UPDATE_USER_NOTES=update user |
|
||||||
DELETE_USER_BY_ID_NOTES=delete user by id |
|
||||||
GRANT_PROJECT_NOTES=GRANT PROJECT |
|
||||||
PROJECT_IDS=project ids(string format, multiple projects separated by ",") |
|
||||||
GRANT_RESOURCE_NOTES=grant resource file |
|
||||||
RESOURCE_IDS=resource ids(string format, multiple resources separated by ",") |
|
||||||
GET_USER_INFO_NOTES=get user info |
|
||||||
LIST_USER_NOTES=list user |
|
||||||
VERIFY_USER_NAME_NOTES=verify user name |
|
||||||
UNAUTHORIZED_USER_NOTES=cancel authorization |
|
||||||
ALERT_GROUP_ID=alert group id |
|
||||||
AUTHORIZED_USER_NOTES=authorized user |
|
||||||
GRANT_UDF_FUNC_NOTES=grant udf function |
|
||||||
UDF_IDS=udf ids(string format, multiple udf functions separated by ",") |
|
||||||
GRANT_DATASOURCE_NOTES=GRANT DATASOURCE |
|
||||||
DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",") |
|
||||||
QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id |
|
||||||
QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=query parent process instance info by sub process instance id |
|
||||||
QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=query process instance global variables and local variables |
|
||||||
VIEW_GANTT_NOTES=view gantt |
|
||||||
SUB_PROCESS_INSTANCE_ID=sub process instance id |
|
||||||
TASK_NAME=task instance name |
|
||||||
TASK_INSTANCE_TAG=task instance related operation |
|
||||||
LOGGER_TAG=log related operation |
|
||||||
PROCESS_INSTANCE_TAG=process instance related operation |
|
||||||
EXECUTION_STATUS=running status for workflow and task nodes |
|
||||||
HOST=ip address of running task |
|
||||||
START_DATE=start date |
|
||||||
END_DATE=end date |
|
||||||
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=query task list by process instance id |
|
@ -1,98 +0,0 @@ |
|||||||
QUERY_SCHEDULE_LIST_NOTES=query schedule list |
|
||||||
SCHEDULE=schedule |
|
||||||
WARNING_TYPE=warning type(sending strategy) |
|
||||||
WARNING_GROUP_ID=warning group id |
|
||||||
FAILURE_STRATEGY=failure strategy |
|
||||||
RECEIVERS=receivers |
|
||||||
RECEIVERS_CC=receivers cc |
|
||||||
WORKER_GROUP_ID=worker server group id |
|
||||||
PROCESS_INSTANCE_PRIORITY=process instance priority |
|
||||||
UPDATE_SCHEDULE_NOTES=update schedule |
|
||||||
SCHEDULE_ID=schedule id |
|
||||||
ONLINE_SCHEDULE_NOTES=online schedule |
|
||||||
OFFLINE_SCHEDULE_NOTES=offline schedule |
|
||||||
QUERY_SCHEDULE_NOTES=query schedule |
|
||||||
QUERY_SCHEDULE_LIST_PAGING_NOTES=query schedule list paging |
|
||||||
LOGIN_TAG=User login related operations |
|
||||||
USER_NAME=user name |
|
||||||
PROJECT_NAME=project name |
|
||||||
CREATE_PROCESS_DEFINITION_NOTES=create process definition |
|
||||||
PROCESS_DEFINITION_NAME=process definition name |
|
||||||
PROCESS_DEFINITION_JSON=process definition detail info (json format) |
|
||||||
PROCESS_DEFINITION_LOCATIONS=process definition node locations info (json format) |
|
||||||
PROCESS_INSTANCE_LOCATIONS=process instance node locations info (json format) |
|
||||||
PROCESS_DEFINITION_CONNECTS=process definition node connects info (json format) |
|
||||||
PROCESS_INSTANCE_CONNECTS=process instance node connects info (json format) |
|
||||||
PROCESS_DEFINITION_DESC=process definition desc |
|
||||||
PROCESS_DEFINITION_TAG=process definition related operation |
|
||||||
SIGNOUT_NOTES=logout |
|
||||||
USER_PASSWORD=user password |
|
||||||
UPDATE_PROCESS_INSTANCE_NOTES=update process instance |
|
||||||
QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list |
|
||||||
VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify process definition name |
|
||||||
LOGIN_NOTES=user login |
|
||||||
UPDATE_PROCCESS_DEFINITION_NOTES=update process definition |
|
||||||
PROCESS_DEFINITION_ID=process definition id |
|
||||||
RELEASE_PROCCESS_DEFINITION_NOTES=release process definition |
|
||||||
QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query process definition by id |
|
||||||
QUERY_PROCCESS_DEFINITION_LIST_NOTES=query process definition list |
|
||||||
QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging |
|
||||||
PAGE_NO=page no |
|
||||||
PROCESS_INSTANCE_ID=process instance id |
|
||||||
PROCESS_INSTANCE_JSON=process instance info(json format) |
|
||||||
SCHEDULE_TIME=schedule time |
|
||||||
SYNC_DEFINE=update the information of the process instance to the process definition\ |
|
||||||
|
|
||||||
RECOVERY_PROCESS_INSTANCE_FLAG=whether to recover the process instance |
|
||||||
SEARCH_VAL=search val |
|
||||||
USER_ID=user id |
|
||||||
PAGE_SIZE=page size |
|
||||||
LIMIT=limit |
|
||||||
VIEW_TREE_NOTES=view tree |
|
||||||
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id |
|
||||||
PROCESS_DEFINITION_ID_LIST=PROCESS DEFINITION ID LIST |
|
||||||
QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id |
|
||||||
DELETE_PROCESS_INSTANCE_BY_ID_NOTES=delete process instance by process instance id |
|
||||||
TASK_ID=TASK INSTANCE ID |
|
||||||
SKIP_LINE_NUM=skip line num |
|
||||||
QUERY_TASK_INSTANCE_LOG_NOTES=QUERY TASK INSTANCE LOG |
|
||||||
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log |
|
||||||
USERS_TAG=users related operation |
|
||||||
SCHEDULER_TAG=scheduler related operation |
|
||||||
CREATE_SCHEDULE_NOTES=CREATE SCHEDULE |
|
||||||
CREATE_USER_NOTES=create user |
|
||||||
TENANT_ID=tenant id |
|
||||||
QUEUE=queue |
|
||||||
EMAIL=email |
|
||||||
PHONE=phone |
|
||||||
QUERY_USER_LIST_NOTES=query user list |
|
||||||
UPDATE_USER_NOTES=update user |
|
||||||
DELETE_USER_BY_ID_NOTES=delete user by id |
|
||||||
GRANT_PROJECT_NOTES=GRANT PROJECT |
|
||||||
PROJECT_IDS=project ids(string format, multiple projects separated by ",") |
|
||||||
GRANT_RESOURCE_NOTES=grant resource file |
|
||||||
RESOURCE_IDS=resource ids(string format, multiple resources separated by ",") |
|
||||||
GET_USER_INFO_NOTES=get user info |
|
||||||
LIST_USER_NOTES=list user |
|
||||||
VERIFY_USER_NAME_NOTES=verify user name |
|
||||||
UNAUTHORIZED_USER_NOTES=cancel authorization |
|
||||||
ALERT_GROUP_ID=alert group id |
|
||||||
AUTHORIZED_USER_NOTES=authorized user |
|
||||||
GRANT_UDF_FUNC_NOTES=grant udf function |
|
||||||
UDF_IDS=udf ids(string format, multiple udf functions separated by ",") |
|
||||||
GRANT_DATASOURCE_NOTES=GRANT DATASOURCE |
|
||||||
DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",") |
|
||||||
QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id |
|
||||||
QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=query parent process instance info by sub process instance id |
|
||||||
QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=query process instance global variables and local variables |
|
||||||
VIEW_GANTT_NOTES=view gantt |
|
||||||
SUB_PROCESS_INSTANCE_ID=sub process instance id |
|
||||||
TASK_NAME=task instance name |
|
||||||
TASK_INSTANCE_TAG=task instance related operation |
|
||||||
LOGGER_TAG=log related operation |
|
||||||
PROCESS_INSTANCE_TAG=process instance related operation |
|
||||||
EXECUTION_STATUS=running status for workflow and task nodes |
|
||||||
HOST=ip address of running task |
|
||||||
START_DATE=start date |
|
||||||
END_DATE=end date |
|
||||||
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=query task list by process instance id |
|
@ -1,97 +0,0 @@ |
|||||||
QUERY_SCHEDULE_LIST_NOTES=查询定时列表 |
|
||||||
SCHEDULE=定时 |
|
||||||
WARNING_TYPE=发送策略 |
|
||||||
WARNING_GROUP_ID=发送组ID |
|
||||||
FAILURE_STRATEGY=失败策略 |
|
||||||
RECEIVERS=收件人 |
|
||||||
RECEIVERS_CC=收件人(抄送) |
|
||||||
WORKER_GROUP_ID=Worker Server分组ID |
|
||||||
PROCESS_INSTANCE_PRIORITY=流程实例优先级 |
|
||||||
UPDATE_SCHEDULE_NOTES=更新定时 |
|
||||||
SCHEDULE_ID=定时ID |
|
||||||
ONLINE_SCHEDULE_NOTES=定时上线 |
|
||||||
OFFLINE_SCHEDULE_NOTES=定时下线 |
|
||||||
QUERY_SCHEDULE_NOTES=查询定时 |
|
||||||
QUERY_SCHEDULE_LIST_PAGING_NOTES=分页查询定时 |
|
||||||
LOGIN_TAG=用户登录相关操作 |
|
||||||
USER_NAME=用户名 |
|
||||||
PROJECT_NAME=项目名称 |
|
||||||
CREATE_PROCESS_DEFINITION_NOTES=创建流程定义 |
|
||||||
PROCESS_DEFINITION_NAME=流程定义名称 |
|
||||||
PROCESS_DEFINITION_JSON=流程定义详细信息(json格式) |
|
||||||
PROCESS_DEFINITION_LOCATIONS=流程定义节点坐标位置信息(json格式) |
|
||||||
PROCESS_INSTANCE_LOCATIONS=流程实例节点坐标位置信息(json格式) |
|
||||||
PROCESS_DEFINITION_CONNECTS=流程定义节点图标连接信息(json格式) |
|
||||||
PROCESS_INSTANCE_CONNECTS=流程实例节点图标连接信息(json格式) |
|
||||||
PROCESS_DEFINITION_DESC=流程定义描述信息 |
|
||||||
PROCESS_DEFINITION_TAG=流程定义相关操作 |
|
||||||
SIGNOUT_NOTES=退出登录 |
|
||||||
USER_PASSWORD=用户密码 |
|
||||||
UPDATE_PROCESS_INSTANCE_NOTES=更新流程实例 |
|
||||||
QUERY_PROCESS_INSTANCE_LIST_NOTES=查询流程实例列表 |
|
||||||
VERIFY_PROCCESS_DEFINITION_NAME_NOTES=验证流程定义名字 |
|
||||||
LOGIN_NOTES=用户登录 |
|
||||||
UPDATE_PROCCESS_DEFINITION_NOTES=更新流程定义 |
|
||||||
PROCESS_DEFINITION_ID=流程定义ID |
|
||||||
RELEASE_PROCCESS_DEFINITION_NOTES=发布流程定义 |
|
||||||
QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID |
|
||||||
QUERY_PROCCESS_DEFINITION_LIST_NOTES=查询流程定义列表 |
|
||||||
QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表 |
|
||||||
PAGE_NO=页码号 |
|
||||||
PROCESS_INSTANCE_ID=流程实例ID |
|
||||||
PROCESS_INSTANCE_JSON=流程实例信息(json格式) |
|
||||||
SCHEDULE_TIME=定时时间 |
|
||||||
SYNC_DEFINE=更新流程实例的信息是否同步到流程定义 |
|
||||||
RECOVERY_PROCESS_INSTANCE_FLAG=是否恢复流程实例 |
|
||||||
SEARCH_VAL=搜索值 |
|
||||||
USER_ID=用户ID |
|
||||||
PAGE_SIZE=页大小 |
|
||||||
LIMIT=显示多少条 |
|
||||||
VIEW_TREE_NOTES=树状图 |
|
||||||
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=获得任务节点列表通过流程定义ID |
|
||||||
PROCESS_DEFINITION_ID_LIST=流程定义id列表 |
|
||||||
QUERY_PROCESS_INSTANCE_BY_ID_NOTES=查询流程实例通过流程实例ID |
|
||||||
DELETE_PROCESS_INSTANCE_BY_ID_NOTES=删除流程实例通过流程实例ID |
|
||||||
TASK_ID=任务实例ID |
|
||||||
SKIP_LINE_NUM=忽略行数 |
|
||||||
QUERY_TASK_INSTANCE_LOG_NOTES=查询任务实例日志 |
|
||||||
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=下载任务实例日志 |
|
||||||
USERS_TAG=用户相关操作 |
|
||||||
SCHEDULER_TAG=定时相关操作 |
|
||||||
CREATE_SCHEDULE_NOTES=创建定时 |
|
||||||
CREATE_USER_NOTES=创建用户 |
|
||||||
TENANT_ID=租户ID |
|
||||||
QUEUE=使用的队列 |
|
||||||
EMAIL=邮箱 |
|
||||||
PHONE=手机号 |
|
||||||
QUERY_USER_LIST_NOTES=查询用户列表 |
|
||||||
UPDATE_USER_NOTES=更新用户 |
|
||||||
DELETE_USER_BY_ID_NOTES=删除用户通过ID |
|
||||||
GRANT_PROJECT_NOTES=授权项目 |
|
||||||
PROJECT_IDS=项目IDS(字符串格式,多个项目以","分割) |
|
||||||
GRANT_RESOURCE_NOTES=授权资源文件 |
|
||||||
RESOURCE_IDS=资源ID列表(字符串格式,多个资源ID以","分割) |
|
||||||
GET_USER_INFO_NOTES=获取用户信息 |
|
||||||
LIST_USER_NOTES=用户列表 |
|
||||||
VERIFY_USER_NAME_NOTES=验证用户名 |
|
||||||
UNAUTHORIZED_USER_NOTES=取消授权 |
|
||||||
ALERT_GROUP_ID=报警组ID |
|
||||||
AUTHORIZED_USER_NOTES=授权用户 |
|
||||||
GRANT_UDF_FUNC_NOTES=授权udf函数 |
|
||||||
UDF_IDS=udf函数id列表(字符串格式,多个udf函数ID以","分割) |
|
||||||
GRANT_DATASOURCE_NOTES=授权数据源 |
|
||||||
DATASOURCE_IDS=数据源ID列表(字符串格式,多个数据源ID以","分割) |
|
||||||
QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=查询子流程实例通过任务实例ID |
|
||||||
QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=查询父流程实例信息通过子流程实例ID |
|
||||||
QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=查询流程实例全局变量和局部变量 |
|
||||||
VIEW_GANTT_NOTES=浏览Gantt图 |
|
||||||
SUB_PROCESS_INSTANCE_ID=子流程实例ID |
|
||||||
TASK_NAME=任务实例名 |
|
||||||
TASK_INSTANCE_TAG=任务实例相关操作 |
|
||||||
LOGGER_TAG=日志相关操作 |
|
||||||
PROCESS_INSTANCE_TAG=流程实例相关操作 |
|
||||||
EXECUTION_STATUS=工作流和任务节点的运行状态 |
|
||||||
HOST=运行任务的主机IP地址 |
|
||||||
START_DATE=开始时间 |
|
||||||
END_DATE=结束时间 |
|
||||||
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=通过流程实例ID查询任务列表 |
|
Loading…
Reference in new issue