
[DSIP-50][UDF Manage] Remove unused udf manage function (#16212)

* Remove unused udf manage function
Branch: dev
xiangzihao authored 4 months ago, committed by GitHub
Commit: e8566818b0
1. .github/workflows/e2e.yml (4)
2. docs/configs/docsdev.js (8)
3. docs/docs/en/architecture/metadata.md (2)
4. docs/docs/en/architecture/task-structure.md (2)
5. docs/docs/en/contribute/frontend-development.md (3)
6. docs/docs/en/guide/resource/configuration.md (2)
7. docs/docs/en/guide/resource/intro.md (2)
8. docs/docs/en/guide/resource/udf-manage.md (45)
9. docs/docs/en/guide/security/security.md (6)
10. docs/docs/en/guide/task/sql.md (7)
11. docs/docs/en/guide/upgrade/incompatible.md (4)
12. docs/docs/zh/architecture/metadata.md (2)
13. docs/docs/zh/architecture/task-structure.md (2)
14. docs/docs/zh/contribute/frontend-development.md (3)
15. docs/docs/zh/guide/resource/configuration.md (2)
16. docs/docs/zh/guide/resource/intro.md (2)
17. docs/docs/zh/guide/resource/udf-manage.md (46)
18. docs/docs/zh/guide/security/security.md (6)
19. docs/docs/zh/guide/task/sql.md (2)
20. docs/docs/zh/guide/upgrade/incompatible.md (4)
21. docs/img/new_ui/dev/resource/create-udf.png (BIN)
22. docs/img/new_ui/dev/resource/demo/udf-demo01.png (BIN)
23. docs/img/new_ui/dev/resource/demo/udf-demo02.png (BIN)
24. docs/img/new_ui/dev/resource/demo/udf-demo03.png (BIN)
25. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorUtils.java (13)
26. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java (1)
27. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java (11)
28. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/UdfFunctionAuditOperatorImpl.java (46)
29. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/constants/ApiFuncIdentificationConstant.java (12)
30. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java (193)
31. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java (86)
32. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (15)
33. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/permission/ResourcePermissionCheckServiceImpl.java (30)
34. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java (118)
35. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java (10)
36. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java (4)
37. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java (399)
38. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java (72)
39. dolphinscheduler-api/src/main/resources/i18n/messages.properties (16)
40. dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties (16)
41. dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties (16)
42. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java (178)
43. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java (18)
44. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/permission/UdfFuncPermissionCheckTest.java (94)
45. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java (306)
46. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java (31)
47. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java (6)
48. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditModelType.java (3)
49. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java (1)
50. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java (59)
51. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UDFUser.java (61)
52. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java (157)
53. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java (4)
54. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.java (44)
55. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java (118)
56. dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.xml (29)
57. dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml (189)
58. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/UdfFuncTest.java (50)
59. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java (184)
60. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java (280)
61. dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FunctionManageE2ETest.java (188)
62. dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UdfManageE2ETest.java (229)
63. dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/FunctionManagePage.java (201)
64. dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/ResourcePage.java (22)
65. dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/UdfManagePage.java (197)
66. dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java (19)
67. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java (3)
68. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java (79)
69. dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java (2)
70. dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResourceType.java (18)
71. dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/AbstractStorageOperator.java (5)
72. dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperatorTest.java (7)
73. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/SQLTaskExecutionContext.java (16)
74. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/ResourceType.java (2)
75. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/UdfType.java (56)
76. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParameters.java (38)
77. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/AbstractResourceParameters.java (3)
78. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/UdfFuncParameters.java (133)
79. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParametersTest.java (3)
80. dolphinscheduler-task-plugin/dolphinscheduler-task-remoteshell/src/test/java/org/apache/dolphinscheduler/plugin/task/remoteshell/RemoteShellTaskTest.java (2)
81. dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java (95)
82. dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/resource/MigrateResourceService.java (20)
83. dolphinscheduler-ui/src/layouts/content/components/sidebar/index.tsx (1)
84. dolphinscheduler-ui/src/layouts/content/components/user/use-dropdown.ts (1)
85. dolphinscheduler-ui/src/layouts/content/use-dataList.ts (16)
86. dolphinscheduler-ui/src/locales/en_US/menu.ts (1)
87. dolphinscheduler-ui/src/locales/en_US/project.ts (4)
88. dolphinscheduler-ui/src/locales/en_US/resource.ts (19)
89. dolphinscheduler-ui/src/locales/en_US/security.ts (4)
90. dolphinscheduler-ui/src/locales/zh_CN/menu.ts (1)
91. dolphinscheduler-ui/src/locales/zh_CN/project.ts (3)
92. dolphinscheduler-ui/src/locales/zh_CN/resource.ts (17)
93. dolphinscheduler-ui/src/locales/zh_CN/security.ts (4)
94. dolphinscheduler-ui/src/router/modules/resources.ts (34)
95. dolphinscheduler-ui/src/service/modules/resources/index.ts (108)
96. dolphinscheduler-ui/src/service/modules/resources/types.ts (15)
97. dolphinscheduler-ui/src/service/modules/users/index.ts (9)
98. dolphinscheduler-ui/src/service/modules/users/types.ts (5)
99. dolphinscheduler-ui/src/service/service.ts (1)
100. dolphinscheduler-ui/src/store/user/types.ts (1)

Some files were not shown because too many files have changed in this diff.

4
.github/workflows/e2e.yml

@ -114,10 +114,6 @@ jobs:
# class: org.apache.dolphinscheduler.e2e.cases.WorkflowSwitchE2ETest
- name: FileManage
class: org.apache.dolphinscheduler.e2e.cases.FileManageE2ETest
- name: UdfManage
class: org.apache.dolphinscheduler.e2e.cases.UdfManageE2ETest
- name: FunctionManage
class: org.apache.dolphinscheduler.e2e.cases.FunctionManageE2ETest
- name: MysqlDataSource
class: org.apache.dolphinscheduler.e2e.cases.MysqlDataSourceE2ETest
- name: ClickhouseDataSource

8
docs/configs/docsdev.js

@ -432,10 +432,6 @@ export default {
title: 'File Manage',
link: '/en-us/docs/dev/user_doc/guide/resource/file-manage.html'
},
{
title: 'UDF Manage',
link: '/en-us/docs/dev/user_doc/guide/resource/udf-manage.html'
},
{
title: 'Task Group Manage',
link: '/en-us/docs/dev/user_doc/guide/resource/task-group.html'
@ -1153,10 +1149,6 @@ export default {
title: '文件管理',
link: '/zh-cn/docs/dev/user_doc/guide/resource/file-manage.html'
},
{
title: 'UDF 管理',
link: '/zh-cn/docs/dev/user_doc/guide/resource/udf-manage.html'
},
{
title: '任务组管理',
link: '/zh-cn/docs/dev/user_doc/guide/resource/task-group.html'

2
docs/docs/en/architecture/metadata.md

@ -22,8 +22,6 @@ see sql files in `dolphinscheduler/dolphinscheduler-dao/src/main/resources/sql`
- User can have multiple projects, user project authorization completes the relationship binding using `project_id` and `user_id` in `t_ds_relation_project_user` table.
- The `user_id` in the `t_ds_projcet` table represents the user who create the project, and the `user_id` in the `t_ds_relation_project_user` table represents users who have permission to the project.
- The `user_id` in the `t_ds_resources` table represents the user who create the resource, and the `user_id` in `t_ds_relation_resources_user` represents the user who has permissions to the resource.
- The `user_id` in the `t_ds_udfs` table represents the user who create the UDF, and the `user_id` in the `t_ds_relation_udfs_user` table represents a user who has permission to the UDF.
### Project - Tenant - ProcessDefinition - Schedule

2
docs/docs/en/architecture/task-structure.md

@ -146,7 +146,6 @@ No.|parameter name||type|description |note
5| |type |String |database type
6| |datasource |Int |datasource id
7| |sql |String |query SQL statement
8| |udfs | String| udf functions|specify UDF function ids, separate by comma
9| |sqlType | String| SQL node type |0 for query and 1 for none-query SQL
10| |title |String | mail title
11| |receivers |String |receivers
@ -180,7 +179,6 @@ No.|parameter name||type|description |note
"type":"MYSQL",
"datasource":1,
"sql":"select id , namge , age from emp where id = ${id}",
"udfs":"",
"sqlType":"0",
"title":"xxxx@xxx.com",
"receivers":"xxxx@xxx.com",

3
docs/docs/en/contribute/frontend-development.md

@ -163,9 +163,6 @@ Resource Management => `http://localhost:8888/#/resource/file`
```
| File Management
| udf Management
- Resource Management
- Function management
```
Data Source Management => `http://localhost:8888/#/datasource/list`

2
docs/docs/en/guide/resource/configuration.md

@ -1,6 +1,6 @@
# Resource Center Configuration
- You could use `Resource Center` to upload text files, UDFs and other task-related files.
- You could use `Resource Center` to upload text files and other task-related files.
- You could configure `Resource Center` to use distributed file system like [Hadoop](https://hadoop.apache.org/docs/r2.7.0/) (2.6+), [MinIO](https://github.com/minio/minio) cluster or remote storage products like [AWS S3](https://aws.amazon.com/s3/), [Alibaba Cloud OSS](https://www.aliyun.com/product/oss), [Huawei Cloud OBS](https://support.huaweicloud.com/obs/index.html) etc.
- You could configure `Resource Center` to use local file system. If you deploy `DolphinScheduler` in `Standalone` mode, you could configure it to use local file system for `Resource Center` without the need of an external `HDFS` system or `S3`.
- Furthermore, if you deploy `DolphinScheduler` in `Cluster` mode, you could use [S3FS-FUSE](https://github.com/s3fs-fuse/s3fs-fuse) to mount `S3` or [JINDO-FUSE](https://help.aliyun.com/document_detail/187410.html) to mount `OSS` to your machines and use the local file system for `Resource Center`. In this way, you could operate remote files as if on your local machines.

2
docs/docs/en/guide/resource/intro.md

@ -1,5 +1,5 @@
# Resource Center Introduction
The Resource Center is typically used for uploading files, UDF functions, and task group management. For a stand-alone
The Resource Center is typically used for uploading files and task group management. For a stand-alone
environment, you can select the local file directory as the upload folder (**this operation does not require Hadoop or HDFS deployment**).
Of course, you can also choose to upload to Hadoop or MinIO cluster. In this case, you need to have Hadoop (2.6+) or MinIO and other related environments.

45
docs/docs/en/guide/resource/udf-manage.md

@ -1,45 +0,0 @@
# UDF Manage
## Resource Management
- The resource management and file management functions are similar. The difference is that the resource management is the UDF upload function, and the file management uploads the user programs, scripts and configuration files.
- It mainly includes the following operations: rename, download, delete, etc.
- Upload UDF resources: Same as uploading files.
## Function Management
### Create UDF function
Click `Create UDF Function`, enter the UDF function parameters, select the UDF resource, and click `Submit` to create the UDF function.
Currently only temporary UDF functions for HIVE are supported.
- UDF function name: Enter the name of the UDF function.
- Package name Class name: Enter the full path of the UDF function.
- UDF resource: Set the resource file corresponding to the created UDF function.
![create-udf](../../../../img/new_ui/dev/resource/create-udf.png)
## Example
### Write UDF functions
Users can customize the desired UDF function according to actual production requirements. Here's a function that appends "HelloWorld" to the end of any string. As shown below:
![code-udf](../../../../img/new_ui/dev/resource/demo/udf-demo01.png)
### Configure the UDF function
Before configuring UDF functions, you need to upload the required function jar package through resource management. Then enter the function management and configure the relevant information. As shown below:
![conf-udf](../../../../img/new_ui/dev/resource/demo/udf-demo02.png)
### Use UDF functions
In the process of using UDF functions, users only need to pay attention to the specific function writing, and upload the configuration through the resource center. The system will automatically configure the create function statement, refer to the following: [SqlTask](https://github.com/apache/dolphinscheduler/blob/923f3f38e3271d7f1d22b3abc3497cecb6957e4a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java#L507-L531)
Enter the workflow to define an SQL node, the data source type is HIVE, and the data source instance type is HIVE/IMPALA.
- SQL statement: `select HwUdf("abc");` This function is used in the same way as the built-in functions, and can be accessed directly using the function name.
- UDF function: Select the one configured for the resource center.
![use-udf](../../../../img/new_ui/dev/resource/demo/udf-demo03.png)
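For reference, the guide removed above walks through writing a Hive UDF that appends "HelloWorld" to its input, uploading the jar through resource management, and calling it from a SQL task. Below is a minimal sketch of such a function, assuming Hive's legacy `org.apache.hadoop.hive.ql.exec.UDF` API; the package name is only illustrative (the removed doc's example calls the function `HwUdf`).

```java
package com.example.udf; // hypothetical package; the removed guide only names the function HwUdf

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

/**
 * Appends "HelloWorld" to any input string, matching the example described in the
 * removed udf-manage guide. Uses Hive's legacy UDF API for brevity.
 */
public class HwUdf extends UDF {

    // Hive resolves evaluate() by reflection; null input yields null output.
    public Text evaluate(final Text input) {
        if (input == null) {
            return null;
        }
        return new Text(input.toString() + "HelloWorld");
    }
}
```

Packaged into a jar, this is the kind of resource the removed pages expected users to upload before registering the function entry.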

6
docs/docs/en/guide/security/security.md

@ -103,8 +103,8 @@ public void doPOSTParam()throws Exception{
## Granted Permissions
* Granted permissions include project permissions, resource permissions, data source permissions, and UDF function permissions.
* Administrators can authorize projects, resources, data sources, and UDF functions that ordinary users do not create. Because the authorization methods of projects, resources, data sources and UDF functions are all the same, the project authorization is used as an example to introduce.
* Granted permissions include project permissions, resource permissions, data source permissions.
* Administrators can authorize projects, resources, data sources that ordinary users do not create. Because the authorization methods of projects, resources, data sources are all the same, the project authorization is used as an example to introduce.
* Note: For projects created by the user, the user has all permissions. Therefore, permission changes to projects created by users themselves are not valid.
- The administrator enters the `Security Center -> User Management` page, and clicks the "Authorize" button of the user to be authorized, as shown in the following figure:
@ -118,7 +118,7 @@ public void doPOSTParam()throws Exception{
![no-permission-error](../../../../img/new_ui/dev/security/no-permission-error.png)
- Resources, data sources, and UDF function authorization are the same as project authorization.
- Resources, data sources authorization are the same as project authorization.
## Worker Grouping

7
docs/docs/en/guide/task/sql.md

@ -21,12 +21,11 @@ Refer to [datasource-setting](../howto/datasource-setting.md) `DataSource Center
- Please refer to [DolphinScheduler Task Parameters Appendix](appendix.md) `Default Task Parameters` section for default parameters.
| **Parameter** | **Description** |
|-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---|
| Data source | Select the corresponding DataSource. |
| SQL type | Supports query and non-query. <ul><li>Query: supports `DML select` type commands, which return a result set. You can specify three templates for email notification as form, attachment or form attachment;</li><li>Non-query: support `DDL` all commands and `DML update, delete, insert` three types of commands;<ul><li>Segmented execution symbol: When the data source does not support executing multiple SQL statements at a time, the symbol for splitting SQL statements is provided to call the data source execution method multiple times. Example: 1. When the Hive data source is selected as the data source, please do not use `;\n` due to Hive JDBC does not support executing multiple SQL statements at one time; 2. When the MySQL data source is selected as the data source, and multi-segment SQL statements are to be executed, this parameter needs to be filled in with a semicolon `;. Because the MySQL data source does not support executing multiple SQL statements at one time.</li></ul></li></ul> |
| SQL parameter | The input parameter format is `key1=value1;key2=value2...`. |
| SQL statement | SQL statement. |
| UDF function | For Hive DataSources, you can refer to UDF functions created in the resource center, but other DataSource do not support UDF functions. |
| SQL statement | SQL statement. | |
| Custom parameters | SQL task type, and stored procedure is a custom parameter order, to set customized parameter type and data type for the method is the same as the stored procedure task type. The difference is that the custom parameter of the SQL task type replaces the `${variable}` in the SQL statement. |
| Pre-SQL | Pre-SQL executes before the SQL statement. |
| Post-SQL | Post-SQL executes after the SQL statement. |
@ -57,5 +56,3 @@ Table created in the Pre-SQL, after use in the SQL statement, cleaned in the Pos
## Note
Pay attention to the selection of SQL type. If it is an insert operation, need to change to "Non-Query" type.
To compatible with long session,UDF function are created by the syntax(CREATE OR REPLACE)
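The removed lines above note that, for Hive data sources, referenced UDF functions were registered automatically by the scheduler (see the SqlTask code linked from the removed udf-manage guide) using CREATE OR REPLACE syntax to stay compatible with long sessions. The sketch below is illustrative only and is not the SqlTask implementation: it registers a session-scoped Hive function by hand over JDBC, with a hypothetical HiveServer2 URL, jar path, and function/class names.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveUdfRegistrationSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical HiveServer2 endpoint; requires the hive-jdbc driver on the classpath.
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {

            // Make the uploaded resource jar visible to this session (path is a placeholder).
            stmt.execute("ADD JAR hdfs:///dolphinscheduler/resources/hw-udf.jar");

            // The removed docs state the scheduler generated this statement itself and used
            // CREATE OR REPLACE for long-session compatibility; a plain temporary function
            // is created here for brevity.
            stmt.execute("CREATE TEMPORARY FUNCTION HwUdf AS 'com.example.udf.HwUdf'");

            // The function is then used like a built-in, as in the removed guide's example.
            try (ResultSet rs = stmt.executeQuery("SELECT HwUdf('abc')")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        }
    }
}
```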

4
docs/docs/en/guide/upgrade/incompatible.md

@ -26,3 +26,7 @@ This document records the incompatible updates between each version. You need to
* Add required field `database` in /datasources/tables && /datasources/tableColumns Api [#14406](https://github.com/apache/dolphinscheduler/pull/14406)
## 3.3.0
* Remove the `udf-manage` function from the `resource center` ([#16209])

2
docs/docs/zh/architecture/metadata.md

@ -20,8 +20,6 @@
- 一个用户可以有多个项目,用户项目授权通过`t_ds_relation_project_user`表完成project_id和user_id的关系绑定;<br />
- `t_ds_projcet`表中的`user_id`表示创建该项目的用户,`t_ds_relation_project_user`表中的`user_id`表示对项目有权限的用户;<br />
- `t_ds_resources`表中的`user_id`表示创建该资源的用户,`t_ds_relation_resources_user`中的`user_id`表示对资源有权限的用户;<br />
- `t_ds_udfs`表中的`user_id`表示创建该UDF的用户,`t_ds_relation_udfs_user`表中的`user_id`表示对UDF有权限的用户;<br />
### 项目 - 租户 - 工作流定义 - 定时

2
docs/docs/zh/architecture/task-structure.md

@ -145,7 +145,6 @@
5| |type |String | 数据库类型
6| |datasource |Int | 数据源id
7| |sql |String | 查询SQL语句
8| |udfs | String| udf函数|UDF函数id,以逗号分隔.
9| |sqlType | String| SQL节点类型 |0 查询 , 1 非查询
10| |title |String | 邮件标题
11| |receivers |String | 收件人
@ -179,7 +178,6 @@
"type":"MYSQL",
"datasource":1,
"sql":"select id , namge , age from emp where id = ${id}",
"udfs":"",
"sqlType":"0",
"title":"xxxx@xxx.com",
"receivers":"xxxx@xxx.com",

3
docs/docs/zh/contribute/frontend-development.md

@ -163,9 +163,6 @@ npm install node-sass --unsafe-perm #单独安装node-sass依赖
```
| 文件管理
| UDF管理
- 资源管理
- 函数管理
```
数据源管理 => `http://localhost:8888/#/datasource/list`

2
docs/docs/zh/guide/resource/configuration.md

@ -1,6 +1,6 @@
# 资源中心配置详情
- 资源中心通常用于上传文件、UDF 函数,以及任务组管理等操作。
- 资源中心通常用于上传文件以及任务组管理等操作。
- 资源中心可以对接分布式的文件存储系统,如[Hadoop](https://hadoop.apache.org/docs/r2.7.0/)(2.6+)或者[MinIO](https://github.com/minio/minio)集群,也可以对接远端的对象存储,如[AWS S3](https://aws.amazon.com/s3/)或者[阿里云 OSS](https://www.aliyun.com/product/oss),[华为云 OBS](https://support.huaweicloud.com/obs/index.html) 等。
- 资源中心也可以直接对接本地文件系统。在单机模式下,您无需依赖`Hadoop`或`S3`一类的外部存储系统,可以方便地对接本地文件系统进行体验。
- 除此之外,对于集群模式下的部署,您可以通过使用[S3FS-FUSE](https://github.com/s3fs-fuse/s3fs-fuse)将`S3`挂载到本地,或者使用[JINDO-FUSE](https://help.aliyun.com/document_detail/187410.html)将`OSS`挂载到本地等,再用资源中心对接本地文件系统方式来操作远端对象存储中的文件。

2
docs/docs/zh/guide/resource/intro.md

@ -1,4 +1,4 @@
# 资源中心简介
资源中心通常用于上传文件、UDF 函数和任务组管理。 对于 standalone 环境,可以选择本地文件目录作为上传文件夹(此操作不需要Hadoop部署)。当然,你也可以
资源中心通常用于上传文件和任务组管理。 对于 standalone 环境,可以选择本地文件目录作为上传文件夹(此操作不需要Hadoop部署)。当然,你也可以
选择上传到 Hadoop 或者 MinIO 集群。 在这种情况下,您需要有 Hadoop(2.6+)或 MinIO 等相关环境。

46
docs/docs/zh/guide/resource/udf-manage.md

@ -1,46 +0,0 @@
# UDF 管理
- 资源管理和文件管理功能类似,不同之处是资源管理是上传的 UDF 函数,文件管理上传的是用户程序,脚本及配置文件。
- 主要包括以下操作:重命名、下载、删除等。
* 上传 UDF 资源
> 和上传文件相同。
## 函数管理
* 创建 UDF 函数
> 点击“创建 UDF 函数”,输入 UDF 函数参数,选择udf资源,点击“提交”,创建 UDF 函数。
> 目前只支持 HIVE 的临时 UDF 函数
- UDF 函数名称:输入 UDF 函数时的名称
- 包名类名:输入 UDF 函数的全路径
- UDF 资源:设置创建的 UDF 对应的资源文件
![create-udf](../../../../img/new_ui/dev/resource/create-udf.png)
## 任务样例
### 编写 UDF 函数
用户可以根据实际生产需求,自定义想要的 UDF 函数。这里编写一个在任意字符串的末尾添加 "HelloWorld" 的函数。如下图所示:
![code-udf](../../../../img/new_ui/dev/resource/demo/udf-demo01.png)
### 配置 UDF 函数
配置 UDF 函数前,需要先通过资源管理上传所需的函数 jar 包。然后进入函数管理,配置相关信息即可。如下图所示:
![conf-udf](../../../../img/new_ui/dev/resource/demo/udf-demo02.png)
### 使用 UDF 函数
在使用 UDF 函数过程中,用户只需关注具体的函数编写,通过资源中心上传配置完成即可。系统会自动配置 create function 语句,参考如下:[SqlTask](https://github.com/apache/dolphinscheduler/blob/923f3f38e3271d7f1d22b3abc3497cecb6957e4a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java#L507-L531)
进入工作流定义一个 SQL 节点,数据源类型选择为 HIVE,数据源实例类型为 HIVE/IMPALA。
- SQL 语句:`select HwUdf("abc");` 该函数与内置函数使用方式一样,直接使用函数名称即可访问。
- UDF 函数:选择资源中心所配置的即可。
![use-udf](../../../../img/new_ui/dev/resource/demo/udf-demo03.png)

6
docs/docs/zh/guide/security/security.md

@ -97,8 +97,8 @@
## 授予权限
* 授予权限包括项目权限,资源权限,数据源权限,UDF函数权限,k8s命名空间。
* 管理员可以对普通用户进行非其创建的项目、资源、数据源、UDF函数、k8s命名空间。因为项目、资源、数据源、UDF函数、k8s命名空间授权方式都是一样的,所以以项目授权为例介绍。
* 授予权限包括项目权限,数据源权限和k8s命名空间。
* 管理员可以对普通用户进行非其创建的项目、数据源k8s命名空间。因为项目、数据源、k8s命名空间授权方式都是一样的,所以以项目授权为例介绍。
* 注意:对于用户自己创建的项目,该用户默认拥有所有的权限,因此对用户自己创建的项目进行权限变更是无效的。
- 管理员进入`安全中心->用户管理页面`,点击需授权用户的“授权”按钮,如下图所示:
@ -112,7 +112,7 @@
![no-permission-error](../../../../img/new_ui/dev/security/no-permission-error.png)
- 资源、数据源、UDF 函数授权同项目授权。
- 数据源授权同项目授权。
## Worker 分组

2
docs/docs/zh/guide/task/sql.md

@ -26,7 +26,6 @@ SQL任务类型,用于连接数据库并执行相应SQL。
- 默认采用`;\n`作为SQL分隔符,拆分成多段SQL语句执行。Hive的JDBC不支持一次执行多段SQL语句,请不要使用`;\n`。
- sql参数:输入参数格式为key1=value1;key2=value2…
- sql语句:SQL语句
- UDF函数:对于HIVE类型的数据源,可以引用资源中心中创建的UDF函数,其他类型的数据源暂不支持UDF函数。
- 自定义参数:SQL任务类型,而存储过程是自定义参数顺序,给方法设置值自定义参数类型和数据类型,同存储过程任务类型一样。区别在于SQL任务类型自定义参数会替换sql语句中${变量}。
- 前置sql:前置sql在sql语句之前执行。
- 后置sql:后置sql在sql语句之后执行。
@ -57,5 +56,4 @@ SQL任务类型,用于连接数据库并执行相应SQL。
## 注意事项
* 注意SQL类型的选择,如果是INSERT等操作需要选择非查询类型。
* 为了兼容长会话情况,UDF函数的创建是通过CREATE OR REPLACE语句

4
docs/docs/zh/guide/upgrade/incompatible.md

@ -24,3 +24,7 @@
* 在 /datasources/tables && /datasources/tableColumns 接口中添加了必选字段`database` [#14406](https://github.com/apache/dolphinscheduler/pull/14406)
## 3.3.0
* 从 `资源中心` 中移除了 `udf-manage` 功能 ([#16209])

BIN
docs/img/new_ui/dev/resource/create-udf.png

Binary file not shown (before: 79 KiB).

BIN
docs/img/new_ui/dev/resource/demo/udf-demo01.png

Binary file not shown (before: 16 KiB).

BIN
docs/img/new_ui/dev/resource/demo/udf-demo02.png

Binary file not shown (before: 82 KiB).

BIN
docs/img/new_ui/dev/resource/demo/udf-demo03.png

Binary file not shown (before: 132 KiB).

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorUtils.java

@ -26,7 +26,6 @@ import org.apache.dolphinscheduler.common.enums.AuditOperationType;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.dao.entity.AuditLog;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import java.lang.reflect.Field;
import java.util.ArrayList;
@ -172,23 +171,13 @@ public class OperatorUtils {
return map;
}
public static boolean isUdfResource(Map<String, Object> paramsMap) {
ResourceType resourceType = (ResourceType) paramsMap.get(Constants.STRING_PLUGIN_PARAM_TYPE);
return resourceType != null && resourceType.equals(ResourceType.UDF);
}
public static boolean isFolder(String name) {
return name != null && name.endsWith("/");
}
public static String getFileAuditObject(AuditType auditType, Map<String, Object> paramsMap, String name) {
boolean isUdfResource = isUdfResource(paramsMap);
boolean isFolder = auditType == AuditType.FOLDER_CREATE || isFolder(name);
if (isUdfResource) {
return isFolder ? AuditModelType.UDF_FOLDER.getName() : AuditModelType.UDF_FILE.getName();
} else {
return isFolder ? AuditModelType.FOLDER.getName() : AuditModelType.FILE.getName();
}
return isFolder ? AuditModelType.FOLDER.getName() : AuditModelType.FILE.getName();
}
}

1
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java

@ -43,6 +43,5 @@ public final class AuditLogConstants {
public static final String FILE_NAME = "fileName";
public static final String FULL_NAME = "fullName";
public static final String FUNC_NAME = "funcName";
public static final String UDF_FUNC_ID = "udfFuncId";
}

11
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java

@ -24,7 +24,6 @@ import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.ENVIRONMENT_CODE;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.FILE_NAME;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.FULL_NAME;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.FUNC_NAME;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.NAME;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PRIORITY;
@ -34,7 +33,6 @@ import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_INSTANCE_IDS;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.QUEUE_ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.TYPE;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.UDF_FUNC_ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.USER_ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.VERSION;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_DEFINITION_CODE;
@ -55,7 +53,6 @@ import static org.apache.dolphinscheduler.common.enums.AuditModelType.TASK_GROUP
import static org.apache.dolphinscheduler.common.enums.AuditModelType.TASK_INSTANCE;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.TENANT;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.TOKEN;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.UDF_FUNCTION;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.USER;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.WORKER_GROUP;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.YARN_QUEUE;
@ -96,7 +93,6 @@ import org.apache.dolphinscheduler.api.audit.operator.impl.TaskGroupAuditOperato
import org.apache.dolphinscheduler.api.audit.operator.impl.TaskInstancesAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.TenantAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.TokenAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.UdfFunctionAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.UserAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.WorkerGroupAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.YarnQueueAuditOperatorImpl;
@ -163,13 +159,6 @@ public enum AuditType {
FILE_UPDATE(FILE, UPDATE, ResourceAuditOperatorImpl.class, new String[]{TYPE, FILE_NAME, ALIAS}, new String[]{}),
FILE_DELETE(FILE, DELETE, ResourceAuditOperatorImpl.class, new String[]{FULL_NAME}, new String[]{}),
UDF_FUNCTION_CREATE(UDF_FUNCTION, CREATE, UdfFunctionAuditOperatorImpl.class, new String[]{FUNC_NAME},
new String[]{}),
UDF_FUNCTION_UPDATE(UDF_FUNCTION, UPDATE, UdfFunctionAuditOperatorImpl.class, new String[]{FUNC_NAME},
new String[]{}),
UDF_FUNCTION_DELETE(UDF_FUNCTION, DELETE, UdfFunctionAuditOperatorImpl.class, new String[]{UDF_FUNC_ID},
new String[]{}),
TASK_GROUP_CREATE(TASK_GROUP, CREATE, TaskGroupAuditOperatorImpl.class, new String[]{NAME}, new String[]{}),
TASK_GROUP_UPDATE(TASK_GROUP, UPDATE, TaskGroupAuditOperatorImpl.class, new String[]{}, new String[]{ID}),
TASK_GROUP_CLOSE(TASK_GROUP, CLOSE, TaskGroupAuditOperatorImpl.class, new String[]{ID}, new String[]{}),

46
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/UdfFunctionAuditOperatorImpl.java

@ -1,46 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.audit.operator.impl;
import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.commons.lang3.math.NumberUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
public class UdfFunctionAuditOperatorImpl extends BaseAuditOperator {
@Autowired
private UdfFuncMapper udfFuncMapper;
@Override
protected String getObjectNameFromIdentity(Object identity) {
int objId = NumberUtils.toInt(identity.toString(), -1);
if (objId == -1) {
return "";
}
UdfFunc obj = udfFuncMapper.selectUdfById(objId);
return obj == null ? "" : obj.getFuncName();
}
}

12
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/constants/ApiFuncIdentificationConstant.java

@ -128,18 +128,6 @@ public class ApiFuncIdentificationConstant {
public static final String FILE_DOWNLOAD = "resources:file:download";
public static final String FILE_DELETE = "resources:file:delete";
public static final String UDF_FILE_VIEW = "resources:udf:view";
public static final String UDF_FOLDER_ONLINE_CREATE = "resources:udf:create";
public static final String UDF_UPLOAD = "resources:udf:upload";
public static final String UDF_UPDATE = "resources:udf:edit";
public static final String UDF_DOWNLOAD = "resources:udf:download";
public static final String UDF_DELETE = "resources:udf:delete";
public static final String UDF_FUNCTION_VIEW = "resources:udf-func:view";
public static final String UDF_FUNCTION_CREATE = "resources:udf-func:create";
public static final String UDF_FUNCTION_UPDATE = "resources:udf-func:update";
public static final String UDF_FUNCTION_DELETE = "resources:udf-func:delete";
public static final String TASK_GROUP_VIEW = "resources:task-group:view";
public static final String TASK_GROUP_CREATE = "resources:task-group:create";
public static final String TASK_GROUP_CLOSE = "resources:task-group:close";

193
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java

@ -19,21 +19,14 @@ package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_RESOURCE_FILE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATASOURCE_BY_TYPE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_PAGING;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.RESOURCE_NOT_EXIST;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_UDF_FUNCTION_NAME_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VIEW_UDF_FUNCTION_ERROR;
import org.apache.dolphinscheduler.api.audit.OperatorLog;
import org.apache.dolphinscheduler.api.audit.enums.AuditType;
@ -51,13 +44,11 @@ import org.apache.dolphinscheduler.api.dto.resources.UpdateFileFromContentReques
import org.apache.dolphinscheduler.api.dto.resources.UpdateFileRequest;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ResourcesService;
import org.apache.dolphinscheduler.api.service.UdfFuncService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.api.vo.ResourceItemVO;
import org.apache.dolphinscheduler.api.vo.resources.FetchFileContentResponse;
import org.apache.dolphinscheduler.common.constants.Constants;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
@ -74,7 +65,6 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
@ -102,9 +92,6 @@ public class ResourcesController extends BaseController {
@Autowired
private ResourcesService resourceService;
@Autowired
private UdfFuncService udfFuncService;
@Operation(summary = "createDirectory", description = "CREATE_RESOURCE_NOTES")
@Parameters({
@Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)),
@ -273,19 +260,6 @@ public class ResourcesController extends BaseController {
return Result.success(resourceService.pagingResourceItem(pagingResourceItemRequest));
}
// todo: this api is used for udf, we should remove it
@Operation(summary = "queryResourceList", description = "QUERY_RESOURCE_LIST_NOTES")
@Parameters({
@Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)),
@Parameter(name = "fullName", description = "RESOURCE_FULLNAME", required = true, schema = @Schema(implementation = String.class))})
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_RESOURCES_LIST_ERROR)
public Result<List<ResourceComponent>> queryResourceList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "type") ResourceType type) {
return Result.success(resourceService.queryResourceFiles(loginUser, type));
}
@Operation(summary = "deleteResource", description = "DELETE_RESOURCE_BY_ID_NOTES")
@Parameters({
@Parameter(name = "fullName", description = "RESOURCE_FULLNAME", required = true, schema = @Schema(implementation = String.class, example = "file:////tmp/dolphinscheduler/storage/default/resources/demo.sql"))
@ -354,173 +328,6 @@ public class ResourcesController extends BaseController {
resourceService.downloadResource(response, downloadFileRequest);
}
@Operation(summary = "createUdfFunc", description = "CREATE_UDF_FUNCTION_NOTES")
@Parameters({
@Parameter(name = "type", description = "UDF_TYPE", required = true, schema = @Schema(implementation = UdfType.class)),
@Parameter(name = "funcName", description = "FUNC_NAME", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "className", description = "CLASS_NAME", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "argTypes", description = "ARG_TYPES", schema = @Schema(implementation = String.class)),
@Parameter(name = "database", description = "DATABASE_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "description", description = "UDF_DESC", schema = @Schema(implementation = String.class)),
@Parameter(name = "resourceId", description = "RESOURCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100"))
})
@PostMapping(value = "/udf-func")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_UDF_FUNCTION_ERROR)
@OperatorLog(auditType = AuditType.UDF_FUNCTION_CREATE)
public Result createUdfFunc(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "type") UdfType type,
@RequestParam(value = "funcName") String funcName,
@RequestParam(value = "className") String className,
@RequestParam(value = "fullName") String fullName,
@RequestParam(value = "argTypes", required = false) String argTypes,
@RequestParam(value = "database", required = false) String database,
@RequestParam(value = "description", required = false) String description) {
// todo verify the sourceName
return udfFuncService.createUdfFunction(loginUser, funcName, className, fullName, argTypes, database,
description, type);
}
/**
* view udf function
*
* @param loginUser login user
* @param id udf function id
* @return udf function detail
*/
@Operation(summary = "viewUIUdfFunction", description = "VIEW_UDF_FUNCTION_NOTES")
@Parameters({
@Parameter(name = "id", description = "RESOURCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100"))
})
@GetMapping(value = "/{id}/udf-func")
@ResponseStatus(HttpStatus.OK)
@ApiException(VIEW_UDF_FUNCTION_ERROR)
public Result viewUIUdfFunction(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable("id") int id) {
return udfFuncService.queryUdfFuncDetail(loginUser, id);
}
/**
* update udf function
*
* @param loginUser login user
* @param type resource type
* @param funcName function name
* @param argTypes argument types
* @param database data base
* @param description description
* @param className class name
* @param udfFuncId udf function id
* @return update result code
*/
@Operation(summary = "updateUdfFunc", description = "UPDATE_UDF_FUNCTION_NOTES")
@Parameters({
@Parameter(name = "id", description = "UDF_ID", required = true, schema = @Schema(implementation = int.class)),
@Parameter(name = "type", description = "UDF_TYPE", required = true, schema = @Schema(implementation = UdfType.class)),
@Parameter(name = "funcName", description = "FUNC_NAME", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "className", description = "CLASS_NAME", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "argTypes", description = "ARG_TYPES", schema = @Schema(implementation = String.class)),
@Parameter(name = "database", description = "DATABASE_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "description", description = "UDF_DESC", schema = @Schema(implementation = String.class))})
@PutMapping(value = "/udf-func/{id}")
@ApiException(UPDATE_UDF_FUNCTION_ERROR)
@OperatorLog(auditType = AuditType.UDF_FUNCTION_UPDATE)
public Result updateUdfFunc(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable(value = "id") int udfFuncId, @RequestParam(value = "type") UdfType type,
@RequestParam(value = "funcName") String funcName,
@RequestParam(value = "className") String className,
@RequestParam(value = "argTypes", required = false) String argTypes,
@RequestParam(value = "database", required = false) String database,
@RequestParam(value = "description", required = false) String description,
@RequestParam(value = "fullName") String fullName) {
return udfFuncService.updateUdfFunc(loginUser, udfFuncId, funcName, className, argTypes, database, description,
type, fullName);
}
/**
* query udf function list paging
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return udf function list page
*/
@Operation(summary = "queryUdfFuncListPaging", description = "QUERY_UDF_FUNCTION_LIST_PAGING_NOTES")
@Parameters({
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
@Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")),
@Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20"))})
@GetMapping(value = "/udf-func")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR)
public Result<Object> queryUdfFuncListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize) {
checkPageParams(pageNo, pageSize);
return udfFuncService.queryUdfFuncListPaging(loginUser, searchVal, pageNo, pageSize);
}
/**
* query udf func list by type
*
* @param loginUser login user
* @param type resource type
* @return resource list
*/
@Operation(summary = "queryUdfFuncList", description = "QUERY_UDF_FUNC_LIST_NOTES")
@Parameters({
@Parameter(name = "type", description = "UDF_TYPE", required = true, schema = @Schema(implementation = UdfType.class))})
@GetMapping(value = "/udf-func/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR)
public Result<Object> queryUdfFuncList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("type") UdfType type) {
return udfFuncService.queryUdfFuncList(loginUser, type.getCode());
}
/**
* verify udf function name can use or not
*
* @param loginUser login user
* @param name name
* @return true if the name can user, otherwise return false
*/
@Operation(summary = "verifyUdfFuncName", description = "VERIFY_UDF_FUNCTION_NAME_NOTES")
@Parameters({
@Parameter(name = "name", description = "FUNC_NAME", required = true, schema = @Schema(implementation = String.class))
})
@GetMapping(value = "/udf-func/verify-name")
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_UDF_FUNCTION_NAME_ERROR)
public Result verifyUdfFuncName(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "name") String name) {
return udfFuncService.verifyUdfFuncByName(loginUser, name);
}
/**
* delete udf function
*
* @param loginUser login user
* @param udfFuncId udf function id
* @return delete result code
*/
@Operation(summary = "deleteUdfFunc", description = "DELETE_UDF_FUNCTION_NOTES")
@Parameters({
@Parameter(name = "id", description = "UDF_FUNC_ID", required = true, schema = @Schema(implementation = int.class, example = "100"))})
@DeleteMapping(value = "/udf-func/{id}")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_UDF_FUNCTION_ERROR)
@OperatorLog(auditType = AuditType.UDF_FUNCTION_DELETE)
public Result deleteUdfFunc(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable(value = "id") int udfFuncId) {
return udfFuncService.delete(loginUser, udfFuncId);
}
@Operation(summary = "queryResourceBaseDir", description = "QUERY_RESOURCE_BASE_DIR")
@Parameters({
@Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class))})

86
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java

@ -24,7 +24,6 @@ import static org.apache.dolphinscheduler.api.enums.Status.GET_USER_INFO_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GRANT_DATASOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GRANT_K8S_NAMESPACE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GRANT_PROJECT_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GRANT_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_USER_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.REVOKE_PROJECT_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UNAUTHORIZED_USER_ERROR;
@ -112,9 +111,9 @@ public class UsersController extends BaseController {
* query user list paging
*
* @param loginUser login user
* @param pageNo page number
* @param pageNo page number
* @param searchVal search avlue
* @param pageSize page size
* @param pageSize page size
* @return user list page
*/
@Operation(summary = "queryUserList", description = "QUERY_USER_LIST_NOTES")
@ -138,14 +137,14 @@ public class UsersController extends BaseController {
/**
* update user
*
* @param loginUser login user
* @param id user id
* @param userName user name
* @param loginUser login user
* @param id user id
* @param userName user name
* @param userPassword user password
* @param email email
* @param tenantId tennat id
* @param phone phone
* @param queue queue
* @param email email
* @param tenantId tennat id
* @param phone phone
* @param queue queue
* @return update result code
*/
@Operation(summary = "updateUser", description = "UPDATE_USER_NOTES")
@ -190,7 +189,7 @@ public class UsersController extends BaseController {
* delete user by id
*
* @param loginUser login user
* @param id user id
* @param id user id
* @return delete result code
*/
@Operation(summary = "delUserById", description = "DELETE_USER_BY_ID_NOTES")
@ -210,8 +209,8 @@ public class UsersController extends BaseController {
/**
* revoke project By Id
*
* @param loginUser login user
* @param userId user id
* @param loginUser login user
* @param userId user id
* @param projectIds project id array
* @return revoke result code
*/
@ -233,8 +232,8 @@ public class UsersController extends BaseController {
/**
* grant project with read permission
*
* @param loginUser login user
* @param userId user id
* @param loginUser login user
* @param userId user id
* @param projectIds project id array
* @return grant result code
*/
@ -256,8 +255,8 @@ public class UsersController extends BaseController {
/**
* grant project
*
* @param loginUser login user
* @param userId user id
* @param loginUser login user
* @param userId user id
* @param projectIds project id array
* @return grant result code
*/
@ -279,8 +278,8 @@ public class UsersController extends BaseController {
/**
* grant project by code
*
* @param loginUser login user
* @param userId user id
* @param loginUser login user
* @param userId user id
* @param projectCode project code
* @return grant result code
*/
@ -302,9 +301,9 @@ public class UsersController extends BaseController {
/**
* revoke project
*
* @param loginUser login user
* @param userId user id
* @param projectCode project code
* @param loginUser login user
* @param userId user id
* @param projectCode project code
* @return revoke result code
*/
@Operation(summary = "revokeProject", description = "REVOKE_PROJECT_NOTES")
@ -322,34 +321,11 @@ public class UsersController extends BaseController {
return returnDataList(result);
}
/**
* grant udf function
*
* @param loginUser login user
* @param userId user id
* @param udfIds udf id array
* @return grant result code
*/
@Operation(summary = "grantUDFFunc", description = "GRANT_UDF_FUNC_NOTES")
@Parameters({
@Parameter(name = "userId", description = "USER_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "udfIds", description = "UDF_IDS", required = true, schema = @Schema(implementation = String.class))
})
@PostMapping(value = "/grant-udf-func")
@ResponseStatus(HttpStatus.OK)
@ApiException(GRANT_UDF_FUNCTION_ERROR)
public Result grantUDFFunc(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userId") int userId,
@RequestParam(value = "udfIds") String udfIds) {
Map<String, Object> result = usersService.grantUDFFunction(loginUser, userId, udfIds);
return returnDataList(result);
}
/**
* grant namespace
*
* @param loginUser login user
* @param userId user id
* @param loginUser login user
* @param userId user id
* @param namespaceIds namespace id array
* @return grant result code
*/
@ -371,8 +347,8 @@ public class UsersController extends BaseController {
/**
* grant datasource
*
* @param loginUser login user
* @param userId user id
* @param loginUser login user
* @param userId user id
* @param datasourceIds data source id array
* @return grant result code
*/
@ -439,7 +415,7 @@ public class UsersController extends BaseController {
* verify username
*
* @param loginUser login user
* @param userName user name
* @param userName user name
* @return true if user name not exists, otherwise return false
*/
@Operation(summary = "verifyUserName", description = "VERIFY_USER_NAME_NOTES")
@ -457,7 +433,7 @@ public class UsersController extends BaseController {
/**
* unauthorized user
*
* @param loginUser login user
* @param loginUser login user
* @param alertgroupId alert group id
* @return unauthorize result code
*/
@ -477,7 +453,7 @@ public class UsersController extends BaseController {
/**
* authorized user
*
* @param loginUser login user
* @param loginUser login user
* @param alertgroupId alert group id
* @return authorized result code
*/
@ -502,10 +478,10 @@ public class UsersController extends BaseController {
/**
* user registry
*
* @param userName user name
* @param userPassword user password
* @param repeatPassword repeat password
* @param email user email
*/
@Operation(summary = "registerUser", description = "REGISTER_USER_NOTES")
@Parameters({

15
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java

@ -93,17 +93,9 @@ public enum Status {
RESOURCE_FILE_IS_EMPTY(10062, "resource file is empty", "资源文件内容不能为空"),
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error", "更新资源文件错误"),
DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error", "下载资源文件错误"),
CREATE_UDF_FUNCTION_ERROR(10065, "create udf function error", "创建UDF函数错误"),
VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error", "查询UDF函数错误"),
UPDATE_UDF_FUNCTION_ERROR(10067, "update udf function error", "更新UDF函数错误"),
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR(10068, "query udf function list paging error", "分页查询UDF函数列表错误"),
QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error", "查询数据源信息错误"),
VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error", "UDF函数名称验证错误"),
DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error", "删除UDF函数错误"),
AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error", "授权资源文件错误"),
AUTHORIZE_RESOURCE_TREE(10073, "authorize resource tree display error", "授权资源目录树错误"),
UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error", "查询未授权UDF函数错误"),
AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error", "授权UDF函数错误"),
CREATE_SCHEDULE_ERROR(10076, "create schedule error", "创建调度配置错误"),
UPDATE_SCHEDULE_ERROR(10077, "update schedule error", "更新调度配置错误"),
PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error", "上线调度配置错误"),
@ -124,7 +116,6 @@ public enum Status {
DELETE_USER_BY_ID_ERROR(10093, "delete user by id error", "删除用户错误"),
GRANT_PROJECT_ERROR(10094, "grant project error", "授权项目错误"),
GRANT_RESOURCE_ERROR(10095, "grant resource error", "授权资源错误"),
GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error", "授权UDF函数错误"),
GRANT_DATASOURCE_ERROR(10097, "grant datasource error", "授权数据源错误"),
GET_USER_INFO_ERROR(10098, "get user info error", "获取用户信息错误"),
USER_LIST_ERROR(10099, "user list error", "查询用户列表错误"),
@ -298,19 +289,17 @@ public enum Status {
QUERY_PROJECT_PREFERENCE_ERROR(10302, "query project preference error", "查询项目偏好设置错误"),
UPDATE_PROJECT_PREFERENCE_STATE_ERROR(10303, "Failed to update the state of the project preference", "更新项目偏好设置错误"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"),
RESOURCE_EXIST(20005, "resource already exists", "资源已存在"),
RESOURCE_SUFFIX_NOT_SUPPORT_VIEW(20006, "resource suffix do not support online viewing", "资源文件后缀不支持查看"),
RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit", "上传资源文件大小超过限制"),
RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified", "资源文件后缀不支持修改"),
UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar", "UDF资源文件后缀名只支持[jar]"),
HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"),
RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!",
"资源文件[{0}]在hdfs中已存在,请删除或修改资源名"),
RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists !", "资源文件[{0}]不存在"),
UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"),
RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"),
PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"),
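Editor's note: every Status entry pairs a numeric code with an English and a Chinese message, which is why each removed UDF constant disappears from both locales at once. The constructor behind the entries is not shown in this hunk; the sketch below only illustrates the visible (code, en, zh) pattern, and the field and accessor names are assumptions.

    // illustrative only -- field and accessor names are assumptions, not from this diff
    public enum StatusSketch {

        RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在");

        private final int code;
        private final String enMsg;
        private final String zhMsg;

        StatusSketch(int code, String enMsg, String zhMsg) {
            this.code = code;
            this.enMsg = enMsg;
            this.zhMsg = zhMsg;
        }

        public int getCode() {
            return code;
        }
    }

Codes freed by this change (for example 10065 and 10096) are simply left unused; avoiding their reuse keeps historical logs unambiguous.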

30
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/permission/ResourcePermissionCheckServiceImpl.java

@ -46,7 +46,6 @@ import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Queue;
import org.apache.dolphinscheduler.dao.entity.TaskGroup;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
@ -59,7 +58,6 @@ import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.QueueMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskGroupMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
@ -201,32 +199,6 @@ public class ResourcePermissionCheckServiceImpl
}
}
@Component
public static class UdfFuncPermissionCheck implements ResourceAcquisitionAndPermissionCheck<Integer> {
private final UdfFuncMapper udfFuncMapper;
public UdfFuncPermissionCheck(UdfFuncMapper udfFuncMapper) {
this.udfFuncMapper = udfFuncMapper;
}
@Override
public List<AuthorizationType> authorizationTypes() {
return Collections.singletonList(AuthorizationType.UDF);
}
@Override
public Set<Integer> listAuthorizedResourceIds(int userId, Logger logger) {
List<UdfFunc> udfFuncList = udfFuncMapper.listAuthorizedUdfByUserId(userId);
return udfFuncList.stream().map(UdfFunc::getId).collect(toSet());
}
@Override
public boolean permissionCheck(int userId, String permissionKey, Logger logger) {
return true;
}
}
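Editor's note: the component deleted above was one implementation of the ResourceAcquisitionAndPermissionCheck contract whose javadoc appears further down in this hunk. The remaining resource types keep the same three-method shape, sketched here with a deliberately hypothetical Widget resource and mapper so that no real DolphinScheduler API beyond the visible interface is implied.

    // hypothetical inner component of ResourcePermissionCheckServiceImpl -- illustration only
    @Component
    public static class WidgetPermissionCheck implements ResourceAcquisitionAndPermissionCheck<Integer> {

        private final WidgetMapper widgetMapper; // hypothetical mapper

        public WidgetPermissionCheck(WidgetMapper widgetMapper) {
            this.widgetMapper = widgetMapper;
        }

        @Override
        public List<AuthorizationType> authorizationTypes() {
            // each check registers the AuthorizationType constants it is responsible for
            return Collections.singletonList(AuthorizationType.WORKER_GROUP); // stand-in constant
        }

        @Override
        public Set<Integer> listAuthorizedResourceIds(int userId, Logger logger) {
            // the ids returned here drive list filtering; an empty set hides everything from the user
            return new HashSet<>(widgetMapper.listAuthorizedIdsByUserId(userId)); // hypothetical query
        }

        @Override
        public boolean permissionCheck(int userId, String permissionKey, Logger logger) {
            // returning true defers to id-based filtering, as the removed UDF check did
            return true;
        }
    }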
@Component
public static class TaskGroupPermissionCheck implements ResourceAcquisitionAndPermissionCheck<Integer> {
@ -481,6 +453,7 @@ public class ResourcePermissionCheckServiceImpl
/**
* authorization types
*
* @return
*/
List<AuthorizationType> authorizationTypes();
@ -495,6 +468,7 @@ public class ResourcePermissionCheckServiceImpl
/**
* permission check
*
* @param userId
* @return
*/

118
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java

@ -1,118 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.dao.entity.User;
/**
* udf func service
*/
public interface UdfFuncService {
/**
* create udf function
*
* @param loginUser login user
* @param type udf type
* @param funcName function name
* @param argTypes argument types
* @param database database
* @param desc description
* @param className class name
* @return create result code
*/
Result<Object> createUdfFunction(User loginUser,
String funcName,
String className,
String fullName,
String argTypes,
String database,
String desc,
UdfType type);
/**
* query udf function
*
* @param id udf function id
* @return udf function detail
*/
Result<Object> queryUdfFuncDetail(User loginUser, int id);
/**
* update udf function
*
* @param udfFuncId udf function id
* @param type resource type
* @param funcName function name
* @param argTypes argument types
* @param database database name
* @param desc description
* @param resourceId resource id
* @param fullName resource full name
* @param className class name
* @return update result code
*/
Result<Object> updateUdfFunc(User loginUser,
int udfFuncId,
String funcName,
String className,
String argTypes,
String database,
String desc,
UdfType type,
String fullName);
/**
* query udf function list paging
*
* @param loginUser login user
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @return udf function list page
*/
Result queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize);
/**
* query udf list
*
* @param loginUser login user
* @param type udf type
* @return udf func list
*/
Result<Object> queryUdfFuncList(User loginUser, Integer type);
/**
* delete udf function
*
* @param id udf function id
* @return delete result code
*/
Result<Object> delete(User loginUser, int id);
/**
* verify udf function by name
*
* @param name name
* @return true if the name can be used, otherwise false
*/
Result<Object> verifyUdfFuncByName(User loginUser, String name);
}
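Editor's note: downstream code typically consumed this interface by injection and inspected the Status code wrapped in the Result, along the lines of the hypothetical fragment below; after this commit such call sites no longer compile and have to be removed rather than migrated, since no replacement service is introduced. The names are illustrative, not taken from this diff.

    // hypothetical former call site inside a Spring-managed bean
    @Autowired
    private UdfFuncService udfFuncService; // this bean no longer exists after the change

    public boolean isUdfNameAvailable(User loginUser, String candidateName) {
        Result<Object> result = udfFuncService.verifyUdfFuncByName(loginUser, candidateName);
        // SUCCESS meant the name was still free; UDF_FUNCTION_EXISTS meant it was taken
        return result.getCode() == Status.SUCCESS.getCode();
    }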

10
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java

@ -183,16 +183,6 @@ public interface UsersService {
*/
Map<String, Object> revokeProject(User loginUser, int userId, long projectCode);
/**
* grant udf function
*
* @param loginUser login user
* @param userId user id
* @param udfIds udf id array
* @return grant result code
*/
Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds);
/**
* grant namespace
*

4
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java

@ -133,10 +133,8 @@ public class BaseServiceImpl implements BaseService {
// @Override
// public void createTenantDirIfNotExists(String tenantCode) throws IOException {
// String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
// String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
// // init resource path and udf path
// // init resource path
// HadoopUtils.getInstance().mkdir(tenantCode,resourcePath);
// HadoopUtils.getInstance().mkdir(tenantCode,udfsPath);
// }
/**

399
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java

@ -1,399 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.UdfFuncService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Set;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* udf func service impl
*/
@Service
@Slf4j
public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncService {
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private UDFUserMapper udfUserMapper;
@Autowired(required = false)
private StorageOperator storageOperator;
/**
* create udf function
*
* @param loginUser login user
* @param type udf type
* @param funcName function name
* @param argTypes argument types
* @param database database
* @param desc description
* @param className class name
* @return create result code
*/
@Override
@Transactional
public Result<Object> createUdfFunction(User loginUser,
String funcName,
String className,
String fullName,
String argTypes,
String database,
String desc,
UdfType type) {
Result<Object> result = new Result<>();
boolean canOperatorPermissions = canOperatorPermissions(loginUser, null, AuthorizationType.UDF,
ApiFuncIdentificationConstant.UDF_FUNCTION_CREATE);
if (!canOperatorPermissions) {
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
if (checkDescriptionLength(desc)) {
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
// verify udf func name exist
if (checkUdfFuncNameExists(funcName)) {
log.warn("Udf function with the same name already exists.");
putMsg(result, Status.UDF_FUNCTION_EXISTS);
return result;
}
boolean existResource = storageOperator.exists(fullName);
if (!existResource) {
log.error("resource full name {} is not exist", fullName);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
// save data
UdfFunc udf = new UdfFunc();
Date now = new Date();
udf.setUserId(loginUser.getId());
udf.setFuncName(funcName);
udf.setClassName(className);
if (!StringUtils.isEmpty(argTypes)) {
udf.setArgTypes(argTypes);
}
if (!StringUtils.isEmpty(database)) {
udf.setDatabase(database);
}
udf.setDescription(desc);
// set resourceId to -1 because we do not store resource to db anymore, instead we use fullName
udf.setResourceId(-1);
udf.setResourceName(fullName);
udf.setType(type);
udf.setCreateTime(now);
udf.setUpdateTime(now);
udfFuncMapper.insert(udf);
log.info("UDF function create complete, udfFuncName:{}.", udf.getFuncName());
putMsg(result, Status.SUCCESS);
return result;
}
/**
*
* @param name name
* @return check result code
*/
private boolean checkUdfFuncNameExists(String name) {
List<UdfFunc> resource = udfFuncMapper.queryUdfByIdStr(null, name);
return resource != null && !resource.isEmpty();
}
/**
* query udf function
*
* @param id udf function id
* @return udf function detail
*/
@Override
public Result<Object> queryUdfFuncDetail(User loginUser, int id) {
Result<Object> result = new Result<>();
boolean canOperatorPermissions = canOperatorPermissions(loginUser, new Object[]{id}, AuthorizationType.UDF,
ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW);
if (!canOperatorPermissions) {
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
UdfFunc udfFunc = udfFuncMapper.selectById(id);
if (udfFunc == null) {
log.error("Resource does not exist, udf func id:{}.", id);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
result.setData(udfFunc);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update udf function
*
* @param udfFuncId udf function id
* @param type resource type
* @param funcName function name
* @param argTypes argument types
* @param database database name
* @param desc description
* @param fullName resource full name
* @param className class name
* @return update result code
*/
@Override
public Result<Object> updateUdfFunc(User loginUser,
int udfFuncId,
String funcName,
String className,
String argTypes,
String database,
String desc,
UdfType type,
String fullName) {
Result<Object> result = new Result<>();
boolean canOperatorPermissions = canOperatorPermissions(loginUser, new Object[]{udfFuncId},
AuthorizationType.UDF, ApiFuncIdentificationConstant.UDF_FUNCTION_UPDATE);
if (!canOperatorPermissions) {
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
if (checkDescriptionLength(desc)) {
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
// verify udfFunc is exist
UdfFunc udf = udfFuncMapper.selectUdfById(udfFuncId);
if (udf == null) {
log.error("UDF function does not exist, udfFuncId:{}.", udfFuncId);
result.setCode(Status.UDF_FUNCTION_NOT_EXIST.getCode());
result.setMsg(Status.UDF_FUNCTION_NOT_EXIST.getMsg());
return result;
}
// verify udfFuncName is exist
if (!funcName.equals(udf.getFuncName())) {
if (checkUdfFuncNameExists(funcName)) {
log.warn("Udf function exists, can not create again, udfFuncName:{}.", funcName);
result.setCode(Status.UDF_FUNCTION_EXISTS.getCode());
result.setMsg(Status.UDF_FUNCTION_EXISTS.getMsg());
return result;
}
}
Boolean doesResExist = false;
try {
doesResExist = storageOperator.exists(fullName);
} catch (Exception e) {
log.error("udf resource :{} checking error", fullName, e);
result.setCode(Status.RESOURCE_NOT_EXIST.getCode());
result.setMsg(Status.RESOURCE_NOT_EXIST.getMsg());
return result;
}
if (!doesResExist) {
log.error("resource full name {} is not exist", fullName);
result.setCode(Status.RESOURCE_NOT_EXIST.getCode());
result.setMsg(Status.RESOURCE_NOT_EXIST.getMsg());
return result;
}
Date now = new Date();
udf.setFuncName(funcName);
udf.setClassName(className);
udf.setArgTypes(argTypes);
if (!StringUtils.isEmpty(database)) {
udf.setDatabase(database);
}
udf.setDescription(desc);
// set resourceId to -1 because we do not store resource to db anymore, instead we use fullName
udf.setResourceId(-1);
udf.setResourceName(fullName);
udf.setType(type);
udf.setUpdateTime(now);
udfFuncMapper.updateById(udf);
log.info("UDF function update complete, udfFuncId:{}, udfFuncName:{}.", udfFuncId, funcName);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query udf function list paging
*
* @param loginUser login user
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @return udf function list page
*/
@Override
public Result<Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Result<Object> result = new Result();
boolean canOperatorPermissions = canOperatorPermissions(loginUser, null, AuthorizationType.UDF,
ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW);
if (!canOperatorPermissions) {
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
PageInfo<UdfFunc> pageInfo = new PageInfo<>(pageNo, pageSize);
IPage<UdfFunc> udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo);
pageInfo.setTotal((int) udfFuncList.getTotal());
pageInfo.setTotalList(udfFuncList.getRecords());
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get udf functions
*
* @param loginUser login user
* @param searchVal search value
* @param pageSize page size
* @param pageNo page number
* @return udf function list page
*/
private IPage<UdfFunc> getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) {
Set<Integer> udfFuncIds = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF,
loginUser.getId(), log);
Page<UdfFunc> page = new Page<>(pageNo, pageSize);
if (udfFuncIds.isEmpty()) {
return page;
}
return udfFuncMapper.queryUdfFuncPaging(page, new ArrayList<>(udfFuncIds), searchVal);
}
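Editor's note: the helper above shows the permission-filtered paging idiom used across the service layer; the early return on an empty id set presumably avoids issuing a query with an empty IN (...) list. The same shape, reduced to a hypothetical Widget mapper so that no real API beyond what is visible here is implied:

    // hypothetical permission-filtered paging helper, mirroring the method above
    private IPage<Widget> pageAuthorizedWidgets(User loginUser, String searchVal, int pageNo, int pageSize) {
        Set<Integer> authorizedIds = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(
                AuthorizationType.WORKER_GROUP, loginUser.getId(), log); // stand-in type
        Page<Widget> page = new Page<>(pageNo, pageSize);
        if (authorizedIds.isEmpty()) {
            // nothing is visible to this user; return the empty page instead of querying
            return page;
        }
        return widgetMapper.queryWidgetPaging(page, new ArrayList<>(authorizedIds), searchVal); // hypothetical query
    }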
/**
* query udf list
*
* @param loginUser login user
* @param type udf type
* @return udf func list
*/
@Override
public Result<Object> queryUdfFuncList(User loginUser, Integer type) {
Result<Object> result = new Result<>();
boolean canOperatorPermissions = canOperatorPermissions(loginUser, null, AuthorizationType.UDF,
ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW);
if (!canOperatorPermissions) {
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
Set<Integer> udfFuncIds = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF,
loginUser.getId(), log);
if (udfFuncIds.isEmpty()) {
result.setData(Collections.emptyList());
putMsg(result, Status.SUCCESS);
return result;
}
List<UdfFunc> udfFuncList = udfFuncMapper.getUdfFuncByType(new ArrayList<>(udfFuncIds), type);
result.setData(udfFuncList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete udf function
*
* @param id udf function id
* @return delete result code
*/
@Override
@Transactional
public Result<Object> delete(User loginUser, int id) {
Result<Object> result = new Result<>();
boolean canOperatorPermissions = canOperatorPermissions(loginUser, new Object[]{id}, AuthorizationType.UDF,
ApiFuncIdentificationConstant.UDF_FUNCTION_DELETE);
if (!canOperatorPermissions) {
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
udfFuncMapper.deleteById(id);
udfUserMapper.deleteByUdfFuncId(id);
log.info("UDF function delete complete, udfFuncId:{}.", id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify udf function by name
*
* @param name name
* @return true if the name can be used, otherwise false
*/
@Override
public Result<Object> verifyUdfFuncByName(User loginUser, String name) {
Result<Object> result = new Result<>();
boolean canOperatorPermissions = canOperatorPermissions(loginUser, null, AuthorizationType.UDF,
ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW);
if (!canOperatorPermissions) {
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
if (checkUdfFuncNameExists(name)) {
log.warn("Udf function with the same already exists.");
putMsg(result, Status.UDF_FUNCTION_EXISTS);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
}

72
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java

@ -38,7 +38,6 @@ import org.apache.dolphinscheduler.dao.entity.K8sNamespaceUser;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UDFUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;
@ -47,7 +46,6 @@ import org.apache.dolphinscheduler.dao.mapper.K8sNamespaceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator;
@ -99,9 +97,6 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
@Autowired
private DataSourceUserMapper datasourceUserMapper;
@Autowired
private UDFUserMapper udfUserMapper;
@Autowired
private AlertGroupMapper alertGroupMapper;
@ -492,9 +487,10 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
/**
* revoke the project permission for specified user by id
*
* @param loginUser Login user
* @param userId User id
* @param projectIds project id array
* @return
*/
@Override
@ -537,8 +533,8 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
/**
* grant project with read permission
*
* @param loginUser login user
* @param userId user id
* @param projectIds project id array
* @return grant result code
*/
@ -747,62 +743,6 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
return result;
}
/**
* grant udf function
*
* @param loginUser login user
* @param userId user id
* @param udfIds udf id array
* @return grant result code
*/
@Override
@Transactional
public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
Map<String, Object> result = new HashMap<>();
if (resourcePermissionCheckService.functionDisabled()) {
putMsg(result, Status.FUNCTION_DISABLED);
return result;
}
User user = userMapper.selectById(userId);
if (user == null) {
log.error("User does not exist, userId:{}.", userId);
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
if (!isAdmin(loginUser)) {
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
udfUserMapper.deleteByUserId(userId);
if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) {
log.warn("Parameter udfIds is empty.");
return result;
}
String[] resourcesIdArr = udfIds.split(",");
for (String udfId : resourcesIdArr) {
Date now = new Date();
UDFUser udfUser = new UDFUser();
udfUser.setUserId(userId);
udfUser.setUdfId(Integer.parseInt(udfId));
udfUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM);
udfUser.setCreateTime(now);
udfUser.setUpdateTime(now);
udfUserMapper.insert(udfUser);
}
log.info("User is granted permission for UDF, userName:{}.", user.getUserName());
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant namespace
*

16
dolphinscheduler-api/src/main/resources/i18n/messages.properties

@ -117,21 +117,9 @@ SUFFIX=resource file suffix
CONTENT=resource file content
UPDATE_RESOURCE_NOTES=edit resource file online
DOWNLOAD_RESOURCE_NOTES=download resource file
CREATE_UDF_FUNCTION_NOTES=create udf function
UDF_TYPE=UDF type
FUNC_NAME=function name
CLASS_NAME=package and class name
ARG_TYPES=arguments
UDF_DESC=udf desc
VIEW_UDF_FUNCTION_NOTES=view udf function
UPDATE_UDF_FUNCTION_NOTES=update udf function
QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=query udf function list paging
VERIFY_UDF_FUNCTION_NAME_NOTES=verify udf function name
DELETE_UDF_FUNCTION_NOTES=delete udf function
AUTHORIZED_FILE_NOTES=authorized file
UNAUTHORIZED_FILE_NOTES=unauthorized file
AUTHORIZED_UDF_FUNC_NOTES=authorized udf func
UNAUTHORIZED_UDF_FUNC_NOTES=unauthorized udf func
VERIFY_QUEUE_NOTES=verify queue
TENANT_TAG=tenant related operation
CREATE_TENANT_NOTES=create tenant
@ -259,8 +247,6 @@ UNAUTHORIZED_USER_NOTES=cancel authorization
ALERT_GROUP_ID=alert group id
ALERT_INSTANCE_IDS=alert instance ids(string format, multiple instances separated by ",")
AUTHORIZED_USER_NOTES=authorized user
GRANT_UDF_FUNC_NOTES=grant udf function
UDF_IDS=udf ids(string format, multiple udf functions separated by ",")
GRANT_DATASOURCE_NOTES=grant datasource
DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",")
QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id
@ -454,4 +440,4 @@ UPDATE_PROJECT_PREFERENCE_NOTES=update project preference
UPDATE_PROJECT_PREFERENCE_STATE_NOTES=update the state of the project preference
PROJECT_PREFERENCES_STATE= the state of the project preference
PROJECT_PREFERENCES=project preferences
QUERY_PROJECT_PREFERENCE_NOTES=query project preference

16
dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties

@ -122,21 +122,11 @@ SUFFIX=resource file suffix
CONTENT=resource file content
UPDATE_RESOURCE_NOTES=edit resource file online
DOWNLOAD_RESOURCE_NOTES=download resource file
CREATE_UDF_FUNCTION_NOTES=create udf function
UDF_TYPE=UDF type
FUNC_NAME=function name
CLASS_NAME=package and class name
ARG_TYPES=arguments
UDF_DESC=udf desc
VIEW_UDF_FUNCTION_NOTES=view udf function
UPDATE_UDF_FUNCTION_NOTES=update udf function
QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=query udf function list paging
VERIFY_UDF_FUNCTION_NAME_NOTES=verify udf function name
DELETE_UDF_FUNCTION_NOTES=delete udf function
AUTHORIZED_FILE_NOTES=authorized file
UNAUTHORIZED_FILE_NOTES=unauthorized file
AUTHORIZED_UDF_FUNC_NOTES=authorized udf func
UNAUTHORIZED_UDF_FUNC_NOTES=unauthorized udf func
VERIFY_QUEUE_NOTES=verify queue
TENANT_TAG=tenant related operation
CREATE_TENANT_NOTES=create tenant
@ -251,7 +241,6 @@ QUERY_WORKER_ADDRESS_LIST_NOTES=query worker address list
QUERY_WORKFLOW_LINEAGE_BY_IDS_NOTES=query workflow lineage by ids
QUERY_WORKFLOW_LINEAGE_BY_NAME_NOTES=query workflow lineage by name
VIEW_TREE_NOTES=view tree
UDF_ID=udf id
GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id
GET_NODE_LIST_BY_DEFINITION_CODE_NOTES=get node list by definition code
QUERY_PROCESS_DEFINITION_BY_NAME_NOTES=query process definition by name
@ -301,10 +290,7 @@ QUERY_RESOURCE_LIST_PAGING_NOTES=query resource list paging
RESOURCE_PID=parent directory ID of the current resource
RESOURCE_FULL_NAME=resource full name
QUERY_BY_RESOURCE_NAME=query by resource name
QUERY_UDF_FUNC_LIST_NOTES=query udf function list
VERIFY_RESOURCE_NAME_NOTES=verify resource name
GRANT_UDF_FUNC_NOTES=grant udf function
UDF_IDS=udf ids(string format, multiple udf functions separated by ",")
GRANT_DATASOURCE_NOTES=grant datasource
DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",")
QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id
@ -486,4 +472,4 @@ UPDATE_PROJECT_PARAMETER_NOTES=update project parameter
PROJECT_PARAMETER_CODE=project parameter code
DELETE_PROJECT_PARAMETER_NOTES=delete project parameter
QUERY_PROJECT_PARAMETER_LIST_PAGING_NOTES=query project parameter list paging
QUERY_PROJECT_PARAMETER_NOTES=query project parameter

16
dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties

@ -102,7 +102,6 @@ RESOURCE_FILE=资源文件
RESOURCE_ID=资源ID
QUERY_RESOURCE_LIST_NOTES=查询资源列表
QUERY_BY_RESOURCE_NAME=通过资源名称查询
QUERY_UDF_FUNC_LIST_NOTES=查询UDF函数列表
VERIFY_RESOURCE_NAME_NOTES=验证资源名称
DELETE_RESOURCE_BY_ID_NOTES=通过ID删除资源
VIEW_RESOURCE_BY_ID_NOTES=通过ID浏览资源
@ -111,21 +110,11 @@ SUFFIX=资源文件后缀
CONTENT=资源文件内容
UPDATE_RESOURCE_NOTES=在线更新资源文件
DOWNLOAD_RESOURCE_NOTES=下载资源文件
CREATE_UDF_FUNCTION_NOTES=创建UDF函数
UDF_TYPE=UDF类型
FUNC_NAME=函数名称
CLASS_NAME=包名类名
ARG_TYPES=参数
UDF_DESC=udf描述,使用说明
VIEW_UDF_FUNCTION_NOTES=查看udf函数
UPDATE_UDF_FUNCTION_NOTES=更新udf函数
QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=分页查询udf函数列表
VERIFY_UDF_FUNCTION_NAME_NOTES=验证udf函数名
DELETE_UDF_FUNCTION_NOTES=删除UDF函数
AUTHORIZED_FILE_NOTES=授权文件
UNAUTHORIZED_FILE_NOTES=取消授权文件
AUTHORIZED_UDF_FUNC_NOTES=授权udf函数
UNAUTHORIZED_UDF_FUNC_NOTES=取消udf函数授权
VERIFY_QUEUE_NOTES=验证队列
TENANT_TAG=租户相关操作
CREATE_TENANT_NOTES=创建租户
@ -231,7 +220,6 @@ PLUGIN_ID=插件ID
USER_ID=用户ID
PAGE_SIZE=页大小
LIMIT=显示多少条
UDF_ID=udf ID
AUTHORIZE_RESOURCE_TREE_NOTES=授权资源树
RESOURCE_CURRENTDIR=当前资源目录
RESOURCE_PID=资源父目录ID
@ -285,8 +273,6 @@ UNAUTHORIZED_USER_NOTES=取消授权
ALERT_GROUP_ID=告警组ID
ALERT_INSTANCE_IDS=告警实例ID列表(字符串格式,多个告警实例ID以","分割)
AUTHORIZED_USER_NOTES=授权用户
GRANT_UDF_FUNC_NOTES=授权udf函数
UDF_IDS=udf函数id列表(字符串格式,多个udf函数ID以","分割)
GRANT_DATASOURCE_NOTES=授权数据源
DATASOURCE_IDS=数据源ID列表(字符串格式,多个数据源ID以","分割)
QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=通过任务实例ID查询子流程实例
@ -483,4 +469,4 @@ UPDATE_PROJECT_PARAMETER_NOTES=更新项目参数
PROJECT_PARAMETER_CODE=项目参数code
DELETE_PROJECT_PARAMETER_NOTES=删除项目参数
QUERY_PROJECT_PARAMETER_LIST_PAGING_NOTES=分页查询项目参数
QUERY_PROJECT_PARAMETER_NOTES=查询项目参数
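Editor's note: the *_NOTES and *_IDS keys dropped from all three bundles are the description keys referenced by the Swagger annotations in the controllers (for example GRANT_UDF_FUNC_NOTES and UDF_IDS in the endpoint removed earlier in this diff), so removing an endpoint always touches messages.properties, messages_en_US.properties and messages_zh_CN.properties together. How the keys are resolved against the bundles is outside this diff; the fragment below only restates the linkage using the annotations already shown above.

    // annotation fragment from the removed controller method, repeated for reference:
    // the description values are i18n keys, not literal text
    @Operation(summary = "grantUDFFunc", description = "GRANT_UDF_FUNC_NOTES")
    @Parameter(name = "udfIds", description = "UDF_IDS", required = true,
            schema = @Schema(implementation = String.class))

Leaving an orphaned key behind is harmless, but referencing a deleted key would likely surface as the raw key name in the rendered API docs.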

178
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java

@ -27,10 +27,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ResourcesService;
import org.apache.dolphinscheduler.api.service.UdfFuncService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.api.vo.resources.FetchFileContentResponse;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
@ -54,9 +52,6 @@ public class ResourcesControllerTest extends AbstractControllerTest {
@MockBean(name = "resourcesServiceImpl")
private ResourcesService resourcesService;
@MockBean(name = "udfFuncServiceImpl")
private UdfFuncService udfFuncService;
@Test
public void testQueryResourceListPaging() throws Exception {
Result mockResult = new Result<>();
@ -183,179 +178,6 @@ public class ResourcesControllerTest extends AbstractControllerTest {
Assertions.assertNotNull(mvcResult);
}
@Test
public void testCreateUdfFunc() throws Exception {
Result mockResult = new Result<>();
mockResult.setCode(Status.TENANT_NOT_EXIST.getCode());
Mockito.when(udfFuncService
.createUdfFunction(Mockito.any(), Mockito.anyString(), Mockito.anyString(), Mockito.anyString(),
Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), Mockito.any()))
.thenReturn(mockResult);
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("type", String.valueOf(UdfType.HIVE));
paramsMap.add("funcName", "test_udf");
paramsMap.add("className", "com.test.word.contWord");
paramsMap.add("argTypes", "argTypes");
paramsMap.add("database", "database");
paramsMap.add("description", "description");
paramsMap.add("resourceId", "1");
paramsMap.add("fullName", "dolphinscheduler/resourcePath");
MvcResult mvcResult = mockMvc.perform(post("/resources/udf-func")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isCreated())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testViewUIUdfFunction() throws Exception {
Result<Object> mockResult = new Result<>();
putMsg(mockResult, Status.TENANT_NOT_EXIST);
Mockito.when(udfFuncService
.queryUdfFuncDetail(Mockito.any(), Mockito.anyInt()))
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc.perform(get("/resources/{id}/udf-func", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testUpdateUdfFunc() throws Exception {
Result<Object> mockResult = new Result<>();
mockResult.setCode(Status.TENANT_NOT_EXIST.getCode());
Mockito.when(udfFuncService
.updateUdfFunc(Mockito.any(), Mockito.anyInt(), Mockito.anyString(), Mockito.anyString(),
Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), Mockito.any(),
Mockito.anyString()))
.thenReturn(mockResult);
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("id", "1");
paramsMap.add("type", String.valueOf(UdfType.HIVE));
paramsMap.add("funcName", "update_duf");
paramsMap.add("className", "com.test.word.contWord");
paramsMap.add("argTypes", "argTypes");
paramsMap.add("database", "database");
paramsMap.add("description", "description");
paramsMap.add("resourceId", "1");
paramsMap.add("fullName", "dolphinscheduler/resourcePath");
MvcResult mvcResult = mockMvc.perform(put("/resources/udf-func/{id}", "456")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryUdfFuncList() throws Exception {
Result mockResult = new Result<>();
mockResult.setCode(Status.SUCCESS.getCode());
Mockito.when(udfFuncService.queryUdfFuncListPaging(Mockito.any(), Mockito.anyString(), Mockito.anyInt(),
Mockito.anyInt())).thenReturn(mockResult);
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("pageNo", "1");
paramsMap.add("searchVal", "udf");
paramsMap.add("pageSize", "1");
MvcResult mvcResult = mockMvc.perform(get("/resources/udf-func")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryResourceList() throws Exception {
Result<Object> mockResult = new Result<>();
mockResult.setCode(Status.SUCCESS.getCode());
Mockito.when(udfFuncService.queryUdfFuncList(Mockito.any(), Mockito.anyInt())).thenReturn(mockResult);
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("type", String.valueOf(UdfType.HIVE));
MvcResult mvcResult = mockMvc.perform(get("/resources/udf-func/list")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testVerifyUdfFuncName() throws Exception {
Result mockResult = new Result<>();
mockResult.setCode(Status.SUCCESS.getCode());
Mockito.when(udfFuncService.verifyUdfFuncByName(Mockito.any(), Mockito.anyString())).thenReturn(mockResult);
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("name", "test");
MvcResult mvcResult = mockMvc.perform(get("/resources/udf-func/verify-name")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testDeleteUdfFunc() throws Exception {
Result mockResult = new Result<>();
mockResult.setCode(Status.SUCCESS.getCode());
Mockito.when(udfFuncService.delete(Mockito.any(), Mockito.anyInt())).thenReturn(mockResult);
MvcResult mvcResult = mockMvc.perform(delete("/resources/udf-func/{id}", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testDeleteResource() throws Exception {
Mockito.doNothing().when(resourcesService).delete(Mockito.any());

18
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java

@ -145,24 +145,6 @@ public class UsersControllerTest extends AbstractControllerTest {
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testGrantUDFFunc() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("userId", "32");
paramsMap.add("udfIds", "5");
MvcResult mvcResult = mockMvc.perform(post("/users/grant-udf-func")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assertions.assertEquals(Status.USER_NOT_EXIST.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testGrantDataSource() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();

94
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/permission/UdfFuncPermissionCheckTest.java

@ -1,94 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.permission;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ExtendWith(MockitoExtension.class)
public class UdfFuncPermissionCheckTest {
private static final Logger logger = LoggerFactory.getLogger(UdfFuncPermissionCheckTest.class);
@InjectMocks
private ResourcePermissionCheckServiceImpl.UdfFuncPermissionCheck udfFuncPermissionCheck;
@Mock
private UdfFuncMapper udfFuncMapper;
@Test
public void testPermissionCheck() {
User user = getLoginUser();
Assertions.assertTrue(udfFuncPermissionCheck.permissionCheck(user.getId(), null, logger));
}
@Test
public void testAuthorizationTypes() {
List<AuthorizationType> authorizationTypes = udfFuncPermissionCheck.authorizationTypes();
Assertions.assertEquals(Collections.singletonList(AuthorizationType.UDF), authorizationTypes);
}
@Test
public void testListAuthorizedResourceIds() {
User user = getLoginUser();
UdfFunc udfFunc = new UdfFunc();
Set<Integer> ids = new HashSet();
ids.add(udfFunc.getId());
List<UdfFunc> udfFuncs = Arrays.asList(udfFunc);
Mockito.when(udfFuncMapper.listAuthorizedUdfByUserId(user.getId())).thenReturn(udfFuncs);
Assertions.assertEquals(ids, udfFuncPermissionCheck.listAuthorizedResourceIds(user.getId(), logger));
}
private User getLoginUser() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
loginUser.setUserName("test");
loginUser.setId(1);
return loginUser;
}
private Project getProject() {
Project project = new Project();
project.setCode(1L);
project.setId(1);
project.setName("projectName");
project.setUserId(1);
return project;
}
}

306
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java

@ -1,306 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService;
import org.apache.dolphinscheduler.api.service.impl.BaseServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.UdfFuncServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator;
import org.apache.commons.collections4.CollectionUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* udf func service test
*/
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
public class UdfFuncServiceTest {
private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceTest.class);
private MockedStatic<PropertyUtils> mockedStaticPropertyUtils;
@InjectMocks
private UdfFuncServiceImpl udfFuncService;
@Mock
private UdfFuncMapper udfFuncMapper;
@Mock
private UDFUserMapper udfUserMapper;
@Mock
private StorageOperator storageOperator;
@BeforeEach
public void setUp() {
mockedStaticPropertyUtils = Mockito.mockStatic(PropertyUtils.class);
}
@Mock
private ResourcePermissionCheckService resourcePermissionCheckService;
private static final Logger serviceLogger = LoggerFactory.getLogger(BaseServiceImpl.class);
private static final Logger udfLogger = LoggerFactory.getLogger(UdfFuncServiceImpl.class);
@Test
public void testCreateUdfFunction() {
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_CREATE, serviceLogger)).thenReturn(true);
Mockito.when(
resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, null, 0, serviceLogger))
.thenReturn(true);
// resource not exist
Result result = udfFuncService.createUdfFunction(getLoginUser(), "UdfFuncServiceTest",
"org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String",
"UdfFuncServiceTest", "UdfFuncServiceTest", "", UdfType.HIVE);
logger.info(result.toString());
Assertions.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg());
// success
Mockito.when(storageOperator.exists("String")).thenReturn(true);
result = udfFuncService.createUdfFunction(getLoginUser(), "UdfFuncServiceTest",
"org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String",
"UdfFuncServiceTest", "UdfFuncServiceTest", "", UdfType.HIVE);
logger.info(result.toString());
Assertions.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
}
@Test
public void testQueryUdfFuncDetail() {
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true);
Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{2}, 0,
serviceLogger)).thenReturn(true);
Mockito.when(udfFuncMapper.selectById(1)).thenReturn(getUdfFunc());
// resource not exist
Result<Object> result = udfFuncService.queryUdfFuncDetail(getLoginUser(), 2);
logger.info(result.toString());
Assertions.assertTrue(Status.RESOURCE_NOT_EXIST.getCode() == result.getCode());
// success
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true);
Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{1}, 0,
serviceLogger)).thenReturn(true);
result = udfFuncService.queryUdfFuncDetail(getLoginUser(), 1);
logger.info(result.toString());
Assertions.assertTrue(Status.SUCCESS.getCode() == result.getCode());
}
@Test
public void testUpdateUdfFunc() {
Mockito.when(udfFuncMapper.selectUdfById(1)).thenReturn(getUdfFunc());
// UDF_FUNCTION_NOT_EXIST
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_UPDATE, serviceLogger)).thenReturn(true);
Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{12}, 0,
serviceLogger)).thenReturn(true);
Result<Object> result = udfFuncService.updateUdfFunc(getLoginUser(), 12, "UdfFuncServiceTest",
"org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String",
"UdfFuncServiceTest", "UdfFuncServiceTest", UdfType.HIVE, "");
logger.info(result.toString());
Assertions.assertTrue(Status.UDF_FUNCTION_NOT_EXIST.getCode() == result.getCode());
// RESOURCE_NOT_EXIST
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_UPDATE, serviceLogger)).thenReturn(true);
Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{11}, 0,
serviceLogger)).thenReturn(true);
Mockito.when(udfFuncMapper.selectUdfById(11)).thenReturn(getUdfFunc());
result = udfFuncService.updateUdfFunc(getLoginUser(), 11, "UdfFuncServiceTest",
"org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String",
"UdfFuncServiceTest", "UdfFuncServiceTest", UdfType.HIVE, "");
logger.info(result.toString());
Assertions.assertTrue(Status.RESOURCE_NOT_EXIST.getCode() == result.getCode());
// success
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_UPDATE, serviceLogger)).thenReturn(true);
Mockito.when(storageOperator.exists("")).thenReturn(true);
result = udfFuncService.updateUdfFunc(getLoginUser(), 11, "UdfFuncServiceTest",
"org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String",
"UdfFuncServiceTest", "UdfFuncServiceTest", UdfType.HIVE, "");
logger.info(result.toString());
Assertions.assertTrue(Status.SUCCESS.getCode() == result.getCode());
}
@Test
public void testQueryUdfFuncListPaging() {
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true);
Mockito.when(
resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, null, 0, serviceLogger))
.thenReturn(true);
Mockito.when(
resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF, 1, udfLogger))
.thenReturn(getSetIds());
IPage<UdfFunc> page = new Page<>(1, 10);
page.setTotal(1L);
page.setRecords(getList());
Mockito.when(udfFuncMapper.queryUdfFuncPaging(Mockito.any(Page.class), Mockito.anyList(), Mockito.eq("test")))
.thenReturn(page);
Result result = udfFuncService.queryUdfFuncListPaging(getLoginUser(), "test", 1, 10);
logger.info(result.toString());
PageInfo pageInfo = (PageInfo) result.getData();
Assertions.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList()));
}
@Test
public void testQueryUdfFuncList() {
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true);
Mockito.when(
resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, null, 1, serviceLogger))
.thenReturn(true);
Mockito.when(
resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF, 1, udfLogger))
.thenReturn(getSetIds());
User user = getLoginUser();
user.setUserType(UserType.GENERAL_USER);
user.setId(1);
Mockito.when(udfFuncMapper.getUdfFuncByType(Collections.singletonList(1), UdfType.HIVE.ordinal()))
.thenReturn(getList());
Result<Object> result = udfFuncService.queryUdfFuncList(user, UdfType.HIVE.ordinal());
logger.info(result.toString());
Assertions.assertTrue(Status.SUCCESS.getCode() == result.getCode());
List<UdfFunc> udfFuncList = (List<UdfFunc>) result.getData();
Assertions.assertTrue(CollectionUtils.isNotEmpty(udfFuncList));
}
@Test
public void testDelete() {
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_DELETE, serviceLogger)).thenReturn(true);
Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{122}, 0,
serviceLogger)).thenReturn(true);
Mockito.when(udfFuncMapper.deleteById(Mockito.anyInt())).thenReturn(1);
Mockito.when(udfUserMapper.deleteByUdfFuncId(Mockito.anyInt())).thenReturn(1);
Result result = udfFuncService.delete(getLoginUser(), 122);
logger.info(result.toString());
Assertions.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
}
@Test
public void testVerifyUdfFuncByName() {
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1,
ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true);
Mockito.when(
resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, null, 0, serviceLogger))
.thenReturn(true);
// success
Mockito.when(udfFuncMapper.queryUdfByIdStr(null, "UdfFuncServiceTest")).thenReturn(getList());
Result result = udfFuncService.verifyUdfFuncByName(getLoginUser(), "test");
logger.info(result.toString());
Assertions.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
// exist
result = udfFuncService.verifyUdfFuncByName(getLoginUser(), "UdfFuncServiceTest");
logger.info(result.toString());
Assertions.assertEquals(Status.UDF_FUNCTION_EXISTS.getMsg(), result.getMsg());
}
private Set<Integer> getSetIds() {
Set<Integer> set = new HashSet();
set.add(1);
return set;
}
/**
* create admin user
* @return
*/
private User getLoginUser() {
User loginUser = new User();
loginUser.setUserType(UserType.ADMIN_USER);
loginUser.setId(1);
return loginUser;
}
private List<UdfFunc> getList() {
List<UdfFunc> udfFuncList = new ArrayList<>();
udfFuncList.add(getUdfFunc());
return udfFuncList;
}
/**
* build a UdfFunc entity for tests
*/
private UdfFunc getUdfFunc() {
UdfFunc udfFunc = new UdfFunc();
udfFunc.setFuncName("UdfFuncServiceTest");
udfFunc.setClassName("org.apache.dolphinscheduler.api.service.UdfFuncServiceTest");
udfFunc.setResourceId(0);
udfFunc.setResourceName("UdfFuncServiceTest");
udfFunc.setCreateTime(new Date());
udfFunc.setDatabase("database");
udfFunc.setUpdateTime(new Date());
udfFunc.setType(UdfType.HIVE);
return udfFunc;
}
@AfterEach
public void after() {
mockedStaticPropertyUtils.close();
}
}

31
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java

@ -46,7 +46,6 @@ import org.apache.dolphinscheduler.dao.mapper.K8sNamespaceUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator;
@ -107,9 +106,6 @@ public class UsersServiceTest {
@Mock
private MetricsCleanUpService metricsCleanUpService;
@Mock
private UDFUserMapper udfUserMapper;
@Mock
private K8sNamespaceUserMapper k8sNamespaceUserMapper;
@ -532,33 +528,6 @@ public class UsersServiceTest {
Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGrantUDFFunction() {
String udfIds = "100000,120000";
when(userMapper.selectById(1)).thenReturn(getUser());
User loginUser = new User();
// user not exist
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> result = usersService.grantUDFFunction(loginUser, 2, udfIds);
logger.info(result.toString());
Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
// success
when(udfUserMapper.deleteByUserId(1)).thenReturn(1);
result = usersService.grantUDFFunction(loginUser, 1, udfIds);
logger.info(result.toString());
Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
// ERROR: NO_CURRENT_OPERATING_PERMISSION
loginUser.setId(2);
loginUser.setUserType(UserType.GENERAL_USER);
when(userMapper.selectById(2)).thenReturn(loginUser);
result = this.usersService.grantUDFFunction(loginUser, 2, udfIds);
logger.info(result.toString());
Assertions.assertEquals(Status.NO_CURRENT_OPERATING_PERMISSION, result.get(Constants.STATUS));
}
@Test
public void testGrantNamespaces() {
String namespaceIds = "100000,120000";

6
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java

@@ -45,8 +45,6 @@ public final class Constants {
public static final String RESOURCE_TYPE_FILE = "resources";
public static final String RESOURCE_TYPE_UDF = "udfs";
public static final String EMPTY_STRING = "";
/**
@@ -506,7 +504,9 @@
* session timeout
*/
public static final int SESSION_TIME_OUT = 7200;
public static final String UDF = "UDF";
public static final int MAX_FILE_SIZE = 1024 * 1024 * 1024;
public static final String CLASS = "class";
/**

3
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditModelType.java

@ -39,9 +39,6 @@ public enum AuditModelType {
RESOURCE("Resource", null),
FOLDER("Folder", RESOURCE),
FILE("File", FOLDER),
UDF_FOLDER("UDFFolder", RESOURCE),
UDF_FILE("UDFFile", UDF_FOLDER),
UDF_FUNCTION("UDFFunction", RESOURCE),
TASK_GROUP("TaskGroup", RESOURCE),
DATASOURCE("Datasource", null),

1
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java

@ -51,7 +51,6 @@ public enum AuthorizationType {
@Deprecated
UDF_FILE(2, "udf file"),
DATASOURCE(3, "data source"),
UDF(4, "udf function"),
PROJECTS(5, "projects"),
WORKER_GROUP(6, "worker group"),
ALERT_GROUP(7, "alert group"),

59
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java

@@ -1,59 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
import com.baomidou.mybatisplus.annotation.EnumValue;
/**
* UDF type
*/
public enum UdfType {
/**
* 0 hive; 1 spark
*/
HIVE(0, "hive"),
SPARK(1, "spark");
UdfType(int code, String descp) {
this.code = code;
this.descp = descp;
}
@EnumValue
private final int code;
private final String descp;
public int getCode() {
return code;
}
public String getDescp() {
return descp;
}
public static UdfType of(int type) {
for (UdfType ut : values()) {
if (ut.getCode() == type) {
return ut;
}
}
throw new IllegalArgumentException("invalid type : " + type);
}
}
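Note (not part of this change set): the removed UdfType enum stores its integer code through MyBatis-Plus (@EnumValue) and resolves it back with UdfType.of(int). A minimal usage sketch; the class name UdfTypeLookupSketch is illustrative only.
import org.apache.dolphinscheduler.common.enums.UdfType;
public class UdfTypeLookupSketch {
    public static void main(String[] args) {
        // resolve the codes persisted by MyBatis-Plus back to enum constants
        UdfType hive = UdfType.of(0);   // UdfType.HIVE
        UdfType spark = UdfType.of(1);  // UdfType.SPARK
        System.out.println(hive.getDescp() + ", " + spark.getDescp()); // prints: hive, spark
        // UdfType.of(2) would throw IllegalArgumentException("invalid type : 2")
    }
}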

61
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UDFUser.java

@@ -1,61 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.entity;
import java.util.Date;
import lombok.Data;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
@Data
@TableName("t_ds_relation_udfs_user")
public class UDFUser {
/**
* id
*/
@TableId(value = "id", type = IdType.AUTO)
private Integer id;
/**
* id
*/
private int userId;
/**
* udf id
*/
private int udfId;
/**
* permission
*/
private int perm;
/**
* create time
*/
private Date createTime;
/**
* update time
*/
private Date updateTime;
}

157
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java

@@ -1,157 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.entity;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.io.IOException;
import java.util.Date;
import java.util.Objects;
import lombok.Data;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.KeyDeserializer;
import com.google.common.base.Strings;
@Data
@TableName("t_ds_udfs")
public class UdfFunc {
/**
* id
*/
@TableId(value = "id", type = IdType.AUTO)
private Integer id;
/**
* user id
*/
private int userId;
public String getResourceType() {
return resourceType;
}
public void setResourceType(String resourceType) {
this.resourceType = "UDF";
}
@TableField(exist = false)
private String resourceType = "UDF";
/**
* udf function name
*/
private String funcName;
/**
* udf class name
*/
private String className;
/**
* udf argument types
*/
private String argTypes;
/**
* udf data base
*/
private String database;
/**
* udf description
*/
private String description;
/**
* resource id
*/
private int resourceId;
/**
* resource name
*/
private String resourceName;
/**
* udf function type: hive / spark
*/
private UdfType type;
/**
* create time
*/
private Date createTime;
/**
* update time
*/
private Date updateTime;
/**
* user name
*/
@TableField(exist = false)
private String userName;
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UdfFunc udfFunc = (UdfFunc) o;
if (!Objects.equals(id, udfFunc.id)) {
return false;
}
return Objects.equals(funcName, udfFunc.funcName);
}
@Override
public int hashCode() {
int result = id;
result = 31 * result + (funcName != null ? funcName.hashCode() : 0);
return result;
}
@Override
public String toString() {
return JSONUtils.toJsonString(this);
}
public static class UdfFuncDeserializer extends KeyDeserializer {
@Override
public Object deserializeKey(String key, DeserializationContext ctxt) throws IOException {
if (Strings.isNullOrEmpty(key)) {
return null;
}
return JSONUtils.parseObject(key, UdfFunc.class);
}
}
}
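Note (not part of this change set): the nested UdfFuncDeserializer above is a Jackson KeyDeserializer, which turns a JSON object key (the JSON form produced by UdfFunc#toString()) back into a UdfFunc when the function is used as a Map key. A minimal registration sketch, assuming standard Jackson wiring via SimpleModule; the class UdfFuncKeyExample and its method are illustrative, not copied from the removed code.
import java.util.Map;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
public class UdfFuncKeyExample {
    public static Map<UdfFunc, String> readUdfKeyedMap(String json) throws Exception {
        ObjectMapper objectMapper = new ObjectMapper();
        SimpleModule module = new SimpleModule();
        // without a KeyDeserializer, Jackson cannot rebuild a complex object from a JSON map key
        module.addKeyDeserializer(UdfFunc.class, new UdfFunc.UdfFuncDeserializer());
        objectMapper.registerModule(module);
        return objectMapper.readValue(json, new TypeReference<Map<UdfFunc, String>>() {
        });
    }
}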

4
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java

@@ -80,12 +80,12 @@ public interface DataSourceMapper extends BaseMapper<DataSource> {
List<DataSource> listAllDataSourceByType(@Param("type") Integer type);
/**
* list authorized UDF function
* list authorized datasource
*
* @param userId userId
* @param dataSourceIds data source id array
* @param <T> T
* @return UDF function list
* @return datasource list
*/
<T> List<DataSource> listAuthorizedDataSource(@Param("userId") int userId,
@Param("dataSourceIds") T[] dataSourceIds);

44
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.java

@@ -1,44 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.dao.entity.UDFUser;
import org.apache.ibatis.annotations.Param;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
* udf user relation mapper interface
*/
public interface UDFUserMapper extends BaseMapper<UDFUser> {
/**
* delete udf user relation by userId
* @param userId userId
* @return delete result
*/
int deleteByUserId(@Param("userId") int userId);
/**
* delete udf user relation by function id
* @param udfFuncId udfFuncId
* @return delete result
*/
int deleteByUdfFuncId(@Param("udfFuncId") int udfFuncId);
}

118
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java

@@ -1,118 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
/**
* udf function mapper interface
*/
public interface UdfFuncMapper extends BaseMapper<UdfFunc> {
/**
* select udf by id
* @param id udf id
* @return UdfFunc
*/
UdfFunc selectUdfById(@Param("id") int id);
/**
* query udf function by ids and function name
* @param ids ids
* @param funcNames funcNames
* @return udf function list
*/
List<UdfFunc> queryUdfByIdStr(@Param("ids") Integer[] ids,
@Param("funcNames") String funcNames);
/**
* udf function page
* @param page page
* @param ids udf function ids
* @param searchVal searchVal
* @return udf function IPage
*/
IPage<UdfFunc> queryUdfFuncPaging(IPage<UdfFunc> page,
@Param("ids") List<Integer> ids,
@Param("searchVal") String searchVal);
/**
* query udf function by type
* @param ids udf function ids
* @param type type
* @return udf function list
*/
List<UdfFunc> getUdfFuncByType(@Param("ids") List<Integer> ids,
@Param("type") Integer type);
/**
* query udf function except userId
* @param userId userId
* @return udf function list
*/
List<UdfFunc> queryUdfFuncExceptUserId(@Param("userId") int userId);
/**
* query authed udf function
* @param userId userId
* @return udf function list
*/
List<UdfFunc> queryAuthedUdfFunc(@Param("userId") int userId);
/**
* list authorized UDF function
* @param userId userId
* @param udfIds UDF function id array
* @return UDF function list
*/
<T> List<UdfFunc> listAuthorizedUdfFunc(@Param("userId") int userId, @Param("udfIds") T[] udfIds);
/**
* list UDF by resource id
* @param resourceIds resource id array
* @return UDF function list
*/
List<UdfFunc> listUdfByResourceId(@Param("resourceIds") Integer[] resourceIds);
/**
* list UDF by resource fullName
* @param resourceFullNames resource fullName array
* @return UDF function list
*/
List<UdfFunc> listUdfByResourceFullName(@Param("resourceFullNames") String[] resourceFullNames);
/**
* list authorized UDF by resource id
* @param userId user id
* @param resourceIds resource id array
* @return UDF function list
*/
List<UdfFunc> listAuthorizedUdfByResourceId(@Param("userId") int userId, @Param("resourceIds") int[] resourceIds);
/**
* list authorized UDF functions by userId
* @param userId user id
* @return authorized UDF function list
*/
List<UdfFunc> listAuthorizedUdfByUserId(@Param("userId") int userId);
}

29
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.xml

@@ -1,29 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.UDFUserMapper">
<delete id="deleteByUserId">
delete from t_ds_relation_udfs_user
where user_id = #{userId}
</delete>
<delete id="deleteByUdfFuncId">
delete from t_ds_relation_udfs_user
where udf_id = #{udfFuncId}
</delete>
</mapper>
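Note (not part of this change set): UDFUserMapper and the XML above maintained the t_ds_relation_udfs_user authorization table, and the service test earlier in this diff mocks exactly the deleteById / deleteByUdfFuncId pair when a UDF function is removed. A simplified sketch of that call pattern, assuming Spring-managed mappers; the class UdfFuncCleanupSketch and its method are illustrative, not copied from the removed UdfFuncServiceImpl.
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
public class UdfFuncCleanupSketch {
    @Autowired
    private UdfFuncMapper udfFuncMapper;
    @Autowired
    private UDFUserMapper udfUserMapper;
    // remove the UDF function row and every user-authorization row that references it
    @Transactional
    public void deleteUdfFunc(int udfFuncId) {
        udfFuncMapper.deleteById(udfFuncId);
        udfUserMapper.deleteByUdfFuncId(udfFuncId);
    }
}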

189
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml

@@ -1,189 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper">
<sql id="baseSql">
${alias}.id, ${alias}.user_id, ${alias}.func_name, ${alias}.class_name, ${alias}.type, ${alias}.arg_types,
${alias}.database, ${alias}.description, ${alias}.resource_id, ${alias}.resource_name, ${alias}.create_time, ${alias}.update_time
</sql>
<select id="selectUdfById" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf
where id = #{id}
</select>
<select id="queryUdfByIdStr" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf
where 1 = 1
<if test="ids != null and ids.length > 0">
and udf.id in
<foreach collection="ids" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
<if test="funcNames != null and funcNames != ''">
and udf.func_name = #{funcNames}
</if>
order by udf.id asc
</select>
<select id="queryUdfFuncPaging" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
,u.user_name
from t_ds_udfs udf,t_ds_user u
where 1=1 and udf.user_id=u.id
<if test="searchVal!= null and searchVal != ''">
and udf.func_name like concat('%', #{searchVal}, '%')
</if>
<if test="ids != null and ids.size() > 0">
and udf.id in
<foreach collection="ids" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
<!-- <if test="userId != 0">-->
<!-- and udf.id in (-->
<!-- select udf_id from t_ds_relation_udfs_user where user_id=#{userId}-->
<!-- union select id as udf_id from t_ds_udfs where user_id=#{userId})-->
<!-- </if>-->
order by udf.create_time desc
</select>
<select id="getUdfFuncByType" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf
where udf.type=#{type}
<if test="ids != null and ids.size() > 0">
and udf.id in
<foreach collection="ids" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
<!-- <if test="userId != 0">-->
<!-- and udf.id in (-->
<!-- select udf_id from t_ds_relation_udfs_user where user_id=#{userId}-->
<!-- union select id as udf_id from t_ds_udfs where user_id=#{userId})-->
<!-- </if>-->
</select>
<select id="queryUdfFuncExceptUserId" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf
where udf.user_id <![CDATA[ <> ]]> #{userId}
</select>
<select id="queryAuthedUdfFunc" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
SELECT
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf,t_ds_relation_udfs_user rel
WHERE udf.id = rel.udf_id
AND rel.user_id = #{userId}
</select>
<select id="listAuthorizedUdfFunc" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf
where
udf.id in (select udf_id from t_ds_relation_udfs_user where user_id=#{userId}
union select id as udf_id from t_ds_udfs where user_id=#{userId})
<if test="udfIds != null and udfIds.length > 0">
and udf.id in
<foreach collection="udfIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
<select id="listUdfByResourceId" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf
where 1=1
<if test="resourceIds != null and resourceIds.length > 0">
and udf.resource_id in
<foreach collection="resourceIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
<select id="listUdfByResourceFullName" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf
where 1=1
<if test="resourceFullNames != null and resourceFullNames.length > 0">
and udf.resource_name in
<foreach collection="resourceFullNames" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
<select id="listAuthorizedUdfByResourceId" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf
where
udf.id in (select udf_id from t_ds_relation_udfs_user where user_id=#{userId}
union select id as udf_id from t_ds_udfs where user_id=#{userId})
<if test="resourceIds != null and resourceIds.length > 0">
and udf.resource_id in
<foreach collection="resourceIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
<select id="listAuthorizedUdfByUserId" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
<include refid="baseSql">
<property name="alias" value="udf"/>
</include>
from t_ds_udfs udf
where 1=1
<if test="userId != 0">
and udf.id in (
select udf_id from t_ds_relation_udfs_user where user_id=#{userId}
union select id as udf_id from t_ds_udfs where user_id=#{userId})
</if>
</select>
</mapper>

50
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/UdfFuncTest.java

@@ -1,50 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.entity;
import org.apache.dolphinscheduler.dao.entity.UdfFunc.UdfFuncDeserializer;
import java.io.IOException;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public class UdfFuncTest {
/**
* test UdfFuncDeserializer.deserializeKey
*
* @throws IOException
*/
@Test
public void testUdfFuncDeserializer() throws IOException {
// UdfFuncDeserializer.deserializeKey key is null
UdfFuncDeserializer udfFuncDeserializer = new UdfFuncDeserializer();
Assertions.assertNull(udfFuncDeserializer.deserializeKey(null, null));
// round-trip: a UdfFunc serialized via toString() can be deserialized back as a map key
UdfFunc udfFunc = new UdfFunc();
udfFunc.setResourceName("dolphin_resource_update");
udfFunc.setResourceId(2);
udfFunc.setClassName("org.apache.dolphinscheduler.test.mrUpdate");
Assertions.assertNotNull(udfFuncDeserializer.deserializeKey(udfFunc.toString(), null));
}
}

184
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java

@@ -1,184 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.BaseDaoTest;
import org.apache.dolphinscheduler.dao.entity.UDFUser;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Date;
import java.util.List;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
public class UDFUserMapperTest extends BaseDaoTest {
@Autowired
private UDFUserMapper udfUserMapper;
@Autowired
private UserMapper userMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
/**
* insert
* @return UDFUser
*/
private UDFUser insertOne() {
UDFUser udfUser = new UDFUser();
udfUser.setUdfId(1);
udfUser.setUserId(1);
udfUser.setCreateTime(new Date());
udfUser.setUpdateTime(new Date());
udfUserMapper.insert(udfUser);
return udfUser;
}
/**
* insert UDFUser
* @param user user
* @param udfFunc udfFunc
* @return UDFUser
*/
private UDFUser insertOne(User user, UdfFunc udfFunc) {
UDFUser udfUser = new UDFUser();
udfUser.setUdfId(udfFunc.getId());
udfUser.setUserId(user.getId());
udfUser.setCreateTime(new Date());
udfUser.setUpdateTime(new Date());
udfUserMapper.insert(udfUser);
return udfUser;
}
/**
* insert one user
* @return User
*/
private User insertOneUser() {
User user = new User();
user.setUserName("user1");
user.setUserPassword("1");
user.setEmail("xx@123.com");
user.setUserType(UserType.GENERAL_USER);
user.setCreateTime(new Date());
user.setTenantId(1);
user.setQueue("dolphin");
user.setUpdateTime(new Date());
userMapper.insert(user);
return user;
}
/**
* insert one udf
* @return UdfFunc
*/
private UdfFunc insertOneUdfFunc() {
UdfFunc udfFunc = new UdfFunc();
udfFunc.setFuncName("dolphin_udf_func");
udfFunc.setClassName("org.apache.dolphinscheduler.test.mr");
udfFunc.setType(UdfType.HIVE);
udfFunc.setResourceId(1);
udfFunc.setResourceName("dolphin_resource");
udfFunc.setCreateTime(new Date());
udfFunc.setUpdateTime(new Date());
udfFuncMapper.insert(udfFunc);
return udfFunc;
}
/**
* test update
*/
@Test
public void testUpdate() {
// insertOneUser
User user = insertOneUser();
// insertOneUdfFunc
UdfFunc udfFunc = insertOneUdfFunc();
// insertOne
UDFUser udfUser = insertOne(user, udfFunc);
udfUser.setUserId(2);
udfUser.setUdfId(2);
int update = udfUserMapper.updateById(udfUser);
Assertions.assertEquals(update, 1);
}
/**
* test delete
*/
@Test
public void testDelete() {
// insertOneUser
User user = insertOneUser();
// insertOneUdfFunc
UdfFunc udfFunc = insertOneUdfFunc();
// insertOne
UDFUser udfUser = insertOne(user, udfFunc);
int delete = udfUserMapper.deleteById(udfUser.getId());
Assertions.assertEquals(delete, 1);
}
/**
* test query
*/
@Test
public void testQuery() {
// insertOne
UDFUser udfUser = insertOne();
// query
List<UDFUser> udfUserList = udfUserMapper.selectList(null);
Assertions.assertNotEquals(0, udfUserList.size());
}
/**
* test delete by userId
*/
@Test
public void testDeleteByUserId() {
// insertOneUser
User user = insertOneUser();
// insertOneUdfFunc
UdfFunc udfFunc = insertOneUdfFunc();
// insertOne
UDFUser udfUser = insertOne(user, udfFunc);
int delete = udfUserMapper.deleteByUserId(user.getId());
Assertions.assertEquals(1, delete);
}
/**
* test delete by udfFuncId
*/
@Test
public void testDeleteByUdfFuncId() {
// insertOneUser
User user = insertOneUser();
// insertOneUdfFunc
UdfFunc udfFunc = insertOneUdfFunc();
// insertOne
UDFUser udfUser = insertOne(user, udfFunc);
int delete = udfUserMapper.deleteByUdfFuncId(udfFunc.getId());
Assertions.assertEquals(1, delete);
}
}

280
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java

@@ -1,280 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import static java.util.stream.Collectors.toList;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.BaseDaoTest;
import org.apache.dolphinscheduler.dao.entity.UDFUser;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
public class UdfFuncMapperTest extends BaseDaoTest {
@Autowired
private UserMapper userMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private UDFUserMapper udfUserMapper;
/**
* insert one udf
*
* @return UdfFunc
*/
private UdfFunc insertOne(String funcName) {
UdfFunc udfFunc = new UdfFunc();
udfFunc.setUserId(1);
udfFunc.setFuncName(funcName);
udfFunc.setClassName("org.apache.dolphinscheduler.test.mr");
udfFunc.setType(UdfType.HIVE);
udfFunc.setResourceId(1);
udfFunc.setResourceName("dolphin_resource");
udfFunc.setCreateTime(new Date());
udfFunc.setUpdateTime(new Date());
udfFuncMapper.insert(udfFunc);
return udfFunc;
}
/**
* insert one udf
*
* @return UdfFunc
*/
private UdfFunc insertOne(User user) {
UdfFunc udfFunc = new UdfFunc();
udfFunc.setUserId(user.getId());
udfFunc.setFuncName("dolphin_udf_func" + user.getUserName());
udfFunc.setClassName("org.apache.dolphinscheduler.test.mr");
udfFunc.setType(UdfType.HIVE);
udfFunc.setResourceId(1);
udfFunc.setResourceName("dolphin_resource");
udfFunc.setCreateTime(new Date());
udfFunc.setUpdateTime(new Date());
udfFuncMapper.insert(udfFunc);
return udfFunc;
}
/**
* insert one user
*
* @return User
*/
private User insertOneUser() {
return insertOneUser("user1");
}
/**
* insert one user
*
* @return User
*/
private User insertOneUser(String userName) {
return createGeneralUser(userName);
}
/**
* create general user
*
* @return User
*/
private User createGeneralUser(String userName) {
User user = new User();
user.setUserName(userName);
user.setUserPassword("1");
user.setEmail("xx@123.com");
user.setUserType(UserType.GENERAL_USER);
user.setCreateTime(new Date());
user.setTenantId(1);
user.setUpdateTime(new Date());
userMapper.insert(user);
return user;
}
/**
* insert UDFUser
*
* @param user user
* @param udfFunc udf func
* @return UDFUser
*/
private UDFUser insertOneUDFUser(User user, UdfFunc udfFunc) {
UDFUser udfUser = new UDFUser();
udfUser.setUdfId(udfFunc.getId());
udfUser.setUserId(user.getId());
udfUser.setCreateTime(new Date());
udfUser.setUpdateTime(new Date());
udfUserMapper.insert(udfUser);
return udfUser;
}
/**
* test update
*/
@Test
public void testUpdate() {
// insertOne
UdfFunc udfFunc = insertOne("func1");
udfFunc.setResourceName("dolphin_resource_update");
udfFunc.setResourceId(2);
udfFunc.setClassName("org.apache.dolphinscheduler.test.mrUpdate");
udfFunc.setUpdateTime(new Date());
// update
int update = udfFuncMapper.updateById(udfFunc);
Assertions.assertEquals(update, 1);
}
/**
* test delete
*/
@Test
public void testDelete() {
// insertOne
UdfFunc udfFunc = insertOne("func2");
// delete
int delete = udfFuncMapper.deleteById(udfFunc.getId());
Assertions.assertEquals(delete, 1);
}
/**
* test query udf by ids
*/
@Test
public void testQueryUdfByIdStr() {
// insertOne
UdfFunc udfFunc = insertOne("func3");
// insertOne
UdfFunc udfFunc1 = insertOne("func4");
Integer[] idArray = new Integer[]{udfFunc.getId(), udfFunc1.getId()};
// queryUdfByIdStr
List<UdfFunc> udfFuncList = udfFuncMapper.queryUdfByIdStr(idArray, "");
Assertions.assertNotEquals(0, udfFuncList.size());
}
/**
* test page
*/
@Test
public void testQueryUdfFuncPaging() {
// insertOneUser
User user = insertOneUser();
// insertOne
UdfFunc udfFunc = insertOne(user);
// queryUdfFuncPaging
Page<UdfFunc> page = new Page<>(1, 3);
IPage<UdfFunc> udfFuncIPage =
udfFuncMapper.queryUdfFuncPaging(page, Collections.singletonList(udfFunc.getId()), "");
Assertions.assertNotEquals(0, udfFuncIPage.getTotal());
}
/**
* test get udffunc by type
*/
@Test
public void testGetUdfFuncByType() {
// insertOneUser
User user = insertOneUser();
// insertOne
UdfFunc udfFunc = insertOne(user);
// getUdfFuncByType
List<UdfFunc> udfFuncList =
udfFuncMapper.getUdfFuncByType(Collections.singletonList(udfFunc.getId()), udfFunc.getType().ordinal());
Assertions.assertNotEquals(0, udfFuncList.size());
}
/**
* test query udf functions except userId
*/
@Test
public void testQueryUdfFuncExceptUserId() {
// insertOneUser
User user1 = insertOneUser();
User user2 = insertOneUser("user2");
// insertOne
UdfFunc udfFunc1 = insertOne(user1);
UdfFunc udfFunc2 = insertOne(user2);
List<UdfFunc> udfFuncList = udfFuncMapper.queryUdfFuncExceptUserId(user1.getId());
Assertions.assertNotEquals(0, udfFuncList.size());
}
/**
* test query authed udffunc
*/
@Test
public void testQueryAuthedUdfFunc() {
// insertOneUser
User user = insertOneUser();
// insertOne
UdfFunc udfFunc = insertOne(user);
// insertOneUDFUser
UDFUser udfUser = insertOneUDFUser(user, udfFunc);
// queryAuthedUdfFunc
List<UdfFunc> udfFuncList = udfFuncMapper.queryAuthedUdfFunc(user.getId());
Assertions.assertNotEquals(0, udfFuncList.size());
}
@Test
public void testListAuthorizedUdfFunc() {
// create general user
User generalUser1 = createGeneralUser("user1");
User generalUser2 = createGeneralUser("user2");
// create udf function
UdfFunc udfFunc = insertOne(generalUser1);
UdfFunc unauthorizedUdfFunc = insertOne(generalUser2);
// udf function ids
Integer[] udfFuncIds = new Integer[]{udfFunc.getId(), unauthorizedUdfFunc.getId()};
List<UdfFunc> authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds);
Assertions.assertEquals(generalUser1.getId().intValue(), udfFunc.getUserId());
Assertions.assertNotEquals(generalUser1.getId().intValue(), unauthorizedUdfFunc.getUserId());
Assertions.assertFalse(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList())
.containsAll(Arrays.asList(udfFuncIds)));
// authorize unauthorizedUdfFunc to generalUser1
insertOneUDFUser(generalUser1, unauthorizedUdfFunc);
authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds);
Assertions.assertTrue(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList())
.containsAll(Arrays.asList(udfFuncIds)));
}
}

188
dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FunctionManageE2ETest.java

@@ -1,188 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.cases;
import lombok.SneakyThrows;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.dolphinscheduler.e2e.core.Constants;
import org.apache.dolphinscheduler.e2e.core.DolphinScheduler;
import org.apache.dolphinscheduler.e2e.pages.LoginPage;
import org.apache.dolphinscheduler.e2e.pages.resource.FunctionManagePage;
import org.apache.dolphinscheduler.e2e.pages.resource.ResourcePage;
import org.apache.dolphinscheduler.e2e.pages.resource.UdfManagePage;
import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage;
import org.apache.dolphinscheduler.e2e.pages.security.TenantPage;
import org.apache.dolphinscheduler.e2e.pages.security.UserPage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Comparator;
import org.testcontainers.shaded.org.awaitility.Awaitility;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
@DolphinScheduler(composeFiles = "docker/file-manage/docker-compose.yaml")
public class FunctionManageE2ETest {
private static RemoteWebDriver browser;
private static final String tenant = System.getProperty("user.name");
private static final String user = "admin";
private static final String password = "dolphinscheduler123";
private static final String email = "admin@gmail.com";
private static final String phone = "15800000000";
private static final String testUdfFunctionName = "test_function";
private static final String testRenameUdfFunctionName = "test_rename_function";
private static final String testUploadUdfFileName = "hive-jdbc-3.1.2.jar";
private static final String testClassName = "org.dolphinscheduler.UdfTest";
private static final String testDescription = "test_description";
private static final Path testUploadUdfFilePath = Constants.HOST_TMP_PATH.resolve(testUploadUdfFileName);
@BeforeAll
@SneakyThrows
public static void setup() {
TenantPage tenantPage = new LoginPage(browser)
.login(user, password)
.goToNav(SecurityPage.class)
.goToTab(TenantPage.class)
.create(tenant);
Awaitility.await().untilAsserted(() -> assertThat(tenantPage.tenantList())
.as("Tenant list should contain newly-created tenant")
.extracting(WebElement::getText)
.anyMatch(it -> it.contains(tenant)));
downloadFile("https://repo1.maven.org/maven2/org/apache/hive/hive-jdbc/3.1.2/hive-jdbc-3.1.2.jar", testUploadUdfFilePath.toFile().getAbsolutePath());
UserPage userPage = tenantPage.goToNav(SecurityPage.class)
.goToTab(UserPage.class);
new WebDriverWait(userPage.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated(
new By.ByClassName("name")));
UdfManagePage udfManagePage = userPage.update(user, user, email, phone, tenant)
.goToNav(ResourcePage.class)
.goToTab(UdfManagePage.class)
.uploadFile(testUploadUdfFilePath.toFile().getAbsolutePath());
udfManagePage.goToNav(ResourcePage.class)
.goToTab(FunctionManagePage.class);
}
@AfterAll
@SneakyThrows
public static void cleanup() {
Files.walk(Constants.HOST_CHROME_DOWNLOAD_PATH)
.sorted(Comparator.reverseOrder())
.map(Path::toFile)
.forEach(File::delete);
Files.deleteIfExists(testUploadUdfFilePath);
}
static void downloadFile(String downloadUrl, String filePath) throws Exception {
    URL url = new URL(downloadUrl);
    URLConnection conn = url.openConnection();
    // try-with-resources closes both streams even if the copy fails part-way through
    try (InputStream inStream = conn.getInputStream();
            FileOutputStream fs = new FileOutputStream(filePath)) {
        int byteRead;
        byte[] buffer = new byte[1024];
        while ((byteRead = inStream.read(buffer)) != -1) {
            fs.write(buffer, 0, byteRead);
        }
    }
}
@Test
@Order(10)
void testCreateUdfFunction() {
FunctionManagePage page = new FunctionManagePage(browser);
page.createUdfFunction(testUdfFunctionName, testClassName, testUploadUdfFileName, testDescription);
Awaitility.await().untilAsserted(() -> assertThat(page.functionList())
.as("Function list should contain newly-created file")
.extracting(WebElement::getText)
.anyMatch(it -> it.contains(testUdfFunctionName)));
}
@Test
@Order(20)
void testRenameUdfFunction() {
FunctionManagePage page = new FunctionManagePage(browser);
browser.navigate().refresh();
page.renameUdfFunction(testUdfFunctionName, testRenameUdfFunctionName);
Awaitility.await().pollDelay(Duration.ofSeconds(2)).untilAsserted(() -> assertThat(page.functionList())
.as("Function list should contain newly-created file")
.extracting(WebElement::getText)
.anyMatch(it -> it.contains(testRenameUdfFunctionName)));
}
@Test
@Order(30)
void testDeleteUdfFunction() {
FunctionManagePage page = new FunctionManagePage(browser);
page.deleteUdfFunction(testRenameUdfFunctionName);
Awaitility.await().untilAsserted(() -> {
browser.navigate().refresh();
assertThat(
page.functionList()
).noneMatch(
it -> it.getText().contains(testRenameUdfFunctionName)
);
});
}
}

229
dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UdfManageE2ETest.java

@@ -1,229 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.cases;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.dolphinscheduler.e2e.core.Constants;
import org.apache.dolphinscheduler.e2e.core.DolphinScheduler;
import org.apache.dolphinscheduler.e2e.pages.LoginPage;
import org.apache.dolphinscheduler.e2e.pages.resource.ResourcePage;
import org.apache.dolphinscheduler.e2e.pages.resource.UdfManagePage;
import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage;
import org.apache.dolphinscheduler.e2e.pages.security.TenantPage;
import org.apache.dolphinscheduler.e2e.pages.security.UserPage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Comparator;
import org.testcontainers.shaded.org.awaitility.Awaitility;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import lombok.SneakyThrows;
@DolphinScheduler(composeFiles = "docker/file-manage/docker-compose.yaml")
public class UdfManageE2ETest {
private static RemoteWebDriver browser;
private static final String tenant = System.getProperty("user.name");
private static final String user = "admin";
private static final String password = "dolphinscheduler123";
private static final String email = "admin@gmail.com";
private static final String phone = "15800000000";
private static final String testDirectoryName = "test_directory";
private static final String testRenameDirectoryName = "test_rename_directory";
private static final String testUploadUdfFileName = "hive-jdbc-3.1.2.jar";
private static final Path testUploadUdfFilePath = Constants.HOST_TMP_PATH.resolve(testUploadUdfFileName);
private static final String testUploadUdfRenameFileName = "hive-jdbc.jar";
@BeforeAll
public static void setup() {
TenantPage tenantPage = new LoginPage(browser)
.login(user, password)
.goToNav(SecurityPage.class)
.goToTab(TenantPage.class)
.create(tenant);
Awaitility.await().untilAsserted(() -> assertThat(tenantPage.tenantList())
.as("Tenant list should contain newly-created tenant")
.extracting(WebElement::getText)
.anyMatch(it -> it.contains(tenant)));
UserPage userPage = tenantPage.goToNav(SecurityPage.class)
.goToTab(UserPage.class);
new WebDriverWait(userPage.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated(
new By.ByClassName("name")));
userPage.update(user, user, email, phone, tenant)
.goToNav(ResourcePage.class)
.goToTab(UdfManagePage.class);
}
@AfterAll
@SneakyThrows
public static void cleanup() {
Files.walk(Constants.HOST_CHROME_DOWNLOAD_PATH)
.sorted(Comparator.reverseOrder())
.map(Path::toFile)
.forEach(File::delete);
Files.deleteIfExists(testUploadUdfFilePath);
}
@Test
@Order(10)
void testCreateDirectory() {
final UdfManagePage page = new UdfManagePage(browser);
new WebDriverWait(page.driver(), Duration.ofSeconds(20))
.until(ExpectedConditions.urlContains("/resource-manage"));
page.createDirectory(testDirectoryName);
Awaitility.await().untilAsserted(() -> assertThat(page.udfList())
.as("File list should contain newly-created file")
.extracting(WebElement::getText)
.anyMatch(it -> it.contains(testDirectoryName)));
}
//when s3 the directory cannot be renamed
// @Test
// @Order(20)
// void testRenameDirectory() {
// final UdfManagePage page = new UdfManagePage(browser);
//
// page.rename(testDirectoryName, testRenameDirectoryName);
//
// await().untilAsserted(() -> {
// browser.navigate().refresh();
//
// assertThat(page.udfList())
// .as("File list should contain newly-created file")
// .extracting(WebElement::getText)
// .anyMatch(it -> it.contains(testRenameDirectoryName));
// });
// }
@Test
@Order(30)
void testDeleteDirectory() {
final UdfManagePage page = new UdfManagePage(browser);
page.delete(testDirectoryName);
Awaitility.await().untilAsserted(() -> {
browser.navigate().refresh();
assertThat(
page.udfList()
).noneMatch(
it -> it.getText().contains(testDirectoryName)
);
});
}
@Test
@Order(40)
@SneakyThrows
void testUploadUdf() {
final UdfManagePage page = new UdfManagePage(browser);
downloadFile("https://repo1.maven.org/maven2/org/apache/hive/hive-jdbc/3.1.2/hive-jdbc-3.1.2.jar", testUploadUdfFilePath.toFile().getAbsolutePath());
page.uploadFile(testUploadUdfFilePath.toFile().getAbsolutePath());
Awaitility.await().untilAsserted(() -> {
assertThat(page.udfList())
.as("File list should contain newly-created file")
.extracting(WebElement::getText)
.anyMatch(it -> it.contains(testUploadUdfFileName));
});
}
void downloadFile(String downloadUrl, String filePath) throws Exception {
    URL url = new URL(downloadUrl);
    URLConnection conn = url.openConnection();
    // try-with-resources closes both streams even if the copy fails part-way through
    try (InputStream inStream = conn.getInputStream();
            FileOutputStream fs = new FileOutputStream(filePath)) {
        int byteRead;
        byte[] buffer = new byte[1024];
        while ((byteRead = inStream.read(buffer)) != -1) {
            fs.write(buffer, 0, byteRead);
        }
    }
}
@Test
@Order(60)
void testRenameUdf() {
final UdfManagePage page = new UdfManagePage(browser);
page.rename(testUploadUdfFileName, testUploadUdfRenameFileName);
Awaitility.await().untilAsserted(() -> {
assertThat(page.udfList())
.as("File list should contain newly-created file")
.extracting(WebElement::getText)
.anyMatch(it -> it.contains(testUploadUdfRenameFileName));
});
}
@Test
@Order(70)
void testDeleteUdf() {
final UdfManagePage page = new UdfManagePage(browser);
page.delete(testUploadUdfRenameFileName);
Awaitility.await().untilAsserted(() -> {
browser.navigate().refresh();
assertThat(
page.udfList()
).noneMatch(
it -> it.getText().contains(testUploadUdfRenameFileName)
);
});
}
}

201
dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/FunctionManagePage.java

@@ -1,201 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.pages.resource;
import lombok.Getter;
import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage;
import java.util.List;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.FindBys;
import org.openqa.selenium.support.PageFactory;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
@Getter
public class FunctionManagePage extends NavBarPage implements ResourcePage.Tab {
@FindBy(className = "btn-create-udf-function")
private WebElement buttonCreateUdfFunction;
@FindBy(className = "items")
private List<WebElement> functionList;
@FindBys({
@FindBy(className = "n-popconfirm__action"),
@FindBy(className = "n-button--primary-type"),
})
private WebElement buttonConfirm;
private CreateUdfFunctionBox createUdfFunctionBox;
private RenameUdfFunctionBox renameUdfFunctionBox;
public FunctionManagePage(RemoteWebDriver driver) {
super(driver);
createUdfFunctionBox = new CreateUdfFunctionBox();
renameUdfFunctionBox = new RenameUdfFunctionBox();
}
public FunctionManagePage createUdfFunction(String udfFunctionName, String className, String udfResourceName, String description) {
buttonCreateUdfFunction().click();
((JavascriptExecutor) driver).executeScript("arguments[0].click();", createUdfFunctionBox().radioFunctionType());
createUdfFunctionBox().inputFunctionName().sendKeys(udfFunctionName);
createUdfFunctionBox().inputClassName().sendKeys(className);
createUdfFunctionBox().inputDescription().sendKeys(description);
createUdfFunctionBox().buttonUdfResourceDropDown().click();
createUdfFunctionBox().selectUdfResource()
.stream()
.filter(it -> it.getAttribute("innerHTML").contains(udfResourceName))
.findFirst()
.orElseThrow(() -> new RuntimeException(String.format("No %s in udf resource list", udfResourceName)))
.click();
createUdfFunctionBox().buttonSubmit().click();
return this;
}
public FunctionManagePage renameUdfFunction(String currentName, String afterName) {
functionList()
.stream()
.filter(it -> it.getText().contains(currentName))
.flatMap(it -> it.findElements(By.className("btn-edit")).stream())
.filter(WebElement::isDisplayed)
.findFirst()
.orElseThrow(() -> new RuntimeException("No rename button in function manage list"))
.click();
renameUdfFunctionBox().inputFunctionName().sendKeys(Keys.CONTROL + "a");
renameUdfFunctionBox().inputFunctionName().sendKeys(Keys.BACK_SPACE);
renameUdfFunctionBox().inputFunctionName().sendKeys(afterName);
renameUdfFunctionBox.buttonSubmit().click();
return this;
}
public FunctionManagePage deleteUdfFunction(String udfFunctionName) {
functionList()
.stream()
.filter(it -> it.getText().contains(udfFunctionName))
.flatMap(it -> it.findElements(By.className("btn-delete")).stream())
.filter(WebElement::isDisplayed)
.findFirst()
.orElseThrow(() -> new RuntimeException("No delete button in udf resource list"))
.click();
((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm());
return this;
}
@Getter
public class CreateUdfFunctionBox {
CreateUdfFunctionBox() {
PageFactory.initElements(driver, this);
}
@FindBys({
@FindBy(className = "radio-function-type"),
@FindBy(tagName = "input"),
})
private WebElement radioFunctionType;
@FindBys({
@FindBy(className = "input-function-name"),
@FindBy(tagName = "input"),
})
private WebElement inputFunctionName;
@FindBys({
@FindBy(className = "input-class-name"),
@FindBy(tagName = "input"),
})
private WebElement inputClassName;
@FindBys({
@FindBy(className = "btn-udf-resource-dropdown"),
@FindBy(className = "n-base-selection"),
})
private WebElement buttonUdfResourceDropDown;
@FindBy(className = "n-tree-node-content__text")
private List<WebElement> selectUdfResource;
@FindBys({
@FindBy(className = "input-description"),
@FindBy(tagName = "textarea"),
})
private WebElement inputDescription;
@FindBy(className = "btn-submit")
private WebElement buttonSubmit;
@FindBy(className = "btn-cancel")
private WebElement buttonCancel;
}
@Getter
public class RenameUdfFunctionBox {
RenameUdfFunctionBox() {
PageFactory.initElements(driver, this);
}
@FindBys({
@FindBy(className = "input-function-name"),
@FindBy(tagName = "input"),
})
private WebElement inputFunctionName;
@FindBys({
@FindBy(className = "input-class-name"),
@FindBy(tagName = "input"),
})
private WebElement inputClassName;
@FindBys({
@FindBy(className = "input-description"),
@FindBy(tagName = "textarea"),
})
private WebElement inputDescription;
@FindBy(className = "btn-submit")
private WebElement buttonSubmit;
@FindBy(className = "btn-cancel")
private WebElement buttonCancel;
}
}

22
dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/ResourcePage.java

@@ -38,12 +38,6 @@ public class ResourcePage extends NavBarPage implements NavBarPage.NavBarItem {
@FindBy(css = ".tab-vertical > .n-menu-item:nth-child(1) > .n-menu-item-content")
private WebElement fileManageTab;
@FindBy(css = ".tab-vertical .n-submenu:nth-of-type(2) > .n-submenu-children > .n-menu-item:nth-of-type(1) > .n-menu-item-content")
private WebElement udfManageTab;
@FindBy(css = ".tab-vertical .n-submenu:nth-of-type(2) > .n-submenu-children > .n-menu-item:nth-of-type(2) > .n-menu-item-content")
private WebElement functionManageTab;
public ResourcePage(RemoteWebDriver driver) {
super(driver);
}
@ -57,22 +51,6 @@ public class ResourcePage extends NavBarPage implements NavBarPage.NavBarItem {
return tab.cast(new FileManagePage(driver));
}
if (tab == UdfManagePage.class) {
new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/resource"));
new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(udfManageTab));
((JavascriptExecutor) driver).executeScript("arguments[0].click();", udfManageTab());
new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/resource-manage"));
return tab.cast(new UdfManagePage(driver));
}
if (tab == FunctionManagePage.class) {
new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/resource"));
new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(functionManageTab));
((JavascriptExecutor) driver).executeScript("arguments[0].click();", functionManageTab());
new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/function-manage"));
return tab.cast(new FunctionManagePage(driver));
}
throw new UnsupportedOperationException("Unknown tab: " + tab.getName());
}

197
dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/UdfManagePage.java

@@ -1,197 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.dolphinscheduler.e2e.pages.resource;
import lombok.Getter;
import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage;
import java.time.Duration;
import java.util.List;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.LocalFileDetector;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.FindBys;
import org.openqa.selenium.support.PageFactory;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
@Getter
public class UdfManagePage extends NavBarPage implements ResourcePage.Tab {
@FindBy(className = "btn-create-directory")
private WebElement buttonCreateDirectory;
@FindBy(className = "btn-upload-resource")
private WebElement buttonUploadUdf;
@FindBy(className = "items")
private List<WebElement> udfList;
@FindBys({
@FindBy(className = "n-popconfirm__action"),
@FindBy(className = "n-button--primary-type"),
})
private WebElement buttonConfirm;
private final UploadFileBox uploadFileBox;
private final RenameBox renameBox;
private final CreateDirectoryBox createDirectoryBox;
public UdfManagePage(RemoteWebDriver driver) {
super(driver);
uploadFileBox = new UploadFileBox();
renameBox = new RenameBox();
createDirectoryBox = new CreateDirectoryBox();
}
public UdfManagePage createDirectory(String name) {
buttonCreateDirectory().click();
createDirectoryBox().inputDirectoryName().sendKeys(name);
createDirectoryBox().buttonSubmit().click();
return this;
}
public UdfManagePage uploadFile(String filePath) {
new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(buttonUploadUdf));
buttonUploadUdf().click();
driver.setFileDetector(new LocalFileDetector());
uploadFileBox().buttonUpload().sendKeys(filePath);
uploadFileBox().buttonSubmit().click();
return this;
}
public UdfManagePage downloadFile(String fileName) {
udfList()
.stream()
.filter(it -> it.getText().contains(fileName))
.flatMap(it -> it.findElements(By.className("btn-download")).stream())
.filter(WebElement::isDisplayed)
.findFirst()
.orElseThrow(() -> new RuntimeException("No download button in udf manage list"))
.click();
return this;
}
public UdfManagePage rename(String currentName, String AfterName) {
udfList()
.stream()
.filter(it -> it.getText().contains(currentName))
.flatMap(it -> it.findElements(By.className("btn-rename")).stream())
.filter(WebElement::isDisplayed)
.findFirst()
.orElseThrow(() -> new RuntimeException("No rename button in udf manage list"))
.click();
renameBox().inputName().clear();
renameBox().inputName().sendKeys(AfterName);
renameBox().buttonSubmit().click();
return this;
}
public UdfManagePage delete(String name) {
udfList()
.stream()
.filter(it -> it.getText().contains(name))
.flatMap(it -> it.findElements(By.className("btn-delete")).stream())
.filter(WebElement::isDisplayed)
.findFirst()
.orElseThrow(() -> new RuntimeException("No delete button in udf manage list"))
.click();
((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm());
return this;
}
@Getter
public class RenameBox {
RenameBox() {
PageFactory.initElements(driver, this);
}
@FindBys({
@FindBy(className = "input-name"),
@FindBy(tagName = "input"),
})
private WebElement inputName;
@FindBy(className = "btn-submit")
private WebElement buttonSubmit;
@FindBy(className = "btn-cancel")
private WebElement buttonCancel;
}
@Getter
public class UploadFileBox {
UploadFileBox() {
PageFactory.initElements(driver, this);
}
@FindBys({
@FindBy(className = "btn-upload"),
@FindBy(tagName = "input"),
})
private WebElement buttonUpload;
@FindBy(className = "btn-submit")
private WebElement buttonSubmit;
@FindBy(className = "btn-cancel")
private WebElement buttonCancel;
}
@Getter
public class CreateDirectoryBox {
CreateDirectoryBox() {
PageFactory.initElements(driver, this);
}
@FindBys({
@FindBy(className = "input-directory-name"),
@FindBy(tagName = "input"),
})
private WebElement inputDirectoryName;
@FindBy(className = "btn-submit")
private WebElement buttonSubmit;
@FindBy(className = "btn-cancel")
private WebElement buttonCancel;
}
}

19
dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java

@ -38,7 +38,6 @@ import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.K8sTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
@ -53,7 +52,6 @@ import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.DataQu
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.AbstractResourceParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.DataSourceParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.UdfFuncParameters;
import org.apache.dolphinscheduler.plugin.task.api.utils.JdbcUrlParser;
import org.apache.dolphinscheduler.plugin.task.api.utils.MapUtils;
import org.apache.dolphinscheduler.plugin.task.spark.SparkParameters;
@ -152,9 +150,6 @@ public class TaskExecutionContextFactory {
case DATASOURCE:
setTaskDataSourceResourceInfo(map);
break;
case UDF:
setTaskUdfFuncResourceInfo(map);
break;
default:
break;
}
@ -178,19 +173,6 @@ public class TaskExecutionContextFactory {
});
}
private void setTaskUdfFuncResourceInfo(Map<Integer, AbstractResourceParameters> map) {
if (MapUtils.isEmpty(map)) {
return;
}
List<UdfFunc> udfFuncList = processService.queryUdfFunListByIds(map.keySet().toArray(new Integer[map.size()]));
udfFuncList.forEach(udfFunc -> {
UdfFuncParameters udfFuncParameters =
JSONUtils.parseObject(JSONUtils.toJsonString(udfFunc), UdfFuncParameters.class);
map.put(udfFunc.getId(), udfFuncParameters);
});
}
private void setDataQualityTaskRelation(DataQualityTaskExecutionContext dataQualityTaskExecutionContext,
TaskInstance taskInstance, String tenantCode) {
DataQualityParameters dataQualityParameters =
@ -406,6 +388,7 @@ public class TaskExecutionContextFactory {
/**
* The StatisticsValueWriterConfig will be used in DataQualityApplication that
* writes the statistics value into dolphin scheduler datasource
*
* @param dataQualityTaskExecutionContext
*/
private void setStatisticsValueWriterConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {

3
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java

@ -43,7 +43,6 @@ import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.TaskGroupQueue;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.service.exceptions.CronParseException;
import org.apache.dolphinscheduler.service.model.TaskNode;
@ -120,8 +119,6 @@ public interface ProcessService {
DataSource findDataSourceById(int id);
List<UdfFunc> queryUdfFunListByIds(Integer[] ids);
ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId);
<T> List<T> listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType);

79
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java

@ -72,7 +72,6 @@ import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.TaskGroupQueue;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ClusterMapper;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
@ -98,7 +97,6 @@ import org.apache.dolphinscheduler.dao.mapper.TaskGroupMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskGroupQueueMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper;
import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao;
@ -213,9 +211,6 @@ public class ProcessServiceImpl implements ProcessService {
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private TenantMapper tenantMapper;
@ -1502,33 +1497,50 @@ public class ProcessServiceImpl implements ProcessService {
}
/**
* find udf function list by id list string
* query project name and user name by processInstanceId.
*
* @param ids ids
* @return udf function list
* @param processInstanceId processInstanceId
* @return projectName and userName
*/
@Override
public List<UdfFunc> queryUdfFunListByIds(Integer[] ids) {
return udfFuncMapper.queryUdfByIdStr(ids, null);
public ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId) {
return projectMapper.queryProjectWithUserByProcessInstanceId(processInstanceId);
}
/**
* query project name and user name by processInstanceId.
* get user by user id
*
* @param processInstanceId processInstanceId
* @return projectName and userName
* @param userId user id
* @return User
*/
@Override
public ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId) {
return projectMapper.queryProjectWithUserByProcessInstanceId(processInstanceId);
public User getUserById(int userId) {
return userMapper.selectById(userId);
}
/**
* list unauthorized udf function
* format task app id in task instance
*/
@Override
public String formatTaskAppId(TaskInstance taskInstance) {
ProcessInstance processInstance = findProcessInstanceById(taskInstance.getProcessInstanceId());
if (processInstance == null) {
return "";
}
ProcessDefinition definition = findProcessDefinition(processInstance.getProcessDefinitionCode(),
processInstance.getProcessDefinitionVersion());
if (definition == null) {
return "";
}
return String.format("%s_%s_%s", definition.getId(), processInstance.getId(), taskInstance.getId());
}
/**
* list unauthorized
*
* @param userId user id
* @param needChecks data source id array
* @return unauthorized udf function list
* @return unauthorized
*/
@Override
public <T> List<T> listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType) {
@ -1543,11 +1555,6 @@ public class ProcessServiceImpl implements ProcessService {
.stream().map(DataSource::getId).collect(toSet());
originResSet.removeAll(authorizedDatasources);
break;
case UDF:
Set<Integer> authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream()
.map(UdfFunc::getId).collect(toSet());
originResSet.removeAll(authorizedUdfs);
break;
default:
break;
}
@ -1558,34 +1565,6 @@ public class ProcessServiceImpl implements ProcessService {
return resultList;
}
/**
* get user by user id
*
* @param userId user id
* @return User
*/
@Override
public User getUserById(int userId) {
return userMapper.selectById(userId);
}
/**
* format task app id in task instance
*/
@Override
public String formatTaskAppId(TaskInstance taskInstance) {
ProcessInstance processInstance = findProcessInstanceById(taskInstance.getProcessInstanceId());
if (processInstance == null) {
return "";
}
ProcessDefinition definition = findProcessDefinition(processInstance.getProcessDefinitionCode(),
processInstance.getProcessDefinitionVersion());
if (definition == null) {
return "";
}
return String.format("%s_%s_%s", definition.getId(), processInstance.getId(), taskInstance.getId());
}
/**
* switch process definition version to process definition log version
*/

2
dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java

@ -736,7 +736,7 @@ public class ProcessServiceTest {
processInstance.setId(62);
taskInstance.setVarPool("[{\"direct\":\"OUT\",\"prop\":\"test1\",\"type\":\"VARCHAR\",\"value\":\"\"}]");
taskInstance.setTaskParams("{\"type\":\"MYSQL\",\"datasource\":1,\"sql\":\"select id from tb_test limit 1\","
+ "\"udfs\":\"\",\"sqlType\":\"0\",\"sendEmail\":false,\"displayRows\":10,\"title\":\"\","
+ "\"sqlType\":\"0\",\"sendEmail\":false,\"displayRows\":10,\"title\":\"\","
+ "\"groupId\":null,\"localParams\":[{\"prop\":\"test1\",\"direct\":\"OUT\",\"type\":\"VARCHAR\",\"value\":\"12\"}],"
+ "\"connParams\":\"\",\"preStatements\":[],\"postStatements\":[],\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],"
+ "\\\"failedNode\\\":[\\\"\\\"]}\",\"dependence\":\"{}\"}");

18
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResourceType.java

@ -17,36 +17,30 @@
package org.apache.dolphinscheduler.spi.enums;
import lombok.Getter;
import com.baomidou.mybatisplus.annotation.EnumValue;
/**
* resource type
*/
@Getter
public enum ResourceType {
/**
* 0 file, 1 udf
*/
FILE(0, "file"),
UDF(1, "udf"),
ALL(2, "all");
ResourceType(int code, String descp) {
ResourceType(int code, String desc) {
this.code = code;
this.descp = descp;
this.desc = desc;
}
@EnumValue
private final int code;
private final String descp;
public int getCode() {
return code;
}
public String getDescp() {
return descp;
}
private final String desc;
public static ResourceType getResourceType(int code) {
for (ResourceType resourceType : ResourceType.values()) {

5
dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/AbstractStorageOperator.java

@ -51,7 +51,7 @@ public abstract class AbstractStorageOperator implements StorageOperator {
.resourceBaseDirectory(storageBaseDirectory)
.isDirectory(Files.getFileExtension(resourceAbsolutePath).isEmpty())
.tenant(segments[0])
.resourceType(segments[1].equals(FILE_FOLDER_NAME) ? ResourceType.FILE : ResourceType.UDF)
.resourceType(ResourceType.FILE)
.resourceRelativePath(segments.length == 2 ? "/" : segments[2])
.resourceParentAbsolutePath(StringUtils.substringBeforeLast(resourceAbsolutePath, File.separator))
.build();
@ -83,9 +83,6 @@ public abstract class AbstractStorageOperator implements StorageOperator {
case FILE:
resourceBaseDirectory = FileUtils.concatFilePath(tenantBaseDirectory, FILE_FOLDER_NAME);
break;
case UDF:
resourceBaseDirectory = FileUtils.concatFilePath(tenantBaseDirectory, UDF_FOLDER_NAME);
break;
case ALL:
resourceBaseDirectory = tenantBaseDirectory;
break;

7
dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperatorTest.java

@ -118,13 +118,6 @@ class LocalStorageOperatorTest {
.isEqualTo("file:" + Paths.get(resourceBaseDir, tenantCode, Constants.RESOURCE_TYPE_FILE));
}
@Test
public void testGetStorageBaseDirectory_withTenant_withResourceTypeUdf() {
String storageBaseDirectory = storageOperator.getStorageBaseDirectory("default", ResourceType.UDF);
assertThat(storageBaseDirectory)
.isEqualTo("file:" + Paths.get(resourceBaseDir, tenantCode, Constants.RESOURCE_TYPE_UDF));
}
@Test
public void testGetStorageBaseDirectory_withTenant_withResourceTypeAll() {
String storageBaseDirectory = storageOperator.getStorageBaseDirectory("default", ResourceType.ALL);

16
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/SQLTaskExecutionContext.java

@ -17,13 +17,10 @@
package org.apache.dolphinscheduler.plugin.task.api;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.UdfFuncParameters;
import java.io.Serializable;
import java.util.List;
/**
* SQL Task ExecutionContext
* SQL Task ExecutionContext
*/
public class SQLTaskExecutionContext implements Serializable {
@ -37,8 +34,6 @@ public class SQLTaskExecutionContext implements Serializable {
*/
private String connectionParams;
private List<UdfFuncParameters> udfFuncParametersList;
/**
* DefaultFS
*/
@ -52,14 +47,6 @@ public class SQLTaskExecutionContext implements Serializable {
this.warningGroupId = warningGroupId;
}
public List<UdfFuncParameters> getUdfFuncParametersList() {
return udfFuncParametersList;
}
public void setUdfFuncParametersList(List<UdfFuncParameters> udfFuncParametersList) {
this.udfFuncParametersList = udfFuncParametersList;
}
public String getConnectionParams() {
return connectionParams;
}
@ -81,7 +68,6 @@ public class SQLTaskExecutionContext implements Serializable {
return "SQLTaskExecutionContext{"
+ "warningGroupId=" + warningGroupId
+ ", connectionParams='" + connectionParams + '\''
+ ", udfFuncParametersList=" + udfFuncParametersList
+ ", defaultFS='" + defaultFS + '\'' + '}';
}
}

2
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/ResourceType.java

@ -20,5 +20,5 @@ package org.apache.dolphinscheduler.plugin.task.api.enums;
public enum ResourceType {
DATASOURCE,
UDF;
;
}
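After dropping UDF, the task-api ResourceType enum shrinks to a single constant. A sketch of the resulting declaration, assembled directly from the two lines of this hunk (only the package line is taken from the hunk header; nothing else is assumed):

    package org.apache.dolphinscheduler.plugin.task.api.enums;

    public enum ResourceType {

        DATASOURCE,
        ;
    }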

56
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/UdfType.java

@ -1,56 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.api.enums;
/**
* UDF type
*/
public enum UdfType {
/**
* 0 hive; 1 spark
*/
HIVE(0, "hive"),
SPARK(1, "spark");
UdfType(int code, String descp) {
this.code = code;
this.descp = descp;
}
private final int code;
private final String descp;
public int getCode() {
return code;
}
public String getDescp() {
return descp;
}
public static UdfType of(int type) {
for (UdfType ut : values()) {
if (ut.getCode() == type) {
return ut;
}
}
throw new IllegalArgumentException("invalid type : " + type);
}
}

38
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParameters.java

@ -21,12 +21,10 @@ import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.task.api.SQLTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.enums.DataType;
import org.apache.dolphinscheduler.plugin.task.api.enums.ResourceType;
import org.apache.dolphinscheduler.plugin.task.api.enums.UdfType;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.DataSourceParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.UdfFuncParameters;
import org.apache.dolphinscheduler.plugin.task.api.utils.VarPoolUtils;
import org.apache.commons.collections4.CollectionUtils;
@ -37,10 +35,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import com.google.common.base.Enums;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
/**
@ -80,10 +75,6 @@ public class SqlParameters extends AbstractParameters {
*/
private int displayRows;
/**
* udf list
*/
private String udfs;
/**
* show type
* 0 TABLE
@ -148,14 +139,6 @@ public class SqlParameters extends AbstractParameters {
this.sql = sql;
}
public String getUdfs() {
return udfs;
}
public void setUdfs(String udfs) {
this.udfs = udfs;
}
public int getSqlType() {
return sqlType;
}
@ -293,7 +276,6 @@ public class SqlParameters extends AbstractParameters {
+ ", sendEmail=" + sendEmail
+ ", displayRows=" + displayRows
+ ", limit=" + limit
+ ", udfs='" + udfs + '\''
+ ", showType='" + showType + '\''
+ ", connParams='" + connParams + '\''
+ ", groupId='" + groupId + '\''
@ -308,16 +290,6 @@ public class SqlParameters extends AbstractParameters {
ResourceParametersHelper resources = super.getResources();
resources.put(ResourceType.DATASOURCE, datasource);
// whether udf type
boolean udfTypeFlag = Enums.getIfPresent(UdfType.class, Strings.nullToEmpty(this.getType())).isPresent()
&& !StringUtils.isEmpty(this.getUdfs());
if (udfTypeFlag) {
String[] udfFunIds = this.getUdfs().split(",");
for (int i = 0; i < udfFunIds.length; i++) {
resources.put(ResourceType.UDF, Integer.parseInt(udfFunIds[i]));
}
}
return resources;
}
@ -334,16 +306,6 @@ public class SqlParameters extends AbstractParameters {
(DataSourceParameters) parametersHelper.getResourceParameters(ResourceType.DATASOURCE, datasource);
sqlTaskExecutionContext.setConnectionParams(dbSource.getConnectionParams());
// whether udf type
boolean udfTypeFlag = Enums.getIfPresent(UdfType.class, Strings.nullToEmpty(this.getType())).isPresent()
&& !StringUtils.isEmpty(this.getUdfs());
if (udfTypeFlag) {
List<UdfFuncParameters> collect = parametersHelper.getResourceMap(ResourceType.UDF).entrySet().stream()
.map(entry -> (UdfFuncParameters) entry.getValue()).collect(Collectors.toList());
sqlTaskExecutionContext.setUdfFuncParametersList(collect);
}
return sqlTaskExecutionContext;
}
}
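With the udfs field and the UdfType check removed, the resource declaration of a SQL task reduces to its datasource. A rough sketch of the surviving method body, pieced together from the context lines of this hunk; the exact signature (an overridden getResources() returning ResourceParametersHelper) is assumed from the super.getResources() call shown above:

    @Override
    public ResourceParametersHelper getResources() {
        ResourceParametersHelper resources = super.getResources();
        // the only remaining external resource of a SQL task is its datasource
        resources.put(ResourceType.DATASOURCE, datasource);
        return resources;
    }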

3
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/AbstractResourceParameters.java

@ -23,8 +23,7 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, visible = true, property = "resourceType")
@JsonSubTypes({
@Type(value = DataSourceParameters.class, name = "DATASOURCE"),
@Type(value = UdfFuncParameters.class, name = "UDF")
@Type(value = DataSourceParameters.class, name = "DATASOURCE")
})
public abstract class AbstractResourceParameters {

133
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/UdfFuncParameters.java

@ -1,133 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.api.parameters.resource;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.task.api.enums.UdfType;
import java.util.Date;
import lombok.Data;
import com.fasterxml.jackson.annotation.JsonProperty;
@Data
public class UdfFuncParameters extends AbstractResourceParameters {
/**
* id
*/
private int id;
public String getResourceType() {
return resourceType;
}
public void setResourceType(String resourceType) {
this.resourceType = resourceType;
}
@JsonProperty(value = "UDF")
private String resourceType;
/**
* user id
*/
private int userId;
/**
* udf function name
*/
private String funcName;
/**
* udf class name
*/
private String className;
/**
* udf argument types
*/
private String argTypes;
/**
* udf data base
*/
private String database;
/**
* udf description
*/
private String description;
/**
* resource id
*/
private int resourceId;
/**
* resource name
*/
private String resourceName;
/**
* udf function type: hive / spark
*/
private UdfType type;
/**
* create time
*/
private Date createTime;
/**
* update time
*/
private Date updateTime;
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UdfFuncParameters udfFuncRequest = (UdfFuncParameters) o;
if (id != udfFuncRequest.id) {
return false;
}
return !(funcName != null ? !funcName.equals(udfFuncRequest.funcName) : udfFuncRequest.funcName != null);
}
@Override
public int hashCode() {
int result = id;
result = 31 * result + (funcName != null ? funcName.hashCode() : 0);
return result;
}
@Override
public String toString() {
return JSONUtils.toJsonString(this);
}
}

3
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParametersTest.java

@ -33,7 +33,6 @@ public class SqlParametersTest {
private final String type = "MYSQL";
private final String sql = "select * from t_ds_user";
private final String udfs = "test-udfs-1.0.0-SNAPSHOT.jar";
private final int datasource = 1;
private final int sqlType = 0;
private final Boolean sendEmail = true;
@ -57,7 +56,6 @@ public class SqlParametersTest {
sqlParameters.setType(type);
sqlParameters.setSql(sql);
sqlParameters.setUdfs(udfs);
sqlParameters.setDatasource(datasource);
sqlParameters.setSqlType(sqlType);
sqlParameters.setSendEmail(sendEmail);
@ -68,7 +66,6 @@ public class SqlParametersTest {
Assertions.assertEquals(type, sqlParameters.getType());
Assertions.assertEquals(sql, sqlParameters.getSql());
Assertions.assertEquals(udfs, sqlParameters.getUdfs());
Assertions.assertEquals(datasource, sqlParameters.getDatasource());
Assertions.assertEquals(sqlType, sqlParameters.getSqlType());
Assertions.assertEquals(sendEmail, sqlParameters.getSendEmail());

2
dolphinscheduler-task-plugin/dolphinscheduler-task-remoteshell/src/test/java/org/apache/dolphinscheduler/plugin/task/remoteshell/RemoteShellTaskTest.java

@ -68,7 +68,7 @@ public class RemoteShellTaskTest {
TaskExecutionContext taskExecutionContext = new TaskExecutionContext();
taskExecutionContext.setTaskAppId("1");
taskExecutionContext
.setTaskParams("{\"localParams\":[],\"rawScript\":\"echo 1\",\"resourceList\":[],\"udfList\":[]}");
.setTaskParams("{\"localParams\":[],\"rawScript\":\"echo 1\",\"resourceList\":[]}");
taskExecutionContext.setExecutePath("/tmp");
taskExecutionContext.setEnvironmentConfig("export PATH=/opt/anaconda3/bin:$PATH");
RemoteShellTask remoteShellTask = spy(new RemoteShellTask(taskExecutionContext));

95
dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java

@ -35,13 +35,10 @@ import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.api.model.TaskAlertInfo;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.SqlParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.UdfFuncParameters;
import org.apache.dolphinscheduler.plugin.task.api.resource.ResourceContext;
import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.sql.Connection;
@ -49,8 +46,6 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -118,12 +113,11 @@ public class SqlTask extends AbstractTask {
public void handle(TaskCallBack taskCallBack) throws TaskException {
log.info("Full sql parameters: {}", sqlParameters);
log.info(
"sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {},varPool : {} ,query max result limit {}",
"sql type : {}, datasource : {}, sql : {} , localParams : {},showType : {},connParams : {},varPool : {} ,query max result limit {}",
sqlParameters.getType(),
sqlParameters.getDatasource(),
sqlParameters.getSql(),
sqlParameters.getLocalParams(),
sqlParameters.getUdfs(),
sqlParameters.getShowType(),
sqlParameters.getConnParams(),
sqlParameters.getVarPool(),
@ -153,10 +147,8 @@ public class SqlTask extends AbstractTask {
.map(this::getSqlAndSqlParamsMap)
.collect(Collectors.toList());
List<String> createFuncs = createFuncs(sqlTaskExecutionContext.getUdfFuncParametersList());
// execute sql task
executeFuncAndSql(mainStatementSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs);
executeFuncAndSql(mainStatementSqlBinds, preStatementSqlBinds, postStatementSqlBinds);
setExitStatusCode(TaskConstants.EXIT_CODE_SUCCESS);
@ -176,24 +168,17 @@ public class SqlTask extends AbstractTask {
* execute function and sql
*
* @param mainStatementsBinds main statements binds
* @param preStatementsBinds pre statements binds
* @param preStatementsBinds pre statements binds
* @param postStatementsBinds post statements binds
* @param createFuncs create functions
*/
public void executeFuncAndSql(List<SqlBinds> mainStatementsBinds,
List<SqlBinds> preStatementsBinds,
List<SqlBinds> postStatementsBinds,
List<String> createFuncs) throws Exception {
List<SqlBinds> postStatementsBinds) throws Exception {
try (
Connection connection =
DataSourceClientProvider.getAdHocConnection(DbType.valueOf(sqlParameters.getType()),
baseConnectionParam)) {
// create temp function
if (CollectionUtils.isNotEmpty(createFuncs)) {
createTempFunction(connection, createFuncs);
}
// pre execute
executeUpdate(connection, preStatementsBinds, "pre");
@ -299,7 +284,7 @@ public class SqlTask extends AbstractTask {
/**
* send alert as an attachment
*
* @param title title
* @param title title
* @param content content
*/
private void sendAttachment(int groupId, String title, String content) {
@ -332,22 +317,6 @@ public class SqlTask extends AbstractTask {
return String.valueOf(result);
}
/**
* create temp function
*
* @param connection connection
* @param createFuncs createFuncs
*/
private void createTempFunction(Connection connection,
List<String> createFuncs) throws Exception {
try (Statement funcStmt = connection.createStatement()) {
for (String createFunc : createFuncs) {
log.info("hive create function sql: {}", createFunc);
funcStmt.execute(createFunc);
}
}
}
/**
* close jdbc resource
*
@ -367,7 +336,7 @@ public class SqlTask extends AbstractTask {
* preparedStatement bind
*
* @param connection connection
* @param sqlBinds sqlBinds
* @param sqlBinds sqlBinds
* @return PreparedStatement
* @throws Exception Exception
*/
@ -400,9 +369,9 @@ public class SqlTask extends AbstractTask {
/**
* print replace sql
*
* @param content content
* @param formatSql format sql
* @param rgex rgex
* @param content content
* @param formatSql format sql
* @param rgex rgex
* @param sqlParamsMap sql params map
*/
private void printReplacedSql(String content, String formatSql, String rgex, Map<Integer, Property> sqlParamsMap) {
@ -477,50 +446,4 @@ public class SqlTask extends AbstractTask {
return content;
}
/**
* create function list
*
* @param udfFuncParameters udfFuncParameters
* @return
*/
private List<String> createFuncs(List<UdfFuncParameters> udfFuncParameters) {
if (CollectionUtils.isEmpty(udfFuncParameters)) {
log.info("can't find udf function resource");
return null;
}
// build jar sql
List<String> funcList = buildJarSql(udfFuncParameters);
// build temp function sql
List<String> tempFuncList = buildTempFuncSql(udfFuncParameters);
funcList.addAll(tempFuncList);
return funcList;
}
/**
* build temp function sql
* @param udfFuncParameters udfFuncParameters
* @return
*/
private List<String> buildTempFuncSql(List<UdfFuncParameters> udfFuncParameters) {
return udfFuncParameters.stream().map(value -> MessageFormat
.format(CREATE_OR_REPLACE_FUNCTION_FORMAT, value.getFuncName(), value.getClassName()))
.collect(Collectors.toList());
}
/**
* build jar sql
* @param udfFuncParameters udfFuncParameters
* @return
*/
private List<String> buildJarSql(List<UdfFuncParameters> udfFuncParameters) {
return udfFuncParameters.stream().map(value -> {
String resourceFullName = value.getResourceName();
ResourceContext resourceContext = taskExecutionContext.getResourceContext();
return String.format("add jar %s",
resourceContext.getResourceItem(resourceFullName).getResourceAbsolutePathInLocal());
}).collect(Collectors.toList());
}
}
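With UDF registration removed, the SQL task no longer builds "add jar" / "create or replace temporary function" statements before running its queries. A sketch of the trimmed-down method, assembled from the context lines of this hunk; everything past the pre-statement step is elided because it is unchanged by this PR and not shown here:

    public void executeFuncAndSql(List<SqlBinds> mainStatementsBinds,
                                  List<SqlBinds> preStatementsBinds,
                                  List<SqlBinds> postStatementsBinds) throws Exception {
        try (
                Connection connection =
                        DataSourceClientProvider.getAdHocConnection(DbType.valueOf(sqlParameters.getType()),
                                baseConnectionParam)) {
            // the createTempFunction(...) step is gone; execution starts directly with the pre statements
            executeUpdate(connection, preStatementsBinds, "pre");
            // main statements, post statements and result handling follow as before (not shown in this hunk)
        }
    }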

20
dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/resource/MigrateResourceService.java

@ -19,9 +19,7 @@ package org.apache.dolphinscheduler.tools.resource;
import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
@ -54,9 +52,6 @@ public class MigrateResourceService {
@Autowired
private TenantMapper tenantMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private DataSource dataSource;
@ -69,11 +64,10 @@ public class MigrateResourceService {
}
String resMigrateBasePath = createMigrateDirByType(targetTenantCode, ResourceType.FILE);
String udfMigrateBasePath = createMigrateDirByType(targetTenantCode, ResourceType.UDF);
if (StringUtils.isEmpty(resMigrateBasePath) || StringUtils.isEmpty(udfMigrateBasePath)) {
if (StringUtils.isEmpty(resMigrateBasePath)) {
return;
}
// migrate all unmanaged resources and udfs once
// migrate all unmanaged resources once
List<Map<String, Object>> resources = getAllResources();
for (Map<String, Object> item : resources) {
String oriFullName = (String) item.get("full_name");
@ -84,16 +78,6 @@ public class MigrateResourceService {
if (ResourceType.FILE.getCode() == type) {
storageOperator.copy(oriFullName,
String.format(FORMAT_S_S, resMigrateBasePath, oriFullName), true, true);
} else if (ResourceType.UDF.getCode() == type) {
String fullName = String.format(FORMAT_S_S, udfMigrateBasePath, oriFullName);
storageOperator.copy(oriFullName, fullName, true, true);
// change relative udfs resourceName
List<UdfFunc> udfs = udfFuncMapper.listUdfByResourceId(new Integer[]{id});
udfs.forEach(udf -> {
udf.setResourceName(fullName);
udfFuncMapper.updateById(udf);
});
}
} catch (Exception e) {
logger.error("Migrate resource: {} failed: {}", item, e);

1
dolphinscheduler-ui/src/layouts/content/components/sidebar/index.tsx

@ -36,7 +36,6 @@ const Sidebar = defineComponent({
const defaultExpandedKeys = [
'workflow',
'task',
'udf-manage',
'service-manage',
'statistical-manage',
'task-group-manage'

1
dolphinscheduler-ui/src/layouts/content/components/user/use-dropdown.ts

@ -42,7 +42,6 @@ export function useDropDown() {
userStore.setSecurityConfigType('')
userStore.setUserInfo({})
userStore.setBaseResDir('')
userStore.setBaseUdfDir('')
cookies.remove('sessionId')
router.push({ path: '/login' })

16
dolphinscheduler-ui/src/layouts/content/use-dataList.ts

@ -31,7 +31,6 @@ import {
PartitionOutlined,
SettingOutlined,
FileSearchOutlined,
RobotOutlined,
AppstoreOutlined,
UsergroupAddOutlined,
UserAddOutlined,
@ -180,21 +179,6 @@ export function useDataList() {
key: '/resource/file-manage',
icon: renderIcon(FileSearchOutlined)
},
{
label: t('menu.udf_manage'),
key: 'udf-manage',
icon: renderIcon(RobotOutlined),
children: [
{
label: t('menu.resource_manage'),
key: '/resource/resource-manage'
},
{
label: t('menu.function_manage'),
key: '/resource/function-manage'
}
]
},
{
label: t('menu.task_group_manage'),
key: 'task-group-manage',

1
dolphinscheduler-ui/src/locales/en_US/menu.ts

@ -34,7 +34,6 @@ export default {
task_instance: 'Task Instance',
task_definition: 'Task Definition',
file_manage: 'File Manage',
udf_manage: 'UDF Manage',
resource_manage: 'Resource Manage',
function_manage: 'Function Manage',
service_manage: 'Service Manage',

4
dolphinscheduler-ui/src/locales/en_US/project.ts

@ -250,7 +250,8 @@ export default {
'The downstream dependent tasks exists. You can not delete the task.',
warning_delete_scheduler_dependent_tasks_desc:
'The downstream dependent tasks exists. Are you sure to delete the scheduler?',
warning_too_large_parallelism_number: 'The parallelism number is too large. It is better not to be over 10.'
warning_too_large_parallelism_number:
'The parallelism number is too large. It is better not to be over 10.'
},
task: {
on_line: 'Online',
@ -839,7 +840,6 @@ export default {
integer_tips: 'Please enter a positive integer',
sql_parameter: 'SQL Parameter',
format_tips: 'Please enter format',
udf_function: 'UDF Function',
unlimited: 'unlimited',
please_select_source_connector_type: 'Please select source connector type',
please_select_source_datasource_id: 'Please select source datasource id',

19
dolphinscheduler-ui/src/locales/en_US/resource.ts

@ -55,17 +55,7 @@ export default {
return: 'Return',
save: 'Save'
},
udf: {
udf_resources: 'UDF resources',
upload_udf_resources: 'Upload UDF Resources',
udf_source_name: 'UDF Resource Name',
user_name: 'Resource userName'
},
function: {
udf_function: 'UDF Function',
create_udf_function: 'Create UDF Function',
edit_udf_function: 'Edit UDF Function',
udf_function_name: 'UDF Function Name',
user_name: 'Resource userName',
class_name: 'Class Name',
type: 'Type',
@ -78,22 +68,15 @@ export default {
delete: 'Delete',
success: 'Success',
package_name: 'Package Name',
udf_resources: 'UDF Resources',
instructions: 'Instructions',
upload_resources: 'Upload Resources',
udf_resources_directory: 'UDF resources directory',
delete_confirm: 'Delete?',
enter_keyword_tips: 'Please enter keyword',
enter_udf_unction_name_tips: 'Please enter a UDF function name',
enter_package_name_tips: 'Please enter a Package name',
enter_select_udf_resources_tips: 'Please select UDF resources',
enter_select_udf_resources_directory_tips:
'Please select UDF resources directory',
enter_instructions_tips: 'Please enter a instructions',
enter_name_tips: 'Please enter name',
enter_description_tips: 'Please enter description',
upload: 'Upload',
upload_udf_resources: 'Upload UDF Resources'
upload: 'Upload'
},
task_group_option: {
manage: 'Task group manage',

4
dolphinscheduler-ui/src/locales/en_US/security.ts

@ -150,18 +150,14 @@ export default {
project: 'Project',
resource: 'Resource',
file_resource: 'File Resource',
udf_resource: 'UDF Resource',
datasource: 'Datasource',
udf: 'UDF Function',
namespace: 'Namespace',
revoke_auth: 'Revoke',
grant_read: 'Grant Read',
grant_all: 'Grant All',
authorize_project: 'Project Authorize',
authorize_resource: 'Resource Authorize',
authorize_namespace: 'Namespace Authorize',
authorize_datasource: 'Datasource Authorize',
authorize_udf: 'UDF Function Authorize',
username: 'Username',
username_exists: 'The username already exists',
username_tips: 'Please enter username',

1
dolphinscheduler-ui/src/locales/zh_CN/menu.ts

@ -35,7 +35,6 @@ export default {
task_instance: '任务实例',
task_definition: '任务定义',
file_manage: '文件管理',
udf_manage: 'UDF管理',
resource_manage: '资源管理',
function_manage: '函数管理',
service_manage: '服务管理',

3
dolphinscheduler-ui/src/locales/zh_CN/project.ts

@ -247,7 +247,7 @@ export default {
'下游存在依赖,你不能删除该任务.',
warning_delete_scheduler_dependent_tasks_desc:
'下游存在依赖, 删除定时可能会对下游任务产生影响. 你确定要删除该定时嘛?',
warning_too_large_parallelism_number: '并行度设置太大了, 最好不要超过10.',
warning_too_large_parallelism_number: '并行度设置太大了, 最好不要超过10.'
},
task: {
on_line: '线上',
@ -814,7 +814,6 @@ export default {
integer_tips: '请输入一个正整数',
sql_parameter: 'sql参数',
format_tips: '请输入格式为',
udf_function: 'UDF函数',
unlimited: '不限制',
please_select_source_connector_type: '请选择源数据类型',
please_select_source_datasource_id: '请选择源数据源',

17
dolphinscheduler-ui/src/locales/zh_CN/resource.ts

@ -55,16 +55,7 @@ export default {
return: '返回',
save: '保存'
},
udf: {
udf_resources: 'UDF资源',
upload_udf_resources: '上传UDF资源',
udf_source_name: 'UDF资源名称'
},
function: {
udf_function: 'UDF函数',
create_udf_function: '创建UDF函数',
edit_udf_function: '编辑UDF函数',
udf_function_name: 'UDF函数名称',
user_name: '所属用户',
class_name: '类名',
type: '类型',
@ -77,21 +68,15 @@ export default {
delete: '删除',
success: '成功',
package_name: '包名类名',
udf_resources: 'UDF资源',
instructions: '使用说明',
upload_resources: '上传资源',
udf_resources_directory: 'UDF资源目录',
delete_confirm: '确定删除吗?',
enter_keyword_tips: '请输入关键词',
enter_udf_unction_name_tips: '请输入UDF函数名称',
enter_package_name_tips: '请输入包名类名',
enter_select_udf_resources_tips: '请选择UDF资源',
enter_select_udf_resources_directory_tips: '请选择UDF资源目录',
enter_instructions_tips: '请输入使用说明',
enter_name_tips: '请输入名称',
enter_description_tips: '请输入描述',
upload: '上传',
upload_udf_resources: '上传UDF资源'
upload: '上传'
},
task_group_option: {
manage: '任务组管理',

4
dolphinscheduler-ui/src/locales/zh_CN/security.ts

@ -148,18 +148,14 @@ export default {
project: '项目',
resource: '资源',
file_resource: '文件资源',
udf_resource: 'UDF资源',
datasource: '数据源',
udf: 'UDF函数',
namespace: '命名空间',
revoke_auth: '撤销权限',
grant_read: '授予读权限',
grant_all: '授予所有权限',
authorize_project: '项目授权',
authorize_resource: '资源授权',
authorize_namespace: '命名空间授权',
authorize_datasource: '数据源授权',
authorize_udf: 'UDF函数授权',
username: '用户名',
username_exists: '用户名已存在',
username_tips: '请输入用户名',

34
dolphinscheduler-ui/src/router/modules/resources.ts

@ -100,40 +100,6 @@ export default {
auth: []
}
},
{
path: '/resource/resource-manage',
name: 'resource-manage',
component: components['resource-udf-resource'],
meta: {
title: '资源管理',
activeMenu: 'resource',
showSide: true,
auth: []
}
},
{
path: '/resource/resource-manage',
name: 'resource-sub-manage',
component: components['resource-udf-resource'],
meta: {
title: '资源管理',
activeMenu: 'resource',
activeSide: '/resource/resource-manage',
showSide: true,
auth: []
}
},
{
path: '/resource/function-manage',
name: 'function-manage',
component: components['resource-udf-function'],
meta: {
title: '函数管理',
activeMenu: 'resource',
showSide: true,
auth: []
}
},
{
path: '/resource/task-group-option',
name: 'task-group-option',

108
dolphinscheduler-ui/src/service/modules/resources/index.ts

@ -23,16 +23,13 @@ import {
FileNameReq,
FullNameReq,
TenantCodeReq,
IdReq,
ContentReq,
DescriptionReq,
CreateReq,
UserIdReq,
OnlineCreateReq,
ProgramTypeReq,
ListReq,
ViewResourceReq,
UdfFuncReq
ViewResourceReq
} from './types'
export function queryResourceListPaging(
@ -64,30 +61,6 @@ export function createResource(
})
}
export function authorizedFile(params: UserIdReq): any {
return axios({
url: '/resources/authed-file',
method: 'get',
params
})
}
export function authorizeResourceTree(params: UserIdReq): any {
return axios({
url: '/resources/authed-resource-tree',
method: 'get',
params
})
}
export function authUDFFunc(params: UserIdReq): any {
return axios({
url: '/resources/authed-udf-func',
method: 'get',
params
})
}
export function createDirectory(
data: CreateReq & NameReq & ResourceTypeReq
): any {
@ -126,62 +99,6 @@ export function queryResourceByProgramType(
})
}
export function queryUdfFuncListPaging(params: ListReq): any {
return axios({
url: '/resources/udf-func',
method: 'get',
params
})
}
export function queryUdfFuncList(params: { type: 'HIVE' | 'SPARK' }): any {
return axios({
url: '/resources/udf-func/list',
method: 'get',
params
})
}
export function verifyUdfFuncName(params: NameReq): any {
return axios({
url: '/resources/udf-func/verify-name',
method: 'get',
params
})
}
export function deleteUdfFunc(id: number, params: FullNameReq): any {
return axios({
url: `/resources/udf-func/${id}`,
method: 'delete',
params
})
}
export function unAuthUDFFunc(params: UserIdReq): any {
return axios({
url: '/resources/unauth-udf-func',
method: 'get',
params
})
}
export function verifyResourceName(params: FullNameReq & ResourceTypeReq): any {
return axios({
url: '/resources/verify-name',
method: 'get',
params
})
}
export function doesResourceExist(params: FullNameReq & ResourceTypeReq): any {
return axios({
url: '/resources/verify-name',
method: 'get',
params
})
}
export function updateResource(
data: NameReq & ResourceTypeReq & DescriptionReq & FullNameReq & TenantCodeReq
): any {
@ -204,13 +121,6 @@ export function downloadResource(params: FullNameReq): void {
utils.downloadFile('resources/download', params)
}
export function viewUIUdfFunction(id: IdReq): any {
return axios({
url: `/resources/${id}/udf-func`,
method: 'get'
})
}
export function updateResourceContent(
data: ContentReq & TenantCodeReq & FullNameReq
): any {
@ -230,19 +140,3 @@ export function viewResource(
params
})
}
export function createUdfFunc(data: UdfFuncReq): any {
return axios({
url: '/resources/udf-func',
method: 'post',
data
})
}
export function updateUdfFunc(data: UdfFuncReq, id: number): any {
return axios({
url: `/resources/udf-func/${id}`,
method: 'put',
data
})
}

15
dolphinscheduler-ui/src/service/modules/resources/types.ts

@ -20,14 +20,10 @@ interface FileReq {
}
interface ResourceTypeReq {
type: 'FILE' | 'UDF'
type: 'FILE'
programType?: string
}
interface UdfTypeReq {
type: 'HIVE' | 'SPARK'
}
interface NameReq {
name: string
}
@ -84,13 +80,6 @@ interface ViewResourceReq {
skipLineNum: number
}
interface UdfFuncReq extends UdfTypeReq, DescriptionReq, FullNameReq {
className: string
funcName: string
argTypes?: string
database?: string
}
interface ResourceFile {
id: number
pid: number
@ -122,7 +111,6 @@ interface ResourceViewRes {
export {
FileReq,
ResourceTypeReq,
UdfTypeReq,
NameReq,
FileNameReq,
FullNameReq,
@ -136,7 +124,6 @@ export {
ProgramTypeReq,
ListReq,
ViewResourceReq,
UdfFuncReq,
ResourceListRes,
ResourceViewRes,
ResourceFile

9
dolphinscheduler-ui/src/service/modules/users/index.ts

@ -27,7 +27,6 @@ import {
GrantResourceReq,
GrantProject,
ProjectCodeReq,
GrantUDFReq,
GrantNamespaceReq,
ListAllReq,
ListReq,
@ -129,14 +128,6 @@ export function grantProjectByCode(data: ProjectCodeReq & UserIdReq): any {
})
}
export function grantUDFFunc(data: GrantUDFReq & UserIdReq) {
return axios({
url: '/users/grant-udf-func',
method: 'post',
data
})
}
export function grantNamespaceFunc(data: GrantNamespaceReq & UserIdReq) {
return axios({
url: '/users/grant-namespace',

5
dolphinscheduler-ui/src/service/modules/users/types.ts

@ -62,10 +62,6 @@ interface ProjectCodeReq {
projectCode: string
}
interface GrantUDFReq {
udfIds: string
}
interface GrantNamespaceReq {
namespaceIds: string
}
@ -130,7 +126,6 @@ export {
GrantResourceReq,
GrantProject,
ProjectCodeReq,
GrantUDFReq,
GrantNamespaceReq,
ListAllReq,
ListReq,

1
dolphinscheduler-ui/src/service/service.ts

@ -66,7 +66,6 @@ const err = (err: AxiosError): Promise<AxiosError> => {
userStore.setSecurityConfigType('')
userStore.setUserInfo({})
userStore.setBaseResDir('')
userStore.setBaseUdfDir('')
router.push({ path: '/login' })
}

1
dolphinscheduler-ui/src/store/user/types.ts

@ -21,7 +21,6 @@ interface UserState {
sessionId: string
securityConfigType: string
baseResDir: string
baseUdfDir: string
userInfo: UserInfoRes | {}
}

Some files were not shown because too many files have changed in this diff.
