Browse Source

[Feature][TASK-PLUGIN] AWS Database Migration Service (DMS) (#11868)

* add aws dms
3.2.0-release
JieguangZhou 2 years ago committed by GitHub
parent
commit
308f632fcc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 8
      docs/configs/docsdev.js
  2. 85
      docs/docs/en/guide/task/dms.md
  3. 85
      docs/docs/zh/guide/task/dms.md
  4. BIN
      docs/img/tasks/demo/dms_create_and_start.png
  5. BIN
      docs/img/tasks/demo/dms_create_and_start_json.png
  6. BIN
      docs/img/tasks/demo/dms_restart.png
  7. BIN
      docs/img/tasks/demo/dms_restart_json.png
  8. BIN
      docs/img/tasks/icons/dms.png
  9. 1
      dolphinscheduler-api/src/main/resources/task-type-config.yaml
  10. 5
      dolphinscheduler-bom/pom.xml
  11. 1
      dolphinscheduler-dist/release-docs/LICENSE
  12. 201
      dolphinscheduler-dist/release-docs/licenses/LICENSE-aws-java-sdk-dms.txt
  13. 6
      dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml
  14. 53
      dolphinscheduler-task-plugin/dolphinscheduler-task-dms/pom.xml
  15. 321
      dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java
  16. 65
      dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsParameters.java
  17. 257
      dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTask.java
  18. 49
      dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannel.java
  19. 47
      dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannelFactory.java
  20. 237
      dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/test/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHookTest.java
  21. 188
      dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/test/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskTest.java
  22. 1
      dolphinscheduler-task-plugin/pom.xml
  23. BIN
      dolphinscheduler-ui/public/images/task-icons/dms.png
  24. BIN
      dolphinscheduler-ui/public/images/task-icons/dms_hover.png
  25. 4
      dolphinscheduler-ui/src/store/project/task-type.ts
  26. 1
      dolphinscheduler-ui/src/store/project/types.ts
  27. 1
      dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts
  28. 140
      dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-dms.ts
  29. 12
      dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts
  30. 4
      dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts
  31. 83
      dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dms.ts
  32. 10
      dolphinscheduler-ui/src/views/projects/task/components/node/types.ts
  33. 5
      dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts
  34. 6
      dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss
  35. 1
      tools/dependencies/known-dependencies.txt

8
docs/configs/docsdev.js

@ -205,6 +205,10 @@ export default {
title: 'Pytorch', title: 'Pytorch',
link: '/en-us/docs/dev/user_doc/guide/task/pytorch.html', link: '/en-us/docs/dev/user_doc/guide/task/pytorch.html',
}, },
{
title: 'Amazon DMS',
link: '/en-us/docs/dev/user_doc/guide/task/dms.html',
},
], ],
}, },
{ {
@ -841,6 +845,10 @@ export default {
title: 'Pytorch', title: 'Pytorch',
link: '/zh-cn/docs/dev/user_doc/guide/task/pytorch.html', link: '/zh-cn/docs/dev/user_doc/guide/task/pytorch.html',
}, },
{
title: 'Amazon DMS',
link: '/zh-cn/docs/dev/user_doc/guide/task/dms.html',
},
], ],
}, },
{ {

85
docs/docs/en/guide/task/dms.md

@ -0,0 +1,85 @@
# DMS Node
## Overview
[AWS Database Migration Service (AWS DMS)](https://aws.amazon.com/cn/dms) helps you migrate databases to AWS quickly and securely.
The source database remains fully operational during the migration, minimizing downtime to applications that rely on the database.
The AWS Database Migration Service can migrate your data to and from the most widely used commercial and open-source databases.
DMS task plugin can help users to create and start DMS tasks in DolphinScheduler more conveniently.
Contains two features:
- Create DMS task and start DMS task
- Restart DMS task
We can create DMS task and start DMS task in two ways:
- Use interface
- Use JSON data
DolphinScheduler tracks the status of the DMS task and marks the task as successfully completed when the DMS task finishes — except for CDC tasks that have no end time.
So, if the `migrationType` is `cdc` or `full-load-and-cdc` and `cdcStopPosition` is not set, DolphinScheduler will mark the task as successful right after the DMS task starts successfully.
## Create Task
- Click `Project Management -> Project Name -> Workflow Definition`, and click the `Create Workflow` button to enter the DAG editing page.
- Drag <img src="../../../../img/tasks/icons/dms.png" width="15"/> from the toolbar to the canvas.
## Task Example
The task plugin picture is as follows
**Create and start DMS task by interface**
![dms](../../../../img/tasks/demo/dms_create_and_start.png)
**Restart DMS task by interface**
![dms](../../../../img/tasks/demo/dms_restart.png)
**Create and start DMS task by JSON data**
![dms](../../../../img/tasks/demo/dms_create_and_start_json.png)
**Restart DMS task by JSON data**
![dms](../../../../img/tasks/demo/dms_restart_json.png)
### First, introduce some general parameters of DolphinScheduler
- Please refer to [DolphinScheduler Task Parameters Appendix](appendix.md#default-task-parameters) for default parameters.
### Here are some specific parameters for the DMS plugin
- **isRestartTask**:Whether to restart the task. If it is true, the task will be restarted. If it is false, the task will be created and started.
- **isJsonFormat**:Whether to use JSON data to create and start the task. If it is true, the task will be created and started by JSON data. If it is false, the task will be created and started by interface.
- **jsonData**:Json data for creating and starting the task. Only when `isJsonFormat` is true, this parameter is valid.
Parameters of creating and starting the task by interface
- **migrationType**:The type of migration. The value can be full-load, cdc, full-load-and-cdc.
- **replicationTaskIdentifier**:The name of the task.
- **replicationInstanceArn**:The ARN of the replication instance.
- **sourceEndpointArn**:The ARN of the source endpoint.
- **targetEndpointArn**:The ARN of the target endpoint.
- **tableMappings**:The mapping of the table.
Parameters of restarting the task by interface
- **replicationTaskArn**:The ARN of the task.
## Environment to prepare
Some AWS configuration is required, modify a field in file `common.properties`
```yaml
# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required
resource.aws.access.key.id=<YOUR AWS ACCESS KEY>
# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required
resource.aws.secret.access.key=<YOUR AWS SECRET KEY>
# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required
resource.aws.region=<AWS REGION>
```

85
docs/docs/zh/guide/task/dms.md

@ -0,0 +1,85 @@
# DMS 节点
## 综述
[AWS Database Migration Service (AWS DMS)](https://aws.amazon.com/cn/dms) 可帮助您快速并安全地将数据库迁移至 AWS。
源数据库在迁移过程中可继续正常运行,从而最大程度地减少依赖该数据库的应用程序的停机时间。
AWS Database Migration Service 可以在最广泛使用的商业和开源数据库之间迁移您的数据。
DMS任务组件帮助用户在DolphinScheduler中创建和启动DMS任务。
组件主要包含两个功能:
- 创建并启动迁移任务
- 重启已存在的迁移任务
组件的使用方式有两种:
- 通过界面创建
- 通过Json数据创建
DolphinScheduler 在启动 DMS 任务后,会跟踪 DMS 任务状态,直至 DMS 任务完成后才将任务设为成功状态。除了以下情况:
不跟踪无结束时间的CDC任务,即 当迁移类型为 `full-load-and-cdc` 或者 `cdc` 时,且没有配置 `cdcStopPosition` 参数时,DolphinScheduler 在成功启动任务后,则会将任务状态设为 成功。
## 创建任务
- 点击项目管理-项目名称-工作流定义,点击“创建工作流”按钮,进入 DAG 编辑页面;
- 拖动工具栏的 <img src="../../../../img/tasks/icons/dms.png" width="15"/> 任务节点到画板中。
## 任务样例
组件图示如下:
**创建并启动迁移任务(通过界面)**
![dms](../../../../img/tasks/demo/dms_create_and_start.png)
**重启已存在的迁移任务(通过界面)**
![dms](../../../../img/tasks/demo/dms_restart.png)
**创建并启动迁移任务(通过Json数据)**
![dms](../../../../img/tasks/demo/dms_create_and_start_json.png)
**重启已存在的迁移任务(通过Json数据)**
![dms](../../../../img/tasks/demo/dms_restart_json.png)
### 首先介绍一些DS通用参数
- 默认参数说明请参考[DolphinScheduler任务参数附录](appendix.md#默认任务参数)。
### DMS组件独有的参数
- **isRestartTask**:是否重启已存在的迁移任务
- **isJsonFormat**:是否使用Json格式的数据创建任务
- **jsonData**:Json格式的数据, 只有`isJsonFormat`为true时才会生效
创建并启动迁移任务时参数
- **migrationType**:迁移类型, 可选值为:[ `full-load`, `full-load-and-cdc`, `cdc` ]
- **replicationTaskIdentifier**:迁移任务标识符, 任务名称
- **replicationInstanceArn**:迁移实例的ARN
- **sourceEndpointArn**:源端点的ARN
- **targetEndpointArn**:目标端点的ARN
- **tableMappings**:表映射
重启已存在的迁移任务时参数
- **replicationTaskArn**:迁移任务的ARN
## 环境配置
需要进行AWS的一些配置,修改`common.properties`中的以下配置信息
```yaml
# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required
resource.aws.access.key.id=<YOUR AWS ACCESS KEY>
# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required
resource.aws.secret.access.key=<YOUR AWS SECRET KEY>
# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required
resource.aws.region=<AWS REGION>
```

BIN
docs/img/tasks/demo/dms_create_and_start.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 151 KiB

BIN
docs/img/tasks/demo/dms_create_and_start_json.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 81 KiB

BIN
docs/img/tasks/demo/dms_restart.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

BIN
docs/img/tasks/demo/dms_restart_json.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 42 KiB

BIN
docs/img/tasks/icons/dms.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

1
dolphinscheduler-api/src/main/resources/task-type-config.yaml

@ -32,6 +32,7 @@ task:
cloud: cloud:
- 'EMR' - 'EMR'
- 'K8S' - 'K8S'
- 'DMS'
logic: logic:
- 'SUB_PROCESS' - 'SUB_PROCESS'
- 'DEPENDENT' - 'DEPENDENT'

5
dolphinscheduler-bom/pom.xml

@ -627,6 +627,11 @@
<artifactId>aws-java-sdk-sagemaker</artifactId> <artifactId>aws-java-sdk-sagemaker</artifactId>
<version>${aws-sdk.version}</version> <version>${aws-sdk.version}</version>
</dependency> </dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-dms</artifactId>
<version>${aws-sdk.version}</version>
</dependency>
<dependency> <dependency>
<groupId>com.aliyun.oss</groupId> <groupId>com.aliyun.oss</groupId>

1
dolphinscheduler-dist/release-docs/LICENSE vendored

@ -445,6 +445,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
aws-java-sdk-core-1.12.300 https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-core/1.12.300 Apache 2.0 aws-java-sdk-core-1.12.300 https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-core/1.12.300 Apache 2.0
aws-java-sdk-kms-1.12.300 https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-kms/1.12.300 Apache 2.0 aws-java-sdk-kms-1.12.300 https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-kms/1.12.300 Apache 2.0
aws-java-sdk-sagemaker-1.12.300 https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-sagemaker/1.12.300 Apache 2.0 aws-java-sdk-sagemaker-1.12.300 https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-sagemaker/1.12.300 Apache 2.0
aws-java-sdk-dms-1.12.300 https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-dms/1.12.300 Apache 2.0
commons-text 1.8: https://mvnrepository.com/artifact/org.apache.commons/commons-text/1.8, Apache 2.0 commons-text 1.8: https://mvnrepository.com/artifact/org.apache.commons/commons-text/1.8, Apache 2.0
httpasyncclient 4.1.5: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpasyncclient/4.1.4, Apache 2.0 httpasyncclient 4.1.5: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpasyncclient/4.1.4, Apache 2.0
httpcore-nio 4.4.15: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore-nio/4.4.15, Apache 2.0 httpcore-nio 4.4.15: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore-nio/4.4.15, Apache 2.0

201
dolphinscheduler-dist/release-docs/licenses/LICENSE-aws-java-sdk-dms.txt vendored

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

6
dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml

@ -218,6 +218,12 @@
<artifactId>dolphinscheduler-task-chunjun</artifactId> <artifactId>dolphinscheduler-task-chunjun</artifactId>
<version>${project.version}</version> <version>${project.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-task-dms</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies> </dependencies>
</project> </project>

53
dolphinscheduler-task-plugin/dolphinscheduler-task-dms/pom.xml

@ -0,0 +1,53 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dolphinscheduler-task-plugin</artifactId>
<groupId>org.apache.dolphinscheduler</groupId>
<version>dev-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dolphinscheduler-task-dms</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-spi</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-task-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-dms</artifactId>
</dependency>
</dependencies>
</project>

321
dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java

@ -0,0 +1,321 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.dms;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.spi.utils.PropertyUtils;
import org.apache.commons.io.IOUtils;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.databasemigrationservice.AWSDatabaseMigrationService;
import com.amazonaws.services.databasemigrationservice.AWSDatabaseMigrationServiceClientBuilder;
import com.amazonaws.services.databasemigrationservice.model.CreateReplicationTaskRequest;
import com.amazonaws.services.databasemigrationservice.model.CreateReplicationTaskResult;
import com.amazonaws.services.databasemigrationservice.model.DeleteReplicationTaskRequest;
import com.amazonaws.services.databasemigrationservice.model.DescribeConnectionsRequest;
import com.amazonaws.services.databasemigrationservice.model.DescribeConnectionsResult;
import com.amazonaws.services.databasemigrationservice.model.DescribeReplicationTasksRequest;
import com.amazonaws.services.databasemigrationservice.model.DescribeReplicationTasksResult;
import com.amazonaws.services.databasemigrationservice.model.Filter;
import com.amazonaws.services.databasemigrationservice.model.InvalidResourceStateException;
import com.amazonaws.services.databasemigrationservice.model.ReplicationTask;
import com.amazonaws.services.databasemigrationservice.model.ReplicationTaskStats;
import com.amazonaws.services.databasemigrationservice.model.ResourceNotFoundException;
import com.amazonaws.services.databasemigrationservice.model.StartReplicationTaskRequest;
import com.amazonaws.services.databasemigrationservice.model.StartReplicationTaskResult;
import com.amazonaws.services.databasemigrationservice.model.StopReplicationTaskRequest;
import com.amazonaws.services.databasemigrationservice.model.Tag;
import com.amazonaws.services.databasemigrationservice.model.TestConnectionRequest;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
public class DmsHook {
protected final Logger logger = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass()));
// AWS DMS client used for all replication-task API calls; created once in the constructor.
private AWSDatabaseMigrationService client;
// Identifier (name) for the replication task to create.
private String replicationTaskIdentifier;
// ARN of the source endpoint of the migration.
private String sourceEndpointArn;
// ARN of the target endpoint of the migration.
private String targetEndpointArn;
// ARN of the replication instance that runs the task.
private String replicationInstanceArn;
// Migration type passed through to the SDK (full-load, cdc, or full-load-and-cdc).
private String migrationType;
// JSON table-mapping rules for the task.
private String tableMappings;
// Optional JSON replication-task settings.
private String replicationTaskSettings;
// CDC start time / start position / stop position, forwarded to create/start requests.
private Date cdcStartTime;
private String cdcStartPosition;
private String cdcStopPosition;
// Tags attached to the task on creation.
private List<Tag> tags;
// Supplemental task data (SDK pass-through).
private String taskData;
// Optional resource identifier used by DMS when generating the task ARN.
private String resourceIdentifier;
// ARN of an existing replication task (used by start/stop/delete operations).
private String replicationTaskArn;
// Start type handed to StartReplicationTask (e.g. start-replication).
private String startReplicationTaskType;

/**
 * Creates the hook with a DMS client built from DolphinScheduler's AWS properties.
 */
public DmsHook() {
this.client = createClient();
}
/**
 * Builds an AWS DMS client from the AWS access key, secret key and region
 * configured in DolphinScheduler's common properties.
 *
 * @return a ready-to-use {@link AWSDatabaseMigrationService} client
 */
public static AWSDatabaseMigrationService createClient() {
    final String accessKeyId = PropertyUtils.getString(TaskConstants.AWS_ACCESS_KEY_ID);
    final String secretAccessKey = PropertyUtils.getString(TaskConstants.AWS_SECRET_ACCESS_KEY);
    final String region = PropertyUtils.getString(TaskConstants.AWS_REGION);

    // Static credentials: DMS tasks authenticate with the key pair from common.properties.
    final AWSCredentialsProvider credentialsProvider =
            new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKeyId, secretAccessKey));

    return AWSDatabaseMigrationServiceClientBuilder.standard()
            .withRegion(region)
            .withCredentials(credentialsProvider)
            .build();
}
/**
 * Creates a new DMS replication task from this hook's configured fields and
 * waits until DMS reports the task as READY.
 *
 * @return true when the task reaches the READY state
 * @throws Exception when expanding file-based parameters fails
 */
public Boolean createReplicationTask() throws Exception {
    logger.info("createReplicationTask ......");
    // replaceFileParameters is applied to table mappings and task settings before the
    // request is sent (presumably resolves file references — see its definition).
    CreateReplicationTaskRequest request = new CreateReplicationTaskRequest()
            .withReplicationTaskIdentifier(replicationTaskIdentifier)
            .withSourceEndpointArn(sourceEndpointArn)
            .withTargetEndpointArn(targetEndpointArn)
            .withReplicationInstanceArn(replicationInstanceArn)
            .withMigrationType(migrationType)
            .withTableMappings(replaceFileParameters(tableMappings))
            .withReplicationTaskSettings(replaceFileParameters(replicationTaskSettings))
            .withCdcStartTime(cdcStartTime)
            .withCdcStartPosition(cdcStartPosition)
            .withCdcStopPosition(cdcStopPosition)
            .withTags(tags)
            .withTaskData(taskData)
            .withResourceIdentifier(resourceIdentifier);

    // Remember the identifier and ARN DMS assigned so later start/stop/delete calls work.
    ReplicationTask createdTask = client.createReplicationTask(request).getReplicationTask();
    replicationTaskIdentifier = createdTask.getReplicationTaskIdentifier();
    replicationTaskArn = createdTask.getReplicationTaskArn();
    logger.info("replicationTaskIdentifier: {}, replicationTaskArn: {}", replicationTaskIdentifier, replicationTaskArn);
    return awaitReplicationTaskStatus(STATUS.READY);
}
/**
 * Starts the replication task identified by {@link #replicationTaskArn} and
 * waits until DMS reports it as RUNNING.
 *
 * @return true when the task reaches the RUNNING state
 */
public Boolean startReplicationTask() {
    logger.info("startReplicationTask ......");
    final StartReplicationTaskRequest startRequest = new StartReplicationTaskRequest()
            .withReplicationTaskArn(replicationTaskArn)
            .withStartReplicationTaskType(startReplicationTaskType)
            .withCdcStartTime(cdcStartTime)
            .withCdcStartPosition(cdcStartPosition)
            .withCdcStopPosition(cdcStopPosition);
    // Refresh the ARN from the response before polling for the RUNNING state.
    replicationTaskArn = client.startReplicationTask(startRequest)
            .getReplicationTask()
            .getReplicationTaskArn();
    return awaitReplicationTaskStatus(STATUS.RUNNING);
}
/**
 * Waits until the replication task reaches the STOPPED state and then checks
 * whether it stopped because the migration finished successfully.
 *
 * @return true only when the task's stop reason ends with the finish token
 */
public Boolean checkFinishedReplicationTask() {
    logger.info("checkFinishedReplicationTask ......");
    awaitReplicationTaskStatus(STATUS.STOPPED);
    String stopReason = describeReplicationTasks().getStopReason();
    // Guard against a missing stop reason: DMS may return null here, which
    // previously caused a NullPointerException. No reason != finished.
    return stopReason != null && stopReason.endsWith(STATUS.FINISH_END_TOKEN);
}
/**
 * Request the replication task to stop and wait until DMS reports it as stopped.
 * A no-op when no task ARN has been resolved yet.
 */
public void stopReplicationTask() {
    logger.info("stopReplicationTask ......");
    // nothing to stop when the task was never created or resolved
    if (replicationTaskArn == null) {
        return;
    }
    StopReplicationTaskRequest stopRequest =
            new StopReplicationTaskRequest().withReplicationTaskArn(replicationTaskArn);
    client.stopReplicationTask(stopRequest);
    awaitReplicationTaskStatus(STATUS.STOPPED);
}
/**
 * Delete the replication task and wait for the deletion to complete.
 *
 * @return true when the task reached the delete status or already vanished from the API
 */
public Boolean deleteReplicationTask() {
    logger.info("deleteReplicationTask ......");
    client.deleteReplicationTask(
            new DeleteReplicationTaskRequest().withReplicationTaskArn(replicationTaskArn));
    try {
        return awaitReplicationTaskStatus(STATUS.DELETE);
    } catch (ResourceNotFoundException e) {
        // the task is already gone from the API, which also means deletion succeeded
        return true;
    }
}
/**
 * Verify the replication instance can reach both the source and target endpoints.
 * The target check is skipped (short-circuit) when the source check already failed.
 *
 * @return true only when both endpoint connections succeed
 */
public Boolean testConnectionEndpoint() {
    boolean sourceReachable = testConnection(replicationInstanceArn, sourceEndpointArn);
    return sourceReachable && testConnection(replicationInstanceArn, targetEndpointArn);
}
/**
 * Trigger a DMS connection test between a replication instance and an endpoint,
 * then poll until the test resolves.
 *
 * @param replicationInstanceArn ARN of the replication instance
 * @param endpointArn            ARN of the endpoint to test
 * @return true when the connection test ends in the successful status
 */
public Boolean testConnection(String replicationInstanceArn, String endpointArn) {
    logger.info("Test connect replication instance: {} and endpoint: {}", replicationInstanceArn, endpointArn);
    TestConnectionRequest testRequest = new TestConnectionRequest()
            .withReplicationInstanceArn(replicationInstanceArn)
            .withEndpointArn(endpointArn);
    try {
        client.testConnection(testRequest);
    } catch (InvalidResourceStateException e) {
        // a test may already be in progress for this pair; log and fall through to polling
        logger.info(e.getErrorMessage());
    }
    return awaitConnectSuccess(replicationInstanceArn, endpointArn);
}
/**
 * Poll DescribeConnections until the instance/endpoint connection test resolves.
 * Keeps waiting while the status is "testing"; any other non-successful status is a failure.
 *
 * @param replicationInstanceArn ARN of the replication instance under test
 * @param endpointArn            ARN of the endpoint under test
 * @return true when the connection status becomes successful, false otherwise
 */
public Boolean awaitConnectSuccess(String replicationInstanceArn, String endpointArn) {
    Filter instanceFilters = new Filter().withName(AWS_KEY.REPLICATION_INSTANCE_ARN).withValues(replicationInstanceArn);
    Filter endpointFilters = new Filter().withName(AWS_KEY.ENDPOINT_ARN).withValues(endpointArn);
    DescribeConnectionsRequest request = new DescribeConnectionsRequest().withFilters(endpointFilters, instanceFilters)
            .withMarker("");
    while (true) {
        ThreadUtils.sleep(CONSTANTS.CHECK_INTERVAL);
        DescribeConnectionsResult response = client.describeConnections(request);
        // the connection record may not be visible yet right after the test was
        // issued (eventual consistency); the original get(0) threw
        // IndexOutOfBoundsException in that window — keep polling instead
        if (response.getConnections().isEmpty()) {
            continue;
        }
        String status = response.getConnections().get(0).getStatus();
        if (status.equals(STATUS.SUCCESSFUL)) {
            logger.info("Connect successful");
            return true;
        } else if (!status.equals(STATUS.TESTING)) {
            // any terminal status other than successful (e.g. failed) stops the wait
            break;
        }
    }
    logger.info("Connect error");
    return false;
}
/**
 * Look up the replication task matching {@code replicationTaskArn} and backfill any
 * locally-unset ARNs (source/target endpoints, instance, task) from the description.
 *
 * @return the first matching {@link ReplicationTask}
 * @throws ResourceNotFoundException when no task matches the filter — the original
 *         code threw a bare IndexOutOfBoundsException from get(0), which bypassed
 *         the ResourceNotFoundException handling in deleteReplicationTask
 */
public ReplicationTask describeReplicationTasks() {
    Filter replicationTaskFilter = new Filter().withName(AWS_KEY.REPLICATION_TASK_ARN).withValues(replicationTaskArn);
    DescribeReplicationTasksRequest request = new DescribeReplicationTasksRequest().withFilters(replicationTaskFilter).withMaxRecords(20).withMarker("");
    DescribeReplicationTasksResult result = client.describeReplicationTasks(request);
    if (result.getReplicationTasks().isEmpty()) {
        throw new ResourceNotFoundException("Replication task not found, arn: " + replicationTaskArn);
    }
    ReplicationTask replicationTask = result.getReplicationTasks().get(0);
    // fill in any identifiers that were not configured locally
    if (sourceEndpointArn == null) {
        sourceEndpointArn = replicationTask.getSourceEndpointArn();
    }
    if (targetEndpointArn == null) {
        targetEndpointArn = replicationTask.getTargetEndpointArn();
    }
    if (replicationInstanceArn == null) {
        replicationInstanceArn = replicationTask.getReplicationInstanceArn();
    }
    if (replicationTaskArn == null) {
        replicationTaskArn = replicationTask.getReplicationTaskArn();
    }
    return replicationTask;
}
/**
 * Poll the replication task until it reaches the expected status.
 * While running or stopped, the full-load progress percentage is logged whenever it changes.
 *
 * @param exceptStatus the status to wait for (returns true when reached)
 * @param stopStatus   statuses that abort the wait (returns false when reached)
 * @return true when the expected status was reached, false on any stop status
 */
public Boolean awaitReplicationTaskStatus(String exceptStatus, String... stopStatus) {
    final List<String> abortStatuses = Arrays.asList(stopStatus);
    Integer lastPercent = 0;
    while (true) {
        ThreadUtils.sleep(CONSTANTS.CHECK_INTERVAL);
        ReplicationTask task = describeReplicationTasks();
        String status = task.getStatus();
        // progress is only meaningful while the task is running or has stopped
        if (status.equals(STATUS.RUNNING) || status.equals(STATUS.STOPPED)) {
            ReplicationTaskStats stats = task.getReplicationTaskStats();
            Integer percent = (stats == null) ? 0 : stats.getFullLoadProgressPercent();
            // only log when the percentage actually moved, to avoid log spam
            if (!lastPercent.equals(percent)) {
                logger.info(String.format("fullLoadProgressPercent: %s ", percent));
            }
            lastPercent = percent;
        }
        if (exceptStatus.equals(status)) {
            logger.info("success");
            return true;
        }
        if (abortStatuses.contains(status)) {
            logger.info("error");
            return false;
        }
    }
}
/**
 * Resolve a parameter value that may reference a local file. Values of the form
 * "file://path" are replaced by the UTF-8 contents of that file; anything else
 * (including null) is returned unchanged.
 *
 * @param parameter the raw parameter value, possibly a file:// reference
 * @return the resolved parameter content
 * @throws IOException when the referenced file cannot be read
 */
public String replaceFileParameters(String parameter) throws IOException {
    if (parameter == null) {
        return null;
    }
    if (parameter.startsWith("file://")) {
        String filePath = parameter.substring(7);
        // try-with-resources: the original leaked the FileInputStream because
        // IOUtils.toString does not close the stream it reads from
        try (FileInputStream fileStream = new FileInputStream(filePath)) {
            return IOUtils.toString(fileStream, StandardCharsets.UTF_8);
        } catch (IOException e) {
            throw new IOException("Error reading file: " + filePath, e);
        }
    }
    return parameter;
}
/**
 * Snapshot the remote identifier owned by this hook so the task can persist it
 * and later resume tracking the same replication task.
 */
public ApplicationIds getApplicationIds() {
    ApplicationIds ids = new ApplicationIds(replicationTaskArn);
    return ids;
}
// Serializable holder for the remote identifiers of a submitted DMS task.
// Lombok generates the getters/setters and both constructors; the no-args
// constructor is required for JSON deserialization when recovering state.
@Data
@AllArgsConstructor
@NoArgsConstructor
public static class ApplicationIds {
    // ARN of the DMS replication task being tracked
    private String replicationTaskArn;
}
// Status strings as reported by the AWS DMS API for replication tasks
// ("delete"/"ready"/"running"/"stopped") and connection tests ("successful"/"testing").
public static class STATUS {
    public static final String DELETE = "delete";
    public static final String READY = "ready";
    public static final String RUNNING = "running";
    public static final String STOPPED = "stopped";
    public static final String SUCCESSFUL = "successful";
    public static final String TESTING = "testing";
    // suffix of a stop reason that indicates the task completed normally
    // (matched with endsWith in checkFinishedReplicationTask)
    public static final String FINISH_END_TOKEN = "FINISHED";
}
// Filter names accepted by the DMS Describe* APIs (see Filter.withName usages above).
public static class AWS_KEY {
    public static final String REPLICATION_TASK_ARN = "replication-task-arn";
    public static final String REPLICATION_INSTANCE_ARN = "replication-instance-arn";
    public static final String ENDPOINT_ARN = "endpoint-arn";
}
// Values for StartReplicationTaskType: "start-replication" for a fresh task,
// "reload-target" when restarting an existing task.
public static class START_TYPE {
    public static final String START_REPLICATION = "start-replication";
    public static final String RELOAD_TARGET = "reload-target";
}
public static class CONSTANTS {
    // polling interval in milliseconds between status checks
    public static final int CHECK_INTERVAL = 1000;
}
}

65
dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsParameters.java

@ -0,0 +1,65 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.dms;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
import java.util.Date;
import java.util.List;
import com.amazonaws.services.databasemigrationservice.model.Tag;
import lombok.Data;
@Data
public class DmsParameters extends AbstractParameters {

    // true when restarting an existing replication task instead of creating one
    private Boolean isRestartTask = false;
    // true when the task is configured via a raw DMS-API-style JSON payload
    private Boolean isJsonFormat = false;
    // raw JSON payload, only used when isJsonFormat is true
    private String jsonData;

    // fields mirroring the AWS DMS CreateReplicationTask request
    private String replicationTaskIdentifier;
    private String sourceEndpointArn;
    private String targetEndpointArn;
    private String replicationInstanceArn;
    private String migrationType;
    private String tableMappings;
    private String replicationTaskSettings;
    private Date cdcStartTime;
    private String cdcStartPosition;
    private String cdcStopPosition;
    private List<Tag> tags;
    private String taskData;
    private String resourceIdentifier;

    // fields for restarting an existing task
    private String replicationTaskArn;
    private String startReplicationTaskType;

    /**
     * Validate that the fields required for the selected mode are present:
     * JSON mode needs the payload, restart mode needs the task ARN, and
     * create mode needs the full set of endpoint/instance/mapping fields.
     */
    @Override
    public boolean checkParameters() {
        if (isJsonFormat) {
            return jsonData != null;
        }
        if (isRestartTask) {
            return replicationTaskArn != null;
        }
        return sourceEndpointArn != null && targetEndpointArn != null && replicationInstanceArn != null
                && migrationType != null && replicationTaskIdentifier != null && tableMappings != null;
    }
}

257
dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTask.java

@ -0,0 +1,257 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.dms;
import static com.fasterxml.jackson.databind.DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT;
import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;
import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL;
import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS;
import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskException;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.parser.ParamUtils;
import org.apache.dolphinscheduler.plugin.task.api.parser.ParameterUtils;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import org.apache.commons.beanutils.BeanUtils;
import java.util.Collections;
import java.util.List;
import com.amazonaws.services.databasemigrationservice.model.InvalidResourceStateException;
import com.amazonaws.services.databasemigrationservice.model.ReplicationTask;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
public class DmsTask extends AbstractRemoteTask {

    /**
     * Jackson mapper for the raw-JSON parameter mode. The DMS API uses UpperCamelCase
     * property names (e.g. "ReplicationTaskIdentifier"), hence the naming strategy;
     * unknown properties are tolerated so users can paste AWS-CLI-style JSON.
     */
    private static final ObjectMapper objectMapper =
            new ObjectMapper().configure(FAIL_ON_UNKNOWN_PROPERTIES, false)
                    .configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true)
                    .configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true)
                    .configure(REQUIRE_SETTERS_FOR_GETTERS, true)
                    .setPropertyNamingStrategy(new PropertyNamingStrategy.UpperCamelCaseStrategy());

    /**
     * taskExecutionContext
     */
    private final TaskExecutionContext taskExecutionContext;

    // wraps all AWS DMS SDK calls; public so tests can inject a mock
    public DmsHook dmsHook;

    /**
     * Dms parameters
     */
    private DmsParameters parameters;

    // remote identifier persisted via setAppIds so tracking can resume after recovery
    private DmsHook.ApplicationIds appId;

    public DmsTask(TaskExecutionContext taskExecutionContext) {
        super(taskExecutionContext);
        this.taskExecutionContext = taskExecutionContext;
    }

    @Override
    public void init() throws TaskException {
        logger.info("Dms task params {}", taskExecutionContext.getTaskParams());
        parameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), DmsParameters.class);
        // fail fast on unparsable task params instead of NPE-ing later in initDmsHook
        if (parameters == null) {
            throw new TaskException("Dms task params is invalid");
        }
        initDmsHook();
    }

    @Override
    public List<String> getApplicationIds() throws TaskException {
        return Collections.emptyList();
    }

    /**
     * Create (unless restarting) and start the replication task, then persist the
     * task ARN as the application id. A task that failed to start and was freshly
     * created is deleted again to avoid leaking DMS resources.
     */
    @Override
    public void submitApplication() throws TaskException {
        exitStatusCode = checkCreateReplicationTask();
        if (exitStatusCode == TaskConstants.EXIT_CODE_SUCCESS) {
            exitStatusCode = startReplicationTask();
        } else {
            throw new TaskException("Failed to create a ReplicationTask");
        }
        // if the task is not running, the task will be deleted
        if (exitStatusCode == TaskConstants.EXIT_CODE_FAILURE && !parameters.getIsRestartTask()) {
            dmsHook.deleteReplicationTask();
        } else {
            appId = dmsHook.getApplicationIds();
            setAppIds(JSONUtils.toJsonString(appId));
        }
    }

    /**
     * Track the running replication task until it finishes. CDC tasks without a
     * cdcStopPosition run indefinitely, so they are reported successful immediately
     * instead of being polled forever.
     */
    @Override
    public void trackApplicationStatus() {
        initAppId();
        dmsHook.setReplicationTaskArn(appId.getReplicationTaskArn());
        // if CdcStopPosition is not set, the task will not continue to check the running status
        if (isStopTaskWhenCdc()) {
            logger.info("This is a cdc task and cdcStopPosition is not set, the task will not continue to check the running status");
            exitStatusCode = TaskConstants.EXIT_CODE_SUCCESS;
            return;
        }
        Boolean isFinishedSuccessfully = dmsHook.checkFinishedReplicationTask();
        if (isFinishedSuccessfully) {
            exitStatusCode = TaskConstants.EXIT_CODE_SUCCESS;
        } else {
            throw new TaskException("DMS task failed to track");
        }
    }

    /**
     * init DMS remote AppId if null, recovering it from the persisted appIds JSON.
     *
     * @throws TaskException when no application id can be recovered
     */
    private void initAppId() {
        if (appId == null) {
            if (StringUtils.isNotEmpty(getAppIds())) {
                appId = JSONUtils.parseObject(getAppIds(), DmsHook.ApplicationIds.class);
            }
        }
        if (appId == null) {
            throw new TaskException("DMS applicationID is null");
        }
    }

    /**
     * Create the replication task unless this run is a restart of an existing one.
     *
     * @return EXIT_CODE_SUCCESS when created (or restarting), EXIT_CODE_FAILURE otherwise
     * @throws TaskException when the create call itself fails
     */
    public int checkCreateReplicationTask() throws TaskException {
        // if IsRestartTask, return success, do not create replication task
        if (parameters.getIsRestartTask()) {
            return TaskConstants.EXIT_CODE_SUCCESS;
        }
        // if not IsRestartTask, create replication task
        Boolean isCreateSuccessfully;
        try {
            isCreateSuccessfully = dmsHook.createReplicationTask();
        } catch (Exception e) {
            throw new TaskException("DMS task create replication task error", e);
        }
        // if create replication task successfully, return EXIT_CODE_SUCCESS, else return EXIT_CODE_FAILURE
        if (isCreateSuccessfully) {
            return TaskConstants.EXIT_CODE_SUCCESS;
        } else {
            return TaskConstants.EXIT_CODE_FAILURE;
        }
    }

    /**
     * Start the replication task, retrying once after re-testing endpoint
     * connections when the failure looks like a stale connection test.
     *
     * @return EXIT_CODE_SUCCESS when the task started, EXIT_CODE_FAILURE otherwise
     */
    public int startReplicationTask() {
        Boolean isStartSuccessfully = false;
        try {
            isStartSuccessfully = dmsHook.startReplicationTask();
        } catch (InvalidResourceStateException e) {
            logger.error("Failed to start a task, error message: {}", e.getErrorMessage());
            // Only restart task when the error contains "Test connection", means instance can not connect to source or target
            if (!e.getErrorMessage().contains("Test connection")) {
                return TaskConstants.EXIT_CODE_FAILURE;
            }
            logger.info("restart replication task");
            // if only restart task, run dmsHook.describeReplicationTasks to get replication task arn
            if (parameters.getIsRestartTask()) {
                dmsHook.describeReplicationTasks();
            }
            // test connection endpoint again and restart task if connection is ok
            if (dmsHook.testConnectionEndpoint()) {
                isStartSuccessfully = dmsHook.startReplicationTask();
            }
        }
        // if start replication task failed, return EXIT_CODE_FAILURE
        if (!isStartSuccessfully) {
            return TaskConstants.EXIT_CODE_FAILURE;
        }
        return TaskConstants.EXIT_CODE_SUCCESS;
    }

    /**
     * check if stop task when cdc
     *
     * @return true if the migration type involves CDC and cdcStopPosition is not set, else false
     */
    public Boolean isStopTaskWhenCdc() {
        ReplicationTask replicationTask = dmsHook.describeReplicationTasks();
        String migrationType = replicationTask.getMigrationType();
        return migrationType.contains("cdc") && parameters.getCdcStopPosition() == null;
    }

    /**
     * Initialize the DMS hook from the (possibly JSON-sourced) parameters and
     * default the start type when the user did not configure one.
     *
     * @throws TaskException when parameter copying fails
     */
    public void initDmsHook() throws TaskException {
        convertJsonParameters();
        dmsHook = new DmsHook();
        try {
            BeanUtils.copyProperties(dmsHook, parameters);
        } catch (Exception e) {
            throw new TaskException("DMS task init error", e);
        }
        // idiom fix: was `!StringUtils.isNotEmpty(...)` — use isEmpty directly.
        // Default the start type: restarts reload the target, fresh tasks start replication.
        if (StringUtils.isEmpty(parameters.getStartReplicationTaskType())) {
            if (parameters.getIsRestartTask()) {
                dmsHook.setStartReplicationTaskType(DmsHook.START_TYPE.RELOAD_TARGET);
            } else {
                dmsHook.setStartReplicationTaskType(DmsHook.START_TYPE.START_REPLICATION);
            }
        }
    }

    /**
     * convert json parameters to dms parameters, substituting workflow parameter
     * placeholders in the JSON payload first
     *
     * @throws TaskException when the JSON payload cannot be parsed
     */
    public void convertJsonParameters() throws TaskException {
        // create a new parameter object using the json data if the json data is not empty
        if (parameters.getIsJsonFormat() && parameters.getJsonData() != null) {
            // combining local and global parameters
            String jsonData = ParameterUtils.convertParameterPlaceholders(parameters.getJsonData(), ParamUtils.convert(taskExecutionContext.getPrepareParamsMap()));
            boolean isRestartTask = parameters.getIsRestartTask();
            try {
                parameters = objectMapper.readValue(jsonData, DmsParameters.class);
                // the restart flag lives outside the JSON payload, so carry it over
                parameters.setIsRestartTask(isRestartTask);
            } catch (Exception e) {
                logger.error("Failed to convert json data to DmsParameters object.", e);
                throw new TaskException(e.getMessage());
            }
        }
    }

    @Override
    public DmsParameters getParameters() {
        return parameters;
    }

    @Override
    public void cancelApplication() {
        dmsHook.stopReplicationTask();
    }
}

49
dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannel.java

@ -0,0 +1,49 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.dms;
import org.apache.dolphinscheduler.plugin.task.api.TaskChannel;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
public class DmsTaskChannel implements TaskChannel {

    @Override
    public void cancelApplication(boolean status) {
        // no-op: cancellation is handled per-task in DmsTask#cancelApplication
    }

    @Override
    public DmsTask createTask(TaskExecutionContext taskRequest) {
        DmsTask task = new DmsTask(taskRequest);
        return task;
    }

    @Override
    public AbstractParameters parseParameters(ParametersNode parametersNode) {
        String taskParams = parametersNode.getTaskParams();
        return JSONUtils.parseObject(taskParams, DmsParameters.class);
    }

    @Override
    public ResourceParametersHelper getResources(String parameters) {
        // DMS tasks do not reference any pooled resources (e.g. datasources)
        return null;
    }
}

47
dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannelFactory.java

@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.dms;
import org.apache.dolphinscheduler.plugin.task.api.TaskChannel;
import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import java.util.Collections;
import java.util.List;
import com.google.auto.service.AutoService;
// SPI entry point: registered via AutoService so the worker discovers the DMS
// task plugin by the name "DMS".
@AutoService(TaskChannelFactory.class)
public class DmsTaskChannelFactory implements TaskChannelFactory {
    @Override
    public TaskChannel create() {
        return new DmsTaskChannel();
    }
    @Override
    public String getName() {
        return "DMS";
    }
    @Override
    public List<PluginParams> getParams() {
        // no extra UI plugin params; the frontend form is driven by task-type-config.yaml
        return Collections.emptyList();
    }
}

237
dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/test/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHookTest.java

@ -0,0 +1,237 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.dms;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import com.amazonaws.services.databasemigrationservice.AWSDatabaseMigrationService;
import com.amazonaws.services.databasemigrationservice.model.CreateReplicationTaskResult;
import com.amazonaws.services.databasemigrationservice.model.DescribeReplicationTasksResult;
import com.amazonaws.services.databasemigrationservice.model.ReplicationTask;
import com.amazonaws.services.databasemigrationservice.model.ReplicationTaskStats;
import com.amazonaws.services.databasemigrationservice.model.StartReplicationTaskResult;
// Unit tests for DmsHook. The AWS client is mocked and DmsHook.createClient is
// stubbed statically so no real AWS calls are made; timeouts guard against the
// hook's polling loops spinning forever when a stub is wrong.
@RunWith(MockitoJUnitRunner.class)
public class DmsHookTest {
    final String replicationTaskArn = "arn:aws:dms:ap-southeast-1:123456789012:task:task";
    AWSDatabaseMigrationService client;
    @Before
    public void before() {
        client = mock(AWSDatabaseMigrationService.class);
    }
    // createReplicationTask should report success once the task reaches READY and
    // record the identifier/ARN returned by the service.
    @Test(timeout = 60000)
    public void testCreateReplicationTask() throws Exception {
        try (MockedStatic<DmsHook> mockHook = Mockito.mockStatic(DmsHook.class)) {
            mockHook.when(DmsHook::createClient).thenReturn(client);
            DmsHook dmsHook = spy(new DmsHook());
            CreateReplicationTaskResult createReplicationTaskResult = mock(CreateReplicationTaskResult.class);
            when(client.createReplicationTask(any())).thenReturn(createReplicationTaskResult);
            ReplicationTask replicationTask = mock(ReplicationTask.class);
            final String taskIdentifier = "task";
            when(replicationTask.getReplicationTaskArn()).thenReturn(replicationTaskArn);
            when(replicationTask.getReplicationTaskIdentifier()).thenReturn(taskIdentifier);
            when(replicationTask.getStatus()).thenReturn(DmsHook.STATUS.READY);
            when(createReplicationTaskResult.getReplicationTask()).thenReturn(replicationTask);
            doReturn(replicationTask).when(dmsHook).describeReplicationTasks();
            Assert.assertTrue(dmsHook.createReplicationTask());
            Assert.assertEquals(replicationTaskArn, dmsHook.getReplicationTaskArn());
            Assert.assertEquals(taskIdentifier, dmsHook.getReplicationTaskIdentifier());
        }
    }
    // startReplicationTask should report success once the task reaches RUNNING.
    @Test(timeout = 60000)
    public void testStartReplicationTask() {
        try (MockedStatic<DmsHook> mockHook = Mockito.mockStatic(DmsHook.class)) {
            mockHook.when(DmsHook::createClient).thenReturn(client);
            DmsHook dmsHook = spy(new DmsHook());
            StartReplicationTaskResult startReplicationTaskResult = mock(StartReplicationTaskResult.class);
            when(client.startReplicationTask(any())).thenReturn(startReplicationTaskResult);
            ReplicationTask replicationTask = mock(ReplicationTask.class);
            when(replicationTask.getReplicationTaskArn()).thenReturn(replicationTaskArn);
            when(replicationTask.getStatus()).thenReturn(DmsHook.STATUS.RUNNING);
            when(startReplicationTaskResult.getReplicationTask()).thenReturn(replicationTask);
            doReturn(replicationTask).when(dmsHook).describeReplicationTasks();
            Assert.assertTrue(dmsHook.startReplicationTask());
            Assert.assertEquals(replicationTaskArn, dmsHook.getReplicationTaskArn());
        }
    }
    // A stopped task counts as finished only when its stop reason ends with FINISHED.
    @Test(timeout = 60000)
    public void testCheckFinishedReplicationTask() {
        try (MockedStatic<DmsHook> mockHook = Mockito.mockStatic(DmsHook.class)) {
            mockHook.when(DmsHook::createClient).thenReturn(client);
            DmsHook dmsHook = spy(new DmsHook());
            ReplicationTask replicationTask = mock(ReplicationTask.class);
            when(replicationTask.getStatus()).thenReturn(DmsHook.STATUS.STOPPED);
            doReturn(replicationTask).when(dmsHook).describeReplicationTasks();
            when(replicationTask.getStopReason()).thenReturn("*_FINISHED");
            Assert.assertTrue(dmsHook.checkFinishedReplicationTask());
            when(replicationTask.getStopReason()).thenReturn("*_ERROR");
            Assert.assertFalse(dmsHook.checkFinishedReplicationTask());
        }
    }
    @Test(timeout = 60000)
    public void testDeleteReplicationTask() {
        try (MockedStatic<DmsHook> mockHook = Mockito.mockStatic(DmsHook.class)) {
            mockHook.when(DmsHook::createClient).thenReturn(client);
            DmsHook dmsHook = spy(new DmsHook());
            ReplicationTask replicationTask = mock(ReplicationTask.class);
            when(replicationTask.getStatus()).thenReturn(DmsHook.STATUS.DELETE);
            doReturn(replicationTask).when(dmsHook).describeReplicationTasks();
            Assert.assertTrue(dmsHook.deleteReplicationTask());
        }
    }
    // Endpoint test succeeds only when both source and target connections succeed;
    // all four true/false combinations are exercised.
    @Test
    public void testTestConnectionEndpoint() {
        try (MockedStatic<DmsHook> mockHook = Mockito.mockStatic(DmsHook.class)) {
            mockHook.when(DmsHook::createClient).thenReturn(client);
            DmsHook dmsHook = spy(new DmsHook());
            String replicationInstanceArn = "replicationInstanceArn";
            String trueSourceEndpointArn = "trueSourceEndpointArn";
            String trueTargetEndpointArn = "trueTargetEndpointArn";
            String falseSourceEndpointArn = "falseSourceEndpointArn";
            String falseTargetEndpointArn = "falseTargetEndpointArn";
            doReturn(true).when(dmsHook).testConnection(replicationInstanceArn, trueSourceEndpointArn);
            doReturn(true).when(dmsHook).testConnection(replicationInstanceArn, trueTargetEndpointArn);
            doReturn(false).when(dmsHook).testConnection(replicationInstanceArn, falseSourceEndpointArn);
            doReturn(false).when(dmsHook).testConnection(replicationInstanceArn, falseTargetEndpointArn);
            dmsHook.setReplicationInstanceArn(replicationInstanceArn);
            dmsHook.setSourceEndpointArn(trueSourceEndpointArn);
            dmsHook.setTargetEndpointArn(trueTargetEndpointArn);
            Assert.assertTrue(dmsHook.testConnectionEndpoint());
            dmsHook.setSourceEndpointArn(falseSourceEndpointArn);
            dmsHook.setTargetEndpointArn(falseTargetEndpointArn);
            Assert.assertFalse(dmsHook.testConnectionEndpoint());
            dmsHook.setSourceEndpointArn(trueSourceEndpointArn);
            dmsHook.setTargetEndpointArn(falseTargetEndpointArn);
            Assert.assertFalse(dmsHook.testConnectionEndpoint());
            dmsHook.setSourceEndpointArn(falseSourceEndpointArn);
            dmsHook.setTargetEndpointArn(trueTargetEndpointArn);
            Assert.assertFalse(dmsHook.testConnectionEndpoint());
        }
    }
    // describeReplicationTasks should return the first matching task and backfill
    // locally-unset endpoint ARNs from the description.
    @Test
    public void testDescribeReplicationTasks() {
        try (MockedStatic<DmsHook> mockHook = Mockito.mockStatic(DmsHook.class)) {
            mockHook.when(DmsHook::createClient).thenReturn(client);
            DmsHook dmsHook = spy(new DmsHook());
            dmsHook.setReplicationInstanceArn("arn:aws:dms:ap-southeast-1:123456789012:task:task_exist");
            DescribeReplicationTasksResult describeReplicationTasksResult = mock(DescribeReplicationTasksResult.class);
            when(client.describeReplicationTasks(any())).thenReturn(describeReplicationTasksResult);
            ReplicationTask replicationTask = mock(ReplicationTask.class);
            when(replicationTask.getReplicationTaskArn()).thenReturn("arn:aws:dms:ap-southeast-1:123456789012:task:task");
            when(replicationTask.getReplicationTaskIdentifier()).thenReturn("task");
            final String sourceArn = "arn:aws:dms:ap-southeast-1:123456789012:endpoint:source";
            final String targetArn = "arn:aws:dms:ap-southeast-1:123456789012:endpoint:target";
            when(replicationTask.getSourceEndpointArn()).thenReturn(sourceArn);
            when(replicationTask.getTargetEndpointArn()).thenReturn(targetArn);
            when(describeReplicationTasksResult.getReplicationTasks()).thenReturn(Arrays.asList(replicationTask));
            ReplicationTask replicationTaskOut = dmsHook.describeReplicationTasks();
            Assert.assertNotEquals(dmsHook.getReplicationInstanceArn(), replicationTaskOut.getReplicationTaskArn());
            Assert.assertEquals("task", replicationTaskOut.getReplicationTaskIdentifier());
            Assert.assertEquals(sourceArn, replicationTaskOut.getSourceEndpointArn());
            Assert.assertEquals(targetArn, replicationTaskOut.getTargetEndpointArn());
        }
    }
    // The status poller returns true when the expected status is (eventually) seen,
    // and false when a stop status is reached first; the consecutive thenReturn
    // values simulate status transitions across polling iterations.
    @Test(timeout = 60000)
    public void testAwaitReplicationTaskStatus() {
        try (MockedStatic<DmsHook> mockHook = Mockito.mockStatic(DmsHook.class)) {
            mockHook.when(DmsHook::createClient).thenReturn(client);
            DmsHook dmsHook = spy(new DmsHook());
            ReplicationTask replicationTask = mock(ReplicationTask.class);
            doReturn(replicationTask).when(dmsHook).describeReplicationTasks();
            ReplicationTaskStats taskStats = mock(ReplicationTaskStats.class);
            when(replicationTask.getReplicationTaskStats()).thenReturn(taskStats);
            when(taskStats.getFullLoadProgressPercent()).thenReturn(100);
            when(replicationTask.getStatus()).thenReturn(
                DmsHook.STATUS.STOPPED
            );
            Assert.assertTrue(dmsHook.awaitReplicationTaskStatus(DmsHook.STATUS.STOPPED));
            when(replicationTask.getStatus()).thenReturn(
                DmsHook.STATUS.RUNNING,
                DmsHook.STATUS.STOPPED
            );
            Assert.assertTrue(dmsHook.awaitReplicationTaskStatus(DmsHook.STATUS.STOPPED));
            when(replicationTask.getStatus()).thenReturn(
                DmsHook.STATUS.RUNNING,
                DmsHook.STATUS.STOPPED
            );
            // RUNNING is a stop status here, so the wait aborts before STOPPED is seen
            Assert.assertFalse(dmsHook.awaitReplicationTaskStatus(DmsHook.STATUS.STOPPED, DmsHook.STATUS.RUNNING));
        }
    }
}

188
dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/test/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskTest.java

@ -0,0 +1,188 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.dms;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.apache.dolphinscheduler.plugin.task.api.TaskConstants;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import java.lang.reflect.Field;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import com.amazonaws.services.databasemigrationservice.model.InvalidResourceStateException;
import com.amazonaws.services.databasemigrationservice.model.ReplicationTask;
@RunWith(MockitoJUnitRunner.class)
public class DmsTaskTest {
@Mock
DmsHook dmsHook;
DmsTask dmsTask;
/**
 * Build a DmsTask with default parameters and inject the mocked hook.
 * dmsHook is a public field on DmsTask, so assign it directly — the original
 * reflective Field lookup was unnecessary and not compile-checked.
 */
@Before
public void before() throws Exception {
    DmsParameters dmsParameters = new DmsParameters();
    dmsTask = initTask(dmsParameters);
    dmsTask.dmsHook = dmsHook;
}
// Verify that JSON-mode parameters (DMS-API UpperCamelCase keys) are mapped onto
// a fresh DmsParameters object by convertJsonParameters, including nested tags.
@Test
public void testCreateTaskJson() {
    String jsonData = "{\n" +
        "    \"ReplicationTaskIdentifier\":\"task6\",\n" +
        "    \"SourceEndpointArn\":\"arn:aws:dms:ap-southeast-1:511640773671:endpoint:Z7SUEAL273SCT7OCPYNF5YNDHJDDFRATGNQISOQ\",\n" +
        "    \"TargetEndpointArn\":\"arn:aws:dms:ap-southeast-1:511640773671:endpoint:aws-mysql57-target\",\n" +
        "    \"ReplicationInstanceArn\":\"arn:aws:dms:ap-southeast-1:511640773671:rep:dms2c2g\",\n" +
        "    \"MigrationType\":\"full-load\",\n" +
        "    \"TableMappings\":\"file://table-mapping.json\",\n" +
        "    \"ReplicationTaskSettings\":\"file://ReplicationTaskSettings.json\",\n" +
        "    \"Tags\":[\n" +
        "        {\n" +
        "            \"Key\":\"key1\",\n" +
        "            \"Value\":\"value1\"\n" +
        "        }\n" +
        "    ]\n" +
        "}";
    DmsParameters dmsParameters = new DmsParameters();
    dmsParameters.setIsJsonFormat(true);
    dmsParameters.setJsonData(jsonData);
    DmsTask dmsTask = initTask(dmsParameters);
    dmsTask.convertJsonParameters();
    // convertJsonParameters replaces the parameters object entirely
    DmsParameters dmsParametersNew = dmsTask.getParameters();
    Assert.assertEquals("task6", dmsParametersNew.getReplicationTaskIdentifier());
    Assert.assertEquals("arn:aws:dms:ap-southeast-1:511640773671:endpoint:Z7SUEAL273SCT7OCPYNF5YNDHJDDFRATGNQISOQ", dmsParametersNew.getSourceEndpointArn());
    Assert.assertEquals("arn:aws:dms:ap-southeast-1:511640773671:endpoint:aws-mysql57-target", dmsParametersNew.getTargetEndpointArn());
    Assert.assertEquals("arn:aws:dms:ap-southeast-1:511640773671:rep:dms2c2g", dmsParametersNew.getReplicationInstanceArn());
    Assert.assertEquals("full-load", dmsParametersNew.getMigrationType());
    Assert.assertEquals("file://table-mapping.json", dmsParametersNew.getTableMappings());
    Assert.assertEquals("file://ReplicationTaskSettings.json", dmsParametersNew.getReplicationTaskSettings());
    Assert.assertEquals("key1", dmsParametersNew.getTags().get(0).getKey());
    Assert.assertEquals("value1", dmsParametersNew.getTags().get(0).getValue());
}
@Test
public void testCheckCreateReplicationTask() throws Exception {
DmsParameters dmsParameters = dmsTask.getParameters();
dmsParameters.setIsRestartTask(true);
Assert.assertEquals(TaskConstants.EXIT_CODE_SUCCESS, dmsTask.checkCreateReplicationTask());
dmsParameters.setIsRestartTask(false);
when(dmsHook.createReplicationTask()).thenReturn(true);
Assert.assertEquals(TaskConstants.EXIT_CODE_SUCCESS, dmsTask.checkCreateReplicationTask());
when(dmsHook.createReplicationTask()).thenReturn(false);
dmsTask.checkCreateReplicationTask();
Assert.assertEquals(TaskConstants.EXIT_CODE_FAILURE, dmsTask.checkCreateReplicationTask());
}
@Test
public void testStartReplicationTask() {
when(dmsHook.startReplicationTask()).thenReturn(true);
Assert.assertEquals(TaskConstants.EXIT_CODE_SUCCESS, dmsTask.startReplicationTask());
when(dmsHook.startReplicationTask()).thenReturn(false);
Assert.assertEquals(TaskConstants.EXIT_CODE_FAILURE, dmsTask.startReplicationTask());
}
@Test
public void testStartReplicationTaskRestartTestConnection() {
DmsParameters parameters = dmsTask.getParameters();
parameters.setIsRestartTask(false);
when(dmsHook.testConnectionEndpoint()).thenReturn(true);
when(dmsHook.startReplicationTask())
.thenThrow(new InvalidResourceStateException("Test connection"))
.thenReturn(true);
Assert.assertEquals(TaskConstants.EXIT_CODE_SUCCESS, dmsTask.startReplicationTask());
when(dmsHook.startReplicationTask())
.thenThrow(new InvalidResourceStateException("Test connection"))
.thenReturn(false);
Assert.assertEquals(TaskConstants.EXIT_CODE_FAILURE, dmsTask.startReplicationTask());
}
@Test
public void testStartReplicationTaskRestartOtherException() {
DmsParameters parameters = dmsTask.getParameters();
parameters.setIsRestartTask(false);
when(dmsHook.startReplicationTask()).thenThrow(new InvalidResourceStateException("other error"));
Assert.assertEquals(TaskConstants.EXIT_CODE_FAILURE, dmsTask.startReplicationTask());
}
@Test
public void testIsStopTaskWhenCdc() {
DmsParameters parameters = dmsTask.getParameters();
parameters.setIsRestartTask(false);
ReplicationTask replicationTask = mock(ReplicationTask.class);
when(dmsTask.dmsHook.describeReplicationTasks()).thenReturn(replicationTask);
when(replicationTask.getMigrationType()).thenReturn("cdc");
parameters.setCdcStopPosition("now");
Assert.assertFalse(dmsTask.isStopTaskWhenCdc());
when(replicationTask.getMigrationType()).thenReturn("full-load-and-cdc");
parameters.setCdcStopPosition("now");
Assert.assertFalse(dmsTask.isStopTaskWhenCdc());
when(replicationTask.getMigrationType()).thenReturn("full-load-and-cdc");
parameters.setCdcStopPosition(null);
Assert.assertTrue(dmsTask.isStopTaskWhenCdc());
when(replicationTask.getMigrationType()).thenReturn("full-load");
parameters.setCdcStopPosition(null);
Assert.assertFalse(dmsTask.isStopTaskWhenCdc());
}
private DmsTask initTask(DmsParameters dmsParameters) {
TaskExecutionContext taskExecutionContext = createContext(dmsParameters);
DmsTask dmsTask = new DmsTask(taskExecutionContext);
dmsTask.init();
return dmsTask;
}
public TaskExecutionContext createContext(DmsParameters dmsParameters) {
String parameters = JSONUtils.toJsonString(dmsParameters);
TaskExecutionContext taskExecutionContext = Mockito.mock(TaskExecutionContext.class);
Mockito.when(taskExecutionContext.getTaskParams()).thenReturn(parameters);
return taskExecutionContext;
}
}

1
dolphinscheduler-task-plugin/pom.xml

@ -62,6 +62,7 @@
<module>dolphinscheduler-task-flink-stream</module> <module>dolphinscheduler-task-flink-stream</module>
<module>dolphinscheduler-task-pytorch</module> <module>dolphinscheduler-task-pytorch</module>
<module>dolphinscheduler-task-hivecli</module> <module>dolphinscheduler-task-hivecli</module>
<module>dolphinscheduler-task-dms</module>
</modules> </modules>
<dependencyManagement> <dependencyManagement>

BIN
dolphinscheduler-ui/public/images/task-icons/dms.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

BIN
dolphinscheduler-ui/public/images/task-icons/dms_hover.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 187 KiB

4
dolphinscheduler-ui/src/store/project/task-type.ts

@ -133,6 +133,10 @@ export const TASK_TYPES_MAP = {
HIVECLI: { HIVECLI: {
alias: 'HIVECLI', alias: 'HIVECLI',
helperLinkDisable: true helperLinkDisable: true
},
DMS: {
alias: 'DMS',
helperLinkDisable: true
} }
} as { } as {
[key in TaskType]: { [key in TaskType]: {

1
dolphinscheduler-ui/src/store/project/types.ts

@ -52,6 +52,7 @@ type TaskType =
| 'FLINK_STREAM' | 'FLINK_STREAM'
| 'PYTORCH' | 'PYTORCH'
| 'HIVECLI' | 'HIVECLI'
| 'DMS'
type ProgramType = 'JAVA' | 'SCALA' | 'PYTHON' type ProgramType = 'JAVA' | 'SCALA' | 'PYTHON'
type DependentResultType = { type DependentResultType = {

1
dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts

@ -80,3 +80,4 @@ export { useChunjun } from './use-chunjun'
export { useChunjunDeployMode } from './use-chunjun-deploy-mode' export { useChunjunDeployMode } from './use-chunjun-deploy-mode'
export { usePytorch } from './use-pytorch' export { usePytorch } from './use-pytorch'
export { useHiveCli } from './use-hive-cli' export { useHiveCli } from './use-hive-cli'
export { useDms } from './use-dms'

140
dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-dms.ts

@@ -0,0 +1,140 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { IJsonItem } from '../types'
import { watch, ref } from 'vue'
import { useCustomParams, useResources } from '.'
/**
 * Build the DMS task form definition.
 *
 * Visibility of each field is driven by two switches on the model:
 * - isRestartTask: restart an existing replication task vs. create a new one
 * - isJsonFormat: supply the whole request as raw JSON instead of single fields
 * A span of 24 shows a field on a full row; 0 hides it.
 */
export function useDms(model: { [field: string]: any }): IJsonItem[] {
  const jsonDataSpan = ref(0)
  const replicationTaskArnSpan = ref(0)
  const replicationTaskIdentifierSpan = ref(0)
  const sourceEndpointArnSpan = ref(0)
  const targetEndpointArnSpan = ref(0)
  const replicationInstanceArnSpan = ref(0)
  const migrationTypeSpan = ref(0)
  const tableMappingsSpan = ref(0)

  // Derive the two mode flags; the comparisons already yield booleans,
  // so the original `cond ? true : false` ternaries were redundant.
  // Boolean() guards the second flag because isRestartTask may be undefined.
  const setFlag = () => {
    model.isCreateAndNotJson = !model.isRestartTask && !model.isJsonFormat
    model.isRestartAndNotJson =
      Boolean(model.isRestartTask) && !model.isJsonFormat
  }

  // Show the raw-JSON editor in JSON mode, the task ARN in restart mode,
  // and the full creation form otherwise.
  const resetSpan = () => {
    jsonDataSpan.value = model.isJsonFormat ? 24 : 0
    replicationTaskArnSpan.value = model.isRestartAndNotJson ? 24 : 0
    migrationTypeSpan.value = model.isCreateAndNotJson ? 24 : 0
    sourceEndpointArnSpan.value = model.isCreateAndNotJson ? 24 : 0
    replicationTaskIdentifierSpan.value = model.isCreateAndNotJson ? 24 : 0
    targetEndpointArnSpan.value = model.isCreateAndNotJson ? 24 : 0
    replicationInstanceArnSpan.value = model.isCreateAndNotJson ? 24 : 0
    tableMappingsSpan.value = model.isCreateAndNotJson ? 24 : 0
  }

  // Recompute flags and visibility whenever either switch changes.
  watch(
    () => [model.isRestartTask, model.isJsonFormat],
    () => {
      setFlag()
      resetSpan()
    }
  )

  // Initialise once so the form is correct before the first user interaction.
  setFlag()
  resetSpan()

  return [
    {
      type: 'switch',
      field: 'isRestartTask',
      name: 'isRestartTask',
      span: 12
    },
    {
      type: 'switch',
      field: 'isJsonFormat',
      name: 'isJsonFormat',
      span: 12
    },
    {
      type: 'editor',
      field: 'jsonData',
      name: 'jsonData',
      span: jsonDataSpan
    },
    {
      type: 'select',
      field: 'migrationType',
      name: 'migrationType',
      span: migrationTypeSpan,
      options: MIGRATION_TYPE
    },
    {
      type: 'input',
      field: 'replicationTaskIdentifier',
      name: 'replicationTaskIdentifier',
      span: replicationTaskIdentifierSpan
    },
    {
      type: 'input',
      field: 'replicationInstanceArn',
      name: 'replicationInstanceArn',
      span: replicationInstanceArnSpan
    },
    {
      type: 'input',
      field: 'sourceEndpointArn',
      name: 'sourceEndpointArn',
      span: sourceEndpointArnSpan
    },
    {
      type: 'input',
      field: 'targetEndpointArn',
      name: 'targetEndpointArn',
      span: targetEndpointArnSpan
    },
    {
      type: 'editor',
      field: 'tableMappings',
      name: 'tableMappings',
      span: tableMappingsSpan
    },
    {
      type: 'input',
      field: 'replicationTaskArn',
      name: 'replicationTaskArn',
      span: replicationTaskArnSpan
    },
    useResources(),
    ...useCustomParams({ model, field: 'localParams', isSimple: false })
  ]
}
/**
 * AWS DMS migration modes selectable in the task form.
 * Label equals value because the raw API identifier is what users expect to see.
 */
export const MIGRATION_TYPE = ['full-load', 'cdc', 'full-load-and-cdc'].map(
  (migrationType) => ({
    label: migrationType,
    value: migrationType
  })
)

12
dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts

@ -433,6 +433,18 @@ export function formatParams(data: INodeData): {
taskParams.hiveSqlScript = data.hiveSqlScript taskParams.hiveSqlScript = data.hiveSqlScript
taskParams.hiveCliOptions = data.hiveCliOptions taskParams.hiveCliOptions = data.hiveCliOptions
} }
if (data.taskType === 'DMS') {
taskParams.isRestartTask = data.isRestartTask
taskParams.isJsonFormat = data.isJsonFormat
taskParams.jsonData = data.jsonData
taskParams.migrationType = data.migrationType
taskParams.replicationTaskIdentifier = data.replicationTaskIdentifier
taskParams.sourceEndpointArn = data.sourceEndpointArn
taskParams.targetEndpointArn = data.targetEndpointArn
taskParams.replicationInstanceArn = data.replicationInstanceArn
taskParams.tableMappings = data.tableMappings
taskParams.replicationTaskArn = data.replicationTaskArn
}
let timeoutNotifyStrategy = '' let timeoutNotifyStrategy = ''
if (data.timeoutNotifyStrategy) { if (data.timeoutNotifyStrategy) {

4
dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts

@ -46,6 +46,7 @@ import { userSagemaker } from './use-sagemaker'
import { useChunjun } from './use-chunjun' import { useChunjun } from './use-chunjun'
import { usePytorch } from './use-pytorch' import { usePytorch } from './use-pytorch'
import { useHiveCli } from './use-hive-cli' import { useHiveCli } from './use-hive-cli'
import { useDms } from './use-dms'
export default { export default {
SHELL: useShell, SHELL: useShell,
@ -78,5 +79,6 @@ export default {
FLINK_STREAM: useFlinkStream, FLINK_STREAM: useFlinkStream,
JAVA: useJava, JAVA: useJava,
PYTORCH: usePytorch, PYTORCH: usePytorch,
HIVECLI: useHiveCli HIVECLI: useHiveCli,
DMS: useDms
} }

83
dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dms.ts

@ -0,0 +1,83 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { reactive } from 'vue'
import * as Fields from '../fields/index'
import type { IJsonItem, INodeData, ITaskData } from '../types'
/**
 * Assemble the DMS node: a reactive form model pre-filled with defaults plus
 * the JSON form description consumed by the dynamic form renderer.
 */
export function useDms({
  projectCode,
  from = 0,
  readonly,
  data
}: {
  projectCode: number
  from?: number
  readonly?: boolean
  data?: ITaskData
}) {
  // Default values for a freshly created DMS task node.
  const model = reactive({
    name: '',
    taskType: 'DMS',
    flag: 'YES',
    description: '',
    timeoutFlag: false,
    localParams: [],
    environmentCode: null,
    failRetryInterval: 1,
    failRetryTimes: 0,
    workerGroup: 'default',
    delayTime: 0,
    timeout: 30,
    timeoutNotifyStrategy: ['WARN'],
    isRestartTask: false
  } as INodeData)

  // Task-type and workflow-name fields are only shown when the node is
  // edited from the task definition page (from === 1).
  const definitionFields: IJsonItem[] =
    from === 1
      ? [
          Fields.useTaskType(model, readonly),
          Fields.useProcessName({
            model,
            projectCode,
            isCreate: !data?.id,
            from,
            processName: data?.processName
          })
        ]
      : []

  return {
    json: [
      Fields.useName(from),
      ...definitionFields,
      Fields.useRunFlag(),
      Fields.useDescription(),
      Fields.useTaskPriority(),
      Fields.useWorkerGroup(),
      Fields.useEnvironmentName(model, !model.id),
      ...Fields.useTaskGroup(model, projectCode),
      ...Fields.useFailed(),
      ...Fields.useResourceLimit(),
      Fields.useDelayTime(model),
      ...Fields.useTimeoutAlarm(model),
      ...Fields.useDms(model),
      Fields.usePreTasks()
    ] as IJsonItem[],
    model
  }
}

10
dolphinscheduler-ui/src/views/projects/task/components/node/types.ts

@ -373,6 +373,16 @@ interface ITaskParams {
pythonEnvTool?: string pythonEnvTool?: string
requirements?: string requirements?: string
condaPythonVersion?: string condaPythonVersion?: string
isRestartTask?: boolean
isJsonFormat?: boolean
jsonData?: string
migrationType?: string
replicationTaskIdentifier?: string
sourceEndpointArn?: string
targetEndpointArn?: string
replicationInstanceArn?: string
tableMappings?: string
replicationTaskArn?: string
} }
interface INodeData interface INodeData

5
dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts

@ -46,6 +46,7 @@ export type TaskType =
| 'FLINK_STREAM' | 'FLINK_STREAM'
| 'PYTORCH' | 'PYTORCH'
| 'HIVECLI' | 'HIVECLI'
| 'DMS'
export type TaskExecuteType = 'STREAM' | 'BATCH' export type TaskExecuteType = 'STREAM' | 'BATCH'
@ -160,6 +161,10 @@ export const TASK_TYPES_MAP = {
HIVECLI: { HIVECLI: {
alias: 'HIVECLI', alias: 'HIVECLI',
helperLinkDisable: true helperLinkDisable: true
},
DMS: {
alias: 'DMS',
helperLinkDisable: true
} }
} as { } as {
[key in TaskType]: { [key in TaskType]: {

6
dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss

@ -192,6 +192,9 @@ $bgLight: #ffffff;
&.icon-pytorch { &.icon-pytorch {
background-image: url('/images/task-icons/pytorch.png'); background-image: url('/images/task-icons/pytorch.png');
} }
&.icon-dms {
background-image: url('/images/task-icons/dms.png');
}
} }
&:hover { &:hover {
@ -287,6 +290,9 @@ $bgLight: #ffffff;
&.icon-pytorch { &.icon-pytorch {
background-image: url('/images/task-icons/pytorch_hover.png'); background-image: url('/images/task-icons/pytorch_hover.png');
} }
&.icon-dms {
background-image: url('/images/task-icons/dms_hover.png');
}
} }
} }

1
tools/dependencies/known-dependencies.txt

@ -17,6 +17,7 @@ aws-java-sdk-emr-1.12.300.jar
aws-java-sdk-kms-1.12.300.jar aws-java-sdk-kms-1.12.300.jar
aws-java-sdk-s3-1.12.300.jar aws-java-sdk-s3-1.12.300.jar
aws-java-sdk-sagemaker-1.12.300.jar aws-java-sdk-sagemaker-1.12.300.jar
aws-java-sdk-dms-1.12.300.jar
bcpkix-jdk15on-1.68.jar bcpkix-jdk15on-1.68.jar
bcprov-jdk15on-1.68.jar bcprov-jdk15on-1.68.jar
bonecp-0.8.0.RELEASE.jar bonecp-0.8.0.RELEASE.jar

Loading…
Cancel
Save