diff --git a/docs/configs/docsdev.js b/docs/configs/docsdev.js
index 770c327cf4..0fdd2a41e4 100644
--- a/docs/configs/docsdev.js
+++ b/docs/configs/docsdev.js
@@ -29,7 +29,7 @@ export default {
},
{
title: 'Features',
- link: '/en-us/docs/dev/user_doc/about/feature.html',
+ link: '/en-us/docs/dev/user_doc/about/features.html',
},
{
title: 'Hardware Environment',
@@ -211,10 +211,6 @@ export default {
{
title: 'Data Source',
children: [
- {
- title: 'Introduction',
- link: '/en-us/docs/dev/user_doc/guide/datasource/introduction.html',
- },
{
title: 'MySQL',
link: '/en-us/docs/dev/user_doc/guide/datasource/mysql.html',
@@ -231,6 +227,30 @@ export default {
title: 'Spark',
link: '/en-us/docs/dev/user_doc/guide/datasource/spark.html',
},
+ {
+ title: 'Presto',
+ link: '/en-us/docs/dev/user_doc/guide/datasource/presto.html',
+ },
+ {
+ title: 'SQL SERVER',
+ link: '/en-us/docs/dev/user_doc/guide/datasource/sqlserver.html',
+ },
+ {
+ title: 'Amazon Redshift',
+ link: '/en-us/docs/dev/user_doc/guide/datasource/redshift.html',
+ },
+ {
+ title: 'ClickHouse',
+ link: '/en-us/docs/dev/user_doc/guide/datasource/clickhouse.html',
+ },
+ {
+ title: 'IBM DB2',
+ link: '/en-us/docs/dev/user_doc/guide/datasource/db2.html',
+ },
+ {
+ title: 'Oracle',
+ link: '/en-us/docs/dev/user_doc/guide/datasource/oracle.html',
+ },
],
},
{
@@ -429,7 +449,7 @@ export default {
},
{
title: '特性',
- link: '/zh-cn/docs/dev/user_doc/about/feature.html',
+ link: '/zh-cn/docs/dev/user_doc/about/features.html',
},
{
title: '建议配置',
@@ -611,10 +631,6 @@ export default {
{
title: '数据源中心',
children: [
- {
- title: '简介',
- link: '/zh-cn/docs/dev/user_doc/guide/datasource/introduction.html',
- },
{
title: 'MySQL',
link: '/zh-cn/docs/dev/user_doc/guide/datasource/mysql.html',
diff --git a/docs/docs/en/about/hardware.md b/docs/docs/en/about/hardware.md
index 3e8468e080..f67066e8c9 100644
--- a/docs/docs/en/about/hardware.md
+++ b/docs/docs/en/about/hardware.md
@@ -1,10 +1,12 @@
# Hardware Environment
-DolphinScheduler, as an open-source distributed workflow task scheduling system, can deploy and run smoothly in Intel architecture server environments and mainstream virtualization environments and supports mainstream Linux operating system environments.
+This section describes the hardware requirements for DolphinScheduler. DolphinScheduler works as an open-source distributed workflow task scheduling system. It can be deployed and run smoothly in Intel architecture server environments and mainstream virtualization environments. It also supports mainstream Linux operating system environments and ARM architecture.
## Linux Operating System Version Requirements
-| OS | Version |
+The Linux operating systems specified below can run on physical servers and mainstream virtualization environments such as VMware, KVM, and XEN.
+
+| Operating System | Version |
| :----------------------- | :----------: |
| Red Hat Enterprise Linux | 7.0 and above |
| CentOS | 7.0 and above |
@@ -14,9 +16,9 @@ DolphinScheduler, as an open-source distributed workflow task scheduling system,
> **Note:**
>The above Linux operating systems can run on physical servers and mainstream virtualization environments such as VMware, KVM, and XEN.
-## Recommended Server Configuration
+## Server Configuration
-DolphinScheduler supports 64-bit hardware platforms with Intel x86-64 architecture. The following shows the recommended server requirements in a production environment:
+DolphinScheduler supports 64-bit hardware platforms with Intel x86-64 architecture. The following table shows the recommended server requirements in a production environment:
### Production Environment
@@ -28,7 +30,6 @@ DolphinScheduler supports 64-bit hardware platforms with Intel x86-64 architectu
> - The above recommended configuration is the minimum configuration for deploying DolphinScheduler. Higher configuration is strongly recommended for production environments.
> - The recommended hard disk size is more than 50GB and separate the system disk and data disk.
-
## Network Requirements
DolphinScheduler provides the following network port configurations for normal operation:
@@ -45,4 +46,4 @@ DolphinScheduler provides the following network port configurations for normal o
## Browser Requirements
-DolphinScheduler recommends Chrome and the latest browsers which use Chrome Kernel to access the front-end UI page.
\ No newline at end of file
+The minimum supported version of Google Chrome is version 85, but version 90 or above is recommended.
\ No newline at end of file
diff --git a/docs/docs/en/guide/alert/dingtalk.md b/docs/docs/en/guide/alert/dingtalk.md
index 03f18b5c4b..f0b9196386 100644
--- a/docs/docs/en/guide/alert/dingtalk.md
+++ b/docs/docs/en/guide/alert/dingtalk.md
@@ -1,6 +1,8 @@
# DingTalk
-If you need to use `DingTalk` for alerting, create an alert instance in the alert instance management and select the `DingTalk` plugin. The following shows the `DingTalk` configuration example:
+If you need to use `DingTalk` for alerting, create an alert instance in the alert instance management and select the `DingTalk` plugin.
+
+The following shows the `DingTalk` configuration example:
![alert-dingtalk](../../../../img/new_ui/dev/alert/alert_dingtalk.png)
diff --git a/docs/docs/en/guide/data-quality.md b/docs/docs/en/guide/data-quality.md
index 04418ba546..ec308df3bb 100644
--- a/docs/docs/en/guide/data-quality.md
+++ b/docs/docs/en/guide/data-quality.md
@@ -2,7 +2,8 @@
## Introduction
The data quality task is used to check the data accuracy during the integration and processing of data. Data quality tasks in this release include single-table checking, single-table custom SQL checking, multi-table accuracy, and two-table value comparisons. The running environment of the data quality task is Spark 2.4.0, and other versions have not been verified, and users can verify by themselves.
-The execution flow of the data quality task is as follows:
+
+The execution logic of the data quality task is as follows:
- The user defines the task in the interface, and the user input value is stored in `TaskParam`.
- When running a task, `Master` will parse `TaskParam`, encapsulate the parameters required by `DataQualityTask` and send it to `Worker`.
@@ -39,9 +40,12 @@ In the example, assuming that the actual value is 10, the operator is >, and the
# Task Operation Guide
## Null Value Check for Single Table Check
+
### Inspection Introduction
+
The goal of the null value check is to check the number of empty rows in the specified column. The number of empty rows can be compared with the total number of rows or a specified threshold. If it is greater than a certain threshold, it will be judged as failure.
-- Calculate the SQL statement that the specified column is empty as follows:
+
+- The SQL statement that counts the null values of the specified column is as follows:
```sql
SELECT COUNT(*) AS miss FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '') AND (${src_filter})
@@ -121,7 +125,7 @@ The goal of field length verification is to check whether the length of the sele
## Uniqueness Check for Single Table Check
### Inspection Introduction
-The goal of the uniqueness check is to check whether the field is duplicated. It is generally used to check whether the primary key is duplicated. If there is duplication and the threshold is reached, the check task will be judged to be failed.
+The goal of the uniqueness check is to check whether the fields are duplicated. It is generally used to check whether the primary key is duplicated. If there are duplicates and the threshold is reached, the check task will be judged to be failed.
### Interface Operation Guide
@@ -165,7 +169,7 @@ The goal of regular expression verification is to check whether the format of th
## Enumeration Value Validation for Single Table Check
### Inspection Introduction
-The goal of enumeration value verification is to check whether the value of a field is within the range of enumeration values. If there is data that is not in the range of enumeration values and exceeds the threshold, the task will be judged to fail
+The goal of enumeration value verification is to check whether the value of a field is within the range of the enumeration value. If there is data that is not in the range of the enumeration value and exceeds the threshold, the task will be judged to fail.
### Interface Operation Guide
diff --git a/docs/docs/en/guide/datasource/oracle.md b/docs/docs/en/guide/datasource/oracle.md
index e9c53d5067..c7d217ad51 100644
--- a/docs/docs/en/guide/datasource/oracle.md
+++ b/docs/docs/en/guide/datasource/oracle.md
@@ -17,4 +17,5 @@
| jdbc connect parameters | Parameter settings for Oracle connection, in JSON format. |
## Native Supported
+
Yes, could use this datasource by default.
\ No newline at end of file
diff --git a/docs/docs/en/guide/datasource/redshift.md b/docs/docs/en/guide/datasource/redshift.md
index 1e7cf7b5a2..3dbae981d1 100644
--- a/docs/docs/en/guide/datasource/redshift.md
+++ b/docs/docs/en/guide/datasource/redshift.md
@@ -15,6 +15,7 @@
| Password | Set the password for Redshift connection. |
| Database Name | Enter the database name of the Redshift connection. |
| jdbc connect parameters | Parameter settings for Redshift connection, in JSON format. |
+
## Native Supported
Yes, could use this datasource by default.
\ No newline at end of file
diff --git a/docs/docs/en/guide/datasource/sql-server.md b/docs/docs/en/guide/datasource/sqlserver.md
similarity index 94%
rename from docs/docs/en/guide/datasource/sql-server.md
rename to docs/docs/en/guide/datasource/sqlserver.md
index bbcc7cc672..be0addf991 100644
--- a/docs/docs/en/guide/datasource/sql-server.md
+++ b/docs/docs/en/guide/datasource/sqlserver.md
@@ -16,7 +16,6 @@
| Database Name | Enter the database name of the SQLSERVER connection. |
| jdbc connect parameters | Parameter settings for SQLSERVER connection, in JSON format. |
-
## Native Supported
Yes, could use this datasource by default.
\ No newline at end of file
diff --git a/docs/docs/en/guide/parameter/global.md b/docs/docs/en/guide/parameter/global.md
index a61ccc30bb..bda615eea6 100644
--- a/docs/docs/en/guide/parameter/global.md
+++ b/docs/docs/en/guide/parameter/global.md
@@ -18,7 +18,7 @@ Create a shell task and enter `echo ${dt}` in the script content. In this case,
### Save the workflow and set global parameters
-You could follow this guide to set global parameter: On the workflow definition page, click the plus sign to the right of "Set Global", after filling in the variable name and value, then save it
+Set global parameter: On the workflow definition page, click the plus sign to the right of "Set Global". After filling in the variable name and value, save it.
![global-parameter02](../../../../img/new_ui/dev/parameter/global_parameter02.png)
diff --git a/docs/docs/en/guide/parameter/local.md b/docs/docs/en/guide/parameter/local.md
index f92fdac45f..29a377e8e5 100644
--- a/docs/docs/en/guide/parameter/local.md
+++ b/docs/docs/en/guide/parameter/local.md
@@ -18,18 +18,20 @@ Usage of local parameters is: at the task define page, click the '+' beside the
### Use Local Parameter by Custom Parameter
-This example shows how to use local parameters to print the current date. Create a Shell task and write a script with the content `echo ${dt}`. Click **custom parameter** in the configuration bar, and the configuration is as follows:
+This example shows how to use local parameters to print the current date.
+
+Create a Shell task and write a script with the content `echo ${dt}`. Click **custom parameter** in the configuration bar, and the configuration is as follows:
![local-parameter01](../../../../img/new_ui/dev/parameter/local_parameter01.png)
Parameters:
- dt: indicates the parameter name.
-- in: IN indicates that local parameters can only be used on the current node, and OUT indicates that local.parameters can be transmitted to the downstream.
+- IN: IN indicates that local parameters can only be used on the current node, and OUT indicates that local parameters can be transmitted to the downstream.
- DATE: indicates the DATE of the data type.
- $[YYYY-MM-DD] : indicates a built-in parameter derived from a user-defined format.
-Save the workflow and run it. View Shell task's log.
+Save the workflow and run it. View the log of the Shell task.
![local-parameter02](../../../../img/new_ui/dev/parameter/local_parameter02.png)
diff --git a/docs/docs/en/guide/parameter/priority.md b/docs/docs/en/guide/parameter/priority.md
index 843e25cc54..9088f96fd1 100644
--- a/docs/docs/en/guide/parameter/priority.md
+++ b/docs/docs/en/guide/parameter/priority.md
@@ -4,7 +4,9 @@ DolphinScheduler has three parameter types:
* [Global Parameter](global.md): parameters defined at the workflow define page.
* [Parameter Context](context.md): parameters passed by upstream task nodes.
-* [Local Parameter](local.md): parameters belong to its node, which is the parameters defined by the user in [Custom Parameters]. The user can define part of the parameters when creating workflow definitions.
+* [Local Parameter](local.md): parameters belong to its node, which is the parameters defined by the user in [Custom Parameters].
+
+The user can define part of the parameters when creating workflow definitions.
As there are multiple sources of the parameter value, it will raise parameter priority issues when the parameter name is the same. The priority of DolphinScheduler parameters from high to low is: `Local Parameter > Parameter Context > Global Parameter`.
diff --git a/docs/docs/en/guide/project/project-list.md b/docs/docs/en/guide/project/project-list.md
index 60899d10dd..96c046981a 100644
--- a/docs/docs/en/guide/project/project-list.md
+++ b/docs/docs/en/guide/project/project-list.md
@@ -2,25 +2,30 @@
This page describes details regarding Project screen in Apache DolphinScheduler. Here, you will see all the functions which can be handled in this screen. The following table explains commonly used terms in Apache DolphinScheduler:
-| Glossary | |
-| ------ | -------- |
-| DAG | Tasks in a workflow are assembled in form of Directed Acyclic Graph (DAG). A topological traversal is performed from nodes with zero degrees of entry until there are no subsequent nodes. |
-| Workflow Definition | Visualization formed by dragging task nodes and establishing task node associations (DAG). |
-| Workflow Instance | Instantiation of the workflow definition, which can be generated by manual start or scheduled scheduling. Each time the process definition runs, a workflow instance is generated. |
-| Workflow Relation | Shows dynamic status of all the workflows in a project. |
+| Glossary | Description |
+| ------ |---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| DAG | Tasks in a workflow are assembled in form of Directed Acyclic Graph (DAG). A topological traversal is performed from nodes with zero degrees of entry until there are no subsequent nodes. |
+| Workflow Definition | Visualization formed by dragging task nodes and establishing task node associations (DAG). |
+| Workflow Instance | Instantiation of the workflow definition, which can be generated by manual start or scheduled scheduling. Each time the process definition runs, a workflow instance is generated. |
+| Workflow Relation | Shows dynamic status of all the workflows in a project. |
| Task | Task is a discrete action in a Workflow. Apache DolphinScheduler supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT ( depends), and plans to support dynamic plug-in expansion, (SUB_PROCESS). It is also a separate process definition that can be started and executed separately. |
-| Task Instance | Instantiation of the task node in the process definition, which identifies the specific task execution status. |
+| Task Instance | Instantiation of the task node in the process definition, which identifies the specific task execution status. |
+
+## Project List
+
+The project screen shows a list of all the existing projects along with details like name, owner, workflow definition, process instance, and create and update time. This page also facilitates operations like create, edit, and delete project.
+
+![project-list](../../../../img/new_ui/dev/project/project-list.png)
## Create Project
- Click `Project Management` to enter the project management page, click the `Create Project` button, enter the project name, project description, and click "Submit" to create a new project.
-![project-list](../../../../img/new_ui/dev/project/project-list.png)
+![project-list](../../../../img/new_ui/dev/project/project-create.png)
-## Project Home
+## Project Overview
- Click the project name link on the project management page to enter the project home page, as shown in the figure below, the project home page contains the task status statistics, process status statistics, and workflow definition statistics of the project. The introduction for those metrics:
-
- Task status statistics: Within the specified time range, count the number of task instances status as successful submission, running, ready to pause, pause, ready to stop, stop, failure, success, need fault tolerance, kill and waiting threads
- Process status statistics: Within the specified time range, count the number of workflow instances status as submission success, running, ready to pause, pause, ready to stop, stop, failure, success, need fault tolerance, kill and waiting threads
- Workflow definition statistics: Count the workflow definitions created by this user and granted by the administrator
diff --git a/docs/docs/en/guide/project/task-instance.md b/docs/docs/en/guide/project/task-instance.md
index 32006ce741..2d7651732c 100644
--- a/docs/docs/en/guide/project/task-instance.md
+++ b/docs/docs/en/guide/project/task-instance.md
@@ -1,9 +1,13 @@
-## Task Instance
+# Task Instance
-- Click `Project Management -> Workflow -> Task Instance`. Enter the `Task Instance` page, as shown in the figure below, click workflow instance name, you can jump to the workflow instance DAG chart to view the task status.
+## View Task Instance
+
+Click `Project Management -> Workflow -> Task Instance` to enter the task instance page, as shown in the figure below, click the name of the workflow instance to jump to the DAG diagram of the workflow instance to view the task status.
![task-instance](../../../../img/new_ui/dev/project/task-instance.png)
-- View log:Click the `View Log` button in the operation column to view task execution log.
+## View Log
+
+Click the `View Log` button in the operation column to view the log of the task execution.
![task-log](../../../../img/new_ui/dev/project/task-log.png)
diff --git a/docs/docs/en/guide/project/workflow-definition.md b/docs/docs/en/guide/project/workflow-definition.md
index b718bb252f..5b1614f1ec 100644
--- a/docs/docs/en/guide/project/workflow-definition.md
+++ b/docs/docs/en/guide/project/workflow-definition.md
@@ -1,38 +1,48 @@
# Workflow Definition
+## Overview
+
+The Workflow Definition screen shows a list of existing workflows and utilities to create or import workflows.
+
+![workflow-dag](../../../../img/new_ui/dev/project/workflow-overview.png)
+
## Create workflow definition
-- Click `Project Management -> Workflow -> Workflow Definition`, enter the `Workflow Definition` page, and click the `Create Workflow` button to enter the **workflow DAG edit** page, as shown in the following figure:
+Click `Project Management -> Workflow -> Workflow Definition`, enter the `Workflow Definition` page, and click the `Create Workflow` button to enter the **workflow DAG edit** page, as shown in the following figure:
+
+![workflow-dag](../../../../img/new_ui/dev/project/workflow-dag.png)
- ![workflow-dag](../../../../img/new_ui/dev/project/workflow-dag.png)
+### Create Tasks to Workflow
-- Drag from the toolbar to the canvas, to add a shell task to the canvas, as shown in the figure below:
+Drag from the toolbar
to the canvas, to add a shell task to the canvas, as shown in the figure below:
- ![demo-shell-simple](../../../../img/tasks/demo/shell.jpg)
+![demo-shell-simple](../../../../img/tasks/demo/shell.jpg)
-- **Add parameter settings for shell task:**
+**Add parameter settings for shell task:**
-1. Fill in the `Node Name`, `Description` and `Script` fields;
-2. Check “`Normal`” for “`Run Flag`”. If “`Prohibit Execution`” is checked, the task will not execute when the workflow runs;
-3. Select `Task Priority`: when the number of worker threads is insufficient, high priority tasks will execute first in the execution queue, and tasks with the same priority will execute in the order of first in, first out;
-4. Timeout alarm (optional): check the timeout alarm, timeout failure, and fill in the "timeout period". When the task execution time exceeds **timeout period**, an alert email will send and the task timeout fails;
-5. Resources (optional). Resources are files create or upload in the `Resource Center -> File Management` page. For example, the file name is `test.sh`, and the command to call the resource in the script is `sh test.sh`;
-6. Customize parameters (optional);
+1. Fill in the `Node Name`, `Description` and `Script` fields.
+2. Check `Normal` for `Run Flag`. If `Prohibit Execution` is checked, the task will not execute when the workflow runs.
+3. Select `Task Priority`: when the number of worker threads is insufficient, high priority tasks will execute first in the execution queue, and tasks with the same priority will execute in the order of first in, first out.
+4. Timeout alarm (optional): check the timeout alarm, timeout failure, and fill in the "timeout period". When the task execution time exceeds **timeout period**, an alert email will send and the task timeout fails.
+5. Resources (optional). Resources are files create or upload in the `Resource Center -> File Management` page. For example, the file name is `test.sh`, and the command to call the resource in the script is `sh test.sh`.
+6. Customize parameters (optional).
7. Click the `Confirm Add` button to save the task settings.
-- **Set dependencies between tasks:** Click the plus sign on the right of the task node to connect the task; as shown in the figure below, task Node_B and task Node_C execute in parallel, When task Node_A finished execution, tasks Node_B and Node_C will execute simultaneously.
+### Set dependencies between tasks
+
+Click the plus sign on the right of the task node to connect the task; as shown in the figure below, task Node_B and task Node_C execute in parallel. When task Node_A finishes execution, tasks Node_B and Node_C will execute simultaneously.
- ![workflow-dependent](../../../../img/new_ui/dev/project/workflow-dependent.png)
+![workflow-dependent](../../../../img/new_ui/dev/project/workflow-dependent.png)
-- **Delete dependencies:** Click the "arrow" icon in the upper right corner
, select the connection line, and click the "Delete" icon in the upper right corner
, delete dependencies between tasks.
+**Delete dependencies:** Using your mouse to select the connection line, and click the "Delete" icon in the upper right corner
, delete dependencies between tasks.
- ![workflow-delete](../../../../img/new_ui/dev/project/workflow-delete.png)
+![workflow-delete](../../../../img/new_ui/dev/project/workflow-delete.png)
-- **Save workflow definition:** Click the `Save` button, and the "Set DAG chart name" window pops up, as shown in the figure below. Enter the workflow definition name, workflow definition description, and set global parameters (optional, refer to [global parameters](../parameter/global.md)), click the `Add` button to finish workflow definition creation.
+### Save workflow definition
- ![workflow-save](../../../../img/new_ui/dev/project/workflow-save.png)
+Click the `Save` button, and the "Set DAG chart name" window pops up, as shown in the figure below. Enter the workflow definition name, workflow definition description, and set global parameters (optional, refer to [global parameters](../parameter/global.md)), click the `Add` button to finish workflow definition creation.
-> For other types of tasks, please refer to [Task Node Type and Parameter Settings](#TaskParamers).
+![workflow-save](../../../../img/new_ui/dev/project/workflow-save.png)
## Workflow Definition Operation Function
@@ -40,13 +50,26 @@ Click `Project Management -> Workflow -> Workflow Definition` to enter the workf
![workflow-list](../../../../img/new_ui/dev/project/workflow-list.png)
+Workflow running parameter description:
+
+* **Failure strategy**: The strategy that determines how other parallel task nodes behave when a task node fails to execute. "Continue" means: after a task fails, other task nodes execute normally; "End" means: terminate all tasks being executed, and terminate the entire process.
+* **Notification strategy**: When the process ends, send process execution information notification emails according to the process status, including no status, success, failure, success or failure.
+* **Process priority**: the priority of process operation, divided into five levels: the highest (HIGHEST), high (HIGH), medium (MEDIUM), low (LOW), the lowest (LOWEST). When the number of master threads is insufficient, processes with higher levels will be executed first in the execution queue, and processes with the same priority will be executed in the order of first-in, first-out.
+* **Worker grouping**: This process can only be executed in the specified worker machine group. The default is Default, which can be executed on any worker.
+* **Notification Group**: When the selected notification policy applies, a timeout alarm triggers, or fault tolerance occurs, process information or emails will be sent to all members in the notification group.
+* **Recipient**: When the selected notification policy applies, a timeout alarm triggers, or fault tolerance occurs, process information or alarm emails will be sent to the recipient list.
+* **Cc**: When the selected notification policy applies, a timeout alarm triggers, or fault tolerance occurs, the process information or alarm emails will be copied to the Cc list.
+* **Startup parameters**: Set or override the value of global parameters when starting a new process instance.
+* **Complement**: There are 2 modes of serial complement and parallel complement. Serial complement: within the specified time range, perform complements in sequence from the start date to the end date, and generate N process instances in turn; parallel complement: within the specified time range, perform multiple complements at the same time, and generate N process instances at the same time.
+ * **Complement**: Execute the workflow definition of the specified date, you can select the time range of the supplement (currently only supports the supplement for consecutive days), for example, the data from May 1st to May 10th needs to be supplemented, as shown in the following figure:
+
The following are the operation functions of the workflow definition list:
-- **Edit:** Only "Offline" workflow definitions can be edited. Workflow DAG editing is the same as [Create Workflow Definition](#creatDag)
+- **Edit:** Only "Offline" workflow definitions can be edited. Workflow DAG editing is the same as [Create Workflow Definition](#create-workflow-definition)
- **Online:** When the workflow status is "Offline", used to make workflow online. Only the workflow in the "Online" state can run, but cannot edit.
- **Offline:** When the workflow status is "Online", used to make workflow offline. Only the workflow in the "Offline" state can be edited, but cannot run.
-- **Run:** Only workflow in the online state can run. See [2.3.3 Run Workflow](#run-the-workflow) for the operation steps.
-- **Timing:** Timing can only set to online workflows, and the system automatically schedules to run the workflow on time. The status after creating a timing setting is "offline", and the timing must set online on the timing management page to make effect. See [2.3.4 Workflow Timing](#workflow-timing) for timing operation steps.
+- **Run:** Only workflow in the online state can run. See [Run Workflow](#run-the-workflow) for the operation steps.
+- **Timing:** Timing can only set to online workflows, and the system automatically schedules to run the workflow on time. The status after creating a timing setting is "offline", and the timing must set online on the timing management page to make effect. See [Workflow Timing](#workflow-schedule) for timing operation steps.
- **Timing Management:** The timing management page can edit, online or offline and delete timing.
- **Delete:** Delete the workflow definition. In the same project, only the workflow definition created by yourself can be deleted, and the workflow definition of other users cannot be deleted. If you need to delete it, please contact the user who created it or the administrator.
- **Download:** Download workflow definition to local.
@@ -102,20 +125,20 @@ The following are the operation functions of the workflow definition list:
![workflow-task-run-config](../../../../img/new_ui/dev/project/workflow-task-run-config.png)
-## Workflow Timing
+## Workflow Schedule
-- Create timing: Click `Project Management -> Workflow-> Workflow Definition`, enter the workflow definition page, make the workflow online, click the "timing" button
, the timing parameter setting dialog box pops up, as shown in the figure below:
+- Create schedule: Click `Project Management -> Workflow-> Workflow Definition`, enter the workflow definition page, make the workflow online, click the "timing" button
, the timing parameter setting dialog box pops up, as shown in the figure below:
![workflow-time01](../../../../img/new_ui/dev/project/workflow-time01.png)
-- Choose the start and end time. In the time range, the workflow runs at regular intervals; If not in the time range, no regular workflow instances generate.
+- Select a start and end time. Within the start and end time range, the workflow is run regularly; outside the start and end time range, no timed workflow instance will be generated.
- Add a timing that execute 5 minutes once, as shown in the following figure:
![workflow-time02](../../../../img/new_ui/dev/project/workflow-time02.png)
- Failure strategy, notification strategy, process priority, worker group, notification group, recipient, and CC are the same as workflow running parameters.
- Click the "Create" button to create the timing. Now the timing status is "**Offline**" and the timing needs to be **Online** to make effect.
-- Timing online: Click the `Timing Management` button
, enter the timing management page, click the `online` button, the timing status will change to `online`, as shown in the below figure, the workflow makes effect regularly.
+- Schedule online: Click the `Timing Management` button
, enter the timing management page, click the `online` button, the timing status will change to `online`, as shown in the below figure, the workflow makes effect regularly.
![workflow-time03](../../../../img/new_ui/dev/project/workflow-time03.png)
diff --git a/docs/docs/en/guide/project/workflow-instance.md b/docs/docs/en/guide/project/workflow-instance.md
index 5f1686a97f..d9bffa239b 100644
--- a/docs/docs/en/guide/project/workflow-instance.md
+++ b/docs/docs/en/guide/project/workflow-instance.md
@@ -2,55 +2,57 @@
## View Workflow Instance
-- Click `Project Management -> Workflow -> Workflow Instance`, enter the Workflow Instance page, as shown in the figure below:
+Click `Project Management -> Workflow -> Workflow Instance`, enter the Workflow Instance page, as shown in the following figure:
![workflow-instance](../../../../img/new_ui/dev/project/workflow-instance.png)
-- Click the workflow name to enter the DAG view page, and check the task execution status, as shown in the figure below:
+Click the workflow name to enter the DAG view page, and check the task execution status, as shown in the following figure:
![instance-state](../../../../img/new_ui/dev/project/instance-state.png)
## View Task Log
-- Enter the workflow instance page, click the workflow name, enter the DAG view page, double-click the task node, as shown in the figure below:
+Enter the workflow instance page, click the workflow name, enter the DAG view page, double-click the task node, as shown in the following figure:
![instance-log01](../../../../img/new_ui/dev/project/instance-log01.png)
-- Click "View Log", a log window pops up, as shown in the figure below, you can also view the task log on the task instance page, refer to [Task View Log](./task-instance.md)
+Click "View Log" and a log window pops up, as shown in the figure below. You can also view the task log on the task instance page; refer to [Task View Log](./task-instance.md).
![instance-log02](../../../../img/new_ui/dev/project/instance-log02.png)
## View Task History
-- Click `Project Management -> Workflow -> Workflow Instance`, enter the workflow instance page, and click the workflow name to enter the workflow DAG page;
-- Double-click the task node, as shown in the figure below, click `View History` to jump to the task instance page, and display a list of task instances running by the workflow instance.
+Click `Project Management -> Workflow -> Workflow Instance` to enter the workflow instance page, click the workflow name to enter the workflow DAG page;
+
+Double-click the task node, click `View History` to jump to the task instance page, and display the list of task instances run by the workflow instance.
![instance-history](../../../../img/new_ui/dev/project/instance-history.png)
-## View Operation Parameters
+## View Running Parameters
+
+Click `Project Management -> Workflow -> Workflow Instance` to enter the workflow instance page, click the workflow name to enter the workflow DAG page;
-- Click `Project Management -> Workflow -> Workflow Instance`, enter the workflow instance page, and click the workflow name to enter the workflow DAG page;
-- Click the icon in the upper left corner
,View the startup parameters of the workflow instance; click the icon
,View the global and local parameters of the workflow instance, as shown in the following figure:
+Click the icon in the upper left corner
to view the startup parameters of the workflow instance; click the icon
to view the global parameters and local parameters of the workflow instance, as shown in the following figure:
![instance-parameter](../../../../img/new_ui/dev/project/instance-parameter.png)
## Workflow Instance Operation Function
-Click `Project Management -> Workflow -> Workflow Instance`, enter the workflow instance page, as shown in the figure below:
+Click `Project Management -> Workflow -> Workflow Instance`, enter the workflow instance page, as shown in the following figure:
![workflow-instance](../../../../img/new_ui/dev/project/workflow-instance.png)
-- **Edit:** only processes with success/failed/stop status can be edited. Click the "Edit" button or the workflow instance name to enter the DAG edit page. After the edit, click the "Save" button to confirm, as shown in the figure below. In the pop-up box, check "Whether to update the workflow definition", after saving, the information modified by the instance will be updated to the workflow definition; if not checked, the workflow definition would not be updated.
+- **Edit:** Only processes with success/failed/stop status can be edited. Click the "Edit" button or the workflow instance name to enter the DAG edit page. After the edit, click the "Save" button to confirm, as shown in the figure below. In the pop-up box, check "Whether to update the workflow definition", after saving, the information modified by the instance will be updated to the workflow definition; if not checked, the workflow definition would not be updated.
+
-