
[Feature-15146][dolphinscheduler-task-sqoop] add sqoop source/target type (#15146)

* task list: optimize sqoop node params

* security.ts: add alarm_instance params

* 1. add SqoopTask params
2. add alert plugin aliyun-voice

* add license header

* commit sqoop optimization

* pnpm-lock.yaml: supplement annotation

* remove irrelevant commit.

* Code specification optimization

* optimize sqoop task ui

* Merge Code

* add the license header to pnpm-lock.yaml

* format the code

* format the code

* Fix sqoop task echo error

---------

Co-authored-by: xujiaqiang <xujiaqiang@aimatech.com>
Co-authored-by: xujiaqiang <xujiaqiangwz@163.com>
Co-authored-by: David Zollo <dailidong66@gmail.com>
3.2.1-prepare
xujiaqiang committed 12 months ago via GitHub
commit 159179ac95
  1. 11
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopConstants.java
  2. 1
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskChannel.java
  3. 48
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskExecutionContext.java
  4. 10
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java
  5. 9
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/ISourceGenerator.java
  6. 9
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/ITargetGenerator.java
  7. 31
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/SqoopJobGenerator.java
  8. 159
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HanaSourceGenerator.java
  9. 18
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HdfsSourceGenerator.java
  10. 37
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HiveSourceGenerator.java
  11. 201
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MySQLSourceGenerator.java
  12. 157
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/OracleSourceGenerator.java
  13. 163
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/SqlServerSourceGenerator.java
  14. 128
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HanaTargetGenerator.java
  15. 128
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/OracleTargetGenerator.java
  16. 130
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/SqlServerTargetGenerator.java
  17. 38
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/SourceCommonParameter.java
  18. 99
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/SqoopParameters.java
  19. 38
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/TargetCommonParameter.java
  20. 126
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHanaParameter.java
  21. 4
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHdfsParameter.java
  22. 4
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHiveParameter.java
  23. 15
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceMysqlParameter.java
  24. 126
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceOracleParameter.java
  25. 126
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceSqlServerParameter.java
  26. 123
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHanaParameter.java
  27. 4
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHdfsParameter.java
  28. 4
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHiveParameter.java
  29. 16
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetMysqlParameter.java
  30. 123
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetOracleParameter.java
  31. 123
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetSqlServerParameter.java
  32. 57
      dolphinscheduler-ui/pnpm-lock.yaml
  33. 2
      dolphinscheduler-ui/src/locales/en_US/project.ts
  34. 2
      dolphinscheduler-ui/src/locales/zh_CN/project.ts
  35. 142
      dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-datasource.ts
  36. 561
      dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-source-type.ts
  37. 724
      dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-target-type.ts
  38. 132
      dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts
  39. 4
      dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sqoop.ts
  40. 98
      dolphinscheduler-ui/src/views/projects/task/components/node/use-task.ts

11
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopConstants.java

@@ -21,10 +21,11 @@ public final class SqoopConstants {
private SqoopConstants() {
}
public static final String FORMAT_S_S_S = "%s%s%s";
// sqoop general param
public static final String SQOOP = "sqoop";
public static final String SQOOP_MR_JOB_NAME = "mapred.job.name";
public static final Object SQOOP_EXPORT_RECORDS_PER_STATEMENT = "sqoop.export.records.per.statement";
public static final String SQOOP_PARALLELISM = "-m";
public static final String FIELDS_TERMINATED_BY = "--fields-terminated-by";
public static final String LINES_TERMINATED_BY = "--lines-terminated-by";
@@ -34,6 +35,7 @@ public final class SqoopConstants {
// sqoop db
public static final String DB_CONNECT = "--connect";
public static final String DRIVER = "--driver";
public static final String DB_USERNAME = "--username";
public static final String DB_PWD = "--password";
public static final String TABLE = "--table";
@@ -73,4 +75,11 @@ public final class SqoopConstants {
public static final String UPDATE_MODE = "--update-mode";
public static final String SQOOP_PASSWORD_REGEX = "(?<=(--password \")).+?(?=\")";
public static final String MYSQL = "MYSQL";
public static final String HIVE = "HIVE";
public static final String HDFS = "HDFS";
public static final String ORACLE = "ORACLE";
public static final String HANA = "HANA";
public static final String SQLSERVER = "SQLSERVER";
}
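For orientation, a minimal sketch of what the new FORMAT_S_S_S constant is for: composing "-D key=value" Hadoop properties without repeating the inline "%s%s%s" literal. The class and sample values below are illustrative; only the constant names come from this diff.

public class FormatSketch {

    private static final String FORMAT_S_S_S = "%s%s%s"; // mirrors SqoopConstants.FORMAT_S_S_S
    private static final String EQUAL_SIGN = "=";         // mirrors TaskConstants.EQUAL_SIGN

    public static void main(String[] args) {
        // prints: -D mapred.job.name=sqoop-demo
        System.out.println("-D " + String.format(FORMAT_S_S_S, "mapred.job.name", EQUAL_SIGN, "sqoop-demo"));
    }
}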

1
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskChannel.java

@@ -30,7 +30,6 @@ public class SqoopTaskChannel implements TaskChannel {
@Override
public void cancelApplication(boolean status) {
}
@Override

48
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskExecutionContext.java

@@ -21,39 +21,25 @@ import org.apache.dolphinscheduler.spi.enums.DbType;
import java.io.Serializable;
/**
* master/worker task transport
*/
/** master/worker task transport */
public class SqoopTaskExecutionContext implements Serializable {
/**
* dataSourceId
*/
/** dataSourceId */
private int dataSourceId;
/**
* sourcetype
*/
/** sourcetype */
private DbType sourcetype;
/**
* sourceConnectionParams
*/
/** sourceConnectionParams */
private String sourceConnectionParams;
/**
* dataTargetId
*/
/** dataTargetId */
private int dataTargetId;
/**
* targetType
*/
/** targetType */
private DbType targetType;
/**
* targetConnectionParams
*/
/** targetConnectionParams */
private String targetConnectionParams;
public int getDataSourceId() {
@@ -107,12 +93,20 @@ public class SqoopTaskExecutionContext implements Serializable {
@Override
public String toString() {
return "SqoopTaskExecutionContext{"
+ "dataSourceId=" + dataSourceId
+ ", sourcetype=" + sourcetype
+ ", sourceConnectionParams='" + sourceConnectionParams + '\''
+ ", dataTargetId=" + dataTargetId
+ ", targetType=" + targetType
+ ", targetConnectionParams='" + targetConnectionParams + '\''
+ "dataSourceId="
+ dataSourceId
+ ", sourcetype="
+ sourcetype
+ ", sourceConnectionParams='"
+ sourceConnectionParams
+ '\''
+ ", dataTargetId="
+ dataTargetId
+ ", targetType="
+ targetType
+ ", targetConnectionParams='"
+ targetConnectionParams
+ '\''
+ '}';
}
}

10
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java

@@ -20,6 +20,7 @@ package org.apache.dolphinscheduler.plugin.task.sqoop.generator;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.D;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.FORMAT_S_S_S;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants;
@@ -47,16 +48,21 @@ public class CommonGenerator {
.append(SPACE)
.append(sqoopParameters.getModelType());
// sqoop sqoop.export.records.per.statement
commonSb.append(SPACE).append(D).append(SPACE)
.append(String.format(FORMAT_S_S_S, SqoopConstants.SQOOP_EXPORT_RECORDS_PER_STATEMENT,
EQUAL_SIGN, 1));
// sqoop map-reduce job name
commonSb.append(SPACE).append(D).append(SPACE)
.append(String.format("%s%s%s", SqoopConstants.SQOOP_MR_JOB_NAME,
.append(String.format(FORMAT_S_S_S, SqoopConstants.SQOOP_MR_JOB_NAME,
EQUAL_SIGN, sqoopParameters.getJobName()));
// hadoop custom param
List<Property> hadoopCustomParams = sqoopParameters.getHadoopCustomParams();
if (CollectionUtils.isNotEmpty(hadoopCustomParams)) {
for (Property hadoopCustomParam : hadoopCustomParams) {
String hadoopCustomParamStr = String.format("%s%s%s", hadoopCustomParam.getProp(),
String hadoopCustomParamStr = String.format(FORMAT_S_S_S, hadoopCustomParam.getProp(),
EQUAL_SIGN, hadoopCustomParam.getValue());
commonSb.append(SPACE).append(D)
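A hedged sketch of the command prefix CommonGenerator assembles after this change, assuming a job named "etl-orders" and one custom Hadoop property (both invented):

public class CommonPrefixSketch {

    public static void main(String[] args) {
        String jobName = "etl-orders"; // stands in for sqoopParameters.getJobName()
        String prefix = "sqoop import"
                + " -D sqoop.export.records.per.statement=1" // new fixed property from this change
                + " -D mapred.job.name=" + jobName
                + " -D mapreduce.map.memory.mb=2048";        // an assumed hadoopCustomParams entry
        System.out.println(prefix);
    }
}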

9
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/ISourceGenerator.java

@@ -20,17 +20,16 @@ package org.apache.dolphinscheduler.plugin.task.sqoop.generator;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
/**
* Source Generator Interface
*/
/** Source Generator Interface */
public interface ISourceGenerator {
/**
* generate the source script
*
* @param sqoopParameters sqoopParameters
* @param sqoopParameters sqoopParameters
* @param sqoopTaskExecutionContext sqoopTaskExecutionContext
* @return source script
*/
String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext);
String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext);
}

9
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/ITargetGenerator.java

@@ -20,17 +20,16 @@ package org.apache.dolphinscheduler.plugin.task.sqoop.generator;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
/**
* Target Generator Interface
*/
/** Target Generator Interface */
public interface ITargetGenerator {
/**
* generate the target script
*
* @param sqoopParameters sqoopParameters
* @param sqoopParameters sqoopParameters
* @param sqoopTaskExecutionContext sqoopTaskExecutionContext
* @return target script
*/
String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext);
String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext);
}

31
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/SqoopJobGenerator.java

@@ -17,14 +17,27 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.generator;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HANA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HDFS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MYSQL;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.ORACLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.SQLSERVER;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopJobType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.HanaSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.HdfsSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.HiveSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.MySQLSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.OracleSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.SqlServerSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.HanaTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.HdfsTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.HiveTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.MySQLTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.OracleTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.SqlServerTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
/**
@@ -32,10 +45,6 @@ import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
*/
public class SqoopJobGenerator {
private static final String MYSQL = "MYSQL";
private static final String HIVE = "HIVE";
private static final String HDFS = "HDFS";
/**
* target script generator
*/
@@ -61,7 +70,7 @@
/**
* get the final sqoop scripts
*
* @param sqoopParameters sqoop params
* @param sqoopParameters sqoop params
* @param sqoopTaskExecutionContext
* @return sqoop scripts
*/
@@ -100,6 +109,12 @@
return new HiveSourceGenerator();
case HDFS:
return new HdfsSourceGenerator();
case ORACLE:
return new OracleSourceGenerator();
case HANA:
return new HanaSourceGenerator();
case SQLSERVER:
return new SqlServerSourceGenerator();
default:
return null;
}
@@ -119,6 +134,12 @@
return new HiveTargetGenerator();
case HDFS:
return new HdfsTargetGenerator();
case ORACLE:
return new OracleTargetGenerator();
case HANA:
return new HanaTargetGenerator();
case SQLSERVER:
return new SqlServerTargetGenerator();
default:
return null;
}
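To make the dispatch concrete, here is a standalone sketch mirroring the extended switch. The real methods live in SqoopJobGenerator, are private, and return ISourceGenerator instances, so the strings below are stand-ins:

public class DispatchSketch {

    // standalone mirror of SqoopJobGenerator#createSourceGenerator after this change
    static String sourceGeneratorFor(String sourceType) {
        switch (sourceType) {
            case "MYSQL":
                return "MySQLSourceGenerator";
            case "HIVE":
                return "HiveSourceGenerator";
            case "HDFS":
                return "HdfsSourceGenerator";
            case "ORACLE":    // new
                return "OracleSourceGenerator";
            case "HANA":      // new
                return "HanaSourceGenerator";
            case "SQLSERVER": // new
                return "SqlServerSourceGenerator";
            default:
                return null;  // unsupported type: caller produces no source script
        }
    }

    public static void main(String[] args) {
        System.out.println(sourceGeneratorFor("HANA")); // HanaSourceGenerator
    }
}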

159
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HanaSourceGenerator.java

@@ -0,0 +1,159 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DRIVER;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_JAVA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WHERE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WITHOUT_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopQueryType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ISourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHanaParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* hana source generator
*/
public class HanaSourceGenerator implements ISourceGenerator {
private static final Logger logger = LoggerFactory.getLogger(HanaSourceGenerator.class);
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder hanaSourceSb = new StringBuilder();
try {
SourceHanaParameter sourceHanaParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHanaParameter.class);
if (null == sourceHanaParameter)
return hanaSourceSb.toString();
BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
sqoopTaskExecutionContext.getSourcetype(),
sqoopTaskExecutionContext.getSourceConnectionParams());
if (null == baseDataSource)
return hanaSourceSb.toString();
hanaSourceSb.append(SPACE).append(DB_CONNECT)
.append(SPACE).append(DOUBLE_QUOTES)
.append(DataSourceUtils.getJdbcUrl(DbType.HANA, baseDataSource)).append(DOUBLE_QUOTES)
.append(SPACE).append(DRIVER)
.append(SPACE).append(DataSourceUtils.getDatasourceDriver(DbType.HANA))
.append(SPACE).append(DB_USERNAME)
.append(SPACE).append(baseDataSource.getUser())
.append(SPACE).append(DB_PWD)
.append(SPACE).append(DOUBLE_QUOTES)
.append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES);
// sqoop table & sql query
if (sourceHanaParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
if (StringUtils.isNotEmpty(sourceHanaParameter.getSrcTable())) {
hanaSourceSb.append(SPACE).append(TABLE)
.append(SPACE).append(sourceHanaParameter.getSrcTable());
}
if (StringUtils.isNotEmpty(sourceHanaParameter.getSrcColumns())) {
hanaSourceSb.append(SPACE).append(COLUMNS)
.append(SPACE).append(sourceHanaParameter.getSrcColumns());
}
} else if (sourceHanaParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
&& StringUtils.isNotEmpty(sourceHanaParameter.getSrcQuerySql())) {
String srcQuery = sourceHanaParameter.getSrcQuerySql();
hanaSourceSb.append(SPACE).append(QUERY)
.append(SPACE).append(DOUBLE_QUOTES).append(srcQuery);
if (srcQuery.toLowerCase().contains(QUERY_WHERE)) {
hanaSourceSb.append(SPACE).append(QUERY_CONDITION).append(DOUBLE_QUOTES);
} else {
hanaSourceSb.append(SPACE).append(QUERY_WITHOUT_CONDITION).append(DOUBLE_QUOTES);
}
}
// sqoop hive map column
buildColumnMapToHive(hanaSourceSb, sourceHanaParameter);
// sqoop map column java
buildColumnMapToJava(hanaSourceSb, sourceHanaParameter);
} catch (Exception e) {
logger.error(String.format("Sqoop task hana source params build failed: [%s]", e.getMessage()));
}
return hanaSourceSb.toString();
}
private static void buildColumnMapToJava(StringBuilder hanaSourceSb, SourceHanaParameter sourceHanaParameter) {
List<Property> mapColumnJava = sourceHanaParameter.getMapColumnJava();
if (null != mapColumnJava && !mapColumnJava.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnJava) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
hanaSourceSb.append(SPACE).append(MAP_COLUMN_JAVA)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
private static void buildColumnMapToHive(StringBuilder hanaSourceSb, SourceHanaParameter sourceHanaParameter) {
List<Property> mapColumnHive = sourceHanaParameter.getMapColumnHive();
if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnHive) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
hanaSourceSb.append(SPACE).append(MAP_COLUMN_HIVE)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
}
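For reference, the rough fragment this generator emits for a table-mode (FORM) HANA source, assuming the SAP JDBC driver resolves to com.sap.db.jdbc.Driver; all connection values below are invented:

public class HanaSourceFragmentSketch {

    public static void main(String[] args) {
        // Host, credentials, table, and columns are invented; the flag layout
        // follows the builder chain above.
        String fragment = " --connect \"jdbc:sap://hana-host:30015\""
                + " --driver com.sap.db.jdbc.Driver"
                + " --username etl_user"
                + " --password \"******\""
                + " --table ORDERS"
                + " --columns ID,AMOUNT";
        System.out.println(fragment);
    }
}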

18
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HdfsSourceGenerator.java

@@ -30,29 +30,31 @@ import org.apache.commons.lang3.StringUtils;
import lombok.extern.slf4j.Slf4j;
/**
* hdfs source generator
*/
/** hdfs source generator */
@Slf4j
public class HdfsSourceGenerator implements ISourceGenerator {
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
public String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder hdfsSourceSb = new StringBuilder();
try {
SourceHdfsParameter sourceHdfsParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHdfsParameter.class);
JSONUtils.parseObject(
sqoopParameters.getSourceParams(), SourceHdfsParameter.class);
if (null != sourceHdfsParameter) {
if (StringUtils.isNotEmpty(sourceHdfsParameter.getExportDir())) {
hdfsSourceSb.append(SPACE).append(HDFS_EXPORT_DIR)
.append(SPACE).append(sourceHdfsParameter.getExportDir());
hdfsSourceSb
.append(SPACE)
.append(HDFS_EXPORT_DIR)
.append(SPACE)
.append(sourceHdfsParameter.getExportDir());
} else {
throw new IllegalArgumentException("Sqoop hdfs export dir is null");
}
}
} catch (Exception e) {
log.error(String.format("Sqoop hdfs source parmas build failed: [%s]", e.getMessage()));

37
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HiveSourceGenerator.java

@@ -33,38 +33,49 @@ import org.apache.commons.lang3.StringUtils;
import lombok.extern.slf4j.Slf4j;
/**
* hive source generator
*/
/** hive source generator */
@Slf4j
public class HiveSourceGenerator implements ISourceGenerator {
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
public String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder hiveSourceSb = new StringBuilder();
try {
SourceHiveParameter sourceHiveParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHiveParameter.class);
JSONUtils.parseObject(
sqoopParameters.getSourceParams(), SourceHiveParameter.class);
if (null != sourceHiveParameter) {
if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveDatabase())) {
hiveSourceSb.append(SPACE).append(HCATALOG_DATABASE)
.append(SPACE).append(sourceHiveParameter.getHiveDatabase());
hiveSourceSb
.append(SPACE)
.append(HCATALOG_DATABASE)
.append(SPACE)
.append(sourceHiveParameter.getHiveDatabase());
}
if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveTable())) {
hiveSourceSb.append(SPACE).append(HCATALOG_TABLE)
.append(SPACE).append(sourceHiveParameter.getHiveTable());
hiveSourceSb
.append(SPACE)
.append(HCATALOG_TABLE)
.append(SPACE)
.append(sourceHiveParameter.getHiveTable());
}
if (StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionKey())
&& StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionValue())) {
hiveSourceSb.append(SPACE).append(HCATALOG_PARTITION_KEYS)
.append(SPACE).append(sourceHiveParameter.getHivePartitionKey())
.append(SPACE).append(HCATALOG_PARTITION_VALUES)
.append(SPACE).append(sourceHiveParameter.getHivePartitionValue());
hiveSourceSb
.append(SPACE)
.append(HCATALOG_PARTITION_KEYS)
.append(SPACE)
.append(sourceHiveParameter.getHivePartitionKey())
.append(SPACE)
.append(HCATALOG_PARTITION_VALUES)
.append(SPACE)
.append(sourceHiveParameter.getHivePartitionValue());
}
}
} catch (Exception e) {

201
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MySQLSourceGenerator.java

@@ -59,91 +59,142 @@ import lombok.extern.slf4j.Slf4j;
public class MySQLSourceGenerator implements ISourceGenerator {
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
public String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder mysqlSourceSb = new StringBuilder();
try {
SourceMysqlParameter sourceMysqlParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class);
if (null != sourceMysqlParameter) {
BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
sqoopTaskExecutionContext.getSourcetype(),
sqoopTaskExecutionContext.getSourceConnectionParams());
if (null != baseDataSource) {
mysqlSourceSb.append(SPACE).append(DB_CONNECT)
.append(SPACE).append(DOUBLE_QUOTES)
.append(DataSourceUtils.getJdbcUrl(DbType.MYSQL, baseDataSource)).append(DOUBLE_QUOTES)
.append(SPACE).append(DB_USERNAME)
.append(SPACE).append(baseDataSource.getUser())
.append(SPACE).append(DB_PWD)
.append(SPACE).append(DOUBLE_QUOTES)
.append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES);
// sqoop table & sql query
if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
if (StringUtils.isNotEmpty(sourceMysqlParameter.getSrcTable())) {
mysqlSourceSb.append(SPACE).append(TABLE)
.append(SPACE).append(sourceMysqlParameter.getSrcTable());
}
if (sourceMysqlParameter.getSrcColumnType() == SqoopColumnType.CUSTOMIZE_COLUMNS.getCode()
&& StringUtils.isNotEmpty(sourceMysqlParameter.getSrcColumns())) {
mysqlSourceSb.append(SPACE).append(COLUMNS)
.append(SPACE).append(sourceMysqlParameter.getSrcColumns());
}
} else if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
&& StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())) {
String srcQuery = sourceMysqlParameter.getSrcQuerySql();
mysqlSourceSb.append(SPACE).append(QUERY)
.append(SPACE).append(DOUBLE_QUOTES).append(srcQuery);
if (srcQuery.toLowerCase().contains(QUERY_WHERE)) {
mysqlSourceSb.append(SPACE).append(QUERY_CONDITION).append(DOUBLE_QUOTES);
} else {
mysqlSourceSb.append(SPACE).append(QUERY_WITHOUT_CONDITION).append(DOUBLE_QUOTES);
}
}
// sqoop hive map column
List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive();
if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnHive) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
mysqlSourceSb.append(SPACE).append(MAP_COLUMN_HIVE)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
// sqoop map column java
List<Property> mapColumnJava = sourceMysqlParameter.getMapColumnJava();
if (null != mapColumnJava && !mapColumnJava.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnJava) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
mysqlSourceSb.append(SPACE).append(MAP_COLUMN_JAVA)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
JSONUtils.parseObject(
sqoopParameters.getSourceParams(), SourceMysqlParameter.class);
if (null == sourceMysqlParameter)
return mysqlSourceSb.toString();
BaseConnectionParam baseDataSource =
(BaseConnectionParam) DataSourceUtils.buildConnectionParams(
sqoopTaskExecutionContext.getSourcetype(),
sqoopTaskExecutionContext.getSourceConnectionParams());
if (null == baseDataSource)
return mysqlSourceSb.toString();
mysqlSourceSb
.append(SPACE)
.append(DB_CONNECT)
.append(SPACE)
.append(DOUBLE_QUOTES)
.append(DataSourceUtils.getJdbcUrl(DbType.MYSQL, baseDataSource))
.append(DOUBLE_QUOTES)
.append(SPACE)
.append(DB_USERNAME)
.append(SPACE)
.append(baseDataSource.getUser())
.append(SPACE)
.append(DB_PWD)
.append(SPACE)
.append(DOUBLE_QUOTES)
.append(decodePassword(baseDataSource.getPassword()))
.append(DOUBLE_QUOTES);
// sqoop table & sql query
if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
if (StringUtils.isNotEmpty(sourceMysqlParameter.getSrcTable())) {
mysqlSourceSb
.append(SPACE)
.append(TABLE)
.append(SPACE)
.append(sourceMysqlParameter.getSrcTable());
}
if (sourceMysqlParameter.getSrcColumnType() == SqoopColumnType.CUSTOMIZE_COLUMNS.getCode()
&& StringUtils.isNotEmpty(sourceMysqlParameter.getSrcColumns())) {
mysqlSourceSb
.append(SPACE)
.append(COLUMNS)
.append(SPACE)
.append(sourceMysqlParameter.getSrcColumns());
}
} else if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
&& StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())) {
String srcQuery = sourceMysqlParameter.getSrcQuerySql();
mysqlSourceSb
.append(SPACE)
.append(QUERY)
.append(SPACE)
.append(DOUBLE_QUOTES)
.append(srcQuery);
if (srcQuery.toLowerCase().contains(QUERY_WHERE)) {
mysqlSourceSb
.append(SPACE)
.append(QUERY_CONDITION)
.append(DOUBLE_QUOTES);
} else {
mysqlSourceSb
.append(SPACE)
.append(QUERY_WITHOUT_CONDITION)
.append(DOUBLE_QUOTES);
}
}
// sqoop hive map column
buildColumnMapToHive(mysqlSourceSb, sourceMysqlParameter);
// sqoop map column java
buildColumnMapToJava(mysqlSourceSb, sourceMysqlParameter);
} catch (Exception e) {
log.error(String.format("Sqoop task mysql source params build failed: [%s]", e.getMessage()));
log.error(
String.format(
"Sqoop task mysql source params build failed: [%s]", e.getMessage()));
}
return mysqlSourceSb.toString();
}
private static void buildColumnMapToHive(StringBuilder mysqlSourceSb, SourceMysqlParameter sourceMysqlParameter) {
List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive();
if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnHive) {
columnMap
.append(item.getProp())
.append(EQUAL_SIGN)
.append(item.getValue())
.append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
mysqlSourceSb
.append(SPACE)
.append(MAP_COLUMN_HIVE)
.append(SPACE)
.append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
private static void buildColumnMapToJava(StringBuilder mysqlSourceSb, SourceMysqlParameter sourceMysqlParameter) {
List<Property> mapColumnJava = sourceMysqlParameter.getMapColumnJava();
if (null != mapColumnJava && !mapColumnJava.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnJava) {
columnMap
.append(item.getProp())
.append(EQUAL_SIGN)
.append(item.getValue())
.append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
mysqlSourceSb
.append(SPACE)
.append(MAP_COLUMN_JAVA)
.append(SPACE)
.append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
}
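The refactor extracts the duplicated column-map loops into buildColumnMapToHive/buildColumnMapToJava helpers. A small sketch of the string they produce, using a Map in place of the List<Property> input:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.StringJoiner;

public class ColumnMapSketch {

    public static void main(String[] args) {
        Map<String, String> mapColumnJava = new LinkedHashMap<>(); // stands in for List<Property>
        mapColumnJava.put("id", "Integer");
        mapColumnJava.put("created_at", "String");

        StringJoiner joined = new StringJoiner(","); // same result as the loop plus trailing-comma trim
        mapColumnJava.forEach((prop, value) -> joined.add(prop + "=" + value));

        // prints: " --map-column-java id=Integer,created_at=String"
        System.out.println(" --map-column-java " + joined);
    }
}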

157
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/OracleSourceGenerator.java

@@ -0,0 +1,157 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_JAVA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WHERE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WITHOUT_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopQueryType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ISourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceOracleParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* oracle source generator
*/
public class OracleSourceGenerator implements ISourceGenerator {
private static final Logger logger = LoggerFactory.getLogger(OracleSourceGenerator.class);
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder oracleSourceSb = new StringBuilder();
try {
SourceOracleParameter sourceOracleParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceOracleParameter.class);
if (null == sourceOracleParameter)
return oracleSourceSb.toString();
BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
sqoopTaskExecutionContext.getSourcetype(),
sqoopTaskExecutionContext.getSourceConnectionParams());
if (null == baseDataSource)
return oracleSourceSb.toString();
oracleSourceSb.append(SPACE).append(DB_CONNECT)
.append(SPACE).append(DOUBLE_QUOTES)
.append(DataSourceUtils.getJdbcUrl(DbType.ORACLE, baseDataSource)).append(DOUBLE_QUOTES)
.append(SPACE).append(DB_USERNAME)
.append(SPACE).append(baseDataSource.getUser())
.append(SPACE).append(DB_PWD)
.append(SPACE).append(DOUBLE_QUOTES)
.append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES);
// sqoop table & sql query
if (sourceOracleParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
if (StringUtils.isNotEmpty(sourceOracleParameter.getSrcTable())) {
oracleSourceSb.append(SPACE).append(TABLE)
.append(SPACE).append(sourceOracleParameter.getSrcTable());
}
if (StringUtils.isNotEmpty(sourceOracleParameter.getSrcColumns())) {
oracleSourceSb.append(SPACE).append(COLUMNS)
.append(SPACE).append(sourceOracleParameter.getSrcColumns());
}
} else if (sourceOracleParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
&& StringUtils.isNotEmpty(sourceOracleParameter.getSrcQuerySql())) {
String srcQuery = sourceOracleParameter.getSrcQuerySql();
oracleSourceSb.append(SPACE).append(QUERY)
.append(SPACE).append(DOUBLE_QUOTES).append(srcQuery);
if (srcQuery.toLowerCase().contains(QUERY_WHERE)) {
oracleSourceSb.append(SPACE).append(QUERY_CONDITION).append(DOUBLE_QUOTES);
} else {
oracleSourceSb.append(SPACE).append(QUERY_WITHOUT_CONDITION).append(DOUBLE_QUOTES);
}
}
// sqoop hive map column
buildColumnMapToHive(oracleSourceSb, sourceOracleParameter);
// sqoop map column java
buildColumnMapToJava(oracleSourceSb, sourceOracleParameter);
} catch (Exception e) {
logger.error(String.format("Sqoop task oracle source params build failed: [%s]", e.getMessage()));
}
return oracleSourceSb.toString();
}
private static void buildColumnMapToJava(StringBuilder oracleSourceSb,
SourceOracleParameter sourceOracleParameter) {
List<Property> mapColumnJava = sourceOracleParameter.getMapColumnJava();
if (null != mapColumnJava && !mapColumnJava.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnJava) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
oracleSourceSb.append(SPACE).append(MAP_COLUMN_JAVA)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
private static void buildColumnMapToHive(StringBuilder oracleSourceSb,
SourceOracleParameter sourceOracleParameter) {
List<Property> mapColumnHive = sourceOracleParameter.getMapColumnHive();
if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnHive) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
oracleSourceSb.append(SPACE).append(MAP_COLUMN_HIVE)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
}
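The query-type branch is worth a note: Sqoop free-form queries must carry a $CONDITIONS placeholder for split generation, so the generator appends it after an existing WHERE or introduces a WHERE otherwise. A sketch with invented queries:

public class QueryConditionSketch {

    public static void main(String[] args) {
        // User SQL already has a WHERE: the condition is AND-ed on.
        String withWhere = " --query \"SELECT * FROM T WHERE DT='2023-12-01' AND $CONDITIONS\"";
        // No WHERE in the user SQL: a WHERE clause is added.
        String withoutWhere = " --query \"SELECT * FROM T WHERE $CONDITIONS\"";
        System.out.println(withWhere);
        System.out.println(withoutWhere);
    }
}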

163
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/SqlServerSourceGenerator.java

@@ -0,0 +1,163 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DRIVER;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_JAVA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WHERE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WITHOUT_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopQueryType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ISourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceSqlServerParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* sqlServer source generator
*/
public class SqlServerSourceGenerator implements ISourceGenerator {
private static final Logger logger = LoggerFactory.getLogger(SqlServerSourceGenerator.class);
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder sqlServerSourceSb = new StringBuilder();
try {
SourceSqlServerParameter sourceSqlServerParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceSqlServerParameter.class);
if (null == sourceSqlServerParameter)
return sqlServerSourceSb.toString();
BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
sqoopTaskExecutionContext.getSourcetype(),
sqoopTaskExecutionContext.getSourceConnectionParams());
if (null == baseDataSource)
return sqlServerSourceSb.toString();
sqlServerSourceSb.append(SPACE).append(DB_CONNECT)
.append(SPACE).append(DOUBLE_QUOTES)
.append(DataSourceUtils.getJdbcUrl(DbType.SQLSERVER, baseDataSource)).append(DOUBLE_QUOTES)
.append(SPACE).append(DRIVER)
.append(SPACE).append(DataSourceUtils.getDatasourceDriver(DbType.SQLSERVER))
.append(SPACE).append(DB_USERNAME)
.append(SPACE).append(baseDataSource.getUser())
.append(SPACE).append(DB_PWD)
.append(SPACE).append(DOUBLE_QUOTES)
.append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES);
// sqoop table & sql query
if (sourceSqlServerParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
if (StringUtils.isNotEmpty(sourceSqlServerParameter.getSrcTable())) {
sqlServerSourceSb.append(SPACE).append(TABLE)
.append(SPACE).append(sourceSqlServerParameter.getSrcTable());
}
if (StringUtils.isNotEmpty(sourceSqlServerParameter.getSrcColumns())) {
sqlServerSourceSb.append(SPACE).append(COLUMNS)
.append(SPACE).append(sourceSqlServerParameter.getSrcColumns());
}
} else if (sourceSqlServerParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
&& StringUtils.isNotEmpty(sourceSqlServerParameter.getSrcQuerySql())) {
String srcQuery = sourceSqlServerParameter.getSrcQuerySql();
sqlServerSourceSb.append(SPACE).append(QUERY)
.append(SPACE).append(DOUBLE_QUOTES).append(srcQuery);
if (srcQuery.toLowerCase().contains(QUERY_WHERE)) {
sqlServerSourceSb.append(SPACE).append(QUERY_CONDITION).append(DOUBLE_QUOTES);
} else {
sqlServerSourceSb.append(SPACE).append(QUERY_WITHOUT_CONDITION).append(DOUBLE_QUOTES);
}
}
// sqoop hive map column
buildColumnMapToHive(sqlServerSourceSb, sourceSqlServerParameter);
// sqoop map column java
buildColumnMapToJava(sqlServerSourceSb, sourceSqlServerParameter);
} catch (Exception e) {
logger.error(String.format("Sqoop task sqlServer source params build failed: [%s]", e.getMessage()));
}
return sqlServerSourceSb.toString();
}
private static void buildColumnMapToHive(StringBuilder sqlServerSourceSb,
SourceSqlServerParameter sourceSqlServerParameter) {
List<Property> mapColumnHive = sourceSqlServerParameter.getMapColumnHive();
if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnHive) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
sqlServerSourceSb.append(SPACE).append(MAP_COLUMN_HIVE)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
private static void buildColumnMapToJava(StringBuilder sqlServerSourceSb,
SourceSqlServerParameter sourceSqlServerParameter) {
List<Property> mapColumnJava = sourceSqlServerParameter.getMapColumnJava();
if (null != mapColumnJava && !mapColumnJava.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnJava) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
sqlServerSourceSb.append(SPACE).append(MAP_COLUMN_JAVA)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
}

128
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HanaTargetGenerator.java

@@ -0,0 +1,128 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SINGLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DRIVER;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.FIELDS_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.LINES_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_KEY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_MODE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ITargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHanaParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* hana target generator
*/
public class HanaTargetGenerator implements ITargetGenerator {
private static final Logger logger = LoggerFactory.getLogger(HanaTargetGenerator.class);
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder hanaTargetSb = new StringBuilder();
try {
TargetHanaParameter targetHanaParameter =
JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetHanaParameter.class);
if (null == targetHanaParameter || targetHanaParameter.getTargetDatasource() == 0)
return hanaTargetSb.toString();
// get datasource
BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
sqoopTaskExecutionContext.getTargetType(),
sqoopTaskExecutionContext.getTargetConnectionParams());
if (null == baseDataSource) {
return hanaTargetSb.toString();
}
hanaTargetSb.append(SPACE).append(DB_CONNECT)
.append(SPACE).append(DOUBLE_QUOTES)
.append(DataSourceUtils.getJdbcUrl(DbType.HANA, baseDataSource)).append(DOUBLE_QUOTES)
.append(SPACE).append(DRIVER)
.append(SPACE).append(DataSourceUtils.getDatasourceDriver(DbType.HANA))
.append(SPACE).append(DB_USERNAME)
.append(SPACE).append(baseDataSource.getUser())
.append(SPACE).append(DB_PWD)
.append(SPACE).append(DOUBLE_QUOTES)
.append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES)
.append(SPACE).append(TABLE)
.append(SPACE).append(targetHanaParameter.getTargetTable());
if (StringUtils.isNotEmpty(targetHanaParameter.getTargetColumns())) {
hanaTargetSb.append(SPACE).append(COLUMNS)
.append(SPACE).append(targetHanaParameter.getTargetColumns());
}
if (StringUtils.isNotEmpty(targetHanaParameter.getFieldsTerminated())) {
hanaTargetSb.append(SPACE).append(FIELDS_TERMINATED_BY);
if (targetHanaParameter.getFieldsTerminated().contains("'")) {
hanaTargetSb.append(SPACE).append(targetHanaParameter.getFieldsTerminated());
} else {
hanaTargetSb.append(SPACE).append(SINGLE_QUOTES)
.append(targetHanaParameter.getFieldsTerminated()).append(SINGLE_QUOTES);
}
}
if (StringUtils.isNotEmpty(targetHanaParameter.getLinesTerminated())) {
hanaTargetSb.append(SPACE).append(LINES_TERMINATED_BY);
if (targetHanaParameter.getLinesTerminated().contains(SINGLE_QUOTES)) {
hanaTargetSb.append(SPACE).append(targetHanaParameter.getLinesTerminated());
} else {
hanaTargetSb.append(SPACE).append(SINGLE_QUOTES)
.append(targetHanaParameter.getLinesTerminated()).append(SINGLE_QUOTES);
}
}
if (targetHanaParameter.getIsUpdate()
&& StringUtils.isNotEmpty(targetHanaParameter.getTargetUpdateKey())
&& StringUtils.isNotEmpty(targetHanaParameter.getTargetUpdateMode())) {
hanaTargetSb.append(SPACE).append(UPDATE_KEY)
.append(SPACE).append(targetHanaParameter.getTargetUpdateKey())
.append(SPACE).append(UPDATE_MODE)
.append(SPACE).append(targetHanaParameter.getTargetUpdateMode());
}
} catch (Exception e) {
logger.error(String.format("Sqoop hana target params build failed: [%s]", e.getMessage()));
}
return hanaTargetSb.toString();
}
}
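And the rough export fragment this target generator emits when isUpdate is enabled; the connection values and update key are invented, and the flags mirror the builder chain above:

public class HanaTargetFragmentSketch {

    public static void main(String[] args) {
        String fragment = " --connect \"jdbc:sap://hana-host:30015\""
                + " --driver com.sap.db.jdbc.Driver"
                + " --username etl_user --password \"******\""
                + " --table ORDERS"
                + " --fields-terminated-by ','"
                + " --lines-terminated-by '\\n'"
                + " --update-key ID --update-mode allowinsert"; // allowinsert = upsert behaviour
        System.out.println(fragment);
    }
}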

128
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/OracleTargetGenerator.java

@@ -0,0 +1,128 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SINGLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.FIELDS_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.LINES_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_KEY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_MODE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ITargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetOracleParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* oracle target generator
*/
public class OracleTargetGenerator implements ITargetGenerator {
private static final Logger logger = LoggerFactory.getLogger(OracleTargetGenerator.class);
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder oracleTargetSb = new StringBuilder();
try {
TargetOracleParameter targetOracleParameter =
JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetOracleParameter.class);
if (null == targetOracleParameter || targetOracleParameter.getTargetDatasource() == 0) {
return oracleTargetSb.toString();
}
// get datasource
BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
sqoopTaskExecutionContext.getTargetType(),
sqoopTaskExecutionContext.getTargetConnectionParams());
if (null == baseDataSource) {
return oracleTargetSb.toString();
}
oracleTargetSb.append(SPACE).append(DB_CONNECT)
.append(SPACE).append(DOUBLE_QUOTES)
.append(DataSourceUtils.getJdbcUrl(DbType.ORACLE, baseDataSource)).append(DOUBLE_QUOTES)
.append(SPACE).append(DB_USERNAME)
.append(SPACE).append(baseDataSource.getUser())
.append(SPACE).append(DB_PWD)
.append(SPACE).append(DOUBLE_QUOTES)
.append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES)
.append(SPACE).append(TABLE)
.append(SPACE).append(targetOracleParameter.getTargetTable());
if (StringUtils.isNotEmpty(targetOracleParameter.getTargetColumns())) {
oracleTargetSb.append(SPACE).append(COLUMNS)
.append(SPACE).append(targetOracleParameter.getTargetColumns());
}
if (StringUtils.isNotEmpty(targetOracleParameter.getFieldsTerminated())) {
oracleTargetSb.append(SPACE).append(FIELDS_TERMINATED_BY);
if (targetOracleParameter.getFieldsTerminated().contains("'")) {
oracleTargetSb.append(SPACE).append(targetOracleParameter.getFieldsTerminated());
} else {
oracleTargetSb.append(SPACE).append(SINGLE_QUOTES)
.append(targetOracleParameter.getFieldsTerminated()).append(SINGLE_QUOTES);
}
}
if (StringUtils.isNotEmpty(targetOracleParameter.getLinesTerminated())) {
oracleTargetSb.append(SPACE).append(LINES_TERMINATED_BY);
if (targetOracleParameter.getLinesTerminated().contains(SINGLE_QUOTES)) {
oracleTargetSb.append(SPACE).append(targetOracleParameter.getLinesTerminated());
} else {
oracleTargetSb.append(SPACE).append(SINGLE_QUOTES)
.append(targetOracleParameter.getLinesTerminated()).append(SINGLE_QUOTES);
}
}
if (targetOracleParameter.getIsUpdate()
&& StringUtils.isNotEmpty(targetOracleParameter.getTargetUpdateKey())
&& StringUtils.isNotEmpty(targetOracleParameter.getTargetUpdateMode())) {
oracleTargetSb.append(SPACE).append(UPDATE_KEY)
.append(SPACE).append(targetOracleParameter.getTargetUpdateKey())
.append(SPACE).append(UPDATE_MODE)
.append(SPACE).append(targetOracleParameter.getTargetUpdateMode());
}
} catch (Exception e) {
logger.error(String.format("Sqoop oracle target params build failed: [%s]", e.getMessage()));
}
return oracleTargetSb.toString();
}
}

130
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/SqlServerTargetGenerator.java

@@ -0,0 +1,130 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SINGLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DRIVER;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.FIELDS_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.LINES_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_KEY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_MODE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ITargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetSqlServerParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* sqlServer target generator
*/
public class SqlServerTargetGenerator implements ITargetGenerator {
private static final Logger logger = LoggerFactory.getLogger(SqlServerTargetGenerator.class);
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder sqlServerTargetSb = new StringBuilder();
try {
TargetSqlServerParameter targetSqlServerParameter =
JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetSqlServerParameter.class);
if (null == targetSqlServerParameter || targetSqlServerParameter.getTargetDatasource() == 0) {
return sqlServerTargetSb.toString();
}
// get datasource
BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
sqoopTaskExecutionContext.getTargetType(),
sqoopTaskExecutionContext.getTargetConnectionParams());
if (null == baseDataSource) {
return sqlServerTargetSb.toString();
}
sqlServerTargetSb.append(SPACE).append(DB_CONNECT)
.append(SPACE).append(DOUBLE_QUOTES)
.append(DataSourceUtils.getJdbcUrl(DbType.SQLSERVER, baseDataSource)).append(DOUBLE_QUOTES)
// restore the --driver flag so the driver class is not emitted as a bare token
// (assumes SqoopConstants.DRIVER == "--driver", matching the other generators)
.append(SPACE).append(DRIVER)
.append(SPACE).append(DataSourceUtils.getDatasourceDriver(DbType.SQLSERVER))
.append(SPACE).append(DB_USERNAME)
.append(SPACE).append(baseDataSource.getUser())
.append(SPACE).append(DB_PWD)
.append(SPACE).append(DOUBLE_QUOTES)
.append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES)
.append(SPACE).append(TABLE)
.append(SPACE).append(targetSqlServerParameter.getTargetTable());
if (StringUtils.isNotEmpty(targetSqlServerParameter.getTargetColumns())) {
sqlServerTargetSb.append(SPACE).append(COLUMNS)
.append(SPACE).append(targetSqlServerParameter.getTargetColumns());
}
if (StringUtils.isNotEmpty(targetSqlServerParameter.getFieldsTerminated())) {
sqlServerTargetSb.append(SPACE).append(FIELDS_TERMINATED_BY);
if (targetSqlServerParameter.getFieldsTerminated().contains("'")) {
sqlServerTargetSb.append(SPACE).append(targetSqlServerParameter.getFieldsTerminated());
} else {
sqlServerTargetSb.append(SPACE).append(SINGLE_QUOTES)
.append(targetSqlServerParameter.getFieldsTerminated()).append(SINGLE_QUOTES);
}
}
if (StringUtils.isNotEmpty(targetSqlServerParameter.getLinesTerminated())) {
sqlServerTargetSb.append(SPACE).append(LINES_TERMINATED_BY);
if (targetSqlServerParameter.getLinesTerminated().contains(SINGLE_QUOTES)) {
sqlServerTargetSb.append(SPACE).append(targetSqlServerParameter.getLinesTerminated());
} else {
sqlServerTargetSb.append(SPACE).append(SINGLE_QUOTES)
.append(targetSqlServerParameter.getLinesTerminated()).append(SINGLE_QUOTES);
}
}
if (targetSqlServerParameter.getIsUpdate()
&& StringUtils.isNotEmpty(targetSqlServerParameter.getTargetUpdateKey())
&& StringUtils.isNotEmpty(targetSqlServerParameter.getTargetUpdateMode())) {
sqlServerTargetSb.append(SPACE).append(UPDATE_KEY)
.append(SPACE).append(targetSqlServerParameter.getTargetUpdateKey())
.append(SPACE).append(UPDATE_MODE)
.append(SPACE).append(targetSqlServerParameter.getTargetUpdateMode());
}
} catch (Exception e) {
logger.error(String.format("Sqoop sqlserver target params build failed: [%s]", e.getMessage()));
}
return sqlServerTargetSb.toString();
}
}

38
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/SourceCommonParameter.java

@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter;
/**
* source common parameter
*/
public class SourceCommonParameter {
/**
* src datasource
*/
protected int srcDatasource;
public int getSrcDatasource() {
return srcDatasource;
}
public void setSrcDatasource(int srcDatasource) {
this.srcDatasource = srcDatasource;
}
}
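The value of this base class is that SqoopParameters can now read the datasource id through one type regardless of the concrete source, instead of hard-coding SourceMysqlParameter as before. A minimal sketch (the JSON payload is invented for illustration):
// any concrete source parameter can be read through the common base
String sourceParams = "{\"srcDatasource\":3,\"srcTable\":\"T_ORDER\"}";
SourceCommonParameter parameter = JSONUtils.parseObject(sourceParams, SourceOracleParameter.class);
int datasourceId = parameter.getSrcDatasource(); // 3, whatever the concrete subtype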

99
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/SqoopParameters.java

@@ -17,6 +17,13 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HANA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HDFS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MYSQL;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.ORACLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.SQLSERVER;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.task.api.enums.ResourceType;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
@@ -25,8 +32,18 @@ import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.DataSourceParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopJobType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHanaParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHdfsParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHiveParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceMysqlParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceOracleParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceSqlServerParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHanaParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHdfsParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHiveParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetMysqlParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetOracleParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetSqlServerParameter;
import org.apache.commons.lang3.StringUtils;
@@ -223,16 +240,16 @@ public class SqoopParameters extends AbstractParameters {
return resources;
}
SourceMysqlParameter sourceMysqlParameter =
JSONUtils.parseObject(this.getSourceParams(), SourceMysqlParameter.class);
if (sourceMysqlParameter.getSrcDatasource() != 0) {
resources.put(ResourceType.DATASOURCE, sourceMysqlParameter.getSrcDatasource());
SourceCommonParameter sourceParameter = (SourceCommonParameter) JSONUtils.parseObject(this.getSourceParams(),
getSourceParameter(this.getSourceType()));
if (sourceParameter.getSrcDatasource() != 0) {
resources.put(ResourceType.DATASOURCE, sourceParameter.getSrcDatasource());
}
TargetMysqlParameter targetMysqlParameter =
JSONUtils.parseObject(this.getTargetParams(), TargetMysqlParameter.class);
if (targetMysqlParameter.getTargetDatasource() != 0) {
resources.put(ResourceType.DATASOURCE, targetMysqlParameter.getTargetDatasource());
TargetCommonParameter targetParameter = (TargetCommonParameter) JSONUtils.parseObject(this.getTargetParams(),
getTargetParameter(this.getTargetType()));
if (targetParameter.getTargetDatasource() != 0) {
resources.put(ResourceType.DATASOURCE, targetParameter.getTargetDatasource());
}
return resources;
@@ -245,28 +262,78 @@ public class SqoopParameters extends AbstractParameters {
return sqoopTaskExecutionContext;
}
SourceMysqlParameter sourceMysqlParameter =
JSONUtils.parseObject(this.getSourceParams(), SourceMysqlParameter.class);
TargetMysqlParameter targetMysqlParameter =
JSONUtils.parseObject(this.getTargetParams(), TargetMysqlParameter.class);
SourceCommonParameter sourceParameter = (SourceCommonParameter) JSONUtils.parseObject(this.getSourceParams(),
getSourceParameter(this.getSourceType()));
TargetCommonParameter targetParameter = (TargetCommonParameter) JSONUtils.parseObject(this.getTargetParams(),
getTargetParameter(this.getTargetType()));
DataSourceParameters dataSource = (DataSourceParameters) parametersHelper
.getResourceParameters(ResourceType.DATASOURCE, sourceMysqlParameter.getSrcDatasource());
.getResourceParameters(ResourceType.DATASOURCE, sourceParameter.getSrcDatasource());
DataSourceParameters dataTarget = (DataSourceParameters) parametersHelper
.getResourceParameters(ResourceType.DATASOURCE, targetMysqlParameter.getTargetDatasource());
.getResourceParameters(ResourceType.DATASOURCE, targetParameter.getTargetDatasource());
if (Objects.nonNull(dataSource)) {
sqoopTaskExecutionContext.setDataSourceId(sourceMysqlParameter.getSrcDatasource());
sqoopTaskExecutionContext.setDataSourceId(sourceParameter.getSrcDatasource());
sqoopTaskExecutionContext.setSourcetype(dataSource.getType());
sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams());
}
if (Objects.nonNull(dataTarget)) {
sqoopTaskExecutionContext.setDataTargetId(targetMysqlParameter.getTargetDatasource());
sqoopTaskExecutionContext.setDataTargetId(targetParameter.getTargetDatasource());
sqoopTaskExecutionContext.setTargetType(dataTarget.getType());
sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams());
}
return sqoopTaskExecutionContext;
}
/**
* get the target parameter class
*
* @param targetType sqoop target type
* @return sqoop target parameter class
*/
private Class<?> getTargetParameter(String targetType) {
switch (targetType) {
case MYSQL:
return TargetMysqlParameter.class;
case HIVE:
return TargetHiveParameter.class;
case HDFS:
return TargetHdfsParameter.class;
case ORACLE:
return TargetOracleParameter.class;
case HANA:
return TargetHanaParameter.class;
case SQLSERVER:
return TargetSqlServerParameter.class;
default:
return null;
}
}
/**
* get the source parameter class
*
* @param sourceType sqoop source type
* @return sqoop source parameter class
*/
private Class<?> getSourceParameter(String sourceType) {
switch (sourceType) {
case HIVE:
return SourceHiveParameter.class;
case HDFS:
return SourceHdfsParameter.class;
case MYSQL:
return SourceMysqlParameter.class;
case ORACLE:
return SourceOracleParameter.class;
case HANA:
return SourceHanaParameter.class;
case SQLSERVER:
return SourceSqlServerParameter.class;
default:
return null;
}
}
}
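Both lookup helpers fall through to null for an unrecognized type string, so a malformed task definition would most likely surface as a NullPointerException inside JSONUtils.parseObject rather than as a descriptive error. A hedged sketch of a defensive guard inside the class (illustrative, not part of the patch):
Class<?> sourceClazz = getSourceParameter(this.getSourceType());
if (sourceClazz == null) {
throw new IllegalArgumentException("unsupported sqoop source type: " + this.getSourceType());
}
SourceCommonParameter sourceParameter =
(SourceCommonParameter) JSONUtils.parseObject(this.getSourceParams(), sourceClazz);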

38
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/TargetCommonParameter.java

@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter;
/**
* target common parameter
*/
public class TargetCommonParameter {
/**
* target datasource
*/
protected int targetDatasource;
public int getTargetDatasource() {
return targetDatasource;
}
public void setTargetDatasource(int targetDatasource) {
this.targetDatasource = targetDatasource;
}
}

126
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHanaParameter.java

@@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
import java.util.List;
/**
* source hana parameter
*/
public class SourceHanaParameter extends SourceCommonParameter {
/**
* src table
*/
private String srcTable;
/**
* src query type
*/
private int srcQueryType;
/**
* src query sql
*/
private String srcQuerySql;
/**
* src column type
*/
private int srcColumnType;
/**
* src columns
*/
private String srcColumns;
/**
* src condition list
*/
private List<Property> srcConditionList;
/**
* map column hive
*/
private List<Property> mapColumnHive;
/**
* map column java
*/
private List<Property> mapColumnJava;
public String getSrcTable() {
return srcTable;
}
public void setSrcTable(String srcTable) {
this.srcTable = srcTable;
}
public int getSrcQueryType() {
return srcQueryType;
}
public void setSrcQueryType(int srcQueryType) {
this.srcQueryType = srcQueryType;
}
public String getSrcQuerySql() {
return srcQuerySql;
}
public void setSrcQuerySql(String srcQuerySql) {
this.srcQuerySql = srcQuerySql;
}
public int getSrcColumnType() {
return srcColumnType;
}
public void setSrcColumnType(int srcColumnType) {
this.srcColumnType = srcColumnType;
}
public String getSrcColumns() {
return srcColumns;
}
public void setSrcColumns(String srcColumns) {
this.srcColumns = srcColumns;
}
public List<Property> getSrcConditionList() {
return srcConditionList;
}
public void setSrcConditionList(List<Property> srcConditionList) {
this.srcConditionList = srcConditionList;
}
public List<Property> getMapColumnHive() {
return mapColumnHive;
}
public void setMapColumnHive(List<Property> mapColumnHive) {
this.mapColumnHive = mapColumnHive;
}
public List<Property> getMapColumnJava() {
return mapColumnJava;
}
public void setMapColumnJava(List<Property> mapColumnJava) {
this.mapColumnJava = mapColumnJava;
}
}

4
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHdfsParameter.java

@@ -17,10 +17,12 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
/**
* source hdfs parameter
*/
public class SourceHdfsParameter {
public class SourceHdfsParameter extends SourceCommonParameter {
/**
* export dir

4
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHiveParameter.java

@@ -17,10 +17,12 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
/**
* source hive parameter
*/
public class SourceHiveParameter {
public class SourceHiveParameter extends SourceCommonParameter {
/**
* hive database

15
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceMysqlParameter.java

@@ -18,18 +18,15 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
import java.util.List;
/**
* source mysql parameter
*/
public class SourceMysqlParameter {
public class SourceMysqlParameter extends SourceCommonParameter {
/**
* src datasource
*/
private int srcDatasource;
/**
* src table
*/
@@ -63,14 +60,6 @@ public class SourceMysqlParameter {
*/
private List<Property> mapColumnJava;
public int getSrcDatasource() {
return srcDatasource;
}
public void setSrcDatasource(int srcDatasource) {
this.srcDatasource = srcDatasource;
}
public String getSrcTable() {
return srcTable;
}

126
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceOracleParameter.java

@@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
import java.util.List;
/**
* source oracle parameter
*/
public class SourceOracleParameter extends SourceCommonParameter {
/**
* src table
*/
private String srcTable;
/**
* src query type
*/
private int srcQueryType;
/**
* src query sql
*/
private String srcQuerySql;
/**
* src column type
*/
private int srcColumnType;
/**
* src columns
*/
private String srcColumns;
/**
* src condition list
*/
private List<Property> srcConditionList;
/**
* map column hive
*/
private List<Property> mapColumnHive;
/**
* map column java
*/
private List<Property> mapColumnJava;
public String getSrcTable() {
return srcTable;
}
public void setSrcTable(String srcTable) {
this.srcTable = srcTable;
}
public int getSrcQueryType() {
return srcQueryType;
}
public void setSrcQueryType(int srcQueryType) {
this.srcQueryType = srcQueryType;
}
public String getSrcQuerySql() {
return srcQuerySql;
}
public void setSrcQuerySql(String srcQuerySql) {
this.srcQuerySql = srcQuerySql;
}
public int getSrcColumnType() {
return srcColumnType;
}
public void setSrcColumnType(int srcColumnType) {
this.srcColumnType = srcColumnType;
}
public String getSrcColumns() {
return srcColumns;
}
public void setSrcColumns(String srcColumns) {
this.srcColumns = srcColumns;
}
public List<Property> getSrcConditionList() {
return srcConditionList;
}
public void setSrcConditionList(List<Property> srcConditionList) {
this.srcConditionList = srcConditionList;
}
public List<Property> getMapColumnHive() {
return mapColumnHive;
}
public void setMapColumnHive(List<Property> mapColumnHive) {
this.mapColumnHive = mapColumnHive;
}
public List<Property> getMapColumnJava() {
return mapColumnJava;
}
public void setMapColumnJava(List<Property> mapColumnJava) {
this.mapColumnJava = mapColumnJava;
}
}

126
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceSqlServerParameter.java

@@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
import java.util.List;
/**
* source sqlServer parameter
*/
public class SourceSqlServerParameter extends SourceCommonParameter {
/**
* src table
*/
private String srcTable;
/**
* src query type
*/
private int srcQueryType;
/**
* src query sql
*/
private String srcQuerySql;
/**
* src column type
*/
private int srcColumnType;
/**
* src columns
*/
private String srcColumns;
/**
* src condition list
*/
private List<Property> srcConditionList;
/**
* map column hive
*/
private List<Property> mapColumnHive;
/**
* map column java
*/
private List<Property> mapColumnJava;
public String getSrcTable() {
return srcTable;
}
public void setSrcTable(String srcTable) {
this.srcTable = srcTable;
}
public int getSrcQueryType() {
return srcQueryType;
}
public void setSrcQueryType(int srcQueryType) {
this.srcQueryType = srcQueryType;
}
public String getSrcQuerySql() {
return srcQuerySql;
}
public void setSrcQuerySql(String srcQuerySql) {
this.srcQuerySql = srcQuerySql;
}
public int getSrcColumnType() {
return srcColumnType;
}
public void setSrcColumnType(int srcColumnType) {
this.srcColumnType = srcColumnType;
}
public String getSrcColumns() {
return srcColumns;
}
public void setSrcColumns(String srcColumns) {
this.srcColumns = srcColumns;
}
public List<Property> getSrcConditionList() {
return srcConditionList;
}
public void setSrcConditionList(List<Property> srcConditionList) {
this.srcConditionList = srcConditionList;
}
public List<Property> getMapColumnHive() {
return mapColumnHive;
}
public void setMapColumnHive(List<Property> mapColumnHive) {
this.mapColumnHive = mapColumnHive;
}
public List<Property> getMapColumnJava() {
return mapColumnJava;
}
public void setMapColumnJava(List<Property> mapColumnJava) {
this.mapColumnJava = mapColumnJava;
}
}

123
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHanaParameter.java

@@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/**
* target hana parameter
*/
public class TargetHanaParameter extends TargetCommonParameter {
/**
* target table
*/
private String targetTable;
/**
* target columns
*/
private String targetColumns;
/**
* fields terminated
*/
private String fieldsTerminated;
/**
* lines terminated
*/
private String linesTerminated;
/**
* pre query
*/
private String preQuery;
/**
* is update
*/
private boolean isUpdate;
/**
* target update key
*/
private String targetUpdateKey;
/**
* target update mode
*/
private String targetUpdateMode;
public String getTargetTable() {
return targetTable;
}
public void setTargetTable(String targetTable) {
this.targetTable = targetTable;
}
public String getTargetColumns() {
return targetColumns;
}
public void setTargetColumns(String targetColumns) {
this.targetColumns = targetColumns;
}
public String getFieldsTerminated() {
return fieldsTerminated;
}
public void setFieldsTerminated(String fieldsTerminated) {
this.fieldsTerminated = fieldsTerminated;
}
public String getLinesTerminated() {
return linesTerminated;
}
public void setLinesTerminated(String linesTerminated) {
this.linesTerminated = linesTerminated;
}
public String getPreQuery() {
return preQuery;
}
public void setPreQuery(String preQuery) {
this.preQuery = preQuery;
}
public boolean getIsUpdate() {
return isUpdate;
}
public void setUpdate(boolean update) {
isUpdate = update;
}
public String getTargetUpdateKey() {
return targetUpdateKey;
}
public void setTargetUpdateKey(String targetUpdateKey) {
this.targetUpdateKey = targetUpdateKey;
}
public String getTargetUpdateMode() {
return targetUpdateMode;
}
public void setTargetUpdateMode(String targetUpdateMode) {
this.targetUpdateMode = targetUpdateMode;
}
}
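One detail worth a second look in this and the other new target parameter classes: the accessor pair for the update flag is asymmetric (getIsUpdate paired with setUpdate), so bean-introspecting mappers can derive two different property names, "isUpdate" from the getter and "update" from the setter. Whether the usual payload key binds on deserialization then depends on the mapper's field-visibility settings; a quick check like the following is worthwhile (illustrative assumption, not a confirmed defect):
// verify the payload key round-trips through JSONUtils' underlying mapper
TargetHanaParameter parameter = JSONUtils.parseObject("{\"isUpdate\":true}", TargetHanaParameter.class);
// expect parameter.getIsUpdate() == true; if false, the key and the
// setter-derived property name ("update") have diverged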

4
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHdfsParameter.java

@@ -17,10 +17,12 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/**
* target hdfs parameter
*/
public class TargetHdfsParameter {
public class TargetHdfsParameter extends TargetCommonParameter {
/**
* target dir

4
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHiveParameter.java

@@ -17,10 +17,12 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/**
* target hive parameter
*/
public class TargetHiveParameter {
public class TargetHiveParameter extends TargetCommonParameter {
/**
* hive database

16
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetMysqlParameter.java

@@ -17,15 +17,13 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/**
* target mysql parameter
*/
public class TargetMysqlParameter {
public class TargetMysqlParameter extends TargetCommonParameter {
/**
* target datasource
*/
private int targetDatasource;
/**
* target table
*/
@@ -59,14 +57,6 @@ public class TargetMysqlParameter {
*/
private String targetUpdateMode;
public int getTargetDatasource() {
return targetDatasource;
}
public void setTargetDatasource(int targetDatasource) {
this.targetDatasource = targetDatasource;
}
public String getTargetTable() {
return targetTable;
}

123
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetOracleParameter.java

@@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/**
* target oracle parameter
*/
public class TargetOracleParameter extends TargetCommonParameter {
/**
* target table
*/
private String targetTable;
/**
* target columns
*/
private String targetColumns;
/**
* fields terminated
*/
private String fieldsTerminated;
/**
* lines terminated
*/
private String linesTerminated;
/**
* pre query
*/
private String preQuery;
/**
* is update
*/
private boolean isUpdate;
/**
* target update key
*/
private String targetUpdateKey;
/**
* target update mode
*/
private String targetUpdateMode;
public String getTargetTable() {
return targetTable;
}
public void setTargetTable(String targetTable) {
this.targetTable = targetTable;
}
public String getTargetColumns() {
return targetColumns;
}
public void setTargetColumns(String targetColumns) {
this.targetColumns = targetColumns;
}
public String getFieldsTerminated() {
return fieldsTerminated;
}
public void setFieldsTerminated(String fieldsTerminated) {
this.fieldsTerminated = fieldsTerminated;
}
public String getLinesTerminated() {
return linesTerminated;
}
public void setLinesTerminated(String linesTerminated) {
this.linesTerminated = linesTerminated;
}
public String getPreQuery() {
return preQuery;
}
public void setPreQuery(String preQuery) {
this.preQuery = preQuery;
}
public boolean getIsUpdate() {
return isUpdate;
}
public void setUpdate(boolean update) {
isUpdate = update;
}
public String getTargetUpdateKey() {
return targetUpdateKey;
}
public void setTargetUpdateKey(String targetUpdateKey) {
this.targetUpdateKey = targetUpdateKey;
}
public String getTargetUpdateMode() {
return targetUpdateMode;
}
public void setTargetUpdateMode(String targetUpdateMode) {
this.targetUpdateMode = targetUpdateMode;
}
}

123
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetSqlServerParameter.java

@@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/**
* target sqlServer parameter
*/
public class TargetSqlServerParameter extends TargetCommonParameter {
/**
* target table
*/
private String targetTable;
/**
* target columns
*/
private String targetColumns;
/**
* fields terminated
*/
private String fieldsTerminated;
/**
* lines terminated
*/
private String linesTerminated;
/**
* pre query
*/
private String preQuery;
/**
* is update
*/
private boolean isUpdate;
/**
* target update key
*/
private String targetUpdateKey;
/**
* target update mode
*/
private String targetUpdateMode;
public String getTargetTable() {
return targetTable;
}
public void setTargetTable(String targetTable) {
this.targetTable = targetTable;
}
public String getTargetColumns() {
return targetColumns;
}
public void setTargetColumns(String targetColumns) {
this.targetColumns = targetColumns;
}
public String getFieldsTerminated() {
return fieldsTerminated;
}
public void setFieldsTerminated(String fieldsTerminated) {
this.fieldsTerminated = fieldsTerminated;
}
public String getLinesTerminated() {
return linesTerminated;
}
public void setLinesTerminated(String linesTerminated) {
this.linesTerminated = linesTerminated;
}
public String getPreQuery() {
return preQuery;
}
public void setPreQuery(String preQuery) {
this.preQuery = preQuery;
}
public boolean getIsUpdate() {
return isUpdate;
}
public void setUpdate(boolean update) {
isUpdate = update;
}
public String getTargetUpdateKey() {
return targetUpdateKey;
}
public void setTargetUpdateKey(String targetUpdateKey) {
this.targetUpdateKey = targetUpdateKey;
}
public String getTargetUpdateMode() {
return targetUpdateMode;
}
public void setTargetUpdateMode(String targetUpdateMode) {
this.targetUpdateMode = targetUpdateMode;
}
}

57
dolphinscheduler-ui/pnpm-lock.yaml

@@ -91,22 +91,22 @@ devDependencies:
'@types/node': 18.16.18
'@types/nprogress': 0.2.0
'@types/qs': 6.9.7
'@typescript-eslint/eslint-plugin': 5.59.11_khxwfo2nlv6qliptqsbqa3vjsm
'@typescript-eslint/parser': 5.59.11_kigkzfftsmftz3xok324pyvzui
'@typescript-eslint/eslint-plugin': 5.59.11_51ef62bb4d5d7d05a1f38483006ea993
'@typescript-eslint/parser': 5.59.11_eslint@8.42.0+typescript@4.9.5
'@vicons/antd': 0.12.0
'@vitejs/plugin-vue': 3.2.0_vite@3.2.7+vue@3.3.4
'@vitejs/plugin-vue-jsx': 2.1.1_vite@3.2.7+vue@3.3.4
dart-sass: 1.25.0
eslint: 8.42.0
eslint-config-prettier: 8.8.0_eslint@8.42.0
eslint-plugin-prettier: 4.2.1_vnriwwub2rhvoyn4ckagrc4lpi
eslint-plugin-prettier: 4.2.1_ab628b5a81d44f5761bc1280688b8b7a
eslint-plugin-vue: 9.14.1_eslint@8.42.0
prettier: 2.8.8
sass: 1.63.4
sass-loader: 13.3.2_sass@1.63.4
typescript: 4.9.5
typescript-plugin-css-modules: 3.4.0_typescript@4.9.5
vite: 3.2.7_ffzaxsbr6mwjfgagqxd743xe6i
vite: 3.2.7_@types+node@18.16.18+sass@1.63.4
vite-plugin-compression: 0.5.1_vite@3.2.7
vue-tsc: 0.40.13_typescript@4.9.5
@@ -740,7 +740,7 @@ packages:
resolution: {integrity: sha512-oh8q2Zc32S6gd/j50GowEjKLoOVOwHP/bWVjKJInBwQqdOYMdPrf1oVlelTlyfFK3CKxL1uahMDAr+vy8T7yMQ==}
dev: false
/@typescript-eslint/eslint-plugin/5.59.11_khxwfo2nlv6qliptqsbqa3vjsm:
/@typescript-eslint/eslint-plugin/5.59.11_51ef62bb4d5d7d05a1f38483006ea993:
resolution: {integrity: sha512-XxuOfTkCUiOSyBWIvHlUraLw/JT/6Io1365RO6ZuI88STKMavJZPNMU0lFcUTeQXEhHiv64CbxYxBNoDVSmghg==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
@@ -752,10 +752,10 @@ packages:
optional: true
dependencies:
'@eslint-community/regexpp': 4.5.1
'@typescript-eslint/parser': 5.59.11_kigkzfftsmftz3xok324pyvzui
'@typescript-eslint/parser': 5.59.11_eslint@8.42.0+typescript@4.9.5
'@typescript-eslint/scope-manager': 5.59.11
'@typescript-eslint/type-utils': 5.59.11_kigkzfftsmftz3xok324pyvzui
'@typescript-eslint/utils': 5.59.11_kigkzfftsmftz3xok324pyvzui
'@typescript-eslint/type-utils': 5.59.11_eslint@8.42.0+typescript@4.9.5
'@typescript-eslint/utils': 5.59.11_eslint@8.42.0+typescript@4.9.5
debug: 4.3.4
eslint: 8.42.0
grapheme-splitter: 1.0.4
@@ -768,7 +768,7 @@ packages:
- supports-color
dev: true
/@typescript-eslint/parser/5.59.11_kigkzfftsmftz3xok324pyvzui:
/@typescript-eslint/parser/5.59.11_eslint@8.42.0+typescript@4.9.5:
resolution: {integrity: sha512-s9ZF3M+Nym6CAZEkJJeO2TFHHDsKAM3ecNkLuH4i4s8/RCPnF5JRip2GyviYkeEAcwGMJxkqG9h2dAsnA1nZpA==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
@@ -796,7 +796,7 @@ packages:
'@typescript-eslint/visitor-keys': 5.59.11
dev: true
/@typescript-eslint/type-utils/5.59.11_kigkzfftsmftz3xok324pyvzui:
/@typescript-eslint/type-utils/5.59.11_eslint@8.42.0+typescript@4.9.5:
resolution: {integrity: sha512-LZqVY8hMiVRF2a7/swmkStMYSoXMFlzL6sXV6U/2gL5cwnLWQgLEG8tjWPpaE4rMIdZ6VKWwcffPlo1jPfk43g==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
@@ -807,7 +807,7 @@ packages:
optional: true
dependencies:
'@typescript-eslint/typescript-estree': 5.59.11_typescript@4.9.5
'@typescript-eslint/utils': 5.59.11_kigkzfftsmftz3xok324pyvzui
'@typescript-eslint/utils': 5.59.11_eslint@8.42.0+typescript@4.9.5
debug: 4.3.4
eslint: 8.42.0
tsutils: 3.21.0_typescript@4.9.5
@@ -842,7 +842,7 @@ packages:
- supports-color
dev: true
/@typescript-eslint/utils/5.59.11_kigkzfftsmftz3xok324pyvzui:
/@typescript-eslint/utils/5.59.11_eslint@8.42.0+typescript@4.9.5:
resolution: {integrity: sha512-didu2rHSOMUdJThLk4aZ1Or8IcO3HzCw/ZvEjTTIfjIrcdd5cvSIwwDy2AOlE7htSNp7QIZ10fLMyRCveesMLg==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
@@ -884,7 +884,7 @@ packages:
'@babel/core': 7.22.5
'@babel/plugin-transform-typescript': 7.22.5_@babel+core@7.22.5
'@vue/babel-plugin-jsx': 1.1.1_@babel+core@7.22.5
vite: 3.2.7_ffzaxsbr6mwjfgagqxd743xe6i
vite: 3.2.7_@types+node@18.16.18+sass@1.63.4
vue: 3.3.4
transitivePeerDependencies:
- supports-color
@@ -897,7 +897,7 @@ packages:
vite: ^3.0.0
vue: ^3.2.25
dependencies:
vite: 3.2.7_ffzaxsbr6mwjfgagqxd743xe6i
vite: 3.2.7_@types+node@18.16.18+sass@1.63.4
vue: 3.3.4
dev: true
@@ -1023,6 +1023,7 @@ packages:
dependencies:
'@vue/reactivity': 3.3.4
'@vue/shared': 3.3.4
dev: false
/@vue/runtime-dom/3.3.4:
resolution: {integrity: sha512-Aj5bTJ3u5sFsUckRghsNjVTtxZQ1OyMWCr5dZRAPijF/0Vy4xEoRCwLyHXcj4D0UFbJ4lbx3gPTgg06K/GnPnQ==}
@@ -1030,6 +1031,7 @@ packages:
'@vue/runtime-core': 3.3.4
'@vue/shared': 3.3.4
csstype: 3.1.2
dev: false
/@vue/server-renderer/3.3.4_vue@3.3.4:
resolution: {integrity: sha512-Q6jDDzR23ViIb67v+vM1Dqntu+HUexQcsWKhhQa4ARVzxOY2HbC7QRW/ggkDBd5BU+uM1sV6XOAP0b216o34JQ==}
@@ -1039,6 +1041,7 @@ packages:
'@vue/compiler-ssr': 3.3.4
'@vue/shared': 3.3.4
vue: 3.3.4
dev: false
/@vue/shared/3.2.38:
resolution: {integrity: sha512-dTyhTIRmGXBjxJE+skC8tTWCGLCVc4wQgRRLt8+O9p5ewBAjoBwtCAkLPrtToSr1xltoe3st21Pv953aOZ7alg==}
@@ -1354,6 +1357,7 @@ packages:
/csstype/3.1.2:
resolution: {integrity: sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==}
dev: false
/d3-array/3.2.4:
resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==}
@@ -1649,22 +1653,12 @@ packages:
/debug/3.1.0:
resolution: {integrity: sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
dependencies:
ms: 2.0.0
dev: true
/debug/3.2.7:
resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
dependencies:
ms: 2.1.3
dev: true
@@ -1980,7 +1974,7 @@ packages:
eslint: 8.42.0
dev: true
/eslint-plugin-prettier/4.2.1_vnriwwub2rhvoyn4ckagrc4lpi:
/eslint-plugin-prettier/4.2.1_ab628b5a81d44f5761bc1280688b8b7a:
resolution: {integrity: sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==}
engines: {node: '>=12.0.0'}
peerDependencies:
@@ -2579,8 +2573,6 @@ packages:
mime: 1.6.0
needle: 3.2.0
source-map: 0.6.1
transitivePeerDependencies:
- supports-color
dev: true
/levn/0.4.1:
@@ -2800,8 +2792,6 @@ packages:
debug: 3.2.7
iconv-lite: 0.6.3
sax: 1.2.4
transitivePeerDependencies:
- supports-color
dev: true
optional: true
@@ -3305,8 +3295,6 @@ packages:
sax: 1.2.4
semver: 6.3.0
source-map: 0.7.4
transitivePeerDependencies:
- supports-color
dev: true
/supports-color/5.5.0:
@@ -3414,7 +3402,6 @@ packages:
tsconfig-paths: 3.14.2
typescript: 4.9.5
transitivePeerDependencies:
- supports-color
- ts-node
dev: true
@@ -3422,6 +3409,7 @@ packages:
resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==}
engines: {node: '>=4.2.0'}
hasBin: true
dev: true
/universalify/2.0.0:
resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==}
@@ -3480,12 +3468,12 @@ packages:
chalk: 4.1.2
debug: 4.3.4
fs-extra: 10.1.0
vite: 3.2.7_ffzaxsbr6mwjfgagqxd743xe6i
vite: 3.2.7_@types+node@18.16.18+sass@1.63.4
transitivePeerDependencies:
- supports-color
dev: true
/vite/3.2.7_ffzaxsbr6mwjfgagqxd743xe6i:
/vite/3.2.7_@types+node@18.16.18+sass@1.63.4:
resolution: {integrity: sha512-29pdXjk49xAP0QBr0xXqu2s5jiQIXNvE/xwd0vUizYT2Hzqe4BksNNoWllFVXJf4eLZ+UlVQmXfB4lWrc+t18g==}
engines: {node: ^14.18.0 || >=16.0.0}
hasBin: true
@@ -3603,6 +3591,7 @@ packages:
'@vue/runtime-dom': 3.3.4
'@vue/server-renderer': 3.3.4_vue@3.3.4
'@vue/shared': 3.3.4
dev: false
/vueuc/0.4.51_vue@3.3.4:
resolution: {integrity: sha512-pLiMChM4f+W8czlIClGvGBYo656lc2Y0/mXFSCydcSmnCR1izlKPGMgiYBGjbY9FDkFG8a2HEVz7t0DNzBWbDw==}

2
dolphinscheduler-ui/src/locales/en_US/project.ts

@@ -539,7 +539,7 @@ export default {
model_type: 'ModelType',
form: 'Form',
table: 'Table',
table_tips: 'Please enter Mysql Table(required)',
table_tips: 'Please enter Table(required)',
column_type: 'ColumnType',
all_columns: 'All Columns',
some_columns: 'Some Columns',

2
dolphinscheduler-ui/src/locales/zh_CN/project.ts

@@ -529,7 +529,7 @@ export default {
model_type: '模式',
form: '表单',
table: '表名',
table_tips: '请输入Mysql表名(必填)',
table_tips: '请输入表名(必填)',
column_type: '列类型',
all_columns: '全表导入',
some_columns: '选择列',

142
dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-datasource.ts

@@ -15,66 +15,94 @@
* limitations under the License.
*/
import { onMounted, ref, Ref } from 'vue'
import { queryDataSourceList } from '@/service/modules/data-source'
import { useI18n } from 'vue-i18n'
import type { IJsonItem, IDataBase } from '../types'
import type { TypeReq } from '@/service/modules/data-source/types'
import {onMounted, ref, Ref, watch} from 'vue'
import {queryDataSourceList} from '@/service/modules/data-source'
import {useI18n} from 'vue-i18n'
import type {IJsonItem, IDataBase} from '../types'
import type {TypeReq} from '@/service/modules/data-source/types'
export function useDatasource(
model: { [field: string]: any },
span: Ref,
fieldType: string,
fieldDatasource: string
model: { [field: string]: any },
span: Ref,
fieldType: string,
fieldDatasource: string
): IJsonItem[] {
const { t } = useI18n()
const dataSourceList = ref([])
const loading = ref(false)
const {t} = useI18n()
const dataSourceList = ref([])
const loading = ref(false)
const hadoopSourceTypes = ref(['HIVE', 'HDFS'])
const getDataSource = async (type: IDataBase) => {
if (hadoopSourceTypes.value.some(source => source === type)) {
loading.value = false;
return
}
loading.value = true
if (model.modelType === 'import') {
model.sourceMysqlDatasource = model.sourceMysqlDatasource ? model.sourceMysqlDatasource : ''
model.sourceMysqlType = type;
} else {
model.sourceMysqlDatasource = model.targetMysqlDatasource ? model.targetMysqlDatasource : ''
model.targetMysqlType = type;
}
const params = {type, testFlag: 0} as TypeReq
const result = await queryDataSourceList(params)
dataSourceList.value = result.map((item: { name: string; id: number }) => ({
label: item.name,
value: item.id
}))
loading.value = false
}
onMounted(() => {
getDataSource(model.sourceType)
})
const getDataSource = async (type: IDataBase) => {
if (loading.value) return
loading.value = true
const params = { type, testFlag: 0 } as TypeReq
const result = await queryDataSourceList(params)
dataSourceList.value = result.map((item: { name: string; id: number }) => ({
label: item.name,
value: item.id
}))
loading.value = false
}
onMounted(() => {
getDataSource('MYSQL')
})
watch(
() => [
model.sourceType,
],
() => {
getDataSource(model.sourceType)
}
)
return [
{
type: 'select',
field: fieldType,
name: t('project.node.datasource'),
span: span,
options: [{ label: 'MYSQL', value: 'MYSQL' }],
validate: {
required: true
}
},
{
type: 'select',
field: fieldDatasource,
name: ' ',
span: span,
props: {
placeholder: t('project.node.datasource_tips'),
filterable: true,
loading
},
options: dataSourceList,
validate: {
trigger: ['blur', 'input'],
validator(validate, value) {
if (!value) {
return new Error(t('project.node.datasource_tips'))
}
watch(
() => [
model.targetType,
],
() => {
getDataSource(model.targetType)
}
}
}
]
)
return [
{
type: 'input',
field: fieldType,
name: t('project.node.datasource'),
span: 0,
validate: {
required: true,
}
},
{
type: 'select',
field: fieldDatasource,
name: t('project.node.datasource'),
span: span,
props: {
placeholder: t('project.node.datasource_tips'),
filterable: true,
loading
},
options: dataSourceList,
validate: {
trigger: ['blur', 'input'],
validator(validate, value) {
if (!value) {
return new Error(t('project.node.datasource_tips'))
}
}
}
}
]
}

561
dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-source-type.ts

@@ -15,293 +15,312 @@
* limitations under the License.
*/
import { ref, h, watch, Ref } from 'vue'
import { useI18n } from 'vue-i18n'
import { useDatasource } from './use-sqoop-datasource'
import { useCustomParams } from '.'
import {h, onMounted, Ref, ref, watch} from 'vue'
import {useI18n} from 'vue-i18n'
import {useDatasource} from './use-sqoop-datasource'
import {useCustomParams} from '.'
import styles from '../index.module.scss'
import type { IJsonItem, IOption, ModelType } from '../types'
import type {IJsonItem, IOption, ModelType} from '../types'
export function useSourceType(
model: { [field: string]: any },
unCustomSpan: Ref<number>
model: { [field: string]: any },
unCustomSpan: Ref<number>
): IJsonItem[] {
const { t } = useI18n()
const mysqlSpan = ref(24)
const tableSpan = ref(0)
const editorSpan = ref(24)
const columnSpan = ref(0)
const hiveSpan = ref(0)
const hdfsSpan = ref(0)
const datasourceSpan = ref(12)
const resetSpan = () => {
mysqlSpan.value =
unCustomSpan.value && model.sourceType === 'MYSQL' ? 24 : 0
  const { t } = useI18n()
  const rdbmsSpan = ref(24)
  const tableSpan = ref(0)
  const editorSpan = ref(24)
  const columnSpan = ref(0)
  const hiveSpan = ref(0)
  const hdfsSpan = ref(0)
  const datasourceSpan = ref(24)
  const isChange: any = ref(false)
  const rdbmsSourceTypes = ref([
    { label: 'MYSQL', value: 'MYSQL' },
    { label: 'ORACLE', value: 'ORACLE' },
    { label: 'SQLSERVER', value: 'SQLSERVER' },
    { label: 'HANA', value: 'HANA' }
  ] as IOption[])
  const hadoopSourceTypes = ref([
    { label: 'HIVE', value: 'HIVE' },
    { label: 'HDFS', value: 'HDFS' }
  ] as IOption[])
  const sourceTypes = ref()
  const resetSpan = () => {
    rdbmsSpan.value =
      unCustomSpan.value &&
      rdbmsSourceTypes.value.some((source) => source.value === model.sourceType)
        ? 24
        : 0
    tableSpan.value = rdbmsSpan.value && model.srcQueryType === '0' ? 24 : 0
    editorSpan.value = rdbmsSpan.value && model.srcQueryType === '1' ? 24 : 0
    columnSpan.value = tableSpan.value && model.srcColumnType === '1' ? 24 : 0
    hiveSpan.value = unCustomSpan.value && model.sourceType === 'HIVE' ? 24 : 0
    hdfsSpan.value = unCustomSpan.value && model.sourceType === 'HDFS' ? 24 : 0
    datasourceSpan.value =
      unCustomSpan.value &&
      rdbmsSourceTypes.value.some((source) => source.value === model.sourceType)
        ? 24
        : 0
  }
  const resetValue = () => {
    if (!isChange.value) {
      isChange.value = true
      return
    }
    switch (model.modelType) {
      case 'import':
        model.sourceMysqlDatasource = ''
        break
      case 'export':
        model.sourceHiveDatabase = ''
        model.sourceHiveTable = ''
        model.sourceHivePartitionKey = ''
        model.sourceHivePartitionValue = ''
        model.sourceHdfsExportDir = ''
        break
      default:
        model.sourceMysqlDatasource = ''
    }
  }
  const getSourceTypesByModelType = (modelType: ModelType): IOption[] => {
    switch (modelType) {
      case 'import':
        return rdbmsSourceTypes.value
      case 'export':
        return hadoopSourceTypes.value
      default:
        return rdbmsSourceTypes.value
    }
  }
  onMounted(() => {
    sourceTypes.value = [...rdbmsSourceTypes.value]
  })
  watch(
    () => model.modelType,
    (modelType: ModelType) => {
      sourceTypes.value = getSourceTypesByModelType(modelType)
      model.sourceType = sourceTypes.value[0].value
    }
  )
  watch(
    () => [
      unCustomSpan.value,
      model.sourceType,
      model.srcQueryType,
      model.srcColumnType
    ],
    () => {
      resetValue()
      resetSpan()
    }
  )
  return [
    {
      type: 'custom',
      field: 'custom-title-source',
      span: unCustomSpan,
      widget: h(
        'div',
        { class: styles['field-title'] },
        t('project.node.data_source')
      )
    },
    {
      type: 'select',
      field: 'sourceType',
      name: t('project.node.type'),
      span: unCustomSpan,
      options: sourceTypes
    },
    ...useDatasource(
      model,
      datasourceSpan,
      'sourceMysqlType',
      'sourceMysqlDatasource'
    ),
    {
      type: 'radio',
      field: 'srcQueryType',
      name: t('project.node.model_type'),
      span: rdbmsSpan,
      options: [
        {
          label: t('project.node.form'),
          value: '0'
        },
        {
          label: 'SQL',
          value: '1'
        }
      ],
      props: {
        'on-update:value': (value: '0' | '1') => {
          model.targetType = value === '0' ? 'HIVE' : 'HDFS'
        }
      }
    },
    {
      type: 'input',
      field: 'srcTable',
      name: t('project.node.table'),
      span: tableSpan,
      props: {
        placeholder: t('project.node.table_tips')
      },
      validate: {
        trigger: ['input', 'blur'],
        required: true,
        validator(validate, value) {
          if (tableSpan.value && !value) {
            return new Error(t('project.node.table_tips'))
          }
        }
      }
    },
    {
      type: 'radio',
      field: 'srcColumnType',
      name: t('project.node.column_type'),
      span: tableSpan,
      options: [
        { label: t('project.node.all_columns'), value: '0' },
        { label: t('project.node.some_columns'), value: '1' }
      ]
    },
    {
      type: 'input',
      field: 'srcColumns',
      name: t('project.node.column'),
      span: columnSpan,
      props: {
        placeholder: t('project.node.column_tips')
      },
      validate: {
        trigger: ['input', 'blur'],
        required: true,
        validator(validate, value) {
          if (!!columnSpan.value && !value) {
            return new Error(t('project.node.column_tips'))
          }
        }
      }
    },
    {
      type: 'input',
      field: 'sourceHiveDatabase',
      name: t('project.node.database'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.database_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: true,
        validator(validate, value) {
          if (hiveSpan.value && !value) {
            return new Error(t('project.node.database_tips'))
          }
        }
      }
    },
    {
      type: 'input',
      field: 'sourceHiveTable',
      name: t('project.node.table'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_table_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: true,
        validator(validate, value) {
          if (hiveSpan.value && !value) {
            return new Error(t('project.node.hive_table_tips'))
          }
        }
      }
    },
    {
      type: 'input',
      field: 'sourceHivePartitionKey',
      name: t('project.node.hive_partition_keys'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_partition_keys_tips')
      }
    },
    {
      type: 'input',
      field: 'sourceHivePartitionValue',
      name: t('project.node.hive_partition_values'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_partition_values_tips')
      }
    },
    {
      type: 'input',
      field: 'sourceHdfsExportDir',
      name: t('project.node.export_dir'),
      span: hdfsSpan,
      props: {
        placeholder: t('project.node.export_dir_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: true,
        validator(validate, value) {
          if (hdfsSpan.value && !value) {
            return new Error(t('project.node.export_dir_tips'))
          }
        }
      }
    },
    {
      type: 'editor',
      field: 'sourceMysqlSrcQuerySql',
      name: t('project.node.sql_statement'),
      span: editorSpan,
      validate: {
        trigger: ['blur', 'input'],
        required: true,
        validator(validate, value) {
          if (editorSpan.value && !value) {
            return new Error(t('project.node.sql_statement_tips'))
          }
        }
      }
    },
    ...useCustomParams({
      model,
      field: 'mapColumnHive',
      name: 'map_column_hive',
      isSimple: true,
      span: rdbmsSpan
    }),
    ...useCustomParams({
      model,
      field: 'mapColumnJava',
      name: 'map_column_java',
      isSimple: true,
      span: rdbmsSpan
    })
  ]
}
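
The net effect of the two watches above: the option list swaps when the model type flips, then the spans are recomputed. A minimal standalone sketch of that gating rule; ModelType and IOption below are local stand-ins for the project's types, not part of this change:

type ModelType = 'import' | 'export'
interface IOption {
  label: string
  value: string
}
const RDBMS_SOURCES: IOption[] = ['MYSQL', 'ORACLE', 'SQLSERVER', 'HANA'].map(
  (v) => ({ label: v, value: v })
)
const HADOOP_SOURCES: IOption[] = ['HIVE', 'HDFS'].map((v) => ({
  label: v,
  value: v
}))
// import reads from an RDBMS, so the source list is the RDBMS group;
// export reads from the Hadoop side, so the source list is HIVE/HDFS.
function pickSourceTypes(modelType: ModelType): IOption[] {
  return modelType === 'export' ? HADOOP_SOURCES : RDBMS_SOURCES
}
console.log(pickSourceTypes('import').map((o) => o.value)) // MYSQL, ORACLE, SQLSERVER, HANA
console.log(pickSourceTypes('export').map((o) => o.value)) // HIVE, HDFS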

724
dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-target-type.ts

@@ -15,393 +15,407 @@
* limitations under the License.
*/
import { h, onMounted, Ref, ref, watch } from 'vue'
import { useI18n } from 'vue-i18n'
import { useDatasource } from './use-sqoop-datasource'
import styles from '../index.module.scss'
import type { IJsonItem, IOption, SourceType } from '../types'
export function useTargetType(
  model: { [field: string]: any },
  unCustomSpan: Ref<number>
): IJsonItem[] {
  const { t } = useI18n()
  const hiveSpan = ref(24)
  const hdfsSpan = ref(0)
  const rdbmsSpan = ref(0)
  const dataSourceSpan = ref(0)
  const updateSpan = ref(0)
  const isChange: any = ref(false)
  const rdbmsSourceTypes = ref([
    { label: 'MYSQL', value: 'MYSQL' },
    { label: 'ORACLE', value: 'ORACLE' },
    { label: 'SQLSERVER', value: 'SQLSERVER' },
    { label: 'HANA', value: 'HANA' }
  ] as IOption[])
  const hadoopSourceTypes = ref([
    { label: 'HIVE', value: 'HIVE' },
    { label: 'HDFS', value: 'HDFS' }
  ] as IOption[])
  const targetTypes = ref()
  const resetSpan = () => {
    hiveSpan.value = unCustomSpan.value && model.targetType === 'HIVE' ? 24 : 0
    hdfsSpan.value = unCustomSpan.value && model.targetType === 'HDFS' ? 24 : 0
    rdbmsSpan.value =
      unCustomSpan.value &&
      rdbmsSourceTypes.value.some((target) => target.value === model.targetType)
        ? 24
        : 0
    dataSourceSpan.value =
      unCustomSpan.value &&
      rdbmsSourceTypes.value.some((target) => target.value === model.targetType)
        ? 24
        : 0
    updateSpan.value = rdbmsSpan.value && model.targetMysqlIsUpdate ? 24 : 0
  }
  const getTargetTypesBySourceType = (
    sourceType: SourceType,
    srcQueryType: string
  ): IOption[] => {
    switch (sourceType) {
      case 'MYSQL':
        // Both query modes ('0' form, '1' SQL) of a MySQL source target the
        // Hadoop side, so the branch collapses to a single return.
        return hadoopSourceTypes.value
      case 'HDFS':
      case 'HIVE':
        return rdbmsSourceTypes.value
      default:
        return hadoopSourceTypes.value
    }
  }
  const resetValue = () => {
    // Skip the first trigger (initial form load) so a saved task definition
    // echoes its stored values instead of being cleared.
    if (!isChange.value) {
      isChange.value = true
      return
    }
    switch (model.modelType) {
      case 'import':
        model.targetHiveDatabase = ''
        model.targetHiveTable = ''
        model.targetHdfsTargetPath = ''
        break
      case 'export':
        model.targetMysqlDatasource = ''
        model.targetMysqlTable = ''
        model.targetMysqlColumns = ''
        model.targetMysqlFieldsTerminated = ''
        model.targetMysqlLinesTerminated = ''
        break
      default:
        model.sourceMysqlDatasource = ''
    }
  }
  onMounted(() => {
    targetTypes.value = [...hadoopSourceTypes.value]
  })
  watch(
    () => [model.sourceType, model.srcQueryType],
    ([sourceType, srcQueryType]) => {
      targetTypes.value = getTargetTypesBySourceType(sourceType, srcQueryType)
      model.targetType = targetTypes.value[0].value
    }
  )
  watch(
    () => [unCustomSpan.value, model.targetType, model.targetMysqlIsUpdate],
    () => {
      resetValue()
      resetSpan()
    }
  )
  return [
    {
      type: 'custom',
      field: 'custom-title-target',
      span: unCustomSpan,
      widget: h(
        'div',
        { class: styles['field-title'] },
        t('project.node.data_target')
      )
    },
    {
      type: 'select',
      field: 'targetType',
      name: t('project.node.type'),
      span: unCustomSpan,
      options: targetTypes
    },
    {
      type: 'input',
      field: 'targetHiveDatabase',
      name: t('project.node.database'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.database_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: true,
        validator(validate, value) {
          if (hiveSpan.value && !value) {
            return new Error(t('project.node.database_tips'))
          }
        }
      }
    },
    {
      type: 'input',
      field: 'targetHiveTable',
      name: t('project.node.table'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.table')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: true,
        validator(rule, value) {
          if (hiveSpan.value && !value) {
            return new Error(t('project.node.table_tips'))
          }
        }
      }
    },
    {
      type: 'switch',
      field: 'targetHiveCreateTable',
      span: hiveSpan,
      name: t('project.node.create_hive_table')
    },
    {
      type: 'switch',
      field: 'targetHiveDropDelimiter',
      span: hiveSpan,
      name: t('project.node.drop_delimiter')
    },
    {
      type: 'switch',
      field: 'targetHiveOverWrite',
      span: hiveSpan,
      name: t('project.node.over_write_src')
    },
    {
      type: 'input',
      field: 'targetHiveTargetDir',
      name: t('project.node.hive_target_dir'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_target_dir_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHiveReplaceDelimiter',
      name: t('project.node.replace_delimiter'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.replace_delimiter_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHivePartitionKey',
      name: t('project.node.hive_partition_keys'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_partition_keys_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHivePartitionValue',
      name: t('project.node.hive_partition_values'),
      span: hiveSpan,
      props: {
        placeholder: t('project.node.hive_partition_values_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHdfsTargetPath',
      name: t('project.node.target_dir'),
      span: hdfsSpan,
      props: {
        placeholder: t('project.node.target_dir_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: true,
        validator(rule, value) {
          if (hdfsSpan.value && !value) {
            return new Error(t('project.node.target_dir_tips'))
          }
        }
      }
    },
    {
      type: 'switch',
      field: 'targetHdfsDeleteTargetDir',
      name: t('project.node.delete_target_dir'),
      span: hdfsSpan
    },
    {
      type: 'radio',
      field: 'targetHdfsCompressionCodec',
      name: t('project.node.compression_codec'),
      span: hdfsSpan,
      options: COMPRESSIONCODECS
    },
    {
      type: 'radio',
      field: 'targetHdfsFileType',
      name: t('project.node.file_type'),
      span: hdfsSpan,
      options: FILETYPES
    },
    {
      type: 'input',
      field: 'targetHdfsFieldsTerminated',
      name: t('project.node.fields_terminated'),
      span: hdfsSpan,
      props: {
        placeholder: t('project.node.fields_terminated_tips')
      }
    },
    {
      type: 'input',
      field: 'targetHdfsLinesTerminated',
      name: t('project.node.lines_terminated'),
      span: hdfsSpan,
      props: {
        placeholder: t('project.node.lines_terminated_tips')
      }
    },
    ...useDatasource(
      model,
      dataSourceSpan,
      'targetMysqlType',
      'targetMysqlDatasource'
    ),
    {
      type: 'input',
      field: 'targetMysqlTable',
      name: t('project.node.table'),
      span: rdbmsSpan,
      props: {
        placeholder: t('project.node.table_tips')
      },
      validate: {
        trigger: ['blur', 'input'],
        required: true,
        validator(validate, value) {
          if (rdbmsSpan.value && !value) {
            return new Error(t('project.node.table_tips'))
          }
        }
      }
    },
    {
      type: 'input',
      field: 'targetMysqlColumns',
      name: t('project.node.column'),
      span: rdbmsSpan,
      props: {
        placeholder: t('project.node.column_tips')
      }
    },
    {
      type: 'input',
      field: 'targetMysqlFieldsTerminated',
      name: t('project.node.fields_terminated'),
      span: rdbmsSpan,
      props: {
        placeholder: t('project.node.fields_terminated_tips')
      }
    },
    {
      type: 'input',
      field: 'targetMysqlLinesTerminated',
      name: t('project.node.lines_terminated'),
      span: rdbmsSpan,
      props: {
        placeholder: t('project.node.lines_terminated_tips')
      }
    },
    {
      type: 'switch',
      field: 'targetMysqlIsUpdate',
      span: rdbmsSpan,
      name: t('project.node.is_update')
    },
    {
      type: 'input',
      field: 'targetMysqlTargetUpdateKey',
      name: t('project.node.update_key'),
      span: updateSpan,
      props: {
        placeholder: t('project.node.update_key_tips')
      }
    },
    {
      type: 'radio',
      field: 'targetMysqlUpdateMode',
      name: t('project.node.update_mode'),
      span: updateSpan,
      options: [
        {
          label: t('project.node.only_update'),
          value: 'updateonly'
        },
        {
          label: t('project.node.allow_insert'),
          value: 'allowinsert'
        }
      ]
    }
  ]
}
const COMPRESSIONCODECS = [
  { label: 'snappy', value: 'snappy' },
  { label: 'lzo', value: 'lzo' },
  { label: 'gzip', value: 'gzip' },
  { label: 'no', value: '' }
]
const FILETYPES = [
  { label: 'avro', value: '--as-avrodatafile' },
  { label: 'sequence', value: '--as-sequencefile' },
  { label: 'text', value: '--as-textfile' },
  { label: 'parquet', value: '--as-parquetfile' }
]
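
The target side applies the symmetric pairing rule: an RDBMS source may only write to HIVE/HDFS, and a HIVE/HDFS source may only write to an RDBMS. A self-contained sketch of that rule, with all names local to the sketch:

type SketchSourceType =
  | 'MYSQL'
  | 'ORACLE'
  | 'SQLSERVER'
  | 'HANA'
  | 'HIVE'
  | 'HDFS'
function pickTargetTypes(sourceType: SketchSourceType): SketchSourceType[] {
  if (sourceType === 'HIVE' || sourceType === 'HDFS') {
    // Hadoop sources export into a relational database.
    return ['MYSQL', 'ORACLE', 'SQLSERVER', 'HANA']
  }
  // RDBMS sources (and anything unrecognized) import into the Hadoop side.
  return ['HIVE', 'HDFS']
}
console.log(pickTargetTypes('ORACLE')) // [ 'HIVE', 'HDFS' ]
console.log(pickTargetTypes('HDFS')) // [ 'MYSQL', 'ORACLE', 'SQLSERVER', 'HANA' ]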

132
dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts

@@ -25,13 +25,14 @@ import type {
ILocalParam,
IDependentParameters
} from './types'
import { ref } from 'vue'
export function formatParams(data: INodeData): {
processDefinitionCode: string
upstreamCodes: string
taskDefinitionJsonObj: object
} {
const rdbmsSourceTypes = ref(['MYSQL', 'ORACLE', 'SQLSERVER', 'HANA'])
const taskParams: ITaskParams = {}
if (data.taskType === 'SUB_PROCESS' || data.taskType === 'DYNAMIC') {
taskParams.processDefinitionCode = data.processDefinitionCode
@@ -109,79 +110,67 @@
taskParams.targetType = data.targetType
let targetParams: ISqoopTargetParams = {}
let sourceParams: ISqoopSourceParams = {}
  if (data.targetType === 'HIVE') {
    targetParams = {
      hiveDatabase: data.targetHiveDatabase,
      hiveTable: data.targetHiveTable,
      createHiveTable: data.targetHiveCreateTable,
      dropDelimiter: data.targetHiveDropDelimiter,
      hiveOverWrite: data.targetHiveOverWrite,
      hiveTargetDir: data.targetHiveTargetDir,
      replaceDelimiter: data.targetHiveReplaceDelimiter,
      hivePartitionKey: data.targetHivePartitionKey,
      hivePartitionValue: data.targetHivePartitionValue
    }
  } else if (data.targetType === 'HDFS') {
    targetParams = {
      targetPath: data.targetHdfsTargetPath,
      deleteTargetDir: data.targetHdfsDeleteTargetDir,
      compressionCodec: data.targetHdfsCompressionCodec,
      fileType: data.targetHdfsFileType,
      fieldsTerminated: data.targetHdfsFieldsTerminated,
      linesTerminated: data.targetHdfsLinesTerminated
    }
  } else if (rdbmsSourceTypes.value.some((target) => target === data.targetType)) {
    targetParams = {
      targetType: data.targetMysqlType,
      targetDatasource: data.targetMysqlDatasource,
      targetTable: data.targetMysqlTable,
      targetColumns: data.targetMysqlColumns,
      fieldsTerminated: data.targetMysqlFieldsTerminated,
      linesTerminated: data.targetMysqlLinesTerminated,
      isUpdate: data.targetMysqlIsUpdate,
      targetUpdateKey: data.targetMysqlTargetUpdateKey,
      targetUpdateMode: data.targetMysqlUpdateMode
    }
  }
  if (rdbmsSourceTypes.value.some((source) => source === data.sourceType)) {
    sourceParams = {
      srcTable: data.srcQueryType === '1' ? '' : data.srcTable,
      srcColumnType: data.srcQueryType === '1' ? '0' : data.srcColumnType,
      srcColumns:
        data.srcQueryType === '1' || data.srcColumnType === '0'
          ? ''
          : data.srcColumns,
      srcQuerySql:
        data.srcQueryType === '0' ? '' : data.sourceMysqlSrcQuerySql,
      srcQueryType: data.srcQueryType,
      srcType: data.sourceMysqlType,
      srcDatasource: data.sourceMysqlDatasource,
      mapColumnHive: data.mapColumnHive,
      mapColumnJava: data.mapColumnJava
    }
  } else if (data.sourceType === 'HDFS') {
    sourceParams = {
      exportDir: data.sourceHdfsExportDir
    }
  } else if (data.sourceType === 'HIVE') {
    sourceParams = {
      hiveDatabase: data.sourceHiveDatabase,
      hiveTable: data.sourceHiveTable,
      hivePartitionKey: data.sourceHivePartitionKey,
      hivePartitionValue: data.sourceHivePartitionValue
    }
  }
taskParams.targetParams = JSON.stringify(targetParams)
taskParams.sourceParams = JSON.stringify(sourceParams)
@@ -608,6 +597,7 @@ export function formatModel(data: ITaskData) {
const targetParams: ISqoopTargetParams = JSON.parse(
data.taskParams.targetParams
)
params.targetType = data.taskParams.targetType
params.targetHiveDatabase = targetParams.hiveDatabase
params.targetHiveTable = targetParams.hiveTable
params.targetHiveCreateTable = targetParams.createHiveTable
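
To make the serialized shape concrete, a hedged example of the params emitted for a MYSQL-to-HIVE import in SQL mode; every value below is invented for illustration, only the field names come from the code above:

// Illustrative payload only; values are made up.
const sourceParams = JSON.stringify({
  srcTable: '', // cleared because srcQueryType is '1' (SQL mode)
  srcColumnType: '0', // forced to '0' in SQL mode
  srcColumns: '',
  srcQuerySql: 'select id, name from t_user',
  srcQueryType: '1',
  srcType: 'MYSQL',
  srcDatasource: 1,
  mapColumnHive: [],
  mapColumnJava: []
})
const targetParams = JSON.stringify({
  hiveDatabase: 'dw',
  hiveTable: 't_user',
  createHiveTable: false,
  dropDelimiter: false,
  hiveOverWrite: true,
  hiveTargetDir: '',
  replaceDelimiter: '',
  hivePartitionKey: 'dt',
  hivePartitionValue: '2023-12-01'
})
console.log(sourceParams, targetParams)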

4
dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sqoop.ts

@@ -54,12 +54,12 @@ export function useSqoop({
sourceType: 'MYSQL',
srcQueryType: '1',
srcColumnType: '0',
targetType: 'HIVE',
sourceMysqlType: 'MYSQL',
targetHdfsDeleteTargetDir: true,
targetHdfsCompressionCodec: 'snappy',
targetHdfsFileType: '--as-avrodatafile',
targetMysqlType: 'MYSQL',
targetMysqlUpdateMode: 'allowinsert',
targetHiveCreateTable: false,
targetHiveDropDelimiter: false,

98
dolphinscheduler-ui/src/views/projects/task/components/node/use-task.ts

@@ -14,69 +14,63 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ref, Ref, unref } from 'vue'
import nodes from './tasks'
import getElementByJson from '@/components/form/get-elements-by-json'
import { useTaskNodeStore } from '@/store/project/task-node'
import { TASK_TYPES_MAP } from '@/store/project/task-type'
import type {
  EditWorkflowDefinition,
  FormRules,
  IFormItem,
  IJsonItem,
  INodeData,
  ITaskData
} from './types'
export function useTask({
  data,
  projectCode,
  from,
  readonly,
  definition
}: {
  data: ITaskData
  projectCode: number
  from?: number
  readonly?: boolean
  definition?: EditWorkflowDefinition
}): {
  elementsRef: Ref<IFormItem[]>
  rulesRef: Ref<FormRules>
  model: INodeData
} {
  const taskStore = useTaskNodeStore()
  taskStore.updateDefinition(unref(definition), data?.code)
  const jsonRef = ref([]) as Ref<IJsonItem[]>
  const elementsRef = ref([]) as Ref<IFormItem[]>
  const rulesRef = ref({})
  const params = {
    projectCode,
    from,
    readonly,
    data,
    jsonRef,
    updateElements: () => {
      getElements()
    }
  }
  const { model, json } = nodes[data.taskType || 'SHELL'](params)
  jsonRef.value = json
  model.preTasks = taskStore.getPreTasks
  model.name = taskStore.getName
  model.taskExecuteType =
    TASK_TYPES_MAP[data.taskType || 'SHELL'].taskExecuteType || 'BATCH'
  const getElements = () => {
    const { rules, elements } = getElementByJson(jsonRef.value, model)
    elementsRef.value = elements
    rulesRef.value = rules
  }
  getElements()
  return { elementsRef, rulesRef, model }
}
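
For orientation, a hedged sketch of a call site for this composable; the arguments below are illustrative and not part of this change:

import { useTask } from './use-task'
// Hypothetical caller: only taskType and projectCode matter for the sketch.
const { elementsRef, rulesRef, model } = useTask({
  data: { taskType: 'SQOOP' } as any,
  projectCode: 123456
})
// elementsRef drives the dynamic form items, rulesRef their validation
// rules, and model is the reactive task definition the items are bound to.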
