Browse Source

[Feature-15146][dolphinscheduler-task-sqoop] add sqoop source/target type (#15146)

* task list: sgoop node params  optimize

* security.ts  add  alarm_instance params

* 1 add SqoopTask params
2 add alert plugin aliyun-voice

* add license header

* commit sqhoop optimize

* pnpm-locl.yaml supplement annotation

* remove irrelevent commit.

* Code specification optimization

* optimize sqoop task ui

* Merge Code

* add the license header to  pnpm-locl.yaml

* format the code

* format the code

* Fix sqoop task echo error

---------

Co-authored-by: xujiaqiang <xujiaqiang@aimatech.com>
Co-authored-by: xujiaqiang <“xujiaqiangwz@163.com”>
Co-authored-by: David Zollo <dailidong66@gmail.com>
3.2.1-prepare
xujiaqiang 12 months ago committed by GitHub
parent
commit
159179ac95
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 11
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopConstants.java
  2. 1
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskChannel.java
  3. 48
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskExecutionContext.java
  4. 10
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java
  5. 9
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/ISourceGenerator.java
  6. 9
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/ITargetGenerator.java
  7. 31
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/SqoopJobGenerator.java
  8. 159
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HanaSourceGenerator.java
  9. 18
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HdfsSourceGenerator.java
  10. 37
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HiveSourceGenerator.java
  11. 201
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MySQLSourceGenerator.java
  12. 157
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/OracleSourceGenerator.java
  13. 163
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/SqlServerSourceGenerator.java
  14. 128
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HanaTargetGenerator.java
  15. 128
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/OracleTargetGenerator.java
  16. 130
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/SqlServerTargetGenerator.java
  17. 38
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/SourceCommonParameter.java
  18. 99
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/SqoopParameters.java
  19. 38
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/TargetCommonParameter.java
  20. 126
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHanaParameter.java
  21. 4
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHdfsParameter.java
  22. 4
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHiveParameter.java
  23. 15
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceMysqlParameter.java
  24. 126
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceOracleParameter.java
  25. 126
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceSqlServerParameter.java
  26. 123
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHanaParameter.java
  27. 4
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHdfsParameter.java
  28. 4
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHiveParameter.java
  29. 16
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetMysqlParameter.java
  30. 123
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetOracleParameter.java
  31. 123
      dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetSqlServerParameter.java
  32. 57
      dolphinscheduler-ui/pnpm-lock.yaml
  33. 2
      dolphinscheduler-ui/src/locales/en_US/project.ts
  34. 2
      dolphinscheduler-ui/src/locales/zh_CN/project.ts
  35. 142
      dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-datasource.ts
  36. 561
      dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-source-type.ts
  37. 724
      dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-target-type.ts
  38. 132
      dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts
  39. 4
      dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sqoop.ts
  40. 98
      dolphinscheduler-ui/src/views/projects/task/components/node/use-task.ts

11
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopConstants.java

@ -21,10 +21,11 @@ public final class SqoopConstants {
private SqoopConstants() { private SqoopConstants() {
} }
public static final String FORMAT_S_S_S = "%s%s%s";
// sqoop general param // sqoop general param
public static final String SQOOP = "sqoop"; public static final String SQOOP = "sqoop";
public static final String SQOOP_MR_JOB_NAME = "mapred.job.name"; public static final String SQOOP_MR_JOB_NAME = "mapred.job.name";
public static final Object SQOOP_EXPORT_RECORDS_PER_STATEMENT = "sqoop.export.records.per.statement";
public static final String SQOOP_PARALLELISM = "-m"; public static final String SQOOP_PARALLELISM = "-m";
public static final String FIELDS_TERMINATED_BY = "--fields-terminated-by"; public static final String FIELDS_TERMINATED_BY = "--fields-terminated-by";
public static final String LINES_TERMINATED_BY = "--lines-terminated-by"; public static final String LINES_TERMINATED_BY = "--lines-terminated-by";
@ -34,6 +35,7 @@ public final class SqoopConstants {
// sqoop db // sqoop db
public static final String DB_CONNECT = "--connect"; public static final String DB_CONNECT = "--connect";
public static final String DRIVER = "--driver";
public static final String DB_USERNAME = "--username"; public static final String DB_USERNAME = "--username";
public static final String DB_PWD = "--password"; public static final String DB_PWD = "--password";
public static final String TABLE = "--table"; public static final String TABLE = "--table";
@ -73,4 +75,11 @@ public final class SqoopConstants {
public static final String UPDATE_MODE = "--update-mode"; public static final String UPDATE_MODE = "--update-mode";
public static final String SQOOP_PASSWORD_REGEX = "(?<=(--password \")).+?(?=\")"; public static final String SQOOP_PASSWORD_REGEX = "(?<=(--password \")).+?(?=\")";
public static final String MYSQL = "MYSQL";
public static final String HIVE = "HIVE";
public static final String HDFS = "HDFS";
public static final String ORACLE = "ORACLE";
public static final String HANA = "HANA";
public static final String SQLSERVER = "SQLSERVER";
} }

1
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskChannel.java

@ -30,7 +30,6 @@ public class SqoopTaskChannel implements TaskChannel {
@Override @Override
public void cancelApplication(boolean status) { public void cancelApplication(boolean status) {
} }
@Override @Override

48
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskExecutionContext.java

@ -21,39 +21,25 @@ import org.apache.dolphinscheduler.spi.enums.DbType;
import java.io.Serializable; import java.io.Serializable;
/** /** master/worker task transport */
* master/worker task transport
*/
public class SqoopTaskExecutionContext implements Serializable { public class SqoopTaskExecutionContext implements Serializable {
/** /** dataSourceId */
* dataSourceId
*/
private int dataSourceId; private int dataSourceId;
/** /** sourcetype */
* sourcetype
*/
private DbType sourcetype; private DbType sourcetype;
/** /** sourceConnectionParams */
* sourceConnectionParams
*/
private String sourceConnectionParams; private String sourceConnectionParams;
/** /** dataTargetId */
* dataTargetId
*/
private int dataTargetId; private int dataTargetId;
/** /** targetType */
* targetType
*/
private DbType targetType; private DbType targetType;
/** /** targetConnectionParams */
* targetConnectionParams
*/
private String targetConnectionParams; private String targetConnectionParams;
public int getDataSourceId() { public int getDataSourceId() {
@ -107,12 +93,20 @@ public class SqoopTaskExecutionContext implements Serializable {
@Override @Override
public String toString() { public String toString() {
return "SqoopTaskExecutionContext{" return "SqoopTaskExecutionContext{"
+ "dataSourceId=" + dataSourceId + "dataSourceId="
+ ", sourcetype=" + sourcetype + dataSourceId
+ ", sourceConnectionParams='" + sourceConnectionParams + '\'' + ", sourcetype="
+ ", dataTargetId=" + dataTargetId + sourcetype
+ ", targetType=" + targetType + ", sourceConnectionParams='"
+ ", targetConnectionParams='" + targetConnectionParams + '\'' + sourceConnectionParams
+ '\''
+ ", dataTargetId="
+ dataTargetId
+ ", targetType="
+ targetType
+ ", targetConnectionParams='"
+ targetConnectionParams
+ '\''
+ '}'; + '}';
} }
} }

10
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java

@ -20,6 +20,7 @@ package org.apache.dolphinscheduler.plugin.task.sqoop.generator;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.D; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.D;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.FORMAT_S_S_S;
import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants;
@ -47,16 +48,21 @@ public class CommonGenerator {
.append(SPACE) .append(SPACE)
.append(sqoopParameters.getModelType()); .append(sqoopParameters.getModelType());
// sqoop sqoop.export.records.per.statement
commonSb.append(SPACE).append(D).append(SPACE)
.append(String.format(FORMAT_S_S_S, SqoopConstants.SQOOP_EXPORT_RECORDS_PER_STATEMENT,
EQUAL_SIGN, 1));
// sqoop map-reduce job name // sqoop map-reduce job name
commonSb.append(SPACE).append(D).append(SPACE) commonSb.append(SPACE).append(D).append(SPACE)
.append(String.format("%s%s%s", SqoopConstants.SQOOP_MR_JOB_NAME, .append(String.format(FORMAT_S_S_S, SqoopConstants.SQOOP_MR_JOB_NAME,
EQUAL_SIGN, sqoopParameters.getJobName())); EQUAL_SIGN, sqoopParameters.getJobName()));
// hadoop custom param // hadoop custom param
List<Property> hadoopCustomParams = sqoopParameters.getHadoopCustomParams(); List<Property> hadoopCustomParams = sqoopParameters.getHadoopCustomParams();
if (CollectionUtils.isNotEmpty(hadoopCustomParams)) { if (CollectionUtils.isNotEmpty(hadoopCustomParams)) {
for (Property hadoopCustomParam : hadoopCustomParams) { for (Property hadoopCustomParam : hadoopCustomParams) {
String hadoopCustomParamStr = String.format("%s%s%s", hadoopCustomParam.getProp(), String hadoopCustomParamStr = String.format(FORMAT_S_S_S, hadoopCustomParam.getProp(),
EQUAL_SIGN, hadoopCustomParam.getValue()); EQUAL_SIGN, hadoopCustomParam.getValue());
commonSb.append(SPACE).append(D) commonSb.append(SPACE).append(D)

9
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/ISourceGenerator.java

@ -20,17 +20,16 @@ package org.apache.dolphinscheduler.plugin.task.sqoop.generator;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
/** /** Source Generator Interface */
* Source Generator Interface
*/
public interface ISourceGenerator { public interface ISourceGenerator {
/** /**
* generate the source script * generate the source script
* *
* @param sqoopParameters sqoopParameters * @param sqoopParameters sqoopParameters
* @param sqoopTaskExecutionContext sqoopTaskExecutionContext * @param sqoopTaskExecutionContext sqoopTaskExecutionContext
* @return source script * @return source script
*/ */
String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext); String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext);
} }

9
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/ITargetGenerator.java

@ -20,17 +20,16 @@ package org.apache.dolphinscheduler.plugin.task.sqoop.generator;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
/** /** Target Generator Interface */
* Target Generator Interface
*/
public interface ITargetGenerator { public interface ITargetGenerator {
/** /**
* generate the target script * generate the target script
* *
* @param sqoopParameters sqoopParameters * @param sqoopParameters sqoopParameters
* @param sqoopTaskExecutionContext sqoopTaskExecutionContext * @param sqoopTaskExecutionContext sqoopTaskExecutionContext
* @return target script * @return target script
*/ */
String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext); String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext);
} }

31
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/SqoopJobGenerator.java

@ -17,14 +17,27 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.generator; package org.apache.dolphinscheduler.plugin.task.sqoop.generator;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HANA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HDFS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MYSQL;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.ORACLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.SQLSERVER;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopJobType; import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopJobType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.HanaSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.HdfsSourceGenerator; import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.HdfsSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.HiveSourceGenerator; import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.HiveSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.MySQLSourceGenerator; import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.MySQLSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.OracleSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources.SqlServerSourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.HanaTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.HdfsTargetGenerator; import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.HdfsTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.HiveTargetGenerator; import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.HiveTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.MySQLTargetGenerator; import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.MySQLTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.OracleTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets.SqlServerTargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
/** /**
@ -32,10 +45,6 @@ import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
*/ */
public class SqoopJobGenerator { public class SqoopJobGenerator {
private static final String MYSQL = "MYSQL";
private static final String HIVE = "HIVE";
private static final String HDFS = "HDFS";
/** /**
* target script generator * target script generator
*/ */
@ -61,7 +70,7 @@ public class SqoopJobGenerator {
/** /**
* get the final sqoop scripts * get the final sqoop scripts
* *
* @param sqoopParameters sqoop params * @param sqoopParameters sqoop params
* @param sqoopTaskExecutionContext * @param sqoopTaskExecutionContext
* @return sqoop scripts * @return sqoop scripts
*/ */
@ -100,6 +109,12 @@ public class SqoopJobGenerator {
return new HiveSourceGenerator(); return new HiveSourceGenerator();
case HDFS: case HDFS:
return new HdfsSourceGenerator(); return new HdfsSourceGenerator();
case ORACLE:
return new OracleSourceGenerator();
case HANA:
return new HanaSourceGenerator();
case SQLSERVER:
return new SqlServerSourceGenerator();
default: default:
return null; return null;
} }
@ -119,6 +134,12 @@ public class SqoopJobGenerator {
return new HiveTargetGenerator(); return new HiveTargetGenerator();
case HDFS: case HDFS:
return new HdfsTargetGenerator(); return new HdfsTargetGenerator();
case ORACLE:
return new OracleTargetGenerator();
case HANA:
return new HanaTargetGenerator();
case SQLSERVER:
return new SqlServerTargetGenerator();
default: default:
return null; return null;
} }

159
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HanaSourceGenerator.java

@ -0,0 +1,159 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DRIVER;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_JAVA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WHERE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WITHOUT_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopQueryType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ISourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHanaParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* hana source generator
*/
public class HanaSourceGenerator implements ISourceGenerator {
private static final Logger logger = LoggerFactory.getLogger(HanaSourceGenerator.class);
@Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder hanaSourceSb = new StringBuilder();
try {
SourceHanaParameter sourceHanaParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHanaParameter.class);
if (null == sourceHanaParameter)
return hanaSourceSb.toString();
BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
sqoopTaskExecutionContext.getSourcetype(),
sqoopTaskExecutionContext.getSourceConnectionParams());
if (null == baseDataSource)
return hanaSourceSb.toString();
hanaSourceSb.append(SPACE).append(DB_CONNECT)
.append(SPACE).append(DOUBLE_QUOTES)
.append(DataSourceUtils.getJdbcUrl(DbType.HANA, baseDataSource)).append(DOUBLE_QUOTES)
.append(SPACE).append(DRIVER)
.append(SPACE).append(DataSourceUtils.getDatasourceDriver(DbType.HANA))
.append(SPACE).append(DB_USERNAME)
.append(SPACE).append(baseDataSource.getUser())
.append(SPACE).append(DB_PWD)
.append(SPACE).append(DOUBLE_QUOTES)
.append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES);
// sqoop table & sql query
if (sourceHanaParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
if (StringUtils.isNotEmpty(sourceHanaParameter.getSrcTable())) {
hanaSourceSb.append(SPACE).append(TABLE)
.append(SPACE).append(sourceHanaParameter.getSrcTable());
}
if (StringUtils.isNotEmpty(sourceHanaParameter.getSrcColumns())) {
hanaSourceSb.append(SPACE).append(COLUMNS)
.append(SPACE).append(sourceHanaParameter.getSrcColumns());
}
} else if (sourceHanaParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
&& StringUtils.isNotEmpty(sourceHanaParameter.getSrcQuerySql())) {
String srcQuery = sourceHanaParameter.getSrcQuerySql();
hanaSourceSb.append(SPACE).append(QUERY)
.append(SPACE).append(DOUBLE_QUOTES).append(srcQuery);
if (srcQuery.toLowerCase().contains(QUERY_WHERE)) {
hanaSourceSb.append(SPACE).append(QUERY_CONDITION).append(DOUBLE_QUOTES);
} else {
hanaSourceSb.append(SPACE).append(QUERY_WITHOUT_CONDITION).append(DOUBLE_QUOTES);
}
}
// sqoop hive map column
buildColumnMapToHIve(hanaSourceSb, sourceHanaParameter);
// sqoop map column java
buildColumnMapToJava(hanaSourceSb, sourceHanaParameter);
} catch (Exception e) {
logger.error(String.format("Sqoop task hana source params build failed: [%s]", e.getMessage()));
}
return hanaSourceSb.toString();
}
private static void buildColumnMapToJava(StringBuilder hanaSourceSb, SourceHanaParameter sourceHanaParameter) {
List<Property> mapColumnJava = sourceHanaParameter.getMapColumnJava();
if (null != mapColumnJava && !mapColumnJava.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnJava) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
hanaSourceSb.append(SPACE).append(MAP_COLUMN_JAVA)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
private static void buildColumnMapToHIve(StringBuilder hanaSourceSb, SourceHanaParameter sourceHanaParameter) {
List<Property> mapColumnHive = sourceHanaParameter.getMapColumnHive();
if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnHive) {
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
hanaSourceSb.append(SPACE).append(MAP_COLUMN_HIVE)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
}

18
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HdfsSourceGenerator.java

@ -30,29 +30,31 @@ import org.apache.commons.lang3.StringUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
/** /** hdfs source generator */
* hdfs source generator
*/
@Slf4j @Slf4j
public class HdfsSourceGenerator implements ISourceGenerator { public class HdfsSourceGenerator implements ISourceGenerator {
@Override @Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) { public String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder hdfsSourceSb = new StringBuilder(); StringBuilder hdfsSourceSb = new StringBuilder();
try { try {
SourceHdfsParameter sourceHdfsParameter = SourceHdfsParameter sourceHdfsParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHdfsParameter.class); JSONUtils.parseObject(
sqoopParameters.getSourceParams(), SourceHdfsParameter.class);
if (null != sourceHdfsParameter) { if (null != sourceHdfsParameter) {
if (StringUtils.isNotEmpty(sourceHdfsParameter.getExportDir())) { if (StringUtils.isNotEmpty(sourceHdfsParameter.getExportDir())) {
hdfsSourceSb.append(SPACE).append(HDFS_EXPORT_DIR) hdfsSourceSb
.append(SPACE).append(sourceHdfsParameter.getExportDir()); .append(SPACE)
.append(HDFS_EXPORT_DIR)
.append(SPACE)
.append(sourceHdfsParameter.getExportDir());
} else { } else {
throw new IllegalArgumentException("Sqoop hdfs export dir is null"); throw new IllegalArgumentException("Sqoop hdfs export dir is null");
} }
} }
} catch (Exception e) { } catch (Exception e) {
log.error(String.format("Sqoop hdfs source parmas build failed: [%s]", e.getMessage())); log.error(String.format("Sqoop hdfs source parmas build failed: [%s]", e.getMessage()));

37
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HiveSourceGenerator.java

@ -33,38 +33,49 @@ import org.apache.commons.lang3.StringUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
/** /** hive source generator */
* hive source generator
*/
@Slf4j @Slf4j
public class HiveSourceGenerator implements ISourceGenerator { public class HiveSourceGenerator implements ISourceGenerator {
@Override @Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) { public String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder hiveSourceSb = new StringBuilder(); StringBuilder hiveSourceSb = new StringBuilder();
try { try {
SourceHiveParameter sourceHiveParameter = SourceHiveParameter sourceHiveParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHiveParameter.class); JSONUtils.parseObject(
sqoopParameters.getSourceParams(), SourceHiveParameter.class);
if (null != sourceHiveParameter) { if (null != sourceHiveParameter) {
if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveDatabase())) { if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveDatabase())) {
hiveSourceSb.append(SPACE).append(HCATALOG_DATABASE) hiveSourceSb
.append(SPACE).append(sourceHiveParameter.getHiveDatabase()); .append(SPACE)
.append(HCATALOG_DATABASE)
.append(SPACE)
.append(sourceHiveParameter.getHiveDatabase());
} }
if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveTable())) { if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveTable())) {
hiveSourceSb.append(SPACE).append(HCATALOG_TABLE) hiveSourceSb
.append(SPACE).append(sourceHiveParameter.getHiveTable()); .append(SPACE)
.append(HCATALOG_TABLE)
.append(SPACE)
.append(sourceHiveParameter.getHiveTable());
} }
if (StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionKey()) if (StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionKey())
&& StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionValue())) { && StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionValue())) {
hiveSourceSb.append(SPACE).append(HCATALOG_PARTITION_KEYS) hiveSourceSb
.append(SPACE).append(sourceHiveParameter.getHivePartitionKey()) .append(SPACE)
.append(SPACE).append(HCATALOG_PARTITION_VALUES) .append(HCATALOG_PARTITION_KEYS)
.append(SPACE).append(sourceHiveParameter.getHivePartitionValue()); .append(SPACE)
.append(sourceHiveParameter.getHivePartitionKey())
.append(SPACE)
.append(HCATALOG_PARTITION_VALUES)
.append(SPACE)
.append(sourceHiveParameter.getHivePartitionValue());
} }
} }
} catch (Exception e) { } catch (Exception e) {

201
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MySQLSourceGenerator.java

@ -59,91 +59,142 @@ import lombok.extern.slf4j.Slf4j;
public class MySQLSourceGenerator implements ISourceGenerator { public class MySQLSourceGenerator implements ISourceGenerator {
@Override @Override
public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) { public String generate(
SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
StringBuilder mysqlSourceSb = new StringBuilder(); StringBuilder mysqlSourceSb = new StringBuilder();
try { try {
SourceMysqlParameter sourceMysqlParameter = SourceMysqlParameter sourceMysqlParameter =
JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class); JSONUtils.parseObject(
sqoopParameters.getSourceParams(), SourceMysqlParameter.class);
if (null != sourceMysqlParameter) {
BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams( if (null == sourceMysqlParameter)
sqoopTaskExecutionContext.getSourcetype(), return mysqlSourceSb.toString();
sqoopTaskExecutionContext.getSourceConnectionParams()); BaseConnectionParam baseDataSource =
(BaseConnectionParam) DataSourceUtils.buildConnectionParams(
if (null != baseDataSource) { sqoopTaskExecutionContext.getSourcetype(),
sqoopTaskExecutionContext.getSourceConnectionParams());
mysqlSourceSb.append(SPACE).append(DB_CONNECT)
.append(SPACE).append(DOUBLE_QUOTES) if (null == baseDataSource)
.append(DataSourceUtils.getJdbcUrl(DbType.MYSQL, baseDataSource)).append(DOUBLE_QUOTES) return mysqlSourceSb.toString();
.append(SPACE).append(DB_USERNAME)
.append(SPACE).append(baseDataSource.getUser()) mysqlSourceSb
.append(SPACE).append(DB_PWD) .append(SPACE)
.append(SPACE).append(DOUBLE_QUOTES) .append(DB_CONNECT)
.append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES); .append(SPACE)
.append(DOUBLE_QUOTES)
// sqoop table & sql query .append(DataSourceUtils.getJdbcUrl(DbType.MYSQL, baseDataSource))
if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) { .append(DOUBLE_QUOTES)
if (StringUtils.isNotEmpty(sourceMysqlParameter.getSrcTable())) { .append(SPACE)
mysqlSourceSb.append(SPACE).append(TABLE) .append(DB_USERNAME)
.append(SPACE).append(sourceMysqlParameter.getSrcTable()); .append(SPACE)
} .append(baseDataSource.getUser())
.append(SPACE)
if (sourceMysqlParameter.getSrcColumnType() == SqoopColumnType.CUSTOMIZE_COLUMNS.getCode() .append(DB_PWD)
&& StringUtils.isNotEmpty(sourceMysqlParameter.getSrcColumns())) { .append(SPACE)
mysqlSourceSb.append(SPACE).append(COLUMNS) .append(DOUBLE_QUOTES)
.append(SPACE).append(sourceMysqlParameter.getSrcColumns()); .append(decodePassword(baseDataSource.getPassword()))
} .append(DOUBLE_QUOTES);
} else if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
&& StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())) { // sqoop table & sql query
if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
String srcQuery = sourceMysqlParameter.getSrcQuerySql(); if (StringUtils.isNotEmpty(sourceMysqlParameter.getSrcTable())) {
mysqlSourceSb.append(SPACE).append(QUERY) mysqlSourceSb
.append(SPACE).append(DOUBLE_QUOTES).append(srcQuery); .append(SPACE)
.append(TABLE)
if (srcQuery.toLowerCase().contains(QUERY_WHERE)) { .append(SPACE)
mysqlSourceSb.append(SPACE).append(QUERY_CONDITION).append(DOUBLE_QUOTES); .append(sourceMysqlParameter.getSrcTable());
} else { }
mysqlSourceSb.append(SPACE).append(QUERY_WITHOUT_CONDITION).append(DOUBLE_QUOTES);
} if (sourceMysqlParameter.getSrcColumnType() == SqoopColumnType.CUSTOMIZE_COLUMNS.getCode()
} && StringUtils.isNotEmpty(sourceMysqlParameter.getSrcColumns())) {
mysqlSourceSb
// sqoop hive map column .append(SPACE)
List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive(); .append(COLUMNS)
.append(SPACE)
if (null != mapColumnHive && !mapColumnHive.isEmpty()) { .append(sourceMysqlParameter.getSrcColumns());
StringBuilder columnMap = new StringBuilder(); }
for (Property item : mapColumnHive) { } else if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA); && StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())) {
}
String srcQuery = sourceMysqlParameter.getSrcQuerySql();
if (StringUtils.isNotEmpty(columnMap.toString())) { mysqlSourceSb
mysqlSourceSb.append(SPACE).append(MAP_COLUMN_HIVE) .append(SPACE)
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1)); .append(QUERY)
} .append(SPACE)
} .append(DOUBLE_QUOTES)
.append(srcQuery);
// sqoop map column java
List<Property> mapColumnJava = sourceMysqlParameter.getMapColumnJava(); if (srcQuery.toLowerCase().contains(QUERY_WHERE)) {
mysqlSourceSb
if (null != mapColumnJava && !mapColumnJava.isEmpty()) { .append(SPACE)
StringBuilder columnMap = new StringBuilder(); .append(QUERY_CONDITION)
for (Property item : mapColumnJava) { .append(DOUBLE_QUOTES);
columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA); } else {
} mysqlSourceSb
.append(SPACE)
if (StringUtils.isNotEmpty(columnMap.toString())) { .append(QUERY_WITHOUT_CONDITION)
mysqlSourceSb.append(SPACE).append(MAP_COLUMN_JAVA) .append(DOUBLE_QUOTES);
.append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
}
}
} }
} }
// sqoop hive map column
buildColumnMapToHIve(mysqlSourceSb, sourceMysqlParameter);
// sqoop map column java
buildColumnMapToJava(mysqlSourceSb, sourceMysqlParameter);
} catch (Exception e) { } catch (Exception e) {
log.error(String.format("Sqoop task mysql source params build failed: [%s]", e.getMessage())); log.error(
String.format(
"Sqoop task mysql source params build failed: [%s]", e.getMessage()));
} }
return mysqlSourceSb.toString(); return mysqlSourceSb.toString();
} }
private static void buildColumnMapToHIve(StringBuilder mysqlSourceSb, SourceMysqlParameter sourceMysqlParameter) {
List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive();
if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnHive) {
columnMap
.append(item.getProp())
.append(EQUAL_SIGN)
.append(item.getValue())
.append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
mysqlSourceSb
.append(SPACE)
.append(MAP_COLUMN_HIVE)
.append(SPACE)
.append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
private static void buildColumnMapToJava(StringBuilder mysqlSourceSb, SourceMysqlParameter sourceMysqlParameter) {
List<Property> mapColumnJava = sourceMysqlParameter.getMapColumnJava();
if (null != mapColumnJava && !mapColumnJava.isEmpty()) {
StringBuilder columnMap = new StringBuilder();
for (Property item : mapColumnJava) {
columnMap
.append(item.getProp())
.append(EQUAL_SIGN)
.append(item.getValue())
.append(COMMA);
}
if (StringUtils.isNotEmpty(columnMap.toString())) {
mysqlSourceSb
.append(SPACE)
.append(MAP_COLUMN_JAVA)
.append(SPACE)
.append(columnMap.substring(0, columnMap.length() - 1));
}
}
}
} }

157
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/OracleSourceGenerator.java

@ -0,0 +1,157 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_JAVA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WHERE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WITHOUT_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopQueryType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ISourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceOracleParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Oracle source generator: builds the source-side fragment of a sqoop
 * command line (connection, table/query selection, column mappings) from
 * the task's JSON-encoded source parameters.
 */
public class OracleSourceGenerator implements ISourceGenerator {

    private static final Logger logger = LoggerFactory.getLogger(OracleSourceGenerator.class);

    /**
     * Generate the Oracle source arguments for the sqoop command.
     *
     * @param sqoopParameters           task parameters carrying the JSON source params
     * @param sqoopTaskExecutionContext context providing the source datasource type and connection params
     * @return the built argument string; empty when params/datasource are missing or on any error
     */
    @Override
    public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
        StringBuilder oracleSourceSb = new StringBuilder();
        try {
            SourceOracleParameter sourceOracleParameter =
                    JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceOracleParameter.class);
            if (null == sourceOracleParameter) {
                return oracleSourceSb.toString();
            }
            BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
                    sqoopTaskExecutionContext.getSourcetype(),
                    sqoopTaskExecutionContext.getSourceConnectionParams());
            if (null == baseDataSource) {
                return oracleSourceSb.toString();
            }
            // connection url, username and double-quoted decoded password
            oracleSourceSb.append(SPACE).append(DB_CONNECT)
                    .append(SPACE).append(DOUBLE_QUOTES)
                    .append(DataSourceUtils.getJdbcUrl(DbType.ORACLE, baseDataSource)).append(DOUBLE_QUOTES)
                    .append(SPACE).append(DB_USERNAME)
                    .append(SPACE).append(baseDataSource.getUser())
                    .append(SPACE).append(DB_PWD)
                    .append(SPACE).append(DOUBLE_QUOTES)
                    .append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES);
            // sqoop table & sql query
            if (sourceOracleParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
                if (StringUtils.isNotEmpty(sourceOracleParameter.getSrcTable())) {
                    oracleSourceSb.append(SPACE).append(TABLE)
                            .append(SPACE).append(sourceOracleParameter.getSrcTable());
                }
                if (StringUtils.isNotEmpty(sourceOracleParameter.getSrcColumns())) {
                    oracleSourceSb.append(SPACE).append(COLUMNS)
                            .append(SPACE).append(sourceOracleParameter.getSrcColumns());
                }
            } else if (sourceOracleParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
                    && StringUtils.isNotEmpty(sourceOracleParameter.getSrcQuerySql())) {
                String srcQuery = sourceOracleParameter.getSrcQuerySql();
                oracleSourceSb.append(SPACE).append(QUERY)
                        .append(SPACE).append(DOUBLE_QUOTES).append(srcQuery);
                // choose the AND/WHERE form of the $CONDITIONS clause depending on
                // whether the user query already contains a WHERE
                if (srcQuery.toLowerCase().contains(QUERY_WHERE)) {
                    oracleSourceSb.append(SPACE).append(QUERY_CONDITION).append(DOUBLE_QUOTES);
                } else {
                    oracleSourceSb.append(SPACE).append(QUERY_WITHOUT_CONDITION).append(DOUBLE_QUOTES);
                }
            }
            // sqoop hive map column
            buildColumnMapToHive(oracleSourceSb, sourceOracleParameter);
            // sqoop map column java
            buildColumnMapToJava(oracleSourceSb, sourceOracleParameter);
        } catch (Exception e) {
            logger.error(String.format("Sqoop task oracle source params build failed: [%s]", e.getMessage()));
        }
        return oracleSourceSb.toString();
    }

    /** Appends the map-column-java flag with comma-joined prop=value pairs, when any are configured. */
    private static void buildColumnMapToJava(StringBuilder oracleSourceSb,
                                             SourceOracleParameter sourceOracleParameter) {
        List<Property> mapColumnJava = sourceOracleParameter.getMapColumnJava();
        if (null != mapColumnJava && !mapColumnJava.isEmpty()) {
            StringBuilder columnMap = new StringBuilder();
            for (Property item : mapColumnJava) {
                columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
            }
            if (StringUtils.isNotEmpty(columnMap.toString())) {
                // drop the trailing comma before appending
                oracleSourceSb.append(SPACE).append(MAP_COLUMN_JAVA)
                        .append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
            }
        }
    }

    /**
     * Appends the map-column-hive flag with comma-joined prop=value pairs, when any are configured.
     * (Renamed from the typo'd buildColumnMapToHIve for consistency with SqlServerSourceGenerator.)
     */
    private static void buildColumnMapToHive(StringBuilder oracleSourceSb,
                                             SourceOracleParameter sourceOracleParameter) {
        List<Property> mapColumnHive = sourceOracleParameter.getMapColumnHive();
        if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
            StringBuilder columnMap = new StringBuilder();
            for (Property item : mapColumnHive) {
                columnMap.append(item.getProp()).append(EQUAL_SIGN).append(item.getValue()).append(COMMA);
            }
            if (StringUtils.isNotEmpty(columnMap.toString())) {
                // drop the trailing comma before appending
                oracleSourceSb.append(SPACE).append(MAP_COLUMN_HIVE)
                        .append(SPACE).append(columnMap.substring(0, columnMap.length() - 1));
            }
        }
    }
}

163
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/SqlServerSourceGenerator.java

@ -0,0 +1,163 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.COMMA;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DRIVER;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MAP_COLUMN_JAVA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WHERE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.QUERY_WITHOUT_CONDITION;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopQueryType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ISourceGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceSqlServerParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * SQL Server source generator: assembles the source-side portion of a sqoop
 * command line (connection/driver args, table or free-form query selection,
 * and hive/java column mappings) from the task's JSON source parameters.
 */
public class SqlServerSourceGenerator implements ISourceGenerator {

    private static final Logger logger = LoggerFactory.getLogger(SqlServerSourceGenerator.class);

    /**
     * Build the SQL Server source arguments for the sqoop command.
     *
     * @param sqoopParameters           task parameters carrying the JSON source params
     * @param sqoopTaskExecutionContext context providing the source datasource type and connection params
     * @return the built argument string; empty when params/datasource are missing or on any error
     */
    @Override
    public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
        StringBuilder result = new StringBuilder();
        try {
            SourceSqlServerParameter sourceParam =
                    JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceSqlServerParameter.class);
            if (sourceParam == null) {
                return result.toString();
            }
            BaseConnectionParam connectionParam = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
                    sqoopTaskExecutionContext.getSourcetype(),
                    sqoopTaskExecutionContext.getSourceConnectionParams());
            if (connectionParam == null) {
                return result.toString();
            }
            appendConnectionArgs(result, connectionParam);
            appendQueryArgs(result, sourceParam);
            // sqoop hive map column, then java map column (order matters for the command line)
            appendColumnMapping(result, MAP_COLUMN_HIVE, sourceParam.getMapColumnHive());
            appendColumnMapping(result, MAP_COLUMN_JAVA, sourceParam.getMapColumnJava());
        } catch (Exception e) {
            logger.error(String.format("Sqoop task sqlServer source params build failed: [%s]", e.getMessage()));
        }
        return result.toString();
    }

    /** Appends connect url, JDBC driver, username and double-quoted decoded password. */
    private static void appendConnectionArgs(StringBuilder sb, BaseConnectionParam connectionParam) {
        sb.append(SPACE).append(DB_CONNECT)
                .append(SPACE).append(DOUBLE_QUOTES)
                .append(DataSourceUtils.getJdbcUrl(DbType.SQLSERVER, connectionParam)).append(DOUBLE_QUOTES)
                .append(SPACE).append(DRIVER)
                .append(SPACE).append(DataSourceUtils.getDatasourceDriver(DbType.SQLSERVER))
                .append(SPACE).append(DB_USERNAME)
                .append(SPACE).append(connectionParam.getUser())
                .append(SPACE).append(DB_PWD)
                .append(SPACE).append(DOUBLE_QUOTES)
                .append(decodePassword(connectionParam.getPassword())).append(DOUBLE_QUOTES);
    }

    /** Appends either table/columns (FORM mode) or a quoted free-form query with a $CONDITIONS clause (SQL mode). */
    private static void appendQueryArgs(StringBuilder sb, SourceSqlServerParameter sourceParam) {
        if (sourceParam.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
            if (StringUtils.isNotEmpty(sourceParam.getSrcTable())) {
                sb.append(SPACE).append(TABLE)
                        .append(SPACE).append(sourceParam.getSrcTable());
            }
            if (StringUtils.isNotEmpty(sourceParam.getSrcColumns())) {
                sb.append(SPACE).append(COLUMNS)
                        .append(SPACE).append(sourceParam.getSrcColumns());
            }
            return;
        }
        if (sourceParam.getSrcQueryType() == SqoopQueryType.SQL.getCode()
                && StringUtils.isNotEmpty(sourceParam.getSrcQuerySql())) {
            String sql = sourceParam.getSrcQuerySql();
            sb.append(SPACE).append(QUERY)
                    .append(SPACE).append(DOUBLE_QUOTES).append(sql);
            // AND form when the user query already has a WHERE, plain WHERE form otherwise
            String conditionFlag =
                    sql.toLowerCase().contains(QUERY_WHERE) ? QUERY_CONDITION : QUERY_WITHOUT_CONDITION;
            sb.append(SPACE).append(conditionFlag).append(DOUBLE_QUOTES);
        }
    }

    /** Appends the given mapping flag with comma-joined prop=value pairs, when any properties are configured. */
    private static void appendColumnMapping(StringBuilder sb, String flag, List<Property> properties) {
        if (properties == null || properties.isEmpty()) {
            return;
        }
        StringBuilder joined = new StringBuilder();
        for (Property property : properties) {
            joined.append(property.getProp()).append(EQUAL_SIGN).append(property.getValue()).append(COMMA);
        }
        if (StringUtils.isNotEmpty(joined.toString())) {
            // drop the trailing comma before appending
            sb.append(SPACE).append(flag)
                    .append(SPACE).append(joined.substring(0, joined.length() - 1));
        }
    }
}

128
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HanaTargetGenerator.java

@ -0,0 +1,128 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SINGLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DRIVER;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.FIELDS_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.LINES_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_KEY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_MODE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ITargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHanaParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * HANA target generator: builds the target-side fragment of a sqoop command
 * line (connection/driver args, target table/columns, field/line terminators
 * and update-mode flags) from the task's JSON target parameters.
 */
public class HanaTargetGenerator implements ITargetGenerator {

    private static final Logger logger = LoggerFactory.getLogger(HanaTargetGenerator.class);

    /**
     * Generate the HANA target arguments for the sqoop command.
     *
     * @param sqoopParameters           task parameters carrying the JSON target params
     * @param sqoopTaskExecutionContext context providing the target datasource type and connection params
     * @return the built argument string; empty when params/datasource are missing or on any error
     */
    @Override
    public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
        StringBuilder hanaTargetSb = new StringBuilder();
        try {
            TargetHanaParameter targetHanaParameter =
                    JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetHanaParameter.class);
            if (null == targetHanaParameter || targetHanaParameter.getTargetDatasource() == 0) {
                return hanaTargetSb.toString();
            }
            // get datasource
            BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
                    sqoopTaskExecutionContext.getTargetType(),
                    sqoopTaskExecutionContext.getTargetConnectionParams());
            if (null == baseDataSource) {
                return hanaTargetSb.toString();
            }
            // connection url, driver, username, quoted decoded password and target table
            hanaTargetSb.append(SPACE).append(DB_CONNECT)
                    .append(SPACE).append(DOUBLE_QUOTES)
                    .append(DataSourceUtils.getJdbcUrl(DbType.HANA, baseDataSource)).append(DOUBLE_QUOTES)
                    .append(SPACE).append(DRIVER)
                    .append(SPACE).append(DataSourceUtils.getDatasourceDriver(DbType.HANA))
                    .append(SPACE).append(DB_USERNAME)
                    .append(SPACE).append(baseDataSource.getUser())
                    .append(SPACE).append(DB_PWD)
                    .append(SPACE).append(DOUBLE_QUOTES)
                    .append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES)
                    .append(SPACE).append(TABLE)
                    .append(SPACE).append(targetHanaParameter.getTargetTable());
            if (StringUtils.isNotEmpty(targetHanaParameter.getTargetColumns())) {
                hanaTargetSb.append(SPACE).append(COLUMNS)
                        .append(SPACE).append(targetHanaParameter.getTargetColumns());
            }
            if (StringUtils.isNotEmpty(targetHanaParameter.getFieldsTerminated())) {
                hanaTargetSb.append(SPACE).append(FIELDS_TERMINATED_BY);
                // already-quoted values pass through unchanged; bare values get single quotes
                // (use the SINGLE_QUOTES constant, consistent with the lines-terminated branch)
                if (targetHanaParameter.getFieldsTerminated().contains(SINGLE_QUOTES)) {
                    hanaTargetSb.append(SPACE).append(targetHanaParameter.getFieldsTerminated());
                } else {
                    hanaTargetSb.append(SPACE).append(SINGLE_QUOTES)
                            .append(targetHanaParameter.getFieldsTerminated()).append(SINGLE_QUOTES);
                }
            }
            if (StringUtils.isNotEmpty(targetHanaParameter.getLinesTerminated())) {
                hanaTargetSb.append(SPACE).append(LINES_TERMINATED_BY);
                if (targetHanaParameter.getLinesTerminated().contains(SINGLE_QUOTES)) {
                    hanaTargetSb.append(SPACE).append(targetHanaParameter.getLinesTerminated());
                } else {
                    hanaTargetSb.append(SPACE).append(SINGLE_QUOTES)
                            .append(targetHanaParameter.getLinesTerminated()).append(SINGLE_QUOTES);
                }
            }
            // update flags only when update is enabled and both key and mode are present
            if (targetHanaParameter.getIsUpdate()
                    && StringUtils.isNotEmpty(targetHanaParameter.getTargetUpdateKey())
                    && StringUtils.isNotEmpty(targetHanaParameter.getTargetUpdateMode())) {
                hanaTargetSb.append(SPACE).append(UPDATE_KEY)
                        .append(SPACE).append(targetHanaParameter.getTargetUpdateKey())
                        .append(SPACE).append(UPDATE_MODE)
                        .append(SPACE).append(targetHanaParameter.getTargetUpdateMode());
            }
        } catch (Exception e) {
            logger.error(String.format("Sqoop hana target params build failed: [%s]", e.getMessage()));
        }
        return hanaTargetSb.toString();
    }
}

128
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/OracleTargetGenerator.java

@ -0,0 +1,128 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SINGLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.FIELDS_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.LINES_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_KEY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_MODE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ITargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetOracleParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Generates the sqoop command-line fragment for an Oracle export target.
 */
public class OracleTargetGenerator implements ITargetGenerator {

    private static final Logger logger = LoggerFactory.getLogger(OracleTargetGenerator.class);

    /**
     * Build the Oracle target portion of the sqoop command:
     * connect/username/password/table plus optional columns, field/line
     * terminators and update-export options.
     *
     * @param sqoopParameters           task parameters carrying the JSON-encoded target params
     * @param sqoopTaskExecutionContext context carrying the resolved target datasource type and connection params
     * @return the argument fragment, or an empty string when the target params are absent,
     *         no datasource is selected (id 0), or the connection params cannot be built
     */
    @Override
    public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
        StringBuilder oracleTargetSb = new StringBuilder();
        try {
            TargetOracleParameter targetOracleParameter =
                    JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetOracleParameter.class);
            // datasource id 0 means "none selected" — nothing to generate
            if (null == targetOracleParameter || targetOracleParameter.getTargetDatasource() == 0) {
                return oracleTargetSb.toString();
            }
            // resolve the connection parameters of the selected target datasource
            BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
                    sqoopTaskExecutionContext.getTargetType(),
                    sqoopTaskExecutionContext.getTargetConnectionParams());
            if (null == baseDataSource) {
                return oracleTargetSb.toString();
            }
            oracleTargetSb.append(SPACE).append(DB_CONNECT)
                    .append(SPACE).append(DOUBLE_QUOTES)
                    .append(DataSourceUtils.getJdbcUrl(DbType.ORACLE, baseDataSource)).append(DOUBLE_QUOTES)
                    .append(SPACE).append(DB_USERNAME)
                    .append(SPACE).append(baseDataSource.getUser())
                    .append(SPACE).append(DB_PWD)
                    .append(SPACE).append(DOUBLE_QUOTES)
                    .append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES)
                    .append(SPACE).append(TABLE)
                    .append(SPACE).append(targetOracleParameter.getTargetTable());
            if (StringUtils.isNotEmpty(targetOracleParameter.getTargetColumns())) {
                oracleTargetSb.append(SPACE).append(COLUMNS)
                        .append(SPACE).append(targetOracleParameter.getTargetColumns());
            }
            if (StringUtils.isNotEmpty(targetOracleParameter.getFieldsTerminated())) {
                oracleTargetSb.append(SPACE).append(FIELDS_TERMINATED_BY);
                // a value that already contains a quote is passed through un-quoted
                // (was contains("'"); use the SINGLE_QUOTES constant, as the lines check below does)
                if (targetOracleParameter.getFieldsTerminated().contains(SINGLE_QUOTES)) {
                    oracleTargetSb.append(SPACE).append(targetOracleParameter.getFieldsTerminated());
                } else {
                    oracleTargetSb.append(SPACE).append(SINGLE_QUOTES)
                            .append(targetOracleParameter.getFieldsTerminated()).append(SINGLE_QUOTES);
                }
            }
            if (StringUtils.isNotEmpty(targetOracleParameter.getLinesTerminated())) {
                oracleTargetSb.append(SPACE).append(LINES_TERMINATED_BY);
                if (targetOracleParameter.getLinesTerminated().contains(SINGLE_QUOTES)) {
                    oracleTargetSb.append(SPACE).append(targetOracleParameter.getLinesTerminated());
                } else {
                    oracleTargetSb.append(SPACE).append(SINGLE_QUOTES)
                            .append(targetOracleParameter.getLinesTerminated()).append(SINGLE_QUOTES);
                }
            }
            // update export only when the flag is set and both key and mode are provided
            if (targetOracleParameter.getIsUpdate()
                    && StringUtils.isNotEmpty(targetOracleParameter.getTargetUpdateKey())
                    && StringUtils.isNotEmpty(targetOracleParameter.getTargetUpdateMode())) {
                oracleTargetSb.append(SPACE).append(UPDATE_KEY)
                        .append(SPACE).append(targetOracleParameter.getTargetUpdateKey())
                        .append(SPACE).append(UPDATE_MODE)
                        .append(SPACE).append(targetOracleParameter.getTargetUpdateMode());
            }
        } catch (Exception e) {
            // log with the throwable so the stack trace is preserved
            // (was String.format of e.getMessage(), which drops the trace)
            logger.error("Sqoop oracle target params build failed", e);
        }
        return oracleTargetSb.toString();
    }
}

130
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/SqlServerTargetGenerator.java

@ -0,0 +1,130 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets;
import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SINGLE_QUOTES;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SPACE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_USERNAME;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.FIELDS_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.LINES_TERMINATED_BY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TABLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_KEY;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.UPDATE_MODE;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ITargetGenerator;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetSqlServerParameter;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Generates the sqoop command-line fragment for a SQL Server export target.
 */
public class SqlServerTargetGenerator implements ITargetGenerator {

    private static final Logger logger = LoggerFactory.getLogger(SqlServerTargetGenerator.class);

    /**
     * Build the SQL Server target portion of the sqoop command:
     * connect/username/password/table plus optional columns, field/line
     * terminators and update-export options.
     *
     * @param sqoopParameters           task parameters carrying the JSON-encoded target params
     * @param sqoopTaskExecutionContext context carrying the resolved target datasource type and connection params
     * @return the argument fragment, or an empty string when the target params are absent,
     *         no datasource is selected (id 0), or the connection params cannot be built
     */
    @Override
    public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) {
        // renamed from "oracleTargetSb" — copy-paste from the Oracle generator
        StringBuilder sqlServerTargetSb = new StringBuilder();
        try {
            TargetSqlServerParameter targetSqlServerParameter =
                    JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetSqlServerParameter.class);
            // datasource id 0 means "none selected" — nothing to generate
            if (null == targetSqlServerParameter || targetSqlServerParameter.getTargetDatasource() == 0) {
                return sqlServerTargetSb.toString();
            }
            // resolve the connection parameters of the selected target datasource
            BaseConnectionParam baseDataSource = (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
                    sqoopTaskExecutionContext.getTargetType(),
                    sqoopTaskExecutionContext.getTargetConnectionParams());
            if (null == baseDataSource) {
                return sqlServerTargetSb.toString();
            }
            sqlServerTargetSb.append(SPACE).append(DB_CONNECT)
                    .append(SPACE).append(DOUBLE_QUOTES)
                    .append(DataSourceUtils.getJdbcUrl(DbType.SQLSERVER, baseDataSource)).append(DOUBLE_QUOTES)
                    // .append(SPACE).append(DRIVER)
                    // NOTE(review): the driver class is appended WITHOUT a "--driver" flag
                    // (the flag append above is commented out), which yields a bare token in
                    // the generated command — likely malformed; confirm and restore the flag.
                    .append(SPACE).append(DataSourceUtils.getDatasourceDriver(DbType.SQLSERVER))
                    .append(SPACE).append(DB_USERNAME)
                    .append(SPACE).append(baseDataSource.getUser())
                    .append(SPACE).append(DB_PWD)
                    .append(SPACE).append(DOUBLE_QUOTES)
                    .append(decodePassword(baseDataSource.getPassword())).append(DOUBLE_QUOTES)
                    .append(SPACE).append(TABLE)
                    .append(SPACE).append(targetSqlServerParameter.getTargetTable());
            if (StringUtils.isNotEmpty(targetSqlServerParameter.getTargetColumns())) {
                sqlServerTargetSb.append(SPACE).append(COLUMNS)
                        .append(SPACE).append(targetSqlServerParameter.getTargetColumns());
            }
            if (StringUtils.isNotEmpty(targetSqlServerParameter.getFieldsTerminated())) {
                sqlServerTargetSb.append(SPACE).append(FIELDS_TERMINATED_BY);
                // a value that already contains a quote is passed through un-quoted
                if (targetSqlServerParameter.getFieldsTerminated().contains(SINGLE_QUOTES)) {
                    sqlServerTargetSb.append(SPACE).append(targetSqlServerParameter.getFieldsTerminated());
                } else {
                    sqlServerTargetSb.append(SPACE).append(SINGLE_QUOTES)
                            .append(targetSqlServerParameter.getFieldsTerminated()).append(SINGLE_QUOTES);
                }
            }
            if (StringUtils.isNotEmpty(targetSqlServerParameter.getLinesTerminated())) {
                sqlServerTargetSb.append(SPACE).append(LINES_TERMINATED_BY);
                if (targetSqlServerParameter.getLinesTerminated().contains(SINGLE_QUOTES)) {
                    sqlServerTargetSb.append(SPACE).append(targetSqlServerParameter.getLinesTerminated());
                } else {
                    sqlServerTargetSb.append(SPACE).append(SINGLE_QUOTES)
                            .append(targetSqlServerParameter.getLinesTerminated()).append(SINGLE_QUOTES);
                }
            }
            // update export only when the flag is set and both key and mode are provided
            if (targetSqlServerParameter.getIsUpdate()
                    && StringUtils.isNotEmpty(targetSqlServerParameter.getTargetUpdateKey())
                    && StringUtils.isNotEmpty(targetSqlServerParameter.getTargetUpdateMode())) {
                sqlServerTargetSb.append(SPACE).append(UPDATE_KEY)
                        .append(SPACE).append(targetSqlServerParameter.getTargetUpdateKey())
                        .append(SPACE).append(UPDATE_MODE)
                        .append(SPACE).append(targetSqlServerParameter.getTargetUpdateMode());
            }
        } catch (Exception e) {
            // fixed message (said "oracle") and log the throwable to keep the stack trace
            logger.error("Sqoop sqlserver target params build failed", e);
        }
        return sqlServerTargetSb.toString();
    }
}

38
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/SourceCommonParameter.java

@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter;
/**
 * Common fields shared by every sqoop source parameter type;
 * concrete Source*Parameter classes extend this.
 */
public class SourceCommonParameter {

    /** id of the source datasource; 0 is treated as "not selected" by callers */
    protected int srcDatasource;

    /** @return the selected source datasource id (0 when none) */
    public int getSrcDatasource() {
        return this.srcDatasource;
    }

    /** @param srcDatasource the source datasource id to select */
    public void setSrcDatasource(int srcDatasource) {
        this.srcDatasource = srcDatasource;
    }
}

99
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/SqoopParameters.java

@ -17,6 +17,13 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter; package org.apache.dolphinscheduler.plugin.task.sqoop.parameter;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HANA;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HDFS;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HIVE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.MYSQL;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.ORACLE;
import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.SQLSERVER;
import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.task.api.enums.ResourceType; import org.apache.dolphinscheduler.plugin.task.api.enums.ResourceType;
import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.model.Property;
@ -25,8 +32,18 @@ import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.DataSourc
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopJobType; import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopJobType;
import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHanaParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHdfsParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHiveParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceMysqlParameter; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceMysqlParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceOracleParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceSqlServerParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHanaParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHdfsParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHiveParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetMysqlParameter; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetMysqlParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetOracleParameter;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetSqlServerParameter;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@ -223,16 +240,16 @@ public class SqoopParameters extends AbstractParameters {
return resources; return resources;
} }
SourceMysqlParameter sourceMysqlParameter = SourceCommonParameter sourceParameter = (SourceCommonParameter) JSONUtils.parseObject(this.getSourceParams(),
JSONUtils.parseObject(this.getSourceParams(), SourceMysqlParameter.class); getSourceParameter(this.getSourceType()));
if (sourceMysqlParameter.getSrcDatasource() != 0) { if (sourceParameter.getSrcDatasource() != 0) {
resources.put(ResourceType.DATASOURCE, sourceMysqlParameter.getSrcDatasource()); resources.put(ResourceType.DATASOURCE, sourceParameter.getSrcDatasource());
} }
TargetMysqlParameter targetMysqlParameter = TargetCommonParameter targetParameter = (TargetCommonParameter) JSONUtils.parseObject(this.getTargetParams(),
JSONUtils.parseObject(this.getTargetParams(), TargetMysqlParameter.class); getTargetParameter(this.getTargetType()));
if (targetMysqlParameter.getTargetDatasource() != 0) { if (targetParameter.getTargetDatasource() != 0) {
resources.put(ResourceType.DATASOURCE, targetMysqlParameter.getTargetDatasource()); resources.put(ResourceType.DATASOURCE, targetParameter.getTargetDatasource());
} }
return resources; return resources;
@ -245,28 +262,78 @@ public class SqoopParameters extends AbstractParameters {
return sqoopTaskExecutionContext; return sqoopTaskExecutionContext;
} }
SourceMysqlParameter sourceMysqlParameter = SourceCommonParameter sourceParameter = (SourceCommonParameter) JSONUtils.parseObject(this.getSourceParams(),
JSONUtils.parseObject(this.getSourceParams(), SourceMysqlParameter.class); getSourceParameter(this.getSourceType()));
TargetMysqlParameter targetMysqlParameter = TargetCommonParameter targetParameter = (TargetCommonParameter) JSONUtils.parseObject(this.getTargetParams(),
JSONUtils.parseObject(this.getTargetParams(), TargetMysqlParameter.class); getTargetParameter(this.getTargetType()));
DataSourceParameters dataSource = (DataSourceParameters) parametersHelper DataSourceParameters dataSource = (DataSourceParameters) parametersHelper
.getResourceParameters(ResourceType.DATASOURCE, sourceMysqlParameter.getSrcDatasource()); .getResourceParameters(ResourceType.DATASOURCE, sourceParameter.getSrcDatasource());
DataSourceParameters dataTarget = (DataSourceParameters) parametersHelper DataSourceParameters dataTarget = (DataSourceParameters) parametersHelper
.getResourceParameters(ResourceType.DATASOURCE, targetMysqlParameter.getTargetDatasource()); .getResourceParameters(ResourceType.DATASOURCE, targetParameter.getTargetDatasource());
if (Objects.nonNull(dataSource)) { if (Objects.nonNull(dataSource)) {
sqoopTaskExecutionContext.setDataSourceId(sourceMysqlParameter.getSrcDatasource()); sqoopTaskExecutionContext.setDataSourceId(sourceParameter.getSrcDatasource());
sqoopTaskExecutionContext.setSourcetype(dataSource.getType()); sqoopTaskExecutionContext.setSourcetype(dataSource.getType());
sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams()); sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams());
} }
if (Objects.nonNull(dataTarget)) { if (Objects.nonNull(dataTarget)) {
sqoopTaskExecutionContext.setDataTargetId(targetMysqlParameter.getTargetDatasource()); sqoopTaskExecutionContext.setDataTargetId(targetParameter.getTargetDatasource());
sqoopTaskExecutionContext.setTargetType(dataTarget.getType()); sqoopTaskExecutionContext.setTargetType(dataTarget.getType());
sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams()); sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams());
} }
return sqoopTaskExecutionContext; return sqoopTaskExecutionContext;
} }
/**
 * Map a sqoop target type name to its parameter class, used to
 * deserialize the task's targetParams JSON.
 *
 * @param targetType sqoop target type (MYSQL, HIVE, HDFS, ORACLE, HANA, SQLSERVER)
 * @return the matching Target*Parameter class, or null for an unknown type
 */
private Class<?> getTargetParameter(String targetType) {
    switch (targetType) {
        case MYSQL:
            return TargetMysqlParameter.class;
        case HIVE:
            return TargetHiveParameter.class;
        case HDFS:
            return TargetHdfsParameter.class;
        case ORACLE:
            return TargetOracleParameter.class;
        case HANA:
            return TargetHanaParameter.class;
        case SQLSERVER:
            return TargetSqlServerParameter.class;
        default:
            return null;
    }
}
/**
 * Map a sqoop source type name to its parameter class, used to
 * deserialize the task's sourceParams JSON.
 * (Javadoc previously copy-pasted from the target lookup; parameter
 * renamed from the misleading "targetType".)
 *
 * @param sourceType sqoop source type (MYSQL, HIVE, HDFS, ORACLE, HANA, SQLSERVER)
 * @return the matching Source*Parameter class, or null for an unknown type
 */
private Class<?> getSourceParameter(String sourceType) {
    switch (sourceType) {
        case HIVE:
            return SourceHiveParameter.class;
        case HDFS:
            return SourceHdfsParameter.class;
        case MYSQL:
            return SourceMysqlParameter.class;
        case ORACLE:
            return SourceOracleParameter.class;
        case HANA:
            return SourceHanaParameter.class;
        case SQLSERVER:
            return SourceSqlServerParameter.class;
        default:
            return null;
    }
}
} }

38
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/TargetCommonParameter.java

@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter;
/**
 * Common fields shared by every sqoop target parameter type;
 * concrete Target*Parameter classes extend this.
 */
public class TargetCommonParameter {

    /** id of the target datasource; 0 is treated as "not selected" by callers */
    protected int targetDatasource;

    /** @return the selected target datasource id (0 when none) */
    public int getTargetDatasource() {
        return this.targetDatasource;
    }

    /** @param targetDatasource the target datasource id to select */
    public void setTargetDatasource(int targetDatasource) {
        this.targetDatasource = targetDatasource;
    }
}

126
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHanaParameter.java

@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
import java.util.List;
/**
 * Parameters of a Hana sqoop source, deserialized from the task's
 * sourceParams JSON. Plain data holder; the datasource id lives in
 * {@link SourceCommonParameter}.
 */
public class SourceHanaParameter extends SourceCommonParameter {

    /** source table name */
    private String srcTable;
    /** source query type selector */
    private int srcQueryType;
    /** custom query SQL */
    private String srcQuerySql;
    /** source column type selector */
    private int srcColumnType;
    /** source columns */
    private String srcColumns;
    /** source condition list */
    private List<Property> srcConditionList;
    /** hive column mappings */
    private List<Property> mapColumnHive;
    /** java column mappings */
    private List<Property> mapColumnJava;

    public String getSrcTable() {
        return this.srcTable;
    }

    public void setSrcTable(String srcTable) {
        this.srcTable = srcTable;
    }

    public int getSrcQueryType() {
        return this.srcQueryType;
    }

    public void setSrcQueryType(int srcQueryType) {
        this.srcQueryType = srcQueryType;
    }

    public String getSrcQuerySql() {
        return this.srcQuerySql;
    }

    public void setSrcQuerySql(String srcQuerySql) {
        this.srcQuerySql = srcQuerySql;
    }

    public int getSrcColumnType() {
        return this.srcColumnType;
    }

    public void setSrcColumnType(int srcColumnType) {
        this.srcColumnType = srcColumnType;
    }

    public String getSrcColumns() {
        return this.srcColumns;
    }

    public void setSrcColumns(String srcColumns) {
        this.srcColumns = srcColumns;
    }

    public List<Property> getSrcConditionList() {
        return this.srcConditionList;
    }

    public void setSrcConditionList(List<Property> srcConditionList) {
        this.srcConditionList = srcConditionList;
    }

    public List<Property> getMapColumnHive() {
        return this.mapColumnHive;
    }

    public void setMapColumnHive(List<Property> mapColumnHive) {
        this.mapColumnHive = mapColumnHive;
    }

    public List<Property> getMapColumnJava() {
        return this.mapColumnJava;
    }

    public void setMapColumnJava(List<Property> mapColumnJava) {
        this.mapColumnJava = mapColumnJava;
    }
}

4
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHdfsParameter.java

@ -17,10 +17,12 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources; package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
/** /**
* source hdfs parameter * source hdfs parameter
*/ */
public class SourceHdfsParameter { public class SourceHdfsParameter extends SourceCommonParameter {
/** /**
* export dir * export dir

4
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceHiveParameter.java

@ -17,10 +17,12 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources; package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
/** /**
* source hive parameter * source hive parameter
*/ */
public class SourceHiveParameter { public class SourceHiveParameter extends SourceCommonParameter {
/** /**
* hive database * hive database

15
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceMysqlParameter.java

@ -18,18 +18,15 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources; package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
import java.util.List; import java.util.List;
/** /**
* source mysql parameter * source mysql parameter
*/ */
public class SourceMysqlParameter { public class SourceMysqlParameter extends SourceCommonParameter {
/**
* src datasource
*/
private int srcDatasource;
/** /**
* src table * src table
*/ */
@ -63,14 +60,6 @@ public class SourceMysqlParameter {
*/ */
private List<Property> mapColumnJava; private List<Property> mapColumnJava;
public int getSrcDatasource() {
return srcDatasource;
}
public void setSrcDatasource(int srcDatasource) {
this.srcDatasource = srcDatasource;
}
public String getSrcTable() { public String getSrcTable() {
return srcTable; return srcTable;
} }

126
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceOracleParameter.java

@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
import java.util.List;
/**
 * Parameters of an Oracle sqoop source, deserialized from the task's
 * sourceParams JSON. Plain data holder; the datasource id lives in
 * {@link SourceCommonParameter}.
 */
public class SourceOracleParameter extends SourceCommonParameter {

    /** source table name */
    private String srcTable;
    /** source query type selector */
    private int srcQueryType;
    /** custom query SQL */
    private String srcQuerySql;
    /** source column type selector */
    private int srcColumnType;
    /** source columns */
    private String srcColumns;
    /** source condition list */
    private List<Property> srcConditionList;
    /** hive column mappings */
    private List<Property> mapColumnHive;
    /** java column mappings */
    private List<Property> mapColumnJava;

    public String getSrcTable() {
        return this.srcTable;
    }

    public void setSrcTable(String srcTable) {
        this.srcTable = srcTable;
    }

    public int getSrcQueryType() {
        return this.srcQueryType;
    }

    public void setSrcQueryType(int srcQueryType) {
        this.srcQueryType = srcQueryType;
    }

    public String getSrcQuerySql() {
        return this.srcQuerySql;
    }

    public void setSrcQuerySql(String srcQuerySql) {
        this.srcQuerySql = srcQuerySql;
    }

    public int getSrcColumnType() {
        return this.srcColumnType;
    }

    public void setSrcColumnType(int srcColumnType) {
        this.srcColumnType = srcColumnType;
    }

    public String getSrcColumns() {
        return this.srcColumns;
    }

    public void setSrcColumns(String srcColumns) {
        this.srcColumns = srcColumns;
    }

    public List<Property> getSrcConditionList() {
        return this.srcConditionList;
    }

    public void setSrcConditionList(List<Property> srcConditionList) {
        this.srcConditionList = srcConditionList;
    }

    public List<Property> getMapColumnHive() {
        return this.mapColumnHive;
    }

    public void setMapColumnHive(List<Property> mapColumnHive) {
        this.mapColumnHive = mapColumnHive;
    }

    public List<Property> getMapColumnJava() {
        return this.mapColumnJava;
    }

    public void setMapColumnJava(List<Property> mapColumnJava) {
        this.mapColumnJava = mapColumnJava;
    }
}

126
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/sources/SourceSqlServerParameter.java

@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SourceCommonParameter;
import java.util.List;
/**
 * Parameters of a SQL Server sqoop source, deserialized from the task's
 * sourceParams JSON. Plain data holder; the datasource id lives in
 * {@link SourceCommonParameter}.
 */
public class SourceSqlServerParameter extends SourceCommonParameter {

    /** source table name */
    private String srcTable;
    /** source query type selector */
    private int srcQueryType;
    /** custom query SQL */
    private String srcQuerySql;
    /** source column type selector */
    private int srcColumnType;
    /** source columns */
    private String srcColumns;
    /** source condition list */
    private List<Property> srcConditionList;
    /** hive column mappings */
    private List<Property> mapColumnHive;
    /** java column mappings */
    private List<Property> mapColumnJava;

    public String getSrcTable() {
        return this.srcTable;
    }

    public void setSrcTable(String srcTable) {
        this.srcTable = srcTable;
    }

    public int getSrcQueryType() {
        return this.srcQueryType;
    }

    public void setSrcQueryType(int srcQueryType) {
        this.srcQueryType = srcQueryType;
    }

    public String getSrcQuerySql() {
        return this.srcQuerySql;
    }

    public void setSrcQuerySql(String srcQuerySql) {
        this.srcQuerySql = srcQuerySql;
    }

    public int getSrcColumnType() {
        return this.srcColumnType;
    }

    public void setSrcColumnType(int srcColumnType) {
        this.srcColumnType = srcColumnType;
    }

    public String getSrcColumns() {
        return this.srcColumns;
    }

    public void setSrcColumns(String srcColumns) {
        this.srcColumns = srcColumns;
    }

    public List<Property> getSrcConditionList() {
        return this.srcConditionList;
    }

    public void setSrcConditionList(List<Property> srcConditionList) {
        this.srcConditionList = srcConditionList;
    }

    public List<Property> getMapColumnHive() {
        return this.mapColumnHive;
    }

    public void setMapColumnHive(List<Property> mapColumnHive) {
        this.mapColumnHive = mapColumnHive;
    }

    public List<Property> getMapColumnJava() {
        return this.mapColumnJava;
    }

    public void setMapColumnJava(List<Property> mapColumnJava) {
        this.mapColumnJava = mapColumnJava;
    }
}

123
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHanaParameter.java

@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/**
 * Parameters of a Hana sqoop target, deserialized from the task's
 * targetParams JSON. Plain data holder; the datasource id lives in
 * {@link TargetCommonParameter}.
 */
public class TargetHanaParameter extends TargetCommonParameter {

    /** target table name */
    private String targetTable;
    /** target columns */
    private String targetColumns;
    /** fields terminated value */
    private String fieldsTerminated;
    /** lines terminated value */
    private String linesTerminated;
    /** pre query */
    private String preQuery;
    /** whether to run an update export */
    private boolean isUpdate;
    /** target update key */
    private String targetUpdateKey;
    /** target update mode */
    private String targetUpdateMode;

    public String getTargetTable() {
        return this.targetTable;
    }

    public void setTargetTable(String targetTable) {
        this.targetTable = targetTable;
    }

    public String getTargetColumns() {
        return this.targetColumns;
    }

    public void setTargetColumns(String targetColumns) {
        this.targetColumns = targetColumns;
    }

    public String getFieldsTerminated() {
        return this.fieldsTerminated;
    }

    public void setFieldsTerminated(String fieldsTerminated) {
        this.fieldsTerminated = fieldsTerminated;
    }

    public String getLinesTerminated() {
        return this.linesTerminated;
    }

    public void setLinesTerminated(String linesTerminated) {
        this.linesTerminated = linesTerminated;
    }

    public String getPreQuery() {
        return this.preQuery;
    }

    public void setPreQuery(String preQuery) {
        this.preQuery = preQuery;
    }

    // NOTE(review): asymmetric getIsUpdate()/setUpdate(...) pair kept byte-for-byte —
    // generators call getIsUpdate() and JSON (de)serialization may depend on these
    // exact names; confirm before normalizing to isUpdate()/setIsUpdate().
    public boolean getIsUpdate() {
        return isUpdate;
    }

    public void setUpdate(boolean update) {
        isUpdate = update;
    }

    public String getTargetUpdateKey() {
        return this.targetUpdateKey;
    }

    public void setTargetUpdateKey(String targetUpdateKey) {
        this.targetUpdateKey = targetUpdateKey;
    }

    public String getTargetUpdateMode() {
        return this.targetUpdateMode;
    }

    public void setTargetUpdateMode(String targetUpdateMode) {
        this.targetUpdateMode = targetUpdateMode;
    }
}

4
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHdfsParameter.java

@ -17,10 +17,12 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets; package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/** /**
* target hdfs parameter * target hdfs parameter
*/ */
public class TargetHdfsParameter { public class TargetHdfsParameter extends TargetCommonParameter {
/** /**
* target dir * target dir

4
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHiveParameter.java

@ -17,10 +17,12 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets; package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/** /**
* target hive parameter * target hive parameter
*/ */
public class TargetHiveParameter { public class TargetHiveParameter extends TargetCommonParameter {
/** /**
* hive database * hive database

16
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetMysqlParameter.java

@ -17,15 +17,13 @@
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets; package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/** /**
* target mysql parameter * target mysql parameter
*/ */
public class TargetMysqlParameter { public class TargetMysqlParameter extends TargetCommonParameter {
/**
* target datasource
*/
private int targetDatasource;
/** /**
* target table * target table
*/ */
@ -59,14 +57,6 @@ public class TargetMysqlParameter {
*/ */
private String targetUpdateMode; private String targetUpdateMode;
public int getTargetDatasource() {
return targetDatasource;
}
public void setTargetDatasource(int targetDatasource) {
this.targetDatasource = targetDatasource;
}
public String getTargetTable() { public String getTargetTable() {
return targetTable; return targetTable;
} }

123
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetOracleParameter.java

@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/**
 * Target-side parameters for a Sqoop task that exports data into an Oracle
 * table: destination table/columns, field and line terminators, an optional
 * pre-export query and the update (upsert) settings.
 */
public class TargetOracleParameter extends TargetCommonParameter {

    /** Destination table name. */
    private String targetTable;

    /** Comma separated list of destination columns. */
    private String targetColumns;

    /** Field terminator string. */
    private String fieldsTerminated;

    /** Line terminator string. */
    private String linesTerminated;

    /** Query evaluated before the export runs. */
    private String preQuery;

    /** Whether the export updates existing rows instead of inserting only. */
    private boolean isUpdate;

    /** Key column(s) used to match rows when updating. */
    private String targetUpdateKey;

    /** Update mode passed through to Sqoop — presumably updateonly/allowinsert; confirm in the generator. */
    private String targetUpdateMode;

    public void setTargetTable(String targetTable) {
        this.targetTable = targetTable;
    }

    public String getTargetTable() {
        return targetTable;
    }

    public void setTargetColumns(String targetColumns) {
        this.targetColumns = targetColumns;
    }

    public String getTargetColumns() {
        return targetColumns;
    }

    public void setFieldsTerminated(String fieldsTerminated) {
        this.fieldsTerminated = fieldsTerminated;
    }

    public String getFieldsTerminated() {
        return fieldsTerminated;
    }

    public void setLinesTerminated(String linesTerminated) {
        this.linesTerminated = linesTerminated;
    }

    public String getLinesTerminated() {
        return linesTerminated;
    }

    public void setPreQuery(String preQuery) {
        this.preQuery = preQuery;
    }

    public String getPreQuery() {
        return preQuery;
    }

    // NOTE(review): asymmetric bean pair ("getIsUpdate"/"setUpdate" map to
    // properties "isUpdate" vs "update"); kept unchanged because JSON
    // (de)serialization of task parameters may depend on it — confirm.
    public void setUpdate(boolean update) {
        isUpdate = update;
    }

    public boolean getIsUpdate() {
        return isUpdate;
    }

    public void setTargetUpdateKey(String targetUpdateKey) {
        this.targetUpdateKey = targetUpdateKey;
    }

    public String getTargetUpdateKey() {
        return targetUpdateKey;
    }

    public void setTargetUpdateMode(String targetUpdateMode) {
        this.targetUpdateMode = targetUpdateMode;
    }

    public String getTargetUpdateMode() {
        return targetUpdateMode;
    }
}

123
dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetSqlServerParameter.java

@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets;
import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.TargetCommonParameter;
/**
 * Target-side parameters for a Sqoop task that exports data into a SQL Server
 * table: destination table/columns, field and line terminators, an optional
 * pre-export query and the update (upsert) settings.
 */
public class TargetSqlServerParameter extends TargetCommonParameter {

    /** Destination table name. */
    private String targetTable;

    /** Comma separated list of destination columns. */
    private String targetColumns;

    /** Field terminator string. */
    private String fieldsTerminated;

    /** Line terminator string. */
    private String linesTerminated;

    /** Query evaluated before the export runs. */
    private String preQuery;

    /** Whether the export updates existing rows instead of inserting only. */
    private boolean isUpdate;

    /** Key column(s) used to match rows when updating. */
    private String targetUpdateKey;

    /** Update mode passed through to Sqoop — presumably updateonly/allowinsert; confirm in the generator. */
    private String targetUpdateMode;

    public void setTargetTable(String targetTable) {
        this.targetTable = targetTable;
    }

    public String getTargetTable() {
        return targetTable;
    }

    public void setTargetColumns(String targetColumns) {
        this.targetColumns = targetColumns;
    }

    public String getTargetColumns() {
        return targetColumns;
    }

    public void setFieldsTerminated(String fieldsTerminated) {
        this.fieldsTerminated = fieldsTerminated;
    }

    public String getFieldsTerminated() {
        return fieldsTerminated;
    }

    public void setLinesTerminated(String linesTerminated) {
        this.linesTerminated = linesTerminated;
    }

    public String getLinesTerminated() {
        return linesTerminated;
    }

    public void setPreQuery(String preQuery) {
        this.preQuery = preQuery;
    }

    public String getPreQuery() {
        return preQuery;
    }

    // NOTE(review): asymmetric bean pair ("getIsUpdate"/"setUpdate" map to
    // properties "isUpdate" vs "update"); kept unchanged because JSON
    // (de)serialization of task parameters may depend on it — confirm.
    public void setUpdate(boolean update) {
        isUpdate = update;
    }

    public boolean getIsUpdate() {
        return isUpdate;
    }

    public void setTargetUpdateKey(String targetUpdateKey) {
        this.targetUpdateKey = targetUpdateKey;
    }

    public String getTargetUpdateKey() {
        return targetUpdateKey;
    }

    public void setTargetUpdateMode(String targetUpdateMode) {
        this.targetUpdateMode = targetUpdateMode;
    }

    public String getTargetUpdateMode() {
        return targetUpdateMode;
    }
}

57
dolphinscheduler-ui/pnpm-lock.yaml

@ -91,22 +91,22 @@ devDependencies:
'@types/node': 18.16.18 '@types/node': 18.16.18
'@types/nprogress': 0.2.0 '@types/nprogress': 0.2.0
'@types/qs': 6.9.7 '@types/qs': 6.9.7
'@typescript-eslint/eslint-plugin': 5.59.11_khxwfo2nlv6qliptqsbqa3vjsm '@typescript-eslint/eslint-plugin': 5.59.11_51ef62bb4d5d7d05a1f38483006ea993
'@typescript-eslint/parser': 5.59.11_kigkzfftsmftz3xok324pyvzui '@typescript-eslint/parser': 5.59.11_eslint@8.42.0+typescript@4.9.5
'@vicons/antd': 0.12.0 '@vicons/antd': 0.12.0
'@vitejs/plugin-vue': 3.2.0_vite@3.2.7+vue@3.3.4 '@vitejs/plugin-vue': 3.2.0_vite@3.2.7+vue@3.3.4
'@vitejs/plugin-vue-jsx': 2.1.1_vite@3.2.7+vue@3.3.4 '@vitejs/plugin-vue-jsx': 2.1.1_vite@3.2.7+vue@3.3.4
dart-sass: 1.25.0 dart-sass: 1.25.0
eslint: 8.42.0 eslint: 8.42.0
eslint-config-prettier: 8.8.0_eslint@8.42.0 eslint-config-prettier: 8.8.0_eslint@8.42.0
eslint-plugin-prettier: 4.2.1_vnriwwub2rhvoyn4ckagrc4lpi eslint-plugin-prettier: 4.2.1_ab628b5a81d44f5761bc1280688b8b7a
eslint-plugin-vue: 9.14.1_eslint@8.42.0 eslint-plugin-vue: 9.14.1_eslint@8.42.0
prettier: 2.8.8 prettier: 2.8.8
sass: 1.63.4 sass: 1.63.4
sass-loader: 13.3.2_sass@1.63.4 sass-loader: 13.3.2_sass@1.63.4
typescript: 4.9.5 typescript: 4.9.5
typescript-plugin-css-modules: 3.4.0_typescript@4.9.5 typescript-plugin-css-modules: 3.4.0_typescript@4.9.5
vite: 3.2.7_ffzaxsbr6mwjfgagqxd743xe6i vite: 3.2.7_@types+node@18.16.18+sass@1.63.4
vite-plugin-compression: 0.5.1_vite@3.2.7 vite-plugin-compression: 0.5.1_vite@3.2.7
vue-tsc: 0.40.13_typescript@4.9.5 vue-tsc: 0.40.13_typescript@4.9.5
@ -740,7 +740,7 @@ packages:
resolution: {integrity: sha512-oh8q2Zc32S6gd/j50GowEjKLoOVOwHP/bWVjKJInBwQqdOYMdPrf1oVlelTlyfFK3CKxL1uahMDAr+vy8T7yMQ==} resolution: {integrity: sha512-oh8q2Zc32S6gd/j50GowEjKLoOVOwHP/bWVjKJInBwQqdOYMdPrf1oVlelTlyfFK3CKxL1uahMDAr+vy8T7yMQ==}
dev: false dev: false
/@typescript-eslint/eslint-plugin/5.59.11_khxwfo2nlv6qliptqsbqa3vjsm: /@typescript-eslint/eslint-plugin/5.59.11_51ef62bb4d5d7d05a1f38483006ea993:
resolution: {integrity: sha512-XxuOfTkCUiOSyBWIvHlUraLw/JT/6Io1365RO6ZuI88STKMavJZPNMU0lFcUTeQXEhHiv64CbxYxBNoDVSmghg==} resolution: {integrity: sha512-XxuOfTkCUiOSyBWIvHlUraLw/JT/6Io1365RO6ZuI88STKMavJZPNMU0lFcUTeQXEhHiv64CbxYxBNoDVSmghg==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies: peerDependencies:
@ -752,10 +752,10 @@ packages:
optional: true optional: true
dependencies: dependencies:
'@eslint-community/regexpp': 4.5.1 '@eslint-community/regexpp': 4.5.1
'@typescript-eslint/parser': 5.59.11_kigkzfftsmftz3xok324pyvzui '@typescript-eslint/parser': 5.59.11_eslint@8.42.0+typescript@4.9.5
'@typescript-eslint/scope-manager': 5.59.11 '@typescript-eslint/scope-manager': 5.59.11
'@typescript-eslint/type-utils': 5.59.11_kigkzfftsmftz3xok324pyvzui '@typescript-eslint/type-utils': 5.59.11_eslint@8.42.0+typescript@4.9.5
'@typescript-eslint/utils': 5.59.11_kigkzfftsmftz3xok324pyvzui '@typescript-eslint/utils': 5.59.11_eslint@8.42.0+typescript@4.9.5
debug: 4.3.4 debug: 4.3.4
eslint: 8.42.0 eslint: 8.42.0
grapheme-splitter: 1.0.4 grapheme-splitter: 1.0.4
@ -768,7 +768,7 @@ packages:
- supports-color - supports-color
dev: true dev: true
/@typescript-eslint/parser/5.59.11_kigkzfftsmftz3xok324pyvzui: /@typescript-eslint/parser/5.59.11_eslint@8.42.0+typescript@4.9.5:
resolution: {integrity: sha512-s9ZF3M+Nym6CAZEkJJeO2TFHHDsKAM3ecNkLuH4i4s8/RCPnF5JRip2GyviYkeEAcwGMJxkqG9h2dAsnA1nZpA==} resolution: {integrity: sha512-s9ZF3M+Nym6CAZEkJJeO2TFHHDsKAM3ecNkLuH4i4s8/RCPnF5JRip2GyviYkeEAcwGMJxkqG9h2dAsnA1nZpA==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies: peerDependencies:
@ -796,7 +796,7 @@ packages:
'@typescript-eslint/visitor-keys': 5.59.11 '@typescript-eslint/visitor-keys': 5.59.11
dev: true dev: true
/@typescript-eslint/type-utils/5.59.11_kigkzfftsmftz3xok324pyvzui: /@typescript-eslint/type-utils/5.59.11_eslint@8.42.0+typescript@4.9.5:
resolution: {integrity: sha512-LZqVY8hMiVRF2a7/swmkStMYSoXMFlzL6sXV6U/2gL5cwnLWQgLEG8tjWPpaE4rMIdZ6VKWwcffPlo1jPfk43g==} resolution: {integrity: sha512-LZqVY8hMiVRF2a7/swmkStMYSoXMFlzL6sXV6U/2gL5cwnLWQgLEG8tjWPpaE4rMIdZ6VKWwcffPlo1jPfk43g==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies: peerDependencies:
@ -807,7 +807,7 @@ packages:
optional: true optional: true
dependencies: dependencies:
'@typescript-eslint/typescript-estree': 5.59.11_typescript@4.9.5 '@typescript-eslint/typescript-estree': 5.59.11_typescript@4.9.5
'@typescript-eslint/utils': 5.59.11_kigkzfftsmftz3xok324pyvzui '@typescript-eslint/utils': 5.59.11_eslint@8.42.0+typescript@4.9.5
debug: 4.3.4 debug: 4.3.4
eslint: 8.42.0 eslint: 8.42.0
tsutils: 3.21.0_typescript@4.9.5 tsutils: 3.21.0_typescript@4.9.5
@ -842,7 +842,7 @@ packages:
- supports-color - supports-color
dev: true dev: true
/@typescript-eslint/utils/5.59.11_kigkzfftsmftz3xok324pyvzui: /@typescript-eslint/utils/5.59.11_eslint@8.42.0+typescript@4.9.5:
resolution: {integrity: sha512-didu2rHSOMUdJThLk4aZ1Or8IcO3HzCw/ZvEjTTIfjIrcdd5cvSIwwDy2AOlE7htSNp7QIZ10fLMyRCveesMLg==} resolution: {integrity: sha512-didu2rHSOMUdJThLk4aZ1Or8IcO3HzCw/ZvEjTTIfjIrcdd5cvSIwwDy2AOlE7htSNp7QIZ10fLMyRCveesMLg==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies: peerDependencies:
@ -884,7 +884,7 @@ packages:
'@babel/core': 7.22.5 '@babel/core': 7.22.5
'@babel/plugin-transform-typescript': 7.22.5_@babel+core@7.22.5 '@babel/plugin-transform-typescript': 7.22.5_@babel+core@7.22.5
'@vue/babel-plugin-jsx': 1.1.1_@babel+core@7.22.5 '@vue/babel-plugin-jsx': 1.1.1_@babel+core@7.22.5
vite: 3.2.7_ffzaxsbr6mwjfgagqxd743xe6i vite: 3.2.7_@types+node@18.16.18+sass@1.63.4
vue: 3.3.4 vue: 3.3.4
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
@ -897,7 +897,7 @@ packages:
vite: ^3.0.0 vite: ^3.0.0
vue: ^3.2.25 vue: ^3.2.25
dependencies: dependencies:
vite: 3.2.7_ffzaxsbr6mwjfgagqxd743xe6i vite: 3.2.7_@types+node@18.16.18+sass@1.63.4
vue: 3.3.4 vue: 3.3.4
dev: true dev: true
@ -1023,6 +1023,7 @@ packages:
dependencies: dependencies:
'@vue/reactivity': 3.3.4 '@vue/reactivity': 3.3.4
'@vue/shared': 3.3.4 '@vue/shared': 3.3.4
dev: false
/@vue/runtime-dom/3.3.4: /@vue/runtime-dom/3.3.4:
resolution: {integrity: sha512-Aj5bTJ3u5sFsUckRghsNjVTtxZQ1OyMWCr5dZRAPijF/0Vy4xEoRCwLyHXcj4D0UFbJ4lbx3gPTgg06K/GnPnQ==} resolution: {integrity: sha512-Aj5bTJ3u5sFsUckRghsNjVTtxZQ1OyMWCr5dZRAPijF/0Vy4xEoRCwLyHXcj4D0UFbJ4lbx3gPTgg06K/GnPnQ==}
@ -1030,6 +1031,7 @@ packages:
'@vue/runtime-core': 3.3.4 '@vue/runtime-core': 3.3.4
'@vue/shared': 3.3.4 '@vue/shared': 3.3.4
csstype: 3.1.2 csstype: 3.1.2
dev: false
/@vue/server-renderer/3.3.4_vue@3.3.4: /@vue/server-renderer/3.3.4_vue@3.3.4:
resolution: {integrity: sha512-Q6jDDzR23ViIb67v+vM1Dqntu+HUexQcsWKhhQa4ARVzxOY2HbC7QRW/ggkDBd5BU+uM1sV6XOAP0b216o34JQ==} resolution: {integrity: sha512-Q6jDDzR23ViIb67v+vM1Dqntu+HUexQcsWKhhQa4ARVzxOY2HbC7QRW/ggkDBd5BU+uM1sV6XOAP0b216o34JQ==}
@ -1039,6 +1041,7 @@ packages:
'@vue/compiler-ssr': 3.3.4 '@vue/compiler-ssr': 3.3.4
'@vue/shared': 3.3.4 '@vue/shared': 3.3.4
vue: 3.3.4 vue: 3.3.4
dev: false
/@vue/shared/3.2.38: /@vue/shared/3.2.38:
resolution: {integrity: sha512-dTyhTIRmGXBjxJE+skC8tTWCGLCVc4wQgRRLt8+O9p5ewBAjoBwtCAkLPrtToSr1xltoe3st21Pv953aOZ7alg==} resolution: {integrity: sha512-dTyhTIRmGXBjxJE+skC8tTWCGLCVc4wQgRRLt8+O9p5ewBAjoBwtCAkLPrtToSr1xltoe3st21Pv953aOZ7alg==}
@ -1354,6 +1357,7 @@ packages:
/csstype/3.1.2: /csstype/3.1.2:
resolution: {integrity: sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==} resolution: {integrity: sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==}
dev: false
/d3-array/3.2.4: /d3-array/3.2.4:
resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==}
@ -1649,22 +1653,12 @@ packages:
/debug/3.1.0: /debug/3.1.0:
resolution: {integrity: sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==} resolution: {integrity: sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
dependencies: dependencies:
ms: 2.0.0 ms: 2.0.0
dev: true dev: true
/debug/3.2.7: /debug/3.2.7:
resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
dependencies: dependencies:
ms: 2.1.3 ms: 2.1.3
dev: true dev: true
@ -1980,7 +1974,7 @@ packages:
eslint: 8.42.0 eslint: 8.42.0
dev: true dev: true
/eslint-plugin-prettier/4.2.1_vnriwwub2rhvoyn4ckagrc4lpi: /eslint-plugin-prettier/4.2.1_ab628b5a81d44f5761bc1280688b8b7a:
resolution: {integrity: sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==} resolution: {integrity: sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==}
engines: {node: '>=12.0.0'} engines: {node: '>=12.0.0'}
peerDependencies: peerDependencies:
@ -2579,8 +2573,6 @@ packages:
mime: 1.6.0 mime: 1.6.0
needle: 3.2.0 needle: 3.2.0
source-map: 0.6.1 source-map: 0.6.1
transitivePeerDependencies:
- supports-color
dev: true dev: true
/levn/0.4.1: /levn/0.4.1:
@ -2800,8 +2792,6 @@ packages:
debug: 3.2.7 debug: 3.2.7
iconv-lite: 0.6.3 iconv-lite: 0.6.3
sax: 1.2.4 sax: 1.2.4
transitivePeerDependencies:
- supports-color
dev: true dev: true
optional: true optional: true
@ -3305,8 +3295,6 @@ packages:
sax: 1.2.4 sax: 1.2.4
semver: 6.3.0 semver: 6.3.0
source-map: 0.7.4 source-map: 0.7.4
transitivePeerDependencies:
- supports-color
dev: true dev: true
/supports-color/5.5.0: /supports-color/5.5.0:
@ -3414,7 +3402,6 @@ packages:
tsconfig-paths: 3.14.2 tsconfig-paths: 3.14.2
typescript: 4.9.5 typescript: 4.9.5
transitivePeerDependencies: transitivePeerDependencies:
- supports-color
- ts-node - ts-node
dev: true dev: true
@ -3422,6 +3409,7 @@ packages:
resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==} resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==}
engines: {node: '>=4.2.0'} engines: {node: '>=4.2.0'}
hasBin: true hasBin: true
dev: true
/universalify/2.0.0: /universalify/2.0.0:
resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==}
@ -3480,12 +3468,12 @@ packages:
chalk: 4.1.2 chalk: 4.1.2
debug: 4.3.4 debug: 4.3.4
fs-extra: 10.1.0 fs-extra: 10.1.0
vite: 3.2.7_ffzaxsbr6mwjfgagqxd743xe6i vite: 3.2.7_@types+node@18.16.18+sass@1.63.4
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
dev: true dev: true
/vite/3.2.7_ffzaxsbr6mwjfgagqxd743xe6i: /vite/3.2.7_@types+node@18.16.18+sass@1.63.4:
resolution: {integrity: sha512-29pdXjk49xAP0QBr0xXqu2s5jiQIXNvE/xwd0vUizYT2Hzqe4BksNNoWllFVXJf4eLZ+UlVQmXfB4lWrc+t18g==} resolution: {integrity: sha512-29pdXjk49xAP0QBr0xXqu2s5jiQIXNvE/xwd0vUizYT2Hzqe4BksNNoWllFVXJf4eLZ+UlVQmXfB4lWrc+t18g==}
engines: {node: ^14.18.0 || >=16.0.0} engines: {node: ^14.18.0 || >=16.0.0}
hasBin: true hasBin: true
@ -3603,6 +3591,7 @@ packages:
'@vue/runtime-dom': 3.3.4 '@vue/runtime-dom': 3.3.4
'@vue/server-renderer': 3.3.4_vue@3.3.4 '@vue/server-renderer': 3.3.4_vue@3.3.4
'@vue/shared': 3.3.4 '@vue/shared': 3.3.4
dev: false
/vueuc/0.4.51_vue@3.3.4: /vueuc/0.4.51_vue@3.3.4:
resolution: {integrity: sha512-pLiMChM4f+W8czlIClGvGBYo656lc2Y0/mXFSCydcSmnCR1izlKPGMgiYBGjbY9FDkFG8a2HEVz7t0DNzBWbDw==} resolution: {integrity: sha512-pLiMChM4f+W8czlIClGvGBYo656lc2Y0/mXFSCydcSmnCR1izlKPGMgiYBGjbY9FDkFG8a2HEVz7t0DNzBWbDw==}

2
dolphinscheduler-ui/src/locales/en_US/project.ts

@ -539,7 +539,7 @@ export default {
model_type: 'ModelType', model_type: 'ModelType',
form: 'Form', form: 'Form',
table: 'Table', table: 'Table',
table_tips: 'Please enter Mysql Table(required)', table_tips: 'Please enter Table(required)',
column_type: 'ColumnType', column_type: 'ColumnType',
all_columns: 'All Columns', all_columns: 'All Columns',
some_columns: 'Some Columns', some_columns: 'Some Columns',

2
dolphinscheduler-ui/src/locales/zh_CN/project.ts

@ -529,7 +529,7 @@ export default {
model_type: '模式', model_type: '模式',
form: '表单', form: '表单',
table: '表名', table: '表名',
table_tips: '请输入Mysql表名(必填)', table_tips: '请输入表名(必填)',
column_type: '列类型', column_type: '列类型',
all_columns: '全表导入', all_columns: '全表导入',
some_columns: '选择列', some_columns: '选择列',

142
dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-datasource.ts

@ -15,66 +15,94 @@
* limitations under the License. * limitations under the License.
*/ */
import { onMounted, ref, Ref } from 'vue' import {onMounted, ref, Ref, watch} from 'vue'
import { queryDataSourceList } from '@/service/modules/data-source' import {queryDataSourceList} from '@/service/modules/data-source'
import { useI18n } from 'vue-i18n' import {useI18n} from 'vue-i18n'
import type { IJsonItem, IDataBase } from '../types' import type {IJsonItem, IDataBase} from '../types'
import type { TypeReq } from '@/service/modules/data-source/types' import type {TypeReq} from '@/service/modules/data-source/types'
export function useDatasource( export function useDatasource(
model: { [field: string]: any }, model: { [field: string]: any },
span: Ref, span: Ref,
fieldType: string, fieldType: string,
fieldDatasource: string fieldDatasource: string
): IJsonItem[] { ): IJsonItem[] {
const { t } = useI18n() const {t} = useI18n()
const dataSourceList = ref([]) const dataSourceList = ref([])
const loading = ref(false) const loading = ref(false)
const hadoopSourceTypes = ref(['HIVE', 'HDFS'])
const getDataSource = async (type: IDataBase) => {
if (hadoopSourceTypes.value.some(source => source === type)) {
loading.value = false;
return
}
loading.value = true
if (model.modelType === 'import') {
model.sourceMysqlDatasource = model.sourceMysqlDatasource ? model.sourceMysqlDatasource : ''
model.sourceMysqlType = type;
} else {
model.sourceMysqlDatasource = model.targetMysqlDatasource ? model.targetMysqlDatasource : ''
model.targetMysqlType = type;
}
const params = {type, testFlag: 0} as TypeReq
const result = await queryDataSourceList(params)
dataSourceList.value = result.map((item: { name: string; id: number }) => ({
label: item.name,
value: item.id
}))
loading.value = false
}
onMounted(() => {
getDataSource(model.sourceType)
})
const getDataSource = async (type: IDataBase) => { watch(
if (loading.value) return () => [
loading.value = true model.sourceType,
const params = { type, testFlag: 0 } as TypeReq ],
const result = await queryDataSourceList(params) () => {
dataSourceList.value = result.map((item: { name: string; id: number }) => ({ getDataSource(model.sourceType)
label: item.name, }
value: item.id )
}))
loading.value = false
}
onMounted(() => {
getDataSource('MYSQL')
})
return [ watch(
{ () => [
type: 'select', model.targetType,
field: fieldType, ],
name: t('project.node.datasource'), () => {
span: span, getDataSource(model.targetType)
options: [{ label: 'MYSQL', value: 'MYSQL' }],
validate: {
required: true
}
},
{
type: 'select',
field: fieldDatasource,
name: ' ',
span: span,
props: {
placeholder: t('project.node.datasource_tips'),
filterable: true,
loading
},
options: dataSourceList,
validate: {
trigger: ['blur', 'input'],
validator(validate, value) {
if (!value) {
return new Error(t('project.node.datasource_tips'))
}
} }
} )
} return [
] {
type: 'input',
field: fieldType,
name: t('project.node.datasource'),
span: 0,
validate: {
required: true,
}
},
{
type: 'select',
field: fieldDatasource,
name: t('project.node.datasource'),
span: span,
props: {
placeholder: t('project.node.datasource_tips'),
filterable: true,
loading
},
options: dataSourceList,
validate: {
trigger: ['blur', 'input'],
validator(validate, value) {
if (!value) {
return new Error(t('project.node.datasource_tips'))
}
}
}
}
]
} }

561
dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-source-type.ts

@ -15,293 +15,312 @@
* limitations under the License. * limitations under the License.
*/ */
import { ref, h, watch, Ref } from 'vue' import {h, onMounted, Ref, ref, watch} from 'vue'
import { useI18n } from 'vue-i18n' import {useI18n} from 'vue-i18n'
import { useDatasource } from './use-sqoop-datasource' import {useDatasource} from './use-sqoop-datasource'
import { useCustomParams } from '.' import {useCustomParams} from '.'
import styles from '../index.module.scss' import styles from '../index.module.scss'
import type { IJsonItem, IOption, ModelType } from '../types' import type {IJsonItem, IOption, ModelType} from '../types'
export function useSourceType( export function useSourceType(
model: { [field: string]: any }, model: { [field: string]: any },
unCustomSpan: Ref<number> unCustomSpan: Ref<number>
): IJsonItem[] { ): IJsonItem[] {
const { t } = useI18n() const {t} = useI18n()
const mysqlSpan = ref(24) const rdbmsSpan = ref(24)
const tableSpan = ref(0) const tableSpan = ref(0)
const editorSpan = ref(24) const editorSpan = ref(24)
const columnSpan = ref(0) const columnSpan = ref(0)
const hiveSpan = ref(0) const hiveSpan = ref(0)
const hdfsSpan = ref(0) const hdfsSpan = ref(0)
const datasourceSpan = ref(12) const datasourceSpan = ref(24)
const resetSpan = () => { const isChange: any = ref(false)
mysqlSpan.value = const rdbmsSourceTypes = ref([
unCustomSpan.value && model.sourceType === 'MYSQL' ? 24 : 0 {
tableSpan.value = mysqlSpan.value && model.srcQueryType === '0' ? 24 : 0
editorSpan.value = mysqlSpan.value && model.srcQueryType === '1' ? 24 : 0
columnSpan.value = tableSpan.value && model.srcColumnType === '1' ? 24 : 0
hiveSpan.value = unCustomSpan.value && model.sourceType === 'HIVE' ? 24 : 0
hdfsSpan.value = unCustomSpan.value && model.sourceType === 'HDFS' ? 24 : 0
datasourceSpan.value =
unCustomSpan.value && model.sourceType === 'MYSQL' ? 12 : 0
}
const sourceTypes = ref([
{
label: 'MYSQL',
value: 'MYSQL'
}
] as IOption[])
const getSourceTypesByModelType = (modelType: ModelType): IOption[] => {
switch (modelType) {
case 'import':
return [
{
label: 'MYSQL', label: 'MYSQL',
value: 'MYSQL' value: 'MYSQL'
} },
] {
case 'export': label: 'ORACLE',
return [ value: 'ORACLE'
{ },
label: 'HDFS', {
value: 'HDFS' label: 'SQLSERVER',
}, value: 'SQLSERVER'
{ },
{
label: 'HANA',
value: 'HANA'
}
] as IOption[])
const hadoopSourceTypes = ref([
{
label: 'HIVE', label: 'HIVE',
value: 'HIVE' value: 'HIVE'
} }, {
]
default:
return [
{
label: 'MYSQL',
value: 'MYSQL'
},
{
label: 'HDFS', label: 'HDFS',
value: 'HDFS' value: 'HDFS'
}, }
{ ] as IOption[])
label: 'HIVE', const sourceTypes = ref()
value: 'HIVE' const resetSpan = () => {
} rdbmsSpan.value =
] unCustomSpan.value && rdbmsSourceTypes.value.some(source => source.value === model.sourceType) ? 24 : 0
tableSpan.value = rdbmsSpan.value && model.srcQueryType === '0' ? 24 : 0
editorSpan.value = rdbmsSpan.value && model.srcQueryType === '1' ? 24 : 0
columnSpan.value = tableSpan.value && model.srcColumnType === '1' ? 24 : 0
hiveSpan.value = unCustomSpan.value && model.sourceType === 'HIVE' ? 24 : 0
hdfsSpan.value = unCustomSpan.value && model.sourceType === 'HDFS' ? 24 : 0
datasourceSpan.value =
unCustomSpan.value && rdbmsSourceTypes.value.some(source => source.value === model.sourceType) ? 24 : 0
} }
} const resetValue = () => {
if (!isChange.value) {
isChange.value = true
return
}
switch (model.modelType) {
case 'import':
model.sourceMysqlDatasource = ''
break
case 'export':
model.sourceHiveDatabase = ''
model.sourceHiveTable = ''
model.sourceHivePartitionKey = ''
model.sourceHivePartitionValue = ''
model.sourceHdfsExportDir = ''
break
default:
model.sourceMysqlDatasource = ''
}
watch(
() => model.modelType,
(modelType: ModelType) => {
sourceTypes.value = getSourceTypesByModelType(modelType)
if (!model.sourceType) {
model.sourceType = sourceTypes.value[0].value
}
} }
) const getSourceTypesByModelType = (modelType: ModelType): IOption[] => {
watch( switch (modelType) {
() => [ case 'import':
unCustomSpan.value, return rdbmsSourceTypes.value
model.sourceType, case 'export':
model.srcQueryType, return hadoopSourceTypes.value
model.srcColumnType default:
], return rdbmsSourceTypes.value
() => { }
resetSpan()
} }
)
return [ onMounted(() => {
{ sourceTypes.value = [...rdbmsSourceTypes.value];
type: 'custom', })
field: 'custom-title-source',
span: unCustomSpan, watch(
widget: h( () => model.modelType,
'div', (modelType: ModelType) => {
{ class: styles['field-title'] }, sourceTypes.value = getSourceTypesByModelType(modelType)
t('project.node.data_source') model.sourceType = sourceTypes.value[0].value
) }
}, )
{ watch(
type: 'select', () => [
field: 'sourceType', unCustomSpan.value,
name: t('project.node.type'), model.sourceType,
span: unCustomSpan, model.srcQueryType,
options: sourceTypes model.srcColumnType
}, ],
...useDatasource( () => {
model, resetValue();
datasourceSpan, resetSpan();
'sourceMysqlType', }
'sourceMysqlDatasource' )
),
{ return [
type: 'radio',
field: 'srcQueryType',
name: t('project.node.model_type'),
span: mysqlSpan,
options: [
{ {
label: t('project.node.form'), type: 'custom',
value: '0' field: 'custom-title-source',
span: unCustomSpan,
widget: h(
'div',
{class: styles['field-title']},
t('project.node.data_source')
)
}, },
{ {
label: 'SQL', type: 'select',
value: '1' field: 'sourceType',
} name: t('project.node.type'),
], span: unCustomSpan,
props: { options: sourceTypes
'on-update:value': (value: '0' | '1') => { },
model.targetType = value === '0' ? 'HIVE' : 'HDFS' ...useDatasource(
} model,
} datasourceSpan,
}, 'sourceMysqlType',
{ 'sourceMysqlDatasource'
type: 'input', ),
field: 'srcTable', {
name: t('project.node.table'), type: 'radio',
span: tableSpan, field: 'srcQueryType',
props: { name: t('project.node.model_type'),
placeholder: t('project.node.table_tips') span: rdbmsSpan,
}, options: [
validate: { {
trigger: ['input', 'blur'], label: t('project.node.form'),
required: true, value: '0'
validator(validate, value) { },
if (tableSpan.value && !value) { {
return new Error(t('project.node.table_tips')) label: 'SQL',
} value: '1'
} }
} ],
}, props: {
{ 'on-update:value': (value: '0' | '1') => {
type: 'radio', model.targetType = value === '0' ? 'HIVE' : 'HDFS'
field: 'srcColumnType', }
name: t('project.node.column_type'), }
span: tableSpan, },
options: [ {
{ label: t('project.node.all_columns'), value: '0' }, type: 'input',
{ label: t('project.node.some_columns'), value: '1' } field: 'srcTable',
] name: t('project.node.table'),
}, span: tableSpan,
{ props: {
type: 'input', placeholder: t('project.node.table_tips')
field: 'srcColumns', },
name: t('project.node.column'), validate: {
span: columnSpan, trigger: ['input', 'blur'],
props: { required: true,
placeholder: t('project.node.column_tips') validator(validate, value) {
}, if (tableSpan.value && !value) {
validate: { return new Error(t('project.node.table_tips'))
trigger: ['input', 'blur'], }
required: true, }
validator(validate, value) { }
if (!!columnSpan.value && !value) { },
return new Error(t('project.node.column_tips')) {
} type: 'radio',
} field: 'srcColumnType',
} name: t('project.node.column_type'),
}, span: tableSpan,
{ options: [
type: 'input', {label: t('project.node.all_columns'), value: '0'},
field: 'sourceHiveDatabase', {label: t('project.node.some_columns'), value: '1'}
name: t('project.node.database'), ]
span: hiveSpan, },
props: { {
placeholder: t('project.node.database_tips') type: 'input',
}, field: 'srcColumns',
validate: { name: t('project.node.column'),
trigger: ['blur', 'input'], span: columnSpan,
required: true, props: {
validator(validate, value) { placeholder: t('project.node.column_tips')
if (hiveSpan.value && !value) { },
return new Error(t('project.node.database_tips')) validate: {
} trigger: ['input', 'blur'],
} required: true,
} validator(validate, value) {
}, if (!!columnSpan.value && !value) {
{ return new Error(t('project.node.column_tips'))
type: 'input', }
field: 'sourceHiveTable', }
name: t('project.node.table'), }
span: hiveSpan, },
props: { {
placeholder: t('project.node.hive_table_tips') type: 'input',
}, field: 'sourceHiveDatabase',
validate: { name: t('project.node.database'),
trigger: ['blur', 'input'], span: hiveSpan,
required: true, props: {
validator(validate, value) { placeholder: t('project.node.database_tips')
if (hiveSpan.value && !value) { },
return new Error(t('project.node.hive_table_tips')) validate: {
} trigger: ['blur', 'input'],
} required: true,
} validator(validate, value) {
}, if (hiveSpan.value && !value) {
{ return new Error(t('project.node.database_tips'))
type: 'input', }
field: 'sourceHivePartitionKey', }
name: t('project.node.hive_partition_keys'), }
span: hiveSpan, },
props: { {
placeholder: t('project.node.hive_partition_keys_tips') type: 'input',
} field: 'sourceHiveTable',
}, name: t('project.node.table'),
{ span: hiveSpan,
type: 'input', props: {
field: 'sourceHivePartitionValue', placeholder: t('project.node.hive_table_tips')
name: t('project.node.hive_partition_values'), },
span: hiveSpan, validate: {
props: { trigger: ['blur', 'input'],
placeholder: t('project.node.hive_partition_values_tips') required: true,
} validator(validate, value) {
}, if (hiveSpan.value && !value) {
{ return new Error(t('project.node.hive_table_tips'))
type: 'input', }
field: 'sourceHdfsExportDir', }
name: t('project.node.export_dir'), }
span: hdfsSpan, },
props: { {
placeholder: t('project.node.export_dir_tips') type: 'input',
}, field: 'sourceHivePartitionKey',
validate: { name: t('project.node.hive_partition_keys'),
trigger: ['blur', 'input'], span: hiveSpan,
required: true, props: {
validator(validate, value) { placeholder: t('project.node.hive_partition_keys_tips')
if (hdfsSpan.value && !value) { }
return new Error(t('project.node.export_dir_tips')) },
} {
} type: 'input',
} field: 'sourceHivePartitionValue',
}, name: t('project.node.hive_partition_values'),
{ span: hiveSpan,
type: 'editor', props: {
field: 'sourceMysqlSrcQuerySql', placeholder: t('project.node.hive_partition_values_tips')
name: t('project.node.sql_statement'), }
span: editorSpan, },
validate: { {
trigger: ['blur', 'input'], type: 'input',
required: true, field: 'sourceHdfsExportDir',
validator(validate, value) { name: t('project.node.export_dir'),
if (editorSpan.value && !value) { span: hdfsSpan,
return new Error(t('project.node.sql_statement_tips')) props: {
} placeholder: t('project.node.export_dir_tips')
} },
} validate: {
}, trigger: ['blur', 'input'],
...useCustomParams({ required: true,
model, validator(validate, value) {
field: 'mapColumnHive', if (hdfsSpan.value && !value) {
name: 'map_column_hive', return new Error(t('project.node.export_dir_tips'))
isSimple: true, }
span: mysqlSpan }
}), }
...useCustomParams({ },
model, {
field: 'mapColumnJava', type: 'editor',
name: 'map_column_java', field: 'sourceMysqlSrcQuerySql',
isSimple: true, name: t('project.node.sql_statement'),
span: mysqlSpan span: editorSpan,
}) validate: {
] trigger: ['blur', 'input'],
required: true,
validator(validate, value) {
if (editorSpan.value && !value) {
return new Error(t('project.node.sql_statement_tips'))
}
}
}
},
...useCustomParams({
model,
field: 'mapColumnHive',
name: 'map_column_hive',
isSimple: true,
span: rdbmsSpan
}),
...useCustomParams({
model,
field: 'mapColumnJava',
name: 'map_column_java',
isSimple: true,
span: rdbmsSpan
})
]
} }

724
dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-sqoop-target-type.ts

@ -15,393 +15,407 @@
* limitations under the License. * limitations under the License.
*/ */
import { ref, h, watch, Ref } from 'vue' import {h, onMounted, Ref, ref, watch} from 'vue'
import { useI18n } from 'vue-i18n' import {useI18n} from 'vue-i18n'
import { useDatasource } from './use-sqoop-datasource' import {useDatasource} from './use-sqoop-datasource'
import styles from '../index.module.scss' import styles from '../index.module.scss'
import type { IJsonItem, IOption, SourceType } from '../types' import type {IJsonItem, IOption, SourceType} from '../types'
export function useTargetType( export function useTargetType(
model: { [field: string]: any }, model: { [field: string]: any },
unCustomSpan: Ref<number> unCustomSpan: Ref<number>
): IJsonItem[] { ): IJsonItem[] {
const { t } = useI18n() const {t} = useI18n()
const hiveSpan = ref(0) const hiveSpan = ref(24)
const hdfsSpan = ref(24) const hdfsSpan = ref(0)
const mysqlSpan = ref(0) const rdbmsSpan = ref(0)
const dataSourceSpan = ref(0) const dataSourceSpan = ref(0)
const updateSpan = ref(0) const updateSpan = ref(0)
const isChange: any = ref(false)
const resetSpan = () => { const rdbmsSourceTypes = ref([
hiveSpan.value = unCustomSpan.value && model.targetType === 'HIVE' ? 24 : 0 {
hdfsSpan.value = unCustomSpan.value && model.targetType === 'HDFS' ? 24 : 0
mysqlSpan.value =
unCustomSpan.value && model.targetType === 'MYSQL' ? 24 : 0
dataSourceSpan.value =
unCustomSpan.value && model.targetType === 'MYSQL' ? 12 : 0
updateSpan.value = mysqlSpan.value && model.targetMysqlIsUpdate ? 24 : 0
}
const targetTypes = ref([
{
label: 'HIVE',
value: 'HIVE'
},
{
label: 'HDFS',
value: 'HDFS'
}
] as IOption[])
const getTargetTypesBySourceType = (
sourceType: SourceType,
srcQueryType: string
): IOption[] => {
switch (sourceType) {
case 'MYSQL':
if (srcQueryType === '1') {
return [
{
label: 'HIVE',
value: 'HIVE'
},
{
label: 'HDFS',
value: 'HDFS'
}
]
}
return [
{
label: 'HIVE',
value: 'HIVE'
},
{
label: 'HDFS',
value: 'HDFS'
}
]
case 'HDFS':
case 'HIVE':
return [
{
label: 'MYSQL', label: 'MYSQL',
value: 'MYSQL' value: 'MYSQL'
} },
] {
default: label: 'ORACLE',
return [ value: 'ORACLE'
{ },
{
label: 'SQLSERVER',
value: 'SQLSERVER'
},
{
label: 'HANA',
value: 'HANA'
}
] as IOption[])
const hadoopSourceTypes = ref([
{
label: 'HIVE', label: 'HIVE',
value: 'HIVE' value: 'HIVE'
}, }, {
{
label: 'HDFS', label: 'HDFS',
value: 'HDFS' value: 'HDFS'
} }
] ] as IOption[])
const targetTypes = ref()
const resetSpan = () => {
hiveSpan.value = unCustomSpan.value && model.targetType === 'HIVE' ? 24 : 0
hdfsSpan.value = unCustomSpan.value && model.targetType === 'HDFS' ? 24 : 0
rdbmsSpan.value =
unCustomSpan.value && rdbmsSourceTypes.value.some(target => target.value === model.targetType) ? 24 : 0
dataSourceSpan.value =
unCustomSpan.value && rdbmsSourceTypes.value.some(target => target.value === model.targetType) ? 24 : 0
updateSpan.value = rdbmsSpan.value && model.targetMysqlIsUpdate ? 24 : 0
} }
}
watch( const getTargetTypesBySourceType = (
() => [model.sourceType, model.srcQueryType], sourceType: SourceType,
([sourceType, srcQueryType]) => { srcQueryType: string
targetTypes.value = getTargetTypesBySourceType(sourceType, srcQueryType) ): IOption[] => {
if (!model.targetType) { switch (sourceType) {
model.targetType = targetTypes.value[0].value case 'MYSQL':
} if (srcQueryType === '1') {
return hadoopSourceTypes.value
}
return hadoopSourceTypes.value
case 'HDFS':
case 'HIVE':
return rdbmsSourceTypes.value
default:
return hadoopSourceTypes.value
}
} }
)
watch( const resetValue = () => {
() => [unCustomSpan.value, model.targetType, model.targetMysqlIsUpdate], if (!isChange.value) {
() => { isChange.value = true
resetSpan() return
}
switch (model.modelType) {
case 'import':
model.targetHiveDatabase = '';
model.targetHiveTable = '';
model.targetHdfsTargetPath = '';
break
case 'export':
model.targetMysqlDatasource = '';
model.targetMysqlTable = '';
model.targetMysqlColumns = '';
model.targetMysqlFieldsTerminated = '';
model.targetMysqlLinesTerminated = '';
model.targetMysqlTable = '';
break
default:
model.sourceMysqlDatasource = '';
}
} }
)
return [ onMounted(() => {
{ targetTypes.value = [...hadoopSourceTypes.value];
type: 'custom', })
field: 'custom-title-target',
span: unCustomSpan, watch(
widget: h( () => [model.sourceType, model.srcQueryType],
'div', ([sourceType, srcQueryType]) => {
{ class: styles['field-title'] }, targetTypes.value = getTargetTypesBySourceType(sourceType, srcQueryType)
t('project.node.data_target') model.targetType = targetTypes.value[0].value
)
},
{
type: 'select',
field: 'targetType',
name: t('project.node.type'),
span: unCustomSpan,
options: targetTypes
},
{
type: 'input',
field: 'targetHiveDatabase',
name: t('project.node.database'),
span: hiveSpan,
props: {
placeholder: t('project.node.database_tips')
},
validate: {
trigger: ['blur', 'input'],
required: true,
validator(validate, value) {
if (hiveSpan.value && !value) {
return new Error(t('project.node.database_tips'))
}
} }
} )
},
{ watch(
type: 'input', () => [unCustomSpan.value, model.targetType, model.targetMysqlIsUpdate],
field: 'targetHiveTable', () => {
name: t('project.node.table'), resetValue();
span: hiveSpan, resetSpan()
props: {
placeholder: t('project.node.table')
},
validate: {
trigger: ['blur', 'input'],
required: true,
validator(rule, value) {
if (hiveSpan.value && !value) {
return new Error(t('project.node.hive_table_tips'))
}
} }
} )
},
{ return [
type: 'switch', {
field: 'targetHiveCreateTable', type: 'custom',
span: hiveSpan, field: 'custom-title-target',
name: t('project.node.create_hive_table') span: unCustomSpan,
}, widget: h(
{ 'div',
type: 'switch', {class: styles['field-title']},
field: 'targetHiveDropDelimiter', t('project.node.data_target')
span: hiveSpan, )
name: t('project.node.drop_delimiter') },
}, {
{ type: 'select',
type: 'switch', field: 'targetType',
field: 'targetHiveOverWrite', name: t('project.node.type'),
span: hiveSpan, span: unCustomSpan,
name: t('project.node.over_write_src') options: targetTypes
}, },
{ {
type: 'input', type: 'input',
field: 'targetHiveTargetDir', field: 'targetHiveDatabase',
name: t('project.node.hive_target_dir'), name: t('project.node.database'),
span: hiveSpan, span: hiveSpan,
props: { props: {
placeholder: t('project.node.hive_target_dir_tips') placeholder: t('project.node.database_tips')
} },
}, validate: {
{ trigger: ['blur', 'input'],
type: 'input', required: true,
field: 'targetHiveReplaceDelimiter', validator(validate, value) {
name: t('project.node.replace_delimiter'), if (hiveSpan.value && !value) {
span: hiveSpan, return new Error(t('project.node.database_tips'))
props: { }
placeholder: t('project.node.replace_delimiter_tips') }
} }
}, },
{ {
type: 'input', type: 'input',
field: 'targetHivePartitionKey', field: 'targetHiveTable',
name: t('project.node.hive_partition_keys'), name: t('project.node.table'),
span: hiveSpan, span: hiveSpan,
props: { props: {
placeholder: t('project.node.hive_partition_keys_tips') placeholder: t('project.node.table')
} },
}, validate: {
{ trigger: ['blur', 'input'],
type: 'input', required: true,
field: 'targetHivePartitionValue', validator(rule, value) {
name: t('project.node.hive_partition_values'), if (hiveSpan.value && !value) {
span: hiveSpan, return new Error(t('project.node.table_tips'))
props: { }
placeholder: t('project.node.hive_partition_values_tips') }
} }
}, },
{ {
type: 'input', type: 'switch',
field: 'targetHdfsTargetPath', field: 'targetHiveCreateTable',
name: t('project.node.target_dir'), span: hiveSpan,
span: hdfsSpan, name: t('project.node.create_hive_table')
props: { },
placeholder: t('project.node.target_dir_tips') {
}, type: 'switch',
validate: { field: 'targetHiveDropDelimiter',
trigger: ['blur', 'input'], span: hiveSpan,
required: true, name: t('project.node.drop_delimiter')
validator(rule, value) { },
if (hdfsSpan.value && !value) { {
return new Error(t('project.node.target_dir_tips')) type: 'switch',
} field: 'targetHiveOverWrite',
span: hiveSpan,
name: t('project.node.over_write_src')
},
{
type: 'input',
field: 'targetHiveTargetDir',
name: t('project.node.hive_target_dir'),
span: hiveSpan,
props: {
placeholder: t('project.node.hive_target_dir_tips')
}
},
{
type: 'input',
field: 'targetHiveReplaceDelimiter',
name: t('project.node.replace_delimiter'),
span: hiveSpan,
props: {
placeholder: t('project.node.replace_delimiter_tips')
}
},
{
type: 'input',
field: 'targetHivePartitionKey',
name: t('project.node.hive_partition_keys'),
span: hiveSpan,
props: {
placeholder: t('project.node.hive_partition_keys_tips')
}
},
{
type: 'input',
field: 'targetHivePartitionValue',
name: t('project.node.hive_partition_values'),
span: hiveSpan,
props: {
placeholder: t('project.node.hive_partition_values_tips')
}
},
{
type: 'input',
field: 'targetHdfsTargetPath',
name: t('project.node.target_dir'),
span: hdfsSpan,
props: {
placeholder: t('project.node.target_dir_tips')
},
validate: {
trigger: ['blur', 'input'],
required: true,
validator(rule, value) {
if (hdfsSpan.value && !value) {
return new Error(t('project.node.target_dir_tips'))
}
}
}
},
{
type: 'switch',
field: 'targetHdfsDeleteTargetDir',
name: t('project.node.delete_target_dir'),
span: hdfsSpan
},
{
type: 'radio',
field: 'targetHdfsCompressionCodec',
name: t('project.node.compression_codec'),
span: hdfsSpan,
options: COMPRESSIONCODECS
},
{
type: 'radio',
field: 'targetHdfsFileType',
name: t('project.node.file_type'),
span: hdfsSpan,
options: FILETYPES
},
{
type: 'input',
field: 'targetHdfsFieldsTerminated',
name: t('project.node.fields_terminated'),
span: hdfsSpan,
props: {
placeholder: t('project.node.fields_terminated_tips')
}
},
{
type: 'input',
field: 'targetHdfsLinesTerminated',
name: t('project.node.lines_terminated'),
span: hdfsSpan,
props: {
placeholder: t('project.node.lines_terminated_tips')
}
},
...useDatasource(
model,
dataSourceSpan,
'targetMysqlType',
'targetMysqlDatasource'
),
{
type: 'input',
field: 'targetMysqlTable',
name: t('project.node.table'),
span: rdbmsSpan,
props: {
placeholder: t('project.node.table_tips')
},
validate: {
trigger: ['blur', 'input'],
required: true,
validator(validate, value) {
if (rdbmsSpan.value && !value) {
return new Error(t('project.node.table_tips'))
}
}
}
},
{
type: 'input',
field: 'targetMysqlColumns',
name: t('project.node.column'),
span: rdbmsSpan,
props: {
placeholder: t('project.node.column_tips')
}
},
{
type: 'input',
field: 'targetMysqlFieldsTerminated',
name: t('project.node.fields_terminated'),
span: rdbmsSpan,
props: {
placeholder: t('project.node.fields_terminated_tips')
}
},
{
type: 'input',
field: 'targetMysqlLinesTerminated',
name: t('project.node.lines_terminated'),
span: rdbmsSpan,
props: {
placeholder: t('project.node.lines_terminated_tips')
}
},
{
type: 'switch',
field: 'targetMysqlIsUpdate',
span: rdbmsSpan,
name: t('project.node.is_update')
},
{
type: 'input',
field: 'targetMysqlTargetUpdateKey',
name: t('project.node.update_key'),
span: updateSpan,
props: {
placeholder: t('project.node.update_key_tips')
}
},
{
type: 'radio',
field: 'targetMysqlUpdateMode',
name: t('project.node.update_mode'),
span: updateSpan,
options: [
{
label: t('project.node.only_update'),
value: 'updateonly'
},
{
label: t('project.node.allow_insert'),
value: 'allowinsert'
}
]
} }
} ]
}, }
{
type: 'switch', const COMPRESSIONCODECS = [
field: 'targetHdfsDeleteTargetDir',
name: t('project.node.delete_target_dir'),
span: hdfsSpan
},
{
type: 'radio',
field: 'targetHdfsCompressionCodec',
name: t('project.node.compression_codec'),
span: hdfsSpan,
options: COMPRESSIONCODECS
},
{
type: 'radio',
field: 'targetHdfsFileType',
name: t('project.node.file_type'),
span: hdfsSpan,
options: FILETYPES
},
{
type: 'input',
field: 'targetHdfsFieldsTerminated',
name: t('project.node.fields_terminated'),
span: hdfsSpan,
props: {
placeholder: t('project.node.fields_terminated_tips')
}
},
{ {
type: 'input', label: 'snappy',
field: 'targetHdfsLinesTerminated', value: 'snappy'
name: t('project.node.lines_terminated'),
span: hdfsSpan,
props: {
placeholder: t('project.node.lines_terminated_tips')
}
}, },
...useDatasource(
model,
dataSourceSpan,
'targetMysqlType',
'targetMysqlDatasource'
),
{ {
type: 'input', label: 'lzo',
field: 'targetMysqlTable', value: 'lzo'
name: t('project.node.table'),
span: mysqlSpan,
props: {
placeholder: t('project.node.hive_table_tips')
},
validate: {
trigger: ['blur', 'input'],
required: true,
validator(validate, value) {
if (mysqlSpan.value && !value) {
return new Error(t('project.node.table_tips'))
}
}
}
}, },
{ {
type: 'input', label: 'gzip',
field: 'targetMysqlColumns', value: 'gzip'
name: t('project.node.column'),
span: mysqlSpan,
props: {
placeholder: t('project.node.column_tips')
}
}, },
{ {
type: 'input', label: 'no',
field: 'targetMysqlFieldsTerminated', value: ''
name: t('project.node.fields_terminated'), }
span: mysqlSpan, ]
props: { const FILETYPES = [
placeholder: t('project.node.fields_terminated_tips')
}
},
{ {
type: 'input', label: 'avro',
field: 'targetMysqlLinesTerminated', value: '--as-avrodatafile'
name: t('project.node.lines_terminated'),
span: mysqlSpan,
props: {
placeholder: t('project.node.lines_terminated_tips')
}
}, },
{ {
type: 'switch', label: 'sequence',
field: 'targetMysqlIsUpdate', value: '--as-sequencefile'
span: mysqlSpan,
name: t('project.node.is_update')
}, },
{ {
type: 'input', label: 'text',
field: 'targetMysqlTargetUpdateKey', value: '--as-textfile'
name: t('project.node.update_key'),
span: updateSpan,
props: {
placeholder: t('project.node.update_key_tips')
}
}, },
{ {
type: 'radio', label: 'parquet',
field: 'targetMysqlUpdateMode', value: '--as-parquetfile'
name: t('project.node.update_mode'),
span: updateSpan,
options: [
{
label: t('project.node.only_update'),
value: 'updateonly'
},
{
label: t('project.node.allow_insert'),
value: 'allowinsert'
}
]
} }
]
}
const COMPRESSIONCODECS = [
{
label: 'snappy',
value: 'snappy'
},
{
label: 'lzo',
value: 'lzo'
},
{
label: 'gzip',
value: 'gzip'
},
{
label: 'no',
value: ''
}
]
const FILETYPES = [
{
label: 'avro',
value: '--as-avrodatafile'
},
{
label: 'sequence',
value: '--as-sequencefile'
},
{
label: 'text',
value: '--as-textfile'
},
{
label: 'parquet',
value: '--as-parquetfile'
}
] ]

132
dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts

@ -25,13 +25,14 @@ import type {
ILocalParam, ILocalParam,
IDependentParameters IDependentParameters
} from './types' } from './types'
import {ref} from "vue";
export function formatParams(data: INodeData): { export function formatParams(data: INodeData): {
processDefinitionCode: string processDefinitionCode: string
upstreamCodes: string upstreamCodes: string
taskDefinitionJsonObj: object taskDefinitionJsonObj: object
} { } {
const rdbmsSourceTypes = ref(['MYSQL', 'ORACLE', 'SQLSERVER', 'HANA'])
const taskParams: ITaskParams = {} const taskParams: ITaskParams = {}
if (data.taskType === 'SUB_PROCESS' || data.taskType === 'DYNAMIC') { if (data.taskType === 'SUB_PROCESS' || data.taskType === 'DYNAMIC') {
taskParams.processDefinitionCode = data.processDefinitionCode taskParams.processDefinitionCode = data.processDefinitionCode
@ -109,79 +110,67 @@ export function formatParams(data: INodeData): {
taskParams.targetType = data.targetType taskParams.targetType = data.targetType
let targetParams: ISqoopTargetParams = {} let targetParams: ISqoopTargetParams = {}
let sourceParams: ISqoopSourceParams = {} let sourceParams: ISqoopSourceParams = {}
switch (data.targetType) { if (data.targetType === 'HIVE') {
case 'HIVE': targetParams = {
targetParams = { hiveDatabase: data.targetHiveDatabase,
hiveDatabase: data.targetHiveDatabase, hiveTable: data.targetHiveTable,
hiveTable: data.targetHiveTable, createHiveTable: data.targetHiveCreateTable,
createHiveTable: data.targetHiveCreateTable, dropDelimiter: data.targetHiveDropDelimiter,
dropDelimiter: data.targetHiveDropDelimiter, hiveOverWrite: data.targetHiveOverWrite,
hiveOverWrite: data.targetHiveOverWrite, hiveTargetDir: data.targetHiveTargetDir,
hiveTargetDir: data.targetHiveTargetDir, replaceDelimiter: data.targetHiveReplaceDelimiter,
replaceDelimiter: data.targetHiveReplaceDelimiter, hivePartitionKey: data.targetHivePartitionKey,
hivePartitionKey: data.targetHivePartitionKey, hivePartitionValue: data.targetHivePartitionValue
hivePartitionValue: data.targetHivePartitionValue }
} } else if (data.targetType === 'HDFS') {
break targetParams = {
case 'HDFS': targetPath: data.targetHdfsTargetPath,
targetParams = { deleteTargetDir: data.targetHdfsDeleteTargetDir,
targetPath: data.targetHdfsTargetPath, compressionCodec: data.targetHdfsCompressionCodec,
deleteTargetDir: data.targetHdfsDeleteTargetDir, fileType: data.targetHdfsFileType,
compressionCodec: data.targetHdfsCompressionCodec, fieldsTerminated: data.targetHdfsFieldsTerminated,
fileType: data.targetHdfsFileType, linesTerminated: data.targetHdfsLinesTerminated
fieldsTerminated: data.targetHdfsFieldsTerminated, }
linesTerminated: data.targetHdfsLinesTerminated } else if (rdbmsSourceTypes.value.some(target => target === data.targetType)){
} targetParams = {
break targetType: data.targetMysqlType,
case 'MYSQL': targetDatasource: data.targetMysqlDatasource,
targetParams = { targetTable: data.targetMysqlTable,
targetType: data.targetMysqlType, targetColumns: data.targetMysqlColumns,
targetDatasource: data.targetMysqlDatasource, fieldsTerminated: data.targetMysqlFieldsTerminated,
targetTable: data.targetMysqlTable, linesTerminated: data.targetMysqlLinesTerminated,
targetColumns: data.targetMysqlColumns, isUpdate: data.targetMysqlIsUpdate,
fieldsTerminated: data.targetMysqlFieldsTerminated, targetUpdateKey: data.targetMysqlTargetUpdateKey,
linesTerminated: data.targetMysqlLinesTerminated, targetUpdateMode: data.targetMysqlUpdateMode
isUpdate: data.targetMysqlIsUpdate, }
targetUpdateKey: data.targetMysqlTargetUpdateKey,
targetUpdateMode: data.targetMysqlUpdateMode
}
break
default:
break
} }
switch (data.sourceType) { if (rdbmsSourceTypes.value.some(target => target === data.sourceType)) {
case 'MYSQL': sourceParams = {
sourceParams = { srcTable: data.srcQueryType === '1' ? '' : data.srcTable,
srcTable: data.srcQueryType === '1' ? '' : data.srcTable, srcColumnType: data.srcQueryType === '1' ? '0' : data.srcColumnType,
srcColumnType: data.srcQueryType === '1' ? '0' : data.srcColumnType, srcColumns:
srcColumns:
data.srcQueryType === '1' || data.srcColumnType === '0' data.srcQueryType === '1' || data.srcColumnType === '0'
? '' ? ''
: data.srcColumns, : data.srcColumns,
srcQuerySql: srcQuerySql:
data.srcQueryType === '0' ? '' : data.sourceMysqlSrcQuerySql, data.srcQueryType === '0' ? '' : data.sourceMysqlSrcQuerySql,
srcQueryType: data.srcQueryType, srcQueryType: data.srcQueryType,
srcType: data.sourceMysqlType, srcType: data.sourceMysqlType,
srcDatasource: data.sourceMysqlDatasource, srcDatasource: data.sourceMysqlDatasource,
mapColumnHive: data.mapColumnHive, mapColumnHive: data.mapColumnHive,
mapColumnJava: data.mapColumnJava mapColumnJava: data.mapColumnJava
} }
break } else if (data.sourceType === 'HDFS') {
case 'HDFS': sourceParams = {
sourceParams = { exportDir: data.sourceHdfsExportDir
exportDir: data.sourceHdfsExportDir }
} } else if (data.sourceType === 'HIVE') {
break sourceParams = {
case 'HIVE': hiveDatabase: data.sourceHiveDatabase,
sourceParams = { hiveTable: data.sourceHiveTable,
hiveDatabase: data.sourceHiveDatabase, hivePartitionKey: data.sourceHivePartitionKey,
hiveTable: data.sourceHiveTable, hivePartitionValue: data.sourceHivePartitionValue
hivePartitionKey: data.sourceHivePartitionKey, }
hivePartitionValue: data.sourceHivePartitionValue
}
break
default:
break
} }
taskParams.targetParams = JSON.stringify(targetParams) taskParams.targetParams = JSON.stringify(targetParams)
taskParams.sourceParams = JSON.stringify(sourceParams) taskParams.sourceParams = JSON.stringify(sourceParams)
@ -608,6 +597,7 @@ export function formatModel(data: ITaskData) {
const targetParams: ISqoopTargetParams = JSON.parse( const targetParams: ISqoopTargetParams = JSON.parse(
data.taskParams.targetParams data.taskParams.targetParams
) )
params.targetType = data.taskParams.targetType
params.targetHiveDatabase = targetParams.hiveDatabase params.targetHiveDatabase = targetParams.hiveDatabase
params.targetHiveTable = targetParams.hiveTable params.targetHiveTable = targetParams.hiveTable
params.targetHiveCreateTable = targetParams.createHiveTable params.targetHiveCreateTable = targetParams.createHiveTable

4
dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sqoop.ts

@ -54,12 +54,12 @@ export function useSqoop({
sourceType: 'MYSQL', sourceType: 'MYSQL',
srcQueryType: '1', srcQueryType: '1',
srcColumnType: '0', srcColumnType: '0',
targetType: 'HDFS', targetType: 'HIVE',
sourceMysqlType: 'MYSQL', sourceMysqlType: 'MYSQL',
targetHdfsDeleteTargetDir: true, targetHdfsDeleteTargetDir: true,
targetHdfsCompressionCodec: 'snappy', targetHdfsCompressionCodec: 'snappy',
targetHdfsFileType: '--as-avrodatafile', targetHdfsFileType: '--as-avrodatafile',
targetMysqlType: 'MYSQL', targetMysqlType: 'MYSQL',
targetMysqlUpdateMode: 'allowinsert', targetMysqlUpdateMode: 'allowinsert',
targetHiveCreateTable: false, targetHiveCreateTable: false,
targetHiveDropDelimiter: false, targetHiveDropDelimiter: false,

98
dolphinscheduler-ui/src/views/projects/task/components/node/use-task.ts

@ -14,69 +14,63 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
import { ref, Ref, unref } from 'vue' import {ref, Ref, unref} from 'vue'
import nodes from './tasks' import nodes from './tasks'
import getElementByJson from '@/components/form/get-elements-by-json' import getElementByJson from '@/components/form/get-elements-by-json'
import { useTaskNodeStore } from '@/store/project/task-node' import {useTaskNodeStore} from '@/store/project/task-node'
import { TASK_TYPES_MAP } from '@/store/project/task-type' import {TASK_TYPES_MAP} from '@/store/project/task-type'
import type { import type {EditWorkflowDefinition, FormRules, IFormItem, IJsonItem, INodeData, ITaskData} from './types'
IFormItem,
IJsonItem,
INodeData,
ITaskData,
FormRules,
EditWorkflowDefinition
} from './types'
export function useTask({ export function useTask({
data, data,
projectCode, projectCode,
from, from,
readonly, readonly,
definition definition
}: { }: {
data: ITaskData data: ITaskData
projectCode: number projectCode: number
from?: number from?: number
readonly?: boolean readonly?: boolean
definition?: EditWorkflowDefinition definition?: EditWorkflowDefinition
}): { }): {
elementsRef: Ref<IFormItem[]> elementsRef: Ref<IFormItem[]>
rulesRef: Ref<FormRules> rulesRef: Ref<FormRules>
model: INodeData model: INodeData
} { } {
const taskStore = useTaskNodeStore() const taskStore = useTaskNodeStore()
taskStore.updateDefinition(unref(definition), data?.code) taskStore.updateDefinition(unref(definition), data?.code)
const jsonRef = ref([]) as Ref<IJsonItem[]> const jsonRef = ref([]) as Ref<IJsonItem[]>
const elementsRef = ref([]) as Ref<IFormItem[]> const elementsRef = ref([]) as Ref<IFormItem[]>
const rulesRef = ref({}) const rulesRef = ref({})
const params = { const params = {
projectCode, projectCode,
from, from,
readonly, readonly,
data, data,
jsonRef, jsonRef,
updateElements: () => { updateElements: () => {
getElements() getElements()
}
} }
}
const { model, json } = nodes[data.taskType || 'SHELL'](params) const {model, json} = nodes[data.taskType || 'SHELL'](params)
jsonRef.value = json debugger
model.preTasks = taskStore.getPreTasks jsonRef.value = json
model.name = taskStore.getName model.preTasks = taskStore.getPreTasks
model.taskExecuteType = model.name = taskStore.getName
TASK_TYPES_MAP[data.taskType || 'SHELL'].taskExecuteType || 'BATCH' model.taskExecuteType =
TASK_TYPES_MAP[data.taskType || 'SHELL'].taskExecuteType || 'BATCH'
const getElements = () => { const getElements = () => {
const { rules, elements } = getElementByJson(jsonRef.value, model) const {rules, elements} = getElementByJson(jsonRef.value, model)
elementsRef.value = elements elementsRef.value = elements
rulesRef.value = rules rulesRef.value = rules
} }
getElements() getElements()
return { elementsRef, rulesRef, model } return {elementsRef, rulesRef, model}
} }

Loading…
Cancel
Save