
[FIX-4034][server] fix sqoop import fail (#4036)

* fix #4043, sqoop import query fail

* fix #4043, sqoop task hard code & code style

* add license for SqoopConstants

* add private constructor for SqoopConstants

* fixed sqoop mysql pwd with special characters

* fix checkstyle

* fix sqoop task log

* remove unused constants
Yelli authored 4 years ago, committed via GitHub. Commit: 145314b782
15 changed files (changed line counts in parentheses):

  1. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (14)
  2. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SqoopQueryType.java (26)
  3. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopConstants.java (74)
  4. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java (39)
  5. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java (55)
  6. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java (2)
  7. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java (2)
  8. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java (24)
  9. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java (24)
  10. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java (37)
  11. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java (90)
  12. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java (38)
  13. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java (54)
  14. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java (66)
  15. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java (95)

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (14 changed lines)

@@ -221,6 +221,11 @@ public final class Constants {
      */
     public static final String COLON = ":";
 
+    /**
+     * SPACE " "
+     */
+    public static final String SPACE = " ";
+
     /**
      * SINGLE_SLASH /
      */
@@ -231,6 +236,15 @@ public final class Constants {
      */
     public static final String DOUBLE_SLASH = "//";
 
+    /**
+     * SINGLE_QUOTES "'"
+     */
+    public static final String SINGLE_QUOTES = "'";
+
+    /**
+     * DOUBLE_QUOTES "\""
+     */
+    public static final String DOUBLE_QUOTES = "\"";
+
     /**
      * SEMICOLON ;
      */

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/QueryType.java → dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SqoopQueryType.java (26 changed lines)

@@ -14,20 +14,28 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.common.enums;
 
-public enum QueryType {
+public enum SqoopQueryType {
 
-    FORM,
-    SQL;
+    FORM(0, "SQOOP_QUERY_FORM"),
+    SQL(1, "SQOOP_QUERY_SQL");
 
-    public static QueryType getEnum(int value){
-        for (QueryType e:QueryType.values()) {
-            if(e.ordinal() == value) {
-                return e;
-            }
-        }
-        //For values out of enum scope
-        return null;
-    }
+    private final Integer code;
+    private final String desc;
+
+    SqoopQueryType(Integer code, String desc) {
+        this.code = code;
+        this.desc = desc;
+    }
+
+    public Integer getCode() {
+        return code;
+    }
+
+    public String getDesc() {
+        return desc;
+    }
 }
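For context, a minimal sketch (not part of this commit) of how a call site consumes the reworked enum: the old code compared a deserialized int against the enum's ordinal(), so reordering the constants would have silently changed behavior; the new enum carries an explicit, stable code instead.

import org.apache.dolphinscheduler.common.enums.SqoopQueryType;

public class SqoopQueryTypeExample {
    public static void main(String[] args) {
        //hypothetical value, as deserialized from a task's source params
        int srcQueryType = 1;
        if (srcQueryType == SqoopQueryType.SQL.getCode()) {
            System.out.println("build a --query based import");
        } else if (srcQueryType == SqoopQueryType.FORM.getCode()) {
            System.out.println("build a --table/--columns based import");
        }
    }
}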

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopConstants.java (new file, 74 lines)

@@ -0,0 +1,74 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.task.sqoop;
public final class SqoopConstants {
private SqoopConstants() {
}
//sqoop general param
public static final String SQOOP = "sqoop";
public static final String SQOOP_MR_JOB_NAME = "mapred.job.name";
public static final String SQOOP_PARALLELISM = "-m";
public static final String FIELDS_TERMINATED_BY = "--fields-terminated-by";
public static final String LINES_TERMINATED_BY = "--lines-terminated-by";
public static final String FIELD_NULL_PLACEHOLDER = "--null-non-string 'NULL' --null-string 'NULL'";
//sqoop db
public static final String DB_CONNECT = "--connect";
public static final String DB_USERNAME = "--username";
public static final String DB_PWD = "--password";
public static final String TABLE = "--table";
public static final String COLUMNS = "--columns";
public static final String QUERY_WHERE = "where";
public static final String QUERY = "--query";
public static final String QUERY_CONDITION = "AND \\$CONDITIONS";
public static final String QUERY_WITHOUT_CONDITION = "WHERE \\$CONDITIONS";
public static final String MAP_COLUMN_HIVE = "--map-column-hive";
public static final String MAP_COLUMN_JAVA = "--map-column-java";
//sqoop hive source
public static final String HCATALOG_DATABASE = "--hcatalog-database";
public static final String HCATALOG_TABLE = "--hcatalog-table";
public static final String HCATALOG_PARTITION_KEYS = "--hcatalog-partition-keys";
public static final String HCATALOG_PARTITION_VALUES = "--hcatalog-partition-values";
//sqoop hdfs
public static final String HDFS_EXPORT_DIR = "--export-dir";
public static final String TARGET_DIR = "--target-dir";
public static final String COMPRESSION_CODEC = "--compression-codec";
//sqoop hive
public static final String HIVE_IMPORT = "--hive-import";
public static final String HIVE_TABLE = "--hive-table";
public static final String CREATE_HIVE_TABLE = "--create-hive-table";
public static final String HIVE_DROP_IMPORT_DELIMS = "--hive-drop-import-delims";
public static final String HIVE_OVERWRITE = "--hive-overwrite";
public static final String DELETE_TARGET_DIR = "--delete-target-dir";
public static final String HIVE_DELIMS_REPLACEMENT = "--hive-delims-replacement";
public static final String HIVE_PARTITION_KEY = "--hive-partition-key";
public static final String HIVE_PARTITION_VALUE = "--hive-partition-value";
//sqoop update model
public static final String UPDATE_KEY = "--update-key";
public static final String UPDATE_MODE = "--update-mode";
}
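As a rough sketch (not from the patch) of why these constants exist: command segments are now assembled from named flags plus Constants.SPACE rather than hard-coded fragments such as " -m ". The "import" model type below is a made-up illustration value.

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;

public class SqoopConstantsExample {
    public static void main(String[] args) {
        //assembles a parallelism flag the same way the generators do
        String segment = SqoopConstants.SQOOP
                + Constants.SPACE + "import"
                + Constants.SPACE + SqoopConstants.SQOOP_PARALLELISM
                + Constants.SPACE + 1;
        System.out.println(segment); //prints: sqoop import -m 1
    }
}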

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java (39 changed lines)

@@ -14,61 +14,70 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop;
 
 import org.apache.dolphinscheduler.common.enums.CommandType;
 import org.apache.dolphinscheduler.common.process.Property;
 import org.apache.dolphinscheduler.common.task.AbstractParameters;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.common.utils.ParameterUtils;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
 import org.apache.dolphinscheduler.server.utils.ParamUtils;
 import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator;
-import org.slf4j.Logger;
 
 import java.util.Map;
-import org.apache.dolphinscheduler.common.utils.*;
+
+import org.slf4j.Logger;
 
 /**
  * sqoop task extends the shell task
  */
 public class SqoopTask extends AbstractYarnTask {
 
+    /**
+     * sqoop task params
+     */
     private SqoopParameters sqoopParameters;
 
     /**
      * taskExecutionContext
      */
-    private TaskExecutionContext taskExecutionContext;
+    private final TaskExecutionContext sqoopTaskExecutionContext;
 
     public SqoopTask(TaskExecutionContext taskExecutionContext, Logger logger) {
         super(taskExecutionContext, logger);
-        this.taskExecutionContext = taskExecutionContext;
+        this.sqoopTaskExecutionContext = taskExecutionContext;
     }
 
     @Override
-    public void init() throws Exception {
-        logger.info("sqoop task params {}", taskExecutionContext.getTaskParams());
+    public void init() {
+        logger.info("sqoop task params {}", sqoopTaskExecutionContext.getTaskParams());
         sqoopParameters =
-                JSONUtils.parseObject(taskExecutionContext.getTaskParams(),SqoopParameters.class);
-        if (!sqoopParameters.checkParameters()) {
-            throw new RuntimeException("sqoop task params is not valid");
+                JSONUtils.parseObject(sqoopTaskExecutionContext.getTaskParams(), SqoopParameters.class);
+        //check sqoop task params
+        if (null == sqoopParameters) {
+            throw new IllegalArgumentException("Sqoop Task params is null");
+        }
+        if (!sqoopParameters.checkParameters()) {
+            throw new IllegalArgumentException("Sqoop Task params check fail");
         }
     }
 
     @Override
-    protected String buildCommand() throws Exception {
+    protected String buildCommand() {
         //get sqoop scripts
         SqoopJobGenerator generator = new SqoopJobGenerator();
-        String script = generator.generateSqoopJob(sqoopParameters,taskExecutionContext);
+        String script = generator.generateSqoopJob(sqoopParameters, sqoopTaskExecutionContext);
 
-        Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()),
-                taskExecutionContext.getDefinedParams(),
+        Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(sqoopTaskExecutionContext.getDefinedParams()),
+                sqoopTaskExecutionContext.getDefinedParams(),
                 sqoopParameters.getLocalParametersMap(),
-                CommandType.of(taskExecutionContext.getCmdTypeIfComplement()),
-                taskExecutionContext.getScheduleTime());
+                CommandType.of(sqoopTaskExecutionContext.getCmdTypeIfComplement()),
+                sqoopTaskExecutionContext.getScheduleTime());
 
         if (paramsMap != null) {
             String resultScripts = ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap));

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java (55 changed lines)

@@ -14,71 +14,72 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator;
 
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.process.Property;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
 import org.apache.dolphinscheduler.common.utils.CollectionUtils;
-import org.apache.dolphinscheduler.common.utils.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;
 
 import java.util.List;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * common script generator
  */
 public class CommonGenerator {
 
-    private Logger logger = LoggerFactory.getLogger(getClass());
+    private static final Logger logger = LoggerFactory.getLogger(CommonGenerator.class);
 
     public String generate(SqoopParameters sqoopParameters) {
-        StringBuilder result = new StringBuilder();
+
+        StringBuilder commonSb = new StringBuilder();
+
         try {
-            result.append("sqoop ")
+            //sqoop task model
+            commonSb.append(SqoopConstants.SQOOP)
+                .append(Constants.SPACE)
                 .append(sqoopParameters.getModelType());
 
-            //set sqoop job name
-            result.append(" -D mapred.job.name")
-                    .append(Constants.EQUAL_SIGN)
-                    .append(sqoopParameters.getJobName());
+            //sqoop map-reduce job name
+            commonSb.append(Constants.SPACE).append(Constants.D).append(Constants.SPACE)
+                .append(String.format("%s%s%s", SqoopConstants.SQOOP_MR_JOB_NAME,
+                    Constants.EQUAL_SIGN, sqoopParameters.getJobName()));
 
-            //set hadoop custom param
+            //hadoop custom param
             List<Property> hadoopCustomParams = sqoopParameters.getHadoopCustomParams();
             if (CollectionUtils.isNotEmpty(hadoopCustomParams)) {
                 for (Property hadoopCustomParam : hadoopCustomParams) {
-                    String hadoopCustomParamStr = " -D " + hadoopCustomParam.getProp()
-                            + Constants.EQUAL_SIGN + hadoopCustomParam.getValue();
-                    if (StringUtils.isNotEmpty(hadoopCustomParamStr)) {
-                        result.append(hadoopCustomParamStr);
-                    }
+                    String hadoopCustomParamStr = String.format("%s%s%s", hadoopCustomParam.getProp(),
+                        Constants.EQUAL_SIGN, hadoopCustomParam.getValue());
+                    commonSb.append(Constants.SPACE).append(Constants.D)
+                        .append(Constants.SPACE).append(hadoopCustomParamStr);
                 }
             }
 
-            //set sqoop advanced custom param
+            //sqoop custom params
             List<Property> sqoopAdvancedParams = sqoopParameters.getSqoopAdvancedParams();
             if (CollectionUtils.isNotEmpty(sqoopAdvancedParams)) {
                 for (Property sqoopAdvancedParam : sqoopAdvancedParams) {
-                    String sqoopAdvancedParamStr = " " + sqoopAdvancedParam.getProp()
-                            + " " + sqoopAdvancedParam.getValue();
-                    if (StringUtils.isNotEmpty(sqoopAdvancedParamStr)) {
-                        result.append(sqoopAdvancedParamStr);
-                    }
+                    commonSb.append(Constants.SPACE).append(sqoopAdvancedParam.getProp())
+                        .append(Constants.SPACE).append(sqoopAdvancedParam.getValue());
                 }
             }
 
+            //sqoop parallelism
             if (sqoopParameters.getConcurrency() > 0) {
-                result.append(" -m ")
-                        .append(sqoopParameters.getConcurrency());
+                commonSb.append(Constants.SPACE).append(SqoopConstants.SQOOP_PARALLELISM)
+                    .append(Constants.SPACE).append(sqoopParameters.getConcurrency());
            }
        } catch (Exception e) {
-            logger.error(e.getMessage());
+            logger.error(String.format("Sqoop task general param build failed: [%s]", e.getMessage()));
        }
 
-        return result.toString();
+        return commonSb.toString();
    }
 }
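To make the rewrite concrete, a hedged sketch of the expected output, assuming Constants.D resolves to "-D" and SqoopParameters exposes the usual bean setters (both are assumptions, not shown in this diff):

import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.CommonGenerator;

public class CommonGeneratorExample {
    public static void main(String[] args) {
        SqoopParameters params = new SqoopParameters();
        params.setModelType("import");     //made-up values for illustration
        params.setJobName("sqoop_import");
        params.setConcurrency(1);
        //expected: "sqoop import -D mapred.job.name=sqoop_import -m 1"
        System.out.println(new CommonGenerator().generate(params));
    }
}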

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java (2 changed lines)

@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator;
 
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
@@ -26,6 +27,7 @@ public interface ISourceGenerator {
 
     /**
      * generate the source script
+     *
      * @param sqoopParameters sqoopParameters
     * @param taskExecutionContext taskExecutionContext
     * @return source script

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java (2 changed lines)

@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator;
 
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
@@ -26,6 +27,7 @@ public interface ITargetGenerator {
 
     /**
      * generate the target script
+     *
     * @param sqoopParameters sqoopParameters
     * @param taskExecutionContext taskExecutionContext
     * @return target script

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java (24 changed lines)

@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator;
 
 import org.apache.dolphinscheduler.common.enums.SqoopJobType;
@@ -46,7 +47,7 @@ public class SqoopJobGenerator {
     /**
      * common script generator
      */
-    private CommonGenerator commonGenerator;
+    private final CommonGenerator commonGenerator;
 
     public SqoopJobGenerator() {
         commonGenerator = new CommonGenerator();
@@ -59,8 +60,9 @@ public class SqoopJobGenerator {
 
     /**
      * get the final sqoop scripts
-     * @param sqoopParameters
-     * @return
+     *
+     * @param sqoopParameters sqoop params
+     * @return sqoop scripts
      */
     public String generateSqoopJob(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) {
@@ -72,9 +74,9 @@ public class SqoopJobGenerator {
                 throw new RuntimeException("sqoop task source type or target type is null");
             }
 
-            sqoopScripts = commonGenerator.generate(sqoopParameters)
-                    + sourceGenerator.generate(sqoopParameters,taskExecutionContext)
-                    + targetGenerator.generate(sqoopParameters,taskExecutionContext);
+            sqoopScripts = String.format("%s%s%s", commonGenerator.generate(sqoopParameters),
+                sourceGenerator.generate(sqoopParameters, taskExecutionContext),
+                targetGenerator.generate(sqoopParameters, taskExecutionContext));
         } else if (SqoopJobType.CUSTOM.getDescp().equals(sqoopParameters.getJobType())) {
             sqoopScripts = sqoopParameters.getCustomShell().replaceAll("\\r\\n", "\n");
         }
@@ -84,8 +86,9 @@ public class SqoopJobGenerator {
 
     /**
      * get the source generator
-     * @param sourceType
-     * @return
+     *
+     * @param sourceType sqoop source type
+     * @return sqoop source generator
     */
     private ISourceGenerator createSourceGenerator(String sourceType) {
         switch (sourceType) {
@@ -102,8 +105,9 @@ public class SqoopJobGenerator {
 
     /**
      * get the target generator
-     * @param targetType
-     * @return
+     *
+     * @param targetType sqoop target type
+     * @return sqoop target generator
     */
     private ITargetGenerator createTargetGenerator(String targetType) {
         switch (targetType) {

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java (24 changed lines)

@@ -14,14 +14,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources;
 
+import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
 import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHdfsParameter;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
+import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
+import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,28 +34,30 @@ import org.slf4j.LoggerFactory;
  */
 public class HdfsSourceGenerator implements ISourceGenerator {
 
-    private Logger logger = LoggerFactory.getLogger(getClass());
+    private static final Logger logger = LoggerFactory.getLogger(HdfsSourceGenerator.class);
 
     @Override
     public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) {
-        StringBuilder result = new StringBuilder();
+
+        StringBuilder hdfsSourceSb = new StringBuilder();
+
         try {
             SourceHdfsParameter sourceHdfsParameter
                     = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHdfsParameter.class);
 
-            if(sourceHdfsParameter != null){
+            if (null != sourceHdfsParameter) {
                 if (StringUtils.isNotEmpty(sourceHdfsParameter.getExportDir())) {
-                    result.append(" --export-dir ")
-                            .append(sourceHdfsParameter.getExportDir());
+                    hdfsSourceSb.append(Constants.SPACE).append(SqoopConstants.HDFS_EXPORT_DIR)
+                        .append(Constants.SPACE).append(sourceHdfsParameter.getExportDir());
                 } else {
-                    throw new Exception("--export-dir is null");
+                    throw new IllegalArgumentException("Sqoop hdfs export dir is null");
                }
            }
        } catch (Exception e) {
-            logger.error("get hdfs source failed",e);
+            logger.error(String.format("Sqoop hdfs source params build failed: [%s]", e.getMessage()));
        }
 
-        return result.toString();
+        return hdfsSourceSb.toString();
    }
 }
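A small usage sketch with illustrative values (it assumes a default-constructible TaskExecutionContext, which this generator never actually reads):

import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.HdfsSourceGenerator;

public class HdfsSourceExample {
    public static void main(String[] args) {
        SqoopParameters params = new SqoopParameters();
        params.setSourceParams("{\"exportDir\":\"/tmp/person\"}"); //made-up path
        String segment = new HdfsSourceGenerator().generate(params, new TaskExecutionContext());
        System.out.println(segment); //expected: " --export-dir /tmp/person"
    }
}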

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java (37 changed lines)

@@ -14,14 +14,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
 import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHiveParameter;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
+import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
+import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,33 +34,40 @@ import org.slf4j.LoggerFactory;
  */
 public class HiveSourceGenerator implements ISourceGenerator {
 
-    private Logger logger = LoggerFactory.getLogger(getClass());
+    private static final Logger logger = LoggerFactory.getLogger(HiveSourceGenerator.class);
 
     @Override
     public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) {
-        StringBuilder sb = new StringBuilder();
+
+        StringBuilder hiveSourceSb = new StringBuilder();
+
         try {
             SourceHiveParameter sourceHiveParameter
                     = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHiveParameter.class);
-            if(sourceHiveParameter != null){
+
+            if (null != sourceHiveParameter) {
                 if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveDatabase())) {
-                    sb.append(" --hcatalog-database ").append(sourceHiveParameter.getHiveDatabase());
+                    hiveSourceSb.append(Constants.SPACE).append(SqoopConstants.HCATALOG_DATABASE)
+                        .append(Constants.SPACE).append(sourceHiveParameter.getHiveDatabase());
                 }
 
                 if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveTable())) {
-                    sb.append(" --hcatalog-table ").append(sourceHiveParameter.getHiveTable());
+                    hiveSourceSb.append(Constants.SPACE).append(SqoopConstants.HCATALOG_TABLE)
+                        .append(Constants.SPACE).append(sourceHiveParameter.getHiveTable());
                 }
 
-                if(StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionKey())&&
-                        StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionValue())){
-                    sb.append(" --hcatalog-partition-keys ").append(sourceHiveParameter.getHivePartitionKey())
-                            .append(" --hcatalog-partition-values ").append(sourceHiveParameter.getHivePartitionValue());
+                if (StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionKey())
+                    && StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionValue())) {
+                    hiveSourceSb.append(Constants.SPACE).append(SqoopConstants.HCATALOG_PARTITION_KEYS)
+                        .append(Constants.SPACE).append(sourceHiveParameter.getHivePartitionKey())
+                        .append(Constants.SPACE).append(SqoopConstants.HCATALOG_PARTITION_VALUES)
+                        .append(Constants.SPACE).append(sourceHiveParameter.getHivePartitionValue());
                }
            }
        } catch (Exception e) {
-            logger.error(e.getMessage());
+            logger.error(String.format("Sqoop hive source params build failed: [%s]", e.getMessage()));
        }
 
-        return sb.toString();
+        return hiveSourceSb.toString();
    }
 }
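Under the same pattern, an export from a partitioned hive table (all names made up) should yield " --hcatalog-database test --hcatalog-table person --hcatalog-partition-keys dt --hcatalog-partition-values 20201201":

import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.HiveSourceGenerator;

public class HiveSourceExample {
    public static void main(String[] args) {
        SqoopParameters params = new SqoopParameters();
        //made-up database/table/partition values
        params.setSourceParams("{\"hiveDatabase\":\"test\",\"hiveTable\":\"person\","
                + "\"hivePartitionKey\":\"dt\",\"hivePartitionValue\":\"20201201\"}");
        System.out.println(new HiveSourceGenerator().generate(params, new TaskExecutionContext()));
    }
}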

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java (90 changed lines)

@@ -14,106 +14,118 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.enums.DbType;
-import org.apache.dolphinscheduler.common.enums.QueryType;
+import org.apache.dolphinscheduler.common.enums.SqoopQueryType;
 import org.apache.dolphinscheduler.common.process.Property;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
 import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
+import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
 import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
 import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
+import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import java.util.List;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * mysql source generator
  */
 public class MysqlSourceGenerator implements ISourceGenerator {
 
-    private Logger logger = LoggerFactory.getLogger(getClass());
+    private static final Logger logger = LoggerFactory.getLogger(MysqlSourceGenerator.class);
 
     @Override
     public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) {
-        StringBuilder result = new StringBuilder();
-        try {
-            SourceMysqlParameter sourceMysqlParameter
-                    = JSONUtils.parseObject(sqoopParameters.getSourceParams(),SourceMysqlParameter.class);
+
+        StringBuilder mysqlSourceSb = new StringBuilder();
+
+        try {
+            SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class);
             SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext();
 
-            if(sourceMysqlParameter != null){
+            if (null != sourceMysqlParameter) {
                 BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getSourcetype()),
                         sqoopTaskExecutionContext.getSourceConnectionParams());
-                if(baseDataSource != null){
-                    result.append(" --connect ")
-                            .append(baseDataSource.getJdbcUrl())
-                            .append(" --username ")
-                            .append(baseDataSource.getUser())
-                            .append(" --password ")
-                            .append(baseDataSource.getPassword());
+
+                if (null != baseDataSource) {
+
+                    mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.DB_CONNECT)
+                        .append(Constants.SPACE).append(baseDataSource.getJdbcUrl())
+                        .append(Constants.SPACE).append(SqoopConstants.DB_USERNAME)
+                        .append(Constants.SPACE).append(baseDataSource.getUser())
+                        .append(Constants.SPACE).append(SqoopConstants.DB_PWD)
+                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getPassword()).append(Constants.DOUBLE_QUOTES);
 
-                    if(sourceMysqlParameter.getSrcQueryType() == QueryType.FORM.ordinal()){
+                    //sqoop table & sql query
+                    if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) {
                         if (StringUtils.isNotEmpty(sourceMysqlParameter.getSrcTable())) {
-                            result.append(" --table ").append(sourceMysqlParameter.getSrcTable());
+                            mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.TABLE)
+                                .append(Constants.SPACE).append(sourceMysqlParameter.getSrcTable());
                        }
 
                         if (StringUtils.isNotEmpty(sourceMysqlParameter.getSrcColumns())) {
-                            result.append(" --columns ").append(sourceMysqlParameter.getSrcColumns());
+                            mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.COLUMNS)
+                                .append(Constants.SPACE).append(sourceMysqlParameter.getSrcColumns());
                        }
-
-                    }else if(sourceMysqlParameter.getSrcQueryType() == QueryType.SQL.ordinal()
+                    } else if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode()
                             && StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())) {
+
                         String srcQuery = sourceMysqlParameter.getSrcQuerySql();
-                        if(srcQuery.toLowerCase().contains("where")){
-                            srcQuery += " AND "+"$CONDITIONS";
+                        mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.QUERY)
+                            .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(srcQuery);
+
+                        if (srcQuery.toLowerCase().contains(SqoopConstants.QUERY_WHERE)) {
+                            mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.QUERY_CONDITION).append(Constants.DOUBLE_QUOTES);
                         } else {
-                            srcQuery += " WHERE $CONDITIONS";
+                            mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.QUERY_WITHOUT_CONDITION).append(Constants.DOUBLE_QUOTES);
                        }
-                        result.append(" --query \'").append(srcQuery).append("\'");
-
                    }
 
+                    //sqoop hive map column
                     List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive();
 
-                    if(mapColumnHive != null && !mapColumnHive.isEmpty()){
+                    if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
                         StringBuilder columnMap = new StringBuilder();
                         for (Property item : mapColumnHive) {
-                            columnMap.append(item.getProp()).append("=").append(item.getValue()).append(",");
+                            columnMap.append(item.getProp()).append(Constants.EQUAL_SIGN).append(item.getValue()).append(Constants.COMMA);
                        }
 
                         if (StringUtils.isNotEmpty(columnMap.toString())) {
-                            result.append(" --map-column-hive ")
-                                    .append(columnMap.substring(0,columnMap.length() - 1));
+                            mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.MAP_COLUMN_HIVE)
+                                .append(Constants.SPACE).append(columnMap.substring(0, columnMap.length() - 1));
                        }
                    }
 
+                    //sqoop map column java
                     List<Property> mapColumnJava = sourceMysqlParameter.getMapColumnJava();
 
-                    if(mapColumnJava != null && !mapColumnJava.isEmpty()){
+                    if (null != mapColumnJava && !mapColumnJava.isEmpty()) {
                         StringBuilder columnMap = new StringBuilder();
                         for (Property item : mapColumnJava) {
-                            columnMap.append(item.getProp()).append("=").append(item.getValue()).append(",");
+                            columnMap.append(item.getProp()).append(Constants.EQUAL_SIGN).append(item.getValue()).append(Constants.COMMA);
                        }
 
                         if (StringUtils.isNotEmpty(columnMap.toString())) {
-                            result.append(" --map-column-java ")
-                                    .append(columnMap.substring(0,columnMap.length() - 1));
+                            mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.MAP_COLUMN_JAVA)
+                                .append(Constants.SPACE).append(columnMap.substring(0, columnMap.length() - 1));
                        }
                    }
                }
            }
        } catch (Exception e) {
-            logger.error(e.getMessage());
+            logger.error(String.format("Sqoop task mysql source params build failed: [%s]", e.getMessage()));
        }
 
-        return result.toString();
+        return mysqlSourceSb.toString();
    }
 }
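This file carries the two user-visible fixes. First, the password is now wrapped in Constants.DOUBLE_QUOTES, so passwords containing shell-special characters such as spaces, $ or ' no longer break the generated command (a password containing a double quote itself is still not escaped). Second, the $CONDITIONS clause now lives inside the same double-quoted --query string, which is why QUERY_CONDITION and QUERY_WITHOUT_CONDITION embed a backslash (AND \$CONDITIONS): inside double quotes the shell would otherwise expand $CONDITIONS to an empty string before Sqoop ever sees it. With made-up connection values and srcQuerySql "select * from person_2 where id > 10", the generated source segment looks like:

 --connect jdbc:mysql://host:3306/test --username root --password "p@ss'word" --query "select * from person_2 where id > 10 AND \$CONDITIONS"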

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java (38 changed lines)

@@ -14,14 +14,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
 import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHdfsParameter;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
+import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
+import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,47 +34,53 @@ import org.slf4j.LoggerFactory;
  */
 public class HdfsTargetGenerator implements ITargetGenerator {
 
-    private Logger logger = LoggerFactory.getLogger(getClass());
+    private static final Logger logger = LoggerFactory.getLogger(HdfsTargetGenerator.class);
 
     @Override
     public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) {
-        StringBuilder result = new StringBuilder();
+
+        StringBuilder hdfsTargetSb = new StringBuilder();
+
         try {
             TargetHdfsParameter targetHdfsParameter =
                     JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetHdfsParameter.class);
 
-            if(targetHdfsParameter != null){
+            if (null != targetHdfsParameter) {
 
                 if (StringUtils.isNotEmpty(targetHdfsParameter.getTargetPath())) {
-                    result.append(" --target-dir ").append(targetHdfsParameter.getTargetPath());
+                    hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.TARGET_DIR)
+                        .append(Constants.SPACE).append(targetHdfsParameter.getTargetPath());
                }
 
                 if (StringUtils.isNotEmpty(targetHdfsParameter.getCompressionCodec())) {
-                    result.append(" --compression-codec ").append(targetHdfsParameter.getCompressionCodec());
+                    hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.COMPRESSION_CODEC)
+                        .append(Constants.SPACE).append(targetHdfsParameter.getCompressionCodec());
                }
 
                 if (StringUtils.isNotEmpty(targetHdfsParameter.getFileType())) {
-                    result.append(" ").append(targetHdfsParameter.getFileType());
+                    hdfsTargetSb.append(Constants.SPACE).append(targetHdfsParameter.getFileType());
                }
 
                 if (targetHdfsParameter.isDeleteTargetDir()) {
-                    result.append(" --delete-target-dir");
+                    hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.DELETE_TARGET_DIR);
                }
 
                 if (StringUtils.isNotEmpty(targetHdfsParameter.getFieldsTerminated())) {
-                    result.append(" --fields-terminated-by '").append(targetHdfsParameter.getFieldsTerminated()).append("'");
+                    hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.FIELDS_TERMINATED_BY)
+                        .append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetHdfsParameter.getFieldsTerminated()).append(Constants.SINGLE_QUOTES);
                }
 
                 if (StringUtils.isNotEmpty(targetHdfsParameter.getLinesTerminated())) {
-                    result.append(" --lines-terminated-by '").append(targetHdfsParameter.getLinesTerminated()).append("'");
+                    hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.LINES_TERMINATED_BY)
+                        .append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetHdfsParameter.getLinesTerminated()).append(Constants.SINGLE_QUOTES);
                }
 
-                result.append(" --null-non-string 'NULL' --null-string 'NULL'");
+                hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.FIELD_NULL_PLACEHOLDER);
            }
        } catch (Exception e) {
-            logger.error(e.getMessage());
+            logger.error(String.format("Sqoop hdfs target params build failed: [%s]", e.getMessage()));
        }
 
-        return result.toString();
+        return hdfsTargetSb.toString();
    }
 }
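Reading the branches off directly, a target configuration with targetPath /tmp/out, deleteTargetDir=true and a comma field delimiter (made-up values) is expected to produce:

 --target-dir /tmp/out --delete-target-dir --fields-terminated-by ',' --null-non-string 'NULL' --null-string 'NULL'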

dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java (54 changed lines)

@@ -14,14 +14,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
 import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHiveParameter;
-import org.apache.dolphinscheduler.common.utils.*;
+import org.apache.dolphinscheduler.common.utils.JSONUtils;
+import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
+import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,57 +34,57 @@ import org.slf4j.LoggerFactory;
  */
 public class HiveTargetGenerator implements ITargetGenerator {
 
-    private Logger logger = LoggerFactory.getLogger(getClass());
+    private static final Logger logger = LoggerFactory.getLogger(HiveTargetGenerator.class);
 
     @Override
     public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) {
 
-        StringBuilder result = new StringBuilder();
+        StringBuilder hiveTargetSb = new StringBuilder();
 
         try {
             TargetHiveParameter targetHiveParameter =
                     JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetHiveParameter.class);
-            if(targetHiveParameter != null){
-
-                result.append(" --hive-import ");
+            if (null != targetHiveParameter) {
+                hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_IMPORT);
 
-                if(StringUtils.isNotEmpty(targetHiveParameter.getHiveDatabase())&&
-                        StringUtils.isNotEmpty(targetHiveParameter.getHiveTable())){
-                    result.append(" --hive-table ")
-                            .append(targetHiveParameter.getHiveDatabase())
-                            .append(".")
-                            .append(targetHiveParameter.getHiveTable());
+                if (StringUtils.isNotEmpty(targetHiveParameter.getHiveDatabase())
+                    && StringUtils.isNotEmpty(targetHiveParameter.getHiveTable())) {
+                    hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_TABLE)
+                        .append(Constants.SPACE).append(String.format("%s.%s", targetHiveParameter.getHiveDatabase(),
+                            targetHiveParameter.getHiveTable()));
                }
 
                 if (targetHiveParameter.isCreateHiveTable()) {
-                    result.append(" --create-hive-table");
+                    hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.CREATE_HIVE_TABLE);
                }
 
                 if (targetHiveParameter.isDropDelimiter()) {
-                    result.append(" --hive-drop-import-delims");
+                    hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_DROP_IMPORT_DELIMS);
                }
 
                 if (targetHiveParameter.isHiveOverWrite()) {
-                    result.append(" --hive-overwrite -delete-target-dir");
+                    hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_OVERWRITE)
+                        .append(Constants.SPACE).append(SqoopConstants.DELETE_TARGET_DIR);
                }
 
                 if (StringUtils.isNotEmpty(targetHiveParameter.getReplaceDelimiter())) {
-                    result.append(" --hive-delims-replacement ").append(targetHiveParameter.getReplaceDelimiter());
+                    hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_DELIMS_REPLACEMENT)
+                        .append(Constants.SPACE).append(targetHiveParameter.getReplaceDelimiter());
                }
 
-                if(StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionKey())&&
-                        StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionValue())){
-                    result.append(" --hive-partition-key ")
-                            .append(targetHiveParameter.getHivePartitionKey())
-                            .append(" --hive-partition-value ")
-                            .append(targetHiveParameter.getHivePartitionValue());
+                if (StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionKey())
+                    && StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionValue())) {
+                    hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_PARTITION_KEY)
+                        .append(Constants.SPACE).append(targetHiveParameter.getHivePartitionKey())
+                        .append(Constants.SPACE).append(SqoopConstants.HIVE_PARTITION_VALUE)
+                        .append(Constants.SPACE).append(targetHiveParameter.getHivePartitionValue());
                }
            }
        } catch (Exception e) {
-            logger.error(e.getMessage());
+            logger.error(String.format("Sqoop hive target params build failed: [%s]", e.getMessage()));
        }
 
-        return result.toString();
+        return hiveTargetSb.toString();
    }
 }
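Worth noting: the old literal emitted " --hive-overwrite -delete-target-dir" with a single dash on the second flag; routing it through SqoopConstants.DELETE_TARGET_DIR yields the correct double-dash form. For made-up values hiveDatabase=test, hiveTable=person_2 with create and overwrite enabled, the segment becomes:

 --hive-import --hive-table test.person_2 --create-hive-table --hive-overwrite --delete-target-dir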

66
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java

@ -14,21 +14,22 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets; package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets;
import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter;
import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants;
import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -37,59 +38,74 @@ import org.slf4j.LoggerFactory;
*/ */
public class MysqlTargetGenerator implements ITargetGenerator { public class MysqlTargetGenerator implements ITargetGenerator {
private Logger logger = LoggerFactory.getLogger(getClass()); private static final Logger logger = LoggerFactory.getLogger(MysqlTargetGenerator.class);
@Override @Override
public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) {
StringBuilder result = new StringBuilder(); StringBuilder mysqlTargetSb = new StringBuilder();
try{
try {
TargetMysqlParameter targetMysqlParameter = TargetMysqlParameter targetMysqlParameter =
JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class); JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class);
SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext(); SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext();
if(targetMysqlParameter != null && targetMysqlParameter.getTargetDatasource() != 0){ if (null != targetMysqlParameter && targetMysqlParameter.getTargetDatasource() != 0) {
// get datasource // get datasource
BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getTargetType()), BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getTargetType()),
sqoopTaskExecutionContext.getTargetConnectionParams()); sqoopTaskExecutionContext.getTargetConnectionParams());
-            if(baseDataSource != null){
-                result.append(" --connect ")
-                        .append(baseDataSource.getJdbcUrl())
-                        .append(" --username ")
-                        .append(baseDataSource.getUser())
-                        .append(" --password ")
-                        .append(baseDataSource.getPassword())
-                        .append(" --table ")
-                        .append(targetMysqlParameter.getTargetTable());
+            if (null != baseDataSource) {
+
+                mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.DB_CONNECT)
+                        .append(Constants.SPACE).append(baseDataSource.getJdbcUrl())
+                        .append(Constants.SPACE).append(SqoopConstants.DB_USERNAME)
+                        .append(Constants.SPACE).append(baseDataSource.getUser())
+                        .append(Constants.SPACE).append(SqoopConstants.DB_PWD)
+                        .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getPassword()).append(Constants.DOUBLE_QUOTES)
+                        .append(Constants.SPACE).append(SqoopConstants.TABLE)
+                        .append(Constants.SPACE).append(targetMysqlParameter.getTargetTable());

                 if (StringUtils.isNotEmpty(targetMysqlParameter.getTargetColumns())) {
-                    result.append(" --columns ").append(targetMysqlParameter.getTargetColumns());
+                    mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.COLUMNS)
+                            .append(Constants.SPACE).append(targetMysqlParameter.getTargetColumns());
                 }

                 if (StringUtils.isNotEmpty(targetMysqlParameter.getFieldsTerminated())) {
-                    result.append(" --fields-terminated-by '").append(targetMysqlParameter.getFieldsTerminated()).append("'");
+                    mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.FIELDS_TERMINATED_BY);
+                    if (targetMysqlParameter.getFieldsTerminated().contains("'")) {
+                        mysqlTargetSb.append(Constants.SPACE).append(targetMysqlParameter.getFieldsTerminated());
+                    } else {
+                        mysqlTargetSb.append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetMysqlParameter.getFieldsTerminated()).append(Constants.SINGLE_QUOTES);
+                    }
                 }

                 if (StringUtils.isNotEmpty(targetMysqlParameter.getLinesTerminated())) {
-                    result.append(" --lines-terminated-by '").append(targetMysqlParameter.getLinesTerminated()).append("'");
+                    mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.LINES_TERMINATED_BY);
+                    if (targetMysqlParameter.getLinesTerminated().contains(Constants.SINGLE_QUOTES)) {
+                        mysqlTargetSb.append(Constants.SPACE).append(targetMysqlParameter.getLinesTerminated());
+                    } else {
+                        mysqlTargetSb.append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetMysqlParameter.getLinesTerminated()).append(Constants.SINGLE_QUOTES);
+                    }
                 }

                 if (targetMysqlParameter.getIsUpdate()
                         && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey())
                         && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())) {
-                    result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey())
-                            .append(" --update-mode ").append(targetMysqlParameter.getTargetUpdateMode());
+                    mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.UPDATE_KEY)
+                            .append(Constants.SPACE).append(targetMysqlParameter.getTargetUpdateKey())
+                            .append(Constants.SPACE).append(SqoopConstants.UPDATE_MODE)
+                            .append(Constants.SPACE).append(targetMysqlParameter.getTargetUpdateMode());
                 }
             }
         } catch (Exception e) {
-            logger.error(e.getMessage());
+            logger.error(String.format("Sqoop mysql target params build failed: [%s]", e.getMessage()));
         }
-        return result.toString();
+        return mysqlTargetSb.toString();
     }
 }
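Note on the quoting rules introduced above: the password is now always wrapped in double quotes, and a user-supplied field or line delimiter is wrapped in single quotes only when it does not itself contain one. A minimal standalone sketch of those two rules, with the project constants inlined as plain strings (an assumption made here for illustration; the real generator uses Constants and SqoopConstants):

    public class QuotingSketch {

        // Double-quote the password so characters such as spaces or '&'
        // survive when the worker hands the command line to a shell.
        static String passwordArg(String password) {
            return " --password \"" + password + "\"";
        }

        // Single-quote a delimiter unless it already contains a single quote,
        // mirroring the branch added to MysqlTargetGenerator.
        static String fieldsTerminatedByArg(String delimiter) {
            if (delimiter.contains("'")) {
                return " --fields-terminated-by " + delimiter;
            }
            return " --fields-terminated-by '" + delimiter + "'";
        }

        public static void main(String[] args) {
            System.out.println(passwordArg("p@ss word"));    // --password "p@ss word"
            System.out.println(fieldsTerminatedByArg("@"));  // --fields-terminated-by '@'
        }
    }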

95
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java

@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */

 package org.apache.dolphinscheduler.server.worker.task.sqoop;

 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
@@ -23,6 +24,9 @@ import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator;
 import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
 import org.apache.dolphinscheduler.service.process.ProcessService;
+
+import java.util.Date;
+
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -33,7 +37,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.context.ApplicationContext;

-import java.util.Date;

 /**
  * sqoop task test
@@ -43,14 +46,12 @@ public class SqoopTaskTest {

     private static final Logger logger = LoggerFactory.getLogger(SqoopTaskTest.class);

-    private ProcessService processService;
-    private ApplicationContext applicationContext;
     private SqoopTask sqoopTask;

     @Before
-    public void before() throws Exception{
-        processService = Mockito.mock(ProcessService.class);
-        applicationContext = Mockito.mock(ApplicationContext.class);
+    public void before() {
+        ProcessService processService = Mockito.mock(ProcessService.class);
+        ApplicationContext applicationContext = Mockito.mock(ApplicationContext.class);
         SpringApplicationContext springApplicationContext = new SpringApplicationContext();
         springApplicationContext.setApplicationContext(applicationContext);
         Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService);
@@ -61,17 +62,17 @@ public class SqoopTaskTest {
         taskExecutionContext.setEnvFile(".dolphinscheduler_env.sh");
         taskExecutionContext.setStartTime(new Date());
         taskExecutionContext.setTaskTimeout(0);
-        taskExecutionContext.setTaskParams("{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1," +
-                "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\"," +
-                "\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\"," +
-                "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[]," +
-                "\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"" +
-                ",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true," +
-                "\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\"," +
-                "\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}");
+        taskExecutionContext.setTaskParams("{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,"
+                + "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\","
+                + "\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\","
+                + "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],"
+                + "\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\""
+                + ",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,"
+                + "\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\","
+                + "\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}");

         sqoopTask = new SqoopTask(taskExecutionContext, logger);
-        //test sqoop tash init method
+        //test sqoop task init method
         sqoopTask.init();
     }
@@ -83,36 +84,68 @@ public class SqoopTaskTest {
         TaskExecutionContext mysqlTaskExecutionContext = getMysqlTaskExecutionContext();

         //sqoop TEMPLATE job
-        //import mysql to HDFS with hadoo
-        String mysqlToHdfs = "{\"jobName\":\"sqoop_import\",\"hadoopCustomParams\":[{\"prop\":\"mapreduce.map.memory.mb\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"4096\"}],\"sqoopAdvancedParams\":[{\"prop\":\"--direct\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}]," +
-                "\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
+        //import mysql to HDFS with hadoop
+        String mysqlToHdfs =
+            "{\"jobName\":\"sqoop_import\",\"hadoopCustomParams\":[{\"prop\":\"mapreduce.map.memory.mb\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"4096\"}],"
+                + "\"sqoopAdvancedParams\":[{\"prop\":\"--direct\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}],\"jobType\":\"TEMPLATE\",\"concurrency\":1,"
+                + "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\","
+                + "\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\","
+                + "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\","
+                + "\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\","
+                + "\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
         SqoopParameters mysqlToHdfsParams = JSONUtils.parseObject(mysqlToHdfs, SqoopParameters.class);
         SqoopJobGenerator generator = new SqoopJobGenerator();
         String mysqlToHdfsScript = generator.generateSqoopJob(mysqlToHdfsParams, mysqlTaskExecutionContext);
-        String mysqlToHdfsExpected = "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile --delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'";
+        String mysqlToHdfsExpected =
+            "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect jdbc:mysql://192.168.0.111:3306/test "
+                + "--username kylo --password \"123456\" --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile "
+                + "--delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'";
         Assert.assertEquals(mysqlToHdfsExpected, mysqlToHdfsScript);
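The expected script above documents the assembly order: the common prefix (sqoop import -D mapred.job.name=<jobName>), then each hadoopCustomParams entry as a -D key=value pair, then the raw sqoopAdvancedParams flags (here --direct), then -m <concurrency>, and finally the source and target clauses. A condensed sketch of that ordering, with names inlined for illustration (the real logic lives in CommonGenerator and the source/target generators):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class CommandOrderSketch {
        public static void main(String[] args) {
            Map<String, String> hadoopParams = new LinkedHashMap<>();
            hadoopParams.put("mapreduce.map.memory.mb", "4096");

            StringBuilder sb = new StringBuilder("sqoop import");
            sb.append(" -D mapred.job.name=").append("sqoop_import");   // job name first
            hadoopParams.forEach((k, v) -> sb.append(" -D ").append(k).append("=").append(v));
            sb.append(" --direct");       // sqoopAdvancedParams entry, passed through as-is
            sb.append(" -m ").append(1);  // concurrency
            // ...then the source clause (--connect/--username/--password/--table)
            // ...then the target clause (--target-dir/--as-textfile/...)
            System.out.println(sb);
        }
    }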
         //export hdfs to mysql using update mode
-        String hdfsToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\"," +
-                "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\"," +
-                "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
+        String hdfsToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\","
+                + "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\","
+                + "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\","
+                + "\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\","
+                + "\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
         SqoopParameters hdfsToMysqlParams = JSONUtils.parseObject(hdfsToMysql, SqoopParameters.class);
         String hdfsToMysqlScript = generator.generateSqoopJob(hdfsToMysqlParams, mysqlTaskExecutionContext);
-        String hdfsToMysqlScriptExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert";
+        String hdfsToMysqlScriptExpected =
+            "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test "
+                + "--username kylo --password \"123456\" --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' "
+                + "--lines-terminated-by '\\n' --update-key id --update-mode allowinsert";
         Assert.assertEquals(hdfsToMysqlScriptExpected, hdfsToMysqlScript);
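In this export case, --update-key id --update-mode allowinsert makes Sqoop update rows whose key matches and insert the rest (an upsert); with Sqoop's default update mode, updateonly, unmatched rows would simply be skipped. As the generator diff above shows, the clause is emitted only when isUpdate is set and both the key and the mode are non-empty; a condensed standalone sketch of that guard (StringUtils.isNotEmpty replaced by explicit null/empty checks):

    public class UpdateClauseSketch {
        static String updateClause(boolean isUpdate, String updateKey, String updateMode) {
            if (isUpdate && updateKey != null && !updateKey.isEmpty()
                    && updateMode != null && !updateMode.isEmpty()) {
                return " --update-key " + updateKey + " --update-mode " + updateMode;
            }
            return "";
        }

        public static void main(String[] args) {
            System.out.println(updateClause(true, "id", "allowinsert")); // --update-key id --update-mode allowinsert
            System.out.println(updateClause(true, "", "allowinsert"));   // empty: a missing key suppresses the clause
        }
    }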
         //export hive to mysql
-        String hiveToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
+        String hiveToMysql =
+            "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\","
+                + "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\","
+                + "\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\","
+                + "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\","
+                + "\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\","
+                + "\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
         SqoopParameters hiveToMysqlParams = JSONUtils.parseObject(hiveToMysql, SqoopParameters.class);
         String hiveToMysqlScript = generator.generateSqoopJob(hiveToMysqlParams, mysqlTaskExecutionContext);
-        String hiveToMysqlExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'";
+        String hiveToMysqlExpected =
+            "sqoop export -D mapred.job.name=sqoop_import -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date "
+                + "--hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password \"123456\" --table person_3 "
+                + "--fields-terminated-by '@' --lines-terminated-by '\\n'";
         Assert.assertEquals(hiveToMysqlExpected, hiveToMysqlScript);

         //import mysql to hive
-        String mysqlToHive = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}";
+        String mysqlToHive =
+            "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\","
+                + "\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\","
+                + "\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],"
+                + "\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\","
+                + "\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,"
+                + "\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}";
         SqoopParameters mysqlToHiveParams = JSONUtils.parseObject(mysqlToHive, SqoopParameters.class);
         String mysqlToHiveScript = generator.generateSqoopJob(mysqlToHiveParams, mysqlTaskExecutionContext);
-        String mysqlToHiveExpected = "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16";
+        String mysqlToHiveExpected =
+            "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password \"123456\" "
+                + "--query \"SELECT * FROM person_2 WHERE \\$CONDITIONS\" --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 "
+                + "--create-hive-table --hive-overwrite --delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16";
         Assert.assertEquals(mysqlToHiveExpected, mysqlToHiveScript);
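Two fixes are visible in the import-by-query expectation above: the old single-dash -delete-target-dir becomes --delete-target-dir, and the free-form query is now double-quoted with the dollar sign escaped, so it reaches the shell as --query "... WHERE \$CONDITIONS" and the shell neither mangles embedded single quotes nor expands $CONDITIONS as a variable. A sketch of that quoting step, under the assumption that the generator appends WHERE $CONDITIONS and then escapes it (the real code lives in MysqlSourceGenerator):

    public class QueryClauseSketch {
        static String queryArg(String srcQuerySql) {
            String query = srcQuerySql + " WHERE $CONDITIONS";
            // Escape $ so the shell passes the literal $CONDITIONS through to Sqoop.
            return " --query \"" + query.replace("$CONDITIONS", "\\$CONDITIONS") + "\"";
        }

        public static void main(String[] args) {
            System.out.println(queryArg("SELECT * FROM person_2"));
            // --query "SELECT * FROM person_2 WHERE \$CONDITIONS"
        }
    }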
         //sqoop CUSTOM job
@@ -124,16 +157,18 @@ public class SqoopTaskTest {
     }

     /**
      * get taskExecutionContext include mysql
+     *
      * @return TaskExecutionContext
      */
     private TaskExecutionContext getMysqlTaskExecutionContext() {
         TaskExecutionContext taskExecutionContext = new TaskExecutionContext();
         SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext();
-        String mysqlSourceConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}";
-        String mysqlTargetConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}";
+        String mysqlSourceConnectionParams =
+            "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}";
+        String mysqlTargetConnectionParams =
+            "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}";
         sqoopTaskExecutionContext.setDataSourceId(2);
         sqoopTaskExecutionContext.setDataTargetId(2);
         sqoopTaskExecutionContext.setSourcetype(0);