Merge pull request #2943 from Eights-Li/dev-sqoop-optimization

Sqoop task optimization
Rubik-W committed 5 years ago via GitHub
commit a7fd0a523c
15 changed files (lines changed):

    2  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java
   41  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SqoopJobType.java
   87  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java
    2  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetMysqlParameter.java
    6  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java
   39  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java
   14  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java
   14  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java
    2  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java
  126  dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java
    2  dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue
  292  dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue
    7  dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
    7  dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js
    1  pom.xml

2  dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java

@@ -218,7 +218,7 @@ public enum Status {
     DATA_IS_NOT_VALID(50017,"data {0} not valid", "数据[{0}]无效"),
     DATA_IS_NULL(50018,"data {0} is null", "数据[{0}]不能为空"),
     PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle", "流程节点间存在循环依赖"),
-    PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node %s parameter invalid", "流程节点[%s]参数无效"),
+    PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node {0} parameter invalid", "流程节点[{0}]参数无效"),
     PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"),
    DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022,"delete process definition by id error", "删除工作流定义错误"),
    SCHEDULE_CRON_STATE_ONLINE(50023,"the status of schedule {0} is already on line", "调度配置[{0}]已上线"),
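The %s → {0} change brings this entry in line with the indexed placeholders every neighboring Status message already uses, which look like java.text.MessageFormat patterns. A minimal sketch of why the literal %s was wrong under that formatting style (plain JDK code, not DolphinScheduler-specific):

import java.text.MessageFormat;

public class PlaceholderDemo {
    public static void main(String[] args) {
        // MessageFormat substitutes {0}-style indices; a literal %s would pass through untouched.
        String pattern = "process node {0} parameter invalid";
        System.out.println(MessageFormat.format(pattern, "sqoop-node-1"));
        // -> process node sqoop-node-1 parameter invalid
    }
}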

41  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SqoopJobType.java

@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.dolphinscheduler.common.enums;
+
+import com.baomidou.mybatisplus.annotation.EnumValue;
+
+public enum SqoopJobType {
+    CUSTOM(0, "CUSTOM"),
+    TEMPLATE(1, "TEMPLATE");
+
+    SqoopJobType(int code, String descp){
+        this.code = code;
+        this.descp = descp;
+    }
+
+    @EnumValue
+    private final int code;
+    private final String descp;
+
+    public int getCode() {
+        return code;
+    }
+
+    public String getDescp() {
+        return descp;
+    }
+}
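For orientation, a hedged sketch of how a task's JSON params end up selecting one of these two branches. It uses fastjson's JSON.parseObject, the same call the server and test code in this PR use; the payload is an invented minimal example:

import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.SqoopJobType;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;

public class JobTypeDemo {
    public static void main(String[] args) {
        // Minimal CUSTOM-mode payload; real task params carry many more fields.
        String params = "{\"jobType\":\"CUSTOM\",\"customShell\":\"sqoop import --help\"}";
        SqoopParameters sqoopParameters = JSON.parseObject(params, SqoopParameters.class);
        // The generators compare against the enum's descp string, not the ordinal code.
        boolean isCustom = SqoopJobType.CUSTOM.getDescp().equals(sqoopParameters.getJobType());
        System.out.println(isCustom); // true
    }
}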

87  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java

@@ -16,6 +16,8 @@
  */
 package org.apache.dolphinscheduler.common.task.sqoop;

+import org.apache.dolphinscheduler.common.enums.SqoopJobType;
+import org.apache.dolphinscheduler.common.process.Property;
 import org.apache.dolphinscheduler.common.process.ResourceInfo;
 import org.apache.dolphinscheduler.common.task.AbstractParameters;
 import org.apache.dolphinscheduler.common.utils.StringUtils;
@@ -28,6 +30,23 @@ import java.util.List;
  */
 public class SqoopParameters extends AbstractParameters {

+    /**
+     * sqoop job type:
+     * CUSTOM - custom sqoop job
+     * TEMPLATE - sqoop template job
+     */
+    private String jobType;
+
+    /**
+     * customShell, used when the job type is CUSTOM
+     */
+    private String customShell;
+
+    /**
+     * sqoop job name - map-reduce job name
+     */
+    private String jobName;
+
     /**
      * model type
      */
@@ -53,6 +72,16 @@ public class SqoopParameters extends AbstractParameters {
      */
     private String targetParams;

+    /**
+     * hadoop custom param for sqoop job
+     */
+    private List<Property> hadoopCustomParams;
+
+    /**
+     * sqoop advanced param
+     */
+    private List<Property> sqoopAdvancedParams;
+
     public String getModelType() {
         return modelType;
     }
@@ -101,14 +130,70 @@ public class SqoopParameters extends AbstractParameters {
         this.targetParams = targetParams;
     }

+    public String getJobType() {
+        return jobType;
+    }
+
+    public void setJobType(String jobType) {
+        this.jobType = jobType;
+    }
+
+    public String getJobName() {
+        return jobName;
+    }
+
+    public void setJobName(String jobName) {
+        this.jobName = jobName;
+    }
+
+    public String getCustomShell() {
+        return customShell;
+    }
+
+    public void setCustomShell(String customShell) {
+        this.customShell = customShell;
+    }
+
+    public List<Property> getHadoopCustomParams() {
+        return hadoopCustomParams;
+    }
+
+    public void setHadoopCustomParams(List<Property> hadoopCustomParams) {
+        this.hadoopCustomParams = hadoopCustomParams;
+    }
+
+    public List<Property> getSqoopAdvancedParams() {
+        return sqoopAdvancedParams;
+    }
+
+    public void setSqoopAdvancedParams(List<Property> sqoopAdvancedParams) {
+        this.sqoopAdvancedParams = sqoopAdvancedParams;
+    }
+
     @Override
     public boolean checkParameters() {
-        return StringUtils.isNotEmpty(modelType)&&
-                concurrency != 0 &&
-                StringUtils.isNotEmpty(sourceType) &&
-                StringUtils.isNotEmpty(targetType) &&
-                StringUtils.isNotEmpty(sourceParams) &&
-                StringUtils.isNotEmpty(targetParams);
+
+        boolean sqoopParamsCheck = false;
+
+        if (StringUtils.isEmpty(jobType)) {
+            return sqoopParamsCheck;
+        }
+
+        if (SqoopJobType.TEMPLATE.getDescp().equals(jobType)) {
+            sqoopParamsCheck = StringUtils.isEmpty(customShell) &&
+                    StringUtils.isNotEmpty(modelType) &&
+                    StringUtils.isNotEmpty(jobName) &&
+                    concurrency != 0 &&
+                    StringUtils.isNotEmpty(sourceType) &&
+                    StringUtils.isNotEmpty(targetType) &&
+                    StringUtils.isNotEmpty(sourceParams) &&
+                    StringUtils.isNotEmpty(targetParams);
+        } else if (SqoopJobType.CUSTOM.getDescp().equals(jobType)) {
+            sqoopParamsCheck = StringUtils.isNotEmpty(customShell) &&
+                    StringUtils.isEmpty(jobName);
+        }
+        return sqoopParamsCheck;
     }

     @Override
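To make the two validation branches concrete, here is a hedged sketch exercising checkParameters() with one payload per job type. The payloads are invented minimal examples; the field names match the class above, and note that TEMPLATE requires customShell to be empty while CUSTOM requires jobName to be empty:

import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;

public class CheckParametersDemo {
    public static void main(String[] args) {
        // CUSTOM: needs a non-empty customShell and an empty jobName.
        String custom = "{\"jobType\":\"CUSTOM\",\"customShell\":\"sqoop import\"}";
        System.out.println(JSON.parseObject(custom, SqoopParameters.class).checkParameters()); // true

        // TEMPLATE: customShell must be empty; jobName, modelType, concurrency,
        // source/target types and params must all be present.
        String template = "{\"jobType\":\"TEMPLATE\",\"jobName\":\"sqoop_import\","
                + "\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\","
                + "\"targetType\":\"HDFS\",\"sourceParams\":\"{}\",\"targetParams\":\"{}\"}";
        System.out.println(JSON.parseObject(template, SqoopParameters.class).checkParameters()); // true
    }
}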

2  dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetMysqlParameter.java

@@ -106,7 +106,7 @@ public class TargetMysqlParameter {
         this.preQuery = preQuery;
     }

-    public boolean isUpdate() {
+    public boolean getIsUpdate() {
         return isUpdate;
     }

6  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java

@@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.master.consumer;

 import com.alibaba.fastjson.JSONObject;
 import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
+import org.apache.dolphinscheduler.common.enums.SqoopJobType;
 import org.apache.dolphinscheduler.common.enums.TaskType;
 import org.apache.dolphinscheduler.common.enums.UdfType;
 import org.apache.dolphinscheduler.common.model.TaskNode;
@@ -258,13 +259,15 @@ public class TaskPriorityQueueConsumer extends Thread{

     /**
-     * set datax task relation
+     * set sqoop task relation
      * @param sqoopTaskExecutionContext sqoopTaskExecutionContext
      * @param taskNode taskNode
      */
     private void setSqoopTaskRelation(SqoopTaskExecutionContext sqoopTaskExecutionContext, TaskNode taskNode) {
         SqoopParameters sqoopParameters = JSONObject.parseObject(taskNode.getParams(), SqoopParameters.class);

+        // only a TEMPLATE sqoop job needs its source/target relation resolved
+        if (sqoopParameters.getJobType().equals(SqoopJobType.TEMPLATE.getDescp())) {
         SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class);
         TargetMysqlParameter targetMysqlParameter = JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class);
@@ -283,6 +286,7 @@ public class TaskPriorityQueueConsumer extends Thread{
                 sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams());
             }
         }
+        }
     }

     /**
      * set SQL task relation

39
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java

@ -16,10 +16,17 @@
*/ */
package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; package org.apache.dolphinscheduler.server.worker.task.sqoop.generator;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.util.List;
/** /**
* common script generator * common script generator
*/ */
@ -32,6 +39,38 @@ public class CommonGenerator {
try{ try{
result.append("sqoop ") result.append("sqoop ")
.append(sqoopParameters.getModelType()); .append(sqoopParameters.getModelType());
//set sqoop job name
result.append(" -D mapred.job.name")
.append(Constants.EQUAL_SIGN)
.append(sqoopParameters.getJobName());
//set hadoop custom param
List<Property> hadoopCustomParams = sqoopParameters.getHadoopCustomParams();
if (CollectionUtils.isNotEmpty(hadoopCustomParams)) {
for (Property hadoopCustomParam : hadoopCustomParams) {
String hadoopCustomParamStr = " -D " + hadoopCustomParam.getProp()
+ Constants.EQUAL_SIGN + hadoopCustomParam.getValue();
if (StringUtils.isNotEmpty(hadoopCustomParamStr)) {
result.append(hadoopCustomParamStr);
}
}
}
//set sqoop advanced custom param
List<Property> sqoopAdvancedParams = sqoopParameters.getSqoopAdvancedParams();
if (CollectionUtils.isNotEmpty(sqoopAdvancedParams)) {
for (Property sqoopAdvancedParam : sqoopAdvancedParams) {
String sqoopAdvancedParamStr = " " + sqoopAdvancedParam.getProp()
+ " " + sqoopAdvancedParam.getValue();
if (StringUtils.isNotEmpty(sqoopAdvancedParamStr)) {
result.append(sqoopAdvancedParamStr);
}
}
}
if(sqoopParameters.getConcurrency() >0){ if(sqoopParameters.getConcurrency() >0){
result.append(" -m ") result.append(" -m ")
.append(sqoopParameters.getConcurrency()); .append(sqoopParameters.getConcurrency());
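Taken together, these appends produce the command prefix that the updated SqoopTaskTest below asserts. A self-contained sketch of the same assembly order, with invented values and plain JDK code only (no DolphinScheduler classes):

public class CommandPrefixDemo {
    public static void main(String[] args) {
        // Mirrors CommonGenerator's append order: model type, job name, -D params, -m concurrency.
        StringBuilder result = new StringBuilder();
        result.append("sqoop ").append("import");                                // modelType
        result.append(" -D mapred.job.name").append("=").append("sqoop_import"); // jobName
        result.append(" -D ").append("mapreduce.map.memory.mb")
              .append("=").append("4096");                                       // one hadoopCustomParam
        result.append(" -m ").append(1);                                         // concurrency
        System.out.println(result);
        // sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 -m 1
    }
}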

14  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java

@@ -16,6 +16,7 @@
  */
 package org.apache.dolphinscheduler.server.worker.task.sqoop.generator;

+import org.apache.dolphinscheduler.common.enums.SqoopJobType;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.HdfsSourceGenerator;
@@ -62,14 +63,23 @@ public class SqoopJobGenerator {
      * @return
      */
     public String generateSqoopJob(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext){
-        createSqoopJobGenerator(sqoopParameters.getSourceType(),sqoopParameters.getTargetType());
-        if(sourceGenerator == null || targetGenerator == null){
-            return null;
-        }
-
-        return commonGenerator.generate(sqoopParameters)
-                + sourceGenerator.generate(sqoopParameters,taskExecutionContext)
-                + targetGenerator.generate(sqoopParameters,taskExecutionContext);
+
+        String sqoopScripts = "";
+
+        if (SqoopJobType.TEMPLATE.getDescp().equals(sqoopParameters.getJobType())) {
+            createSqoopJobGenerator(sqoopParameters.getSourceType(),sqoopParameters.getTargetType());
+            if(sourceGenerator == null || targetGenerator == null){
+                throw new RuntimeException("sqoop task source type or target type is null");
+            }
+
+            sqoopScripts = commonGenerator.generate(sqoopParameters)
+                    + sourceGenerator.generate(sqoopParameters,taskExecutionContext)
+                    + targetGenerator.generate(sqoopParameters,taskExecutionContext);
+        } else if (SqoopJobType.CUSTOM.getDescp().equals(sqoopParameters.getJobType())) {
+            sqoopScripts = sqoopParameters.getCustomShell().replaceAll("\\r\\n", "\n");
+        }
+
+        return sqoopScripts;
     }

     /**
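As a quick usage sketch of the CUSTOM branch (the parameters are invented, mirroring the CUSTOM-mode test added further down): the generator now bypasses the source/target generators entirely and returns the raw shell text, with Windows line endings normalized to \n.

import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator;

public class CustomJobDemo {
    public static void main(String[] args) {
        String params = "{\"jobType\":\"CUSTOM\",\"customShell\":\"sqoop import\"}";
        SqoopParameters sqoopParameters = JSON.parseObject(params, SqoopParameters.class);
        SqoopJobGenerator generator = new SqoopJobGenerator();
        // For CUSTOM jobs the TaskExecutionContext is not consulted.
        String script = generator.generateSqoopJob(sqoopParameters, new TaskExecutionContext());
        System.out.println(script); // sqoop import
    }
}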

14  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java

@@ -77,19 +77,19 @@ public class MysqlSourceGenerator implements ISourceGenerator {
             }else{
                 srcQuery += " WHERE $CONDITIONS";
             }
-            result.append(" --query \'"+srcQuery+"\'");
+            result.append(" --query \'").append(srcQuery).append("\'");
         }

         List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive();

         if(mapColumnHive != null && !mapColumnHive.isEmpty()){
-            String columnMap = "";
+            StringBuilder columnMap = new StringBuilder();
             for(Property item:mapColumnHive){
-                columnMap = item.getProp()+"="+ item.getValue()+",";
+                columnMap.append(item.getProp()).append("=").append(item.getValue()).append(",");
             }

-            if(StringUtils.isNotEmpty(columnMap)){
+            if(StringUtils.isNotEmpty(columnMap.toString())){
                 result.append(" --map-column-hive ")
                         .append(columnMap.substring(0,columnMap.length()-1));
             }
@@ -98,12 +98,12 @@ public class MysqlSourceGenerator implements ISourceGenerator {
         List<Property> mapColumnJava = sourceMysqlParameter.getMapColumnJava();

         if(mapColumnJava != null && !mapColumnJava.isEmpty()){
-            String columnMap = "";
+            StringBuilder columnMap = new StringBuilder();
             for(Property item:mapColumnJava){
-                columnMap = item.getProp()+"="+ item.getValue()+",";
+                columnMap.append(item.getProp()).append("=").append(item.getValue()).append(",");
             }

-            if(StringUtils.isNotEmpty(columnMap)){
+            if(StringUtils.isNotEmpty(columnMap.toString())){
                 result.append(" --map-column-java ")
                         .append(columnMap.substring(0,columnMap.length()-1));
             }
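Beyond the StringBuilder idiom, this hunk fixes a real bug: the old loop reassigned columnMap on every iteration, so only the last column mapping survived. A small self-contained illustration with invented mappings:

public class ColumnMapDemo {
    public static void main(String[] args) {
        String[][] mappings = {{"id", "Integer"}, {"name", "String"}};

        // Old behavior: '=' assignment inside the loop keeps only the last entry.
        String lost = "";
        for (String[] m : mappings) {
            lost = m[0] + "=" + m[1] + ",";
        }
        System.out.println(lost); // name=String,

        // New behavior: append accumulates every entry.
        StringBuilder kept = new StringBuilder();
        for (String[] m : mappings) {
            kept.append(m[0]).append("=").append(m[1]).append(",");
        }
        System.out.println(kept.substring(0, kept.length() - 1)); // id=Integer,name=String
    }
}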

2  dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java

@@ -78,7 +78,7 @@ public class MysqlTargetGenerator implements ITargetGenerator {
                 result.append(" --lines-terminated-by '").append(targetMysqlParameter.getLinesTerminated()).append("'");
             }

-            if(targetMysqlParameter.isUpdate()
+            if(targetMysqlParameter.getIsUpdate()
                     && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey())
                     && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){
                 result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey())

126  dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java

@@ -17,11 +17,9 @@
 package org.apache.dolphinscheduler.server.worker.task.sqoop;

 import com.alibaba.fastjson.JSON;
-import org.apache.dolphinscheduler.common.enums.DbType;
 import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
-import org.apache.dolphinscheduler.dao.entity.DataSource;
+import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext;
 import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
-import org.apache.dolphinscheduler.server.worker.task.TaskProps;
 import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator;
 import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
 import org.apache.dolphinscheduler.service.process.ProcessService;
@@ -35,7 +33,7 @@
 import org.slf4j.LoggerFactory;
 import org.springframework.context.ApplicationContext;

-import java.util.*;
+import java.util.Date;

 /**
  * sqoop task test
@@ -52,64 +50,98 @@ public class SqoopTaskTest {

     @Before
     public void before() throws Exception{
         processService = Mockito.mock(ProcessService.class);
-        Mockito.when(processService.findDataSourceById(2)).thenReturn(getDataSource());
         applicationContext = Mockito.mock(ApplicationContext.class);
         SpringApplicationContext springApplicationContext = new SpringApplicationContext();
         springApplicationContext.setApplicationContext(applicationContext);
         Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService);

-        TaskProps props = new TaskProps();
-        props.setTaskAppId(String.valueOf(System.currentTimeMillis()));
-        props.setTenantCode("1");
-        props.setEnvFile(".dolphinscheduler_env.sh");
-        props.setTaskStartTime(new Date());
-        props.setTaskTimeout(0);
-        props.setTaskParams("{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}");
-
-        sqoopTask = new SqoopTask(new TaskExecutionContext(),logger);
+        TaskExecutionContext taskExecutionContext = new TaskExecutionContext();
+        taskExecutionContext.setTaskAppId(String.valueOf(System.currentTimeMillis()));
+        taskExecutionContext.setTenantCode("1");
+        taskExecutionContext.setEnvFile(".dolphinscheduler_env.sh");
+        taskExecutionContext.setStartTime(new Date());
+        taskExecutionContext.setTaskTimeout(0);
+        taskExecutionContext.setTaskParams("{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1," +
+                "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\"," +
+                "\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\"," +
+                "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[]," +
+                "\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"" +
+                ",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true," +
+                "\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\"," +
+                "\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}");
+
+        sqoopTask = new SqoopTask(taskExecutionContext,logger);
+        //test sqoop task init method
         sqoopTask.init();
     }

+    /**
+     * test SqoopJobGenerator
+     */
     @Test
     public void testGenerator(){
-        String data1 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
-        SqoopParameters sqoopParameters1 = JSON.parseObject(data1,SqoopParameters.class);
+        TaskExecutionContext mysqlTaskExecutionContext = getMysqlTaskExecutionContext();
+
+        //sqoop TEMPLATE job
+        //import mysql to HDFS with hadoop custom params and sqoop advanced params
+        String mysqlToHdfs = "{\"jobName\":\"sqoop_import\",\"hadoopCustomParams\":[{\"prop\":\"mapreduce.map.memory.mb\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"4096\"}],\"sqoopAdvancedParams\":[{\"prop\":\"--direct\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}]," +
+                "\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
+        SqoopParameters mysqlToHdfsParams = JSON.parseObject(mysqlToHdfs,SqoopParameters.class);
         SqoopJobGenerator generator = new SqoopJobGenerator();
-        String script = generator.generateSqoopJob(sqoopParameters1,new TaskExecutionContext());
-        String expected = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile --delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'";
-        Assert.assertEquals(expected, script);
+        String mysqlToHdfsScript = generator.generateSqoopJob(mysqlToHdfsParams,mysqlTaskExecutionContext);
+        String mysqlToHdfsExpected = "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile --delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'";
+        Assert.assertEquals(mysqlToHdfsExpected, mysqlToHdfsScript);

-        String data2 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
-        SqoopParameters sqoopParameters2 = JSON.parseObject(data2,SqoopParameters.class);
-        String script2 = generator.generateSqoopJob(sqoopParameters2,new TaskExecutionContext());
-        String expected2 = "sqoop export -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert";
-        Assert.assertEquals(expected2, script2);
+        //export hdfs to mysql using update mode
+        String hdfsToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\"," +
+                "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\"," +
+                "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
+        SqoopParameters hdfsToMysqlParams = JSON.parseObject(hdfsToMysql,SqoopParameters.class);
+        String hdfsToMysqlScript = generator.generateSqoopJob(hdfsToMysqlParams,mysqlTaskExecutionContext);
+        String hdfsToMysqlScriptExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert";
+        Assert.assertEquals(hdfsToMysqlScriptExpected, hdfsToMysqlScript);

-        String data3 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
-        SqoopParameters sqoopParameters3 = JSON.parseObject(data3,SqoopParameters.class);
-        String script3 = generator.generateSqoopJob(sqoopParameters3,new TaskExecutionContext());
-        String expected3 = "sqoop export -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'";
-        Assert.assertEquals(expected3, script3);
+        //export hive to mysql
+        String hiveToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
+        SqoopParameters hiveToMysqlParams = JSON.parseObject(hiveToMysql,SqoopParameters.class);
+        String hiveToMysqlScript = generator.generateSqoopJob(hiveToMysqlParams,mysqlTaskExecutionContext);
+        String hiveToMysqlExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'";
+        Assert.assertEquals(hiveToMysqlExpected, hiveToMysqlScript);

-        String data4 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}";
-        SqoopParameters sqoopParameters4 = JSON.parseObject(data4,SqoopParameters.class);
-        String script4 = generator.generateSqoopJob(sqoopParameters4,new TaskExecutionContext());
-        String expected4 = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16";
-        Assert.assertEquals(expected4, script4);
+        //import mysql to hive
+        String mysqlToHive = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}";
+        SqoopParameters mysqlToHiveParams = JSON.parseObject(mysqlToHive,SqoopParameters.class);
+        String mysqlToHiveScript = generator.generateSqoopJob(mysqlToHiveParams,mysqlTaskExecutionContext);
+        String mysqlToHiveExpected = "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16";
+        Assert.assertEquals(mysqlToHiveExpected, mysqlToHiveScript);
+
+        //sqoop CUSTOM job
+        String sqoopCustomString = "{\"jobType\":\"CUSTOM\",\"localParams\":[],\"customShell\":\"sqoop import\"}";
+        SqoopParameters sqoopCustomParams = JSON.parseObject(sqoopCustomString, SqoopParameters.class);
+        String sqoopCustomScript = generator.generateSqoopJob(sqoopCustomParams, new TaskExecutionContext());
+        String sqoopCustomExpected = "sqoop import";
+        Assert.assertEquals(sqoopCustomExpected, sqoopCustomScript);
     }

-    private DataSource getDataSource() {
-        DataSource dataSource = new DataSource();
-        dataSource.setType(DbType.MYSQL);
-        dataSource.setConnectionParams(
-                "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}");
-        dataSource.setUserId(1);
-        return dataSource;
+    /**
+     * get taskExecutionContext include mysql
+     * @return TaskExecutionContext
+     */
+    private TaskExecutionContext getMysqlTaskExecutionContext() {
+        TaskExecutionContext taskExecutionContext = new TaskExecutionContext();
+        SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext();
+        String mysqlSourceConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}";
+        String mysqlTargetConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}";
+        sqoopTaskExecutionContext.setDataSourceId(2);
+        sqoopTaskExecutionContext.setDataTargetId(2);
+        sqoopTaskExecutionContext.setSourcetype(0);
+        sqoopTaskExecutionContext.setTargetConnectionParams(mysqlTargetConnectionParams);
+        sqoopTaskExecutionContext.setSourceConnectionParams(mysqlSourceConnectionParams);
+        sqoopTaskExecutionContext.setTargetType(0);
+        taskExecutionContext.setSqoopTaskExecutionContext(sqoopTaskExecutionContext);
+        return taskExecutionContext;
     }

     @Test

2  dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/datasource.vue

@@ -131,7 +131,7 @@
     },
     created () {
       let supportType = this.supportType || []
-      this.typeList = _.cloneDeep(this.store.state.dag.dsTypeListS)
+      this.typeList = this.data.typeList || _.cloneDeep(this.store.state.dag.dsTypeListS)
       // Have a specified data source
       if (supportType.length) {
         let is = (type) => {

292  dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue

@@ -17,13 +17,49 @@
 <template>
   <div class="sql-model">
+    <m-list-box>
+      <div slot="text">{{$t('Custom Job')}}</div>
+      <div slot="content">
+        <x-switch
+          v-model="isCustomTask"
+          @on-click="_onSwitch"
+          :disabled="isDetails"
+        >
+        </x-switch>
+      </div>
+    </m-list-box>
+    <m-list-box v-show="isCustomTask">
+      <div slot="text">{{$t('Custom Script')}}</div>
+      <div slot="content">
+        <div class="from-mirror">
+          <textarea
+            id="code-shell-mirror"
+            name="code-shell-mirror"
+            style="opacity: 0;">
+          </textarea>
+        </div>
+      </div>
+    </m-list-box>
+    <template v-if="!isCustomTask">
+    <m-list-box>
+      <div slot="text">{{$t('Sqoop Job Name')}}</div>
+      <div slot="content">
+        <x-input
+          :disabled="isDetails"
+          type="text"
+          v-model="jobName"
+          :placeholder="$t('Please enter Job Name(required)')">
+        </x-input>
+      </div>
+    </m-list-box>
     <m-list-box>
       <div slot="text">{{$t('Direct')}}</div>
       <div slot="content">
         <x-select
           style="width: 130px;"
           v-model="modelType"
-          :disabled="isDetails">
+          :disabled="isDetails"
+          @on-change="_handleModelTypeChange">
           <x-option
             v-for="city in modelTypeList"
             :key="city.code"
@@ -33,6 +69,28 @@
         </x-select>
       </div>
     </m-list-box>
+    <m-list-box>
+      <div slot="text" style="width: 110px;">{{$t('Hadoop Custom Params')}}</div>
+      <div slot="content">
+        <m-local-params
+          ref="refMapColumnHadoopParams"
+          @on-local-params="_onHadoopCustomParams"
+          :udp-list="hadoopCustomParams"
+          :hide="false">
+        </m-local-params>
+      </div>
+    </m-list-box>
+    <m-list-box>
+      <div slot="text" style="width: 100px;">{{$t('Sqoop Advanced Parameters')}}</div>
+      <div slot="content">
+        <m-local-params
+          ref="refMapColumnAdvancedParams"
+          @on-local-params="_onSqoopAdvancedParams"
+          :udp-list="sqoopAdvancedParams"
+          :hide="false">
+        </m-local-params>
+      </div>
+    </m-list-box>

     <template>
       <m-list-box>
@@ -65,7 +123,9 @@
             <m-datasource
               ref="refSourceDs"
               @on-dsData="_onSourceDsData"
-              :data="{ type:sourceMysqlParams.srcType,datasource:sourceMysqlParams.srcDatasource }"
+              :data="{type:sourceMysqlParams.srcType,
+                      typeList: [{id: 0, code: 'MYSQL', disabled: false}],
+                      datasource:sourceMysqlParams.srcDatasource }"
             >
             </m-datasource>
           </div>
@@ -385,7 +445,9 @@
             <m-datasource
               ref="refTargetDs"
               @on-dsData="_onTargetDsData"
-              :data="{ type:targetMysqlParams.targetType,datasource:targetMysqlParams.targetDatasource }"
+              :data="{ type:targetMysqlParams.targetType,
+                       typeList: [{id: 0, code: 'MYSQL', disabled: false}],
+                       datasource:targetMysqlParams.targetDatasource }"
             >
             </m-datasource>
           </div>
@@ -474,7 +536,7 @@
         </x-input>
       </div>
     </m-list-box>
+    </template>
     <m-list-box>
       <div slot="text">{{$t('Custom Parameters')}}</div>
       <div slot="content">
@@ -498,16 +560,38 @@
   import codemirror from '@/conf/home/pages/resource/pages/file/pages/_source/codemirror'

   let editor
+  let shellEditor

   export default {
     name: 'sql',
     data () {
       return {
+        /**
+         * Is Custom Task
+         */
+        isCustomTask: false,
         /**
          * Customer Params
          */
         localParams: [],
+        /**
+         * Hadoop Custom Params
+         */
+        hadoopCustomParams: [],
+        /**
+         * Sqoop Advanced Params
+         */
+        sqoopAdvancedParams: [],
+        /**
+         * script
+         */
+        customShell: '',
+        /**
+         * task name
+         */
+        jobName: '',
         /**
          * mysql query type
          */
@@ -524,6 +608,10 @@
          * concurrency
          */
         concurrency:1,
+        /**
+         * default job type
+         */
+        jobType:'TEMPLATE',
         /**
          * direct model type
          */
@@ -535,12 +623,6 @@
           {
             code: "MYSQL"
           },
-          {
-            code:"HDFS"
-          },
-          {
-            code:"HIVE"
-          }
         ],
         targetTypeList:[
@@ -620,8 +702,32 @@
     },
     methods: {
+      _onSwitch(is){
+        if(is) {
+          this.jobType = 'CUSTOM'
+          this.isCustomTask = true
+          setTimeout(() => {
+            this._handlerShellEditor()
+          }, 200)
+        } else {
+          this.jobType = 'TEMPLATE'
+          this.isCustomTask = false
+          setTimeout(() => {
+            this._handlerEditor()
+          }, 200)
+        }
+      },
       _handleQueryType(o){
         this.sourceMysqlParams.srcQueryType = this.srcQueryType
+        this._getTargetTypeList(this.sourceType)
+        this.targetType = this.targetTypeList[0].code
+      },
+
+      _handleModelTypeChange(a){
+        this._getSourceTypeList(a.label)
+        this.sourceType = this.sourceTypeList[0].code
+        this._handleSourceTypeChange({label: this.sourceType, value: this.sourceType})
       },

       _handleSourceTypeChange(a){
@@ -629,9 +735,50 @@
         this.targetType = this.targetTypeList[0].code
       },

+      _getSourceTypeList(data){
+        switch(data){
+          case 'import':
+            this.sourceTypeList = [
+              {
+                code:"MYSQL"
+              },
+            ]
+            break;
+          case 'export':
+            this.sourceTypeList = [
+              {
+                code: "HDFS"
+              },
+              {
+                code: "HIVE"
+              }
+            ]
+            break;
+          default:
+            this.sourceTypeList = [
+              {
+                code:"MYSQL"
+              },
+              {
+                code:"HIVE"
+              },
+              {
+                code:"HDFS"
+              }
+            ]
+            break;
+        }
+      },
+
       _getTargetTypeList(data){
         switch(data){
           case 'MYSQL':
+            if (this.srcQueryType === "1") {
+              this.targetTypeList = [
+                {
+                  code: "HDFS"
+                }]
+            } else {
               this.targetTypeList = [
                 {
                   code: "HIVE"
@@ -640,6 +787,7 @@
                   code: "HDFS"
                 }
               ]
+            }
             break;
           case 'HDFS':
             this.targetTypeList = [
@@ -670,12 +818,10 @@
       _onMapColumnHive (a) {
         this.sourceMysqlParams.mapColumnHive = a
-        console.log(this.sourceMysqlParams.mapColumnHive)
       },

       _onMapColumnJava (a) {
         this.sourceMysqlParams.mapColumnJava = a
-        console.log(this.sourceMysqlParams.mapColumnJava)
       },

       /**
@@ -701,7 +847,8 @@
         var params = null
         switch(this.sourceType){
           case "MYSQL":
-            this.sourceMysqlParams.srcQuerySql = editor ? editor.getValue() : this.sourceMysqlParams.srcQuerySql
+            this.sourceMysqlParams.srcQuerySql = this.sourceMysqlParams.srcQueryType === "1" && editor ?
+              editor.getValue() : this.sourceMysqlParams.srcQuerySql
             params = JSON.stringify(this.sourceMysqlParams)
             break;
           case "ORACLE":
@@ -791,6 +938,21 @@
        * verification
        */
       _verification () {
+        let sqoopParams = {
+          jobType: this.jobType,
+          localParams: this.localParams
+        }
+        if (this.jobType === 'CUSTOM') {
+          if (!shellEditor.getValue()) {
+            this.$message.warning(`${i18n.$t('Please enter Custom Shell(required)')}`)
+            return false
+          }
+          sqoopParams['customShell'] = shellEditor.getValue()
+        } else {
+          if (!this.jobName) {
+            this.$message.warning(`${i18n.$t('Please enter Job Name(required)')}`)
+            return false
+          }

         switch (this.sourceType) {
           case "MYSQL":
@@ -802,16 +964,22 @@
               this.$message.warning(`${i18n.$t('Please enter a SQL Statement(required)')}`)
               return false
             }
+            this.sourceMysqlParams.srcTable = ""
+            this.sourceMysqlParams.srcColumnType = "0"
+            this.sourceMysqlParams.srcColumns = ""
           } else {
             if (this.sourceMysqlParams.srcTable === "") {
               this.$message.warning(`${i18n.$t('Please enter Mysql Table(required)')}`)
               return false
             }
+            this.sourceMysqlParams.srcQuerySql = ""
             if (this.sourceMysqlParams.srcColumnType === "1" && this.sourceMysqlParams.srcColumns === "") {
               this.$message.warning(`${i18n.$t('Please enter Columns (Comma separated)')}`)
               return false
             }
+            if (this.sourceMysqlParams.srcColumnType === "0") {
+              this.sourceMysqlParams.srcColumns = ""
+            }
           }
           break;
@@ -865,17 +1033,19 @@
           default:
             break;
         }
+          sqoopParams['jobName'] = this.jobName
+          sqoopParams['hadoopCustomParams'] = this.hadoopCustomParams
+          sqoopParams['sqoopAdvancedParams'] = this.sqoopAdvancedParams
+          sqoopParams['concurrency'] = this.concurrency
+          sqoopParams['modelType'] = this.modelType
+          sqoopParams['sourceType'] = this.sourceType
+          sqoopParams['targetType'] = this.targetType
+          sqoopParams['targetParams'] = this._handleTargetParams()
+          sqoopParams['sourceParams'] = this._handleSourceParams()
+        }

         // storage
-        this.$emit('on-params', {
-          concurrency:this.concurrency,
-          modelType:this.modelType,
-          sourceType:this.sourceType,
-          targetType:this.targetType,
-          sourceParams:this._handleSourceParams(),
-          targetParams:this._handleTargetParams(),
-          localParams:this.localParams
-        })
+        this.$emit('on-params', sqoopParams)

         return true
       },
@@ -912,6 +1082,33 @@
         return editor
       },

+      /**
+       * Processing code highlighting
+       */
+      _handlerShellEditor () {
+        this._destroyShellEditor()
+
+        // shellEditor
+        shellEditor = codemirror('code-shell-mirror', {
+          mode: 'shell',
+          readOnly: this.isDetails
+        })
+
+        this.keypress = () => {
+          if (!shellEditor.getOption('readOnly')) {
+            shellEditor.showHint({
+              completeSingle: false
+            })
+          }
+        }
+
+        // Monitor keyboard
+        shellEditor.on('keypress', this.keypress)
+
+        shellEditor.setValue(this.customShell)
+
+        return shellEditor
+      },
+
       /**
        * return localParams
        */
@@ -919,6 +1116,20 @@
         this.localParams = a
       },

+      /**
+       * return hadoopParams
+       */
+      _onHadoopCustomParams (a) {
+        this.hadoopCustomParams = a
+      },
+
+      /**
+       * return sqoopAdvancedParams
+       */
+      _onSqoopAdvancedParams (a) {
+        this.sqoopAdvancedParams = a
+      },
+
       _cacheParams () {
         this.$emit('on-cache-params', {
           concurrency:this.concurrency,
@@ -939,6 +1150,13 @@
           editor = null
         }
       },
+      _destroyShellEditor () {
+        if (shellEditor) {
+          shellEditor.toTextArea() // Uninstall
+          shellEditor.off($('.code-shell-mirror'), 'keypress', this.keypress)
+          shellEditor.off($('.code-shell-mirror'), 'changes', this.changes)
+        }
+      }
     },
     watch: {
       // Listening to sqlType
@@ -970,15 +1188,25 @@
       // Non-null objects represent backfill
       if (!_.isEmpty(o)) {
-        this.concurrency = o.params.concurrency || 1,
-        this.modelType = o.params.modelType,
-        this.sourceType = o.params.sourceType,
+        this.jobType = o.params.jobType
+        this.isCustomTask = false
+        if (this.jobType === 'CUSTOM') {
+          this.customShell = o.params.customShell
+          this.isCustomTask = true
+        } else {
+          this.jobName = o.params.jobName
+          this.hadoopCustomParams = o.params.hadoopCustomParams
+          this.sqoopAdvancedParams = o.params.sqoopAdvancedParams
+          this.concurrency = o.params.concurrency || 1
+          this.modelType = o.params.modelType
+          this.sourceType = o.params.sourceType
           this._getTargetTypeList(this.sourceType)
-        this.targetType = o.params.targetType,
-        this._getSourceParams(o.params.sourceParams),
-        this._getTargetParams(o.params.targetParams),
+          this.targetType = o.params.targetType
+          this._getSourceParams(o.params.sourceParams)
+          this._getTargetParams(o.params.targetParams)
           this.localParams = o.params.localParams
         }
+      }
     },

     mounted () {
@@ -986,6 +1214,10 @@
         this._handlerEditor()
       }, 200)

+      setTimeout(() => {
+        this._handlerShellEditor()
+      }, 200)
+
       setTimeout(() => {
         this.srcQueryType = this.sourceMysqlParams.srcQueryType
       }, 500)

7  dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js

@@ -540,6 +540,9 @@ export default {
   'Whether directory': 'Whether directory',
   Yes: 'Yes',
   No: 'No',
+  'Hadoop Custom Params': 'Hadoop Params',
+  'Sqoop Advanced Parameters': 'Sqoop Params',
+  'Sqoop Job Name': 'Job Name',
   'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
   'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
   'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
@@ -554,6 +557,8 @@ export default {
   'Please enter Lines Terminated': 'Please enter Lines Terminated',
   'Please enter Concurrency': 'Please enter Concurrency',
   'Please enter Update Key': 'Please enter Update Key',
+  'Please enter Job Name(required)': 'Please enter Job Name(required)',
+  'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)',
   Direct: 'Direct',
   Type: 'Type',
   ModelType: 'ModelType',
@@ -587,6 +592,8 @@ export default {
   'All Columns': 'All Columns',
   'Some Columns': 'Some Columns',
   'Branch flow': 'Branch flow',
+  'Custom Job': 'Custom Job',
+  'Custom Script': 'Custom Script',
   'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
   'Successful branch flow and failed branch flow are required': 'Successful branch flow and failed branch flow are required',
   'Unauthorized or deleted resources': 'Unauthorized or deleted resources',

7  dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js

@@ -540,6 +540,9 @@ export default {
   'Whether directory': '是否文件夹',
   Yes: '是',
   No: '否',
+  'Hadoop Custom Params': 'Hadoop参数',
+  'Sqoop Advanced Parameters': 'Sqoop参数',
+  'Sqoop Job Name': '任务名称',
   'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
   'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
   'Please enter Columns (Comma separated)': '请输入列名 , 隔开',
@@ -554,6 +557,8 @@ export default {
   'Please enter Lines Terminated': '请输入行分隔符',
   'Please enter Concurrency': '请输入并发度',
   'Please enter Update Key': '请输入更新列',
+  'Please enter Job Name(required)': '请输入任务名称(必填)',
+  'Please enter Custom Shell(required)': '请输入自定义脚本',
   Direct: '流向',
   Type: '类型',
   ModelType: '模式',
@@ -587,6 +592,8 @@ export default {
   'All Columns': '全表导入',
   'Some Columns': '选择列',
   'Branch flow': '分支流转',
+  'Custom Job': '自定义任务',
+  'Custom Script': '自定义脚本',
   'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
   'Successful branch flow and failed branch flow are required': '成功分支流转和失败分支流转必填',
   'Unauthorized or deleted resources': '未授权或已删除资源',

1  pom.xml

@@ -821,6 +821,7 @@
                         <include>**/server/worker/task/spark/SparkTaskTest.java</include>
                         <include>**/server/worker/task/EnvFileTest.java</include>
                         <include>**/server/worker/task/spark/SparkTaskTest.java</include>
+                        <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
                         <include>**/server/worker/EnvFileTest.java</include>
                         <include>**/service/quartz/cron/CronUtilsTest.java</include>
                         <include>**/service/zk/DefaultEnsembleProviderTest.java</include>
