|
|
|
@@ -97,7 +97,7 @@ public class SqoopTaskTest {
|
|
|
|
|
SqoopJobGenerator generator = new SqoopJobGenerator(); |
|
|
|
|
String mysqlToHdfsScript = generator.generateSqoopJob(mysqlToHdfsParams, mysqlTaskExecutionContext); |
|
|
|
|
String mysqlToHdfsExpected = |
|
|
|
|
-                "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect jdbc:mysql://192.168.0.111:3306/test "
|
|
|
|
+                "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect \"jdbc:mysql://192.168.0.111:3306/test\" "
|
|
|
|
+ "--username kylo --password \"123456\" --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile " |
|
|
|
|
+ "--delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'"; |
|
|
|
|
Assert.assertEquals(mysqlToHdfsExpected, mysqlToHdfsScript); |
|
|
|
@@ -111,7 +111,7 @@ public class SqoopTaskTest {
|
|
|
|
|
SqoopParameters hdfsToMysqlParams = JSONUtils.parseObject(hdfsToMysql, SqoopParameters.class); |
|
|
|
|
String hdfsToMysqlScript = generator.generateSqoopJob(hdfsToMysqlParams, mysqlTaskExecutionContext); |
|
|
|
|
String hdfsToMysqlScriptExpected = |
|
|
|
|
-                "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test "
|
|
|
|
+                "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect \"jdbc:mysql://192.168.0.111:3306/test\" "
|
|
|
|
+ "--username kylo --password \"123456\" --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' " |
|
|
|
|
+ "--lines-terminated-by '\\n' --update-key id --update-mode allowinsert"; |
|
|
|
|
Assert.assertEquals(hdfsToMysqlScriptExpected, hdfsToMysqlScript); |
|
|
|
@@ -128,7 +128,7 @@ public class SqoopTaskTest {
|
|
|
|
|
String hiveToMysqlScript = generator.generateSqoopJob(hiveToMysqlParams, mysqlTaskExecutionContext); |
|
|
|
|
String hiveToMysqlExpected = |
|
|
|
|
"sqoop export -D mapred.job.name=sqoop_import -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date " |
|
|
|
|
-                + "--hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password \"123456\" --table person_3 "
|
|
|
|
+                + "--hcatalog-partition-values 2020-02-17 --connect \"jdbc:mysql://192.168.0.111:3306/test\" --username kylo --password \"123456\" --table person_3 "
|
|
|
|
+ "--fields-terminated-by '@' --lines-terminated-by '\\n'"; |
|
|
|
|
Assert.assertEquals(hiveToMysqlExpected, hiveToMysqlScript); |
|
|
|
|
|
|
|
|
@@ -143,7 +143,7 @@ public class SqoopTaskTest {
|
|
|
|
|
SqoopParameters mysqlToHiveParams = JSONUtils.parseObject(mysqlToHive, SqoopParameters.class); |
|
|
|
|
String mysqlToHiveScript = generator.generateSqoopJob(mysqlToHiveParams, mysqlTaskExecutionContext); |
|
|
|
|
String mysqlToHiveExpected = |
|
|
|
|
-                "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password \"123456\" "
|
|
|
|
+                "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect \"jdbc:mysql://192.168.0.111:3306/test\" --username kylo --password \"123456\" "
|
|
|
|
+ "--query \"SELECT * FROM person_2 WHERE \\$CONDITIONS\" --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 " |
|
|
|
|
+ "--create-hive-table --hive-overwrite --delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16"; |
|
|
|
|
Assert.assertEquals(mysqlToHiveExpected, mysqlToHiveScript); |
|
|
|
|