samz406
5 years ago
committed by
GitHub
86 changed files with 3732 additions and 1678 deletions
@ -1,49 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<!-- |
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more |
||||
~ contributor license agreements. See the NOTICE file distributed with |
||||
~ this work for additional information regarding copyright ownership. |
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
~ (the "License"); you may not use this file except in compliance with |
||||
~ the License. You may obtain a copy of the License at |
||||
~ |
||||
~ http://www.apache.org/licenses/LICENSE-2.0 |
||||
~ |
||||
~ Unless required by applicable law or agreed to in writing, software |
||||
~ distributed under the License is distributed on an "AS IS" BASIS, |
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
~ See the License for the specific language governing permissions and |
||||
~ limitations under the License. |
||||
--> |
||||
|
||||
<!-- Logback configuration. See http://logback.qos.ch/manual/index.html --> |
||||
<configuration scan="true" scanPeriod="120 seconds"> <!--debug="true" --> |
||||
<property name="log.base" value="logs" /> |
||||
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
|
||||
<appender name="ALERTLOGFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||
<file>${log.base}/dolphinscheduler-alert.log</file> |
||||
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
||||
<fileNamePattern>${log.base}/dolphinscheduler-alert.%d{yyyy-MM-dd_HH}.%i.log</fileNamePattern> |
||||
<maxHistory>20</maxHistory> |
||||
<maxFileSize>64MB</maxFileSize> |
||||
</rollingPolicy> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
|
||||
<root level="INFO"> |
||||
<appender-ref ref="ALERTLOGFILE"/> |
||||
</root> |
||||
</configuration> |
@ -1,60 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<!-- |
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more |
||||
~ contributor license agreements. See the NOTICE file distributed with |
||||
~ this work for additional information regarding copyright ownership. |
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
~ (the "License"); you may not use this file except in compliance with |
||||
~ the License. You may obtain a copy of the License at |
||||
~ |
||||
~ http://www.apache.org/licenses/LICENSE-2.0 |
||||
~ |
||||
~ Unless required by applicable law or agreed to in writing, software |
||||
~ distributed under the License is distributed on an "AS IS" BASIS, |
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
~ See the License for the specific language governing permissions and |
||||
~ limitations under the License. |
||||
--> |
||||
|
||||
<!-- Logback configuration. See http://logback.qos.ch/manual/index.html --> |
||||
<configuration scan="true" scanPeriod="120 seconds"> |
||||
<logger name="org.apache.zookeeper" level="WARN"/> |
||||
<logger name="org.apache.hbase" level="WARN"/> |
||||
<logger name="org.apache.hadoop" level="WARN"/> |
||||
|
||||
<property name="log.base" value="logs" /> |
||||
|
||||
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
|
||||
<appender name="APISERVERLOGFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||
<!-- Log level filter --> |
||||
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"> |
||||
<level>INFO</level> |
||||
</filter> |
||||
<file>${log.base}/dolphinscheduler-api-server.log</file> |
||||
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
||||
<fileNamePattern>${log.base}/dolphinscheduler-api-server.%d{yyyy-MM-dd_HH}.%i.log</fileNamePattern> |
||||
<maxHistory>168</maxHistory> |
||||
<maxFileSize>64MB</maxFileSize> |
||||
</rollingPolicy> |
||||
|
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
|
||||
</appender> |
||||
|
||||
<root level="INFO"> |
||||
<appender-ref ref="APISERVERLOGFILE" /> |
||||
</root> |
||||
</configuration> |
@ -0,0 +1,192 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.common.task.datax; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
|
||||
import org.apache.commons.lang.StringUtils; |
||||
import org.apache.dolphinscheduler.common.task.AbstractParameters; |
||||
|
||||
/** |
||||
* DataX parameter |
||||
*/ |
||||
public class DataxParameters extends AbstractParameters { |
||||
|
||||
/** |
||||
* data source type,eg MYSQL, POSTGRES ... |
||||
*/ |
||||
private String dsType; |
||||
|
||||
/** |
||||
* datasource id |
||||
*/ |
||||
private int dataSource; |
||||
|
||||
/** |
||||
* data target type,eg MYSQL, POSTGRES ... |
||||
*/ |
||||
private String dtType; |
||||
|
||||
/** |
||||
* datatarget id |
||||
*/ |
||||
private int dataTarget; |
||||
|
||||
/** |
||||
* sql |
||||
*/ |
||||
private String sql; |
||||
|
||||
/** |
||||
* target table |
||||
*/ |
||||
private String targetTable; |
||||
|
||||
/** |
||||
* Pre Statements |
||||
*/ |
||||
private List<String> preStatements; |
||||
|
||||
/** |
||||
* Post Statements |
||||
*/ |
||||
private List<String> postStatements; |
||||
|
||||
/** |
||||
* speed byte num |
||||
*/ |
||||
private int jobSpeedByte; |
||||
|
||||
/** |
||||
* speed record count |
||||
*/ |
||||
private int jobSpeedRecord; |
||||
|
||||
public String getDsType() { |
||||
return dsType; |
||||
} |
||||
|
||||
public void setDsType(String dsType) { |
||||
this.dsType = dsType; |
||||
} |
||||
|
||||
public int getDataSource() { |
||||
return dataSource; |
||||
} |
||||
|
||||
public void setDataSource(int dataSource) { |
||||
this.dataSource = dataSource; |
||||
} |
||||
|
||||
public String getDtType() { |
||||
return dtType; |
||||
} |
||||
|
||||
public void setDtType(String dtType) { |
||||
this.dtType = dtType; |
||||
} |
||||
|
||||
public int getDataTarget() { |
||||
return dataTarget; |
||||
} |
||||
|
||||
public void setDataTarget(int dataTarget) { |
||||
this.dataTarget = dataTarget; |
||||
} |
||||
|
||||
public String getSql() { |
||||
return sql; |
||||
} |
||||
|
||||
public void setSql(String sql) { |
||||
this.sql = sql; |
||||
} |
||||
|
||||
public String getTargetTable() { |
||||
return targetTable; |
||||
} |
||||
|
||||
public void setTargetTable(String targetTable) { |
||||
this.targetTable = targetTable; |
||||
} |
||||
|
||||
public List<String> getPreStatements() { |
||||
return preStatements; |
||||
} |
||||
|
||||
public void setPreStatements(List<String> preStatements) { |
||||
this.preStatements = preStatements; |
||||
} |
||||
|
||||
public List<String> getPostStatements() { |
||||
return postStatements; |
||||
} |
||||
|
||||
public void setPostStatements(List<String> postStatements) { |
||||
this.postStatements = postStatements; |
||||
} |
||||
|
||||
public int getJobSpeedByte() { |
||||
return jobSpeedByte; |
||||
} |
||||
|
||||
public void setJobSpeedByte(int jobSpeedByte) { |
||||
this.jobSpeedByte = jobSpeedByte; |
||||
} |
||||
|
||||
public int getJobSpeedRecord() { |
||||
return jobSpeedRecord; |
||||
} |
||||
|
||||
public void setJobSpeedRecord(int jobSpeedRecord) { |
||||
this.jobSpeedRecord = jobSpeedRecord; |
||||
} |
||||
|
||||
@Override |
||||
public boolean checkParameters() { |
||||
if (!(dataSource != 0 |
||||
&& dataTarget != 0 |
||||
&& StringUtils.isNotEmpty(sql) |
||||
&& StringUtils.isNotEmpty(targetTable))) { |
||||
return false; |
||||
} |
||||
|
||||
return true; |
||||
} |
||||
|
||||
@Override |
||||
public List<String> getResourceFilesList() { |
||||
return new ArrayList<>(); |
||||
} |
||||
|
||||
@Override |
||||
public String toString() { |
||||
return "DataxParameters{" + |
||||
"dsType='" + dsType + '\'' + |
||||
", dataSource=" + dataSource + |
||||
", dtType='" + dtType + '\'' + |
||||
", dataTarget=" + dataTarget + |
||||
", sql='" + sql + '\'' + |
||||
", targetTable='" + targetTable + '\'' + |
||||
", preStatements=" + preStatements + |
||||
", postStatements=" + postStatements + |
||||
", jobSpeedByte=" + jobSpeedByte + |
||||
", jobSpeedRecord=" + jobSpeedRecord + |
||||
'}'; |
||||
} |
||||
} |
@ -0,0 +1,169 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<!-- |
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more |
||||
~ contributor license agreements. See the NOTICE file distributed with |
||||
~ this work for additional information regarding copyright ownership. |
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
~ (the "License"); you may not use this file except in compliance with |
||||
~ the License. You may obtain a copy of the License at |
||||
~ |
||||
~ http://www.apache.org/licenses/LICENSE-2.0 |
||||
~ |
||||
~ Unless required by applicable law or agreed to in writing, software |
||||
~ distributed under the License is distributed on an "AS IS" BASIS, |
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
~ See the License for the specific language governing permissions and |
||||
~ limitations under the License. |
||||
--> |
||||
|
||||
<!-- Logback configuration. See http://logback.qos.ch/manual/index.html --> |
||||
<configuration scan="true" scanPeriod="120 seconds"> <!--debug="true" --> |
||||
|
||||
<property name="log.base" value="logs"/> |
||||
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
|
||||
|
||||
<!-- master server logback config start --> |
||||
<appender name="MASTERLOGFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||
<file>${log.base}/dolphinscheduler-master.log</file> |
||||
<!--<filter class="org.apache.dolphinscheduler.common.log.MasterLogFilter"> |
||||
<level>INFO</level> |
||||
</filter>--> |
||||
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
||||
<fileNamePattern>${log.base}/dolphinscheduler-master.%d{yyyy-MM-dd_HH}.%i.log</fileNamePattern> |
||||
<maxHistory>168</maxHistory> |
||||
<maxFileSize>200MB</maxFileSize> |
||||
</rollingPolicy> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
<!-- master server logback config end --> |
||||
|
||||
|
||||
<!-- worker server logback config start --> |
||||
<conversionRule conversionWord="messsage" |
||||
converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/> |
||||
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender"> |
||||
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"> |
||||
<level>INFO</level> |
||||
</filter> |
||||
<filter class="org.apache.dolphinscheduler.common.log.TaskLogFilter"/> |
||||
<Discriminator class="org.apache.dolphinscheduler.common.log.TaskLogDiscriminator"> |
||||
<key>taskAppId</key> |
||||
<logBase>${log.base}</logBase> |
||||
</Discriminator> |
||||
<sift> |
||||
<appender name="FILE-${taskAppId}" class="ch.qos.logback.core.FileAppender"> |
||||
<file>${log.base}/${taskAppId}.log</file> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
<append>true</append> |
||||
</appender> |
||||
</sift> |
||||
</appender> |
||||
<appender name="WORKERLOGFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||
<file>${log.base}/dolphinscheduler-worker.log</file> |
||||
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"> |
||||
<level>INFO</level> |
||||
</filter> |
||||
<filter class="org.apache.dolphinscheduler.common.log.WorkerLogFilter"/> |
||||
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
||||
<fileNamePattern>${log.base}/dolphinscheduler-worker.%d{yyyy-MM-dd_HH}.%i.log</fileNamePattern> |
||||
<maxHistory>168</maxHistory> |
||||
<maxFileSize>200MB</maxFileSize> |
||||
</rollingPolicy> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
<!-- worker server logback config end --> |
||||
|
||||
|
||||
<!-- alert server logback config start --> |
||||
<appender name="ALERTLOGFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||
<file>${log.base}/dolphinscheduler-alert.log</file> |
||||
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
||||
<fileNamePattern>${log.base}/dolphinscheduler-alert.%d{yyyy-MM-dd_HH}.%i.log</fileNamePattern> |
||||
<maxHistory>20</maxHistory> |
||||
<maxFileSize>64MB</maxFileSize> |
||||
</rollingPolicy> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
<!-- alert server logback config end --> |
||||
|
||||
|
||||
<!-- api server logback config start --> |
||||
<appender name="APILOGFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||
<file>${log.base}/dolphinscheduler-api-server.log</file> |
||||
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"> |
||||
<level>INFO</level> |
||||
</filter> |
||||
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
||||
<fileNamePattern>${log.base}/dolphinscheduler-api-server.%d{yyyy-MM-dd_HH}.%i.log</fileNamePattern> |
||||
<maxHistory>168</maxHistory> |
||||
<maxFileSize>64MB</maxFileSize> |
||||
</rollingPolicy> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
<!-- api server logback config end --> |
||||
|
||||
<logger name="org.apache.zookeeper" level="WARN"/> |
||||
<logger name="org.apache.hbase" level="WARN"/> |
||||
<logger name="org.apache.hadoop" level="WARN"/> |
||||
|
||||
|
||||
<root level="INFO"> |
||||
<appender-ref ref="STDOUT"/> |
||||
|
||||
<if condition='p("server").contains("master-server")'> |
||||
<then> |
||||
<appender-ref ref="MASTERLOGFILE"/> |
||||
</then> |
||||
</if> |
||||
<if condition='p("server").contains("worker-server")'> |
||||
<then> |
||||
<appender-ref ref="TASKLOGFILE"/> |
||||
<appender-ref ref="WORKERLOGFILE"/> |
||||
</then> |
||||
</if> |
||||
<if condition='p("server").contains("alert-server")'> |
||||
<then> |
||||
<appender-ref ref="ALERTLOGFILE"/> |
||||
</then> |
||||
</if> |
||||
<if condition='p("server").contains("api-server")'> |
||||
<then> |
||||
<appender-ref ref="APILOGFILE"/> |
||||
</then> |
||||
</if> |
||||
</root> |
||||
|
||||
</configuration> |
@ -0,0 +1,118 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.common.log; |
||||
|
||||
import ch.qos.logback.classic.Level; |
||||
import ch.qos.logback.classic.spi.ILoggingEvent; |
||||
import ch.qos.logback.classic.spi.IThrowableProxy; |
||||
import ch.qos.logback.classic.spi.LoggerContextVO; |
||||
import ch.qos.logback.core.spi.FilterReply; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.junit.Assert; |
||||
import org.junit.Test; |
||||
import org.slf4j.Marker; |
||||
import java.util.Map; |
||||
|
||||
public class MasterLogFilterTest { |
||||
|
||||
@Test |
||||
public void decide() { |
||||
MasterLogFilter masterLogFilter = new MasterLogFilter(); |
||||
|
||||
|
||||
FilterReply filterReply = masterLogFilter.decide(new ILoggingEvent() { |
||||
@Override |
||||
public String getThreadName() { |
||||
return Constants.THREAD_NAME_MASTER_SERVER; |
||||
} |
||||
|
||||
@Override |
||||
public Level getLevel() { |
||||
return Level.INFO; |
||||
} |
||||
|
||||
@Override |
||||
public String getMessage() { |
||||
return "master insert into queue success, task : shell2"; |
||||
// return "consume tasks: [2_177_2_704_-1],there still have 0 tasks need to be executed";
|
||||
} |
||||
|
||||
@Override |
||||
public Object[] getArgumentArray() { |
||||
return new Object[0]; |
||||
} |
||||
|
||||
@Override |
||||
public String getFormattedMessage() { |
||||
return "master insert into queue success, task : shell2"; |
||||
} |
||||
|
||||
@Override |
||||
public String getLoggerName() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public LoggerContextVO getLoggerContextVO() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public IThrowableProxy getThrowableProxy() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public StackTraceElement[] getCallerData() { |
||||
return new StackTraceElement[0]; |
||||
} |
||||
|
||||
@Override |
||||
public boolean hasCallerData() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public Marker getMarker() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMDCPropertyMap() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMdc() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public long getTimeStamp() { |
||||
return 0; |
||||
} |
||||
|
||||
@Override |
||||
public void prepareForDeferredProcessing() { |
||||
|
||||
} |
||||
}); |
||||
|
||||
Assert.assertEquals(FilterReply.ACCEPT, filterReply); |
||||
|
||||
} |
||||
} |
@ -0,0 +1,179 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.common.log; |
||||
|
||||
|
||||
import ch.qos.logback.classic.Level; |
||||
import ch.qos.logback.classic.spi.ILoggingEvent; |
||||
import ch.qos.logback.classic.spi.IThrowableProxy; |
||||
import ch.qos.logback.classic.spi.LoggerContextVO; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.apache.dolphinscheduler.common.utils.SensitiveLogUtils; |
||||
import org.junit.Assert; |
||||
import org.junit.Test; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.slf4j.Marker; |
||||
|
||||
import java.util.Map; |
||||
import java.util.regex.Matcher; |
||||
import java.util.regex.Pattern; |
||||
|
||||
public class SensitiveDataConverterTest { |
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(SensitiveDataConverterTest.class); |
||||
|
||||
/** |
||||
* password pattern |
||||
*/ |
||||
private final Pattern pwdPattern = Pattern.compile(Constants.DATASOURCE_PASSWORD_REGEX); |
||||
|
||||
private final String logMsg = "{\"address\":\"jdbc:mysql://192.168.xx.xx:3306\"," + |
||||
"\"database\":\"carbond\"," + |
||||
"\"jdbcUrl\":\"jdbc:mysql://192.168.xx.xx:3306/ods\"," + |
||||
"\"user\":\"view\"," + |
||||
"\"password\":\"view1\"}"; |
||||
|
||||
private final String maskLogMsg = "{\"address\":\"jdbc:mysql://192.168.xx.xx:3306\"," + |
||||
"\"database\":\"carbond\"," + |
||||
"\"jdbcUrl\":\"jdbc:mysql://192.168.xx.xx:3306/ods\"," + |
||||
"\"user\":\"view\"," + |
||||
"\"password\":\"******\"}"; |
||||
@Test |
||||
public void convert() { |
||||
SensitiveDataConverter sensitiveDataConverter = new SensitiveDataConverter(); |
||||
String result = sensitiveDataConverter.convert(new ILoggingEvent() { |
||||
@Override |
||||
public String getThreadName() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Level getLevel() { |
||||
return Level.INFO; |
||||
} |
||||
|
||||
@Override |
||||
public String getMessage() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Object[] getArgumentArray() { |
||||
return new Object[0]; |
||||
} |
||||
|
||||
@Override |
||||
public String getFormattedMessage() { |
||||
return logMsg; |
||||
} |
||||
|
||||
@Override |
||||
public String getLoggerName() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public LoggerContextVO getLoggerContextVO() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public IThrowableProxy getThrowableProxy() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public StackTraceElement[] getCallerData() { |
||||
return new StackTraceElement[0]; |
||||
} |
||||
|
||||
@Override |
||||
public boolean hasCallerData() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public Marker getMarker() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMDCPropertyMap() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMdc() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public long getTimeStamp() { |
||||
return 0; |
||||
} |
||||
|
||||
@Override |
||||
public void prepareForDeferredProcessing() { |
||||
|
||||
} |
||||
}); |
||||
|
||||
Assert.assertEquals(maskLogMsg, passwordHandler(pwdPattern, logMsg)); |
||||
|
||||
} |
||||
|
||||
/** |
||||
* mask sensitive logMsg - sql task datasource password |
||||
*/ |
||||
@Test |
||||
public void testPwdLogMsgConverter() { |
||||
logger.info("parameter : {}", logMsg); |
||||
logger.info("parameter : {}", passwordHandler(pwdPattern, logMsg)); |
||||
|
||||
Assert.assertNotEquals(logMsg, passwordHandler(pwdPattern, logMsg)); |
||||
Assert.assertEquals(maskLogMsg, passwordHandler(pwdPattern, logMsg)); |
||||
|
||||
} |
||||
|
||||
/** |
||||
* password regex test |
||||
* |
||||
* @param logMsg original log |
||||
*/ |
||||
private static String passwordHandler(Pattern pattern, String logMsg) { |
||||
|
||||
Matcher matcher = pattern.matcher(logMsg); |
||||
|
||||
StringBuffer sb = new StringBuffer(logMsg.length()); |
||||
|
||||
while (matcher.find()) { |
||||
|
||||
String password = matcher.group(); |
||||
|
||||
String maskPassword = SensitiveLogUtils.maskDataSourcePwd(password); |
||||
|
||||
matcher.appendReplacement(sb, maskPassword); |
||||
} |
||||
matcher.appendTail(sb); |
||||
|
||||
return sb.toString(); |
||||
} |
||||
|
||||
|
||||
|
||||
} |
@ -0,0 +1,155 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.common.log; |
||||
|
||||
import ch.qos.logback.classic.Level; |
||||
import ch.qos.logback.classic.spi.ILoggingEvent; |
||||
import ch.qos.logback.classic.spi.IThrowableProxy; |
||||
import ch.qos.logback.classic.spi.LoggerContextVO; |
||||
import org.junit.Assert; |
||||
import org.junit.Before; |
||||
import org.junit.Test; |
||||
import org.slf4j.Marker; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static org.junit.Assert.*; |
||||
|
||||
public class TaskLogDiscriminatorTest { |
||||
|
||||
/** |
||||
* log base |
||||
*/ |
||||
private String logBase = "logs"; |
||||
|
||||
TaskLogDiscriminator taskLogDiscriminator; |
||||
|
||||
@Before |
||||
public void before(){ |
||||
taskLogDiscriminator = new TaskLogDiscriminator(); |
||||
taskLogDiscriminator.setLogBase("logs"); |
||||
taskLogDiscriminator.setKey("123"); |
||||
} |
||||
|
||||
@Test |
||||
public void getDiscriminatingValue() { |
||||
String result = taskLogDiscriminator.getDiscriminatingValue(new ILoggingEvent() { |
||||
@Override |
||||
public String getThreadName() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Level getLevel() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public String getMessage() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Object[] getArgumentArray() { |
||||
return new Object[0]; |
||||
} |
||||
|
||||
@Override |
||||
public String getFormattedMessage() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public String getLoggerName() { |
||||
return "[taskAppId=TASK-1-1-1"; |
||||
} |
||||
|
||||
@Override |
||||
public LoggerContextVO getLoggerContextVO() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public IThrowableProxy getThrowableProxy() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public StackTraceElement[] getCallerData() { |
||||
return new StackTraceElement[0]; |
||||
} |
||||
|
||||
@Override |
||||
public boolean hasCallerData() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public Marker getMarker() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMDCPropertyMap() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMdc() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public long getTimeStamp() { |
||||
return 0; |
||||
} |
||||
|
||||
@Override |
||||
public void prepareForDeferredProcessing() { |
||||
|
||||
} |
||||
}); |
||||
Assert.assertEquals("1/1/", result); |
||||
} |
||||
|
||||
@Test |
||||
public void start() { |
||||
taskLogDiscriminator.start(); |
||||
Assert.assertEquals(true, taskLogDiscriminator.isStarted()); |
||||
} |
||||
|
||||
@Test |
||||
public void getKey() { |
||||
Assert.assertEquals("123", taskLogDiscriminator.getKey()); |
||||
} |
||||
|
||||
@Test |
||||
public void setKey() { |
||||
|
||||
taskLogDiscriminator.setKey("123"); |
||||
} |
||||
|
||||
@Test |
||||
public void getLogBase() { |
||||
Assert.assertEquals("logs", taskLogDiscriminator.getLogBase()); |
||||
} |
||||
|
||||
@Test |
||||
public void setLogBase() { |
||||
taskLogDiscriminator.setLogBase("logs"); |
||||
} |
||||
} |
@ -0,0 +1,120 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.common.log; |
||||
|
||||
import ch.qos.logback.classic.Level; |
||||
import ch.qos.logback.classic.spi.ILoggingEvent; |
||||
import ch.qos.logback.classic.spi.IThrowableProxy; |
||||
import ch.qos.logback.classic.spi.LoggerContextVO; |
||||
import ch.qos.logback.core.spi.FilterReply; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.apache.dolphinscheduler.common.utils.LoggerUtils; |
||||
import org.junit.Assert; |
||||
import org.junit.Test; |
||||
import org.slf4j.Marker; |
||||
|
||||
import java.util.Map; |
||||
|
||||
|
||||
public class TaskLogFilterTest { |
||||
|
||||
@Test |
||||
public void decide() { |
||||
TaskLogFilter taskLogFilter = new TaskLogFilter(); |
||||
|
||||
|
||||
FilterReply filterReply = taskLogFilter.decide(new ILoggingEvent() { |
||||
@Override |
||||
public String getThreadName() { |
||||
return LoggerUtils.TASK_LOGGER_THREAD_NAME; |
||||
} |
||||
|
||||
@Override |
||||
public Level getLevel() { |
||||
return Level.INFO; |
||||
} |
||||
|
||||
@Override |
||||
public String getMessage() { |
||||
return "raw script : echo 222"; |
||||
} |
||||
|
||||
@Override |
||||
public Object[] getArgumentArray() { |
||||
return new Object[0]; |
||||
} |
||||
|
||||
@Override |
||||
public String getFormattedMessage() { |
||||
return "raw script : echo 222"; |
||||
} |
||||
|
||||
@Override |
||||
public String getLoggerName() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public LoggerContextVO getLoggerContextVO() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public IThrowableProxy getThrowableProxy() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public StackTraceElement[] getCallerData() { |
||||
return new StackTraceElement[0]; |
||||
} |
||||
|
||||
@Override |
||||
public boolean hasCallerData() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public Marker getMarker() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMDCPropertyMap() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMdc() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public long getTimeStamp() { |
||||
return 0; |
||||
} |
||||
|
||||
@Override |
||||
public void prepareForDeferredProcessing() { |
||||
|
||||
} |
||||
}); |
||||
|
||||
Assert.assertEquals(FilterReply.ACCEPT, filterReply); |
||||
|
||||
} |
||||
} |
@ -0,0 +1,119 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.common.log; |
||||
|
||||
import ch.qos.logback.classic.Level; |
||||
import ch.qos.logback.classic.spi.ILoggingEvent; |
||||
import ch.qos.logback.classic.spi.IThrowableProxy; |
||||
import ch.qos.logback.classic.spi.LoggerContextVO; |
||||
import ch.qos.logback.core.spi.FilterReply; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.junit.Assert; |
||||
import org.junit.Test; |
||||
import org.slf4j.Marker; |
||||
|
||||
import java.util.Map; |
||||
|
||||
|
||||
public class WorkerLogFilterTest { |
||||
|
||||
@Test |
||||
public void decide() { |
||||
WorkerLogFilter workerLogFilter = new WorkerLogFilter(); |
||||
|
||||
|
||||
FilterReply filterReply = workerLogFilter.decide(new ILoggingEvent() { |
||||
@Override |
||||
public String getThreadName() { |
||||
return Constants.THREAD_NAME_WORKER_SERVER; |
||||
} |
||||
|
||||
@Override |
||||
public Level getLevel() { |
||||
return Level.INFO; |
||||
} |
||||
|
||||
@Override |
||||
public String getMessage() { |
||||
return "consume tasks: [2_177_2_704_-1],there still have 0 tasks need to be executed"; |
||||
} |
||||
|
||||
@Override |
||||
public Object[] getArgumentArray() { |
||||
return new Object[0]; |
||||
} |
||||
|
||||
@Override |
||||
public String getFormattedMessage() { |
||||
return "consume tasks: [2_177_2_704_-1],there still have 0 tasks need to be executed"; |
||||
} |
||||
|
||||
@Override |
||||
public String getLoggerName() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public LoggerContextVO getLoggerContextVO() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public IThrowableProxy getThrowableProxy() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public StackTraceElement[] getCallerData() { |
||||
return new StackTraceElement[0]; |
||||
} |
||||
|
||||
@Override |
||||
public boolean hasCallerData() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public Marker getMarker() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMDCPropertyMap() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public Map<String, String> getMdc() { |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public long getTimeStamp() { |
||||
return 0; |
||||
} |
||||
|
||||
@Override |
||||
public void prepareForDeferredProcessing() { |
||||
|
||||
} |
||||
}); |
||||
|
||||
Assert.assertEquals(FilterReply.ACCEPT, filterReply); |
||||
|
||||
} |
||||
} |
@ -0,0 +1,43 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.common.utils; |
||||
|
||||
import org.junit.Assert; |
||||
import org.junit.Test; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
|
||||
import java.util.List; |
||||
|
||||
public class LoggerUtilsTest { |
||||
private Logger logger = LoggerFactory.getLogger(LoggerUtilsTest.class); |
||||
|
||||
@Test |
||||
public void buildTaskId() { |
||||
|
||||
String taskId = LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX,79,4084,15210); |
||||
|
||||
Assert.assertEquals(" - [taskAppId=TASK-79-4084-15210]", taskId); |
||||
} |
||||
|
||||
@Test |
||||
public void getAppIds() { |
||||
List<String> appIdList = LoggerUtils.getAppIds("Running job: application_1_1",logger); |
||||
Assert.assertEquals("application_1_1", appIdList.get(0)); |
||||
|
||||
} |
||||
} |
@ -0,0 +1,129 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.server.utils; |
||||
|
||||
|
||||
import org.apache.dolphinscheduler.common.enums.DbType; |
||||
|
||||
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser; |
||||
import com.alibaba.druid.sql.dialect.oracle.parser.OracleStatementParser; |
||||
import com.alibaba.druid.sql.dialect.postgresql.parser.PGSQLStatementParser; |
||||
import com.alibaba.druid.sql.dialect.sqlserver.parser.SQLServerStatementParser; |
||||
import com.alibaba.druid.sql.parser.SQLStatementParser; |
||||
|
||||
|
||||
public class DataxUtils { |
||||
|
||||
public static final String DATAX_READER_PLUGIN_MYSQL = "mysqlreader"; |
||||
|
||||
public static final String DATAX_READER_PLUGIN_POSTGRESQL = "postgresqlreader"; |
||||
|
||||
public static final String DATAX_READER_PLUGIN_ORACLE = "oraclereader"; |
||||
|
||||
public static final String DATAX_READER_PLUGIN_SQLSERVER = "sqlserverreader"; |
||||
|
||||
public static final String DATAX_WRITER_PLUGIN_MYSQL = "mysqlwriter"; |
||||
|
||||
public static final String DATAX_WRITER_PLUGIN_POSTGRESQL = "postgresqlwriter"; |
||||
|
||||
public static final String DATAX_WRITER_PLUGIN_ORACLE = "oraclewriter"; |
||||
|
||||
public static final String DATAX_WRITER_PLUGIN_SQLSERVER = "sqlserverwriter"; |
||||
|
||||
public static String getReaderPluginName(DbType dbType) { |
||||
switch (dbType) { |
||||
case MYSQL: |
||||
return DATAX_READER_PLUGIN_MYSQL; |
||||
case POSTGRESQL: |
||||
return DATAX_READER_PLUGIN_POSTGRESQL; |
||||
case ORACLE: |
||||
return DATAX_READER_PLUGIN_ORACLE; |
||||
case SQLSERVER: |
||||
return DATAX_READER_PLUGIN_SQLSERVER; |
||||
default: |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
public static String getWriterPluginName(DbType dbType) { |
||||
switch (dbType) { |
||||
case MYSQL: |
||||
return DATAX_WRITER_PLUGIN_MYSQL; |
||||
case POSTGRESQL: |
||||
return DATAX_WRITER_PLUGIN_POSTGRESQL; |
||||
case ORACLE: |
||||
return DATAX_WRITER_PLUGIN_ORACLE; |
||||
case SQLSERVER: |
||||
return DATAX_WRITER_PLUGIN_SQLSERVER; |
||||
default: |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
public static SQLStatementParser getSqlStatementParser(DbType dbType, String sql) { |
||||
switch (dbType) { |
||||
case MYSQL: |
||||
return new MySqlStatementParser(sql); |
||||
case POSTGRESQL: |
||||
return new PGSQLStatementParser(sql); |
||||
case ORACLE: |
||||
return new OracleStatementParser(sql); |
||||
case SQLSERVER: |
||||
return new SQLServerStatementParser(sql); |
||||
default: |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
public static String[] convertKeywordsColumns(DbType dbType, String[] columns) { |
||||
if (columns == null) { |
||||
return null; |
||||
} |
||||
|
||||
String[] toColumns = new String[columns.length]; |
||||
for (int i = 0; i < columns.length; i++ ) { |
||||
toColumns[i] = doConvertKeywordsColumn(dbType, columns[i]); |
||||
} |
||||
|
||||
return toColumns; |
||||
} |
||||
|
||||
public static String doConvertKeywordsColumn(DbType dbType, String column) { |
||||
if (column == null) { |
||||
return column; |
||||
} |
||||
|
||||
column = column.trim(); |
||||
column = column.replace("`", ""); |
||||
column = column.replace("\"", ""); |
||||
column = column.replace("'", ""); |
||||
|
||||
switch (dbType) { |
||||
case MYSQL: |
||||
return String.format("`%s`", column); |
||||
case POSTGRESQL: |
||||
return String.format("\"%s\"", column); |
||||
case ORACLE: |
||||
return String.format("\"%s\"", column); |
||||
case SQLSERVER: |
||||
return String.format("`%s`", column); |
||||
default: |
||||
return column; |
||||
} |
||||
} |
||||
|
||||
} |
@ -1,79 +0,0 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.server.utils; |
||||
|
||||
import org.quartz.impl.triggers.CronTriggerImpl; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
|
||||
import java.text.ParseException; |
||||
import java.util.Date; |
||||
import java.util.LinkedList; |
||||
import java.util.List; |
||||
|
||||
/** |
||||
* ScheduleUtils |
||||
*/ |
||||
public class ScheduleUtils { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ScheduleUtils.class); |
||||
|
||||
/** |
||||
* Get the execution time of the time interval |
||||
* @param cron |
||||
* @param from |
||||
* @param to |
||||
* @return |
||||
*/ |
||||
public static List<Date> getRecentTriggerTime(String cron, Date from, Date to) { |
||||
return getRecentTriggerTime(cron, Integer.MAX_VALUE, from, to); |
||||
} |
||||
|
||||
/** |
||||
* Get the execution time of the time interval |
||||
* @param cron |
||||
* @param size |
||||
* @param from |
||||
* @param to |
||||
* @return |
||||
*/ |
||||
public static List<Date> getRecentTriggerTime(String cron, int size, Date from, Date to) { |
||||
List list = new LinkedList<Date>(); |
||||
if(to.before(from)){ |
||||
logger.error("schedule date from:{} must before date to:{}!", from, to); |
||||
return list; |
||||
} |
||||
try { |
||||
CronTriggerImpl trigger = new CronTriggerImpl(); |
||||
trigger.setCronExpression(cron); |
||||
trigger.setStartTime(from); |
||||
trigger.setEndTime(to); |
||||
trigger.computeFirstFireTime(null); |
||||
for (int i = 0; i < size; i++) { |
||||
Date schedule = trigger.getNextFireTime(); |
||||
if(null == schedule){ |
||||
break; |
||||
} |
||||
list.add(schedule); |
||||
trigger.triggered(null); |
||||
} |
||||
} catch (ParseException e) { |
||||
logger.error("cron:{} error:{}", cron, e.getMessage()); |
||||
} |
||||
return java.util.Collections.unmodifiableList(list); |
||||
} |
||||
} |
@ -0,0 +1,522 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.server.worker.task.datax; |
||||
|
||||
|
||||
import java.io.File; |
||||
import java.nio.charset.Charset; |
||||
import java.nio.file.Files; |
||||
import java.nio.file.Path; |
||||
import java.nio.file.StandardOpenOption; |
||||
import java.nio.file.attribute.FileAttribute; |
||||
import java.nio.file.attribute.PosixFilePermission; |
||||
import java.nio.file.attribute.PosixFilePermissions; |
||||
import java.sql.Connection; |
||||
import java.sql.DriverManager; |
||||
import java.sql.PreparedStatement; |
||||
import java.sql.ResultSet; |
||||
import java.sql.ResultSetMetaData; |
||||
import java.sql.SQLException; |
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
import java.util.Set; |
||||
|
||||
import org.apache.commons.io.FileUtils; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.apache.dolphinscheduler.common.enums.DbType; |
||||
import org.apache.dolphinscheduler.common.job.db.BaseDataSource; |
||||
import org.apache.dolphinscheduler.common.job.db.DataSourceFactory; |
||||
import org.apache.dolphinscheduler.common.process.Property; |
||||
import org.apache.dolphinscheduler.common.task.AbstractParameters; |
||||
import org.apache.dolphinscheduler.common.task.datax.DataxParameters; |
||||
import org.apache.dolphinscheduler.common.utils.CollectionUtils; |
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils; |
||||
import org.apache.dolphinscheduler.common.utils.ParameterUtils; |
||||
import org.apache.dolphinscheduler.dao.ProcessDao; |
||||
import org.apache.dolphinscheduler.dao.entity.DataSource; |
||||
import org.apache.dolphinscheduler.dao.entity.ProcessInstance; |
||||
import org.apache.dolphinscheduler.server.utils.DataxUtils; |
||||
import org.apache.dolphinscheduler.server.utils.ParamUtils; |
||||
import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; |
||||
import org.apache.dolphinscheduler.server.worker.task.AbstractTask; |
||||
import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; |
||||
import org.apache.dolphinscheduler.server.worker.task.TaskProps; |
||||
import org.slf4j.Logger; |
||||
|
||||
import com.alibaba.druid.sql.ast.SQLStatement; |
||||
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; |
||||
import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr; |
||||
import com.alibaba.druid.sql.ast.statement.SQLSelect; |
||||
import com.alibaba.druid.sql.ast.statement.SQLSelectItem; |
||||
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock; |
||||
import com.alibaba.druid.sql.ast.statement.SQLSelectStatement; |
||||
import com.alibaba.druid.sql.ast.statement.SQLUnionQuery; |
||||
import com.alibaba.druid.sql.parser.SQLStatementParser; |
||||
import com.alibaba.fastjson.JSONObject; |
||||
|
||||
|
||||
/** |
||||
* DataX task |
||||
*/ |
||||
public class DataxTask extends AbstractTask { |
||||
|
||||
/** |
||||
* python process(datax only supports version 2.7 by default) |
||||
*/ |
||||
private static final String DATAX_PYTHON = "python2.7"; |
||||
|
||||
/** |
||||
* datax home path |
||||
*/ |
||||
private static final String DATAX_HOME_EVN = "${DATAX_HOME}"; |
||||
|
||||
/** |
||||
* datax channel count |
||||
*/ |
||||
private static final int DATAX_CHANNEL_COUNT = 1; |
||||
|
||||
/** |
||||
* datax parameters |
||||
*/ |
||||
private DataxParameters dataXParameters; |
||||
|
||||
/** |
||||
* task dir |
||||
*/ |
||||
private String taskDir; |
||||
|
||||
/** |
||||
* shell command executor |
||||
*/ |
||||
private ShellCommandExecutor shellCommandExecutor; |
||||
|
||||
/** |
||||
* process database access |
||||
*/ |
||||
private ProcessDao processDao; |
||||
|
||||
/** |
||||
* constructor |
||||
* |
||||
* @param props |
||||
* props |
||||
* @param logger |
||||
* logger |
||||
*/ |
||||
public DataxTask(TaskProps props, Logger logger) { |
||||
super(props, logger); |
||||
|
||||
this.taskDir = props.getTaskDir(); |
||||
logger.info("task dir : {}", taskDir); |
||||
|
||||
this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, props.getTaskDir(), props.getTaskAppId(), |
||||
props.getTaskInstId(), props.getTenantCode(), props.getEnvFile(), props.getTaskStartTime(), |
||||
props.getTaskTimeout(), logger); |
||||
|
||||
this.processDao = SpringApplicationContext.getBean(ProcessDao.class); |
||||
} |
||||
|
||||
/** |
||||
* init DataX config |
||||
*/ |
||||
@Override |
||||
public void init() { |
||||
logger.info("datax task params {}", taskProps.getTaskParams()); |
||||
dataXParameters = JSONUtils.parseObject(taskProps.getTaskParams(), DataxParameters.class); |
||||
|
||||
if (!dataXParameters.checkParameters()) { |
||||
throw new RuntimeException("datax task params is not valid"); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* run DataX process |
||||
* |
||||
* @throws Exception |
||||
*/ |
||||
@Override |
||||
public void handle() |
||||
throws Exception { |
||||
try { |
||||
// set the name of the current thread
|
||||
String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); |
||||
Thread.currentThread().setName(threadLoggerInfoName); |
||||
|
||||
// run datax process
|
||||
String jsonFilePath = buildDataxJsonFile(); |
||||
String shellCommandFilePath = buildShellCommandFile(jsonFilePath); |
||||
exitStatusCode = shellCommandExecutor.run(shellCommandFilePath, processDao); |
||||
} |
||||
catch (Exception e) { |
||||
exitStatusCode = -1; |
||||
throw e; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* cancel DataX process |
||||
* |
||||
* @param cancelApplication |
||||
* @throws Exception |
||||
*/ |
||||
@Override |
||||
public void cancelApplication(boolean cancelApplication) |
||||
throws Exception { |
||||
// cancel process
|
||||
shellCommandExecutor.cancelApplication(); |
||||
} |
||||
|
||||
/** |
||||
* build datax configuration file |
||||
* |
||||
* @return |
||||
* @throws Exception |
||||
*/ |
||||
private String buildDataxJsonFile() |
||||
throws Exception { |
||||
// generate json
|
||||
String fileName = String.format("%s/%s_job.json", taskDir, taskProps.getTaskAppId()); |
||||
|
||||
Path path = new File(fileName).toPath(); |
||||
if (Files.exists(path)) { |
||||
return fileName; |
||||
} |
||||
|
||||
JSONObject job = new JSONObject(); |
||||
job.put("content", buildDataxJobContentJson()); |
||||
job.put("setting", buildDataxJobSettingJson()); |
||||
|
||||
JSONObject root = new JSONObject(); |
||||
root.put("job", job); |
||||
root.put("core", buildDataxCoreJson()); |
||||
|
||||
logger.debug("datax job json : {}", root.toString()); |
||||
|
||||
// create datax json file
|
||||
FileUtils.writeStringToFile(new File(fileName), root.toString(), Charset.forName("UTF-8")); |
||||
return fileName; |
||||
} |
||||
|
||||
/** |
||||
* build datax job config |
||||
* |
||||
* @return |
||||
* @throws SQLException |
||||
*/ |
||||
private List<JSONObject> buildDataxJobContentJson() |
||||
throws SQLException { |
||||
DataSource dataSource = processDao.findDataSourceById(dataXParameters.getDataSource()); |
||||
BaseDataSource dataSourceCfg = DataSourceFactory.getDatasource(dataSource.getType(), |
||||
dataSource.getConnectionParams()); |
||||
|
||||
DataSource dataTarget = processDao.findDataSourceById(dataXParameters.getDataTarget()); |
||||
BaseDataSource dataTargetCfg = DataSourceFactory.getDatasource(dataTarget.getType(), |
||||
dataTarget.getConnectionParams()); |
||||
|
||||
List<JSONObject> readerConnArr = new ArrayList<>(); |
||||
JSONObject readerConn = new JSONObject(); |
||||
readerConn.put("querySql", new String[] {dataXParameters.getSql()}); |
||||
readerConn.put("jdbcUrl", new String[] {dataSourceCfg.getJdbcUrl()}); |
||||
readerConnArr.add(readerConn); |
||||
|
||||
JSONObject readerParam = new JSONObject(); |
||||
readerParam.put("username", dataSourceCfg.getUser()); |
||||
readerParam.put("password", dataSourceCfg.getPassword()); |
||||
readerParam.put("connection", readerConnArr); |
||||
|
||||
JSONObject reader = new JSONObject(); |
||||
reader.put("name", DataxUtils.getReaderPluginName(dataSource.getType())); |
||||
reader.put("parameter", readerParam); |
||||
|
||||
List<JSONObject> writerConnArr = new ArrayList<>(); |
||||
JSONObject writerConn = new JSONObject(); |
||||
writerConn.put("table", new String[] {dataXParameters.getTargetTable()}); |
||||
writerConn.put("jdbcUrl", dataTargetCfg.getJdbcUrl()); |
||||
writerConnArr.add(writerConn); |
||||
|
||||
JSONObject writerParam = new JSONObject(); |
||||
writerParam.put("username", dataTargetCfg.getUser()); |
||||
writerParam.put("password", dataTargetCfg.getPassword()); |
||||
writerParam.put("column", |
||||
parsingSqlColumnNames(dataSource.getType(), dataTarget.getType(), dataSourceCfg, dataXParameters.getSql())); |
||||
writerParam.put("connection", writerConnArr); |
||||
|
||||
if (CollectionUtils.isNotEmpty(dataXParameters.getPreStatements())) { |
||||
writerParam.put("preSql", dataXParameters.getPreStatements()); |
||||
} |
||||
|
||||
if (CollectionUtils.isNotEmpty(dataXParameters.getPostStatements())) { |
||||
writerParam.put("postSql", dataXParameters.getPostStatements()); |
||||
} |
||||
|
||||
JSONObject writer = new JSONObject(); |
||||
writer.put("name", DataxUtils.getWriterPluginName(dataTarget.getType())); |
||||
writer.put("parameter", writerParam); |
||||
|
||||
List<JSONObject> contentList = new ArrayList<>(); |
||||
JSONObject content = new JSONObject(); |
||||
content.put("reader", reader); |
||||
content.put("writer", writer); |
||||
contentList.add(content); |
||||
|
||||
return contentList; |
||||
} |
||||
|
||||
/** |
||||
* build datax setting config |
||||
* |
||||
* @return |
||||
*/ |
||||
private JSONObject buildDataxJobSettingJson() { |
||||
JSONObject speed = new JSONObject(); |
||||
speed.put("channel", DATAX_CHANNEL_COUNT); |
||||
|
||||
if (dataXParameters.getJobSpeedByte() > 0) { |
||||
speed.put("byte", dataXParameters.getJobSpeedByte()); |
||||
} |
||||
|
||||
if (dataXParameters.getJobSpeedRecord() > 0) { |
||||
speed.put("record", dataXParameters.getJobSpeedRecord()); |
||||
} |
||||
|
||||
JSONObject errorLimit = new JSONObject(); |
||||
errorLimit.put("record", 0); |
||||
errorLimit.put("percentage", 0); |
||||
|
||||
JSONObject setting = new JSONObject(); |
||||
setting.put("speed", speed); |
||||
setting.put("errorLimit", errorLimit); |
||||
|
||||
return setting; |
||||
} |
||||
|
||||
private JSONObject buildDataxCoreJson() { |
||||
JSONObject speed = new JSONObject(); |
||||
speed.put("channel", DATAX_CHANNEL_COUNT); |
||||
|
||||
if (dataXParameters.getJobSpeedByte() > 0) { |
||||
speed.put("byte", dataXParameters.getJobSpeedByte()); |
||||
} |
||||
|
||||
if (dataXParameters.getJobSpeedRecord() > 0) { |
||||
speed.put("record", dataXParameters.getJobSpeedRecord()); |
||||
} |
||||
|
||||
JSONObject channel = new JSONObject(); |
||||
channel.put("speed", speed); |
||||
|
||||
JSONObject transport = new JSONObject(); |
||||
transport.put("channel", channel); |
||||
|
||||
JSONObject core = new JSONObject(); |
||||
core.put("transport", transport); |
||||
|
||||
return core; |
||||
} |
||||
|
||||
/** |
||||
* create command |
||||
* |
||||
* @return |
||||
* @throws Exception |
||||
*/ |
||||
private String buildShellCommandFile(String jobConfigFilePath) |
||||
throws Exception { |
||||
// generate scripts
|
||||
String fileName = String.format("%s/%s_node.sh", taskDir, taskProps.getTaskAppId()); |
||||
Path path = new File(fileName).toPath(); |
||||
|
||||
if (Files.exists(path)) { |
||||
return fileName; |
||||
} |
||||
|
||||
// datax python command
|
||||
StringBuilder sbr = new StringBuilder(); |
||||
sbr.append(DATAX_PYTHON); |
||||
sbr.append(" "); |
||||
sbr.append(DATAX_HOME_EVN); |
||||
sbr.append(" "); |
||||
sbr.append(jobConfigFilePath); |
||||
String dataxCommand = sbr.toString(); |
||||
|
||||
// find process instance by task id
|
||||
ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); |
||||
|
||||
// combining local and global parameters
|
||||
Map<String, Property> paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), |
||||
taskProps.getDefinedParams(), dataXParameters.getLocalParametersMap(), |
||||
processInstance.getCmdTypeIfComplement(), processInstance.getScheduleTime()); |
||||
if (paramsMap != null) { |
||||
dataxCommand = ParameterUtils.convertParameterPlaceholders(dataxCommand, ParamUtils.convert(paramsMap)); |
||||
} |
||||
|
||||
logger.debug("raw script : {}", dataxCommand); |
||||
|
||||
// create shell command file
|
||||
Set<PosixFilePermission> perms = PosixFilePermissions.fromString(Constants.RWXR_XR_X); |
||||
FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms); |
||||
Files.createFile(path, attr); |
||||
Files.write(path, dataxCommand.getBytes(), StandardOpenOption.APPEND); |
||||
|
||||
return fileName; |
||||
} |
||||
|
||||
/** |
||||
* parsing synchronized column names in SQL statements |
||||
* |
||||
* @param dsType |
||||
* the database type of the data source |
||||
* @param dtType |
||||
* the database type of the data target |
||||
* @param dataSourceCfg |
||||
* the database connection parameters of the data source |
||||
* @param sql |
||||
* sql for data synchronization |
||||
* @return |
||||
*/ |
||||
private String[] parsingSqlColumnNames(DbType dsType, DbType dtType, BaseDataSource dataSourceCfg, String sql) { |
||||
String[] columnNames = tryGrammaticalAnalysisSqlColumnNames(dsType, sql); |
||||
|
||||
if (columnNames == null || columnNames.length == 0) { |
||||
logger.info("try to execute sql analysis query column name"); |
||||
columnNames = tryExecuteSqlResolveColumnNames(dataSourceCfg, sql); |
||||
} |
||||
|
||||
notNull(columnNames, String.format("parsing sql columns failed : %s", sql)); |
||||
|
||||
return DataxUtils.convertKeywordsColumns(dtType, columnNames); |
||||
} |
||||
|
||||
/** |
||||
* try grammatical parsing column |
||||
* |
||||
* @param dbType |
||||
* database type |
||||
* @param sql |
||||
* sql for data synchronization |
||||
* @return column name array |
||||
* @throws RuntimeException |
||||
*/ |
||||
private String[] tryGrammaticalAnalysisSqlColumnNames(DbType dbType, String sql) { |
||||
String[] columnNames; |
||||
|
||||
try { |
||||
SQLStatementParser parser = DataxUtils.getSqlStatementParser(dbType, sql); |
||||
notNull(parser, String.format("database driver [%s] is not support", dbType.toString())); |
||||
|
||||
SQLStatement sqlStatement = parser.parseStatement(); |
||||
SQLSelectStatement sqlSelectStatement = (SQLSelectStatement)sqlStatement; |
||||
SQLSelect sqlSelect = sqlSelectStatement.getSelect(); |
||||
|
||||
List<SQLSelectItem> selectItemList = null; |
||||
if (sqlSelect.getQuery() instanceof SQLSelectQueryBlock) { |
||||
SQLSelectQueryBlock block = (SQLSelectQueryBlock)sqlSelect.getQuery(); |
||||
selectItemList = block.getSelectList(); |
||||
} else if (sqlSelect.getQuery() instanceof SQLUnionQuery) { |
||||
SQLUnionQuery unionQuery = (SQLUnionQuery)sqlSelect.getQuery(); |
||||
SQLSelectQueryBlock block = (SQLSelectQueryBlock)unionQuery.getRight(); |
||||
selectItemList = block.getSelectList(); |
||||
} |
||||
|
||||
notNull(selectItemList, |
||||
String.format("select query type [%s] is not support", sqlSelect.getQuery().toString())); |
||||
|
||||
columnNames = new String[selectItemList.size()]; |
||||
for (int i = 0; i < selectItemList.size(); i++ ) { |
||||
SQLSelectItem item = selectItemList.get(i); |
||||
|
||||
String columnName = null; |
||||
|
||||
if (item.getAlias() != null) { |
||||
columnName = item.getAlias(); |
||||
} else if (item.getExpr() != null) { |
||||
if (item.getExpr() instanceof SQLPropertyExpr) { |
||||
SQLPropertyExpr expr = (SQLPropertyExpr)item.getExpr(); |
||||
columnName = expr.getName(); |
||||
} else if (item.getExpr() instanceof SQLIdentifierExpr) { |
||||
SQLIdentifierExpr expr = (SQLIdentifierExpr)item.getExpr(); |
||||
columnName = expr.getName(); |
||||
} |
||||
} else { |
||||
throw new RuntimeException( |
||||
String.format("grammatical analysis sql column [ %s ] failed", item.toString())); |
||||
} |
||||
|
||||
if (columnName == null) { |
||||
throw new RuntimeException( |
||||
String.format("grammatical analysis sql column [ %s ] failed", item.toString())); |
||||
} |
||||
|
||||
columnNames[i] = columnName; |
||||
} |
||||
} |
||||
catch (Exception e) { |
||||
logger.warn(e.getMessage(), e); |
||||
return null; |
||||
} |
||||
|
||||
return columnNames; |
||||
} |
||||
|
||||
/** |
||||
* try to execute sql to resolve column names |
||||
* |
||||
* @param baseDataSource |
||||
* the database connection parameters |
||||
* @param sql |
||||
* sql for data synchronization |
||||
* @return column name array |
||||
*/ |
||||
public String[] tryExecuteSqlResolveColumnNames(BaseDataSource baseDataSource, String sql) { |
||||
String[] columnNames; |
||||
sql = String.format("SELECT t.* FROM ( %s ) t WHERE 0 = 1", sql); |
||||
sql = sql.replace(";", ""); |
||||
|
||||
try ( |
||||
Connection connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), baseDataSource.getUser(), |
||||
baseDataSource.getPassword()); |
||||
PreparedStatement stmt = connection.prepareStatement(sql); |
||||
ResultSet resultSet = stmt.executeQuery()) { |
||||
|
||||
ResultSetMetaData md = resultSet.getMetaData(); |
||||
int num = md.getColumnCount(); |
||||
columnNames = new String[num]; |
||||
for (int i = 1; i <= num; i++ ) { |
||||
columnNames[i - 1] = md.getColumnName(i); |
||||
} |
||||
} |
||||
catch (SQLException e) { |
||||
logger.warn(e.getMessage(), e); |
||||
return null; |
||||
} |
||||
|
||||
return columnNames; |
||||
} |
||||
|
||||
/**
 * Expose the parsed DataX task parameters to the generic task framework.
 *
 * @return the DataX parameters this task was initialized with
 */
@Override
public AbstractParameters getParameters() {
    return dataXParameters;
}
||||
|
||||
private void notNull(Object obj, String message) { |
||||
if (obj == null) { |
||||
throw new RuntimeException(message); |
||||
} |
||||
} |
||||
|
||||
} |
@ -1,52 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<!-- |
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more |
||||
~ contributor license agreements. See the NOTICE file distributed with |
||||
~ this work for additional information regarding copyright ownership. |
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
~ (the "License"); you may not use this file except in compliance with |
||||
~ the License. You may obtain a copy of the License at |
||||
~ |
||||
~ http://www.apache.org/licenses/LICENSE-2.0 |
||||
~ |
||||
~ Unless required by applicable law or agreed to in writing, software |
||||
~ distributed under the License is distributed on an "AS IS" BASIS, |
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
~ See the License for the specific language governing permissions and |
||||
~ limitations under the License. |
||||
--> |
||||
|
||||
<!-- Logback configuration. See http://logback.qos.ch/manual/index.html --> |
||||
<configuration scan="true" scanPeriod="120 seconds"> <!--debug="true" --> |
||||
<property name="log.base" value="logs" /> |
||||
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
|
||||
<appender name="MASTERLOGFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||
<file>${log.base}/dolphinscheduler-master.log</file> |
||||
<filter class="org.apache.dolphinscheduler.server.master.log.MasterLogFilter"> |
||||
<level>INFO</level> |
||||
</filter> |
||||
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
||||
<fileNamePattern>${log.base}/dolphinscheduler-master.%d{yyyy-MM-dd_HH}.%i.log</fileNamePattern> |
||||
<maxHistory>168</maxHistory> |
||||
<maxFileSize>200MB</maxFileSize> |
||||
</rollingPolicy> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
||||
|
||||
<root level="INFO"> |
||||
<appender-ref ref="MASTERLOGFILE"/> |
||||
</root> |
||||
</configuration> |
@ -0,0 +1,108 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.server.utils; |
||||
|
||||
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser; |
||||
import com.alibaba.druid.sql.dialect.oracle.parser.OracleStatementParser; |
||||
import com.alibaba.druid.sql.dialect.postgresql.parser.PGSQLStatementParser; |
||||
import com.alibaba.druid.sql.dialect.sqlserver.parser.SQLServerStatementParser; |
||||
import org.apache.dolphinscheduler.common.enums.DbType; |
||||
import org.junit.Assert; |
||||
import org.junit.Test; |
||||
import static org.junit.Assert.assertEquals; |
||||
import static org.junit.Assert.assertTrue; |
||||
|
||||
/** |
||||
* DataxUtils Tester. |
||||
*/ |
||||
public class DataxUtilsTest { |
||||
|
||||
/** |
||||
* |
||||
* Method: getReaderPluginName(DbType dbType) |
||||
* |
||||
*/ |
||||
@Test |
||||
public void testGetReaderPluginName() { |
||||
assertEquals(DataxUtils.DATAX_READER_PLUGIN_MYSQL, DataxUtils.getReaderPluginName(DbType.MYSQL)); |
||||
assertEquals(DataxUtils.DATAX_READER_PLUGIN_POSTGRESQL, DataxUtils.getReaderPluginName(DbType.POSTGRESQL)); |
||||
assertEquals(DataxUtils.DATAX_READER_PLUGIN_SQLSERVER, DataxUtils.getReaderPluginName(DbType.SQLSERVER)); |
||||
assertEquals(DataxUtils.DATAX_READER_PLUGIN_ORACLE, DataxUtils.getReaderPluginName(DbType.ORACLE)); |
||||
assertTrue(DataxUtils.getReaderPluginName(DbType.DB2) == null); |
||||
} |
||||
|
||||
/** |
||||
* |
||||
* Method: getWriterPluginName(DbType dbType) |
||||
* |
||||
*/ |
||||
@Test |
||||
public void testGetWriterPluginName() { |
||||
assertEquals(DataxUtils.DATAX_WRITER_PLUGIN_MYSQL, DataxUtils.getWriterPluginName(DbType.MYSQL)); |
||||
assertEquals(DataxUtils.DATAX_WRITER_PLUGIN_POSTGRESQL, DataxUtils.getWriterPluginName(DbType.POSTGRESQL)); |
||||
assertEquals(DataxUtils.DATAX_WRITER_PLUGIN_SQLSERVER, DataxUtils.getWriterPluginName(DbType.SQLSERVER)); |
||||
assertEquals(DataxUtils.DATAX_WRITER_PLUGIN_ORACLE, DataxUtils.getWriterPluginName(DbType.ORACLE)); |
||||
assertTrue(DataxUtils.getWriterPluginName(DbType.DB2) == null); |
||||
} |
||||
|
||||
/** |
||||
* |
||||
* Method: getSqlStatementParser(DbType dbType, String sql) |
||||
* |
||||
*/ |
||||
@Test |
||||
public void testGetSqlStatementParser() throws Exception { |
||||
assertTrue(DataxUtils.getSqlStatementParser(DbType.MYSQL, "select 1") instanceof MySqlStatementParser); |
||||
assertTrue(DataxUtils.getSqlStatementParser(DbType.POSTGRESQL, "select 1") instanceof PGSQLStatementParser); |
||||
assertTrue(DataxUtils.getSqlStatementParser(DbType.ORACLE, "select 1") instanceof OracleStatementParser); |
||||
assertTrue(DataxUtils.getSqlStatementParser(DbType.SQLSERVER, "select 1") instanceof SQLServerStatementParser); |
||||
assertTrue(DataxUtils.getSqlStatementParser(DbType.DB2, "select 1") == null); |
||||
} |
||||
|
||||
/** |
||||
* |
||||
* Method: convertKeywordsColumns(DbType dbType, String[] columns) |
||||
* |
||||
*/ |
||||
@Test |
||||
public void testConvertKeywordsColumns() throws Exception { |
||||
String[] fromColumns = new String[]{"`select`", "from", "\"where\"", " table "}; |
||||
String[] targetColumns = new String[]{"`select`", "`from`", "`where`", "`table`"}; |
||||
|
||||
String[] toColumns = DataxUtils.convertKeywordsColumns(DbType.MYSQL, fromColumns); |
||||
|
||||
assertTrue(fromColumns.length == toColumns.length); |
||||
|
||||
for (int i = 0; i < toColumns.length; i++) { |
||||
assertEquals(targetColumns[i], toColumns[i]); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* |
||||
* Method: doConvertKeywordsColumn(DbType dbType, String column) |
||||
* |
||||
*/ |
||||
@Test |
||||
public void testDoConvertKeywordsColumn() throws Exception { |
||||
assertEquals("`select`", DataxUtils.doConvertKeywordsColumn(DbType.MYSQL, " \"`select`\" ")); |
||||
assertEquals("\"select\"", DataxUtils.doConvertKeywordsColumn(DbType.POSTGRESQL, " \"`select`\" ")); |
||||
assertEquals("`select`", DataxUtils.doConvertKeywordsColumn(DbType.SQLSERVER, " \"`select`\" ")); |
||||
assertEquals("\"select\"", DataxUtils.doConvertKeywordsColumn(DbType.ORACLE, " \"`select`\" ")); |
||||
assertEquals("select", DataxUtils.doConvertKeywordsColumn(DbType.DB2, " \"`select`\" ")); |
||||
} |
||||
} |
@ -1,44 +0,0 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
|
||||
package org.apache.dolphinscheduler.server.utils; |
||||
|
||||
import org.apache.dolphinscheduler.common.utils.DateUtils; |
||||
import org.junit.Test; |
||||
import java.util.Date; |
||||
import static org.junit.Assert.assertEquals; |
||||
|
||||
/** |
||||
* Test ScheduleUtils |
||||
*/ |
||||
public class ScheduleUtilsTest { |
||||
|
||||
/** |
||||
* Test the getRecentTriggerTime method |
||||
*/ |
||||
@Test |
||||
public void testGetRecentTriggerTime() { |
||||
Date from = DateUtils.stringToDate("2020-01-01 00:00:00"); |
||||
Date to = DateUtils.stringToDate("2020-01-31 01:00:00"); |
||||
// test date
|
||||
assertEquals(0, ScheduleUtils.getRecentTriggerTime("0 0 0 * * ? ", to, from).size()); |
||||
// test error cron
|
||||
assertEquals(0, ScheduleUtils.getRecentTriggerTime("0 0 0 * *", from, to).size()); |
||||
// test cron
|
||||
assertEquals(31, ScheduleUtils.getRecentTriggerTime("0 0 0 * * ? ", from, to).size()); |
||||
} |
||||
} |
@ -1,92 +0,0 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.server.worker.log; |
||||
|
||||
|
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.apache.dolphinscheduler.server.utils.SensitiveLogUtil; |
||||
import org.junit.Assert; |
||||
import org.junit.Test; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
|
||||
import java.util.regex.Matcher; |
||||
import java.util.regex.Pattern; |
||||
|
||||
public class SensitiveDataConverterTest { |
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(SensitiveDataConverterTest.class); |
||||
|
||||
/** |
||||
* password pattern |
||||
*/ |
||||
private final Pattern pwdPattern = Pattern.compile(Constants.DATASOURCE_PASSWORD_REGEX); |
||||
|
||||
|
||||
/** |
||||
* mask sensitive logMsg - sql task datasource password |
||||
*/ |
||||
@Test |
||||
public void testPwdLogMsgConverter() { |
||||
|
||||
String logMsg = "{\"address\":\"jdbc:mysql://192.168.xx.xx:3306\"," + |
||||
"\"database\":\"carbond\"," + |
||||
"\"jdbcUrl\":\"jdbc:mysql://192.168.xx.xx:3306/ods\"," + |
||||
"\"user\":\"view\"," + |
||||
"\"password\":\"view1\"}"; |
||||
|
||||
String maskLogMsg = "{\"address\":\"jdbc:mysql://192.168.xx.xx:3306\"," + |
||||
"\"database\":\"carbond\"," + |
||||
"\"jdbcUrl\":\"jdbc:mysql://192.168.xx.xx:3306/ods\"," + |
||||
"\"user\":\"view\"," + |
||||
"\"password\":\"******\"}"; |
||||
|
||||
|
||||
logger.info("parameter : {}", logMsg); |
||||
logger.info("parameter : {}", passwordHandler(pwdPattern, logMsg)); |
||||
|
||||
Assert.assertNotEquals(logMsg, passwordHandler(pwdPattern, logMsg)); |
||||
Assert.assertEquals(maskLogMsg, passwordHandler(pwdPattern, logMsg)); |
||||
|
||||
} |
||||
|
||||
/** |
||||
* password regex test |
||||
* |
||||
* @param logMsg original log |
||||
*/ |
||||
private static String passwordHandler(Pattern pattern, String logMsg) { |
||||
|
||||
Matcher matcher = pattern.matcher(logMsg); |
||||
|
||||
StringBuffer sb = new StringBuffer(logMsg.length()); |
||||
|
||||
while (matcher.find()) { |
||||
|
||||
String password = matcher.group(); |
||||
|
||||
String maskPassword = SensitiveLogUtil.maskDataSourcePwd(password); |
||||
|
||||
matcher.appendReplacement(sb, maskPassword); |
||||
} |
||||
matcher.appendTail(sb); |
||||
|
||||
return sb.toString(); |
||||
} |
||||
|
||||
|
||||
} |
@ -0,0 +1,352 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package org.apache.dolphinscheduler.server.worker.task.datax; |
||||
|
||||
|
||||
import java.lang.reflect.Method; |
||||
import java.util.Arrays; |
||||
import java.util.Date; |
||||
import java.util.List; |
||||
|
||||
import com.alibaba.fastjson.JSONObject; |
||||
import org.apache.dolphinscheduler.common.enums.CommandType; |
||||
import org.apache.dolphinscheduler.common.enums.DbType; |
||||
import org.apache.dolphinscheduler.common.job.db.BaseDataSource; |
||||
import org.apache.dolphinscheduler.common.job.db.DataSourceFactory; |
||||
import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; |
||||
import org.apache.dolphinscheduler.dao.ProcessDao; |
||||
import org.apache.dolphinscheduler.dao.entity.DataSource; |
||||
import org.apache.dolphinscheduler.dao.entity.ProcessInstance; |
||||
import org.apache.dolphinscheduler.server.utils.DataxUtils; |
||||
import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; |
||||
import org.apache.dolphinscheduler.server.worker.task.TaskProps; |
||||
import org.junit.After; |
||||
import org.junit.Assert; |
||||
import org.junit.Before; |
||||
import org.junit.Test; |
||||
import org.mockito.Mockito; |
||||
import org.powermock.api.mockito.PowerMockito; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.context.ApplicationContext; |
||||
|
||||
/**
 * DataxTask Tester.
 *
 * Exercises DataxTask end to end against mocked ProcessDao /
 * ShellCommandExecutor, and drives the private builder/parser methods via
 * reflection. NOTE(review): several tests assume the DataX/shell binaries
 * are absent in the test environment — confirm when running elsewhere.
 */
public class DataxTaskTest {

    private static final Logger logger = LoggerFactory.getLogger(DataxTaskTest.class);

    // system under test, wrapped in a PowerMockito spy
    private DataxTask dataxTask;

    // mocked DAO returned by the static Spring context lookup
    private ProcessDao processDao;

    private ShellCommandExecutor shellCommandExecutor;

    private ApplicationContext applicationContext;

    @Before
    public void before()
        throws Exception {
        processDao = Mockito.mock(ProcessDao.class);
        shellCommandExecutor = Mockito.mock(ShellCommandExecutor.class);

        // route SpringApplicationContext.getBean(ProcessDao.class) to the mock
        applicationContext = Mockito.mock(ApplicationContext.class);
        SpringApplicationContext springApplicationContext = new SpringApplicationContext();
        springApplicationContext.setApplicationContext(applicationContext);
        Mockito.when(applicationContext.getBean(ProcessDao.class)).thenReturn(processDao);

        // minimal task definition: MYSQL -> MYSQL sync with pre/post statements
        TaskProps props = new TaskProps();
        props.setTaskDir("/tmp");
        props.setTaskAppId(String.valueOf(System.currentTimeMillis()));
        props.setTaskInstId(1);
        props.setTenantCode("1");
        props.setEnvFile(".dolphinscheduler_env.sh");
        props.setTaskStartTime(new Date());
        props.setTaskTimeout(0);
        props.setTaskParams(
            "{\"targetTable\":\"test\",\"postStatements\":[],\"jobSpeedByte\":1024,\"jobSpeedRecord\":1000,\"dtType\":\"MYSQL\",\"datasource\":1,\"dsType\":\"MYSQL\",\"datatarget\":2,\"jobSpeedByte\":0,\"sql\":\"select 1 as test from dual\",\"preStatements\":[\"delete from test\"],\"postStatements\":[\"delete from test\"]}");
        dataxTask = PowerMockito.spy(new DataxTask(props, logger));
        dataxTask.init();

        // datasource ids 1 (source) and 2 (target) both resolve to the same mock
        Mockito.when(processDao.findDataSourceById(1)).thenReturn(getDataSource());
        Mockito.when(processDao.findDataSourceById(2)).thenReturn(getDataSource());
        Mockito.when(processDao.findProcessInstanceByTaskId(1)).thenReturn(getProcessInstance());

        String fileName = String.format("%s/%s_node.sh", props.getTaskDir(), props.getTaskAppId());
        Mockito.when(shellCommandExecutor.run(fileName, processDao)).thenReturn(0);
    }

    // builds a MYSQL datasource fixture with inline JSON connection params
    private DataSource getDataSource() {
        DataSource dataSource = new DataSource();
        dataSource.setType(DbType.MYSQL);
        dataSource.setConnectionParams(
            "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\"}");
        dataSource.setUserId(1);
        return dataSource;
    }

    // minimal process-instance fixture for scheduling-related lookups
    private ProcessInstance getProcessInstance() {
        ProcessInstance processInstance = new ProcessInstance();
        processInstance.setCommandType(CommandType.START_PROCESS);
        processInstance.setScheduleTime(new Date());
        return processInstance;
    }

    @After
    public void after()
        throws Exception {}

    /**
     * Method: DataxTask()
     */
    @Test
    public void testDataxTask()
        throws Exception {
        TaskProps props = new TaskProps();
        props.setTaskDir("/tmp");
        props.setTaskAppId(String.valueOf(System.currentTimeMillis()));
        props.setTaskInstId(1);
        props.setTenantCode("1");
        Assert.assertNotNull(new DataxTask(props, logger));
    }

    /**
     * Method: init
     */
    @Test
    public void testInit()
        throws Exception {
        try {
            dataxTask.init();
        } catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Method: handle()
     */
    @Test
    public void testHandle()
        throws Exception {
        try {
            dataxTask.handle();
        } catch (RuntimeException e) {
            // the spawned shell is expected to fail (exit code -1) here;
            // any other failure message is a genuine error
            if (e.getMessage().indexOf("process error . exitCode is :  -1") < 0) {
                Assert.fail();
            }
        }
    }

    /**
     * Method: cancelApplication()
     */
    @Test
    public void testCancelApplication()
        throws Exception {
        try {
            dataxTask.cancelApplication(true);
        } catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Method: parsingSqlColumnNames(DbType dsType, DbType dtType, BaseDataSource
     * dataSourceCfg, String sql)
     */
    @Test
    public void testParsingSqlColumnNames()
        throws Exception {
        try {
            BaseDataSource dataSource = DataSourceFactory.getDatasource(getDataSource().getType(),
                getDataSource().getConnectionParams());

            // private method: invoked via reflection
            Method method = DataxTask.class.getDeclaredMethod("parsingSqlColumnNames", DbType.class, DbType.class, BaseDataSource.class, String.class);
            method.setAccessible(true);
            String[] columns = (String[]) method.invoke(dataxTask, DbType.MYSQL, DbType.MYSQL, dataSource, "select 1 as a, 2 as `table` from dual");

            Assert.assertNotNull(columns);

            Assert.assertTrue(columns.length == 2);

            // column names come back keyword-quoted for MySQL
            Assert.assertEquals("[`a`, `table`]", Arrays.toString(columns));
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Method: tryGrammaticalParsingSqlColumnNames(DbType dbType, String sql)
     */
    @Test
    public void testTryGrammaticalAnalysisSqlColumnNames()
        throws Exception {
        try {
            Method method = DataxTask.class.getDeclaredMethod("tryGrammaticalAnalysisSqlColumnNames", DbType.class, String.class);
            method.setAccessible(true);
            // union of two selects still resolves to the two projected columns
            String[] columns = (String[]) method.invoke(dataxTask, DbType.MYSQL, "select t1.a, t1.b from test t1 union all select a, t2.b from (select a, b from test) t2");

            Assert.assertNotNull(columns);

            Assert.assertTrue(columns.length == 2);

            Assert.assertEquals("[a, b]", Arrays.toString(columns));
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Method: tryExecuteSqlResolveColumnNames(BaseDataSource baseDataSource,
     * String sql)
     */
    @Test
    public void testTryExecuteSqlResolveColumnNames()
        throws Exception {
        // TODO: Test goes here... (requires a live database connection)
    }

    /**
     * Method: buildDataxJsonFile()
     */
    @Test
    public void testBuildDataxJsonFile()
        throws Exception {
        try {
            Method method = DataxTask.class.getDeclaredMethod("buildDataxJsonFile");
            method.setAccessible(true);
            String filePath = (String) method.invoke(dataxTask, null);
            Assert.assertNotNull(filePath);
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Method: buildDataxJobContentJson()
     */
    @Test
    public void testBuildDataxJobContentJson()
        throws Exception {
        try {
            Method method = DataxTask.class.getDeclaredMethod("buildDataxJobContentJson");
            method.setAccessible(true);
            List<JSONObject> contentList = (List<JSONObject>) method.invoke(dataxTask, null);
            Assert.assertNotNull(contentList);

            // reader/writer plugin names must match the MYSQL source/target
            JSONObject content = contentList.get(0);
            JSONObject reader = (JSONObject) content.get("reader");
            Assert.assertNotNull(reader);

            String readerPluginName = (String) reader.get("name");
            Assert.assertEquals(DataxUtils.DATAX_READER_PLUGIN_MYSQL, readerPluginName);

            JSONObject writer = (JSONObject) content.get("writer");
            Assert.assertNotNull(writer);

            String writerPluginName = (String) writer.get("name");
            Assert.assertEquals(DataxUtils.DATAX_WRITER_PLUGIN_MYSQL, writerPluginName);
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Method: buildDataxJobSettingJson()
     */
    @Test
    public void testBuildDataxJobSettingJson()
        throws Exception {
        try {
            Method method = DataxTask.class.getDeclaredMethod("buildDataxJobSettingJson");
            method.setAccessible(true);
            JSONObject setting = (JSONObject) method.invoke(dataxTask, null);
            Assert.assertNotNull(setting);
            Assert.assertNotNull(setting.get("speed"));
            Assert.assertNotNull(setting.get("errorLimit"));
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Method: buildDataxCoreJson()
     */
    @Test
    public void testBuildDataxCoreJson()
        throws Exception {
        try {
            Method method = DataxTask.class.getDeclaredMethod("buildDataxCoreJson");
            method.setAccessible(true);
            JSONObject coreConfig = (JSONObject) method.invoke(dataxTask, null);
            Assert.assertNotNull(coreConfig);
            Assert.assertNotNull(coreConfig.get("transport"));
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Method: buildShellCommandFile(String jobConfigFilePath)
     */
    @Test
    public void testBuildShellCommandFile()
        throws Exception {
        try {
            Method method = DataxTask.class.getDeclaredMethod("buildShellCommandFile", String.class);
            method.setAccessible(true);
            Assert.assertNotNull(method.invoke(dataxTask, "test.json"));
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Method: getParameters
     */
    @Test
    public void testGetParameters()
        throws Exception {
        Assert.assertTrue(dataxTask.getParameters() != null);
    }

    /**
     * Method: notNull(Object obj, String message)
     */
    @Test
    public void testNotNull()
        throws Exception {
        try {
            Method method = DataxTask.class.getDeclaredMethod("notNull", Object.class, String.class);
            method.setAccessible(true);
            // non-null argument must pass silently
            method.invoke(dataxTask, "abc", "test throw RuntimeException");
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

}
@ -0,0 +1,292 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0 |
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
<template> |
||||
<div class="datax-model"> |
||||
<m-list-box> |
||||
<div slot="text">{{$t('Datasource')}}</div> |
||||
<div slot="content"> |
||||
<m-datasource |
||||
ref="refDs" |
||||
@on-dsData="_onDsData" |
||||
:supportType="['MYSQL','POSTGRESQL', 'ORACLE', 'SQLSERVER']" |
||||
:data="{ type:dsType,datasource:datasource }"> |
||||
</m-datasource> |
||||
</div> |
||||
</m-list-box> |
||||
<m-list-box> |
||||
<div slot="text">{{$t('SQL Statement')}}</div> |
||||
<div slot="content"> |
||||
<div class="from-mirror"> |
||||
<textarea |
||||
id="code-sql-mirror" |
||||
name="code-sql-mirror" |
||||
style="opacity: 0;"> |
||||
</textarea> |
||||
</div> |
||||
</div> |
||||
</m-list-box> |
||||
<m-list-box> |
||||
<div slot="text">{{$t('TargetDataBase')}}</div> |
||||
<div slot="content"> |
||||
<m-datasource |
||||
ref="refDt" |
||||
@on-dsData="_onDtData" |
||||
:supportType="['MYSQL','POSTGRESQL', 'ORACLE', 'SQLSERVER']" |
||||
:data="{ type:dtType,datasource:datatarget }"> |
||||
</m-datasource> |
||||
</div> |
||||
</m-list-box> |
||||
<m-list-box> |
||||
<div slot="text">{{$t('TargetTable')}}</div> |
||||
<div slot="content"> |
||||
<x-input |
||||
type="input" |
||||
v-model="targetTable" |
||||
:placeholder="$t('Please enter the table of target')" |
||||
autocomplete="off"> |
||||
</x-input> |
||||
</div> |
||||
</m-list-box> |
||||
<m-list-box> |
||||
<div slot="text">{{$t('TargetDataBase')}}{{$t('Pre Statement')}}</div> |
||||
<div slot="content"> |
||||
<m-statement-list |
||||
ref="refPreStatements" |
||||
@on-statement-list="_onPreStatements" |
||||
:statement-list="preStatements"> |
||||
</m-statement-list> |
||||
</div> |
||||
</m-list-box> |
||||
<m-list-box> |
||||
<div slot="text">{{$t('TargetDataBase')}}{{$t('Post Statement')}}</div> |
||||
<div slot="content"> |
||||
<m-statement-list |
||||
ref="refPostStatements" |
||||
@on-statement-list="_onPostStatements" |
||||
:statement-list="postStatements"> |
||||
</m-statement-list> |
||||
</div> |
||||
</m-list-box> |
||||
<m-list-box> |
||||
<div slot="text"> |
||||
<span>{{$t('SpeedByte')}}</span> |
||||
</div> |
||||
<div slot="content"> |
||||
<m-select-input v-model="jobSpeedByte" :list="[0,1,10,50,100,512]"> |
||||
</m-select-input> |
||||
<span>({{$t('0 means unlimited by byte')}})</span> |
||||
</div> |
||||
</m-list-box> |
||||
<m-list-box> |
||||
<div slot="text"> |
||||
<span>{{$t('SpeedRecord')}}</span> |
||||
</div> |
||||
<div slot="content"> |
||||
<m-select-input v-model="jobSpeedRecord" :list="[0,500,1000,1500,2000,2500,3000]"> |
||||
</m-select-input> |
||||
<span>({{$t('0 means unlimited by count')}})</span> |
||||
</div> |
||||
</m-list-box> |
||||
</div> |
||||
</template> |
||||
<script> |
||||
import _ from 'lodash' |
||||
import i18n from '@/module/i18n' |
||||
import mListBox from './_source/listBox' |
||||
import mDatasource from './_source/datasource' |
||||
import mLocalParams from './_source/localParams' |
||||
import mStatementList from './_source/statementList' |
||||
import disabledState from '@/module/mixin/disabledState' |
||||
import mSelectInput from '../_source/selectInput' |
||||
import codemirror from '@/conf/home/pages/resource/pages/file/pages/_source/codemirror' |
||||
|
||||
let editor |
||||
|
||||
export default { |
||||
name: 'datax', |
||||
|
||||
data () {
  return {
    // Data source type
    dsType: '',
    // data source
    datasource: '',
    // Data target type
    dtType: '',
    // data target
    datatarget: '',
    // Return to the selected data source
    rtDatasource: '',
    // Return to the selected data target
    rtDatatarget: '',
    // Sql statement
    sql: '',
    // target table
    targetTable: '',
    // Pre statements
    preStatements: [],
    // Post statements
    postStatements: [],
    // speed byte (edited in KB in the UI; multiplied by 1024 on submit)
    jobSpeedByte: 0,
    // speed record
    jobSpeedRecord: 1000,
  }
},
||||
mixins: [disabledState], |
||||
props: { |
||||
backfillItem: Object, |
||||
createNodeId: Number |
||||
}, |
||||
methods: { |
||||
/**
 * Receive the selected source datasource from the m-datasource child.
 * @param {Object} o - { type, datasource } emitted via on-dsData
 */
_onDsData (o) {
  this.dsType = o.type
  this.rtDatasource = o.datasource
},
||||
/**
 * Receive the selected target datasource from the m-datasource child.
 * @param {Object} o - { type, datasource } emitted via on-dsData
 */
_onDtData (o) {
  this.dtType = o.type
  this.rtDatatarget = o.datasource
},
||||
/**
 * Receive the pre-statement list from the m-statement-list child.
 * @param {Array} a - SQL statements to run before the sync
 */
_onPreStatements (a) {
  this.preStatements = a
},
||||
/**
 * Receive the post-statement list from the m-statement-list child.
 * @param {Array} a - SQL statements to run after the sync
 */
_onPostStatements (a) {
  this.postStatements = a
},
||||
/**
 * Validate the form and, when every check passes, emit the assembled
 * task params to the parent via the 'on-params' event.
 * @return {Boolean} true when validation succeeded and params were emitted
 */
_verification () {
  // the SQL statement lives in the codemirror editor, not in data()
  if (!editor.getValue()) {
    this.$message.warning(`${i18n.$t('Please enter a SQL Statement(required)')}`)
    return false
  }

  // source datasource subcomponent verification
  if (!this.$refs.refDs._verifDatasource()) {
    return false
  }

  // target datasource subcomponent verification
  if (!this.$refs.refDt._verifDatasource()) {
    return false
  }

  if (!this.targetTable) {
    this.$message.warning(`${i18n.$t('Please enter a Target Table(required)')}`)
    return false
  }

  // preStatements subcomponent verification
  if (!this.$refs.refPreStatements._verifProp()) {
    return false
  }

  // postStatements subcomponent verification
  if (!this.$refs.refPostStatements._verifProp()) {
    return false
  }

  // storage: hand the collected params back to the parent node
  this.$emit('on-params', {
    dsType: this.dsType,
    dataSource: this.rtDatasource,
    dtType: this.dtType,
    dataTarget: this.rtDatatarget,
    sql: editor.getValue(),
    targetTable: this.targetTable,
    // UI value is in KB; persisted value is in bytes
    jobSpeedByte: this.jobSpeedByte * 1024,
    jobSpeedRecord: this.jobSpeedRecord,
    preStatements: this.preStatements,
    postStatements: this.postStatements
  })
  return true
},
||||
/** |
||||
* Processing code highlighting |
||||
*/ |
||||
_handlerEditor () { |
||||
// editor |
||||
editor = codemirror('code-sql-mirror', { |
||||
mode: 'sql', |
||||
readOnly: this.isDetails |
||||
}) |
||||
|
||||
this.keypress = () => { |
||||
if (!editor.getOption('readOnly')) { |
||||
editor.showHint({ |
||||
completeSingle: false |
||||
}) |
||||
} |
||||
} |
||||
|
||||
// Monitor keyboard |
||||
editor.on('keypress', this.keypress) |
||||
|
||||
editor.setValue(this.sql) |
||||
|
||||
return editor |
||||
} |
||||
}, |
||||
created () { |
||||
let o = this.backfillItem |
||||
|
||||
// Non-null objects represent backfill |
||||
if (!_.isEmpty(o)) { |
||||
// backfill |
||||
this.dsType = o.params.dsType || '' |
||||
this.datasource = o.params.dataSource || '' |
||||
this.dtType = o.params.dtType || '' |
||||
this.datatarget = o.params.dataTarget || '' |
||||
this.sql = o.params.sql || '' |
||||
this.targetTable = o.params.targetTable || '' |
||||
this.jobSpeedByte = o.params.jobSpeedByte / 1024 || 0 |
||||
this.jobSpeedRecord = o.params.jobSpeedRecord || 0 |
||||
this.preStatements = o.params.preStatements || [] |
||||
this.postStatements = o.params.postStatements || [] |
||||
} |
||||
}, |
||||
mounted () { |
||||
setTimeout(() => { |
||||
this._handlerEditor() |
||||
}, 200) |
||||
}, |
||||
destroyed () { |
||||
/** |
||||
* Destroy the editor instance |
||||
*/ |
||||
if (editor) { |
||||
editor.toTextArea() // Uninstall |
||||
editor.off($('.code-sql-mirror'), 'keypress', this.keypress) |
||||
} |
||||
}, |
||||
computed: {}, |
||||
components: { mListBox, mDatasource, mLocalParams, mStatementList, mSelectInput } |
||||
} |
||||
</script> |
After Width: | Height: | Size: 571 B |
After Width: | Height: | Size: 3.0 KiB |
Loading…
Reference in new issue