
[Feature][datasource] Change DataSource Connection Pool from Druid to HikariCP (#6490) (#6828)

* [Feature][datasource] Change DataSource Connection Pool from Druid to HikariCP (#6490)

* fix pom dep scope

* filter alert

* filter alert

* filter alert

* fix checkstyle

* remove hikaricp version

* update pom

* add properties

* add properties

* rename properties

* fix e2e fail

* rename properties
Branch: 3.0.0/version-upgrade · committed by mask (via GitHub) · parent commit a998415afe
21 changed files:

  1. docker/build/conf/dolphinscheduler/quartz.properties.tpl (2 lines changed)
  2. dolphinscheduler-api/src/main/resources/application-api.properties (0 lines changed)
  3. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (30 lines changed)
  4. dolphinscheduler-dao/pom.xml (4 lines changed)
  5. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java (4 lines changed)
  6. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java (2 lines changed)
  7. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java (57 lines changed)
  8. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java (2 lines changed)
  9. dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/pom.xml (10 lines changed)
  10. dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClient.java (16 lines changed)
  11. dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProvider.java (55 lines changed)
  12. dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClientTest.java (4 lines changed)
  13. dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProviderTest.java (12 lines changed)
  14. dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java (4 lines changed)
  15. dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/PythonGatewayServer.java (3 lines changed)
  16. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/HikariConnectionProvider.java (19 lines changed)
  17. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java (2 lines changed)
  18. dolphinscheduler-service/src/main/resources/quartz.properties (2 lines changed)
  19. dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/server/StandaloneServer.java (2 lines changed)
  20. dolphinscheduler-standalone-server/src/main/resources/application-h2.properties (16 lines changed)
  21. script/dolphinscheduler-daemon.sh (4 lines changed)

docker/build/conf/dolphinscheduler/quartz.properties.tpl (2 lines changed)

@@ -51,4 +51,4 @@
#============================================================================
# Configure Datasources
#============================================================================
#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider
#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.HikariConnectionProvider

dolphinscheduler-api/src/main/resources/application.properties → dolphinscheduler-api/src/main/resources/application-api.properties (renamed, 0 lines changed)

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (30 lines changed)

@@ -467,39 +467,37 @@ public final class Constants {
public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password";
public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout";
public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize";
public static final String SPRING_DATASOURCE_CONNECTION_TIMEOUT = "spring.datasource.connectionTimeout";
public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle";
public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive";
public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait";
public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis";
public static final String SPRING_DATASOURCE_IDLE_TIMEOUT = "spring.datasource.idleTimeout";
public static final String SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS = "spring.datasource.timeBetweenConnectErrorMillis";
public static final String SPRING_DATASOURCE_MAX_LIFE_TIME = "spring.datasource.maxLifetime";
public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis";
public static final String SPRING_DATASOURCE_VALIDATION_TIMEOUT = "spring.datasource.validationTimeout";
public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery";
public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle";
public static final String SPRING_DATASOURCE_LEAK_DETECTION_THRESHOLD = "spring.datasource.leakDetectionThreshold";
public static final String SPRING_DATASOURCE_INITIALIZATION_FAIL_TIMEOUT = "spring.datasource.initializationFailTimeout";
public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow";
public static final String SPRING_DATASOURCE_IS_AUTOCOMMIT = "spring.datasource.isAutoCommit";
public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn";
public static final String SPRING_DATASOURCE_CACHE_PREP_STMTS = "spring.datasource.cachePrepStmts";
public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements";
public static final String SPRING_DATASOURCE_PREP_STMT_CACHE_SIZE = "spring.datasource.prepStmtCacheSize";
public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit";
public static final String SPRING_DATASOURCE_PREP_STMT_CACHE_SQL_LIMIT = "spring.datasource.prepStmtCacheSqlLimit";
public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive";
public static final String CACHE_PREP_STMTS = "cachePrepStmts";
public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize";
public static final String PREP_STMT_CACHE_SIZE = "prepStmtCacheSize";
public static final String DEVELOPMENT = "development";
public static final String PREP_STMT_CACHE_SQL_LIMIT = "prepStmtCacheSqlLimit";
public static final String QUARTZ_PROPERTIES_PATH = "quartz.properties";

dolphinscheduler-dao/pom.xml (4 lines changed)

@@ -106,10 +106,6 @@
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>

dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java (4 lines changed)

@@ -31,7 +31,7 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.alibaba.druid.pool.DruidDataSource;
import com.zaxxer.hikari.HikariDataSource;
/**
@@ -45,7 +45,7 @@ public class MonitorDBDao {
public static final String VARIABLE_NAME = "variable_name";
@Autowired
private DruidDataSource dataSource;
private HikariDataSource dataSource;
/**
* get current db performance
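Note: switching the injected type also makes HikariCP's pool gauges available to this monitoring DAO through HikariDataSource#getHikariPoolMXBean(). The sketch below is illustrative only and not part of this commit; it relies on nothing beyond the HikariCP API.

import com.zaxxer.hikari.HikariDataSource;
import com.zaxxer.hikari.HikariPoolMXBean;

// Illustrative sketch (not in this diff): summarize pool health from the injected HikariDataSource.
public final class PoolStatsSketch {

    public static String summarize(HikariDataSource dataSource) {
        HikariPoolMXBean pool = dataSource.getHikariPoolMXBean();
        if (pool == null) {
            // the MXBean only exists once the pool has been started
            return "pool not started";
        }
        return String.format("active=%d idle=%d total=%d awaiting=%d",
                pool.getActiveConnections(),
                pool.getIdleConnections(),
                pool.getTotalConnections(),
                pool.getThreadsAwaitingConnection());
    }
}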

dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java (2 lines changed)

@@ -82,7 +82,7 @@ public class ConnectionFactory extends SpringConnectionFactory {
/**
* get the data source
*
* @return druid dataSource
* @return dataSource
*/
private DataSource buildDataSource() throws SQLException {

dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java (57 lines changed)

@@ -56,13 +56,13 @@ import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import com.alibaba.druid.pool.DruidDataSource;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.core.MybatisConfiguration;
import com.baomidou.mybatisplus.core.config.GlobalConfig;
import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean;
import com.google.common.collect.Lists;
import com.zaxxer.hikari.HikariDataSource;
/**
* data source connection factory
@@ -106,7 +106,7 @@ public class SpringConnectionFactory {
/**
* get the data source
*
* @return druid dataSource
* @return dataSource
*/
@Bean(destroyMethod = "", name = "datasource")
public DataSource dataSource() throws SQLException {
@@ -115,34 +115,31 @@ public class SpringConnectionFactory {
initializeH2Datasource();
}
DruidDataSource druidDataSource = new DruidDataSource();
druidDataSource.setDriverClassName(driverClassName);
druidDataSource.setUrl(PropertyUtils.getString(Constants.SPRING_DATASOURCE_URL));
druidDataSource.setUsername(PropertyUtils.getString(Constants.SPRING_DATASOURCE_USERNAME));
druidDataSource.setPassword(PropertyUtils.getString(Constants.SPRING_DATASOURCE_PASSWORD));
druidDataSource.setValidationQuery(PropertyUtils.getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY, "SELECT 1"));
druidDataSource.setPoolPreparedStatements(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS, true));
druidDataSource.setTestWhileIdle(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_WHILE_IDLE, true));
druidDataSource.setTestOnBorrow(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW, true));
druidDataSource.setTestOnReturn(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_RETURN, false));
druidDataSource.setKeepAlive(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_KEEP_ALIVE, true));
druidDataSource.setMinIdle(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE, 5));
druidDataSource.setMaxActive(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50));
druidDataSource.setMaxWait(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_WAIT, 60000));
druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE, 20));
druidDataSource.setInitialSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_INITIAL_SIZE, 5));
druidDataSource.setTimeBetweenEvictionRunsMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS, 60000));
druidDataSource.setTimeBetweenConnectErrorMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS, 60000));
druidDataSource.setMinEvictableIdleTimeMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS, 300000));
druidDataSource.setValidationQueryTimeout(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT, 3));
//auto commit
druidDataSource.setDefaultAutoCommit(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT, true));
druidDataSource.init();
logger.info("Initialize Druid DataSource success");
return druidDataSource;
HikariDataSource dataSource = new HikariDataSource();
dataSource.setDriverClassName(driverClassName);
dataSource.setJdbcUrl(PropertyUtils.getString(Constants.SPRING_DATASOURCE_URL));
dataSource.setUsername(PropertyUtils.getString(Constants.SPRING_DATASOURCE_USERNAME));
dataSource.setPassword(PropertyUtils.getString(Constants.SPRING_DATASOURCE_PASSWORD));
dataSource.setConnectionTestQuery(PropertyUtils.getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY, "SELECT 1"));
dataSource.setMinimumIdle(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE, 5));
dataSource.setMaximumPoolSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50));
dataSource.setConnectionTimeout(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_CONNECTION_TIMEOUT, 30000));
dataSource.setIdleTimeout(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_IDLE_TIMEOUT, 600000));
dataSource.setMaxLifetime(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_LIFE_TIME, 1800000));
dataSource.setValidationTimeout(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_VALIDATION_TIMEOUT, 5000));
dataSource.setLeakDetectionThreshold(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_LEAK_DETECTION_THRESHOLD, 0));
dataSource.setInitializationFailTimeout(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_INITIALIZATION_FAIL_TIMEOUT, 1));
dataSource.setAutoCommit(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_IS_AUTOCOMMIT, true));
dataSource.addDataSourceProperty(Constants.CACHE_PREP_STMTS, PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_CACHE_PREP_STMTS, true));
dataSource.addDataSourceProperty(Constants.PREP_STMT_CACHE_SIZE, PropertyUtils.getInt(Constants.SPRING_DATASOURCE_PREP_STMT_CACHE_SIZE, 250));
dataSource.addDataSourceProperty(Constants.PREP_STMT_CACHE_SQL_LIMIT, PropertyUtils.getInt(Constants.SPRING_DATASOURCE_PREP_STMT_CACHE_SQL_LIMIT, 2048));
logger.info("Initialize DataSource DataSource success");
return dataSource;
}
private void initializeH2Datasource() {
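The replacement block maps each Druid setter onto its HikariCP counterpart (maxActive → maximumPoolSize, minIdle → minimumIdle, validationQuery → connectionTestQuery) and pushes the prepared-statement cache flags down to the JDBC driver via addDataSourceProperty, because HikariCP does not pool statements itself. For reference, a minimal sketch of the same settings expressed through HikariConfig; this is illustrative only, not code from the commit, and the hard-coded values simply mirror the defaults used above.

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

// Illustrative sketch (not in this diff): equivalent pool setup via HikariConfig.
public final class HikariConfigSketch {

    public static HikariDataSource build(String driverClassName, String url, String user, String password) {
        HikariConfig config = new HikariConfig();
        config.setDriverClassName(driverClassName);
        config.setJdbcUrl(url);
        config.setUsername(user);
        config.setPassword(password);
        config.setMinimumIdle(5);            // spring.datasource.minIdle
        config.setMaximumPoolSize(50);       // spring.datasource.maxActive
        config.setConnectionTimeout(30_000); // spring.datasource.connectionTimeout
        config.setIdleTimeout(600_000);      // spring.datasource.idleTimeout
        config.setMaxLifetime(1_800_000);    // spring.datasource.maxLifetime
        config.setConnectionTestQuery("SELECT 1");
        // statement caching is delegated to the driver, not the pool
        config.addDataSourceProperty("cachePrepStmts", "true");
        config.addDataSourceProperty("prepStmtCacheSize", "250");
        config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
        return new HikariDataSource(config); // the pool starts eagerly here
    }
}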

dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java (2 lines changed)

@@ -81,7 +81,7 @@ public abstract class UpgradeDao {
/**
* get datasource
* @return DruidDataSource
* @return DataSource
*/
public static DataSource getDataSource(){
return ConnectionFactory.getInstance().getDataSource();

dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/pom.xml (10 lines changed)

@@ -69,11 +69,6 @@
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
@@ -161,5 +156,10 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
</dependency>
</dependencies>
</project>

dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClient.java (16 lines changed)

@@ -30,8 +30,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import com.alibaba.druid.pool.DruidDataSource;
import com.google.common.base.Stopwatch;
import com.zaxxer.hikari.HikariDataSource;
public class CommonDataSourceClient implements DataSourceClient {
@@ -42,7 +42,7 @@ public class CommonDataSourceClient implements DataSourceClient {
public static final String COMMON_VALIDATION_QUERY = "select 1";
protected final BaseConnectionParam baseConnectionParam;
protected DruidDataSource druidDataSource;
protected HikariDataSource dataSource;
protected JdbcTemplate jdbcTemplate;
public CommonDataSourceClient(BaseConnectionParam baseConnectionParam) {
@@ -63,8 +63,8 @@ public class CommonDataSourceClient implements DataSourceClient {
}
protected void initClient(BaseConnectionParam baseConnectionParam) {
this.druidDataSource = JdbcDataSourceProvider.createJdbcDataSource(baseConnectionParam);
this.jdbcTemplate = new JdbcTemplate(druidDataSource);
this.dataSource = JdbcDataSourceProvider.createJdbcDataSource(baseConnectionParam);
this.jdbcTemplate = new JdbcTemplate(dataSource);
}
protected void checkUser(BaseConnectionParam baseConnectionParam) {
@@ -110,9 +110,9 @@ public class CommonDataSourceClient implements DataSourceClient {
@Override
public Connection getConnection() {
try {
return this.druidDataSource.getConnection();
return this.dataSource.getConnection();
} catch (SQLException e) {
logger.error("get druidDataSource Connection fail SQLException: {}", e.getMessage(), e);
logger.error("get DataSource Connection fail SQLException: {}", e.getMessage(), e);
return null;
}
}
@@ -120,8 +120,8 @@ public class CommonDataSourceClient implements DataSourceClient {
@Override
public void close() {
logger.info("do close dataSource.");
this.druidDataSource.close();
this.druidDataSource = null;
this.dataSource.close();
this.dataSource = null;
this.jdbcTemplate = null;
}
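Since getConnection() above logs the SQLException and returns null rather than rethrowing, callers still need a null check. A minimal usage sketch under that assumption (not part of this commit; the client instance is assumed to be constructed elsewhere):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient;

// Illustrative sketch (not in this diff): borrow a connection and run the validation query.
public final class DataSourceClientUsageSketch {

    public static boolean ping(CommonDataSourceClient client) throws SQLException {
        Connection borrowed = client.getConnection();
        if (borrowed == null) {
            // getConnection() already logged the failure
            return false;
        }
        try (Connection connection = borrowed;
             Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery("select 1")) {
            return resultSet.next();
        }
    }
}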

dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProvider.java (55 lines changed)

@@ -25,7 +25,7 @@ import org.apache.dolphinscheduler.spi.utils.PropertyUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.alibaba.druid.pool.DruidDataSource;
import com.zaxxer.hikari.HikariDataSource;
/**
* Jdbc Data Source Provider
@@ -34,51 +34,50 @@ public class JdbcDataSourceProvider {
private static final Logger logger = LoggerFactory.getLogger(JdbcDataSourceProvider.class);
public static DruidDataSource createJdbcDataSource(BaseConnectionParam properties) {
logger.info("Creating DruidDataSource pool for maxActive:{}", PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50));
public static HikariDataSource createJdbcDataSource(BaseConnectionParam properties) {
logger.info("Creating HikariDataSource pool for maxActive:{}", PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50));
HikariDataSource dataSource = new HikariDataSource();
DruidDataSource druidDataSource = new DruidDataSource();
dataSource.setDriverClassName(properties.getDriverClassName());
dataSource.setJdbcUrl(properties.getJdbcUrl());
dataSource.setUsername(properties.getUser());
dataSource.setPassword(PasswordUtils.decodePassword(properties.getPassword()));
druidDataSource.setDriverClassName(properties.getDriverClassName());
druidDataSource.setUrl(properties.getJdbcUrl());
druidDataSource.setUsername(properties.getUser());
druidDataSource.setPassword(PasswordUtils.decodePassword(properties.getPassword()));
druidDataSource.setMinIdle(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE, 5));
druidDataSource.setMaxActive(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50));
druidDataSource.setTestOnBorrow(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW, false));
dataSource.setMinimumIdle(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE, 5));
dataSource.setMaximumPoolSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50));
dataSource.setConnectionTestQuery(properties.getValidationQuery());
if (properties.getProps() != null) {
properties.getProps().forEach(druidDataSource::addConnectionProperty);
properties.getProps().forEach(dataSource::addDataSourceProperty);
}
logger.info("Creating DruidDataSource pool success.");
return druidDataSource;
logger.info("Creating HikariDataSource pool success.");
return dataSource;
}
/**
* @return One Session Jdbc DataSource
*/
public static DruidDataSource createOneSessionJdbcDataSource(BaseConnectionParam properties) {
logger.info("Creating OneSession DruidDataSource pool for maxActive:{}", PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50));
public static HikariDataSource createOneSessionJdbcDataSource(BaseConnectionParam properties) {
logger.info("Creating OneSession HikariDataSource pool for maxActive:{}", PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50));
DruidDataSource druidDataSource = new DruidDataSource();
HikariDataSource dataSource = new HikariDataSource();
druidDataSource.setDriverClassName(properties.getDriverClassName());
druidDataSource.setUrl(properties.getJdbcUrl());
druidDataSource.setUsername(properties.getUser());
druidDataSource.setPassword(PasswordUtils.decodePassword(properties.getPassword()));
dataSource.setDriverClassName(properties.getDriverClassName());
dataSource.setJdbcUrl(properties.getJdbcUrl());
dataSource.setUsername(properties.getUser());
dataSource.setPassword(PasswordUtils.decodePassword(properties.getPassword()));
druidDataSource.setMinIdle(1);
druidDataSource.setMaxActive(1);
druidDataSource.setTestOnBorrow(true);
dataSource.setMinimumIdle(1);
dataSource.setMaximumPoolSize(1);
dataSource.setConnectionTestQuery(properties.getValidationQuery());
if (properties.getProps() != null) {
properties.getProps().forEach(druidDataSource::addConnectionProperty);
properties.getProps().forEach(dataSource::addDataSourceProperty);
}
logger.info("Creating OneSession DruidDataSource pool success.");
return druidDataSource;
logger.info("Creating OneSession HikariDataSource pool success.");
return dataSource;
}
}
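HikariDataSource implements Closeable, so pools returned by either factory method can be managed with try-with-resources. A small usage sketch, not part of this commit: the connection parameters are assumed to be populated elsewhere (for example the MysqlConnectionParam used in the tests below), and the BaseConnectionParam package location is an assumption.

import java.sql.Connection;
import java.sql.SQLException;

import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; // assumed package location

import com.zaxxer.hikari.HikariDataSource;

// Illustrative sketch (not in this diff), assumed to live in the same package as JdbcDataSourceProvider:
// create the pool, borrow one connection, then shut it down.
public final class JdbcDataSourceProviderUsageSketch {

    public static void runOnce(BaseConnectionParam params) throws SQLException {
        try (HikariDataSource dataSource = JdbcDataSourceProvider.createJdbcDataSource(params);
             Connection connection = dataSource.getConnection()) {
            // both resources are closed in reverse order on exit
            System.out.println("connected: " + !connection.isClosed());
        }
    }
}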

dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClientTest.java (4 lines changed)

@@ -34,11 +34,11 @@ import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor
import org.powermock.modules.junit4.PowerMockRunner;
import org.springframework.jdbc.core.JdbcTemplate;
import com.alibaba.druid.pool.DruidDataSource;
import com.zaxxer.hikari.HikariDataSource;
@RunWith(PowerMockRunner.class)
@SuppressStaticInitializationFor("org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient")
@PrepareForTest(value = {DruidDataSource.class, CommonDataSourceClient.class, JdbcDataSourceProvider.class, JdbcTemplate.class, Connection.class})
@PrepareForTest(value = {HikariDataSource.class, CommonDataSourceClient.class, JdbcDataSourceProvider.class, JdbcTemplate.class, Connection.class})
public class CommonDataSourceClientTest {
@Mock

dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProviderTest.java (12 lines changed)

@@ -27,25 +27,25 @@ import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import com.alibaba.druid.pool.DruidDataSource;
import com.zaxxer.hikari.HikariDataSource;
@RunWith(PowerMockRunner.class)
@PrepareForTest(value = {DruidDataSource.class, JdbcDataSourceProvider.class})
@PrepareForTest(value = {HikariDataSource.class, JdbcDataSourceProvider.class})
public class JdbcDataSourceProviderTest {
@Test
public void testCreateJdbcDataSource() {
PowerMockito.mockStatic(JdbcDataSourceProvider.class);
DruidDataSource druidDataSource = PowerMockito.mock(DruidDataSource.class);
PowerMockito.when(JdbcDataSourceProvider.createJdbcDataSource(Mockito.any())).thenReturn(druidDataSource);
HikariDataSource dataSource = PowerMockito.mock(HikariDataSource.class);
PowerMockito.when(JdbcDataSourceProvider.createJdbcDataSource(Mockito.any())).thenReturn(dataSource);
Assert.assertNotNull(JdbcDataSourceProvider.createJdbcDataSource(new MysqlConnectionParam()));
}
@Test
public void testCreateOneSessionJdbcDataSource() {
PowerMockito.mockStatic(JdbcDataSourceProvider.class);
DruidDataSource druidDataSource = PowerMockito.mock(DruidDataSource.class);
PowerMockito.when(JdbcDataSourceProvider.createOneSessionJdbcDataSource(Mockito.any())).thenReturn(druidDataSource);
HikariDataSource dataSource = PowerMockito.mock(HikariDataSource.class);
PowerMockito.when(JdbcDataSourceProvider.createOneSessionJdbcDataSource(Mockito.any())).thenReturn(dataSource);
Assert.assertNotNull(JdbcDataSourceProvider.createOneSessionJdbcDataSource(new MysqlConnectionParam()));
}

dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java (4 lines changed)

@@ -39,13 +39,13 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import com.alibaba.druid.pool.DruidDataSource;
import com.zaxxer.hikari.HikariDataSource;
public class HiveDataSourceClient extends CommonDataSourceClient {
private static final Logger logger = LoggerFactory.getLogger(HiveDataSourceClient.class);
protected DruidDataSource oneSessionDataSource;
protected HikariDataSource oneSessionDataSource;
private JdbcTemplate oneSessionJdbcTemplate;
private UserGroupInformation ugi;

dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/PythonGatewayServer.java (3 lines changed)

@@ -71,7 +71,8 @@ import py4j.GatewayServer;
"org.apache.dolphinscheduler.server.master.*",
"org.apache.dolphinscheduler.server.worker.*",
"org.apache.dolphinscheduler.server.monitor.*",
"org.apache.dolphinscheduler.server.log.*"
"org.apache.dolphinscheduler.server.log.*",
"org.apache.dolphinscheduler.alert.*"
})
})
public class PythonGatewayServer extends SpringBootServletInitializer {

dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java → dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/HikariConnectionProvider.java (renamed, 19 lines changed)

@@ -14,24 +14,27 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.quartz;
import com.alibaba.druid.pool.DruidDataSource;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.quartz.utils.ConnectionProvider;
import java.sql.Connection;
import java.sql.SQLException;
import org.quartz.utils.ConnectionProvider;
import com.zaxxer.hikari.HikariDataSource;
/**
* druid connection provider
* hikari connection provider
*/
public class DruidConnectionProvider implements ConnectionProvider {
public class HikariConnectionProvider implements ConnectionProvider {
private final DruidDataSource dataSource;
private final HikariDataSource dataSource;
public DruidConnectionProvider(){
this.dataSource = SpringApplicationContext.getBean(DruidDataSource.class);
public HikariConnectionProvider() {
this.dataSource = SpringApplicationContext.getBean(HikariDataSource.class);
}
@Override
@@ -40,7 +43,7 @@ public class DruidConnectionProvider implements ConnectionProvider {
}
@Override
public void shutdown() throws SQLException {
public void shutdown() {
dataSource.close();
}
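Only the constructor and shutdown() appear in this hunk; org.quartz.utils.ConnectionProvider also declares getConnection() and initialize(). The sketch below shows what the complete provider plausibly looks like, with the two methods not shown in the diff inferred from the Quartz interface rather than quoted from the commit.

import java.sql.Connection;
import java.sql.SQLException;

import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.quartz.utils.ConnectionProvider;

import com.zaxxer.hikari.HikariDataSource;

// Sketch of the full provider: getConnection() and initialize() are inferred, not taken from the diff.
public class HikariConnectionProvider implements ConnectionProvider {

    private final HikariDataSource dataSource;

    public HikariConnectionProvider() {
        // reuse the Spring-managed pool instead of letting Quartz open its own
        this.dataSource = SpringApplicationContext.getBean(HikariDataSource.class);
    }

    @Override
    public Connection getConnection() throws SQLException {
        return dataSource.getConnection();
    }

    @Override
    public void shutdown() {
        dataSource.close();
    }

    @Override
    public void initialize() {
        // nothing to do: the pool is created and configured by the Spring context
    }
}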

dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java (2 lines changed)

@@ -173,7 +173,7 @@ public class QuartzExecutors {
properties.setProperty(ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL, conf.getString(ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL, QUARTZ_CLUSTERCHECKININTERVAL));
properties.setProperty(ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK, conf.getString(ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK, QUARTZ_ACQUIRETRIGGERSWITHINLOCK));
properties.setProperty(ORG_QUARTZ_JOBSTORE_DATASOURCE, conf.getString(ORG_QUARTZ_JOBSTORE_DATASOURCE, QUARTZ_DATASOURCE));
properties.setProperty(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS, conf.getString(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS, DruidConnectionProvider.class.getName()));
properties.setProperty(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS, conf.getString(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS, HikariConnectionProvider.class.getName()));
schedulerFactory.initialize(properties);
scheduler = schedulerFactory.getScheduler();

dolphinscheduler-service/src/main/resources/quartz.properties (2 lines changed)

@@ -51,4 +51,4 @@
#============================================================================
# Configure Datasources
#============================================================================
#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider
#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.HikariConnectionProvider

dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/server/StandaloneServer.java (2 lines changed)

@@ -41,6 +41,6 @@ public class StandaloneServer {
WorkerServer.class,
AlertServer.class,
PythonGatewayServer.class
).profiles("h2").run(args);
).profiles("h2", "api").run(args);
}
}

dolphinscheduler-standalone-server/src/main/resources/application-h2.properties (16 lines changed)

@@ -0,0 +1,16 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

script/dolphinscheduler-daemon.sh (4 lines changed)

@@ -67,12 +67,12 @@ if [ "$command" = "api-server" ]; then
HEAP_OPTS="-Xms1g -Xmx1g -Xmn512m"
export DOLPHINSCHEDULER_OPTS="$HEAP_OPTS $DOLPHINSCHEDULER_OPTS $API_SERVER_OPTS"
elif [ "$command" = "master-server" ]; then
LOG_FILE="-Dlogging.config=classpath:logback-master.xml -Ddruid.mysql.usePingMethod=false"
LOG_FILE="-Dlogging.config=classpath:logback-master.xml"
CLASS=org.apache.dolphinscheduler.server.master.MasterServer
HEAP_OPTS="-Xms4g -Xmx4g -Xmn2g"
export DOLPHINSCHEDULER_OPTS="$HEAP_OPTS $DOLPHINSCHEDULER_OPTS $MASTER_SERVER_OPTS"
elif [ "$command" = "worker-server" ]; then
LOG_FILE="-Dlogging.config=classpath:logback-worker.xml -Ddruid.mysql.usePingMethod=false"
LOG_FILE="-Dlogging.config=classpath:logback-worker.xml"
CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer
HEAP_OPTS="-Xms2g -Xmx2g -Xmn1g"
export DOLPHINSCHEDULER_OPTS="$HEAP_OPTS $DOLPHINSCHEDULER_OPTS $WORKER_SERVER_OPTS"
