
change sqlSessionTemplate singleton and reformat code (#1126)

* move updateTaskState into try/catch block in case of exception

* fix NPE

* using conf.getInt instead of getString

* for AbstractZKClient, remove the log, since createZNodePath already prints the same message;
for AlertDao, correct the spelling

* duplicate

* refactor getTaskWorkerGroupId

* add friendly log

* update heartbeat thread num = 1 (see the single-thread scheduling sketch after this commit message)

* fix the bug where the worker executes a task using the queue, and no longer check the Tenant user in TaskScheduleThread

* 1. move verifyTaskInstanceIsNull after the taskInstance is fetched
2. keep verifyTenantIsNull/verifyTaskInstanceIsNull clean and readable

* fix the message

* delete before check to avoid KeeperException$NoNodeException (see the existence-check sketch after this commit message)

* fix the message

* check processInstance state before deleting a tenant

* check processInstance state before deleting a worker group

* refactor

* merge api constants into common constants

* update the resource perm

* update the dataSource perm

* fix CheckUtils.checkUserParams method

* update AlertGroupService to extend BaseService and remove duplicate methods

* refactor

* modify method name

* add hasProjectAndPerm method

* using checkProject instead of getResultStatus

* delete checkAuth method, using hasProjectAndPerm instead.

* correct spelling

* add @Transactional for deleteWorkerGroupById (see the sketch after this commit message)

* add @Transactional for the deleteProcessInstanceById method

* change sqlSessionTemplate singleton

* change sqlSessionTemplate singleton and reformat code
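
The heartbeat bullet above reduces the reporting pool to a single thread. A minimal, purely illustrative sketch of a single-threaded scheduled heartbeat; the class and method names here are hypothetical, not DolphinScheduler's actual worker code:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class HeartbeatReporter {

    // one dedicated thread is enough for periodic heartbeat reporting
    private final ScheduledExecutorService heartbeatExecutor =
            Executors.newScheduledThreadPool(1);

    public void start(Runnable reportHeartbeat, long intervalSeconds) {
        // run the heartbeat task immediately, then at a fixed interval
        heartbeatExecutor.scheduleAtFixedRate(reportHeartbeat, 0, intervalSeconds, TimeUnit.SECONDS);
    }

    public void stop() {
        heartbeatExecutor.shutdownNow();
    }
}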
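
For the KeeperException$NoNodeException bullet, one common way to avoid that exception when removing a znode is to confirm it still exists first. A hedged sketch assuming a Curator client; the wrapper class is illustrative, not the project's AbstractZKClient:

import org.apache.curator.framework.CuratorFramework;

public class ZNodeCleaner {

    private final CuratorFramework zkClient;

    public ZNodeCleaner(CuratorFramework zkClient) {
        this.zkClient = zkClient;
    }

    /**
     * delete the znode only if it still exists, so a concurrent removal
     * does not surface as KeeperException$NoNodeException
     */
    public void deleteIfExists(String path) throws Exception {
        // checkExists() returns null when the znode is absent
        if (zkClient.checkExists().forPath(path) != null) {
            zkClient.delete().deletingChildrenIfNeeded().forPath(path);
        }
    }
}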
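
For the two @Transactional bullets, a minimal sketch of annotating such a delete method with Spring's @Transactional so the validation and the delete commit or roll back together; the service and helper names are placeholders, not the project's real API:

import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
public class WorkerGroupCleanupService {

    // rollbackFor = Exception.class makes checked exceptions roll back the delete as well
    @Transactional(rollbackFor = Exception.class)
    public void deleteWorkerGroupById(int id) {
        // placeholder steps: verify no running process instance still references
        // the worker group, then remove it; both run inside one transaction
        checkNoRunningProcessInstances(id);
        removeWorkerGroupRecord(id);
    }

    private void checkNoRunningProcessInstances(int id) {
        // hypothetical validation, would query the process instance mapper
    }

    private void removeWorkerGroupRecord(int id) {
        // hypothetical delete, would call the worker group mapper
    }
}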
Authored by Tboy 5 years ago, committed by lgcareer · commit da10e7fa88
1 changed file: dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java (192 changed lines)
@@ -38,107 +38,117 @@ import javax.sql.DataSource;

ConnectionFactory.java after this change:

/**
 * data source connection factory
 */
public class ConnectionFactory {

    private static final Logger logger = LoggerFactory.getLogger(ConnectionFactory.class);

    private static SqlSessionFactory sqlSessionFactory;

    private static SqlSessionTemplate sqlSessionTemplate;

    /**
     * Load configuration file
     */
    private static org.apache.commons.configuration.Configuration conf;

    static {
        try {
            conf = new PropertiesConfiguration(Constants.APPLICATION_PROPERTIES);
        } catch (ConfigurationException e) {
            logger.error("load configuration exception", e);
            System.exit(1);
        }
    }

    /**
     * get the data source
     */
    public static DruidDataSource getDataSource() {
        DruidDataSource druidDataSource = new DruidDataSource();

        druidDataSource.setDriverClassName(conf.getString(Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME));
        druidDataSource.setUrl(conf.getString(Constants.SPRING_DATASOURCE_URL));
        druidDataSource.setUsername(conf.getString(Constants.SPRING_DATASOURCE_USERNAME));
        druidDataSource.setPassword(conf.getString(Constants.SPRING_DATASOURCE_PASSWORD));
        druidDataSource.setValidationQuery(conf.getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY));

        druidDataSource.setPoolPreparedStatements(conf.getBoolean(Constants.SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS));
        druidDataSource.setTestWhileIdle(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_WHILE_IDLE));
        druidDataSource.setTestOnBorrow(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW));
        druidDataSource.setTestOnReturn(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_RETURN));
        druidDataSource.setKeepAlive(conf.getBoolean(Constants.SPRING_DATASOURCE_KEEP_ALIVE));

        druidDataSource.setMinIdle(conf.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE));
        druidDataSource.setMaxActive(conf.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE));
        druidDataSource.setMaxWait(conf.getInt(Constants.SPRING_DATASOURCE_MAX_WAIT));
        druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(conf.getInt(Constants.SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE));
        druidDataSource.setInitialSize(conf.getInt(Constants.SPRING_DATASOURCE_INITIAL_SIZE));
        druidDataSource.setTimeBetweenEvictionRunsMillis(conf.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS));
        druidDataSource.setTimeBetweenConnectErrorMillis(conf.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS));
        druidDataSource.setMinEvictableIdleTimeMillis(conf.getLong(Constants.SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS));
        druidDataSource.setValidationQueryTimeout(conf.getInt(Constants.SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT));
        //auto commit
        druidDataSource.setDefaultAutoCommit(conf.getBoolean(Constants.SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT));

        return druidDataSource;
    }

    /**
     * get sql session factory
     */
    public static SqlSessionFactory getSqlSessionFactory() throws Exception {
        if (sqlSessionFactory == null) {
            synchronized (ConnectionFactory.class) {
                if (sqlSessionFactory == null) {
                    DataSource dataSource = getDataSource();
                    TransactionFactory transactionFactory = new JdbcTransactionFactory();

                    Environment environment = new Environment("development", transactionFactory, dataSource);

                    MybatisConfiguration configuration = new MybatisConfiguration();
                    configuration.setEnvironment(environment);
                    configuration.setLazyLoadingEnabled(true);
                    configuration.addMappers("org.apache.dolphinscheduler.dao.mapper");

                    MybatisSqlSessionFactoryBean sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean();
                    sqlSessionFactoryBean.setConfiguration(configuration);
                    sqlSessionFactoryBean.setDataSource(dataSource);
                    sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums");
                    sqlSessionFactory = sqlSessionFactoryBean.getObject();
                    return sqlSessionFactory;
                }
            }
        }
        return sqlSessionFactory;
    }

    /**
     * get sql session
     */
    public static SqlSession getSqlSession() {
        if (sqlSessionTemplate == null) {
            synchronized (ConnectionFactory.class) {
                if (sqlSessionTemplate == null) {
                    try {
                        sqlSessionTemplate = new SqlSessionTemplate(getSqlSessionFactory());
                        return sqlSessionTemplate;
                    } catch (Exception e) {
                        logger.error("getSqlSession error", e);
                        throw new RuntimeException(e);
                    }
                }
            }
        }
        return sqlSessionTemplate;
    }

    public static <T> T getMapper(Class<T> type) {
        try {
            return getSqlSession().getMapper(type);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException("get mapper failed!");
        }
    }
}
