Browse Source
* move updateTaskState into try/catch block in case of exception * fix NPE * using conf.getInt instead of getString * for AbstractZKClient, remove the log, for it will print the same log message in createZNodePath. for AlertDao, correct the spelling. * duplicate * refactor getTaskWorkerGroupId * add friendly log * update heartbeat thread num = 1 * fix the bug when worker execute task using queue. and remove checking Tenant user anymore in TaskScheduleThread * 1. move verifyTaskInstanceIsNull after taskInstance 2. keep verifyTenantIsNull/verifyTaskInstanceIsNull clean and readable * fix the message * delete before check to avoid KeeperException$NoNodeException * fix the message * check processInstance state before delete tenant * check processInstance state before delete worker group * refactor * merge api constants into common constants * update the resource perm * update the dataSource perm * fix CheckUtils.checkUserParams method * update AlertGroupService, extends from BaseService, remove duplicate methods * refactor * modify method name * add hasProjectAndPerm method * using checkProject instead of getResultStatus * delete checkAuth method, using hasProjectAndPerm instead.
* correct spelling * add transactional for deleteWorkerGroupById * add Transactional for deleteProcessInstanceById method * change sqlSessionTemplate singleton * change sqlSessionTemplate singleton and reformat code * fix unsuitable error message * update shutdownhook methods * fix worker log bug * fix api server debug mode bug * upgrade zk version * delete this line, for zkClient.close() will do the whole thing * fix master server shutdown error * degrade zk version and add FourLetterWordMain class * fix PathChildrenCache not close * add Transactional for createSession method * add more message for java-doc * delete App, let spring manage connectionFactory * add license * add class Application for test support * refactor masterServer and workerServer * add args * fix the spring transaction not work bug * remove author * delete @Bean annotation * rename application.properties to application-dao.properties
Tboy
5 years ago
committed by
qiaozhanwei
7 changed files with 161 additions and 39 deletions
@ -0,0 +1,142 @@ |
|||||||
|
/* |
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||||
|
* contributor license agreements. See the NOTICE file distributed with |
||||||
|
* this work for additional information regarding copyright ownership. |
||||||
|
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||||
|
* (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software |
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, |
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||||
|
* See the License for the specific language governing permissions and |
||||||
|
* limitations under the License. |
||||||
|
*/ |
||||||
|
package org.apache.dolphinscheduler.dao.datasource; |
||||||
|
|
||||||
|
import com.alibaba.druid.pool.DruidDataSource; |
||||||
|
import com.baomidou.mybatisplus.core.MybatisConfiguration; |
||||||
|
import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; |
||||||
|
import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; |
||||||
|
import org.apache.commons.configuration.ConfigurationException; |
||||||
|
import org.apache.commons.configuration.PropertiesConfiguration; |
||||||
|
import org.apache.dolphinscheduler.common.Constants; |
||||||
|
import org.apache.ibatis.session.SqlSession; |
||||||
|
import org.apache.ibatis.session.SqlSessionFactory; |
||||||
|
import org.mybatis.spring.SqlSessionTemplate; |
||||||
|
import org.mybatis.spring.annotation.MapperScan; |
||||||
|
import org.slf4j.Logger; |
||||||
|
import org.slf4j.LoggerFactory; |
||||||
|
import org.springframework.context.annotation.Bean; |
||||||
|
import org.springframework.context.annotation.Configuration; |
||||||
|
import org.springframework.jdbc.datasource.DataSourceTransactionManager; |
||||||
|
|
||||||
|
|
||||||
|
/** |
||||||
|
* data source connection factory |
||||||
|
*/ |
||||||
|
@Configuration |
||||||
|
@MapperScan("org.apache.dolphinscheduler.*.mapper") |
||||||
|
public class SpringConnectionFactory { |
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(SpringConnectionFactory.class); |
||||||
|
|
||||||
|
/** |
||||||
|
* Load configuration file |
||||||
|
*/ |
||||||
|
protected static org.apache.commons.configuration.Configuration conf; |
||||||
|
|
||||||
|
static { |
||||||
|
try { |
||||||
|
conf = new PropertiesConfiguration(Constants.APPLICATION_PROPERTIES); |
||||||
|
} catch (ConfigurationException e) { |
||||||
|
logger.error("load configuration exception", e); |
||||||
|
System.exit(1); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* pagination interceptor |
||||||
|
* @return pagination interceptor |
||||||
|
*/ |
||||||
|
@Bean |
||||||
|
public PaginationInterceptor paginationInterceptor() { |
||||||
|
return new PaginationInterceptor(); |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* get the data source |
||||||
|
* @return druid dataSource |
||||||
|
*/ |
||||||
|
@Bean |
||||||
|
public DruidDataSource dataSource() { |
||||||
|
|
||||||
|
DruidDataSource druidDataSource = new DruidDataSource(); |
||||||
|
|
||||||
|
druidDataSource.setDriverClassName(conf.getString(Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME)); |
||||||
|
druidDataSource.setUrl(conf.getString(Constants.SPRING_DATASOURCE_URL)); |
||||||
|
druidDataSource.setUsername(conf.getString(Constants.SPRING_DATASOURCE_USERNAME)); |
||||||
|
druidDataSource.setPassword(conf.getString(Constants.SPRING_DATASOURCE_PASSWORD)); |
||||||
|
druidDataSource.setValidationQuery(conf.getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY)); |
||||||
|
|
||||||
|
druidDataSource.setPoolPreparedStatements(conf.getBoolean(Constants.SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS)); |
||||||
|
druidDataSource.setTestWhileIdle(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_WHILE_IDLE)); |
||||||
|
druidDataSource.setTestOnBorrow(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW)); |
||||||
|
druidDataSource.setTestOnReturn(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_RETURN)); |
||||||
|
druidDataSource.setKeepAlive(conf.getBoolean(Constants.SPRING_DATASOURCE_KEEP_ALIVE)); |
||||||
|
|
||||||
|
druidDataSource.setMinIdle(conf.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE)); |
||||||
|
druidDataSource.setMaxActive(conf.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE)); |
||||||
|
druidDataSource.setMaxWait(conf.getInt(Constants.SPRING_DATASOURCE_MAX_WAIT)); |
||||||
|
druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(conf.getInt(Constants.SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE)); |
||||||
|
druidDataSource.setInitialSize(conf.getInt(Constants.SPRING_DATASOURCE_INITIAL_SIZE)); |
||||||
|
druidDataSource.setTimeBetweenEvictionRunsMillis(conf.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS)); |
||||||
|
druidDataSource.setTimeBetweenConnectErrorMillis(conf.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS)); |
||||||
|
druidDataSource.setMinEvictableIdleTimeMillis(conf.getLong(Constants.SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS)); |
||||||
|
druidDataSource.setValidationQueryTimeout(conf.getInt(Constants.SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT)); |
||||||
|
//auto commit
|
||||||
|
druidDataSource.setDefaultAutoCommit(conf.getBoolean(Constants.SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT)); |
||||||
|
|
||||||
|
return druidDataSource; |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* * get transaction manager |
||||||
|
* @return DataSourceTransactionManager |
||||||
|
*/ |
||||||
|
@Bean |
||||||
|
public DataSourceTransactionManager transactionManager() { |
||||||
|
return new DataSourceTransactionManager(dataSource()); |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* * get sql session factory |
||||||
|
* @return sqlSessionFactory |
||||||
|
* @throws Exception sqlSessionFactory exception |
||||||
|
*/ |
||||||
|
@Bean |
||||||
|
public SqlSessionFactory sqlSessionFactory() throws Exception { |
||||||
|
MybatisConfiguration configuration = new MybatisConfiguration(); |
||||||
|
configuration.addMappers("org.apache.dolphinscheduler.dao.mapper"); |
||||||
|
configuration.addInterceptor(paginationInterceptor()); |
||||||
|
|
||||||
|
MybatisSqlSessionFactoryBean sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean(); |
||||||
|
sqlSessionFactoryBean.setConfiguration(configuration); |
||||||
|
sqlSessionFactoryBean.setDataSource(dataSource()); |
||||||
|
|
||||||
|
sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums"); |
||||||
|
return sqlSessionFactoryBean.getObject(); |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* get sql session |
||||||
|
* @return sqlSession |
||||||
|
*/ |
||||||
|
@Bean |
||||||
|
public SqlSession sqlSession() throws Exception{ |
||||||
|
return new SqlSessionTemplate(sqlSessionFactory()); |
||||||
|
} |
||||||
|
|
||||||
|
} |
Loading…
Reference in new issue