
Pull request #5777: REPORT-71021 Restore the create-thread start/stop mechanism

Merge in CORE/base-third from ~RINOUX/base-third:release/11.0 to release/11.0

* commit '1d18148c9765a8d3bd17addfb308708ba830e438':
  REPORT-71021 Restore the create-thread start/stop mechanism
release/11.0
rinoux · 3 years ago · commit ab35235c48
Changed files:
  1. fine-druid/readme.MD (2 changes)
  2. fine-druid/src/main/java/com/fr/third/alibaba/druid/pool/DruidDataSource.java (279 changes)

fine-druid/readme.MD (2 changes)

@@ -1,4 +1,5 @@
 # Alibaba Druid
 - FineReport update date `2022-04-27`
 - Druid version 1.2.9
 - [GitHub repository](https://github.com/alibaba/druid)
@@ -14,3 +15,4 @@
 | 1.2.9 | 2022-05-05 | MysqlUtils.getLastPacketReceivedTimeMs now distinguishes connection implementations by class loader and no longer uses global variables |
 | 1.2.9 | 2022-05-05 | com.fr.third.alibaba.druid.util.Utils.loadClass now prefers the thread context class loader when loading classes |
 | 1.2.9 | 2022-05-05 | Restore com.fr.third.alibaba.druid.pool.DruidDataSourceFactory support for hibernate configuration properties |
+| 1.2.9 | 2022-05-10 | Restore the create-thread start/stop mechanism in DruidDataSource |
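For reference, the restored mechanism is driven entirely through the normal pool API: init() starts the creator, destroyer, and (with this PR) detection threads, and getConnection() additionally calls checkThread() so threads stopped after repeated connect failures are started again on demand. A minimal usage sketch; the JDBC URL, credentials, and sizes below are placeholders, not taken from this PR:

```java
import com.fr.third.alibaba.druid.pool.DruidDataSource;

import java.sql.Connection;
import java.sql.SQLException;

public class DruidStartStopDemo {
    public static void main(String[] args) throws SQLException {
        // Placeholder connection settings, not part of this PR.
        DruidDataSource ds = new DruidDataSource();
        ds.setUrl("jdbc:mysql://localhost:3306/demo");
        ds.setUsername("demo");
        ds.setPassword("demo");
        ds.setInitialSize(1);
        ds.setMaxActive(8);

        // getConnection() triggers init() and, after this PR, checkThread().
        try (Connection conn = ds.getConnection()) {
            System.out.println(conn.getMetaData().getURL());
        } finally {
            ds.close();
        }
    }
}
```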

fine-druid/src/main/java/com/fr/third/alibaba/druid/pool/DruidDataSource.java (279 changes)

@@ -134,6 +134,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     private volatile Future<?> createSchedulerFuture;
     private CreateConnectionThread createConnectionThread;
+    private PeriodDetectionThread periodDetectionThread;
     private DestroyConnectionThread destroyConnectionThread;
     private LogStatsThread logStatsThread;
     private int createTaskCount;
@@ -141,7 +142,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     private volatile long createTaskIdSeed = 1L;
     private long[] createTasks;
-    private final CountDownLatch initedLatch = new CountDownLatch(2);
+    private CountDownLatch initedLatch = new CountDownLatch(2);
     private volatile boolean enable = true;
@@ -161,6 +162,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     public static ThreadLocal<Long> waitNanosLocal = new ThreadLocal<Long>();
     private boolean logDifferentThread = true;
     private volatile boolean keepAlive = false;
+    private SQLException initException = null;
     private boolean asyncInit = false;
     protected boolean killWhenSocketReadTimeout = false;
     protected boolean checkExecuteTime = false;
@@ -178,11 +180,11 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     protected static final AtomicLongFieldUpdater<DruidDataSource> createTaskIdSeedUpdater
             = AtomicLongFieldUpdater.newUpdater(DruidDataSource.class, "createTaskIdSeed");

-    public DruidDataSource(){
+    public DruidDataSource() {
         this(false);
     }

-    public DruidDataSource(boolean fairLock){
+    public DruidDataSource(boolean fairLock) {
         super(fairLock);
         configFromPropety(System.getProperties());
@@ -526,6 +528,45 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         }
     }

+    private synchronized void doSomethingBeforeCreationThreadBreak() {
+        String threadName = "Druid-ConnectionPool-Create-" + System.identityHashCode(this) + this.getUrl();
+        createConnectionThread = new CreateConnectionThread(threadName);
+        createConnectionThread.setStarted(false);
+        String destroyName = "Druid-ConnectionPool-Destroy-" + System.identityHashCode(this) + this.getUrl();
+        if (destroyConnectionThread != null) {
+            if (!destroyConnectionThread.isInterrupted()) {
+                destroyConnectionThread.interrupt();
+            }
+        }
+        destroyConnectionThread = new DestroyConnectionThread(destroyName);
+        destroyConnectionThread.setStarted(false);
+        initedLatch = new CountDownLatch(2);
+    }
+
+    private void checkThread() throws SQLException {
+        if (createConnectionThread == null) {
+            throw new IllegalStateException("createConnectionThread not start!");
+        }
+        if (destroyConnectionThread == null) {
+            throw new IllegalStateException("destroyConnectionThread not start!");
+        }
+        if (!createConnectionThread.isStarted() && !destroyConnectionThread.isStarted()) {
+            synchronized (this) { // thread-safety: double-checked locking
+                if (!createConnectionThread.isStarted() && !destroyConnectionThread.isStarted()) {
+                    createConnectionThread.setStarted(true);
+                    createConnectionThread.start();
+                    destroyConnectionThread.setStarted(true);
+                    destroyConnectionThread.start();
+                    try {
+                        initedLatch.await();
+                    } catch (InterruptedException e) {
+                        throw new SQLException(e.getMessage(), e);
+                    }
+                }
+            }
+        }
+    }
+
     public boolean isKillWhenSocketReadTimeout() {
         return killWhenSocketReadTimeout;
     }
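The new checkThread()/doSomethingBeforeCreationThreadBreak() pair restarts the worker threads lazily with a double-checked pattern: the unsynchronized isStarted() check keeps the fast path of getConnection() cheap, while the synchronized re-check guarantees Thread.start() runs at most once. A standalone sketch of the same pattern; the class and names are illustrative, not part of the patch:

```java
// Minimal sketch of the double-checked lazy-start pattern used by checkThread().
// Worker and LazyStarter are illustrative, not part of DruidDataSource.
public class LazyStarter {
    private static class Worker extends Thread {
        private volatile boolean started = false; // volatile: visible across threads

        Worker(String name) {
            super(name);
            setDaemon(true);
        }

        @Override
        public void run() {
            // ... background work ...
        }

        boolean isStarted() {
            return started;
        }

        void setStarted(boolean started) {
            this.started = started;
        }
    }

    private final Worker worker = new Worker("demo-worker");

    public void ensureStarted() {
        if (!worker.isStarted()) {          // cheap check, no lock
            synchronized (this) {           // lock only on the slow path
                if (!worker.isStarted()) {  // re-check under the lock
                    worker.setStarted(true);
                    worker.start();         // Thread.start() must run at most once
                }
            }
        }
    }
}
```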
@@ -798,6 +839,11 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     }

     public void init() throws SQLException {
+        if (initException != null) {
+            LOG.error("{dataSource-" + this.getID() + "} init error", initException);
+            throw initException;
+        }
+
         if (inited) {
             return;
         }
@@ -944,6 +990,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             createAndLogThread();
             createAndStartCreatorThread();
             createAndStartDestroyThread();
+            createAndStartDetectThread();

             initedLatch.await();
             init = true;
@@ -968,16 +1015,13 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         } catch (SQLException e) {
             LOG.error("{dataSource-" + this.getID() + "} init error", e);
+            initException = e;
             throw e;
         } catch (InterruptedException e) {
             throw new SQLException(e.getMessage(), e);
-        } catch (RuntimeException e){
-            LOG.error("{dataSource-" + this.getID() + "} init error", e);
-            throw e;
-        } catch (Error e){
-            LOG.error("{dataSource-" + this.getID() + "} init error", e);
+        } catch (Throwable e) {
+            initException = new SQLException(e.getMessage());
             throw e;
         } finally {
             inited = true;
             lock.unlock();
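The init() changes above cache the first initialization failure in initException and rethrow it on every later call, so a pool that failed to start fails fast instead of re-running initialization from each getConnection(). A minimal sketch of that idea; the class and helper below are illustrative, not part of the patch:

```java
import java.sql.SQLException;

// Illustrative only: remember the first init failure and fail fast afterwards,
// mirroring the initException handling added to DruidDataSource.init().
public class FailFastInit {
    private volatile boolean inited;
    private volatile SQLException initException;

    public synchronized void init() throws SQLException {
        if (initException != null) {
            throw initException;   // fail fast on every later call
        }
        if (inited) {
            return;
        }
        try {
            connectOnce();         // placeholder for the real setup work
            inited = true;
        } catch (SQLException e) {
            initException = e;     // cache the first failure
            throw e;
        }
    }

    private void connectOnce() throws SQLException {
        // placeholder
    }
}
```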
@@ -1075,7 +1119,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 period = 1000;
             }
             destroySchedulerFuture = destroyScheduler.scheduleAtFixedRate(destroyTask, period, period,
                     TimeUnit.MILLISECONDS);
             initedLatch.countDown();
             return;
         }
@@ -1087,7 +1131,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     protected void createAndStartCreatorThread() {
         if (createScheduler == null) {
-            String threadName = "Druid-ConnectionPool-Create-" + System.identityHashCode(this);
+            String threadName = "Druid-ConnectionPool-Create-" + System.identityHashCode(this) + this.getUrl();
             createConnectionThread = new CreateConnectionThread(threadName);
             createConnectionThread.start();
             return;
@@ -1096,6 +1140,15 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         initedLatch.countDown();
     }

+    private void createAndStartDetectThread() {
+        if (createScheduler == null) {
+            String threadName = "Druid-ConnectionPool-Detection-" + System.identityHashCode(this) + this.getUrl();
+            periodDetectionThread = new PeriodDetectionThread(threadName);
+            periodDetectionThread.start();
+        }
+    }
+
     /**
      * load filters from SPI ServiceLoader
      *
@@ -1181,21 +1234,21 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             return;
         }

-        String errorMessage = "";
-        if (testOnBorrow) {
-            errorMessage += "testOnBorrow is true, ";
+        String infoMessage = "";
+        if (isTestOnBorrow()) {
+            infoMessage += "testOnBorrow is true, ";
         }

-        if (testOnReturn) {
-            errorMessage += "testOnReturn is true, ";
+        if (isTestOnReturn()) {
+            infoMessage += "testOnReturn is true, ";
         }

-        if (testWhileIdle) {
-            errorMessage += "testWhileIdle is true, ";
+        if (isTestWhileIdle()) {
+            infoMessage += "testWhileIdle is true, ";
         }

-        LOG.error(errorMessage + "validationQuery not set");
+        LOG.info(infoMessage + "validationQuery not set");
     }

     protected void resolveDriver() throws SQLException {
@@ -1233,7 +1286,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             if (driver.getMajorVersion() < 10) {
                 throw new SQLException("not support oracle driver " + driver.getMajorVersion() + "."
                         + driver.getMinorVersion());
             }

             if (driver.getMajorVersion() == 10 && isUseOracleImplicitCache()) {
@@ -1277,7 +1330,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 if (query instanceof SQLSelectQueryBlock) {
                     if (((SQLSelectQueryBlock) query).getFrom() == null) {
                         LOG.error("invalid oracle validationQuery. " + validationQuery + ", may should be : " + validationQuery
                                 + " FROM DUAL");
                     }
                 }
             }
@@ -1306,7 +1359,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 if (query instanceof SQLSelectQueryBlock) {
                     if (((SQLSelectQueryBlock) query).getFrom() == null) {
                         LOG.error("invalid db2 validationQuery. " + validationQuery + ", may should be : " + validationQuery
                                 + " FROM SYSDUMMY");
                     }
                 }
             }
@@ -1325,8 +1378,8 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             this.validConnectionChecker = new OracleValidConnectionChecker();
         } else if (realDriverClassName.equals(JdbcConstants.SQL_SERVER_DRIVER)
                 || realDriverClassName.equals(JdbcConstants.SQL_SERVER_DRIVER_SQLJDBC4)
                 || realDriverClassName.equals(JdbcConstants.SQL_SERVER_DRIVER_JTDS)) {
             this.validConnectionChecker = new MSSQLValidConnectionChecker();
         } else if (realDriverClassName.equals(JdbcConstants.POSTGRESQL_DRIVER)
@@ -1351,7 +1404,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         }

-        for (Class<?> driverClass = driver.getClass();;) {
+        for (Class<?> driverClass = driver.getClass(); ; ) {
             String realDriverClassName = driverClass.getName();
             if (realDriverClassName.equals(JdbcConstants.MYSQL_DRIVER) //
                     || realDriverClassName.equals(JdbcConstants.MYSQL_DRIVER_6)) {
@@ -1402,6 +1455,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     public DruidPooledConnection getConnection(long maxWaitMillis) throws SQLException {
         init();
+        checkThread();

         if (filters.size() > 0) {
             FilterChainImpl filterChain = new FilterChainImpl(this);
             return filterChain.dataSource_connect(this, maxWaitMillis);
@@ -1422,7 +1476,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     public DruidPooledConnection getConnectionDirect(long maxWaitMillis) throws SQLException {
         int notFullTimeoutRetryCnt = 0;
-        for (;;) {
+        for (; ; ) {
             // handle notFullTimeoutRetry
             DruidPooledConnection poolableConnection;
             try {
@@ -1456,10 +1510,10 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             if (testWhileIdle) {
                 final DruidConnectionHolder holder = poolableConnection.holder;
                 long currentTimeMillis = System.currentTimeMillis();
                 long lastActiveTimeMillis = holder.lastActiveTimeMillis;
                 long lastExecTimeMillis = holder.lastExecTimeMillis;
                 long lastKeepTimeMillis = holder.lastKeepTimeMillis;

                 if (checkExecuteTime
                         && lastExecTimeMillis != lastActiveTimeMillis) {
@@ -1470,7 +1524,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                     lastActiveTimeMillis = lastKeepTimeMillis;
                 }

                 long idleMillis = currentTimeMillis - lastActiveTimeMillis;

                 long timeBetweenEvictionRunsMillis = this.timeBetweenEvictionRunsMillis;
@@ -1480,7 +1534,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 if (idleMillis >= timeBetweenEvictionRunsMillis
                         || idleMillis < 0 // unexcepted branch
                 ) {
                     boolean validate = testConnectionInternal(poolableConnection.holder, poolableConnection.conn);
                     if (!validate) {
                         if (LOG.isDebugEnabled()) {
@@ -1488,7 +1542,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                         }

                         discardConnection(poolableConnection.holder);
                         continue;
                     }
                 }
             }
@@ -1591,7 +1645,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         DruidConnectionHolder holder;

-        for (boolean createDirect = false;;) {
+        for (boolean createDirect = false; ; ) {
             if (createDirect) {
                 createStartNanosUpdater.set(this, System.nanoTime());
                 if (creatingCountUpdater.compareAndSet(this, 0, 1)) {
@@ -1740,10 +1794,10 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             StringBuilder buf = new StringBuilder(128);
             buf.append("wait millis ")//
                     .append(waitNanos / (1000 * 1000))//
                     .append(", active ").append(activeCount)//
                     .append(", maxActive ").append(maxActive)//
                     .append(", creating ").append(creatingCount)//
             ;
             if (creatingCount > 0 && createStartNanos > 0) {
                 long createElapseMillis = (System.nanoTime() - createStartNanos) / (1000 * 1000);
@@ -1756,17 +1810,21 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 buf.append(", createErrorCount ").append(createErrorCount);
             }

-            List<JdbcSqlStatValue> sqlList = this.getDataSourceStat().getRuningSqlList();
-            for (int i = 0; i < sqlList.size(); ++i) {
-                if (i != 0) {
-                    buf.append('\n');
-                } else {
-                    buf.append(", ");
-                }
-                JdbcSqlStatValue sql = sqlList.get(i);
-                buf.append("runningSqlCount ").append(sql.getRunningCount());
-                buf.append(" : ");
-                buf.append(sql.getSql());
-            }
+            JdbcDataSourceStat sourceStat = this.getDataSourceStat();
+            if (sourceStat != null) {
+                List<JdbcSqlStatValue> sqlList = sourceStat.getRuningSqlList();
+                for (int i = 0; i < sqlList.size(); ++i) {
+                    if (i != 0) {
+                        buf.append('\n');
+                    } else {
+                        buf.append(", ");
+                    }
+                    JdbcSqlStatValue sql = sqlList.get(i);
+                    buf.append("runningSqlCount ");
+                    buf.append(sql.getRunningCount());
+                    buf.append(" : ");
+                    buf.append(sql.getSql());
+                }
+            }

             String errorMessage = buf.toString();
@@ -1858,13 +1916,12 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             lock.unlock();
         }

-        if(onFatalError && holder != null && holder.getDataSource() != null) {
+        if (onFatalError && holder != null && holder.getDataSource() != null) {
             ReentrantLock dataSourceLock = holder.getDataSource().lock;
             dataSourceLock.lock();
             try {
                 emptySignal();
-            }
-            finally {
+            } finally {
                 dataSourceLock.unlock();
             }
         }
@@ -1901,8 +1958,8 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         }

         if (logDifferentThread //
                 && (!isAsyncCloseConnectionEnable()) //
                 && pooledConnection.ownerThread != Thread.currentThread()//
        ) {
             LOG.warn("get/close not same thread");
         }
@@ -2097,6 +2154,10 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 destroyConnectionThread.interrupt();
             }

+            if (periodDetectionThread != null) {
+                periodDetectionThread.interrupt();
+            }
+
             if (createSchedulerFuture != null) {
                 createSchedulerFuture.cancel(true);
             }
@@ -2154,7 +2215,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             @Override
             public Object run() {
                 ObjectName objectName = DruidDataSourceStatManager.addDataSource(DruidDataSource.this,
                         DruidDataSource.this.name);

                 DruidDataSource.this.setObjectName(objectName);
                 DruidDataSource.this.mbeanRegistered = true;
@@ -2248,7 +2309,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     private DruidConnectionHolder pollLast(long nanos) throws InterruptedException, SQLException {
         long estimate = nanos;

-        for (;;) {
+        for (; ; ) {
             if (poolingCount == 0) {
                 emptySignal(); // send signal to CreateThread create connection
@@ -2269,8 +2330,8 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 try {
                     long startEstimate = estimate;
                     estimate = notEmpty.awaitNanos(estimate); // signal by
                                                               // recycle or
                                                               // creator
                     notEmptyWaitCount++;
                     notEmptyWaitNanos += (startEstimate - estimate);
@@ -2494,7 +2555,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         value.setCommitCount(commitCountUpdater.getAndSet(this, 0));
         value.setRollbackCount(rollbackCountUpdater.getAndSet(this, 0));
-        value.setPstmtCacheHitCount(cachedPreparedStatementHitCountUpdater.getAndSet(this,0));
+        value.setPstmtCacheHitCount(cachedPreparedStatementHitCountUpdater.getAndSet(this, 0));
         value.setPstmtCacheMissCount(cachedPreparedStatementMissCountUpdater.getAndSet(this, 0));
         value.setStartTransactionCount(startTransactionCountUpdater.getAndSet(this, 0));
@@ -2572,7 +2633,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             clearCreateTask(createTaskId);

             if (poolingCount + createTaskCount < notEmptyWaitThreadCount //
                     && activeCount + poolingCount + createTaskCount < maxActive) {
                 emptySignal();
             }
         }
@@ -2584,7 +2645,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     public class CreateConnectionTask implements Runnable {
         private int errorCount = 0;
         private boolean initTask = false;
         private final long taskId;
@@ -2603,7 +2664,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         }

         private void runInternal() {
-            for (;;) {
+            for (; ; ) {
                 // addLast
                 lock.lock();
@@ -2769,8 +2830,9 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     }

     public class CreateConnectionThread extends Thread {
+        private volatile boolean started = true;

-        public CreateConnectionThread(String name){
+        public CreateConnectionThread(String name) {
             super(name);
             this.setDaemon(true);
         }
@@ -2780,7 +2842,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
             long lastDiscardCount = 0;
             int errorCount = 0;
-            for (;;) {
+            for (; ; ) {
                 // addLast
                 try {
                     lock.lockInterruptibly();
@@ -2829,6 +2891,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                     if ((!closing) && (!closed)) {
                         LOG.error("create connection Thread Interrupted, url: " + jdbcUrl, e);
                     }
+                    DruidDataSource.this.doSomethingBeforeCreationThreadBreak();
                     break;
                 } finally {
                     lock.unlock();
@@ -2838,9 +2901,13 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 try {
                     connection = createPhysicalConnection();
-                } catch (SQLException e) {
-                    LOG.error("create connection SQLException, url: " + jdbcUrl + ", errorCode " + e.getErrorCode()
-                            + ", state " + e.getSQLState(), e);
+                } catch (SQLException | RuntimeException e) {
+                    if (e instanceof SQLException) {
+                        LOG.error("create connection error, url: " + jdbcUrl + ", errorCode " + ((SQLException) e).getErrorCode()
+                                + ", state " + ((SQLException) e).getSQLState(), e);
+                    } else {
+                        LOG.error("create connection error", e);
+                    }

                     errorCount++;
                     if (errorCount > connectionErrorRetryAttempts && timeBetweenConnectErrorMillis > 0) {
@@ -2861,17 +2928,16 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                         try {
                             Thread.sleep(timeBetweenConnectErrorMillis);
-                        } catch (InterruptedException interruptEx) {
-                            break;
+                        } catch (InterruptedException ignore) {
                         }
+                        DruidDataSource.this.doSomethingBeforeCreationThreadBreak();
+                        break;
                     }
-                } catch (RuntimeException e) {
-                    LOG.error("create connection RuntimeException", e);
-                    setFailContinuous(true);
-                    continue;
                 } catch (Error e) {
                     LOG.error("create connection Error", e);
                     setFailContinuous(true);
+                    DruidDataSource.this.doSomethingBeforeCreationThreadBreak();
                     break;
                 }
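With these changes the creator thread no longer spins forever against a broken database: once errorCount exceeds connectionErrorRetryAttempts it sleeps timeBetweenConnectErrorMillis once, prepares replacement threads via doSomethingBeforeCreationThreadBreak(), and exits the loop. A compact sketch of that retry-then-break shape; the class and names below are illustrative, not the patch itself:

```java
// Illustrative retry-then-break loop for a connection-producing worker thread.
// The field names echo the Druid settings conceptually; this is not DruidDataSource code.
public class RetryThenBreakWorker extends Thread {
    private final int retryAttempts;        // like connectionErrorRetryAttempts
    private final long retryIntervalMillis; // like timeBetweenConnectErrorMillis
    private final Runnable onBreak;         // like doSomethingBeforeCreationThreadBreak()

    public RetryThenBreakWorker(int retryAttempts, long retryIntervalMillis, Runnable onBreak) {
        this.retryAttempts = retryAttempts;
        this.retryIntervalMillis = retryIntervalMillis;
        this.onBreak = onBreak;
        setDaemon(true);
    }

    @Override
    public void run() {
        int errorCount = 0;
        while (true) {
            try {
                produceOne();     // placeholder for createPhysicalConnection()
                errorCount = 0;   // reset after a success
            } catch (Exception e) {
                errorCount++;
                if (errorCount > retryAttempts && retryIntervalMillis > 0) {
                    try {
                        Thread.sleep(retryIntervalMillis);
                    } catch (InterruptedException ignore) {
                    }
                    onBreak.run(); // hand over to the watchdog / next getConnection()
                    break;         // stop hammering the database
                }
            }
        }
    }

    private void produceOne() throws Exception {
        // placeholder
    }
}
```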
@@ -2892,11 +2958,49 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 }
             }
         }
+
+        public boolean isStarted() {
+            return started;
+        }
+
+        public void setStarted(boolean started) {
+            this.started = started;
+        }
+    }
+
+    // Periodically checks the state of the creator thread: when the creator thread is terminated, a new creator
+    // thread is constructed but not started, so that it does not hammer the database with retries; a daemon thread
+    // is therefore needed to periodically check the thread state and help start it.
+    private class PeriodDetectionThread extends Thread {
+
+        public PeriodDetectionThread(String name) {
+            super(name);
+            this.setDaemon(true);
+        }
+
+        public void run() {
+            while (true) {
+                synchronized (DruidDataSource.this) {
+                    // The creator thread has been replaced and there are threads waiting for connections, so wake
+                    // the creator thread proactively; otherwise getConnection is what wakes it up.
+                    if (!createConnectionThread.started && !destroyConnectionThread.started && notEmptyWaitThreadCount > 0) {
+                        createConnectionThread.setStarted(true);
+                        createConnectionThread.start();
+                        destroyConnectionThread.setStarted(true);
+                        destroyConnectionThread.start();
+                    }
+                }
+                try {
+                    Thread.sleep(30000);
+                } catch (InterruptedException ignore) {
+                    break;
+                }
+            }
+        }
     }

     public class DestroyConnectionThread extends Thread {
-        public DestroyConnectionThread(String name){
+        private volatile boolean started = true;
+
+        public DestroyConnectionThread(String name) {
             super(name);
             this.setDaemon(true);
         }
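PeriodDetectionThread acts as a daemon watchdog: after the creator thread breaks, doSomethingBeforeCreationThreadBreak() prepares fresh, unstarted creator/destroyer threads, and the watchdog checks every 30 seconds and starts them if callers are already waiting for connections, so the pool can recover even without a new getConnection() call. A self-contained sketch of such a watchdog; only the 30-second period mirrors the patch, everything else is illustrative:

```java
// Illustrative watchdog: periodically restarts a replaced worker thread when
// consumers are waiting. Not the DruidDataSource implementation itself.
public class WatchdogDemo {
    private Thread worker;                 // swapped for a fresh, unstarted thread on failure
    private volatile int waitingConsumers; // analogous to notEmptyWaitThreadCount

    public WatchdogDemo() {
        worker = newWorker();
        Thread watchdog = new Thread(() -> {
            while (true) {
                synchronized (this) {
                    // Restart only when someone is actually waiting; otherwise the
                    // next consumer call is expected to start the worker itself.
                    if (!worker.isAlive() && waitingConsumers > 0) {
                        worker = newWorker();
                        worker.start();
                    }
                }
                try {
                    Thread.sleep(30_000); // same 30 s period as the patch
                } catch (InterruptedException e) {
                    break;
                }
            }
        }, "watchdog");
        watchdog.setDaemon(true);
        watchdog.start();
    }

    private Thread newWorker() {
        Thread t = new Thread(() -> { /* produce connections */ }, "worker");
        t.setDaemon(true);
        return t;
    }
}
```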
@@ -2904,7 +3008,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         public void run() {
             initedLatch.countDown();

-            for (;;) {
+            for (; ; ) {
                 // delete from the front
                 try {
                     if (closed || closing) {
@@ -2928,6 +3032,13 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 }
             }
         }
+
+        public boolean isStarted() {
+            return started;
+        }
+
+        public void setStarted(boolean started) {
+            this.started = started;
+        }
     }

     public class DestroyTask implements Runnable {
@@ -2948,14 +3059,14 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
     public class LogStatsThread extends Thread {
-        public LogStatsThread(String name){
+        public LogStatsThread(String name) {
             super(name);
             this.setDaemon(true);
         }

         public void run() {
             try {
-                for (;;) {
+                for (; ; ) {
                     try {
                         logStats();
                     } catch (Exception e) {
@@ -2981,7 +3092,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         try {
             Iterator<DruidPooledConnection> iter = activeConnections.keySet().iterator();

-            for (; iter.hasNext();) {
+            for (; iter.hasNext(); ) {
                 DruidPooledConnection pooledConnection = iter.next();

                 if (pooledConnection.isRunning()) {
@@ -3033,7 +3144,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
                 }

                 buf.append("ownerThread current state is " + pooledConnection.getOwnerThread().getState()
                         + ", current stackTrace\n");
                 trace = pooledConnection.getOwnerThread().getStackTrace();
                 for (int i = 0; i < trace.length; i++) {
                     buf.append("\tat ");
@@ -3049,7 +3160,9 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         return removeCount;
     }

-    /** Instance key */
+    /**
+     * Instance key
+     */
     protected String instanceKey = null;

     public Reference getReference() throws NamingException {
@@ -3130,7 +3243,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         for (int i = 0; i < poolingCount; ++i) {
             DruidConnectionHolder connection = connections[i];
             if ((onFatalError || fatalErrorIncrement > 0) && (lastFatalErrorTimeMillis > connection.connectTimeMillis)) {
                 keepAliveConnections[keepAliveCount++] = connection;
                 continue;
             }
@@ -3754,7 +3867,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         }

         if (this.statLogger != null
                 && (this.statLogger.getClass() == iface || DruidDataSourceStatLogger.class == iface)) {
             return true;
         }
@@ -3770,7 +3883,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         }

         if (this.statLogger != null
                 && (this.statLogger.getClass() == iface || DruidDataSourceStatLogger.class == iface)) {
             return (T) statLogger;
         }
@@ -3814,7 +3927,7 @@ public class DruidDataSource extends DruidAbstractDataSource implements DruidDat
         }

         int fillCount = 0;
-        for (;;) {
+        for (; ; ) {
             try {
                 lock.lockInterruptibly();
             } catch (InterruptedException e) {
