
Pull request #1: REPORT-74418 Legacy drivers such as Informix do not implement some Connection interface methods, causing Druid's connection-creation thread to exit

Merge in ~RINOUX/base-third from release/11.0 to final/11.0

* commit 'eb730e83a7c7554ea93d60ca956e442f55b8c3b2':
  REPORT-74418 Legacy drivers such as Informix do not implement some Connection interface methods, causing Druid's connection-creation thread to exit
  REPORT-74418 Legacy drivers such as Informix do not implement some Connection interface methods, causing Druid's connection-creation thread to exit
  REPORT-74418 Legacy drivers such as Informix do not implement some Connection interface methods, causing Druid's connection-creation thread to exit
final/11.0
rinoux, 2 years ago
commit 35ade87a64
  1. fine-druid/readme.MD (17 lines changed)
  2. fine-druid/src/main/java/com/fr/third/alibaba/druid/pool/DruidConnectionHolder.java (172 lines changed)

fine-druid/readme.MD (17 lines changed)

@@ -8,11 +8,12 @@
> Because some data source characteristics and default configurations need to be adapted, Druid carries a number of local modifications; the modifications listed below must be preserved in every subsequent update.
| Version | Date | FineReport change |
|-------|------------|-------------------------------------------------------------------------------------|
| 1.2.9 | 2022-04-27 | DruidAbstractDataSource.testConnectionInternal() checks timeBetweenEvictionRunsMillis > 0 |
| 1.2.9 | 2022-04-27 | MysqlUtils.getLastPacketReceivedTimeMs adds a driver check |
| 1.2.9 | 2022-05-05 | MysqlUtils.getLastPacketReceivedTimeMs distinguishes connection implementations by class loader; no longer uses a global variable |
| 1.2.9 | 2022-05-05 | com.fr.third.alibaba.druid.util.Utils.loadClass now loads classes from the thread context class loader first |
| 1.2.9 | 2022-05-05 | Restored com.fr.third.alibaba.druid.pool.DruidDataSourceFactory support for hibernate configuration properties |
| 1.2.9 | 2022-05-10 | Restored the create-thread start/stop mechanism in DruidDataSource |
| Version | Date | Task ID | FineReport change |
|-------|------------|----|------------------------------------------------------------------------------------|
| 1.2.9 | 2022-04-27 | | DruidAbstractDataSource.testConnectionInternal() checks timeBetweenEvictionRunsMillis > 0 |
| 1.2.9 | 2022-04-27 | | MysqlUtils.getLastPacketReceivedTimeMs adds a driver check |
| 1.2.9 | 2022-05-05 | | MysqlUtils.getLastPacketReceivedTimeMs distinguishes connection implementations by class loader; no longer uses a global variable |
| 1.2.9 | 2022-05-05 | | com.fr.third.alibaba.druid.util.Utils.loadClass now loads classes from the thread context class loader first |
| 1.2.9 | 2022-05-05 | | Restored com.fr.third.alibaba.druid.pool.DruidDataSourceFactory support for hibernate configuration properties |
| 1.2.9 | 2022-05-10 | | Restored the create-thread start/stop mechanism in DruidDataSource |
| 1.2.9 | 2022-06-27 | REPORT-74418 | DruidConnectionHolder initialization catches AbstractMethodError so that a getHoldability failure cannot make the create thread exit |
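For context on the new REPORT-74418 row, here is a minimal sketch of the defensive pattern it refers to (not the exact patch; the real change is in the DruidConnectionHolder diff below). The key point is that a driver compiled against a pre-JDBC-3.0 Connection interface raises AbstractMethodError, which is an Error rather than a SQLException, so only a catch of Throwable keeps Druid's connection-creating thread alive. The class name and fallback value below are illustrative assumptions.

```java
import java.sql.Connection;
import java.sql.ResultSet;

// Illustrative sketch only; the actual change lives in DruidConnectionHolder.
final class HoldabilityProbe {

    /**
     * Reads the holdability defensively. Legacy drivers (e.g. some Informix
     * versions) never implemented Connection.getHoldability(), so the call
     * fails with AbstractMethodError instead of SQLException; catching
     * Throwable covers both cases.
     */
    static int safeHoldability(Connection conn) {
        try {
            return conn.getHoldability();
        } catch (Throwable t) { // SQLException, AbstractMethodError, ...
            // Illustrative fallback; the real code simply keeps the field's default.
            return ResultSet.CLOSE_CURSORS_AT_COMMIT;
        }
    }
}
```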

fine-druid/src/main/java/com/fr/third/alibaba/druid/pool/DruidConnectionHolder.java (172 lines changed)

@@ -15,84 +15,86 @@
*/
package com.fr.third.alibaba.druid.pool;
import com.fr.third.alibaba.druid.DbType;
import com.fr.third.alibaba.druid.pool.DruidAbstractDataSource.PhysicalConnectionInfo;
import com.fr.third.alibaba.druid.proxy.jdbc.WrapperProxy;
import com.fr.third.alibaba.druid.support.logging.Log;
import com.fr.third.alibaba.druid.support.logging.LogFactory;
import com.fr.third.alibaba.druid.util.JdbcUtils;
import com.fr.third.alibaba.druid.util.Utils;
import javax.sql.ConnectionEventListener;
import javax.sql.StatementEventListener;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.locks.ReentrantLock;
import javax.sql.ConnectionEventListener;
import javax.sql.StatementEventListener;
import com.fr.third.alibaba.druid.DbType;
import com.fr.third.alibaba.druid.pool.DruidAbstractDataSource.PhysicalConnectionInfo;
import com.fr.third.alibaba.druid.proxy.jdbc.WrapperProxy;
import com.fr.third.alibaba.druid.support.logging.Log;
import com.fr.third.alibaba.druid.support.logging.LogFactory;
import com.fr.third.alibaba.druid.util.JdbcConstants;
import com.fr.third.alibaba.druid.util.JdbcUtils;
import com.fr.third.alibaba.druid.util.Utils;
/**
* @author wenshao [szujobs@hotmail.com]
*/
public final class DruidConnectionHolder {
private final static Log LOG = LogFactory.getLog(DruidConnectionHolder.class);
public static boolean holdabilityUnsupported = false;
private final static Log LOG = LogFactory.getLog(DruidConnectionHolder.class);
protected final DruidAbstractDataSource dataSource;
protected final long connectionId;
protected final Connection conn;
static Set<DbType> holdabilityUnsupportedDbTypes = new HashSet<>(Arrays.asList(DbType.sybase, DbType.db2, DbType.hive, DbType.odps));
public static boolean holdabilityUnsupported = false;
protected final DruidAbstractDataSource dataSource;
protected final long connectionId;
protected final Connection conn;
protected final List<ConnectionEventListener> connectionEventListeners = new CopyOnWriteArrayList<ConnectionEventListener>();
protected final List<StatementEventListener> statementEventListeners = new CopyOnWriteArrayList<StatementEventListener>();
protected final long connectTimeMillis;
protected volatile long lastActiveTimeMillis;
protected volatile long lastExecTimeMillis;
protected volatile long lastKeepTimeMillis;
protected volatile long lastValidTimeMillis;
protected long useCount = 0;
private long keepAliveCheckCount = 0;
private long lastNotEmptyWaitNanos;
private final long createNanoSpan;
protected PreparedStatementPool statementPool;
protected final List<Statement> statementTrace = new ArrayList<Statement>(2);
protected final boolean defaultReadOnly;
protected final int defaultHoldability;
protected final int defaultTransactionIsolation;
protected final boolean defaultAutoCommit;
protected boolean underlyingReadOnly;
protected int underlyingHoldability;
protected int underlyingTransactionIsolation;
protected boolean underlyingAutoCommit;
protected volatile boolean discard = false;
protected volatile boolean active = false;
protected final Map<String, Object> variables;
protected final Map<String, Object> globleVariables;
final ReentrantLock lock = new ReentrantLock();
protected String initSchema;
protected final List<StatementEventListener> statementEventListeners = new CopyOnWriteArrayList<StatementEventListener>();
protected final long connectTimeMillis;
protected volatile long lastActiveTimeMillis;
protected volatile long lastExecTimeMillis;
protected volatile long lastKeepTimeMillis;
protected volatile long lastValidTimeMillis;
protected long useCount = 0;
private long keepAliveCheckCount = 0;
private long lastNotEmptyWaitNanos;
private final long createNanoSpan;
protected PreparedStatementPool statementPool;
protected final List<Statement> statementTrace = new ArrayList<Statement>(2);
protected final boolean defaultReadOnly;
protected final int defaultHoldability;
protected final int defaultTransactionIsolation;
protected final boolean defaultAutoCommit;
protected boolean underlyingReadOnly;
protected int underlyingHoldability;
protected int underlyingTransactionIsolation;
protected boolean underlyingAutoCommit;
protected volatile boolean discard = false;
protected volatile boolean active = false;
protected final Map<String, Object> variables;
protected final Map<String, Object> globleVariables;
final ReentrantLock lock = new ReentrantLock();
protected String initSchema;
public DruidConnectionHolder(DruidAbstractDataSource dataSource, PhysicalConnectionInfo pyConnectInfo)
throws SQLException{
throws SQLException {
this(dataSource,
pyConnectInfo.getPhysicalConnection(),
pyConnectInfo.getConnectNanoSpan(),
pyConnectInfo.getVairiables(),
pyConnectInfo.getGlobalVairiables());
pyConnectInfo.getPhysicalConnection(),
pyConnectInfo.getConnectNanoSpan(),
pyConnectInfo.getVairiables(),
pyConnectInfo.getGlobalVairiables());
}
public DruidConnectionHolder(DruidAbstractDataSource dataSource, Connection conn, long connectNanoSpan)
throws SQLException{
throws SQLException {
this(dataSource, conn, connectNanoSpan, null, null);
}
public DruidConnectionHolder(DruidAbstractDataSource dataSource, Connection conn, long connectNanoSpan,
Map<String, Object> variables, Map<String, Object> globleVariables)
throws SQLException{
throws SQLException {
this.dataSource = dataSource;
this.conn = conn;
this.createNanoSpan = connectNanoSpan;
@@ -101,9 +103,7 @@ public final class DruidConnectionHolder {
this.connectTimeMillis = System.currentTimeMillis();
this.lastActiveTimeMillis = connectTimeMillis;
this.lastExecTimeMillis = connectTimeMillis;
this.underlyingAutoCommit = conn.getAutoCommit();
this.lastExecTimeMillis = connectTimeMillis;
if (conn instanceof WrapperProxy) {
this.connectionId = ((WrapperProxy) conn).getId();
@@ -111,46 +111,40 @@ public final class DruidConnectionHolder {
this.connectionId = dataSource.createConnectionId();
}
{
boolean initUnderlyHoldability = !holdabilityUnsupported;
DbType dbType = DbType.of(dataSource.dbTypeName);
if (dbType == DbType.sybase //
|| dbType == DbType.db2 //
|| dbType == DbType.hive //
|| dbType == DbType.odps //
) {
initUnderlyHoldability = false;
}
if (initUnderlyHoldability) {
try {
this.underlyingHoldability = conn.getHoldability();
} catch (UnsupportedOperationException e) {
holdabilityUnsupported = true;
LOG.warn("getHoldability unsupported", e);
} catch (SQLFeatureNotSupportedException e) {
holdabilityUnsupported = true;
LOG.warn("getHoldability unsupported", e);
} catch (SQLException e) {
// bug fixed for hive jdbc-driver
if ("Method not supported".equals(e.getMessage())) {
holdabilityUnsupported = true;
}
LOG.warn("getHoldability error", e);
}
// The following are driver default values, used when the connection is reset
// autoCommit
this.underlyingAutoCommit = conn.getAutoCommit();
// holdability
DbType dbType = DbType.of(dataSource.dbTypeName);
boolean initUnderlyingHoldability = !holdabilityUnsupported;
if (holdabilityUnsupportedDbTypes.contains(dbType)) {
initUnderlyingHoldability = false;
}
if (initUnderlyingHoldability) {
try {
this.underlyingHoldability = conn.getHoldability();
} catch (Throwable e) {
holdabilityUnsupported = true;
LOG.warn("getHoldability error", e);
}
}
this.underlyingReadOnly = conn.isReadOnly();
// readOnly
try {
this.underlyingReadOnly = conn.isReadOnly();
} catch (Throwable e) {
LOG.warn("isReadOnly error", e);
}
// transactionIsolation
try {
this.underlyingTransactionIsolation = conn.getTransactionIsolation();
} catch (SQLException e) {
// compatible for alibaba corba
if ("HY000".equals(e.getSQLState())
|| "com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException".equals(e.getClass().getName())) {
// skip
} else {
throw e;
}
} catch (Throwable e) {
LOG.warn("getTransactionIsolation error", e);
}
this.defaultHoldability = underlyingHoldability;

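To make the failure mode concrete, here is a hypothetical, self-contained demo (not part of the patch; the class name is made up). It simulates a legacy driver via a dynamic proxy whose getHoldability() throws AbstractMethodError. With the old code only SQLException and its subclasses were caught, so the Error escaped the DruidConnectionHolder constructor and terminated Druid's connection-creating thread; with the patched catch (Throwable) the default value is kept and connection creation continues.

```java
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.sql.Connection;

public class LegacyDriverSimulation {

    public static void main(String[] args) {
        // A Connection whose getHoldability() behaves like an old driver that
        // was compiled before the method existed in java.sql.Connection.
        Connection legacy = (Connection) Proxy.newProxyInstance(
                Connection.class.getClassLoader(),
                new Class<?>[]{Connection.class},
                new InvocationHandler() {
                    @Override
                    public Object invoke(Object proxy, Method method, Object[] args) {
                        if ("getHoldability".equals(method.getName())) {
                            throw new AbstractMethodError("getHoldability not implemented");
                        }
                        return null; // other methods are irrelevant for this demo
                    }
                });

        int holdability = 0; // same implicit default as the holder's int field
        try {
            holdability = legacy.getHoldability();
        } catch (Throwable t) { // patched behaviour: the Error no longer escapes
            System.out.println("getHoldability failed, keeping default: " + t);
        }
        System.out.println("holder initialised, holdability=" + holdability);
    }
}
```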