From 7daf99e302a7000f7752873acb7cae437cc8a3c5 Mon Sep 17 00:00:00 2001
From: Paultagoras
Date: Wed, 28 Aug 2024 21:29:39 -0400
Subject: [PATCH 01/21] VERY MUCH still a WIP of refactoring the JDBC driver to use V2

---
 .../clickhouse/jdbc/AbstractResultSet.java    | 712 ------------------
 .../com/clickhouse/jdbc/ClickHouseArray.java  |  95 ---
 .../com/clickhouse/jdbc/ClickHouseBlob.java   |  76 --
 .../com/clickhouse/jdbc/ClickHouseClob.java   |  91 ---
 .../clickhouse/jdbc/ClickHouseConnection.java | 325 --------
 .../clickhouse/jdbc/ClickHouseDataSource.java |   2 +-
 .../clickhouse/jdbc/ClickHouseResultSet.java  |  26 +-
 .../clickhouse/jdbc/ClickHouseStatement.java  |  42 --
 .../com/clickhouse/jdbc/ClickHouseXml.java    |  69 --
 .../clickhouse/jdbc/CombinedResultSet.java    | 575 --------------
 .../java/com/clickhouse/jdbc/JdbcWrapper.java |  18 +-
 11 files changed, 15 insertions(+), 2016 deletions(-)
 delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/AbstractResultSet.java
 delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseArray.java
 delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseBlob.java
 delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseClob.java
 delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseConnection.java
 delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseStatement.java
 delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseXml.java
 delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/CombinedResultSet.java

diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/AbstractResultSet.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/AbstractResultSet.java
deleted file mode 100644
index b1ca6c1c3..000000000
--- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/AbstractResultSet.java
+++ /dev/null
@@ -1,712 +0,0 @@
-package com.clickhouse.jdbc;
-
-import java.io.InputStream;
-import java.io.Reader;
-import java.math.BigDecimal;
-import java.sql.Array;
-import java.sql.Blob;
-import java.sql.Clob;
-import java.sql.Date;
-import java.sql.NClob;
-import java.sql.Ref;
-import java.sql.ResultSet;
-import java.sql.RowId;
-import java.sql.SQLException;
-import java.sql.SQLType;
-import java.sql.SQLWarning;
-import java.sql.SQLXML;
-import java.sql.Time;
-import java.sql.Timestamp;
-
-public abstract class AbstractResultSet extends JdbcWrapper implements ResultSet {
-    protected void ensureOpen() throws SQLException {
-        if (isClosed()) {
-            throw SqlExceptionUtils.clientError("Cannot operate on a closed ResultSet");
-        }
-    }
-
-    @Override
-    public boolean absolute(int row) throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils.unsupportedError("absolute not implemented");
-    }
-
-    @Override
-    public void afterLast() throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils.unsupportedError("afterLast not implemented");
-    }
-
-    @Override
-    public void beforeFirst() throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils.unsupportedError("beforeFirst not implemented");
-    }
-
-    @Override
-    public void cancelRowUpdates() throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils.unsupportedError("cancelRowUpdates not implemented");
-    }
-
-    @Override
-    public void clearWarnings() throws SQLException {
-        ensureOpen();
-    }
-
-    @Override
-    public void deleteRow() throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils.unsupportedError("deleteRow not implemented");
-    }
-
-
@Override - public boolean first() throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("first not implemented"); - } - - @Override - public int getConcurrency() throws SQLException { - ensureOpen(); - - return ResultSet.CONCUR_READ_ONLY; - } - - @Override - public int getFetchDirection() throws SQLException { - ensureOpen(); - - return ResultSet.FETCH_FORWARD; - } - - @Override - public int getHoldability() throws SQLException { - ensureOpen(); - - return ResultSet.HOLD_CURSORS_OVER_COMMIT; - } - - @Override - public int getType() throws SQLException { - ensureOpen(); - - return ResultSet.TYPE_FORWARD_ONLY; - } - - @Override - public SQLWarning getWarnings() throws SQLException { - ensureOpen(); - - return null; - } - - @Override - public void insertRow() throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("insertRow not implemented"); - } - - @Override - public boolean last() throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("last not implemented"); - } - - @Override - public void moveToCurrentRow() throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("moveToCurrentRow not implemented"); - } - - @Override - public void moveToInsertRow() throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("moveToInsertRow not implemented"); - } - - @Override - public boolean previous() throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("previous not implemented"); - } - - @Override - public void refreshRow() throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("refreshRow not implemented"); - } - - @Override - public boolean relative(int rows) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("relative not implemented"); - } - - @Override - public boolean rowDeleted() throws SQLException { - ensureOpen(); - - return false; - } - - @Override - public boolean rowInserted() throws SQLException { - ensureOpen(); - - return false; - } - - @Override - public boolean rowUpdated() throws SQLException { - ensureOpen(); - - return false; - } - - @Override - public void setFetchDirection(int direction) throws SQLException { - ensureOpen(); - - if (direction != ResultSet.FETCH_FORWARD) { - throw SqlExceptionUtils.unsupportedError("only FETCH_FORWARD is supported in setFetchDirection"); - } - } - - @Override - public void updateArray(int columnIndex, Array x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateArray not implemented"); - } - - @Override - public void updateArray(String columnLabel, Array x) throws SQLException { - updateArray(findColumn(columnLabel), x); - } - - @Override - public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateAsciiStream not implemented"); - } - - @Override - public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { - updateAsciiStream(findColumn(columnLabel), x); - } - - @Override - public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateAsciiStream not implemented"); - } - - @Override - public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { - updateAsciiStream(findColumn(columnLabel), x, length); - } - - @Override - public 
void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateAsciiStream not implemented"); - } - - @Override - public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { - updateAsciiStream(findColumn(columnLabel), x, length); - } - - @Override - public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateBigDecimal not implemented"); - } - - @Override - public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { - updateBigDecimal(findColumn(columnLabel), x); - } - - @Override - public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateBinaryStream not implemented"); - } - - @Override - public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { - updateBinaryStream(findColumn(columnLabel), x); - } - - @Override - public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateBinaryStream not implemented"); - } - - @Override - public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { - updateBinaryStream(findColumn(columnLabel), x, length); - } - - @Override - public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateBinaryStream not implemented"); - } - - @Override - public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { - updateBinaryStream(findColumn(columnLabel), x, length); - } - - @Override - public void updateBlob(int columnIndex, Blob x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateBlob not implemented"); - } - - @Override - public void updateBlob(String columnLabel, Blob x) throws SQLException { - updateBlob(findColumn(columnLabel), x); - } - - @Override - public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateBlob not implemented"); - } - - @Override - public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { - updateBlob(findColumn(columnLabel), inputStream); - } - - @Override - public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateBlob not implemented"); - } - - @Override - public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { - updateBlob(findColumn(columnLabel), inputStream, length); - } - - @Override - public void updateBoolean(int columnIndex, boolean x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateBoolean not implemented"); - } - - @Override - public void updateBoolean(String columnLabel, boolean x) throws SQLException { - updateBoolean(findColumn(columnLabel), x); - } - - @Override - public void updateByte(int columnIndex, byte x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateByte not implemented"); - } - - @Override - public void updateByte(String columnLabel, byte x) throws SQLException 
{ - updateByte(findColumn(columnLabel), x); - } - - @Override - public void updateBytes(int columnIndex, byte[] x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateBytes not implemented"); - } - - @Override - public void updateBytes(String columnLabel, byte[] x) throws SQLException { - updateBytes(findColumn(columnLabel), x); - } - - @Override - public void updateCharacterStream(int columnIndex, Reader reader) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateCharacterStream not implemented"); - } - - @Override - public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { - updateCharacterStream(findColumn(columnLabel), reader); - } - - @Override - public void updateCharacterStream(int columnIndex, Reader reader, int length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateCharacterStream not implemented"); - } - - @Override - public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { - updateCharacterStream(findColumn(columnLabel), reader, length); - } - - @Override - public void updateCharacterStream(int columnIndex, Reader reader, long length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateCharacterStream not implemented"); - } - - @Override - public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { - updateCharacterStream(findColumn(columnLabel), reader, length); - } - - @Override - public void updateClob(int columnIndex, Clob x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateClob not implemented"); - } - - @Override - public void updateClob(String columnLabel, Clob x) throws SQLException { - updateClob(findColumn(columnLabel), x); - } - - @Override - public void updateClob(int columnIndex, Reader reader) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateClob not implemented"); - } - - @Override - public void updateClob(String columnLabel, Reader reader) throws SQLException { - updateClob(findColumn(columnLabel), reader); - } - - @Override - public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateClob not implemented"); - } - - @Override - public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { - updateClob(findColumn(columnLabel), reader, length); - } - - @Override - public void updateDate(int columnIndex, Date x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateDate not implemented"); - } - - @Override - public void updateDate(String columnLabel, Date x) throws SQLException { - updateDate(findColumn(columnLabel), x); - } - - @Override - public void updateDouble(int columnIndex, double x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateDouble not implemented"); - } - - @Override - public void updateDouble(String columnLabel, double x) throws SQLException { - updateDouble(findColumn(columnLabel), x); - } - - @Override - public void updateFloat(int columnIndex, float x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateFloat not implemented"); - } - - @Override - public void updateFloat(String columnLabel, float x) throws SQLException { - 
updateFloat(findColumn(columnLabel), x); - } - - @Override - public void updateInt(int columnIndex, int x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateInt not implemented"); - } - - @Override - public void updateInt(String columnLabel, int x) throws SQLException { - updateInt(findColumn(columnLabel), x); - } - - @Override - public void updateLong(int columnIndex, long x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateLong not implemented"); - } - - @Override - public void updateLong(String columnLabel, long x) throws SQLException { - updateLong(findColumn(columnLabel), x); - } - - @Override - public void updateNCharacterStream(int columnIndex, Reader reader) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateNCharacterStream not implemented"); - } - - @Override - public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { - updateNCharacterStream(findColumn(columnLabel), reader); - } - - @Override - public void updateNCharacterStream(int columnIndex, Reader reader, long length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateNCharacterStream not implemented"); - } - - @Override - public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { - updateNCharacterStream(findColumn(columnLabel), reader, length); - } - - @Override - public void updateNClob(int columnIndex, NClob nClob) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateNClob not implemented"); - } - - @Override - public void updateNClob(String columnLabel, NClob nClob) throws SQLException { - updateNClob(findColumn(columnLabel), nClob); - } - - @Override - public void updateNClob(int columnIndex, Reader reader) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateNClob not implemented"); - } - - @Override - public void updateNClob(String columnLabel, Reader reader) throws SQLException { - updateNClob(findColumn(columnLabel), reader); - } - - @Override - public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateNClob not implemented"); - } - - @Override - public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { - updateNClob(findColumn(columnLabel), reader, length); - } - - @Override - public void updateNString(int columnIndex, String nString) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateNString not implemented"); - } - - @Override - public void updateNString(String columnLabel, String nString) throws SQLException { - updateNString(findColumn(columnLabel), nString); - } - - @Override - public void updateNull(int columnIndex) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateNull not implemented"); - } - - @Override - public void updateNull(String columnLabel) throws SQLException { - updateNull(findColumn(columnLabel)); - } - - @Override - public void updateObject(int columnIndex, Object x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateObject not implemented"); - } - - @Override - public void updateObject(String columnLabel, Object x) throws SQLException { - updateObject(findColumn(columnLabel), x); - } - - @Override - public void updateObject(int 
columnIndex, Object x, int scaleOrLength) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateObject not implemented"); - } - - @Override - public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { - updateObject(findColumn(columnLabel), x, scaleOrLength); - } - - @Override - public void updateObject(int columnIndex, Object x, SQLType targetSqlType) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateObject not implemented"); - } - - @Override - public void updateObject(String columnLabel, Object x, SQLType targetSqlType) throws SQLException { - updateObject(findColumn(columnLabel), x, targetSqlType); - } - - @Override - public void updateObject(int columnIndex, Object x, SQLType targetSqlType, int scaleOrLength) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateObject not implemented"); - } - - @Override - public void updateObject(String columnLabel, Object x, SQLType targetSqlType, int scaleOrLength) - throws SQLException { - updateObject(findColumn(columnLabel), x, targetSqlType, scaleOrLength); - } - - @Override - public void updateRef(int columnIndex, Ref x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateRef not implemented"); - } - - @Override - public void updateRef(String columnLabel, Ref x) throws SQLException { - updateRef(findColumn(columnLabel), x); - } - - @Override - public void updateRow() throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateRow not implemented"); - } - - @Override - public void updateRowId(int columnIndex, RowId x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateRowId not implemented"); - } - - @Override - public void updateRowId(String columnLabel, RowId x) throws SQLException { - updateRowId(findColumn(columnLabel), x); - } - - @Override - public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateSQLXML not implemented"); - } - - @Override - public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { - updateSQLXML(findColumn(columnLabel), xmlObject); - } - - @Override - public void updateShort(int columnIndex, short x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateShort not implemented"); - } - - @Override - public void updateShort(String columnLabel, short x) throws SQLException { - updateShort(findColumn(columnLabel), x); - } - - @Override - public void updateString(int columnIndex, String x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateString not implemented"); - } - - @Override - public void updateString(String columnLabel, String x) throws SQLException { - updateString(findColumn(columnLabel), x); - } - - @Override - public void updateTime(int columnIndex, Time x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateTime not implemented"); - } - - @Override - public void updateTime(String columnLabel, Time x) throws SQLException { - updateTime(findColumn(columnLabel), x); - } - - @Override - public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("updateTimestamp not implemented"); - } - - @Override - public void updateTimestamp(String columnLabel, 
Timestamp x) throws SQLException { - updateTimestamp(findColumn(columnLabel), x); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseArray.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseArray.java deleted file mode 100644 index 3bd83d413..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseArray.java +++ /dev/null @@ -1,95 +0,0 @@ -package com.clickhouse.jdbc; - -import java.sql.Array; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Map; - -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.data.ClickHouseColumn; - -public class ClickHouseArray implements Array { - private final int columnIndex; - private ClickHouseResultSet resultSet; - - protected ClickHouseArray(ClickHouseResultSet resultSet, int columnIndex) throws SQLException { - this.resultSet = ClickHouseChecker.nonNull(resultSet, "ResultSet"); - resultSet.ensureRead(columnIndex); - this.columnIndex = columnIndex; - } - - protected void ensureValid() throws SQLException { - if (resultSet == null) { - throw SqlExceptionUtils.clientError("Cannot operate on a freed Array object"); - } - } - - protected ClickHouseColumn getBaseColumn() { - return resultSet.columns.get(columnIndex - 1).getArrayBaseColumn(); - } - - @Override - public String getBaseTypeName() throws SQLException { - ensureValid(); - - return getBaseColumn().getDataType().name(); - } - - @Override - public int getBaseType() throws SQLException { - ensureValid(); - - return resultSet.mapper.toSqlType(getBaseColumn(), resultSet.defaultTypeMap); - } - - @Override - public Object getArray() throws SQLException { - ensureValid(); - - return resultSet.getValue(columnIndex).asObject(); - } - - @Override - public Object getArray(Map> map) throws SQLException { - return getArray(); - } - - @Override - public Object getArray(long index, int count) throws SQLException { - ensureValid(); - - throw SqlExceptionUtils.unsupportedError("getArray not implemented"); - } - - @Override - public Object getArray(long index, int count, Map> map) throws SQLException { - return getArray(index, count); - } - - @Override - public ResultSet getResultSet() throws SQLException { - ensureValid(); - - throw SqlExceptionUtils.unsupportedError("getResultSet not implemented"); - } - - @Override - public ResultSet getResultSet(Map> map) throws SQLException { - return getResultSet(); - } - - @Override - public ResultSet getResultSet(long index, int count) throws SQLException { - return getResultSet(); - } - - @Override - public ResultSet getResultSet(long index, int count, Map> map) throws SQLException { - return getResultSet(); - } - - @Override - public void free() throws SQLException { - this.resultSet = null; - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseBlob.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseBlob.java deleted file mode 100644 index 2aa23e44d..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseBlob.java +++ /dev/null @@ -1,76 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.InputStream; -import java.io.OutputStream; -import java.sql.Blob; -import java.sql.SQLException; - -public class ClickHouseBlob implements Blob { - - @Override - public long length() throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public byte[] getBytes(long pos, int length) throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public 
InputStream getBinaryStream() throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public long position(byte[] pattern, long start) throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public long position(Blob pattern, long start) throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public int setBytes(long pos, byte[] bytes) throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public int setBytes(long pos, byte[] bytes, int offset, int len) throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public OutputStream setBinaryStream(long pos) throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public void truncate(long len) throws SQLException { - // TODO Auto-generated method stub - - } - - @Override - public void free() throws SQLException { - // TODO Auto-generated method stub - - } - - @Override - public InputStream getBinaryStream(long pos, long length) throws SQLException { - // TODO Auto-generated method stub - return null; - } - -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseClob.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseClob.java deleted file mode 100644 index 0a6b7afe4..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseClob.java +++ /dev/null @@ -1,91 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Reader; -import java.io.Writer; -import java.sql.Clob; -import java.sql.NClob; -import java.sql.SQLException; - -public class ClickHouseClob implements NClob { - - @Override - public long length() throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public String getSubString(long pos, int length) throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public Reader getCharacterStream() throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public InputStream getAsciiStream() throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public long position(String searchstr, long start) throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public long position(Clob searchstr, long start) throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public int setString(long pos, String str) throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public int setString(long pos, String str, int offset, int len) throws SQLException { - // TODO Auto-generated method stub - return 0; - } - - @Override - public OutputStream setAsciiStream(long pos) throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public Writer setCharacterStream(long pos) throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public void truncate(long len) throws SQLException { - // TODO Auto-generated method stub - - } - - @Override - public void free() throws SQLException { - // TODO Auto-generated method stub - - } - - @Override - public Reader getCharacterStream(long pos, long length) throws SQLException { - // TODO Auto-generated method stub - return null; - } - -} diff --git 
a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseConnection.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseConnection.java deleted file mode 100644 index 60b519dd6..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseConnection.java +++ /dev/null @@ -1,325 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.Serializable; -import java.net.URI; -import java.sql.CallableStatement; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Calendar; -import java.util.Collections; -import java.util.Map; -import java.util.Optional; -import java.util.TimeZone; - -import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseTransaction; -import com.clickhouse.client.ClickHouseSimpleResponse; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.data.ClickHouseValue; -import com.clickhouse.data.ClickHouseVersion; -import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; - -public interface ClickHouseConnection extends Connection { - static final String COLUMN_ELEMENT = "element"; - static final String COLUMN_ARRAY = "array"; - - // The name of the application currently utilizing the connection - static final String PROP_APPLICATION_NAME = "ApplicationName"; - static final String PROP_CUSTOM_HTTP_HEADERS = "CustomHttpHeaders"; - static final String PROP_CUSTOM_HTTP_PARAMS = "CustomHttpParameters"; - // The name of the user that the application using the connection is performing - // work for. This may not be the same as the user name that was used in - // establishing the connection. - // private static final String PROP_CLIENT_USER = "ClientUser"; - // The hostname of the computer the application using the connection is running - // on. 
- // private static final String PROP_CLIENT_HOST = "ClientHostname"; - - @Override - default ClickHouseArray createArrayOf(String typeName, Object[] elements) throws SQLException { - ClickHouseConfig config = getConfig(); - ClickHouseColumn col = ClickHouseColumn.of(COLUMN_ELEMENT, typeName); - ClickHouseColumn arrCol = ClickHouseColumn.of(COLUMN_ARRAY, ClickHouseDataType.Array, false, col); - ClickHouseValue val = arrCol.newValue(config); - if (elements == null && !col.isNestedType() && !col.isNullable()) { - int nullAsDefault = getJdbcConfig().getNullAsDefault(); - if (nullAsDefault > 1) { - val.resetToDefault(); - } else if (nullAsDefault < 1) { - throw SqlExceptionUtils - .clientError(ClickHouseUtils.format("Cannot set null to non-nullable column [%s]", col)); - } - } else { - val.update(elements); - } - ClickHouseResultSet rs = new ClickHouseResultSet(getCurrentDatabase(), ClickHouseSqlStatement.DEFAULT_TABLE, - createStatement(), ClickHouseSimpleResponse.of(config, Collections.singletonList(arrCol), - new Object[][] { new Object[] { val.asObject() } })); - rs.next(); - return new ClickHouseArray(rs, 1); - } - - @Override - default ClickHouseBlob createBlob() throws SQLException { - return new ClickHouseBlob(); - } - - @Override - default ClickHouseClob createClob() throws SQLException { - return new ClickHouseClob(); - } - - @Override - default ClickHouseStruct createStruct(String typeName, Object[] attributes) throws SQLException { - return new ClickHouseStruct(typeName, attributes); - } - - @Override - default ClickHouseXml createSQLXML() throws SQLException { - return new ClickHouseXml(); - } - - @Override - default ClickHouseStatement createStatement() throws SQLException { - return createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - - @Override - default ClickHouseStatement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { - return createStatement(resultSetType, resultSetConcurrency, ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - - @Override - ClickHouseStatement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) - throws SQLException; - - @Override - default CallableStatement prepareCall(String sql) throws SQLException { - return prepareCall(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - - @Override - default CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - return prepareCall(sql, resultSetType, resultSetConcurrency, ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - - @Override - default CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, - int resultSetHoldability) throws SQLException { - throw SqlExceptionUtils.unsupportedError("prepareCall not implemented"); - } - - @Override - default PreparedStatement prepareStatement(String sql) throws SQLException { - return prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - - @Override - default PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { - if (autoGeneratedKeys != Statement.NO_GENERATED_KEYS) { - // not entirely true, what if the table engine is JDBC? 
- throw SqlExceptionUtils.unsupportedError("Only NO_GENERATED_KEYS is supported"); - } - - return prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - - @Override - default PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { - // not entirely true, what if the table engine is JDBC? - throw SqlExceptionUtils.unsupportedError("ClickHouse does not support auto generated keys"); - } - - @Override - default PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { - // not entirely true, what if the table engine is JDBC? - throw SqlExceptionUtils.unsupportedError("ClickHouse does not support auto generated keys"); - } - - @Override - default PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) - throws SQLException { - return prepareStatement(sql, resultSetType, resultSetConcurrency, ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - - /** - * Starts a new transaction. It's no-op for a newly started transaction. - * - * @throws SQLException when current transaction is active state or not able to - * start new transaction - */ - void begin() throws SQLException; - - /** - * Gets configuration tied to this connection. - * - * @return non-null configuration - */ - ClickHouseConfig getConfig(); - - /** - * Checks whether custom setting is allowed or not. - * - * @return true if custom setting is allowed; false otherwise - */ - boolean allowCustomSetting(); - - /** - * Gets current database. {@link #getSchema()} is similar but it will check if - * connection is closed or not hence may throw {@link SQLException}. - * - * @return non-null database name - */ - String getCurrentDatabase(); - - /** - * Sets current database. - * - * @param database non-empty database name - * @param check whether to check if the database exists or not - * @throws SQLException when failed to change current database - */ - void setCurrentDatabase(String database, boolean check) throws SQLException; - - /** - * Gets current user. - * - * @return non-null user name - */ - String getCurrentUser(); - - /** - * Gets default calendar which can be used to create timestamp. - * - * @return non-null calendar - */ - Calendar getDefaultCalendar(); - - /** - * Gets effective time zone. When - * {@link com.clickhouse.client.ClickHouseConfig#isUseServerTimeZone()} returns - * {@code false}, - * {@link com.clickhouse.client.ClickHouseConfig#getUseTimeZone()} - * will be used as effective time zone, which will be used for reading and - * writing timestamp values. - * - * @return effective time zone - */ - Optional getEffectiveTimeZone(); - - /** - * Gets cached value of {@code TimeZone.getDefault()}. - * - * @return non-null cached JVM time zone - */ - TimeZone getJvmTimeZone(); - - /** - * Gets server time zone, which is either same as result of - * {@code select timezone()}, or the overrided value from - * {@link com.clickhouse.client.ClickHouseConfig#getServerTimeZone()}. - * - * @return non-null server time zone - */ - TimeZone getServerTimeZone(); - - /** - * Gets server version. - * - * @return non-null server version - */ - ClickHouseVersion getServerVersion(); - - /** - * Gets current transaction. - * - * @return current transaction, which could be null - */ - ClickHouseTransaction getTransaction(); - - /** - * Gets URI of the connection. - * - * @return URI of the connection - */ - URI getUri(); - - /** - * Gets JDBC-specific configuration. 
- * - * @return non-null JDBC-specific configuration - */ - JdbcConfig getJdbcConfig(); - - /** - * Gets JDBC type mapping. Same as {@code getJdbcConfig().getMapper()}. - * - * @return non-null JDBC type mapping - */ - default JdbcTypeMapping getJdbcTypeMapping() { - return getJdbcConfig().getDialect(); - } - - /** - * Gets max insert block size. Pay attention that INSERT into one partition in - * one table of MergeTree family up to max_insert_block_size rows is - * transactional. - * - * @return value of max_insert_block_size - */ - long getMaxInsertBlockSize(); - - /** - * Checks whether transaction is supported. - * - * @return true if transaction is supported; false otherwise - */ - boolean isTransactionSupported(); - - /** - * Checks whether implicit transaction is supported. - * - * @return true if implicit transaction is supported; false otherwise - */ - boolean isImplicitTransactionSupported(); - - /** - * Creates a new query ID. - * - * @return universal unique query ID - */ - String newQueryId(); - - /** - * Parses the given sql. - * - * @param sql sql to parse - * @param config configuration which might be used for parsing, could be null - * @return non-null parsed sql statements - * @deprecated will be removed in 0.5, please use - * {@link #parse(String, ClickHouseConfig, Map)} instead - */ - @Deprecated - default ClickHouseSqlStatement[] parse(String sql, ClickHouseConfig config) { - return parse(sql, config, null); - } - - /** - * Parses the given sql. - * - * @param sql sql to parse - * @param config configuration which might be used for parsing, could be null - * @param settings server settings - * @return non-null parsed sql statements - */ - ClickHouseSqlStatement[] parse(String sql, ClickHouseConfig config, Map settings); -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java index 2c2d0876a..112c1f6ba 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java @@ -13,7 +13,7 @@ import java.util.Properties; import java.util.logging.Logger; -public class ClickHouseDataSource extends JdbcWrapper implements DataSource { +public class ClickHouseDataSource implements DataSource, JdbcWrapper { private final String url; private final Properties props; diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSet.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSet.java index be2679b05..89bfc4e14 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSet.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSet.java @@ -12,19 +12,7 @@ import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.StandardCharsets; -import java.sql.Array; -import java.sql.Blob; -import java.sql.Clob; -import java.sql.Date; -import java.sql.NClob; -import java.sql.Ref; -import java.sql.ResultSetMetaData; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLXML; -import java.sql.Statement; -import java.sql.Time; -import java.sql.Timestamp; +import java.sql.*; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; @@ -36,14 +24,13 @@ import java.util.Map; import java.util.TimeZone; -import com.clickhouse.client.ClickHouseConfig; import com.clickhouse.client.ClickHouseResponse; import 
com.clickhouse.data.ClickHouseColumn;
 import com.clickhouse.data.ClickHouseRecord;
 import com.clickhouse.data.ClickHouseUtils;
 import com.clickhouse.data.ClickHouseValue;
 
-public class ClickHouseResultSet extends AbstractResultSet {
+public class ClickHouseResultSet implements ResultSet, JdbcWrapper {
     private ClickHouseRecord currentRow;
     private Iterator<ClickHouseRecord> rowCursor;
     private int rowNumber;
@@ -51,10 +38,9 @@ public class ClickHouseResultSet extends AbstractResultSet {
 
     protected final String database;
     protected final String table;
-    protected final ClickHouseStatement statement;
+    protected final Statement statement;
     protected final ClickHouseResponse response;
 
-    protected final ClickHouseConfig config;
     protected final boolean wrapObject;
     protected final List<ClickHouseColumn> columns;
     protected final Calendar defaultCalendar;
@@ -96,8 +82,7 @@ public class ClickHouseResultSet {
         this.nullAsDefault = false;
     }
 
-    public ClickHouseResultSet(String database, String table, ClickHouseStatement statement,
-            ClickHouseResponse response) throws SQLException {
+    public ClickHouseResultSet(String database, String table, Statement statement, ClickHouseResponse response) throws SQLException {
         if (database == null || table == null || statement == null || response == null) {
             throw new IllegalArgumentException("Non-null database, table, statement, and response are required");
         }
@@ -107,8 +92,7 @@ public ClickHouseResultSet(String database, String table, ClickHouseStatement st
         this.statement = statement;
         this.response = response;
 
-        ClickHouseConnection conn = statement.getConnection();
-        this.config = statement.getConfig();
+        Connection conn = statement.getConnection();
         this.wrapObject = statement.getConnection().getJdbcConfig().useWrapperObject();
         this.defaultCalendar = conn.getDefaultCalendar();
 
diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseStatement.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseStatement.java
deleted file mode 100644
index 3a3bdc36a..000000000
--- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseStatement.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package com.clickhouse.jdbc;
-
-import java.io.OutputStream;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-import com.clickhouse.client.ClickHouseConfig;
-import com.clickhouse.client.ClickHouseRequest;
-import com.clickhouse.client.ClickHouseRequest.Mutation;
-
-public interface ClickHouseStatement extends Statement {
-    @Override
-    ClickHouseConnection getConnection() throws SQLException;
-
-    ClickHouseConfig getConfig();
-
-    /**
-     * Gets mirrored output stream.
-     *
-     * @return mirrored output stream, could be null
-     */
-    OutputStream getMirroredOutput();
-
-    /**
-     * Sets mirrored output stream, which will be later used for dumping all
-     * {@link java.sql.ResultSet} generated by this statement, via
-     * {@link com.clickhouse.data.ClickHouseInputStream#setCopyToTarget(OutputStream)}.
- * - * @param out mirrored output stream, could be null - */ - void setMirroredOutput(OutputStream out); - - int getNullAsDefault(); - - void setNullAsDefault(int level); - - ClickHouseRequest getRequest(); - - default Mutation write() { - return getRequest().write(); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseXml.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseXml.java deleted file mode 100644 index a18741625..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseXml.java +++ /dev/null @@ -1,69 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Reader; -import java.io.Writer; -import java.sql.SQLException; -import java.sql.SQLXML; - -import javax.xml.transform.Result; -import javax.xml.transform.Source; - -public class ClickHouseXml implements SQLXML { - - @Override - public void free() throws SQLException { - // TODO Auto-generated method stub - - } - - @Override - public InputStream getBinaryStream() throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public OutputStream setBinaryStream() throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public Reader getCharacterStream() throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public Writer setCharacterStream() throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public String getString() throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public void setString(String value) throws SQLException { - // TODO Auto-generated method stub - - } - - @Override - public T getSource(Class sourceClass) throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public T setResult(Class resultClass) throws SQLException { - // TODO Auto-generated method stub - return null; - } - -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/CombinedResultSet.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/CombinedResultSet.java deleted file mode 100644 index 5dd15c653..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/CombinedResultSet.java +++ /dev/null @@ -1,575 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.InputStream; -import java.io.Reader; -import java.math.BigDecimal; -import java.net.URL; -import java.sql.Array; -import java.sql.Blob; -import java.sql.Clob; -import java.sql.Date; -import java.sql.NClob; -import java.sql.Ref; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLXML; -import java.sql.Statement; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.Calendar; -import java.util.Collection; -import java.util.Map; - -/** - * Wrapper of multiple ResultSets. 
- */ -public class CombinedResultSet extends AbstractResultSet { - private final ResultSet[] results; - - private int nextIndex; - private ResultSet current; - private int rowNumber; - private boolean isClosed; - - protected ResultSet current() throws SQLException { - if (current == null) { - throw new SQLException("No result to access", SqlExceptionUtils.SQL_STATE_NO_DATA); - } - return current; - } - - protected boolean hasNext() throws SQLException { - if (current == null) { - return false; - } else if (current.next()) { - return true; - } else if (nextIndex >= results.length) { - return false; - } - - boolean hasNext = false; - while (nextIndex < results.length) { - if (current != null) { - current.close(); - } - - current = results[nextIndex++]; - if (current != null && current.next()) { - hasNext = true; - break; - } - } - - return hasNext; - } - - public CombinedResultSet(ResultSet... results) { - this.nextIndex = 0; - this.rowNumber = 0; - this.isClosed = false; - if (results == null || results.length == 0) { - this.results = new ResultSet[0]; - this.current = null; - } else { - this.results = results; - for (ResultSet rs : results) { - this.nextIndex++; - if (this.current == null && rs != null) { - this.current = rs; - break; - } - } - } - } - - public CombinedResultSet(Collection results) { - this.nextIndex = 0; - this.rowNumber = 0; - this.isClosed = false; - if (results == null || results.isEmpty()) { - this.results = new ResultSet[0]; - this.current = null; - } else { - int len = results.size(); - this.results = new ResultSet[len]; - int i = 0; - for (ResultSet rs : results) { - this.results[i++] = rs; - if (this.current == null && rs != null) { - this.current = rs; - this.nextIndex = i; - } - } - if (this.nextIndex == 0) { - this.nextIndex = len; - } - } - } - - @Override - public boolean next() throws SQLException { - if (hasNext()) { - this.rowNumber++; - return true; - } - - return false; - } - - @Override - public void close() throws SQLException { - for (ResultSet rs : results) { - if (rs == null) { - continue; - } - - try { - rs.close(); - } catch (Exception e) { - // ignore - } - } - - isClosed = true; - } - - @Override - public boolean wasNull() throws SQLException { - return current().wasNull(); - } - - @Override - public String getString(int columnIndex) throws SQLException { - return current().getString(columnIndex); - } - - @Override - public boolean getBoolean(int columnIndex) throws SQLException { - return current().getBoolean(columnIndex); - } - - @Override - public byte getByte(int columnIndex) throws SQLException { - return current().getByte(columnIndex); - } - - @Override - public short getShort(int columnIndex) throws SQLException { - return current().getShort(columnIndex); - } - - @Override - public int getInt(int columnIndex) throws SQLException { - return current().getInt(columnIndex); - } - - @Override - public long getLong(int columnIndex) throws SQLException { - return current().getLong(columnIndex); - } - - @Override - public float getFloat(int columnIndex) throws SQLException { - return current().getFloat(columnIndex); - } - - @Override - public double getDouble(int columnIndex) throws SQLException { - return current().getDouble(columnIndex); - } - - @Override - public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { - return current().getBigDecimal(columnIndex, scale); - } - - @Override - public byte[] getBytes(int columnIndex) throws SQLException { - return current().getBytes(columnIndex); - } - - @Override - public Date 
getDate(int columnIndex) throws SQLException { - return current().getDate(columnIndex); - } - - @Override - public Time getTime(int columnIndex) throws SQLException { - return current().getTime(columnIndex); - } - - @Override - public Timestamp getTimestamp(int columnIndex) throws SQLException { - return current().getTimestamp(columnIndex); - } - - @Override - public InputStream getAsciiStream(int columnIndex) throws SQLException { - return current().getAsciiStream(columnIndex); - } - - @Override - public InputStream getUnicodeStream(int columnIndex) throws SQLException { - return current().getUnicodeStream(columnIndex); - } - - @Override - public InputStream getBinaryStream(int columnIndex) throws SQLException { - return current().getBinaryStream(columnIndex); - } - - @Override - public String getString(String columnLabel) throws SQLException { - return current().getString(columnLabel); - } - - @Override - public boolean getBoolean(String columnLabel) throws SQLException { - return current().getBoolean(columnLabel); - } - - @Override - public byte getByte(String columnLabel) throws SQLException { - return current().getByte(columnLabel); - } - - @Override - public short getShort(String columnLabel) throws SQLException { - return current().getShort(columnLabel); - } - - @Override - public int getInt(String columnLabel) throws SQLException { - return current().getInt(columnLabel); - } - - @Override - public long getLong(String columnLabel) throws SQLException { - return current().getLong(columnLabel); - } - - @Override - public float getFloat(String columnLabel) throws SQLException { - return current().getFloat(columnLabel); - } - - @Override - public double getDouble(String columnLabel) throws SQLException { - return current().getDouble(columnLabel); - } - - @Override - public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { - return current().getBigDecimal(columnLabel, scale); - } - - @Override - public byte[] getBytes(String columnLabel) throws SQLException { - return current().getBytes(columnLabel); - } - - @Override - public Date getDate(String columnLabel) throws SQLException { - return current().getDate(columnLabel); - } - - @Override - public Time getTime(String columnLabel) throws SQLException { - return current().getTime(columnLabel); - } - - @Override - public Timestamp getTimestamp(String columnLabel) throws SQLException { - return current().getTimestamp(columnLabel); - } - - @Override - public InputStream getAsciiStream(String columnLabel) throws SQLException { - return current().getAsciiStream(columnLabel); - } - - @Override - public InputStream getUnicodeStream(String columnLabel) throws SQLException { - return current().getUnicodeStream(columnLabel); - } - - @Override - public InputStream getBinaryStream(String columnLabel) throws SQLException { - return current().getBinaryStream(columnLabel); - } - - @Override - public String getCursorName() throws SQLException { - return current().getCursorName(); - } - - @Override - public ResultSetMetaData getMetaData() throws SQLException { - return current().getMetaData(); - } - - @Override - public Object getObject(int columnIndex) throws SQLException { - return current().getObject(columnIndex); - } - - @Override - public Object getObject(String columnLabel) throws SQLException { - return current().getObject(columnLabel); - } - - @Override - public int findColumn(String columnLabel) throws SQLException { - return current().findColumn(columnLabel); - } - - @Override - public Reader getCharacterStream(int 
columnIndex) throws SQLException { - return current().getCharacterStream(columnIndex); - } - - @Override - public Reader getCharacterStream(String columnLabel) throws SQLException { - return current().getCharacterStream(columnLabel); - } - - @Override - public BigDecimal getBigDecimal(int columnIndex) throws SQLException { - return current().getBigDecimal(columnIndex); - } - - @Override - public BigDecimal getBigDecimal(String columnLabel) throws SQLException { - return current().getBigDecimal(columnLabel); - } - - @Override - public boolean isBeforeFirst() throws SQLException { - return rowNumber == 0 && current().isBeforeFirst(); - } - - @Override - public boolean isAfterLast() throws SQLException { - if (nextIndex >= results.length) { - return current().isAfterLast(); - } else { - ResultSet rs = current(); - boolean isAfterLast = false; - while ((isAfterLast = rs.isAfterLast()) && next()) { - rs = current(); - } - return isAfterLast; - } - } - - @Override - public boolean isFirst() throws SQLException { - return rowNumber == 1 && current().isFirst(); - } - - @Override - public boolean isLast() throws SQLException { - if (nextIndex >= results.length) { - return current().isLast(); - } else { - ResultSet rs = current(); - boolean isLast = false; - while ((isLast = rs.isLast()) && next()) { - rs = current(); - } - return isLast; - } - } - - @Override - public int getRow() throws SQLException { - return rowNumber; - } - - @Override - public void setFetchSize(int rows) throws SQLException { - ensureOpen(); - } - - @Override - public int getFetchSize() throws SQLException { - return current().getFetchSize(); - } - - @Override - public boolean rowUpdated() throws SQLException { - return current().rowUpdated(); - } - - @Override - public boolean rowInserted() throws SQLException { - return current().rowInserted(); - } - - @Override - public boolean rowDeleted() throws SQLException { - return current().rowDeleted(); - } - - @Override - public Statement getStatement() throws SQLException { - return current().getStatement(); - } - - @Override - public Object getObject(int columnIndex, Map> map) throws SQLException { - return current().getObject(columnIndex, map); - } - - @Override - public Ref getRef(int columnIndex) throws SQLException { - return current().getRef(columnIndex); - } - - @Override - public Blob getBlob(int columnIndex) throws SQLException { - return current().getBlob(columnIndex); - } - - @Override - public Clob getClob(int columnIndex) throws SQLException { - return current().getClob(columnIndex); - } - - @Override - public Array getArray(int columnIndex) throws SQLException { - return current().getArray(columnIndex); - } - - @Override - public Object getObject(String columnLabel, Map> map) throws SQLException { - return current().getObject(columnLabel, map); - } - - @Override - public Ref getRef(String columnLabel) throws SQLException { - return current().getRef(columnLabel); - } - - @Override - public Blob getBlob(String columnLabel) throws SQLException { - return current().getBlob(columnLabel); - } - - @Override - public Clob getClob(String columnLabel) throws SQLException { - return current().getClob(columnLabel); - } - - @Override - public Array getArray(String columnLabel) throws SQLException { - return current().getArray(columnLabel); - } - - @Override - public Date getDate(int columnIndex, Calendar cal) throws SQLException { - return current().getDate(columnIndex, cal); - } - - @Override - public Date getDate(String columnLabel, Calendar cal) throws SQLException { - return 
current().getDate(columnLabel, cal); - } - - @Override - public Time getTime(int columnIndex, Calendar cal) throws SQLException { - return current().getTime(columnIndex, cal); - } - - @Override - public Time getTime(String columnLabel, Calendar cal) throws SQLException { - return current().getTime(columnLabel, cal); - } - - @Override - public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { - return current().getTimestamp(columnIndex, cal); - } - - @Override - public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { - return current().getTimestamp(columnLabel, cal); - } - - @Override - public URL getURL(int columnIndex) throws SQLException { - return current().getURL(columnIndex); - } - - @Override - public URL getURL(String columnLabel) throws SQLException { - return current().getURL(columnLabel); - } - - @Override - public RowId getRowId(int columnIndex) throws SQLException { - return current().getRowId(columnIndex); - } - - @Override - public RowId getRowId(String columnLabel) throws SQLException { - return current().getRowId(columnLabel); - } - - @Override - public boolean isClosed() throws SQLException { - return isClosed; - } - - @Override - public NClob getNClob(int columnIndex) throws SQLException { - return current().getNClob(columnIndex); - } - - @Override - public NClob getNClob(String columnLabel) throws SQLException { - return current().getNClob(columnLabel); - } - - @Override - public SQLXML getSQLXML(int columnIndex) throws SQLException { - return current().getSQLXML(columnIndex); - } - - @Override - public SQLXML getSQLXML(String columnLabel) throws SQLException { - return current().getSQLXML(columnLabel); - } - - @Override - public String getNString(int columnIndex) throws SQLException { - return current().getNString(columnIndex); - } - - @Override - public String getNString(String columnLabel) throws SQLException { - return current().getNString(columnLabel); - } - - @Override - public Reader getNCharacterStream(int columnIndex) throws SQLException { - return current().getNCharacterStream(columnIndex); - } - - @Override - public Reader getNCharacterStream(String columnLabel) throws SQLException { - return current().getNCharacterStream(columnLabel); - } - - @Override - public T getObject(int columnIndex, Class type) throws SQLException { - return current().getObject(columnIndex, type); - } - - @Override - public T getObject(String columnLabel, Class type) throws SQLException { - return current().getObject(columnLabel, type); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java index e9169d03b..430d09927 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java @@ -2,16 +2,16 @@ import java.sql.SQLException; -public abstract class JdbcWrapper { - public T unwrap(Class iface) throws SQLException { - if (iface.isAssignableFrom(getClass())) { - return iface.cast(this); - } - - throw SqlExceptionUtils.unsupportedError("Cannot unwrap to " + iface.getName()); +interface JdbcWrapper { + default boolean isWrapperFor(Class iface) throws SQLException { + return iface != null && iface.isAssignableFrom(getClass()); } - public boolean isWrapperFor(Class iface) throws SQLException { - return iface.isAssignableFrom(getClass()); + @SuppressWarnings("unchecked") + default T unwrap(Class iface) throws SQLException { + if (isWrapperFor(iface)) 
{ + iface.cast(this); + } + throw SqlExceptionUtils.unsupportedError("Cannot unwrap to " + iface.getName()); } } From 70ed0b204b462b31f9e0d113c62dc2fcca77d150 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Wed, 28 Aug 2024 21:34:24 -0400 Subject: [PATCH 02/21] WIP Connection changes --- .../main/java/com/clickhouse/jdbc/ClickHouseDataSource.java | 5 +++-- .../src/main/java/com/clickhouse/jdbc/JdbcWrapper.java | 3 ++- .../clickhouse/jdbc/internal/ClickHouseConnectionImpl.java | 4 +--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java index 112c1f6ba..d67a98994 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java @@ -8,6 +8,7 @@ import com.clickhouse.jdbc.internal.ClickHouseJdbcUrlParser.ConnectionInfo; import java.io.PrintWriter; +import java.sql.Connection; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.Properties; @@ -42,12 +43,12 @@ public ClickHouseDataSource(String url, Properties properties) throws SQLExcepti } @Override - public ClickHouseConnection getConnection() throws SQLException { + public Connection getConnection() throws SQLException { return new ClickHouseConnectionImpl(connInfo); } @Override - public ClickHouseConnection getConnection(String username, String password) throws SQLException { + public Connection getConnection(String username, String password) throws SQLException { if (username == null || username.isEmpty()) { throw SqlExceptionUtils.clientError("Non-empty user name is required"); } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java index 430d09927..d719ca8c7 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java @@ -1,8 +1,9 @@ package com.clickhouse.jdbc; import java.sql.SQLException; +import java.sql.Wrapper; -interface JdbcWrapper { +public interface JdbcWrapper extends Wrapper { default boolean isWrapperFor(Class iface) throws SQLException { return iface != null && iface.isAssignableFrom(getClass()); } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImpl.java index e0a66466d..6968cfa28 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImpl.java @@ -53,10 +53,8 @@ import com.clickhouse.data.ClickHouseVersion; import com.clickhouse.logging.Logger; import com.clickhouse.logging.LoggerFactory; -import com.clickhouse.jdbc.ClickHouseConnection; import com.clickhouse.jdbc.ClickHouseDatabaseMetaData; import com.clickhouse.jdbc.ClickHouseDriver; -import com.clickhouse.jdbc.ClickHouseStatement; import com.clickhouse.jdbc.JdbcConfig; import com.clickhouse.jdbc.JdbcParameterizedQuery; import com.clickhouse.jdbc.JdbcParseHandler; @@ -68,7 +66,7 @@ import com.clickhouse.jdbc.parser.ParseHandler; import com.clickhouse.jdbc.parser.StatementType; -public class ClickHouseConnectionImpl extends JdbcWrapper implements ClickHouseConnection { +public class ClickHouseConnectionImpl 
implements Connection, JdbcWrapper { private static final Logger log = LoggerFactory.getLogger(ClickHouseConnectionImpl.class); static final String SETTING_READONLY = "readonly"; From 1f3c83d4d1ec159b50701e1a4a9107738ff9af7b Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Wed, 4 Sep 2024 03:44:22 -0400 Subject: [PATCH 03/21] Removing old code - based on other (similar) connectors we shouldn't need so much specificity --- clickhouse-jdbc/README.md | 159 +-- .../clickhouse/jdbc/ClickHouseDataSource.java | 95 -- .../jdbc/ClickHouseDatabaseMetaData.java | 14 +- .../com/clickhouse/jdbc/ClickHouseDriver.java | 201 --- .../ClickHouseJdbcUrlParser.java | 4 +- .../jdbc/ClickHousePreparedStatement.java | 223 --- .../clickhouse/jdbc/ClickHouseResultSet.java | 755 ---------- .../jdbc/ClickHouseScrollableResultSet.java | 75 - .../com/clickhouse/jdbc/ConnectionImpl.java | 308 +++++ .../com/clickhouse/jdbc/DataSourceImpl.java | 68 + .../java/com/clickhouse/jdbc/DriverImpl.java | 50 + .../jdbc/JdbcParameterizedQuery.java | 162 --- .../main/java/com/clickhouse/jdbc/Main.java | 1062 --------------- .../jdbc/PreparedStatementImpl.java | 580 ++++++++ .../com/clickhouse/jdbc/ResultSetImpl.java | 992 ++++++++++++++ .../com/clickhouse/jdbc/StatementImpl.java | 289 ++++ .../internal/AbstractPreparedStatement.java | 173 --- .../internal/ClickHouseConnectionImpl.java | 1209 ----------------- .../internal/ClickHouseParameterMetaData.java | 93 -- .../internal/ClickHouseStatementImpl.java | 1013 -------------- .../internal/InputBasedPreparedStatement.java | 486 ------- .../jdbc/internal/JdbcSavepoint.java | 42 - .../jdbc/internal/JdbcTransaction.java | 159 --- .../internal/SqlBasedPreparedStatement.java | 659 --------- .../StreamBasedPreparedStatement.java | 350 ----- .../internal/TableBasedPreparedStatement.java | 291 ---- .../META-INF/services/java.sql.Driver | 2 +- .../clickhouse/jdbc/AccessManagementTest.java | 5 +- .../jdbc/ClickHousePreparedStatementTest.java | 11 +- .../jdbc/ClickHouseResultSetTest.java | 4 +- .../jdbc/CombinedResultSetTest.java | 32 +- ...ourceTest.java => DataSourceImplTest.java} | 21 +- ...useDriverTest.java => DriverImplTest.java} | 6 +- .../clickhouse/jdbc/JdbcIntegrationTest.java | 13 +- .../com/clickhouse/jdbc/JdbcIssuesTest.java | 15 +- .../ClickHouseConnectionImplTest.java | 5 +- .../internal/ClickHouseJdbcUrlParserTest.java | 3 +- 37 files changed, 2350 insertions(+), 7279 deletions(-) delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDriver.java rename clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/{internal => }/ClickHouseJdbcUrlParser.java (97%) delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHousePreparedStatement.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSet.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseScrollableResultSet.java create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DataSourceImpl.java create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DriverImpl.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcParameterizedQuery.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Main.java create mode 100644 
clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/AbstractPreparedStatement.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImpl.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseParameterMetaData.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseStatementImpl.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/InputBasedPreparedStatement.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcSavepoint.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcTransaction.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/SqlBasedPreparedStatement.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/StreamBasedPreparedStatement.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/TableBasedPreparedStatement.java rename clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/{ClickHouseDataSourceTest.java => DataSourceImplTest.java} (88%) rename clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/{ClickHouseDriverTest.java => DriverImplTest.java} (86%) diff --git a/clickhouse-jdbc/README.md b/clickhouse-jdbc/README.md index e3abc2599..8c5271b7c 100644 --- a/clickhouse-jdbc/README.md +++ b/clickhouse-jdbc/README.md @@ -1,162 +1,7 @@ # ClickHouse JDBC driver -THe official JDBC driver for ClickHouse +The official JDBC driver for ClickHouse ## Documentation See the [ClickHouse website](https://clickhouse.com/docs/en/integrations/language-clients/java/jdbc) for the full documentation entry. ## Examples -For more example please check [here](https://github.com/ClickHouse/clickhouse-java/tree/main/examples/jdbc). -## Upgrade path -### to 0.3.2+ - -Please refer to cheatsheet below to upgrade JDBC driver to 0.3.2+. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-# / Item / <= 0.3.1-patch / >= 0.3.2
-
-1. pom.xml
-
-   <= 0.3.1-patch:
-
-       <dependency>
-           <groupId>ru.yandex.clickhouse</groupId>
-           <artifactId>clickhouse-jdbc</artifactId>
-           <version>0.3.1-patch</version>
-           <classifier>shaded</classifier>
-           <exclusions>
-               <exclusion>
-                   <groupId>*</groupId>
-                   <artifactId>*</artifactId>
-               </exclusion>
-           </exclusions>
-       </dependency>
-
-   >= 0.3.2:
-
-       <dependency>
-           <groupId>com.clickhouse</groupId>
-           <artifactId>clickhouse-jdbc</artifactId>
-           <version>0.3.2-patch11</version>
-           <classifier>all</classifier>
-           <exclusions>
-               <exclusion>
-                   <groupId>*</groupId>
-                   <artifactId>*</artifactId>
-               </exclusion>
-           </exclusions>
-       </dependency>
-
-2. driver class
-
-   <= 0.3.1-patch: ru.yandex.clickhouse.ClickHouseDriver
-   >= 0.3.2:       com.clickhouse.jdbc.ClickHouseDriver
-
-3. connection string
-
-   <= 0.3.1-patch:
-
-       jdbc:clickhouse://[user[:password]@]host:port[/database][?parameters]
-
-   >= 0.3.2:
-
-       jdbc:(ch|clickhouse)[:protocol]://endpoint[,endpoint][/database][?parameters][#tags]
-       endpoint: [protocol://]host[:port][/database][?parameters][#tags]
-       protocol: (grpc|grpcs|http|https|tcp|tcps)
-
-4. custom settings
-
-   <= 0.3.1-patch:
-
-       String jdbcUrl = "jdbc:clickhouse://localhost:8123/default?socket_timeout=6000000"
-           // custom server settings
-           + "&max_bytes_before_external_group_by=16000000000"
-           + "&optimize_aggregation_in_order=0"
-           + "&join_default_strictness=ANY"
-           + "&join_algorithm=auto"
-           + "&max_memory_usage=20000000000";
-
-   >= 0.3.2:
-
-       String jdbcUrl = "jdbc:clickhouse://localhost/default?socket_timeout=6000000"
-           // or properties.setProperty("custom_settings", "a=1,b=2,c=3")
-           + "&custom_settings="
-           // url encoded settings separated by comma
-           + "max_bytes_before_external_group_by%3D16000000000%2C"
-           + "optimize_aggregation_in_order%3D0%2C"
-           + "join_default_strictness%3DANY%2C"
-           + "join_algorithm%3Dauto%2C"
-           + "max_memory_usage%3D20000000000";
-
-5. load balancing
-
-   <= 0.3.1-patch:
-
-       String connString = "jdbc:clickhouse://server1:8123,server2:8123,server3:8123/database";
-       BalancedClickhouseDataSource balancedDs = new BalancedClickhouseDataSource(
-           connString).scheduleActualization(5000, TimeUnit.MILLISECONDS);
-       ClickHouseConnection conn = balancedDs.getConnection("default", "");
-
-   >= 0.3.2:
-
-       String connString = "jdbc:ch://server1,server2,server3/database"
-           + "?load_balancing_policy=random&health_check_interval=5000&failover=2";
-       ClickHouseDataSource ds = new ClickHouseDataSource(connString);
-       ClickHouseConnection conn = ds.getConnection("default", "");
-
-6. DateTime
-
-   <= 0.3.1-patch:
-
-       try (PreparedStatement ps = conn.prepareStatement("insert into mytable(start_datetime, string_value) values(?,?)")) {
-           ps.setObject(1, LocalDateTime.now());
-           ps.setString(2, "value");
-           ps.executeUpdate();
-       }
-
-   >= 0.3.2:
-
-       try (PreparedStatement ps = conn.prepareStatement("insert into mytable(start_datetime, string_value) values(?,?)")) {
-           // resolution of DateTime32 or DateTime without scale is 1 second
-           ps.setObject(1, LocalDateTime.now().truncatedTo(ChronoUnit.SECONDS));
-           ps.setString(2, "value");
-           ps.executeUpdate();
-       }
-
-7. extended API
-
-   <= 0.3.1-patch:
-
-       ClickHouseStatement sth = connection.createStatement();
-       sth.write().send("INSERT INTO test.writer", new ClickHouseStreamCallback() {
-           @Override
-           public void writeTo(ClickHouseRowBinaryStream stream) throws IOException {
-               for (int i = 0; i < 10; i++) {
-                   stream.writeInt32(i);
-                   stream.writeString("Name " + i);
-               }
-           }
-       }, ClickHouseFormat.RowBinary); // RowBinary or Native are supported
-
-   >= 0.3.2:
-
-       // 0.3.2
-       Statement sth = connection.createStatement();
-       sth.unwrap(ClickHouseRequest.class).write().table("test.writer")
-           .format(ClickHouseFormat.RowBinary).data(out -> {
-           for (int i = 0; i < 10; i++) {
-               // write data into the piped stream in current thread
-               BinaryStreamUtils.writeInt32(out, i);
-               BinaryStreamUtils.writeString(out, "Name " + i);
-           }
-       }).sendAndWait();
-
-       // since 0.4
-       PreparedStatement ps = connection.prepareStatement("insert into test.writer format RowBinary");
-       ps.setObject(1, new ClickHouseWriter() {
-           @Override
-           public void write(ClickHouseOutputStream out) throws IOException {
-               for (int i = 0; i < 10; i++) {
-                   // write data into the piped stream in current thread
-                   BinaryStreamUtils.writeInt32(out, i);
-                   BinaryStreamUtils.writeString(out, "Name " + i);
-               }
-           }
-       });
-       // ClickHouseWriter will be executed in a separate thread
-       ps.executeUpdate();
-
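To make the deleted cheatsheet concrete, the following is a minimal sketch of the >= 0.3.2 connection style using only standard `java.sql` API with the driver on the classpath; the class name, host, database, credentials, and query are illustrative assumptions, not part of this patch.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import java.util.Properties;

    public class UpgradeQuickCheck {
        public static void main(String[] args) throws Exception {
            // 0.3.2+ style URL; "ch" is the short form of "clickhouse"
            String url = "jdbc:ch://localhost:8123/default";

            // placeholder credentials for a local test server (assumed keys)
            Properties props = new Properties();
            props.setProperty("user", "default");
            props.setProperty("password", "");

            // plain DriverManager usage; no vendor-specific classes needed
            try (Connection conn = DriverManager.getConnection(url, props);
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("SELECT number FROM system.numbers LIMIT 10")) {
                while (rs.next()) {
                    System.out.println(rs.getLong(1));
                }
            }
        }
    }
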
+For more example please check [here](https://github.com/ClickHouse/clickhouse-java/tree/main/examples/jdbc). \ No newline at end of file diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java deleted file mode 100644 index d67a98994..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDataSource.java +++ /dev/null @@ -1,95 +0,0 @@ -package com.clickhouse.jdbc; - -import javax.sql.DataSource; - -import com.clickhouse.client.config.ClickHouseDefaults; -import com.clickhouse.jdbc.internal.ClickHouseConnectionImpl; -import com.clickhouse.jdbc.internal.ClickHouseJdbcUrlParser; -import com.clickhouse.jdbc.internal.ClickHouseJdbcUrlParser.ConnectionInfo; - -import java.io.PrintWriter; -import java.sql.Connection; -import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; -import java.util.Properties; -import java.util.logging.Logger; - -public class ClickHouseDataSource implements DataSource, JdbcWrapper { - private final String url; - private final Properties props; - - protected final ClickHouseDriver driver; - protected final ConnectionInfo connInfo; - - protected PrintWriter printWriter; - protected int loginTimeoutSeconds = 0; - - public ClickHouseDataSource(String url) throws SQLException { - this(url, new Properties()); - } - - public ClickHouseDataSource(String url, Properties properties) throws SQLException { - if (url == null) { - throw new IllegalArgumentException("Incorrect ClickHouse jdbc url. It must be not null"); - } - this.url = url; - this.props = new Properties(); - if (properties != null && !properties.isEmpty()) { - this.props.putAll(properties); - } - - this.driver = new ClickHouseDriver(); - this.connInfo = ClickHouseJdbcUrlParser.parse(url, properties); - } - - @Override - public Connection getConnection() throws SQLException { - return new ClickHouseConnectionImpl(connInfo); - } - - @Override - public Connection getConnection(String username, String password) throws SQLException { - if (username == null || username.isEmpty()) { - throw SqlExceptionUtils.clientError("Non-empty user name is required"); - } - - if (password == null) { - password = ""; - } - - if (username.equals(props.getProperty(ClickHouseDefaults.USER.getKey())) - && password.equals(props.getProperty(ClickHouseDefaults.PASSWORD.getKey()))) { - return new ClickHouseConnectionImpl(connInfo); - } - - Properties properties = new Properties(); - properties.putAll(this.props); - properties.setProperty(ClickHouseDefaults.USER.getKey(), username); - properties.setProperty(ClickHouseDefaults.PASSWORD.getKey(), password); - return new ClickHouseConnectionImpl(url, properties); - } - - @Override - public PrintWriter getLogWriter() throws SQLException { - return printWriter; - } - - @Override - public void setLogWriter(PrintWriter out) throws SQLException { - printWriter = out; - } - - @Override - public void setLoginTimeout(int seconds) throws SQLException { - loginTimeoutSeconds = seconds; - } - - @Override - public int getLoginTimeout() throws SQLException { - return loginTimeoutSeconds; - } - - public Logger getParentLogger() throws SQLFeatureNotSupportedException { - return ClickHouseDriver.parentLogger; - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaData.java index 9b23aa5f5..19235bc13 100644 --- 
a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaData.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaData.java @@ -46,7 +46,7 @@ protected ResultSet empty(String columns) throws SQLException { } protected ResultSet fixed(String columns, Object[][] values) throws SQLException { - return new ClickHouseResultSet("", "", connection.createStatement(), + return new ResultSetImpl("", "", connection.createStatement(), ClickHouseSimpleResponse.of(connection.getConfig(), ClickHouseColumn.parse(columns), values)); } @@ -66,7 +66,7 @@ protected ResultSet query(String sql, ClickHouseRecordTransformer func, boolean SQLException error = null; try (ClickHouseStatement stmt = connection.createStatement()) { stmt.setLargeMaxRows(0L); - return new ClickHouseResultSet("", "", stmt, + return new ResultSetImpl("", "", stmt, // load everything into memory ClickHouseSimpleResponse.of(stmt.getRequest() .format(ClickHouseFormat.RowBinaryWithNamesAndTypes) @@ -149,17 +149,17 @@ public String getDriverName() throws SQLException { @Override public String getDriverVersion() throws SQLException { - return ClickHouseDriver.driverVersionString; + return DriverImpl.driverVersionString; } @Override public int getDriverMajorVersion() { - return ClickHouseDriver.driverVersion.getMajorVersion(); + return DriverImpl.driverVersion.getMajorVersion(); } @Override public int getDriverMinorVersion() { - return ClickHouseDriver.driverVersion.getMinorVersion(); + return DriverImpl.driverVersion.getMinorVersion(); } @Override @@ -1217,12 +1217,12 @@ public int getDatabaseMinorVersion() throws SQLException { @Override public int getJDBCMajorVersion() throws SQLException { - return ClickHouseDriver.specVersion.getMajorVersion(); + return DriverImpl.specVersion.getMajorVersion(); } @Override public int getJDBCMinorVersion() throws SQLException { - return ClickHouseDriver.specVersion.getMinorVersion(); + return DriverImpl.specVersion.getMinorVersion(); } @Override diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDriver.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDriver.java deleted file mode 100644 index e10aa19f9..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDriver.java +++ /dev/null @@ -1,201 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.Serializable; -import java.sql.Driver; -import java.sql.DriverManager; -import java.sql.DriverPropertyInfo; -import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.ServiceLoader; -import java.util.Map.Entry; - -import com.clickhouse.client.ClickHouseClient; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.config.ClickHouseOption; -import com.clickhouse.data.ClickHouseVersion; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; -import com.clickhouse.jdbc.internal.ClickHouseConnectionImpl; -import com.clickhouse.jdbc.internal.ClickHouseJdbcUrlParser; - -/** - * JDBC driver for ClickHouse. It takes a connection string like below for - * connecting to ClickHouse server: - * {@code jdbc:(ch|clickhouse)[:]://[[:]@][:][/][?,[[,]]} - * - *

- * For examples:
- * <ul>
- * <li>{@code jdbc:clickhouse://localhost:8123/system}</li>
- * <li>{@code jdbc:clickhouse://admin:password@localhost/system?socket_time=30}</li>
- * <li>{@code jdbc:clickhouse://localhost/system?protocol=grpc}</li>
- * </ul>
- */ -public class ClickHouseDriver implements Driver { - private static final Logger log = LoggerFactory.getLogger(ClickHouseDriver.class); - - private static final Map clientSpecificOptions; - - static final String driverVersionString; - static final ClickHouseVersion driverVersion; - static final ClickHouseVersion specVersion; - - static final java.util.logging.Logger parentLogger = java.util.logging.Logger.getLogger("com.clickhouse.jdbc"); - - static { - String str = ClickHouseDriver.class.getPackage().getImplementationVersion(); - if (str != null && !str.isEmpty()) { - char[] chars = str.toCharArray(); - for (int i = 0, len = chars.length; i < len; i++) { - if (Character.isDigit(chars[i])) { - str = str.substring(i); - break; - } - } - driverVersionString = str; - } else { - driverVersionString = ""; - } - driverVersion = ClickHouseVersion.of(driverVersionString); - specVersion = ClickHouseVersion.of(ClickHouseDriver.class.getPackage().getSpecificationVersion()); - - try { - DriverManager.registerDriver(new ClickHouseDriver()); - } catch (SQLException e) { - throw new IllegalStateException(e); - } - - log.debug("ClickHouse Driver %s(JDBC: %s) registered", driverVersion, specVersion); - - // client-specific options - Map m = new LinkedHashMap<>(); - try { - for (ClickHouseClient c : ServiceLoader.load(ClickHouseClient.class, - ClickHouseDriver.class.getClassLoader())) { - Class clazz = c.getOptionClass(); - if (clazz == null || clazz == ClickHouseClientOption.class) { - continue; - } - for (ClickHouseOption o : clazz.getEnumConstants()) { - m.put(o.getKey(), o); - } - } - } catch (Exception e) { - log.warn("Failed to load client-specific options", e); - } - - clientSpecificOptions = Collections.unmodifiableMap(m); - } - - public static Map toClientOptions(Properties props) { - if (props == null || props.isEmpty()) { - return Collections.emptyMap(); - } - - Map options = new HashMap<>(); - for (Entry e : props.entrySet()) { - if (e.getKey() == null || e.getValue() == null) { - continue; - } - - String key = e.getKey().toString(); - ClickHouseOption o = ClickHouseClientOption.fromKey(key); - if (o == null) { - o = clientSpecificOptions.get(key); - } - - if (o != null) { - options.put(o, ClickHouseOption.fromString(e.getValue().toString(), o.getValueType())); - } - } - - return options; - } - - private DriverPropertyInfo create(ClickHouseOption option, Properties props) { - DriverPropertyInfo propInfo = new DriverPropertyInfo(option.getKey(), - props.getProperty(option.getKey(), String.valueOf(option.getEffectiveDefaultValue()))); - propInfo.required = false; - propInfo.description = option.getDescription(); - propInfo.choices = null; - - Class clazz = option.getValueType(); - if (Boolean.class == clazz || boolean.class == clazz) { - propInfo.choices = new String[] { "true", "false" }; - } else if (clazz.isEnum()) { - Object[] values = clazz.getEnumConstants(); - String[] names = new String[values.length]; - int index = 0; - for (Object v : values) { - names[index++] = ((Enum) v).name(); - } - propInfo.choices = names; - } - return propInfo; - } - - @Override - public boolean acceptsURL(String url) throws SQLException { - return url != null && (url.startsWith(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX) - || url.startsWith(ClickHouseJdbcUrlParser.JDBC_ABBREVIATION_PREFIX)); - } - - @Override - public ClickHouseConnection connect(String url, Properties info) throws SQLException { - if (!acceptsURL(url)) { - return null; - } - - log.debug("Creating connection"); - return new 
ClickHouseConnectionImpl(url, info); - } - - @Override - public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException { - try { - info = ClickHouseJdbcUrlParser.parse(url, info).getProperties(); - } catch (Exception e) { - log.error("Could not parse url %s", url, e); - } - - List result = new ArrayList<>(ClickHouseClientOption.values().length * 2); - for (ClickHouseClientOption option : ClickHouseClientOption.values()) { - result.add(create(option, info)); - } - - // and then client-specific options - for (ClickHouseOption option : clientSpecificOptions.values()) { - result.add(create(option, info)); - } - - result.addAll(JdbcConfig.getDriverProperties()); - return result.toArray(new DriverPropertyInfo[0]); - } - - @Override - public int getMajorVersion() { - return driverVersion.getMajorVersion(); - } - - @Override - public int getMinorVersion() { - return driverVersion.getMinorVersion(); - } - - @Override - public boolean jdbcCompliant() { - return false; - } - - @Override - public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException { - return parentLogger; - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParser.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseJdbcUrlParser.java similarity index 97% rename from clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParser.java rename to clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseJdbcUrlParser.java index faf8650aa..ce00c07f9 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParser.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseJdbcUrlParser.java @@ -1,4 +1,4 @@ -package com.clickhouse.jdbc.internal; +package com.clickhouse.jdbc; import java.net.URISyntaxException; import java.sql.SQLException; @@ -12,8 +12,6 @@ import com.clickhouse.data.ClickHouseChecker; import com.clickhouse.data.ClickHouseFormat; import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.jdbc.JdbcConfig; -import com.clickhouse.jdbc.SqlExceptionUtils; public class ClickHouseJdbcUrlParser { public static class ConnectionInfo { diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHousePreparedStatement.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHousePreparedStatement.java deleted file mode 100644 index 1b82d5d1e..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHousePreparedStatement.java +++ /dev/null @@ -1,223 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Reader; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.sql.Blob; -import java.sql.Clob; -import java.sql.Date; -import java.sql.NClob; -import java.sql.PreparedStatement; -import java.sql.Ref; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLXML; -import java.sql.Time; -import java.sql.Timestamp; -import java.sql.Types; - -import com.clickhouse.data.ClickHouseInputStream; -import com.clickhouse.data.format.BinaryStreamUtils; - -public interface ClickHousePreparedStatement extends PreparedStatement { - @Override - default void setNull(int parameterIndex, int sqlType) throws SQLException { - setNull(parameterIndex, sqlType, null); - } - - @Override - default void setBoolean(int parameterIndex, boolean x) throws SQLException { - 
setByte(parameterIndex, x ? (byte) 1 : (byte) 0); - } - - @Override - default void setDate(int parameterIndex, Date x) throws SQLException { - setDate(parameterIndex, x, null); - } - - @Override - default void setTime(int parameterIndex, Time x) throws SQLException { - setTime(parameterIndex, x, null); - } - - @Override - default void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { - setTimestamp(parameterIndex, x, null); - } - - @Override - default void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException { - setCharacterStream(parameterIndex, new InputStreamReader(x, StandardCharsets.US_ASCII), length); - } - - @Override - default void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException { - setCharacterStream(parameterIndex, new InputStreamReader(x, StandardCharsets.UTF_8), length); - } - - @Override - default void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException { - setBinaryStream(parameterIndex, x, (long) length); - } - - @Override - default void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException { - setObject(parameterIndex, x, targetSqlType, 0); - } - - @Override - default void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException { - String s = null; - if (reader != null) { - try { - s = BinaryStreamUtils.readString(reader, length); - } catch (Throwable e) { // IOException and potentially OOM error - throw SqlExceptionUtils.clientError(e); - } - } - - setString(parameterIndex, s); - } - - @Override - default void setRef(int parameterIndex, Ref x) throws SQLException { - throw SqlExceptionUtils.unsupportedError("setRef not implemented"); - } - - @Override - default void setBlob(int parameterIndex, Blob x) throws SQLException { - if (x != null) { - setBinaryStream(parameterIndex, x.getBinaryStream()); - } else { - setNull(parameterIndex, Types.BLOB); - } - } - - @Override - default void setClob(int parameterIndex, Clob x) throws SQLException { - if (x != null) { - setCharacterStream(parameterIndex, x.getCharacterStream()); - } else { - setNull(parameterIndex, Types.CLOB); - } - } - - @Override - default ResultSetMetaData getMetaData() throws SQLException { - ResultSet currentResult = getResultSet(); - if (currentResult != null) { - return currentResult.getMetaData(); - } else if (getLargeUpdateCount() != -1L) { - return null; // Update query - } - - return describeQueryResult(); - } - - default ResultSetMetaData describeQueryResult() throws SQLException { - return null; - } - - @Override - default void setURL(int parameterIndex, URL x) throws SQLException { - if (x != null) { - setString(parameterIndex, String.valueOf(x)); - } else { - setNull(parameterIndex, Types.VARCHAR); - } - } - - @Override - default void setRowId(int parameterIndex, RowId x) throws SQLException { - throw SqlExceptionUtils.unsupportedError("setRowId not implemented"); - } - - @Override - default void setNString(int parameterIndex, String value) throws SQLException { - setString(parameterIndex, value); - } - - @Override - default void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException { - setCharacterStream(parameterIndex, value, length); - } - - @Override - default void setNClob(int parameterIndex, NClob value) throws SQLException { - setClob(parameterIndex, value); - } - - @Override - default void setClob(int parameterIndex, Reader reader, long length) throws SQLException { - 
setCharacterStream(parameterIndex, reader, length); - } - - @Override - default void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException { - setBinaryStream(parameterIndex, inputStream, length); - } - - @Override - default void setNClob(int parameterIndex, Reader reader, long length) throws SQLException { - setClob(parameterIndex, reader, length); - } - - @Override - default void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { - throw SqlExceptionUtils.unsupportedError("setSQLXML not implemented"); - } - - @Override - default void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException { - setCharacterStream(parameterIndex, new InputStreamReader(x, StandardCharsets.US_ASCII), length); - } - - @Override - default void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException { - setBinaryStream(parameterIndex, length < 0L ? x : ClickHouseInputStream.wrap(x, 0, length, null)); - } - - @Override - default void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException { - throw SqlExceptionUtils.unsupportedError("setCharacterStream not implemented"); - } - - @Override - default void setAsciiStream(int parameterIndex, InputStream x) throws SQLException { - setCharacterStream(parameterIndex, new InputStreamReader(x, StandardCharsets.US_ASCII)); - } - - @Override - default void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { - throw SqlExceptionUtils.unsupportedError("setBinaryStream not implemented"); - } - - @Override - default void setCharacterStream(int parameterIndex, Reader reader) throws SQLException { - setCharacterStream(parameterIndex, reader, -1L); - } - - @Override - default void setNCharacterStream(int parameterIndex, Reader value) throws SQLException { - setCharacterStream(parameterIndex, value); - } - - @Override - default void setClob(int parameterIndex, Reader reader) throws SQLException { - setCharacterStream(parameterIndex, reader); - } - - @Override - default void setBlob(int parameterIndex, InputStream inputStream) throws SQLException { - setBinaryStream(parameterIndex, inputStream); - } - - @Override - default void setNClob(int parameterIndex, Reader reader) throws SQLException { - setClob(parameterIndex, reader); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSet.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSet.java deleted file mode 100644 index 89bfc4e14..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSet.java +++ /dev/null @@ -1,755 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Reader; -import java.io.StringReader; -import java.io.UncheckedIOException; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.net.MalformedURLException; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.sql.*; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.util.Calendar; -import java.util.Collections; -import java.util.GregorianCalendar; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; - -import com.clickhouse.client.ClickHouseResponse; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseRecord; -import 
com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.data.ClickHouseValue; - -public class ClickHouseResultSet implements ResultSet, JdbcWrapper { - private ClickHouseRecord currentRow; - private Iterator rowCursor; - private int rowNumber; - private int lastReadColumn; // 1-based - - protected final String database; - protected final String table; - protected final Statement statement; - protected final ClickHouseResponse response; - - protected final boolean wrapObject; - protected final List columns; - protected final Calendar defaultCalendar; - protected final int maxRows; - protected final boolean nullAsDefault; - protected final ClickHouseResultSetMetaData metaData; - - protected final JdbcTypeMapping mapper; - protected final Map> defaultTypeMap; - - // only for testing purpose - ClickHouseResultSet(String database, String table, ClickHouseResponse response) { - this.database = database; - this.table = table; - this.statement = null; - this.response = response; - - this.config = null; - this.wrapObject = false; - this.defaultCalendar = new GregorianCalendar(TimeZone.getTimeZone("UTC")); - - this.mapper = JdbcTypeMapping.getDefaultMapping(); - this.defaultTypeMap = Collections.emptyMap(); - this.currentRow = null; - try { - this.columns = response.getColumns(); - this.metaData = new ClickHouseResultSetMetaData(new JdbcConfig(), database, table, columns, this.mapper, - defaultTypeMap); - - this.rowCursor = response.records().iterator(); - } catch (Exception e) { - throw new IllegalStateException(e); - } - - this.rowNumber = 0; // before the first row - this.lastReadColumn = 0; - - this.maxRows = 0; - this.nullAsDefault = false; - } - - public ClickHouseResultSet(String database, String table, Statement statement, ClickHouseResponse response) throws SQLException { - if (database == null || table == null || statement == null || response == null) { - throw new IllegalArgumentException("Non-null database, table, statement, and response are required"); - } - - this.database = database; - this.table = table; - this.statement = statement; - this.response = response; - - Connection conn = statement.getConnection(); - this.wrapObject = statement.getConnection().getJdbcConfig().useWrapperObject(); - this.defaultCalendar = conn.getDefaultCalendar(); - - OutputStream output = statement.getMirroredOutput(); - if (output != null) { - try { - response.getInputStream().setCopyToTarget(output); - } catch (IOException e) { - throw SqlExceptionUtils.clientError(e); - } - } - - this.mapper = statement.getConnection().getJdbcTypeMapping(); - Map> typeMap = conn.getTypeMap(); - this.defaultTypeMap = typeMap != null && !typeMap.isEmpty() ? 
Collections.unmodifiableMap(typeMap) - : Collections.emptyMap(); - this.currentRow = null; - try { - this.columns = response.getColumns(); - this.metaData = new ClickHouseResultSetMetaData(conn.getJdbcConfig(), database, table, columns, this.mapper, - defaultTypeMap); - - this.rowCursor = response.records().iterator(); - } catch (Exception e) { - throw SqlExceptionUtils.handle(e); - } - - this.rowNumber = 0; // before the first row - this.lastReadColumn = 0; - - this.maxRows = statement.getMaxRows(); - this.nullAsDefault = statement.getNullAsDefault() > 1; - } - - protected void ensureRead(int columnIndex) throws SQLException { - ensureOpen(); - - if (currentRow == null) { - throw new SQLException("No data available for reading", SqlExceptionUtils.SQL_STATE_NO_DATA); - } else if (columnIndex < 1 || columnIndex > columns.size()) { - throw SqlExceptionUtils.clientError(ClickHouseUtils - .format("Column index must between 1 and %d but we got %d", columns.size() + 1, columnIndex)); - } - } - - // this method is mocked in a test, do not make it final :-) - protected List getColumns() { - return metaData.getColumns(); - } - - protected ClickHouseValue getValue(int columnIndex) throws SQLException { - ensureRead(columnIndex); - - ClickHouseValue v = currentRow.getValue(columnIndex - 1); - if (nullAsDefault && v.isNullOrEmpty()) { - v.resetToDefault(); - } - lastReadColumn = columnIndex; - return v; - } - - /** - * Check if there is another row. - * - * @return {@code true} if this result set has another row after the current - * cursor position, {@code false} else - * @throws SQLException if something goes wrong - */ - protected boolean hasNext() throws SQLException { - try { - return (maxRows == 0 || rowNumber < maxRows) && rowCursor.hasNext(); - } catch (Exception e) { - throw SqlExceptionUtils.handle(e); - } - } - - public BigInteger getBigInteger(int columnIndex) throws SQLException { - return getValue(columnIndex).asBigInteger(); - } - - public BigInteger getBigInteger(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asBigInteger(); - } - - public String[] getColumnNames() { - String[] columnNames = new String[columns.size()]; - int index = 0; - for (ClickHouseColumn c : getColumns()) { - columnNames[index++] = c.getColumnName(); - } - return columnNames; - } - - @Override - public void close() throws SQLException { - this.response.close(); - } - - @Override - public int findColumn(String columnLabel) throws SQLException { - ensureOpen(); - - if (columnLabel == null || columnLabel.isEmpty()) { - throw SqlExceptionUtils.clientError("Non-empty column label is required"); - } - - int index = 0; - for (ClickHouseColumn c : columns) { - index++; - if (columnLabel.equalsIgnoreCase(c.getColumnName())) { - return index; - } - } - - throw SqlExceptionUtils.clientError( - ClickHouseUtils.format("Column [%s] does not exist in %d columns", columnLabel, columns.size())); - } - - @Override - public Array getArray(int columnIndex) throws SQLException { - return new ClickHouseArray(this, columnIndex); - } - - @Override - public Array getArray(String columnLabel) throws SQLException { - return new ClickHouseArray(this, findColumn(columnLabel)); - } - - @Override - public InputStream getAsciiStream(int columnIndex) throws SQLException { - ClickHouseValue v = getValue(columnIndex); - return v.isNullOrEmpty() ? 
null : new ByteArrayInputStream(v.asBinary(StandardCharsets.US_ASCII)); - } - - @Override - public InputStream getAsciiStream(String columnLabel) throws SQLException { - return getAsciiStream(findColumn(columnLabel)); - } - - @Override - public BigDecimal getBigDecimal(int columnIndex) throws SQLException { - return getValue(columnIndex).asBigDecimal(); - } - - @Override - public BigDecimal getBigDecimal(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asBigDecimal(); - } - - @Override - public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { - return getValue(columnIndex).asBigDecimal(scale); - } - - @Override - public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { - return getValue(findColumn(columnLabel)).asBigDecimal(scale); - } - - @Override - public InputStream getBinaryStream(int columnIndex) throws SQLException { - ClickHouseValue v = getValue(columnIndex); - return v.isNullOrEmpty() ? null : new ByteArrayInputStream(v.asBinary()); - } - - @Override - public InputStream getBinaryStream(String columnLabel) throws SQLException { - return getBinaryStream(findColumn(columnLabel)); - } - - @Override - public Blob getBlob(int columnIndex) throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public Blob getBlob(String columnLabel) throws SQLException { - return getBlob(findColumn(columnLabel)); - } - - @Override - public boolean getBoolean(int columnIndex) throws SQLException { - return getValue(columnIndex).asBoolean(); - } - - @Override - public boolean getBoolean(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asBoolean(); - } - - @Override - public byte getByte(int columnIndex) throws SQLException { - return getValue(columnIndex).asByte(); - } - - @Override - public byte getByte(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asByte(); - } - - @Override - public byte[] getBytes(int columnIndex) throws SQLException { - return getValue(columnIndex).asBinary(); - } - - @Override - public byte[] getBytes(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asBinary(); - } - - @Override - public Reader getCharacterStream(int columnIndex) throws SQLException { - ClickHouseValue v = getValue(columnIndex); - return v.isNullOrEmpty() ? null : new StringReader(v.asString()); - } - - @Override - public Reader getCharacterStream(String columnLabel) throws SQLException { - return getCharacterStream(findColumn(columnLabel)); - } - - @Override - public Clob getClob(int columnIndex) throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public Clob getClob(String columnLabel) throws SQLException { - return getClob(findColumn(columnLabel)); - } - - @Override - public String getCursorName() throws SQLException { - ensureOpen(); - - // TODO Auto-generated method stub - return null; - } - - @Override - public Date getDate(int columnIndex) throws SQLException { - return getDate(columnIndex, null); - } - - @Override - public Date getDate(String columnLabel) throws SQLException { - return getDate(findColumn(columnLabel), null); - } - - @Override - public Date getDate(int columnIndex, Calendar cal) throws SQLException { - ClickHouseValue value = getValue(columnIndex); - if (value.isNullOrEmpty()) { - return null; - } - - LocalDate d = value.asDate(); - Calendar c = (Calendar) (cal != null ? 
cal : defaultCalendar).clone(); - c.clear(); - c.set(d.getYear(), d.getMonthValue() - 1, d.getDayOfMonth(), 0, 0, 0); - return new Date(c.getTimeInMillis()); - } - - @Override - public Date getDate(String columnLabel, Calendar cal) throws SQLException { - return getDate(findColumn(columnLabel), cal); - } - - @Override - public double getDouble(int columnIndex) throws SQLException { - return getValue(columnIndex).asDouble(); - } - - @Override - public double getDouble(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asDouble(); - } - - @Override - public int getFetchSize() throws SQLException { - ensureOpen(); - - return statement != null ? statement.getFetchSize() : 0; - } - - @Override - public float getFloat(int columnIndex) throws SQLException { - return getValue(columnIndex).asFloat(); - } - - @Override - public float getFloat(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asFloat(); - } - - @Override - public int getInt(int columnIndex) throws SQLException { - return getValue(columnIndex).asInteger(); - } - - @Override - public int getInt(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asInteger(); - } - - @Override - public long getLong(int columnIndex) throws SQLException { - return getValue(columnIndex).asLong(); - } - - @Override - public long getLong(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asLong(); - } - - @Override - public ResultSetMetaData getMetaData() throws SQLException { - ensureOpen(); - - return metaData; - } - - @Override - public Reader getNCharacterStream(int columnIndex) throws SQLException { - return getCharacterStream(columnIndex); - } - - @Override - public Reader getNCharacterStream(String columnLabel) throws SQLException { - return getCharacterStream(findColumn(columnLabel)); - } - - @Override - public NClob getNClob(int columnIndex) throws SQLException { - // TODO Auto-generated method stub - return null; - } - - @Override - public NClob getNClob(String columnLabel) throws SQLException { - return getNClob(findColumn(columnLabel)); - } - - @Override - public String getNString(int columnIndex) throws SQLException { - return getValue(columnIndex).asString(); - } - - @Override - public String getNString(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asString(); - } - - @Override - public Object getObject(int columnIndex) throws SQLException { - return getObject(columnIndex, defaultTypeMap); - } - - @Override - public Object getObject(String columnLabel) throws SQLException { - return getObject(findColumn(columnLabel), defaultTypeMap); - } - - @Override - public Object getObject(int columnIndex, Map> map) throws SQLException { - if (map == null) { - map = defaultTypeMap; - } - - ClickHouseValue v = getValue(columnIndex); - ClickHouseColumn c = columns.get(columnIndex - 1); - - Class javaType = null; - if (!map.isEmpty() && (javaType = map.get(c.getOriginalTypeName())) == null) { - javaType = map.get(c.getDataType().name()); - } - - Object value; - if (!wrapObject) { - value = javaType != null ? v.asObject(javaType) : v.asObject(); - } else if (c.isArray()) { - value = new ClickHouseArray(this, columnIndex); - } else if (c.isTuple() || c.isNested() || c.isMap()) { - value = new ClickHouseStruct(c.getDataType().name(), v.asArray()); - } else { - value = javaType != null ? 
v.asObject(javaType) : v.asObject(); - } - - return value; - } - - @Override - public Object getObject(String columnLabel, Map> map) throws SQLException { - return getObject(findColumn(columnLabel), map); - } - - @Override - public T getObject(int columnIndex, Class type) throws SQLException { - return getValue(columnIndex).asObject(type); - } - - @Override - public T getObject(String columnLabel, Class type) throws SQLException { - return getValue(findColumn(columnLabel)).asObject(type); - } - - @Override - public Ref getRef(int columnIndex) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("getRef not implemented"); - } - - @Override - public Ref getRef(String columnLabel) throws SQLException { - return getRef(findColumn(columnLabel)); - } - - @Override - public int getRow() throws SQLException { - ensureOpen(); - - return rowNumber; - } - - @Override - public RowId getRowId(int columnIndex) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("getRowId not implemented"); - } - - @Override - public RowId getRowId(String columnLabel) throws SQLException { - return getRowId(findColumn(columnLabel)); - } - - @Override - public SQLXML getSQLXML(int columnIndex) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils.unsupportedError("getSQLXML not implemented"); - } - - @Override - public SQLXML getSQLXML(String columnLabel) throws SQLException { - return getSQLXML(findColumn(columnLabel)); - } - - @Override - public short getShort(int columnIndex) throws SQLException { - return getValue(columnIndex).asShort(); - } - - @Override - public short getShort(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asShort(); - } - - @Override - public Statement getStatement() throws SQLException { - ensureOpen(); - - return statement; - } - - @Override - public String getString(int columnIndex) throws SQLException { - return getValue(columnIndex).asString(); - } - - @Override - public String getString(String columnLabel) throws SQLException { - return getValue(findColumn(columnLabel)).asString(); - } - - @Override - public Time getTime(int columnIndex) throws SQLException { - return getTime(columnIndex, null); - } - - @Override - public Time getTime(String columnLabel) throws SQLException { - return getTime(findColumn(columnLabel), null); - } - - @Override - public Time getTime(int columnIndex, Calendar cal) throws SQLException { - ClickHouseValue value = getValue(columnIndex); - if (value.isNullOrEmpty()) { - return null; - } - - // unfortunately java.sql.Time does not support fractional seconds - LocalTime lt = value.asTime(); - - Calendar c = (Calendar) (cal != null ? 
cal : defaultCalendar).clone(); - c.clear(); - c.set(1970, 0, 1, lt.getHour(), lt.getMinute(), lt.getSecond()); - return new Time(c.getTimeInMillis()); - } - - @Override - public Time getTime(String columnLabel, Calendar cal) throws SQLException { - return getTime(findColumn(columnLabel), cal); - } - - @Override - public Timestamp getTimestamp(int columnIndex) throws SQLException { - return getTimestamp(columnIndex, null); - } - - @Override - public Timestamp getTimestamp(String columnLabel) throws SQLException { - return getTimestamp(findColumn(columnLabel), null); - } - - @Override - public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { - ClickHouseValue value = getValue(columnIndex); - if (value.isNullOrEmpty()) { - return null; - } - - ClickHouseColumn column = columns.get(columnIndex - 1); - TimeZone tz = column.getTimeZone(); - LocalDateTime dt = tz == null ? value.asDateTime(column.getScale()) - : value.asOffsetDateTime(column.getScale()).toLocalDateTime(); - - Calendar c = (Calendar) (cal != null ? cal : defaultCalendar).clone(); - c.set(dt.getYear(), dt.getMonthValue() - 1, dt.getDayOfMonth(), dt.getHour(), dt.getMinute(), - dt.getSecond()); - Timestamp timestamp = new Timestamp(c.getTimeInMillis()); - timestamp.setNanos(dt.getNano()); - - return timestamp; - } - - @Override - public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { - return getTimestamp(findColumn(columnLabel), cal); - } - - @Override - public URL getURL(int columnIndex) throws SQLException { - try { - return new URL(getString(columnIndex)); - } catch (MalformedURLException e) { - throw SqlExceptionUtils.clientError(e); - } - } - - @Override - public URL getURL(String columnLabel) throws SQLException { - try { - return new URL(getString(columnLabel)); - } catch (MalformedURLException e) { - throw SqlExceptionUtils.clientError(e); - } - } - - @Override - public InputStream getUnicodeStream(int columnIndex) throws SQLException { - ClickHouseValue v = getValue(columnIndex); - return v.isNullOrEmpty() ? 
null : new ByteArrayInputStream(v.asBinary(StandardCharsets.UTF_8)); - } - - @Override - public InputStream getUnicodeStream(String columnLabel) throws SQLException { - return getUnicodeStream(findColumn(columnLabel)); - } - - @Override - public boolean isAfterLast() throws SQLException { - ensureOpen(); - - return currentRow == null && !hasNext(); - } - - @Override - public boolean isBeforeFirst() throws SQLException { - ensureOpen(); - - return getRow() == 0; - } - - @Override - public boolean isClosed() throws SQLException { - return response.isClosed(); - } - - @Override - public boolean isFirst() throws SQLException { - ensureOpen(); - - return getRow() == 1; - } - - @Override - public boolean isLast() throws SQLException { - ensureOpen(); - - return currentRow != null && !hasNext(); - } - - @Override - public boolean next() throws SQLException { - ensureOpen(); - - lastReadColumn = 0; - boolean hasNext = true; - if (hasNext()) { - try { - currentRow = rowCursor.next(); - } catch (UncheckedIOException e) { - throw SqlExceptionUtils.handle(e); - } - rowNumber++; - } else { - currentRow = null; - hasNext = false; - } - return hasNext; - } - - @Override - public void setFetchSize(int rows) throws SQLException { - ensureOpen(); - } - - @Override - public boolean wasNull() throws SQLException { - ensureOpen(); - - try { - return currentRow != null && lastReadColumn > 0 && getColumns().get(lastReadColumn - 1).isNullable() - && currentRow.getValue(lastReadColumn - 1).isNullOrEmpty(); - } catch (Exception e) { - throw SqlExceptionUtils.handle(e); - } - } - - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - return iface == ClickHouseResponse.class || iface == ClickHouseRecord.class || super.isWrapperFor(iface); - } - - @Override - public T unwrap(Class iface) throws SQLException { - if (iface == ClickHouseResponse.class) { - return iface.cast(response); - } else if (iface == ClickHouseRecord.class) { - return iface.cast(currentRow); - } else { - return super.unwrap(iface); - } - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseScrollableResultSet.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseScrollableResultSet.java deleted file mode 100644 index 2ba831608..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseScrollableResultSet.java +++ /dev/null @@ -1,75 +0,0 @@ -package com.clickhouse.jdbc; - -import java.sql.SQLException; -import java.util.LinkedList; -import java.util.List; - -import com.clickhouse.client.ClickHouseResponse; - -public class ClickHouseScrollableResultSet extends ClickHouseResultSet { - - private final List records; - - public ClickHouseScrollableResultSet(String database, String table, ClickHouseStatement statement, - ClickHouseResponse response) throws SQLException { - super(database, table, statement, response); - - this.records = new LinkedList<>(); - } - - @Override - public int getType() throws SQLException { - return TYPE_SCROLL_INSENSITIVE; - } - - @Override - public void beforeFirst() throws SQLException { - absolute(0); - } - - @Override - public void afterLast() throws SQLException { - absolute(-1); - next(); - } - - @Override - public boolean first() throws SQLException { - return absolute(1); - } - - @Override - public boolean last() throws SQLException { - return absolute(-1); - } - - @Override - public boolean absolute(int row) throws SQLException { - return false; - // TODO implemetation - /* - * if (row == 0) { rowNumber = 0; values = null; return false; } 
else if (row > - * 0) { if (row <= lines.size()) { rowNumber = row; values = lines.get(row - 1); - * return true; } absolute(lines.size()); while (getRow() < row && hasNext()) { - * next(); } if (row == getRow()) { return true; } else { next(); return false; - * } } else { // We have to check the number of total rows while (hasNext()) { - * next(); } if (-row > lines.size()) { // there is not so many rows // Put the - * cursor before the first row return absolute(0); } return - * absolute(lines.size() + 1 + row); } - */ - } - - @Override - public boolean relative(int rows) throws SQLException { - int r = getRow() + rows; - if (r < 0) { - r = 0; - } - return absolute(r); - } - - @Override - public boolean previous() throws SQLException { - return relative(-1); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java new file mode 100644 index 000000000..72fedcaf9 --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -0,0 +1,308 @@ +package com.clickhouse.jdbc; + +import java.sql.*; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.Executor; + +public class ConnectionImpl implements Connection { + @Override + public Statement createStatement() throws SQLException { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql) throws SQLException { + return null; + } + + @Override + public CallableStatement prepareCall(String sql) throws SQLException { + return null; + } + + @Override + public String nativeSQL(String sql) throws SQLException { + return ""; + } + + @Override + public void setAutoCommit(boolean autoCommit) throws SQLException { + + } + + @Override + public boolean getAutoCommit() throws SQLException { + return false; + } + + @Override + public void commit() throws SQLException { + + } + + @Override + public void rollback() throws SQLException { + + } + + @Override + public void close() throws SQLException { + + } + + @Override + public boolean isClosed() throws SQLException { + return false; + } + + @Override + public DatabaseMetaData getMetaData() throws SQLException { + return null; + } + + @Override + public void setReadOnly(boolean readOnly) throws SQLException { + + } + + @Override + public boolean isReadOnly() throws SQLException { + return false; + } + + @Override + public void setCatalog(String catalog) throws SQLException { + + } + + @Override + public String getCatalog() throws SQLException { + return ""; + } + + @Override + public void setTransactionIsolation(int level) throws SQLException { + + } + + @Override + public int getTransactionIsolation() throws SQLException { + return 0; + } + + @Override + public SQLWarning getWarnings() throws SQLException { + return null; + } + + @Override + public void clearWarnings() throws SQLException { + + } + + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + return null; + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + return null; + } + + @Override + public Map<String, Class<?>> getTypeMap() throws SQLException { + return Map.of(); + } + + @Override + public void setTypeMap(Map<String, Class<?>> map) throws SQLException { + + } + + @Override + public void 
setHoldability(int holdability) throws SQLException { + + } + + @Override + public int getHoldability() throws SQLException { + return 0; + } + + @Override + public Savepoint setSavepoint() throws SQLException { + return null; + } + + @Override + public Savepoint setSavepoint(String name) throws SQLException { + return null; + } + + @Override + public void rollback(Savepoint savepoint) throws SQLException { + + } + + @Override + public void releaseSavepoint(Savepoint savepoint) throws SQLException { + + } + + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + return null; + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { + return null; + } + + @Override + public Clob createClob() throws SQLException { + return null; + } + + @Override + public Blob createBlob() throws SQLException { + return null; + } + + @Override + public NClob createNClob() throws SQLException { + return null; + } + + @Override + public SQLXML createSQLXML() throws SQLException { + return null; + } + + @Override + public boolean isValid(int timeout) throws SQLException { + return false; + } + + @Override + public void setClientInfo(String name, String value) throws SQLClientInfoException { + + } + + @Override + public void setClientInfo(Properties properties) throws SQLClientInfoException { + + } + + @Override + public String getClientInfo(String name) throws SQLException { + return ""; + } + + @Override + public Properties getClientInfo() throws SQLException { + return null; + } + + @Override + public Array createArrayOf(String typeName, Object[] elements) throws SQLException { + return null; + } + + @Override + public Struct createStruct(String typeName, Object[] attributes) throws SQLException { + return null; + } + + @Override + public void setSchema(String schema) throws SQLException { + + } + + @Override + public String getSchema() throws SQLException { + return ""; + } + + @Override + public void abort(Executor executor) throws SQLException { + + } + + @Override + public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { + + } + + @Override + public int getNetworkTimeout() throws SQLException { + return 0; + } + + @Override + public void beginRequest() throws SQLException { + Connection.super.beginRequest(); + } + + @Override + public void endRequest() throws SQLException { + Connection.super.endRequest(); + } + + @Override + public boolean setShardingKeyIfValid(ShardingKey shardingKey, ShardingKey superShardingKey, int timeout) throws SQLException { + return Connection.super.setShardingKeyIfValid(shardingKey, superShardingKey, timeout); + } + + @Override + public boolean setShardingKeyIfValid(ShardingKey shardingKey, int timeout) throws SQLException { + return Connection.super.setShardingKeyIfValid(shardingKey, 
timeout); + } + + @Override + public void setShardingKey(ShardingKey shardingKey, ShardingKey superShardingKey) throws SQLException { + Connection.super.setShardingKey(shardingKey, superShardingKey); + } + + @Override + public void setShardingKey(ShardingKey shardingKey) throws SQLException { + Connection.super.setShardingKey(shardingKey); + } + + @Override + public <T> T unwrap(Class<T> iface) throws SQLException { + return null; + } + + @Override + public boolean isWrapperFor(Class<?> iface) throws SQLException { + return false; + } +} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DataSourceImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DataSourceImpl.java new file mode 100644 index 000000000..da8696fa5 --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DataSourceImpl.java @@ -0,0 +1,68 @@ +package com.clickhouse.jdbc; + +import javax.sql.DataSource; +import java.io.PrintWriter; +import java.sql.Connection; +import java.sql.ConnectionBuilder; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.ShardingKeyBuilder; +import java.util.logging.Logger; + +public class DataSourceImpl implements DataSource, JdbcWrapper { + + @Override + public Connection getConnection() throws SQLException { + return null; + } + + @Override + public Connection getConnection(String username, String password) throws SQLException { + return null; + } + + @Override + public PrintWriter getLogWriter() throws SQLException { + return null; + } + + @Override + public void setLogWriter(PrintWriter out) throws SQLException { + + } + + @Override + public void setLoginTimeout(int seconds) throws SQLException { + + } + + @Override + public int getLoginTimeout() throws SQLException { + return 0; + } + + @Override + public ConnectionBuilder createConnectionBuilder() throws SQLException { + return DataSource.super.createConnectionBuilder(); + } + + @Override + public Logger getParentLogger() throws SQLFeatureNotSupportedException { + return null; + } + + @Override + public ShardingKeyBuilder createShardingKeyBuilder() throws SQLException { + return DataSource.super.createShardingKeyBuilder(); + } + + @Override + public boolean isWrapperFor(Class<?> iface) throws SQLException { + return JdbcWrapper.super.isWrapperFor(iface); + } + + @Override + public <T> T unwrap(Class<T> iface) throws SQLException { + return JdbcWrapper.super.unwrap(iface); + } +} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DriverImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DriverImpl.java new file mode 100644 index 000000000..f8a65d955 --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DriverImpl.java @@ -0,0 +1,50 @@ +package com.clickhouse.jdbc; + +import java.sql.*; +import java.util.*; + +import com.clickhouse.logging.Logger; +import com.clickhouse.logging.LoggerFactory; + +/** + * JDBC driver for ClickHouse. 
+ */ +public class DriverImpl implements Driver { + private static final Logger log = LoggerFactory.getLogger(DriverImpl.class); + + + @Override + public Connection connect(String url, Properties info) throws SQLException { + return null; + } + + @Override + public boolean acceptsURL(String url) throws SQLException { + return false; + } + + @Override + public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException { + return new DriverPropertyInfo[0]; + } + + @Override + public int getMajorVersion() { + return 0; + } + + @Override + public int getMinorVersion() { + return 0; + } + + @Override + public boolean jdbcCompliant() { + return false; + } + + @Override + public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException { + throw new SQLFeatureNotSupportedException("Method not supported"); + } +} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcParameterizedQuery.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcParameterizedQuery.java deleted file mode 100644 index 2511be1f9..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcParameterizedQuery.java +++ /dev/null @@ -1,162 +0,0 @@ -package com.clickhouse.jdbc; - -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; - -import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseParameterizedQuery; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.data.ClickHouseValues; - -/** - * A parameterized query is a parsed query with parameters being extracted for - * substitution. - */ -public final class JdbcParameterizedQuery extends ClickHouseParameterizedQuery { - /** - * Creates an instance by parsing the given query. - * - * @param config non-null configuration - * @param query non-empty SQL query - * @return parameterized query - */ - public static JdbcParameterizedQuery of(ClickHouseConfig config, String query) { - // cache if query.length() is greater than 1024? - return new JdbcParameterizedQuery(config, query); - } - - private JdbcParameterizedQuery(ClickHouseConfig config, String query) { - super(config, query); - } - - @Override - protected String parse() { - int paramIndex = 0; - int partIndex = 0; - int len = originalQuery.length(); - for (int i = 0; i < len; i++) { - char ch = originalQuery.charAt(i); - if (ClickHouseUtils.isQuote(ch)) { - i = ClickHouseUtils.skipQuotedString(originalQuery, i, len, ch) - 1; - } else if (ch == '?') { - int idx = ClickHouseUtils.skipContentsUntil(originalQuery, i + 2, len, '?', ':'); - if (idx < len && originalQuery.charAt(idx - 1) == ':' && originalQuery.charAt(idx) != ':' - && originalQuery.charAt(idx - 2) != ':') { - i = idx - 1; - } else { - addPart(originalQuery.substring(partIndex, i), paramIndex++, null); - partIndex = i + 1; - } - } else if (ch == ';') { - throw new IllegalArgumentException(ClickHouseUtils.format( - "Multi-statement query cannot be used in prepared statement. Please remove semicolon at %d and everything after it.", - i)); - } else if (i + 1 < len) { - char nextCh = originalQuery.charAt(i + 1); - if (ch == '-' && nextCh == ch) { - i = ClickHouseUtils.skipSingleLineComment(originalQuery, i + 2, len) - 1; - } else if (ch == '/' && nextCh == '*') { - i = ClickHouseUtils.skipMultiLineComment(originalQuery, i + 2, len) - 1; - } - } - } - - return partIndex < len ? 
originalQuery.substring(partIndex, len) : null; - } - - @Override - public void apply(StringBuilder builder, Collection params) { - if (!hasParameter()) { - builder.append(originalQuery); - return; - } - - Iterator it = params == null ? Collections.emptyIterator() : params.iterator(); - for (QueryPart p : getParts()) { - builder.append(p.part); - builder.append(it.hasNext() ? it.next() : ClickHouseValues.NULL_EXPR); - } - - appendLastPartIfExists(builder); - } - - @Override - public void apply(StringBuilder builder, Object param, Object... more) { - if (!hasParameter()) { - builder.append(originalQuery); - return; - } - - int len = more == null ? 0 : more.length + 1; - int index = 0; - for (QueryPart p : getParts()) { - builder.append(p.part); - if (index > 0) { - param = index < len ? more[index - 1] : null; // NOSONAR - } - builder.append(toSqlExpression(p.paramName, param)); - index++; - } - - appendLastPartIfExists(builder); - } - - @Override - public void apply(StringBuilder builder, Object[] values) { - if (!hasParameter()) { - builder.append(originalQuery); - return; - } - - int len = values == null ? 0 : values.length; - int index = 0; - for (QueryPart p : getParts()) { - builder.append(p.part); - builder.append( - index < len ? toSqlExpression(p.paramName, values[index]) : ClickHouseValues.NULL_EXPR); // NOSONAR - index++; - } - - appendLastPartIfExists(builder); - } - - @Override - public void apply(StringBuilder builder, String param, String... more) { - if (!hasParameter()) { - builder.append(originalQuery); - return; - } - - int len = more == null ? 0 : more.length + 1; - int index = 0; - for (QueryPart p : getParts()) { - builder.append(p.part); - if (index > 0) { - param = index < len ? more[index - 1] : ClickHouseValues.NULL_EXPR; // NOSONAR - } - builder.append(param); - index++; - } - - appendLastPartIfExists(builder); - } - - @Override - public void apply(StringBuilder builder, String[] values) { - if (!hasParameter()) { - builder.append(originalQuery); - return; - } - - int len = values == null ? 0 : values.length; - int index = 0; - for (QueryPart p : getParts()) { - builder.append(p.part); - builder.append(index < len ? 
values[index] : ClickHouseValues.NULL_EXPR); // NOSONAR - index++; - } - - appendLastPartIfExists(builder); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Main.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Main.java deleted file mode 100644 index fbe69f495..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Main.java +++ /dev/null @@ -1,1062 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.time.LocalDateTime; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -import com.clickhouse.client.ClickHouseClient; -import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseException; -import com.clickhouse.client.ClickHouseNode; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.client.ClickHouseResponse; -import com.clickhouse.client.ClickHouseRequest.Mutation; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.data.ClickHouseByteBuffer; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataProcessor; -import com.clickhouse.data.ClickHouseDataStreamFactory; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseDeserializer; -import com.clickhouse.data.ClickHouseFormat; -import com.clickhouse.data.ClickHouseInputStream; -import com.clickhouse.data.ClickHouseOutputStream; -import com.clickhouse.data.ClickHouseRecord; -import com.clickhouse.data.ClickHouseSerializer; -import com.clickhouse.data.ClickHouseValue; -import com.clickhouse.data.format.BinaryStreamUtils; -import com.clickhouse.data.value.ClickHouseByteValue; -import com.clickhouse.data.value.ClickHouseLongValue; -import com.clickhouse.data.value.ClickHouseStringValue; -import com.clickhouse.jdbc.internal.ClickHouseConnectionImpl; - -public final class Main { - public static class Pojo { - private byte b; - private long l; - private BigDecimal d; - private LocalDateTime t; - private long[] a; - private List p; - private Object[][] n; - private Object j; - - public void setByte(byte b) { - this.b = b; - } - - public byte getByte() { - return b; - } - - public void setLong(long l) { - this.l = l; - } - - public long getLong() { - return l; - } - - public void setDecimal(BigDecimal d) { - this.d = d; - } - - public BigDecimal getDecimal() { - return d; - } - - public void setDateTime(LocalDateTime t) { - this.t = t; - } - - public LocalDateTime getDateTime() { - return t; - } - - public void setArray(long[] a) { - this.a = a; - } - - public long[] getArray() { - return a; - } - - public void setTuple(List p) { - this.p = p; - } - - public List getTuple() { - return p; - } - - public void setNested(Object[][] n) { - this.n = n; - } - - public Object[][] getNested() { - return n; - } - - public void setJson(Object j) { - this.j = j; - } - - public Object getJson() { - return j; - } - } - - static class Options { - final String action; - final int batch; - final boolean mapping; - final boolean output; - final int samples; - final boolean serde; - final String type; - final boolean verbose; - - final String url; - final String query; - 
final String file; - - final boolean requiresJdbc; - - private Options(String url, String query, String file) { - action = System.getProperty("action", "read").toLowerCase(); - batch = Integer.getInteger("batch", 1000); - mapping = Boolean.getBoolean("mapping"); - output = Boolean.getBoolean("output"); - samples = Integer.getInteger("samples", 500000000); - serde = !"false".equalsIgnoreCase(System.getProperty("serde", "")); - type = System.getProperty("type", "").toLowerCase(); - verbose = Boolean.getBoolean("verbose"); - - this.url = url; - this.requiresJdbc = url.length() > 5 && "jdbc:".equalsIgnoreCase(url.substring(0, 5)); - - if (query == null || query.isEmpty()) { - this.query = isLoadAction() || isWriteAction() ? getInsertQuery() : getSelectQuery(); - } else { - this.query = query; - } - if (file == null || file.isEmpty()) { - if (output) { - this.file = requiresJdbc ? "jdbc.out" : "java.out"; - } else { - this.file = ""; - } - } else { - this.file = file; - } - - if (verbose) { - println("Arguments:"); - println(" - url=%s", this.url); - println(" - query=%s", this.query); - println(" - file=%s", this.file); - println(); - println("Options:\n - action=%s, batch=%d, mapping=%s,\n - output=%s, samples=%d, serde=%s, type=%s", - action, batch, mapping, output, samples, serde, type); - } - } - - int getSamples() { - // final int s; - // if (isMixed() || isTuple() || isNested()) { - // s = samples / 5; - // } else if (isArray()) { - // s = samples / 1000; - // } else if (isJson()) { - // s = samples / 500; - // } else { - // s = samples; - // } - // return s; - return samples; - } - - boolean hasFile() { - return !file.isEmpty(); - } - - boolean hasMapping() { - return mapping; - } - - boolean isDumpAction() { - return "dump".equals(action); - } - - boolean isLoadAction() { - return "load".equals(action); - } - - boolean isWriteAction() { - return "write".equals(action); - } - - boolean isInt8() { - return "int8".equals(type); - } - - boolean isUInt64() { - return "uint64".equals(type); - } - - boolean isString() { - return "string".equals(type); - } - - boolean isDateTime() { - return "datetime".equals(type); - } - - boolean isDecimal() { - return "decimal".equals(type); - } - - boolean isMixed() { - return "mixed".equals(type); - } - - boolean isArray() { - return "array".equals(type); - } - - boolean isTuple() { - return "tuple".equals(type); - } - - boolean isNested() { - return "nested".equals(type); - } - - boolean isJson() { - return "json".equals(type); - } - - List getColumns() { - final List columns; - if (isInt8()) { - columns = Arrays.asList(ClickHouseColumn.of(null, ClickHouseDataType.Int8, false)); - } else if (isUInt64()) { - columns = Arrays.asList(ClickHouseColumn.of(null, ClickHouseDataType.UInt64, false)); - } else if (isString()) { - columns = Arrays.asList(ClickHouseColumn.of(null, ClickHouseDataType.String, false)); - } else if (isDateTime()) { - columns = Arrays.asList(ClickHouseColumn.of(null, ClickHouseDataType.DateTime, false)); - } else if (isDecimal()) { - columns = Arrays.asList(ClickHouseColumn.of(null, ClickHouseDataType.Decimal128, false, 0, 6)); - } else if (isMixed()) { - columns = Arrays.asList(ClickHouseColumn.of(null, ClickHouseDataType.Int8, false), - ClickHouseColumn.of(null, ClickHouseDataType.UInt64, false), - ClickHouseColumn.of(null, ClickHouseDataType.String, false), - ClickHouseColumn.of(null, ClickHouseDataType.DateTime, false), - ClickHouseColumn.of(null, ClickHouseDataType.Decimal128, false, 0, 6)); - } else if (isArray()) { - columns = 
Arrays.asList(ClickHouseColumn.of(null, "Array(Int32)")); - } else if (isTuple()) { - columns = Arrays.asList(ClickHouseColumn.of(null, ClickHouseDataType.Tuple, false, - ClickHouseColumn.of(null, ClickHouseDataType.Int8, false), - ClickHouseColumn.of(null, ClickHouseDataType.UInt64, false), - ClickHouseColumn.of(null, ClickHouseDataType.String, false), - ClickHouseColumn.of(null, ClickHouseDataType.DateTime, false), - ClickHouseColumn.of(null, ClickHouseDataType.Decimal128, false, 0, 6))); - } else if (isNested()) { - columns = Arrays.asList(ClickHouseColumn.of(null, ClickHouseDataType.Nested, false, - ClickHouseColumn.of(null, ClickHouseDataType.Int8, false), - ClickHouseColumn.of(null, ClickHouseDataType.UInt64, false), - ClickHouseColumn.of(null, ClickHouseDataType.String, false), - ClickHouseColumn.of(null, ClickHouseDataType.DateTime, false), - ClickHouseColumn.of(null, ClickHouseDataType.Decimal128, false, 0, 6))); - } else if (isJson()) { - columns = Arrays.asList(ClickHouseColumn.of(null, ClickHouseDataType.Tuple, false, - ClickHouseColumn.of(null, ClickHouseDataType.Int8, false), - ClickHouseColumn.of(null, ClickHouseDataType.UInt64, false), - ClickHouseColumn.of(null, ClickHouseDataType.String, false), - ClickHouseColumn.of(null, ClickHouseDataType.DateTime, false), - ClickHouseColumn.of(null, ClickHouseDataType.Decimal128, false, 0, 6))); - } else { - columns = null; - } - return columns; - } - - ClickHouseDeserializer getDerializer(ClickHouseConfig config) throws IOException { - final List columns = getColumns(); - if (columns == null || columns.isEmpty()) { - throw new IllegalStateException("Not column information available for query: " + query); - } - - final ClickHouseDataProcessor processor = ClickHouseDataStreamFactory.getInstance().getProcessor(config, - null, ClickHouseOutputStream.empty(), null, columns); - final ClickHouseDeserializer[] deserializers = processor.getDeserializers(config, columns); - return deserializers.length == 1 ? deserializers[0] - : ClickHouseDeserializer.of(Arrays.asList(deserializers)); - } - - ClickHouseSerializer getSerializer(ClickHouseConfig config) throws IOException { - final ClickHouseSerializer[] serializers = getSerializers(config); - return serializers.length == 1 ? 
serializers[0] : ClickHouseSerializer.of(Arrays.asList(serializers)); - } - - ClickHouseSerializer[] getSerializers(ClickHouseConfig config) throws IOException { - final List columns = getColumns(); - if (columns == null || columns.isEmpty()) { - throw new IllegalStateException("Not column information available for query: " + query); - } - - final ClickHouseDataProcessor processor = ClickHouseDataStreamFactory.getInstance().getProcessor(config, - null, ClickHouseOutputStream.empty(), null, columns); - return processor.getSerializers(config, columns); - } - - String getSelectQuery() { - final String selectQuery; - if (isInt8()) { - selectQuery = "select number::Int8 `byte` from numbers(%d)"; - } else if (isUInt64()) { - selectQuery = "select number `long` from numbers(%d)"; - } else if (isString()) { - selectQuery = "select toString(number) `string` from numbers(%d)"; - } else if (isDateTime()) { - selectQuery = "select toDateTime(number) `datetime` from numbers(%d)"; - } else if (isDecimal()) { - selectQuery = "select toDecimal128(number, 6) `decimal` from numbers(%d)"; - } else if (isMixed()) { - selectQuery = "select number::Int8 `byte`, number `long`, toString(number) `string`, toDateTime(number) `datetime`, toDecimal128(number, 6) `decimal` from numbers(%d)"; - } else if (isArray()) { - selectQuery = "select range(100000, 101000 + number %% 1000) as `array` from numbers(%d)"; - } else if (isTuple()) { - selectQuery = "select tuple(number::Int8, number, toString(number), toDateTime(number), toDecimal128(number, 6)) `tuple` from numbers(%d)"; - } else if (isNested()) { - selectQuery = "select [(number::Int8, number, toString(number), toDateTime(number), toDecimal128(number, 6))]::Nested(a Int8, b UInt64, c String, d DateTime, e Decimal128(6)) `nested` from numbers(%d)"; - } else if (isJson()) { - selectQuery = "select (number::Int8, number, toString(number), toDateTime(number), toDecimal128(number, 6), range(1000,1005), [tuple(number, number+1)])::Tuple(a Int8, b UInt64, c String, d DateTime, e Decimal128(6), f Array(UInt16), g Nested(x UInt64, y UInt64)) `json` from numbers(%d)"; - } else { - selectQuery = "select %d"; - } - return String.format(selectQuery, getSamples()); - } - - String getInsertQuery() { - return type.isEmpty() ? 
"insert into test_insert" : "insert into test_insert_" + type; - } - } - - static class GenericQuery { - static final ClickHouseFormat defaultFormat = ClickHouseFormat.RowBinaryWithNamesAndTypes; - - protected final Options options; - - protected GenericQuery(Options options) { - this.options = options; - } - - final long run() throws ClickHouseException, SQLException { - final long rows; - if (options.isDumpAction()) { - rows = dump(); - } else if (options.isLoadAction()) { - rows = load(options); - } else if (options.isWriteAction()) { - rows = write(options); - } else { - rows = read(options); - } - return rows; - } - - long read(ResultSet rs) throws SQLException { - long count = 0L; - final int len = rs.getMetaData().getColumnCount(); - while (rs.next()) { - Object obj = null; - for (int i = 1; i <= len; i++) { - // autoboxing to ensure we "got" the value - obj = rs.getObject(i); - } - if (obj != null) { - count++; - } - } - return count; - } - - long read(ClickHouseResponse response) throws ClickHouseException { - long count = 0L; - int len = response.getColumns().size(); - for (ClickHouseRecord r : response.records()) { - Object obj = null; - for (int i = 0; i < len; i++) { - // autoboxing just for comparison - obj = r.getValue(i).asObject(); - } - if (obj != null) { - count++; - } - } - return count; - } - - long write(Connection conn) throws SQLException { - throw new UnsupportedOperationException("No idea how to write data for custom query"); - } - - long write(Mutation request) throws ClickHouseException { - throw new UnsupportedOperationException("No idea how to write data for custom query"); - } - - final long dump() throws ClickHouseException, SQLException { - final long rows; - if (options.requiresJdbc) { - try (ClickHouseConnection conn = new ClickHouseConnectionImpl(options.url)) { - ClickHouseRequest request = conn.unwrap(ClickHouseRequest.class).query(options.query); - if (!request.getServer().getConfig().hasOption(ClickHouseClientOption.FORMAT)) { - request.format(defaultFormat); - } - request.output(options.file); - try (ClickHouseResponse response = request.executeAndWait()) { - rows = response.getSummary().getReadRows(); - } - } - } else { // java client - final ClickHouseNode server = ClickHouseNode.of(options.url); - try (ClickHouseClient client = ClickHouseClient.newInstance(server.getProtocol())) { - ClickHouseRequest request = client.read(server).query(options.query); - if (!server.getConfig().hasOption(ClickHouseClientOption.FORMAT)) { - request.format(defaultFormat); - } - request.output(options.file); - try (ClickHouseResponse response = request.query(options.query).executeAndWait()) { - rows = response.getSummary().getReadRows(); - } - } - } - return rows; - } - - final long load(Options options) throws ClickHouseException, SQLException { - final long rows; - if (options.requiresJdbc) { - try (ClickHouseConnection conn = new ClickHouseConnectionImpl(options.url)) { - ClickHouseFormat format = conn.getConfig().getFormat(); - if (!conn.unwrap(ClickHouseRequest.class).getServer().getConfig() - .hasOption(ClickHouseClientOption.FORMAT)) { - format = defaultFormat.defaultInputFormat(); - } - try (PreparedStatement stmt = conn.prepareStatement(options.query + " format " + format.name())) { // NOSONAR - stmt.setObject(1, new File(options.file)); - rows = stmt.executeLargeUpdate(); - } - } - } else { // java client - final ClickHouseNode server = ClickHouseNode.of(options.url); - try (ClickHouseClient client = ClickHouseClient.newInstance(server.getProtocol())) { - 
Mutation request = client.write(server).data(options.file); - if (!server.getConfig().hasOption(ClickHouseClientOption.FORMAT)) { - request.format(defaultFormat.defaultInputFormat()); - } - try (ClickHouseResponse response = request - .query(options.query + " format " + request.getConfig().getFormat().name()) - .executeAndWait()) { - rows = response.getSummary().getWrittenRows(); - } - } - } - return rows; - } - - final long read(Options options) throws ClickHouseException, SQLException { - final long rows; - if (options.requiresJdbc) { - try (ClickHouseConnection conn = new ClickHouseConnectionImpl(options.url); - ClickHouseStatement stmt = conn.createStatement()) { - if (options.hasFile()) { - try { - stmt.setMirroredOutput( - !"-".equals(options.file) ? new FileOutputStream(options.file, false) : System.out); // NOSONAR - } catch (IOException e) { - throw SqlExceptionUtils.clientError(e); - } - } - try (ResultSet rs = stmt.executeQuery(options.query)) { - rows = read(rs); - } - } - } else { // java client - final ClickHouseNode server = ClickHouseNode.of(options.url); - try (ClickHouseClient client = ClickHouseClient.newInstance(server.getProtocol())) { - ClickHouseRequest request = client.read(server).query(options.query); - if (!server.getConfig().hasOption(ClickHouseClientOption.FORMAT)) { - request.format(defaultFormat); - } - try (ClickHouseResponse response = request.executeAndWait()) { - if (options.hasFile()) { - try { - response.getInputStream().setCopyToTarget( - !"-".equals(options.file) ? new FileOutputStream(options.file, false) // NOSONAR - : System.out); // NOSONAR - } catch (IOException e) { - throw ClickHouseException.of(e, server); - } - } - - if (options.hasMapping()) { - long count = 0L; - for (Pojo p : response.records(Pojo.class)) { - if (p != null) { - count++; - } - } - rows = count; - } else { - rows = read(response); - } - } - } - } - return rows; - } - - final long write(Options options) throws ClickHouseException, SQLException { - final long rows; - if (options.requiresJdbc) { - try (ClickHouseConnection conn = new ClickHouseConnectionImpl(options.url)) { - rows = write(conn); - } - } else { // java client - final ClickHouseNode server = ClickHouseNode.of(options.url); - try (ClickHouseClient client = ClickHouseClient.newInstance(server.getProtocol())) { - Mutation request = client.write(server).query(options.query).data(options.file); - if (!server.getConfig().hasOption(ClickHouseClientOption.FORMAT)) { - request.format(defaultFormat.defaultInputFormat()); - } - rows = write(request); - } - } - return rows; - } - } - - static class Int8Query extends GenericQuery { - Int8Query(Options options) { - super(options); - } - - @Override - long read(ResultSet rs) throws SQLException { - long count = 0L; - final int len = rs.getMetaData().getColumnCount(); - byte v = (byte) 0; - while (rs.next()) { - for (int i = 1; i <= len; i++) { - v = rs.getByte(i); - } - count++; - } - long lastValue = 0xFFL & v; - return count >= lastValue ? 
count : lastValue; - } - - @Override - long read(ClickHouseResponse response) throws ClickHouseException { - long count = 0L; - byte v = (byte) 0; - if (options.serde) { - if (options.verbose) { - println("Deserialization: records"); - } - for (ClickHouseRecord r : response.records()) { - // only one column - v = r.getValue(0).asByte(); - count++; - } - } else { - if (options.verbose) { - println("Deserialization: readByte"); - } - try (ClickHouseInputStream in = response.getInputStream()) { - for (long i = 0L, len = options.samples; i < len; i++) { - v = in.readByte(); - count++; - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - long lastValue = 0xFFL & v; - return count >= lastValue ? count : lastValue; - } - - @Override - long write(Connection conn) throws SQLException { - try (PreparedStatement stmt = conn.prepareStatement(options.query)) { - final int batchSize = options.batch; - long count = 0L; - long rows = 0L; - for (long i = 0, len = options.getSamples(); i < len; i++) { - stmt.setByte(1, (byte) i); - stmt.addBatch(); - if ((count = (i + 1) % batchSize) == 0L) { - rows += stmt.executeLargeBatch().length; - } - } - if (count > 0L) { - rows += stmt.executeLargeBatch().length; - } - return rows; - } - } - - @Override - long write(Mutation request) throws ClickHouseException { - try (ClickHouseResponse response = request.data(o -> { - if (options.serde) { - ClickHouseConfig config = request.getConfig(); - ClickHouseSerializer serializer = options.getSerializer(config); - ClickHouseValue value = ClickHouseByteValue.ofNull(); - if (options.verbose) { - println("Serialization: %s -> %s", serializer, value); - } - for (long i = 0L, len = options.samples; i < len; i++) { - serializer.serialize(value.update(i), o); - } - } else { - if (options.verbose) { - println("Serialization: writeByte"); - } - for (long i = 0L, len = options.samples; i < len; i++) { - o.writeByte((byte) i); - } - } - }).executeAndWait()) { - return response.getSummary().getWrittenRows(); - } - } - } - - static class UInt64Query extends GenericQuery { - UInt64Query(Options options) { - super(options); - } - - @Override - long read(ResultSet rs) throws SQLException { - long count = 0L; - final int len = rs.getMetaData().getColumnCount(); - long v = 0L; - while (rs.next()) { - for (int i = 1; i <= len; i++) { - v = rs.getLong(i); - } - count++; - } - return count >= v ? count : v; - } - - @Override - long read(ClickHouseResponse response) throws ClickHouseException { - long count = 0L; - long v = 0L; - if (options.serde) { - if (options.verbose) { - println("Deserialization: records"); - } - for (ClickHouseRecord r : response.records()) { - // only one column - v = r.getValue(0).asLong(); - count++; - } - } else { - if (options.verbose) { - println("Deserialization: readByte"); - } - try (ClickHouseInputStream in = response.getInputStream()) { - for (long i = 0L, len = options.samples; i < len; i++) { - v = in.readBuffer(8).asLong(); - count++; - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - return count >= v ? 
count : v; - } - - @Override - long write(Connection conn) throws SQLException { - try (PreparedStatement stmt = conn.prepareStatement(options.query)) { - final int batchSize = options.batch; - long count = 0L; - long rows = 0L; - for (long i = 0, len = options.getSamples(); i < len; i++) { - stmt.setLong(1, i); - stmt.addBatch(); - if ((count = (i + 1) % batchSize) == 0L) { - rows += stmt.executeLargeBatch().length; - } - } - if (count > 0L) { - rows += stmt.executeLargeBatch().length; - } - return rows; - } - } - - @Override - long write(Mutation request) throws ClickHouseException { - try (ClickHouseResponse response = request.data(o -> { - if (options.serde) { - ClickHouseConfig config = request.getConfig(); - ClickHouseSerializer serializer = options.getSerializer(config); - ClickHouseValue value = ClickHouseLongValue.ofUnsignedNull(); - if (options.verbose) { - println("Serialization: %s -> %s", serializer, value); - } - for (long i = 0L, len = options.samples; i < len; i++) { - serializer.serialize(value.update(i), o); - } - } else { - if (options.verbose) { - println("Serialization: writeLong"); - } - for (long i = 0L, len = options.samples; i < len; i++) { - BinaryStreamUtils.writeUnsignedInt64(o, i); - } - } - }).executeAndWait()) { - return response.getSummary().getWrittenRows(); - } - } - } - - static class StringQuery extends GenericQuery { - StringQuery(Options options) { - super(options); - } - - @Override - long read(ResultSet rs) throws SQLException { - long count = 0L; - final int len = rs.getMetaData().getColumnCount(); - String v = null; - while (rs.next()) { - for (int i = 1; i <= len; i++) { - v = rs.getString(i); - } - count++; - } - return v != null ? count : 0L; - } - - @Override - long read(ClickHouseResponse response) throws ClickHouseException { - long count = 0L; - String v = null; - if (options.serde) { - if (options.verbose) { - println("Deserialization: records"); - } - for (ClickHouseRecord r : response.records()) { - // only one column - v = r.getValue(0).asString(); - count++; - } - } else { - if (options.verbose) { - println("Deserialization: readByte"); - } - try (ClickHouseInputStream in = response.getInputStream()) { - for (long i = 0L, len = options.samples; i < len; i++) { - v = in.readUnicodeString(); - count++; - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - return v != null ? 
count : 0L; - } - - @Override - long write(Connection conn) throws SQLException { - try (PreparedStatement stmt = conn.prepareStatement(options.query)) { - final int batchSize = options.batch; - long count = 0L; - long rows = 0L; - for (long i = 0, len = options.getSamples(); i < len; i++) { - stmt.setString(1, Long.toString(i)); - stmt.addBatch(); - if ((count = (i + 1) % batchSize) == 0L) { - rows += stmt.executeLargeBatch().length; - } - } - if (count > 0L) { - rows += stmt.executeLargeBatch().length; - } - return rows; - } - } - - @Override - long write(Mutation request) throws ClickHouseException { - try (ClickHouseResponse response = request.data(o -> { - if (options.serde) { - ClickHouseConfig config = request.getConfig(); - ClickHouseSerializer serializer = options.getSerializer(config); - ClickHouseValue value = ClickHouseStringValue.ofNull(); - if (options.verbose) { - println("Serialization: %s -> %s", serializer, value); - } - for (long i = 0L, len = options.samples; i < len; i++) { - serializer.serialize(value.update(i), o); - } - } else { - if (options.verbose) { - println("Serialization: writeString"); - } - for (long i = 0L, len = options.samples; i < len; i++) { - o.writeUnicodeString(Long.toString(i)); - } - } - }).executeAndWait()) { - return response.getSummary().getWrittenRows(); - } - } - } - - static class MixedQuery extends GenericQuery { - MixedQuery(Options options) { - super(options); - } - - @Override - long read(ResultSet rs) throws SQLException { - long count = 0L; - byte b = (byte) 0; - long l = 0L; - String s = null; - Object t = null; - BigDecimal d = null; - while (rs.next()) { - b = rs.getByte(1); - l = rs.getLong(2); - s = rs.getString(3); - t = rs.getObject(4); - d = rs.getBigDecimal(5); - count++; - } - return l > b && l > 0L && s != null && t != null && d != null ? count : 0L; - } - - @Override - long read(ClickHouseResponse response) throws ClickHouseException { - long count = 0L; - byte b = (byte) 0; - long l = 0L; - String s = null; - Object t = null; - BigDecimal d = null; - if (options.serde) { - if (options.verbose) { - println("Deserialization: records"); - } - for (ClickHouseRecord r : response.records()) { - b = r.getValue(0).asByte(); - l = r.getValue(1).asLong(); - s = r.getValue(2).asString(); - t = r.getValue(3).asDateTime(); - d = r.getValue(4).asBigDecimal(); - count++; - } - if (l > b && l > 0L && t != null && d != null) { - // ignore - } else { - s = null; - } - } else { - if (options.verbose) { - println("Deserialization: read(Byte, Long, String, DateTime, Decimal)"); - } - try (ClickHouseInputStream in = response.getInputStream()) { - response.getColumns(); - for (long i = 0L, len = options.samples; i < len; i++) { - b = in.readByte(); - l = in.readBuffer(8).asLong(); - s = in.readUnicodeString(); - t = in.readBuffer(4).asDateTime(); - d = in.readBuffer(16).asBigDecimal(6); - count++; - } - if (l > b && l > 0L && t != null && d != null) { - // ignore - } else { - s = null; - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - return s != null ? 
count : 0L; - } - - @Override - long write(Connection conn) throws SQLException { - try (PreparedStatement stmt = conn.prepareStatement(options.query)) { - final int batchSize = options.batch; - long count = 0L; - long rows = 0L; - for (long i = 0, len = options.getSamples(); i < len; i++) { - stmt.setByte(1, (byte) (i % 256)); - stmt.setLong(2, i); - stmt.setString(3, Long.toString(i)); - stmt.setLong(4, i); - stmt.setLong(5, i); - stmt.addBatch(); - if ((count = (i + 1) % batchSize) == 0L) { - rows += stmt.executeLargeBatch().length; - } - } - if (count > 0L) { - rows += stmt.executeLargeBatch().length; - } - return rows; - } - } - - @Override - long write(Mutation request) throws ClickHouseException { - try (ClickHouseResponse response = request.data(o -> { - if (options.serde) { - ClickHouseConfig config = request.getConfig(); - ClickHouseSerializer[] serializers = options.getSerializers(config); - ClickHouseValue value = ClickHouseLongValue.ofNull(); - if (options.verbose) { - println("Serialization: %s -> %s", serializers, value); - } - for (long i = 0L, len = options.samples, l = serializers.length; i < len; i++) { - for (int j = 0; j < l; j++) { - serializers[j].serialize(value.update(i), o); - } - } - } else { - if (options.verbose) { - println("Serialization: read(Byte, Long, String, DateTime, Decimal)"); - } - for (long i = 0L, len = options.samples; i < len; i++) { - o.writeByte((byte) (i % 256)); - BinaryStreamUtils.writeUnsignedInt64(o, i); - o.writeUnicodeString(Long.toString(i)); - BinaryStreamUtils.writeUnsignedInt32(o, i); - BinaryStreamUtils.writeInt128(o, BigInteger.valueOf(i)); - } - } - }).executeAndWait()) { - return response.getSummary().getWrittenRows(); - } - } - } - - private static void println() { - System.out.println(); // NOSONAR - } - - private static void println(Object msg, Object... args) { - if (args == null || args.length == 0) { - System.out.println(msg); // NOSONAR - } else { - System.out.println(String.format(Locale.ROOT, Objects.toString(msg), args)); // NOSONAR - } - } - - private static void printUsage() { - String execFile = "clickhouse-jdbc-bin"; - try { - File file = Paths.get(Main.class.getProtectionDomain().getCodeSource().getLocation().toURI()) - .toFile(); - if (file.isFile()) { - execFile = file.getName(); - if (!Files.isExecutable(file.toPath())) { - execFile = "java -jar " + execFile; - } - } else { - execFile = "java -cp " + file.getCanonicalPath() + " " + Main.class.getName(); - } - } catch (Exception e) { - // ignore - } - - final int index = execFile.indexOf(' '); - println("Usage: %s [QUERY] [FILE]", - index > 0 ? 
(execFile.substring(0, index) + " [PROPERTIES]" + execFile.substring(index)) - : (execFile + " [PROPERTIES]")); - println(); - println("Properties: -Dkey=value [-Dkey=value]*"); - println(" action \tAction, one of read(default), write, dump(no deserialization), and load(no serialization)"); - println(" batch \tBatch size for JDBC writing, defaults to 1000"); - println(" mapping\tWhether to map record into POJO, defaults to false"); - println(" output \tWhether to write raw response into stdout or a file(java.out or jdbc.out), defaults to false"); - println(" samples\tSamples, defaults to 500000000"); - println(" serde \tWhether to use default serialization/deserializion mechanism in Java client, defaults to true"); - println(" type \tPredefined QUERY, one of Int8, UInt64, String, Array, Tuple, Nested, and Mixed"); - println(" verbose\tWhether to show logs, defaults to false"); - println(); - println("Examples:"); - println(" - %s 'https://localhost?sslmode=none' 'select 1' -", - index > 0 ? (execFile.substring(0, index) + " -Dverbose=true" + execFile.substring(index)) - : (execFile + " -Dverbose=true")); - println(" - %s 'jdbc:ch://user:password@localhost:8123/default' 'select 1' output.file", execFile); - println(" - %s 'jdbc:ch:http://node1,node2,node3/default' 'insert into table1' input.file", execFile); - } - - public static void main(String[] args) throws Exception { - if ((args == null || args.length < 1) || args.length > 3) { - printUsage(); - System.exit(0); - } - - final Options options = new Options(args[0].trim(), args.length > 1 ? args[1].trim() : null, - args.length > 2 ? args[2].trim() : null); - - final GenericQuery query; - if (options.isInt8()) { - query = new Int8Query(options); - } else if (options.isUInt64()) { - query = new UInt64Query(options); - } else if (options.isString()) { - query = new StringQuery(options); - } else if (options.isMixed()) { - query = new MixedQuery(options); - } else { - query = new GenericQuery(options); - } - - final long startTime = options.verbose ? System.nanoTime() : 0L; - final long rows = query.run(); - if (options.verbose) { - long elapsedNanos = System.nanoTime() - startTime; - println("\nProcessed %,d rows in %,.2f ms (%,.2f rows/s)", rows, elapsedNanos / 1_000_000D, - rows * 1_000_000_000D / elapsedNanos); - } - System.exit(rows > 0L ? 
0 : 1); - } - - private Main() { - } -} \ No newline at end of file diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java new file mode 100644 index 000000000..c65b92b0c --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java @@ -0,0 +1,580 @@ +package com.clickhouse.jdbc; + +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.*; +import java.util.Calendar; + +public class PreparedStatementImpl extends StatementImpl implements PreparedStatement { + @Override + public ResultSet executeQuery() throws SQLException { + return null; + } + + @Override + public int executeUpdate() throws SQLException { + return 0; + } + + @Override + public void setNull(int parameterIndex, int sqlType) throws SQLException { + + } + + @Override + public void setBoolean(int parameterIndex, boolean x) throws SQLException { + + } + + @Override + public void setByte(int parameterIndex, byte x) throws SQLException { + + } + + @Override + public void setShort(int parameterIndex, short x) throws SQLException { + + } + + @Override + public void setInt(int parameterIndex, int x) throws SQLException { + + } + + @Override + public void setLong(int parameterIndex, long x) throws SQLException { + + } + + @Override + public void setFloat(int parameterIndex, float x) throws SQLException { + + } + + @Override + public void setDouble(int parameterIndex, double x) throws SQLException { + + } + + @Override + public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { + + } + + @Override + public void setString(int parameterIndex, String x) throws SQLException { + + } + + @Override + public void setBytes(int parameterIndex, byte[] x) throws SQLException { + + } + + @Override + public void setDate(int parameterIndex, Date x) throws SQLException { + + } + + @Override + public void setTime(int parameterIndex, Time x) throws SQLException { + + } + + @Override + public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { + + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException { + + } + + @Override + public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException { + + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException { + + } + + @Override + public void clearParameters() throws SQLException { + + } + + @Override + public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException { + + } + + @Override + public void setObject(int parameterIndex, Object x) throws SQLException { + + } + + @Override + public boolean execute() throws SQLException { + return false; + } + + @Override + public void addBatch() throws SQLException { + + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException { + + } + + @Override + public void setRef(int parameterIndex, Ref x) throws SQLException { + + } + + @Override + public void setBlob(int parameterIndex, Blob x) throws SQLException { + + } + + @Override + public void setClob(int parameterIndex, Clob x) throws SQLException { + + } + + @Override + public void setArray(int parameterIndex, Array x) throws SQLException { + + } + + @Override + public ResultSetMetaData getMetaData() throws SQLException { + return null; + 
} + + @Override + public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { + + } + + @Override + public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { + + } + + @Override + public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { + + } + + @Override + public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { + + } + + @Override + public void setURL(int parameterIndex, URL x) throws SQLException { + + } + + @Override + public ParameterMetaData getParameterMetaData() throws SQLException { + return null; + } + + @Override + public void setRowId(int parameterIndex, RowId x) throws SQLException { + + } + + @Override + public void setNString(int parameterIndex, String value) throws SQLException { + + } + + @Override + public void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException { + + } + + @Override + public void setNClob(int parameterIndex, NClob value) throws SQLException { + + } + + @Override + public void setClob(int parameterIndex, Reader reader, long length) throws SQLException { + + } + + @Override + public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException { + + } + + @Override + public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException { + + } + + @Override + public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { + + } + + @Override + public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { + + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException { + + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException { + + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException { + + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException { + + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { + + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException { + + } + + @Override + public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException { + + } + + @Override + public void setClob(int parameterIndex, Reader reader) throws SQLException { + + } + + @Override + public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException { + + } + + @Override + public void setNClob(int parameterIndex, Reader reader) throws SQLException { + + } + + @Override + public void setObject(int parameterIndex, Object x, SQLType targetSqlType, int scaleOrLength) throws SQLException { + PreparedStatement.super.setObject(parameterIndex, x, targetSqlType, scaleOrLength); + } + + @Override + public void setObject(int parameterIndex, Object x, SQLType targetSqlType) throws SQLException { + PreparedStatement.super.setObject(parameterIndex, x, targetSqlType); + } + + @Override + public long executeLargeUpdate() throws SQLException { + return PreparedStatement.super.executeLargeUpdate(); + } + + @Override + public ResultSet executeQuery(String sql) throws SQLException { + return null; + } + + @Override + public int executeUpdate(String sql) throws SQLException { + return 0; + } + + @Override + public void close() throws SQLException { + + } + + @Override + 
public int getMaxFieldSize() throws SQLException { + return 0; + } + + @Override + public void setMaxFieldSize(int max) throws SQLException { + + } + + @Override + public int getMaxRows() throws SQLException { + return 0; + } + + @Override + public void setMaxRows(int max) throws SQLException { + + } + + @Override + public void setEscapeProcessing(boolean enable) throws SQLException { + + } + + @Override + public int getQueryTimeout() throws SQLException { + return 0; + } + + @Override + public void setQueryTimeout(int seconds) throws SQLException { + + } + + @Override + public void cancel() throws SQLException { + + } + + @Override + public SQLWarning getWarnings() throws SQLException { + return null; + } + + @Override + public void clearWarnings() throws SQLException { + + } + + @Override + public void setCursorName(String name) throws SQLException { + + } + + @Override + public boolean execute(String sql) throws SQLException { + return false; + } + + @Override + public ResultSet getResultSet() throws SQLException { + return null; + } + + @Override + public int getUpdateCount() throws SQLException { + return 0; + } + + @Override + public boolean getMoreResults() throws SQLException { + return false; + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + + } + + @Override + public int getFetchDirection() throws SQLException { + return 0; + } + + @Override + public void setFetchSize(int rows) throws SQLException { + + } + + @Override + public int getFetchSize() throws SQLException { + return 0; + } + + @Override + public int getResultSetConcurrency() throws SQLException { + return 0; + } + + @Override + public int getResultSetType() throws SQLException { + return 0; + } + + @Override + public void addBatch(String sql) throws SQLException { + + } + + @Override + public void clearBatch() throws SQLException { + + } + + @Override + public int[] executeBatch() throws SQLException { + return new int[0]; + } + + @Override + public Connection getConnection() throws SQLException { + return null; + } + + @Override + public boolean getMoreResults(int current) throws SQLException { + return false; + } + + @Override + public ResultSet getGeneratedKeys() throws SQLException { + return null; + } + + @Override + public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + return 0; + } + + @Override + public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { + return 0; + } + + @Override + public int executeUpdate(String sql, String[] columnNames) throws SQLException { + return 0; + } + + @Override + public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { + return false; + } + + @Override + public boolean execute(String sql, int[] columnIndexes) throws SQLException { + return false; + } + + @Override + public boolean execute(String sql, String[] columnNames) throws SQLException { + return false; + } + + @Override + public int getResultSetHoldability() throws SQLException { + return 0; + } + + @Override + public boolean isClosed() throws SQLException { + return false; + } + + @Override + public void setPoolable(boolean poolable) throws SQLException { + + } + + @Override + public boolean isPoolable() throws SQLException { + return false; + } + + @Override + public void closeOnCompletion() throws SQLException { + + } + + @Override + public boolean isCloseOnCompletion() throws SQLException { + return false; + } + + @Override + public long getLargeUpdateCount() throws SQLException { + return 
PreparedStatement.super.getLargeUpdateCount();
+    }
+
+    @Override
+    public void setLargeMaxRows(long max) throws SQLException {
+        PreparedStatement.super.setLargeMaxRows(max);
+    }
+
+    @Override
+    public long getLargeMaxRows() throws SQLException {
+        return PreparedStatement.super.getLargeMaxRows();
+    }
+
+    @Override
+    public long[] executeLargeBatch() throws SQLException {
+        return PreparedStatement.super.executeLargeBatch();
+    }
+
+    @Override
+    public long executeLargeUpdate(String sql) throws SQLException {
+        return PreparedStatement.super.executeLargeUpdate(sql);
+    }
+
+    @Override
+    public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
+        return PreparedStatement.super.executeLargeUpdate(sql, autoGeneratedKeys);
+    }
+
+    @Override
+    public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException {
+        return PreparedStatement.super.executeLargeUpdate(sql, columnIndexes);
+    }
+
+    @Override
+    public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException {
+        return PreparedStatement.super.executeLargeUpdate(sql, columnNames);
+    }
+
+    @Override
+    public String enquoteLiteral(String val) throws SQLException {
+        return PreparedStatement.super.enquoteLiteral(val);
+    }
+
+    @Override
+    public String enquoteIdentifier(String identifier, boolean alwaysQuote) throws SQLException {
+        return PreparedStatement.super.enquoteIdentifier(identifier, alwaysQuote);
+    }
+
+    @Override
+    public boolean isSimpleIdentifier(String identifier) throws SQLException {
+        return PreparedStatement.super.isSimpleIdentifier(identifier);
+    }
+
+    @Override
+    public String enquoteNCharLiteral(String val) throws SQLException {
+        return PreparedStatement.super.enquoteNCharLiteral(val);
+    }
+
+    @Override
+    public <T> T unwrap(Class<T> iface) throws SQLException {
+        return null;
+    }
+
+    @Override
+    public boolean isWrapperFor(Class<?> iface) throws SQLException {
+        return false;
+    }
+}
diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java
new file mode 100644
index 000000000..6d605b28c
--- /dev/null
+++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java
@@ -0,0 +1,992 @@
+package com.clickhouse.jdbc;
+
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.sql.*;
+import java.util.Calendar;
+import java.util.Map;
+
+import com.clickhouse.logging.Logger;
+import com.clickhouse.logging.LoggerFactory;
+
+public class ResultSetImpl implements ResultSet, JdbcWrapper {
+    private static final Logger log = LoggerFactory.getLogger(ResultSetImpl.class);
+
+
+    @Override
+    public boolean isWrapperFor(Class<?> iface) throws SQLException {
+        return JdbcWrapper.super.isWrapperFor(iface);
+    }
+
+    @Override
+    public <T> T unwrap(Class<T> iface) throws SQLException {
+        return JdbcWrapper.super.unwrap(iface);
+    }
+
+    @Override
+    public boolean next() throws SQLException {
+        return false;
+    }
+
+    @Override
+    public void close() throws SQLException {
+
+    }
+
+    @Override
+    public boolean wasNull() throws SQLException {
+        return false;
+    }
+
+    @Override
+    public String getString(int columnIndex) throws SQLException {
+        return "";
+    }
+
+    @Override
+    public boolean getBoolean(int columnIndex) throws SQLException {
+        return false;
+    }
+
+    @Override
+    public byte getByte(int columnIndex) throws SQLException {
+        return 0;
+    }
+
+    @Override
+    public short getShort(int columnIndex) throws SQLException {
+        return 0;
+    }
+
+
@Override + public int getInt(int columnIndex) throws SQLException { + return 0; + } + + @Override + public long getLong(int columnIndex) throws SQLException { + return 0; + } + + @Override + public float getFloat(int columnIndex) throws SQLException { + return 0; + } + + @Override + public double getDouble(int columnIndex) throws SQLException { + return 0; + } + + @Override + public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { + return null; + } + + @Override + public byte[] getBytes(int columnIndex) throws SQLException { + return new byte[0]; + } + + @Override + public Date getDate(int columnIndex) throws SQLException { + return null; + } + + @Override + public Time getTime(int columnIndex) throws SQLException { + return null; + } + + @Override + public Timestamp getTimestamp(int columnIndex) throws SQLException { + return null; + } + + @Override + public InputStream getAsciiStream(int columnIndex) throws SQLException { + return null; + } + + @Override + public InputStream getUnicodeStream(int columnIndex) throws SQLException { + return null; + } + + @Override + public InputStream getBinaryStream(int columnIndex) throws SQLException { + return null; + } + + @Override + public String getString(String columnLabel) throws SQLException { + return ""; + } + + @Override + public boolean getBoolean(String columnLabel) throws SQLException { + return false; + } + + @Override + public byte getByte(String columnLabel) throws SQLException { + return 0; + } + + @Override + public short getShort(String columnLabel) throws SQLException { + return 0; + } + + @Override + public int getInt(String columnLabel) throws SQLException { + return 0; + } + + @Override + public long getLong(String columnLabel) throws SQLException { + return 0; + } + + @Override + public float getFloat(String columnLabel) throws SQLException { + return 0; + } + + @Override + public double getDouble(String columnLabel) throws SQLException { + return 0; + } + + @Override + public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { + return null; + } + + @Override + public byte[] getBytes(String columnLabel) throws SQLException { + return new byte[0]; + } + + @Override + public Date getDate(String columnLabel) throws SQLException { + return null; + } + + @Override + public Time getTime(String columnLabel) throws SQLException { + return null; + } + + @Override + public Timestamp getTimestamp(String columnLabel) throws SQLException { + return null; + } + + @Override + public InputStream getAsciiStream(String columnLabel) throws SQLException { + return null; + } + + @Override + public InputStream getUnicodeStream(String columnLabel) throws SQLException { + return null; + } + + @Override + public InputStream getBinaryStream(String columnLabel) throws SQLException { + return null; + } + + @Override + public SQLWarning getWarnings() throws SQLException { + return null; + } + + @Override + public void clearWarnings() throws SQLException { + + } + + @Override + public String getCursorName() throws SQLException { + return ""; + } + + @Override + public ResultSetMetaData getMetaData() throws SQLException { + return null; + } + + @Override + public Object getObject(int columnIndex) throws SQLException { + return null; + } + + @Override + public Object getObject(String columnLabel) throws SQLException { + return null; + } + + @Override + public int findColumn(String columnLabel) throws SQLException { + return 0; + } + + @Override + public Reader getCharacterStream(int columnIndex) throws 
SQLException { + return null; + } + + @Override + public Reader getCharacterStream(String columnLabel) throws SQLException { + return null; + } + + @Override + public BigDecimal getBigDecimal(int columnIndex) throws SQLException { + return null; + } + + @Override + public BigDecimal getBigDecimal(String columnLabel) throws SQLException { + return null; + } + + @Override + public boolean isBeforeFirst() throws SQLException { + return false; + } + + @Override + public boolean isAfterLast() throws SQLException { + return false; + } + + @Override + public boolean isFirst() throws SQLException { + return false; + } + + @Override + public boolean isLast() throws SQLException { + return false; + } + + @Override + public void beforeFirst() throws SQLException { + + } + + @Override + public void afterLast() throws SQLException { + + } + + @Override + public boolean first() throws SQLException { + return false; + } + + @Override + public boolean last() throws SQLException { + return false; + } + + @Override + public int getRow() throws SQLException { + return 0; + } + + @Override + public boolean absolute(int row) throws SQLException { + return false; + } + + @Override + public boolean relative(int rows) throws SQLException { + return false; + } + + @Override + public boolean previous() throws SQLException { + return false; + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + + } + + @Override + public int getFetchDirection() throws SQLException { + return 0; + } + + @Override + public void setFetchSize(int rows) throws SQLException { + + } + + @Override + public int getFetchSize() throws SQLException { + return 0; + } + + @Override + public int getType() throws SQLException { + return 0; + } + + @Override + public int getConcurrency() throws SQLException { + return 0; + } + + @Override + public boolean rowUpdated() throws SQLException { + return false; + } + + @Override + public boolean rowInserted() throws SQLException { + return false; + } + + @Override + public boolean rowDeleted() throws SQLException { + return false; + } + + @Override + public void updateNull(int columnIndex) throws SQLException { + + } + + @Override + public void updateBoolean(int columnIndex, boolean x) throws SQLException { + + } + + @Override + public void updateByte(int columnIndex, byte x) throws SQLException { + + } + + @Override + public void updateShort(int columnIndex, short x) throws SQLException { + + } + + @Override + public void updateInt(int columnIndex, int x) throws SQLException { + + } + + @Override + public void updateLong(int columnIndex, long x) throws SQLException { + + } + + @Override + public void updateFloat(int columnIndex, float x) throws SQLException { + + } + + @Override + public void updateDouble(int columnIndex, double x) throws SQLException { + + } + + @Override + public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { + + } + + @Override + public void updateString(int columnIndex, String x) throws SQLException { + + } + + @Override + public void updateBytes(int columnIndex, byte[] x) throws SQLException { + + } + + @Override + public void updateDate(int columnIndex, Date x) throws SQLException { + + } + + @Override + public void updateTime(int columnIndex, Time x) throws SQLException { + + } + + @Override + public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { + + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { + + } + + @Override + public 
void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { + + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { + + } + + @Override + public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { + + } + + @Override + public void updateObject(int columnIndex, Object x) throws SQLException { + + } + + @Override + public void updateNull(String columnLabel) throws SQLException { + + } + + @Override + public void updateBoolean(String columnLabel, boolean x) throws SQLException { + + } + + @Override + public void updateByte(String columnLabel, byte x) throws SQLException { + + } + + @Override + public void updateShort(String columnLabel, short x) throws SQLException { + + } + + @Override + public void updateInt(String columnLabel, int x) throws SQLException { + + } + + @Override + public void updateLong(String columnLabel, long x) throws SQLException { + + } + + @Override + public void updateFloat(String columnLabel, float x) throws SQLException { + + } + + @Override + public void updateDouble(String columnLabel, double x) throws SQLException { + + } + + @Override + public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { + + } + + @Override + public void updateString(String columnLabel, String x) throws SQLException { + + } + + @Override + public void updateBytes(String columnLabel, byte[] x) throws SQLException { + + } + + @Override + public void updateDate(String columnLabel, Date x) throws SQLException { + + } + + @Override + public void updateTime(String columnLabel, Time x) throws SQLException { + + } + + @Override + public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { + + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { + + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { + + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { + + } + + @Override + public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { + + } + + @Override + public void updateObject(String columnLabel, Object x) throws SQLException { + + } + + @Override + public void insertRow() throws SQLException { + + } + + @Override + public void updateRow() throws SQLException { + + } + + @Override + public void deleteRow() throws SQLException { + + } + + @Override + public void refreshRow() throws SQLException { + + } + + @Override + public void cancelRowUpdates() throws SQLException { + + } + + @Override + public void moveToInsertRow() throws SQLException { + + } + + @Override + public void moveToCurrentRow() throws SQLException { + + } + + @Override + public Statement getStatement() throws SQLException { + return null; + } + + @Override + public Object getObject(int columnIndex, Map> map) throws SQLException { + return null; + } + + @Override + public Ref getRef(int columnIndex) throws SQLException { + return null; + } + + @Override + public Blob getBlob(int columnIndex) throws SQLException { + return null; + } + + @Override + public Clob getClob(int columnIndex) throws SQLException { + return null; + } + + @Override + public Array getArray(int columnIndex) throws SQLException { + return null; + } + + @Override + public Object getObject(String columnLabel, Map> map) throws SQLException { + return null; + } + 
+ @Override + public Ref getRef(String columnLabel) throws SQLException { + return null; + } + + @Override + public Blob getBlob(String columnLabel) throws SQLException { + return null; + } + + @Override + public Clob getClob(String columnLabel) throws SQLException { + return null; + } + + @Override + public Array getArray(String columnLabel) throws SQLException { + return null; + } + + @Override + public Date getDate(int columnIndex, Calendar cal) throws SQLException { + return null; + } + + @Override + public Date getDate(String columnLabel, Calendar cal) throws SQLException { + return null; + } + + @Override + public Time getTime(int columnIndex, Calendar cal) throws SQLException { + return null; + } + + @Override + public Time getTime(String columnLabel, Calendar cal) throws SQLException { + return null; + } + + @Override + public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { + return null; + } + + @Override + public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { + return null; + } + + @Override + public URL getURL(int columnIndex) throws SQLException { + return null; + } + + @Override + public URL getURL(String columnLabel) throws SQLException { + return null; + } + + @Override + public void updateRef(int columnIndex, Ref x) throws SQLException { + + } + + @Override + public void updateRef(String columnLabel, Ref x) throws SQLException { + + } + + @Override + public void updateBlob(int columnIndex, Blob x) throws SQLException { + + } + + @Override + public void updateBlob(String columnLabel, Blob x) throws SQLException { + + } + + @Override + public void updateClob(int columnIndex, Clob x) throws SQLException { + + } + + @Override + public void updateClob(String columnLabel, Clob x) throws SQLException { + + } + + @Override + public void updateArray(int columnIndex, Array x) throws SQLException { + + } + + @Override + public void updateArray(String columnLabel, Array x) throws SQLException { + + } + + @Override + public RowId getRowId(int columnIndex) throws SQLException { + return null; + } + + @Override + public RowId getRowId(String columnLabel) throws SQLException { + return null; + } + + @Override + public void updateRowId(int columnIndex, RowId x) throws SQLException { + + } + + @Override + public void updateRowId(String columnLabel, RowId x) throws SQLException { + + } + + @Override + public int getHoldability() throws SQLException { + return 0; + } + + @Override + public boolean isClosed() throws SQLException { + return false; + } + + @Override + public void updateNString(int columnIndex, String nString) throws SQLException { + + } + + @Override + public void updateNString(String columnLabel, String nString) throws SQLException { + + } + + @Override + public void updateNClob(int columnIndex, NClob nClob) throws SQLException { + + } + + @Override + public void updateNClob(String columnLabel, NClob nClob) throws SQLException { + + } + + @Override + public NClob getNClob(int columnIndex) throws SQLException { + return null; + } + + @Override + public NClob getNClob(String columnLabel) throws SQLException { + return null; + } + + @Override + public SQLXML getSQLXML(int columnIndex) throws SQLException { + return null; + } + + @Override + public SQLXML getSQLXML(String columnLabel) throws SQLException { + return null; + } + + @Override + public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { + + } + + @Override + public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException 
{ + + } + + @Override + public String getNString(int columnIndex) throws SQLException { + return ""; + } + + @Override + public String getNString(String columnLabel) throws SQLException { + return ""; + } + + @Override + public Reader getNCharacterStream(int columnIndex) throws SQLException { + return null; + } + + @Override + public Reader getNCharacterStream(String columnLabel) throws SQLException { + return null; + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { + + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { + + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { + + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { + + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { + + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { + + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { + + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { + + } + + @Override + public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { + + } + + @Override + public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { + + } + + @Override + public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { + + } + + @Override + public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { + + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { + + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { + + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { + + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { + + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { + + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { + + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { + + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { + + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { + + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { + + } + + @Override + public void updateClob(int columnIndex, Reader reader) throws SQLException { + + } + + @Override + public void updateClob(String columnLabel, Reader reader) throws SQLException { + + } + + @Override + public void updateNClob(int columnIndex, Reader reader) throws SQLException { + + } + + @Override + public void updateNClob(String columnLabel, 
Reader reader) throws SQLException { + + } + + @Override + public T getObject(int columnIndex, Class type) throws SQLException { + return null; + } + + @Override + public T getObject(String columnLabel, Class type) throws SQLException { + return null; + } + + @Override + public void updateObject(int columnIndex, Object x, SQLType targetSqlType, int scaleOrLength) throws SQLException { + ResultSet.super.updateObject(columnIndex, x, targetSqlType, scaleOrLength); + } + + @Override + public void updateObject(String columnLabel, Object x, SQLType targetSqlType, int scaleOrLength) throws SQLException { + ResultSet.super.updateObject(columnLabel, x, targetSqlType, scaleOrLength); + } + + @Override + public void updateObject(int columnIndex, Object x, SQLType targetSqlType) throws SQLException { + ResultSet.super.updateObject(columnIndex, x, targetSqlType); + } + + @Override + public void updateObject(String columnLabel, Object x, SQLType targetSqlType) throws SQLException { + ResultSet.super.updateObject(columnLabel, x, targetSqlType); + } +} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java new file mode 100644 index 000000000..673fa0127 --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java @@ -0,0 +1,289 @@ +package com.clickhouse.jdbc; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.Statement; + +public class StatementImpl implements Statement { + @Override + public ResultSet executeQuery(String sql) throws SQLException { + return null; + } + + @Override + public int executeUpdate(String sql) throws SQLException { + return 0; + } + + @Override + public void close() throws SQLException { + + } + + @Override + public int getMaxFieldSize() throws SQLException { + return 0; + } + + @Override + public void setMaxFieldSize(int max) throws SQLException { + + } + + @Override + public int getMaxRows() throws SQLException { + return 0; + } + + @Override + public void setMaxRows(int max) throws SQLException { + + } + + @Override + public void setEscapeProcessing(boolean enable) throws SQLException { + + } + + @Override + public int getQueryTimeout() throws SQLException { + return 0; + } + + @Override + public void setQueryTimeout(int seconds) throws SQLException { + + } + + @Override + public void cancel() throws SQLException { + + } + + @Override + public SQLWarning getWarnings() throws SQLException { + return null; + } + + @Override + public void clearWarnings() throws SQLException { + + } + + @Override + public void setCursorName(String name) throws SQLException { + + } + + @Override + public boolean execute(String sql) throws SQLException { + return false; + } + + @Override + public ResultSet getResultSet() throws SQLException { + return null; + } + + @Override + public int getUpdateCount() throws SQLException { + return 0; + } + + @Override + public boolean getMoreResults() throws SQLException { + return false; + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + + } + + @Override + public int getFetchDirection() throws SQLException { + return 0; + } + + @Override + public void setFetchSize(int rows) throws SQLException { + + } + + @Override + public int getFetchSize() throws SQLException { + return 0; + } + + @Override + public int getResultSetConcurrency() throws SQLException { + return 0; + } + + @Override + public int getResultSetType() 
throws SQLException { + return 0; + } + + @Override + public void addBatch(String sql) throws SQLException { + + } + + @Override + public void clearBatch() throws SQLException { + + } + + @Override + public int[] executeBatch() throws SQLException { + return new int[0]; + } + + @Override + public Connection getConnection() throws SQLException { + return null; + } + + @Override + public boolean getMoreResults(int current) throws SQLException { + return false; + } + + @Override + public ResultSet getGeneratedKeys() throws SQLException { + return null; + } + + @Override + public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + return 0; + } + + @Override + public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { + return 0; + } + + @Override + public int executeUpdate(String sql, String[] columnNames) throws SQLException { + return 0; + } + + @Override + public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { + return false; + } + + @Override + public boolean execute(String sql, int[] columnIndexes) throws SQLException { + return false; + } + + @Override + public boolean execute(String sql, String[] columnNames) throws SQLException { + return false; + } + + @Override + public int getResultSetHoldability() throws SQLException { + return 0; + } + + @Override + public boolean isClosed() throws SQLException { + return false; + } + + @Override + public void setPoolable(boolean poolable) throws SQLException { + + } + + @Override + public boolean isPoolable() throws SQLException { + return false; + } + + @Override + public void closeOnCompletion() throws SQLException { + + } + + @Override + public boolean isCloseOnCompletion() throws SQLException { + return false; + } + + @Override + public long getLargeUpdateCount() throws SQLException { + return Statement.super.getLargeUpdateCount(); + } + + @Override + public void setLargeMaxRows(long max) throws SQLException { + Statement.super.setLargeMaxRows(max); + } + + @Override + public long getLargeMaxRows() throws SQLException { + return Statement.super.getLargeMaxRows(); + } + + @Override + public long[] executeLargeBatch() throws SQLException { + return Statement.super.executeLargeBatch(); + } + + @Override + public long executeLargeUpdate(String sql) throws SQLException { + return Statement.super.executeLargeUpdate(sql); + } + + @Override + public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + return Statement.super.executeLargeUpdate(sql, autoGeneratedKeys); + } + + @Override + public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException { + return Statement.super.executeLargeUpdate(sql, columnIndexes); + } + + @Override + public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException { + return Statement.super.executeLargeUpdate(sql, columnNames); + } + + @Override + public String enquoteLiteral(String val) throws SQLException { + return Statement.super.enquoteLiteral(val); + } + + @Override + public String enquoteIdentifier(String identifier, boolean alwaysQuote) throws SQLException { + return Statement.super.enquoteIdentifier(identifier, alwaysQuote); + } + + @Override + public boolean isSimpleIdentifier(String identifier) throws SQLException { + return Statement.super.isSimpleIdentifier(identifier); + } + + @Override + public String enquoteNCharLiteral(String val) throws SQLException { + return Statement.super.enquoteNCharLiteral(val); + } + + @Override + public T unwrap(Class iface) throws 
SQLException {
+        return null;
+    }
+
+    @Override
+    public boolean isWrapperFor(Class<?> iface) throws SQLException {
+        return false;
+    }
+}
diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/AbstractPreparedStatement.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/AbstractPreparedStatement.java
deleted file mode 100644
index 0e6f09fbd..000000000
--- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/AbstractPreparedStatement.java
+++ /dev/null
@@ -1,173 +0,0 @@
-package com.clickhouse.jdbc.internal;
-
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import com.clickhouse.client.ClickHouseRequest;
-import com.clickhouse.data.ClickHouseUtils;
-import com.clickhouse.jdbc.SqlExceptionUtils;
-
-public abstract class AbstractPreparedStatement extends ClickHouseStatementImpl implements PreparedStatement {
-    protected AbstractPreparedStatement(ClickHouseConnectionImpl connection, ClickHouseRequest<?> request,
-            int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
-        super(connection, request, resultSetType, resultSetConcurrency, resultSetHoldability);
-    }
-
-    protected abstract long[] executeAny(boolean asBatch) throws SQLException;
-
-    protected abstract int getMaxParameterIndex();
-
-    protected int toArrayIndex(int parameterIndex) throws SQLException {
-        int max = getMaxParameterIndex();
-        if (max < 1) {
-            String name = getConnection().getJdbcConfig().useNamedParameter() ? "named parameter"
-                    : "JDBC style '?' placeholder";
-            throw SqlExceptionUtils.clientError(ClickHouseUtils
-                    .format("Can't set parameter at index %d due to no %s found in the query",
-                            parameterIndex, name));
-        } else if (parameterIndex < 1 || parameterIndex > max) {
-            throw SqlExceptionUtils.clientError(ClickHouseUtils
-                    .format("Parameter index must between 1 and %d but we got %d", max,
-                            parameterIndex));
-        }
-
-        return parameterIndex - 1;
-    }
-
-    @Override
-    public final void addBatch(String sql) throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils
-                .unsupportedError(
-                        "addBatch(String) cannot be called in PreparedStatement or CallableStatement!");
-    }
-
-    @Override
-    public final boolean execute(String sql) throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils
-                .unsupportedError(
-                        "execute(String) cannot be called in PreparedStatement or CallableStatement!");
-    }
-
-    @Override
-    public final boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils
-                .unsupportedError(
-                        "execute(String, int) cannot be called in PreparedStatement or CallableStatement!");
-    }
-
-    @Override
-    public final boolean execute(String sql, int[] columnIndexes) throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils
-                .unsupportedError(
-                        "execute(String, int[]) cannot be called in PreparedStatement or CallableStatement!");
-    }
-
-    @Override
-    public final boolean execute(String sql, String[] columnNames) throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils
-                .unsupportedError(
-                        "execute(String, String[]) cannot be called in PreparedStatement or CallableStatement!");
-    }
-
-    @Override
-    public long[] executeLargeBatch() throws SQLException {
-        return executeAny(true);
-    }
-
-    @Override
-    public final long executeLargeUpdate(String sql) throws SQLException {
-        ensureOpen();
-
-        throw SqlExceptionUtils
-                .unsupportedError(
-                        "executeLargeUpdate(String) cannot be called in PreparedStatement or
CallableStatement!"); - } - - @Override - public final long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils - .unsupportedError( - "executeLargeUpdate(String, int) cannot be called in PreparedStatement or CallableStatement!"); - } - - @Override - public final long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils - .unsupportedError( - "executeLargeUpdate(String, int[]) cannot be called in PreparedStatement or CallableStatement!"); - } - - @Override - public final long executeLargeUpdate(String sql, String[] columnNames) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils - .unsupportedError( - "executeLargeUpdate(String, String[]) cannot be called in PreparedStatement or CallableStatement!"); - } - - @Override - public final ResultSet executeQuery(String sql) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils - .unsupportedError( - "executeQuery(String) cannot be called in PreparedStatement or CallableStatement!"); - } - - @Override - public final int executeUpdate() throws SQLException { - return (int) executeLargeUpdate(); - } - - @Override - public final int executeUpdate(String sql) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils - .unsupportedError( - "executeUpate(String) cannot be called in PreparedStatement or CallableStatement!"); - } - - @Override - public final int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils - .unsupportedError( - "executeUpdate(String, int) cannot be called in PreparedStatement or CallableStatement!"); - } - - @Override - public final int executeUpdate(String sql, int[] columnIndexes) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils - .unsupportedError( - "executeUpdate(String, int[]) cannot be called in PreparedStatement or CallableStatement!"); - } - - @Override - public final int executeUpdate(String sql, String[] columnNames) throws SQLException { - ensureOpen(); - - throw SqlExceptionUtils - .unsupportedError( - "executeUpdate(String, String[]) cannot be called in PreparedStatement or CallableStatement!"); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImpl.java deleted file mode 100644 index 6968cfa28..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImpl.java +++ /dev/null @@ -1,1209 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.io.Serializable; -import java.net.URI; -import java.sql.ClientInfoStatus; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.NClob; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLClientInfoException; -import java.sql.SQLException; -import java.sql.SQLWarning; -import java.sql.Savepoint; -import java.util.Calendar; -import java.util.GregorianCalendar; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Optional; -import java.util.Properties; -import java.util.TimeZone; -import java.util.Map.Entry; -import java.util.concurrent.Executor; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import com.clickhouse.client.ClickHouseClient; -import com.clickhouse.client.ClickHouseClientBuilder; 
-import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseException; -import com.clickhouse.client.ClickHouseNode; -import com.clickhouse.client.ClickHouseNodeSelector; -import com.clickhouse.client.ClickHouseNodes; -import com.clickhouse.client.ClickHouseParameterizedQuery; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.client.ClickHouseResponse; -import com.clickhouse.client.ClickHouseTransaction; -import com.clickhouse.client.ClickHouseRequest.Mutation; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.client.http.config.ClickHouseHttpOption; -import com.clickhouse.config.ClickHouseDefaultOption; -import com.clickhouse.config.ClickHouseOption; -import com.clickhouse.config.ClickHouseRenameMethod; -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseFormat; -import com.clickhouse.data.ClickHouseRecord; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.data.ClickHouseValues; -import com.clickhouse.data.ClickHouseVersion; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; -import com.clickhouse.jdbc.ClickHouseDatabaseMetaData; -import com.clickhouse.jdbc.ClickHouseDriver; -import com.clickhouse.jdbc.JdbcConfig; -import com.clickhouse.jdbc.JdbcParameterizedQuery; -import com.clickhouse.jdbc.JdbcParseHandler; -import com.clickhouse.jdbc.SqlExceptionUtils; -import com.clickhouse.jdbc.JdbcWrapper; -import com.clickhouse.jdbc.internal.ClickHouseJdbcUrlParser.ConnectionInfo; -import com.clickhouse.jdbc.parser.ClickHouseSqlParser; -import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; -import com.clickhouse.jdbc.parser.ParseHandler; -import com.clickhouse.jdbc.parser.StatementType; - -public class ClickHouseConnectionImpl implements Connection, JdbcWrapper { - private static final Logger log = LoggerFactory.getLogger(ClickHouseConnectionImpl.class); - - static final String SETTING_READONLY = "readonly"; - static final String SETTING_MAX_INSERT_BLOCK = "max_insert_block_size"; - static final String SETTING_LW_DELETE = "allow_experimental_lightweight_delete"; - - static final ClickHouseDefaultOption CUSTOM_CONFIG = new ClickHouseDefaultOption("custom_jdbc_config", - "custom_jdbc_config"); - - private static final String SQL_GET_SERVER_INFO = "select currentUser() user, timezone() timezone, version() version, " - + getSetting(SETTING_READONLY, ClickHouseDataType.UInt8) + ", " - + getSetting(ClickHouseTransaction.SETTING_THROW_ON_UNSUPPORTED_QUERY_INSIDE_TRANSACTION, - ClickHouseDataType.Int8) - + ", " - + getSetting(ClickHouseTransaction.SETTING_WAIT_CHANGES_BECOME_VISIBLE_AFTER_COMMIT_MODE, - ClickHouseDataType.String) - + "," - + getSetting(ClickHouseTransaction.SETTING_IMPLICIT_TRANSACTION, ClickHouseDataType.Int8) + ", " - + getSetting(SETTING_MAX_INSERT_BLOCK, ClickHouseDataType.UInt64) + ", " - + getSetting(SETTING_LW_DELETE, ClickHouseDataType.Int8) + ", " - + getSetting((String) CUSTOM_CONFIG.getEffectiveDefaultValue(), ClickHouseDataType.String) - + " FORMAT RowBinaryWithNamesAndTypes"; - - private static String getSetting(String setting, ClickHouseDataType type) { - return getSetting(setting, type, null); - } - - private static String getSetting(String setting, ClickHouseDataType type, String defaultValue) { - StringBuilder builder = new StringBuilder(); - if (type == ClickHouseDataType.String) { - 
builder.append("(ifnull((select value from system.settings where name = '").append(setting) - .append("'), "); - } else { - builder.append("to").append(type.name()) - .append("(ifnull((select value from system.settings where name = '").append(setting) - .append("'), "); - } - if (ClickHouseChecker.isNullOrEmpty(defaultValue)) { - builder.append(type.getMaxPrecision() > 0 ? (type.isSigned() ? "'-1'" : "'0'") : "''"); - } else { - builder.append('\'').append(defaultValue).append('\''); - } - return builder.append(")) as ").append(setting).toString(); - } - - protected static ClickHouseRecord getServerInfo(ClickHouseNode node, ClickHouseRequest request, - boolean createDbIfNotExist) throws SQLException { - ClickHouseRequest newReq = request.copy().option(ClickHouseClientOption.RENAME_RESPONSE_COLUMN, - ClickHouseRenameMethod.NONE); - if (!createDbIfNotExist) { // in case the database does not exist - newReq.option(ClickHouseClientOption.DATABASE, ""); - } - try (ClickHouseResponse response = newReq.option(ClickHouseClientOption.ASYNC, false) - .option(ClickHouseClientOption.COMPRESS, false) - .option(ClickHouseClientOption.DECOMPRESS, false) - .option(ClickHouseClientOption.FORMAT, ClickHouseFormat.RowBinaryWithNamesAndTypes) - .query(SQL_GET_SERVER_INFO).executeAndWait()) { - return response.firstRecord(); - } catch (Exception e) { - SQLException sqlExp = SqlExceptionUtils.handle(e); - if (createDbIfNotExist && sqlExp.getErrorCode() == 81) { - String db = node.getDatabase(request.getConfig()); - try (ClickHouseResponse resp = newReq.use("") - .query(new StringBuilder("CREATE DATABASE IF NOT EXISTS `") - .append(ClickHouseUtils.escape(db, '`')).append('`').toString()) - .executeAndWait()) { - return getServerInfo(node, request, false); - } catch (SQLException ex) { - throw ex; - } catch (Exception ex) { - throw SqlExceptionUtils.handle(ex); - } - } else { - throw sqlExp; - } - } - } - - private final JdbcConfig jdbcConf; - - private final ClickHouseClient client; - private final ClickHouseRequest clientRequest; - - private boolean autoCommit; - private boolean closed; - private String database; - private boolean readOnly; - private int networkTimeout; - private int rsHoldability; - private int txIsolation; - - private final Optional clientTimeZone; - private final Calendar defaultCalendar; - private final TimeZone jvmTimeZone; - private final TimeZone serverTimeZone; - private final ClickHouseVersion serverVersion; - private final String user; - private final int initialReadOnly; - private final int initialNonTxQuerySupport; - private final String initialTxCommitWaitMode; - private final int initialImplicitTx; - private final long initialMaxInsertBlockSize; - // 0 - unsupported; 1 - experimental support; 2 - always support - private final int initialDeleteSupport; - - private final Map> typeMap; - - private final AtomicReference txRef; - - protected JdbcTransaction createTransaction() throws SQLException { - if (!isTransactionSupported()) { - return new JdbcTransaction(null); - } - - try { - ClickHouseTransaction tx = clientRequest.getManager().createTransaction(clientRequest); - tx.begin(); - // if (txIsolation == Connection.TRANSACTION_READ_UNCOMMITTED) { - // tx.snapshot(ClickHouseTransaction.CSN_EVERYTHING_VISIBLE); - // } - clientRequest.transaction(tx); - return new JdbcTransaction(tx); - } catch (ClickHouseException e) { - throw SqlExceptionUtils.handle(e); - } - } - - protected JdbcSavepoint createSavepoint() { - return new JdbcSavepoint(1, "name"); - } - - /** - * Checks if the 
connection is open or not. - * - * @throws SQLException when the connection is closed - */ - protected void ensureOpen() throws SQLException { - if (closed) { - throw SqlExceptionUtils.clientError("Cannot operate on a closed connection"); - } - } - - /** - * Checks if a feature can be supported or not. - * - * @param feature non-empty feature name - * @param silent whether to show warning in log or throw unsupported exception - * @throws SQLException when the feature is not supported and silent is - * {@code false} - */ - protected void ensureSupport(String feature, boolean silent) throws SQLException { - String msg = feature + " is not supported"; - - if (jdbcConf.isJdbcCompliant()) { - if (silent) { - log.debug("[JDBC Compliant Mode] %s. You may change %s to false to throw SQLException instead.", msg, - JdbcConfig.PROP_JDBC_COMPLIANT); - } else { - log.warn("[JDBC Compliant Mode] %s. You may change %s to false to throw SQLException instead.", msg, - JdbcConfig.PROP_JDBC_COMPLIANT); - } - } else if (!silent) { - throw SqlExceptionUtils.unsupportedError(msg); - } - } - - protected void ensureTransactionSupport() throws SQLException { - if (!isTransactionSupported()) { - ensureSupport("Transaction", false); - } - } - - protected List getTableColumns(String dbName, String tableName, String columns) - throws SQLException { - if (tableName == null || columns == null) { - throw SqlExceptionUtils.clientError("Failed to extract table and columns from the query"); - } - - if (columns.isEmpty()) { - columns = "*"; - } else { - columns = columns.substring(1); // remove the leading bracket - } - StringBuilder builder = new StringBuilder(); - builder.append("SELECT ").append(columns).append(" FROM "); - if (!ClickHouseChecker.isNullOrEmpty(dbName)) { - builder.append('`').append(ClickHouseUtils.escape(dbName, '`')).append('`').append('.'); - } - builder.append('`').append(ClickHouseUtils.escape(tableName, '`')).append('`').append(" WHERE 0"); - List list; - try (ClickHouseResponse resp = clientRequest.copy().format(ClickHouseFormat.RowBinaryWithNamesAndTypes) - .option(ClickHouseClientOption.RENAME_RESPONSE_COLUMN, ClickHouseRenameMethod.NONE) - .query(builder.toString()).executeAndWait()) { - list = resp.getColumns(); - } catch (Exception e) { - throw SqlExceptionUtils.handle(e); - } - return list; - } - - protected String getDatabase() throws SQLException { - ensureOpen(); - - return getCurrentDatabase(); - } - - // for testing purpose - final JdbcTransaction getJdbcTrasaction() { - return txRef.get(); - } - - public ClickHouseConnectionImpl(String url) throws SQLException { - this(url, new Properties()); - } - - public ClickHouseConnectionImpl(String url, Properties properties) throws SQLException { - this(ClickHouseJdbcUrlParser.parse(url, properties)); - } - - public ClickHouseConnectionImpl(ConnectionInfo connInfo) throws SQLException { - Properties props = connInfo.getProperties(); - jvmTimeZone = TimeZone.getDefault(); - - ClickHouseClientBuilder clientBuilder = ClickHouseClient.builder() - .options(ClickHouseDriver.toClientOptions(props)) - .defaultCredentials(connInfo.getDefaultCredentials()); - ClickHouseNodes nodes = connInfo.getNodes(); - - final ClickHouseNode node; - final ClickHouseClient initialClient; - final ClickHouseRequest initialRequest; - if (nodes.isSingleNode()) { - try { - node = nodes.apply(nodes.getNodeSelector()); - } catch (Exception e) { - throw SqlExceptionUtils.clientError("Failed to get single-node", e); - } - initialClient = 
clientBuilder.nodeSelector(ClickHouseNodeSelector.of(node.getProtocol())).build(); - initialRequest = initialClient.read(node); - } else { - log.debug("Selecting node from: %s", nodes); - initialClient = clientBuilder.build(); // use dummy client - initialRequest = initialClient.read(nodes); - try { - node = initialRequest.getServer(); - } catch (Exception e) { - throw SqlExceptionUtils.clientError("No healthy node available", e); - } - } - - log.debug("Connecting to: %s", node); - ClickHouseConfig config = initialRequest.getConfig(); - String currentUser = null; - TimeZone timeZone = null; - ClickHouseVersion version = null; - ClickHouseRecord r = null; - if (config.hasServerInfo()) { // when both serverTimeZone and serverVersion are configured - timeZone = config.getServerTimeZone(); - version = config.getServerVersion(); - if (connInfo.getJdbcConfig().isCreateDbIfNotExist()) { - r = getServerInfo(node, initialRequest, true); - } - } else { - r = getServerInfo(node, initialRequest, connInfo.getJdbcConfig().isCreateDbIfNotExist()); - currentUser = r.getValue(0).asString(); - String tz = r.getValue(1).asString(); - String ver = r.getValue(2).asString(); - version = ClickHouseVersion.of(ver); - // https://github.com/ClickHouse/ClickHouse/commit/486d63864bcc6e15695cd3e9f9a3f83a84ec4009 - if (version.check("(,20.7)")) { - throw SqlExceptionUtils.unsupportedError( - "We apologize, but this driver only works with ClickHouse servers 20.7 and above. " - + "Please consider to upgrade your server to a more recent version."); - } - if (ClickHouseChecker.isNullOrBlank(tz)) { - tz = "UTC"; - } - // tsTimeZone.hasSameRules(ClickHouseValues.UTC_TIMEZONE) - timeZone = "UTC".equals(tz) ? ClickHouseValues.UTC_TIMEZONE : TimeZone.getTimeZone(tz); - - // update request and corresponding config - initialRequest.option(ClickHouseClientOption.SERVER_TIME_ZONE, tz) - .option(ClickHouseClientOption.SERVER_VERSION, ver); - } - - final boolean useLightWeightDelete = version.check("[23.3,)"); - if (r != null) { - initialReadOnly = r.getValue(3).asInteger(); - initialNonTxQuerySupport = r.getValue(4).asInteger(); - initialTxCommitWaitMode = r.getValue(5).asString().toLowerCase(Locale.ROOT); - initialImplicitTx = r.getValue(6).asInteger(); - initialMaxInsertBlockSize = r.getValue(7).asLong(); - initialDeleteSupport = useLightWeightDelete ? 
2 : r.getValue(8).asInteger(); - - String customConf = ClickHouseUtils.unescape(r.getValue(9).asString()); - if (ClickHouseChecker.isNullOrBlank(customConf)) { - jdbcConf = connInfo.getJdbcConfig(); - client = initialClient; - clientRequest = initialRequest; - } else { - initialClient.close(); - - Properties newProps = ClickHouseJdbcUrlParser.newProperties(); - Map options = ClickHouseUtils.extractParameters(customConf, null); - boolean resetAll = Boolean.parseBoolean(options.get("*")); - if (resetAll) { - clientBuilder.clearOptions(); - } else { - newProps.putAll(connInfo.getJdbcConfig().getProperties()); - newProps.putAll(props); - } - newProps.putAll(options); - - jdbcConf = new JdbcConfig(newProps); - Map clientOpts = ClickHouseConfig.toClientOptions(newProps); - clientBuilder.options(clientOpts); - - client = clientBuilder.build(); - clientRequest = client.read(node); - if (resetAll && !initialRequest.getSettings().isEmpty()) { - clientRequest.clearSettings(); - } - clientRequest.option(ClickHouseClientOption.SERVER_TIME_ZONE, timeZone.getID()) - .option(ClickHouseClientOption.SERVER_VERSION, version.toString()); - // two issues: - // 1) inefficient but definitely better than re-creating nodes in a cluster - // 2) client.read(node) won't work - you have to use clientRequest.copy() - for (Entry o : clientOpts.entrySet()) { - clientRequest.option(o.getKey(), o.getValue()); - } - - if (resetAll) { - clientRequest.freezeOptions().freezeSettings(); - } - config = clientRequest.getConfig(); - } - } else { - jdbcConf = connInfo.getJdbcConfig(); - - initialReadOnly = initialRequest.getSetting(SETTING_READONLY, 0); - initialNonTxQuerySupport = initialRequest - .getSetting(ClickHouseTransaction.SETTING_THROW_ON_UNSUPPORTED_QUERY_INSIDE_TRANSACTION, 1); - initialTxCommitWaitMode = initialRequest.getSetting( - ClickHouseTransaction.SETTING_WAIT_CHANGES_BECOME_VISIBLE_AFTER_COMMIT_MODE, "wait_unknown"); - initialImplicitTx = initialRequest.getSetting(ClickHouseTransaction.SETTING_IMPLICIT_TRANSACTION, 0); - initialMaxInsertBlockSize = initialRequest.getSetting(SETTING_MAX_INSERT_BLOCK, 0L); - initialDeleteSupport = initialRequest.getSetting(SETTING_LW_DELETE, useLightWeightDelete ? 2 : 0); - - client = initialClient; - clientRequest = initialRequest; - } - - this.autoCommit = !jdbcConf.isJdbcCompliant() || jdbcConf.isAutoCommit(); - this.closed = false; - this.database = config.getDatabase(); - this.clientRequest.use(this.database); - this.readOnly = clientRequest.getSetting(SETTING_READONLY, initialReadOnly) != 0; - this.networkTimeout = 0; - this.rsHoldability = ResultSet.HOLD_CURSORS_OVER_COMMIT; - if (isTransactionSupported()) { - this.txIsolation = Connection.TRANSACTION_REPEATABLE_READ; - if (jdbcConf.isJdbcCompliant() && !this.readOnly) { - if (!this.clientRequest - .hasSetting(ClickHouseTransaction.SETTING_THROW_ON_UNSUPPORTED_QUERY_INSIDE_TRANSACTION)) { - this.clientRequest.set(ClickHouseTransaction.SETTING_THROW_ON_UNSUPPORTED_QUERY_INSIDE_TRANSACTION, - 0); - } - // .set(ClickHouseTransaction.SETTING_WAIT_CHANGES_BECOME_VISIBLE_AFTER_COMMIT_MODE, - // "wait_unknown"); - } - } else { - this.txIsolation = jdbcConf.isJdbcCompliant() ? Connection.TRANSACTION_READ_COMMITTED - : Connection.TRANSACTION_NONE; - } - - this.user = currentUser != null ? 
currentUser : node.getCredentials(config).getUserName(); - this.serverTimeZone = timeZone; - if (config.isUseServerTimeZone()) { - clientTimeZone = Optional.empty(); - // with respect of default locale - defaultCalendar = new GregorianCalendar(); - } else { - clientTimeZone = Optional.of(config.getUseTimeZone()); - defaultCalendar = new GregorianCalendar(clientTimeZone.get()); - } - this.serverVersion = version; - this.typeMap = new HashMap<>(jdbcConf.getTypeMap()); - this.txRef = new AtomicReference<>(this.autoCommit ? null : createTransaction()); - } - - @Override - public String nativeSQL(String sql) throws SQLException { - ensureOpen(); - - // get rewritten query? - return sql; - } - - @Override - public void setAutoCommit(boolean autoCommit) throws SQLException { - ensureOpen(); - - if (this.autoCommit == autoCommit) { - return; - } - - ensureTransactionSupport(); - if (this.autoCommit = autoCommit) { // commit - JdbcTransaction tx = txRef.getAndSet(null); - if (tx != null) { - tx.commit(log); - } - } else { // start new transaction - if (!txRef.compareAndSet(null, createTransaction())) { - log.warn("Not able to start a new transaction, reuse the exist one: %s", txRef.get()); - } - } - } - - @Override - public boolean getAutoCommit() throws SQLException { - ensureOpen(); - - return autoCommit; - } - - @Override - public void begin() throws SQLException { - if (getAutoCommit()) { - throw SqlExceptionUtils.clientError("Cannot start new transaction in auto-commit mode"); - } - - ensureTransactionSupport(); - - JdbcTransaction tx = txRef.get(); - if (tx == null || !tx.isNew()) { - // invalid transaction state - throw new SQLException(JdbcTransaction.ERROR_TX_STARTED, SqlExceptionUtils.SQL_STATE_INVALID_TX_STATE); - } - } - - @Override - public void commit() throws SQLException { - if (getAutoCommit()) { - throw SqlExceptionUtils.clientError("Cannot commit in auto-commit mode"); - } - - ensureTransactionSupport(); - - JdbcTransaction tx = txRef.get(); - if (tx == null) { - // invalid transaction state - throw new SQLException(JdbcTransaction.ERROR_TX_NOT_STARTED, SqlExceptionUtils.SQL_STATE_INVALID_TX_STATE); - } else { - try { - tx.commit(log); - } finally { - if (!txRef.compareAndSet(tx, createTransaction())) { - log.warn("Transaction was set to %s unexpectedly", txRef.get()); - } - } - } - } - - @Override - public void rollback() throws SQLException { - if (getAutoCommit()) { - throw SqlExceptionUtils.clientError("Cannot rollback in auto-commit mode"); - } - - ensureTransactionSupport(); - - JdbcTransaction tx = txRef.get(); - if (tx == null) { - // invalid transaction state - throw new SQLException(JdbcTransaction.ERROR_TX_NOT_STARTED, SqlExceptionUtils.SQL_STATE_INVALID_TX_STATE); - } else { - try { - tx.rollback(log); - } finally { - if (!txRef.compareAndSet(tx, createTransaction())) { - log.warn("Transaction was set to %s unexpectedly", txRef.get()); - } - } - } - } - - @Override - public void close() throws SQLException { - try { - this.client.close(); - } catch (Exception e) { - log.warn("Failed to close connection due to %s", e.getMessage()); - throw SqlExceptionUtils.handle(e); - } finally { - this.closed = true; - } - - JdbcTransaction tx = txRef.get(); - if (tx != null) { - try { - tx.commit(log); - } finally { - if (!txRef.compareAndSet(tx, null)) { - log.warn("Transaction was set to %s unexpectedly", txRef.get()); - } - } - } - } - - @Override - public boolean isClosed() throws SQLException { - return closed; - } - - @Override - public DatabaseMetaData getMetaData() throws 
SQLException { - return new ClickHouseDatabaseMetaData(this); - } - - @Override - public void setReadOnly(boolean readOnly) throws SQLException { - ensureOpen(); - - if (initialReadOnly != 0) { - if (!readOnly) { - throw SqlExceptionUtils.clientError("Cannot change the setting on a read-only connection"); - } - } else { - if (readOnly) { - clientRequest.set(SETTING_READONLY, 2); - } else { - clientRequest.removeSetting(SETTING_READONLY); - } - this.readOnly = readOnly; - } - } - - @Override - public boolean isReadOnly() throws SQLException { - ensureOpen(); - - return readOnly; - } - - @Override - public void setCatalog(String catalog) throws SQLException { - if (jdbcConf.useCatalog()) { - setCurrentDatabase(catalog, true); - } else { - log.warn( - "setCatalog method is no-op. Please either change databaseTerm to catalog or use setSchema method instead"); - } - } - - @Override - public String getCatalog() throws SQLException { - return jdbcConf.useCatalog() ? getDatabase() : null; - } - - @Override - public void setTransactionIsolation(int level) throws SQLException { - ensureOpen(); - - if (Connection.TRANSACTION_NONE != level && Connection.TRANSACTION_READ_UNCOMMITTED != level - && Connection.TRANSACTION_READ_COMMITTED != level && Connection.TRANSACTION_REPEATABLE_READ != level - && Connection.TRANSACTION_SERIALIZABLE != level) { - throw new SQLException("Invalid transaction isolation level: " + level); - } else if (isTransactionSupported()) { - txIsolation = Connection.TRANSACTION_REPEATABLE_READ; - } else if (jdbcConf.isJdbcCompliant()) { - txIsolation = level; - } else { - txIsolation = Connection.TRANSACTION_NONE; - } - } - - @Override - public int getTransactionIsolation() throws SQLException { - ensureOpen(); - - return txIsolation; - } - - @Override - public SQLWarning getWarnings() throws SQLException { - ensureOpen(); - - return null; - } - - @Override - public void clearWarnings() throws SQLException { - ensureOpen(); - } - - @Override - public Map> getTypeMap() throws SQLException { - ensureOpen(); - - return new HashMap<>(typeMap); - } - - @Override - public void setTypeMap(Map> map) throws SQLException { - ensureOpen(); - - if (map != null) { - typeMap.putAll(map); - } - } - - @Override - public void setHoldability(int holdability) throws SQLException { - ensureOpen(); - - if (holdability == ResultSet.CLOSE_CURSORS_AT_COMMIT || holdability == ResultSet.HOLD_CURSORS_OVER_COMMIT) { - rsHoldability = holdability; - } else { - throw new SQLException("Invalid holdability: " + holdability); - } - } - - @Override - public int getHoldability() throws SQLException { - ensureOpen(); - - return rsHoldability; - } - - @Override - public Savepoint setSavepoint() throws SQLException { - return setSavepoint(null); - } - - @Override - public Savepoint setSavepoint(String name) throws SQLException { - ensureOpen(); - - if (getAutoCommit()) { - throw SqlExceptionUtils.clientError("Cannot set savepoint in auto-commit mode"); - } - - if (!jdbcConf.isJdbcCompliant()) { - throw SqlExceptionUtils.unsupportedError("setSavepoint not implemented"); - } - - JdbcTransaction tx = txRef.get(); - if (tx == null) { - tx = createTransaction(); - if (!txRef.compareAndSet(null, tx)) { - tx = txRef.get(); - } - } - return tx.newSavepoint(name); - } - - @Override - public void rollback(Savepoint savepoint) throws SQLException { - ensureOpen(); - - if (getAutoCommit()) { - throw SqlExceptionUtils.clientError("Cannot rollback to savepoint in auto-commit mode"); - } - - if (!jdbcConf.isJdbcCompliant()) { - throw 
SqlExceptionUtils.unsupportedError("rollback not implemented"); - } - - if (!(savepoint instanceof JdbcSavepoint)) { - throw SqlExceptionUtils.clientError("Unsupported type of savepoint: " + savepoint); - } - - JdbcTransaction tx = txRef.get(); - if (tx == null) { - // invalid transaction state - throw new SQLException(JdbcTransaction.ERROR_TX_NOT_STARTED, SqlExceptionUtils.SQL_STATE_INVALID_TX_STATE); - } else { - JdbcSavepoint s = (JdbcSavepoint) savepoint; - tx.logSavepointDetails(log, s, JdbcTransaction.ACTION_ROLLBACK); - tx.toSavepoint(s); - } - } - - @Override - public void releaseSavepoint(Savepoint savepoint) throws SQLException { - ensureOpen(); - - if (getAutoCommit()) { - throw SqlExceptionUtils.clientError("Cannot release savepoint in auto-commit mode"); - } - - if (!jdbcConf.isJdbcCompliant()) { - throw SqlExceptionUtils.unsupportedError("rollback not implemented"); - } - - if (!(savepoint instanceof JdbcSavepoint)) { - throw SqlExceptionUtils.clientError("Unsupported type of savepoint: " + savepoint); - } - - JdbcTransaction tx = txRef.get(); - if (tx == null) { - // invalid transaction state - throw new SQLException(JdbcTransaction.ERROR_TX_NOT_STARTED, SqlExceptionUtils.SQL_STATE_INVALID_TX_STATE); - } else { - JdbcSavepoint s = (JdbcSavepoint) savepoint; - tx.logSavepointDetails(log, s, "released"); - tx.toSavepoint(s); - } - } - - @Override - public ClickHouseStatement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) - throws SQLException { - ensureOpen(); - - return new ClickHouseStatementImpl(this, clientRequest.copy(), resultSetType, resultSetConcurrency, - resultSetHoldability); - } - - @Override - public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, - int resultSetHoldability) throws SQLException { - ensureOpen(); - - ClickHouseConfig config = clientRequest.getConfig(); - // TODO remove the extra parsing - ClickHouseSqlStatement[] stmts = parse(sql, config, clientRequest.getSettings()); - if (stmts.length != 1) { - throw SqlExceptionUtils - .clientError("Prepared statement only supports one query but we got: " + stmts.length); - } - ClickHouseSqlStatement parsedStmt = stmts[0]; - - ClickHouseParameterizedQuery preparedQuery; - try { - preparedQuery = jdbcConf.useNamedParameter() - ? ClickHouseParameterizedQuery.of(clientRequest.getConfig(), parsedStmt.getSQL()) - : JdbcParameterizedQuery.of(config, parsedStmt.getSQL()); - } catch (RuntimeException e) { - throw SqlExceptionUtils.clientError(e); - } - - PreparedStatement ps = null; - if (preparedQuery.hasParameter()) { - if (parsedStmt.hasTempTable() || parsedStmt.hasInput()) { - throw SqlExceptionUtils - .clientError( - "External table, input function, and query parameter cannot be used together in PreparedStatement."); - } else if (parsedStmt.getStatementType() == StatementType.INSERT && - !parsedStmt.containsKeyword("SELECT") && parsedStmt.hasValues() && - (!parsedStmt.hasFormat() || clientRequest.getFormat().name().equals(parsedStmt.getFormat()))) { - String query = parsedStmt.getSQL(); - boolean useStream = false; - Integer startIndex = parsedStmt.getPositions().get(ClickHouseSqlStatement.KEYWORD_VALUES_START); - if (startIndex != null) { - useStream = true; - int endIndex = parsedStmt.getPositions().get(ClickHouseSqlStatement.KEYWORD_VALUES_END); - for (int i = startIndex + 1; i < endIndex; i++) { - char ch = query.charAt(i); - if (ch != '?' 
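For context, the Connection-level transaction and savepoint support being removed above (setAutoCommit, commit, rollback, setSavepoint, rollback(Savepoint)) was driven from application code through the standard JDBC API. The following is a minimal usage sketch, not part of this patch; the URL, table name and credentials are placeholders, and it assumes a server and configuration where transactions and JDBC-compliant savepoints are enabled (isTransactionSupported() / isJdbcCompliant()).

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Savepoint;
    import java.sql.Statement;

    public class TransactionSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder URL and credentials.
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:clickhouse://localhost:8123/default", "default", "")) {
                conn.setAutoCommit(false); // creates the underlying JdbcTransaction (txRef)
                try (Statement stmt = conn.createStatement()) {
                    stmt.executeUpdate("INSERT INTO t VALUES (1)");
                    Savepoint sp = conn.setSavepoint("sp1"); // requires a JDBC-compliant configuration
                    stmt.executeUpdate("INSERT INTO t VALUES (2)");
                    conn.rollback(sp); // undo work done after the savepoint
                    conn.commit();     // commit and start a fresh transaction
                } catch (Exception e) {
                    conn.rollback();
                    throw e;
                }
            }
        }
    }
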
&& ch != ',' && !Character.isWhitespace(ch)) { - useStream = false; - break; - } - } - } - - if (useStream) { - ps = new InputBasedPreparedStatement(this, - clientRequest.write().query(query.substring(0, parsedStmt.getStartPosition("VALUES")), - newQueryId()), - getTableColumns(parsedStmt.getDatabase(), parsedStmt.getTable(), - parsedStmt.getContentBetweenKeywords( - ClickHouseSqlStatement.KEYWORD_TABLE_COLUMNS_START, - ClickHouseSqlStatement.KEYWORD_TABLE_COLUMNS_END)), - resultSetType, resultSetConcurrency, resultSetHoldability); - } - } - } else { - if (parsedStmt.hasTempTable()) { - // queries using external/temporary table - ps = new TableBasedPreparedStatement(this, - clientRequest.copy().query(parsedStmt.getSQL(), newQueryId()), parsedStmt, - resultSetType, resultSetConcurrency, resultSetHoldability); - } else if (parsedStmt.getStatementType() == StatementType.INSERT) { - if (!ClickHouseChecker.isNullOrBlank(parsedStmt.getInput())) { - // an ugly workaround of https://github.com/ClickHouse/ClickHouse/issues/39866 - // would be replace JSON and Object('json') types in the query to String - - Mutation m = clientRequest.write(); - if (parsedStmt.hasFormat()) { - m.format(ClickHouseFormat.valueOf(parsedStmt.getFormat())); - } - // insert query using input function - ps = new InputBasedPreparedStatement(this, m.query(parsedStmt.getSQL(), newQueryId()), - ClickHouseColumn.parse(parsedStmt.getInput()), resultSetType, resultSetConcurrency, - resultSetHoldability); - } else if (!parsedStmt.containsKeyword("SELECT") && !parsedStmt.hasValues()) { - ps = parsedStmt.hasFormat() - ? new StreamBasedPreparedStatement(this, - clientRequest.write().query(parsedStmt.getSQL(), newQueryId()), parsedStmt, - resultSetType, resultSetConcurrency, resultSetHoldability) - : new InputBasedPreparedStatement(this, - clientRequest.write().query(parsedStmt.getSQL(), newQueryId()), - getTableColumns(parsedStmt.getDatabase(), parsedStmt.getTable(), - parsedStmt.getContentBetweenKeywords( - ClickHouseSqlStatement.KEYWORD_TABLE_COLUMNS_START, - ClickHouseSqlStatement.KEYWORD_TABLE_COLUMNS_END)), - resultSetType, resultSetConcurrency, resultSetHoldability); - } - } - } - - return ps != null ? 
ps - : new SqlBasedPreparedStatement(this, clientRequest.copy().query(preparedQuery, newQueryId()), - stmts[0], resultSetType, resultSetConcurrency, resultSetHoldability); - } - - @Override - public NClob createNClob() throws SQLException { - ensureOpen(); - - return createClob(); - } - - @Override - public boolean isValid(int timeout) throws SQLException { - if (timeout < 0) { - throw SqlExceptionUtils.clientError("Negative milliseconds is not allowed"); - } else if (timeout == 0) { - timeout = clientRequest.getConfig().getConnectionTimeout(); - } else { - timeout = (int) TimeUnit.SECONDS.toMillis(timeout); - } - - if (isClosed()) { - return false; - } - - return client.ping(clientRequest.getServer(), timeout); - } - - @Override - public void setClientInfo(String name, String value) throws SQLClientInfoException { - try { - ensureOpen(); - } catch (SQLException e) { - Map failedProps = new HashMap<>(); - failedProps.put(PROP_APPLICATION_NAME, ClientInfoStatus.REASON_UNKNOWN_PROPERTY); - failedProps.put(PROP_CUSTOM_HTTP_HEADERS, ClientInfoStatus.REASON_UNKNOWN_PROPERTY); - failedProps.put(PROP_CUSTOM_HTTP_PARAMS, ClientInfoStatus.REASON_UNKNOWN_PROPERTY); - throw new SQLClientInfoException(e.getMessage(), failedProps); - } - - if (PROP_APPLICATION_NAME.equals(name)) { - if (ClickHouseChecker.isNullOrBlank(value)) { - clientRequest.removeOption(ClickHouseClientOption.CLIENT_NAME); - } else { - clientRequest.option(ClickHouseClientOption.CLIENT_NAME, value); - } - } else if (PROP_CUSTOM_HTTP_HEADERS.equals(name)) { - if (ClickHouseChecker.isNullOrBlank(value)) { - clientRequest.removeOption(ClickHouseHttpOption.CUSTOM_HEADERS); - } else { - clientRequest.option(ClickHouseHttpOption.CUSTOM_HEADERS, value); - } - } else if (PROP_CUSTOM_HTTP_PARAMS.equals(name)) { - if (ClickHouseChecker.isNullOrBlank(value)) { - clientRequest.removeOption(ClickHouseHttpOption.CUSTOM_PARAMS); - } else { - clientRequest.option(ClickHouseHttpOption.CUSTOM_PARAMS, value); - } - } - } - - @Override - public void setClientInfo(Properties properties) throws SQLClientInfoException { - try { - ensureOpen(); - } catch (SQLException e) { - Map failedProps = new HashMap<>(); - failedProps.put(PROP_APPLICATION_NAME, ClientInfoStatus.REASON_UNKNOWN_PROPERTY); - failedProps.put(PROP_CUSTOM_HTTP_HEADERS, ClientInfoStatus.REASON_UNKNOWN_PROPERTY); - failedProps.put(PROP_CUSTOM_HTTP_PARAMS, ClientInfoStatus.REASON_UNKNOWN_PROPERTY); - throw new SQLClientInfoException(e.getMessage(), failedProps); - } - - if (properties != null) { - String value = properties.getProperty(PROP_APPLICATION_NAME); - if (ClickHouseChecker.isNullOrBlank(value)) { - clientRequest.removeOption(ClickHouseClientOption.CLIENT_NAME); - } else { - clientRequest.option(ClickHouseClientOption.CLIENT_NAME, value); - } - - value = properties.getProperty(PROP_CUSTOM_HTTP_HEADERS); - if (ClickHouseChecker.isNullOrBlank(value)) { - clientRequest.removeOption(ClickHouseHttpOption.CUSTOM_HEADERS); - } else { - clientRequest.option(ClickHouseHttpOption.CUSTOM_HEADERS, value); - } - - value = properties.getProperty(PROP_CUSTOM_HTTP_PARAMS); - if (ClickHouseChecker.isNullOrBlank(value)) { - clientRequest.removeOption(ClickHouseHttpOption.CUSTOM_PARAMS); - } else { - clientRequest.option(ClickHouseHttpOption.CUSTOM_PARAMS, value); - } - } - } - - @Override - public String getClientInfo(String name) throws SQLException { - ensureOpen(); - - ClickHouseConfig config = clientRequest.getConfig(); - String value = null; - if (PROP_APPLICATION_NAME.equals(name)) { - value = 
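The prepareStatement branch above routes a plain "INSERT ... VALUES (?, ?)" statement (nothing but placeholders, commas and whitespace between the parentheses) to InputBasedPreparedStatement, so bound rows are streamed to the server in a single write instead of being rendered back into SQL text. A minimal caller-side sketch of that path; the URL, table and column names are placeholders and the table is assumed to exist.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;

    public class InsertSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder URL; table t(a Int32, b String) is assumed to exist.
            try (Connection conn = DriverManager.getConnection("jdbc:clickhouse://localhost:8123/default");
                 PreparedStatement ps = conn.prepareStatement("INSERT INTO t(a, b) VALUES (?, ?)")) {
                for (int i = 0; i < 1_000; i++) {
                    ps.setInt(1, i);
                    ps.setString(2, "row-" + i);
                    ps.addBatch(); // values are serialized into the piped output stream, not kept as SQL
                }
                ps.executeBatch(); // flushes the stream as one server-side insert
            }
        }
    }
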
config.getClientName(); - } else if (PROP_CUSTOM_HTTP_HEADERS.equals(name)) { - value = config.getStrOption(ClickHouseHttpOption.CUSTOM_HEADERS); - } else if (PROP_CUSTOM_HTTP_PARAMS.equals(name)) { - value = config.getStrOption(ClickHouseHttpOption.CUSTOM_PARAMS); - } - return value; - } - - @Override - public Properties getClientInfo() throws SQLException { - ensureOpen(); - - ClickHouseConfig config = clientRequest.getConfig(); - Properties props = new Properties(); - props.setProperty(PROP_APPLICATION_NAME, config.getClientName()); - props.setProperty(PROP_CUSTOM_HTTP_HEADERS, config.getStrOption(ClickHouseHttpOption.CUSTOM_HEADERS)); - props.setProperty(PROP_CUSTOM_HTTP_PARAMS, config.getStrOption(ClickHouseHttpOption.CUSTOM_PARAMS)); - return props; - } - - @Override - public void setSchema(String schema) throws SQLException { - if (jdbcConf.useSchema()) { - setCurrentDatabase(schema, true); - } else { - log.warn( - "setSchema method is no-op. Please either change databaseTerm to schema or use setCatalog method instead"); - } - } - - @Override - public String getSchema() throws SQLException { - return jdbcConf.useSchema() ? getDatabase() : null; - } - - @Override - public void abort(Executor executor) throws SQLException { - if (executor == null) { - throw SqlExceptionUtils.clientError("Non-null executor is required"); - } - - executor.execute(() -> { - try { - // try harder please - this.client.close(); - } finally { - this.closed = true; - } - }); - } - - @Override - public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { - ensureOpen(); - - if (executor == null) { - throw SqlExceptionUtils.clientError("Non-null executor is required"); - } - - if (milliseconds < 0) { - throw SqlExceptionUtils.clientError("Negative milliseconds is not allowed"); - } - - executor.execute(() -> { - // TODO close this connection when any statement timed out after this amount of - // time - networkTimeout = milliseconds; - }); - } - - @Override - public int getNetworkTimeout() throws SQLException { - ensureOpen(); - - return networkTimeout; - } - - @Override - public ClickHouseConfig getConfig() { - return clientRequest.getConfig(); - } - - @Override - public boolean allowCustomSetting() { - return initialReadOnly != 1; - } - - @Override - public String getCurrentDatabase() { - return database; - } - - @Override - public void setCurrentDatabase(String db, boolean check) throws SQLException { - ensureOpen(); - - if (db == null || db.isEmpty()) { - throw new SQLException("Non-empty database name is required", SqlExceptionUtils.SQL_STATE_INVALID_SCHEMA); - } else { - clientRequest.use(db); - if (check) { - try (ClickHouseResponse response = clientRequest.query("select 1").executeAndWait()) { - database = db; - } catch (ClickHouseException e) { - throw SqlExceptionUtils.handle(e); - } finally { - if (!db.equals(database)) { - clientRequest.use(database); - } - } - } else { - database = db; - } - } - } - - @Override - public String getCurrentUser() { - return user; - } - - @Override - public Calendar getDefaultCalendar() { - return defaultCalendar; - } - - @Override - public Optional getEffectiveTimeZone() { - return clientTimeZone; - } - - @Override - public TimeZone getJvmTimeZone() { - return jvmTimeZone; - } - - @Override - public TimeZone getServerTimeZone() { - return serverTimeZone; - } - - @Override - public ClickHouseVersion getServerVersion() { - return serverVersion; - } - - @Override - public ClickHouseTransaction getTransaction() { - return 
clientRequest.getTransaction(); - } - - @Override - public URI getUri() { - return clientRequest.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX); - } - - @Override - public JdbcConfig getJdbcConfig() { - return jdbcConf; - } - - @Override - public long getMaxInsertBlockSize() { - return initialMaxInsertBlockSize; - } - - @Override - public boolean isTransactionSupported() { - return jdbcConf.isTransactionSupported() && initialNonTxQuerySupport >= 0 - && !ClickHouseChecker.isNullOrEmpty(initialTxCommitWaitMode); - } - - @Override - public boolean isImplicitTransactionSupported() { - return jdbcConf.isTransactionSupported() && initialImplicitTx >= 0; - } - - @Override - public String newQueryId() { - String queryId = clientRequest.getManager().createQueryId(); - JdbcTransaction tx = txRef.get(); - return tx != null ? tx.newQuery(queryId) : queryId; - } - - @Override - public ClickHouseSqlStatement[] parse(String sql, ClickHouseConfig config, Map settings) { - ParseHandler handler = null; - if (jdbcConf.isJdbcCompliant()) { - boolean allowLwDelete = initialDeleteSupport > 1; - boolean allowLwUpdate = false; - if (settings != null) { - Serializable value = settings.get(SETTING_LW_DELETE); - if (!allowLwDelete && (value == null ? initialDeleteSupport == 1 - : ClickHouseOption.fromString(value.toString(), Boolean.class))) { - allowLwDelete = true; - } - } - handler = JdbcParseHandler.getInstance(allowLwDelete, allowLwUpdate, jdbcConf.useLocalFile()); - } else if (jdbcConf.useLocalFile()) { - handler = JdbcParseHandler.getInstance(false, false, true); - } - return ClickHouseSqlParser.parse(sql, config != null ? config : clientRequest.getConfig(), handler); - } - - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - return iface == ClickHouseClient.class || iface == ClickHouseRequest.class - || super.isWrapperFor(iface); - } - - @Override - public T unwrap(Class iface) throws SQLException { - if (iface == ClickHouseClient.class) { - return iface.cast(client); - } else if (iface == ClickHouseRequest.class) { - return iface.cast(clientRequest); - } - - return super.unwrap(iface); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseParameterMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseParameterMetaData.java deleted file mode 100644 index 2945ec28d..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseParameterMetaData.java +++ /dev/null @@ -1,93 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.sql.ParameterMetaData; -import java.sql.SQLException; -import java.sql.Types; -import java.util.List; -import java.util.Map; - -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.jdbc.JdbcTypeMapping; -import com.clickhouse.jdbc.SqlExceptionUtils; -import com.clickhouse.jdbc.JdbcWrapper; - -public class ClickHouseParameterMetaData extends JdbcWrapper implements ParameterMetaData { - protected final List params; - protected final JdbcTypeMapping mapper; - protected final Map> typeMap; - - protected ClickHouseParameterMetaData(List params, JdbcTypeMapping mapper, - Map> typeMap) { - this.params = ClickHouseChecker.nonNull(params, "Parameters"); - - this.mapper = mapper; - this.typeMap = typeMap; - } - - protected ClickHouseColumn getParameter(int param) throws SQLException { - if (param < 1 || param > params.size()) { - throw 
SqlExceptionUtils.clientError(ClickHouseUtils - .format("Parameter index should between 1 and %d but we got %d", params.size(), param)); - } - - return params.get(param - 1); - } - - @Override - public int getParameterCount() throws SQLException { - return params.size(); - } - - @Override - public int isNullable(int param) throws SQLException { - ClickHouseColumn p = getParameter(param); - if (p == null) { - return ParameterMetaData.parameterNullableUnknown; - } - - return p.isNullable() ? ParameterMetaData.parameterNullable : ParameterMetaData.parameterNoNulls; - } - - @Override - public boolean isSigned(int param) throws SQLException { - ClickHouseColumn p = getParameter(param); - return p != null && p.getDataType().isSigned(); - } - - @Override - public int getPrecision(int param) throws SQLException { - ClickHouseColumn p = getParameter(param); - return p != null ? p.getPrecision() : 0; - } - - @Override - public int getScale(int param) throws SQLException { - ClickHouseColumn p = getParameter(param); - return p != null ? p.getScale() : 0; - } - - @Override - public int getParameterType(int param) throws SQLException { - ClickHouseColumn p = getParameter(param); - return p != null ? mapper.toSqlType(p, typeMap) : Types.OTHER; - } - - @Override - public String getParameterTypeName(int param) throws SQLException { - ClickHouseColumn p = getParameter(param); - return p != null ? mapper.toNativeType(p) : ""; - } - - @Override - public String getParameterClassName(int param) throws SQLException { - ClickHouseColumn p = getParameter(param); - return (p != null ? mapper.toJavaClass(p, typeMap) : Object.class).getName(); - } - - @Override - public int getParameterMode(int param) throws SQLException { - return ParameterMetaData.parameterModeIn; - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseStatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseStatementImpl.java deleted file mode 100644 index 63904c9ce..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/ClickHouseStatementImpl.java +++ /dev/null @@ -1,1013 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import com.clickhouse.client.ClickHouseClient; -import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseException; -import com.clickhouse.client.ClickHouseNode; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.client.ClickHouseRequest.Mutation; -import com.clickhouse.client.ClickHouseResponse; -import com.clickhouse.client.ClickHouseResponseSummary; -import com.clickhouse.client.ClickHouseSimpleResponse; -import com.clickhouse.client.ClickHouseTransaction; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.client.config.ClickHouseDefaults; -import com.clickhouse.config.ClickHouseConfigChangeListener; -import com.clickhouse.config.ClickHouseOption; -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseCompression; -import com.clickhouse.data.ClickHouseDataProcessor; -import com.clickhouse.data.ClickHouseDataStreamFactory; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseExternalTable; -import com.clickhouse.data.ClickHouseFile; -import com.clickhouse.data.ClickHouseFormat; -import com.clickhouse.data.ClickHouseInputStream; -import com.clickhouse.data.ClickHouseOutputStream; -import com.clickhouse.data.ClickHouseUtils; -import 
com.clickhouse.data.ClickHouseValues; -import com.clickhouse.jdbc.ClickHouseConnection; -import com.clickhouse.jdbc.ClickHouseResultSet; -import com.clickhouse.jdbc.ClickHouseStatement; -import com.clickhouse.jdbc.JdbcTypeMapping; -import com.clickhouse.jdbc.JdbcWrapper; -import com.clickhouse.jdbc.SqlExceptionUtils; -import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; -import com.clickhouse.jdbc.parser.StatementType; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.nio.file.Path; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; -import java.sql.SQLWarning; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.TimeZone; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.function.Function; - -public class ClickHouseStatementImpl extends JdbcWrapper - implements ClickHouseConfigChangeListener>, ClickHouseStatement { - private static final Logger log = LoggerFactory.getLogger(ClickHouseStatementImpl.class); - - private final ClickHouseConnection connection; - private final ClickHouseRequest request; - - private final int resultSetType; - private final int resultSetConcurrency; - private final int resultSetHoldability; - - private final List batchStmts; - - private boolean closed; - private boolean closeOnCompletion; - - private String cursorName; - private boolean escapeScan; - private int fetchSize; - private int maxFieldSize; - private long maxRows; - private OutputStream mirroredOutput; - private int nullAsDefault; - private boolean poolable; - private volatile String queryId; - private int queryTimeout; - - private ClickHouseResultSet currentResult; - private long currentUpdateCount; - - private ClickHouseDataProcessor processor; - - protected final JdbcTypeMapping mapper; - - protected ClickHouseSqlStatement[] parsedStmts; - - - private HashSet getRequestRoles(ClickHouseSqlStatement stmt) { - HashSet roles = new HashSet<>(); - - Map settings = stmt.getSettings(); - int i = 0; - String role; - while ((role = settings.get("_ROLE_" + i)) != null) { - roles.add(role); - i++; - } - - return roles; - } - - private ClickHouseResponse getLastResponse(Map options, - List tables, Map settings) throws SQLException { - boolean autoTx = connection.getAutoCommit() && connection.isTransactionSupported(); - - // disable extremes - if (parsedStmts.length > 1 && !request.getSessionId().isPresent()) { - request.session(request.getManager().createSessionId()); - } - ClickHouseResponse response = null; - for (int i = 0, len = parsedStmts.length; i < len; i++) { - ClickHouseSqlStatement stmt = parsedStmts[i]; - response = processSqlStatement(stmt); - if (response != null) { - updateResult(stmt, response); - continue; - } - - if (stmt.hasFormat()) { - request.format(ClickHouseFormat.valueOf(stmt.getFormat())); - } - - final HashSet requestRoles = getRequestRoles(stmt); - if (!requestRoles.isEmpty()) { - request.set("_set_roles_stmt", requestRoles); - } - - // TODO skip useless queries to reduce network calls and server load - try { - response = sendRequest(stmt.getSQL(), r -> r); - } catch (Exception e) { - throw SqlExceptionUtils.handle(e); - } 
finally { - request.removeSetting("_set_roles_stmt"); - if (response == null) { - // something went wrong - } else if (i + 1 < len) { - response.close(); - response = null; - } else { - updateResult(stmt, response); - } - } - } - - return response; - } - - protected void ensureOpen() throws SQLException { - if (closed) { - throw SqlExceptionUtils.clientError("Cannot operate on a closed statement"); - } - } - - protected ClickHouseFile getFile(ClickHouseFile f, ClickHouseSqlStatement stmt) { - ClickHouseFormat format = stmt.hasFormat() ? ClickHouseFormat.valueOf(stmt.getFormat()) - : (f.isRecognized() ? f.getFormat() - : (ClickHouseFormat) ClickHouseDefaults.FORMAT.getDefaultValue()); - ClickHouseCompression compressAlgorithm = stmt.hasCompressAlgorithm() - ? ClickHouseCompression.fromEncoding(ClickHouseUtils.unescape(stmt.getCompressAlgorithm())) - : (f.isRecognized() ? f.getCompressionAlgorithm() : ClickHouseCompression.NONE); - int compressLevel = stmt.hasCompressLevel() ? Integer.parseInt(stmt.getCompressLevel()) : -1; - return ClickHouseFile.of(f.getFile(), compressAlgorithm, compressLevel, format); - } - - protected ClickHouseResponse processSqlStatement(ClickHouseSqlStatement stmt) throws SQLException { - if (stmt.getStatementType() == StatementType.USE) { - String dbName = connection.getCurrentDatabase(); - final String newDb = stmt.getDatabaseOrDefault(dbName); - final boolean hasSession = request.getSessionId().isPresent(); - if (!hasSession) { - request.session(request.getManager().createSessionId()); - } - // execute the query to ensure 1) it's valid; and 2) the database exists - try (ClickHouseResponse response = request.use(newDb).query(stmt.getSQL()).executeAndWait()) { - connection.setCurrentDatabase(dbName = newDb, false); - } catch (ClickHouseException e) { - throw SqlExceptionUtils.handle(e); - } finally { - if (!dbName.equals(newDb)) { - request.use(dbName); - } - if (!hasSession) { - request.clearSession(); - } - } - return ClickHouseResponse.EMPTY; - } else if (stmt.isTCL()) { - if (stmt.containsKeyword(ClickHouseTransaction.COMMAND_BEGIN)) { - connection.begin(); - } else if (stmt.containsKeyword(ClickHouseTransaction.COMMAND_COMMIT)) { - connection.commit(); - } else if (stmt.containsKeyword(ClickHouseTransaction.COMMAND_ROLLBACK)) { - connection.rollback(); - } else { - throw new SQLFeatureNotSupportedException("Unsupported TCL: " + stmt.getSQL()); - } - return ClickHouseResponse.EMPTY; - } else if (connection.getJdbcConfig().useLocalFile() && stmt.hasFile()) { - String file = ClickHouseUtils.unescape(stmt.getFile()); - boolean suffix = file.lastIndexOf('!') == file.length() - 1; - if (suffix) { - file = file.substring(0, file.length() - 1); - } - if (stmt.getStatementType() == StatementType.SELECT) { - ClickHouseFile f = ClickHouseFile.of(ClickHouseUtils.getFile(file)); - if (!suffix && f.getFile().exists()) { - throw SqlExceptionUtils - .clientError(ClickHouseUtils.format("Output file [%s] already exists!", f.getFile())); - } - - f = getFile(f, stmt); - final ClickHouseResponseSummary summary = new ClickHouseResponseSummary(null, null); - TimeZone responseTimeZone = null; - try (ClickHouseResponse response = request.query(stmt.getSQL()).output(f).executeAndWait()) { - summary.add(response.getSummary()); - responseTimeZone = response.getTimeZone(); - } catch (ClickHouseException e) { - throw SqlExceptionUtils.handle(e); - } - return ClickHouseSimpleResponse.of(getConfig(), - Arrays.asList(ClickHouseColumn.of("file", ClickHouseDataType.String, false), - 
ClickHouseColumn.of("format", ClickHouseDataType.String, false), - ClickHouseColumn.of("compression", ClickHouseDataType.String, false), - ClickHouseColumn.of("level", ClickHouseDataType.Int32, false), - ClickHouseColumn.of("bytes", ClickHouseDataType.UInt64, false)), - new Object[][] { { file, f.getFormat().name(), - f.hasCompression() ? f.getCompressionAlgorithm().encoding() : "none", - f.getCompressionLevel(), f.getFile().length() } }, - summary, - responseTimeZone); - } else if (stmt.getStatementType() == StatementType.INSERT) { - final Mutation m = request.write().query(stmt.getSQL()); - final ClickHouseResponseSummary summary = new ClickHouseResponseSummary(null, null); - try { - for (Path p : ClickHouseUtils.findFiles(file)) { - ClickHouseFile f = ClickHouseFile.of(p.toFile()); - if (!f.getFile().exists()) { - if (suffix) { - throw SqlExceptionUtils - .clientError(ClickHouseUtils.format("File [%s] does not exist!", f.getFile())); - } else { - log.warn("Skip [%s] as it does not exist - perhaps it was just deleted somehow?", f); - } - } else { - try (ClickHouseResponse response = m.data(getFile(f, stmt)).executeAndWait()) { - summary.add(response.getSummary()); - log.debug("Loaded %d rows from [%s]", response.getSummary().getWrittenRows(), - f.getFile().getAbsolutePath()); - } catch (ClickHouseException e) { - throw SqlExceptionUtils.handle(e); - } - } - } - if (suffix && summary.getUpdateCount() == 0) { - throw SqlExceptionUtils.clientError("No file imported: " + file); - } - summary.seal(); - } catch (IOException e) { - throw SqlExceptionUtils.handle(e); - } - return ClickHouseSimpleResponse.of(null, null, new Object[0][], summary, null); - } - } - - return null; - } - - protected ClickHouseResponse executeStatement(String stmt, Map options, - List tables, Map settings) throws SQLException { - try { - if (options != null) { - request.options(options); - } - if (settings != null && !settings.isEmpty()) { - if (!request.getSessionId().isPresent()) { - request.session(request.getManager().createSessionId()); - } - for (Entry e : settings.entrySet()) { - request.set(e.getKey(), e.getValue()); - } - } - if (tables != null && !tables.isEmpty()) { - List list = new ArrayList<>(tables.size()); - char quote = '`'; - for (ClickHouseExternalTable t : tables) { - if (t.isTempTable()) { - if (!request.getSessionId().isPresent()) { - request.session(request.getManager().createSessionId()); - } - String tableName = new StringBuilder().append(quote) - .append(ClickHouseUtils.escape(t.getName(), quote)).append(quote).toString(); - try (ClickHouseResponse dropResp = request - .query("DROP TEMPORARY TABLE IF EXISTS ".concat(tableName)).executeAndWait(); - ClickHouseResponse createResp = request - .query("CREATE TEMPORARY TABLE " + tableName + "(" + t.getStructure() + ")") - .executeAndWait(); - ClickHouseResponse writeResp = request.write().table(tableName).data(t.getContent()) - .executeAndWait()) { - // ignore - } - } else { - list.add(t); - } - } - request.external(list); - } - - return sendRequest(stmt, r -> r); - } catch (Exception e) { - throw SqlExceptionUtils.handle(e); - } - } - - protected ClickHouseResponse executeStatement(ClickHouseSqlStatement stmt, - Map options, List tables, - Map settings) throws SQLException { - ClickHouseResponse resp = processSqlStatement(stmt); - if (resp != null) { - return resp; - } - return executeStatement(stmt.getSQL(), options, tables, settings); - } - - private ClickHouseResponse sendRequest(String sql, Function, ClickHouseRequest> preSeal) throws 
SQLException { - boolean autoTx = connection.getAutoCommit() && connection.isTransactionSupported(); - - ClickHouseRequest req; - ClickHouseTransaction tx = null; - synchronized (request) { - try { - if (autoTx) { - if (connection.isImplicitTransactionSupported()) { - request.set(ClickHouseTransaction.SETTING_IMPLICIT_TRANSACTION, 1).transaction(null); - } else { - tx = request.getManager().createImplicitTransaction(request); - request.transaction(connection.getTransaction()); - } - } else { - try { - request.transaction(connection.getTransaction()); - } catch (ClickHouseException e) { - throw SqlExceptionUtils.handle(e); - } - } - - req = preSeal.apply(request).query(sql, queryId = connection.newQueryId()).seal(); - } catch (Exception e) { - throw SqlExceptionUtils.handle(e); - } - } - - try { - return req.executeAndWait(); - } catch (Exception e) { - if (tx != null) { - try { - tx.rollback(); - } catch (Exception ex) { - log.warn("Failed to rollback transaction", ex); - } - } - throw SqlExceptionUtils.handle(e); - } finally { - try { - request.transaction(null); - } catch (Exception e) { - throw SqlExceptionUtils.handle(ClickHouseException.of(e, req.getServer())); - } - } - } - - protected int executeInsert(String sql, InputStream input) throws SQLException { - try (ClickHouseResponse response = sendRequest(sql, r -> r.write().data(input)); - ResultSet rs = updateResult(new ClickHouseSqlStatement(sql, StatementType.INSERT), response)) { - // no more actions needed - } catch (Exception e) { - throw SqlExceptionUtils.handle(e); - } - return (int) currentUpdateCount; - } - - protected ClickHouseDataProcessor getDataProcessor(ClickHouseInputStream input, Map settings, - ClickHouseColumn[] columns) throws SQLException { - if (processor == null) { - try { - processor = ClickHouseDataStreamFactory.getInstance().getProcessor(getConfig(), input, null, settings, - Arrays.asList(columns)); - } catch (IOException e) { - throw SqlExceptionUtils.clientError(e); - } - } - return processor; - } - - protected ClickHouseDataProcessor getDataProcessor(ClickHouseOutputStream output, - Map settings, ClickHouseColumn[] columns) throws SQLException { - if (processor == null) { - try { - processor = ClickHouseDataStreamFactory.getInstance().getProcessor(getConfig(), null, output, settings, - Arrays.asList(columns)); - } catch (IOException e) { - throw SqlExceptionUtils.clientError(e); - } - } - return processor; - } - - protected void resetDataProcessor() { - this.processor = null; - } - - protected ClickHouseSqlStatement getLastStatement() { - if (parsedStmts == null || parsedStmts.length == 0) { - throw new IllegalArgumentException("At least one parsed statement is required"); - } - - return parsedStmts[parsedStmts.length - 1]; - } - - protected void setLastStatement(ClickHouseSqlStatement stmt) { - if (parsedStmts != null && parsedStmts.length > 0) { - parsedStmts[parsedStmts.length - 1] = ClickHouseChecker.nonNull(stmt, "ParsedStatement"); - } - } - - protected ClickHouseSqlStatement parseSqlStatements(String sql) { - parsedStmts = connection.parse(sql, getConfig(), request.getSettings()); - - if (parsedStmts == null || parsedStmts.length == 0) { - // should never happen - throw new IllegalArgumentException("Failed to parse given SQL: " + sql); - } - - return getLastStatement(); - } - - protected ClickHouseResultSet newEmptyResultSet() throws SQLException { - return new ClickHouseResultSet("", "", this, ClickHouseResponse.EMPTY); - } - - protected ResultSet updateResult(ClickHouseSqlStatement stmt, 
ClickHouseResponse response) throws SQLException { - if (stmt.isQuery() || (!stmt.isRecognized() && !response.getColumns().isEmpty())) { - currentUpdateCount = -1L; - currentResult = new ClickHouseResultSet(stmt.getDatabaseOrDefault(getConnection().getCurrentDatabase()), - stmt.getTable(), this, response); - } else { - response.close(); - currentUpdateCount = stmt.isDDL() || stmt.isTCL() ? 0L - : (response.getSummary().isEmpty() ? 1L : response.getSummary().getWrittenRows()); - currentResult = null; - } - return currentResult; - } - - protected ClickHouseStatementImpl(ClickHouseConnectionImpl connection, ClickHouseRequest request, - int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { - if (connection == null || request == null) { - throw SqlExceptionUtils.clientError("Non-null connection and request are required"); - } - - this.connection = connection; - this.request = request.setChangeListener(this); - - // TODO validate resultSet attributes - this.resultSetType = ResultSet.TYPE_FORWARD_ONLY; - this.resultSetConcurrency = ResultSet.CONCUR_READ_ONLY; - this.resultSetHoldability = ResultSet.CLOSE_CURSORS_AT_COMMIT; - - this.closed = false; - this.closeOnCompletion = true; - - this.fetchSize = connection.getJdbcConfig().getFetchSize(); - this.maxFieldSize = 0; - this.maxRows = 0L; - this.nullAsDefault = connection.getJdbcConfig().getNullAsDefault(); - this.poolable = false; - this.queryId = null; - - this.queryTimeout = 0; - - this.currentResult = null; - this.currentUpdateCount = -1L; - - this.mapper = connection.getJdbcTypeMapping(); - - this.batchStmts = new LinkedList<>(); - - ClickHouseConfig c = request.getConfig(); - setLargeMaxRows(c.getMaxResultRows()); - setQueryTimeout(c.getMaxExecutionTime()); - - optionChanged(this.request, ClickHouseClientOption.FORMAT, null, null); - } - - @Override - public void optionChanged(ClickHouseRequest source, ClickHouseOption option, Serializable oldValue, - Serializable newValue) { - if (source != request) { - return; - } - - if (option == ClickHouseClientOption.FORMAT) { - this.processor = null; - } - } - - @Override - public void settingChanged(ClickHouseRequest source, String setting, Serializable oldValue, - Serializable newValue) { - // ClickHouseConfigChangeListener.super.settingChanged(source, setting, - // oldValue, newValue); - } - - @Override - public boolean execute(String sql) throws SQLException { - executeQuery(sql); - return currentResult != null; - } - - @Override - public ResultSet executeQuery(String sql) throws SQLException { - ensureOpen(); - if (!batchStmts.isEmpty()) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - - parseSqlStatements(sql); - getLastResponse(null, null, null); - return currentResult != null ? 
currentResult : newEmptyResultSet(); - } - - @Override - public long executeLargeUpdate(String sql) throws SQLException { - ensureOpen(); - if (!batchStmts.isEmpty()) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - - parseSqlStatements(sql); - - try (ClickHouseResponse response = getLastResponse(null, null, null)) { - return currentUpdateCount; - } catch (Exception e) { - throw SqlExceptionUtils.handle(e); - } - } - - @Override - public int executeUpdate(String sql) throws SQLException { - return (int) executeLargeUpdate(sql); - } - - @Override - public void close() throws SQLException { - if (currentResult != null) { - currentResult.close(); - } - - this.closed = true; - } - - @Override - public int getMaxFieldSize() throws SQLException { - ensureOpen(); - - return maxFieldSize; - } - - @Override - public void setMaxFieldSize(int max) throws SQLException { - if (max < 0) { - throw SqlExceptionUtils.clientError("Max field size cannot be set to negative number"); - } - ensureOpen(); - - maxFieldSize = max; - } - - @Override - public long getLargeMaxRows() throws SQLException { - ensureOpen(); - - return maxRows; - } - - @Override - public int getMaxRows() throws SQLException { - return (int) getLargeMaxRows(); - } - - @Override - public void setLargeMaxRows(long max) throws SQLException { - if (max < 0L) { - throw SqlExceptionUtils.clientError("Max rows cannot be set to negative number"); - } - ensureOpen(); - - if (this.maxRows != max) { - if (max == 0L || !connection.allowCustomSetting()) { - request.removeSetting(ClickHouseClientOption.MAX_RESULT_ROWS.getKey()); - request.removeSetting("result_overflow_mode"); - } else { - request.set(ClickHouseClientOption.MAX_RESULT_ROWS.getKey(), max); - request.set("result_overflow_mode", "break"); - } - this.maxRows = max; - } - } - - @Override - public void setMaxRows(int max) throws SQLException { - setLargeMaxRows(max); - } - - @Override - public void setEscapeProcessing(boolean enable) throws SQLException { - ensureOpen(); - - this.escapeScan = enable; - } - - @Override - public int getQueryTimeout() throws SQLException { - ensureOpen(); - - return queryTimeout; - } - - @Override - public void setQueryTimeout(int seconds) throws SQLException { - if (seconds < 0) { - throw SqlExceptionUtils.clientError("Query timeout cannot be set to negative seconds"); - } - ensureOpen(); - - if (this.queryTimeout != seconds) { - if (seconds == 0) { - request.removeSetting("max_execution_time"); - } else { - request.set("max_execution_time", seconds); - } - this.queryTimeout = seconds; - } - } - - @Override - public void cancel() throws SQLException { - if (isClosed()) { - return; - } - - final String qid; - if ((qid = this.queryId) != null) { - String sessionIdKey = ClickHouseClientOption.SESSION_ID.getKey(); - ClickHouseNode server = request.getServer(); - if (server.getOptions().containsKey(sessionIdKey)) { - server = ClickHouseNode.builder(request.getServer()).removeOption(sessionIdKey) - .removeOption(ClickHouseClientOption.SESSION_CHECK.getKey()) - .removeOption(ClickHouseClientOption.SESSION_TIMEOUT.getKey()).build(); - } - try { - List summaries = ClickHouseClient - .send(server, String.format("KILL QUERY WHERE query_id='%s'", qid)) - .get(request.getConfig().getConnectionTimeout(), TimeUnit.MILLISECONDS); - log.info("Killed query [%s]: %s", qid, summaries.get(0)); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - log.warn("Cancellation of query [%s] was interrupted", qid); - } catch (TimeoutException e) { - 
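The cancel() implementation shown above kills the in-flight query on the server via "KILL QUERY WHERE query_id=...", and setQueryTimeout maps onto the max_execution_time setting. A hedged sketch of how a caller would combine the two; the URL, query and timing are illustrative only, and a cancelled query surfaces as an SQLException on the reading thread.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class CancelSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder URL.
            try (Connection conn = DriverManager.getConnection("jdbc:clickhouse://localhost:8123/default");
                 Statement stmt = conn.createStatement()) {
                stmt.setQueryTimeout(30); // server-side max_execution_time, in seconds

                Thread canceller = new Thread(() -> {
                    try {
                        Thread.sleep(5_000L);
                        stmt.cancel(); // triggers KILL QUERY for the current query id
                    } catch (Exception ignore) {
                        // best-effort cancellation
                    }
                });
                canceller.start();

                try (ResultSet rs = stmt.executeQuery("SELECT number FROM system.numbers LIMIT 1000000000")) {
                    while (rs.next()) {
                        // consume rows until the query finishes or is cancelled
                    }
                }
            }
        }
    }
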
log.warn("Timed out after waiting %d ms for killing query [%s]", - request.getConfig().getConnectionTimeout(), qid); - } catch (Exception e) { // unexpected - throw SqlExceptionUtils.handle(e.getCause()); - } - } - if (request.getTransaction() != null) { - request.getTransaction().abort(); - } - } - - @Override - public SQLWarning getWarnings() throws SQLException { - ensureOpen(); - - return null; - } - - @Override - public void clearWarnings() throws SQLException { - ensureOpen(); - } - - @Override - public void setCursorName(String name) throws SQLException { - ensureOpen(); - - cursorName = name; - } - - @Override - public ResultSet getResultSet() throws SQLException { - ensureOpen(); - - return currentResult; - } - - @Override - public long getLargeUpdateCount() throws SQLException { - ensureOpen(); - - return currentUpdateCount; - } - - @Override - public int getUpdateCount() throws SQLException { - return (int) getLargeUpdateCount(); - } - - @Override - public boolean getMoreResults() throws SQLException { - ensureOpen(); - - if (currentResult != null) { - currentResult.close(); - currentResult = null; - } - currentUpdateCount = -1L; - return false; - } - - @Override - public void setFetchDirection(int direction) throws SQLException { - ensureOpen(); - - if (direction != ResultSet.FETCH_FORWARD) { - throw SqlExceptionUtils.unsupportedError("only FETCH_FORWARD is supported in setFetchDirection"); - } - } - - @Override - public int getFetchDirection() throws SQLException { - ensureOpen(); - - return ResultSet.FETCH_FORWARD; - } - - @Override - public void setFetchSize(int rows) throws SQLException { - if (rows < 0) { - log.warn("Negative fetch size is treated as 0."); - rows = 0; - } - - ensureOpen(); - - if (fetchSize != rows) { - fetchSize = rows; - - if (rows == 0) { - request.removeOption(ClickHouseClientOption.READ_BUFFER_SIZE); - } else { - request.option(ClickHouseClientOption.READ_BUFFER_SIZE, rows * 1024); - } - } - } - - @Override - public int getFetchSize() throws SQLException { - ensureOpen(); - - return fetchSize; - } - - @Override - public int getResultSetConcurrency() throws SQLException { - ensureOpen(); - - return resultSetConcurrency; - } - - @Override - public int getResultSetType() throws SQLException { - ensureOpen(); - - return resultSetType; - } - - @Override - public void addBatch(String sql) throws SQLException { - ensureOpen(); - - for (ClickHouseSqlStatement s : connection.parse(sql, getConfig(), request.getSettings())) { - this.batchStmts.add(s); - } - } - - @Override - public void clearBatch() throws SQLException { - ensureOpen(); - - this.batchStmts.clear(); - } - - @Override - public int[] executeBatch() throws SQLException { - long[] largeUpdateCounts = executeLargeBatch(); - - int len = largeUpdateCounts.length; - int[] results = new int[len]; - for (int i = 0; i < len; i++) { - results[i] = (int) largeUpdateCounts[i]; - } - return results; - } - - @Override - public long[] executeLargeBatch() throws SQLException { - ensureOpen(); - if (batchStmts.isEmpty()) { - return ClickHouseValues.EMPTY_LONG_ARRAY; - } - - boolean continueOnError = getConnection().getJdbcConfig().isContinueBatchOnError(); - long[] results = new long[batchStmts.size()]; - try { - int i = 0; - for (ClickHouseSqlStatement s : batchStmts) { - try (ClickHouseResponse r = executeStatement(s, null, null, null); ResultSet rs = updateResult(s, r)) { - if (rs != null && s.isQuery()) { - throw SqlExceptionUtils.queryInBatchError(results); - } - results[i] = currentUpdateCount <= 0L ? 
0L : currentUpdateCount; - } catch (Exception e) { - results[i] = EXECUTE_FAILED; - if (!continueOnError) { - throw SqlExceptionUtils.batchUpdateError(e, results); - } - log.error("Faled to execute task %d of %d", i + 1, batchStmts.size(), e); - } finally { - i++; - } - } - } finally { - clearBatch(); - } - - return results; - } - - @Override - public boolean getMoreResults(int current) throws SQLException { - ensureOpen(); - - switch (current) { - case Statement.KEEP_CURRENT_RESULT: - break; - case Statement.CLOSE_CURRENT_RESULT: - case Statement.CLOSE_ALL_RESULTS: - if (currentResult != null) { - currentResult.close(); - } - break; - default: - throw SqlExceptionUtils.clientError("Unknown statement constants: " + current); - } - return false; - } - - @Override - public ResultSet getGeneratedKeys() throws SQLException { - ensureOpen(); - - return new ClickHouseResultSet(request.getConfig().getDatabase(), ClickHouseSqlStatement.DEFAULT_TABLE, this, - ClickHouseResponse.EMPTY); - } - - @Override - public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - return executeUpdate(sql); - } - - @Override - public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { - return executeUpdate(sql); - } - - @Override - public int executeUpdate(String sql, String[] columnNames) throws SQLException { - return executeUpdate(sql); - } - - @Override - public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { - return execute(sql); - } - - @Override - public boolean execute(String sql, int[] columnIndexes) throws SQLException { - return execute(sql); - } - - @Override - public boolean execute(String sql, String[] columnNames) throws SQLException { - return execute(sql); - } - - @Override - public int getResultSetHoldability() throws SQLException { - ensureOpen(); - - return resultSetHoldability; - } - - @Override - public boolean isClosed() throws SQLException { - return closed; - } - - @Override - public void setPoolable(boolean poolable) throws SQLException { - ensureOpen(); - - this.poolable = poolable; - } - - @Override - public boolean isPoolable() throws SQLException { - ensureOpen(); - - return poolable; - } - - @Override - public void closeOnCompletion() throws SQLException { - ensureOpen(); - - closeOnCompletion = true; - } - - @Override - public boolean isCloseOnCompletion() throws SQLException { - ensureOpen(); - - return closeOnCompletion; - } - - @Override - public ClickHouseConnection getConnection() throws SQLException { - ensureOpen(); - - return connection; - } - - @Override - public ClickHouseConfig getConfig() { - return request.getConfig(); - } - - @Override - public OutputStream getMirroredOutput() { - return mirroredOutput; - } - - @Override - public void setMirroredOutput(OutputStream out) { - if (this.mirroredOutput != null) { - try { - this.mirroredOutput.flush(); - } catch (IOException e) { - // ignore - } - } - this.mirroredOutput = out; - } - - @Override - public int getNullAsDefault() { - return nullAsDefault; - } - - @Override - public void setNullAsDefault(int level) { - this.nullAsDefault = level; - } - - @Override - public ClickHouseRequest getRequest() { - return request; - } - - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - return iface == ClickHouseRequest.class || super.isWrapperFor(iface); - } - - @Override - public T unwrap(Class iface) throws SQLException { - return iface == ClickHouseRequest.class ? 
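executeLargeBatch above runs each parsed statement in turn, records EXECUTE_FAILED for failures, and either aborts or keeps going depending on the continueBatchOnError JDBC setting. A minimal caller-side sketch, again with a placeholder URL and table; SELECTs are rejected inside a batch (queryInBatchError), so only DML/DDL belongs here.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class BatchSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder URL.
            try (Connection conn = DriverManager.getConnection("jdbc:clickhouse://localhost:8123/default");
                 Statement stmt = conn.createStatement()) {
                stmt.addBatch("INSERT INTO t VALUES (1)");
                stmt.addBatch("INSERT INTO t VALUES (2)");
                long[] counts = stmt.executeLargeBatch();
                for (long c : counts) {
                    // Statement.EXECUTE_FAILED marks an entry whose statement failed
                    System.out.println("update count: " + c);
                }
            }
        }
    }
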
iface.cast(request) : super.unwrap(iface); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/InputBasedPreparedStatement.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/InputBasedPreparedStatement.java deleted file mode 100644 index a3617a41b..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/InputBasedPreparedStatement.java +++ /dev/null @@ -1,486 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.io.IOException; -import java.math.BigDecimal; -import java.sql.Array; -import java.sql.Date; -import java.sql.ParameterMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Time; -import java.sql.Timestamp; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.ZoneId; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collections; -import java.util.List; - -import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataProcessor; -import com.clickhouse.data.ClickHouseDataStreamFactory; -import com.clickhouse.data.ClickHousePipedOutputStream; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.data.ClickHouseValue; -import com.clickhouse.data.ClickHouseValues; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; -import com.clickhouse.jdbc.ClickHousePreparedStatement; -import com.clickhouse.jdbc.SqlExceptionUtils; - -public class InputBasedPreparedStatement extends AbstractPreparedStatement implements ClickHousePreparedStatement { - private static final Logger log = LoggerFactory.getLogger(InputBasedPreparedStatement.class); - - private final Calendar defaultCalendar; - private final ZoneId timeZoneForDate; - private final ZoneId timeZoneForTs; - - private final ClickHouseColumn[] columns; - private final ClickHouseValue[] values; - private final ClickHouseParameterMetaData paramMetaData; - private final boolean[] flags; - - private int counter; - private ClickHousePipedOutputStream stream; - - protected InputBasedPreparedStatement(ClickHouseConnectionImpl connection, ClickHouseRequest request, - List columns, int resultSetType, int resultSetConcurrency, int resultSetHoldability) - throws SQLException { - super(connection, request, resultSetType, resultSetConcurrency, resultSetHoldability); - - if (columns == null) { - throw SqlExceptionUtils.clientError("Non-null column list is required"); - } - - ClickHouseConfig config = getConfig(); - defaultCalendar = connection.getDefaultCalendar(); - timeZoneForTs = config.getUseTimeZone().toZoneId(); - timeZoneForDate = config.isUseServerTimeZoneForDates() ? 
timeZoneForTs : null; - - int size = columns.size(); - this.columns = new ClickHouseColumn[size]; - this.values = new ClickHouseValue[size]; - List list = new ArrayList<>(size); - int i = 0; - for (ClickHouseColumn col : columns) { - this.columns[i] = col; - this.values[i] = col.newValue(config); - list.add(col); - i++; - } - paramMetaData = new ClickHouseParameterMetaData(Collections.unmodifiableList(list), mapper, - connection.getTypeMap()); - flags = new boolean[size]; - - counter = 0; - // it's important to make sure the queue has unlimited length - stream = ClickHouseDataStreamFactory.getInstance().createPipedOutputStream(config.getWriteBufferSize(), 0, - config.getSocketTimeout()); - } - - protected void ensureParams() throws SQLException { - List list = new ArrayList<>(); - for (int i = 0, len = values.length; i < len; i++) { - if (!flags[i]) { - list.add(String.valueOf(i + 1)); - } - } - - if (!list.isEmpty()) { - throw SqlExceptionUtils.clientError(ClickHouseUtils.format("Missing parameter(s): %s", list)); - } - } - - @Override - protected long[] executeAny(boolean asBatch) throws SQLException { - ensureOpen(); - boolean continueOnError = false; - if (asBatch) { - if (counter < 1) { - return ClickHouseValues.EMPTY_LONG_ARRAY; - } - continueOnError = getConnection().getJdbcConfig().isContinueBatchOnError(); - } else { - try { - if (counter != 0) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - addBatch(); - } catch (SQLException e) { - clearBatch(); - throw e; - } - } - - long[] results = new long[counter]; - long rows = 0; - try { - stream.close(); - rows = executeInsert(getRequest().getStatements(false).get(0), stream.getInputStream()); - if (asBatch && getResultSet() != null) { - throw SqlExceptionUtils.queryInBatchError(results); - } - - if (counter == 1) { - results[0] = rows; - } else { - // FIXME grpc and tcp by default can provides accurate result - Arrays.fill(results, 1); - } - } catch (Exception e) { - if (!asBatch) { - throw SqlExceptionUtils.handle(e); - } - - // just a wild guess... - if (rows < 1) { - results[0] = EXECUTE_FAILED; - } else { - if (rows >= counter) { - rows = counter; - } - for (int i = 0, len = (int) rows - 1; i < len; i++) { - results[i] = 1; - } - results[(int) rows] = EXECUTE_FAILED; - } - - if (!continueOnError) { - throw SqlExceptionUtils.batchUpdateError(e, results); - } - log.error("Failed to execute batch insert of %d records", counter + 1, e); - } finally { - clearBatch(); - } - - return results; - } - - @Override - protected int getMaxParameterIndex() { - return values.length; - } - - @Override - public ResultSet executeQuery() throws SQLException { - ensureParams(); - try { - executeAny(false); - } catch (SQLException e) { - if (e.getSQLState() != null) { - throw e; - } else { - throw new SQLException("Query failed", SqlExceptionUtils.SQL_STATE_SQL_ERROR, e.getCause()); - } - } - - ResultSet rs = getResultSet(); - if (rs != null) { // should not happen - try { - rs.close(); - } catch (Exception e) { - // ignore - } - } - return newEmptyResultSet(); - } - - @Override - public long executeLargeUpdate() throws SQLException { - ensureParams(); - try { - executeAny(false); - } catch (SQLException e) { - if (e.getSQLState() != null) { - throw e; - } else { - throw new SQLException("Update failed", SqlExceptionUtils.SQL_STATE_SQL_ERROR, e.getCause()); - } - } - long row = getLargeUpdateCount(); - return row > 0L ? 
row : 0L; - } - - @Override - public void setByte(int parameterIndex, byte x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public void setShort(int parameterIndex, short x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public void setInt(int parameterIndex, int x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public void setLong(int parameterIndex, long x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public void setFloat(int parameterIndex, float x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public void setDouble(int parameterIndex, double x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public void setString(int parameterIndex, String x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public void setBytes(int parameterIndex, byte[] x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public void clearParameters() throws SQLException { - ensureOpen(); - - for (int i = 0, len = values.length; i < len; i++) { - flags[i] = false; - } - } - - @Override - public void setObject(int parameterIndex, Object x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } - - @Override - public boolean execute() throws SQLException { - ensureParams(); - try { - executeAny(false); - } catch (SQLException e) { - if (e.getSQLState() != null) { - throw e; - } else { - throw new SQLException("Execution failed", SqlExceptionUtils.SQL_STATE_SQL_ERROR, e.getCause()); - } - } - return false; - } - - @Override - public void addBatch() throws SQLException { - ensureOpen(); - - ClickHouseDataProcessor processor = getDataProcessor(stream, null, columns); - int nullAsDefault = getNullAsDefault(); - // validate the values before the actual write into the processor - for (int i = 0, len = values.length; i < len; i++) { - if (!flags[i]) { - throw SqlExceptionUtils - .clientError(ClickHouseUtils.format("Missing value for parameter #%d [%s]", i + 1, columns[i])); - } - ClickHouseColumn col = columns[i]; - ClickHouseValue val = values[i]; - if (!col.isNestedType() && !col.isNullable() && (val == null || val.isNullOrEmpty())) { - if (nullAsDefault > 1 && val != null) { - val.resetToDefault(); - } else if (nullAsDefault < 1) { - throw SqlExceptionUtils.clientError(ClickHouseUtils.format( - "Cannot set null to non-nullable column #%d [%s]", i + 1, col)); - } - } - } - // the actual write to the processor - for (int i = 0, len = values.length; i < len; i++) { - ClickHouseValue val = values[i]; - try { - processor.write(val); - } catch (IOException e) 
{ - // should not happen - throw SqlExceptionUtils.handle(e); - } - } - counter++; - clearParameters(); - } - - @Override - public void clearBatch() throws SQLException { - ensureOpen(); - - // just in case - try { - stream.close(); - } catch (Exception e) { - // ignore - } - counter = 0; - - ClickHouseConfig config = getConfig(); - stream = ClickHouseDataStreamFactory.getInstance().createPipedOutputStream(config.getWriteBufferSize(), 0, - config.getSocketTimeout()); - resetDataProcessor(); - } - - @Override - public void setArray(int parameterIndex, Array x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - Object array = x != null ? x.getArray() : x; - values[idx].update(array); - flags[idx] = true; - } - - @Override - public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - if (x != null) { - LocalDate d; - if (cal == null) { - cal = defaultCalendar; - } - ZoneId tz = cal.getTimeZone().toZoneId(); - if (timeZoneForDate == null || tz.equals(timeZoneForDate)) { - d = x.toLocalDate(); - } else { - Calendar c = (Calendar) cal.clone(); - c.setTime(x); - d = c.toInstant().atZone(tz).withZoneSameInstant(timeZoneForDate).toLocalDate(); - } - values[idx].update(d); - } else { - values[idx].resetToNullOrEmpty(); - } - flags[idx] = true; - } - - @Override - public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - if (x != null) { - LocalTime t; - if (cal == null) { - cal = defaultCalendar; - } - ZoneId tz = cal.getTimeZone().toZoneId(); - if (tz.equals(timeZoneForTs)) { - t = x.toLocalTime(); - } else { - Calendar c = (Calendar) cal.clone(); - c.setTime(x); - t = c.toInstant().atZone(tz).withZoneSameInstant(timeZoneForTs).toLocalTime(); - } - values[idx].update(t); - } else { - values[idx].resetToNullOrEmpty(); - } - flags[idx] = true; - } - - @Override - public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - if (x != null) { - LocalDateTime dt; - if (cal == null) { - cal = defaultCalendar; - } - ZoneId tz = cal.getTimeZone().toZoneId(); - if (tz.equals(timeZoneForTs)) { - dt = x.toLocalDateTime(); - } else { - Calendar c = (Calendar) cal.clone(); - c.setTime(x); - dt = c.toInstant().atZone(tz).withNano(x.getNanos()).withZoneSameInstant(timeZoneForTs) - .toLocalDateTime(); - } - values[idx].update(dt); - } else { - values[idx].resetToNullOrEmpty(); - } - flags[idx] = true; - } - - @Override - public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].resetToNullOrEmpty(); - flags[idx] = true; - } - - @Override - public ParameterMetaData getParameterMetaData() throws SQLException { - return paramMetaData; - } - - @Override - public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - values[idx].update(x); - flags[idx] = true; - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcSavepoint.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcSavepoint.java deleted file mode 100644 index ce35d0ff7..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcSavepoint.java +++ /dev/null @@ -1,42 +0,0 @@ -package 
com.clickhouse.jdbc.internal; - -import java.sql.SQLException; -import java.sql.Savepoint; - -import com.clickhouse.jdbc.SqlExceptionUtils; - -public class JdbcSavepoint implements Savepoint { - final int id; - final String name; - - JdbcSavepoint(int id, String name) { - this.id = id; - this.name = name; - } - - @Override - public int getSavepointId() throws SQLException { - if (name != null) { - throw SqlExceptionUtils - .clientError("Cannot get ID from a named savepoint, please use getSavepointName() instead"); - } - - return id; - } - - @Override - public String getSavepointName() throws SQLException { - if (name == null) { - throw SqlExceptionUtils - .clientError("Cannot get name from an un-named savepoint, please use getSavepointId() instead"); - } - - return name; - } - - @Override - public String toString() { - return new StringBuilder().append("JdbcSavepoint [id=").append(id).append(", name=").append(name) - .append(']').toString(); - } -} \ No newline at end of file diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcTransaction.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcTransaction.java deleted file mode 100644 index 440bfb546..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcTransaction.java +++ /dev/null @@ -1,159 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.sql.SQLException; -import java.util.Collections; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; - -import com.clickhouse.client.ClickHouseException; -import com.clickhouse.client.ClickHouseRequestManager; -import com.clickhouse.client.ClickHouseTransaction; -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.logging.Logger; -import com.clickhouse.jdbc.SqlExceptionUtils; - -public class JdbcTransaction { - static final String ACTION_COMMITTED = "committed"; - static final String ACTION_ROLLBACK = "rolled back"; - - static final String ERROR_TX_NOT_STARTED = "Transaction not started"; - static final String ERROR_TX_STARTED = "Transaction has been started"; - - protected final ClickHouseTransaction tx; - protected final String id; - protected final List queries; - protected final List savepoints; - - JdbcTransaction() { - this(null); - } - - public JdbcTransaction(ClickHouseTransaction tx) { - this.tx = tx; - this.id = tx != null ? 
tx.getId().asTupleString() : ClickHouseRequestManager.getInstance().createUniqueId(); - this.queries = new LinkedList<>(); - this.savepoints = new LinkedList<>(); - } - - public boolean isNew() { - return this.queries.isEmpty() && this.savepoints.isEmpty() - && (this.tx == null || this.tx.isNew() || this.tx.isActive()); - } - - public void commit(Logger log) throws SQLException { - if (this.tx != null) { - try { - this.tx.commit(); - } catch (ClickHouseException e) { - throw SqlExceptionUtils.handle(e); - } - } else { - logTransactionDetails(log, ACTION_COMMITTED); - } - clear(); - } - - public void rollback(Logger log) throws SQLException { - if (this.tx != null) { - try { - this.tx.rollback(); - } catch (ClickHouseException e) { - throw SqlExceptionUtils.handle(e); - } - } else { - logTransactionDetails(log, JdbcTransaction.ACTION_ROLLBACK); - } - clear(); - } - - synchronized List getQueries() { - return Collections.unmodifiableList(queries); - } - - synchronized List getSavepoints() { - return Collections.unmodifiableList(savepoints); - } - - synchronized void logSavepointDetails(Logger log, JdbcSavepoint s, String action) { - log.warn( - "[JDBC Compliant Mode] Savepoint(id=%d, name=%s) of transaction [%s](%d queries & %d savepoints) is %s.", - s.id, s.name, id, queries.size(), savepoints.size(), action); - } - - synchronized void logTransactionDetails(Logger log, String action) { - if (tx != null) { - log.debug("%s (%d queries & %d savepoints) is %s", tx, queries.size(), - savepoints.size(), action); - } else { - log.warn("[JDBC Compliant Mode] Transaction [%s] (%d queries & %d savepoints) is %s.", id, queries.size(), - savepoints.size(), action); - } - - if (log.isDebugEnabled()) { - log.debug(() -> { - log.debug("[JDBC Compliant Mode] Transaction [%s] is %s - begin", id, action); - int total = queries.size(); - int counter = 1; - for (String queryId : queries) { - log.debug(" '%s', -- query (%d of %d) in transaction [%s]", queryId, counter++, total, id); - } - - total = savepoints.size(); - counter = 1; - for (JdbcSavepoint savepoint : savepoints) { - log.debug(" %s (%d of %d) in transaction [%s]", savepoint, counter++, total, id); - } - return ClickHouseUtils.format("[JDBC Compliant Mode] Transaction [%s] is %s - end", id, action); - }); - } - } - - synchronized String newQuery(String queryId) { - if (ClickHouseChecker.isNullOrEmpty(queryId) || queries.contains(queryId)) { - queryId = ClickHouseRequestManager.getInstance().createQueryId(); - } - - queries.add(queryId); - - return queryId; - } - - synchronized JdbcSavepoint newSavepoint(String name) { - JdbcSavepoint savepoint = new JdbcSavepoint(queries.size(), name); - this.savepoints.add(savepoint); - return savepoint; - } - - synchronized void toSavepoint(JdbcSavepoint savepoint) throws SQLException { - if (tx != null) { - try { - tx.rollback(); - } catch (ClickHouseException e) { - throw SqlExceptionUtils.handle(e); - } - } - boolean found = false; - Iterator it = savepoints.iterator(); - while (it.hasNext()) { - JdbcSavepoint s = it.next(); - if (found) { - it.remove(); - } else if (s == savepoint) { - found = true; - it.remove(); - } - } - - if (!found) { - throw SqlExceptionUtils.clientError("Invalid savepoint: " + savepoint); - } - queries.subList(savepoint.id, queries.size()).clear(); - } - - synchronized void clear() { - this.queries.clear(); - this.savepoints.clear(); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/SqlBasedPreparedStatement.java 
b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/SqlBasedPreparedStatement.java deleted file mode 100644 index 0745f1a19..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/SqlBasedPreparedStatement.java +++ /dev/null @@ -1,659 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.math.BigDecimal; -import java.sql.Array; -import java.sql.Date; -import java.sql.ParameterMetaData; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.sql.Statement; -import java.sql.Time; -import java.sql.Timestamp; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.ZoneId; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.TimeZone; - -import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseParameterizedQuery; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.client.ClickHouseResponse; -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.data.ClickHouseValue; -import com.clickhouse.data.ClickHouseValues; -import com.clickhouse.data.value.ClickHouseDateTimeValue; -import com.clickhouse.data.value.ClickHouseDateValue; -import com.clickhouse.data.value.ClickHouseStringValue; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; -import com.clickhouse.jdbc.ClickHouseConnection; -import com.clickhouse.jdbc.ClickHousePreparedStatement; -import com.clickhouse.jdbc.ClickHouseResultSetMetaData; -import com.clickhouse.jdbc.JdbcParameterizedQuery; -import com.clickhouse.jdbc.SqlExceptionUtils; -import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; - -public class SqlBasedPreparedStatement extends AbstractPreparedStatement implements ClickHousePreparedStatement { - private static final Logger log = LoggerFactory.getLogger(SqlBasedPreparedStatement.class); - - private final Calendar defaultCalendar; - private final TimeZone preferredTimeZone; - private final ZoneId timeZoneForDate; - private final ZoneId timeZoneForTs; - - private final ClickHouseSqlStatement parsedStmt; - private final String insertValuesQuery; - private final ClickHouseParameterizedQuery preparedQuery; - private final ClickHouseValue[] templates; - private final String[] values; - private final ClickHouseParameterMetaData paramMetaData; - private final List batch; - private final StringBuilder builder; - - private int counter; - - protected SqlBasedPreparedStatement(ClickHouseConnectionImpl connection, ClickHouseRequest request, - ClickHouseSqlStatement parsedStmt, int resultSetType, int resultSetConcurrency, int resultSetHoldability) - throws SQLException { - super(connection, request, resultSetType, resultSetConcurrency, resultSetHoldability); - - ClickHouseConfig config = getConfig(); - defaultCalendar = connection.getDefaultCalendar(); - preferredTimeZone = config.getUseTimeZone(); - timeZoneForTs = preferredTimeZone.toZoneId(); - timeZoneForDate = config.isUseServerTimeZoneForDates() ? 
timeZoneForTs : null; - - this.parsedStmt = parsedStmt; - String valuesExpr = null; - ClickHouseParameterizedQuery parsedValuesExpr = null; - String prefix = null; - if (parsedStmt.hasValues()) { // consolidate multiple inserts into one - valuesExpr = parsedStmt.getContentBetweenKeywords(ClickHouseSqlStatement.KEYWORD_VALUES_START, - ClickHouseSqlStatement.KEYWORD_VALUES_END); - if (ClickHouseChecker.isNullOrBlank(valuesExpr)) { - log.warn( - "Please consider to use one and only one values expression, for example: use 'values(?)' instead of 'values(?),(?)'."); - } else { - valuesExpr += ")"; - prefix = parsedStmt.getSQL().substring(0, - parsedStmt.getPositions().get(ClickHouseSqlStatement.KEYWORD_VALUES_START)); - if (connection.getJdbcConfig().useNamedParameter()) { - parsedValuesExpr = ClickHouseParameterizedQuery.of(config, valuesExpr); - } else { - parsedValuesExpr = JdbcParameterizedQuery.of(config, valuesExpr); - } - } - } - - preparedQuery = parsedValuesExpr == null ? request.getPreparedQuery() : parsedValuesExpr; - - templates = preparedQuery.getParameterTemplates(); - - int tlen = templates.length; - values = new String[tlen]; - List list = new ArrayList<>(tlen); - for (int i = 1; i <= tlen; i++) { - list.add(ClickHouseColumn.of("parameter" + i, ClickHouseDataType.JSON, true)); - } - paramMetaData = new ClickHouseParameterMetaData(Collections.unmodifiableList(list), mapper, - connection.getTypeMap()); - batch = new LinkedList<>(); - builder = new StringBuilder(); - if ((insertValuesQuery = prefix) != null) { - builder.append(insertValuesQuery); - } - - counter = 0; - } - - protected void ensureParams() throws SQLException { - List columns = new ArrayList<>(); - for (int i = 0, len = values.length; i < len; i++) { - if (values[i] == null) { - columns.add(String.valueOf(i + 1)); - } - } - - if (!columns.isEmpty()) { - throw SqlExceptionUtils.clientError(ClickHouseUtils.format("Missing parameter(s): %s", columns)); - } - } - - private ClickHouseResponse executeStatement(String sql, boolean reparse) throws SQLException { - ClickHouseResponse r = null; - if (reparse) { - // parse the query for the third time... - ClickHouseSqlStatement[] stmts = getConnection().parse(sql, getConfig(), null); - if (stmts.length == 1 && stmts[0].hasFile()) { - r = executeStatement(stmts[0], null, null, null); - } - } - return r != null ? r : executeStatement(builder.toString(), null, null, null); - } - - @Override - protected long[] executeAny(boolean asBatch) throws SQLException { - ensureOpen(); - boolean continueOnError = false; - if (asBatch) { - if (counter < 1) { - return ClickHouseValues.EMPTY_LONG_ARRAY; - } - continueOnError = getConnection().getJdbcConfig().isContinueBatchOnError(); - } else { - if (counter != 0) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - addBatch(); - } - - long[] results = new long[counter]; - ClickHouseResponse r = null; - boolean reparse = getConnection().getJdbcConfig().useLocalFile() && this.parsedStmt.hasFile(); - if (builder.length() > 0) { // insert ... 
values - long rows = 0L; - try { - r = executeStatement(builder.toString(), reparse); - if (updateResult(parsedStmt, r) != null && asBatch && parsedStmt.isQuery()) { - throw SqlExceptionUtils.queryInBatchError(results); - } - rows = r.getSummary().getWrittenRows(); - // no effective rows for update and delete, and the number for insertion is not - // accurate as well - // if (rows > 0L && rows != counter) { - // log.warn("Expect %d rows being inserted but only got %d", counter, rows); - // } - // FIXME needs to enhance http client before getting back to this - Arrays.fill(results, 1); - } catch (Exception e) { - if (!asBatch) { - throw SqlExceptionUtils.handle(e); - } - - // just a wild guess... - if (rows < 1) { - results[0] = EXECUTE_FAILED; - } else { - if (rows >= counter) { - rows = counter; - } - for (int i = 0, len = (int) rows - 1; i < len; i++) { - results[i] = 1; - } - results[(int) rows] = EXECUTE_FAILED; - } - - if (!continueOnError) { - throw SqlExceptionUtils.batchUpdateError(e, results); - } - log.error("Failed to execute batch insertion of %d records", counter, e); - } finally { - if (asBatch && r != null) { - r.close(); - } - clearBatch(); - } - } else { - int index = 0; - try { - for (String[] params : batch) { - builder.setLength(0); - preparedQuery.apply(builder, params); - try { - r = executeStatement(builder.toString(), reparse); - if (updateResult(parsedStmt, r) != null && asBatch && parsedStmt.isQuery()) { - throw SqlExceptionUtils.queryInBatchError(results); - } - int count = getUpdateCount(); - results[index] = count > 0 ? count : 0; - } catch (Exception e) { - results[index] = EXECUTE_FAILED; - if (!continueOnError) { - throw SqlExceptionUtils.batchUpdateError(e, results); - } - log.error("Failed to execute batch insert at %d of %d", index + 1, counter, e); - } finally { - index++; - if (asBatch && r != null) { - r.close(); - } - } - } - } finally { - clearBatch(); - } - } - - return results; - } - - @Override - protected int getMaxParameterIndex() { - return templates.length; - } - - @Override - public ResultSetMetaData describeQueryResult() throws SQLException { - // No metadata unless query has been recognized as SELECT - if (!parsedStmt.isRecognized() || !parsedStmt.isQuery()) { - return null; - } - - final String[] vals; - if (batch.isEmpty()) { - vals = new String[values.length]; - System.arraycopy(this.values, 0, vals, 0, values.length); - } else { - vals = batch.get(0); - } - for (int i = 0; i < values.length; i++) { - if (vals[i] == null) { - vals[i] = ClickHouseValues.NULL_EXPR; - } - } - - StringBuilder sb = new StringBuilder("desc ("); - preparedQuery.apply(sb, vals); - sb.append(')'); - - List columns = new LinkedList<>(); - ClickHouseConnection conn = getConnection(); - try (Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery(sb.toString())) { - while (rs.next()) { - columns.add(ClickHouseColumn.of(rs.getString(1), rs.getString(2))); - } - } - - return ClickHouseResultSetMetaData.of(conn.getJdbcConfig(), conn.getCurrentDatabase(), "", - Collections.unmodifiableList(new ArrayList<>(columns)), mapper, conn.getTypeMap()); - } - - @Override - public ResultSet executeQuery() throws SQLException { - ensureParams(); - try { - executeAny(false); - } catch (SQLException e) { - if (e.getSQLState() != null) { - throw e; - } else { - throw new SQLException("Query failed", SqlExceptionUtils.SQL_STATE_SQL_ERROR, e.getCause()); - } - } - ResultSet rs = getResultSet(); - return rs == null ? 
newEmptyResultSet() : rs; - } - - @Override - public long executeLargeUpdate() throws SQLException { - ensureParams(); - try { - executeAny(false); - } catch (SQLException e) { - if (e.getSQLState() != null) { - throw e; - } else { - throw new SQLException("Update failed", SqlExceptionUtils.SQL_STATE_SQL_ERROR, e.getCause()); - } - } - return getLargeUpdateCount(); - } - - @Override - public void setByte(int parameterIndex, byte x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.update(x); - values[idx] = value.toSqlExpression(); - } else { - values[idx] = String.valueOf(x); - } - } - - @Override - public void setShort(int parameterIndex, short x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.update(x); - values[idx] = value.toSqlExpression(); - } else { - values[idx] = String.valueOf(x); - } - } - - @Override - public void setInt(int parameterIndex, int x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.update(x); - values[idx] = value.toSqlExpression(); - } else { - values[idx] = String.valueOf(x); - } - } - - @Override - public void setLong(int parameterIndex, long x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.update(x); - values[idx] = value.toSqlExpression(); - } else { - values[idx] = String.valueOf(x); - } - } - - @Override - public void setFloat(int parameterIndex, float x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.update(x); - values[idx] = value.toSqlExpression(); - } else { - values[idx] = String.valueOf(x); - } - } - - @Override - public void setDouble(int parameterIndex, double x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.update(x); - values[idx] = value.toSqlExpression(); - } else { - values[idx] = String.valueOf(x); - } - } - - @Override - public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.update(x); - values[idx] = value.toSqlExpression(); - } else { - values[idx] = String.valueOf(x); - } - } - - @Override - public void setString(int parameterIndex, String x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.update(x); - values[idx] = value.toSqlExpression(); - } else { - values[idx] = ClickHouseValues.convertToQuotedString(x); - } - } - - @Override - public void setBytes(int parameterIndex, byte[] x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value == null) { - templates[idx] = value = ClickHouseStringValue.ofNull(); - } - values[idx] = value.update(x).toSqlExpression(); - } - - @Override - public void clearParameters() throws SQLException { - ensureOpen(); - - for (int i = 0, len = values.length; i < len; i++) { - values[i] = null; - } - } - - @Override - public void 
setObject(int parameterIndex, Object x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.update(x); - values[idx] = value.toSqlExpression(); - } else { - if (x instanceof ClickHouseValue) { - value = (ClickHouseValue) x; - templates[idx] = value; - values[idx] = value.toSqlExpression(); - } else { - values[idx] = ClickHouseValues.convertToSqlExpression(x); - } - } - } - - @Override - public boolean execute() throws SQLException { - ensureParams(); - try { - executeAny(false); - } catch (SQLException e) { - if (e.getSQLState() != null) { - throw e; - } else { - throw new SQLException("Execution failed", SqlExceptionUtils.SQL_STATE_SQL_ERROR, e.getCause()); - } - } - return getResultSet() != null; - } - - @Override - public void addBatch() throws SQLException { - ensureOpen(); - - if (builder.length() > 0) { - int index = 1; - for (String v : values) { - if (v == null) { - throw SqlExceptionUtils - .clientError(ClickHouseUtils.format("Missing value for parameter #%d", index)); - } - index++; - } - preparedQuery.apply(builder, values); - } else { - int len = values.length; - String[] newValues = new String[len]; - for (int i = 0; i < len; i++) { - String v = values[i]; - if (v == null) { - throw SqlExceptionUtils - .clientError(ClickHouseUtils.format("Missing value for parameter #%d", i + 1)); - } else { - newValues[i] = v; - } - } - batch.add(newValues); - } - counter++; - clearParameters(); - } - - @Override - public void clearBatch() throws SQLException { - ensureOpen(); - - this.batch.clear(); - this.builder.setLength(0); - if (insertValuesQuery != null) { - this.builder.append(insertValuesQuery); - } - - this.counter = 0; - } - - @Override - public void setArray(int parameterIndex, Array x) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - Object array = x != null ? x.getArray() : x; - values[idx] = array != null ? 
ClickHouseValues.convertToSqlExpression(array) - : ClickHouseValues.EMPTY_ARRAY_EXPR; - } - - @Override - public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - if (x == null) { - values[idx] = ClickHouseValues.NULL_EXPR; - return; - } - - LocalDate d; - if (cal == null) { - cal = defaultCalendar; - } - ZoneId tz = cal.getTimeZone().toZoneId(); - if (timeZoneForDate == null || tz.equals(timeZoneForDate)) { - d = x.toLocalDate(); - } else { - Calendar c = (Calendar) cal.clone(); - c.setTime(x); - d = c.toInstant().atZone(tz).withZoneSameInstant(timeZoneForDate).toLocalDate(); - } - - ClickHouseValue value = templates[idx]; - if (value == null) { - value = ClickHouseDateValue.ofNull(); - } - values[idx] = value.update(d).toSqlExpression(); - } - - @Override - public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - if (x == null) { - values[idx] = ClickHouseValues.NULL_EXPR; - return; - } - - LocalTime t; - if (cal == null) { - cal = defaultCalendar; - } - ZoneId tz = cal.getTimeZone().toZoneId(); - if (tz.equals(timeZoneForTs)) { - t = x.toLocalTime(); - } else { - Calendar c = (Calendar) cal.clone(); - c.setTime(x); - t = c.toInstant().atZone(tz).withZoneSameInstant(timeZoneForTs).toLocalTime(); - } - - ClickHouseValue value = templates[idx]; - if (value == null) { - value = ClickHouseDateValue.ofNull(); - } - values[idx] = value.update(t).toSqlExpression(); - } - - @Override - public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - if (x == null) { - values[idx] = ClickHouseValues.NULL_EXPR; - return; - } - - LocalDateTime dt; - if (cal == null) { - cal = defaultCalendar; - } - ZoneId tz = cal.getTimeZone().toZoneId(); - if (tz.equals(timeZoneForTs)) { - dt = x.toLocalDateTime(); - } else { - Calendar c = (Calendar) cal.clone(); - c.setTime(x); - dt = c.toInstant().atZone(tz).withNano(x.getNanos()).withZoneSameInstant(timeZoneForTs).toLocalDateTime(); - } - - ClickHouseValue value = templates[idx]; - if (value == null) { - value = ClickHouseDateTimeValue.ofNull(dt.getNano() > 0 ? 
9 : 0, preferredTimeZone); - } - values[idx] = value.update(dt).toSqlExpression(); - } - - @Override - public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value != null) { - value.resetToNullOrEmpty(); - values[idx] = value.toSqlExpression(); - } else { - values[idx] = ClickHouseValues.NULL_EXPR; - } - } - - @Override - public ParameterMetaData getParameterMetaData() throws SQLException { - return paramMetaData; - } - - @Override - public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - ensureOpen(); - - int idx = toArrayIndex(parameterIndex); - ClickHouseValue value = templates[idx]; - if (value == null) { - value = mapper.toColumn(targetSqlType, scaleOrLength).newValue(getConfig()); - templates[idx] = value; - } - - value.update(x); - values[idx] = value.toSqlExpression(); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/StreamBasedPreparedStatement.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/StreamBasedPreparedStatement.java deleted file mode 100644 index 3b1a3d648..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/StreamBasedPreparedStatement.java +++ /dev/null @@ -1,350 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.io.File; -import java.io.InputStream; -import java.math.BigDecimal; -import java.sql.Array; -import java.sql.Date; -import java.sql.ParameterMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.Calendar; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; - -import com.clickhouse.client.ClickHouseClient; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataStreamFactory; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseInputStream; -import com.clickhouse.data.ClickHouseOutputStream; -import com.clickhouse.data.ClickHousePassThruStream; -import com.clickhouse.data.ClickHousePipedOutputStream; -import com.clickhouse.data.ClickHouseValues; -import com.clickhouse.data.ClickHouseWriter; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; -import com.clickhouse.jdbc.ClickHousePreparedStatement; -import com.clickhouse.jdbc.SqlExceptionUtils; -import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; - -public class StreamBasedPreparedStatement extends AbstractPreparedStatement implements ClickHousePreparedStatement { - private static final Logger log = LoggerFactory.getLogger(StreamBasedPreparedStatement.class); - - private static final String ERROR_SET_PARAM = "Please use setString()/setBytes()/setInputStream() or pass String/InputStream/ClickHouseInputStream to setObject() method instead"; - private static final String DEFAULT_KEY = "pipe"; - private static final List DEFAULT_PARAMS = Collections - .singletonList(ClickHouseColumn.of("data", ClickHouseDataType.String, false)); - - private final ClickHouseSqlStatement parsedStmt; - private final ClickHouseParameterMetaData paramMetaData; - - private final List batch; - - private ClickHouseInputStream value; - - protected StreamBasedPreparedStatement(ClickHouseConnectionImpl 
connection, ClickHouseRequest request, - ClickHouseSqlStatement parsedStmt, int resultSetType, int resultSetConcurrency, int resultSetHoldability) - throws SQLException { - super(connection, request, resultSetType, resultSetConcurrency, resultSetHoldability); - - this.parsedStmt = parsedStmt; - this.value = null; - paramMetaData = new ClickHouseParameterMetaData(DEFAULT_PARAMS, mapper, connection.getTypeMap()); - batch = new LinkedList<>(); - } - - protected void ensureParams() throws SQLException { - if (value == null) { - throw SqlExceptionUtils.clientError("Missing input stream"); - } - } - - @Override - protected long[] executeAny(boolean asBatch) throws SQLException { - ensureOpen(); - boolean continueOnError = false; - if (asBatch) { - if (batch.isEmpty()) { - return ClickHouseValues.EMPTY_LONG_ARRAY; - } - continueOnError = getConnection().getJdbcConfig().isContinueBatchOnError(); - } else { - try { - if (!batch.isEmpty()) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - addBatch(); - } catch (SQLException e) { - clearBatch(); - throw e; - } - } - - long[] results = new long[batch.size()]; - int count = 0; - String sql = getRequest().getStatements(false).get(0); - try { - for (ClickHouseInputStream in : batch) { - @SuppressWarnings("unchecked") - final CompletableFuture future = (CompletableFuture) in.removeUserData(DEFAULT_KEY); - results[count++] = executeInsert(sql, in); - if (future != null) { - future.get(); - } - } - } catch (Exception e) { - if (e instanceof InterruptedException) { - Thread.currentThread().interrupt(); - } - - if (!asBatch) { - throw SqlExceptionUtils.handle(e); - } - - if (!continueOnError) { - throw SqlExceptionUtils.batchUpdateError(e, results); - } - log.error("Failed to execute batch insert of %d records", count + 1, e); - } finally { - clearBatch(); - } - - return results; - } - - @Override - protected int getMaxParameterIndex() { - return 1; - } - - protected String getSql() { - // why? because request can be modified so it might not always same as - // parsedStmt.getSQL() - return getRequest().getStatements(false).get(0); - } - - @Override - public ResultSet executeQuery() throws SQLException { - ensureParams(); - try { - executeAny(false); - } catch (SQLException e) { - if (e.getSQLState() != null) { - throw e; - } else { - throw new SQLException("Query failed", SqlExceptionUtils.SQL_STATE_SQL_ERROR, e.getCause()); - } - } - - ResultSet rs = getResultSet(); - if (rs != null) { // should not happen - try { - rs.close(); - } catch (Exception e) { - // ignore - } - } - return newEmptyResultSet(); - } - - @Override - public long executeLargeUpdate() throws SQLException { - ensureParams(); - try { - executeAny(false); - } catch (SQLException e) { - if (e.getSQLState() != null) { - throw e; - } else { - throw new SQLException("Update failed", SqlExceptionUtils.SQL_STATE_SQL_ERROR, e.getCause()); - } - } - long row = getLargeUpdateCount(); - return row > 0L ? 
row : 0L; - } - - @Override - public void setByte(int parameterIndex, byte x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setShort(int parameterIndex, short x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setInt(int parameterIndex, int x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setLong(int parameterIndex, long x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setFloat(int parameterIndex, float x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setDouble(int parameterIndex, double x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setString(int parameterIndex, String x) throws SQLException { - ensureOpen(); - - value = ClickHouseInputStream.of(x); - } - - @Override - public void setBytes(int parameterIndex, byte[] x) throws SQLException { - ensureOpen(); - - value = ClickHouseInputStream.of(x); - } - - @Override - public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { - ensureOpen(); - - value = ClickHouseInputStream.of(x); - } - - @Override - public void clearParameters() throws SQLException { - ensureOpen(); - - value = null; - } - - @Override - public void setObject(int parameterIndex, Object x) throws SQLException { - ensureOpen(); - - if (x instanceof ClickHousePassThruStream) { - ClickHousePassThruStream stream = (ClickHousePassThruStream) x; - if (!stream.hasInput()) { - throw SqlExceptionUtils.clientError("No input available in the given pass-thru stream"); - } - value = stream.newInputStream(getConfig().getWriteBufferSize(), null); - } else if (x instanceof ClickHouseWriter) { - final ClickHouseWriter writer = (ClickHouseWriter) x; - final ClickHousePipedOutputStream stream = ClickHouseDataStreamFactory.getInstance() // NOSONAR - .createPipedOutputStream(getConfig()); - value = stream.getInputStream(); - - // always run in async mode or it will not work - value.setUserData(DEFAULT_KEY, ClickHouseClient.submit(() -> { - try (ClickHouseOutputStream out = stream) { - writer.write(out); - } - return true; - })); - } else if (x instanceof InputStream) { - value = ClickHouseInputStream.of((InputStream) x); - } else if (x instanceof String) { - value = ClickHouseInputStream.of((String) x); - } else if (x instanceof byte[]) { - value = ClickHouseInputStream.of((byte[]) x); - } else if (x instanceof File) { - value = ClickHouseInputStream.of((File) x); - } else { - throw SqlExceptionUtils - .clientError( - "Only byte[], String, File, InputStream, ClickHousePassThruStream, and ClickHouseWriter are supported"); - } - } - - @Override - public boolean execute() throws SQLException { - ensureParams(); - if (!batch.isEmpty()) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - - final String sql = getSql(); - @SuppressWarnings("unchecked") - final CompletableFuture future = (CompletableFuture) value.removeUserData(DEFAULT_KEY); - executeInsert(sql, value); - if (future != null) { - try { - future.get(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - log.warn("Execution of query was 
interrupted: %s", sql); - } catch (ExecutionException e) { - throw SqlExceptionUtils.handle(e.getCause()); - } - } - return false; - } - - @Override - public void addBatch() throws SQLException { - ensureOpen(); - - ensureParams(); - batch.add(value); - clearParameters(); - } - - @Override - public void clearBatch() throws SQLException { - ensureOpen(); - - this.batch.clear(); - } - - @Override - public void setArray(int parameterIndex, Array x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_PARAM); - } - - @Override - public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - ensureOpen(); - - value = ClickHouseInputStream.empty(); - } - - @Override - public ParameterMetaData getParameterMetaData() throws SQLException { - return paramMetaData; - } - - @Override - public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - setObject(parameterIndex, x); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/TableBasedPreparedStatement.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/TableBasedPreparedStatement.java deleted file mode 100644 index df66eba9a..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/TableBasedPreparedStatement.java +++ /dev/null @@ -1,291 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.math.BigDecimal; -import java.sql.Array; -import java.sql.Date; -import java.sql.ParameterMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; - -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.client.ClickHouseResponse; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseExternalTable; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.data.ClickHouseValues; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; -import com.clickhouse.jdbc.ClickHousePreparedStatement; -import com.clickhouse.jdbc.SqlExceptionUtils; -import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; - -public class TableBasedPreparedStatement extends AbstractPreparedStatement implements ClickHousePreparedStatement { - private static final Logger log = LoggerFactory.getLogger(TableBasedPreparedStatement.class); - - private static final String ERROR_SET_TABLE = "Please use setObject(ClickHouseExternalTable) method instead"; - - private final ClickHouseSqlStatement parsedStmt; - private final List tables; - private final ClickHouseExternalTable[] values; - private final ClickHouseParameterMetaData paramMetaData; - - private final List> batch; - - protected TableBasedPreparedStatement(ClickHouseConnectionImpl connection, ClickHouseRequest request, 
- ClickHouseSqlStatement parsedStmt, int resultSetType, int resultSetConcurrency, int resultSetHoldability) - throws SQLException { - super(connection, request, resultSetType, resultSetConcurrency, resultSetHoldability); - - Set set = parsedStmt != null ? parsedStmt.getTempTables() : null; - if (set == null) { - throw SqlExceptionUtils.clientError("Non-null table list is required"); - } - - this.parsedStmt = parsedStmt; - int size = set.size(); - this.tables = new ArrayList<>(size); - this.tables.addAll(set); - values = new ClickHouseExternalTable[size]; - List list = new ArrayList<>(size); - for (String name : set) { - list.add(ClickHouseColumn.of(name, ClickHouseDataType.JSON, false)); - } - paramMetaData = new ClickHouseParameterMetaData(Collections.unmodifiableList(list), mapper, - connection.getTypeMap()); - batch = new LinkedList<>(); - } - - protected void ensureParams() throws SQLException { - List list = new ArrayList<>(); - for (int i = 0, len = values.length; i < len; i++) { - if (values[i] == null) { - list.add(tables.get(i)); - } - } - - if (!list.isEmpty()) { - throw SqlExceptionUtils.clientError(ClickHouseUtils.format("Missing table(s): %s", list)); - } - } - - @Override - public long[] executeAny(boolean asBatch) throws SQLException { - ensureOpen(); - boolean continueOnError = false; - if (asBatch) { - if (batch.isEmpty()) { - return ClickHouseValues.EMPTY_LONG_ARRAY; - } - continueOnError = getConnection().getJdbcConfig().isContinueBatchOnError(); - } else { - if (!batch.isEmpty()) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - addBatch(); - } - - long[] results = new long[batch.size()]; - int index = 0; - try { - String sql = getSql(); - for (List list : batch) { - try (ClickHouseResponse r = executeStatement(sql, null, list, null); - ResultSet rs = updateResult(parsedStmt, r)) { - if (asBatch && rs != null && parsedStmt.isQuery()) { - throw SqlExceptionUtils.queryInBatchError(results); - } - long rows = getLargeUpdateCount(); - results[index] = rows > 0L ? rows : 0L; - } catch (Exception e) { - if (!asBatch) { - throw SqlExceptionUtils.handle(e); - } - - results[index] = EXECUTE_FAILED; - if (!continueOnError) { - throw SqlExceptionUtils.batchUpdateError(e, results); - } - log.error("Failed to execute batch insert at %d of %d", index + 1, batch.size(), e); - } - index++; - } - } finally { - clearBatch(); - } - - return results; - } - - @Override - protected int getMaxParameterIndex() { - return values.length; - } - - protected String getSql() { - // why? 
because request can be modified so it might not always same as - // parsedStmt.getSQL() - return getRequest().getStatements(false).get(0); - } - - @Override - public ResultSet executeQuery() throws SQLException { - ensureParams(); - if (!batch.isEmpty()) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - - ClickHouseSqlStatement stmt = new ClickHouseSqlStatement(getSql()); - return updateResult(parsedStmt, executeStatement(stmt, null, Arrays.asList(values), null)); - } - - @Override - public long executeLargeUpdate() throws SQLException { - ensureParams(); - if (!batch.isEmpty()) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - - try (ClickHouseResponse r = executeStatement(getSql(), null, Arrays.asList(values), null)) { - updateResult(parsedStmt, r); - return getLargeUpdateCount(); - } - } - - @Override - public void setByte(int parameterIndex, byte x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setShort(int parameterIndex, short x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setInt(int parameterIndex, int x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setLong(int parameterIndex, long x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setFloat(int parameterIndex, float x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setDouble(int parameterIndex, double x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setString(int parameterIndex, String x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setBytes(int parameterIndex, byte[] x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void clearParameters() throws SQLException { - ensureOpen(); - - for (int i = 0, len = values.length; i < len; i++) { - values[i] = null; - } - } - - @Override - public void setObject(int parameterIndex, Object x) throws SQLException { - ensureOpen(); - - if (x instanceof ClickHouseExternalTable) { - int idx = toArrayIndex(parameterIndex); - values[idx] = (ClickHouseExternalTable) x; - } else { - throw SqlExceptionUtils.clientError("Only ClickHouseExternalTable is allowed"); - } - } - - @Override - public boolean execute() throws SQLException { - ensureParams(); - if (!batch.isEmpty()) { - throw SqlExceptionUtils.undeterminedExecutionError(); - } - - ClickHouseSqlStatement stmt = new ClickHouseSqlStatement(getSql()); - return updateResult(parsedStmt, executeStatement(stmt, null, Arrays.asList(values), null)) != null; - } - - @Override - public void addBatch() throws SQLException { - ensureOpen(); - - ensureParams(); - batch.add(Collections.unmodifiableList(Arrays.asList(values))); - clearParameters(); - } - - @Override - public void clearBatch() throws SQLException { - ensureOpen(); - - this.batch.clear(); - } - - @Override - public void setArray(int parameterIndex, Array x) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setDate(int parameterIndex, Date x, Calendar cal) throws 
SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - throw SqlExceptionUtils.clientError(ERROR_SET_TABLE); - } - - @Override - public ParameterMetaData getParameterMetaData() throws SQLException { - return paramMetaData; - } - - @Override - public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - setObject(parameterIndex, x); - } -} diff --git a/clickhouse-jdbc/src/main/resources/META-INF/services/java.sql.Driver b/clickhouse-jdbc/src/main/resources/META-INF/services/java.sql.Driver index 1295d85a1..4ce204321 100644 --- a/clickhouse-jdbc/src/main/resources/META-INF/services/java.sql.Driver +++ b/clickhouse-jdbc/src/main/resources/META-INF/services/java.sql.Driver @@ -1 +1 @@ -com.clickhouse.jdbc.ClickHouseDriver +com.clickhouse.jdbc.DriverImpl diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/AccessManagementTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/AccessManagementTest.java index 909acf950..d8e7ad933 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/AccessManagementTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/AccessManagementTest.java @@ -1,6 +1,5 @@ package com.clickhouse.jdbc; -import com.clickhouse.client.ClickHouseProtocol; import com.clickhouse.client.http.config.ClickHouseHttpOption; import com.clickhouse.client.http.config.HttpConnectionProvider; import com.clickhouse.data.ClickHouseVersion; @@ -26,7 +25,7 @@ public void testSetRoleDifferentConnections(String[] roles, String setRoleExpr, Properties properties = new Properties(); properties.setProperty(ClickHouseHttpOption.REMEMBER_LAST_SET_ROLES.getKey(), "true"); properties.setProperty(ClickHouseHttpOption.CONNECTION_PROVIDER.getKey(), connectionProvider); - ClickHouseDataSource dataSource = new ClickHouseDataSource(url, properties); + DataSourceImpl dataSource = new DataSourceImpl(url, properties); String serverVersion = getServerVersion(dataSource.getConnection()); if (ClickHouseVersion.of(serverVersion).check("(,24.3]")) { System.out.println("Test is skipped: feature is supported since 24.4"); @@ -109,7 +108,7 @@ public void testSetRolesAccessingTableRows() throws SQLException { String url = String.format("jdbc:ch:%s", getEndpointString()); Properties properties = new Properties(); properties.setProperty(ClickHouseHttpOption.REMEMBER_LAST_SET_ROLES.getKey(), "true"); - ClickHouseDataSource dataSource = new ClickHouseDataSource(url, properties); + DataSourceImpl dataSource = new DataSourceImpl(url, properties); String serverVersion = getServerVersion(dataSource.getConnection()); if (ClickHouseVersion.of(serverVersion).check("(,24.3]")) { System.out.println("Test is skipped: feature is supported since 24.4"); diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHousePreparedStatementTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHousePreparedStatementTest.java index e075dcf05..1495623f6 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHousePreparedStatementTest.java +++ 
b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHousePreparedStatementTest.java @@ -55,7 +55,6 @@ import com.clickhouse.data.value.UnsignedLong; import com.clickhouse.data.value.array.ClickHouseByteArrayValue; import com.clickhouse.jdbc.internal.InputBasedPreparedStatement; -import com.clickhouse.jdbc.internal.SqlBasedPreparedStatement; import com.clickhouse.jdbc.internal.StreamBasedPreparedStatement; import org.testng.Assert; @@ -142,22 +141,22 @@ private Object[][] getTypedParameters() { private Object[][] getStatementAndParameters() { return new Object[][] { // ddl - new Object[] { "ddl", "drop table if exists non_existing_table", SqlBasedPreparedStatement.class, false, + new Object[] { "ddl", "drop table if exists non_existing_table", DataSourceImpl.PreparedStatementImpl.class, false, null, false }, // query - new Object[] { "select1", "select 1", SqlBasedPreparedStatement.class, true, null, false }, - new Object[] { "select_param", "select ?", SqlBasedPreparedStatement.class, true, new String[] { "1" }, + new Object[] { "select1", "select 1", DataSourceImpl.PreparedStatementImpl.class, true, null, false }, + new Object[] { "select_param", "select ?", DataSourceImpl.PreparedStatementImpl.class, true, new String[] { "1" }, false }, // mutation new Object[] { "insert_static", "insert into $table values(1)", - SqlBasedPreparedStatement.class, false, null, + DataSourceImpl.PreparedStatementImpl.class, false, null, false }, new Object[] { "insert_table", "insert into $table", InputBasedPreparedStatement.class, false, new String[] { "2" }, true }, new Object[] { "insert_param", "insert into $table values(?)", InputBasedPreparedStatement.class, false, new String[] { "3" }, true }, new Object[] { "insert_param", "insert into $table values(trim(?))", - SqlBasedPreparedStatement.class, false, new String[] { "4" }, true }, + DataSourceImpl.PreparedStatementImpl.class, false, new String[] { "4" }, true }, new Object[] { "insert_input", "insert into $table select s from input('s String')", InputBasedPreparedStatement.class, false, new String[] { "5" }, true }, }; diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseResultSetTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseResultSetTest.java index dcc12729e..08448e7ba 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseResultSetTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseResultSetTest.java @@ -12,7 +12,6 @@ import java.time.OffsetDateTime; import java.time.ZonedDateTime; import java.util.Arrays; -import java.util.Calendar; import java.util.Collections; import java.util.List; import java.util.Map; @@ -22,7 +21,6 @@ import com.clickhouse.client.ClickHouseConfig; import com.clickhouse.client.ClickHouseSimpleResponse; -import com.clickhouse.client.config.ClickHouseClientOption; import com.clickhouse.data.ClickHouseColumn; import com.clickhouse.data.ClickHouseDataType; import com.clickhouse.data.ClickHouseRecord; @@ -397,7 +395,7 @@ public void testNullValue(String columnType, String defaultValue, Class clazz @Test(groups = "unit") public void testFetchSizeOfDetachedResultSet() throws SQLException { - try (ClickHouseResultSet rs = new ClickHouseResultSet("", "", + try (ResultSetImpl rs = new ResultSetImpl("", "", ClickHouseSimpleResponse.of(new ClickHouseConfig(), ClickHouseColumn.parse("s String"), new Object[][] { new Object[] { "a" } }))) { Assert.assertEquals(rs.getFetchSize(), 0); diff --git 
a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/CombinedResultSetTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/CombinedResultSetTest.java index 5dc161c00..bc86131dd 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/CombinedResultSetTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/CombinedResultSetTest.java @@ -19,29 +19,29 @@ public class CombinedResultSetTest { private Object[][] getMultipleResultSets() { ClickHouseConfig config = new ClickHouseConfig(); return new Object[][] { - { new CombinedResultSet(null, new ClickHouseResultSet("", "", + { new CombinedResultSet(null, new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, ClickHouseColumn.parse("s String"), new Object[][] { new Object[] { "a" }, new Object[] { "b" } })), - new ClickHouseResultSet("", "", + new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, ClickHouseColumn.parse("s String"), new Object[][] { new Object[] { "c" }, new Object[] { "d" }, new Object[] { "e" } }))) }, { new CombinedResultSet(Arrays.asList(null, null, - new ClickHouseResultSet("", "", + new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, ClickHouseColumn.parse("s String"), new Object[][] { new Object[] { "a" } })), null, - new ClickHouseResultSet("", "", + new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, ClickHouseColumn.parse("s String"), new Object[][] { new Object[] { "b" } })), - new ClickHouseResultSet("", "", + new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, ClickHouseColumn.parse("s String"), new Object[][] { @@ -64,12 +64,12 @@ private Object[][] getNullOrEmptyResultSet() { private Object[][] getSingleResultSet() { ClickHouseConfig config = new ClickHouseConfig(); return new Object[][] { - { new CombinedResultSet(new ClickHouseResultSet("", "", + { new CombinedResultSet(new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, ClickHouseColumn.parse("s String"), new Object[][] { new Object[] { "a" }, new Object[] { "b" } }))) }, { new CombinedResultSet(Collections.singleton( - new ClickHouseResultSet("", "", ClickHouseSimpleResponse.of(config, + new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, ClickHouseColumn.parse("s String"), new Object[][] { new Object[] { "a" }, new Object[] { "b" } })))) } }; @@ -133,7 +133,7 @@ public void testSingleResultSet(CombinedResultSet combined) throws SQLException @Test(groups = "unit") public void testFetchSize() throws SQLException { - try (CombinedResultSet rs = new CombinedResultSet(new ClickHouseResultSet("", "", + try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", ClickHouseSimpleResponse.of(new ClickHouseConfig(), ClickHouseColumn.parse("s String"), new Object[][] { new Object[] { "a" }, new Object[] { "b" } })))) { Assert.assertEquals(rs.getFetchSize(), 0); @@ -149,7 +149,7 @@ public void testFirstAndLastRow() throws SQLException { ClickHouseConfig config = new ClickHouseConfig(); List columns = ClickHouseColumn.parse("s String"); // no record - try (CombinedResultSet rs = new CombinedResultSet(new ClickHouseResultSet("", "", + try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, columns, new Object[0][])))) { Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); @@ -164,9 +164,9 @@ public void testFirstAndLastRow() throws SQLException { } // no record(with two empty resultsets) - try (CombinedResultSet rs = new 
CombinedResultSet(new ClickHouseResultSet("", "", + try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, columns, new Object[0][])), - new ClickHouseResultSet("", "", + new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, columns, new Object[0][])))) { Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); @@ -181,7 +181,7 @@ public void testFirstAndLastRow() throws SQLException { } // one record - try (CombinedResultSet rs = new CombinedResultSet(new ClickHouseResultSet("", "", + try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, columns, new Object[][] { new Object[] { "a" } })))) { Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); @@ -202,7 +202,7 @@ public void testFirstAndLastRow() throws SQLException { Assert.assertFalse(rs.isLast(), "Should NOT be the last"); } - try (CombinedResultSet rs = new CombinedResultSet(new ClickHouseResultSet("", "", + try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, columns, new Object[][] { new Object[] { "a" }, new Object[] { "b" } })))) { Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); @@ -221,7 +221,7 @@ public void testFirstAndLastRow() throws SQLException { public void testNext() throws SQLException { ClickHouseConfig config = new ClickHouseConfig(); List columns = ClickHouseColumn.parse("s String"); - try (CombinedResultSet rs = new CombinedResultSet(new ClickHouseResultSet("", "", + try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, columns, new Object[][] { new Object[] { "a" }, new Object[] { "b" } })))) { Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); @@ -235,9 +235,9 @@ public void testNext() throws SQLException { Assert.assertTrue(rs.isAfterLast(), "Should be after the last row"); } - try (CombinedResultSet rs = new CombinedResultSet(new ClickHouseResultSet("", "", + try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, columns, new Object[][] { new Object[] { "a" } })), - new ClickHouseResultSet("", "", + new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, columns, new Object[][] { new Object[] { "b" } })))) { Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); Assert.assertTrue(rs.next()); diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseDataSourceTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java similarity index 88% rename from clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseDataSourceTest.java rename to clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java index 4ad13b1f7..7e5140681 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseDataSourceTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java @@ -6,7 +6,6 @@ import java.sql.Statement; import java.util.Properties; -import com.clickhouse.client.ClickHouseServerForTest; import org.testng.Assert; import org.testng.annotations.Test; @@ -16,7 +15,7 @@ import com.clickhouse.client.config.ClickHouseClientOption; import com.clickhouse.client.config.ClickHouseDefaults; -public class ClickHouseDataSourceTest extends JdbcIntegrationTest { +public class DataSourceImplTest extends 
JdbcIntegrationTest { @Test(groups = "integration") public void testHighAvailabilityConfig() throws SQLException { if (isCloud()) return; //TODO: testHighAvailabilityConfig - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 @@ -47,7 +46,7 @@ public void testMultiEndpoints() throws SQLException { Properties props = new Properties(); props.setProperty("user", "default"); props.setProperty("password", ""); - ClickHouseDataSource ds = new ClickHouseDataSource(url, props); + DataSourceImpl ds = new DataSourceImpl(url, props); for (int i = 0; i < 10; i++) { try (Connection httpConn = ds.getConnection(); Connection grpcConn = ds.getConnection("default", ""); @@ -81,17 +80,17 @@ public void testGetConnection() throws SQLException { String params = String.format("?%s=%s&%s=%d&%s", ClickHouseClientOption.CLIENT_NAME.getKey(), clientName, ClickHouseClientOption.MAX_EXECUTION_TIME.getKey(), maxExecuteTime, JdbcConfig.PROP_CONTINUE_BATCH); - for (ClickHouseDataSource ds : new ClickHouseDataSource[] { - new ClickHouseDataSource(url, properties), - new ClickHouseDataSource(urlWithCredentials, properties), - new ClickHouseDataSource(url + params), - new ClickHouseDataSource(urlWithCredentials + params), + for (DataSourceImpl ds : new DataSourceImpl[] { + new DataSourceImpl(url, properties), + new DataSourceImpl(urlWithCredentials, properties), + new DataSourceImpl(url + params), + new DataSourceImpl(urlWithCredentials + params), }) { for (ClickHouseConnection connection : new ClickHouseConnection[] { ds.getConnection("default", getPassword()), - new ClickHouseDriver().connect(url, properties), - new ClickHouseDriver().connect(urlWithCredentials, properties), - new ClickHouseDriver().connect(urlWithCredentials + params, new Properties()), + new DriverImpl().connect(url, properties), + new DriverImpl().connect(urlWithCredentials, properties), + new DriverImpl().connect(urlWithCredentials + params, new Properties()), (ClickHouseConnection) DriverManager.getConnection(url, properties), (ClickHouseConnection) DriverManager.getConnection(urlWithCredentials, properties), (ClickHouseConnection) DriverManager.getConnection(urlWithCredentials + params), diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseDriverTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverImplTest.java similarity index 86% rename from clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseDriverTest.java rename to clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverImplTest.java index c39cfa466..7c467775a 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseDriverTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverImplTest.java @@ -8,11 +8,11 @@ import org.testng.Assert; import org.testng.annotations.Test; -public class ClickHouseDriverTest extends JdbcIntegrationTest { +public class DriverImplTest extends JdbcIntegrationTest { @Test(groups = "integration") public void testAcceptUrl() throws SQLException { String address = getServerAddress(ClickHouseProtocol.HTTP, true); - ClickHouseDriver driver = new ClickHouseDriver(); + DriverImpl driver = new DriverImpl(); Assert.assertTrue(driver.acceptsURL("jdbc:clickhouse://" + address)); Assert.assertTrue(driver.acceptsURL("jdbc:clickhouse:http://" + address)); Assert.assertTrue(driver.acceptsURL("jdbc:ch://" + address)); @@ -23,7 +23,7 @@ public void testAcceptUrl() throws SQLException { public void testConnect() throws SQLException { if (isCloud()) return; //TODO: testConnect - 
Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 String address = getServerAddress(ClickHouseProtocol.HTTP, true); - ClickHouseDriver driver = new ClickHouseDriver(); + DriverImpl driver = new DriverImpl(); ClickHouseConnection conn = driver.connect("jdbc:clickhouse://" + address, null); conn.close(); } diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java index 16c65d28e..9a8f50234 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java @@ -5,7 +5,6 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.Locale; import java.util.Properties; import com.clickhouse.client.ClickHouseServerForTest; @@ -87,19 +86,19 @@ public String getServerAddress(ClickHouseProtocol protocol, String customHostOrI .append(':').append(server.getPort()).toString(); } - public ClickHouseDataSource newDataSource() throws SQLException { + public DataSourceImpl newDataSource() throws SQLException { return newDataSource(null, new Properties()); } - public ClickHouseDataSource newDataSource(Properties properties) throws SQLException { + public DataSourceImpl newDataSource(Properties properties) throws SQLException { return newDataSource(null, properties); } - public ClickHouseDataSource newDataSource(String url) throws SQLException { + public DataSourceImpl newDataSource(String url) throws SQLException { return newDataSource(url, new Properties()); } - public ClickHouseDataSource newDataSource(String url, Properties properties) throws SQLException { + public DataSourceImpl newDataSource(String url, Properties properties) throws SQLException { if (isCloud()) { if (properties == null) { properties = new Properties(); @@ -107,9 +106,9 @@ public ClickHouseDataSource newDataSource(String url, Properties properties) thr properties.put("password", getPassword()); properties.put("user", "default"); url = String.format("jdbc:clickhouse:https://%s/%s", getServerAddress(ClickHouseProtocol.HTTP), ClickHouseServerForTest.getDatabase()); - return new ClickHouseDataSource(buildJdbcUrl(DEFAULT_PROTOCOL, null, url), properties); + return new DataSourceImpl(buildJdbcUrl(DEFAULT_PROTOCOL, null, url), properties); } - return new ClickHouseDataSource(buildJdbcUrl(DEFAULT_PROTOCOL, null, url), properties); + return new DataSourceImpl(buildJdbcUrl(DEFAULT_PROTOCOL, null, url), properties); } public ClickHouseConnection newConnection() throws SQLException { diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIssuesTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIssuesTest.java index 910557730..771b34354 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIssuesTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIssuesTest.java @@ -1,20 +1,13 @@ package com.clickhouse.jdbc; -import com.clickhouse.client.ClickHouseLoadBalancingPolicy; -import com.clickhouse.client.ClickHouseProtocol; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.client.config.ClickHouseDefaults; import org.testcontainers.shaded.org.apache.commons.lang3.StringUtils; import org.testng.Assert; import org.testng.annotations.Test; import java.sql.Connection; -import java.sql.DriverManager; import java.sql.PreparedStatement; 
import java.sql.SQLException; import java.sql.Statement; -import java.util.Arrays; import java.util.Properties; public class JdbcIssuesTest extends JdbcIntegrationTest { @@ -25,7 +18,7 @@ public void test01Decompress() throws SQLException { prop.setProperty("decompress", "true"); prop.setProperty("decompress_algorithm", "lz4"); String url = String.format("jdbc:ch:%s", getEndpointString(true)); - ClickHouseDataSource dataSource = new ClickHouseDataSource(url, prop); + DataSourceImpl dataSource = new DataSourceImpl(url, prop); String columnNames = "event_id"; String columnValues = "('event_id String')"; String sql = String.format("INSERT INTO %s (%s) SELECT %s FROM input %s", TABLE_NAME, columnNames, columnNames, columnValues); @@ -60,7 +53,7 @@ public void test02Decompress() throws SQLException { prop.setProperty("decompress", "true"); prop.setProperty("decompress_algorithm", "lz4"); String url = String.format("jdbc:ch:%s", getEndpointString(true)); - ClickHouseDataSource dataSource = new ClickHouseDataSource(url, prop); + DataSourceImpl dataSource = new DataSourceImpl(url, prop); String columnNames = "event_id"; String columnValues = "('event_id String')"; String sql = String.format("INSERT INTO %s (%s) SELECT %s FROM input %s", TABLE_NAME, columnNames, columnNames, columnValues); @@ -94,7 +87,7 @@ public void test03Decompress() throws SQLException { prop.setProperty("decompress", "true"); prop.setProperty("decompress_algorithm", "lz4"); String url = String.format("jdbc:ch:%s", getEndpointString(true)); - ClickHouseDataSource dataSource = new ClickHouseDataSource(url, prop); + DataSourceImpl dataSource = new DataSourceImpl(url, prop); String columnNames = "event_id, num01,event_id_01 "; String columnValues = "('event_id String, num01 Int8, event_id_01 String')"; String sql = String.format("INSERT INTO %s (%s) SELECT %s FROM input %s", TABLE_NAME, columnNames, columnNames, columnValues); @@ -127,7 +120,7 @@ public void test03Decompress() throws SQLException { public void testIssue1373() throws SQLException { String TABLE_NAME = "issue_1373"; String url = String.format("jdbc:ch:%s", getEndpointString(true)); - ClickHouseDataSource dataSource = new ClickHouseDataSource(url, new Properties()); + DataSourceImpl dataSource = new DataSourceImpl(url, new Properties()); String columnNames = "event_id, num01,event_id_01 "; String columnValues = "('event_id String, num01 Int8, event_id_01 String')"; String sql = String.format("INSERT INTO %s (%s) SELECT %s FROM input %s", TABLE_NAME, columnNames, columnNames, columnValues); diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java index b9e73abfd..77910ad9f 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java @@ -6,6 +6,7 @@ import com.clickhouse.client.ClickHouseRequest; import com.clickhouse.jdbc.ClickHouseConnection; +import com.clickhouse.jdbc.DriverImpl; import com.clickhouse.jdbc.ClickHouseStatement; import com.clickhouse.jdbc.JdbcIntegrationTest; import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; @@ -17,7 +18,7 @@ public class ClickHouseConnectionImplTest extends JdbcIntegrationTest { @Test(groups = "integration") public void testManualCommit() throws SQLException { if (isCloud()) return; //TODO: testManualCommit - Revisit, see: 
https://github.com/ClickHouse/clickhouse-java/issues/1747 - try (ClickHouseConnectionImpl conn = (ClickHouseConnectionImpl) newConnection()) { + try (DriverImpl.ClickHouseConnectionImpl conn = (DriverImpl.ClickHouseConnectionImpl) newConnection()) { Assert.assertEquals(conn.getAutoCommit(), true); Assert.assertNull(conn.getTransaction(), "Should NOT have any transaction"); conn.setAutoCommit(false); @@ -110,7 +111,7 @@ public void testManualCommit() throws SQLException { @Test(groups = "integration") public void testManualRollback() throws SQLException { if (isCloud()) return; //TODO: testManualRollback - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - try (ClickHouseConnectionImpl conn = (ClickHouseConnectionImpl) newConnection()) { + try (DriverImpl.ClickHouseConnectionImpl conn = (DriverImpl.ClickHouseConnectionImpl) newConnection()) { Assert.assertEquals(conn.getAutoCommit(), true); Assert.assertNull(conn.getTransaction(), "Should NOT have any transaction"); conn.setAutoCommit(false); diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParserTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParserTest.java index acb2fb2c3..1a9417e23 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParserTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParserTest.java @@ -10,7 +10,8 @@ import com.clickhouse.client.ClickHouseNode; import com.clickhouse.client.ClickHouseProtocol; import com.clickhouse.client.config.ClickHouseDefaults; -import com.clickhouse.jdbc.internal.ClickHouseJdbcUrlParser.ConnectionInfo; +import com.clickhouse.jdbc.ClickHouseJdbcUrlParser; +import com.clickhouse.jdbc.ClickHouseJdbcUrlParser.ConnectionInfo; import org.testng.Assert; import org.testng.annotations.DataProvider; From 87ffc509a94af0409f9772b6377c17d47c6dc7eb Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Wed, 4 Sep 2024 04:00:51 -0400 Subject: [PATCH 04/21] Cleaning more existing code, adding some basic implementation from the old code --- .../jdbc/ClickHouseDatabaseMetaData.java | 1353 ----------------- .../jdbc/ClickHouseResultSetMetaData.java | 156 -- .../com/clickhouse/jdbc/ConnectionImpl.java | 12 +- .../com/clickhouse/jdbc/ResultSetImpl.java | 11 - .../com/clickhouse/jdbc/StatementImpl.java | 12 +- .../metadata/ClickHouseDatabaseMetaData.java | 901 +++++++++++ .../metadata/ClickHouseParameterMetaData.java | 73 + .../metadata/ClickHouseResultSetMetaData.java | 114 ++ 8 files changed, 1090 insertions(+), 1542 deletions(-) delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaData.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSetMetaData.java create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseDatabaseMetaData.java create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseParameterMetaData.java create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseResultSetMetaData.java diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaData.java deleted file mode 100644 index 19235bc13..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaData.java +++ /dev/null @@ -1,1353 +0,0 @@ -package 
com.clickhouse.jdbc; - -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.ResultSet; -import java.sql.RowIdLifetime; -import java.sql.SQLException; -import java.sql.Types; -import java.time.temporal.Temporal; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -import com.clickhouse.client.ClickHouseParameterizedQuery; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.config.ClickHouseRenameMethod; -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseFormat; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.data.ClickHouseValues; -import com.clickhouse.data.ClickHouseRecordTransformer; -import com.clickhouse.client.ClickHouseSimpleResponse; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; - -public class ClickHouseDatabaseMetaData extends JdbcWrapper implements DatabaseMetaData { - private static final Logger log = LoggerFactory.getLogger(ClickHouseDatabaseMetaData.class); - - private static final String DATABASE_NAME = "ClickHouse"; - private static final String DRIVER_NAME = DATABASE_NAME + " JDBC Driver"; - - private static final String[] TABLE_TYPES = new String[] { "DICTIONARY", "LOG TABLE", "MEMORY TABLE", - "REMOTE TABLE", "TABLE", "VIEW", "SYSTEM TABLE", "TEMPORARY TABLE" }; - - private final ClickHouseConnection connection; - - protected ResultSet empty(String columns) throws SQLException { - return fixed(columns, null); - } - - protected ResultSet fixed(String columns, Object[][] values) throws SQLException { - return new ResultSetImpl("", "", connection.createStatement(), - ClickHouseSimpleResponse.of(connection.getConfig(), ClickHouseColumn.parse(columns), values)); - } - - protected ResultSet query(String sql) throws SQLException { - return query(sql, null, false); - } - - protected ResultSet query(String sql, boolean ignoreError) throws SQLException { - return query(sql, null, ignoreError); - } - - protected ResultSet query(String sql, ClickHouseRecordTransformer func) throws SQLException { - return query(sql, func, false); - } - - protected ResultSet query(String sql, ClickHouseRecordTransformer func, boolean ignoreError) throws SQLException { - SQLException error = null; - try (ClickHouseStatement stmt = connection.createStatement()) { - stmt.setLargeMaxRows(0L); - return new ResultSetImpl("", "", stmt, - // load everything into memory - ClickHouseSimpleResponse.of(stmt.getRequest() - .format(ClickHouseFormat.RowBinaryWithNamesAndTypes) - .option(ClickHouseClientOption.RENAME_RESPONSE_COLUMN, ClickHouseRenameMethod.NONE) - .query(sql).executeAndWait(), func)); - } catch (Exception e) { - error = SqlExceptionUtils.handle(e); - } - - if (ignoreError) { - return null; - } else { - throw error; - } - } - - public ClickHouseDatabaseMetaData(ClickHouseConnection connection) throws SQLException { - this.connection = ClickHouseChecker.nonNull(connection, "Connection"); - } - - @Override - public boolean allProceduresAreCallable() throws SQLException { - return true; - } - - @Override - public boolean allTablesAreSelectable() throws SQLException { - return true; - } - - @Override - public String getURL() throws SQLException { - return connection.getUri().toString(); - } - - 
@Override - public String getUserName() throws SQLException { - return connection.getCurrentUser(); - } - - @Override - public boolean isReadOnly() throws SQLException { - return connection.isReadOnly(); - } - - @Override - public boolean nullsAreSortedHigh() throws SQLException { - return true; - } - - @Override - public boolean nullsAreSortedLow() throws SQLException { - return false; - } - - @Override - public boolean nullsAreSortedAtStart() throws SQLException { - return false; - } - - @Override - public boolean nullsAreSortedAtEnd() throws SQLException { - return false; - } - - @Override - public String getDatabaseProductName() throws SQLException { - return DATABASE_NAME; - } - - @Override - public String getDatabaseProductVersion() throws SQLException { - return connection.getServerVersion().toString(); - } - - @Override - public String getDriverName() throws SQLException { - return DRIVER_NAME; - } - - @Override - public String getDriverVersion() throws SQLException { - return DriverImpl.driverVersionString; - } - - @Override - public int getDriverMajorVersion() { - return DriverImpl.driverVersion.getMajorVersion(); - } - - @Override - public int getDriverMinorVersion() { - return DriverImpl.driverVersion.getMinorVersion(); - } - - @Override - public boolean usesLocalFiles() throws SQLException { - return false; - } - - @Override - public boolean usesLocalFilePerTable() throws SQLException { - return false; - } - - @Override - public boolean supportsMixedCaseIdentifiers() throws SQLException { - return true; - } - - @Override - public boolean storesUpperCaseIdentifiers() throws SQLException { - return false; - } - - @Override - public boolean storesLowerCaseIdentifiers() throws SQLException { - return false; - } - - @Override - public boolean storesMixedCaseIdentifiers() throws SQLException { - return true; - } - - @Override - public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { - return true; - } - - @Override - public boolean storesUpperCaseQuotedIdentifiers() throws SQLException { - return false; - } - - @Override - public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { - return false; - } - - @Override - public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { - return true; - } - - @Override - public String getIdentifierQuoteString() throws SQLException { - return "`"; - } - - @Override - public String getSQLKeywords() throws SQLException { - return "APPLY,ASOF,ATTACH,CLUSTER,DATABASE,DATABASES,DETACH," - + "DICTIONARY,DICTIONARIES,ILIKE,INF,LIMIT,LIVE,KILL,MATERIALIZED," - + "NAN,OFFSET,OPTIMIZE,OUTFILE,POLICY,PREWHERE,PROFILE,QUARTER,QUOTA," - + "RENAME,REPLACE,SAMPLE,SETTINGS,SHOW,TABLES,TIES,TOP,TOTALS,TRUNCATE,USE,WATCH,WEEK"; - } - - @Override - public String getNumericFunctions() throws SQLException { - // took from below URLs(not from system.functions): - // https://clickhouse.com/docs/en/sql-reference/functions/arithmetic-functions/ - // https://clickhouse.com/docs/en/sql-reference/functions/math-functions/ - return "abs,acos,acosh,asin,asinh,atan,atan2,atanh,cbrt,cos,cosh,divide,e,erf,erfc,exp,exp10,exp2,gcd,hypot,intDiv,intDivOrZero,intExp10,intExp2,lcm,lgamma,ln,log,log10,log1p,log2,minus,modulo,moduloOrZero,multiply,negate,pi,plus,pow,power,sign,sin,sinh,sqrt,tan,tgamma"; - } - - @Override - public String getStringFunctions() throws SQLException { - // took from below URLs(not from system.functions): - // https://clickhouse.com/docs/en/sql-reference/functions/string-functions/ - // 
https://clickhouse.com/docs/en/sql-reference/functions/string-search-functions/ - // https://clickhouse.com/docs/en/sql-reference/functions/string-replace-functions/ - return "appendTrailingCharIfAbsent,base64Decode,base64Encode,char_length,CHAR_LENGTH,character_length,CHARACTER_LENGTH,concat,concatAssumeInjective,convertCharset,countMatches,countSubstrings,countSubstringsCaseInsensitive,countSubstringsCaseInsensitiveUTF8,CRC32,CRC32IEEE,CRC64,decodeXMLComponent,empty,encodeXMLComponent,endsWith,extract,extractAll,extractAllGroupsHorizontal,extractAllGroupsVertical,extractTextFromHTML ,format,ilike,isValidUTF8,lcase,leftPad,leftPadUTF8,length,lengthUTF8,like,locate,lower,lowerUTF8,match,mid,multiFuzzyMatchAllIndices,multiFuzzyMatchAny,multiFuzzyMatchAnyIndex,multiMatchAllIndices,multiMatchAny,multiMatchAnyIndex,multiSearchAllPositions,multiSearchAllPositionsUTF8,multiSearchAny,multiSearchFirstIndex,multiSearchFirstPosition,ngramDistance,ngramSearch,normalizedQueryHash,normalizeQuery,notEmpty,notLike,position,positionCaseInsensitive,positionCaseInsensitiveUTF8,positionUTF8,regexpQuoteMeta,repeat,replace,replaceAll,replaceOne,replaceRegexpAll,replaceRegexpOne,reverse,reverseUTF8,rightPad,rightPadUTF8,startsWith,substr,substring,substringUTF8,tokens,toValidUTF8,trim,trimBoth,trimLeft,trimRight,tryBase64Decode,ucase,upper,upperUTF8"; - } - - @Override - public String getSystemFunctions() throws SQLException { - // took from below URL(not from system.functions): - // https://clickhouse.com/docs/en/sql-reference/functions/other-functions/ - return "bar,basename,blockNumber,blockSerializedSize,blockSize,buildId,byteSize,countDigits,currentDatabase,currentProfiles,currentRoles,currentUser,defaultProfiles,defaultRoles,defaultValueOfArgumentType,defaultValueOfTypeName,dumpColumnStructure,enabledProfiles,enabledRoles,errorCodeToName,filesystemAvailable,filesystemCapacity,filesystemFree,finalizeAggregation,formatReadableQuantity,formatReadableSize,formatReadableTimeDelta,FQDN,getMacro,getServerPort,getSetting,getSizeOfEnumType,greatest,hasColumnInTable,hostName,identity,ifNotFinite,ignore,indexHint,initializeAggregation,initialQueryID,isConstant,isDecimalOverflow,isFinite,isInfinite,isNaN,joinGet,least,MACNumToString,MACStringToNum,MACStringToOUI,materialize,modelEvaluate,neighbor,queryID,randomFixedString,randomPrintableASCII,randomString,randomStringUTF8,replicate,rowNumberInAllBlocks,rowNumberInBlock,runningAccumulate,runningConcurrency,runningDifference,runningDifferenceStartingWithFirstValue,shardCount ,shardNum,sleep,sleepEachRow,tcpPort,throwIf,toColumnTypeName,toTypeName,transform,uptime,version,visibleWidth"; - } - - @Override - public String getTimeDateFunctions() throws SQLException { - // took from below URL(not from system.functions): - // https://clickhouse.com/docs/en/sql-reference/functions/date-time-functions/ - return 
"addDays,addHours,addMinutes,addMonths,addQuarters,addSeconds,addWeeks,addYears,date_add,date_diff,date_sub,date_trunc,dateName,formatDateTime,FROM_UNIXTIME,fromModifiedJulianDay,fromModifiedJulianDayOrNull,now,subtractDays,subtractHours,subtractMinutes,subtractMonths,subtractQuarters,subtractSeconds,subtractWeeks,subtractYears,timeSlot,timeSlots,timestamp_add,timestamp_sub,timeZone,timeZoneOf,timeZoneOffset,today,toDayOfMonth,toDayOfWeek,toDayOfYear,toHour,toISOWeek,toISOYear,toMinute,toModifiedJulianDay,toModifiedJulianDayOrNull,toMonday,toMonth,toQuarter,toRelativeDayNum,toRelativeHourNum,toRelativeMinuteNum,toRelativeMonthNum,toRelativeQuarterNum,toRelativeSecondNum,toRelativeWeekNum,toRelativeYearNum,toSecond,toStartOfDay,toStartOfFifteenMinutes,toStartOfFiveMinute,toStartOfHour,toStartOfInterval,toStartOfISOYear,toStartOfMinute,toStartOfMonth,toStartOfQuarter,toStartOfSecond,toStartOfTenMinutes,toStartOfWeek,toStartOfYear,toTime,toTimeZone,toUnixTimestamp,toWeek,toYear,toYearWeek,toYYYYMM,toYYYYMMDD,toYYYYMMDDhhmmss,yesterday"; - } - - @Override - public String getSearchStringEscape() throws SQLException { - return "\\"; - } - - @Override - public String getExtraNameCharacters() throws SQLException { - return ""; - } - - @Override - public boolean supportsAlterTableWithAddColumn() throws SQLException { - return true; - } - - @Override - public boolean supportsAlterTableWithDropColumn() throws SQLException { - return true; - } - - @Override - public boolean supportsColumnAliasing() throws SQLException { - return true; - } - - @Override - public boolean nullPlusNonNullIsNull() throws SQLException { - return true; - } - - @Override - public boolean supportsConvert() throws SQLException { - // TODO select { fn CONVERT({ts '2021-01-01 12:12:12'}, TIMESTAMP) } - // select cast('2021-01-01 12:12:12' as DateTime) - return false; - } - - @Override - public boolean supportsConvert(int fromType, int toType) throws SQLException { - // TODO select { fn CONVERT({ts '2021-01-01 12:12:12'}, TIMESTAMP) } - // select cast('2021-01-01 12:12:12' as DateTime) - return false; - } - - @Override - public boolean supportsTableCorrelationNames() throws SQLException { - return true; - } - - @Override - public boolean supportsDifferentTableCorrelationNames() throws SQLException { - return false; - } - - @Override - public boolean supportsExpressionsInOrderBy() throws SQLException { - return true; - } - - @Override - public boolean supportsOrderByUnrelated() throws SQLException { - return true; - } - - @Override - public boolean supportsGroupBy() throws SQLException { - return true; - } - - @Override - public boolean supportsGroupByUnrelated() throws SQLException { - return true; - } - - @Override - public boolean supportsGroupByBeyondSelect() throws SQLException { - return true; - } - - @Override - public boolean supportsLikeEscapeClause() throws SQLException { - return true; - } - - @Override - public boolean supportsMultipleResultSets() throws SQLException { - // TODO let's add this in 0.3.3 - return false; - } - - @Override - public boolean supportsMultipleTransactions() throws SQLException { - return false; - } - - @Override - public boolean supportsNonNullableColumns() throws SQLException { - return true; - } - - @Override - public boolean supportsMinimumSQLGrammar() throws SQLException { - return true; - } - - @Override - public boolean supportsCoreSQLGrammar() throws SQLException { - return true; - } - - @Override - public boolean supportsExtendedSQLGrammar() throws SQLException { - return false; - } - - 
@Override - public boolean supportsANSI92EntryLevelSQL() throws SQLException { - return true; - } - - @Override - public boolean supportsANSI92IntermediateSQL() throws SQLException { - return false; - } - - @Override - public boolean supportsANSI92FullSQL() throws SQLException { - return false; - } - - @Override - public boolean supportsIntegrityEnhancementFacility() throws SQLException { - return false; - } - - @Override - public boolean supportsOuterJoins() throws SQLException { - return true; - } - - @Override - public boolean supportsFullOuterJoins() throws SQLException { - return true; - } - - @Override - public boolean supportsLimitedOuterJoins() throws SQLException { - return true; - } - - @Override - public String getSchemaTerm() throws SQLException { - return connection.getJdbcConfig().useSchema() ? JdbcConfig.TERM_DATABASE : JdbcConfig.TERM_SCHEMA; - } - - @Override - public String getProcedureTerm() throws SQLException { - return "procedure"; - } - - @Override - public String getCatalogTerm() throws SQLException { - return connection.getJdbcConfig().useCatalog() ? JdbcConfig.TERM_DATABASE : JdbcConfig.TERM_CATALOG; - } - - @Override - public boolean isCatalogAtStart() throws SQLException { - return connection.getJdbcConfig().useCatalog(); - } - - @Override - public String getCatalogSeparator() throws SQLException { - return "."; - } - - @Override - public boolean supportsSchemasInDataManipulation() throws SQLException { - return connection.getJdbcConfig().useSchema(); - } - - @Override - public boolean supportsSchemasInProcedureCalls() throws SQLException { - return connection.getJdbcConfig().useSchema(); - } - - @Override - public boolean supportsSchemasInTableDefinitions() throws SQLException { - return connection.getJdbcConfig().useSchema(); - } - - @Override - public boolean supportsSchemasInIndexDefinitions() throws SQLException { - return connection.getJdbcConfig().useSchema(); - } - - @Override - public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException { - return connection.getJdbcConfig().useSchema(); - } - - @Override - public boolean supportsCatalogsInDataManipulation() throws SQLException { - return connection.getJdbcConfig().useCatalog(); - } - - @Override - public boolean supportsCatalogsInProcedureCalls() throws SQLException { - return connection.getJdbcConfig().useCatalog(); - } - - @Override - public boolean supportsCatalogsInTableDefinitions() throws SQLException { - return connection.getJdbcConfig().useCatalog(); - } - - @Override - public boolean supportsCatalogsInIndexDefinitions() throws SQLException { - return connection.getJdbcConfig().useCatalog(); - } - - @Override - public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException { - return connection.getJdbcConfig().useCatalog(); - } - - @Override - public boolean supportsPositionedDelete() throws SQLException { - return false; - } - - @Override - public boolean supportsPositionedUpdate() throws SQLException { - return false; - } - - @Override - public boolean supportsSelectForUpdate() throws SQLException { - return false; - } - - @Override - public boolean supportsStoredProcedures() throws SQLException { - return false; - } - - @Override - public boolean supportsSubqueriesInComparisons() throws SQLException { - return true; - } - - @Override - public boolean supportsSubqueriesInExists() throws SQLException { - return false; - } - - @Override - public boolean supportsSubqueriesInIns() throws SQLException { - return true; - } - - @Override - public boolean 
supportsSubqueriesInQuantifieds() throws SQLException { - return true; - } - - @Override - public boolean supportsCorrelatedSubqueries() throws SQLException { - return true; - } - - @Override - public boolean supportsUnion() throws SQLException { - return true; - } - - @Override - public boolean supportsUnionAll() throws SQLException { - return true; - } - - @Override - public boolean supportsOpenCursorsAcrossCommit() throws SQLException { - return false; - } - - @Override - public boolean supportsOpenCursorsAcrossRollback() throws SQLException { - return false; - } - - @Override - public boolean supportsOpenStatementsAcrossCommit() throws SQLException { - return false; - } - - @Override - public boolean supportsOpenStatementsAcrossRollback() throws SQLException { - return false; - } - - @Override - public int getMaxBinaryLiteralLength() throws SQLException { - return 0; - } - - @Override - public int getMaxCharLiteralLength() throws SQLException { - return 0; - } - - @Override - public int getMaxColumnNameLength() throws SQLException { - return 0; - } - - @Override - public int getMaxColumnsInGroupBy() throws SQLException { - return 0; - } - - @Override - public int getMaxColumnsInIndex() throws SQLException { - return 0; - } - - @Override - public int getMaxColumnsInOrderBy() throws SQLException { - return 0; - } - - @Override - public int getMaxColumnsInSelect() throws SQLException { - return 0; - } - - @Override - public int getMaxColumnsInTable() throws SQLException { - return 0; - } - - @Override - public int getMaxConnections() throws SQLException { - return 0; - } - - @Override - public int getMaxCursorNameLength() throws SQLException { - return 0; - } - - @Override - public int getMaxIndexLength() throws SQLException { - return 0; - } - - @Override - public int getMaxSchemaNameLength() throws SQLException { - return 0; - } - - @Override - public int getMaxProcedureNameLength() throws SQLException { - return 0; - } - - @Override - public int getMaxCatalogNameLength() throws SQLException { - return 0; - } - - @Override - public int getMaxRowSize() throws SQLException { - return 0; - } - - @Override - public boolean doesMaxRowSizeIncludeBlobs() throws SQLException { - return true; - } - - @Override - public int getMaxStatementLength() throws SQLException { - return 0; - } - - @Override - public int getMaxStatements() throws SQLException { - return 0; - } - - @Override - public int getMaxTableNameLength() throws SQLException { - return 0; - } - - @Override - public int getMaxTablesInSelect() throws SQLException { - return 0; - } - - @Override - public int getMaxUserNameLength() throws SQLException { - return 0; - } - - @Override - public int getDefaultTransactionIsolation() throws SQLException { - return connection.getJdbcConfig().isJdbcCompliant() ? 
Connection.TRANSACTION_REPEATABLE_READ - : Connection.TRANSACTION_NONE; - } - - @Override - public boolean supportsTransactions() throws SQLException { - return connection.isTransactionSupported() || connection.getJdbcConfig().isJdbcCompliant(); - } - - @Override - public boolean supportsTransactionIsolationLevel(int level) throws SQLException { - if (Connection.TRANSACTION_NONE == level) { - return true; - } else if (Connection.TRANSACTION_READ_UNCOMMITTED != level && Connection.TRANSACTION_READ_COMMITTED != level - && Connection.TRANSACTION_REPEATABLE_READ != level && Connection.TRANSACTION_SERIALIZABLE != level) { - throw SqlExceptionUtils.clientError("Unknown isolation level: " + level); - } - - return (connection.isTransactionSupported() && Connection.TRANSACTION_REPEATABLE_READ == level) - || connection.getJdbcConfig().isJdbcCompliant(); - } - - @Override - public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException { - return false; - } - - @Override - public boolean supportsDataManipulationTransactionsOnly() throws SQLException { - return connection.getJdbcConfig().isJdbcCompliant(); - } - - @Override - public boolean dataDefinitionCausesTransactionCommit() throws SQLException { - return false; - } - - @Override - public boolean dataDefinitionIgnoredInTransactions() throws SQLException { - return false; - } - - @Override - public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern) - throws SQLException { - return empty("PROCEDURE_CAT Nullable(String), PROCEDURE_SCHEM Nullable(String), " - + "RESERVED1 Nullable(String), RESERVED2 Nullable(String), RESERVED3 Nullable(String), " - + "PROCEDURE_NAME String, REMARKS String, PROCEDURE_TYPE Int16, SPECIFIC_NAME String"); - } - - @Override - public ResultSet getProcedureColumns(String catalog, String schemaPattern, String procedureNamePattern, - String columnNamePattern) throws SQLException { - return empty("PROCEDURE_CAT Nullable(String), PROCEDURE_SCHEM Nullable(String), " - + "PROCEDURE_NAME String, COLUMN_NAME String, COLUMN_TYPE Int16, " - + "DATA_TYPE Int32, TYPE_NAME String, PRECISION Int32, LENGTH Int32, " - + "SCALE Int16, RADIX Int16, NULLABLE Int16, REMARKS String, " - + "COLUMN_DEF Nullable(String), SQL_DATA_TYPE Int32, SQL_DATETIME_SUB Int32, " - + "CHAR_OCTET_LENGTH Int32, ORDINAL_POSITION Int32, IS_NULLABLE String, SPECIFIC_NAME String"); - } - - @Override - public ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types) - throws SQLException { - StringBuilder builder = new StringBuilder(); - if (types == null || types.length == 0) { - types = TABLE_TYPES; - } - for (String type : types) { - builder.append('\'').append(ClickHouseUtils.escape(type, '\'')).append('\'').append(','); - } - builder.setLength(builder.length() - 1); - - String databasePattern = connection.getJdbcConfig().useCatalog() ? catalog : schemaPattern; - List databases = new LinkedList<>(); - if (ClickHouseChecker.isNullOrEmpty(databasePattern)) { - try (ResultSet rs = query("select name from system.databases order by name")) { - while (rs.next()) { - databases.add(rs.getString(1)); - } - } catch (Exception e) { - // ignore - } finally { - if (databases.isEmpty()) { - databases.add("%"); - } - } - } else { - databases.add(databasePattern); - } - - List results = new ArrayList<>(databases.size()); - String commentColumn = connection.getServerVersion().check("[21.6,)") ? 
"t.comment" : "''"; - String catalogColumn = ClickHouseValues.NULL_EXPR; - String schemaColumn = catalogColumn; - if (connection.getJdbcConfig().useCatalog()) { - catalogColumn = "t.database"; - } else { - schemaColumn = "t.database"; - } - for (String database : databases) { - Map params = new HashMap<>(); - params.put(JdbcConfig.TERM_COMMENT, commentColumn); - params.put(JdbcConfig.TERM_CATALOG, catalogColumn); - params.put(JdbcConfig.TERM_SCHEMA, schemaColumn); - params.put(JdbcConfig.TERM_DATABASE, ClickHouseValues.convertToQuotedString(database)); - params.put(JdbcConfig.TERM_TABLE, ClickHouseChecker.isNullOrEmpty(tableNamePattern) ? "'%'" - : ClickHouseValues.convertToQuotedString(tableNamePattern)); - params.put("types", builder.toString()); - String sql = ClickHouseParameterizedQuery - .apply("select :catalog as TABLE_CAT, :schema as TABLE_SCHEM, t.name as TABLE_NAME, " - + "case when t.engine like '%Log' then 'LOG TABLE' " - + "when t.engine in ('Buffer', 'Memory', 'Set') then 'MEMORY TABLE' " - + "when t.is_temporary != 0 then 'TEMPORARY TABLE' " - + "when t.engine like '%View' then 'VIEW' when t.engine = 'Dictionary' then 'DICTIONARY' " - + "when t.engine like 'Async%' or t.engine like 'System%' then 'SYSTEM TABLE' " - + "when empty(t.data_paths) then 'REMOTE TABLE' else 'TABLE' end as TABLE_TYPE, " - + ":comment as REMARKS, null as TYPE_CAT, d.engine as TYPE_SCHEM, " - + "t.engine as TYPE_NAME, null as SELF_REFERENCING_COL_NAME, null as REF_GENERATION\n" - + "from system.tables t inner join system.databases d on t.database = d.name\n" - + "where t.database like :database and t.name like :table and TABLE_TYPE in (:types) " - + "order by t.database, t.name", params); - results.add(query(sql, true)); - } - return new CombinedResultSet(results); - } - - @Override - public ResultSet getSchemas() throws SQLException { - return getSchemas(null, null); - } - - @Override - public ResultSet getCatalogs() throws SQLException { - if (!connection.getJdbcConfig().useCatalog()) { - return empty("TABLE_CAT String"); - } - - ResultSet rs = query("select name as TABLE_CAT from system.databases order by name"); - if (!connection.getJdbcConfig().isExternalDatabaseSupported()) { - return rs; - } - return new CombinedResultSet( - rs, - query("select concat('jdbc(''', name, ''')') as TABLE_CAT from jdbc('', 'SHOW DATASOURCES') order by name", - true)); - } - - @Override - public ResultSet getTableTypes() throws SQLException { - // "TABLE", "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY", - // "ALIAS", "SYNONYM". - int len = TABLE_TYPES.length; - Object[][] rows = new Object[len][]; - for (int i = 0; i < len; i++) { - rows[i] = new Object[] { TABLE_TYPES[i] }; - } - return fixed("TABLE_TYPE String", rows); - } - - @Override - public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) - throws SQLException { - Map params = new HashMap<>(); - params.put(JdbcConfig.TERM_COMMENT, - connection.getServerVersion().check("[18.16,)") ? JdbcConfig.TERM_COMMENT : "''"); - if (connection.getJdbcConfig().useCatalog()) { - params.put(JdbcConfig.TERM_CATALOG, JdbcConfig.TERM_DATABASE); - params.put(JdbcConfig.TERM_SCHEMA, ClickHouseValues.NULL_EXPR); - } else { - params.put(JdbcConfig.TERM_CATALOG, ClickHouseValues.NULL_EXPR); - params.put(JdbcConfig.TERM_SCHEMA, JdbcConfig.TERM_DATABASE); - } - String databasePattern = connection.getJdbcConfig().useCatalog() ? 
catalog : schemaPattern; - params.put(JdbcConfig.TERM_DATABASE, ClickHouseChecker.isNullOrEmpty(databasePattern) ? "'%'" - : ClickHouseValues.convertToQuotedString(databasePattern)); - params.put(JdbcConfig.TERM_TABLE, ClickHouseChecker.isNullOrEmpty(tableNamePattern) ? "'%'" - : ClickHouseValues.convertToQuotedString(tableNamePattern)); - params.put("column", ClickHouseChecker.isNullOrEmpty(columnNamePattern) ? "'%'" - : ClickHouseValues.convertToQuotedString(columnNamePattern)); - params.put("defaultNullable", String.valueOf(DatabaseMetaData.typeNullable)); - params.put("defaultNonNull", String.valueOf(DatabaseMetaData.typeNoNulls)); - params.put("defaultType", String.valueOf(Types.OTHER)); - String sql = ClickHouseParameterizedQuery - .apply("select :catalog as TABLE_CAT, :schema as TABLE_SCHEM, table as TABLE_NAME, " - + "name as COLUMN_NAME, toInt32(:defaultType) as DATA_TYPE, type as TYPE_NAME, toInt32(0) as COLUMN_SIZE, " - + "0 as BUFFER_LENGTH, cast(null as Nullable(Int32)) as DECIMAL_DIGITS, 10 as NUM_PREC_RADIX, " - + "toInt32(position(type, 'Nullable(') >= 1 ? :defaultNullable : :defaultNonNull) as NULLABLE, :comment as REMARKS, default_expression as COLUMN_DEF, " - + "0 as SQL_DATA_TYPE, 0 as SQL_DATETIME_SUB, cast(null as Nullable(Int32)) as CHAR_OCTET_LENGTH, position as ORDINAL_POSITION, " - + "position(type, 'Nullable(') >= 1 ? 'YES' : 'NO' as IS_NULLABLE, null as SCOPE_CATALOG, null as SCOPE_SCHEMA, null as SCOPE_TABLE, " - + "null as SOURCE_DATA_TYPE, 'NO' as IS_AUTOINCREMENT, 'NO' as IS_GENERATEDCOLUMN from system.columns " - + "where database like :database and table like :table and name like :column", params); - return query(sql, (i, r) -> { - String typeName = r.getValue("TYPE_NAME").asString(); - try { - ClickHouseColumn column = ClickHouseColumn.of("", typeName); - r.getValue("DATA_TYPE") - .update(connection.getJdbcTypeMapping().toSqlType(column, connection.getTypeMap())); - r.getValue("COLUMN_SIZE").update( - column.getPrecision() > 0 ? 
column.getPrecision() : column.getDataType().getByteLength()); - if (column.isNullable()) { - r.getValue("NULLABLE").update(DatabaseMetaData.typeNullable); - r.getValue("IS_NULLABLE").update("YES"); - } else { - r.getValue("NULLABLE").update(DatabaseMetaData.typeNoNulls); - r.getValue("IS_NULLABLE").update("NO"); - } - - if (column.getDataType() == ClickHouseDataType.FixedString) { - r.getValue("CHAR_OCTET_LENGTH").update(column.getPrecision()); - } - - Class clazz = column.getObjectClass(connection.getConfig()); - if (column.getScale() > 0 || Number.class.isAssignableFrom(clazz) || Date.class.isAssignableFrom(clazz) - || Temporal.class.isAssignableFrom(clazz)) { - r.getValue("DECIMAL_DIGITS").update(column.getScale()); - } else { - r.getValue("DECIMAL_DIGITS").resetToNullOrEmpty(); - } - } catch (Exception e) { - log.warn("Failed to read column: %s", typeName, e); - } - }); - } - - @Override - public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern) - throws SQLException { - return empty("TABLE_CAT Nullable(String), TABLE_SCHEM Nullable(String), TABLE_NAME String, " - + "COLUMN_NAME String, GRANTOR Nullable(String), GRANTEE String, PRIVILEGE String, " - + "IS_GRANTABLE Nullable(String)"); - } - - @Override - public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern) - throws SQLException { - return empty("TABLE_CAT Nullable(String), TABLE_SCHEM Nullable(String), TABLE_NAME String, " - + "GRANTOR Nullable(String), GRANTEE String, PRIVILEGE String, IS_GRANTABLE Nullable(String)"); - } - - @Override - public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable) - throws SQLException { - return getVersionColumns(catalog, schema, table); - } - - @Override - public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException { - return empty("SCOPE Int16, COLUMN_NAME String, DATA_TYPE Int32, TYPE_NAME String, " - + "COLUMN_SIZE Int32, BUFFER_LENGTH Int32, DECIMAL_DIGITS Int16, PSEUDO_COLUMN Int16"); - } - - @Override - public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException { - return empty("TABLE_CAT Nullable(String), TABLE_SCHEM Nullable(String), TABLE_NAME String, " - + "COLUMN_NAME String, KEY_SEQ Int16, PK_NAME String"); - } - - @Override - public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException { - return empty("PKTABLE_CAT Nullable(String), PKTABLE_SCHEM Nullable(String), PKTABLE_NAME String, " - + "PKCOLUMN_NAME String, FKTABLE_CAT Nullable(String), FKTABLE_SCHEM Nullable(String), " - + "FKTABLE_NAME String, FKCOLUMN_NAME String, KEY_SEQ Int16, UPDATE_RULE Int16, " - + "DELETE_RULE Int16, FK_NAME Nullable(String), PK_NAME Nullable(String), DEFERRABILITY Int16"); - } - - @Override - public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException { - return getImportedKeys(catalog, schema, table); - } - - @Override - public ResultSet getCrossReference(String parentCatalog, String parentSchema, String parentTable, - String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException { - return empty("PKTABLE_CAT Nullable(String), PKTABLE_SCHEM Nullable(String), PKTABLE_NAME String, " - + "PKCOLUMN_NAME String, FKTABLE_CAT Nullable(String), FKTABLE_SCHEM Nullable(String), " - + "FKTABLE_NAME String, FKCOLUMN_NAME String, KEY_SEQ Int16, UPDATE_RULE Int16, " - + "DELETE_RULE Int16, FK_NAME 
Nullable(String), PK_NAME Nullable(String), DEFERRABILITY Int16"); - } - - private Object[] toTypeRow(String typeName, String aliasTo) throws SQLException { - ClickHouseDataType type; - try { - type = ClickHouseDataType.of(typeName); - } catch (Exception e) { - if (aliasTo == null || aliasTo.isEmpty()) { - return new Object[0]; - } - try { - type = ClickHouseDataType.of(aliasTo); - } catch (Exception ex) { - return new Object[0]; - } - } - - String prefix = ""; - String suffix = ""; - String params = ""; - int nullable = DatabaseMetaData.typeNullable; - int searchable = type == ClickHouseDataType.FixedString || type == ClickHouseDataType.String - ? DatabaseMetaData.typeSearchable - : DatabaseMetaData.typePredBasic; - int money = 0; - switch (type) { - case Date: - case Date32: - case DateTime: - case DateTime32: - case DateTime64: - case Enum8: - case Enum16: - case String: - case FixedString: - case UUID: - prefix = "'"; - suffix = "'"; - break; - case Array: - case Nested: - case Ring: - case Polygon: - case MultiPolygon: - prefix = "["; - suffix = "]"; - nullable = DatabaseMetaData.typeNoNulls; - break; - case AggregateFunction: - case Tuple: - case Point: - prefix = "("; - suffix = ")"; - nullable = DatabaseMetaData.typeNoNulls; - break; - case Map: - prefix = "{"; - suffix = "}"; - nullable = DatabaseMetaData.typeNoNulls; - break; - default: - break; - } - return new Object[] { typeName, - connection.getJdbcTypeMapping().toSqlType(ClickHouseColumn.of("", type, false, false, new String[0]), - connection.getTypeMap()), - type.getMaxPrecision(), prefix, suffix, params, nullable, type.isCaseSensitive() ? 1 : 0, searchable, - type.getMaxPrecision() > 0 && !type.isSigned() ? 1 : 0, money, 0, - aliasTo == null || aliasTo.isEmpty() ? type.name() : aliasTo, type.getMinScale(), type.getMaxScale(), 0, - 0, 10 }; - } - - @Override - public ResultSet getTypeInfo() throws SQLException { - List list = new ArrayList<>(); - try (ResultSet rs = query("select name, alias_to from system.data_type_families order by name")) { - while (rs.next()) { - Object[] row = toTypeRow(rs.getString(1), rs.getString(2)); - if (row.length > 0) { - list.add(row); - } - } - } - - return fixed("TYPE_NAME String, DATA_TYPE Int32, PRECISION Int32, " - + "LITERAL_PREFIX Nullable(String), LITERAL_SUFFIX Nullable(String), CREATE_PARAMS Nullable(String), " - + "NULLABLE Int16, CASE_SENSITIVE UInt8, SEARCHABLE Int16, UNSIGNED_ATTRIBUTE UInt8, " - + "FIXED_PREC_SCALE UInt8, AUTO_INCREMENT UInt8, LOCAL_TYPE_NAME Nullable(String), " - + "MINIMUM_SCALE Int16, MAXIMUM_SCALE Int16, SQL_DATA_TYPE Int32, SQL_DATETIME_SUB Int32, " - + "NUM_PREC_RADIX Int32", list.toArray(new Object[0][])); - } - - @Override - public ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate) - throws SQLException { - Map params = new HashMap<>(); - if (connection.getJdbcConfig().useCatalog()) { - params.put(JdbcConfig.TERM_CATALOG, JdbcConfig.TERM_DATABASE); - params.put(JdbcConfig.TERM_SCHEMA, ClickHouseValues.NULL_EXPR); - } else { - params.put(JdbcConfig.TERM_CATALOG, ClickHouseValues.NULL_EXPR); - params.put(JdbcConfig.TERM_SCHEMA, JdbcConfig.TERM_DATABASE); - } - params.put(JdbcConfig.TERM_DATABASE, - ClickHouseChecker.isNullOrEmpty(schema) ? "'%'" : ClickHouseValues.convertToQuotedString(schema)); - params.put(JdbcConfig.TERM_TABLE, - ClickHouseChecker.isNullOrEmpty(table) ? 
"'%'" : ClickHouseValues.convertToQuotedString(table)); - params.put("statIndex", String.valueOf(DatabaseMetaData.tableIndexStatistic)); - params.put("otherIndex", String.valueOf(DatabaseMetaData.tableIndexOther)); - return new CombinedResultSet( - empty("TABLE_CAT Nullable(String), TABLE_SCHEM Nullable(String), TABLE_NAME String, " - + "NON_UNIQUE UInt8, INDEX_QUALIFIER Nullable(String), INDEX_NAME Nullable(String), " - + "TYPE Int16, ORDINAL_POSITION Int16, COLUMN_NAME Nullable(String), ASC_OR_DESC Nullable(String), " - + "CARDINALITY Int64, PAGES Int64, FILTER_CONDITION Nullable(String)"), - query(ClickHouseParameterizedQuery.apply( - "select :catalog as TABLE_CAT, :schema as TABLE_SCHEM, table as TABLE_NAME, toUInt8(0) as NON_UNIQUE, " - + "null as INDEX_QUALIFIER, null as INDEX_NAME, toInt16(:statIndex) as TYPE, " - + "toInt16(0) as ORDINAL_POSITION, null as COLUMN_NAME, null as ASC_OR_DESC, " - + "sum(rows) as CARDINALITY, uniqExact(name) as PAGES, null as FILTER_CONDITION from system.parts " - + "where active = 1 and database like :database and table like :table group by database, table", - params), true), - query(ClickHouseParameterizedQuery.apply( - "select :catalog as TABLE_CAT, :schema as TABLE_SCHEM, table as TABLE_NAME, toUInt8(1) as NON_UNIQUE, " - + "type as INDEX_QUALIFIER, name as INDEX_NAME, toInt16(:otherIndex) as TYPE, " - + "toInt16(1) as ORDINAL_POSITION, expr as COLUMN_NAME, null as ASC_OR_DESC, " - + "0 as CARDINALITY, 0 as PAGES, null as FILTER_CONDITION " - + "from system.data_skipping_indices where database like :database and table like :table", - params), true), - query(ClickHouseParameterizedQuery.apply( - "select :catalog as TABLE_CAT, :schema as TABLE_SCHEM, table as TABLE_NAME, toUInt8(1) as NON_UNIQUE, " - + "null as INDEX_QUALIFIER, name as INDEX_NAME, toInt16(:otherIndex) as TYPE, " - + "column_position as ORDINAL_POSITION, column as COLUMN_NAME, null as ASC_OR_DESC, " - + "sum(rows) as CARDINALITY, uniqExact(partition) as PAGES, null as FILTER_CONDITION " - + "from system.projection_parts_columns where active = 1 and database like :database and table like :table " - + "group by database, table, name, column, column_position " - + "order by database, table, name, column_position", - params), true)); - } - - @Override - public boolean supportsResultSetType(int type) throws SQLException { - return ResultSet.TYPE_FORWARD_ONLY == type; - } - - @Override - public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException { - return false; - } - - @Override - public boolean ownUpdatesAreVisible(int type) throws SQLException { - return true; - } - - @Override - public boolean ownDeletesAreVisible(int type) throws SQLException { - return true; - } - - @Override - public boolean ownInsertsAreVisible(int type) throws SQLException { - return true; - } - - @Override - public boolean othersUpdatesAreVisible(int type) throws SQLException { - return true; - } - - @Override - public boolean othersDeletesAreVisible(int type) throws SQLException { - return true; - } - - @Override - public boolean othersInsertsAreVisible(int type) throws SQLException { - return true; - } - - @Override - public boolean updatesAreDetected(int type) throws SQLException { - return false; - } - - @Override - public boolean deletesAreDetected(int type) throws SQLException { - return false; - } - - @Override - public boolean insertsAreDetected(int type) throws SQLException { - return false; - } - - @Override - public boolean supportsBatchUpdates() throws SQLException { - 
return true; - } - - @Override - public ResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types) - throws SQLException { - return empty("TYPE_CAT Nullable(String), TYPE_SCHEM Nullable(String), TYPE_NAME String, " - + "CLASS_NAME String, DATA_TYPE Int32, REMARKS String, BASE_TYPE Int16"); - } - - @Override - public Connection getConnection() throws SQLException { - return connection; - } - - @Override - public boolean supportsSavepoints() throws SQLException { - return false; - } - - @Override - public boolean supportsNamedParameters() throws SQLException { - return false; - } - - @Override - public boolean supportsMultipleOpenResults() throws SQLException { - return false; - } - - @Override - public boolean supportsGetGeneratedKeys() throws SQLException { - return false; - } - - @Override - public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) throws SQLException { - return empty("TYPE_CAT Nullable(String), TYPE_SCHEM Nullable(String), TYPE_NAME String, " - + "SUPERTYPE_CAT Nullable(String), SUPERTYPE_SCHEM Nullable(String), SUPERTYPE_NAME String"); - } - - @Override - public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { - return empty( - "TABLE_CAT Nullable(String), TABLE_SCHEM Nullable(String), TABLE_NAME String, SUPERTABLE_NAME String"); - } - - @Override - public ResultSet getAttributes(String catalog, String schemaPattern, String typeNamePattern, - String attributeNamePattern) throws SQLException { - return empty("TYPE_CAT Nullable(String), TYPE_SCHEM Nullable(String), TYPE_NAME String, " - + "ATTR_NAME String, DATA_TYPE Int32, ATTR_TYPE_NAME String, ATTR_SIZE Int32, " - + "DECIMAL_DIGITS Int32, NUM_PREC_RADIX Int32, NULLABLE Int32, REMARKS Nullable(String), " - + "ATTR_DEF Nullable(String), SQL_DATA_TYPE Int32, SQL_DATETIME_SUB Int32, " - + "CHAR_OCTET_LENGTH Int32, ORDINAL_POSITION Int32, IS_NULLABLE String, " - + "SCOPE_CATALOG String, SCOPE_SCHEMA String, SCOPE_TABLE String, SOURCE_DATA_TYPE Int16"); - } - - @Override - public boolean supportsResultSetHoldability(int holdability) throws SQLException { - return false; - } - - @Override - public int getResultSetHoldability() throws SQLException { - return ResultSet.HOLD_CURSORS_OVER_COMMIT; - } - - @Override - public int getDatabaseMajorVersion() throws SQLException { - return connection.getServerVersion().getMajorVersion(); - } - - @Override - public int getDatabaseMinorVersion() throws SQLException { - return connection.getServerVersion().getMinorVersion(); - } - - @Override - public int getJDBCMajorVersion() throws SQLException { - return DriverImpl.specVersion.getMajorVersion(); - } - - @Override - public int getJDBCMinorVersion() throws SQLException { - return DriverImpl.specVersion.getMinorVersion(); - } - - @Override - public int getSQLStateType() throws SQLException { - return sqlStateSQL; - } - - @Override - public boolean locatorsUpdateCopy() throws SQLException { - return false; - } - - @Override - public boolean supportsStatementPooling() throws SQLException { - return false; - } - - @Override - public RowIdLifetime getRowIdLifetime() throws SQLException { - return RowIdLifetime.ROWID_UNSUPPORTED; - } - - @Override - public ResultSet getSchemas(String catalog, String schemaPattern) throws SQLException { - if (!connection.getJdbcConfig().useSchema()) { - return empty("TABLE_SCHEM String, TABLE_CATALOG Nullable(String)"); - } - - Map params = Collections.singletonMap("pattern", - 
ClickHouseChecker.isNullOrEmpty(schemaPattern) ? "'%'" - : ClickHouseValues.convertToQuotedString(schemaPattern)); - ResultSet rs = query(ClickHouseParameterizedQuery.apply("select name as TABLE_SCHEM, null as TABLE_CATALOG " - + "from system.databases where name like :pattern order by name", params)); - if (!connection.getJdbcConfig().isExternalDatabaseSupported()) { - return rs; - } - - return new CombinedResultSet( - rs, - query(ClickHouseParameterizedQuery.apply( - "select concat('jdbc(''', name, ''')') as TABLE_SCHEM, null as TABLE_CATALOG " - + "from jdbc('', 'SHOW DATASOURCES') where TABLE_SCHEM like :pattern order by name", - params), true)); - } - - @Override - public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException { - return false; - } - - @Override - public boolean autoCommitFailureClosesAllResultSets() throws SQLException { - return false; - } - - @Override - public ResultSet getClientInfoProperties() throws SQLException { - ClickHouseParameterizedQuery q = ClickHouseParameterizedQuery - .of(connection.getConfig(), - "select :name as NAME, toInt32(0) as MAX_LEN, :default as DEFAULT_VALUE, :desc as DESCRIPTION"); - StringBuilder builder = new StringBuilder(); - q.apply(builder, ClickHouseValues.convertToQuotedString(ClickHouseConnection.PROP_APPLICATION_NAME), - ClickHouseValues - .convertToQuotedString(connection.getClientInfo(ClickHouseConnection.PROP_APPLICATION_NAME)), - ClickHouseValues.convertToQuotedString("Application name")); - builder.append(" union all "); - q.apply(builder, ClickHouseValues.convertToQuotedString(ClickHouseConnection.PROP_CUSTOM_HTTP_HEADERS), - ClickHouseValues - .convertToQuotedString(connection.getClientInfo(ClickHouseConnection.PROP_CUSTOM_HTTP_HEADERS)), - ClickHouseValues.convertToQuotedString("Custom HTTP headers")); - builder.append(" union all "); - q.apply(builder, ClickHouseValues.convertToQuotedString(ClickHouseConnection.PROP_CUSTOM_HTTP_PARAMS), - ClickHouseValues - .convertToQuotedString(connection.getClientInfo(ClickHouseConnection.PROP_CUSTOM_HTTP_PARAMS)), - ClickHouseValues.convertToQuotedString("Customer HTTP query parameters")); - return query(builder.toString()); - } - - @Override - public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) - throws SQLException { - Map params = new HashMap<>(); - String databasePattern = connection.getJdbcConfig().useCatalog() ? catalog : schemaPattern; - boolean systemDatabase = ClickHouseChecker.isNullOrEmpty(databasePattern); - if (!systemDatabase) { - String databasePatternLower = databasePattern.toLowerCase(Locale.ROOT); - systemDatabase = "system".contains(databasePatternLower) - || "information_schema".contains(databasePatternLower); - } - params.put("filter", systemDatabase ? "1" : "0"); - params.put("pattern", ClickHouseChecker.isNullOrEmpty(functionNamePattern) ? "'%'" - : ClickHouseValues.convertToQuotedString(functionNamePattern)); - - String sql = ClickHouseParameterizedQuery.apply( - "select * from (select null as FUNCTION_CAT, 'system' as FUNCTION_SCHEM, name as FUNCTION_NAME,\n" - + "concat('case-', case_insensitive ? 'in' : '', 'sensitive function', is_aggregate ? 
' for aggregation' : '') as REMARKS," - + "1 as FUNCTION_TYPE, name as SPECIFIC_NAME from system.functions where name like :pattern union all\n" - + "select null as FUNCTION_CAT, 'system' as FUNCTION_SCHEM, name as FUNCTION_NAME,\n" - + "'case-sensitive table function' as REMARKS, 2 as FUNCTION_TYPE, name as SPECIFIC_NAME from system.table_functions\n" - + "where name not in (select name from system.functions) and name like :pattern) where :filter\n" - + "order by FUNCTION_CAT, FUNCTION_SCHEM, FUNCTION_NAME", - params); - return query(sql); - } - - @Override - public ResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern, - String columnNamePattern) throws SQLException { - return empty("FUNCTION_CAT Nullable(String), FUNCTION_SCHEM Nullable(String), FUNCTION_NAME String," - + "COLUMN_NAME String, COLUMN_TYPE Int16, DATA_TYPE Int32, TYPE_NAME String, PRECISION Int32," - + "LENGTH Int32, SCALE Int16, RADIX Int16, NULLABLE Int16, REMARKS String, CHAR_OCTET_LENGTH Int32," - + "ORDINAL_POSITION Int32, IS_NULLABLE String, SPECIFIC_NAME String"); - } - - @Override - public ResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern, - String columnNamePattern) throws SQLException { - return empty("TABLE_CAT Nullable(String), TABLE_SCHEM Nullable(String), TABLE_NAME String, " - + "COLUMN_NAME String, DATA_TYPE Int32, COLUMN_SIZE Int32, DECIMAL_DIGITS Int32, " - + "NUM_PREC_RADIX Int32, COLUMN_USAGE String, REMARKS Nullable(String), " - + "CHAR_OCTET_LENGTH Int32, IS_NULLABLE String"); - } - - @Override - public boolean generatedKeyAlwaysReturned() throws SQLException { - return false; - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSetMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSetMetaData.java deleted file mode 100644 index 13dfc5079..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseResultSetMetaData.java +++ /dev/null @@ -1,156 +0,0 @@ -package com.clickhouse.jdbc; - -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.util.List; -import java.util.Map; - -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseUtils; - -public class ClickHouseResultSetMetaData extends JdbcWrapper implements ResultSetMetaData { - public static ResultSetMetaData of(JdbcConfig config, String database, String table, List columns, - JdbcTypeMapping mapper, Map> typeMap) throws SQLException { - if (config == null || database == null || table == null || columns == null) { - throw SqlExceptionUtils.clientError("Non-null config, database, table, and column list are required"); - } - - return new ClickHouseResultSetMetaData(config, database, table, columns, mapper, typeMap); - } - - private final JdbcConfig config; - private final String database; - private final String table; - private final List columns; - private final JdbcTypeMapping mapper; - private final Map> typeMap; - - protected ClickHouseResultSetMetaData(JdbcConfig config, String database, String table, - List columns, - JdbcTypeMapping mapper, Map> typeMap) { - this.config = config; - this.database = database; - this.table = table; - this.columns = columns; - this.mapper = mapper; - this.typeMap = typeMap; - } - - protected List getColumns() { - return this.columns; - } - - protected ClickHouseColumn getColumn(int index) throws SQLException { - if (index < 1 || index > columns.size()) { - throw SqlExceptionUtils.clientError( - 
ClickHouseUtils.format("Column index must between 1 and %d but we got %d", columns.size() + 1, - index)); - } - return columns.get(index - 1); - } - - @Override - public int getColumnCount() throws SQLException { - return columns.size(); - } - - @Override - public boolean isAutoIncrement(int column) throws SQLException { - return false; - } - - @Override - public boolean isCaseSensitive(int column) throws SQLException { - return true; - } - - @Override - public boolean isSearchable(int column) throws SQLException { - return true; - } - - @Override - public boolean isCurrency(int column) throws SQLException { - return false; - } - - @Override - public int isNullable(int column) throws SQLException { - return getColumn(column).isNullable() ? columnNullable : columnNoNulls; - } - - @Override - public boolean isSigned(int column) throws SQLException { - return getColumn(column).getDataType().isSigned(); - } - - @Override - public int getColumnDisplaySize(int column) throws SQLException { - return 80; - } - - @Override - public String getColumnLabel(int column) throws SQLException { - return getColumnName(column); - } - - @Override - public String getColumnName(int column) throws SQLException { - return getColumn(column).getColumnName(); - } - - @Override - public String getSchemaName(int column) throws SQLException { - return config.useSchema() ? database : ""; - } - - @Override - public int getPrecision(int column) throws SQLException { - return getColumn(column).getPrecision(); - } - - @Override - public int getScale(int column) throws SQLException { - return getColumn(column).getScale(); - } - - @Override - public String getTableName(int column) throws SQLException { - return table; - } - - @Override - public String getCatalogName(int column) throws SQLException { - return config.useCatalog() ? 
database : ""; - } - - @Override - public int getColumnType(int column) throws SQLException { - return mapper.toSqlType(getColumn(column), typeMap); - } - - @Override - public String getColumnTypeName(int column) throws SQLException { - return mapper.toNativeType(getColumn(column)); - } - - @Override - public boolean isReadOnly(int column) throws SQLException { - return true; - } - - @Override - public boolean isWritable(int column) throws SQLException { - return false; - } - - @Override - public boolean isDefinitelyWritable(int column) throws SQLException { - return false; - } - - @Override - public String getColumnClassName(int column) throws SQLException { - return mapper.toJavaClass(getColumn(column), typeMap).getCanonicalName(); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java index 72fedcaf9..85ebc750d 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -5,7 +5,7 @@ import java.util.Properties; import java.util.concurrent.Executor; -public class ConnectionImpl implements Connection { +public class ConnectionImpl implements Connection, JdbcWrapper { @Override public Statement createStatement() throws SQLException { return null; @@ -295,14 +295,4 @@ public void setShardingKey(ShardingKey shardingKey, ShardingKey superShardingKey public void setShardingKey(ShardingKey shardingKey) throws SQLException { Connection.super.setShardingKey(shardingKey); } - - @Override - public T unwrap(Class iface) throws SQLException { - return null; - } - - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - return false; - } } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java index 6d605b28c..243ebbbf6 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java @@ -14,17 +14,6 @@ public class ResultSetImpl implements ResultSet, JdbcWrapper { private static final Logger log = LoggerFactory.getLogger(ResultSetImpl.class); - - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - return JdbcWrapper.super.isWrapperFor(iface); - } - - @Override - public T unwrap(Class iface) throws SQLException { - return JdbcWrapper.super.unwrap(iface); - } - @Override public boolean next() throws SQLException { return false; diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java index 673fa0127..983a720d0 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java @@ -6,7 +6,7 @@ import java.sql.SQLWarning; import java.sql.Statement; -public class StatementImpl implements Statement { +public class StatementImpl implements Statement, JdbcWrapper { @Override public ResultSet executeQuery(String sql) throws SQLException { return null; @@ -276,14 +276,4 @@ public boolean isSimpleIdentifier(String identifier) throws SQLException { public String enquoteNCharLiteral(String val) throws SQLException { return Statement.super.enquoteNCharLiteral(val); } - - @Override - public T unwrap(Class iface) throws SQLException { - return null; - } - - @Override - public boolean 
isWrapperFor(Class iface) throws SQLException { - return false; - } } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseDatabaseMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseDatabaseMetaData.java new file mode 100644 index 000000000..c588506b4 --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseDatabaseMetaData.java @@ -0,0 +1,901 @@ +package com.clickhouse.jdbc.metadata; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.RowIdLifetime; +import java.sql.SQLException; + + +import com.clickhouse.jdbc.JdbcWrapper; +import com.clickhouse.logging.Logger; +import com.clickhouse.logging.LoggerFactory; + +public class ClickHouseDatabaseMetaData implements DatabaseMetaData, JdbcWrapper { + private static final Logger log = LoggerFactory.getLogger(ClickHouseDatabaseMetaData.class); + + @Override + public boolean allProceduresAreCallable() throws SQLException { + return false; + } + + @Override + public boolean allTablesAreSelectable() throws SQLException { + return false; + } + + @Override + public String getURL() throws SQLException { + return ""; + } + + @Override + public String getUserName() throws SQLException { + return ""; + } + + @Override + public boolean isReadOnly() throws SQLException { + return false; + } + + @Override + public boolean nullsAreSortedHigh() throws SQLException { + return false; + } + + @Override + public boolean nullsAreSortedLow() throws SQLException { + return false; + } + + @Override + public boolean nullsAreSortedAtStart() throws SQLException { + return false; + } + + @Override + public boolean nullsAreSortedAtEnd() throws SQLException { + return false; + } + + @Override + public String getDatabaseProductName() throws SQLException { + return ""; + } + + @Override + public String getDatabaseProductVersion() throws SQLException { + return ""; + } + + @Override + public String getDriverName() throws SQLException { + return ""; + } + + @Override + public String getDriverVersion() throws SQLException { + return ""; + } + + @Override + public int getDriverMajorVersion() { + return 0; + } + + @Override + public int getDriverMinorVersion() { + return 0; + } + + @Override + public boolean usesLocalFiles() throws SQLException { + return false; + } + + @Override + public boolean usesLocalFilePerTable() throws SQLException { + return false; + } + + @Override + public boolean supportsMixedCaseIdentifiers() throws SQLException { + return false; + } + + @Override + public boolean storesUpperCaseIdentifiers() throws SQLException { + return false; + } + + @Override + public boolean storesLowerCaseIdentifiers() throws SQLException { + return false; + } + + @Override + public boolean storesMixedCaseIdentifiers() throws SQLException { + return false; + } + + @Override + public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { + return false; + } + + @Override + public boolean storesUpperCaseQuotedIdentifiers() throws SQLException { + return false; + } + + @Override + public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { + return false; + } + + @Override + public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { + return false; + } + + @Override + public String getIdentifierQuoteString() throws SQLException { + return ""; + } + + @Override + public String getSQLKeywords() throws SQLException { + return ""; + } + + @Override + public String getNumericFunctions() throws 
SQLException { + return ""; + } + + @Override + public String getStringFunctions() throws SQLException { + return ""; + } + + @Override + public String getSystemFunctions() throws SQLException { + return ""; + } + + @Override + public String getTimeDateFunctions() throws SQLException { + return ""; + } + + @Override + public String getSearchStringEscape() throws SQLException { + return ""; + } + + @Override + public String getExtraNameCharacters() throws SQLException { + return ""; + } + + @Override + public boolean supportsAlterTableWithAddColumn() throws SQLException { + return false; + } + + @Override + public boolean supportsAlterTableWithDropColumn() throws SQLException { + return false; + } + + @Override + public boolean supportsColumnAliasing() throws SQLException { + return false; + } + + @Override + public boolean nullPlusNonNullIsNull() throws SQLException { + return false; + } + + @Override + public boolean supportsConvert() throws SQLException { + return false; + } + + @Override + public boolean supportsConvert(int fromType, int toType) throws SQLException { + return false; + } + + @Override + public boolean supportsTableCorrelationNames() throws SQLException { + return false; + } + + @Override + public boolean supportsDifferentTableCorrelationNames() throws SQLException { + return false; + } + + @Override + public boolean supportsExpressionsInOrderBy() throws SQLException { + return false; + } + + @Override + public boolean supportsOrderByUnrelated() throws SQLException { + return false; + } + + @Override + public boolean supportsGroupBy() throws SQLException { + return false; + } + + @Override + public boolean supportsGroupByUnrelated() throws SQLException { + return false; + } + + @Override + public boolean supportsGroupByBeyondSelect() throws SQLException { + return false; + } + + @Override + public boolean supportsLikeEscapeClause() throws SQLException { + return false; + } + + @Override + public boolean supportsMultipleResultSets() throws SQLException { + return false; + } + + @Override + public boolean supportsMultipleTransactions() throws SQLException { + return false; + } + + @Override + public boolean supportsNonNullableColumns() throws SQLException { + return false; + } + + @Override + public boolean supportsMinimumSQLGrammar() throws SQLException { + return false; + } + + @Override + public boolean supportsCoreSQLGrammar() throws SQLException { + return false; + } + + @Override + public boolean supportsExtendedSQLGrammar() throws SQLException { + return false; + } + + @Override + public boolean supportsANSI92EntryLevelSQL() throws SQLException { + return false; + } + + @Override + public boolean supportsANSI92IntermediateSQL() throws SQLException { + return false; + } + + @Override + public boolean supportsANSI92FullSQL() throws SQLException { + return false; + } + + @Override + public boolean supportsIntegrityEnhancementFacility() throws SQLException { + return false; + } + + @Override + public boolean supportsOuterJoins() throws SQLException { + return false; + } + + @Override + public boolean supportsFullOuterJoins() throws SQLException { + return false; + } + + @Override + public boolean supportsLimitedOuterJoins() throws SQLException { + return false; + } + + @Override + public String getSchemaTerm() throws SQLException { + return ""; + } + + @Override + public String getProcedureTerm() throws SQLException { + return ""; + } + + @Override + public String getCatalogTerm() throws SQLException { + return ""; + } + + @Override + public boolean isCatalogAtStart() 
throws SQLException { + return false; + } + + @Override + public String getCatalogSeparator() throws SQLException { + return ""; + } + + @Override + public boolean supportsSchemasInDataManipulation() throws SQLException { + return false; + } + + @Override + public boolean supportsSchemasInProcedureCalls() throws SQLException { + return false; + } + + @Override + public boolean supportsSchemasInTableDefinitions() throws SQLException { + return false; + } + + @Override + public boolean supportsSchemasInIndexDefinitions() throws SQLException { + return false; + } + + @Override + public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException { + return false; + } + + @Override + public boolean supportsCatalogsInDataManipulation() throws SQLException { + return false; + } + + @Override + public boolean supportsCatalogsInProcedureCalls() throws SQLException { + return false; + } + + @Override + public boolean supportsCatalogsInTableDefinitions() throws SQLException { + return false; + } + + @Override + public boolean supportsCatalogsInIndexDefinitions() throws SQLException { + return false; + } + + @Override + public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException { + return false; + } + + @Override + public boolean supportsPositionedDelete() throws SQLException { + return false; + } + + @Override + public boolean supportsPositionedUpdate() throws SQLException { + return false; + } + + @Override + public boolean supportsSelectForUpdate() throws SQLException { + return false; + } + + @Override + public boolean supportsStoredProcedures() throws SQLException { + return false; + } + + @Override + public boolean supportsSubqueriesInComparisons() throws SQLException { + return false; + } + + @Override + public boolean supportsSubqueriesInExists() throws SQLException { + return false; + } + + @Override + public boolean supportsSubqueriesInIns() throws SQLException { + return false; + } + + @Override + public boolean supportsSubqueriesInQuantifieds() throws SQLException { + return false; + } + + @Override + public boolean supportsCorrelatedSubqueries() throws SQLException { + return false; + } + + @Override + public boolean supportsUnion() throws SQLException { + return false; + } + + @Override + public boolean supportsUnionAll() throws SQLException { + return false; + } + + @Override + public boolean supportsOpenCursorsAcrossCommit() throws SQLException { + return false; + } + + @Override + public boolean supportsOpenCursorsAcrossRollback() throws SQLException { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossCommit() throws SQLException { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossRollback() throws SQLException { + return false; + } + + @Override + public int getMaxBinaryLiteralLength() throws SQLException { + return 0; + } + + @Override + public int getMaxCharLiteralLength() throws SQLException { + return 0; + } + + @Override + public int getMaxColumnNameLength() throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInGroupBy() throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInIndex() throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInOrderBy() throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInSelect() throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInTable() throws SQLException { + return 0; + } + + @Override + public int getMaxConnections() throws SQLException 
{ + return 0; + } + + @Override + public int getMaxCursorNameLength() throws SQLException { + return 0; + } + + @Override + public int getMaxIndexLength() throws SQLException { + return 0; + } + + @Override + public int getMaxSchemaNameLength() throws SQLException { + return 0; + } + + @Override + public int getMaxProcedureNameLength() throws SQLException { + return 0; + } + + @Override + public int getMaxCatalogNameLength() throws SQLException { + return 0; + } + + @Override + public int getMaxRowSize() throws SQLException { + return 0; + } + + @Override + public boolean doesMaxRowSizeIncludeBlobs() throws SQLException { + return false; + } + + @Override + public int getMaxStatementLength() throws SQLException { + return 0; + } + + @Override + public int getMaxStatements() throws SQLException { + return 0; + } + + @Override + public int getMaxTableNameLength() throws SQLException { + return 0; + } + + @Override + public int getMaxTablesInSelect() throws SQLException { + return 0; + } + + @Override + public int getMaxUserNameLength() throws SQLException { + return 0; + } + + @Override + public int getDefaultTransactionIsolation() throws SQLException { + return 0; + } + + @Override + public boolean supportsTransactions() throws SQLException { + return false; + } + + @Override + public boolean supportsTransactionIsolationLevel(int level) throws SQLException { + return false; + } + + @Override + public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException { + return false; + } + + @Override + public boolean supportsDataManipulationTransactionsOnly() throws SQLException { + return false; + } + + @Override + public boolean dataDefinitionCausesTransactionCommit() throws SQLException { + return false; + } + + @Override + public boolean dataDefinitionIgnoredInTransactions() throws SQLException { + return false; + } + + @Override + public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern) throws SQLException { + return null; + } + + @Override + public ResultSet getProcedureColumns(String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) throws SQLException { + return null; + } + + @Override + public ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types) throws SQLException { + return null; + } + + @Override + public ResultSet getSchemas() throws SQLException { + return null; + } + + @Override + public ResultSet getCatalogs() throws SQLException { + return null; + } + + @Override + public ResultSet getTableTypes() throws SQLException { + return null; + } + + @Override + public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException { + return null; + } + + @Override + public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern) throws SQLException { + return null; + } + + @Override + public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { + return null; + } + + @Override + public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable) throws SQLException { + return null; + } + + @Override + public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException { + return null; + } + + @Override + public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException { 
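+        // Placeholder only. ClickHouse does not enforce primary-key constraints, so a real
+        // implementation here is a design decision rather than a straight port. One possible
+        // approach (a sketch/assumption, not something this patch implements) would be to map
+        // the sorting/primary key reported by the server onto the standard JDBC columns
+        // (TABLE_CAT, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, KEY_SEQ, PK_NAME), e.g. by querying:
+        //   SELECT name, position FROM system.columns
+        //   WHERE database = ? AND table = ? AND is_in_primary_key = 1
+        // Until then this stub keeps the previous driver's behaviour of returning no rows.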
+ return null; + } + + @Override + public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException { + return null; + } + + @Override + public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException { + return null; + } + + @Override + public ResultSet getCrossReference(String parentCatalog, String parentSchema, String parentTable, String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException { + return null; + } + + @Override + public ResultSet getTypeInfo() throws SQLException { + return null; + } + + @Override + public ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate) throws SQLException { + return null; + } + + @Override + public boolean supportsResultSetType(int type) throws SQLException { + return false; + } + + @Override + public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException { + return false; + } + + @Override + public boolean ownUpdatesAreVisible(int type) throws SQLException { + return false; + } + + @Override + public boolean ownDeletesAreVisible(int type) throws SQLException { + return false; + } + + @Override + public boolean ownInsertsAreVisible(int type) throws SQLException { + return false; + } + + @Override + public boolean othersUpdatesAreVisible(int type) throws SQLException { + return false; + } + + @Override + public boolean othersDeletesAreVisible(int type) throws SQLException { + return false; + } + + @Override + public boolean othersInsertsAreVisible(int type) throws SQLException { + return false; + } + + @Override + public boolean updatesAreDetected(int type) throws SQLException { + return false; + } + + @Override + public boolean deletesAreDetected(int type) throws SQLException { + return false; + } + + @Override + public boolean insertsAreDetected(int type) throws SQLException { + return false; + } + + @Override + public boolean supportsBatchUpdates() throws SQLException { + return false; + } + + @Override + public ResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types) throws SQLException { + return null; + } + + @Override + public Connection getConnection() throws SQLException { + return null; + } + + @Override + public boolean supportsSavepoints() throws SQLException { + return false; + } + + @Override + public boolean supportsNamedParameters() throws SQLException { + return false; + } + + @Override + public boolean supportsMultipleOpenResults() throws SQLException { + return false; + } + + @Override + public boolean supportsGetGeneratedKeys() throws SQLException { + return false; + } + + @Override + public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) throws SQLException { + return null; + } + + @Override + public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { + return null; + } + + @Override + public ResultSet getAttributes(String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) throws SQLException { + return null; + } + + @Override + public boolean supportsResultSetHoldability(int holdability) throws SQLException { + return false; + } + + @Override + public int getResultSetHoldability() throws SQLException { + return 0; + } + + @Override + public int getDatabaseMajorVersion() throws SQLException { + return 0; + } + + @Override + public int getDatabaseMinorVersion() throws SQLException { + return 0; + } + 
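+    // The database/JDBC version getters in this stub all return 0 for now. A likely follow-up
+    // (an assumption, not part of this patch) is to derive the database major/minor version from
+    // the server version string (for example the value returned by "SELECT version()" or the
+    // version already exposed by the underlying client), and to report the supported JDBC spec
+    // version (4.2 or 4.3) as fixed constants from the methods below.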
+ @Override + public int getJDBCMajorVersion() throws SQLException { + return 0; + } + + @Override + public int getJDBCMinorVersion() throws SQLException { + return 0; + } + + @Override + public int getSQLStateType() throws SQLException { + return 0; + } + + @Override + public boolean locatorsUpdateCopy() throws SQLException { + return false; + } + + @Override + public boolean supportsStatementPooling() throws SQLException { + return false; + } + + @Override + public RowIdLifetime getRowIdLifetime() throws SQLException { + return null; + } + + @Override + public ResultSet getSchemas(String catalog, String schemaPattern) throws SQLException { + return null; + } + + @Override + public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException { + return false; + } + + @Override + public boolean autoCommitFailureClosesAllResultSets() throws SQLException { + return false; + } + + @Override + public ResultSet getClientInfoProperties() throws SQLException { + return null; + } + + @Override + public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) throws SQLException { + return null; + } + + @Override + public ResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) throws SQLException { + return null; + } + + @Override + public ResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException { + return null; + } + + @Override + public boolean generatedKeyAlwaysReturned() throws SQLException { + return false; + } + + @Override + public long getMaxLogicalLobSize() throws SQLException { + return DatabaseMetaData.super.getMaxLogicalLobSize(); + } + + @Override + public boolean supportsRefCursors() throws SQLException { + return DatabaseMetaData.super.supportsRefCursors(); + } + + @Override + public boolean supportsSharding() throws SQLException { + return DatabaseMetaData.super.supportsSharding(); + } +} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseParameterMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseParameterMetaData.java new file mode 100644 index 000000000..196e9b40f --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseParameterMetaData.java @@ -0,0 +1,73 @@ +package com.clickhouse.jdbc.metadata; + +import com.clickhouse.data.ClickHouseColumn; +import com.clickhouse.jdbc.JdbcWrapper; + +import java.sql.ParameterMetaData; +import java.sql.SQLException; +import java.util.List; + +public class ClickHouseParameterMetaData implements ParameterMetaData, JdbcWrapper { + private final List params; + + protected ClickHouseParameterMetaData(List params) { + if (params == null) { + throw new IllegalArgumentException("Parameters array cannot be null."); + } + + this.params = params; + } + + protected ClickHouseColumn getParam(int param) throws SQLException { + if (param < 1 || param > params.size()) { + throw new SQLException("Parameter index out of range: " + param); + } + + return params.get(param - 1); + } + + @Override + public int getParameterCount() throws SQLException { + return params.size(); + } + + @Override + public int isNullable(int param) throws SQLException { + return getParam(param).isNullable() ? 
parameterNullable : parameterNoNulls; + } + + @Override + public boolean isSigned(int param) throws SQLException { + return getParam(param).getDataType().isSigned(); + } + + @Override + public int getPrecision(int param) throws SQLException { + return getParam(param).getPrecision(); + } + + @Override + public int getScale(int param) throws SQLException { + return getParam(param).getScale(); + } + + @Override + public int getParameterType(int param) throws SQLException { + return 0; + } + + @Override + public String getParameterTypeName(int param) throws SQLException { + return ""; + } + + @Override + public String getParameterClassName(int param) throws SQLException { + return ""; + } + + @Override + public int getParameterMode(int param) throws SQLException { + return 0; + } +} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseResultSetMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseResultSetMetaData.java new file mode 100644 index 000000000..fce04f922 --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseResultSetMetaData.java @@ -0,0 +1,114 @@ +package com.clickhouse.jdbc.metadata; + +import java.sql.ResultSetMetaData; +import java.sql.SQLException; + +import com.clickhouse.jdbc.JdbcWrapper; + +public class ClickHouseResultSetMetaData implements ResultSetMetaData, JdbcWrapper { + + @Override + public int getColumnCount() throws SQLException { + return 0; + } + + @Override + public boolean isAutoIncrement(int column) throws SQLException { + return false; + } + + @Override + public boolean isCaseSensitive(int column) throws SQLException { + return false; + } + + @Override + public boolean isSearchable(int column) throws SQLException { + return false; + } + + @Override + public boolean isCurrency(int column) throws SQLException { + return false; + } + + @Override + public int isNullable(int column) throws SQLException { + return 0; + } + + @Override + public boolean isSigned(int column) throws SQLException { + return false; + } + + @Override + public int getColumnDisplaySize(int column) throws SQLException { + return 0; + } + + @Override + public String getColumnLabel(int column) throws SQLException { + return ""; + } + + @Override + public String getColumnName(int column) throws SQLException { + return ""; + } + + @Override + public String getSchemaName(int column) throws SQLException { + return ""; + } + + @Override + public int getPrecision(int column) throws SQLException { + return 0; + } + + @Override + public int getScale(int column) throws SQLException { + return 0; + } + + @Override + public String getTableName(int column) throws SQLException { + return ""; + } + + @Override + public String getCatalogName(int column) throws SQLException { + return ""; + } + + @Override + public int getColumnType(int column) throws SQLException { + return 0; + } + + @Override + public String getColumnTypeName(int column) throws SQLException { + return ""; + } + + @Override + public boolean isReadOnly(int column) throws SQLException { + return false; + } + + @Override + public boolean isWritable(int column) throws SQLException { + return false; + } + + @Override + public boolean isDefinitelyWritable(int column) throws SQLException { + return false; + } + + @Override + public String getColumnClassName(int column) throws SQLException { + return ""; + } +} From ec2346ca6f3331d385119139e6148f2ce4050ea0 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Wed, 4 Sep 2024 04:03:09 -0400 Subject: [PATCH 
05/21] Update PreparedStatementImpl.java --- .../jdbc/PreparedStatementImpl.java | 282 +----------------- 1 file changed, 1 insertion(+), 281 deletions(-) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java index c65b92b0c..0ac4a0782 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java @@ -7,7 +7,7 @@ import java.sql.*; import java.util.Calendar; -public class PreparedStatementImpl extends StatementImpl implements PreparedStatement { +public class PreparedStatementImpl extends StatementImpl implements PreparedStatement, JdbcWrapper { @Override public ResultSet executeQuery() throws SQLException { return null; @@ -297,284 +297,4 @@ public void setObject(int parameterIndex, Object x, SQLType targetSqlType) throw public long executeLargeUpdate() throws SQLException { return PreparedStatement.super.executeLargeUpdate(); } - - @Override - public ResultSet executeQuery(String sql) throws SQLException { - return null; - } - - @Override - public int executeUpdate(String sql) throws SQLException { - return 0; - } - - @Override - public void close() throws SQLException { - - } - - @Override - public int getMaxFieldSize() throws SQLException { - return 0; - } - - @Override - public void setMaxFieldSize(int max) throws SQLException { - - } - - @Override - public int getMaxRows() throws SQLException { - return 0; - } - - @Override - public void setMaxRows(int max) throws SQLException { - - } - - @Override - public void setEscapeProcessing(boolean enable) throws SQLException { - - } - - @Override - public int getQueryTimeout() throws SQLException { - return 0; - } - - @Override - public void setQueryTimeout(int seconds) throws SQLException { - - } - - @Override - public void cancel() throws SQLException { - - } - - @Override - public SQLWarning getWarnings() throws SQLException { - return null; - } - - @Override - public void clearWarnings() throws SQLException { - - } - - @Override - public void setCursorName(String name) throws SQLException { - - } - - @Override - public boolean execute(String sql) throws SQLException { - return false; - } - - @Override - public ResultSet getResultSet() throws SQLException { - return null; - } - - @Override - public int getUpdateCount() throws SQLException { - return 0; - } - - @Override - public boolean getMoreResults() throws SQLException { - return false; - } - - @Override - public void setFetchDirection(int direction) throws SQLException { - - } - - @Override - public int getFetchDirection() throws SQLException { - return 0; - } - - @Override - public void setFetchSize(int rows) throws SQLException { - - } - - @Override - public int getFetchSize() throws SQLException { - return 0; - } - - @Override - public int getResultSetConcurrency() throws SQLException { - return 0; - } - - @Override - public int getResultSetType() throws SQLException { - return 0; - } - - @Override - public void addBatch(String sql) throws SQLException { - - } - - @Override - public void clearBatch() throws SQLException { - - } - - @Override - public int[] executeBatch() throws SQLException { - return new int[0]; - } - - @Override - public Connection getConnection() throws SQLException { - return null; - } - - @Override - public boolean getMoreResults(int current) throws SQLException { - return false; - } - - @Override - public ResultSet getGeneratedKeys() 
throws SQLException { - return null; - } - - @Override - public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - return 0; - } - - @Override - public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { - return 0; - } - - @Override - public int executeUpdate(String sql, String[] columnNames) throws SQLException { - return 0; - } - - @Override - public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { - return false; - } - - @Override - public boolean execute(String sql, int[] columnIndexes) throws SQLException { - return false; - } - - @Override - public boolean execute(String sql, String[] columnNames) throws SQLException { - return false; - } - - @Override - public int getResultSetHoldability() throws SQLException { - return 0; - } - - @Override - public boolean isClosed() throws SQLException { - return false; - } - - @Override - public void setPoolable(boolean poolable) throws SQLException { - - } - - @Override - public boolean isPoolable() throws SQLException { - return false; - } - - @Override - public void closeOnCompletion() throws SQLException { - - } - - @Override - public boolean isCloseOnCompletion() throws SQLException { - return false; - } - - @Override - public long getLargeUpdateCount() throws SQLException { - return PreparedStatement.super.getLargeUpdateCount(); - } - - @Override - public void setLargeMaxRows(long max) throws SQLException { - PreparedStatement.super.setLargeMaxRows(max); - } - - @Override - public long getLargeMaxRows() throws SQLException { - return PreparedStatement.super.getLargeMaxRows(); - } - - @Override - public long[] executeLargeBatch() throws SQLException { - return PreparedStatement.super.executeLargeBatch(); - } - - @Override - public long executeLargeUpdate(String sql) throws SQLException { - return PreparedStatement.super.executeLargeUpdate(sql); - } - - @Override - public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - return PreparedStatement.super.executeLargeUpdate(sql, autoGeneratedKeys); - } - - @Override - public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException { - return PreparedStatement.super.executeLargeUpdate(sql, columnIndexes); - } - - @Override - public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException { - return PreparedStatement.super.executeLargeUpdate(sql, columnNames); - } - - @Override - public String enquoteLiteral(String val) throws SQLException { - return PreparedStatement.super.enquoteLiteral(val); - } - - @Override - public String enquoteIdentifier(String identifier, boolean alwaysQuote) throws SQLException { - return PreparedStatement.super.enquoteIdentifier(identifier, alwaysQuote); - } - - @Override - public boolean isSimpleIdentifier(String identifier) throws SQLException { - return PreparedStatement.super.isSimpleIdentifier(identifier); - } - - @Override - public String enquoteNCharLiteral(String val) throws SQLException { - return PreparedStatement.super.enquoteNCharLiteral(val); - } - - @Override - public T unwrap(Class iface) throws SQLException { - return null; - } - - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - return false; - } } From 1bb525a8f889dc36d4bb0a30156072915b6a82b5 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Thu, 5 Sep 2024 05:26:54 -0400 Subject: [PATCH 06/21] Cleaning up more older implementation code, we can make use of it in version history --- .../jdbc/{parser => }/ClickHouseSqlUtils.java | 2 +- 
.../com/clickhouse/jdbc/ConnectionImpl.java | 4 +- .../jdbc/{DriverImpl.java => Driver.java} | 4 +- .../java/com/clickhouse/jdbc/JdbcConfig.java | 392 ------------------ .../com/clickhouse/jdbc/JdbcParseHandler.java | 174 -------- .../com/clickhouse/jdbc/JdbcTypeMapping.java | 333 +-------------- ...eJdbcUrlParser.java => JdbcUrlParser.java} | 4 +- .../jdbc/PreparedStatementImpl.java | 6 + .../clickhouse/jdbc/SqlExceptionUtils.java | 158 ------- .../com/clickhouse/jdbc/StatementImpl.java | 5 + ...aseMetaData.java => DatabaseMetaData.java} | 11 +- ...erMetaData.java => ParameterMetaData.java} | 5 +- ...etMetaData.java => ResultSetMetaData.java} | 3 +- .../jdbc/parser/ClickHouseSqlStatement.java | 364 ---------------- .../clickhouse/jdbc/parser/LanguageType.java | 9 - .../clickhouse/jdbc/parser/OperationType.java | 5 - .../clickhouse/jdbc/parser/ParseHandler.java | 57 --- .../clickhouse/jdbc/parser/StatementType.java | 54 --- .../src/main/java9/module-info.java | 18 - .../META-INF/services/java.sql.Driver | 2 +- .../clickhouse/jdbc/DataSourceImplTest.java | 6 +- ...ataTest.java => DatabaseMetaDataTest.java} | 6 +- .../{DriverImplTest.java => DriverTest.java} | 6 +- .../ClickHouseConnectionImplTest.java | 6 +- ...ParserTest.java => JdbcUrlParserTest.java} | 74 ++-- .../jdbc/parser/ClickHouseSqlUtilsTest.java | 1 + 26 files changed, 84 insertions(+), 1625 deletions(-) rename clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/{parser => }/ClickHouseSqlUtils.java (98%) rename clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/{DriverImpl.java => Driver.java} (94%) delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcConfig.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcParseHandler.java rename clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/{ClickHouseJdbcUrlParser.java => JdbcUrlParser.java} (98%) delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/SqlExceptionUtils.java rename clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/{ClickHouseDatabaseMetaData.java => DatabaseMetaData.java} (98%) rename clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/{ClickHouseParameterMetaData.java => ParameterMetaData.java} (89%) rename clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/{ClickHouseResultSetMetaData.java => ResultSetMetaData.java} (95%) delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ClickHouseSqlStatement.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/LanguageType.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/OperationType.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ParseHandler.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/StatementType.java delete mode 100644 clickhouse-jdbc/src/main/java9/module-info.java rename clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/{ClickHouseDatabaseMetaDataTest.java => DatabaseMetaDataTest.java} (98%) rename clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/{DriverImplTest.java => DriverTest.java} (88%) rename clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/{ClickHouseJdbcUrlParserTest.java => JdbcUrlParserTest.java} (67%) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtils.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseSqlUtils.java similarity index 98% rename from 
clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtils.java rename to clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseSqlUtils.java index 8b42372d6..c23ba5638 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtils.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseSqlUtils.java @@ -1,4 +1,4 @@ -package com.clickhouse.jdbc.parser; +package com.clickhouse.jdbc; public final class ClickHouseSqlUtils { public static boolean isQuote(char ch) { diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java index 85ebc750d..f35c15c57 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -8,12 +8,12 @@ public class ConnectionImpl implements Connection, JdbcWrapper { @Override public Statement createStatement() throws SQLException { - return null; + return new StatementImpl(this); } @Override public PreparedStatement prepareStatement(String sql) throws SQLException { - return null; + return new PreparedStatementImpl(this, sql); } @Override diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DriverImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java similarity index 94% rename from clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DriverImpl.java rename to clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java index f8a65d955..7b070f813 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DriverImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java @@ -9,8 +9,8 @@ /** * JDBC driver for ClickHouse. */ -public class DriverImpl implements Driver { - private static final Logger log = LoggerFactory.getLogger(DriverImpl.class); +public class Driver implements java.sql.Driver { + private static final Logger log = LoggerFactory.getLogger(Driver.class); @Override diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcConfig.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcConfig.java deleted file mode 100644 index 0802c478c..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcConfig.java +++ /dev/null @@ -1,392 +0,0 @@ -package com.clickhouse.jdbc; - -import java.sql.DriverPropertyInfo; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Map.Entry; - -import com.clickhouse.config.ClickHouseOption; -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; - -/** - * JDBC-specific configuration. 
- */ -public class JdbcConfig { - private static final Logger log = LoggerFactory.getLogger(JdbcConfig.class); - - public static final String PROP_AUTO_COMMIT = "autoCommit"; - public static final String PROP_CREATE_DATABASE = "createDatabaseIfNotExist"; - public static final String PROP_CONTINUE_BATCH = "continueBatchOnError"; - public static final String PROP_DATABASE_TERM = "databaseTerm"; - public static final String PROP_DIALECT = "dialect"; - public static final String PROP_EXTERNAL_DATABASE = "externalDatabase"; - public static final String PROP_FETCH_SIZE = "fetchSize"; - public static final String PROP_LOCAL_FILE = "localFile"; - public static final String PROP_JDBC_COMPLIANT = "jdbcCompliant"; - public static final String PROP_NAMED_PARAM = "namedParameter"; - public static final String PROP_NULL_AS_DEFAULT = "nullAsDefault"; - public static final String PROP_TX_SUPPORT = "transactionSupport"; - public static final String PROP_TYPE_MAP = "typeMappings"; - public static final String PROP_WRAPPER_OBJ = "wrapperObject"; - - static final String TERM_COMMENT = "comment"; - static final String TERM_DATABASE = "database"; - static final String TERM_TABLE = "table"; - static final String TERM_CATALOG = "catalog"; - static final String TERM_SCHEMA = "schema"; - - private static final String BOOLEAN_FALSE = "false"; - private static final String BOOLEAN_TRUE = "true"; - - private static final String DEFAULT_AUTO_COMMIT = BOOLEAN_TRUE; - private static final String DEFAULT_CREATE_DATABASE = BOOLEAN_FALSE; - private static final String DEFAULT_CONTINUE_BATCH = BOOLEAN_FALSE; - private static final String DEFAULT_DATABASE_TERM = TERM_CATALOG; - private static final String DEFAULT_DIALECT = ""; - private static final String DEFAULT_EXTERNAL_DATABASE = BOOLEAN_TRUE; - private static final String DEFAULT_FETCH_SIZE = "0"; - private static final String DEFAULT_LOCAL_FILE = BOOLEAN_FALSE; - private static final String DEFAULT_JDBC_COMPLIANT = BOOLEAN_TRUE; - private static final String DEFAULT_NAMED_PARAM = BOOLEAN_FALSE; - private static final String DEFAULT_NULL_AS_DEFAULT = "0"; - private static final String DEFAULT_TX_SUPPORT = BOOLEAN_FALSE; - private static final String DEFAULT_TYPE_MAP = ""; - private static final String DEFAULT_WRAPPER_OBJ = BOOLEAN_FALSE; - - static DriverPropertyInfo newDriverProperty(String name, String defaultValue, String description, - String... 
choices) { - DriverPropertyInfo info = new DriverPropertyInfo(name, defaultValue); - info.description = description; - if (choices != null && choices.length > 0) { - info.choices = choices; - } - return info; - } - - public static List getDriverProperties() { - return Collections.unmodifiableList(new ArrayList<>(Arrays.asList( - newDriverProperty(PROP_AUTO_COMMIT, DEFAULT_AUTO_COMMIT, - "Whether to enable auto commit when connection is created."), - newDriverProperty(PROP_CREATE_DATABASE, DEFAULT_CREATE_DATABASE, - "Whether to automatically create database when it does not exist.", BOOLEAN_TRUE, - BOOLEAN_FALSE), - newDriverProperty(PROP_CONTINUE_BATCH, DEFAULT_CONTINUE_BATCH, - "Whether to continue batch process when error occurred.", BOOLEAN_TRUE, BOOLEAN_FALSE), - newDriverProperty(PROP_FETCH_SIZE, DEFAULT_FETCH_SIZE, - "Default fetch size, negative or zero means no preferred option."), - newDriverProperty(PROP_LOCAL_FILE, DEFAULT_LOCAL_FILE, - "Whether to use local file for INFILE/OUTFILE or not.", BOOLEAN_TRUE, BOOLEAN_FALSE), - newDriverProperty(PROP_JDBC_COMPLIANT, DEFAULT_JDBC_COMPLIANT, - "Whether to enable JDBC-compliant features like fake transaction and standard UPDATE and DELETE statements.", - BOOLEAN_TRUE, BOOLEAN_FALSE), - newDriverProperty(PROP_DATABASE_TERM, DEFAULT_DATABASE_TERM, - "Default JDBC term as synonymous to database.", TERM_CATALOG, TERM_SCHEMA), - newDriverProperty(PROP_DIALECT, DEFAULT_DIALECT, - "Dialect mainly for data type mapping, can be set to ansi or a full qualified class name implementing JdbcTypeMapping."), - newDriverProperty(PROP_EXTERNAL_DATABASE, DEFAULT_EXTERNAL_DATABASE, - "Whether to enable external database support or not.", BOOLEAN_TRUE, BOOLEAN_FALSE), - newDriverProperty(PROP_NAMED_PARAM, DEFAULT_NAMED_PARAM, - "Whether to use named parameter(e.g. :ts(DateTime64(6)) or :value etc.) instead of standard JDBC question mark placeholder.", - BOOLEAN_TRUE, BOOLEAN_FALSE), - newDriverProperty(PROP_NULL_AS_DEFAULT, DEFAULT_NULL_AS_DEFAULT, - "Default approach to handle null value, sets to 0 or negative number to throw exception when target column is not nullable, 1 to disable the null-check, and 2 or higher to replace null to default value of corresponding data type."), - newDriverProperty(PROP_TX_SUPPORT, DEFAULT_TX_SUPPORT, "Whether to enable transaction support or not.", - BOOLEAN_TRUE, BOOLEAN_FALSE), - newDriverProperty(PROP_TYPE_MAP, DEFAULT_TYPE_MAP, - "Default type mappings between ClickHouse data type and Java class. You can define multiple mappings using comma as separator."), - newDriverProperty(PROP_WRAPPER_OBJ, DEFAULT_WRAPPER_OBJ, - "Whether to return wrapper object like Array or Struct in ResultSet.getObject method.", - BOOLEAN_TRUE, BOOLEAN_FALSE)))); - } - - String removeAndGetPropertyValue(Properties props, String key) { - if (props == null || props.isEmpty() || key == null || key.isEmpty()) { - return null; - } - - // Remove JDBC-specific options so that they won't be treated as server settings - // at later stage. Default properties won't be used for the same reason. - Object raw = props.remove(key); - if (raw != null) { - this.properties.put(key, raw); - return raw.toString(); - } else { - return null; - } - } - - boolean extractBooleanValue(Properties props, String key, String defaultValue) { - String value = removeAndGetPropertyValue(props, key); - return Boolean.parseBoolean(value != null ? 
value : defaultValue); - } - - int extractIntValue(Properties props, String key, String defaultValue) { - String value = removeAndGetPropertyValue(props, key); - return Integer.parseInt(value != null ? value : defaultValue); - } - - // TODO return JdbcDialect - JdbcTypeMapping extractDialectValue(Properties props, String key, String defaultValue) { - String value = removeAndGetPropertyValue(props, key); - if (value == null) { - value = defaultValue; - } - - JdbcTypeMapping mapper; - if (ClickHouseChecker.isNullOrBlank(value)) { - mapper = JdbcTypeMapping.getDefaultMapping(); - } else if ("ansi".equalsIgnoreCase(value)) { - mapper = JdbcTypeMapping.getAnsiMapping(); - } else { - try { - Class clazz = JdbcConfig.class.getClassLoader().loadClass(value); - mapper = (JdbcTypeMapping) clazz.getConstructor().newInstance(); - } catch (Throwable t) { - log.warn("Failed to load custom JDBC type mapping [%s], due to: %s", value, t.getMessage()); - mapper = JdbcTypeMapping.getDefaultMapping(); - } - } - return mapper; - } - - String extractStringValue(Properties props, String key, String defaultValue) { - String value = removeAndGetPropertyValue(props, key); - return value != null ? value : defaultValue; - } - - Map> extractTypeMapValue(Properties props, String key, String defaultValue) { - String value = removeAndGetPropertyValue(props, key); - if (value == null) { - value = defaultValue; - } - - if (ClickHouseChecker.isNullOrBlank(value)) { - return Collections.emptyMap(); - } - - Map> map = new LinkedHashMap<>(); - ClassLoader loader = JdbcConfig.class.getClassLoader(); - for (Entry e : ClickHouseOption.toKeyValuePairs(value).entrySet()) { - Class clazz = null; - try { - clazz = loader.loadClass(e.getValue()); - } catch (Throwable t) { - log.warn("Failed to add mapping [%s]=[%s], due to: %s", e.getKey(), e.getValue(), t.getMessage()); - } - if (clazz != null) { - map.put(e.getKey(), clazz); - } - } - - return Collections.unmodifiableMap(map); - } - - private final Properties properties; - - private final boolean autoCommit; - private final boolean createDb; - private final boolean continueBatch; - private final int fetchSize; - private final boolean localFile; - private final boolean jdbcCompliant; - private final String databaseTerm; - private final JdbcTypeMapping dialect; - private final boolean externalDatabase; - private final boolean namedParameter; - private final int nullAsDefault; - private final boolean txSupport; - private final Map> typeMap; - private final boolean wrapperObject; - - public JdbcConfig() { - this(null); - } - - public JdbcConfig(Properties props) { - this.properties = new Properties(); - - this.autoCommit = extractBooleanValue(props, PROP_AUTO_COMMIT, DEFAULT_AUTO_COMMIT); - this.createDb = extractBooleanValue(props, PROP_CREATE_DATABASE, DEFAULT_CREATE_DATABASE); - this.continueBatch = extractBooleanValue(props, PROP_CONTINUE_BATCH, DEFAULT_CONTINUE_BATCH); - this.databaseTerm = extractStringValue(props, PROP_DATABASE_TERM, DEFAULT_DATABASE_TERM); - this.dialect = extractDialectValue(props, PROP_DIALECT, DEFAULT_DIALECT); - this.externalDatabase = extractBooleanValue(props, PROP_EXTERNAL_DATABASE, DEFAULT_EXTERNAL_DATABASE); - this.fetchSize = extractIntValue(props, PROP_FETCH_SIZE, DEFAULT_FETCH_SIZE); - this.localFile = extractBooleanValue(props, PROP_LOCAL_FILE, DEFAULT_LOCAL_FILE); - this.jdbcCompliant = extractBooleanValue(props, PROP_JDBC_COMPLIANT, DEFAULT_JDBC_COMPLIANT); - this.namedParameter = extractBooleanValue(props, PROP_NAMED_PARAM, DEFAULT_NAMED_PARAM); 
-        this.nullAsDefault = extractIntValue(props, PROP_NULL_AS_DEFAULT, DEFAULT_NULL_AS_DEFAULT);
-        this.txSupport = extractBooleanValue(props, PROP_TX_SUPPORT, DEFAULT_TX_SUPPORT);
-        this.typeMap = extractTypeMapValue(props, PROP_TYPE_MAP, DEFAULT_TYPE_MAP);
-        this.wrapperObject = extractBooleanValue(props, PROP_WRAPPER_OBJ, DEFAULT_WRAPPER_OBJ);
-    }
-
-    /**
-     * Checks whether auto commit should be enabled when creating a connection.
-     *
-     * @return true if auto commit should be enabled when creating connection; false
-     * otherwise
-     */
-    public boolean isAutoCommit() {
-        return autoCommit;
-    }
-
-    /**
-     * Checks whether database should be created automatically when it does not
-     * exist.
-     *
-     * @return true if database should be created automatically; false otherwise
-     */
-    public boolean isCreateDbIfNotExist() {
-        return createDb;
-    }
-
-    /**
-     * Checks whether batch processing should continue when an error occurs.
-     *
-     * @return true if should continue; false to throw exception and abort execution
-     */
-    public boolean isContinueBatchOnError() {
-        return continueBatch;
-    }
-
-    /**
-     * Gets default fetch size for query.
-     *
-     * @return default fetch size for query
-     */
-    public int getFetchSize() {
-        return fetchSize;
-    }
-
-    /**
-     * Checks whether to use local file for INFILE/OUTFILE.
-     *
-     * @return true to use local file for INFILE/OUTFILE; false otherwise
-     */
-    public boolean useLocalFile() {
-        return localFile;
-    }
-
-    /**
-     * Gets database term.
-     *
-     * @return non-null database term
-     */
-    public String getDatabaseTerm() {
-        return databaseTerm;
-    }
-
-    /**
-     * Checks whether to use catalog as synonymous to database.
-     *
-     * @return true if use catalog as synonymous to database; false otherwise
-     */
-    public boolean useCatalog() {
-        return TERM_CATALOG.equals(databaseTerm);
-    }
-
-    /**
-     * Checks whether to use schema as synonymous to database.
-     *
-     * @return true if use schema as synonymous to database; false otherwise
-     */
-    public boolean useSchema() {
-        return TERM_SCHEMA.equals(databaseTerm);
-    }
-
-    /**
-     * Gets JDBC dialect.
-     *
-     * @return non-null JDBC dialect
-     */
-    public JdbcTypeMapping getDialect() {
-        return dialect;
-    }
-
-    /**
-     * Checks whether external database is supported or not.
-     *
-     * @return true if external database is supported; false otherwise
-     */
-    public boolean isExternalDatabaseSupported() {
-        return externalDatabase;
-    }
-
-    /**
-     * Gets custom type map.
-     *
-     * @return non-null custom type map
-     */
-    public Map> getTypeMap() {
-        return typeMap;
-    }
-
-    /**
-     * Checks whether JDBC-compliant mode is enabled or not.
-     *
-     * @return true if JDBC-compliant mode is enabled; false otherwise
-     */
-    public boolean isJdbcCompliant() {
-        return jdbcCompliant;
-    }
-
-    /**
-     * Checks whether transaction support is enabled or not.
-     *
-     * @return true if transaction support is enabled; false otherwise
-     */
-    public boolean isTransactionSupported() {
-        return txSupport;
-    }
-
-    /**
-     * Gets default approach to handle null value.
-     *
-     * @return 0 or negative to throw exception, 1 to disable the null-check, and 2
-     * to reset null to default value of corresponding data type
-     */
-    public int getNullAsDefault() {
-        return nullAsDefault;
-    }
-
-    /**
-     * Checks whether named parameter should be used instead of JDBC standard
-     * question mark placeholder.
- * - * @return true if named parameter should be used; false otherwise - */ - public boolean useNamedParameter() { - return namedParameter; - } - - /** - * Checks whether {@link java.sql.Array} and {@link java.sql.Struct} should be - * returned for array and tuple when calling - * {@link java.sql.ResultSet#getObject(int)}. - * - * @return true if wrapper object should be returned instead of array / tuple; - * false otherwise - */ - public boolean useWrapperObject() { - return wrapperObject; - } - - /** - * Gets properties. - * - * @return non-null properties - */ - public Properties getProperties() { - Properties props = new Properties(); - props.putAll(this.properties); - return props; - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcParseHandler.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcParseHandler.java deleted file mode 100644 index 4aeb318c0..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcParseHandler.java +++ /dev/null @@ -1,174 +0,0 @@ -package com.clickhouse.jdbc; - -import java.util.List; -import java.util.Map; -import java.util.Set; - -import com.clickhouse.client.config.ClickHouseDefaults; -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.data.ClickHouseFormat; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; -import com.clickhouse.jdbc.parser.ParseHandler; -import com.clickhouse.jdbc.parser.StatementType; - -public class JdbcParseHandler extends ParseHandler { - private static final String SETTING_MUTATIONS_SYNC = "mutations_sync"; - - private static final JdbcParseHandler[] INSTANCES; - - static { - INSTANCES = new JdbcParseHandler[3 * 2]; - for (int i = 0, len = INSTANCES.length; i < len; i++) { - INSTANCES[i] = new JdbcParseHandler((i & 0b100) == 0b100, (i & 0b10) == 0b10, (i & 1) == 1); - } - }; - - public static final ParseHandler INSTANCE = INSTANCES[0]; - - public static JdbcParseHandler getInstance(boolean allowLightWeightDelete, boolean allowLightWeightUpdate, - boolean allowLocaleFile) { - return INSTANCES[(allowLightWeightDelete ? 0b100 : 0) | (allowLightWeightUpdate ? 0b10 : 0) - | (allowLocaleFile ? 1 : 0)]; - } - - private final boolean allowLocalFile; - private final boolean allowLightWeightDelete; - private final boolean allowLightWeightUpdate; - - private void addMutationSetting(String sql, StringBuilder builder, Map positions, - Map settings, int index) { - boolean hasSetting = settings != null && !settings.isEmpty(); - String setting = hasSetting ? 
settings.get(SETTING_MUTATIONS_SYNC) : null; - if (setting == null) { - String keyword = "SETTINGS"; - Integer settingsIndex = positions.get(keyword); - - if (settingsIndex == null) { - builder.append(sql.substring(index)).append(" SETTINGS mutations_sync=1"); - if (hasSetting) { - builder.append(','); - } - } else { - builder.append(sql.substring(index, settingsIndex)).append("SETTINGS mutations_sync=1,") - .append(sql.substring(settingsIndex + keyword.length())); - } - } else { - builder.append(sql.substring(index)); - } - } - - private ClickHouseSqlStatement handleDelete(String sql, StatementType stmtType, String cluster, String database, - String table, String input, String compressAlgorithm, String compressLevel, String format, String file, - List parameters, Map positions, Map settings, - Set tempTables) { - StringBuilder builder = new StringBuilder(); - int index = positions.get("DELETE"); - if (index > 0) { - builder.append(sql.substring(0, index)); - } - index = positions.get("FROM"); - Integer whereIdx = positions.get("WHERE"); - if (whereIdx != null) { - builder.append("ALTER TABLE "); - if (!ClickHouseChecker.isNullOrEmpty(database)) { - builder.append('`').append(database).append('`').append('.'); - } - builder.append('`').append(table).append('`').append(" DELETE "); - addMutationSetting(sql, builder, positions, settings, whereIdx); - } else { - builder.append("TRUNCATE TABLE").append(sql.substring(index + 4)); - } - return new ClickHouseSqlStatement(builder.toString(), stmtType, cluster, database, table, input, - compressAlgorithm, compressLevel, format, file, parameters, null, settings, null); - } - - private ClickHouseSqlStatement handleUpdate(String sql, StatementType stmtType, String cluster, String database, - String table, String input, String compressAlgorithm, String compressLevel, String format, String file, - List parameters, Map positions, Map settings, - Set tempTables) { - StringBuilder builder = new StringBuilder(); - int index = positions.get("UPDATE"); - if (index > 0) { - builder.append(sql.substring(0, index)); - } - builder.append("ALTER TABLE "); - index = positions.get("SET"); - if (!ClickHouseChecker.isNullOrEmpty(database)) { - builder.append('`').append(database).append('`').append('.'); - } - builder.append('`').append(table).append('`').append(" UPDATE"); // .append(sql.substring(index + 3)); - addMutationSetting(sql, builder, positions, settings, index + 3); - return new ClickHouseSqlStatement(builder.toString(), stmtType, cluster, database, table, input, - compressAlgorithm, compressLevel, format, file, parameters, null, settings, null); - } - - private ClickHouseSqlStatement handleInFileForInsertQuery(String sql, StatementType stmtType, String cluster, - String database, String table, String input, String compressAlgorithm, String compressLevel, String format, - String file, List parameters, Map positions, Map settings, - Set tempTables) { - StringBuilder builder = new StringBuilder(sql.length()); - builder.append(sql.substring(0, positions.get("FROM"))); - Integer index = positions.get("SETTINGS"); - if (index == null || index < 0) { - index = positions.get("FORMAT"); - } - if (index != null && index > 0) { - builder.append(sql.substring(index)); - } else { - ClickHouseFormat f = ClickHouseFormat.fromFileName(ClickHouseUtils.unescape(file)); - if (f == null) { - f = (ClickHouseFormat) ClickHouseDefaults.FORMAT.getDefaultValue(); - } - format = f.name(); - builder.append("FORMAT ").append(format); - } - return new 
ClickHouseSqlStatement(builder.toString(), stmtType, cluster, database, table, input, - compressAlgorithm, compressLevel, format, file, parameters, null, settings, null); - } - - private ClickHouseSqlStatement handleOutFileForSelectQuery(String sql, StatementType stmtType, String cluster, - String database, String table, String input, String compressAlgorithm, String compressLevel, String format, - String file, List parameters, Map positions, Map settings, - Set tempTables) { - StringBuilder builder = new StringBuilder(sql.length()); - builder.append(sql.substring(0, positions.get("INTO"))); - Integer index = positions.get("FORMAT"); - if (index != null && index > 0) { - builder.append(sql.substring(index)); - } - return new ClickHouseSqlStatement(builder.toString(), stmtType, cluster, database, table, input, - compressAlgorithm, compressLevel, format, file, parameters, null, settings, null); - } - - @Override - public ClickHouseSqlStatement handleStatement(String sql, StatementType stmtType, String cluster, String database, - String table, String input, String compressAlgorithm, String compressLevel, String format, String file, - List parameters, Map positions, Map settings, - Set tempTables) { - boolean hasFile = allowLocalFile && !ClickHouseChecker.isNullOrEmpty(file) && file.charAt(0) == '\''; - ClickHouseSqlStatement s = null; - if (stmtType == StatementType.DELETE) { - s = allowLightWeightDelete ? s - : handleDelete(sql, stmtType, cluster, database, table, input, compressAlgorithm, compressLevel, - format, file, parameters, positions, settings, tempTables); - } else if (stmtType == StatementType.UPDATE) { - s = allowLightWeightUpdate ? s - : handleUpdate(sql, stmtType, cluster, database, table, input, compressAlgorithm, compressLevel, - format, file, parameters, positions, settings, tempTables); - } else if (stmtType == StatementType.INSERT && hasFile) { - s = handleInFileForInsertQuery(sql, stmtType, cluster, database, table, input, compressAlgorithm, - compressLevel, format, file, parameters, positions, settings, tempTables); - } else if (stmtType == StatementType.SELECT && hasFile) { - s = handleOutFileForSelectQuery(sql, stmtType, cluster, database, table, input, compressAlgorithm, - compressLevel, format, file, parameters, positions, settings, tempTables); - } - return s; - } - - private JdbcParseHandler(boolean allowLightWeightDelete, boolean allowLightWeightUpdate, boolean allowLocalFile) { - this.allowLightWeightDelete = allowLightWeightDelete; - this.allowLightWeightUpdate = allowLightWeightUpdate; - this.allowLocalFile = allowLocalFile; - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcTypeMapping.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcTypeMapping.java index 2703e7853..eab0509ce 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcTypeMapping.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcTypeMapping.java @@ -27,319 +27,13 @@ * separately by {@link com.clickhouse.data.ClickHouseDataProcessor}. */ public class JdbcTypeMapping { - static final class AnsiTypeMapping extends JdbcTypeMapping { - static String toAnsiSqlType(ClickHouseDataType dataType, int precision, int scale, TimeZone tz) { - final String typeName; - switch (dataType) { - case Bool: - typeName = "BOOLEAN"; // or BIT(1)? - break; - case Date: - case Date32: - typeName = "DATE"; - break; - case DateTime: - case DateTime32: - case DateTime64: - typeName = (scale <= 0 ? 
new StringBuilder("TIMESTAMP") - : new StringBuilder("TIMESTAMP(").append(scale).append(')')) - .append(tz != null ? " WITH TIMEZONE" : "").toString(); - break; - case Int8: - typeName = "BYTE"; // NON-standard - break; - case UInt8: - case Int16: - typeName = "SMALLINT"; - break; - case UInt16: - case Int32: - typeName = "INTEGER"; - break; - case UInt32: - case Int64: - case IntervalYear: - case IntervalQuarter: - case IntervalMonth: - case IntervalWeek: - case IntervalDay: - case IntervalHour: - case IntervalMinute: - case IntervalSecond: - case IntervalMicrosecond: - case IntervalMillisecond: - case IntervalNanosecond: - typeName = "BIGINT"; - break; - case UInt64: - case Int128: - case UInt128: - case Int256: - case UInt256: - case Decimal: - case Decimal32: - case Decimal64: - case Decimal128: - case Decimal256: - typeName = new StringBuilder("DECIMAL(").append(precision).append(',') - .append(scale).append(')').toString(); - break; - case Float32: - typeName = "REAL"; - break; - case Float64: - typeName = "DOUBLE PRECISION"; - break; - case Point: - case Ring: - case Polygon: - case MultiPolygon: - typeName = "ARRAY"; - break; - case Enum8: - case Enum16: - case IPv4: - case IPv6: - case JSON: - case Object: - case FixedString: - case String: - case UUID: - typeName = "VARCHAR"; - break; - default: - typeName = "BINARY"; - break; - } - return typeName; - } - - static String toAnsiSqlType(ClickHouseColumn column, StringBuilder builder) { - final ClickHouseDataType dataType = column.getDataType(); - final String sqlType; - - if (dataType == ClickHouseDataType.SimpleAggregateFunction) { - sqlType = column.hasNestedColumn() ? toAnsiSqlType(column.getNestedColumns().get(0), builder) - : "BINARY"; - } else if (column.isArray()) { - sqlType = builder.append("ARRAY").append('(') - .append(toAnsiSqlType(column.getArrayBaseColumn(), builder)) - .append(')').toString(); - } else if (column.isMap()) { - return builder.append("MAP").append('(').append(toAnsiSqlType(column.getKeyInfo(), builder)).append(',') - .append(toAnsiSqlType(column.getValueInfo(), builder)) - .append(')').toString(); - } else if (column.isNested() || column.isTuple()) { - builder.append("STRUCT").append('('); - for (ClickHouseColumn c : column.getNestedColumns()) { - builder.append(toAnsiSqlType(c, builder)).append(','); - } - builder.setLength(builder.length() - 1); - sqlType = builder.append(')').toString(); - } else { - sqlType = toAnsiSqlType(dataType, column.getPrecision(), column.getScale(), - column.getTimeZone()); - } - return sqlType; - } - - @Override - protected int getSqlType(Class javaClass) { // and purpose(e.g. for read or write?) 
- final int sqlType; - if (javaClass == boolean.class || javaClass == Boolean.class) { - sqlType = Types.BOOLEAN; - } else if (javaClass == byte.class || javaClass == Byte.class) { - sqlType = Types.TINYINT; - } else if (javaClass == short.class || javaClass == Short.class || javaClass == int.class - || javaClass == Integer.class) { - sqlType = Types.INTEGER; - } else if (javaClass == long.class || javaClass == Long.class) { - sqlType = Types.BIGINT; - } else if (javaClass == float.class || javaClass == Float.class) { - sqlType = Types.FLOAT; - } else if (javaClass == double.class || javaClass == Double.class) { - sqlType = Types.DOUBLE; - } else if (javaClass == BigInteger.class || javaClass == BigDecimal.class) { - sqlType = Types.DECIMAL; - } else if (javaClass == Date.class || javaClass == LocalDate.class) { - sqlType = Types.DATE; - } else if (javaClass == Time.class || javaClass == LocalTime.class) { - sqlType = Types.TIME; - } else if (javaClass == Timestamp.class || javaClass == LocalDateTime.class - || javaClass == OffsetDateTime.class || javaClass == ZonedDateTime.class) { - sqlType = Types.TIMESTAMP; - } else if (javaClass == String.class || javaClass == byte[].class - || Enum.class.isAssignableFrom(javaClass)) { - sqlType = Types.VARCHAR; - } else if (javaClass.isArray()) { // could be Nested type - sqlType = Types.ARRAY; - } else if (List.class.isAssignableFrom(javaClass) || Map.class.isAssignableFrom(javaClass)) { - sqlType = Types.STRUCT; - } else { - sqlType = Types.OTHER; - } - return sqlType; - } - - @Override - public String toNativeType(ClickHouseColumn column) { - return toAnsiSqlType(column, new StringBuilder()); - } - - @Override - public int toSqlType(ClickHouseColumn column, Map> typeMap) { - Class javaClass = getCustomJavaClass(column, typeMap); - if (javaClass != null) { - return getSqlType(javaClass); - } - - int sqlType = Types.OTHER; - switch (column.getDataType()) { - case Bool: - sqlType = Types.BOOLEAN; - break; - case Int8: - sqlType = Types.TINYINT; - break; - case UInt8: - case Int16: - case UInt16: - case Int32: - sqlType = Types.INTEGER; - break; - case UInt32: - case IntervalYear: - case IntervalQuarter: - case IntervalMonth: - case IntervalWeek: - case IntervalDay: - case IntervalHour: - case IntervalMinute: - case IntervalSecond: - case IntervalMicrosecond: - case IntervalMillisecond: - case IntervalNanosecond: - case Int64: - sqlType = Types.BIGINT; - break; - case Float32: - sqlType = Types.FLOAT; - break; - case Float64: - sqlType = Types.DOUBLE; - break; - case UInt64: - case Int128: - case UInt128: - case Int256: - case UInt256: - case Decimal: - case Decimal32: - case Decimal64: - case Decimal128: - case Decimal256: - sqlType = Types.DECIMAL; - break; - case Date: - case Date32: - sqlType = Types.DATE; - break; - case DateTime: - case DateTime32: - case DateTime64: - sqlType = Types.TIMESTAMP; - break; - case Enum8: - case Enum16: - case IPv4: - case IPv6: - case FixedString: - case JSON: - case Object: - case String: - case UUID: - sqlType = Types.VARCHAR; - break; - case Point: - case Ring: - case Polygon: - case MultiPolygon: - case Array: - sqlType = Types.ARRAY; - break; - case Map: // Map - case Nested: // Object[][] - case Tuple: // List - sqlType = Types.STRUCT; - break; - case Nothing: - sqlType = Types.NULL; - break; - default: - break; - } - - return sqlType; - } - } - - /** - * Inner class for static initialization. 
- */ - static final class InstanceHolder { - private static final JdbcTypeMapping defaultMapping = ClickHouseUtils - .getService(JdbcTypeMapping.class, JdbcTypeMapping::new); - private static final JdbcTypeMapping ansiMapping = new AnsiTypeMapping(); - - private InstanceHolder() { - } - } - - /** - * Gets default type mapping. - * - * @return non-null type mapping - */ - public static JdbcTypeMapping getDefaultMapping() { - return InstanceHolder.defaultMapping; - } - - /** - * Gets ANSI type mapping. - * - * @return non-null type mapping - */ - public static JdbcTypeMapping getAnsiMapping() { - return InstanceHolder.ansiMapping; - } - - /** - * Gets custom Java class for the given column. - * - * @param column non-null column definition - * @param typeMap column type to Java class map, could be null - * @return custom Java class which may or may not be null - */ - protected Class getCustomJavaClass(ClickHouseColumn column, Map> typeMap) { - if (typeMap != null && !typeMap.isEmpty()) { - Class javaClass = typeMap.get(column.getOriginalTypeName()); - if (javaClass == null) { - javaClass = typeMap.get(column.getDataType().name()); - } - - return javaClass; - } - - return null; - } - /** * Gets corresponding {@link ClickHouseDataType} of the given {@link Types}. * * @param sqlType generic SQL types defined in JDBC * @return non-null ClickHouse data type */ - protected ClickHouseDataType getDataType(int sqlType) { + protected static ClickHouseDataType getDataType(int sqlType) { ClickHouseDataType dataType; switch (sqlType) { @@ -423,7 +117,7 @@ protected ClickHouseDataType getDataType(int sqlType) { * @param javaClass non-null Java class * @return generic SQL type defined in JDBC */ - protected int getSqlType(Class javaClass) { // and purpose(e.g. for read or write?) + protected static int getSqlType(Class javaClass) { // and purpose(e.g. for read or write?) final int sqlType; if (javaClass == boolean.class || javaClass == Boolean.class) { sqlType = Types.BOOLEAN; @@ -470,7 +164,7 @@ protected int getSqlType(Class javaClass) { // and purpose(e.g. for read or w * @param scaleOrLength scale or length * @return non-null ClickHouse column */ - public ClickHouseColumn toColumn(JDBCType jdbcType, int scaleOrLength) { + public static ClickHouseColumn toColumn(JDBCType jdbcType, int scaleOrLength) { Integer type = jdbcType.getVendorTypeNumber(); return toColumn(type != null ? type : Types.OTHER, scaleOrLength); } @@ -482,7 +176,7 @@ public ClickHouseColumn toColumn(JDBCType jdbcType, int scaleOrLength) { * @param scaleOrLength scale or length * @return non-null ClickHouse column */ - public ClickHouseColumn toColumn(int sqlType, int scaleOrLength) { + public static ClickHouseColumn toColumn(int sqlType, int scaleOrLength) { ClickHouseDataType dataType = getDataType(sqlType); ClickHouseColumn column = null; if (scaleOrLength > 0) { @@ -514,15 +208,10 @@ public ClickHouseColumn toColumn(int sqlType, int scaleOrLength) { * Converts {@link ClickHouseColumn} to {@link Class}. 
* * @param column non-null column definition - * @param typeMap optional custom type mapping * @return non-null Java class */ - public Class toJavaClass(ClickHouseColumn column, Map> typeMap) { - Class clazz = getCustomJavaClass(column, typeMap); - if (clazz != null) { - return clazz; - } - + public static Class toJavaClass(ClickHouseColumn column) { + Class clazz; ClickHouseDataType type = column.getDataType(); switch (type) { case DateTime: @@ -543,7 +232,7 @@ public Class toJavaClass(ClickHouseColumn column, Map> typeM * @param column non-null column definition * @return non-null native type */ - public String toNativeType(ClickHouseColumn column) { + public static String toNativeType(ClickHouseColumn column) { return column.getOriginalTypeName(); } @@ -551,15 +240,9 @@ public String toNativeType(ClickHouseColumn column) { * Converts {@link ClickHouseColumn} to generic SQL type defined in JDBC. * * @param column non-null column definition - * @param typeMap optional custom mapping * @return generic SQL type defined in JDBC */ - public int toSqlType(ClickHouseColumn column, Map> typeMap) { - Class javaClass = getCustomJavaClass(column, typeMap); - if (javaClass != null) { - return getSqlType(javaClass); - } - + public static int toSqlType(ClickHouseColumn column) { int sqlType = Types.OTHER; switch (column.getDataType()) { case Bool: diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseJdbcUrlParser.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcUrlParser.java similarity index 98% rename from clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseJdbcUrlParser.java rename to clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcUrlParser.java index ce00c07f9..4f4b4c1f6 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseJdbcUrlParser.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcUrlParser.java @@ -13,7 +13,7 @@ import com.clickhouse.data.ClickHouseFormat; import com.clickhouse.data.ClickHouseUtils; -public class ClickHouseJdbcUrlParser { +public class JdbcUrlParser { public static class ConnectionInfo { private final String cacheKey; private final ClickHouseCredentials credentials; @@ -125,6 +125,6 @@ public static ConnectionInfo parse(String jdbcUrl, Properties defaults) throws S } } - private ClickHouseJdbcUrlParser() { + private JdbcUrlParser() { } } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java index 0ac4a0782..2b856d85e 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java @@ -8,6 +8,12 @@ import java.util.Calendar; public class PreparedStatementImpl extends StatementImpl implements PreparedStatement, JdbcWrapper { + String sql; + public PreparedStatementImpl(Connection connection, String sql) { + super(connection); + this.sql = sql; + } + @Override public ResultSet executeQuery() throws SQLException { return null; diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/SqlExceptionUtils.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/SqlExceptionUtils.java deleted file mode 100644 index da6dcf6c8..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/SqlExceptionUtils.java +++ /dev/null @@ -1,158 +0,0 @@ -package com.clickhouse.jdbc; - -import java.net.ConnectException; -import java.sql.BatchUpdateException; -import java.sql.SQLException; 
-import java.sql.SQLFeatureNotSupportedException; - -import com.clickhouse.client.ClickHouseException; - -/** - * Helper class for building {@link SQLException}. - */ -public final class SqlExceptionUtils { - public static final String SQL_STATE_CLIENT_ERROR = "HY000"; - public static final String SQL_STATE_OPERATION_CANCELLED = "HY008"; - public static final String SQL_STATE_CONNECTION_EXCEPTION = "08000"; - public static final String SQL_STATE_SQL_ERROR = "07000"; - public static final String SQL_STATE_NO_DATA = "02000"; - public static final String SQL_STATE_INVALID_SCHEMA = "3F000"; - public static final String SQL_STATE_INVALID_TX_STATE = "25000"; - public static final String SQL_STATE_DATA_EXCEPTION = "22000"; - public static final String SQL_STATE_FEATURE_NOT_SUPPORTED = "0A000"; - - private SqlExceptionUtils() { - } - - private static SQLException create(Throwable e) { - if (e == null) { - return unknownError(); - } else if (e instanceof ClickHouseException) { - return handle((ClickHouseException) e); - } else if (e instanceof SQLException) { - return (SQLException) e; - } - - Throwable cause = e.getCause(); - if (cause instanceof ClickHouseException) { - return handle((ClickHouseException) cause); - } else if (cause instanceof SQLException) { - return (SQLException) cause; - } else if (cause == null) { - cause = e; - } - - return new SQLException(cause); - } - - // https://en.wikipedia.org/wiki/SQLSTATE - private static String toSqlState(ClickHouseException e) { - final String sqlState; - switch (e.getErrorCode()) { - case ClickHouseException.ERROR_ABORTED: - case ClickHouseException.ERROR_CANCELLED: - sqlState = SQL_STATE_OPERATION_CANCELLED; - break; - case ClickHouseException.ERROR_NETWORK: - case ClickHouseException.ERROR_POCO: - sqlState = SQL_STATE_CONNECTION_EXCEPTION; - break; - case 0: - sqlState = e.getCause() instanceof ConnectException ? SQL_STATE_CONNECTION_EXCEPTION - : SQL_STATE_CLIENT_ERROR; - break; - default: - sqlState = e.getCause() instanceof ConnectException ? SQL_STATE_CONNECTION_EXCEPTION - : SQL_STATE_SQL_ERROR; - break; - } - return sqlState; - } - - public static SQLException clientError(String message) { - return new SQLException(message, SQL_STATE_CLIENT_ERROR, null); - } - - public static SQLException clientError(Throwable e) { - return e != null ? new SQLException(e.getMessage(), SQL_STATE_CLIENT_ERROR, e) : unknownError(); - } - - public static SQLException clientError(String message, Throwable e) { - return new SQLException(message, SQL_STATE_CLIENT_ERROR, e); - } - - public static SQLException handle(ClickHouseException e) { - return e != null ? new SQLException(e.getMessage(), toSqlState(e), e.getErrorCode(), e.getCause()) - : unknownError(); - } - - public static SQLException handle(Throwable e, Throwable... 
more) { - SQLException rootEx = create(e); - if (more != null) { - for (Throwable t : more) { - rootEx.setNextException(create(t)); - } - } - return rootEx; - } - - public static BatchUpdateException batchUpdateError(Throwable e, long[] updateCounts) { - if (e == null) { - return new BatchUpdateException("Something went wrong when performing batch update", SQL_STATE_CLIENT_ERROR, - 0, updateCounts, null); - } else if (e instanceof BatchUpdateException) { - return (BatchUpdateException) e; - } else if (e instanceof SQLException) { - SQLException sqlExp = (SQLException) e; - return new BatchUpdateException(sqlExp.getMessage(), sqlExp.getSQLState(), sqlExp.getErrorCode(), - updateCounts, null); - } - - Throwable cause = e.getCause(); - if (e instanceof BatchUpdateException) { - return (BatchUpdateException) e; - } else if (cause instanceof ClickHouseException) { - return batchUpdateError(cause, updateCounts); - } else if (cause instanceof SQLException) { - SQLException sqlExp = (SQLException) cause; - return new BatchUpdateException(sqlExp.getMessage(), sqlExp.getSQLState(), sqlExp.getErrorCode(), - updateCounts, null); - } else if (cause == null) { - cause = e; - } - - return new BatchUpdateException("Unexpected error", SQL_STATE_SQL_ERROR, 0, updateCounts, cause); - } - - public static BatchUpdateException queryInBatchError(int[] updateCounts) { - return new BatchUpdateException("Query is not allowed in batch update", SQL_STATE_CLIENT_ERROR, updateCounts); - } - - public static BatchUpdateException queryInBatchError(long[] updateCounts) { - return new BatchUpdateException("Query is not allowed in batch update", SQL_STATE_CLIENT_ERROR, 0, updateCounts, - null); - } - - public static SQLException undeterminedExecutionError() { - return clientError("Please either call clearBatch() to clean up context first, or use executeBatch() instead"); - } - - public static SQLException forCancellation(Exception e) { - Throwable cause = e.getCause(); - if (cause == null) { - cause = e; - } - - // operation canceled - return new SQLException(e.getMessage(), SQL_STATE_OPERATION_CANCELLED, ClickHouseException.ERROR_ABORTED, - cause); - } - - public static SQLFeatureNotSupportedException unsupportedError(String message) { - return new SQLFeatureNotSupportedException(message, SQL_STATE_FEATURE_NOT_SUPPORTED); - } - - public static SQLException unknownError() { - return new SQLException("Unknown error", SQL_STATE_CLIENT_ERROR); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java index 983a720d0..88e042de3 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java @@ -7,6 +7,11 @@ import java.sql.Statement; public class StatementImpl implements Statement, JdbcWrapper { + Connection connection; + public StatementImpl(Connection connection) { + this.connection = connection; + } + @Override public ResultSet executeQuery(String sql) throws SQLException { return null; diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseDatabaseMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/DatabaseMetaData.java similarity index 98% rename from clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseDatabaseMetaData.java rename to clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/DatabaseMetaData.java index c588506b4..fcbf5da9b 100644 --- 
a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseDatabaseMetaData.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/DatabaseMetaData.java @@ -1,7 +1,6 @@ package com.clickhouse.jdbc.metadata; import java.sql.Connection; -import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.RowIdLifetime; import java.sql.SQLException; @@ -11,8 +10,8 @@ import com.clickhouse.logging.Logger; import com.clickhouse.logging.LoggerFactory; -public class ClickHouseDatabaseMetaData implements DatabaseMetaData, JdbcWrapper { - private static final Logger log = LoggerFactory.getLogger(ClickHouseDatabaseMetaData.class); +public class DatabaseMetaData implements java.sql.DatabaseMetaData, JdbcWrapper { + private static final Logger log = LoggerFactory.getLogger(DatabaseMetaData.class); @Override public boolean allProceduresAreCallable() throws SQLException { @@ -886,16 +885,16 @@ public boolean generatedKeyAlwaysReturned() throws SQLException { @Override public long getMaxLogicalLobSize() throws SQLException { - return DatabaseMetaData.super.getMaxLogicalLobSize(); + return java.sql.DatabaseMetaData.super.getMaxLogicalLobSize(); } @Override public boolean supportsRefCursors() throws SQLException { - return DatabaseMetaData.super.supportsRefCursors(); + return java.sql.DatabaseMetaData.super.supportsRefCursors(); } @Override public boolean supportsSharding() throws SQLException { - return DatabaseMetaData.super.supportsSharding(); + return java.sql.DatabaseMetaData.super.supportsSharding(); } } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseParameterMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ParameterMetaData.java similarity index 89% rename from clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseParameterMetaData.java rename to clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ParameterMetaData.java index 196e9b40f..41bb69c11 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseParameterMetaData.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ParameterMetaData.java @@ -3,14 +3,13 @@ import com.clickhouse.data.ClickHouseColumn; import com.clickhouse.jdbc.JdbcWrapper; -import java.sql.ParameterMetaData; import java.sql.SQLException; import java.util.List; -public class ClickHouseParameterMetaData implements ParameterMetaData, JdbcWrapper { +public class ParameterMetaData implements java.sql.ParameterMetaData, JdbcWrapper { private final List params; - protected ClickHouseParameterMetaData(List params) { + protected ParameterMetaData(List params) { if (params == null) { throw new IllegalArgumentException("Parameters array cannot be null."); } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseResultSetMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ResultSetMetaData.java similarity index 95% rename from clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseResultSetMetaData.java rename to clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ResultSetMetaData.java index fce04f922..2183ea97a 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ClickHouseResultSetMetaData.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ResultSetMetaData.java @@ -1,11 +1,10 @@ package com.clickhouse.jdbc.metadata; -import java.sql.ResultSetMetaData; import java.sql.SQLException; import com.clickhouse.jdbc.JdbcWrapper; 
-public class ClickHouseResultSetMetaData implements ResultSetMetaData, JdbcWrapper { +public class ResultSetMetaData implements java.sql.ResultSetMetaData, JdbcWrapper { @Override public int getColumnCount() throws SQLException { diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ClickHouseSqlStatement.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ClickHouseSqlStatement.java deleted file mode 100644 index 88806c295..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ClickHouseSqlStatement.java +++ /dev/null @@ -1,364 +0,0 @@ -package com.clickhouse.jdbc.parser; - -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.Map.Entry; - -public class ClickHouseSqlStatement { - public static final String DEFAULT_DATABASE = "system"; - public static final String DEFAULT_TABLE = "unknown"; - - public static final String KEYWORD_DATABASE = "DATABASE"; - public static final String KEYWORD_EXISTS = "EXISTS"; - public static final String KEYWORD_FORMAT = "FORMAT"; - public static final String KEYWORD_REPLACE = "REPLACE"; - public static final String KEYWORD_TOTALS = "TOTALS"; - public static final String KEYWORD_VALUES = "VALUES"; - - public static final String KEYWORD_TABLE_COLUMNS_START = "ColumnsStart"; - public static final String KEYWORD_TABLE_COLUMNS_END = "ColumnsEnd"; - public static final String KEYWORD_VALUES_START = "ValuesStart"; - public static final String KEYWORD_VALUES_END = "ValuesEnd"; - - private final String sql; - private final StatementType stmtType; - private final String cluster; - private final String database; - private final String table; - private final String input; - private final String compressAlgorithm; - private final String compressLevel; - private final String format; - private final String file; - private final List parameters; - private final Map positions; - private final Map settings; - private final Set tempTables; - - public ClickHouseSqlStatement(String sql) { - this(sql, StatementType.UNKNOWN, null, null, null, null, null, null, null, null, null, null, null, null); - } - - public ClickHouseSqlStatement(String sql, StatementType stmtType) { - this(sql, stmtType, null, null, null, null, null, null, null, null, null, null, null, null); - } - - public ClickHouseSqlStatement(String sql, StatementType stmtType, String cluster, String database, String table, - String input, String compressAlgorithm, String compressLevel, String format, String file, - List parameters, Map positions, Map settings, - Set tempTables) { - this.sql = sql; - this.stmtType = stmtType; - - this.cluster = cluster; - this.database = database; - this.table = table == null || table.isEmpty() ? 
DEFAULT_TABLE : table; - this.input = input; - this.compressAlgorithm = compressAlgorithm; - this.compressLevel = compressLevel; - this.format = format; - this.file = file; - - if (parameters != null && !parameters.isEmpty()) { - this.parameters = Collections.unmodifiableList(parameters); - } else { - this.parameters = Collections.emptyList(); - } - - if (positions != null && !positions.isEmpty()) { - Map p = new HashMap<>(); - for (Entry e : positions.entrySet()) { - String keyword = e.getKey(); - Integer position = e.getValue(); - - if (keyword != null && position != null) { - p.put(keyword, position); - } - } - this.positions = Collections.unmodifiableMap(p); - } else { - this.positions = Collections.emptyMap(); - } - - if (settings != null && !settings.isEmpty()) { - Map s = new LinkedHashMap<>(); - for (Entry e : settings.entrySet()) { - String key = e.getKey(); - String value = e.getValue(); - - if (key != null && value != null) { - s.put(key, String.valueOf(e.getValue())); - } - } - this.settings = Collections.unmodifiableMap(s); - } else { - this.settings = Collections.emptyMap(); - } - - if (tempTables != null && !tempTables.isEmpty()) { - Set s = new LinkedHashSet<>(); - s.addAll(tempTables); - this.tempTables = Collections.unmodifiableSet(s); - } else { - this.tempTables = Collections.emptySet(); - } - } - - public String getSQL() { - return this.sql; - } - - public boolean isRecognized() { - return stmtType != StatementType.UNKNOWN; - } - - public boolean isDDL() { - return this.stmtType.getLanguageType() == LanguageType.DDL; - } - - public boolean isDML() { - return this.stmtType.getLanguageType() == LanguageType.DML; - } - - public boolean isQuery() { - return this.stmtType.getOperationType() == OperationType.READ && !this.hasFile(); - } - - public boolean isMutation() { - return this.stmtType.getOperationType() == OperationType.WRITE || this.hasFile(); - } - - public boolean isTCL() { - return this.stmtType.getLanguageType() == LanguageType.TCL; - } - - public boolean isIdemponent() { - boolean result = this.stmtType.isIdempotent() && !this.hasFile(); - - if (!result) { // try harder - switch (this.stmtType) { - case ATTACH: - case CREATE: - case DETACH: - case DROP: - result = positions.containsKey(KEYWORD_EXISTS) || positions.containsKey(KEYWORD_REPLACE); - break; - - default: - break; - } - } - - return result; - } - - public LanguageType getLanguageType() { - return this.stmtType.getLanguageType(); - } - - public OperationType getOperationType() { - return this.stmtType.getOperationType(); - } - - public StatementType getStatementType() { - return this.stmtType; - } - - public String getCluster() { - return this.cluster; - } - - public String getDatabase() { - return this.database; - } - - public String getDatabaseOrDefault(String database) { - return this.database == null ? (database == null ? 
DEFAULT_DATABASE : database) : this.database; - } - - public String getTable() { - return this.table; - } - - public String getInput() { - return this.input; - } - - public String getCompressAlgorithm() { - return this.compressAlgorithm; - } - - public String getCompressLevel() { - return this.compressLevel; - } - - public String getFormat() { - return this.format; - } - - public String getFile() { - return this.file; - } - - public String getContentBetweenKeywords(String startKeyword, String endKeyword) { - return getContentBetweenKeywords(startKeyword, endKeyword, 0); - } - - public String getContentBetweenKeywords(String startKeyword, String endKeyword, int startOffset) { - if (startOffset < 0) { - startOffset = 0; - } - Integer startPos = positions.get(startKeyword); - Integer endPos = positions.get(endKeyword); - - String content = ""; - if (startPos != null && endPos != null && startPos + startOffset < endPos) { - content = sql.substring(startPos + startOffset, endPos); - } - - return content; - } - - public boolean containsKeyword(String keyword) { - if (keyword == null || keyword.isEmpty()) { - return false; - } - - return positions.containsKey(keyword.toUpperCase(Locale.ROOT)); - } - - public boolean hasCompressAlgorithm() { - return this.compressAlgorithm != null && !this.compressAlgorithm.isEmpty(); - } - - public boolean hasCompressLevel() { - return this.compressLevel != null && !this.compressLevel.isEmpty(); - } - - public boolean hasFormat() { - return this.format != null && !this.format.isEmpty(); - } - - public boolean hasInput() { - return this.input != null && !this.input.isEmpty(); - } - - public boolean hasFile() { - return this.file != null && !this.file.isEmpty(); - } - - public boolean hasSettings() { - return !this.settings.isEmpty(); - } - - public boolean hasWithTotals() { - return this.positions.containsKey(KEYWORD_TOTALS); - } - - public boolean hasValues() { - return this.positions.containsKey(KEYWORD_VALUES); - } - - public boolean hasTempTable() { - return !this.tempTables.isEmpty(); - } - - public List getParameters() { - return this.parameters; - } - - public int getStartPosition(String keyword) { - int position = -1; - - if (!this.positions.isEmpty() && keyword != null) { - Integer p = this.positions.get(keyword.toUpperCase(Locale.ROOT)); - if (p != null) { - position = p.intValue(); - } - } - - return position; - } - - public int getEndPosition(String keyword) { - int position = getStartPosition(keyword); - - return position != -1 && keyword != null ? 
position + keyword.length() : position; - } - - public Map getPositions() { - return this.positions; - } - - public Map getSettings() { - return this.settings; - } - - public Set getTempTables() { - return this.tempTables; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - - sb.append('[').append(stmtType.name()).append(']').append(" cluster=").append(cluster).append(", database=") - .append(database).append(", table=").append(table).append(", input=").append(input) - .append(", compressAlgorithm=").append(compressAlgorithm).append(", compressLevel=") - .append(compressLevel).append(", format=").append(format).append(", outfile=").append(file) - .append(", parameters=").append(parameters).append(", positions=").append(positions) - .append(", settings=").append(settings).append(", tempTables=").append(settings).append("\nSQL:\n") - .append(sql); - - return sb.toString(); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((sql == null) ? 0 : sql.hashCode()); - result = prime * result + ((cluster == null) ? 0 : cluster.hashCode()); - result = prime * result + ((database == null) ? 0 : database.hashCode()); - result = prime * result + table.hashCode(); - result = prime * result + ((input == null) ? 0 : input.hashCode()); - result = prime * result + ((compressAlgorithm == null) ? 0 : compressAlgorithm.hashCode()); - result = prime * result + ((compressLevel == null) ? 0 : compressLevel.hashCode()); - result = prime * result + ((format == null) ? 0 : format.hashCode()); - result = prime * result + ((file == null) ? 0 : file.hashCode()); - result = prime * result + ((stmtType == null) ? 0 : stmtType.hashCode()); - - result = prime * result + parameters.hashCode(); - result = prime * result + positions.hashCode(); - result = prime * result + settings.hashCode(); - result = prime * result + tempTables.hashCode(); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ClickHouseSqlStatement other = (ClickHouseSqlStatement) obj; - return stmtType == other.stmtType && Objects.equals(sql, other.sql) && Objects.equals(cluster, other.cluster) - && Objects.equals(database, other.database) && Objects.equals(table, other.table) - && Objects.equals(input, other.input) && Objects.equals(compressAlgorithm, other.compressAlgorithm) - && Objects.equals(compressLevel, other.compressLevel) && Objects.equals(format, other.format) - && Objects.equals(file, other.file) && parameters.equals(other.parameters) - && positions.equals(other.positions) && settings.equals(other.settings) - && tempTables.equals(other.tempTables); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/LanguageType.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/LanguageType.java deleted file mode 100644 index 4ff42e3f6..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/LanguageType.java +++ /dev/null @@ -1,9 +0,0 @@ -package com.clickhouse.jdbc.parser; - -public enum LanguageType { - UNKNOWN, // unknown language - DCL, // data control language - DDL, // data definition language - DML, // data manipulation language - TCL // transaction control language -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/OperationType.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/OperationType.java deleted file mode 100644 
index 56360c256..000000000
--- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/OperationType.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package com.clickhouse.jdbc.parser;
-
-public enum OperationType {
-    UNKNOWN, READ, WRITE
-}
diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ParseHandler.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ParseHandler.java
deleted file mode 100644
index 151bb9a12..000000000
--- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/ParseHandler.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.clickhouse.jdbc.parser;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-public abstract class ParseHandler {
-    /**
-     * Handle macro like "#include('/tmp/template.sql')".
-     *
-     * @param name name of the macro
-     * @param parameters parameters
-     * @return output of the macro, could be null or empty string
-     */
-    public String handleMacro(String name, List parameters) {
-        return null;
-    }
-
-    /**
-     * Handle parameter.
-     *
-     * @param cluster cluster
-     * @param database database
-     * @param table table
-     * @param columnIndex columnIndex(starts from 1 not 0)
-     * @return parameter value
-     */
-    public String handleParameter(String cluster, String database, String table, int columnIndex) {
-        return null;
-    }
-
-    /**
-     * Handle statement.
-     *
-     * @param sql sql statement
-     * @param stmtType statement type
-     * @param cluster cluster
-     * @param database database
-     * @param table table
-     * @param compressAlgorithm compression algorithm
-     * @param compressLevel compression level
-     * @param format format
-     * @param input input
-     * @param file infile or outfile
-     * @param parameters positions of parameters
-     * @param positions keyword positions
-     * @param settings settings
-     * @param tempTables temporary tables
-     * @return sql statement, or null means no change
-     */
-    public ClickHouseSqlStatement handleStatement(String sql, StatementType stmtType, String cluster, String database,
-            String table, String input, String compressAlgorithm, String compressLevel, String format, String file,
-            List parameters, Map positions, Map settings,
-            Set tempTables) {
-        return null;
-    }
-}
diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/StatementType.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/StatementType.java
deleted file mode 100644
index 30f7fa209..000000000
--- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/parser/StatementType.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package com.clickhouse.jdbc.parser;
-
-public enum StatementType {
-    UNKNOWN(LanguageType.UNKNOWN, OperationType.UNKNOWN, false), // unknown statement
-    ALTER(LanguageType.DDL, OperationType.UNKNOWN, false), // alter statement
-    ALTER_DELETE(LanguageType.DML, OperationType.WRITE, false), // delete statement
-    ALTER_UPDATE(LanguageType.DML, OperationType.WRITE, false), // update statement
-    ATTACH(LanguageType.DDL, OperationType.UNKNOWN, false), // attach statement
-    CHECK(LanguageType.DDL, OperationType.UNKNOWN, true), // check statement
-    CREATE(LanguageType.DDL, OperationType.UNKNOWN, false), // create statement
-    DELETE(LanguageType.DML, OperationType.WRITE, false), // the upcoming light-weight delete statement
-    DESCRIBE(LanguageType.DDL, OperationType.READ, true), // describe/desc statement
-    DETACH(LanguageType.DDL, OperationType.UNKNOWN, false), // detach statement
-    DROP(LanguageType.DDL, OperationType.UNKNOWN, false), // drop statement
-    EXISTS(LanguageType.DML, OperationType.READ, true), // exists statement
-
EXPLAIN(LanguageType.DDL, OperationType.READ, true), // explain statement - GRANT(LanguageType.DCL, OperationType.UNKNOWN, true), // grant statement - INSERT(LanguageType.DML, OperationType.WRITE, false), // insert statement - KILL(LanguageType.DCL, OperationType.UNKNOWN, false), // kill statement - OPTIMIZE(LanguageType.DDL, OperationType.UNKNOWN, false), // optimize statement - RENAME(LanguageType.DDL, OperationType.UNKNOWN, false), // rename statement - REVOKE(LanguageType.DCL, OperationType.UNKNOWN, true), // revoke statement - SELECT(LanguageType.DML, OperationType.READ, true), // select statement - SET(LanguageType.DCL, OperationType.UNKNOWN, true), // set statement - SHOW(LanguageType.DDL, OperationType.READ, true), // show statement - SYSTEM(LanguageType.DDL, OperationType.UNKNOWN, false), // system statement - TRUNCATE(LanguageType.DDL, OperationType.UNKNOWN, true), // truncate statement - UPDATE(LanguageType.DML, OperationType.WRITE, false), // the upcoming light-weight update statement - USE(LanguageType.DDL, OperationType.UNKNOWN, true), // use statement - WATCH(LanguageType.DDL, OperationType.UNKNOWN, true), // watch statement - TRANSACTION(LanguageType.TCL, OperationType.WRITE, true); // TCL statement - - private LanguageType langType; - private OperationType opType; - private boolean idempotent; - - StatementType(LanguageType langType, OperationType operationType, boolean idempotent) { - this.langType = langType; - this.opType = operationType; - this.idempotent = idempotent; - } - - LanguageType getLanguageType() { - return this.langType; - } - - OperationType getOperationType() { - return this.opType; - } - - boolean isIdempotent() { - return this.idempotent; - } -} diff --git a/clickhouse-jdbc/src/main/java9/module-info.java b/clickhouse-jdbc/src/main/java9/module-info.java deleted file mode 100644 index b5893ee61..000000000 --- a/clickhouse-jdbc/src/main/java9/module-info.java +++ /dev/null @@ -1,18 +0,0 @@ -/** - * Declares com.clickhouse module. 
- */ -module com.clickhouse.jdbc { - exports com.clickhouse.jdbc; - - requires java.sql; - - requires transitive com.clickhouse.client; - // requires transitive com.google.gson; - // requires transitive org.lz4.java; - - uses com.clickhouse.client.ClickHouseClient; - uses com.clickhouse.client.ClickHouseDnsResolver; - uses com.clickhouse.client.ClickHouseSslContextProvider; - uses com.clickhouse.data.ClickHouseDataStreamFactory; - uses com.clickhouse.logging.LoggerFactory; -} diff --git a/clickhouse-jdbc/src/main/resources/META-INF/services/java.sql.Driver b/clickhouse-jdbc/src/main/resources/META-INF/services/java.sql.Driver index 4ce204321..20cf6f41c 100644 --- a/clickhouse-jdbc/src/main/resources/META-INF/services/java.sql.Driver +++ b/clickhouse-jdbc/src/main/resources/META-INF/services/java.sql.Driver @@ -1 +1 @@ -com.clickhouse.jdbc.DriverImpl +com.clickhouse.jdbc.Driver diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java index 7e5140681..25f1c68ff 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java @@ -88,9 +88,9 @@ public void testGetConnection() throws SQLException { }) { for (ClickHouseConnection connection : new ClickHouseConnection[] { ds.getConnection("default", getPassword()), - new DriverImpl().connect(url, properties), - new DriverImpl().connect(urlWithCredentials, properties), - new DriverImpl().connect(urlWithCredentials + params, new Properties()), + new Driver().connect(url, properties), + new Driver().connect(urlWithCredentials, properties), + new Driver().connect(urlWithCredentials + params, new Properties()), (ClickHouseConnection) DriverManager.getConnection(url, properties), (ClickHouseConnection) DriverManager.getConnection(urlWithCredentials, properties), (ClickHouseConnection) DriverManager.getConnection(urlWithCredentials + params), diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaDataTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DatabaseMetaDataTest.java similarity index 98% rename from clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaDataTest.java rename to clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DatabaseMetaDataTest.java index 118f79076..22fb1b1b3 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseDatabaseMetaDataTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DatabaseMetaDataTest.java @@ -4,13 +4,11 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.Arrays; import java.util.Locale; import java.util.Properties; import java.util.UUID; import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.client.http.ClickHouseHttpConnectionFactory; import com.clickhouse.data.ClickHouseColumn; import com.clickhouse.logging.Logger; @@ -19,9 +17,9 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -public class ClickHouseDatabaseMetaDataTest extends JdbcIntegrationTest { +public class DatabaseMetaDataTest extends JdbcIntegrationTest { - private static final Logger log = LoggerFactory.getLogger(ClickHouseDatabaseMetaDataTest.class); + private static final Logger log = LoggerFactory.getLogger(DatabaseMetaDataTest.class); @DataProvider(name = "selectedColumns") private Object[][] getSelectedColumns() { return new Object[][] 
{ diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverImplTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java similarity index 88% rename from clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverImplTest.java rename to clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java index 7c467775a..5cb3dfc13 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverImplTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java @@ -8,11 +8,11 @@ import org.testng.Assert; import org.testng.annotations.Test; -public class DriverImplTest extends JdbcIntegrationTest { +public class DriverTest extends JdbcIntegrationTest { @Test(groups = "integration") public void testAcceptUrl() throws SQLException { String address = getServerAddress(ClickHouseProtocol.HTTP, true); - DriverImpl driver = new DriverImpl(); + Driver driver = new Driver(); Assert.assertTrue(driver.acceptsURL("jdbc:clickhouse://" + address)); Assert.assertTrue(driver.acceptsURL("jdbc:clickhouse:http://" + address)); Assert.assertTrue(driver.acceptsURL("jdbc:ch://" + address)); @@ -23,7 +23,7 @@ public void testAcceptUrl() throws SQLException { public void testConnect() throws SQLException { if (isCloud()) return; //TODO: testConnect - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 String address = getServerAddress(ClickHouseProtocol.HTTP, true); - DriverImpl driver = new DriverImpl(); + Driver driver = new Driver(); ClickHouseConnection conn = driver.connect("jdbc:clickhouse://" + address, null); conn.close(); } diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java index 77910ad9f..d7f42ab37 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java @@ -6,7 +6,7 @@ import com.clickhouse.client.ClickHouseRequest; import com.clickhouse.jdbc.ClickHouseConnection; -import com.clickhouse.jdbc.DriverImpl; +import com.clickhouse.jdbc.Driver; import com.clickhouse.jdbc.ClickHouseStatement; import com.clickhouse.jdbc.JdbcIntegrationTest; import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; @@ -18,7 +18,7 @@ public class ClickHouseConnectionImplTest extends JdbcIntegrationTest { @Test(groups = "integration") public void testManualCommit() throws SQLException { if (isCloud()) return; //TODO: testManualCommit - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - try (DriverImpl.ClickHouseConnectionImpl conn = (DriverImpl.ClickHouseConnectionImpl) newConnection()) { + try (Driver.ClickHouseConnectionImpl conn = (Driver.ClickHouseConnectionImpl) newConnection()) { Assert.assertEquals(conn.getAutoCommit(), true); Assert.assertNull(conn.getTransaction(), "Should NOT have any transaction"); conn.setAutoCommit(false); @@ -111,7 +111,7 @@ public void testManualCommit() throws SQLException { @Test(groups = "integration") public void testManualRollback() throws SQLException { if (isCloud()) return; //TODO: testManualRollback - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - try (DriverImpl.ClickHouseConnectionImpl conn = (DriverImpl.ClickHouseConnectionImpl) newConnection()) { + try (Driver.ClickHouseConnectionImpl conn = (Driver.ClickHouseConnectionImpl) 
newConnection()) { Assert.assertEquals(conn.getAutoCommit(), true); Assert.assertNull(conn.getTransaction(), "Should NOT have any transaction"); conn.setAutoCommit(false); diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParserTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcUrlParserTest.java similarity index 67% rename from clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParserTest.java rename to clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcUrlParserTest.java index 1a9417e23..322c7f10a 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseJdbcUrlParserTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcUrlParserTest.java @@ -10,46 +10,46 @@ import com.clickhouse.client.ClickHouseNode; import com.clickhouse.client.ClickHouseProtocol; import com.clickhouse.client.config.ClickHouseDefaults; -import com.clickhouse.jdbc.ClickHouseJdbcUrlParser; -import com.clickhouse.jdbc.ClickHouseJdbcUrlParser.ConnectionInfo; +import com.clickhouse.jdbc.JdbcUrlParser; +import com.clickhouse.jdbc.JdbcUrlParser.ConnectionInfo; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -public class ClickHouseJdbcUrlParserTest { +public class JdbcUrlParserTest { @Test(groups = "unit") public void testParseInvalidUri() { - Assert.assertThrows(SQLException.class, () -> ClickHouseJdbcUrlParser.parse(null, null)); - Assert.assertThrows(SQLException.class, () -> ClickHouseJdbcUrlParser.parse("", null)); - Assert.assertThrows(SQLException.class, () -> ClickHouseJdbcUrlParser.parse("some_invalid_uri", null)); - Assert.assertThrows(SQLException.class, () -> ClickHouseJdbcUrlParser.parse("jdbc:clickhouse:.", null)); + Assert.assertThrows(SQLException.class, () -> JdbcUrlParser.parse(null, null)); + Assert.assertThrows(SQLException.class, () -> JdbcUrlParser.parse("", null)); + Assert.assertThrows(SQLException.class, () -> JdbcUrlParser.parse("some_invalid_uri", null)); + Assert.assertThrows(SQLException.class, () -> JdbcUrlParser.parse("jdbc:clickhouse:.", null)); Assert.assertThrows(SQLException.class, - () -> ClickHouseJdbcUrlParser.parse("jdbc:clickhouse://", null)); + () -> JdbcUrlParser.parse("jdbc:clickhouse://", null)); Assert.assertThrows(SQLException.class, - () -> ClickHouseJdbcUrlParser.parse("jdbc:clickhouse:///db", null)); + () -> JdbcUrlParser.parse("jdbc:clickhouse:///db", null)); Assert.assertThrows(SQLException.class, - () -> ClickHouseJdbcUrlParser.parse("clickhouse://a:b:c@aaa", null)); + () -> JdbcUrlParser.parse("clickhouse://a:b:c@aaa", null)); Assert.assertThrows(SQLException.class, - () -> ClickHouseJdbcUrlParser.parse("clickhouse://::1:1234/a", null)); + () -> JdbcUrlParser.parse("clickhouse://::1:1234/a", null)); } @Test(groups = "unit") public void testParseIpv6() throws SQLException, URISyntaxException { - ConnectionInfo info = ClickHouseJdbcUrlParser.parse("jdbc:clickhouse://[::1]:1234", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + ConnectionInfo info = JdbcUrlParser.parse("jdbc:clickhouse://[::1]:1234", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://[::1]:1234")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("[::1]").port(ClickHouseProtocol.HTTP, 1234).build()); - info = 
ClickHouseJdbcUrlParser.parse("jdbc:clickhouse://[::1]/", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + info = JdbcUrlParser.parse("jdbc:clickhouse://[::1]/", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://[::1]:8123")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("[::1]").port(ClickHouseProtocol.HTTP).build()); - info = ClickHouseJdbcUrlParser.parse("jdbc:clickhouse://[::1]/dbdb", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + info = JdbcUrlParser.parse("jdbc:clickhouse://[::1]/dbdb", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://[::1]:8123/dbdb")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("[::1]").port(ClickHouseProtocol.HTTP).database("dbdb") @@ -58,20 +58,20 @@ public void testParseIpv6() throws SQLException, URISyntaxException { @Test(groups = "unit") public void testParseAbbrevation() throws SQLException, URISyntaxException { - ConnectionInfo info = ClickHouseJdbcUrlParser.parse("jdbc:ch://localhost", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + ConnectionInfo info = JdbcUrlParser.parse("jdbc:ch://localhost", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://localhost:8123")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP).build()); - info = ClickHouseJdbcUrlParser.parse("jdbc:ch:grpc://localhost", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + info = JdbcUrlParser.parse("jdbc:ch:grpc://localhost", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:grpc://localhost:9100")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.GRPC).build()); - info = ClickHouseJdbcUrlParser.parse("jdbc:ch:https://:letmein@[::1]:3218/db1?user=aaa", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + info = JdbcUrlParser.parse("jdbc:ch:https://:letmein@[::1]:3218/db1?user=aaa", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://[::1]:3218/db1?ssl=true&sslmode=STRICT")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("[::1]") .port(ClickHouseProtocol.HTTP, 3218) @@ -84,14 +84,14 @@ public void testParseAbbrevation() throws SQLException, URISyntaxException { @Test(groups = "unit") public void testParse() throws SQLException, URISyntaxException { - ConnectionInfo info = ClickHouseJdbcUrlParser.parse("jdbc:ch://localhost", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + ConnectionInfo info = JdbcUrlParser.parse("jdbc:ch://localhost", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://localhost:8123")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP).build()); - info = ClickHouseJdbcUrlParser.parse("jdbc:ch:grpc://localhost/default", null); - 
Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + info = JdbcUrlParser.parse("jdbc:ch:grpc://localhost/default", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:grpc://localhost:9100/default")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.GRPC) @@ -99,8 +99,8 @@ public void testParse() throws SQLException, URISyntaxException { .getEffectiveDefaultValue()) .build()); - info = ClickHouseJdbcUrlParser.parse("jdbc:ch:https://:letmein@127.0.0.1:3218/db1", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + info = JdbcUrlParser.parse("jdbc:ch:https://:letmein@127.0.0.1:3218/db1", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://127.0.0.1:3218/db1?ssl=true&sslmode=STRICT")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("127.0.0.1") .port(ClickHouseProtocol.HTTP, 3218).database("db1") @@ -113,14 +113,14 @@ public void testParse() throws SQLException, URISyntaxException { @Test(groups = "unit") public void testParseWithProperties() throws SQLException, URISyntaxException { - ConnectionInfo info = ClickHouseJdbcUrlParser.parse("jdbc:clickhouse://localhost/", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + ConnectionInfo info = JdbcUrlParser.parse("jdbc:clickhouse://localhost/", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://localhost:8123")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP).build()); - info = ClickHouseJdbcUrlParser.parse("jdbc:clickhouse://localhost:4321/ndb", null); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + info = JdbcUrlParser.parse("jdbc:clickhouse://localhost:4321/ndb", null); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://localhost:4321/ndb")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP, 4321) @@ -128,8 +128,8 @@ public void testParseWithProperties() throws SQLException, URISyntaxException { Properties props = new Properties(); props.setProperty("database", "db1"); - info = ClickHouseJdbcUrlParser.parse("jdbc:clickhouse://me@localhost:1234/mydb?password=123", props); - Assert.assertEquals(info.getServer().toUri(ClickHouseJdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), + info = JdbcUrlParser.parse("jdbc:clickhouse://me@localhost:1234/mydb?password=123", props); + Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), new URI("jdbc:clickhouse:http://localhost:1234/db1")); Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP, 1234) @@ -144,7 +144,7 @@ public void testParseCredentials() throws SQLException { Properties props = new Properties(); props.setProperty("user", "default1"); props.setProperty("password", "password1"); - ConnectionInfo connInfo = ClickHouseJdbcUrlParser.parse("jdbc:clickhouse://user:a:passwd@foo.ch/test", + ConnectionInfo connInfo = JdbcUrlParser.parse("jdbc:clickhouse://user:a:passwd@foo.ch/test", props); ClickHouseNode server = connInfo.getServer(); 
Assert.assertEquals(connInfo.getDefaultCredentials().getUserName(), "default1"); @@ -152,7 +152,7 @@ public void testParseCredentials() throws SQLException { Assert.assertEquals(server.getCredentials().get().getUserName(), "user"); Assert.assertEquals(server.getCredentials().get().getPassword(), "a:passwd"); - server = ClickHouseJdbcUrlParser.parse("jdbc:clickhouse://let%40me%3Ain:let%40me%3Ain@foo.ch", null) + server = JdbcUrlParser.parse("jdbc:clickhouse://let%40me%3Ain:let%40me%3Ain@foo.ch", null) .getServer(); Assert.assertEquals(server.getCredentials().get().getUserName(), "let@me:in"); Assert.assertEquals(server.getCredentials().get().getPassword(), "let@me:in"); diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtilsTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtilsTest.java index 2a2b804d2..6d2aca481 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtilsTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtilsTest.java @@ -1,5 +1,6 @@ package com.clickhouse.jdbc.parser; +import com.clickhouse.jdbc.ClickHouseSqlUtils; import org.testng.Assert; import org.testng.annotations.Test; From ec87efb563e15c9e08ebd3484912ea75195950a4 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Thu, 5 Sep 2024 06:03:24 -0400 Subject: [PATCH 07/21] Update Driver.java --- .../src/main/java/com/clickhouse/jdbc/Driver.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java index 7b070f813..d19e9cd30 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java @@ -12,6 +12,14 @@ public class Driver implements java.sql.Driver { private static final Logger log = LoggerFactory.getLogger(Driver.class); + static { + try { + DriverManager.registerDriver(new Driver()); + } catch (SQLException e) { + log.error("Failed to register ClickHouse JDBC driver", e); + } + } + @Override public Connection connect(String url, Properties info) throws SQLException { From 1de8b11b66bde77cac954a27828759d468620538 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Thu, 5 Sep 2024 06:39:22 -0400 Subject: [PATCH 08/21] Delete JdbcUrlParser.java --- .../com/clickhouse/jdbc/JdbcUrlParser.java | 130 ------------------ 1 file changed, 130 deletions(-) delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcUrlParser.java diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcUrlParser.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcUrlParser.java deleted file mode 100644 index 4f4b4c1f6..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcUrlParser.java +++ /dev/null @@ -1,130 +0,0 @@ -package com.clickhouse.jdbc; - -import java.net.URISyntaxException; -import java.sql.SQLException; -import java.util.Properties; - -import com.clickhouse.client.ClickHouseCredentials; -import com.clickhouse.client.ClickHouseNode; -import com.clickhouse.client.ClickHouseNodes; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.client.config.ClickHouseDefaults; -import com.clickhouse.data.ClickHouseChecker; -import com.clickhouse.data.ClickHouseFormat; -import com.clickhouse.data.ClickHouseUtils; - -public class JdbcUrlParser { - public static class ConnectionInfo { - private final String cacheKey; - private final 
ClickHouseCredentials credentials; - private final ClickHouseNodes nodes; - private final JdbcConfig jdbcConf; - private final Properties props; - - protected ConnectionInfo(String cacheKey, ClickHouseNodes nodes, Properties props) { - this.cacheKey = cacheKey; - this.nodes = nodes; - this.jdbcConf = new JdbcConfig(props); - this.props = props; - - ClickHouseCredentials c = nodes.getTemplate().getCredentials().orElse(null); - if (props != null && !props.isEmpty()) { - String user = props.getProperty(ClickHouseDefaults.USER.getKey(), ""); - String passwd = props.getProperty(ClickHouseDefaults.PASSWORD.getKey(), ""); - if (!ClickHouseChecker.isNullOrEmpty(user)) { - c = ClickHouseCredentials.fromUserAndPassword(user, passwd); - } - } - this.credentials = c; - } - - public ClickHouseCredentials getDefaultCredentials() { - return this.credentials; - } - - /** - * Gets selected server. - * - * @return non-null selected server - * @deprecated will be removed in 0.5, please use {@link #getNodes()} - * instead - */ - @Deprecated - public ClickHouseNode getServer() { - return nodes.apply(nodes.getNodeSelector()); - } - - public JdbcConfig getJdbcConfig() { - return jdbcConf; - } - - /** - * Gets nodes defined in connection string. - * - * @return non-null nodes - */ - public ClickHouseNodes getNodes() { - return nodes; - } - - public Properties getProperties() { - return props; - } - } - - // URL pattern: - // jdbc:(clickhouse|ch)[:(grpc|http|tcp)]://host[:port][/db][?param1=value1¶m2=value2] - public static final String JDBC_PREFIX = "jdbc:"; - public static final String JDBC_CLICKHOUSE_PREFIX = JDBC_PREFIX + "clickhouse:"; - public static final String JDBC_ABBREVIATION_PREFIX = JDBC_PREFIX + "ch:"; - - static Properties newProperties() { - Properties props = new Properties(); - props.setProperty(ClickHouseClientOption.ASYNC.getKey(), Boolean.FALSE.toString()); - props.setProperty(ClickHouseClientOption.FORMAT.getKey(), ClickHouseFormat.RowBinaryWithNamesAndTypes.name()); - props.setProperty(ClickHouseClientOption.PRODUCT_NAME.getKey(), "ClickHouse-JdbcDriver"); - return props; - } - - public static ConnectionInfo parse(String jdbcUrl, Properties defaults) throws SQLException { - if (defaults == null) { - defaults = new Properties(); - } - - if (ClickHouseChecker.isNullOrBlank(jdbcUrl)) { - throw SqlExceptionUtils.clientError("Non-blank JDBC URL is required"); - } - - if (jdbcUrl.startsWith(JDBC_CLICKHOUSE_PREFIX)) { - jdbcUrl = jdbcUrl.substring(JDBC_CLICKHOUSE_PREFIX.length()); - } else if (jdbcUrl.startsWith(JDBC_ABBREVIATION_PREFIX)) { - jdbcUrl = jdbcUrl.substring(JDBC_ABBREVIATION_PREFIX.length()); - } else { - throw SqlExceptionUtils.clientError( - new URISyntaxException(jdbcUrl, ClickHouseUtils.format("'%s' or '%s' prefix is mandatory", - JDBC_CLICKHOUSE_PREFIX, JDBC_ABBREVIATION_PREFIX))); - } - - int index = jdbcUrl.indexOf("//"); - if (index == -1) { - throw SqlExceptionUtils - .clientError(new URISyntaxException(jdbcUrl, "Missing '//' from the given JDBC URL")); - } else if (index == 0) { - jdbcUrl = "http:" + jdbcUrl; - } - - try { - String cacheKey = ClickHouseNodes.buildCacheKey(jdbcUrl, defaults); - ClickHouseNodes nodes = ClickHouseNodes.of(cacheKey, jdbcUrl, defaults); - Properties props = newProperties(); - props.putAll(nodes.getTemplate().getOptions()); - props.putAll(defaults); - return new ConnectionInfo(cacheKey, nodes, props); - } catch (IllegalArgumentException e) { - throw SqlExceptionUtils.clientError(e); - } - } - - private JdbcUrlParser() { - } -} From 
b673d3be49985a34f50ff3f5fcd643d25d1123f4 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Wed, 11 Sep 2024 03:54:45 -0400 Subject: [PATCH 09/21] WIP --- clickhouse-jdbc/pom.xml | 169 +----------------- .../com/clickhouse/jdbc/ConnectionImpl.java | 119 +++++++++--- .../main/java/com/clickhouse/jdbc/Driver.java | 4 +- .../jdbc/internal/JdbcConfiguration.java | 99 ++++++++++ 4 files changed, 200 insertions(+), 191 deletions(-) create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcConfiguration.java diff --git a/clickhouse-jdbc/pom.xml b/clickhouse-jdbc/pom.xml index 25bae46f1..1c3190c9c 100644 --- a/clickhouse-jdbc/pom.xml +++ b/clickhouse-jdbc/pom.xml @@ -28,19 +28,13 @@ ${project.parent.groupId} - clickhouse-client - ${revision} - - - ${project.parent.groupId} - clickhouse-data + client-v2 ${revision} ${project.parent.groupId} clickhouse-http-client ${revision} - true org.apache.commons @@ -77,33 +71,6 @@ gson true - - io.opencensus - opencensus-impl - true - - - ${project.parent.groupId} - org.roaringbitmap - provided - - - * - * - - - - - ${project.parent.groupId} - org.apache.commons.compress - provided - - - * - * - - - org.lz4 lz4-java @@ -114,7 +81,6 @@ zstd-jni provided - ${project.parent.groupId} clickhouse-client @@ -137,104 +103,8 @@ testng test - - com.mysql - mysql-connector-j - test - - - com.clickhouse - clickhouse-http-client - 0.7.0-SNAPSHOT - compile - - - - - native - - - - org.graalvm.buildtools - native-maven-plugin - true - - - build-native - - compile-no-fork - - package - - - test-native - - test - - test - - - - ${imageName} - ${mainClass} - - --no-fallback - - - - ${project.build.directory}/${project.artifactId}-${project.version}-shaded.jar - - - true - - - - - org.codehaus.mojo - exec-maven-plugin - - - java-agent - - exec - - - java - ${project.build.directory} - - -classpath - - - ${project.build.directory}/${project.artifactId}-${project.version}-shaded.jar - - ${mainClass} - - - - - native - - exec - - - ${project.build.directory}/${imageName} - ${project.build.directory} - - - - - - - - - @@ -245,27 +115,6 @@ org.apache.maven.plugins maven-surefire-plugin - - com.helger.maven - ph-javacc-maven-plugin - ${javacc-plugin.version} - - - jjc - generate-sources - - javacc - - - ${minJdk} - true - com.clickhouse.jdbc.parser - src/main/javacc - src/main/java - - - - org.apache.maven.plugins maven-jar-plugin @@ -317,10 +166,11 @@ com.clickhouse:clickhouse-data - com.clickhouse:clickhouse-client + com.clickhouse:client-v2 com.clickhouse:clickhouse-http-client org.apache.httpcomponents.client5:httpclient5 org.apache.httpcomponents.core5:httpcore5 + org.apache.httpcomponents.core5:httpcore5-h2 org.lz4:lz4-pure-java @@ -345,7 +195,6 @@ implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> ${project.groupId}.jdbc - ${mainClass} ${spec.title} ${spec.version} @@ -376,7 +225,6 @@ com.clickhouse:clickhouse-data - com.clickhouse:clickhouse-client com.google.code.gson:gson com.google.guava:failureaccess com.google.guava:guava @@ -387,7 +235,6 @@ io.opencensus:opencensus-impl-core io.perfmark:perfmark-api org.apache.commons:commons-compress - com.clickhouse:clickhouse-http-client org.apache.httpcomponents.client5:httpclient5 org.apache.httpcomponents.core5:httpcore5 org.apache.httpcomponents.core5:httpcore5-h2 @@ -431,7 +278,6 @@ implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> ${project.groupId}.jdbc - ${mainClass} ${spec.title} ${spec.version} @@ -465,7 +311,7 @@ 
com.clickhouse:clickhouse-data - com.clickhouse:clickhouse-client + com.clickhouse:client-v2 com.clickhouse:clickhouse-http-client org.lz4:lz4-pure-java @@ -481,7 +327,6 @@ implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> ${project.groupId}.jdbc - ${mainClass} ${spec.title} ${spec.version} @@ -497,7 +342,6 @@ - shade-default package @@ -513,7 +357,7 @@ com.clickhouse:clickhouse-data - com.clickhouse:clickhouse-client + com.clickhouse:client-v2 com.clickhouse:clickhouse-http-client @@ -524,7 +368,6 @@ implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> ${project.groupId}.jdbc - ${mainClass} ${spec.title} ${spec.version} @@ -557,4 +400,4 @@ - + \ No newline at end of file diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java index f35c15c57..69e4dafec 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -1,104 +1,144 @@ package com.clickhouse.jdbc; +import com.clickhouse.client.api.Client; +import com.clickhouse.jdbc.internal.JdbcConfiguration; +import com.clickhouse.logging.Logger; +import com.clickhouse.logging.LoggerFactory; + import java.sql.*; import java.util.Map; import java.util.Properties; import java.util.concurrent.Executor; public class ConnectionImpl implements Connection, JdbcWrapper { + private static final Logger log = LoggerFactory.getLogger(ConnectionImpl.class); + + private final Client client; + private final JdbcConfiguration config; + + private boolean closed = false; + private String catalog; + private String schema; + + public ConnectionImpl(String url, Properties info) { + this.config = new JdbcConfiguration(url, info); + this.client = new Client.Builder() + .addEndpoint(config.getProtocol() + "://" + config.getHost() + ":" + config.getPort()) + .setUsername(config.getUser()) + .setPassword(config.getPassword()) + .compressServerResponse(true) + .setDefaultDatabase(config.getDatabase()) + .build(); + } + + @Override public Statement createStatement() throws SQLException { + checkOpen(); return new StatementImpl(this); } @Override public PreparedStatement prepareStatement(String sql) throws SQLException { + checkOpen(); return new PreparedStatementImpl(this, sql); } @Override public CallableStatement prepareCall(String sql) throws SQLException { + checkOpen(); return null; } @Override public String nativeSQL(String sql) throws SQLException { + checkOpen(); return ""; } @Override public void setAutoCommit(boolean autoCommit) throws SQLException { - + throw new SQLFeatureNotSupportedException("setAutoCommit not supported"); } @Override public boolean getAutoCommit() throws SQLException { - return false; + checkOpen(); + return true; } @Override public void commit() throws SQLException { - + throw new SQLFeatureNotSupportedException("Commit/Rollback not supported"); } @Override public void rollback() throws SQLException { - + throw new SQLFeatureNotSupportedException("Commit/Rollback not supported"); } @Override public void close() throws SQLException { - + closed = true; } @Override public boolean isClosed() throws SQLException { - return false; + return closed; } @Override public DatabaseMetaData getMetaData() throws SQLException { - return null; + checkOpen(); + return new com.clickhouse.jdbc.metadata.DatabaseMetaData(); } @Override public void setReadOnly(boolean readOnly) throws 
SQLException { + checkOpen(); } @Override public boolean isReadOnly() throws SQLException { + checkOpen(); return false; } @Override public void setCatalog(String catalog) throws SQLException { - + checkOpen(); + this.catalog = catalog; } @Override public String getCatalog() throws SQLException { - return ""; + return catalog; } @Override public void setTransactionIsolation(int level) throws SQLException { - + checkOpen(); + if (TRANSACTION_NONE != level) { + throw new SQLFeatureNotSupportedException("setTransactionIsolation not supported"); + } } @Override public int getTransactionIsolation() throws SQLException { - return 0; + checkOpen(); + return TRANSACTION_NONE; } @Override public SQLWarning getWarnings() throws SQLException { + checkOpen(); return null; } @Override public void clearWarnings() throws SQLException { - + checkOpen(); } @Override @@ -118,96 +158,115 @@ public CallableStatement prepareCall(String sql, int resultSetType, int resultSe @Override public Map> getTypeMap() throws SQLException { - return Map.of(); + checkOpen(); + throw new SQLFeatureNotSupportedException("getTypeMap not supported"); } @Override public void setTypeMap(Map> map) throws SQLException { - + checkOpen(); + throw new SQLFeatureNotSupportedException("setTypeMap not supported"); } @Override public void setHoldability(int holdability) throws SQLException { - + checkOpen(); + //TODO: Should this be supported? } @Override public int getHoldability() throws SQLException { - return 0; + checkOpen(); + return ResultSet.HOLD_CURSORS_OVER_COMMIT;//TODO: Check if this is correct } @Override public Savepoint setSavepoint() throws SQLException { - return null; + checkOpen(); + throw new SQLFeatureNotSupportedException("Savepoint not supported"); } @Override public Savepoint setSavepoint(String name) throws SQLException { - return null; + checkOpen(); + throw new SQLFeatureNotSupportedException("Savepoint not supported"); } @Override public void rollback(Savepoint savepoint) throws SQLException { - + checkOpen(); + throw new SQLFeatureNotSupportedException("Commit/Rollback not supported"); } @Override public void releaseSavepoint(Savepoint savepoint) throws SQLException { - + checkOpen(); + throw new SQLFeatureNotSupportedException("Savepoint not supported"); } @Override public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + checkOpen(); return null; } @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + checkOpen(); return null; } @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + checkOpen(); return null; } @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { + checkOpen(); return null; } @Override public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { + checkOpen(); return null; } @Override public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { + checkOpen(); return null; } @Override public Clob createClob() throws SQLException { - return null; + checkOpen(); + throw new SQLFeatureNotSupportedException("Clob not supported"); } @Override public Blob createBlob() throws SQLException { - return null; + checkOpen(); + throw new SQLFeatureNotSupportedException("Blob not supported"); } @Override public 
NClob createNClob() throws SQLException { - return null; + checkOpen(); + throw new SQLFeatureNotSupportedException("NClob not supported"); } @Override public SQLXML createSQLXML() throws SQLException { - return null; + checkOpen(); + throw new SQLFeatureNotSupportedException("SQLXML not supported"); } @Override public boolean isValid(int timeout) throws SQLException { + checkOpen(); return false; } @@ -243,12 +302,14 @@ public Struct createStruct(String typeName, Object[] attributes) throws SQLExcep @Override public void setSchema(String schema) throws SQLException { - + checkOpen(); + this.schema = schema; } @Override public String getSchema() throws SQLException { - return ""; + checkOpen(); + return schema; } @Override @@ -295,4 +356,10 @@ public void setShardingKey(ShardingKey shardingKey, ShardingKey superShardingKey public void setShardingKey(ShardingKey shardingKey) throws SQLException { Connection.super.setShardingKey(shardingKey); } + + private void checkOpen() throws SQLException { + if (isClosed()) { + throw new SQLException("Connection is closed"); + } + } } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java index d19e9cd30..6dcd46127 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java @@ -23,12 +23,12 @@ public class Driver implements java.sql.Driver { @Override public Connection connect(String url, Properties info) throws SQLException { - return null; + return new ConnectionImpl(url, info); } @Override public boolean acceptsURL(String url) throws SQLException { - return false; + return url.startsWith("jdbc:clickhouse:") || url.startsWith("jdbc:ch:"); } @Override diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcConfiguration.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcConfiguration.java new file mode 100644 index 000000000..fa25a5def --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcConfiguration.java @@ -0,0 +1,99 @@ +package com.clickhouse.jdbc.internal; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.AbstractMap; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.stream.Collectors; + +public class JdbcConfiguration { + final String host; + final int port; + final String protocol; + final String database; + final String user; + final String password; + final Map queryParams; + + public String getDatabase() { + return database; + } + + public String getHost() { + return host; + } + + public String getPassword() { + return password; + } + + public int getPort() { + return port; + } + + public String getProtocol() { + return protocol; + } + + public Map getQueryParams() { + return queryParams; + } + + public String getUser() { + return user; + } + + public JdbcConfiguration(String url, Properties info) { + Map urlProperties = parseUrl(url); + this.host = urlProperties.get("host"); + this.port = Integer.parseInt(urlProperties.get("port")); + this.protocol = urlProperties.get("protocol"); + this.database = urlProperties.get("database") == null ? "default" : urlProperties.get("database"); + this.queryParams = urlProperties.get("queryParams") == null ? 
new HashMap<>() : parseQueryParams(urlProperties.get("queryParams")); + + + this.user = info.getProperty("user", "default"); + this.password = info.getProperty("password", ""); + } + + private Map parseUrl(String urlString) { + URL url; + try { + url = new URL(stripUrlPrefix(urlString)); + } catch (MalformedURLException e) { + throw new IllegalArgumentException("URL is malformed."); + } + + Map urlProperties = new HashMap<>(); + urlProperties.put("host", url.getHost()); + urlProperties.put("port", String.valueOf(url.getPort() == -1 ? 8443 : url.getPort())); + urlProperties.put("protocol", url.getProtocol()); + urlProperties.put("database", url.getPath().substring(1)); + urlProperties.put("queryParams", url.getQuery()); + + return urlProperties; + } + private String stripUrlPrefix(String url) { + if (url.startsWith("jdbc:clickhouse:")) { + return url.substring("jdbc:clickhouse:".length()); + } else if (url.startsWith("jdbc:ch:")) { + return url.substring("jdbc:ch:".length()); + } else { + throw new IllegalArgumentException("URL is not supported."); + } + } + private Map parseQueryParams(String queryParams) { + if (queryParams == null || queryParams.isEmpty()) { + return new HashMap<>(0); + } + + return Arrays.stream(queryParams.split("&")) + .map(s -> { + String[] parts = s.split("="); + return new AbstractMap.SimpleImmutableEntry<>(parts[0], parts[1]); + }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } +} From 71a9075d1f9e7ea5765f9a490e42c5d3857789bb Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Wed, 18 Sep 2024 17:47:41 -0400 Subject: [PATCH 10/21] More Impl --- .../com/clickhouse/jdbc/ConnectionImpl.java | 5 +++ .../com/clickhouse/jdbc/DataSourceImpl.java | 44 ++++++++++++------- .../main/java/com/clickhouse/jdbc/Driver.java | 8 +++- .../jdbc/internal/JdbcConfiguration.java | 19 ++++++++ 4 files changed, 57 insertions(+), 19 deletions(-) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java index 69e4dafec..b996caa2f 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -79,6 +79,11 @@ public void rollback() throws SQLException { @Override public void close() throws SQLException { + if (isClosed()) { + return; + } + + client.close(); closed = true; } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DataSourceImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DataSourceImpl.java index da8696fa5..8c78f8dfb 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DataSourceImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/DataSourceImpl.java @@ -7,38 +7,58 @@ import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.ShardingKeyBuilder; +import java.util.Properties; import java.util.logging.Logger; public class DataSourceImpl implements DataSource, JdbcWrapper { + private String url; + private Properties info; + + public void setUrl(String url) { + this.url = url; + } + + private Properties getProperties() { + Properties copy = new Properties(); + copy.putAll(info); + return copy; + } + public void setProperties(Properties info) { + this.info = info; + } @Override public Connection getConnection() throws SQLException { - return null; + return new ConnectionImpl(this.url, this.info); } @Override public Connection getConnection(String username, String password) throws 
SQLException { - return null; + Properties info = getProperties(); + info.setProperty("user", username); + info.setProperty("password", password); + + return new ConnectionImpl(this.url, info); } @Override public PrintWriter getLogWriter() throws SQLException { - return null; + throw new SQLFeatureNotSupportedException("Method not supported"); } @Override public void setLogWriter(PrintWriter out) throws SQLException { - + throw new SQLFeatureNotSupportedException("Method not supported"); } @Override public void setLoginTimeout(int seconds) throws SQLException { - + throw new SQLFeatureNotSupportedException("Method not supported"); } @Override public int getLoginTimeout() throws SQLException { - return 0; + throw new SQLFeatureNotSupportedException("Method not supported"); } @Override @@ -48,21 +68,11 @@ public ConnectionBuilder createConnectionBuilder() throws SQLException { @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException { - return null; + throw new SQLFeatureNotSupportedException("Method not supported"); } @Override public ShardingKeyBuilder createShardingKeyBuilder() throws SQLException { return DataSource.super.createShardingKeyBuilder(); } - - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - return JdbcWrapper.super.isWrapperFor(iface); - } - - @Override - public T unwrap(Class iface) throws SQLException { - return JdbcWrapper.super.unwrap(iface); - } } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java index 6dcd46127..4e1cb2ee9 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java @@ -3,6 +3,7 @@ import java.sql.*; import java.util.*; +import com.clickhouse.jdbc.internal.JdbcConfiguration; import com.clickhouse.logging.Logger; import com.clickhouse.logging.LoggerFactory; @@ -28,12 +29,15 @@ public Connection connect(String url, Properties info) throws SQLException { @Override public boolean acceptsURL(String url) throws SQLException { - return url.startsWith("jdbc:clickhouse:") || url.startsWith("jdbc:ch:"); + return JdbcConfiguration.acceptsURL(url); } @Override public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException { - return new DriverPropertyInfo[0]; + if (!JdbcConfiguration.acceptsURL(url)) { + return new DriverPropertyInfo[0]; + } + return new JdbcConfiguration(url, info).getPropertyInfo(); } @Override diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcConfiguration.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcConfiguration.java index fa25a5def..e11d5f030 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcConfiguration.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcConfiguration.java @@ -2,9 +2,12 @@ import java.net.MalformedURLException; import java.net.URL; +import java.sql.DriverPropertyInfo; import java.util.AbstractMap; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Properties; import java.util.stream.Collectors; @@ -59,6 +62,22 @@ public JdbcConfiguration(String url, Properties info) { this.password = info.getProperty("password", ""); } + public static boolean acceptsURL(String url) { + return url.startsWith("jdbc:clickhouse:") || url.startsWith("jdbc:ch:"); + } + + public DriverPropertyInfo[] 
getPropertyInfo() { + List properties = new ArrayList<>(); + properties.add(new DriverPropertyInfo("host", host)); + properties.add(new DriverPropertyInfo("port", String.valueOf(port))); + properties.add(new DriverPropertyInfo("protocol", protocol)); + properties.add(new DriverPropertyInfo("database", database)); + properties.add(new DriverPropertyInfo("user", user)); + properties.add(new DriverPropertyInfo("password", "*REDACTED*")); + properties.add(new DriverPropertyInfo("queryParams", queryParams.toString())); + return properties.toArray(new DriverPropertyInfo[0]); + } + private Map parseUrl(String urlString) { URL url; try { From 5e704ff3af7325344b033e7e9670801249cbc301 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Wed, 18 Sep 2024 21:37:49 -0400 Subject: [PATCH 11/21] Update ConnectionImpl.java --- .../com/clickhouse/jdbc/ConnectionImpl.java | 50 ++++++++++++------- 1 file changed, 31 insertions(+), 19 deletions(-) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java index b996caa2f..6f88580b6 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -82,7 +82,7 @@ public void close() throws SQLException { if (isClosed()) { return; } - + client.close(); closed = true; } @@ -101,13 +101,15 @@ public DatabaseMetaData getMetaData() throws SQLException { @Override public void setReadOnly(boolean readOnly) throws SQLException { checkOpen(); - + if (!readOnly) { + throw new SQLFeatureNotSupportedException("read-only=false unsupported"); + } } @Override public boolean isReadOnly() throws SQLException { checkOpen(); - return false; + return true; } @Override @@ -148,17 +150,18 @@ public void clearWarnings() throws SQLException { @Override public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { - return null; + return new StatementImpl(this); } @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - return null; + return new PreparedStatementImpl(this, sql); } @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - return null; + checkOpen(); + throw new SQLFeatureNotSupportedException("CallableStatement not supported"); } @Override @@ -212,37 +215,40 @@ public void releaseSavepoint(Savepoint savepoint) throws SQLException { @Override public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { checkOpen(); - return null; + return new StatementImpl(this); } @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { checkOpen(); - return null; + return new PreparedStatementImpl(this, sql); } @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { checkOpen(); - return null; + throw new SQLFeatureNotSupportedException("CallableStatement not supported"); } @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { checkOpen(); - return null; + //TODO: Should this be supported? 
+ throw new SQLFeatureNotSupportedException("prepareStatement(String sql, int autoGeneratedKeys) not supported"); } @Override public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { checkOpen(); - return null; + //TODO: Should this be supported? + throw new SQLFeatureNotSupportedException("prepareStatement(String sql, int[] columnIndexes) not supported"); } @Override public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { checkOpen(); - return null; + //TODO: Should this be supported? + throw new SQLFeatureNotSupportedException("prepareStatement(String sql, String[] columnNames) not supported"); } @Override @@ -277,31 +283,35 @@ public boolean isValid(int timeout) throws SQLException { @Override public void setClientInfo(String name, String value) throws SQLClientInfoException { - + throw new SQLClientInfoException("ClientInfo not supported", null); } @Override public void setClientInfo(Properties properties) throws SQLClientInfoException { - + throw new SQLClientInfoException("ClientInfo not supported", null); } @Override public String getClientInfo(String name) throws SQLException { - return ""; + checkOpen(); + return null; } @Override public Properties getClientInfo() throws SQLException { - return null; + checkOpen(); + return new Properties(); } @Override public Array createArrayOf(String typeName, Object[] elements) throws SQLException { + //TODO: Should this be supported? return null; } @Override public Struct createStruct(String typeName, Object[] attributes) throws SQLException { + //TODO: Should this be supported? return null; } @@ -319,17 +329,19 @@ public String getSchema() throws SQLException { @Override public void abort(Executor executor) throws SQLException { - + throw new SQLFeatureNotSupportedException("abort not supported"); } @Override public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { - + //TODO: Should this be supported? + throw new SQLFeatureNotSupportedException("setNetworkTimeout not supported"); } @Override public int getNetworkTimeout() throws SQLException { - return 0; + //TODO: Should this be supported? 
+ throw new SQLFeatureNotSupportedException("getNetworkTimeout not supported"); } @Override From 6139fcc7939e25ee8dc5e56433ee35b6c346c050 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Wed, 18 Sep 2024 21:41:06 -0400 Subject: [PATCH 12/21] Update ConnectionImpl.java --- .../src/main/java/com/clickhouse/jdbc/ConnectionImpl.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java index 6f88580b6..3cdd74a69 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -47,13 +47,13 @@ public PreparedStatement prepareStatement(String sql) throws SQLException { @Override public CallableStatement prepareCall(String sql) throws SQLException { checkOpen(); - return null; + throw new SQLFeatureNotSupportedException("CallableStatement not supported"); } @Override public String nativeSQL(String sql) throws SQLException { checkOpen(); - return ""; + return sql; } @Override From 1ae582124c9aae36c5d1e7a461b25647f2f8544e Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Sat, 21 Sep 2024 06:37:00 -0400 Subject: [PATCH 13/21] Updating/adding tests for the new implementation --- .../com/clickhouse/jdbc/ConnectionImpl.java | 20 +- .../java/com/clickhouse/jdbc/JdbcWrapper.java | 2 +- .../clickhouse/jdbc/AccessManagementTest.java | 178 -- .../jdbc/ClickHouseConnectionTest.java | 790 ------ .../jdbc/ClickHousePreparedStatementTest.java | 2115 ----------------- .../jdbc/ClickHouseResultSetTest.java | 458 ---- .../jdbc/ClickHouseStatementTest.java | 1496 ------------ .../jdbc/CombinedResultSetTest.java | 253 -- .../com/clickhouse/jdbc/ConnectionTest.java | 268 +++ .../clickhouse/jdbc/DataSourceImplTest.java | 108 - .../com/clickhouse/jdbc/DataSourceTest.java | 68 + .../clickhouse/jdbc/DatabaseMetaDataTest.java | 279 --- .../java/com/clickhouse/jdbc/DriverTest.java | 101 +- .../clickhouse/jdbc/JdbcIntegrationTest.java | 174 +- .../com/clickhouse/jdbc/JdbcIssuesTest.java | 152 -- .../jdbc/JdbcParameterizedQueryTest.java | 63 - .../clickhouse/jdbc/JdbcParseHandlerTest.java | 345 --- .../ClickHouseConnectionImplTest.java | 249 -- .../jdbc/internal/JdbcTransactionTest.java | 98 - .../jdbc/internal/JdbcUrlParserTest.java | 180 -- .../jdbc/parser/ClickHouseSqlParserTest.java | 912 ------- .../jdbc/parser/ClickHouseSqlUtilsTest.java | 67 - .../resources/data_samples/test_sample.orc.gz | Bin 1575 -> 0 bytes .../data_samples/test_sample.parquet.gz | Bin 2019 -> 0 bytes .../resources/sqls/issue-441_with-totals.sql | 13 - .../sqls/issue-555_custom-format.sql | 36 - .../src/test/resources/sqls/with-clause.sql | 16 - 27 files changed, 453 insertions(+), 7988 deletions(-) delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/AccessManagementTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseConnectionTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHousePreparedStatementTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseResultSetTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseStatementTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/CombinedResultSetTest.java create mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ConnectionTest.java delete mode 100644 
clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java create mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DatabaseMetaDataTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIssuesTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcParameterizedQueryTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcParseHandlerTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcTransactionTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcUrlParserTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlParserTest.java delete mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtilsTest.java delete mode 100644 clickhouse-jdbc/src/test/resources/data_samples/test_sample.orc.gz delete mode 100644 clickhouse-jdbc/src/test/resources/data_samples/test_sample.parquet.gz delete mode 100644 clickhouse-jdbc/src/test/resources/sqls/issue-441_with-totals.sql delete mode 100644 clickhouse-jdbc/src/test/resources/sqls/issue-555_custom-format.sql delete mode 100644 clickhouse-jdbc/src/test/resources/sqls/with-clause.sql diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java index 3cdd74a69..711f73fb6 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -150,12 +150,15 @@ public void clearWarnings() throws SQLException { @Override public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { - return new StatementImpl(this); + checkOpen(); + //TODO: Should this be a silent ignore? + throw new SQLFeatureNotSupportedException("Statement with resultSetType and resultSetConcurrency override not supported"); } @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { - return new PreparedStatementImpl(this, sql); + checkOpen(); + throw new SQLFeatureNotSupportedException("PreparedStatement with resultSetType and resultSetConcurrency override not supported"); } @Override @@ -215,13 +218,15 @@ public void releaseSavepoint(Savepoint savepoint) throws SQLException { @Override public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { checkOpen(); - return new StatementImpl(this); + //TODO: Should this be a silent ignore? + throw new SQLFeatureNotSupportedException("Statement with resultSetType, resultSetConcurrency, and resultSetHoldability override not supported"); } @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { checkOpen(); - return new PreparedStatementImpl(this, sql); + //TODO: Should this be a silent ignore? 
+ throw new SQLFeatureNotSupportedException("PreparedStatement with resultSetType, resultSetConcurrency, and resultSetHoldability override not supported"); } @Override @@ -278,7 +283,12 @@ public SQLXML createSQLXML() throws SQLException { @Override public boolean isValid(int timeout) throws SQLException { checkOpen(); - return false; + if (timeout < 0) { + throw new SQLException("Timeout must be >= 0"); + } + + //TODO: This is a placeholder implementation + return true; } @Override diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java index d719ca8c7..84eb90bd8 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcWrapper.java @@ -13,6 +13,6 @@ default T unwrap(Class iface) throws SQLException { if (isWrapperFor(iface)) { iface.cast(this); } - throw SqlExceptionUtils.unsupportedError("Cannot unwrap to " + iface.getName()); + throw new SQLException("Cannot unwrap to " + iface.getName()); } } diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/AccessManagementTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/AccessManagementTest.java deleted file mode 100644 index d8e7ad933..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/AccessManagementTest.java +++ /dev/null @@ -1,178 +0,0 @@ -package com.clickhouse.jdbc; - -import com.clickhouse.client.http.config.ClickHouseHttpOption; -import com.clickhouse.client.http.config.HttpConnectionProvider; -import com.clickhouse.data.ClickHouseVersion; -import org.testng.Assert; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Arrays; -import java.util.Properties; - -public class AccessManagementTest extends JdbcIntegrationTest { - - @Test(groups = "integration", dataProvider = "setRolesArgsForTestSetRole") - public void testSetRoleDifferentConnections(String[] roles, String setRoleExpr, String[] activeRoles, - String connectionProvider) throws SQLException { - if (isCloud()) return; //TODO: testSetRoleDifferentConnections - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - - String url = String.format("jdbc:ch:%s", getEndpointString()); - Properties properties = new Properties(); - properties.setProperty(ClickHouseHttpOption.REMEMBER_LAST_SET_ROLES.getKey(), "true"); - properties.setProperty(ClickHouseHttpOption.CONNECTION_PROVIDER.getKey(), connectionProvider); - DataSourceImpl dataSource = new DataSourceImpl(url, properties); - String serverVersion = getServerVersion(dataSource.getConnection()); - if (ClickHouseVersion.of(serverVersion).check("(,24.3]")) { - System.out.println("Test is skipped: feature is supported since 24.4"); - return; - } - - try (Connection connection = dataSource.getConnection("access_dba", "123")) { - Statement st = connection.createStatement(); - - st.execute("DROP ROLE IF EXISTS " + String.join(", ", roles)); - st.execute("DROP USER IF EXISTS some_user"); - st.execute("CREATE ROLE " + String.join(", ", roles)); - st.execute("CREATE USER some_user IDENTIFIED WITH no_password"); - st.execute("GRANT " + String.join(", ", roles) + " TO some_user"); - st.execute("SET DEFAULT ROLE NONE TO some_user"); - } catch (Exception e) { - Assert.fail("Failed to prepare for the test", e); - } - - try (Connection connection = 
dataSource.getConnection("some_user", "")) { - Statement st = connection.createStatement(); - st.execute(setRoleExpr); - assertRolesEquals(connection, activeRoles); - // Check roles are reset - st.execute("SET ROLE NONE"); - assertRolesEquals(connection); - } catch (Exception e) { - Assert.fail("Failed", e); - } - } - - @DataProvider(name = "setRolesArgsForTestSetRole") - private static Object[][] setRolesArgsForTestSetRole() { - return new Object[][]{ - {new String[]{"ROL1", "ROL2"}, "set role ROL2", new String[]{"ROL2"}, - HttpConnectionProvider.HTTP_URL_CONNECTION.name()}, - {new String[]{"ROL1", "ROL2"}, "set role ROL2", new String[]{"ROL2"}, - HttpConnectionProvider.APACHE_HTTP_CLIENT.name()}, - {new String[]{"ROL1", "ROL2"}, "set role ROL2, ROL1", new String[]{"ROL1", "ROL2"}, - HttpConnectionProvider.APACHE_HTTP_CLIENT.name()}, - {new String[]{"ROL1", "\"ROL2,☺\""}, "set role \"ROL2,☺\", ROL1", new String[]{"ROL2,☺", "ROL1"}, - HttpConnectionProvider.APACHE_HTTP_CLIENT.name()}, - {new String[]{"ROL1", "ROL2"}, "set role ROL2 , ROL1 ", new String[]{"ROL2", "ROL1"}, - HttpConnectionProvider.APACHE_HTTP_CLIENT.name()}, - }; - } - - private void assertRolesEquals(Connection connection, String... expected) throws SQLException { - try { - Statement st = connection.createStatement(); - ResultSet resultSet = st.executeQuery("select currentRoles()"); - - Assert.assertTrue(resultSet.next()); - String[] roles = (String[]) resultSet.getArray(1).getArray(); - Arrays.sort(roles); - Arrays.sort(expected); - Assert.assertEquals(roles, expected, - "Memorized roles: " + Arrays.toString(roles) + " != Expected: " + Arrays.toString(expected)); - System.out.println("Roles: " + Arrays.toString(roles)); - } catch (Exception e) { - Assert.fail("Failed", e); - } - } - - private String getServerVersion(Connection connection) { - try (Statement stmt = connection.createStatement()) { - ResultSet rs = stmt.executeQuery("SELECT version()"); - if (rs.next()) { - return rs.getString(1); - } - } catch (SQLException e) { - Assert.fail("Failed to get server version", e); - } - return null; - } - - @Test - public void testSetRolesAccessingTableRows() throws SQLException { - if (isCloud()) return; //TODO: testSetRolesAccessingTableRows - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - String url = String.format("jdbc:ch:%s", getEndpointString()); - Properties properties = new Properties(); - properties.setProperty(ClickHouseHttpOption.REMEMBER_LAST_SET_ROLES.getKey(), "true"); - DataSourceImpl dataSource = new DataSourceImpl(url, properties); - String serverVersion = getServerVersion(dataSource.getConnection()); - if (ClickHouseVersion.of(serverVersion).check("(,24.3]")) { - System.out.println("Test is skipped: feature is supported since 24.4"); - return; - } - - try (Connection connection = dataSource.getConnection("access_dba", "123")) { - Statement st = connection.createStatement(); - st.execute("DROP ROLE IF EXISTS row_a"); - st.execute("DROP USER IF EXISTS some_user"); - - st.execute("CREATE ROLE row_a, row_b"); - st.execute("CREATE USER some_user IDENTIFIED WITH no_password"); - st.execute("GRANT row_a, row_b TO some_user"); - - st.execute("CREATE OR REPLACE TABLE test_table (`s` String ) ENGINE = MergeTree ORDER BY tuple();"); - st.execute("INSERT INTO test_table VALUES ('a'), ('b')"); - - st.execute("GRANT SELECT ON test_table TO some_user"); - st.execute("CREATE ROW POLICY OR REPLACE policy_row_b ON test_table FOR SELECT USING s = 'b' TO row_b;"); - st.execute("CREATE ROW 
POLICY OR REPLACE policy_row_a ON test_table FOR SELECT USING s = 'a' TO row_a;"); - } catch (Exception e) { - Assert.fail("Failed on setup", e); - } - - try (Connection connection = dataSource.getConnection("some_user", "")) { - Statement st = connection.createStatement(); - ResultSet rs = st.executeQuery("SELECT * FROM test_table"); - Assert.assertTrue(rs.next()); - Assert.assertTrue(rs.next()); - - st.execute("SET ROLE row_a"); - rs = st.executeQuery("SELECT * FROM test_table"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "a"); - Assert.assertFalse(rs.next()); - - st.execute("SET ROLE row_b"); - rs = st.executeQuery("SELECT * FROM test_table"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "b"); - Assert.assertFalse(rs.next()); - - - st.execute("SET ROLE row_a, row_b"); - rs = st.executeQuery("SELECT * FROM test_table ORDER BY s"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "a"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "b"); - Assert.assertFalse(rs.next()); - - st.execute("SET ROLE row_b"); - rs = st.executeQuery("SELECT * FROM test_table"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "b"); - Assert.assertFalse(rs.next()); - - st.execute("SET ROLE NONE"); - rs = st.executeQuery("SELECT * FROM test_table"); - Assert.assertTrue(rs.next()); - Assert.assertTrue(rs.next()); - } catch (Exception e) { - Assert.fail("Failed to check roles", e); - } - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseConnectionTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseConnectionTest.java deleted file mode 100644 index 251814247..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseConnectionTest.java +++ /dev/null @@ -1,790 +0,0 @@ -package com.clickhouse.jdbc; - -import java.sql.Array; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Properties; -import java.util.UUID; - -import com.clickhouse.client.ClickHouseProtocol; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.data.ClickHouseCompression; -import com.clickhouse.data.ClickHouseUtils; -import com.clickhouse.data.value.UnsignedByte; - -import org.testng.Assert; -import org.testng.SkipException; -import org.testng.annotations.Test; - -public class ClickHouseConnectionTest extends JdbcIntegrationTest { - @Override - public ClickHouseConnection newConnection(Properties properties) throws SQLException { - return newDataSource(properties).getConnection(); - } - - @Test(groups = "integration") - public void testCentralizedConfiguration() throws SQLException { - if (isCloud()) return; //TODO: testCentralizedConfiguration - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - - Properties props = new Properties(); - props.setProperty("custom_settings", "max_result_rows=1"); - try (ClickHouseConnection conn = newConnection(props); Statement stmt = conn.createStatement()) { - // gRPC stopped working since 23.3 with below error: - // SQL Code: 649, DB::Exception: Transaction Control Language queries are - // allowed only inside session: while starting a transaction with - // 'implicit_transaction' - if (stmt.unwrap(ClickHouseRequest.class).getServer().getProtocol() == ClickHouseProtocol.GRPC) { - throw new 
SkipException("Skip the test as transaction is supported since 22.7"); - } - - Assert.assertEquals(conn.getConfig().getResponseCompressAlgorithm(), ClickHouseCompression.LZ4); - Assert.assertTrue(conn.getJdbcConfig().isAutoCommit()); - Assert.assertFalse(conn.getJdbcConfig().isTransactionSupported()); - Assert.assertThrows(SQLException.class, - () -> conn.createStatement().executeQuery("select * from numbers(2)")); - } - - props.setProperty("user", "poorman1"); - props.setProperty("password", ""); - props.setProperty("autoCommit", "false"); - props.setProperty("compress_algorithm", "lz4"); - props.setProperty("transactionSupport", "false"); - try (ClickHouseConnection conn = newConnection(props)) { - Assert.assertEquals(conn.getConfig().getResponseCompressAlgorithm(), ClickHouseCompression.GZIP); - Assert.assertFalse(conn.getJdbcConfig().isAutoCommit()); - Assert.assertTrue(conn.getJdbcConfig().isTransactionSupported()); - Assert.assertThrows(SQLException.class, - () -> conn.createStatement().executeQuery("select * from numbers(2)")); - conn.rollback(); - } catch (SQLException e) { - if (e.getErrorCode() != 649) { - Assert.fail(e.getMessage()); - } - } - - props.setProperty("user", "poorman2"); - try (ClickHouseConnection conn = newConnection(props)) { - Assert.assertEquals(conn.getConfig().getResponseCompressAlgorithm(), ClickHouseCompression.GZIP); - Assert.assertTrue(conn.getJdbcConfig().isAutoCommit()); - Assert.assertTrue(conn.getJdbcConfig().isTransactionSupported()); - try (Statement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery("select * from numbers(2)")) { - Assert.assertTrue(rs.next(), "Should have at least one row"); - Assert.assertTrue(rs.next(), "Should have two rows"); - Assert.assertFalse(rs.next(), "Should have only two rows"); - } - } - } - - @Test(groups = "integration") - public void testCreateArray() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties())) { - Assert.assertThrows(SQLException.class, () -> conn.createArrayOf("Int8", null)); - Assert.assertThrows(SQLException.class, () -> conn.createArrayOf("UInt8", null)); - - Array array = conn.createArrayOf("Nullable(Int8)", null); - Assert.assertEquals(array.getArray(), new Byte[0]); - array = conn.createArrayOf("Nullable(UInt8)", null); - Assert.assertEquals(array.getArray(), new UnsignedByte[0]); - array = conn.createArrayOf("Array(Int8)", null); - Assert.assertEquals(array.getArray(), new byte[0][]); - array = conn.createArrayOf("Array(UInt8)", null); - Assert.assertEquals(array.getArray(), new byte[0][]); - array = conn.createArrayOf("Array(Nullable(Int8))", null); - Assert.assertEquals(array.getArray(), new Byte[0][]); - array = conn.createArrayOf("Array(Nullable(UInt8))", null); - Assert.assertEquals(array.getArray(), new UnsignedByte[0][]); - - array = conn.createArrayOf("Int8", new Byte[] { -1, 0, 1 }); - Assert.assertEquals(array.getArray(), new byte[] { -1, 0, 1 }); - array = conn.createArrayOf("UInt8", new Byte[] { -1, 0, 1 }); - Assert.assertEquals(array.getArray(), new byte[] { -1, 0, 1 }); - - array = conn.createArrayOf("Nullable(Int8)", new Byte[] { -1, null, 1 }); - Assert.assertEquals(array.getArray(), new Byte[] { -1, null, 1 }); - array = conn.createArrayOf("Nullable(UInt8)", new Byte[] { -1, null, 1 }); - Assert.assertEquals(array.getArray(), new Byte[] { -1, null, 1 }); - array = conn.createArrayOf("Nullable(UInt8)", - new UnsignedByte[] { UnsignedByte.MAX_VALUE, null, UnsignedByte.ONE }); - Assert.assertEquals(array.getArray(), - new 
UnsignedByte[] { UnsignedByte.MAX_VALUE, null, UnsignedByte.ONE }); - - array = conn.createArrayOf("Array(Int8)", new byte[][] { { -1, 0, 1 } }); - Assert.assertEquals(array.getArray(), new byte[][] { { -1, 0, 1 } }); - array = conn.createArrayOf("Array(UInt8)", new Byte[][] { { -1, 0, 1 } }); - Assert.assertEquals(array.getArray(), new Byte[][] { { -1, 0, 1 } }); - - // invalid but works - array = conn.createArrayOf("Array(Int8)", new Byte[] { -1, 0, 1 }); - Assert.assertEquals(array.getArray(), new Byte[] { -1, 0, 1 }); - array = conn.createArrayOf("Array(UInt8)", new byte[][] { { -1, 0, 1 } }); - Assert.assertEquals(array.getArray(), new byte[][] { { -1, 0, 1 } }); - } - } - - @Test(groups = "integration") - public void testAutoCommitMode() throws SQLException { - if (isCloud()) return; //TODO: testAutoCommitMode - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - - Properties props = new Properties(); - props.setProperty("transactionSupport", "true"); - - for (int i = 0; i < 10; i++) { - try (ClickHouseConnection conn = newConnection(props); Statement stmt = conn.createStatement()) { - if (!conn.getServerVersion().check("[22.7,)") - // gRPC stopped working since 23.3 with below error: - // SQL Code: 649, DB::Exception: Transaction Control Language queries are - // allowed only inside session: while starting a transaction with - // 'implicit_transaction' - || stmt.unwrap(ClickHouseRequest.class).getServer().getProtocol() == ClickHouseProtocol.GRPC) { - throw new SkipException("Skip the test as transaction is supported since 22.7"); - } - stmt.execute("select 1, throwIf(" + i + " % 3 = 0)"); - stmt.executeQuery("select number, toDateTime(number), toString(number), throwIf(" + i + " % 5 = 0)" - + " from numbers(100000)"); - } catch (SQLException e) { - if (i % 3 == 0 || i % 5 == 0) { - Assert.assertEquals(e.getErrorCode(), 395, "Expected error code 395 but we got: " + e.getMessage()); - } else { - Assert.fail("Should not have exception"); - } - } - } - } - - @Test(groups = "integration") - public void testNonExistDatabase() throws SQLException { - String database = UUID.randomUUID().toString(); - Properties props = new Properties(); - props.setProperty(JdbcConfig.PROP_DATABASE_TERM, JdbcConfig.TERM_SCHEMA); - props.setProperty(ClickHouseClientOption.DATABASE.getKey(), database); - SQLException exp = null; - try (ClickHouseConnection conn = newConnection(props)) { - // do nothing - } - Assert.assertNull(exp, "Should not have SQLException even the database does not exist"); - - try (ClickHouseConnection conn = newConnection(props)) { - Assert.assertEquals(conn.getSchema(), database); - try (Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("select 1")) { - Assert.fail("Should have SQLException"); - } - } catch (SQLException e) { - exp = e; - } - Assert.assertNotNull(exp, "Should have SQLException since the database does not exist"); - Assert.assertEquals(exp.getErrorCode(), 81, "Expected error code 81 but we got: " + exp.getMessage()); - - props.setProperty(JdbcConfig.PROP_CREATE_DATABASE, Boolean.TRUE.toString()); - try (ClickHouseConnection conn = newConnection(props)) { - exp = null; - } - Assert.assertNull(exp, "Should not have SQLException because database will be created automatically"); - - props.setProperty(JdbcConfig.PROP_CREATE_DATABASE, Boolean.FALSE.toString()); - try (ClickHouseConnection conn = newConnection(props)) { - Assert.assertEquals(conn.getSchema(), database); - try (Statement stmt = conn.createStatement(); 
- ResultSet rs = stmt.executeQuery( - ClickHouseUtils.format("select * from system.databases where name='%s'", database))) { - Assert.assertTrue(rs.next(), "Should have at least one record in system.databases table"); - Assert.assertEquals(rs.getString("name"), database); - Assert.assertFalse(rs.next(), "Should have only one record in system.databases table"); - exp = new SQLException(); - } - } - Assert.assertNotNull(exp, "Should not have SQLException because the database has been created"); - } - - @Test(groups = "integration") - public void testReadOnly() throws SQLException { - if (isCloud()) return; //TODO: testReadOnly - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - Properties props = new Properties(); - props.setProperty("user", "dba"); - props.setProperty("password", "dba"); - try (Connection conn = newConnection(props); Statement stmt = conn.createStatement()) { - Assert.assertFalse(conn.isReadOnly(), "Connection should NOT be readonly"); - Assert.assertFalse(stmt.execute( - "drop table if exists test_readonly; drop user if exists readonly1; drop user if exists readonly2; " - + "create table test_readonly(id String)engine=Memory; " - + "create user readonly1 IDENTIFIED WITH no_password SETTINGS readonly=1; " - + "create user readonly2 IDENTIFIED WITH no_password SETTINGS readonly=2; " - + "grant insert on test_readonly TO readonly1, readonly2")); - conn.setReadOnly(false); - Assert.assertFalse(conn.isReadOnly(), "Connection should NOT be readonly"); - conn.setReadOnly(true); - Assert.assertTrue(conn.isReadOnly(), "Connection should be readonly"); - - try (Statement s = conn.createStatement()) { - SQLException exp = null; - try { - s.execute("insert into test_readonly values('readonly1')"); - } catch (SQLException e) { - exp = e; - } - Assert.assertNotNull(exp, "Should fail with SQL exception"); - Assert.assertEquals(exp.getErrorCode(), 164, "Expected error code 164 but we got: " + exp.getMessage()); - } - - conn.setReadOnly(false); - Assert.assertFalse(conn.isReadOnly(), "Connection should NOT be readonly"); - - try (Statement s = conn.createStatement()) { - Assert.assertFalse(s.execute("insert into test_readonly values('readonly1')")); - } - } - - props.clear(); - props.setProperty("user", "readonly1"); - try (Connection conn = newConnection(props); Statement stmt = conn.createStatement()) { - Assert.assertTrue(conn.isReadOnly(), "Connection should be readonly"); - conn.setReadOnly(true); - Assert.assertTrue(conn.isReadOnly(), "Connection should be readonly"); - SQLException exp = null; - try { - stmt.execute("insert into test_readonly values('readonly1')"); - } catch (SQLException e) { - exp = e; - } - Assert.assertNotNull(exp, "Should fail with SQL exception"); - Assert.assertEquals(exp.getErrorCode(), 164, "Expected error code 164 but we got: " + exp.getMessage()); - - exp = null; - try { - conn.setReadOnly(true); - stmt.execute("set max_result_rows=5; select 1"); - } catch (SQLException e) { - exp = e; - } - Assert.assertNotNull(exp, "Should fail with SQL exception"); - Assert.assertEquals(exp.getErrorCode(), 164, "Expected error code 164 but we got: " + exp.getMessage()); - } - - props.setProperty("user", "readonly2"); - try (Connection conn = newConnection(props); Statement stmt = conn.createStatement()) { - Assert.assertTrue(conn.isReadOnly(), "Connection should be readonly"); - Assert.assertTrue(stmt.execute("set max_result_rows=5; select 1")); - - Assert.assertThrows(SQLException.class, () -> conn.setReadOnly(false)); - 
Assert.assertTrue(conn.isReadOnly(), "Connection should be readonly"); - - SQLException exp = null; - try (Statement s = conn.createStatement()) { - Assert.assertFalse(s.execute("insert into test_readonly values('readonly2')")); - } catch (SQLException e) { - exp = e; - } - Assert.assertNotNull(exp, "Should fail with SQL exception"); - Assert.assertEquals(exp.getErrorCode(), 164, "Expected error code 164 but we got: " + exp.getMessage()); - - conn.setReadOnly(true); - Assert.assertTrue(conn.isReadOnly(), "Connection should be readonly"); - } - - props.setProperty(ClickHouseClientOption.SERVER_TIME_ZONE.getKey(), "UTC"); - props.setProperty(ClickHouseClientOption.SERVER_VERSION.getKey(), "21.8"); - try (Connection conn = newConnection(props); Statement stmt = conn.createStatement()) { - Assert.assertFalse(conn.isReadOnly(), "Connection should NOT be readonly"); - Assert.assertTrue(stmt.execute("set max_result_rows=5; select 1")); - - conn.setReadOnly(true); - Assert.assertTrue(conn.isReadOnly(), "Connection should be readonly"); - conn.setReadOnly(false); - Assert.assertFalse(conn.isReadOnly(), "Connection should NOT be readonly"); - - SQLException exp = null; - try (Statement s = conn.createStatement()) { - Assert.assertFalse(s.execute("insert into test_readonly values('readonly2')")); - } catch (SQLException e) { - exp = e; - } - Assert.assertNotNull(exp, "Should fail with SQL exception"); - Assert.assertEquals(exp.getErrorCode(), 164, "Expected error code 164 but we got: " + exp.getMessage()); - } - } - - @Test(groups = "integration") - public void testAutoCommit() throws SQLException { - if (isCloud()) return; //TODO: testAutoCommit - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - - Properties props = new Properties(); - props.setProperty("transactionSupport", "true"); - String tableName = "test_jdbc_tx_auto_commit"; - try (ClickHouseConnection c = newConnection(props); Statement s = c.createStatement()) { - if (!c.getServerVersion().check("[22.7,)") - // gRPC stopped working since 23.3 with below error: - // SQL Code: 649, DB::Exception: Transaction Control Language queries are - // allowed only inside session: while starting a transaction with - // 'implicit_transaction' - || s.unwrap(ClickHouseRequest.class).getServer().getProtocol() == ClickHouseProtocol.GRPC) { - throw new SkipException("Skip the test as transaction is supported since 22.7"); - } - s.execute("drop table if exists " + tableName + "; " - + "create table " + tableName + "(id UInt64) engine=MergeTree order by id"); - } - - try (ClickHouseConnection conn = newConnection(); - ClickHouseConnection txConn = newConnection(props); - Statement stmt = conn.createStatement(); - Statement txStmt = txConn.createStatement(); - PreparedStatement ps = conn.prepareStatement("insert into " + tableName); - PreparedStatement txPs = txConn.prepareStatement("insert into " + tableName)) { - Assert.assertTrue(conn.getAutoCommit()); - Assert.assertTrue(txConn.getAutoCommit()); - Assert.assertFalse(conn.isTransactionSupported()); - Assert.assertTrue(txConn.isTransactionSupported()); - Assert.assertFalse(conn.isImplicitTransactionSupported()); - if (txConn.getServerVersion().check("[22.7,)")) { - Assert.assertTrue(txConn.isImplicitTransactionSupported(), - "Implicit transaction is supported since 22.7"); - } else { - Assert.assertFalse(txConn.isImplicitTransactionSupported(), - "Implicit transaction is NOT supported before 22.7"); - } - - checkRowCount(stmt, "select 1", 1); - checkRowCount(txStmt, 
"select 1", 1); - - txStmt.execute("drop table if exists " + tableName + "; " - + "create table " + tableName + "(id UInt64) engine=MergeTree order by id"); - checkRowCount(stmt, tableName, 0); - checkRowCount(txStmt, tableName, 0); - - stmt.executeUpdate("insert into " + tableName + " values(1)"); - checkRowCount(stmt, tableName, 1); - checkRowCount(txStmt, tableName, 1); - - txStmt.executeUpdate("insert into " + tableName + " values(2)"); - checkRowCount(stmt, tableName, 2); - checkRowCount(txStmt, tableName, 2); - - try (Connection c = newConnection(props); Statement s = c.createStatement()) { - c.setAutoCommit(false); - s.executeUpdate("insert into " + tableName + " values(-1)"); - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 3); - c.rollback(); - checkRowCount(stmt, tableName, 2); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 2); - } - checkRowCount(stmt, tableName, 2); - checkRowCount(txStmt, tableName, 2); - - try (Connection c = newConnection(props); Statement s = c.createStatement()) { - c.setAutoCommit(false); - s.executeUpdate("insert into " + tableName + " values(-2)"); - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 3); - } - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 3); - - ps.setInt(1, 3); - ps.addBatch(); - ps.setInt(1, 4); - ps.addBatch(); - ps.executeBatch(); - checkRowCount(stmt, tableName, 5); - checkRowCount(txStmt, tableName, 5); - - txPs.setInt(1, 5); - txPs.addBatch(); - txPs.setInt(1, 6); - txPs.addBatch(); - txPs.executeBatch(); - checkRowCount(stmt, tableName, 7); - checkRowCount(txStmt, tableName, 7); - } - } - - @Test(groups = "integration") - public void testManualTxApi() throws SQLException { - if (isCloud()) return; //TODO: testManualTxApi - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - - Properties props = new Properties(); - props.setProperty("autoCommit", "false"); - Properties txProps = new Properties(); - txProps.putAll(props); - txProps.setProperty("transactionSupport", "true"); - String tableName = "test_jdbc_manual_tx_api"; - try (ClickHouseConnection c = newConnection(txProps); Statement s = c.createStatement()) { - if (!c.getServerVersion().check("[22.7,)") - // gRPC stopped working since 23.3 with below error: - // SQL Code: 649, DB::Exception: Transaction Control Language queries are - // allowed only inside session: while starting a transaction with - // 'implicit_transaction' - || s.unwrap(ClickHouseRequest.class).getServer().getProtocol() == ClickHouseProtocol.GRPC) { - throw new SkipException("Skip the test as transaction is supported since 22.7"); - } - s.execute("drop table if exists " + tableName + "; " - + "create table " + tableName + "(id UInt64, value String) engine=MergeTree order by id"); - } - - try (ClickHouseConnection conn = newConnection(props); - ClickHouseConnection txConn = newConnection(txProps); - Statement stmt = conn.createStatement(); - Statement txStmt = txConn.createStatement(); - PreparedStatement ps = conn.prepareStatement("insert into " + tableName); - PreparedStatement txPs = txConn.prepareStatement("insert into " + tableName)) { - Assert.assertFalse(conn.getAutoCommit()); - Assert.assertFalse(txConn.getAutoCommit()); - Assert.assertFalse(conn.isTransactionSupported()); - Assert.assertTrue(txConn.isTransactionSupported()); - Assert.assertFalse(conn.isImplicitTransactionSupported()); - if 
(txConn.getServerVersion().check("[22.7,)")) { - Assert.assertTrue(txConn.isImplicitTransactionSupported(), - "Implicit transaction is supported since 22.7"); - } else { - Assert.assertFalse(txConn.isImplicitTransactionSupported(), - "Implicit transaction is NOT supported before 22.7"); - } - - Assert.assertThrows(SQLException.class, () -> conn.begin()); - Assert.assertThrows(SQLException.class, () -> txConn.begin()); - - checkRowCount(stmt, "select 1", 1); - checkRowCount(txStmt, "select 1", 1); - Assert.assertThrows(SQLException.class, () -> txConn.begin()); - txConn.commit(); - - txConn.begin(); - checkRowCount(stmt, "select 1", 1); - checkRowCount(txStmt, "select 1", 1); - txConn.rollback(); - - checkRowCount(stmt, tableName, 0); - checkRowCount(txStmt, tableName, 0); - - txStmt.executeUpdate("insert into " + tableName + " values(0, '0')"); - checkRowCount(stmt, tableName, 1); - checkRowCount(txStmt, tableName, 1); - txConn.rollback(); - checkRowCount(stmt, tableName, 0); - checkRowCount(txStmt, tableName, 0); - - stmt.executeUpdate("insert into " + tableName + " values(1, 'a')"); - checkRowCount(stmt, tableName, 1); - checkRowCount(txStmt, tableName, 1); - - txStmt.executeUpdate("insert into " + tableName + " values(2, 'b')"); - checkRowCount(stmt, tableName, 2); - checkRowCount(txStmt, tableName, 2); - - try (Connection c = newConnection(txProps); Statement s = c.createStatement()) { - s.executeUpdate("insert into " + tableName + " values(-1, '-1')"); - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 2); - c.rollback(); - checkRowCount(stmt, tableName, 2); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 1); - } - checkRowCount(stmt, tableName, 2); - checkRowCount(txStmt, tableName, 2); - - try (Connection c = newConnection(txProps); Statement s = c.createStatement()) { - s.executeUpdate("insert into " + tableName + " values(3, 'c')"); - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 2); - txConn.commit(); - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 2); - } - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - txConn.commit(); - checkRowCount(txStmt, tableName, 3); - - txConn.setAutoCommit(true); - Assert.assertTrue(txConn.getAutoCommit()); - try (Statement s = txConn.createStatement()) { - s.executeUpdate("insert into " + tableName + " values(4, 'd')"); - checkRowCount(stmt, tableName, 4); - checkRowCount(txStmt, tableName, 4); - checkRowCount(s, tableName, 4); - } - - try (Statement s = txConn.createStatement()) { - checkRowCount(stmt, tableName, 4); - checkRowCount(txStmt, tableName, 4); - checkRowCount(s, tableName, 4); - } - } - } - - @Test(groups = "integration") - public void testManualTxTcl() throws SQLException { - if (isCloud()) return; //TODO: testManualTxTcl - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - - Properties props = new Properties(); - props.setProperty("autoCommit", "false"); - Properties txProps = new Properties(); - txProps.putAll(props); - txProps.setProperty("transactionSupport", "true"); - String tableName = "test_jdbc_manual_tx_tcl"; - try (ClickHouseConnection c = newConnection(txProps); Statement s = c.createStatement()) { - if (!c.getServerVersion().check("[22.7,)") - // gRPC stopped working since 23.3 with below error: - // SQL Code: 649, DB::Exception: Transaction Control Language queries are - 
// allowed only inside session: while starting a transaction with - // 'implicit_transaction' - || s.unwrap(ClickHouseRequest.class).getServer().getProtocol() == ClickHouseProtocol.GRPC) { - throw new SkipException("Skip the test as transaction is supported since 22.7"); - } - s.execute("drop table if exists " + tableName + "; " - + "create table " + tableName + "(id UInt64, value String) engine=MergeTree order by id"); - } - - try (ClickHouseConnection conn = newConnection(props); - ClickHouseConnection txConn = newConnection(txProps); - Statement stmt = conn.createStatement(); - Statement txStmt = txConn.createStatement(); - PreparedStatement ps = conn.prepareStatement("insert into " + tableName); - PreparedStatement txPs = txConn.prepareStatement("insert into " + tableName)) { - Assert.assertFalse(conn.getAutoCommit()); - Assert.assertFalse(txConn.getAutoCommit()); - Assert.assertFalse(conn.isTransactionSupported()); - Assert.assertTrue(txConn.isTransactionSupported()); - Assert.assertFalse(conn.isImplicitTransactionSupported()); - if (txConn.getServerVersion().check("[22.7,)")) { - Assert.assertTrue(txConn.isImplicitTransactionSupported(), - "Implicit transaction is supported since 22.7"); - } else { - Assert.assertFalse(txConn.isImplicitTransactionSupported(), - "Implicit transaction is NOT supported before 22.7"); - } - - Assert.assertThrows(SQLException.class, () -> stmt.execute("begin transaction")); - Assert.assertThrows(SQLException.class, () -> txStmt.execute("begin transaction")); - - checkRowCount(stmt, "select 1", 1); - checkRowCount(txStmt, "select 1", 1); - try (Statement s = conn.createStatement()) { - Assert.assertEquals(s.executeUpdate("commit"), 0); - } - try (Statement s = txConn.createStatement()) { - Assert.assertEquals(s.executeUpdate("commit"), 0); - } - - Assert.assertEquals(stmt.executeUpdate("begin transaction"), 0); - Assert.assertEquals(txStmt.executeUpdate("begin transaction"), 0); - checkRowCount(stmt, "begin transaction; select 1", 1); - checkRowCount(txStmt, "begin transaction; select 1", 1); - try (Statement s = txConn.createStatement()) { - Assert.assertEquals(s.executeUpdate("rollback"), 0); - } - - checkRowCount(stmt, tableName, 0); - checkRowCount(txStmt, tableName, 0); - - txStmt.executeUpdate("insert into " + tableName + " values(0, '0')"); - checkRowCount(stmt, tableName, 1); - checkRowCount(txStmt, tableName, 1); - try (Statement s = txConn.createStatement()) { - Assert.assertEquals(s.executeUpdate("rollback"), 0); - } - checkRowCount(stmt, tableName, 0); - checkRowCount(txStmt, tableName, 0); - - stmt.executeUpdate("insert into " + tableName + " values(1, 'a')"); - checkRowCount(stmt, tableName, 1); - checkRowCount(txStmt, tableName, 1); - - txStmt.executeUpdate("insert into " + tableName + " values(2, 'b')"); - checkRowCount(stmt, tableName, 2); - checkRowCount(txStmt, tableName, 2); - - try (Connection c = newConnection(txProps); Statement s = c.createStatement()) { - s.executeUpdate("insert into " + tableName + " values(-1, '-1')"); - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 2); - try (Statement ss = c.createStatement()) { - Assert.assertEquals(ss.executeUpdate("rollback"), 0); - } - checkRowCount(stmt, tableName, 2); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 1); - } - checkRowCount(stmt, tableName, 2); - checkRowCount(txStmt, tableName, 2); - - try (Connection c = newConnection(txProps); Statement s = c.createStatement()) { - s.executeUpdate("insert into 
" + tableName + " values(3, 'c')"); - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 2); - try (Statement ss = txConn.createStatement()) { - Assert.assertEquals(ss.executeUpdate("commit"), 0); - } - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - checkRowCount(s, tableName, 2); - } - checkRowCount(stmt, tableName, 3); - checkRowCount(txStmt, tableName, 2); - try (Statement s = txConn.createStatement()) { - Assert.assertEquals(s.executeUpdate("commit"), 0); - } - checkRowCount(txStmt, tableName, 3); - - try (Statement s = conn.createStatement()) { - Assert.assertEquals(s.executeUpdate("commit"), 0); - } - try (Statement s = txConn.createStatement()) { - Assert.assertEquals(s.executeUpdate("commit"), 0); - } - txStmt.addBatch("begin transaction"); - txStmt.addBatch("insert into " + tableName + " values(4, 'd')"); - txStmt.addBatch("insert into " + tableName + " values(5, 'e')"); - txStmt.addBatch("commit"); - txStmt.executeBatch(); - - txStmt.addBatch("insert into " + tableName + " values(6, 'f')"); - txStmt.addBatch("rollback"); - txStmt.executeBatch(); - - txConn.setAutoCommit(true); - Assert.assertTrue(txConn.getAutoCommit()); - try (Statement s = txConn.createStatement()) { - s.executeUpdate("insert into " + tableName + " values(6, 'f')"); - checkRowCount(stmt, tableName, 6); - checkRowCount(txStmt, tableName, 6); - checkRowCount(s, tableName, 6); - } - - try (Statement s = txConn.createStatement()) { - checkRowCount(stmt, tableName, 6); - checkRowCount(txStmt, tableName, 6); - checkRowCount(s, tableName, 6); - } - } - } - - @Test(groups = "integration") - public void testNestedTransactions() throws SQLException { - if (isCloud()) return; //TODO: testNestedTransactions - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - - Properties props = new Properties(); - props.setProperty("autoCommit", "false"); - props.setProperty("transactionSupport", "true"); - String tableName = "test_jdbc_nested_tx"; - try (ClickHouseConnection c = newConnection(props); Statement s = c.createStatement()) { - if (!c.getServerVersion().check("[22.7,)")) { - throw new SkipException("Skip the test as transaction is supported since 22.7"); - } - s.execute("drop table if exists " + tableName + "; " - + "create table " + tableName + "(id UInt64) engine=MergeTree order by id"); - } - - try (Connection conn = newConnection(props); - Statement stmt = conn.createStatement(); - PreparedStatement ps = conn.prepareStatement("insert into " + tableName)) { - checkRowCount(stmt, tableName, 0); - stmt.executeQuery("insert into " + tableName + " values(1)"); - checkRowCount(stmt, tableName, 1); - ps.setInt(1, 2); - ps.executeUpdate(); - checkRowCount(stmt, tableName, 2); - ps.setInt(1, 3); - ps.executeBatch(); - checkRowCount(stmt, tableName, 2); - ps.setInt(1, 3); - ps.addBatch(); - ps.executeBatch(); - checkRowCount(stmt, tableName, 3); - try (Connection c = newConnection(); Statement s = c.createStatement()) { - checkRowCount(s, tableName, 3); - } - - conn.rollback(); - checkRowCount(stmt, tableName, 0); - try (Connection c = newConnection(); Statement s = c.createStatement()) { - checkRowCount(s, tableName, 0); - } - } - } - - @Test(groups = "integration") - public void testParallelTransactions() throws SQLException { - if (isCloud()) return; //TODO: testParallelTransactions - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - - Properties props = new Properties(); - 
props.setProperty("autoCommit", "false"); - props.setProperty("transactionSupport", "true"); - String tableName = "test_jdbc_parallel_tx"; - try (ClickHouseConnection c = newConnection(props); Statement s = c.createStatement()) { - if (!c.getServerVersion().check("[22.7,)")) { - throw new SkipException("Skip the test as transaction is supported since 22.7"); - } - s.execute("drop table if exists " + tableName + "; " - + "create table " + tableName + "(id UInt64) engine=MergeTree order by id"); - } - - try (Connection conn1 = newConnection(props); - Connection conn2 = newConnection(props); - Statement stmt1 = conn1.createStatement(); - Statement stmt2 = conn2.createStatement(); - PreparedStatement ps1 = conn1.prepareStatement("insert into " + tableName); - PreparedStatement ps2 = conn2.prepareStatement("insert into " + tableName)) { - stmt1.executeUpdate("insert into " + tableName + " values(-1)"); - checkRowCount(stmt1, tableName, 1); - checkRowCount(stmt2, tableName, 0); - conn1.rollback(); - checkRowCount(stmt1, tableName, 0); - checkRowCount(stmt2, tableName, 0); - - stmt2.executeUpdate("insert into " + tableName + " values(-2)"); - checkRowCount(stmt1, tableName, 0); - checkRowCount(stmt2, tableName, 1); - conn2.commit(); - checkRowCount(stmt1, tableName, 0); - checkRowCount(stmt2, tableName, 1); - conn1.commit(); - checkRowCount(stmt1, tableName, 1); - checkRowCount(stmt2, tableName, 1); - - ps1.setInt(1, 1); - ps1.addBatch(); - ps1.setInt(1, 2); - ps1.addBatch(); - ps1.setInt(1, 3); - ps1.addBatch(); - ps1.executeBatch(); - checkRowCount(stmt1, tableName, 4); - checkRowCount(stmt2, tableName, 1); - conn1.commit(); - checkRowCount(stmt1, tableName, 4); - checkRowCount(stmt2, tableName, 1); - try (Connection c = newConnection(props); Statement s = c.createStatement()) { - checkRowCount(s, tableName, 4); - } - - ps2.setInt(1, 4); - ps2.addBatch(); - ps2.setInt(1, 5); - ps2.addBatch(); - ps2.setInt(1, 6); - // ps2.addBatch(); - ps2.executeBatch(); - checkRowCount(stmt1, tableName, 4); - checkRowCount(stmt2, tableName, 3); - conn2.commit(); - checkRowCount(stmt1, tableName, 4); - checkRowCount(stmt2, tableName, 6); - try (Connection c = newConnection(props); Statement s = c.createStatement()) { - checkRowCount(s, tableName, 6); - } - } - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHousePreparedStatementTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHousePreparedStatementTest.java deleted file mode 100644 index 1495623f6..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHousePreparedStatementTest.java +++ /dev/null @@ -1,2115 +0,0 @@ -package com.clickhouse.jdbc; - -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.IOException; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.net.Inet4Address; -import java.net.Inet6Address; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.UnknownHostException; -import java.nio.charset.StandardCharsets; -import java.sql.BatchUpdateException; -import java.sql.Connection; -import java.sql.Date; -import java.sql.ParameterMetaData; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.sql.Statement; -import java.sql.Timestamp; -import java.sql.Types; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.util.Calendar; -import java.util.Collections; 
-import java.util.Properties; -import java.util.TimeZone; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionException; -import java.util.concurrent.ExecutionException; - -import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseProtocol; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.client.http.config.ClickHouseHttpOption; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataStreamFactory; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseExternalTable; -import com.clickhouse.data.ClickHouseFormat; -import com.clickhouse.data.ClickHouseInputStream; -import com.clickhouse.data.ClickHouseOutputStream; -import com.clickhouse.data.ClickHousePipedOutputStream; -import com.clickhouse.data.ClickHouseWriter; -import com.clickhouse.data.value.ClickHouseBitmap; -import com.clickhouse.data.value.ClickHouseIntegerValue; -import com.clickhouse.data.value.ClickHouseNestedValue; -import com.clickhouse.data.value.UnsignedInteger; -import com.clickhouse.data.value.UnsignedLong; -import com.clickhouse.data.value.array.ClickHouseByteArrayValue; -import com.clickhouse.jdbc.internal.InputBasedPreparedStatement; -import com.clickhouse.jdbc.internal.StreamBasedPreparedStatement; - -import org.testng.Assert; -import org.testng.SkipException; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -public class ClickHousePreparedStatementTest extends JdbcIntegrationTest { - @DataProvider(name = "columnsWithDefaultValue") - private Object[][] getColumnsWithDefaultValue() { - return new Object[][] { - new Object[] { "Bool", "false", "false" }, - new Object[] { "Date", "1", "1970-01-02" }, - new Object[] { "Date32", "-1", "1969-12-31" }, - new Object[] { "DateTime32('UTC')", "'1970-01-01 00:00:01'", "1970-01-01 00:00:01" }, - new Object[] { "DateTime64(3, 'UTC')", "'1970-01-01 00:00:01.234'", "1970-01-01 00:00:01.234" }, - new Object[] { "Decimal(10,4)", "10.1234", "10.1234" }, - new Object[] { "Enum8('x'=5,'y'=6)", "'y'", "y" }, - new Object[] { "Enum8('x'=5,'y'=6)", "5", "x" }, - new Object[] { "Enum16('xx'=55,'yy'=66)", "'yy'", "yy" }, - new Object[] { "Enum16('xx'=55,'yy'=66)", "55", "xx" }, - new Object[] { "Float32", "3.2", "3.2" }, - new Object[] { "Float64", "6.4", "6.4" }, - new Object[] { "Int8", "-1", "-1" }, - new Object[] { "UInt8", "1", "1" }, - new Object[] { "Int16", "-3", "-3" }, - new Object[] { "UInt16", "3", "3" }, - new Object[] { "Int32", "-5", "-5" }, - new Object[] { "UInt32", "7", "7" }, - new Object[] { "Int64", "-9", "-9" }, - new Object[] { "UInt64", "11", "11" }, - new Object[] { "Int128", "-13", "-13" }, - new Object[] { "UInt128", "17", "17" }, - new Object[] { "Int256", "-19", "-19" }, - new Object[] { "UInt256", "23", "23" }, - }; - } - - @DataProvider(name = "columnsWithoutDefaultValue") - private Object[][] getColumnsWithoutDefaultValue() { - return new Object[][] { - new Object[] { "Bool", "false" }, - new Object[] { "Date", "1970-01-01" }, - new Object[] { "Date32", "1970-01-01" }, - new Object[] { "DateTime32('UTC')", "1970-01-01 00:00:00" }, - new Object[] { "DateTime64(3, 'UTC')", "1970-01-01 00:00:00" }, - new Object[] { "Decimal(10,4)", "0" }, - new Object[] { "Enum8('x'=0,'y'=1)", "x" }, - new Object[] { "Enum16('xx'=1,'yy'=0)", "yy" }, - new Object[] { "Float32", "0.0" }, - new Object[] { "Float64", "0.0" }, - new Object[] { "Int8", "0" }, - new 
Object[] { "UInt8", "0" }, - new Object[] { "Int16", "0" }, - new Object[] { "UInt16", "0" }, - new Object[] { "Int32", "0" }, - new Object[] { "UInt32", "0" }, - new Object[] { "Int64", "0" }, - new Object[] { "UInt64", "0" }, - new Object[] { "Int128", "0" }, - new Object[] { "UInt128", "0" }, - new Object[] { "Int256", "0" }, - new Object[] { "UInt256", "0" }, - }; - } - - @DataProvider(name = "nonBatchQueries") - private Object[][] getNonBatchQueries() { - return new Object[][] { - new Object[] { "input", "insert into %s" }, - new Object[] { "sql", "insert into %s values(?+0, ?)" }, - new Object[] { "table", "insert into %s select * from {tt 'tbl'}" } - }; - } - - @DataProvider(name = "typedParameters") - private Object[][] getTypedParameters() { - return new Object[][] { - new Object[] { "Array(DateTime32)", new LocalDateTime[] { LocalDateTime.of(2021, 11, 1, 1, 2, 3), - LocalDateTime.of(2021, 11, 2, 2, 3, 4) } } }; - } - - @DataProvider(name = "statementAndParams") - private Object[][] getStatementAndParameters() { - return new Object[][] { - // ddl - new Object[] { "ddl", "drop table if exists non_existing_table", DataSourceImpl.PreparedStatementImpl.class, false, - null, false }, - // query - new Object[] { "select1", "select 1", DataSourceImpl.PreparedStatementImpl.class, true, null, false }, - new Object[] { "select_param", "select ?", DataSourceImpl.PreparedStatementImpl.class, true, new String[] { "1" }, - false }, - // mutation - new Object[] { "insert_static", "insert into $table values(1)", - DataSourceImpl.PreparedStatementImpl.class, false, null, - false }, - new Object[] { "insert_table", "insert into $table", InputBasedPreparedStatement.class, false, - new String[] { "2" }, true }, - new Object[] { "insert_param", "insert into $table values(?)", InputBasedPreparedStatement.class, - false, new String[] { "3" }, true }, - new Object[] { "insert_param", "insert into $table values(trim(?))", - DataSourceImpl.PreparedStatementImpl.class, false, new String[] { "4" }, true }, - new Object[] { "insert_input", "insert into $table select s from input('s String')", - InputBasedPreparedStatement.class, false, new String[] { "5" }, true }, - }; - } - - private void setParameters(PreparedStatement ps, String[] params) throws SQLException { - if (params != null) { - for (int i = 0; i < params.length; i++) { - ps.setString(i + 1, params[i]); - } - } - } - - private void checkTable(Statement stmt, String query, String[] results) throws SQLException { - if (results == null) { - return; - } - try (ResultSet rs = stmt.executeQuery(query)) { - for (int i = 0; i < results.length; i++) { - Assert.assertTrue(rs.next(), "Should have next row"); - Assert.assertEquals(rs.getString(1), results[i]); - } - Assert.assertFalse(rs.next(), "Should not have next row"); - } - } - - @Test(groups = "integration") - public void testQueryWithoutParameter() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - PreparedStatement stmt = conn.prepareStatement("select 1")) { - ResultSet rs = stmt.executeQuery(); - Assert.assertTrue(rs.next(), "Should have one row"); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertFalse(rs.next(), "Should have only one row"); - - Assert.assertThrows(SQLException.class, () -> stmt.setInt(1, 2)); - } - - props.setProperty(JdbcConfig.PROP_NAMED_PARAM, "true"); - try (ClickHouseConnection conn = newConnection(props); - PreparedStatement stmt = conn.prepareStatement("select 1")) { - ResultSet rs = 
stmt.executeQuery(); - Assert.assertTrue(rs.next(), "Should have one row"); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertFalse(rs.next(), "Should have only one row"); - - Assert.assertThrows(SQLException.class, () -> stmt.setInt(1, 2)); - } - } - - @Test(groups = "integration") - public void testReadWriteBool() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement(); - PreparedStatement stmt = conn.prepareStatement( - "insert into test_read_write_bool select c1, c2 from input('c1 Int32, c2 Bool')")) { - s.execute("drop table if exists test_read_write_bool; " - + "create table test_read_write_bool(id Int32, b Bool)engine=Memory"); - - stmt.setInt(1, 1); - stmt.setBoolean(2, true); - stmt.addBatch(); - stmt.setInt(1, 2); - stmt.setBoolean(2, false); - stmt.addBatch(); - stmt.setInt(1, 3); - stmt.setString(2, "tRUe"); - stmt.addBatch(); - stmt.setInt(1, 4); - stmt.setString(2, "no"); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1, 1, 1 }); - - ResultSet rs = conn.createStatement().executeQuery("select * from test_read_write_bool order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getBoolean(2), true); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getBoolean(2), false); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 3); - Assert.assertEquals(rs.getBoolean(2), true); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 4); - Assert.assertEquals(rs.getBoolean(2), false); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testReadWriteBinaryString() throws SQLException { - Properties props = new Properties(); - props.setProperty(ClickHouseClientOption.USE_BINARY_STRING.getKey(), "true"); - try (ClickHouseConnection conn = newConnection(props); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_binary_string; " - + "create table test_binary_string(id Int32, " - + "f0 FixedString(3), f1 Nullable(FixedString(3)), s0 String, s1 Nullable(String)) engine=MergeTree ORDER BY id"); - } - - byte[] bytes = new byte[256]; - for (int i = 0; i < 256; i++) { - bytes[i] = (byte) i; - } - try (ClickHouseConnection conn = newConnection(props); - PreparedStatement ps = conn.prepareStatement("select ?, ?")) { - ps.setBytes(1, bytes); - ps.setString(2, Integer.toString(bytes.length)); - ResultSet rs = ps.executeQuery(); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getBytes(1), bytes); - Assert.assertEquals(rs.getInt(2), bytes.length); - Assert.assertFalse(rs.next()); - } - - bytes = new byte[] { 0x61, 0x62, 0x63 }; - try (ClickHouseConnection conn = newConnection(props); - PreparedStatement ps = conn.prepareStatement("insert into test_binary_string")) { - ps.setInt(1, 1); - ps.setBytes(2, bytes); - ps.setBytes(3, null); - ps.setBytes(4, bytes); - ps.setBytes(5, null); - ps.addBatch(); - ps.setInt(1, 2); - ps.setString(2, "abc"); - ps.setString(3, null); - ps.setString(4, "abc"); - ps.setString(5, null); - ps.addBatch(); - ps.setInt(1, 3); - ps.setBytes(2, bytes); - ps.setBytes(3, bytes); - ps.setBytes(4, bytes); - ps.setBytes(5, bytes); - ps.addBatch(); - ps.setInt(1, 4); - ps.setString(2, "abc"); - ps.setString(3, "abc"); - ps.setString(4, "abc"); - ps.setString(5, "abc"); - ps.addBatch(); - ps.executeBatch(); - } - - try (ClickHouseConnection conn = 
newConnection(props); - PreparedStatement ps = conn - .prepareStatement("SELECT DISTINCT * EXCEPT(id) FROM test_binary_string" + - " WHERE f0 = ? ORDER BY id" + (isCloud() ? " SETTINGS select_sequential_consistency=1" : ""))) { - ps.setBytes(1, bytes); - ResultSet rs = ps.executeQuery(); - Assert.assertTrue(rs.next(), "Should have at least one row"); - Assert.assertEquals(rs.getBytes(1), bytes); - Assert.assertNull(rs.getBytes(2), "f1 should be null"); - Assert.assertEquals(rs.getBytes(3), bytes); - Assert.assertNull(rs.getBytes(4), "s1 should be null"); - Assert.assertTrue(rs.next(), "Should have at least two rows"); - for (int i = 1; i <= 4; i++) { - Assert.assertEquals(rs.getBytes(i), bytes); - Assert.assertEquals(rs.getString(i), "abc"); - } - Assert.assertFalse(rs.next(), "Should not have more than two rows"); - } - } - - @Test(groups = "integration") - public void testReadWriteDate() throws SQLException { - LocalDate d = LocalDate.of(2021, 3, 25); - Date x = Date.valueOf(d); - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement(); - PreparedStatement stmt = conn.prepareStatement("insert into test_read_write_date values(? + 1,?,?)")) { - s.execute("drop table if exists test_read_write_date"); - try { - s.execute("create table test_read_write_date(id Int32, d1 Date, d2 Date32)engine=Memory"); - } catch (SQLException e) { - s.execute("create table test_read_write_date(id Int32, d1 Date, d2 Nullable(Date))engine=Memory"); - } - stmt.setInt(1, 0); - stmt.setObject(2, d); - stmt.setObject(3, d); - stmt.addBatch(); - stmt.setInt(1, 1); - stmt.setDate(2, x); - stmt.setDate(3, x); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1 }); - - ResultSet rs = conn.createStatement().executeQuery("select * from test_read_write_date order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(2), d); - Assert.assertEquals(rs.getDate(2), x); - Assert.assertEquals(rs.getObject(3), d); - Assert.assertEquals(rs.getDate(3), x); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getObject(2), d); - Assert.assertEquals(rs.getDate(2), x); - Assert.assertEquals(rs.getObject(3), d); - Assert.assertEquals(rs.getDate(3), x); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testReadWriteDateWithClientTimeZone() throws SQLException { - Properties props = new Properties(); - props.setProperty(ClickHouseClientOption.USE_SERVER_TIME_ZONE_FOR_DATES.getKey(), "false"); - try (ClickHouseConnection conn = newConnection(props); - Statement s = conn.createStatement()) { - TimeZone tz = conn.getServerTimeZone(); - // 2021-03-25 - LocalDate d = LocalDateTime.ofInstant(Instant.ofEpochSecond(1616630400L), tz.toZoneId()).toLocalDate(); - Date x = Date.valueOf(d); - s.execute("drop table if exists test_read_write_date_cz"); - try { - s.execute("create table test_read_write_date_cz(id Int32, d1 Date, d2 Date32)engine=Memory"); - } catch (SQLException e) { - s.execute("create table test_read_write_date_cz(id Int32, d1 Date, d2 Nullable(Date))engine=Memory"); - } - try (PreparedStatement stmt = conn - .prepareStatement("insert into test_read_write_date_cz values (?, ?, ?)")) { - stmt.setInt(1, 1); - stmt.setObject(2, d); - stmt.setObject(3, d); - stmt.addBatch(); - stmt.setInt(1, 2); - stmt.setDate(2, x); - stmt.setDate(3, x); - stmt.addBatch(); - int[] results = stmt.executeBatch(); 
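The write tests in this file (testReadWriteBool, testReadWriteDate, the client-time-zone variant above, and others below) all rely on the same PreparedStatement addBatch/executeBatch pattern, where each element of the returned int[] is the update count for one set of bound parameters. Below is a minimal standalone illustration of that pattern, with a placeholder URL and table, assuming nothing about this driver beyond standard JDBC.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Arrays;

public class BatchInsertSketch {
    public static void main(String[] args) throws SQLException {
        // Placeholder endpoint and table; only standard JDBC calls are used.
        try (Connection conn = DriverManager.getConnection("jdbc:ch://localhost:8123");
             PreparedStatement ps = conn.prepareStatement("insert into batch_sketch(id, name) values(?, ?)")) {
            ps.setInt(1, 1);
            ps.setString(2, "a");
            ps.addBatch();

            ps.setInt(1, 2);
            ps.setString(2, "b");
            ps.addBatch();

            // One update count per addBatch() call, e.g. [1, 1] as asserted by the tests above.
            int[] counts = ps.executeBatch();
            System.out.println(Arrays.toString(counts));
        }
    }
}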
- Assert.assertEquals(results, new int[] { 1, 1 }); - } - - ResultSet rs = conn.createStatement().executeQuery("select * from test_read_write_date_cz order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(2), d); - Assert.assertEquals(rs.getDate(2), x); - Assert.assertEquals(rs.getObject(3), d); - Assert.assertEquals(rs.getDate(3), x); - Assert.assertTrue(rs.next()); - - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getObject(2), d); - Assert.assertEquals(rs.getDate(2), x); - Assert.assertEquals(rs.getObject(3), d); - Assert.assertEquals(rs.getDate(3), x); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testReadWriteDateTime() throws SQLException { - LocalDateTime dt = LocalDateTime.of(2021, 3, 25, 8, 50, 56); - Timestamp x = Timestamp.valueOf(dt); - try (ClickHouseConnection conn = newConnection(new Properties()); - PreparedStatement stmt = conn - .prepareStatement("insert into test_read_write_datetime values(?+1,?,?)")) { - conn.createStatement().execute("drop table if exists test_read_write_datetime;" - + "create table test_read_write_datetime(id Int32, d1 DateTime32, d2 DateTime64(3))engine=Memory"); - stmt.setInt(1, 0); - stmt.setObject(2, dt); - stmt.setObject(3, dt); - stmt.addBatch(); - stmt.setInt(1, 1); - stmt.setTimestamp(2, x); - stmt.setTimestamp(3, x); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1 }); - - LocalDateTime dx = dt.atZone(TimeZone.getDefault().toZoneId()) - .withZoneSameInstant(conn.getServerTimeZone().toZoneId()).toLocalDateTime(); - Timestamp xx = Timestamp.valueOf(dx); - ResultSet rs = conn.createStatement().executeQuery("select * from test_read_write_datetime order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(2), dt); - Assert.assertEquals(rs.getTimestamp(2), x); - Assert.assertEquals(rs.getObject(3), dt); - Assert.assertEquals(rs.getTimestamp(3), x); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getObject(2), dx); - Assert.assertEquals(rs.getTimestamp(2), xx); - Assert.assertEquals(rs.getObject(3), dx); - Assert.assertEquals(rs.getTimestamp(3), xx); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testReadWriteDateTimeWithNanos() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - stmt.execute("drop table if exists test_read_write_datetime_nanos;" - + "CREATE TABLE test_read_write_datetime_nanos (id UUID, date DateTime64(3)) ENGINE = MergeTree() ORDER BY (id, date)"); - UUID id = UUID.randomUUID(); - long value = 1617359745321000L; - Instant i = Instant.ofEpochMilli(value / 1000L); - LocalDateTime dt = LocalDateTime.ofInstant(i, conn.getServerTimeZone().toZoneId()); - try (PreparedStatement ps = conn - .prepareStatement("insert into test_read_write_datetime_nanos values(?,?)")) { - ps.setObject(1, id); - ps.setObject(2, dt); - // below works too but too slow - // ps.setTimestamp(2, new Timestamp(value / 1000L)); - ps.executeUpdate(); - } - - ResultSet rs = stmt.executeQuery("select * from test_read_write_datetime_nanos"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1), id); - Assert.assertEquals(rs.getObject(2), dt); - // rs.getString(2) will return "2021-04-02 03:35:45.321" - Assert.assertFalse(rs.next()); - } - 
} - - @Test(groups = "integration") - public void testReadWriteDateTimeWithClientTimeZone() throws SQLException { - Properties props = new Properties(); - props.setProperty(ClickHouseClientOption.USE_SERVER_TIME_ZONE.getKey(), "false"); - LocalDateTime dt = LocalDateTime.of(2021, 3, 25, 8, 50, 56); - Timestamp x = Timestamp.valueOf(dt); - try (ClickHouseConnection conn = newConnection(props); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_read_write_datetime_cz;" - + "create table test_read_write_datetime_cz(id Int32, d1 DateTime32, d2 DateTime64(3))engine=Memory"); - try (PreparedStatement stmt = conn - .prepareStatement("insert into test_read_write_datetime_cz")) { - stmt.setInt(1, 1); - stmt.setObject(2, dt); - stmt.setObject(3, dt); - stmt.addBatch(); - stmt.setInt(1, 2); - stmt.setTimestamp(2, x); - stmt.setTimestamp(3, x); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1 }); - } - - ResultSet rs = s.executeQuery("select * from test_read_write_datetime_cz order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(2), dt); - Assert.assertEquals(rs.getTimestamp(2), x); - Assert.assertEquals(rs.getObject(3), dt); - Assert.assertEquals(rs.getTimestamp(3), x); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getObject(2), dt); - Assert.assertEquals(rs.getTimestamp(2), x); - Assert.assertEquals(rs.getObject(3), dt); - Assert.assertEquals(rs.getTimestamp(3), x); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testReadWriteEnums() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_read_write_enums;" - + "create table test_read_write_enums(id Int32, e1 Enum8('v1'=1,'v2'=2), e2 Enum16('v11'=11,'v22'=22))engine=Memory"); - try (PreparedStatement stmt = conn - .prepareStatement("insert into test_read_write_enums")) { - stmt.setInt(1, 1); - stmt.setString(2, "v1"); - stmt.setObject(3, "v11"); - stmt.addBatch(); - stmt.setInt(1, 2); - stmt.setObject(2, 2); - stmt.setByte(3, (byte) 22); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1 }); - } - - ResultSet rs = s.executeQuery("select * from test_read_write_enums order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(2), "v1"); - Assert.assertEquals(rs.getString(3), "v11"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getByte(2), 2); - Assert.assertEquals(rs.getObject(3), "v22"); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testReadWriteArrayWithNullableTypes() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_read_write_nullable_unsigned_types;" - + "create table test_read_write_nullable_unsigned_types(id Int32, a1 Array(Nullable(Int8)), a2 Array(Nullable(UInt64)))engine=Memory"); - try (PreparedStatement stmt = conn - .prepareStatement("insert into test_read_write_nullable_unsigned_types")) { - stmt.setInt(1, 1); - stmt.setObject(2, new byte[0]); - stmt.setObject(3, new long[0]); - stmt.execute(); - } - try (PreparedStatement stmt = conn - 
.prepareStatement("insert into test_read_write_nullable_unsigned_types")) { - stmt.setInt(1, 2); - stmt.setObject(2, new byte[] { 2, 2 }); - stmt.setObject(3, new Long[] { 2L, null }); - stmt.addBatch(); - stmt.setInt(1, 3); - stmt.setArray(2, conn.createArrayOf("Nullable(Int8)", new Byte[] { null, 3 })); - stmt.setArray(3, - conn.createArrayOf("Nullable(UInt64)", new UnsignedLong[] { null, UnsignedLong.valueOf(3L) })); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1 }); - } - - ResultSet rs = s.executeQuery("select * from test_read_write_nullable_unsigned_types order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(2), new Byte[0]); - Assert.assertEquals(rs.getArray(2).getArray(), new Byte[0]); - Assert.assertEquals(rs.getObject(3), new UnsignedLong[0]); - Assert.assertEquals(rs.getArray(3).getArray(), new UnsignedLong[0]); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getObject(2), new Byte[] { 2, 2 }); - Assert.assertEquals(rs.getArray(2).getArray(), new Byte[] { 2, 2 }); - Assert.assertEquals(rs.getObject(3), new UnsignedLong[] { UnsignedLong.valueOf(2L), null }); - Assert.assertEquals(rs.getArray(3).getArray(), new UnsignedLong[] { UnsignedLong.valueOf(2L), null }); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 3); - Assert.assertEquals(rs.getObject(2), new Byte[] { null, 3 }); - Assert.assertEquals(rs.getArray(2).getArray(), new Byte[] { null, 3 }); - Assert.assertEquals(rs.getObject(3), new UnsignedLong[] { null, UnsignedLong.valueOf(3L) }); - Assert.assertEquals(rs.getArray(3).getArray(), new UnsignedLong[] { null, UnsignedLong.valueOf(3L) }); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testReadWriteString() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_read_write_strings;" - + "create table test_read_write_strings(id Int32, s1 String, s2 Nullable(String), s3 Array(String), s4 Array(Nullable(String)))engine=Memory"); - try (PreparedStatement stmt = conn - .prepareStatement("insert into test_read_write_strings")) { - stmt.setInt(1, 0); - stmt.setObject(2, null); - stmt.setObject(3, null); - stmt.setObject(4, new String[0]); - stmt.setObject(5, new String[0]); - Assert.assertThrows(SQLException.class, () -> stmt.execute()); - } - try (PreparedStatement stmt = conn - .prepareStatement("insert into test_read_write_strings")) { - stmt.setInt(1, 1); - stmt.setObject(2, ""); - stmt.setString(3, ""); - stmt.setArray(4, conn.createArrayOf("String", new String[] { "" })); - stmt.setObject(5, new String[] { "" }); - stmt.addBatch(); - stmt.setInt(1, 2); - stmt.setString(2, ""); - stmt.setString(3, null); - stmt.setObject(4, new String[0]); - stmt.setArray(5, conn.createArrayOf("String", new String[] { null })); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1 }); - } - - ResultSet rs = s.executeQuery("select * from test_read_write_strings order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), ""); - Assert.assertEquals(rs.getObject(3), ""); - Assert.assertEquals(rs.getObject(4), new String[] { "" }); - Assert.assertEquals(rs.getArray(5).getArray(), new String[] { "" }); - Assert.assertTrue(rs.next()); - 
Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getObject(2), ""); - Assert.assertEquals(rs.getString(3), null); - Assert.assertEquals(rs.getArray(4).getArray(), new String[0]); - Assert.assertEquals(rs.getObject(5), new String[] { null }); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testInsertQueryDateTime64() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - ClickHouseStatement s = conn.createStatement();) { - s.execute("drop table if exists test_issue_612;" - + "CREATE TABLE IF NOT EXISTS test_issue_612 (id UUID, date DateTime64(6)) ENGINE = MergeTree() ORDER BY (id, date)"); - UUID id = UUID.randomUUID(); - long value = 1617359745321000L; - Instant i = Instant.ofEpochMilli(value / 1000L); - LocalDateTime dt = LocalDateTime.ofInstant(i, conn.getServerTimeZone().toZoneId()); - try (PreparedStatement ps = conn.prepareStatement("insert into test_issue_612 values(trim(?),?)")) { - ps.setLong(2, value); - ps.setObject(1, id); - ps.execute(); - ps.setObject(1, UUID.randomUUID()); - ps.setString(2, "2021-09-01 00:00:00.123456"); - ps.executeUpdate(); - } - - try (PreparedStatement ps = conn.prepareStatement("select * from test_issue_612 where id = ?")) { - ps.setObject(1, id); - ResultSet rs = ps.executeQuery(); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1), id); - Assert.assertEquals(rs.getObject(2), dt); - Assert.assertEquals(rs.getLong(2), dt.atZone(conn.getServerTimeZone().toZoneId()).toEpochSecond()); - Assert.assertFalse(rs.next()); - } - - try (PreparedStatement ps = conn.prepareStatement("select * from test_issue_612 where id != ?")) { - ps.setObject(1, id); - ResultSet rs = ps.executeQuery(); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(2), LocalDateTime.of(2021, 9, 1, 0, 0, 0, 123456000)); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(groups = "integration") - public void testBatchDdl() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props)) { - if (!conn.getServerVersion().check("[22.8,)")) { - throw new SkipException("Skip due to error 'unknown key zookeeper_load_balancing'"); - } - try (PreparedStatement stmt = conn.prepareStatement(isCloud() ? - "drop table if exists test_batch_dll" : - "drop table if exists test_batch_dll_on_cluster on cluster single_node_cluster_localhost")) { - stmt.addBatch(); - stmt.addBatch(); - Assert.assertEquals(stmt.executeBatch(), new int[] { 0, 0 }); - } - - try (PreparedStatement stmt = conn.prepareStatement("select 1")) { - stmt.addBatch(); - Assert.assertThrows(BatchUpdateException.class, () -> stmt.executeBatch()); - } - } - } - - @Test(groups = "integration") - public void testBatchInsert() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - ClickHouseStatement s = conn.createStatement(); - PreparedStatement stmt = conn.prepareStatement("insert into test_batch_insert values(? 
+ 1,?)")) { - s.execute("drop table if exists test_batch_insert;" - + "create table test_batch_insert(id Int32, name Nullable(String))engine=Memory"); - stmt.setInt(1, 0); - stmt.setString(2, "a"); - stmt.addBatch(); - stmt.setInt(1, 1); - stmt.setString(2, "b"); - stmt.addBatch(); - stmt.setInt(1, 2); - stmt.setString(2, null); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1, 1 }); - - ResultSet rs = s.executeQuery("select * from test_batch_insert order by id"); - String[] expected = new String[] { "a", "b", null }; - int index = 1; - while (rs.next()) { - Assert.assertEquals(rs.getInt(1), index); - Assert.assertEquals(rs.getString(2), expected[index - 1]); - index++; - } - Assert.assertEquals(index, 4); - } - - // try with only one column - try (ClickHouseConnection conn = newConnection(new Properties()); - ClickHouseStatement s = conn.createStatement(); - PreparedStatement stmt = conn.prepareStatement("insert into test_batch_insert(id)")) { - s.execute("truncate table test_batch_insert"); - stmt.setInt(1, 1); - stmt.addBatch(); - stmt.setInt(1, 2); - stmt.addBatch(); - stmt.setInt(1, 3); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1, 1 }); - - ResultSet rs = s.executeQuery("select * from test_batch_insert order by id"); - int index = 1; - while (rs.next()) { - Assert.assertEquals(rs.getInt(1), index); - Assert.assertEquals(rs.getString(2), null); - index++; - } - Assert.assertEquals(index, 4); - } - - // now without specifying any column - try (ClickHouseConnection conn = newConnection(new Properties()); - ClickHouseStatement s = conn.createStatement(); - PreparedStatement stmt = conn.prepareStatement("insert into test_batch_insert")) { - s.execute("truncate table test_batch_insert"); - stmt.setInt(1, 1); - stmt.setString(2, "a"); - stmt.addBatch(); - stmt.setInt(1, 2); - stmt.setString(2, "b"); - stmt.addBatch(); - stmt.setInt(1, 3); - stmt.setString(2, null); - stmt.addBatch(); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 1, 1, 1 }); - - ResultSet rs = s.executeQuery("select * from test_batch_insert order by id"); - String[] expected = new String[] { "a", "b", null }; - int index = 1; - while (rs.next()) { - Assert.assertEquals(rs.getInt(1), index); - Assert.assertEquals(rs.getString(2), expected[index - 1]); - index++; - } - Assert.assertEquals(index, 4); - } - } - - @Test(groups = "integration") - public void testBatchInsertWithoutUnboundedQueue() throws SQLException { - Properties props = new Properties(); - props.setProperty(ClickHouseClientOption.WRITE_BUFFER_SIZE.getKey(), "1"); - props.setProperty(ClickHouseClientOption.MAX_QUEUED_BUFFERS.getKey(), "1"); - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_insert_buffer_size; " - + "CREATE TABLE test_insert_buffer_size(value String) ENGINE=Memory"); - try (PreparedStatement ps = conn.prepareStatement( - "INSERT INTO test_insert_buffer_size")) { - ps.setString(1, "1"); - ps.addBatch(); - ps.setString(1, "2"); - ps.addBatch(); - ps.setString(1, "3"); - ps.addBatch(); - ps.executeBatch(); - - ps.setString(1, "4"); - ps.addBatch(); - ps.executeBatch(); - - ps.setString(1, "4"); - ps.addBatch(); - ps.clearBatch(); - ps.setString(1, "5"); - ps.addBatch(); - ps.setString(1, "6"); - ps.addBatch(); - ps.executeBatch(); - } - - try (ResultSet rs = s.executeQuery("select * from 
test_insert_buffer_size order by value")) { - int count = 1; - while (rs.next()) { - Assert.assertEquals(rs.getInt(1), count++); - } - Assert.assertEquals(count, 7); - } - } - } - - @Test(groups = "integration") - public void testQueryWithDateTime() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement(); - PreparedStatement stmt = conn.prepareStatement( - "select id, dt from test_query_datetime where dt > ? order by id")) { - s.execute("drop table if exists test_query_datetime;" - + "create table test_query_datetime(id Int32, dt DateTime32)engine=Memory;" - + "insert into test_query_datetime values(1, '2021-03-25 12:34:56'), (2, '2021-03-26 12:34:56')"); - stmt.setObject(1, LocalDateTime.of(2021, 3, 25, 12, 34, 57)); - ResultSet rs = stmt.executeQuery(); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getObject(2), LocalDateTime.of(2021, 3, 26, 12, 34, 56)); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testBatchInput() throws SQLException { - Properties props = new Properties(); - props.setProperty("continueBatchOnError", "true"); - try (ClickHouseConnection conn = newConnection(props); - Statement s = conn.createStatement(); - PreparedStatement stmt = conn.prepareStatement( - "insert into test_batch_input select id, name, value from input('id Int32, name Nullable(String), desc Nullable(String), value AggregateFunction(groupBitmap, UInt32)')")) { - s.execute("drop table if exists test_batch_input;" - + "create table test_batch_input(id Int32, name Nullable(String), value AggregateFunction(groupBitmap, UInt32))engine=Memory"); - Object[][] objs = new Object[][] { - new Object[] { 1, "a", "aaaaa", ClickHouseBitmap.wrap(1, 2, 3, 4, 5) }, - new Object[] { 2, "b", null, ClickHouseBitmap.wrap(6, 7, 8, 9, 10) }, - new Object[] { 3, null, "33333", ClickHouseBitmap.wrap(11, 12, 13) } - }; - for (Object[] v : objs) { - stmt.setInt(1, (int) v[0]); - stmt.setString(2, (String) v[1]); - stmt.setString(3, (String) v[2]); - stmt.setObject(4, v[3]); - stmt.addBatch(); - } - int[] results = stmt.executeBatch(); - Assert.assertEquals(results.length, objs.length); - for (int result : results) { - Assert.assertNotEquals(result, PreparedStatement.EXECUTE_FAILED); - } - - try (ResultSet rs = s.executeQuery("select * from test_batch_input order by id")) { - Object[][] values = new Object[objs.length][]; - int index = 0; - while (rs.next()) { - values[index++] = new Object[] { - rs.getObject(1), rs.getObject(2), rs.getObject(3) - }; - } - Assert.assertEquals(index, objs.length); - for (int i = 0; i < objs.length; i++) { - Object[] actual = values[i]; - Object[] expected = objs[i]; - Assert.assertEquals(actual[0], expected[0]); - Assert.assertEquals(actual[1], expected[1]); - Assert.assertEquals(actual[2], expected[3]); - } - } - } - } - - @Test(groups = "integration") - public void testBatchQuery() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - PreparedStatement stmt = conn.prepareStatement("select * from numbers(100) where number < ?")) { - Assert.assertEquals(stmt.executeBatch(), new int[0]); - Assert.assertEquals(stmt.executeLargeBatch(), new long[0]); - Assert.assertThrows(SQLException.class, () -> stmt.setInt(0, 5)); - Assert.assertThrows(SQLException.class, () -> stmt.setInt(2, 5)); - Assert.assertThrows(SQLException.class, () -> stmt.addBatch()); - - stmt.setInt(1, 3); - 
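- // binding a parameter alone does not create a batch entry: executeBatch() and
- // executeLargeBatch() still report an empty batch until addBatch() is called, and
- // batching a SELECT is rejected with BatchUpdateException below.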
Assert.assertEquals(stmt.executeBatch(), new int[0]); - Assert.assertEquals(stmt.executeLargeBatch(), new long[0]); - stmt.addBatch(); - stmt.setInt(1, 2); - stmt.addBatch(); - Assert.assertThrows(BatchUpdateException.class, () -> stmt.executeBatch()); - - Assert.assertEquals(stmt.executeBatch(), new int[0]); - Assert.assertEquals(stmt.executeLargeBatch(), new long[0]); - } - } - - @Test(dataProvider = "statementAndParams", groups = "integration") - public void testExecuteWithOrWithoutParameters(String tableSuffix, String query, Class clazz, - boolean hasResultSet, String[] params, boolean checkTable) throws SQLException { - int expectedRowCount = "ddl".equals(tableSuffix) ? 0 : 1; - String tableName = "test_execute_ps_" + tableSuffix; - query = query.replace("$table", tableName); - Properties props = new Properties(); - try (Connection conn = newConnection(props); Statement stmt = conn.createStatement()) { - Assert.assertFalse(stmt.execute("drop table if exists " + tableName - + "; create table " + tableName + "(s String)engine=Memory"), "Should not have result set"); - - try (PreparedStatement ps = conn.prepareStatement(query)) { - Assert.assertEquals(ps.getClass(), clazz); - - // executeQuery - setParameters(ps, params); - Assert.assertNotNull(ps.executeQuery(), "executeQuery should never return null result set"); - if (hasResultSet) { - Assert.assertNotNull(ps.getResultSet(), "Should have result set"); - Assert.assertEquals(ps.getUpdateCount(), -1); - Assert.assertEquals(ps.getLargeUpdateCount(), -1L); - } else { - Assert.assertNull(ps.getResultSet(), "Should not have result set"); - Assert.assertTrue(ps.getUpdateCount() >= 0, "Should have update count"); - Assert.assertTrue(ps.getLargeUpdateCount() >= 0L, "Should have update count"); - } - if (checkTable) - checkTable(stmt, "select * from " + tableName, params); - - // execute - Assert.assertFalse(stmt.execute("truncate table " + tableName), "Should not have result set"); - setParameters(ps, params); - if (hasResultSet) { - Assert.assertTrue(ps.execute(), "Should have result set"); - Assert.assertNotNull(ps.getResultSet(), "Should have result set"); - Assert.assertEquals(ps.getUpdateCount(), -1); - Assert.assertEquals(ps.getLargeUpdateCount(), -1L); - } else { - Assert.assertFalse(ps.execute(), "Should not have result set"); - Assert.assertNull(ps.getResultSet(), "Should not have result set"); - Assert.assertTrue(ps.getUpdateCount() >= 0, "Should have update count"); - Assert.assertTrue(ps.getLargeUpdateCount() >= 0L, "Should have update count"); - } - if (checkTable) - checkTable(stmt, "select * from " + tableName, params); - - // executeLargeUpdate - Assert.assertFalse(stmt.execute("truncate table " + tableName), "Should not have result set"); - setParameters(ps, params); - Assert.assertEquals(ps.executeLargeUpdate(), ps.getLargeUpdateCount()); - if (hasResultSet) { - Assert.assertNotNull(ps.getResultSet(), "Should have result set"); - Assert.assertEquals(ps.getUpdateCount(), -1); - Assert.assertEquals(ps.getLargeUpdateCount(), -1L); - } else { - Assert.assertNull(ps.getResultSet(), "Should not have result set"); - Assert.assertTrue(ps.getUpdateCount() >= 0, "Should have update count"); - Assert.assertTrue(ps.getLargeUpdateCount() >= 0L, "Should have update count"); - } - if (checkTable) - checkTable(stmt, "select * from " + tableName, params); - - // executeUpdate - Assert.assertFalse(stmt.execute("truncate table " + tableName), "Should not have result set"); - setParameters(ps, params); - Assert.assertEquals(ps.executeUpdate(), 
ps.getUpdateCount()); - if (hasResultSet) { - Assert.assertNotNull(ps.getResultSet(), "Should have result set"); - Assert.assertEquals(ps.getUpdateCount(), -1); - Assert.assertEquals(ps.getLargeUpdateCount(), -1L); - } else { - Assert.assertNull(ps.getResultSet(), "Should not have result set"); - Assert.assertTrue(ps.getUpdateCount() >= 0, "Should have update count"); - Assert.assertTrue(ps.getLargeUpdateCount() >= 0L, "Should have update count"); - } - if (checkTable) - checkTable(stmt, "select * from " + tableName, params); - } - - // executeLargeBatch - Assert.assertFalse(stmt.execute("truncate table " + tableName), "Should not have result set"); - try (PreparedStatement ps = conn.prepareStatement(query)) { - Assert.assertEquals(ps.getClass(), clazz); - setParameters(ps, params); - ps.addBatch(); - Assert.assertThrows(SQLException.class, () -> ps.execute()); - Assert.assertThrows(SQLException.class, () -> ps.executeQuery()); - Assert.assertThrows(SQLException.class, () -> ps.executeUpdate()); - if (hasResultSet) { - Assert.assertThrows(SQLException.class, () -> ps.executeLargeBatch()); - } else { - Assert.assertEquals(ps.executeLargeBatch(), new long[] { expectedRowCount }); - } - if (checkTable) - checkTable(stmt, "select * from " + tableName, params); - } - - // executeBatch - Assert.assertFalse(stmt.execute("truncate table " + tableName), "Should not have result set"); - try (PreparedStatement ps = conn.prepareStatement(query)) { - Assert.assertEquals(ps.getClass(), clazz); - setParameters(ps, params); - ps.addBatch(); - Assert.assertThrows(SQLException.class, () -> ps.execute()); - Assert.assertThrows(SQLException.class, () -> ps.executeQuery()); - Assert.assertThrows(SQLException.class, () -> ps.executeUpdate()); - if (hasResultSet) { - Assert.assertThrows(SQLException.class, () -> ps.executeBatch()); - } else { - Assert.assertEquals(ps.executeBatch(), new int[] { expectedRowCount }); - } - if (checkTable) - checkTable(stmt, "select * from " + tableName, params); - } - } - - props.setProperty(JdbcConfig.PROP_CONTINUE_BATCH, "true"); - try (Connection conn = newConnection(props); - Statement stmt = conn.createStatement(); - PreparedStatement ps = conn.prepareStatement(query)) { - Assert.assertEquals(ps.getClass(), clazz); - - // executeLargeBatch - Assert.assertFalse(stmt.execute("truncate table " + tableName), "Should not have result set"); - setParameters(ps, params); - ps.addBatch(); - Assert.assertThrows(SQLException.class, () -> ps.execute()); - Assert.assertThrows(SQLException.class, () -> ps.executeQuery()); - Assert.assertThrows(SQLException.class, () -> ps.executeUpdate()); - if (hasResultSet) { - Assert.assertEquals(ps.executeLargeBatch(), new long[] { Statement.EXECUTE_FAILED }); - } else { - Assert.assertEquals(ps.executeLargeBatch(), new long[] { expectedRowCount }); - } - if (checkTable) - checkTable(stmt, "select * from " + tableName, params); - - // executeBatch - Assert.assertFalse(stmt.execute("truncate table " + tableName), "Should not have result set"); - setParameters(ps, params); - ps.addBatch(); - Assert.assertThrows(SQLException.class, () -> ps.execute()); - Assert.assertThrows(SQLException.class, () -> ps.executeQuery()); - Assert.assertThrows(SQLException.class, () -> ps.executeUpdate()); - if (hasResultSet) { - Assert.assertEquals(ps.executeBatch(), new int[] { Statement.EXECUTE_FAILED }); - } else { - Assert.assertEquals(ps.executeBatch(), new int[] { expectedRowCount }); - } - if (checkTable) - checkTable(stmt, "select * from " + tableName, params); - } 
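- // with JdbcConfig.PROP_CONTINUE_BATCH enabled, a batched statement that cannot run
- // as a batch (here: a query) is reported as Statement.EXECUTE_FAILED in the result
- // array instead of aborting the whole batch with BatchUpdateException.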
- } - - @Test(groups = "integration") - public void testLoadRawData() throws IOException, SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - ClickHouseStatement stmt = conn.createStatement(); - PreparedStatement ps = conn.prepareStatement( - "insert into test_jdbc_load_raw_data select * from {tt 'raw_data'}")) { - Assert.assertFalse(stmt.execute("drop table if exists test_jdbc_load_raw_data; " - + "create table test_jdbc_load_raw_data(s String)engine=Memory"), "Should not have result set"); - ClickHouseConfig config = stmt.getConfig(); - CompletableFuture future; - try (ClickHousePipedOutputStream stream = ClickHouseDataStreamFactory.getInstance() - .createPipedOutputStream(config)) { - ps.setObject(1, ClickHouseExternalTable.builder().name("raw_data") - .columns("s String").format(ClickHouseFormat.RowBinary) - .content(stream.getInputStream()) - .build()); - future = CompletableFuture.supplyAsync(() -> { - try { - return ps.executeUpdate(); - } catch (SQLException e) { - throw new CompletionException(e); - } - }); - - // write bytes into the piped stream - for (int i = 0; i < 101; i++) { - stream.writeAsciiString(Integer.toString(i)); - } - } - - try { - Assert.assertTrue(future.get() >= 0); - } catch (InterruptedException | ExecutionException ex) { - Assert.fail("Failed to get result", ex); - } - } - } - - @Test(groups = "integration") - public void testQueryWithExternalTable() throws SQLException { - if (isCloud()) return; //TODO: testQueryWithExternalTable - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - // FIXME grpc seems has problem dealing with session - if (DEFAULT_PROTOCOL == ClickHouseProtocol.GRPC) { - return; - } - - try (ClickHouseConnection conn = newConnection(new Properties()); - PreparedStatement stmt = conn - .prepareStatement("SELECT count() FROM (select 3 x) WHERE x IN {tt 'table1' }")) { - stmt.setObject(1, ClickHouseExternalTable.builder().name("table1").columns("id Int8") - .format(ClickHouseFormat.TSV) - .content(new ByteArrayInputStream("1".getBytes(StandardCharsets.US_ASCII))) - .build()); - ResultSet rs = stmt.executeQuery(); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 0); - Assert.assertFalse(rs.next()); - } - - try (ClickHouseConnection conn = newConnection(new Properties()); - PreparedStatement stmt = conn.prepareStatement( - "SELECT bitmapContains(my_bitmap, toUInt32(1)) as v1, bitmapContains(my_bitmap, toUInt32(2)) as v2 from {tt 'ext_table'}")) { - stmt.setObject(1, ClickHouseExternalTable.builder().name("ext_table") - .columns("my_bitmap AggregateFunction(groupBitmap,UInt32)").format(ClickHouseFormat.RowBinary) - .content(new ByteArrayInputStream(ClickHouseBitmap.wrap(1, 3, 5).toBytes())) - .asTempTable() - .build()); - ResultSet rs = stmt.executeQuery(); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getInt(2), 0); - Assert.assertFalse(rs.next()); - } - } - - @Test(dataProvider = "typedParameters", groups = "integration") - public void testArrayParameter(String t, Object v) throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - PreparedStatement stmt = conn.prepareStatement("select ?::?")) { - if (conn.getServerVersion().check("(,21.3]")) { - return; - } - - stmt.setObject(1, v); - // stmt.setString(2, t) or stmt.setObject(2, t) will result in quoted string - stmt.setObject(2, new StringBuilder(t)); - ResultSet rs = stmt.executeQuery(); - Assert.assertTrue(rs.next()); - 
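- // because the type name was bound as a StringBuilder it is inlined unquoted, so the
- // statement runs as e.g. "select [1,2,3]::Array(Int32)" (illustrative values) and the
- // cast result below compares equal to the original parameter.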
Assert.assertEquals(rs.getObject(1), v); - Assert.assertFalse(rs.next()); - } - } - - @Test(dataProvider = "nonBatchQueries", groups = "integration") - public void testNonBatchUpdate(String mode, String query) throws SQLException { - String tableName = String.format("test_non_batch_update_%s", mode); - try (Connection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement();) { - stmt.execute(String.format("drop table if exists %1$s; " - + "create table %1$s (id Int32, str String)engine=Memory", tableName)); - - try (PreparedStatement ps = conn.prepareStatement(String.format(query, tableName))) { - if (query.contains("{tt ")) { - ps.setObject(1, - ClickHouseExternalTable.builder().name("tbl").columns("id Int32, str String") - .content(ClickHouseInputStream.of( - Collections.singleton("1\t1\n".getBytes()), byte[].class, null, - null)) - .build()); - } else { - ps.setInt(1, 1); - ps.setString(2, "1"); - } - Assert.assertEquals(ps.executeUpdate(), 1); - } - - // insertion was a success - try (ResultSet rs = stmt.executeQuery(String.format("select * from %s", tableName))) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "1"); - Assert.assertEquals(rs.getInt(2), 1); - Assert.assertFalse(rs.next()); - } - - // make sure it won't throw BatchUpdateException - try (PreparedStatement ps = conn.prepareStatement(String.format(query, tableName))) { - if (query.contains("{tt ")) { - ps.setObject(1, - ClickHouseExternalTable.builder().name("tbl").columns("id Int32, str String") - .content(ClickHouseInputStream.of( - Collections.singleton("2\t2\n".getBytes()), byte[].class, null, - null)) - .build()); - } else { - ps.setInt(1, 2); - ps.setString(2, "2"); - } - stmt.execute(String.format("drop table %s", tableName)); - - SQLException exp = null; - try { - ps.executeUpdate(); - } catch (SQLException e) { - exp = e; - } - Assert.assertTrue(exp.getClass() == SQLException.class); - } - } - } - - @Test(groups = "integration") - public void testInsertAggregateFunction() throws SQLException { - // https://kb.altinity.com/altinity-kb-schema-design/ingestion-aggregate-function/ - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - Statement s = conn.createStatement(); - PreparedStatement ps = conn.prepareStatement( - "insert into test_insert_aggregate_function SELECT uid, updated, arrayReduce('argMaxState', [name], [updated]) " - + "FROM input('uid Int16, updated DateTime, name String')")) { - s.execute("drop table if exists test_insert_aggregate_function;" - + "CREATE TABLE test_insert_aggregate_function (uid Int16, updated SimpleAggregateFunction(max, DateTime), " - + "name AggregateFunction(argMax, String, DateTime)) ENGINE=AggregatingMergeTree order by uid"); - ps.setInt(1, 1); - ps.setString(2, "2020-01-02 00:00:00"); - ps.setString(3, "b"); - ps.addBatch(); - ps.setInt(1, 1); - ps.setString(2, "2020-01-01 00:00:00"); - ps.setString(3, "a"); - ps.addBatch(); - ps.executeBatch(); - try (ResultSet rs = s.executeQuery( - "select uid, max(updated) AS updated, argMaxMerge(name) from test_insert_aggregate_function group by uid")) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), "2020-01-02 00:00:00"); - Assert.assertEquals(rs.getString(3), "b"); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(groups = "integration") - public void testInsertByteArray() throws SQLException { - Properties props = new Properties(); - props.setProperty("use_binary_string", 
"true"); - try (ClickHouseConnection conn = newConnection(props); Statement s = conn.createStatement()) { - s.execute("drop table if exists test_insert_byte_array;" - + "create table test_insert_byte_array(id String, b Array(Int8), s Array(Array(Int8))) engine=Memory"); - try (PreparedStatement stmt = conn.prepareStatement( - "insert into test_insert_byte_array(id, b, s) values (?,?,?)")) { - stmt.setString(1, "1"); - stmt.setObject(2, new byte[] { 1, 2, 3 }); - stmt.setObject(3, new byte[][] { { 1, 2, 3 }, { 4, 5, 6 } }); - Assert.assertEquals(stmt.executeUpdate(), 1); - - ResultSet rs = s.executeQuery("select * from test_insert_byte_array order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(2), new byte[] { 1, 2, 3 }); - Assert.assertEquals(rs.getObject(3), new byte[][] { { 1, 2, 3 }, { 4, 5, 6 } }); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(groups = "integration") - public void testInsertDefaultValue() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - Statement s = conn.createStatement(); - PreparedStatement ps = conn.prepareStatement( - "INSERT INTO test_insert_default_value select id, name from input('id UInt32, name Nullable(String)')")) { - s.execute("DROP TABLE IF EXISTS test_insert_default_value; CREATE TABLE test_insert_default_value(n Int32, s String DEFAULT 'secret') engine=MergeTree ORDER BY n"); - ps.setInt(1, 1); - ps.setString(2, null); - ps.addBatch(); - ps.setInt(1, -1); - ps.setNull(2, Types.ARRAY); - ps.addBatch(); - ps.executeBatch(); - try (ResultSet rs = s.executeQuery(String.format("SELECT * FROM test_insert_default_value ORDER BY n %s", isCloud() ? "SETTINGS select_sequential_consistency=1" : ""))) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), -1); - Assert.assertEquals(rs.getString(2), "secret"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), "secret"); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(groups = "integration", enabled = false) - public void testOutFileAndInFile() throws SQLException { - if (isCloud()) return; //TODO: testOutFileAndInFile - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - if (DEFAULT_PROTOCOL != ClickHouseProtocol.HTTP) { - throw new SkipException("Skip non-http protocol"); - } - - Properties props = new Properties(); - props.setProperty("localFile", "true"); - File f = new File("f.csv"); - if (f.exists()) { - f.delete(); - } - f.deleteOnExit(); - try (ClickHouseConnection conn = newConnection(props); Statement s = conn.createStatement()) { - s.execute("drop table if exists test_load_infile_with_params;" - + "CREATE TABLE test_load_infile_with_params(n Int32, s String) engine=Memory"); - try (PreparedStatement stmt = conn - .prepareStatement("SELECT number n, toString(n) from numbers(999) into outfile ?")) { - stmt.setString(1, f.getName()); - try (ResultSet rs = stmt.executeQuery()) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), f.getName()); - Assert.assertFalse(rs.next()); - } - Assert.assertTrue(f.exists()); - - stmt.setString(1, f.getName()); - Assert.assertThrows(SQLException.class, () -> stmt.executeQuery()); - stmt.setString(1, f.getName() + "!"); - try (ResultSet rs = stmt.executeQuery()) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), f.getName()); - Assert.assertFalse(rs.next()); - } - } - - 
try (PreparedStatement stmt = conn - .prepareStatement("INSERT INTO test_load_infile_with_params FROM infile ? format CSV")) { - stmt.setString(1, f.getName()); - stmt.addBatch(); - stmt.setString(1, f.getName()); - stmt.addBatch(); - stmt.setString(1, f.getName() + "!"); - stmt.addBatch(); - stmt.executeBatch(); - } - - try (ResultSet rs = s.executeQuery("SELECT count(1), uniqExact(n) FROM test_load_infile_with_params")) { - Assert.assertTrue(rs.next(), "Should have at least one row"); - Assert.assertEquals(rs.getInt(1), 999 * 3); - Assert.assertEquals(rs.getInt(2), 999); - Assert.assertFalse(rs.next(), "Should have only one row"); - } - } - } - - @Test(dataProvider = "columnsWithDefaultValue", groups = "integration") - public void testInsertDefaultValue(String columnType, String defaultExpr, String defaultValue) throws SQLException { - Properties props = new Properties(); - props.setProperty(JdbcConfig.PROP_NULL_AS_DEFAULT, "1"); - props.setProperty(ClickHouseClientOption.COMPRESS.getKey(), "false"); - props.setProperty(ClickHouseClientOption.FORMAT.getKey(), - ClickHouseFormat.TabSeparatedWithNamesAndTypes.name()); - String tableName = "test_insert_default_value_" + columnType.split("\\(")[0].trim().toLowerCase(); - try (ClickHouseConnection conn = newConnection(props); Statement s = conn.createStatement()) { - if (conn.getUri().toString().contains(":grpc:")) { - throw new SkipException("Skip gRPC test"); - } else if (!conn.getServerVersion().check("[21.8,)")) { - throw new SkipException("Skip test when ClickHouse is older than 21.8"); - } - s.execute(String.format("drop table if exists %s; ", tableName) - + String.format("CREATE TABLE %s(id Int8, v %s DEFAULT %s) engine=MergeTree ORDER BY id", tableName, columnType, - defaultExpr)); - s.executeUpdate(String.format("INSERT INTO %s values(1, null)", tableName)); - try (PreparedStatement stmt = conn - .prepareStatement(String.format("insert into %s values(?,?)", tableName))) { - stmt.setInt(1, 2); - stmt.setObject(2, null); - stmt.executeUpdate(); - stmt.setInt(1, 3); - stmt.setNull(2, Types.OTHER); - stmt.executeUpdate(); - } - - int rowCount = 0; - try (ResultSet rs = s.executeQuery(String.format("select * from %s order by id %s", tableName, isCloud() ? 
"SETTINGS select_sequential_consistency=1" : ""))) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), defaultValue); - Assert.assertFalse(rs.wasNull(), "Should not be null"); - rowCount++; - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getString(2), defaultValue); - Assert.assertFalse(rs.wasNull(), "Should not be null"); - rowCount++; - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 3); - Assert.assertEquals(rs.getString(2), defaultValue); - Assert.assertFalse(rs.wasNull(), "Should not be null"); - rowCount++; - Assert.assertFalse(rs.next(), "Should have only 3 rows"); - } - Assert.assertEquals(rowCount, 3); - } catch (SQLException e) { - // 'Unknown data type family', 'Missing columns' or 'Cannot create table column' - if (e.getErrorCode() == 50 || e.getErrorCode() == 47 || e.getErrorCode() == 44) { - return; - } - throw e; - } - } - - @Test(groups = "integration") - public void testInsertNestedValue() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); Statement s = conn.createStatement()) { - s.execute("drop table if exists test_nested_insert;" - + "create table test_nested_insert(id UInt32, n Nested(c1 Int8, c2 Int8))engine=Memory"); - try (PreparedStatement ps = conn.prepareStatement("insert into test_nested_insert")) { - // insert into test_nested_insert values(0, [],[]) - ps.setObject(1, 0); - ps.setObject(2, new int[0]); - ps.setObject(3, new int[0]); - Assert.assertEquals(ps.executeUpdate(), 1); - - // insert into test_nested_insert values(1, [1],[1]) - ps.setInt(1, 1); - ps.setBytes(2, new byte[] { 1 }); - ps.setArray(3, conn.createArrayOf("Array(Int8)", new Byte[] { 1 })); - Assert.assertEquals(ps.executeUpdate(), 1); - - // insert into test_nested_insert values(2, [1,2],[1,2]) - ps.setObject(1, ClickHouseIntegerValue.of(2)); - ps.setObject(2, ClickHouseByteArrayValue.of(new byte[] { 1, 2 })); - ps.setObject(3, ClickHouseByteArrayValue.of(new byte[] { 1, 2 })); - Assert.assertEquals(ps.executeUpdate(), 1); - - // insert into test_nested_insert values(3, [1,2,3],[1,2,3]) - ps.setString(1, "3"); - ps.setString(2, "[1,2,3]"); - ps.setString(3, "[1,2,3]"); - Assert.assertEquals(ps.executeUpdate(), 1); - } - - try (ResultSet rs = s.executeQuery("select * from test_nested_insert order by id")) { - for (int i = 0; i < 4; i++) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), i); - byte[] bytes = new byte[i]; - for (int j = 0; j < i; j++) { - bytes[j] = (byte) (j + 1); - } - Assert.assertEquals(rs.getObject(2), bytes); - Assert.assertEquals(rs.getObject(3), bytes); - } - Assert.assertFalse(rs.next()); - } - - // same case but this time disable flatten_nested - s.execute("set flatten_nested=0; drop table if exists test_nested_insert; " - + "create table test_nested_insert(id UInt32, n Nested(c1 Int8, c2 Int8))engine=Memory"); - try (PreparedStatement ps = conn.prepareStatement("insert into test_nested_insert")) { - // insert into test_nested_insert values(0, []) - ps.setObject(1, 0); - ps.setObject(2, new Integer[0][]); - Assert.assertEquals(ps.executeUpdate(), 1); - - // insert into test_nested_insert values(1, [(1,1)]) - ps.setInt(1, 1); - ps.setArray(2, conn.createArrayOf("Array(Array(Int8))", new Byte[][] { { 1, 1 } })); - Assert.assertEquals(ps.executeUpdate(), 1); - - // insert into test_nested_insert values(2, [(1,1),(2,2)]) - ps.setObject(1, ClickHouseIntegerValue.of(2)); - ps.setObject(2, - 
ClickHouseNestedValue.of( - ClickHouseColumn.of("n", "Nested(c1 Int8, c2 Int8)").getNestedColumns(), - new Byte[][] { { 1, 1 }, { 2, 2 } })); - Assert.assertEquals(ps.executeUpdate(), 1); - - // insert into test_nested_insert values(3, [(1,1),(2,2),(3,3)]) - ps.setString(1, "3"); - // ps.setString(2, "[(1,1),(2,2),(3,3)]"); - ps.setObject(2, - ClickHouseNestedValue.of( - ClickHouseColumn.of("n", "Nested(c1 Int8, c2 Int8)").getNestedColumns(), - new Byte[][] { { 1, 1 }, { 2, 2 }, { 3, 3 } })); - Assert.assertEquals(ps.executeUpdate(), 1); - } - - try (ResultSet rs = s.executeQuery("select * from test_nested_insert order by id")) { - for (int i = 0; i < 4; i++) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), i); - Byte[][] bytes = new Byte[i][]; - for (int j = 0; j < i; j++) { - Byte[] b = new Byte[2]; - for (int k = 0; k < 2; k++) { - b[k] = (byte) (j + 1); - } - bytes[j] = b; - } - Assert.assertEquals(rs.getObject(2), bytes); - } - Assert.assertFalse(rs.next()); - } - - // https://github.com/ClickHouse/clickhouse-java/issues/1259 - s.execute("set flatten_nested=0; drop table if exists test_nested_insert; " - + "create table test_nested_insert(id UInt32, n Nested(c1 Int8, c2 LowCardinality(String)))engine=Memory"); - try (PreparedStatement ps = conn.prepareStatement("insert into test_nested_insert(id, n)")) { - ps.setString(1, "1"); - ps.setObject(2, new Object[][] { { 1, "foo1" }, { 2, "bar1" }, { 3, "bug1" } }); - ps.executeUpdate(); - } - // try invalid query - try (PreparedStatement ps = conn.prepareStatement( - "insert into test_nested_insert(id, n) select id, n from input('id UInt32, n Nested(c1 Int8, c2 LowCardinality(String)))'")) { - ps.setString(1, "2"); - ps.setObject(2, new Object[][] { { 4, "foo2" }, { 5, "bar2" }, { 6, "bug2" } }); - ps.executeUpdate(); - Assert.fail("Query should fail"); - } catch (SQLException e) { - Assert.assertTrue(e.getMessage().startsWith("Missing ")); - } - // now use input function - try (PreparedStatement ps = conn.prepareStatement( - "insert into test_nested_insert(id, n) select id, n from input('id UInt32, n Nested(c1 Int8, c2 LowCardinality(String))') settings flatten_nested=0")) { - ps.setString(1, "2"); - ps.setObject(2, new Object[][] { { 4, "foo2" }, { 5, "bar2" }, { 6, "bug2" } }); - ps.executeUpdate(); - } - try (ResultSet rs = s.executeQuery("select * from test_nested_insert order by id")) { - for (int i = 1; i <= 2; i++) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), i); - Object[][] nestedValue = (Object[][]) rs.getObject(2); - Assert.assertEquals(nestedValue.length, 3); - String[] arr = new String[] { "foo", "bar", "bug" }; - for (int j = 1; j <= 3; j++) { - Assert.assertEquals(nestedValue[j - 1], - new Object[] { (byte) (j + (i - 1) * 3), arr[j - 1] + i }); - } - } - Assert.assertFalse(rs.next()); - } - - s.execute("set flatten_nested=1; drop table if exists test_nested_insert; " - + "create table test_nested_insert(id UInt32, n Nested(c1 Int8, c2 LowCardinality(String)))engine=Memory"); - try (PreparedStatement ps = conn.prepareStatement( - "insert into test_nested_insert(id, n.c1, n.c2) select id, c1, c2 from input('id UInt32, c1 Array(Int8), c2 Array(LowCardinality(String))')")) { - ps.setString(1, "3"); - ps.setObject(2, new byte[] { 7, 8, 9 }); - ps.setObject(3, new String[] { "foo3", "bar3", "bug3" }); - ps.executeUpdate(); - } - try (ResultSet rs = s.executeQuery("select * from test_nested_insert order by id")) { - Assert.assertTrue(rs.next()); - 
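- // with flatten_nested=1 the Nested column is exposed as the parallel arrays n.c1 and
- // n.c2, which is why this row reads back as a byte[] and a String[] rather than as an
- // array of tuples.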
Assert.assertEquals(rs.getInt(1), 3); - Assert.assertEquals(rs.getObject(2), new byte[] { 7, 8, 9 }); - Assert.assertEquals(rs.getObject(3), new String[] { "foo3", "bar3", "bug3" }); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(dataProvider = "columnsWithoutDefaultValue", groups = "integration") - public void testInsertNullValue(String columnType, String defaultValue) throws SQLException { - Properties props = new Properties(); - props.setProperty(ClickHouseClientOption.FORMAT.getKey(), - ClickHouseFormat.TabSeparatedWithNamesAndTypes.name()); - props.setProperty(ClickHouseClientOption.CUSTOM_SETTINGS.getKey(), "input_format_null_as_default=0"); - String tableName = "test_insert_null_value_" + columnType.split("\\(")[0].trim().toLowerCase(); - try (ClickHouseConnection conn = newConnection(props); Statement s = conn.createStatement()) { - if (conn.getUri().toString().contains(":grpc:")) { - throw new SkipException("Skip gRPC test"); - } else if (!conn.getServerVersion().check("[22.3,)")) { - throw new SkipException("Skip test when ClickHouse is older than 22.3"); - } - s.execute(String.format("drop table if exists %s; ", tableName) - + String.format("create table %s(id Int8, v %s)engine=Memory", tableName, columnType)); - SQLException sqlException = null; - try (PreparedStatement stmt = conn - .prepareStatement(String.format("insert into %s values(?,?)", tableName))) { - try { - ((ClickHouseStatement) stmt).setNullAsDefault(0); - stmt.setInt(1, 0); - stmt.setObject(2, null); - stmt.executeUpdate(); - } catch (SQLException e) { - sqlException = e; - } - Assert.assertNotNull(sqlException, "Should end-up with SQL exception when nullAsDefault < 1"); - sqlException = null; - - try { - ((ClickHouseStatement) stmt).setNullAsDefault(1); - stmt.setInt(1, 0); - stmt.setNull(2, Types.OTHER); - stmt.executeUpdate(); - } catch (SQLException e) { - sqlException = e; - } - Assert.assertNotNull(sqlException, "Should end-up with SQL exception when nullAsDefault = 1"); - - ((ClickHouseStatement) stmt).setNullAsDefault(2); - stmt.setInt(1, 1); - stmt.setObject(2, null); - stmt.executeUpdate(); - stmt.setInt(1, 2); - stmt.setNull(2, Types.OTHER); - stmt.executeUpdate(); - } - - int rowCount = 0; - try (ResultSet rs = s.executeQuery(String.format("select * from %s order by id", tableName))) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), defaultValue); - Assert.assertFalse(rs.wasNull(), "Should not be null"); - rowCount++; - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getString(2), defaultValue); - Assert.assertFalse(rs.wasNull(), "Should not be null"); - rowCount++; - Assert.assertFalse(rs.next(), "Should have only 2 rows"); - } - Assert.assertEquals(rowCount, 2); - } catch (SQLException e) { - // 'Unknown data type family', 'Missing columns' or 'Cannot create table column' - if (e.getErrorCode() == 50 || e.getErrorCode() == 47 || e.getErrorCode() == 44) { - return; - } - throw e; - } - } - - @Test(groups = "integration") - public void testInsertStringAsArray() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement(); - PreparedStatement stmt = conn.prepareStatement( - "insert into test_array_insert(id, a, b) values (toUInt32(?),?,?)")) { - s.execute("drop table if exists test_array_insert;" - + "create table test_array_insert(id UInt32, a Array(Int16), b Array(Nullable(UInt32)))engine=Memory"); - - 
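- // string parameters are parsed into the declared array types here: "[1,2,3]" into
- // Array(Int16) and "[3,null,1]" into Array(Nullable(UInt32)) with the null element
- // preserved, as verified below.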
stmt.setString(1, "1"); - stmt.setString(2, "[1,2,3]"); - stmt.setString(3, "[3,null,1]"); - Assert.assertEquals(stmt.executeUpdate(), 1); - - ResultSet rs = s.executeQuery("select * from test_array_insert order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(2), new short[] { 1, 2, 3 }); - Assert.assertEquals(rs.getObject(3), - new UnsignedInteger[] { UnsignedInteger.valueOf(3), null, UnsignedInteger.ONE }); - Assert.assertFalse(rs.next()); - } - - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_string_array_insert; " - + "create table test_string_array_insert(id UInt32, a Array(LowCardinality(String)), b Array(Nullable(String)))engine=Memory"); - - try (PreparedStatement stmt = conn.prepareStatement( - "insert into test_string_array_insert(id, a, b) values (?,?,?)")) { - stmt.setString(1, "1"); - stmt.setObject(2, new String[] { "1", "2", "3" }); - stmt.setArray(3, conn.createArrayOf("String", new String[] { "3", null, "1" })); - Assert.assertEquals(stmt.executeUpdate(), 1); - } - - ResultSet rs = s.executeQuery("select * from test_string_array_insert order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getArray(2).getArray(), new String[] { "1", "2", "3" }); - Assert.assertEquals(rs.getObject(3), new String[] { "3", null, "1" }); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testInsertWithFunction() throws SQLException, UnknownHostException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement(); - PreparedStatement stmt = conn.prepareStatement( - conn.getServerVersion().check("[22.3,)") - ? "insert into test_issue_315(id, src, dst) values (?+0,?,?)" - : "insert into test_issue_315(id, src, dst) values (?,IPv4ToIPv6(toIPv4(?)),IPv4ToIPv6(toIPv4(?)))")) { - s.execute("drop table if exists test_issue_315; " - + "create table test_issue_315(id Int32, src IPv6, dst IPv6)engine=Memory"); - - stmt.setObject(1, 1); - stmt.setString(2, "127.0.0.1"); - stmt.setString(3, "127.0.0.2"); - Assert.assertEquals(stmt.executeUpdate(), 1); - - // omitted '(id, src, dst)' in the query for simplicity - try (PreparedStatement ps = conn.prepareStatement("insert into test_issue_315")) { - stmt.setObject(1, 2); - stmt.setObject(2, Inet4Address.getByName("127.0.0.2")); - stmt.setObject(3, Inet6Address.getByName("::1")); - Assert.assertEquals(stmt.executeUpdate(), 1); - } - - ResultSet rs = s.executeQuery("select * from test_issue_315 order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), "0:0:0:0:0:ffff:7f00:1"); - Assert.assertEquals(rs.getString(3), "0:0:0:0:0:ffff:7f00:2"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getString(2), "0:0:0:0:0:ffff:7f00:2"); - Assert.assertEquals(rs.getString(3), - conn.getServerVersion().check("[22.3,)") ? 
"0:0:0:0:0:0:0:1" : "0:0:0:0:0:ffff:0:0"); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testInsertWithSelect() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement(); - PreparedStatement ps1 = conn - .prepareStatement("insert into test_issue_402(uid,uuid) select 2,generateUUIDv4()"); - PreparedStatement ps2 = conn.prepareStatement( - "insert into test_issue_402\nselect ?, max(uuid) from test_issue_402 where uid in (?) group by uid having count(*) = 1")) { - s.execute("drop table if exists test_issue_402; " - + "create table test_issue_402(uid Int32, uuid UUID)engine=Memory"); - Assert.assertEquals(ps1.executeUpdate(), 1); - ps2.setInt(1, 1); - ps2.setInt(2, 2); - Assert.assertEquals(ps2.executeUpdate(), 1); - - ResultSet rs = s.executeQuery("select * from test_issue_402 order by uid"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - String uuid = rs.getString(2); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getString(2), uuid); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testQueryWithNamedParameter() throws SQLException { - Properties props = new Properties(); - props.setProperty(JdbcConfig.PROP_NAMED_PARAM, "true"); - LocalDateTime ts = LocalDateTime.ofEpochSecond(10000, 123456789, ZoneOffset.UTC); - try (ClickHouseConnection conn = newConnection(props); - PreparedStatement stmt = conn - .prepareStatement("select :ts1 ts1, :ts2(DateTime32) ts2, :ts2 ts3")) { - // just two parameters here - ts2 is referenced twice - stmt.setObject(1, ts); - stmt.setObject(2, ts); - ResultSet rs = stmt.executeQuery(); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "1970-01-01 02:46:40.123456789"); - Assert.assertEquals(rs.getString(2), "1970-01-01 02:46:40"); - Assert.assertEquals(rs.getString(3), "1970-01-01 02:46:40"); - Assert.assertFalse(rs.next()); - } - - // try again using JDBC standard question mark placeholder - try (ClickHouseConnection conn = newConnection(); - PreparedStatement stmt = conn - .prepareStatement("select ? ts1, ? ts2, ? 
ts3")) { - // unlike above, this time we have 3 parameters - stmt.setObject(1, "1970-01-01 02:46:40.123456789"); - stmt.setObject(2, "1970-01-01 02:46:40"); - stmt.setObject(3, "1970-01-01 02:46:40"); - ResultSet rs = stmt.executeQuery(); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "1970-01-01 02:46:40.123456789"); - Assert.assertEquals(rs.getString(2), "1970-01-01 02:46:40"); - Assert.assertEquals(rs.getString(3), "1970-01-01 02:46:40"); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testInsertWithAndSelect() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_insert_with_and_select; " - + "CREATE TABLE test_insert_with_and_select(value String) ENGINE=Memory"); - try (PreparedStatement ps = conn.prepareStatement( - "INSERT INTO test_insert_with_and_select(value) WITH t as ( SELECT 'testValue1') SELECT * FROM t")) { - ps.executeUpdate(); - } - - try (PreparedStatement ps = conn.prepareStatement( - "INSERT INTO test_insert_with_and_select(value) WITH t as ( SELECT 'testValue2' as value) SELECT * FROM t WHERE value != ?")) { - ps.setString(1, ""); - ps.executeUpdate(); - } - - ResultSet rs = s.executeQuery("select * from test_insert_with_and_select order by value"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString("Value"), "testValue1"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString("VALUE"), "testValue2"); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testInsertWithMultipleValues() throws MalformedURLException, SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_insert_with_multiple_values; " - + "CREATE TABLE test_insert_with_multiple_values(a Int32, b Nullable(String)) ENGINE=Memory"); - try (PreparedStatement ps = conn.prepareStatement( - "INSERT INTO test_insert_with_multiple_values values(?, ?), (2 , ? ), ( ? , '') , (?,?) ,( ? ,? 
)")) { - ps.setInt(1, 1); - ps.setNull(2, Types.VARCHAR); - ps.setObject(3, "er"); - ps.setInt(4, 3); - ps.setInt(5, 4); - ps.setURL(6, new URL("http://some.host")); - ps.setInt(7, 5); - ps.setString(8, null); - ps.executeUpdate(); - } - - try (ResultSet rs = s.executeQuery("select * from test_insert_with_multiple_values order by a")) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getByte(1), (byte) 1); - Assert.assertEquals(rs.getObject(2), null); - Assert.assertTrue(rs.wasNull()); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getBigDecimal(1), BigDecimal.valueOf(2L)); - Assert.assertEquals(rs.getString(2), "er"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "3"); - Assert.assertEquals(rs.getObject(2), ""); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getShort(1), (short) 4); - Assert.assertEquals(rs.getURL(2), new URL("http://some.host")); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1), Integer.valueOf(5)); - Assert.assertEquals(rs.getString(2), null); - Assert.assertTrue(rs.wasNull()); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(groups = "integration") - public void testInsertWithNullDateTime() throws SQLException { - Properties props = new Properties(); - props.setProperty(JdbcConfig.PROP_NULL_AS_DEFAULT, "2"); - try (ClickHouseConnection conn = newConnection(props); - Statement s = conn.createStatement()) { - s.execute("drop table if exists test_insert_with_null_datetime; " - + "CREATE TABLE test_insert_with_null_datetime(a Int32, " - + "b01 DateTime32, b02 DateTime32('America/Los_Angeles'), " - + "b11 DateTime32, b12 DateTime32('America/Los_Angeles'), " - + "c01 DateTime64(3), c02 DateTime64(6, 'Asia/Shanghai'), " - + "c11 DateTime64(3), c12 DateTime64(6, 'Asia/Shanghai')) ENGINE=Memory"); - try (PreparedStatement ps = conn - .prepareStatement("INSERT INTO test_insert_with_null_datetime values(?, ? ,? 
,?,?)")) { - ps.setInt(1, 1); - ps.setObject(2, LocalDateTime.now()); - ps.setObject(3, LocalDateTime.now()); - ps.setTimestamp(4, null); - ps.setNull(5, Types.TIMESTAMP); - ps.setObject(6, LocalDateTime.now()); - ps.setObject(7, LocalDateTime.now()); - ps.setObject(8, null); - ps.setTimestamp(9, null, Calendar.getInstance()); - ps.executeUpdate(); - } - - try (ResultSet rs = s.executeQuery("select * from test_insert_with_null_datetime order by a")) { - Assert.assertTrue(rs.next()); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(groups = "integration") - public void testInsertWithFormat() throws SQLException { - Properties props = new Properties(); - props.setProperty(ClickHouseHttpOption.WAIT_END_OF_QUERY.getKey(), "true"); - try (ClickHouseConnection conn = newConnection(props); Statement s = conn.createStatement()) { - if (!conn.getServerVersion().check("[22.5,)")) { - throw new SkipException( - "Skip due to breaking change introduced by https://github.com/ClickHouse/ClickHouse/pull/35883"); - } - - s.execute("drop table if exists test_insert_with_format; " - + "CREATE TABLE test_insert_with_format(i Int32, s String) ENGINE=MergeTree ORDER BY i"); - try (PreparedStatement ps = conn.prepareStatement("INSERT INTO test_insert_with_format format CSV")) { - Assert.assertTrue(ps instanceof StreamBasedPreparedStatement); - Assert.assertEquals(ps.getParameterMetaData().getParameterCount(), 1); - Assert.assertEquals(ps.getParameterMetaData().getParameterClassName(1), String.class.getName()); - ps.setObject(1, ClickHouseInputStream.of("1,\\N\n2,two")); - ps.executeUpdate(); - } - - try (ResultSet rs = s.executeQuery("SELECT * FROM test_insert_with_format ORDER BY i" + (isCloud() ? " SETTINGS select_sequential_consistency=1" : ""))) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), ""); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getString(2), "two"); - Assert.assertFalse(rs.next()); - } - - s.execute("truncate table test_insert_with_format"); - - try (PreparedStatement ps = conn - .prepareStatement( - "INSERT INTO test_insert_with_format(s,i)SETTINGS insert_null_as_default=1 format JSONEachRow")) { - Assert.assertTrue(ps instanceof StreamBasedPreparedStatement); - ps.setString(1, "{\"i\":null,\"s\":null}"); - ps.addBatch(); - ps.setObject(1, "{\"i\":1,\"s\":\"one\"}"); - ps.addBatch(); - ps.setObject(1, new ClickHouseWriter() { - @Override - public void write(ClickHouseOutputStream out) throws IOException { - out.write("{\"i\":2,\"s\":\"22\"}".getBytes()); - } - }); - ps.addBatch(); - Assert.assertEquals(ps.executeBatch(), new int[] { 1, 1, 1 }); - } - - try (PreparedStatement ps = conn - .prepareStatement( - "INSERT INTO test_insert_with_format(s,i) select * from input('s String, i Int32') format CSV")) { - Assert.assertFalse(ps instanceof StreamBasedPreparedStatement); - ps.setInt(2, 3); - ps.setString(1, "three"); - Assert.assertEquals(ps.executeUpdate(), 1); - } - - try (PreparedStatement ps = conn.prepareStatement( - "select i,s from test_insert_with_format order by i format RowBinaryWithNamesAndTypes"); - ResultSet rs = ps.executeQuery()) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 0); - Assert.assertEquals(rs.getString(2), ""); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), "one"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - 
Assert.assertEquals(rs.getString(2), "22"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 3); - Assert.assertEquals(rs.getString(2), "three"); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(groups = "integration") - public void testInsertWithSettings() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); Statement s = conn.createStatement()) { - if (!conn.getServerVersion().check("[22.5,)")) { - throw new SkipException( - "Skip due to breaking change introduced by https://github.com/ClickHouse/ClickHouse/pull/35883"); - } - - s.execute("drop table if exists test_insert_with_settings; " - + "CREATE TABLE test_insert_with_settings(i Int32, s String) ENGINE=Memory"); - try (PreparedStatement ps = conn - .prepareStatement( - "INSERT INTO test_insert_with_settings SETTINGS async_insert=1,wait_for_async_insert=1 values(?, ?)")) { - ps.setInt(1, 1); - ps.setString(2, "1"); - ps.addBatch(); - ps.executeBatch(); - } - - try (ResultSet rs = s.executeQuery("select * from test_insert_with_settings order by i")) { - Assert.assertTrue(rs.next()); - Assert.assertFalse(rs.next()); - } - } - } - - //TODO: This test is failing both on cloud and locally, need to investigate - @Test(groups = "integration", enabled = false) - public void testGetMetadataTypes() throws SQLException { - try (Connection conn = newConnection(new Properties()); - PreparedStatement ps = conn.prepareStatement("select ? a, ? b")) { - ResultSetMetaData md = ps.getMetaData(); - Assert.assertEquals(md.getColumnCount(), 2); - Assert.assertEquals(md.getColumnName(1), "a"); - Assert.assertEquals(md.getColumnTypeName(1), "Nullable(Nothing)"); - Assert.assertEquals(md.getColumnName(2), "b"); - Assert.assertEquals(md.getColumnTypeName(2), "Nullable(Nothing)"); - - ps.setString(1, "x"); - md = ps.getMetaData(); - Assert.assertEquals(md.getColumnCount(), 2); - Assert.assertEquals(md.getColumnName(1), "a"); - Assert.assertEquals(md.getColumnTypeName(1), "String"); - Assert.assertEquals(md.getColumnName(2), "b"); - Assert.assertEquals(md.getColumnTypeName(2), "Nullable(Nothing)"); - - ps.setObject(2, new BigInteger("12345")); - md = ps.getMetaData(); - Assert.assertEquals(md.getColumnCount(), 2); - Assert.assertEquals(md.getColumnName(1), "a"); - Assert.assertEquals(md.getColumnTypeName(1), "String"); - Assert.assertEquals(md.getColumnName(2), "b"); - Assert.assertEquals(md.getColumnTypeName(2), "UInt16"); - - ps.addBatch(); - ps.setInt(1, 2); - md = ps.getMetaData(); - Assert.assertEquals(md.getColumnCount(), 2); - Assert.assertEquals(md.getColumnName(1), "a"); - Assert.assertEquals(md.getColumnTypeName(1), "String"); - Assert.assertEquals(md.getColumnName(2), "b"); - Assert.assertEquals(md.getColumnTypeName(2), "UInt16"); - - ps.clearBatch(); - ps.clearParameters(); - md = ps.getMetaData(); - Assert.assertEquals(md.getColumnCount(), 2); - Assert.assertEquals(md.getColumnName(1), "a"); - Assert.assertEquals(md.getColumnTypeName(1), "Nullable(Nothing)"); - Assert.assertEquals(md.getColumnName(2), "b"); - Assert.assertEquals(md.getColumnTypeName(2), "Nullable(Nothing)"); - } - } - - @Test(groups = "integration", enabled = false) - public void testGetMetadataStatements() throws SQLException { - if (isCloud()) return; //TODO: testGetMetadataStatements - Skipping because it doesn't seem valid, we should revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - try (Connection conn = newConnection(new Properties()); 
- PreparedStatement createPs = conn.prepareStatement("create table test_get_metadata_statements (col String) Engine=Log"); - PreparedStatement selectPs = conn.prepareStatement("select 'Hello, World!'"); - PreparedStatement insertPs = conn.prepareStatement( - "insert into test_get_metadata_statements select 'Hello, World!'"); - PreparedStatement updatePs = conn.prepareStatement( - "update test_get_metadata_statements set col = 'Bye, World!'"); - PreparedStatement grantPs = conn.prepareStatement("grant select on * to default"); - PreparedStatement commitPS = conn.prepareStatement("commit");) { - - // Only select shall have valid metadata - ResultSetMetaData selectMetaData = selectPs.getMetaData(); - Assert.assertNotNull(selectMetaData); - Assert.assertEquals(selectMetaData.getColumnCount(), 1); - Assert.assertEquals(selectMetaData.getColumnTypeName(1), "String"); - - // The rest shall return null - Assert.assertNull(createPs.getMetaData()); - Assert.assertNull(insertPs.getMetaData()); - Assert.assertNull(updatePs.getMetaData()); - Assert.assertNull(grantPs.getMetaData()); - Assert.assertNull(commitPS.getMetaData()); - } - } - - @Test(groups = "integration") - public void testGetParameterMetaData() throws SQLException { - try (Connection conn = newConnection(new Properties()); - PreparedStatement emptyPs = conn.prepareStatement("select 1"); - PreparedStatement inputPs = conn.prepareStatement( - "insert into non_existing_table select * from input('col2 String, col3 Int8, col1 JSON')"); - PreparedStatement sqlPs = conn.prepareStatement("select ?, toInt32(?), ? b"); - PreparedStatement tablePs = conn.prepareStatement( - "select a.id, c.* from {tt 'col2'} a inner join {tt 'col3'} b on a.id = b.id left outer join {tt 'col1'} c on b.id = c.id");) { - Assert.assertEquals(emptyPs.getParameterMetaData().getParameterCount(), 0); - - for (PreparedStatement ps : new PreparedStatement[] { inputPs, sqlPs }) { - Assert.assertNotNull(ps.getParameterMetaData()); - Assert.assertTrue(ps.getParameterMetaData() == ps.getParameterMetaData(), - "parameter meta data should be singleton"); - Assert.assertEquals(ps.getParameterMetaData().getParameterCount(), 3); - Assert.assertEquals(ps.getParameterMetaData().getParameterMode(3), ParameterMetaData.parameterModeIn); - Assert.assertEquals(ps.getParameterMetaData().getParameterType(3), Types.VARCHAR); - Assert.assertEquals(ps.getParameterMetaData().getPrecision(3), 0); - Assert.assertEquals(ps.getParameterMetaData().getScale(3), 0); - Assert.assertEquals(ps.getParameterMetaData().getParameterClassName(3), Object.class.getName()); - Assert.assertEquals(ps.getParameterMetaData().getParameterTypeName(3), ClickHouseDataType.JSON.name()); - } - } - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseResultSetTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseResultSetTest.java deleted file mode 100644 index 08448e7ba..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseResultSetTest.java +++ /dev/null @@ -1,458 +0,0 @@ -package com.clickhouse.jdbc; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.sql.Time; -import java.sql.Timestamp; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.OffsetDateTime; -import java.time.ZonedDateTime; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Properties; 
-import java.util.TimeZone; -import java.util.function.BiFunction; - -import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseSimpleResponse; -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseRecord; -import com.clickhouse.data.ClickHouseValues; -import com.clickhouse.data.value.ClickHouseDateTimeValue; -import com.clickhouse.data.value.ClickHouseOffsetDateTimeValue; -import com.clickhouse.data.value.UnsignedByte; -import com.clickhouse.data.value.UnsignedInteger; - -import org.testng.Assert; -import org.testng.SkipException; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -public class ClickHouseResultSetTest extends JdbcIntegrationTest { - @DataProvider(name = "nullableTypes") - private Object[][] getNullableTypes() { - return new Object[][] { - new Object[] { ClickHouseDataType.Int32, Integer.valueOf(12345), - new BiFunction() { - @Override - public Object apply(ResultSet rs, Integer i) { - try { - Object obj = rs.getInt(i); - if (obj != null) { - obj = rs.getFloat(i); - } - if (obj != null) { - obj = rs.getBigDecimal(i); - } - return obj; - } catch (SQLException e) { - throw new IllegalArgumentException(e); - } - } - } }, - new Object[] { ClickHouseDataType.Date, LocalDate.of(2022, 1, 7), - new BiFunction() { - @Override - public Object apply(ResultSet rs, Integer i) { - try { - Object obj = rs.getDate(i); - if (obj != null) { - obj = rs.getTime(i); - } - if (obj != null) { - obj = rs.getTimestamp(i); - } - return obj; - } catch (SQLException e) { - throw new IllegalArgumentException(e); - } - } - } }, - new Object[] { ClickHouseDataType.DateTime, LocalDateTime.of(2022, 1, 7, 19, 11, 55), - new BiFunction() { - @Override - public Object apply(ResultSet rs, Integer i) { - try { - Object obj = rs.getDate(i); - if (obj != null) { - obj = rs.getTime(i); - } - if (obj != null) { - obj = rs.getTimestamp(i); - } - return obj; - } catch (SQLException e) { - throw new IllegalArgumentException(e); - } - } - } } - }; - } - - @DataProvider(name = "nullableColumns") - private Object[][] getNullableColumns() { - return new Object[][] { - new Object[] { "Bool", "false", Boolean.class }, - new Object[] { "Date", "1970-01-01", LocalDate.class }, - new Object[] { "Date32", "1970-01-01", LocalDate.class }, - new Object[] { "DateTime32('UTC')", "1970-01-01 00:00:00", LocalDateTime.class }, - new Object[] { "DateTime64(3, 'UTC')", "1970-01-01 00:00:00", OffsetDateTime.class }, - new Object[] { "Decimal(10,4)", "0", BigDecimal.class }, - new Object[] { "Enum8('x'=0,'y'=1)", "x", Integer.class }, - new Object[] { "Enum16('xx'=1,'yy'=0)", "yy", String.class }, - new Object[] { "Float32", "0.0", Float.class }, - new Object[] { "Float64", "0.0", Double.class }, - new Object[] { "Int8", "0", Byte.class }, - new Object[] { "UInt8", "0", Short.class }, - new Object[] { "Int16", "0", Short.class }, - new Object[] { "UInt16", "0", Integer.class }, - new Object[] { "Int32", "0", Integer.class }, - new Object[] { "UInt32", "0", Long.class }, - new Object[] { "Int64", "0", Long.class }, - new Object[] { "UInt64", "0", BigInteger.class }, - new Object[] { "Int128", "0", BigInteger.class }, - new Object[] { "UInt128", "0", BigInteger.class }, - new Object[] { "Int256", "0", BigInteger.class }, - new Object[] { "UInt256", "0", BigInteger.class }, - }; - } - - @Test(groups = "integration") - public void testFloatToBigDecimal() throws SQLException { - try (ClickHouseConnection 
conn = newConnection(new Properties()); - Statement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery("select toFloat32(1.35) fp, toFloat32(-1.35) fn, " - + "toFloat64(1.35) dp, toFloat64(-1.35) dn, " - + "toDecimal64(1.35, 1) p1, toDecimal64(1.35, 2) p2, " - + "toDecimal64(-1.35, 1) n1, toDecimal64(-1.35, 2) n2")) { - while (rs.next()) { - ClickHouseRecord r = rs.unwrap(ClickHouseRecord.class); - Assert.assertEquals(r.getValue("fp").asBigDecimal(), r.getValue("p2").asObject()); - Assert.assertEquals(r.getValue("fn").asBigDecimal(), r.getValue("n2").asObject()); - Assert.assertEquals(r.getValue("dp").asBigDecimal(), r.getValue("p2").asObject()); - Assert.assertEquals(r.getValue("dn").asBigDecimal(), r.getValue("n2").asObject()); - for (int i = 1; i <= 2; i++) { - Assert.assertEquals(r.getValue("fp").asBigDecimal(i), r.getValue("p" + i).asObject()); - Assert.assertEquals(r.getValue("fn").asBigDecimal(i), r.getValue("n" + i).asObject()); - Assert.assertEquals(r.getValue("dp").asBigDecimal(i), r.getValue("p" + i).asObject()); - Assert.assertEquals(r.getValue("dn").asBigDecimal(i), r.getValue("n" + i).asObject()); - - Assert.assertEquals(rs.getBigDecimal("fp", i), rs.getBigDecimal("p" + i)); - Assert.assertEquals(rs.getBigDecimal("fn", i), rs.getBigDecimal("n" + i)); - Assert.assertEquals(rs.getBigDecimal("dp", i), rs.getBigDecimal("p" + i)); - Assert.assertEquals(rs.getBigDecimal("dn", i), rs.getBigDecimal("n" + i)); - } - } - } - } - - @Test(groups = "integration") - public void testBigDecimal() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery("select toDecimal64(number / 10, 1) from numbers(10)")) { - BigDecimal v = BigDecimal.valueOf(0L).setScale(1); - while (rs.next()) { - Assert.assertEquals(rs.getBigDecimal(1), v); - Assert.assertEquals(rs.getObject(1), v); - v = v.add(new BigDecimal("0.1")); - } - } - } - - @Test(groups = "integration") - public void testArray() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery( - "select [1,2,3] v1, ['a','b', 'c'] v2, arrayZip(v1, v2) v3, cast(['2021-11-01 01:02:03', '2021-11-02 02:03:04'] as Array(DateTime32)) v4"); - Assert.assertTrue(rs.next()); - - Assert.assertEquals(rs.getObject(1), new byte[] { 1, 2, 3 }); - Assert.assertEquals(rs.getArray(1).getArray(), new byte[] { 1, 2, 3 }); - Assert.assertTrue(rs.getArray(1).getArray() == rs.getObject(1)); - - Assert.assertEquals(rs.getObject(2), new String[] { "a", "b", "c" }); - Assert.assertEquals(rs.getArray(2).getArray(), new String[] { "a", "b", "c" }); - Assert.assertTrue(rs.getArray(2).getArray() == rs.getObject(2)); - - Assert.assertEquals(rs.getObject(3), new List[] { Arrays.asList(UnsignedByte.ONE, "a"), - Arrays.asList(UnsignedByte.valueOf((byte) 2), "b"), - Arrays.asList(UnsignedByte.valueOf((byte) 3), "c") }); - Assert.assertEquals(rs.getArray(3).getArray(), new List[] { Arrays.asList(UnsignedByte.ONE, "a"), - Arrays.asList(UnsignedByte.valueOf((byte) 2), "b"), - Arrays.asList(UnsignedByte.valueOf((byte) 3), "c") }); - Assert.assertTrue(rs.getArray(3).getArray() == rs.getObject(3)); - - Assert.assertEquals(rs.getObject(4), new LocalDateTime[] { LocalDateTime.of(2021, 11, 1, 1, 2, 3), - LocalDateTime.of(2021, 11, 2, 2, 3, 4) }); - Assert.assertEquals(rs.getArray(4).getArray(), new LocalDateTime[] { LocalDateTime.of(2021, 11, 1, 1, 2, 3), - 
LocalDateTime.of(2021, 11, 2, 2, 3, 4) }); - Assert.assertTrue(rs.getArray(4).getArray() == rs.getObject(4)); - - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testHugeNumber() throws SQLException { - String number = "15369343623947579499"; - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement(); - ResultSet rs = stmt - .executeQuery(String.format("SELECT toUInt64(%1$s) a, toNullable(%1$s) b", number))) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), number); - Assert.assertEquals(rs.getString(2), number); - Assert.assertEquals(rs.getBigDecimal(1), new BigDecimal(number)); - Assert.assertEquals(rs.getBigDecimal(2), new BigDecimal(number)); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testIpAddress() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - ResultSet rs = stmt - .executeQuery("select toIPv4('116.253.40.133'), toIPv6('2001:44c8:129:2632:33:0:252:2')"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "116.253.40.133"); - Assert.assertEquals(rs.getObject(1).toString(), "/116.253.40.133"); - Assert.assertEquals(rs.getString(2), "2001:44c8:129:2632:33:0:252:2"); - Assert.assertEquals(rs.getObject(2).toString(), "/2001:44c8:129:2632:33:0:252:2"); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testMap() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - if (!conn.getServerVersion().check("[21.8,)")) { - throw new SkipException("Skip test when ClickHouse version is older than 21.8"); - } - - stmt.execute("drop table if exists test_map_of_array; " - + "create table test_map_of_array(id Int8, m0 Map(String, Array(Nullable(DateTime64(3)))), m1 Map(String, Array(Nullable(DateTime64(3, 'Asia/Shanghai'))))) ENGINE = Memory; " - + "insert into test_map_of_array values(1, { 'a' : [], 'b' : [ '2022-03-30 00:00:00.123', null ] }, { 'a' : [], 'b' : [ '2022-03-30 00:00:00.123', null ] })"); - ResultSet rs = stmt - .executeQuery( - "select * from test_map_of_array order by id"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Map v = rs.getObject(2, Map.class); - Assert.assertEquals(v.size(), 2); - Assert.assertEquals(v.get("a"), new LocalDateTime[0]); - Assert.assertEquals(v.get("b"), - new LocalDateTime[] { - ClickHouseDateTimeValue.ofNull(3, TimeZone.getTimeZone("Asia/Shanghai")) - .update("2022-03-30 00:00:00.123").getValue(), - null }); - v = rs.getObject(3, Map.class); - Assert.assertEquals(v.size(), 2); - Assert.assertEquals(v.get("a"), new OffsetDateTime[0]); - Assert.assertEquals(v.get("b"), - new OffsetDateTime[] { - ClickHouseOffsetDateTimeValue.ofNull(3, TimeZone.getTimeZone("Asia/Shanghai")) - .update("2022-03-30 00:00:00.123").getValue(), - null }); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testTuple() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery( - "select (toInt16(1), 'a', toFloat32(1.2), cast([1,2] as Array(Nullable(UInt8))), map(toUInt32(1),'a')) v"); - Assert.assertTrue(rs.next()); - List v = rs.getObject(1, List.class); - Assert.assertEquals(v.size(), 5); - Assert.assertEquals(v.get(0), 
Short.valueOf((short) 1)); - Assert.assertEquals(v.get(1), "a"); - Assert.assertEquals(v.get(2), Float.valueOf(1.2F)); - Assert.assertEquals(v.get(3), new UnsignedByte[] { UnsignedByte.ONE, UnsignedByte.valueOf((byte) 2) }); - Assert.assertEquals(v.get(4), Collections.singletonMap(UnsignedInteger.ONE, "a")); - Assert.assertFalse(rs.next()); - - rs = stmt.executeQuery( - "select cast(tuple(1, [2,3], ('4', [5,6]), map('seven', 8)) as Tuple(Int16, Array(Nullable(Int16)), Tuple(String, Array(Int32)), Map(String, Int32))) v"); - Assert.assertTrue(rs.next()); - v = rs.getObject(1, List.class); - Assert.assertEquals(v.size(), 4); - Assert.assertEquals(v.get(0), Short.valueOf((short) 1)); - Assert.assertEquals(v.get(1), new Short[] { 2, 3 }); - Assert.assertEquals(((List) v.get(2)).get(0), "4"); - Assert.assertEquals(((List) v.get(2)).get(1), new int[] { 5, 6 }); - Assert.assertEquals(v.get(3), Collections.singletonMap("seven", 8)); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testNested() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - - stmt.execute("set flatten_nested=0; " - + "drop table if exists test_simple_aggregate_nested; " - + "create table test_simple_aggregate_nested(id Int8, n0 SimpleAggregateFunction(anyLast, Nested(a String,b String))) ENGINE = AggregatingMergeTree() ORDER BY (id); " - + "insert into test_simple_aggregate_nested values(1, [tuple('foo1', 'bar1'), tuple('foo11', 'bar11')]), (2, [tuple('foo2', 'bar2'), tuple('foo22', 'bar22')])"); - ResultSet rs = stmt - .executeQuery( - "select * from test_simple_aggregate_nested"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Map v = rs.getObject(2, Map.class); - Assert.assertEquals(v.size(), 2); - Assert.assertEquals(v.get("a"), new String[]{"foo1", "foo11"}); - } - } - - @Test(dataProvider = "nullableTypes", groups = "integration") - public void testNullableValues(ClickHouseDataType type, Object value, BiFunction func) - throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - String table = "test_nullable_" + type.name().toLowerCase(); - String ddl = "drop table if exists " + table + "; create table " + table + "(v1 " + type.name() - + ", v2 Nullable(" + type.name() + "))engine=Memory;"; - String insert = "insert into " + table + " values(" + ClickHouseValues.convertToSqlExpression(value) - + ", null);"; - String query = "select * from " + table; - - ResultSet rs = stmt.executeQuery(ddl + insert + query); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1), value); - Assert.assertNotNull(rs.getString(1)); - Assert.assertNotNull(func.apply(rs, 1)); - Assert.assertNull(rs.getObject(2)); - Assert.assertNull(rs.getString(2)); - Assert.assertNull(func.apply(rs, 2)); - Assert.assertFalse(rs.next()); - } - } - - @Test(dataProvider = "nullableColumns", groups = "integration") - public void testNullValue(String columnType, String defaultValue, Class clazz) throws SQLException { - Properties props = new Properties(); - props.setProperty(JdbcConfig.PROP_NULL_AS_DEFAULT, "2"); - String tableName = "test_query_null_value_" + columnType.split("\\(")[0].trim().toLowerCase(); - try (ClickHouseConnection conn = newConnection(props); ClickHouseStatement s = conn.createStatement()) { - if (!conn.getServerVersion().check("[22.3,)")) { - throw new SkipException("Skip test when ClickHouse is older 
than 22.3"); - } - s.execute(String.format("drop table if exists %s; ", tableName) - + String.format("create table %s(id Int8, v Nullable(%s))engine=Memory; ", tableName, columnType) - + String.format("insert into %s values(1, null)", tableName)); - - try (ResultSet rs = s.executeQuery(String.format("select * from %s order by id", tableName))) { - Assert.assertTrue(rs.next(), "Should have at least one row"); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), defaultValue); - Assert.assertNotNull(rs.getObject(2)); - Assert.assertNotNull(rs.getObject(2, clazz)); - Assert.assertFalse(rs.wasNull(), "Should not be null"); - Assert.assertFalse(rs.next(), "Should have only one row"); - } - - s.setNullAsDefault(1); - try (ResultSet rs = s.executeQuery(String.format("select * from %s order by id", tableName))) { - Assert.assertTrue(rs.next(), "Should have at least one row"); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), null); - Assert.assertEquals(rs.getObject(2, clazz), null); - Assert.assertTrue(rs.wasNull(), "Should be null"); - Assert.assertFalse(rs.next(), "Should have only one row"); - } - - s.setNullAsDefault(0); - try (ResultSet rs = s.executeQuery(String.format("select * from %s order by id", tableName))) { - Assert.assertTrue(rs.next(), "Should have at least one row"); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), null); - Assert.assertEquals(rs.getObject(2, clazz), null); - Assert.assertTrue(rs.wasNull(), "Should be null"); - Assert.assertFalse(rs.next(), "Should have only one row"); - } - } catch (SQLException e) { - // 'Unknown data type family', 'Missing columns' or 'Cannot create table column' - if (e.getErrorCode() == 50 || e.getErrorCode() == 47 || e.getErrorCode() == 44) { - return; - } - throw e; - } - } - - @Test(groups = "unit") - public void testFetchSizeOfDetachedResultSet() throws SQLException { - try (ResultSetImpl rs = new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(new ClickHouseConfig(), ClickHouseColumn.parse("s String"), - new Object[][] { new Object[] { "a" } }))) { - Assert.assertEquals(rs.getFetchSize(), 0); - rs.setFetchSize(2); - Assert.assertEquals(rs.getFetchSize(), 0); - rs.setFetchSize(-1); - Assert.assertEquals(rs.getFetchSize(), 0); - } - } - - @Test(groups = "integration") - public void testFetchSize() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery("select 1")) { - Assert.assertEquals(rs.getFetchSize(), 0); - rs.setFetchSize(2); - Assert.assertEquals(rs.getFetchSize(), 0); - rs.setFetchSize(-1); - Assert.assertEquals(rs.getFetchSize(), 0); - } - - stmt.setFetchSize(1); - try (ResultSet rs = stmt.executeQuery("select 1")) { - Assert.assertEquals(rs.getFetchSize(), 1); - rs.setFetchSize(2); - Assert.assertEquals(rs.getFetchSize(), 1); - rs.setFetchSize(-1); - Assert.assertEquals(rs.getFetchSize(), 1); - } - } - } - - - @Test(groups = "integration") - public void testDateTimeWithoutTimezone() throws SQLException { - final String sql = "select now(), toDateTime(now(), 'America/Los_Angeles') as tzTime SETTINGS session_timezone = 'America/Los_Angeles'"; - // Default behavior - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery(sql); - Assert.assertTrue(rs.next()); - OffsetDateTime serverNowOffseted = rs.getObject(1, OffsetDateTime.class); - 
LocalDateTime serverNow = (LocalDateTime) rs.getObject(1); - OffsetDateTime tzTime = (OffsetDateTime) rs.getObject(2); - ZonedDateTime serverNowZoned = rs.getObject(1, ZonedDateTime.class); - Assert.assertTrue(serverNow.isEqual(tzTime.toLocalDateTime())); - Assert.assertTrue(serverNow.isEqual(serverNowOffseted.toLocalDateTime())); - Assert.assertEquals(tzTime.getOffset(), TimeZone.getTimeZone("America/Los_Angeles").toZoneId().getRules().getOffset(tzTime.toInstant())); - Assert.assertEquals(serverNowZoned.getZone(), TimeZone.getTimeZone("America/Los_Angeles").toZoneId()); - Assert.assertEquals(serverNowZoned.toLocalDateTime(), serverNow); - - Time serverNowTime = rs.getTime(1); - Time tzTimeTime = rs.getTime(2); - Timestamp serverNowTimestamp = rs.getTimestamp(1); - Timestamp tzTimeTimestamp = rs.getTimestamp(2); - Assert.assertEquals(serverNowTime, tzTimeTime); - Assert.assertEquals(serverNowTimestamp, tzTimeTimestamp); - } - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseStatementTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseStatementTest.java deleted file mode 100644 index 5d3bfd3a4..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ClickHouseStatementTest.java +++ /dev/null @@ -1,1496 +0,0 @@ -package com.clickhouse.jdbc; - -import com.clickhouse.client.ClickHouseClient; -import com.clickhouse.client.ClickHouseParameterizedQuery; -import com.clickhouse.client.ClickHouseProtocol; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.client.http.config.ClickHouseHttpOption; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseValues; -import com.clickhouse.data.value.ClickHouseBitmap; -import com.clickhouse.data.value.ClickHouseDateTimeValue; -import com.clickhouse.data.value.UnsignedByte; -import com.clickhouse.data.value.UnsignedInteger; -import com.clickhouse.data.value.UnsignedLong; -import com.clickhouse.data.value.UnsignedShort; -import org.roaringbitmap.longlong.Roaring64NavigableMap; -import org.testng.Assert; -import org.testng.SkipException; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -import java.io.File; -import java.io.IOException; -import java.math.BigDecimal; -import java.sql.Array; -import java.sql.BatchUpdateException; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.sql.Statement; -import java.sql.Struct; -import java.sql.Time; -import java.sql.Timestamp; -import java.time.Duration; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.OffsetDateTime; -import java.time.ZoneId; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collections; -import java.util.GregorianCalendar; -import java.util.List; -import java.util.Locale; -import java.util.Properties; -import java.util.TimeZone; -import java.util.UUID; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -public class ClickHouseStatementTest extends JdbcIntegrationTest { - @DataProvider(name = "timeZoneTestOptions") - private 
Object[][] getTimeZoneTestOptions() { - return new Object[][] { - new Object[] { true }, new Object[] { false } }; - } - - @DataProvider(name = "connectionProperties") - private Object[][] getConnectionProperties() { - Properties emptyProps = new Properties(); - Properties sessionProps = new Properties(); - sessionProps.setProperty(ClickHouseClientOption.SESSION_ID.getKey(), UUID.randomUUID().toString()); - return new Object[][] { - new Object[] { emptyProps }, new Object[] { sessionProps } }; - } - - @Test(groups = "integration") - public void testBatchUpdate() throws SQLException { - if (isCloud()) return; //TODO: testBatchUpdate - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); ClickHouseStatement stmt = conn.createStatement()) { - if (!conn.getServerVersion().check("[22.8,)")) { - throw new SkipException("Skip due to error 'unknown key zookeeper_load_balancing'"); - } - - stmt.addBatch("drop table if exists test_batch_dll_on_cluster on cluster single_node_cluster_localhost"); - stmt.addBatch( - "create table if not exists test_batch_dll_on_cluster on cluster single_node_cluster_localhost(a Int64) Engine=MergeTree order by a;" - + "drop table if exists test_batch_dll_on_cluster on cluster single_node_cluster_localhost;"); - Assert.assertEquals(stmt.executeBatch(), new int[] { 0, 0, 0 }); - - stmt.addBatch("drop table if exists test_batch_queries"); - stmt.addBatch("select 1"); - Assert.assertThrows(BatchUpdateException.class, () -> stmt.executeBatch()); - } - } - - @Test(groups = "integration") - public void testBitmap64() throws SQLException { - Properties props = new Properties(); - String sql = "select k,\n" - + "[ tuple(arraySort(groupUniqArrayIf(n, n > 33)), groupBitmapStateIf(n, n > 33)),\n" - + " tuple(arraySort(groupUniqArrayIf(n, n < 32)), groupBitmapStateIf(n, n < 32)),\n" - + " tuple(arraySort(groupUniqArray(n)), groupBitmapState(n)),\n" - + " tuple(arraySort(groupUniqArray(v)), groupBitmapState(v))\n" - + "]::Array(Tuple(Array(UInt64), AggregateFunction(groupBitmap, UInt64))) v\n" - + "from (select 'k' k, (number % 33)::UInt64 as n, (9223372036854775807 + number::Int16)::UInt64 v from numbers(300000))\n" - + "group by k"; - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - stmt.execute("drop table if exists test_bitmap64_serde; " - + "create table test_bitmap64_serde(k String, v Array(Tuple(Array(UInt64), AggregateFunction(groupBitmap, UInt64))))engine=Memory"); - try (PreparedStatement ps = conn.prepareStatement("insert into test_bitmap64_serde"); - ResultSet rs = stmt.executeQuery(sql)) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "k"); - ps.setString(1, rs.getString(1)); - Object[] values = (Object[]) rs.getObject(2); - ps.setObject(2, values); - Assert.assertEquals(values.length, 4); - for (int i = 0; i < values.length; i++) { - List tuple = (List) values[i]; - Assert.assertEquals(tuple.size(), 2); - long[] nums = (long[]) tuple.get(0); - ClickHouseBitmap bitmap = (ClickHouseBitmap) tuple.get(1); - Roaring64NavigableMap bitmap64 = (Roaring64NavigableMap) bitmap.unwrap(); - Assert.assertEquals(nums.length, bitmap64.getLongCardinality()); - for (int j = 0; j < nums.length; j++) { - Assert.assertTrue(bitmap64.contains(nums[j]), "Bitmap does not contain value: " + nums[j]); - } - } - Assert.assertFalse(rs.next()); - - // 
Assert.assertThrows(IllegalStateException.class, () -> ps.executeUpdate()); - Roaring64NavigableMap.SERIALIZATION_MODE = Roaring64NavigableMap.SERIALIZATION_MODE_PORTABLE; - ps.executeUpdate(); - } - - stmt.execute("insert into test_bitmap64_serde\n" + sql); - try (ResultSet rs = stmt.executeQuery("select distinct * from test_bitmap64_serde")) { - Assert.assertTrue(rs.next(), "Should have at least one row"); - Assert.assertFalse(rs.next(), "Should have only one unique row"); - } - } - } - - @Test(groups = "integration") - public void testDialect() throws SQLException { - Properties props = new Properties(); - String sql = "select cast(1 as UInt64) a, cast([1, 2] as Array(Int8)) b"; - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql);) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getMetaData().getColumnTypeName(1), ClickHouseDataType.UInt64.name()); - Assert.assertEquals(rs.getMetaData().getColumnTypeName(2), "Array(Int8)"); - Assert.assertFalse(rs.next()); - } - - props.setProperty(JdbcConfig.PROP_DIALECT, "ansi"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql);) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getMetaData().getColumnTypeName(1), "DECIMAL(20,0)"); - Assert.assertEquals(rs.getMetaData().getColumnTypeName(2), "ARRAY(BYTE)"); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration", enabled = false) - public void testOutFileAndInFile() throws SQLException { - if (isCloud()) return; //TODO: testOutFileAndInFile - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - if (DEFAULT_PROTOCOL != ClickHouseProtocol.HTTP) { - throw new SkipException("Skip non-http protocol"); - } - - Properties props = new Properties(); - props.setProperty("localFile", "true"); - File f1 = new File("a1.csv"); - if (f1.exists()) { - f1.delete(); - } - f1.deleteOnExit(); - File f2 = new File("a2.csv"); - if (f2.exists()) { - f2.delete(); - } - f2.deleteOnExit(); - - try (ClickHouseConnection conn = newConnection(props)) { - String sql1 = "SELECT number n, toString(n) FROM numbers(1234) into outfile '" + f1.getName() + "'"; - try (ClickHouseStatement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery(sql1)) { - Assert.assertTrue(rs.next()); - Assert.assertFalse(rs.next()); - Assert.assertTrue(f1.exists()); - // end up with exception because the file already exists - Assert.assertThrows(SQLException.class, () -> stmt.executeQuery(sql1)); - } - - // try again with ! 
suffix to override the existing file - String sql = "select number n, toString(n) from numbers(1234) into outfile '" + f1.getName() + "!'"; - try (ClickHouseStatement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql)) { - Assert.assertTrue(rs.next()); - Assert.assertFalse(rs.next()); - Assert.assertTrue(f1.exists()); - } - - sql = "select number n, toString(n) from numbers(4321) into outfile '" + f2.getName() + "!'"; - try (ClickHouseStatement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql)) { - Assert.assertTrue(rs.next()); - Assert.assertFalse(rs.next()); - } - - try (ClickHouseStatement stmt = conn.createStatement()) { - Assert.assertFalse(stmt.execute( - "drop table if exists test_load_infile; create table test_load_infile(n UInt64, s String)engine=Memory")); - stmt.executeUpdate("insert into test_load_infile from infile 'a?.csv'"); - try (ResultSet rs = stmt.executeQuery("select count(1) from test_load_infile")) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 5555); - Assert.assertFalse(rs.next()); - } - } - - try (ClickHouseStatement stmt = conn.createStatement()) { - // it's fine when no record was inserted - stmt.executeUpdate("insert into test_load_infile from infile 'non-existent.csv'"); - // unless suffix ! was added... - Assert.assertThrows(SQLException.class, - () -> stmt.executeUpdate("insert into test_load_infile from infile 'non-existent.csv!'")); - try (ResultSet rs = stmt.executeQuery("select count(1) from test_load_infile")) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 5555); - Assert.assertFalse(rs.next()); - } - } - } - } - - @Test(groups = "integration") - public void testJdbcEscapeSyntax() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery( - "select * from (select {d '2021-11-01'} as D, {t '12:34:56'} as T, " - + "{ts '2021-11-01 12:34:56'} as TS) as {tt 'temp_table'}"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject("ts", LocalDateTime.class), LocalDateTime.of(2021, 11, 1, 12, 34, 56)); - Assert.assertEquals(rs.getObject("t", LocalTime.class), LocalTime.of(12, 34, 56)); - Assert.assertEquals(rs.getObject("d"), LocalDate.of(2021, 11, 1)); - Assert.assertEquals(rs.getTime("t"), Time.valueOf(LocalTime.of(12, 34, 56))); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testSocketTimeout() throws SQLException { - Properties props = new Properties(); - props.setProperty("connect_timeout", "500"); - props.setProperty("socket_timeout", "1000"); - props.setProperty("database", "system"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - if (stmt.unwrap(ClickHouseRequest.class).getServer().getProtocol() != ClickHouseProtocol.HTTP) { - throw new SkipException("Skip as only http implementation works well"); - } - stmt.executeQuery("select sleep(3)"); - Assert.fail("Should throw timeout exception"); - } catch (SQLException e) { - Assert.assertTrue(e.getCause() instanceof java.net.SocketTimeoutException - || e.getCause() instanceof IOException, - "Should throw SocketTimeoutException or HttpTimeoutException"); - } - } - - @Test(groups = "integration") - public void testSwitchCatalog() throws SQLException { - if (isCloud()) return; //TODO: testSwitchCatalog - Revisit, see:https://github.com/ClickHouse/clickhouse-java/issues/1747 - 
Properties props = new Properties(); - props.setProperty("databaseTerm", "catalog"); - props.setProperty("database", "system"); - String dbName = "test_switch_schema"; - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - Assert.assertEquals(conn.getCatalog(), "system"); - Assert.assertEquals(conn.getSchema(), null); - stmt.execute( - ClickHouseParameterizedQuery.apply("drop database if exists :db; " - + "create database :db; " - + "create table :db.:db (a Int32) engine=Memory", - Collections.singletonMap("db", dbName))); - ResultSet rs = stmt.executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "system"); - Assert.assertFalse(rs.next()); - Assert.assertThrows(SQLException.class, () -> stmt.executeQuery("select * from test_switch_schema")); - conn.setCatalog(dbName); - conn.setSchema("non-existent-catalog"); - Assert.assertEquals(conn.getCatalog(), dbName); - Assert.assertEquals(conn.getSchema(), null); - rs = stmt.executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "system"); - Assert.assertFalse(rs.next()); - rs = conn.createStatement().executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), dbName); - Assert.assertFalse(rs.next()); - - conn.createStatement().execute("use system"); - Assert.assertEquals(conn.getCurrentDatabase(), "system"); - Assert.assertEquals(conn.getCatalog(), "system"); - Assert.assertEquals(conn.getSchema(), null); - rs = conn.createStatement().executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "system"); - Assert.assertFalse(rs.next()); - - conn.createStatement().execute("use `" + dbName + "`"); - Assert.assertEquals(conn.getCurrentDatabase(), dbName); - Assert.assertEquals(conn.getCatalog(), dbName); - Assert.assertEquals(conn.getSchema(), null); - rs = conn.createStatement().executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), dbName); - Assert.assertFalse(rs.next()); - - rs = conn.createStatement().executeQuery("use system;select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "system"); - Assert.assertEquals(conn.getCatalog(), "system"); - Assert.assertEquals(conn.getSchema(), null); - Assert.assertFalse(rs.next()); - - // non-existent database - String nonExistentDb = UUID.randomUUID().toString(); - Assert.assertThrows(SQLException.class, () -> conn.setCatalog(nonExistentDb)); - Assert.assertThrows(SQLException.class, () -> conn.createStatement().execute("use an invalid query")); - Assert.assertThrows(SQLException.class, - () -> conn.createStatement().execute("use `" + nonExistentDb + "`")); - Assert.assertThrows(SQLException.class, - () -> conn.createStatement().execute("use `" + nonExistentDb + "`; select 1")); - } finally { - dropDatabase(dbName); - } - } - - @Test(groups = "integration") - public void testSwitchSchema() throws SQLException { - if (isCloud()) return; //TODO: testSwitchSchema - Revisit, see:https://github.com/ClickHouse/clickhouse-java/issues/1747 - Properties props = new Properties(); - props.setProperty("databaseTerm", "schema"); - props.setProperty("database", "system"); - String dbName = "test_switch_schema"; - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - 
Assert.assertEquals(conn.getCatalog(), null); - Assert.assertEquals(conn.getSchema(), "system"); - stmt.execute( - ClickHouseParameterizedQuery.apply("drop database if exists :db; " - + "create database :db; " - + "create table :db.:db (a Int32) engine=Memory", - Collections.singletonMap("db", dbName))); - ResultSet rs = stmt.executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "system"); - Assert.assertFalse(rs.next()); - Assert.assertThrows(SQLException.class, () -> stmt.executeQuery("select * from test_switch_schema")); - conn.setCatalog("non-existent-catalog"); - conn.setSchema(dbName); - Assert.assertEquals(conn.getCatalog(), null); - Assert.assertEquals(conn.getSchema(), dbName); - rs = stmt.executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "system"); - Assert.assertFalse(rs.next()); - rs = conn.createStatement().executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), dbName); - Assert.assertFalse(rs.next()); - - conn.createStatement().execute("use system"); - Assert.assertEquals(conn.getCurrentDatabase(), "system"); - Assert.assertEquals(conn.getCatalog(), null); - Assert.assertEquals(conn.getSchema(), "system"); - rs = conn.createStatement().executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "system"); - Assert.assertFalse(rs.next()); - - conn.createStatement().execute("use `" + dbName + "`"); - Assert.assertEquals(conn.getCurrentDatabase(), dbName); - Assert.assertEquals(conn.getCatalog(), null); - Assert.assertEquals(conn.getSchema(), dbName); - rs = conn.createStatement().executeQuery("select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), dbName); - Assert.assertFalse(rs.next()); - - rs = conn.createStatement().executeQuery("use system;select currentDatabase()"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "system"); - Assert.assertEquals(conn.getCatalog(), null); - Assert.assertEquals(conn.getSchema(), "system"); - Assert.assertFalse(rs.next()); - - // non-existent database - String nonExistentDb = UUID.randomUUID().toString(); - Assert.assertThrows(SQLException.class, () -> conn.setSchema(nonExistentDb)); - Assert.assertThrows(SQLException.class, () -> conn.createStatement().execute("use an invalid query")); - Assert.assertThrows(SQLException.class, - () -> conn.createStatement().execute("use `" + nonExistentDb + "`")); - Assert.assertThrows(SQLException.class, - () -> conn.createStatement().execute("use `" + nonExistentDb + "`; select 1")); - } finally { - dropDatabase(dbName); - } - } - - @Test(groups = "local") - public void testLogComment() throws SQLException { - Properties props = new Properties(); - props.setProperty(ClickHouseClientOption.LOG_LEADING_COMMENT.getKey(), "true"); - try (ClickHouseConnection conn = newConnection(props)) { - ClickHouseStatement stmt = conn.createStatement(); - String uuid = UUID.randomUUID().toString(); - String sql = "-- select something " + uuid + "\nselect 12345"; - stmt.execute(sql + "; system flush logs;"); - ResultSet rs = stmt.executeQuery( - "select distinct query from system.query_log where type = 'QueryStart' and log_comment = 'select something " - + uuid + "'"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), sql); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public 
void testMaxFloatValues() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement s = conn.createStatement()) { - s.execute("drop table if exists test_float_values; " - + "create table test_float_values(f1 Nullable(Float64), f2 Nullable(Float64))engine=Memory"); - try (PreparedStatement ps = conn.prepareStatement("insert into test_float_values values(?, ?)")) { - ps.setObject(1, Float.MAX_VALUE); - ps.setObject(2, Double.MAX_VALUE); - ps.executeUpdate(); - } - ResultSet rs = s.executeQuery("select * from test_float_values"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getFloat(1), Float.MAX_VALUE); - Assert.assertEquals(rs.getDouble(2), Double.MAX_VALUE); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testMutation() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); ClickHouseStatement stmt = conn.createStatement()) { - Assert.assertEquals(stmt.executeBatch(), new int[0]); - Assert.assertEquals(stmt.executeLargeBatch(), new long[0]); - - Assert.assertFalse(stmt.execute("drop table if exists test_mutation;" - + "create table test_mutation(a String, b UInt32) engine=MergeTree() order by tuple()"), - "Should not return result set"); - // [delete from ]tbl a [delete ]where a.b = 1[ settings mutation_async=0] - // alter table tbl a delete where a.b = 1 - Assert.assertTrue(stmt.execute("-- test\nselect 1"), "Should return a result set"); - Assert.assertFalse(stmt.execute("-- test\ndelete from test_mutation where b = 1"), - "Should not return result set"); - // [update] tbl a [set] a.b = 1 where a.b != 1[ settings mutation_async=0] - // alter table tbl a update a.b = 1 where a.b != 1 - conn.setClientInfo("ApplicationName", "333"); - Assert.assertEquals(conn.createStatement().executeUpdate("update test_mutation set b = 22 where b = 1"), 1); - - Assert.assertThrows(SQLException.class, - () -> stmt.executeUpdate("update non_existing_table set value=1 where key=1")); - - stmt.addBatch("insert into test_mutation values('1',1)"); - stmt.addBatch("drop table non_existing_table"); - stmt.addBatch("insert into test_mutation values('2',2)"); - Assert.assertThrows(SQLException.class, () -> stmt.executeBatch()); - - Assert.assertEquals(stmt.executeBatch(), new int[0]); - Assert.assertEquals(stmt.executeLargeBatch(), new long[0]); - } - - props.setProperty(JdbcConfig.PROP_CONTINUE_BATCH, "true"); - try (ClickHouseConnection conn = newConnection(props); ClickHouseStatement stmt = conn.createStatement()) { - stmt.addBatch("insert into test_mutation values('a',1)"); - stmt.addBatch("drop table non_existing_table"); - stmt.addBatch("insert into test_mutation values('b',2)"); - stmt.addBatch("select 2"); - Assert.assertEquals(stmt.executeBatch(), - new int[] { 1, Statement.EXECUTE_FAILED, 1, Statement.EXECUTE_FAILED }); - } - } - - @Test(groups = "integration") - public void testAsyncInsert() throws SQLException { - if (DEFAULT_PROTOCOL != ClickHouseProtocol.HTTP) { - return; - } - - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props)) { - if (conn.getServerVersion().check("(,21.12)")) { - return; - } - } - - props.setProperty(ClickHouseHttpOption.CUSTOM_PARAMS.getKey(), "async_insert=1,wait_for_async_insert=1"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement();) { - stmt.execute("drop table if exists 
test_async_insert; " - + "CREATE TABLE test_async_insert(id UInt32, s String) ENGINE = MergeTree ORDER BY id; " - + "INSERT INTO test_async_insert VALUES(1, 'a'); " - + "SELECT * FROM test_async_insert" + (isCloud() ? " SETTINGS select_sequential_consistency=1" : "")); - ResultSet rs = stmt.getResultSet(); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getString(2), "a"); - Assert.assertFalse(rs.next()); - } - - //TODO: I'm not sure this is a valid test... - if (isCloud()) return; //TODO: testAsyncInsert - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - props.setProperty(ClickHouseHttpOption.CUSTOM_PARAMS.getKey(), "async_insert=1,wait_for_async_insert=0"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement();) { - stmt.execute("TRUNCATE TABLE test_async_insert; " - + "INSERT INTO test_async_insert VALUES(1, 'a'); " - + "SELECT * FROM test_async_insert"); - ResultSet rs = stmt.getResultSet(); - Assert.assertFalse(rs.next(), - "Server was probably busy at that time, so the row was inserted before your query"); - } - } - - @Test(dataProvider = "connectionProperties", groups = "integration") - public void testCancelQuery(Properties props) throws SQLException { - if (isCloud()) return; //TODO: testCancelQuery - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement();) { - CountDownLatch c = new CountDownLatch(1); - ClickHouseClient.submit(() -> stmt.executeQuery("select * from numbers(100000000)")).whenComplete( - (rs, e) -> { - Assert.assertNull(e, "Should NOT have any exception"); - - int index = 0; - - try { - while (rs.next()) { - if (index++ < 1) { - c.countDown(); - } - } - Assert.fail("Query should have been cancelled"); - } catch (SQLException ex) { - Assert.assertNotNull(ex, "Should end up with exception"); - } - }); - try { - c.await(5, TimeUnit.SECONDS); - } catch (Exception e) { - Assert.fail("Failed to wait", e); - } finally { - stmt.cancel(); - } - - try (ResultSet rs = stmt.executeQuery("select 5")) { - Assert.assertTrue(rs.next(), "Should have at least one record"); - Assert.assertEquals(rs.getInt(1), 5); - Assert.assertFalse(rs.next(), "Should have only one record"); - } - } - } - - @Test(groups = "integration") - public void testExecute() throws SQLException { - try (Connection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - // ddl - Assert.assertFalse(stmt.execute("drop table if exists non_existing_table"), "Should have no result set"); - Assert.assertEquals(stmt.getResultSet(), null); - Assert.assertTrue(stmt.getUpdateCount() >= 0, "Should have update count"); - // query - Assert.assertTrue(stmt.execute("select 1"), "Should have result set"); - ResultSet rs = stmt.getResultSet(); - Assert.assertTrue(rs.next(), "Should have one record"); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertFalse(rs.next(), "Should have only one record"); - // mixed usage - stmt.addBatch("drop table if exists non_existing_table"); - Assert.assertThrows(SQLException.class, () -> stmt.executeQuery("drop table if exists non_existing_table")); - Assert.assertThrows(SQLException.class, () -> stmt.executeQuery("select 2")); - stmt.clearBatch(); - Assert.assertFalse(stmt.execute("drop table if exists non_existing_table"), "Should have no result set"); - 
Assert.assertEquals(stmt.getResultSet(), null); - Assert.assertTrue(stmt.getUpdateCount() >= 0, "Should have update count"); - Assert.assertTrue(stmt.execute("select 2"), "Should have result set"); - rs = stmt.getResultSet(); - Assert.assertTrue(rs.next(), "Should have one record"); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertFalse(rs.next(), "Should have only one record"); - } - } - - @Test(groups = "integration") - public void testExecuteBatch() throws SQLException { - Properties props = new Properties(); - try (Connection conn = newConnection(props); Statement stmt = conn.createStatement()) { - Assert.assertEquals(stmt.executeBatch(), new int[0]); - Assert.assertEquals(stmt.executeLargeBatch(), new long[0]); - stmt.addBatch("select 1"); - stmt.clearBatch(); - Assert.assertEquals(stmt.executeBatch(), new int[0]); - Assert.assertEquals(stmt.executeLargeBatch(), new long[0]); - stmt.addBatch("select 1"); - // mixed usage - Assert.assertThrows(SQLException.class, () -> stmt.execute("select 2")); - Assert.assertThrows(SQLException.class, () -> stmt.executeQuery("select 2")); - Assert.assertThrows(SQLException.class, - () -> stmt.executeLargeUpdate("drop table if exists non_existing_table")); - Assert.assertThrows(SQLException.class, - () -> stmt.executeUpdate("drop table if exists non_existing_table")); - // query in batch - Assert.assertThrows(BatchUpdateException.class, () -> stmt.executeBatch()); - stmt.addBatch("select 1"); - Assert.assertThrows(BatchUpdateException.class, () -> stmt.executeLargeBatch()); - - Assert.assertFalse(stmt.execute("drop table if exists test_execute_batch; " - + "create table test_execute_batch(a Int32, b String)engine=Memory"), "Should not have result set"); - stmt.addBatch("insert into test_execute_batch values(1,'1')"); - stmt.addBatch("insert into test_execute_batch values(2,'2')"); - stmt.addBatch("insert into test_execute_batch values(3,'3')"); - Assert.assertEquals(stmt.executeBatch(), new int[] { 1, 1, 1 }); - - Assert.assertFalse(stmt.execute("truncate table test_execute_batch"), "Should not have result set"); - stmt.addBatch("insert into test_execute_batch values(1,'1')"); - stmt.addBatch("insert into test_execute_batch values(2,'2')"); - stmt.addBatch("insert into test_execute_batch values(3,'3')"); - Assert.assertEquals(stmt.executeLargeBatch(), new long[] { 1L, 1L, 1L }); - - try (ResultSet rs = stmt.executeQuery("select * from test_execute_batch order by a")) { - int count = 0; - while (rs.next()) { - count++; - Assert.assertEquals(rs.getInt(1), count); - Assert.assertEquals(rs.getString(2), String.valueOf(count)); - } - Assert.assertEquals(count, 3); - } - - Assert.assertFalse(stmt.execute("truncate table test_execute_batch"), "Should not have result set"); - stmt.addBatch("insert into test_execute_batch values(1,'1')"); - stmt.addBatch("drop table non_existing_table"); - stmt.addBatch("insert into test_execute_batch values(2,'2')"); - Assert.assertThrows(BatchUpdateException.class, () -> stmt.executeBatch()); - - Assert.assertFalse(stmt.execute("truncate table test_execute_batch"), "Should not have result set"); - stmt.addBatch("insert into test_execute_batch values(1,'1')"); - stmt.addBatch("drop table non_existing_table"); - stmt.addBatch("insert into test_execute_batch values(2,'2')"); - Assert.assertThrows(BatchUpdateException.class, () -> stmt.executeLargeBatch()); - } - - props.setProperty(JdbcConfig.PROP_CONTINUE_BATCH, "true"); - try (Connection conn = newConnection(props); Statement stmt = conn.createStatement()) { - 
Assert.assertFalse(stmt.execute("truncate table test_execute_batch"), "Should not have result set"); - stmt.addBatch("insert into test_execute_batch values(1,'1')"); - stmt.addBatch("drop table non_existing_table"); - stmt.addBatch("insert into test_execute_batch values(2,'2')"); - stmt.addBatch("drop table non_existing_table"); - Assert.assertEquals(stmt.executeBatch(), - new int[] { 1, Statement.EXECUTE_FAILED, 1, Statement.EXECUTE_FAILED }); - - Assert.assertFalse(stmt.execute("truncate table test_execute_batch"), "Should not have result set"); - stmt.addBatch("insert into test_execute_batch values(1,'1')"); - stmt.addBatch("drop table non_existing_table"); - stmt.addBatch("insert into test_execute_batch values(2,'2')"); - stmt.addBatch("drop table non_existing_table"); - Assert.assertEquals(stmt.executeLargeBatch(), - new long[] { 1L, Statement.EXECUTE_FAILED, 1L, Statement.EXECUTE_FAILED }); - try (ResultSet rs = stmt.executeQuery("select * from test_execute_batch order by a")) { - int count = 0; - while (rs.next()) { - count++; - Assert.assertEquals(rs.getInt(1), count); - Assert.assertEquals(rs.getString(2), String.valueOf(count)); - } - Assert.assertEquals(count, 2); - } - } - } - - @Test(groups = "integration") - public void testExecuteQuery() throws SQLException { - try (Connection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery("select 1"); - Assert.assertTrue(rs == stmt.getResultSet(), "Should be the exact same result set"); - Assert.assertEquals(stmt.getUpdateCount(), -1); - Assert.assertTrue(rs.next(), "Should have one record"); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertFalse(rs.next(), "Should have only one record"); - - stmt.addBatch("select 1"); - Assert.assertThrows(SQLException.class, () -> stmt.executeQuery("select 2")); - stmt.clearBatch(); - rs = stmt.executeQuery("select 2"); - Assert.assertTrue(rs == stmt.getResultSet(), "Should be the exact same result set"); - Assert.assertEquals(stmt.getUpdateCount(), -1); - Assert.assertTrue(rs.next(), "Should have one record"); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertFalse(rs.next(), "Should have only one record"); - - // never return null result set - rs = stmt.executeQuery("drop table if exists non_existing_table"); - Assert.assertNotNull(rs, "Should never be null"); - Assert.assertNull(stmt.getResultSet(), "Should be null"); - Assert.assertEquals(stmt.getUpdateCount(), 0); - Assert.assertFalse(rs.next(), "Should has no row"); - } - } - - @Test(groups = "integration") - public void testExecuteUpdate() throws SQLException { - try (Connection conn = newConnection(new Properties()); - Statement stmt = conn.createStatement()) { - Assert.assertFalse(stmt.execute("drop table if exists test_execute_query; " - + "create table test_execute_query(a Int32, b String)engine=Memory"), "Should not have result set"); - - Assert.assertTrue(stmt.executeUpdate("insert into test_execute_query values(1,'1')") >= 0, - "Should return value greater than or equal to zero"); - Assert.assertNull(stmt.getResultSet(), "Should have no result set"); - Assert.assertEquals(stmt.getUpdateCount(), 1); - Assert.assertEquals(stmt.getLargeUpdateCount(), 1L); - Assert.assertTrue(stmt.executeLargeUpdate("insert into test_execute_query values(1,'1')") >= 0L, - "Should return value greater than or equal to zero"); - Assert.assertNull(stmt.getResultSet(), "Should have no result set"); - Assert.assertEquals(stmt.getUpdateCount(), 1); - 
Assert.assertEquals(stmt.getLargeUpdateCount(), 1L); - - stmt.addBatch("select 1"); - Assert.assertThrows(SQLException.class, - () -> stmt.executeUpdate("insert into test_execute_query values(1,'1')")); - Assert.assertThrows(SQLException.class, - () -> stmt.executeLargeUpdate("insert into test_execute_query values(1,'1')")); - stmt.clearBatch(); - - Assert.assertTrue(stmt.executeUpdate("insert into test_execute_query values(2,'2')") >= 0, - "Should return value greater than or equal to zero"); - Assert.assertNull(stmt.getResultSet(), "Should have no result set"); - Assert.assertEquals(stmt.getUpdateCount(), 1); - Assert.assertEquals(stmt.getLargeUpdateCount(), 1L); - Assert.assertTrue(stmt.executeLargeUpdate("insert into test_execute_query values(2,'2')") >= 0, - "Should return value greater than or equal to zero"); - Assert.assertNull(stmt.getResultSet(), "Should have no result set"); - Assert.assertEquals(stmt.getUpdateCount(), 1); - Assert.assertEquals(stmt.getLargeUpdateCount(), 1L); - } - } - - @Test(groups = "integration") - public void testFetchSize() throws SQLException { - try (Connection conn = newConnection(new Properties()); Statement stmt = conn.createStatement()) { - Assert.assertEquals(stmt.getFetchSize(), 0); - - stmt.setFetchSize(0); - Assert.assertEquals(stmt.getFetchSize(), 0); - stmt.setFetchSize(-1); - Assert.assertEquals(stmt.getFetchSize(), 0); - stmt.setFetchSize(Integer.MIN_VALUE); - Assert.assertEquals(stmt.getFetchSize(), 0); - - stmt.setFetchSize(1); - Assert.assertEquals(stmt.getFetchSize(), 1); - stmt.setFetchSize(Integer.MAX_VALUE); - Assert.assertEquals(stmt.getFetchSize(), Integer.MAX_VALUE); - stmt.setFetchSize(0); - Assert.assertEquals(stmt.getFetchSize(), 0); - } - } - - @Test(groups = "integration") - public void testSimpleAggregateFunction() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties()); - ClickHouseStatement stmt = conn.createStatement();) { - stmt.execute("drop table if exists test_simple_agg_func; " - + "CREATE TABLE test_simple_agg_func (x SimpleAggregateFunction(max, UInt64)) ENGINE=AggregatingMergeTree ORDER BY tuple(); " - + "INSERT INTO test_simple_agg_func VALUES(1)"); - - try (ResultSet rs = stmt.executeQuery("select * from test_simple_agg_func")) { - Assert.assertTrue(rs.next(), "Should have one row"); - Assert.assertEquals(rs.getLong(1), 1L); - Assert.assertFalse(rs.next(), "Should have only one row"); - } - } - } - - @Test(groups = "integration") - public void testWrapperObject() throws SQLException { - String sql = "SELECT CAST('[(''a'',''b'')]' AS Array(Tuple(String, String))), ('a', 'b')"; - List expectedTuple = Arrays.asList("a", "b"); - Object expectedArray = new List[] { expectedTuple }; - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement();) { - ResultSet rs = stmt.executeQuery(sql); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getArray(1).getArray(), expectedArray); - Assert.assertEquals(rs.getObject(1), expectedArray); - Assert.assertEquals(rs.getObject(2), expectedTuple); - Assert.assertFalse(rs.next()); - } - - props.setProperty(JdbcConfig.PROP_WRAPPER_OBJ, "true"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement();) { - ResultSet rs = stmt.executeQuery(sql); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getArray(1).getArray(), expectedArray); - Assert.assertEquals(((Array) rs.getObject(1)).getArray(), 
expectedArray); - Assert.assertEquals(((Struct) rs.getObject(2)).getAttributes(), expectedTuple.toArray(new String[0])); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testQuerySystemLog() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties())) { - ClickHouseStatement stmt = conn.createStatement(); - stmt.setMaxRows(10); - stmt.setLargeMaxRows(11L); - ResultSet rs = stmt.executeQuery("select * from numbers(100)"); - - int rows = 0; - try (ResultSet colRs = conn.getMetaData().getColumns(null, "system", "query_log", "")) { - while (colRs.next()) { - continue; - } - } - - while (rs.next()) { - rows++; - } - Assert.assertEquals(rows, 11); - - // batch query - stmt.addBatch("drop table if exists non_existing_table1"); - stmt.addBatch("drop table if exists non_existing_table2"); - stmt.addBatch("drop table if exists non_existing_table3"); - int[] results = stmt.executeBatch(); - Assert.assertEquals(results, new int[] { 0, 0, 0 }); - } - } - - @Test(groups = "integration") - public void testQueryWithFormat() throws SQLException { - try (Connection conn = newConnection(new Properties())) { - Statement stmt = conn.createStatement(); - - for (String[] pair : new String[][] { new String[] { "TSV", "1" }, - new String[] { "JSONEachRow", "{\"1\":1}" } }) { - try (ResultSet rs = stmt.executeQuery(String.format("select 1 format %s", pair[0]))) { - Assert.assertTrue(rs.next(), "Should have at least one row"); - Assert.assertEquals(rs.getString(1), pair[1]); - Assert.assertFalse(rs.next(), "Should have only one row"); - } - } - } - } - - @Test(groups = "integration") - public void testMultiStatementQuery() throws SQLException { - try (ClickHouseConnection conn = newConnection(new Properties())) { - ClickHouseStatement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery("set join_use_nulls=1;\n" - + "select a.k, b.m from ( " - + " select 1 k, null v union all select 2 k, 'a' v " - + ") a left outer join ( select 1 f, 2 m ) b on a.k = b.f " - + "order by a.k"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getInt(2), 2); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getInt(2), 0); - Assert.assertEquals(rs.getObject(2), null); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testTimestamp() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery("select now(), now('Asia/Chongqing')"); - Assert.assertTrue(rs.next()); - LocalDateTime dt1 = (LocalDateTime) rs.getObject(1); - LocalDateTime dt2 = rs.getObject(1, LocalDateTime.class); - Assert.assertTrue(dt1 == dt2); - OffsetDateTime ot1 = (OffsetDateTime) rs.getObject(2); - OffsetDateTime ot2 = rs.getObject(2, OffsetDateTime.class); - Assert.assertTrue(ot1 == ot2); - Assert.assertFalse(rs.next()); - } - - String tz = "America/Los_Angeles"; - String sql = "SELECT toDateTime(1616633456), toDateTime(1616633456, 'Etc/UTC'), " - + "toDateTime(1616633456, 'America/Los_Angeles'), toDateTime(1616633456, 'Asia/Chongqing'), " - + "toDateTime(1616633456, 'Europe/Berlin'), toUInt32(toDateTime('2021-03-25 08:50:56')), " - + "toUInt32(toDateTime('2021-03-25 08:50:56', 'Asia/Chongqing'))"; - props.setProperty("use_time_zone", tz); - props.setProperty("use_server_time_zone", "false"); - try 
(ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery(sql); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1), - ZonedDateTime.ofInstant(Instant.ofEpochSecond(1616633456L), ZoneId.of(tz)) - .toLocalDateTime()); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testTimestampWithNanoSeconds() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - stmt.execute("drop table if exists test_timetamp_with_nanos;" - + "create table test_timetamp_with_nanos(d DateTime64(9))engine=Memory"); - Instant instant = Instant.now(); - Timestamp now = new Timestamp(instant.toEpochMilli()); - now.setNanos(instant.getNano()); - try (PreparedStatement ps1 = conn.prepareStatement("insert into test_timetamp_with_nanos"); - PreparedStatement ps2 = conn - .prepareStatement( - "insert into test_timetamp_with_nanos values(toDateTime64(?, 9))")) { - ps1.setTimestamp(1, now); - ps1.executeUpdate(); - - ps2.setTimestamp(1, now); - ps2.executeUpdate(); - } - ResultSet rs = stmt.executeQuery("select distinct * from test_timetamp_with_nanos"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1, Instant.class), instant); - Assert.assertFalse(rs.next()); - } - } - - // @Test(groups = "integration") - // public void testAggregateFunction() throws SQLException { - // Properties props = new Properties(); - // try (ClickHouseConnection conn = newConnection(props); - // ClickHouseStatement stmt = conn.createStatement()) { - // ResultSet rs = stmt.executeQuery("select anyState(n) from (select - // toInt32(number + 5) n from numbers(3))"); - // Assert.assertTrue(rs.next()); - // Assert.assertEquals(rs.getObject(1), 5); - // Assert.assertFalse(rs.next()); - - // rs = stmt.executeQuery("select anyState(null)"); - // Assert.assertTrue(rs.next()); - // Assert.assertNull(rs.getObject(1)); - // Assert.assertFalse(rs.next()); - - // rs = stmt.executeQuery("select anyState(n) from (select toString(number) n - // from numbers(0))"); - // Assert.assertTrue(rs.next()); - // Assert.assertNull(rs.getObject(1)); - // Assert.assertFalse(rs.next()); - // } - // } - - @Test(groups = "integration") - public void testCustomTypeMappings() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery("select cast('a' as Enum('a'=1,'b'=2))"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getByte(1), (byte) 1); - Assert.assertEquals(rs.getShort(1), (short) 1); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(1), "a"); - Assert.assertEquals(rs.getString(1), "a"); - Assert.assertFalse(rs.next()); - } - - props.setProperty("typeMappings", - "Enum8=java.lang.Byte,DateTime64=java.lang.String, String=com.clickhouse.data.ClickHouseDataType"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery( - "select cast('a' as Enum('a'=1,'b'=2)), toDateTime64('2021-12-21 12:34:56.789',3), 'Float64'"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1), (byte) 1); - Assert.assertEquals(rs.getObject(2), "2021-12-21 12:34:56.789"); - Assert.assertEquals(rs.getObject(3), ClickHouseDataType.Float64); - 
Assert.assertEquals(rs.getString(1), "a"); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testPrimitiveTypes() throws SQLException { - String sql = "select toInt8(1), toUInt8(1), toInt16(1), toUInt16(1), toInt32(1), toUInt32(1), toInt64(1), toUInt64(1), " - + "cast([1] as Array(Int8)), cast([1] as Array(UInt8)), cast([1] as Array(Int16)), cast([1] as Array(UInt16)), " - + "cast([1] as Array(Int32)), cast([1] as Array(UInt32)), cast([1] as Array(Int64)), cast([1] as Array(UInt64))"; - - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql)) { - Assert.assertTrue(rs.next()); - int index = 1; - Assert.assertEquals(rs.getObject(index++), (byte) 1); - Assert.assertEquals(rs.getObject(index++), UnsignedByte.ONE); - Assert.assertEquals(rs.getObject(index++), (short) 1); - Assert.assertEquals(rs.getObject(index++), UnsignedShort.ONE); - Assert.assertEquals(rs.getObject(index++), 1); - Assert.assertEquals(rs.getObject(index++), UnsignedInteger.ONE); - Assert.assertEquals(rs.getObject(index++), 1L); - Assert.assertEquals(rs.getObject(index++), UnsignedLong.ONE); - Assert.assertEquals(rs.getObject(index++), new byte[] { (byte) 1 }); - Assert.assertEquals(rs.getObject(index++), new byte[] { (byte) 1 }); - Assert.assertEquals(rs.getObject(index++), new short[] { (short) 1 }); - Assert.assertEquals(rs.getObject(index++), new short[] { (short) 1 }); - Assert.assertEquals(rs.getObject(index++), new int[] { 1 }); - Assert.assertEquals(rs.getObject(index++), new int[] { 1 }); - Assert.assertEquals(rs.getObject(index++), new long[] { 1L }); - Assert.assertEquals(rs.getObject(index++), new long[] { 1L }); - Assert.assertFalse(rs.next()); - } - - props.clear(); - props.setProperty("use_objects_in_arrays", "true"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql)) { - Assert.assertTrue(rs.next()); - int index = 1; - Assert.assertEquals(rs.getObject(index++), (byte) 1); - Assert.assertEquals(rs.getObject(index++), UnsignedByte.ONE); - Assert.assertEquals(rs.getObject(index++), (short) 1); - Assert.assertEquals(rs.getObject(index++), UnsignedShort.ONE); - Assert.assertEquals(rs.getObject(index++), 1); - Assert.assertEquals(rs.getObject(index++), UnsignedInteger.ONE); - Assert.assertEquals(rs.getObject(index++), 1L); - Assert.assertEquals(rs.getObject(index++), UnsignedLong.ONE); - Assert.assertEquals(rs.getObject(index++), new byte[] { (byte) 1 }); - Assert.assertEquals(rs.getObject(index++), new UnsignedByte[] { UnsignedByte.ONE }); - Assert.assertEquals(rs.getObject(index++), new short[] { (short) 1 }); - Assert.assertEquals(rs.getObject(index++), new UnsignedShort[] { UnsignedShort.ONE }); - Assert.assertEquals(rs.getObject(index++), new int[] { 1 }); - Assert.assertEquals(rs.getObject(index++), new UnsignedInteger[] { UnsignedInteger.ONE }); - Assert.assertEquals(rs.getObject(index++), new Long[] { 1L }); - Assert.assertEquals(rs.getObject(index++), new UnsignedLong[] { UnsignedLong.ONE }); - Assert.assertFalse(rs.next()); - } - - props.clear(); - props.setProperty("widen_unsigned_types", "false"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql)) { - Assert.assertTrue(rs.next()); - int index = 1; - 
Assert.assertEquals(rs.getObject(index++), (byte) 1); - Assert.assertEquals(rs.getObject(index++), UnsignedByte.ONE); - Assert.assertEquals(rs.getObject(index++), (short) 1); - Assert.assertEquals(rs.getObject(index++), UnsignedShort.ONE); - Assert.assertEquals(rs.getObject(index++), 1); - Assert.assertEquals(rs.getObject(index++), UnsignedInteger.ONE); - Assert.assertEquals(rs.getObject(index++), 1L); - Assert.assertEquals(rs.getObject(index++), UnsignedLong.ONE); - Assert.assertEquals(rs.getObject(index++), new byte[] { (byte) 1 }); - Assert.assertEquals(rs.getObject(index++), new byte[] { (byte) 1 }); - Assert.assertEquals(rs.getObject(index++), new short[] { (short) 1 }); - Assert.assertEquals(rs.getObject(index++), new short[] { (short) 1 }); - Assert.assertEquals(rs.getObject(index++), new int[] { 1 }); - Assert.assertEquals(rs.getObject(index++), new int[] { 1 }); - Assert.assertEquals(rs.getObject(index++), new long[] { 1L }); - Assert.assertEquals(rs.getObject(index++), new long[] { 1L }); - Assert.assertFalse(rs.next()); - } - } - - @Test(groups = "integration") - public void testNestedArrayInTuple() throws SQLException { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - // nested values on same row - Assert.assertFalse(stmt.execute("drop table if exists test_nested_array_in_tuple; " - + "create table test_nested_array_in_tuple(id UInt64, v1 Tuple(Array(Int32)), v2 Tuple(Array(Int32)))engine=Memory; " - + "insert into test_nested_array_in_tuple values(1, ([1, 2]), ([2, 3]))")); - try (ResultSet rs = stmt.executeQuery("select * from test_nested_array_in_tuple order by id")) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(((List) rs.getObject(2)).size(), 1); - Assert.assertEquals(((List) rs.getObject(2)).get(0), new int[] { 1, 2 }); - Assert.assertEquals(((List) rs.getObject(3)).size(), 1); - Assert.assertEquals(((List) rs.getObject(3)).get(0), new int[] { 2, 3 }); - Assert.assertFalse(rs.next()); - } - - // nested values on same column - Assert.assertFalse(stmt.execute("drop table if exists test_nested_array_in_tuple; " - + "create table test_nested_array_in_tuple(id UInt64, val Tuple(Array(Int32)))engine=Memory; " - + "insert into test_nested_array_in_tuple values(1, ([1, 2])), (2, ([2, 3]))")); - try (ResultSet rs = stmt.executeQuery("select * from test_nested_array_in_tuple order by id")) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(((List) rs.getObject(2)).size(), 1); - Assert.assertEquals(((List) rs.getObject(2)).get(0), new int[] { 1, 2 }); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(((List) rs.getObject(2)).size(), 1); - Assert.assertEquals(((List) rs.getObject(2)).get(0), new int[] { 2, 3 }); - Assert.assertFalse(rs.next()); - } - - // deeper nested level and more elements - Assert.assertFalse(stmt.execute("drop table if exists test_nested_array_in_tuple; " - + "create table test_nested_array_in_tuple(id UInt64, val Array(Tuple(UInt16,Array(UInt32))))engine=Memory; " - + "insert into test_nested_array_in_tuple values(1, [(0, [1, 2]), (1, [2, 3])]), (2, [(2, [4, 5]), (3, [6, 7])])")); - try (ResultSet rs = stmt.executeQuery("select * from test_nested_array_in_tuple order by id")) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(((Object[]) rs.getObject(2)).length, 2); - 
Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).size(), 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).get(0), UnsignedShort.ZERO); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).get(1), new int[] { 1, 2 }); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).size(), 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).get(0), UnsignedShort.ONE); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).get(1), new int[] { 2, 3 }); - Assert.assertTrue(rs.next()); - Assert.assertEquals(((Object[]) rs.getObject(2)).length, 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).size(), 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).get(0), - UnsignedShort.valueOf((short) 2)); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).get(1), new int[] { 4, 5 }); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).size(), 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).get(0), - UnsignedShort.valueOf((short) 3)); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).get(1), new int[] { 6, 7 }); - Assert.assertFalse(rs.next()); - } - - Assert.assertFalse(stmt.execute("drop table if exists test_nested_array_in_tuple; " - + "create table test_nested_array_in_tuple(id UInt64, val Array(Tuple(UInt16,Array(Decimal(10,0)))))engine=Memory; " - + "insert into test_nested_array_in_tuple values(1, [(0, [1, 2]), (1, [2, 3])]), (2, [(2, [4, 5]), (3, [6, 7])])")); - try (ResultSet rs = stmt.executeQuery("select * from test_nested_array_in_tuple order by id")) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(((Object[]) rs.getObject(2)).length, 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).size(), 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).get(0), UnsignedShort.ZERO); - Assert.assertEquals(((BigDecimal[]) ((List) ((Object[]) rs.getObject(2))[0]).get(1))[0], - BigDecimal.valueOf(1)); - Assert.assertEquals(((BigDecimal[]) ((List) ((Object[]) rs.getObject(2))[0]).get(1))[1], - BigDecimal.valueOf(2)); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).size(), 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).get(0), UnsignedShort.ONE); - Assert.assertEquals(((BigDecimal[]) ((List) ((Object[]) rs.getObject(2))[1]).get(1))[0], - BigDecimal.valueOf(2)); - Assert.assertEquals(((BigDecimal[]) ((List) ((Object[]) rs.getObject(2))[1]).get(1))[1], - BigDecimal.valueOf(3)); - Assert.assertTrue(rs.next()); - Assert.assertEquals(((Object[]) rs.getObject(2)).length, 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).size(), 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[0]).get(0), - UnsignedShort.valueOf((short) 2)); - Assert.assertEquals(((BigDecimal[]) ((List) ((Object[]) rs.getObject(2))[0]).get(1))[0], - BigDecimal.valueOf(4)); - Assert.assertEquals(((BigDecimal[]) ((List) ((Object[]) rs.getObject(2))[0]).get(1))[1], - BigDecimal.valueOf(5)); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).size(), 2); - Assert.assertEquals(((List) ((Object[]) rs.getObject(2))[1]).get(0), - UnsignedShort.valueOf((short) 3)); - Assert.assertEquals(((BigDecimal[]) ((List) ((Object[]) rs.getObject(2))[1]).get(1))[0], - BigDecimal.valueOf(6)); - Assert.assertEquals(((BigDecimal[]) ((List) ((Object[]) rs.getObject(2))[1]).get(1))[1], - BigDecimal.valueOf(7)); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(groups = 
"integration") - public void testNestedArrays() throws SQLException { - Properties props = new Properties(); - Object[][] arr1 = null; - Object[][] arr2 = null; - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement(); - ResultSet rs = stmt - .executeQuery( - "select * from (select 1 id, [['1','2'],['3', '4']] v union all select 2 id, [['5','6'],['7','8']] v) order by id")) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 1); - Assert.assertEquals(rs.getObject(2), arr1 = (Object[][]) rs.getArray(2).getArray()); - Assert.assertEquals(((Object[][]) rs.getObject(2)).length, 2); - Assert.assertEquals(((Object[][]) rs.getObject(2))[0], new Object[] { "1", "2" }); - Assert.assertEquals(((Object[][]) rs.getObject(2))[1], new Object[] { "3", "4" }); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getInt(1), 2); - Assert.assertEquals(rs.getObject(2), arr2 = (Object[][]) rs.getArray(2).getArray()); - Assert.assertEquals(((Object[][]) rs.getObject(2)).length, 2); - Assert.assertEquals(((Object[][]) rs.getObject(2))[0], new Object[] { "5", "6" }); - Assert.assertEquals(((Object[][]) rs.getObject(2))[1], new Object[] { "7", "8" }); - Assert.assertFalse(rs.next()); - } - - Assert.assertTrue(arr1 != arr2); - Assert.assertNotEquals(arr1[0], arr2[0]); - Assert.assertNotEquals(arr1[1], arr2[1]); - } - - @Test(groups = "integration") - public void testNestedDataTypes() throws SQLException { - String sql = "select (1,2) as t, [3,4] as a"; - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery(sql); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1), Arrays.asList(UnsignedByte.ONE, UnsignedByte.valueOf((byte) 2))); - Assert.assertEquals(rs.getObject(2), new byte[] { (byte) 3, (byte) 4 }); - Assert.assertFalse(rs.next()); - } - - props.setProperty("use_objects_in_arrays", "true"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery(sql); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1), Arrays.asList(UnsignedByte.ONE, UnsignedByte.valueOf((byte) 2))); - Assert.assertEquals(rs.getObject(2), - new UnsignedByte[] { UnsignedByte.valueOf((byte) 3), UnsignedByte.valueOf((byte) 4) }); - Assert.assertFalse(rs.next()); - } - - props.clear(); - props.setProperty("widen_unsigned_types", "true"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery(sql); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getObject(1), Arrays.asList((short) 1, (short) 2)); - Assert.assertEquals(rs.getObject(2), new short[] { (short) 3, (short) 4 }); - Assert.assertFalse(rs.next()); - } - - props.clear(); - props.setProperty("wrapperObject", "true"); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery(sql); - Assert.assertTrue(rs.next()); - Assert.assertEquals(((ClickHouseStruct) rs.getObject(1)).getAttributes(), - new Object[] { UnsignedByte.ONE, UnsignedByte.valueOf((byte) 2) }); - Assert.assertEquals(((ClickHouseArray) rs.getObject(2)).getArray(), rs.getArray(2).getArray()); - Assert.assertFalse(rs.next()); - } - } - - @Test(dataProvider = "timeZoneTestOptions", groups = "integration") - public void 
testTimeZone(boolean useBinary) throws SQLException { - if (isCloud()) return; //TODO: testTimeZone - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - String dateType = "DateTime32"; - String dateValue = "2020-02-11 00:23:33"; - ClickHouseDateTimeValue v = ClickHouseDateTimeValue.of(dateValue, 0, ClickHouseValues.UTC_TIMEZONE); - - Properties props = new Properties(); - String[] timeZones = new String[] { "Asia/Chongqing", "America/Los_Angeles", "Europe/Moscow", "Etc/UTC", - "Europe/Berlin" }; - StringBuilder columns = new StringBuilder().append("d0 ").append(dateType); - StringBuilder constants = new StringBuilder().append(ClickHouseValues.convertToQuotedString(dateValue)); - StringBuilder currents = new StringBuilder().append("now()"); - StringBuilder parameters = new StringBuilder().append(useBinary ? "?,?" : "trim(?),?"); - int len = timeZones.length; - Calendar[] calendars = new Calendar[len + 1]; - for (int i = 0; i < len; i++) { - String timeZoneId = timeZones[i]; - columns.append(",d").append(i + 1).append(' ').append(dateType).append("('").append(timeZoneId) - .append("')"); - constants.append(',').append(ClickHouseValues.convertToQuotedString(dateValue)); - currents.append(",now()"); - parameters.append(",?"); - calendars[i] = new GregorianCalendar(TimeZone.getTimeZone(timeZoneId)); - } - len++; - try (ClickHouseConnection conn = newConnection(props); - Connection mconn = newMySqlConnection(props); - Statement mstmt = mconn.createStatement();) { - ClickHouseStatement stmt = conn.createStatement(); - stmt.execute("drop table if exists test_tz;" + "create table test_tz(no String," + columns.toString() - + ") engine=Memory;" + "insert into test_tz Values ('0 - Constant'," + constants.toString() + ");" - + "insert into test_tz values('1 - Current'," + currents.toString() + ");"); - - String sql = "insert into test_tz values(" + parameters.toString() + ")"; - try (PreparedStatement ps = conn.prepareStatement(sql); - PreparedStatement mps = mconn.prepareStatement(sql)) { - int index = 2; - mps.setString(1, (0 - index) + " - String"); - ps.setString(1, index++ + " - String"); - for (int i = 1; i <= len; i++) { - ps.setString(i + 1, v.asString()); - mps.setString(i + 1, v.asString()); - } - ps.addBatch(); - mps.addBatch(); - - ps.setString(1, index++ + " - LocalDateTime"); - for (int i = 1; i <= len; i++) { - ps.setObject(i + 1, v.asDateTime()); - } - ps.addBatch(); - - ps.setString(1, index++ + " - OffsetDateTime"); - for (int i = 1; i <= len; i++) { - ps.setObject(i + 1, v.asOffsetDateTime()); - } - ps.addBatch(); - - ps.setString(1, index++ + " - DateTime"); - for (int i = 1; i <= len; i++) { - if (i == 1) { - ps.setObject(i + 1, v.asDateTime()); - } else { - ps.setObject(i + 1, v.asDateTime().atZone(TimeZone.getTimeZone(timeZones[i - 2]).toZoneId()) - .toOffsetDateTime()); - } - } - ps.addBatch(); - - mps.setString(1, (0 - index) + " - BigDecimal"); - ps.setString(1, index++ + " - BigDecimal"); - for (int i = 1; i <= len; i++) { - ps.setBigDecimal(i + 1, v.asBigDecimal()); - mps.setBigDecimal(i + 1, v.asBigDecimal()); - } - ps.addBatch(); - mps.addBatch(); - - mps.setString(1, (0 - index) + " - Timestamp"); - ps.setString(1, index++ + " - Timestamp"); - for (int i = 1; i <= len; i++) { - ps.setTimestamp(i + 1, Timestamp.valueOf(v.asDateTime())); - mps.setTimestamp(i + 1, Timestamp.valueOf(v.asDateTime())); - } - ps.addBatch(); - mps.addBatch(); - - for (int j = 0; j < len; j++) { - Calendar c = calendars[j]; - mps.setString(1, (0 - index) + " - 
Timestamp(" + (c == null ? "" : c.getTimeZone().getID()) + ")"); - ps.setString(1, index++ + " - Timestamp(" + (c == null ? "" : c.getTimeZone().getID()) + ")"); - for (int i = 1; i <= len; i++) { - ps.setTimestamp(i + 1, Timestamp.valueOf(v.asDateTime()), c); - mps.setTimestamp(i + 1, Timestamp.valueOf(v.asDateTime()), c); - } - ps.addBatch(); - mps.addBatch(); - } - - int[] results = ps.executeBatch(); - mps.executeBatch(); - } - - try (ResultSet rs = stmt - .executeQuery("select * from test_tz order by toInt32(splitByString(' - ', no)[1])"); - ResultSet mrs = mstmt - .executeQuery("select * from test_tz order by toInt32(splitByString(' - ', no)[1])")) { - int row = 0; - while (rs.next()) { - row++; - Assert.assertTrue(mrs.next()); - - for (int i = 1; i <= len; i++) { - String msg = String.format(Locale.ROOT, "row: %d, column: %d", row, i + 1); - // Assert.assertEquals(rs.getObject(i + 1), mrs.getObject(i + 1)); - Assert.assertEquals(rs.getDate(i + 1), mrs.getDate(i + 1), msg); - Assert.assertEquals(rs.getString(i + 1), mrs.getString(i + 1), msg); - Assert.assertEquals(rs.getTimestamp(i + 1), mrs.getTimestamp(i + 1), msg); - Assert.assertEquals(rs.getTime(i + 1), mrs.getTime(i + 1), msg); - for (int j = 0; j < len; j++) { - msg = String.format(Locale.ROOT, "row: %d, column: %d, calendar: %s", row, i + 1, - calendars[j]); - Assert.assertEquals(rs.getTimestamp(i + 1, calendars[j]), - mrs.getTimestamp(i + 1, calendars[j]), msg); - Assert.assertEquals(rs.getTime(i + 1, calendars[j]), mrs.getTime(i + 1, calendars[j]), msg); - } - } - } - } - } - } - - @Test(groups = "integration") - public void testMultiThreadedExecution() throws Exception { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - - - ScheduledExecutorService executor = Executors.newScheduledThreadPool(3); - - final AtomicReference failedException = new AtomicReference<>(null); - for (int i = 0; i < 3; i++) { - executor.scheduleWithFixedDelay(() -> { - try { - stmt.execute("select 1"); - } catch (Exception e) { - failedException.set(e); - } - }, 100, 100, TimeUnit.MILLISECONDS); - } - - try { - Thread.sleep(1000); - } catch (Exception e) { - Assert.fail("Test interrupted", e); - } - - executor.shutdown(); - executor.awaitTermination(10, TimeUnit.SECONDS); - - Assert.assertNull(failedException.get(), "Failed because of exception: " + failedException.get()); - } - } - - @Test(groups = "integration") - public void testSessionTimezoneSetting() { - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery("SELECT now() SETTINGS session_timezone = 'America/Los_Angeles'"); - rs.next(); - OffsetDateTime srvNow = rs.getObject(1, OffsetDateTime.class); - OffsetDateTime localNow = OffsetDateTime.now(ZoneId.of("America/Los_Angeles")); - Assert.assertTrue(Duration.between(srvNow, localNow).abs().getSeconds() < 60, - "server time (" + srvNow +") differs from local time (" + localNow + ")"); - } catch (Exception e) { - Assert.fail("Failed to create connection", e); - } - } - - - @Test(groups = "integration") - public void testUseOffsetDateTime() { - try (ClickHouseConnection conn = newConnection(); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery("select toDateTime('2024-01-01 10:00:00', 'America/Los_Angeles'), toDateTime('2024-05-01 10:00:00', " + - " 'America/Los_Angeles'), now() 
SETTINGS session_timezone = 'America/Los_Angeles'"); - rs.next(); - OffsetDateTime dstStart = (OffsetDateTime) rs.getObject(1); - OffsetDateTime dstEnd = (OffsetDateTime) rs.getObject(2); - OffsetDateTime now = rs.getObject(3, OffsetDateTime.class); - System.out.println("dstStart: " + dstStart + ", dstEnd: " + dstEnd + ", now: " + now); - Assert.assertEquals(dstStart.getOffset(), ZoneOffset.ofHours(-8)); - Assert.assertEquals(dstEnd.getOffset(), ZoneOffset.ofHours(-7)); - } catch (Exception e) { - e.printStackTrace(); - Assert.fail("Failed to create connection", e); - } - } - - - @Test(groups = "integration") - public void testDescMetadata() { - try (ClickHouseConnection conn = newConnection(); - ClickHouseStatement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery("DESC (select timezone(), number FROM system.numbers)"); - rs.next(); - ResultSetMetaData metaData = rs.getMetaData(); - Assert.assertEquals(metaData.getColumnCount(), 7); - } catch (Exception e) { - e.printStackTrace(); - Assert.fail("Failed to create connection", e); - } - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/CombinedResultSetTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/CombinedResultSetTest.java deleted file mode 100644 index bc86131dd..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/CombinedResultSetTest.java +++ /dev/null @@ -1,253 +0,0 @@ -package com.clickhouse.jdbc; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import org.testng.Assert; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -import com.clickhouse.client.ClickHouseConfig; -import com.clickhouse.client.ClickHouseSimpleResponse; -import com.clickhouse.data.ClickHouseColumn; - -public class CombinedResultSetTest { - @DataProvider(name = "multipleResultSetsProvider") - private Object[][] getMultipleResultSets() { - ClickHouseConfig config = new ClickHouseConfig(); - return new Object[][] { - { new CombinedResultSet(null, new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, ClickHouseColumn.parse("s String"), - new Object[][] { new Object[] { "a" }, - new Object[] { "b" } })), - new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, - ClickHouseColumn.parse("s String"), - new Object[][] { new Object[] { "c" }, - new Object[] { "d" }, - new Object[] { "e" } }))) }, - { new CombinedResultSet(Arrays.asList(null, null, - new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, - ClickHouseColumn.parse("s String"), - new Object[][] { new Object[] { - "a" } })), - null, - new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, - ClickHouseColumn.parse("s String"), - new Object[][] { new Object[] { - "b" } })), - new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, - ClickHouseColumn.parse("s String"), - new Object[][] { - new Object[] { "c" }, - new Object[] { "d" }, - new Object[] { "e" } })))) } }; - } - - @DataProvider(name = "nullOrEmptyResultSetProvider") - private Object[][] getNullOrEmptyResultSet() { - return new Object[][] { { new CombinedResultSet() }, { new CombinedResultSet((ResultSet) null) }, - { new CombinedResultSet(null, null) }, { new CombinedResultSet(null, null, null) }, - { new CombinedResultSet(Collections.emptyList()) }, - { new CombinedResultSet(Collections.singleton(null)) }, - { new CombinedResultSet(Arrays.asList(null, null)) }, - { new CombinedResultSet(Arrays.asList(null, null, null)) 
} }; - } - - @DataProvider(name = "singleResultSetProvider") - private Object[][] getSingleResultSet() { - ClickHouseConfig config = new ClickHouseConfig(); - return new Object[][] { - { new CombinedResultSet(new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, ClickHouseColumn.parse("s String"), - new Object[][] { new Object[] { "a" }, - new Object[] { "b" } }))) }, - { new CombinedResultSet(Collections.singleton( - new ResultSetImpl("", "", ClickHouseSimpleResponse.of(config, - ClickHouseColumn.parse("s String"), - new Object[][] { new Object[] { "a" }, - new Object[] { "b" } })))) } }; - } - - @Test(dataProvider = "multipleResultSetsProvider", groups = "unit") - public void testMultipleResultSets(CombinedResultSet combined) throws SQLException { - Assert.assertFalse(combined.isClosed()); - Assert.assertEquals(combined.getRow(), 0); - Assert.assertTrue(combined.next()); - Assert.assertEquals(combined.getRow(), 1); - Assert.assertEquals(combined.getString(1), "a"); - Assert.assertTrue(combined.next()); - Assert.assertEquals(combined.getRow(), 2); - Assert.assertEquals(combined.getString(1), "b"); - Assert.assertTrue(combined.next()); - Assert.assertEquals(combined.getRow(), 3); - Assert.assertEquals(combined.getString(1), "c"); - Assert.assertTrue(combined.next()); - Assert.assertEquals(combined.getRow(), 4); - Assert.assertEquals(combined.getString(1), "d"); - Assert.assertTrue(combined.next()); - Assert.assertEquals(combined.getRow(), 5); - Assert.assertEquals(combined.getString(1), "e"); - Assert.assertFalse(combined.next()); - Assert.assertFalse(combined.next()); - Assert.assertEquals(combined.getRow(), 5); - combined.close(); - Assert.assertTrue(combined.isClosed()); - } - - @Test(dataProvider = "nullOrEmptyResultSetProvider", groups = "unit") - public void testNullAndEmptyResultSet(CombinedResultSet combined) throws SQLException { - Assert.assertFalse(combined.isClosed()); - Assert.assertEquals(combined.getRow(), 0); - Assert.assertFalse(combined.next()); - Assert.assertEquals(combined.getRow(), 0); - Assert.assertFalse(combined.next()); - Assert.assertEquals(combined.getRow(), 0); - combined.close(); - Assert.assertTrue(combined.isClosed()); - Assert.assertThrows(SQLException.class, () -> combined.getString(1)); - } - - @Test(dataProvider = "singleResultSetProvider", groups = "unit") - public void testSingleResultSet(CombinedResultSet combined) throws SQLException { - Assert.assertFalse(combined.isClosed()); - Assert.assertEquals(combined.getRow(), 0); - Assert.assertTrue(combined.next()); - Assert.assertEquals(combined.getRow(), 1); - Assert.assertEquals(combined.getString(1), "a"); - Assert.assertTrue(combined.next()); - Assert.assertEquals(combined.getRow(), 2); - Assert.assertEquals(combined.getString(1), "b"); - Assert.assertFalse(combined.next()); - Assert.assertFalse(combined.next()); - Assert.assertEquals(combined.getRow(), 2); - combined.close(); - Assert.assertTrue(combined.isClosed()); - } - - @Test(groups = "unit") - public void testFetchSize() throws SQLException { - try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(new ClickHouseConfig(), ClickHouseColumn.parse("s String"), - new Object[][] { new Object[] { "a" }, new Object[] { "b" } })))) { - Assert.assertEquals(rs.getFetchSize(), 0); - rs.setFetchSize(2); - Assert.assertEquals(rs.getFetchSize(), 0); - rs.setFetchSize(-1); - Assert.assertEquals(rs.getFetchSize(), 0); - } - } - - @Test(groups = "unit") - public void testFirstAndLastRow() throws 
SQLException { - ClickHouseConfig config = new ClickHouseConfig(); - List columns = ClickHouseColumn.parse("s String"); - // no record - try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, columns, new Object[0][])))) { - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - - Assert.assertFalse(rs.next(), "Should have no row"); - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - } - - // no record(with two empty resultsets) - try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, columns, new Object[0][])), - new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, columns, new Object[0][])))) { - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); - Assert.assertTrue(rs.isAfterLast(), "Should NOT be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - - Assert.assertFalse(rs.next(), "Should have no row"); - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); - Assert.assertTrue(rs.isAfterLast(), "Should NOT be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - } - - // one record - try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, columns, - new Object[][] { new Object[] { "a" } })))) { - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); - Assert.assertFalse(rs.isAfterLast(), "Should NOT be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - - Assert.assertTrue(rs.next(), "Should have one row"); - Assert.assertFalse(rs.isBeforeFirst(), "Should NOT be before the first"); - Assert.assertTrue(rs.isFirst(), "Should be the first"); - Assert.assertFalse(rs.isAfterLast(), "Should NOT be after the last"); - Assert.assertTrue(rs.isLast(), "Should be the last"); - - Assert.assertFalse(rs.next(), "Should have only one row"); - Assert.assertFalse(rs.isBeforeFirst(), "Should NOT be before the first"); - Assert.assertTrue(rs.isFirst(), "Should be the first"); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - } - - try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, columns, - new Object[][] { new Object[] { "a" }, new Object[] { "b" } })))) { - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertTrue(rs.next(), "Should have at least one row"); - Assert.assertTrue(rs.isFirst(), "Should be the first row"); - Assert.assertEquals(rs.getString(1), "a"); - Assert.assertTrue(rs.next(), "Should have two rows"); - Assert.assertEquals(rs.getString(1), "b"); - Assert.assertTrue(rs.isLast(), "Should be the last row"); - Assert.assertFalse(rs.next(), "Should have only two rows"); - Assert.assertTrue(rs.isAfterLast(), 
"Should be after the last row"); - } - } - - @Test(groups = "unit") - public void testNext() throws SQLException { - ClickHouseConfig config = new ClickHouseConfig(); - List columns = ClickHouseColumn.parse("s String"); - try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, columns, - new Object[][] { new Object[] { "a" }, new Object[] { "b" } })))) { - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertTrue(rs.next()); - Assert.assertTrue(rs.isFirst(), "Should be the first row"); - Assert.assertEquals(rs.getString(1), "a"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "b"); - Assert.assertTrue(rs.isLast(), "Should be the last row"); - Assert.assertFalse(rs.next()); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last row"); - } - - try (CombinedResultSet rs = new CombinedResultSet(new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, columns, new Object[][] { new Object[] { "a" } })), - new ResultSetImpl("", "", - ClickHouseSimpleResponse.of(config, columns, new Object[][] { new Object[] { "b" } })))) { - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertTrue(rs.next()); - Assert.assertTrue(rs.isFirst(), "Should be the first row"); - Assert.assertEquals(rs.getString(1), "a"); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString(1), "b"); - Assert.assertTrue(rs.isLast(), "Should be the last row"); - Assert.assertFalse(rs.next()); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last row"); - } - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ConnectionTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ConnectionTest.java new file mode 100644 index 000000000..f9959ddf9 --- /dev/null +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/ConnectionTest.java @@ -0,0 +1,268 @@ +package com.clickhouse.jdbc; + +import java.sql.*; +import java.util.Properties; + +import org.testng.Assert; +import org.testng.annotations.AfterTest; +import org.testng.annotations.BeforeTest; +import org.testng.annotations.Test; + +public class ConnectionTest extends JdbcIntegrationTest { + private Connection connection; + + @BeforeTest + public void setUp() { + try { + this.connection = this.getJdbcConnection(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @AfterTest + public void tearDown() { + try { + this.connection.close(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Test + public void createStatementTest() throws SQLException { + Statement statement = this.connection.createStatement(); + Assert.assertNotNull(statement); + statement.close(); + + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT)); + } + + @Test + public void prepareStatementTest() throws SQLException { + PreparedStatement statement = this.connection.prepareStatement("SELECT 1"); + Assert.assertNotNull(statement); + statement.close(); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.prepareStatement("SELECT 1", Statement.RETURN_GENERATED_KEYS)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> 
this.connection.prepareStatement("SELECT 1", new int[] { 1 })); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.prepareStatement("SELECT 1", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.prepareStatement("SELECT 1", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.prepareStatement("SELECT 1", new String[] { "1" })); + } + + @Test + public void prepareCallTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.prepareCall("SELECT 1")); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.prepareCall("SELECT 1", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.prepareCall("SELECT 1", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT)); + } + + @Test + public void nativeSQLTest() throws SQLException { + String sql = "SELECT 1"; + Assert.assertEquals(this.connection.nativeSQL(sql), sql); + } + + @Test + public void setAutoCommitTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setAutoCommit(false)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setAutoCommit(true)); + } + + @Test + public void getAutoCommitTest() throws SQLException { + Assert.assertTrue(this.connection.getAutoCommit()); + } + + @Test + public void commitTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.commit()); + } + + @Test + public void rollbackTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.rollback()); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.rollback(null)); + } + + @Test + public void closeTest() throws SQLException { + Connection localConnection = this.getJdbcConnection(); + Assert.assertFalse(localConnection.isClosed()); + localConnection.close(); + Assert.assertTrue(localConnection.isClosed()); + Assert.assertThrows(SQLException.class, localConnection::createStatement); + Assert.assertThrows(SQLException.class, () -> localConnection.prepareStatement("SELECT 1")); + } + + @Test + public void getMetaDataTest() throws SQLException { + Assert.assertNotNull(this.connection.getMetaData()); + } + + @Test + public void setReadOnlyTest() throws SQLException { + this.connection.setReadOnly(true); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setReadOnly(false)); + } + + @Test + public void isReadOnlyTest() throws SQLException { + Assert.assertTrue(this.connection.isReadOnly()); + } + + @Test + public void setCatalogTest() throws SQLException { + this.connection.setCatalog("catalog-name"); + Assert.assertEquals(this.connection.getCatalog(), "catalog-name"); + } + + @Test + public void setTransactionIsolationTest() throws SQLException { + this.connection.setTransactionIsolation(Connection.TRANSACTION_NONE); + Assert.assertEquals(this.connection.getTransactionIsolation(), Connection.TRANSACTION_NONE); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> 
this.connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE)); + } + + @Test + public void getTransactionIsolationTest() throws SQLException { + Assert.assertEquals(this.connection.getTransactionIsolation(), Connection.TRANSACTION_NONE); + } + + @Test + public void getWarningsTest() throws SQLException { + Assert.assertNull(this.connection.getWarnings()); + } + + @Test + public void clearWarningsTest() throws SQLException { + this.connection.clearWarnings(); + } + + + @Test + public void getTypeMapTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.getTypeMap()); + } + + @Test + public void setTypeMapTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setTypeMap(null)); + } + + @Test + public void setHoldabilityTest() throws SQLException { + this.connection.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);//No-op + } + + @Test + public void getHoldabilityTest() throws SQLException { + Assert.assertEquals(this.connection.getHoldability(), ResultSet.HOLD_CURSORS_OVER_COMMIT); + } + + @Test + public void setSavepointTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setSavepoint()); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setSavepoint("savepoint-name")); + } + + @Test + public void releaseSavepointTest(){ + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.releaseSavepoint(null)); + } + + @Test + public void createClobTest(){ + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.createClob()); + } + + @Test + public void createBlobTest(){ + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.createBlob()); + } + + @Test + public void createNClobTest(){ + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.createNClob()); + } + + @Test + public void createSQLXMLTest(){ + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.createSQLXML()); + } + + @Test + public void isValidTest() throws SQLException { + Assert.assertThrows(SQLException.class, () -> this.connection.isValid(-1)); + Assert.assertTrue(this.connection.isValid(0)); + } + + @Test + public void setClientInfoTest() { + Assert.assertThrows(SQLClientInfoException.class, () -> this.connection.setClientInfo("key", "value")); + Assert.assertThrows(SQLClientInfoException.class, () -> this.connection.setClientInfo(new Properties())); + } + + @Test + public void getClientInfoTest() throws SQLException { + Assert.assertNull(this.connection.getClientInfo("key")); + Assert.assertNotNull(this.connection.getClientInfo()); + } + + @Test + public void createArrayOfTest() throws SQLException { + Assert.assertNull(this.connection.createArrayOf("type-name", new Object[] { 1, 2, 3 })); + } + + @Test + public void createStructTest() throws SQLException { + Assert.assertNull(this.connection.createStruct("type-name", new Object[] { 1, 2, 3 })); + } + + @Test + public void setSchemaTest() throws SQLException { + this.connection.setSchema("schema-name"); + Assert.assertEquals(this.connection.getSchema(), "schema-name"); + } + + @Test + public 
void abortTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.abort(null)); + } + + @Test + public void setNetworkTimeoutTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setNetworkTimeout(null, 0)); + } + + @Test + public void getNetworkTimeoutTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.getNetworkTimeout()); + } + + @Test + public void beginRequestTest() throws SQLException { + this.connection.beginRequest();//No-op + } + + @Test + public void endRequestTest() throws SQLException { + this.connection.endRequest();//No-op + } + + @Test + public void setShardingKeyIfValidTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setShardingKeyIfValid(null, 0)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setShardingKeyIfValid(null, null, 0)); + } + + @Test + public void setShardingKeyTest() { + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setShardingKey(null)); + Assert.assertThrows(SQLFeatureNotSupportedException.class, () -> this.connection.setShardingKey(null, null)); + } +} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java deleted file mode 100644 index 25f1c68ff..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceImplTest.java +++ /dev/null @@ -1,108 +0,0 @@ -package com.clickhouse.jdbc; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Properties; - -import org.testng.Assert; -import org.testng.annotations.Test; - -import com.clickhouse.client.ClickHouseLoadBalancingPolicy; -import com.clickhouse.client.ClickHouseProtocol; -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.client.config.ClickHouseDefaults; - -public class DataSourceImplTest extends JdbcIntegrationTest { - @Test(groups = "integration") - public void testHighAvailabilityConfig() throws SQLException { - if (isCloud()) return; //TODO: testHighAvailabilityConfig - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - String httpEndpoint = getEndpointString(); - String grpcEndpoint = "grpc://" + getServerAddress(ClickHouseProtocol.GRPC) + "/"; - String tcpEndpoint = "tcp://" + getServerAddress(ClickHouseProtocol.TCP) + "/"; - - String url = "jdbc:ch://(" + httpEndpoint + "),(" + grpcEndpoint + "),(" + tcpEndpoint + ")/system"; - Properties props = new Properties(); - props.setProperty("failover", "21"); - props.setProperty("load_balancing_policy", "roundRobin"); - try (Connection conn = DriverManager.getConnection(url, props)) { - Assert.assertEquals(conn.unwrap(ClickHouseRequest.class).getConfig().getFailover(), 21); - Assert.assertEquals(conn.unwrap(ClickHouseRequest.class).getConfig().getOption( - ClickHouseClientOption.LOAD_BALANCING_POLICY), ClickHouseLoadBalancingPolicy.ROUND_ROBIN); - } - } - - @Test // (groups = "integration") - public void testMultiEndpoints() throws SQLException { - if (isCloud()) return; //TODO: testMultiEndpoints - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - String httpEndpoint = getEndpointString(); - String grpcEndpoint = "grpc://" + getServerAddress(ClickHouseProtocol.GRPC) + 
"/"; - String tcpEndpoint = "tcp://" + getServerAddress(ClickHouseProtocol.TCP) + "/"; - - String url = "jdbc:ch://(" + httpEndpoint + "),(" + grpcEndpoint + "),(" + tcpEndpoint - + ")/system?load_balancing_policy=roundRobin"; - Properties props = new Properties(); - props.setProperty("user", "default"); - props.setProperty("password", ""); - DataSourceImpl ds = new DataSourceImpl(url, props); - for (int i = 0; i < 10; i++) { - try (Connection httpConn = ds.getConnection(); - Connection grpcConn = ds.getConnection("default", ""); - Connection tcpConn = DriverManager.getConnection(url, props)) { - Assert.assertEquals(httpConn.unwrap(ClickHouseRequest.class).getServer().getBaseUri(), httpEndpoint); - Assert.assertEquals(grpcConn.unwrap(ClickHouseRequest.class).getServer().getBaseUri(), grpcEndpoint); - Assert.assertEquals(tcpConn.unwrap(ClickHouseRequest.class).getServer().getBaseUri(), tcpEndpoint); - } - } - } - - @Test(groups = "integration") - public void testGetConnection() throws SQLException { - String url = "jdbc:ch:" + getEndpointString(); -// String urlWithCredentials = "jdbc:ch:" + (isCloud() ? "https" : DEFAULT_PROTOCOL.name()) + "://default@" -// + getServerAddress(DEFAULT_PROTOCOL); - String urlWithCredentials = "jdbc:ch:" + DEFAULT_PROTOCOL.name() + "://default@" + getServerAddress(DEFAULT_PROTOCOL); - if (isCloud()) { - urlWithCredentials = "jdbc:ch:https://default:" + getPassword() + "@" + getServerAddress(DEFAULT_PROTOCOL); - } - String clientName = "client1"; - int maxExecuteTime = 1234; - boolean continueBatchOnError = true; - - Properties properties = new Properties(); - properties.setProperty(ClickHouseDefaults.USER.getKey(), "default"); - properties.setProperty(ClickHouseDefaults.PASSWORD.getKey(), getPassword()); - properties.setProperty(ClickHouseClientOption.CLIENT_NAME.getKey(), clientName); - properties.setProperty(ClickHouseClientOption.MAX_EXECUTION_TIME.getKey(), Integer.toString(maxExecuteTime)); - properties.setProperty(JdbcConfig.PROP_CONTINUE_BATCH, Boolean.toString(continueBatchOnError)); - String params = String.format("?%s=%s&%s=%d&%s", ClickHouseClientOption.CLIENT_NAME.getKey(), clientName, - ClickHouseClientOption.MAX_EXECUTION_TIME.getKey(), maxExecuteTime, JdbcConfig.PROP_CONTINUE_BATCH); - - for (DataSourceImpl ds : new DataSourceImpl[] { - new DataSourceImpl(url, properties), - new DataSourceImpl(urlWithCredentials, properties), - new DataSourceImpl(url + params), - new DataSourceImpl(urlWithCredentials + params), - }) { - for (ClickHouseConnection connection : new ClickHouseConnection[] { - ds.getConnection("default", getPassword()), - new Driver().connect(url, properties), - new Driver().connect(urlWithCredentials, properties), - new Driver().connect(urlWithCredentials + params, new Properties()), - (ClickHouseConnection) DriverManager.getConnection(url, properties), - (ClickHouseConnection) DriverManager.getConnection(urlWithCredentials, properties), - (ClickHouseConnection) DriverManager.getConnection(urlWithCredentials + params), - (ClickHouseConnection) DriverManager.getConnection(url + params, "default", getPassword()), - (ClickHouseConnection) DriverManager.getConnection(urlWithCredentials + params, "default", getPassword()), - }) { - try (ClickHouseConnection conn = connection; Statement stmt = conn.createStatement()) { - Assert.assertEquals(conn.getClientInfo(ClickHouseConnection.PROP_APPLICATION_NAME), clientName); - Assert.assertEquals(stmt.getQueryTimeout(), maxExecuteTime); - 
Assert.assertEquals(conn.getJdbcConfig().isContinueBatchOnError(), continueBatchOnError); - } - } - } - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceTest.java new file mode 100644 index 000000000..49aece048 --- /dev/null +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DataSourceTest.java @@ -0,0 +1,68 @@ +package com.clickhouse.jdbc; + +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Properties; + +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertNotNull; + +import com.clickhouse.client.ClickHouseServerForTest; +import org.testng.annotations.AfterTest; +import org.testng.annotations.BeforeTest; +import org.testng.annotations.Test; + +public class DataSourceTest extends JdbcIntegrationTest { + private DataSourceImpl dataSource; + + @BeforeTest + public void setUp() { + dataSource = new DataSourceImpl(); + dataSource.setUrl(getEndpointString()); + Properties info = new Properties(); + info.setProperty("user", "default"); + info.setProperty("password", ClickHouseServerForTest.getPassword()); + dataSource.setProperties(info); + } + + @AfterTest + public void tearDown() { + dataSource = null; + } + + @Test + public void testGetConnection() throws SQLException { + Connection connection = dataSource.getConnection(); + assertNotNull(connection); + connection.close(); + } + + @Test + public void testGetConnectionWithUserAndPassword() throws SQLException { + Connection connection = dataSource.getConnection("default", ClickHouseServerForTest.getPassword()); + assertNotNull(connection); + connection.close(); + } + + @Test + public void testGetLogWriter() { + assertThrows(SQLFeatureNotSupportedException.class, () -> dataSource.getLogWriter()); + } + + @Test + public void testSetLogWriter() { + assertThrows(SQLFeatureNotSupportedException.class, () -> dataSource.setLogWriter(null)); + } + + @Test + public void testSetLoginTimeout() { + assertThrows(SQLFeatureNotSupportedException.class, () -> dataSource.setLoginTimeout(0)); + } + + @Test + public void testGetLoginTimeout() { + assertThrows(SQLFeatureNotSupportedException.class, () -> dataSource.getLoginTimeout()); + } + +} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DatabaseMetaDataTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DatabaseMetaDataTest.java deleted file mode 100644 index 22fb1b1b3..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DatabaseMetaDataTest.java +++ /dev/null @@ -1,279 +0,0 @@ -package com.clickhouse.jdbc; - -import java.sql.DatabaseMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Locale; -import java.util.Properties; -import java.util.UUID; - -import com.clickhouse.client.config.ClickHouseClientOption; -import com.clickhouse.data.ClickHouseColumn; - -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; -import org.testng.Assert; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -public class DatabaseMetaDataTest extends JdbcIntegrationTest { - - private static final Logger log = LoggerFactory.getLogger(DatabaseMetaDataTest.class); - @DataProvider(name = "selectedColumns") - private Object[][] getSelectedColumns() { - return new Object[][] { - // COLUMN_SIZE, DECIMAL_DIGITS, CHAR_OCTET_LENGTH - // new Object[] { "Bool", 1, null, 
null }, // Bool was an alias before 21.12 - new Object[] { "Int8", 3, 0, null }, - new Object[] { "UInt8", 3, 0, null }, - new Object[] { "FixedString(3)", 3, null, 3 }, - new Object[] { "String", 0, null, null }, - new Object[] { "Date", 10, 0, null }, - new Object[] { "DateTime64(5)", 29, 5, null }, - new Object[] { "Decimal64(10)", 18, 10, null }, - new Object[] { "Decimal(10,2)", 10, 2, null }, - new Object[] { "Decimal(12,0)", 12, 0, null }, - new Object[] { "Float32", 12, 0, null }, - new Object[] { "Float64", 22, 0, null } }; - } - - @Test(groups = "integration") - public void testDatabaseTerm() throws SQLException { - Properties props = new Properties(); - props.setProperty("databaseTerm", "schema"); - try (ClickHouseConnection conn = newConnection(props)) { - DatabaseMetaData md = conn.getMetaData(); - Assert.assertEquals(md.getCatalogTerm(), "catalog"); - Assert.assertFalse(md.getCatalogs().next()); - Assert.assertFalse(md.supportsCatalogsInDataManipulation()); - Assert.assertFalse(md.supportsCatalogsInIndexDefinitions()); - Assert.assertFalse(md.supportsCatalogsInPrivilegeDefinitions()); - Assert.assertFalse(md.supportsCatalogsInProcedureCalls()); - Assert.assertFalse(md.supportsCatalogsInTableDefinitions()); - - Assert.assertEquals(md.getSchemaTerm(), "database"); - Assert.assertTrue(md.getSchemas().next()); - Assert.assertTrue(md.supportsSchemasInDataManipulation()); - Assert.assertTrue(md.supportsSchemasInIndexDefinitions()); - Assert.assertTrue(md.supportsSchemasInPrivilegeDefinitions()); - Assert.assertTrue(md.supportsSchemasInProcedureCalls()); - Assert.assertTrue(md.supportsSchemasInTableDefinitions()); - } - - props.setProperty("databaseTerm", "catalog"); - try (ClickHouseConnection conn = newConnection(props)) { - DatabaseMetaData md = conn.getMetaData(); - Assert.assertEquals(md.getCatalogTerm(), "database"); - Assert.assertTrue(md.getCatalogs().next()); - Assert.assertTrue(md.supportsCatalogsInDataManipulation()); - Assert.assertTrue(md.supportsCatalogsInIndexDefinitions()); - Assert.assertTrue(md.supportsCatalogsInPrivilegeDefinitions()); - Assert.assertTrue(md.supportsCatalogsInProcedureCalls()); - Assert.assertTrue(md.supportsCatalogsInTableDefinitions()); - - Assert.assertEquals(md.getSchemaTerm(), "schema"); - Assert.assertFalse(md.getSchemas().next()); - Assert.assertFalse(md.supportsSchemasInDataManipulation()); - Assert.assertFalse(md.supportsSchemasInIndexDefinitions()); - Assert.assertFalse(md.supportsSchemasInPrivilegeDefinitions()); - Assert.assertFalse(md.supportsSchemasInProcedureCalls()); - Assert.assertFalse(md.supportsSchemasInTableDefinitions()); - } - } - - @Test(groups = "integration") - public void testGetTypeInfo() throws SQLException { - Properties props = new Properties(); - props.setProperty("decompress", "0"); - try (ClickHouseConnection conn = newConnection(props); ResultSet rs = conn.getMetaData().getTypeInfo()) { - while (rs.next()) { - Assert.assertNotNull(rs.getString(1)); - } - } - } - - @Test(groups = "integration") - public void testGetClientInfo() throws SQLException { - String clientName = ""; - Properties props = new Properties(); - try (ClickHouseConnection conn = newConnection(props); - ResultSet rs = conn.getMetaData().getClientInfoProperties()) { - while (rs.next()) { - if (ClickHouseConnection.PROP_APPLICATION_NAME.equals(rs.getString(1))) { - clientName = rs.getString(3); - } - } - Assert.assertEquals(clientName, ClickHouseClientOption.CLIENT_NAME.getDefaultValue()); - } - - 
props.setProperty(ClickHouseClientOption.CLIENT_NAME.getKey(), "client1"); - try (ClickHouseConnection conn = newConnection(props)) { - clientName = ""; - try (ResultSet rs = conn.getMetaData().getClientInfoProperties()) { - while (rs.next()) { - if (ClickHouseConnection.PROP_APPLICATION_NAME.equals(rs.getString(1))) { - clientName = rs.getString(3); - } - } - Assert.assertEquals(clientName, "client1"); - } - - conn.setClientInfo(ClickHouseConnection.PROP_APPLICATION_NAME, "client2"); - clientName = ""; - try (ResultSet rs = conn.getMetaData().getClientInfoProperties()) { - while (rs.next()) { - if (ClickHouseConnection.PROP_APPLICATION_NAME.equals(rs.getString(1))) { - clientName = rs.getString(3); - } - } - Assert.assertEquals(clientName, "client2"); - } - } - } - - @Test(dataProvider = "selectedColumns", groups = "integration") - public void testGetColumns(String columnType, Integer columnSize, Integer decimalDigits, Integer octectLength) - throws SQLException { - ClickHouseColumn c = ClickHouseColumn.of("x", columnType); - String tableName = "test_get_column_" + c.getDataType().name().toLowerCase(); - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - s.execute("drop table if exists " + tableName + "; " - + "create table " + tableName + "(x " + columnType + ") engine=Memory"); - try (ResultSet rs = conn.getMetaData().getColumns(conn.getCatalog(), conn.getSchema(), tableName, "%")) { - Assert.assertTrue(rs.next(), "Should have one record"); - Assert.assertEquals(rs.getString("cOLUMN_NAME"), "x"); - Assert.assertEquals(rs.getObject("COLUMN_SIZE"), columnSize); - Assert.assertEquals(rs.getObject("DECIMAL_DIGITS"), decimalDigits); - Assert.assertEquals(rs.getObject("CHAR_OCTET_LENGTH"), octectLength); - Assert.assertFalse(rs.next(), "Should have only one record"); - } - } - } - - @Test(groups = "integration") - public void testMaxRows() throws SQLException { - Properties props = new Properties(); - props.setProperty(ClickHouseClientOption.MAX_RESULT_ROWS.getKey(), "1"); - int count = 0; - try (ClickHouseConnection conn = newConnection(props)) { - try (ResultSet rs = conn.getMetaData().getColumns(conn.getCatalog(), conn.getSchema(), "%", "%")) { - while (rs.next()) { - count++; - } - } - } - Assert.assertTrue(count > 1, "Should have more than one row returned"); - } - - @Test(groups = "integration") - public void testTableComment() throws SQLException { - String tableName = "test_table_comment"; - String tableComment = "table comments"; - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - // https://github.com/ClickHouse/ClickHouse/pull/30852 - if (!conn.getServerVersion().check("[21.6,)")) { - return; - } - - s.execute(String.format(Locale.ROOT, - "drop table if exists %1$s; create table %1$s(s String) engine=Memory comment '%2$s'", - tableName, tableComment)); - try (ResultSet rs = conn.getMetaData().getTables(conn.getCatalog(), conn.getSchema(), tableName, null)) { - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString("remarks"), tableComment); - Assert.assertFalse(rs.next()); - } - } - } - - @Test(groups = "integration") - public void testGetTables() throws SQLException { - if (isCloud()) return; //TODO: testGetTables - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - String db1 = "a" + UUID.randomUUID().toString().replace('-', 'X'); - String db2 = "b" + UUID.randomUUID().toString().replace('-', 'X'); - 
String tableName = "test_get_tables"; - Properties props = new Properties(); - props.setProperty("databaseTerm", "catalog"); - try (ClickHouseConnection conn = newConnection(new Properties()); - Statement s = conn.createStatement()) { - // no record - try (ResultSet rs = s.executeQuery("select * from numbers(1) where number=-1")) { - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - - Assert.assertFalse(rs.next(), "Should NOT have any row"); - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - - Assert.assertFalse(rs.next(), "Should NOT have any row"); - } - - try (ResultSet rs = conn.getMetaData().getTables(null, null, UUID.randomUUID().toString(), null)) { - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - - Assert.assertFalse(rs.next(), "Should NOT have any row"); - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertFalse(rs.isFirst(), "Should NOT be the first"); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last"); - Assert.assertFalse(rs.isLast(), "Should NOT be the last"); - - Assert.assertFalse(rs.next(), "Should NOT have any row"); - } - - s.execute(String.format(Locale.ROOT, "create database %2$s; create database %3$s; " - + "create table %2$s.%1$s(id Int32, value String)engine=Memory; " - + "create view %3$s.%1$s as select * from %2$s.%1$s", tableName, db1, db2)); - try (ResultSet rs = conn.getMetaData().getTables(null, null, tableName, - new String[] { "MEMORY TABLE", "VIEW" })) { - Assert.assertTrue(rs.isBeforeFirst(), "Should be before the first"); - Assert.assertTrue(rs.next()); - Assert.assertTrue(rs.isFirst(), "Should be the first row"); - Assert.assertEquals(rs.getString("TABLE_CAT"), db1); - Assert.assertEquals(rs.getString("TABLE_NAME"), tableName); - Assert.assertTrue(rs.next()); - Assert.assertEquals(rs.getString("TABLE_CAT"), db2); - Assert.assertEquals(rs.getString("TABLE_NAME"), tableName); - Assert.assertTrue(rs.isLast(), "Should be the last row"); - Assert.assertFalse(rs.next()); - Assert.assertTrue(rs.isAfterLast(), "Should be after the last row"); - } finally { - s.execute(String.format(Locale.ROOT, "drop database %1$s; drop database %2$s", db1, db2)); - } - - try (ResultSet rs1 = s.executeQuery("select * from system.tables"); - ResultSet rs2 = conn.getMetaData().getTables(null, null, null, null)) { - int count1 = 0 , count1withOutSystem = 0; - while (rs1.next()) { - log.debug("%s.%s", rs1.getString(1) , rs1.getString(2)); - String databaseName = rs1.getString(1); - count1++; - if (!databaseName.equals("system")) - count1withOutSystem++; - } - log.debug("--------- SEP ---------"); - int count2 = 0, count2withOutSystem = 0; - while (rs2.next()) { - log.debug("%s.%s", rs2.getString("TABLE_CAT") , rs2.getString("TABLE_NAME")); - String databaseName = rs2.getString("TABLE_CAT"); - count2++; - if (!databaseName.equals("system")) - count2withOutSystem++; - } - - Assert.assertEquals(rs1.getRow(), 
count1); - Assert.assertEquals(rs2.getRow(), count2); - Assert.assertEquals(count1withOutSystem, count2withOutSystem); - } - } - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java index 5cb3dfc13..2b785acea 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java @@ -1,30 +1,93 @@ package com.clickhouse.jdbc; +import java.sql.DriverManager; import java.sql.SQLException; - -import com.clickhouse.client.ClickHouseProtocol; +import java.util.Properties; import com.clickhouse.jdbc.internal.ClickHouseJdbcUrlParser; import org.testng.Assert; +import org.testng.annotations.AfterTest; +import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; public class DriverTest extends JdbcIntegrationTest { - @Test(groups = "integration") - public void testAcceptUrl() throws SQLException { - String address = getServerAddress(ClickHouseProtocol.HTTP, true); - Driver driver = new Driver(); - Assert.assertTrue(driver.acceptsURL("jdbc:clickhouse://" + address)); - Assert.assertTrue(driver.acceptsURL("jdbc:clickhouse:http://" + address)); - Assert.assertTrue(driver.acceptsURL("jdbc:ch://" + address)); - Assert.assertTrue(driver.acceptsURL("jdbc:ch:http://" + address)); - } - - @Test(groups = "integration") - public void testConnect() throws SQLException { - if (isCloud()) return; //TODO: testConnect - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - String address = getServerAddress(ClickHouseProtocol.HTTP, true); - Driver driver = new Driver(); - ClickHouseConnection conn = driver.connect("jdbc:clickhouse://" + address, null); - conn.close(); + private Driver driver; + + @BeforeTest + public void setUp() { + driver = new Driver(); + } + + @AfterTest + public void tearDown() { + driver = null; + } + + @Test(groups = { "unit" }) + public void testDriver() { + try { + DriverManager.registerDriver(new Driver()); + } catch (SQLException e) { + Assert.fail("Failed to register ClickHouse JDBC driver", e); + } + } + + @Test(groups = { "unit" }) + public void testConnect() { + try { + Assert.assertNotNull(driver.connect(getEndpointString(), new Properties())); + } catch (SQLException e) { + Assert.fail("Failed to connect to ClickHouse", e); + } + } + + @Test(groups = { "unit" }) + public void testAcceptsURL() { + try { + Assert.assertTrue(driver.acceptsURL(getEndpointString())); + Assert.assertTrue(driver.acceptsURL("jdbc:ch://localhost:8123")); + Assert.assertTrue(driver.acceptsURL("jdbc:clickhouse://localhost:8123")); + Assert.assertTrue(driver.acceptsURL("jdbc:clickhouse://localhost:8123?user=default&password=clickhouse")); + Assert.assertFalse(driver.acceptsURL("jdbc:something://localhost:8123")); + } catch (SQLException e) { + Assert.fail("Failed to accept URL", e); + } + } + + @Test(groups = { "unit" }) + public void testGetPropertyInfo() { + try { + Assert.assertEquals(driver.getPropertyInfo(getEndpointString(), new Properties()).length, 7); + Properties sample = new Properties(); + sample.setProperty("testing", "true"); + Assert.assertEquals(driver.getPropertyInfo(getEndpointString(), sample).length, 7); + } catch (SQLException e) { + Assert.fail("Failed to get property info", e); + } + } + + @Test(groups = { "unit" }) + public void testGetMajorVersion() { + Assert.assertEquals(driver.getMajorVersion(), 0); + } + + @Test(groups = { "unit" }) + public void 
testGetMinorVersion() { + Assert.assertEquals(driver.getMinorVersion(), 0); + } + + @Test(groups = { "unit" }) + public void testJdbcCompliant() { + Assert.assertFalse(driver.jdbcCompliant()); + } + + @Test(groups = { "unit" }) + public void testGetParentLogger() { + try { + driver.getParentLogger(); + Assert.fail("Should not reach here"); + } catch (SQLException e) { + Assert.assertEquals(e.getMessage(), "Method not supported"); + } } } diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java index 9a8f50234..0000f9aaf 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java @@ -1,169 +1,33 @@ package com.clickhouse.jdbc; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Properties; - import com.clickhouse.client.ClickHouseServerForTest; -import org.testng.Assert; import com.clickhouse.client.BaseIntegrationTest; -import com.clickhouse.client.ClickHouseNode; import com.clickhouse.client.ClickHouseProtocol; -import com.clickhouse.client.http.config.ClickHouseHttpOption; - -public abstract class JdbcIntegrationTest extends BaseIntegrationTest { - private static final String CLASS_PREFIX = "ClickHouse"; - private static final String CLASS_SUFFIX = "Test"; - - protected static final String CUSTOM_PROTOCOL_NAME = System.getProperty("protocol", "http").toUpperCase(); - protected static final ClickHouseProtocol DEFAULT_PROTOCOL = ClickHouseProtocol - .valueOf(CUSTOM_PROTOCOL_NAME.indexOf("HTTP") >= 0 ? "HTTP" : CUSTOM_PROTOCOL_NAME); - - protected String buildJdbcUrl(ClickHouseProtocol protocol, String prefix, String url) { - if (url != null && url.startsWith("jdbc:")) { - return url; - } - - if (protocol == null) { - protocol = DEFAULT_PROTOCOL; - } - - StringBuilder builder = new StringBuilder(); - if (prefix == null || prefix.isEmpty()) { - builder.append("jdbc:clickhouse:").append(protocol.name().toLowerCase()).append("://"); - } else if (!prefix.startsWith("jdbc:")) { - builder.append("jdbc:").append(prefix); - } else { - builder.append(prefix); - } - - builder.append(getServerAddress(protocol)); - - if (url != null && !url.isEmpty()) { - if (url.charAt(0) != '/') { - builder.append('/'); - } - - builder.append(url); - } - - if (CUSTOM_PROTOCOL_NAME.indexOf("HTTP") >= 0 && !"HTTP".equals(CUSTOM_PROTOCOL_NAME)) { - builder.append('?').append(ClickHouseHttpOption.CONNECTION_PROVIDER.getKey()).append('=') - .append(CUSTOM_PROTOCOL_NAME); - } - return builder.toString(); - } - - protected void checkRowCount(Statement stmt, String queryOrTableName, int expectedRowCount) throws SQLException { - String sql = queryOrTableName.indexOf(' ') > 0 ? queryOrTableName - : "select count(1) from ".concat(queryOrTableName); - try (ResultSet rs = stmt.executeQuery(sql)) { - Assert.assertTrue(rs.next(), "Should have at least one record"); - Assert.assertEquals(rs.getInt(1), expectedRowCount); - Assert.assertFalse(rs.next(), "Should have only one record"); - } - } - - public String getServerAddress(ClickHouseProtocol protocol) { - return getServerAddress(protocol, false); - } - - public String getServerAddress(ClickHouseProtocol protocol, boolean useIPaddress) { - ClickHouseNode server = getServer(protocol); - - return new StringBuilder().append(useIPaddress ? 
getIpAddress(server) : server.getHost()).append(':') - .append(server.getPort()).toString(); - } - - public String getServerAddress(ClickHouseProtocol protocol, String customHostOrIp) { - ClickHouseNode server = getServer(protocol); - return new StringBuilder() - .append(customHostOrIp == null || customHostOrIp.isEmpty() ? server.getHost() : customHostOrIp) - .append(':').append(server.getPort()).toString(); - } - - public DataSourceImpl newDataSource() throws SQLException { - return newDataSource(null, new Properties()); - } - - public DataSourceImpl newDataSource(Properties properties) throws SQLException { - return newDataSource(null, properties); - } - - public DataSourceImpl newDataSource(String url) throws SQLException { - return newDataSource(url, new Properties()); - } - - public DataSourceImpl newDataSource(String url, Properties properties) throws SQLException { - if (isCloud()) { - if (properties == null) { - properties = new Properties(); - } - properties.put("password", getPassword()); - properties.put("user", "default"); - url = String.format("jdbc:clickhouse:https://%s/%s", getServerAddress(ClickHouseProtocol.HTTP), ClickHouseServerForTest.getDatabase()); - return new DataSourceImpl(buildJdbcUrl(DEFAULT_PROTOCOL, null, url), properties); - } - return new DataSourceImpl(buildJdbcUrl(DEFAULT_PROTOCOL, null, url), properties); - } - - public ClickHouseConnection newConnection() throws SQLException { - return newConnection(null); - } - - public ClickHouseConnection newConnection(Properties properties) throws SQLException { - try (ClickHouseConnection conn = newDataSource(properties).getConnection(); - ClickHouseStatement stmt = conn.createStatement();) { - stmt.execute("CREATE DATABASE IF NOT EXISTS " + ClickHouseServerForTest.getDatabase()); - } - - return newDataSource(ClickHouseServerForTest.getDatabase(), properties == null ? new Properties() : properties).getConnection(); - } - - public Connection newMySqlConnection(Properties properties) throws SQLException { - if (properties == null) { - properties = new Properties(); - } - - if (!properties.containsKey("user")) { - properties.setProperty("user", "default"); - } - if (!properties.containsKey("password")) { - properties.setProperty("password", getPassword()); - } - - String url = buildJdbcUrl(ClickHouseProtocol.MYSQL, "jdbc:mysql://", ClickHouseServerForTest.getDatabase()); - url += url.indexOf('?') > 0 ? "&useSSL="+isCloud() : "?useSSL="+isCloud(); - Connection conn = DriverManager.getConnection(url, properties); - - try (Statement stmt = conn.createStatement()) { - stmt.execute("CREATE DATABASE IF NOT EXISTS " + ClickHouseServerForTest.getDatabase()); - } - - return conn; - } - - public void closeConnection(Connection conn) throws SQLException { - if (conn == null) { - return; - } - - try (Statement stmt = conn.createStatement()) { - stmt.execute("DROP DATABASE IF EXISTS " + ClickHouseServerForTest.getDatabase()); - } finally { - conn.close(); - } - } +import com.clickhouse.logging.Logger; +import com.clickhouse.logging.LoggerFactory; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; +public abstract class JdbcIntegrationTest extends BaseIntegrationTest { + private static final Logger LOGGER = LoggerFactory.getLogger(JdbcIntegrationTest.class); public String getEndpointString() { return getEndpointString(false); } public String getEndpointString(boolean includeDbName) { - return (isCloud() ? 
"https" : "http") + "://" + getServerAddress(ClickHouseProtocol.HTTP) + "/" + (includeDbName ? ClickHouseServerForTest.getDatabase() : ""); + return "jdbc:ch:" + (isCloud() ? "https" : "http") + "://" + + ClickHouseServerForTest.getClickHouseAddress(ClickHouseProtocol.HTTP, false) + "/" + (includeDbName ? ClickHouseServerForTest.getDatabase() : ""); + } + + public Connection getJdbcConnection() throws SQLException { + Properties info = new Properties(); + info.setProperty("user", "default"); + info.setProperty("password", ClickHouseServerForTest.getPassword()); + LOGGER.info("Connecting to {}", getEndpointString()); + + return new ConnectionImpl(getEndpointString(), info); + //return DriverManager.getConnection(getEndpointString(), "default", ClickHouseServerForTest.getPassword()); } } diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIssuesTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIssuesTest.java deleted file mode 100644 index 771b34354..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIssuesTest.java +++ /dev/null @@ -1,152 +0,0 @@ -package com.clickhouse.jdbc; - -import org.testcontainers.shaded.org.apache.commons.lang3.StringUtils; -import org.testng.Assert; -import org.testng.annotations.Test; - -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Properties; - -public class JdbcIssuesTest extends JdbcIntegrationTest { - @Test(groups = "integration") - public void test01Decompress() throws SQLException { - String TABLE_NAME = "decompress_issue_01"; - Properties prop = new Properties(); - prop.setProperty("decompress", "true"); - prop.setProperty("decompress_algorithm", "lz4"); - String url = String.format("jdbc:ch:%s", getEndpointString(true)); - DataSourceImpl dataSource = new DataSourceImpl(url, prop); - String columnNames = "event_id"; - String columnValues = "('event_id String')"; - String sql = String.format("INSERT INTO %s (%s) SELECT %s FROM input %s", TABLE_NAME, columnNames, columnNames, columnValues); - - Connection conn = dataSource.getConnection("default", getPassword()); - Statement st = conn.createStatement(); - st.execute(String.format("DROP TABLE IF EXISTS %s", TABLE_NAME)); - st.execute(String.format("CREATE TABLE %s (`event_id` String) ENGINE = Memory", TABLE_NAME)); - - int count = 1; - boolean failed = false; - while (count <= 100000) { - String content = StringUtils.repeat("*", count); - try (PreparedStatement ps = conn.prepareStatement(sql)) { - ps.setString(1, content); - ps.addBatch(); - ps.executeBatch(); - } catch (SQLException sqlException) { - sqlException.printStackTrace(); - failed = true; - } - Assert.assertFalse(failed, String.format("Failed when content size %d", count)); - count *= 2; - } - } - - - @Test - public void test02Decompress() throws SQLException { - String TABLE_NAME = "decompress_issue_02"; - Properties prop = new Properties(); - prop.setProperty("decompress", "true"); - prop.setProperty("decompress_algorithm", "lz4"); - String url = String.format("jdbc:ch:%s", getEndpointString(true)); - DataSourceImpl dataSource = new DataSourceImpl(url, prop); - String columnNames = "event_id"; - String columnValues = "('event_id String')"; - String sql = String.format("INSERT INTO %s (%s) SELECT %s FROM input %s", TABLE_NAME, columnNames, columnNames, columnValues); - - Connection conn = dataSource.getConnection("default", getPassword()); - Statement st = conn.createStatement(); - 
st.execute(String.format("DROP TABLE IF EXISTS %s", TABLE_NAME)); - st.execute(String.format("CREATE TABLE %s (`event_id` String) ENGINE = Memory", TABLE_NAME)); - - int count = 1; - boolean failed = false; - - String content = StringUtils.repeat("*", count); - try (PreparedStatement ps = conn.prepareStatement(sql)) { - while (count <= 100000) { - ps.setString(1, content); - ps.addBatch(); - count *= 2; - } - ps.executeBatch(); - } catch (SQLException sqlException) { - sqlException.printStackTrace(); - failed = true; - } - Assert.assertFalse(failed, String.format("Failed when content size %d", count)); - } - @Test - public void test03Decompress() throws SQLException { - String TABLE_NAME = "decompress_issue_03"; - Properties prop = new Properties(); - prop.setProperty("decompress", "true"); - prop.setProperty("decompress_algorithm", "lz4"); - String url = String.format("jdbc:ch:%s", getEndpointString(true)); - DataSourceImpl dataSource = new DataSourceImpl(url, prop); - String columnNames = "event_id, num01,event_id_01 "; - String columnValues = "('event_id String, num01 Int8, event_id_01 String')"; - String sql = String.format("INSERT INTO %s (%s) SELECT %s FROM input %s", TABLE_NAME, columnNames, columnNames, columnValues); - - Connection conn = dataSource.getConnection("default", getPassword()); - Statement st = conn.createStatement(); - st.execute(String.format("DROP TABLE IF EXISTS %s", TABLE_NAME)); - st.execute(String.format("CREATE TABLE %s (`event_id` String, `num01` Int8, `event_id_01` String) ENGINE = Memory", TABLE_NAME)); - - int count = 1; - boolean failed = false; - - String content = StringUtils.repeat("*", 50000); - try (PreparedStatement ps = conn.prepareStatement(sql)) { - while (count <= 3) { - ps.setString(1, content); - ps.setInt(2, 10); - ps.setString(3, content); - ps.addBatch(); - count += 1; - } - ps.executeBatch(); - } catch (SQLException sqlException) { - sqlException.printStackTrace(); - failed = true; - } - Assert.assertFalse(failed, String.format("Failed when content size %d", count)); - } - @Test - public void testIssue1373() throws SQLException { - String TABLE_NAME = "issue_1373"; - String url = String.format("jdbc:ch:%s", getEndpointString(true)); - DataSourceImpl dataSource = new DataSourceImpl(url, new Properties()); - String columnNames = "event_id, num01,event_id_01 "; - String columnValues = "('event_id String, num01 Int8, event_id_01 String')"; - String sql = String.format("INSERT INTO %s (%s) SELECT %s FROM input %s", TABLE_NAME, columnNames, columnNames, columnValues); - Connection conn = dataSource.getConnection("default", getPassword()); - Statement st = conn.createStatement(); - st.execute(String.format("DROP TABLE IF EXISTS %s", TABLE_NAME)); - st.execute(String.format("CREATE TABLE %s (`event_id` String, `num01` Int8, `event_id_01` String) ENGINE = Memory", TABLE_NAME)); - int count = 1; - boolean failed = false; - try (PreparedStatement ps = conn.prepareStatement(sql)) { - while (count <= 10) { - try { - ps.setString(1, "******"); - ps.setInt(2, 10); - ps.setString( 3, count == 2 ? 
null : "--------"); - ps.addBatch(); - } catch (Exception e) { - //e.printStackTrace(); - } - count += 1; - } - ps.executeBatch(); - } catch (SQLException sqlException) { - sqlException.printStackTrace(); - failed = true; - } - Assert.assertFalse(failed, String.format("executeBatch got exception")); - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcParameterizedQueryTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcParameterizedQueryTest.java deleted file mode 100644 index cedcfae5d..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcParameterizedQueryTest.java +++ /dev/null @@ -1,63 +0,0 @@ -package com.clickhouse.jdbc; - -import java.util.Arrays; - -import com.clickhouse.client.ClickHouseConfig; - -import org.testng.Assert; -import org.testng.annotations.Test; - -public class JdbcParameterizedQueryTest { - private final ClickHouseConfig config = new ClickHouseConfig(); - - @Test(groups = "unit") - public void testParseBlankQueries() { - Assert.assertThrows(IllegalArgumentException.class, () -> JdbcParameterizedQuery.of(config, null)); - Assert.assertThrows(IllegalArgumentException.class, () -> JdbcParameterizedQuery.of(config, "")); - Assert.assertThrows(IllegalArgumentException.class, () -> JdbcParameterizedQuery.of(config, " \n\t\r")); - } - - @Test(groups = "unit") - public void testParseQueriesWithNamedParameter() { - String sql = "select :no, :name(String)"; - JdbcParameterizedQuery q = JdbcParameterizedQuery.of(config, sql); - Assert.assertEquals(q.getOriginalQuery(), sql); - Assert.assertEquals(q.hasParameter(), false); - } - - @Test(groups = "unit") - public void testParseJdbcQueries() { - StringBuilder builder = new StringBuilder(); - String sql = "select ?(number % 2 == 0 ? 1 : 0) from numbers(100) where number > ?"; - JdbcParameterizedQuery q = JdbcParameterizedQuery.of(config, sql); - Assert.assertEquals(q.getOriginalQuery(), sql); - Assert.assertEquals(q.hasParameter(), true); - Assert.assertEquals(q.getParameters(), Arrays.asList("0", "1")); - builder.setLength(0); - q.apply(builder, "sum", "1"); - Assert.assertEquals(builder.toString(), - "select sum(number % 2 == 0 ? 1 : 0) from numbers(100) where number > 1"); - - Assert.assertEquals(JdbcParameterizedQuery.of(config, "select '; select 2' as ?").hasParameter(), true); - Assert.assertThrows(IllegalArgumentException.class, - () -> JdbcParameterizedQuery.of(config, "select 1; select 2")); - - sql = "select 1 ? 'a' : 'b', 2 ? (select 1) : 2, ?"; - q = JdbcParameterizedQuery.of(config, sql); - Assert.assertEquals(q.getOriginalQuery(), sql); - Assert.assertEquals(q.hasParameter(), true); - Assert.assertEquals(q.getParameters(), Arrays.asList("0")); - builder.setLength(0); - q.apply(builder, "3"); - Assert.assertEquals(builder.toString(), "select 1 ? 'a' : 'b', 2 ? 
(select 1) : 2, 3"); - - sql = "select ?::?"; - q = JdbcParameterizedQuery.of(config, sql); - Assert.assertEquals(q.getOriginalQuery(), sql); - Assert.assertEquals(q.hasParameter(), true); - Assert.assertEquals(q.getParameters(), Arrays.asList("0", "1")); - builder.setLength(0); - q.apply(builder, 1, new StringBuilder("Int8")); - Assert.assertEquals(builder.toString(), "select 1::Int8"); - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcParseHandlerTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcParseHandlerTest.java deleted file mode 100644 index 4c2b52a76..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcParseHandlerTest.java +++ /dev/null @@ -1,345 +0,0 @@ -package com.clickhouse.jdbc; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import com.clickhouse.jdbc.parser.ClickHouseSqlParser; -import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; -import com.clickhouse.jdbc.parser.StatementType; - -import org.testng.Assert; -import org.testng.annotations.Test; - -public class JdbcParseHandlerTest { - @Test(groups = "unit") - public void testInsertFromInFileStatement() { - JdbcParseHandler handler = JdbcParseHandler.getInstance(false, false, true); - Assert.assertEquals(ClickHouseSqlParser.parse("INSERT INTO aaa", null, handler)[0].getSQL(), "INSERT INTO aaa"); - Assert.assertEquals(ClickHouseSqlParser.parse("INSERT INTO aaa INFILE", null, handler)[0].getSQL(), - "INSERT INTO aaa INFILE"); // invalid - Assert.assertEquals(ClickHouseSqlParser.parse("INSERT INTO aaa FROM INFILE", null, handler)[0].getSQL(), - "INSERT INTO aaa FROM INFILE"); // invalid - Assert.assertEquals(ClickHouseSqlParser.parse("INSERT INTO aaa FROM INFILE 'a.csv'", null, handler)[0].getSQL(), - "INSERT INTO aaa FORMAT CSV"); - Assert.assertEquals( - ClickHouseSqlParser.parse("INSERT INTO aaa FROM INFILE 'a.csv' Format CSV", null, handler)[0].getSQL(), - "INSERT INTO aaa Format CSV"); - Assert.assertEquals( - ClickHouseSqlParser.parse("INSERT INTO aaa FROM INFILE 'a.csv' settings a=2", null, handler)[0] - .getSQL(), - "INSERT INTO aaa settings a=2"); - Assert.assertEquals( - ClickHouseSqlParser.parse("INSERT INTO aaa FROM INFILE 'a.csv.gz' compression 'gzip' settings a=2", - null, handler)[0] - .getSQL(), - "INSERT INTO aaa settings a=2"); - Assert.assertEquals( - ClickHouseSqlParser.parse( - "INSERT INTO aaa FROM INFILE 'input_*.csv.gz' compression 'gzip' settings max_result_rows=1, max_execution_time=2 FORMAT CSV", - null, handler)[0].getSQL(), - "INSERT INTO aaa settings max_result_rows=1, max_execution_time=2 FORMAT CSV"); - } - - @Test(groups = "unit") - public void testSelectIntoOutFileStatement() { - JdbcParseHandler handler = JdbcParseHandler.getInstance(false, false, true); - Assert.assertEquals(ClickHouseSqlParser.parse("select 1", null, handler)[0].getSQL(), "select 1"); - Assert.assertEquals(ClickHouseSqlParser.parse("select * from outfile", null, handler)[0].getSQL(), - "select * from outfile"); - Assert.assertEquals(ClickHouseSqlParser.parse("select into outfile", null, handler)[0].getSQL(), - "select into outfile"); // invalid - Assert.assertEquals(ClickHouseSqlParser.parse("select 1 into outfile 'a.csv'", null, handler)[0].getSQL(), - "select 1 "); - Assert.assertEquals(ClickHouseSqlParser.parse( - "select * from numbers(10) settings max_result_rows=1,max_execution_time=3 into outfile 'a.csv' format CSV", - null, handler)[0].getSQL(), - "select * from numbers(10) settings 
max_result_rows=1,max_execution_time=3 format CSV"); - } - - @Test(groups = "unit") - public void testParseDeleteStatement() { - Assert.assertEquals( - ClickHouseSqlParser.parse("delete from tbl", null, JdbcParseHandler.INSTANCE)[0] - .getSQL(), - "TRUNCATE TABLE tbl"); - Assert.assertEquals( - ClickHouseSqlParser.parse("delete from tbl where 1", null, JdbcParseHandler.INSTANCE)[0] - .getSQL(), - "ALTER TABLE `tbl` DELETE where 1 SETTINGS mutations_sync=1"); - Assert.assertEquals( - ClickHouseSqlParser.parse("delete from tbl where 1 and 1 settings a=1 format CSV", null, - JdbcParseHandler.INSTANCE)[0].getSQL(), - "ALTER TABLE `tbl` DELETE where 1 and 1 SETTINGS mutations_sync=1, a=1 format CSV"); - Assert.assertEquals( - ClickHouseSqlParser.parse( - "delete from tbl where 1 and 1 settings mutations_sync=0 format CSV", - null, - JdbcParseHandler.INSTANCE)[0].getSQL(), - "ALTER TABLE `tbl` DELETE where 1 and 1 settings mutations_sync=0 format CSV"); - } - - @Test(groups = "unit") - public void testParseInsertStatement() { - } - - @Test(groups = "unit") - public void testParseUpdateStatement() { - Assert.assertEquals( - ClickHouseSqlParser.parse("update tbl set a=1", null, JdbcParseHandler.INSTANCE)[0] - .getSQL(), - "ALTER TABLE `tbl` UPDATE a=1 SETTINGS mutations_sync=1"); - Assert.assertEquals( - ClickHouseSqlParser.parse("update tbl set a=1,b=2 where 1", null, - JdbcParseHandler.INSTANCE)[0] - .getSQL(), - "ALTER TABLE `tbl` UPDATE a=1,b=2 where 1 SETTINGS mutations_sync=1"); - Assert.assertEquals( - ClickHouseSqlParser.parse( - "update tbl set x=1, y = 2 where 1 and 1 settings a=1 format CSV", null, - JdbcParseHandler.INSTANCE)[0].getSQL(), - "ALTER TABLE `tbl` UPDATE x=1, y = 2 where 1 and 1 SETTINGS mutations_sync=1, a=1 format CSV"); - Assert.assertEquals( - ClickHouseSqlParser.parse( - "update tbl set y = 2 where 1 and 1 settings mutations_sync=0 format CSV", - null, JdbcParseHandler.INSTANCE)[0].getSQL(), - "ALTER TABLE `tbl` UPDATE y = 2 where 1 and 1 settings mutations_sync=0 format CSV"); - } - - @Test(groups = "unit") - public void testDeleteStatementWithoutWhereClause() { - Assert.assertEquals(JdbcParseHandler.INSTANCE.handleStatement("delete from `a\\`' a` . tbl", - StatementType.DELETE, null, "a\\`' a", "tbl", null, null, null, null, null, null, - new HashMap() { - { - put("DELETE", 0); - put("FROM", 8); - } - }, null, null), - new ClickHouseSqlStatement("TRUNCATE TABLE `a\\`' a` . tbl", StatementType.DELETE, - null, "a\\`' a", "tbl", null, null, null, null, null, null, null, null, null)); - Assert.assertEquals(JdbcParseHandler.INSTANCE.handleStatement("delete from `table\\`'1`", - StatementType.DELETE, - null, null, "table1", null, null, null, null, null, null, new HashMap() { - { - put("DELETE", 0); - put("FROM", 7); - } - }, null, null), - new ClickHouseSqlStatement("TRUNCATE TABLE `table\\`'1`", StatementType.DELETE, null, - null, "table1", null, null, null, null, null, null, null, null, null), - null); - } - - @Test(groups = "unit") - public void testDeleteStatementWithWhereClause() { - Map positions = new HashMap<>(); - positions.put("DELETE", 0); - positions.put("FROM", 7); - positions.put("WHERE", 28); - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement("delete from `a\\`' a` . 
tbl where a = b", - StatementType.DELETE, null, "a\\`' a", "tbl", null, null, null, null, null, null, positions, - null, null), - new ClickHouseSqlStatement( - "ALTER TABLE `a\\`' a`.`tbl` DELETE where a = b SETTINGS mutations_sync=1", - StatementType.DELETE, null, "a\\`' a", "tbl", null, null, null, null, null, null, null, null, - null)); - positions.put("DELETE", 0); - positions.put("FROM", 8); - positions.put("WHERE", 26); - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement("delete from `table\\`'1` where 1", - StatementType.DELETE, null, null, "table\\`'1", null, null, null, null, null, null, positions, - null, null), - new ClickHouseSqlStatement( - "ALTER TABLE `table\\`'1` DELETE where 1 SETTINGS mutations_sync=1", - StatementType.DELETE, null, null, "table\\`'1", null, null, null, null, null, - null, null, null, null)); - } - - @Test(groups = "unit") - public void testDeleteStatementWithSettings() { - String sql1 = "delete from tbl settings a=1"; - Map positions = new HashMap() { - { - put("DELETE", 0); - put("FROM", sql1.indexOf("from")); - put("SETTINGS", sql1.indexOf("settings")); - } - }; - Map settings = Collections.singletonMap("a", "1"); - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement(sql1, StatementType.DELETE, null, null, "tbl", - null, null, null, null, null, null, positions, settings, null), - new ClickHouseSqlStatement("TRUNCATE TABLE tbl settings a=1", StatementType.DELETE, - null, null, "tbl", null, null, null, null, null, null, null, settings, null)); - - String sql2 = "delete from tbl where a != 1 and b != 2 settings a=1,b='a'"; - positions = new HashMap() { - { - put("DELETE", 0); - put("FROM", sql2.indexOf("from")); - put("WHERE", sql2.indexOf("where")); - put("SETTINGS", sql2.indexOf("settings")); - } - }; - settings = new HashMap() { - { - put("a", "1"); - put("b", "'a'"); - } - }; - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement(sql2, StatementType.DELETE, null, null, "tbl", - null, null, null, null, null, null, positions, settings, null), - new ClickHouseSqlStatement( - "ALTER TABLE `tbl` DELETE where a != 1 and b != 2 SETTINGS mutations_sync=1, a=1,b='a'", - StatementType.DELETE, null, null, "tbl", null, null, null, null, null, null, null, - settings, null)); - - String sql3 = "delete from tbl where a != 1 and b != 2 settings a=1,mutations_sync=2,b='a'"; - positions = new HashMap() { - { - put("DELETE", 0); - put("FROM", sql3.indexOf("from")); - put("WHERE", sql3.indexOf("where")); - put("SETTINGS", sql3.indexOf("settings")); - } - }; - settings = new HashMap() { - { - put("a", "1"); - put("mutations_sync", "2"); - put("b", "'a'"); - } - }; - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement(sql3, StatementType.DELETE, null, null, "tbl", - null, null, null, null, null, null, positions, settings, null), - new ClickHouseSqlStatement( - "ALTER TABLE `tbl` DELETE where a != 1 and b != 2 settings a=1,mutations_sync=2,b='a'", - StatementType.DELETE, null, null, "tbl", null, null, null, null, null, null, null, settings, - null)); - } - - @Test(groups = "unit") - public void testUpdateStatementWithoutWhereClause() { - Assert.assertEquals(JdbcParseHandler.INSTANCE.handleStatement("update `a\\`' a` . 
tbl set a=1", - StatementType.UPDATE, null, "a\\`' a", "tbl", null, null, null, null, null, null, - new HashMap() { - { - put("UPDATE", 0); - put("SET", 23); - } - }, null, null), - new ClickHouseSqlStatement( - "ALTER TABLE `a\\`' a`.`tbl` UPDATE a=1 SETTINGS mutations_sync=1", - StatementType.UPDATE, null, "a\\`' a", "tbl", null, null, null, null, null, null, null, null, - null)); - Assert.assertEquals(JdbcParseHandler.INSTANCE.handleStatement("update `table\\`'1` set a=1", - StatementType.UPDATE, null, null, "table1", null, null, null, null, null, null, - new HashMap() { - { - put("UPDATE", 0); - put("SET", 20); - } - }, null, null), - new ClickHouseSqlStatement("ALTER TABLE `table1` UPDATE a=1 SETTINGS mutations_sync=1", - StatementType.UPDATE, null, null, "table1", null, null, null, null, null, null, null, null, - null)); - } - - @Test(groups = "unit") - public void testUpdateStatementWithWhereClause() { - Map positions = new HashMap<>(); - positions.put("UPDATE", 0); - positions.put("SET", 23); - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement( - "Update `a\\`' a` . tbl set a = 2 where a = b", - StatementType.UPDATE, null, "a\\`' a", "tbl", null, null, null, null, null, null, positions, - null, null), - new ClickHouseSqlStatement( - "ALTER TABLE `a\\`' a`.`tbl` UPDATE a = 2 where a = b SETTINGS mutations_sync=1", - StatementType.UPDATE, null, "a\\`' a", "tbl", null, null, null, null, null, null, null, null, - null)); - positions.put("UPDATE", 0); - positions.put("SET", 19); - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement("update `table\\`'1` set a = b where 1", - StatementType.UPDATE, null, null, "table\\`'1", null, null, null, null, null, null, positions, - null, null), - new ClickHouseSqlStatement( - "ALTER TABLE `table\\`'1` UPDATE a = b where 1 SETTINGS mutations_sync=1", - StatementType.UPDATE, null, null, "table\\`'1", null, null, null, null, null, null, null, null, - null)); - } - - @Test(groups = "unit") - public void testUpdateStatementWithSettings() { - String sql1 = "update tbl set x=1 settings a=1"; - Map positions = new HashMap() { - { - put("UPDATE", 0); - put("SET", sql1.indexOf("set")); - put("SETTINGS", sql1.indexOf("settings")); - } - }; - Map settings = Collections.singletonMap("a", "1"); - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement(sql1, StatementType.UPDATE, null, null, "tbl", - null, null, null, null, null, null, positions, settings, null), - new ClickHouseSqlStatement( - "ALTER TABLE `tbl` UPDATE x=1 SETTINGS mutations_sync=1, a=1", - StatementType.UPDATE, null, null, "tbl", null, null, null, null, null, null, null, settings, - null)); - - String sql2 = "update tbl set x=1, y=2 where a != 1 and b != 2 settings a=1,b='a'"; - positions = new HashMap() { - { - put("UPDATE", 0); - put("SET", sql1.indexOf("set")); - put("WHERE", sql2.indexOf("where")); - put("SETTINGS", sql2.indexOf("settings")); - } - }; - settings = new HashMap() { - { - put("a", "1"); - put("b", "'a'"); - } - }; - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement(sql2, StatementType.UPDATE, null, null, "tbl", - null, null, null, null, null, null, positions, settings, null), - new ClickHouseSqlStatement( - "ALTER TABLE `tbl` UPDATE x=1, y=2 where a != 1 and b != 2 SETTINGS mutations_sync=1, a=1,b='a'", - StatementType.UPDATE, null, null, "tbl", null, null, null, null, null, null, null, settings, - null)); - - String sql3 = "update tbl set x=1,y=2 where a != 1 and b != 2 settings a=1,mutations_sync=2,b='a'"; - positions = new 
HashMap() { - { - put("UPDATE", 0); - put("SET", sql1.indexOf("set")); - put("WHERE", sql3.indexOf("where")); - put("SETTINGS", sql3.indexOf("settings")); - } - }; - settings = new HashMap() { - { - put("a", "1"); - put("mutations_sync", "2"); - put("b", "'a'"); - } - }; - Assert.assertEquals( - JdbcParseHandler.INSTANCE.handleStatement(sql3, StatementType.UPDATE, null, null, "tbl", - null, null, null, null, null, null, positions, settings, null), - new ClickHouseSqlStatement( - "ALTER TABLE `tbl` UPDATE x=1,y=2 where a != 1 and b != 2 settings a=1,mutations_sync=2,b='a'", - StatementType.UPDATE, null, null, "tbl", null, null, null, null, null, null, null, settings, - null)); - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java deleted file mode 100644 index d7f42ab37..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/ClickHouseConnectionImplTest.java +++ /dev/null @@ -1,249 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.sql.SQLException; -import java.sql.Savepoint; -import java.util.Properties; - -import com.clickhouse.client.ClickHouseRequest; -import com.clickhouse.jdbc.ClickHouseConnection; -import com.clickhouse.jdbc.Driver; -import com.clickhouse.jdbc.ClickHouseStatement; -import com.clickhouse.jdbc.JdbcIntegrationTest; -import com.clickhouse.jdbc.parser.ClickHouseSqlStatement; - -import org.testng.Assert; -import org.testng.annotations.Test; - -public class ClickHouseConnectionImplTest extends JdbcIntegrationTest { - @Test(groups = "integration") - public void testManualCommit() throws SQLException { - if (isCloud()) return; //TODO: testManualCommit - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - try (Driver.ClickHouseConnectionImpl conn = (Driver.ClickHouseConnectionImpl) newConnection()) { - Assert.assertEquals(conn.getAutoCommit(), true); - Assert.assertNull(conn.getTransaction(), "Should NOT have any transaction"); - conn.setAutoCommit(false); - Assert.assertEquals(conn.getAutoCommit(), false); - JdbcTransaction tx = conn.getJdbcTrasaction(); - Assert.assertNotNull(tx, "Should have transaction"); - Assert.assertEquals(tx.getQueries().size(), 0); - Assert.assertEquals(tx.getSavepoints().size(), 0); - Assert.assertEquals(tx.tx, conn.getTransaction()); - try (ClickHouseStatement stmt = conn.createStatement()) { - stmt.execute("select 1; select 2"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 0); - - Savepoint s = conn.setSavepoint("test"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 1); - conn.releaseSavepoint(s); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 0); - - s = conn.setSavepoint("test"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 1); - stmt.execute("select 3"); - Assert.assertEquals(tx.getQueries().size(), 3); - Assert.assertEquals(tx.getSavepoints().size(), 1); - conn.releaseSavepoint(s); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 0); - - s = conn.setSavepoint("test"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 1); - stmt.execute("select 3; select 4"); - Assert.assertEquals(tx.getQueries().size(), 4); - 
Assert.assertEquals(tx.getSavepoints().size(), 1); - conn.setSavepoint(); - Assert.assertEquals(tx.getQueries().size(), 4); - Assert.assertEquals(tx.getSavepoints().size(), 2); - stmt.execute("select 5"); - Assert.assertEquals(tx.getQueries().size(), 5); - Assert.assertEquals(tx.getSavepoints().size(), 2); - conn.releaseSavepoint(s); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 0); - - conn.setSavepoint(); - conn.setSavepoint("test"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 2); - } - - try (ClickHouseStatement stmt = conn.createStatement()) { - stmt.execute("select 6"); - Assert.assertEquals(tx.getQueries().size(), 3); - Assert.assertEquals(tx.getSavepoints().size(), 2); - } - conn.commit(); - JdbcTransaction newTx = conn.getJdbcTrasaction(); - Assert.assertNotEquals(newTx, tx); - Assert.assertNotNull(tx, "Should have transaction"); - Assert.assertEquals(tx.getQueries().size(), 0); - Assert.assertEquals(tx.getSavepoints().size(), 0); - Assert.assertNotNull(newTx, "Should have transaction"); - Assert.assertEquals(newTx.getQueries().size(), 0); - Assert.assertEquals(newTx.getSavepoints().size(), 0); - Assert.assertEquals(newTx.tx, conn.getTransaction()); - tx = newTx; - - try (ClickHouseStatement stmt = conn.createStatement()) { - Savepoint s = conn.setSavepoint(); - stmt.execute("select 7; select 8"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 1); - } - conn.commit(); - newTx = conn.getJdbcTrasaction(); - Assert.assertNotEquals(newTx, tx); - Assert.assertNotNull(tx, "Should have transaction"); - Assert.assertEquals(tx.getQueries().size(), 0); - Assert.assertEquals(tx.getSavepoints().size(), 0); - Assert.assertNotNull(newTx, "Should have transaction"); - Assert.assertEquals(newTx.getQueries().size(), 0); - Assert.assertEquals(newTx.getSavepoints().size(), 0); - Assert.assertEquals(newTx.tx, conn.getTransaction()); - } - } - - @Test(groups = "integration") - public void testManualRollback() throws SQLException { - if (isCloud()) return; //TODO: testManualRollback - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - try (Driver.ClickHouseConnectionImpl conn = (Driver.ClickHouseConnectionImpl) newConnection()) { - Assert.assertEquals(conn.getAutoCommit(), true); - Assert.assertNull(conn.getTransaction(), "Should NOT have any transaction"); - conn.setAutoCommit(false); - Assert.assertEquals(conn.getAutoCommit(), false); - JdbcTransaction tx = conn.getJdbcTrasaction(); - Assert.assertNotNull(tx, "Should have transaction"); - Assert.assertEquals(tx.getQueries().size(), 0); - Assert.assertEquals(tx.getSavepoints().size(), 0); - Assert.assertEquals(tx.tx, conn.getTransaction()); - try (ClickHouseStatement stmt = conn.createStatement()) { - stmt.execute("select 1; select 2"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 0); - - Savepoint s = conn.setSavepoint("test"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 1); - conn.rollback(s); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 0); - - s = conn.setSavepoint("test"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 1); - stmt.execute("select 3"); - Assert.assertEquals(tx.getQueries().size(), 3); - 
Assert.assertEquals(tx.getSavepoints().size(), 1); - conn.rollback(s); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 0); - - s = conn.setSavepoint("test"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 1); - stmt.execute("select 3; select 4"); - Assert.assertEquals(tx.getQueries().size(), 4); - Assert.assertEquals(tx.getSavepoints().size(), 1); - conn.setSavepoint(); - Assert.assertEquals(tx.getQueries().size(), 4); - Assert.assertEquals(tx.getSavepoints().size(), 2); - stmt.execute("select 5"); - Assert.assertEquals(tx.getQueries().size(), 5); - Assert.assertEquals(tx.getSavepoints().size(), 2); - conn.rollback(s); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 0); - - conn.setSavepoint(); - conn.setSavepoint("test"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 2); - } - - try (ClickHouseStatement stmt = conn.createStatement()) { - stmt.execute("select 6"); - Assert.assertEquals(tx.getQueries().size(), 3); - Assert.assertEquals(tx.getSavepoints().size(), 2); - } - conn.rollback(); - JdbcTransaction newTx = conn.getJdbcTrasaction(); - Assert.assertNotEquals(newTx, tx); - Assert.assertNotNull(tx, "Should have transaction"); - Assert.assertEquals(tx.getQueries().size(), 0); - Assert.assertEquals(tx.getSavepoints().size(), 0); - Assert.assertNotNull(newTx, "Should have transaction"); - Assert.assertEquals(newTx.getQueries().size(), 0); - Assert.assertEquals(newTx.getSavepoints().size(), 0); - Assert.assertEquals(newTx.tx, conn.getTransaction()); - tx = newTx; - - try (ClickHouseStatement stmt = conn.createStatement()) { - Savepoint s = conn.setSavepoint(); - stmt.execute("select 7; select 8"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints().size(), 1); - } - conn.rollback(); - newTx = conn.getJdbcTrasaction(); - Assert.assertNotEquals(newTx, tx); - Assert.assertNotNull(tx, "Should have transaction"); - Assert.assertEquals(tx.getQueries().size(), 0); - Assert.assertEquals(tx.getSavepoints().size(), 0); - Assert.assertNotNull(newTx, "Should have transaction"); - Assert.assertEquals(newTx.getQueries().size(), 0); - Assert.assertEquals(newTx.getSavepoints().size(), 0); - Assert.assertEquals(newTx.tx, conn.getTransaction()); - } - } - - @Test(groups = "integration") - public void testParse() throws SQLException { - Properties props = new Properties(); - String sql = "delete from table where column=1"; - boolean supportsLightWeightDelete = false; - try (ClickHouseConnection conn = newConnection(props)) { - ClickHouseSqlStatement[] stmts = conn.parse(sql, conn.getConfig(), null); - Assert.assertEquals(stmts.length, 1); - Assert.assertEquals(stmts[0].getSQL(), conn.getServerVersion().check("[23.3,)") ? sql - : "ALTER TABLE `table` DELETE where column=1 SETTINGS mutations_sync=1"); - if (conn.getServerVersion().check("[22.8,)")) { - supportsLightWeightDelete = true; - } - } - - if (!supportsLightWeightDelete) { - return; - } - - props.setProperty("custom_settings", "allow_experimental_lightweight_delete=1"); - try (ClickHouseConnection conn = newConnection(props)) { - ClickHouseSqlStatement[] stmts = conn.parse(sql, conn.getConfig(), null); - Assert.assertEquals(stmts.length, 1); - Assert.assertEquals(stmts[0].getSQL(), conn.getServerVersion().check("[23.3,)") ? 
sql - : "ALTER TABLE `table` DELETE where column=1 SETTINGS mutations_sync=1"); - - stmts = conn.parse(sql, conn.getConfig(), conn.unwrap(ClickHouseRequest.class).getSettings()); - Assert.assertEquals(stmts.length, 1); - Assert.assertEquals(stmts[0].getSQL(), sql); - } - } - - @Test(groups = "integration") - public void testSwitchAutoCommit() throws SQLException { - if (isCloud()) return; //TODO: testSwitchAutoCommit - Revisit, see: https://github.com/ClickHouse/clickhouse-java/issues/1747 - try (ClickHouseConnection conn = newConnection()) { - Assert.assertEquals(conn.getAutoCommit(), true); - conn.setAutoCommit(false); - Assert.assertEquals(conn.getAutoCommit(), false); - conn.setAutoCommit(true); - Assert.assertEquals(conn.getAutoCommit(), true); - conn.setAutoCommit(false); - Assert.assertEquals(conn.getAutoCommit(), false); - } - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcTransactionTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcTransactionTest.java deleted file mode 100644 index adb1f4c75..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcTransactionTest.java +++ /dev/null @@ -1,98 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.sql.SQLException; -import java.util.Arrays; -import java.util.Collections; - -import org.testng.Assert; -import org.testng.annotations.Test; - -public class JdbcTransactionTest { - @Test(groups = "unit") - public void testQuery() { - JdbcTransaction tx = new JdbcTransaction(); - Assert.assertNotNull(tx.id); - Assert.assertEquals(tx.getQueries(), Collections.emptyList()); - Assert.assertEquals(tx.getSavepoints(), Collections.emptyList()); - - String queryId = tx.newQuery(null); - Assert.assertNotNull(queryId); - Assert.assertEquals(tx.getQueries(), Collections.singleton(queryId)); - Assert.assertEquals(tx.getSavepoints(), Collections.emptyList()); - - String newQueryId = tx.newQuery(queryId); - Assert.assertNotNull(newQueryId); - Assert.assertNotEquals(newQueryId, queryId); - Assert.assertEquals(tx.getQueries(), Arrays.asList(queryId, newQueryId)); - Assert.assertEquals(tx.getSavepoints(), Collections.emptyList()); - - tx.clear(); - Assert.assertEquals(tx.getQueries(), Collections.emptyList()); - Assert.assertEquals(tx.getSavepoints(), Collections.emptyList()); - - queryId = tx.newQuery(""); - Assert.assertNotNull(queryId); - Assert.assertEquals(tx.getQueries(), Collections.singleton(queryId)); - Assert.assertEquals(tx.getSavepoints(), Collections.emptyList()); - } - - @Test(groups = "unit") - public void testSavepoint() throws SQLException { - JdbcTransaction tx = new JdbcTransaction(); - Assert.assertNotNull(tx.id); - Assert.assertEquals(tx.getQueries(), Collections.emptyList()); - Assert.assertEquals(tx.getSavepoints(), Collections.emptyList()); - - JdbcSavepoint unnamedSavepoint = tx.newSavepoint(null); - JdbcSavepoint s1 = unnamedSavepoint; - Assert.assertEquals(unnamedSavepoint.id, 0); - Assert.assertEquals(unnamedSavepoint.getSavepointId(), 0); - Assert.assertNull(unnamedSavepoint.name, "Un-named savepoint should not have name"); - Assert.assertThrows(SQLException.class, () -> s1.getSavepointName()); - Assert.assertEquals(tx.getQueries(), Collections.emptyList()); - Assert.assertEquals(tx.getSavepoints(), Collections.singleton(unnamedSavepoint)); - - JdbcSavepoint namedSavepoint = tx.newSavepoint("tmp"); - JdbcSavepoint s2 = namedSavepoint; - Assert.assertEquals(namedSavepoint.id, 0); - Assert.assertThrows(SQLException.class, () -> 
s2.getSavepointId()); - Assert.assertEquals(namedSavepoint.name, "tmp"); - Assert.assertEquals(namedSavepoint.getSavepointName(), "tmp"); - Assert.assertEquals(tx.getQueries(), Collections.emptyList()); - Assert.assertEquals(tx.getSavepoints(), Arrays.asList(unnamedSavepoint, namedSavepoint)); - - tx.toSavepoint(namedSavepoint); - Assert.assertEquals(tx.getQueries(), Collections.emptyList()); - Assert.assertEquals(tx.getSavepoints(), Collections.singleton(unnamedSavepoint)); - - tx.toSavepoint(unnamedSavepoint); - Assert.assertEquals(tx.getQueries(), Collections.emptyList()); - Assert.assertEquals(tx.getSavepoints(), Collections.emptyList()); - - tx.clear(); - Assert.assertEquals(tx.getQueries(), Collections.emptyList()); - Assert.assertEquals(tx.getSavepoints(), Collections.emptyList()); - - String queryId = tx.newQuery(null); - JdbcSavepoint s3 = unnamedSavepoint = tx.newSavepoint(null); - Assert.assertEquals(unnamedSavepoint.id, 1); - Assert.assertEquals(unnamedSavepoint.getSavepointId(), 1); - Assert.assertNull(unnamedSavepoint.name, "Un-named savepoint should not have name"); - Assert.assertThrows(SQLException.class, () -> s3.getSavepointName()); - Assert.assertEquals(tx.getQueries().size(), 1); - Assert.assertEquals(tx.getSavepoints(), Collections.singleton(unnamedSavepoint)); - - tx.newQuery(null); - JdbcSavepoint s4 = namedSavepoint = tx.newSavepoint("tmp"); - Assert.assertEquals(namedSavepoint.id, 2); - Assert.assertThrows(SQLException.class, () -> s4.getSavepointId()); - Assert.assertEquals(namedSavepoint.name, "tmp"); - Assert.assertEquals(namedSavepoint.getSavepointName(), "tmp"); - Assert.assertEquals(tx.getQueries().size(), 2); - Assert.assertEquals(tx.getSavepoints(), Arrays.asList(unnamedSavepoint, namedSavepoint)); - - tx.toSavepoint(unnamedSavepoint); - Assert.assertEquals(tx.getQueries(), Collections.singleton(queryId)); - Assert.assertEquals(tx.getSavepoints(), Collections.emptyList()); - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcUrlParserTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcUrlParserTest.java deleted file mode 100644 index 322c7f10a..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/internal/JdbcUrlParserTest.java +++ /dev/null @@ -1,180 +0,0 @@ -package com.clickhouse.jdbc.internal; - -import java.net.URI; -import java.net.URISyntaxException; -import java.sql.SQLException; -import java.util.Properties; - -import com.clickhouse.client.ClickHouseCredentials; -import com.clickhouse.client.ClickHouseLoadBalancingPolicy; -import com.clickhouse.client.ClickHouseNode; -import com.clickhouse.client.ClickHouseProtocol; -import com.clickhouse.client.config.ClickHouseDefaults; -import com.clickhouse.jdbc.JdbcUrlParser; -import com.clickhouse.jdbc.JdbcUrlParser.ConnectionInfo; - -import org.testng.Assert; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -public class JdbcUrlParserTest { - @Test(groups = "unit") - public void testParseInvalidUri() { - Assert.assertThrows(SQLException.class, () -> JdbcUrlParser.parse(null, null)); - Assert.assertThrows(SQLException.class, () -> JdbcUrlParser.parse("", null)); - Assert.assertThrows(SQLException.class, () -> JdbcUrlParser.parse("some_invalid_uri", null)); - Assert.assertThrows(SQLException.class, () -> JdbcUrlParser.parse("jdbc:clickhouse:.", null)); - Assert.assertThrows(SQLException.class, - () -> JdbcUrlParser.parse("jdbc:clickhouse://", null)); - Assert.assertThrows(SQLException.class, - () -> 
JdbcUrlParser.parse("jdbc:clickhouse:///db", null)); - Assert.assertThrows(SQLException.class, - () -> JdbcUrlParser.parse("clickhouse://a:b:c@aaa", null)); - Assert.assertThrows(SQLException.class, - () -> JdbcUrlParser.parse("clickhouse://::1:1234/a", null)); - } - - @Test(groups = "unit") - public void testParseIpv6() throws SQLException, URISyntaxException { - ConnectionInfo info = JdbcUrlParser.parse("jdbc:clickhouse://[::1]:1234", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://[::1]:1234")); - Assert.assertEquals(info.getServer(), - ClickHouseNode.builder().host("[::1]").port(ClickHouseProtocol.HTTP, 1234).build()); - - info = JdbcUrlParser.parse("jdbc:clickhouse://[::1]/", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://[::1]:8123")); - Assert.assertEquals(info.getServer(), - ClickHouseNode.builder().host("[::1]").port(ClickHouseProtocol.HTTP).build()); - - info = JdbcUrlParser.parse("jdbc:clickhouse://[::1]/dbdb", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://[::1]:8123/dbdb")); - Assert.assertEquals(info.getServer(), - ClickHouseNode.builder().host("[::1]").port(ClickHouseProtocol.HTTP).database("dbdb") - .build()); - } - - @Test(groups = "unit") - public void testParseAbbrevation() throws SQLException, URISyntaxException { - ConnectionInfo info = JdbcUrlParser.parse("jdbc:ch://localhost", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://localhost:8123")); - Assert.assertEquals(info.getServer(), - ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP).build()); - - info = JdbcUrlParser.parse("jdbc:ch:grpc://localhost", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:grpc://localhost:9100")); - Assert.assertEquals(info.getServer(), - ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.GRPC).build()); - - info = JdbcUrlParser.parse("jdbc:ch:https://:letmein@[::1]:3218/db1?user=aaa", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://[::1]:3218/db1?ssl=true&sslmode=STRICT")); - Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("[::1]") - .port(ClickHouseProtocol.HTTP, 3218) - .database("db1") - .credentials(ClickHouseCredentials.fromUserAndPassword("aaa", "letmein")) - .addOption("ssl", "true").addOption("sslmode", "STRICT").build()); - Assert.assertEquals(info.getServer().getCredentials().orElse(null), - ClickHouseCredentials.fromUserAndPassword("aaa", "letmein")); - } - - @Test(groups = "unit") - public void testParse() throws SQLException, URISyntaxException { - ConnectionInfo info = JdbcUrlParser.parse("jdbc:ch://localhost", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://localhost:8123")); - Assert.assertEquals(info.getServer(), - ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP).build()); - - info = JdbcUrlParser.parse("jdbc:ch:grpc://localhost/default", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:grpc://localhost:9100/default")); - Assert.assertEquals(info.getServer(), - 
ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.GRPC) - .database((String) ClickHouseDefaults.DATABASE - .getEffectiveDefaultValue()) - .build()); - - info = JdbcUrlParser.parse("jdbc:ch:https://:letmein@127.0.0.1:3218/db1", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://127.0.0.1:3218/db1?ssl=true&sslmode=STRICT")); - Assert.assertEquals(info.getServer(), ClickHouseNode.builder().host("127.0.0.1") - .port(ClickHouseProtocol.HTTP, 3218).database("db1") - .credentials(ClickHouseCredentials - .fromUserAndPassword((String) ClickHouseDefaults.USER - .getEffectiveDefaultValue(), "letmein")) - .addOption("ssl", "true").addOption("sslmode", "STRICT") - .build()); - } - - @Test(groups = "unit") - public void testParseWithProperties() throws SQLException, URISyntaxException { - ConnectionInfo info = JdbcUrlParser.parse("jdbc:clickhouse://localhost/", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://localhost:8123")); - Assert.assertEquals(info.getServer(), - ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP).build()); - - info = JdbcUrlParser.parse("jdbc:clickhouse://localhost:4321/ndb", null); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://localhost:4321/ndb")); - Assert.assertEquals(info.getServer(), - ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP, 4321) - .database("ndb").build()); - - Properties props = new Properties(); - props.setProperty("database", "db1"); - info = JdbcUrlParser.parse("jdbc:clickhouse://me@localhost:1234/mydb?password=123", props); - Assert.assertEquals(info.getServer().toUri(JdbcUrlParser.JDBC_CLICKHOUSE_PREFIX), - new URI("jdbc:clickhouse:http://localhost:1234/db1")); - Assert.assertEquals(info.getServer(), - ClickHouseNode.builder().host("localhost").port(ClickHouseProtocol.HTTP, 1234) - .database("db1") - .credentials(ClickHouseCredentials.fromUserAndPassword("me", "123")) - .build()); - Assert.assertEquals(info.getProperties().getProperty("database"), "db1"); - } - - @Test(groups = "unit") - public void testParseCredentials() throws SQLException { - Properties props = new Properties(); - props.setProperty("user", "default1"); - props.setProperty("password", "password1"); - ConnectionInfo connInfo = JdbcUrlParser.parse("jdbc:clickhouse://user:a:passwd@foo.ch/test", - props); - ClickHouseNode server = connInfo.getServer(); - Assert.assertEquals(connInfo.getDefaultCredentials().getUserName(), "default1"); - Assert.assertEquals(connInfo.getDefaultCredentials().getPassword(), "password1"); - Assert.assertEquals(server.getCredentials().get().getUserName(), "user"); - Assert.assertEquals(server.getCredentials().get().getPassword(), "a:passwd"); - - server = JdbcUrlParser.parse("jdbc:clickhouse://let%40me%3Ain:let%40me%3Ain@foo.ch", null) - .getServer(); - Assert.assertEquals(server.getCredentials().get().getUserName(), "let@me:in"); - Assert.assertEquals(server.getCredentials().get().getPassword(), "let@me:in"); - } - - @Test(groups = "unit", dataProvider = "testParseUrlPropertiesProvider") - public void testParseUrlProperties(String url, int numOfNodes) throws SQLException { - - ConnectionInfo info = ClickHouseJdbcUrlParser.parse(url, null); - Assert.assertEquals(info.getNodes().getNodes().size(), numOfNodes); - Assert.assertEquals(info.getNodes().getPolicy().getClass().getSimpleName(), 
"FirstAlivePolicy"); - for (ClickHouseNode n : info.getNodes().getNodes()) { - Assert.assertEquals(n.getOptions().get("connect_timeout"), "10000"); - Assert.assertEquals(n.getOptions().get("http_connection_provider"), "HTTP_CLIENT"); - } - } - - @DataProvider(name = "testParseUrlPropertiesProvider") - public static Object[][] testParseUrlPropertiesProvider() { - return new Object[][] { - { "jdbc:clickhouse://host1:8123,host2:8123,host3:8123/db1?http_connection_provider=HTTP_CLIENT&load_balancing_policy=firstAlive&connect_timeout=10000", 3 }, - { "jdbc:clickhouse:http://host1:8123,host2:8123,host3:8123/db1?http_connection_provider=HTTP_CLIENT&load_balancing_policy=firstAlive&connect_timeout=10000", 3 } - }; - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlParserTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlParserTest.java deleted file mode 100644 index d637c3923..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlParserTest.java +++ /dev/null @@ -1,912 +0,0 @@ -package com.clickhouse.jdbc.parser; - -import org.testng.Assert; -import org.testng.annotations.Test; - -import static org.testng.Assert.assertEquals; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import com.clickhouse.client.ClickHouseConfig; - -public class ClickHouseSqlParserTest { - private ClickHouseSqlStatement[] parse(String sql) { - return ClickHouseSqlParser.parse(sql, new ClickHouseConfig()); - } - - private String loadSql(String file) { - InputStream inputStream = ClickHouseSqlParserTest.class.getResourceAsStream("/sqls/" + file); - - StringBuilder sql = new StringBuilder(); - try (BufferedReader br = new BufferedReader(new InputStreamReader(inputStream))) { - String line; - while ((line = br.readLine()) != null) { - sql.append(line).append("\n"); - } - } catch (IOException e) { - throw new IllegalArgumentException(e); - } - - return sql.toString(); - } - - private ClickHouseSqlStatement checkSingleStatement(ClickHouseSqlStatement[] stmts, String sql) { - return checkSingleStatement(stmts, sql, StatementType.UNKNOWN, ClickHouseSqlStatement.DEFAULT_DATABASE, - ClickHouseSqlStatement.DEFAULT_TABLE); - } - - private ClickHouseSqlStatement checkSingleStatement(ClickHouseSqlStatement[] stmts, String sql, - StatementType stmtType) { - return checkSingleStatement(stmts, sql, stmtType, ClickHouseSqlStatement.DEFAULT_DATABASE, - ClickHouseSqlStatement.DEFAULT_TABLE); - } - - private ClickHouseSqlStatement checkSingleStatement(ClickHouseSqlStatement[] stmts, String sql, - StatementType stmtType, - String database, String table) { - assertEquals(stmts.length, 1); - - ClickHouseSqlStatement s = stmts[0]; - assertEquals(s.getSQL(), sql); - assertEquals(s.getStatementType(), stmtType); - assertEquals(s.getDatabaseOrDefault(null), database); - assertEquals(s.getTable(), table); - - return stmts[0]; - } - - @Test(groups = "unit") - public void testParseNonSql() throws ParseException { - String sql; - - assertEquals(parse(sql = null), - new ClickHouseSqlStatement[] { - new ClickHouseSqlStatement(sql, StatementType.UNKNOWN) }); - assertEquals(parse(sql = ""), - new ClickHouseSqlStatement[] { - new 
ClickHouseSqlStatement(sql, StatementType.UNKNOWN) }); - - checkSingleStatement(parse(sql = "invalid sql"), sql); - checkSingleStatement(parse(sql = "-- some comments"), sql); - checkSingleStatement(parse(sql = "/*********\r\n\r\t some ***** comments*/"), sql); - - checkSingleStatement(parse(sql = "select"), sql, StatementType.UNKNOWN); - checkSingleStatement(parse(sql = "select ()"), sql, StatementType.UNKNOWN); - checkSingleStatement(parse(sql = "select (()"), sql, StatementType.UNKNOWN); - checkSingleStatement(parse(sql = "select [[]"), sql, StatementType.UNKNOWN); - // checkSingleStatement(parse(sql = "select 1 select"), sql, - // StatementType.UNKNOWN); - } - - @Test(groups = "unit") - public void testAlterStatement() { - String sql; - - checkSingleStatement(parse(sql = "ALTER TABLE alter_test ADD COLUMN Added0 UInt32"), sql, - StatementType.ALTER, - "system", "alter_test"); - checkSingleStatement( - parse(sql = "ALTER TABLE test_db.test_table UPDATE a = 1, \"b\" = '2', `c`=3.3 WHERE d=123 and e=456"), - sql, StatementType.ALTER_UPDATE, "test_db", "test_table"); - checkSingleStatement(parse(sql = "ALTER TABLE tTt on cluster 'cc' delete WHERE d=123 and e=456"), sql, - StatementType.ALTER_DELETE, "system", "tTt"); - checkSingleStatement(parse(sql = "ALTER USER user DEFAULT ROLE role1, role2"), sql, - StatementType.ALTER); - } - - @Test(groups = "unit") - public void testAttachStatement() { - String sql; - - checkSingleStatement(parse(sql = "ATTACH TABLE IF NOT EXISTS t.t ON CLUSTER cluster"), sql, - StatementType.ATTACH); - } - - @Test(groups = "unit") - public void testCheckStatement() { - String sql; - - checkSingleStatement(parse(sql = "check table a"), sql, StatementType.CHECK); - checkSingleStatement(parse(sql = "check table a.a"), sql, StatementType.CHECK); - } - - @Test(groups = "unit") - public void testCreateStatement() { - String sql; - - checkSingleStatement(parse(sql = "create table a(a String) engine=Memory"), sql, StatementType.CREATE); - } - - @Test(groups = "unit") - public void testDeleteStatement() { - String sql; - - checkSingleStatement(parse(sql = "delete from a"), sql, StatementType.DELETE, "system", "a"); - checkSingleStatement(parse(sql = "delete from c.a where upper(a)=upper(lower(b))"), sql, - StatementType.DELETE, - "c", "a"); - } - - @Test(groups = "unit") - public void testDescribeStatement() { - String sql; - - checkSingleStatement(parse(sql = "desc a"), sql, StatementType.DESCRIBE, "system", "columns"); - checkSingleStatement(parse(sql = "desc table a"), sql, StatementType.DESCRIBE, "system", "columns"); - checkSingleStatement(parse(sql = "describe table a.a"), sql, StatementType.DESCRIBE, "a", "columns"); - checkSingleStatement(parse(sql = "desc table table"), sql, StatementType.DESCRIBE, "system", "columns"); - // fix issue #614 - checkSingleStatement(parse(sql = "desc t1 t2"), sql, StatementType.DESCRIBE, "system", "columns"); - checkSingleStatement(parse(sql = "desc table t1 t2"), sql, StatementType.DESCRIBE, "system", "columns"); - checkSingleStatement(parse(sql = "desc table t1 as `t2`"), sql, StatementType.DESCRIBE, "system", - "columns"); - } - - @Test(groups = "unit") - public void testDetachStatement() { - String sql; - - checkSingleStatement(parse(sql = "detach TABLE t"), sql, StatementType.DETACH); - checkSingleStatement(parse(sql = "detach TABLE if exists t.t on cluster 'cc'"), sql, - StatementType.DETACH); - } - - @Test(groups = "unit") - public void testDropStatement() { - String sql; - - checkSingleStatement(parse(sql = "drop TEMPORARY 
table t"), sql, StatementType.DROP); - checkSingleStatement(parse(sql = "drop TABLE if exists t.t on cluster 'cc'"), sql, StatementType.DROP); - } - - @Test(groups = "unit") - public void testExistsStatement() { - String sql; - - checkSingleStatement(parse(sql = "EXISTS TEMPORARY TABLE a"), sql, StatementType.EXISTS); - checkSingleStatement(parse(sql = "EXISTS TABLE a.a"), sql, StatementType.EXISTS); - checkSingleStatement(parse(sql = "EXISTS DICTIONARY c"), sql, StatementType.EXISTS); - } - - @Test(groups = "unit") - public void testExplainStatement() { - String sql; - - checkSingleStatement(parse( - sql = "EXPLAIN SELECT sum(number) FROM numbers(10) UNION ALL SELECT sum(number) FROM numbers(10) ORDER BY sum(number) ASC FORMAT TSV"), - sql, StatementType.EXPLAIN); - checkSingleStatement(parse(sql = "EXPLAIN AST SELECT 1"), sql, StatementType.EXPLAIN); - checkSingleStatement(parse( - sql = "EXPLAIN SYNTAX SELECT * FROM system.numbers AS a, system.numbers AS b, system.numbers AS c"), - sql, StatementType.EXPLAIN); - } - - @Test(groups = "unit") - public void testGrantStatement() { - String sql; - - checkSingleStatement(parse(sql = "GRANT SELECT(x,y) ON db.table TO john WITH GRANT OPTION"), sql, - StatementType.GRANT); - checkSingleStatement(parse(sql = "GRANT INSERT(x,y) ON db.table TO john"), sql, StatementType.GRANT); - } - - @Test(groups = "unit") - public void testInsertStatement() throws ParseException { - String sql; - - ClickHouseSqlStatement s = parse(sql = "insert into table test(a,b) Values (1,2)")[0]; - assertEquals(sql.substring(s.getStartPosition("values"), s.getEndPosition("VALUES")), "Values"); - assertEquals(sql.substring(0, s.getEndPosition("values")) + " (1,2)", sql); - - Pattern values = Pattern.compile("(?i)VALUES[\\s]*\\("); - int valuePosition = -1; - Matcher matcher = values.matcher(sql); - if (matcher.find()) { - valuePosition = matcher.start(); - } - assertEquals(s.getStartPosition("values"), valuePosition); - - s = checkSingleStatement(parse(sql = "insert into function null('a UInt8') values(1)"), sql, - StatementType.INSERT); - Assert.assertEquals(s.getContentBetweenKeywords(ClickHouseSqlStatement.KEYWORD_VALUES_START, - ClickHouseSqlStatement.KEYWORD_VALUES_END, 1), "1"); - s = checkSingleStatement(parse(sql = "insert into function null('a UInt8') values(1)(2)"), sql, - StatementType.INSERT); - Assert.assertEquals(s.getContentBetweenKeywords(ClickHouseSqlStatement.KEYWORD_VALUES_START, - ClickHouseSqlStatement.KEYWORD_VALUES_END, 1), ""); - checkSingleStatement(parse(sql = "insert into function null('a UInt8') select * from number(10)"), sql, - StatementType.INSERT); - checkSingleStatement(parse(sql = "insert into test2(a,b) values('values(',',')"), sql, - StatementType.INSERT, "system", "test2"); - checkSingleStatement(parse(sql = "INSERT INTO table t(a, b, c) values('1', ',', 'ccc')"), sql, - StatementType.INSERT, "system", "t"); - checkSingleStatement(parse(sql = "INSERT INTO table t(a, b, c) values('1', 2, 'ccc') (3,2,1)"), sql, - StatementType.INSERT, "system", "t"); - checkSingleStatement(parse(sql = "INSERT INTO table s.t select * from ttt"), sql, StatementType.INSERT, - "s", "t"); - checkSingleStatement(parse(sql = "INSERT INTO insert_select_testtable (* EXCEPT(b)) Values (2, 2)"), - sql, StatementType.INSERT, "system", "insert_select_testtable"); - checkSingleStatement( - parse(sql = "insert into `test` (num)SETTINGS input_format_null_as_default = 1 values (?)"), - sql, StatementType.INSERT, "system", "test"); - checkSingleStatement( - parse(sql = 
"insert into `test` (id, name) SETTINGS input_format_null_as_default = 1 values (1,2)(3,4),(5,6)"), - sql, StatementType.INSERT, "system", "test"); - s = checkSingleStatement( - parse(sql = "insert into `test`"), sql, StatementType.INSERT, "system", "test"); - Assert.assertEquals(s.getContentBetweenKeywords(ClickHouseSqlStatement.KEYWORD_TABLE_COLUMNS_START, - ClickHouseSqlStatement.KEYWORD_TABLE_COLUMNS_END, 1), ""); - s = checkSingleStatement( - parse(sql = "insert into `test` (id, name) format RowBinary"), - sql, StatementType.INSERT, "system", "test"); - Assert.assertEquals(s.getContentBetweenKeywords(ClickHouseSqlStatement.KEYWORD_TABLE_COLUMNS_START, - ClickHouseSqlStatement.KEYWORD_TABLE_COLUMNS_END, 1), "id, name"); - } - - @Test(groups = "unit") - public void testKillStatement() { - String sql; - - checkSingleStatement(parse(sql = "KILL QUERY WHERE query_id='2-857d-4a57-9ee0-327da5d60a90'"), sql, - StatementType.KILL); - checkSingleStatement(parse( - sql = "KILL MUTATION WHERE database = 'default' AND table = 'table' AND mutation_id = 'mutation_3.txt' SYNC"), - sql, StatementType.KILL); - } - - @Test(groups = "unit") - public void testOptimizeStatement() { - String sql; - - checkSingleStatement( - parse(sql = "OPTIMIZE TABLE a ON CLUSTER cluster PARTITION ID 'partition_id' FINAL"), - sql, - StatementType.OPTIMIZE); - } - - @Test(groups = "unit") - public void testRenameStatement() { - String sql; - - checkSingleStatement(parse(sql = "RENAME TABLE table1 TO table2, table3 TO table4 ON CLUSTER cluster"), - sql, - StatementType.RENAME); - checkSingleStatement(parse( - sql = "RENAME TABLE db1.table1 TO db2.table2, db2.table3 to db2.table4, db3.table5 to db2.table6 ON CLUSTER 'c'"), - sql, StatementType.RENAME); - } - - @Test(groups = "unit") - public void testRevokeStatement() { - String sql; - - checkSingleStatement(parse(sql = "REVOKE SELECT ON accounts.* FROM john"), sql, StatementType.REVOKE); - checkSingleStatement(parse(sql = "REVOKE SELECT(wage) ON accounts.staff FROM mira"), sql, - StatementType.REVOKE); - } - - @Test(groups = "unit") - public void testSelectStatement() { - String sql; - - assertEquals(parse(sql = "select\n1"), new ClickHouseSqlStatement[] { new ClickHouseSqlStatement(sql, - StatementType.SELECT, null, null, "unknown", null, null, null, null, null, null, null, null, null) }); - assertEquals(parse(sql = "select\r\n1"), new ClickHouseSqlStatement[] { new ClickHouseSqlStatement(sql, - StatementType.SELECT, null, null, "unknown", null, null, null, null, null, null, null, null, null) }); - - assertEquals(parse(sql = "select 314 limit 5\nFORMAT JSONCompact;"), - new ClickHouseSqlStatement[] { - new ClickHouseSqlStatement("select 314 limit 5\nFORMAT JSONCompact", - StatementType.SELECT, null, null, "unknown", null, null, null, - "JSONCompact", null, null, Collections.singletonMap("FORMAT", 19), null, null) }); - - checkSingleStatement(parse(sql = "select (())"), sql, StatementType.SELECT); - checkSingleStatement(parse(sql = "select []"), sql, StatementType.SELECT); - checkSingleStatement(parse(sql = "select [[]]"), sql, StatementType.SELECT); - checkSingleStatement(parse(sql = "select *"), sql, StatementType.SELECT); - checkSingleStatement(parse(sql = "select timezone()"), sql, StatementType.SELECT); - checkSingleStatement(parse(sql = "select @@version, $version"), sql, StatementType.SELECT); - checkSingleStatement(parse(sql = "select * from jdbc('db', 'schema', 'select 1')"), sql, - StatementType.SELECT, - "system", "jdbc"); - checkSingleStatement(parse(sql = 
"select 1 as a1, a.a as a2, aa(a1, a2) a3, length(a3) as a4 from x"), - sql, - StatementType.SELECT, "system", "x"); - checkSingleStatement(parse(sql = "select x.* from (select [1,2] a, (1,2,3) b, a[1], b.2) x"), sql, - StatementType.SELECT, "system", "x"); - checkSingleStatement(parse(sql = "select (3, [[1,2],[3,4]]) as a, (a.2)[2][1]"), sql, - StatementType.SELECT); - checkSingleStatement( - parse(sql = "select 1,1.1,'\"''`a' a, \"'`\"\"a\" as b, (1 + `a`.a) c, null, inf i, nan as n"), - sql, - StatementType.SELECT); - checkSingleStatement(parse(sql = "select 1 as select"), sql, StatementType.SELECT); - checkSingleStatement(parse(sql = "select 1, 2 a, 3 as b, 1+1-2*3/4, *, c.* from c a"), sql, - StatementType.SELECT, "system", "c"); - checkSingleStatement(parse(sql = "select 1 as select"), sql, StatementType.SELECT); - checkSingleStatement(parse( - sql = " -- cc\nselect 1 as `a.b`, a, 1+1, b from \"a\".`b` inner join a on a.abb/* \n\r\n1*/\n=2 and a.abb = c.a and a=1 and (k is null and j not in(1,2))"), - sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "SELECT idx, s FROM test.mymetadata WHERE idx = ?"), sql, - StatementType.SELECT, - "test", "mymetadata"); - checkSingleStatement(parse(sql = "WITH 2 AS two SELECT two * two"), sql, StatementType.SELECT); - checkSingleStatement(parse( - sql = "SELECT i, array(toUnixTimestamp(dt_server[1])), array(toUnixTimestamp(dt_berlin[1])), array(toUnixTimestamp(dt_lax[1])) FROM test.fun_with_timezones_array"), - sql, StatementType.SELECT, "test", "fun_with_timezones_array"); - checkSingleStatement(parse(sql = "SELECT SUM(x) FROM t WHERE y = ? GROUP BY ?"), sql, - StatementType.SELECT, - "system", "t"); - - assertEquals(parse(sql = loadSql("issue-441_with-totals.sql")), - new ClickHouseSqlStatement[] { new ClickHouseSqlStatement(sql, StatementType.SELECT, - null, null, "unknown", null, null, null, null, null, null, new HashMap() { - { - put("TOTALS", 208); - } - }, null, null) }); - assertEquals(parse(sql = loadSql("issue-555_custom-format.sql")), - new ClickHouseSqlStatement[] { - new ClickHouseSqlStatement(sql, StatementType.SELECT, null, null, "wrd", null, null, null, - "CSVWithNames", null, null, Collections.singletonMap("FORMAT", 1557), null, null) }); - assertEquals(parse(sql = loadSql("with-clause.sql")), - new ClickHouseSqlStatement[] { - new ClickHouseSqlStatement(sql, StatementType.SELECT, null, null, "unknown", null, null, null, - null, null, null, null, null, null) }); - } - - @Test(groups = "unit") - public void testSetStatement() { - String sql; - - checkSingleStatement(parse(sql = "SET profile = 'my-profile', mutations_sync=1"), sql, - StatementType.SET); - checkSingleStatement(parse(sql = "SET DEFAULT ROLE role1, role2, role3 TO user"), sql, - StatementType.SET); - } - - @Test(groups = "unit") - public void testShowStatement() { - String sql; - - checkSingleStatement(parse(sql = "SHOW DATABASES LIKE '%de%'"), sql, StatementType.SHOW, "system", - "databases"); - checkSingleStatement(parse(sql = "show tables from db"), sql, StatementType.SHOW, "system", "tables"); - checkSingleStatement(parse(sql = "show dictionaries from db"), sql, StatementType.SHOW, "system", - "dictionaries"); - } - - @Test(groups = "unit") - public void testSystemStatement() { - String sql; - - checkSingleStatement( - parse(sql = "SYSTEM DROP REPLICA 'replica_name' FROM ZKPATH '/path/to/table/in/zk'"), - sql, - StatementType.SYSTEM); - checkSingleStatement(parse(sql = "SYSTEM RESTART REPLICA db.replicated_merge_tree_family_table_name"), - 
sql, - StatementType.SYSTEM); - } - - @Test(groups = "unit") - public void testTruncateStatement() { - String sql; - - checkSingleStatement(parse(sql = "truncate table a.b"), sql, StatementType.TRUNCATE, "a", "b"); - } - - @Test(groups = "unit") - public void testUpdateStatement() { - String sql; - - checkSingleStatement(parse(sql = "update a set a='1'"), sql, StatementType.UPDATE, - ClickHouseSqlStatement.DEFAULT_DATABASE, "a"); - checkSingleStatement(parse(sql = "update a.a set `a`=2 where upper(a)=upper(lower(b))"), sql, - StatementType.UPDATE, "a", "a"); - } - - @Test(groups = "unit") - public void testUseStatement() throws ParseException { - String sql; - checkSingleStatement(parse(sql = "use system"), sql, StatementType.USE); - } - - @Test(groups = "unit") - public void testWatchStatement() throws ParseException { - String sql; - checkSingleStatement(parse(sql = "watch system.processes"), sql, StatementType.WATCH); - } - - @Test(groups = "unit") - public void testComments() throws ParseException { - String sql; - checkSingleStatement(parse(sql = "select\n--something\n//else\n1/*2*/ from a.b"), sql, - StatementType.SELECT, - "a", "b"); - - checkSingleStatement(parse(sql = "select 1/*/**/*/ from a.b"), sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select 1/*/1/**/*2*/ from a.b"), sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "SELECT /*/**/*/ 1 from a.b"), sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "SELECT /*a/*b*/c*/ 1 from a.b"), sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "SELECT /*ab/*cd*/ef*/ 1 from a.b"), sql, StatementType.SELECT, "a", - "b"); - } - - @Test(groups = "unit") - public void testMultipleStatements() throws ParseException { - assertEquals(parse("use ab;;;select 1; ;\t;\r;\n"), - new ClickHouseSqlStatement[] { - new ClickHouseSqlStatement("use ab", StatementType.USE, null, "ab", - null, null, null, null, null, null, null, null, null, null), - new ClickHouseSqlStatement("select 1", StatementType.SELECT) }); - assertEquals(parse("select * from \"a;1\".`b;c`;;;select 1 as `a ; a`; ;\t;\r;\n"), - new ClickHouseSqlStatement[] { - new ClickHouseSqlStatement("select * from \"a;1\".`b;c`", - StatementType.SELECT, null, "a;1", "b;c", null, null, null, null, null, null, null, - null, null), - new ClickHouseSqlStatement("select 1 as `a ; a`", - StatementType.SELECT) }); - } - - @Test(groups = "unit") - public void testAlias() throws ParseException { - String sql; - checkSingleStatement(parse(sql = "select 1 as c, 2 b"), sql, StatementType.SELECT); - checkSingleStatement(parse(sql = "select 1 from a.b c"), sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select 1 select from a.b c"), sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select 1 from (select 2) b"), sql, StatementType.SELECT, "system", - "b"); - checkSingleStatement(parse(sql = "select 1 from (select 2) as from"), sql, StatementType.SELECT, - "system", - "from"); - checkSingleStatement(parse(sql = "select 1 from a.b c1, b.a c2"), sql, StatementType.SELECT, "a", "b"); - } - - @Test(groups = "unit") - public void testExpression() throws ParseException { - String sql; - checkSingleStatement(parse(sql = "SELECT a._ from a.b"), sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "SELECT 2 BETWEEN 1 + 1 AND 3 - 1 from a.b"), sql, - StatementType.SELECT, "a", - "b"); - checkSingleStatement(parse(sql = "SELECT CASE WHEN 1 THEN 2 WHEN 3 THEN 4 
ELSE 5 END from a.b"), sql, - StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select (1,2) a1, a1.1, a1 .1, a1 . 1 from a.b"), sql, - StatementType.SELECT, - "a", "b"); - checkSingleStatement(parse(sql = "select -.0, +.0, -a from a.b"), sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select 1 and `a`.\"b\" c1, c1 or (c2 and c3), c4 ? c5 : c6 from a.b"), - sql, - StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select [[[1,2],[3,4],[5,6]]] a, a[1][1][2] from a.b"), sql, - StatementType.SELECT, "a", "b"); - checkSingleStatement( - parse(sql = "select [[[[]]]], a[1][2][3], ([[1]] || [[2]])[2][1] ,func(1,2) [1] [2] [ 3 ] from a.b"), - sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select c.c1, c.c2 c, c.c3 as cc, c.c4.1.2 from a.b"), sql, - StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select - (select (1,).1) from a.b"), sql, StatementType.SELECT, "a", - "b"); - checkSingleStatement(parse(sql = "select 1.1e1,(1) . 1 , ((1,2)).1 .2 . 3 from a.b"), sql, - StatementType.SELECT, - "a", "b"); - checkSingleStatement(parse(sql = "select a.b.c1, c1, b.c1 from a.b"), sql, StatementType.SELECT, "a", - "b"); - checkSingleStatement(parse(sql = "select date'2020-02-04', timestamp '2020-02-04' from a.b"), sql, - StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select count (), sum(c1), fake(a1, count(), (1+1)) from a.b"), sql, - StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select {}, {'a':'b', 'c':'1'} from a.b"), sql, StatementType.SELECT, - "a", - "b"); - checkSingleStatement(parse(sql = "select [], [1,2], [ [1,2], [3,4] ] from a.b"), sql, - StatementType.SELECT, "a", - "b"); - checkSingleStatement(parse(sql = "select 1+1-1*1/1 from a.b"), sql, StatementType.SELECT, "a", "b"); - checkSingleStatement(parse(sql = "select (1+(1-1)*1/1)-1 from a.b"), sql, StatementType.SELECT, "a", - "b"); - checkSingleStatement(parse(sql = "select (1+(1+(-1))*1/1)-(select (1,).1) from a.b"), sql, - StatementType.SELECT, - "a", "b"); - } - - @Test(groups = "unit") - public void testFormat() throws ParseException { - String sql = "select 1 as format, format csv"; - ClickHouseSqlStatement[] stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasFormat(), false); - assertEquals(stmts[0].getFormat(), null); - - sql = "select 1 format csv"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasFormat(), true); - assertEquals(stmts[0].getFormat(), "csv"); - - sql = "select 1 a, a.a b, a.a.a c, e.* except(e1), e.e.* except(e2), 'aaa' format, format csv from numbers(2) FORMAT CSVWithNames"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasFormat(), true); - assertEquals(stmts[0].getFormat(), "CSVWithNames"); - } - - @Test(groups = "unit") - public void testInFile() throws ParseException { - String sql = "insert into mytable from infile 'inputs*.csv'"; - ClickHouseSqlStatement[] stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasCompressAlgorithm(), false); - assertEquals(stmts[0].getCompressAlgorithm(), null); - assertEquals(stmts[0].hasCompressLevel(), false); - assertEquals(stmts[0].getCompressLevel(), null); - assertEquals(stmts[0].hasFormat(), false); - assertEquals(stmts[0].getFormat(), null); - 
assertEquals(stmts[0].hasFile(), true); - assertEquals(stmts[0].getFile(), "'inputs*.csv'"); - - sql = "insert into mytable from infile 'inputs{1,2,3}.bin' format RowBinary"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].getCompressAlgorithm(), null); - assertEquals(stmts[0].getCompressLevel(), null); - assertEquals(stmts[0].hasFormat(), true); - assertEquals(stmts[0].getFormat(), "RowBinary"); - assertEquals(stmts[0].hasFile(), true); - assertEquals(stmts[0].getFile(), "'inputs{1,2,3}.bin'"); - - sql = "insert into mytable from infile 'inputs{1,2,3}.n.gz' compression 'gzip' format Native"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].getCompressAlgorithm(), "'gzip'"); - assertEquals(stmts[0].getCompressLevel(), null); - assertEquals(stmts[0].hasFormat(), true); - assertEquals(stmts[0].getFormat(), "Native"); - assertEquals(stmts[0].hasFile(), true); - assertEquals(stmts[0].getFile(), "'inputs{1,2,3}.n.gz'"); - - // actually ClickHouse does not support compression level for infile - sql = "insert into mytable from infile 'inputs{1,2,3}.n.gz' compression 'gzip' level 3 settings a=1, b='2' format Native"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasCompressAlgorithm(), true); - assertEquals(stmts[0].getCompressAlgorithm(), "'gzip'"); - assertEquals(stmts[0].hasCompressLevel(), true); - assertEquals(stmts[0].getCompressLevel(), "3"); - assertEquals(stmts[0].hasFormat(), true); - assertEquals(stmts[0].getFormat(), "Native"); - assertEquals(stmts[0].hasFile(), true); - assertEquals(stmts[0].getFile(), "'inputs{1,2,3}.n.gz'"); - - sql = "select * from infile"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].getCompressAlgorithm(), null); - assertEquals(stmts[0].getCompressLevel(), null); - assertEquals(stmts[0].getFormat(), null); - assertEquals(stmts[0].getFile(), null); - - sql = "insert into mytable from infile :a compression ? level ? 
format Native"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].getCompressAlgorithm(), "?"); - assertEquals(stmts[0].getCompressLevel(), "?"); - assertEquals(stmts[0].getFormat(), "Native"); - assertEquals(stmts[0].getFile(), ":a"); - } - - @Test(groups = "unit") - public void testOutFile() throws ParseException { - String sql = "select 1 into outfile '1.txt'"; - ClickHouseSqlStatement[] stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasCompressAlgorithm(), false); - assertEquals(stmts[0].getCompressAlgorithm(), null); - assertEquals(stmts[0].hasCompressLevel(), false); - assertEquals(stmts[0].getCompressLevel(), null); - assertEquals(stmts[0].hasFormat(), false); - assertEquals(stmts[0].getFormat(), null); - assertEquals(stmts[0].hasFile(), true); - assertEquals(stmts[0].getFile(), "'1.txt'"); - - sql = "select * from numbers(10) settings max_result_rows=1 into outfile 'a.csv.gz' compression 'gzip' level 5 format CSV"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasCompressAlgorithm(), true); - assertEquals(stmts[0].getCompressAlgorithm(), "'gzip'"); - assertEquals(stmts[0].hasCompressLevel(), true); - assertEquals(stmts[0].getCompressLevel(), "5"); - assertEquals(stmts[0].hasFormat(), true); - assertEquals(stmts[0].getFormat(), "CSV"); - assertEquals(stmts[0].hasFile(), true); - assertEquals(stmts[0].getFile(), "'a.csv.gz'"); - - sql = "insert into outfile values(1,2,3)"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].getCompressAlgorithm(), null); - assertEquals(stmts[0].getCompressLevel(), null); - assertEquals(stmts[0].getFormat(), null); - assertEquals(stmts[0].getFile(), null); - - sql = "select * from numbers(10) settings max_result_rows=1 into outfile ? 
compression :a level :b format CSV"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasCompressAlgorithm(), true); - assertEquals(stmts[0].getCompressAlgorithm(), ":a"); - assertEquals(stmts[0].hasCompressLevel(), true); - assertEquals(stmts[0].getCompressLevel(), ":b"); - assertEquals(stmts[0].hasFormat(), true); - assertEquals(stmts[0].getFormat(), "CSV"); - assertEquals(stmts[0].hasFile(), true); - assertEquals(stmts[0].getFile(), "?"); - } - - @Test(groups = "unit") - public void testWithTotals() throws ParseException { - String sql = "select 1 as with totals"; - ClickHouseSqlStatement[] stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasWithTotals(), false); - - sql = "select 1 with totals"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - assertEquals(stmts[0].hasWithTotals(), true); - } - - @Test(groups = "unit") - public void testParameterHandling() throws ParseException { - String sql = "insert into table d.t(a1, a2, a3) values(?,?,?)"; - ClickHouseSqlStatement[] stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), sql); - - stmts = ClickHouseSqlParser.parse(sql, new ClickHouseConfig(), new ParseHandler() { - @Override - public String handleParameter(String cluster, String database, String table, int columnIndex) { - return String.valueOf(columnIndex); - } - }); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), "insert into table d.t(a1, a2, a3) values(1,2,3)"); - } - - @Test(groups = "unit") - public void testMacroHandling() throws ParseException { - String sql = "select #listOfColumns #ignored from (#subQuery('1','2','3'))"; - ClickHouseSqlStatement[] stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), "select from ()"); - - stmts = ClickHouseSqlParser.parse(sql, new ClickHouseConfig(), new ParseHandler() { - @Override - public String handleMacro(String name, List parameters) { - if ("listOfColumns".equals(name)) { - return "a, b"; - } else if ("subQuery".equals(name)) { - return "select " + String.join("+", parameters); - } else { - return null; - } - } - }); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), "select a, b from (select 1+2+3)"); - } - - @Test(groups = "unit") - public void testExtractDBAndTableName() { - String sql; - - checkSingleStatement(parse(sql = "SELECT 1 from table"), sql, StatementType.SELECT, "system", "table"); - checkSingleStatement(parse(sql = "SELECT 1 from table a"), sql, StatementType.SELECT, "system", - "table"); - checkSingleStatement(parse(sql = "SELECT 1 from\ntable a"), sql, StatementType.SELECT, "system", - "table"); - checkSingleStatement(parse(sql = "SELECT 1\nfrom\ntable a"), sql, StatementType.SELECT, "system", - "table"); - checkSingleStatement(parse(sql = "SELECT 1\nFrom\ntable a"), sql, StatementType.SELECT, "system", - "table"); - checkSingleStatement(parse(sql = "SELECT 1 from db.table a"), sql, StatementType.SELECT, "db", "table"); - checkSingleStatement(parse(sql = " SELECT 1 from \"db.table\" a"), sql, StatementType.SELECT, "system", - "db.table"); - checkSingleStatement(parse(sql = "SELECT 1 from `db.table` a"), sql, StatementType.SELECT, "system", - "db.table"); - checkSingleStatement(parse(sql = "from `db.table` a"), sql, StatementType.UNKNOWN, "system", "unknown"); - checkSingleStatement(parse(sql = " from `db.table` a"), sql, 
StatementType.UNKNOWN, "system", - "unknown"); - checkSingleStatement(parse(sql = "ELECT from `db.table` a"), sql, StatementType.UNKNOWN, "system", - "unknown"); - checkSingleStatement(parse(sql = "SHOW tables"), sql, StatementType.SHOW, "system", "tables"); - checkSingleStatement(parse(sql = "desc table1"), sql, StatementType.DESCRIBE, "system", "columns"); - checkSingleStatement(parse(sql = "DESC table1"), sql, StatementType.DESCRIBE, "system", "columns"); - checkSingleStatement(parse(sql = "SELECT 'from db.table a' from tab"), sql, StatementType.SELECT, - "system", - "tab"); - checkSingleStatement(parse(sql = "SELECT"), sql, StatementType.UNKNOWN, "system", "unknown"); - checkSingleStatement(parse(sql = "S"), sql, StatementType.UNKNOWN, "system", "unknown"); - checkSingleStatement(parse(sql = ""), sql, StatementType.UNKNOWN, "system", "unknown"); - checkSingleStatement(parse(sql = " SELECT 1 from table from"), sql, StatementType.SELECT, "system", - "table"); - checkSingleStatement(parse(sql = " SELECT 1 from table from"), sql, StatementType.SELECT, "system", - "table"); - checkSingleStatement(parse(sql = "SELECT fromUnixTimestamp64Milli(time) as x from table"), sql, - StatementType.SELECT, "system", "table"); - checkSingleStatement(parse(sql = " SELECT fromUnixTimestamp64Milli(time)from table"), sql, - StatementType.SELECT, - "system", "table"); - checkSingleStatement(parse(sql = "/*qq*/ SELECT fromUnixTimestamp64Milli(time)from table"), sql, - StatementType.SELECT, "system", "table"); - checkSingleStatement(parse(sql = " SELECTfromUnixTimestamp64Milli(time)from table"), sql, - StatementType.UNKNOWN, - "system", "unknown"); - checkSingleStatement(parse(sql = " SELECT fromUnixTimestamp64Milli(time)from \".inner.a\""), sql, - StatementType.SELECT, "system", ".inner.a"); - checkSingleStatement(parse(sql = " SELECT fromUnixTimestamp64Milli(time)from db.`.inner.a`"), sql, - StatementType.SELECT, "db", ".inner.a"); - } - - @Test(groups = "unit") - public void testJdbcEscapeSyntax() { - String sql = "select {d '123'}"; - ClickHouseSqlStatement[] stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), "select date'123'"); - assertEquals(stmts[0].hasTempTable(), false); - - sql = "select {t '123'}"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), "select timestamp'1970-01-01 123'"); - assertEquals(stmts[0].hasTempTable(), false); - - sql = "select {ts '123'}"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), "select timestamp'123'"); - assertEquals(stmts[0].hasTempTable(), false); - - sql = "select {ts '123.1'}"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), "select toDateTime64('123.1',1)"); - assertEquals(stmts[0].hasTempTable(), false); - - sql = "select {tt '1''2\\'3'}"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), "select `1'2'3`"); - assertEquals(stmts[0].hasTempTable(), true); - assertEquals(stmts[0].getTempTables(), Collections.singleton("1'2'3")); - - sql = "select {d 1} {t} {tt} {ts 123.1'}"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getSQL(), "select "); - } - - @Test(groups = "unit") - public void testNewParameterSyntax() { - String sql = "select {column_a:String}"; - ClickHouseSqlStatement[] stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].isQuery(), true); - assertEquals(stmts[0].getSQL(), sql); - - sql = "select 
:column_a(String)"; - stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].isQuery(), true); - assertEquals(stmts[0].getSQL(), sql); - } - - @Test(groups = "unit") - public void testTcl() { - ClickHouseSqlStatement[] stmts = parse("begin transaction; commit;rollback;"); - assertEquals(stmts.length, 3); - assertEquals(stmts[0].isTCL(), true); - assertEquals(stmts[0].containsKeyword("bEGin"), true); - assertEquals(stmts[0].getSQL(), "begin transaction"); - assertEquals(stmts[1].isTCL(), true); - assertEquals(stmts[1].containsKeyword("Commit"), true); - assertEquals(stmts[1].getSQL(), " commit"); - assertEquals(stmts[2].isTCL(), true); - assertEquals(stmts[2].containsKeyword("RollBack"), true); - assertEquals(stmts[2].getSQL(), "rollback"); - } - - @Test(enabled = false) - public void testSETRoleStatements() { - final String simpleStmt = "SET ROLE ROL1, ROL2"; - ClickHouseSqlStatement[] stmts = parse(simpleStmt); - Assert.assertEquals(stmts.length, 1); - Assert.assertEquals(stmts[0].getStatementType(), StatementType.SET); - Assert.assertEquals(stmts[0].getOperationType(), OperationType.UNKNOWN); - Assert.assertEquals(stmts[0].getSQL(), simpleStmt); - Assert.assertNotNull(stmts[0].getSettings().get("_ROLES")); - - final String compositeStmt = "SET ROLE ROL1; SET ROLE ROL2;"; - stmts = parse(compositeStmt); - Assert.assertEquals(stmts.length, 2); - for (ClickHouseSqlStatement stmt : stmts) { - Assert.assertEquals(stmt.getStatementType(), StatementType.SET); - Assert.assertNotNull(stmts[0].getSettings().get("_ROLES")); - } - } - - // known issue - public void testTernaryOperator() { - String sql = "select x > 2 ? 'a' : 'b' from (select number as x from system.numbers limit ?)"; - ClickHouseSqlStatement[] stmts = parse(sql); - assertEquals(stmts.length, 1); - assertEquals(stmts[0].getStatementType(), StatementType.SELECT); - assertEquals(stmts[0].getParameters().size(), 1); - } - - static void parseAllSqlFiles(File f) throws IOException { - if (f.isDirectory()) { - File[] files = f.listFiles(); - for (File file : files) { - parseAllSqlFiles(file); - } - } else if (f.getName().endsWith(".sql")) { - StringBuilder sql = new StringBuilder(); - try (BufferedReader br = new BufferedReader( - new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8))) { - String line; - while ((line = br.readLine()) != null) { - sql.append(line).append("\n"); - } - } catch (IOException e) { - throw new IllegalArgumentException(e); - } - - ClickHouseSqlParser p = new ClickHouseSqlParser(sql.toString(), null, null); - try { - p.sql(); - } catch (ParseException e) { - System.out.println(f.getAbsolutePath() + " -> " + e.getMessage()); - } catch (TokenMgrException e) { - System.out.println(f.getAbsolutePath() + " -> " + e.getMessage()); - } - } - } - - // TODO: add a sub-module points to ClickHouse/tests/queries? 
- public static void main(String[] args) throws IOException { - String chTestQueryDir = "D:/Sources/Github/ch/queries"; - if (args != null && args.length > 0) { - chTestQueryDir = args[0]; - } - chTestQueryDir = System.getProperty("chTestQueryDir", chTestQueryDir); - parseAllSqlFiles(new File(chTestQueryDir)); - } -} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtilsTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtilsTest.java deleted file mode 100644 index 6d2aca481..000000000 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/parser/ClickHouseSqlUtilsTest.java +++ /dev/null @@ -1,67 +0,0 @@ -package com.clickhouse.jdbc.parser; - -import com.clickhouse.jdbc.ClickHouseSqlUtils; -import org.testng.Assert; -import org.testng.annotations.Test; - -public class ClickHouseSqlUtilsTest { - @Test(groups = "unit") - public void testIsQuote() { - Assert.assertFalse(ClickHouseSqlUtils.isQuote('\0')); - - Assert.assertTrue(ClickHouseSqlUtils.isQuote('"')); - Assert.assertTrue(ClickHouseSqlUtils.isQuote('\'')); - Assert.assertTrue(ClickHouseSqlUtils.isQuote('`')); - } - - @Test(groups = "unit") - public void testEscape() { - char[] quotes = new char[] { '"', '\'', '`' }; - String str; - for (int i = 0; i < quotes.length; i++) { - char quote = quotes[i]; - Assert.assertEquals(ClickHouseSqlUtils.escape(str = null, quote), str); - Assert.assertEquals(ClickHouseSqlUtils.escape(str = "", quote), - String.valueOf(quote) + String.valueOf(quote)); - Assert.assertEquals(ClickHouseSqlUtils.escape(str = "\\any \\string\\", quote), - String.valueOf(quote) + "\\\\any \\\\string\\\\" + String.valueOf(quote)); - Assert.assertEquals( - ClickHouseSqlUtils.escape(str = String.valueOf(quote) + "any " + String.valueOf(quote) + "string", - quote), - String.valueOf(quote) + "\\" + String.valueOf(quote) + "any \\" + String.valueOf(quote) + "string" - + String.valueOf(quote)); - Assert.assertEquals(ClickHouseSqlUtils.escape(str = "\\any \\string\\" + String.valueOf(quote), quote), - String.valueOf(quote) + "\\\\any \\\\string\\\\\\" + String.valueOf(quote) + String.valueOf(quote)); - Assert.assertEquals( - ClickHouseSqlUtils.escape(str = String.valueOf(quote) + "\\any \\" + String.valueOf(quote) - + "string\\" + String.valueOf(quote), quote), - String.valueOf(quote) + "\\" + String.valueOf(quote) + "\\\\any \\\\\\" + String.valueOf(quote) - + "string" + "\\\\\\" + String.valueOf(quote) + String.valueOf(quote)); - } - } - - @Test(groups = "unit") - public void testUnescape() { - String str; - Assert.assertEquals(ClickHouseSqlUtils.unescape(str = null), str); - Assert.assertEquals(ClickHouseSqlUtils.unescape(str = ""), str); - Assert.assertEquals(ClickHouseSqlUtils.unescape(str = "\\any \\string\\"), str); - char[] quotes = new char[] { '"', '\'', '`' }; - for (int i = 0; i < quotes.length; i++) { - char quote = quotes[i]; - Assert.assertEquals(ClickHouseSqlUtils.unescape(str = String.valueOf(quote) + "1" + String.valueOf(quote)), - "1"); - Assert.assertEquals(ClickHouseSqlUtils.unescape(str = String.valueOf(quote) + "\\any \\string\\"), str); - Assert.assertEquals(ClickHouseSqlUtils.unescape(str = "\\any \\string\\" + String.valueOf(quote)), str); - Assert.assertEquals( - ClickHouseSqlUtils.unescape(str = String.valueOf(quote) + "\\any" + String.valueOf(quote) - + String.valueOf(quote) + "\\string\\" + String.valueOf(quote)), - "any" + String.valueOf(quote) + "string\\"); - Assert.assertEquals( - ClickHouseSqlUtils.unescape(str = 
String.valueOf(quote) + String.valueOf(quote) + "\\" - + String.valueOf(quote) + "any" + String.valueOf(quote) + String.valueOf(quote) - + "\\string\\" + String.valueOf(quote)), - String.valueOf(quote) + String.valueOf(quote) + "any" + String.valueOf(quote) + "string\\"); - } - } -}
diff --git a/clickhouse-jdbc/src/test/resources/data_samples/test_sample.orc.gz b/clickhouse-jdbc/src/test/resources/data_samples/test_sample.orc.gz deleted file mode 100644 index a387cd5914c911be5c55b0d33bfc5b9c95973c83..0000000000000000000000000000000000000000 Binary files a/clickhouse-jdbc/src/test/resources/data_samples/test_sample.orc.gz and /dev/null differ
diff --git a/clickhouse-jdbc/src/test/resources/data_samples/test_sample.parquet.gz b/clickhouse-jdbc/src/test/resources/data_samples/test_sample.parquet.gz deleted file mode 100644 index eab2be518536700db73d4a9055c29e28d1c12e69..0000000000000000000000000000000000000000 Binary files a/clickhouse-jdbc/src/test/resources/data_samples/test_sample.parquet.gz and /dev/null differ
diff --git a/clickhouse-jdbc/src/test/resources/sqls/issue-441_with-totals.sql b/clickhouse-jdbc/src/test/resources/sqls/issue-441_with-totals.sql deleted file mode 100644 index 2169ce645..000000000 --- a/clickhouse-jdbc/src/test/resources/sqls/issue-441_with-totals.sql +++ /dev/null @@ -1,13 +0,0 @@ -WITH 2 AS factor -SELECT - number % 2 AS odd_even, - count(*) AS count, - sum(factor * number) AS output -FROM -( - SELECT number - FROM system.numbers - LIMIT 100 -) -GROUP BY number % 2 - WITH TOTALS \ No newline at end of file
diff --git a/clickhouse-jdbc/src/test/resources/sqls/issue-555_custom-format.sql b/clickhouse-jdbc/src/test/resources/sqls/issue-555_custom-format.sql deleted file mode 100644 index 79b7097db..000000000 --- a/clickhouse-jdbc/src/test/resources/sqls/issue-555_custom-format.sql +++ /dev/null @@ -1,36 +0,0 @@ -select - JSONExtractRaw(abcedfg.fields, 'someDateField___e') as abc_someDateField___e, - some_word as sw_someWord, - JSONExtractString(abcedfg.fields, 'field') as abc_field, - some_more_words as sw_moreWords , - last_word as sw_lastWord, - JSONExtractInt(abcedfg.fields, 'countOfWords') as abc_countOfWords, - abcedfg.id as abc_id, - JSONExtractString(abcedfg.fields, 'somePlace') as abc_somePlace, - JSONExtractString(abcedfg.fields, 'place') as abc_place, - JSONExtractInt(abcedfg.fields, 'countOfPlaces') as abc_countOfPlaces, - abcedfg.name as abc_name, - (some_more_words * 100 / (even_more_words * (? / 28))) - 100 as sw_wordsPercentChange, - some_unique_words as sw_uniqueWords -from ( - select - abcedfg_id, - sum(if(toDate(sample_date) >= toDate(?, 'UTC'), 1, 0)) some_more_words, - count(distinct if(toDate(sample_date) >= toDate(?, 'UTC'), wrd.word_id, null)) some_unique_words, - sum(if(toDate(sample_date) < toDate(?, 'UTC'), 1, 0)) even_more_words, - min(toDate(sample_date, 'UTC')) some_word, - max(toDate(sample_date, 'UTC')) last_word - from a1234_test.sample wrd - join a1234_test.abcedfg_list_item itm on itm.abcedfg_id = wrd.abcedfg_id - where toDate(sample_date, 'UTC') between - addDays(toDate(?, 'UTC'), -28) - and toDate(?, 'UTC') - and wrd.sample_type_id IN (?) - and itm.abcedfg_list_id IN (?) - and 1 - group by abcedfg_id -) as wrd -join a1234_test.abcedfg abc on abc.id = wrd.abcedfg_id -order by sw_moreWords desc - limit ? offset ?
-FORMAT CSVWithNames diff --git a/clickhouse-jdbc/src/test/resources/sqls/with-clause.sql b/clickhouse-jdbc/src/test/resources/sqls/with-clause.sql deleted file mode 100644 index 1b3abb3c7..000000000 --- a/clickhouse-jdbc/src/test/resources/sqls/with-clause.sql +++ /dev/null @@ -1,16 +0,0 @@ -WITH ( - ( - SELECT query_start_time_microseconds - FROM system.query_log - WHERE current_database = currentDatabase() - ORDER BY query_start_time DESC - LIMIT 1 - ) AS time_with_microseconds, - ( - SELECT query_start_time - FROM system.query_log - WHERE current_database = currentDatabase() - ORDER BY query_start_time DESC - LIMIT 1 - ) AS t) -SELECT if(dateDiff('second', toDateTime(time_with_microseconds), toDateTime(t)) = 0, 'ok', 'fail') \ No newline at end of file From 8cdcd7bb197b447c93eb32780d3a6b34fd82b0bd Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Sat, 21 Sep 2024 06:50:26 -0400 Subject: [PATCH 14/21] Tweaked testing --- .../main/java/com/clickhouse/jdbc/Driver.java | 2 +- .../java/com/clickhouse/jdbc/DriverTest.java | 39 ++++++++----------- 2 files changed, 17 insertions(+), 24 deletions(-) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java index 4e1cb2ee9..0c3a04ba1 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java @@ -42,7 +42,7 @@ public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws @Override public int getMajorVersion() { - return 0; + return 1; } @Override diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java index 2b785acea..07ec25d36 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java @@ -6,24 +6,10 @@ import com.clickhouse.jdbc.internal.ClickHouseJdbcUrlParser; import org.testng.Assert; -import org.testng.annotations.AfterTest; -import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; public class DriverTest extends JdbcIntegrationTest { - private Driver driver; - - @BeforeTest - public void setUp() { - driver = new Driver(); - } - - @AfterTest - public void tearDown() { - driver = null; - } - - @Test(groups = { "unit" }) + @Test public void testDriver() { try { DriverManager.registerDriver(new Driver()); @@ -32,18 +18,20 @@ public void testDriver() { } } - @Test(groups = { "unit" }) + @Test public void testConnect() { try { + Driver driver = new Driver(); Assert.assertNotNull(driver.connect(getEndpointString(), new Properties())); } catch (SQLException e) { Assert.fail("Failed to connect to ClickHouse", e); } } - @Test(groups = { "unit" }) + @Test public void testAcceptsURL() { try { + Driver driver = new Driver(); Assert.assertTrue(driver.acceptsURL(getEndpointString())); Assert.assertTrue(driver.acceptsURL("jdbc:ch://localhost:8123")); Assert.assertTrue(driver.acceptsURL("jdbc:clickhouse://localhost:8123")); @@ -54,9 +42,10 @@ public void testAcceptsURL() { } } - @Test(groups = { "unit" }) + @Test public void testGetPropertyInfo() { try { + Driver driver = new Driver(); Assert.assertEquals(driver.getPropertyInfo(getEndpointString(), new Properties()).length, 7); Properties sample = new Properties(); sample.setProperty("testing", "true"); @@ -66,24 +55,28 @@ public void testGetPropertyInfo() { } } - @Test(groups = { "unit" }) + @Test public void 
testGetMajorVersion() { - Assert.assertEquals(driver.getMajorVersion(), 0); + Driver driver = new Driver(); + Assert.assertEquals(driver.getMajorVersion(), 1); } - @Test(groups = { "unit" }) + @Test public void testGetMinorVersion() { + Driver driver = new Driver(); Assert.assertEquals(driver.getMinorVersion(), 0); } - @Test(groups = { "unit" }) + @Test public void testJdbcCompliant() { + Driver driver = new Driver(); Assert.assertFalse(driver.jdbcCompliant()); } - @Test(groups = { "unit" }) + @Test public void testGetParentLogger() { try { + Driver driver = new Driver(); driver.getParentLogger(); Assert.fail("Should not reach here"); } catch (SQLException e) { From 1bcef9500978019847559aed0950ddf08624e295 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Tue, 8 Oct 2024 03:31:50 -0400 Subject: [PATCH 15/21] Adding support for Statements and ResultSet --- .../client/BaseIntegrationTest.java | 2 +- .../com/clickhouse/jdbc/ConnectionImpl.java | 4 +- .../jdbc/PreparedStatementImpl.java | 2 +- .../com/clickhouse/jdbc/ResultSetImpl.java | 557 +++++++++++++----- .../com/clickhouse/jdbc/StatementImpl.java | 223 ++++++- .../java/com/clickhouse/jdbc/DriverTest.java | 1 - .../clickhouse/jdbc/JdbcIntegrationTest.java | 7 +- .../com/clickhouse/jdbc/StatementTest.java | 333 +++++++++++ 8 files changed, 940 insertions(+), 189 deletions(-) create mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/StatementTest.java diff --git a/clickhouse-client/src/test/java/com/clickhouse/client/BaseIntegrationTest.java b/clickhouse-client/src/test/java/com/clickhouse/client/BaseIntegrationTest.java index f09a0aefd..1b3462617 100644 --- a/clickhouse-client/src/test/java/com/clickhouse/client/BaseIntegrationTest.java +++ b/clickhouse-client/src/test/java/com/clickhouse/client/BaseIntegrationTest.java @@ -60,7 +60,7 @@ protected String getIpAddress(ClickHouseNode server) { return ipAddress; } - protected boolean isCloud() { + protected static boolean isCloud() { return ClickHouseServerForTest.isCloud(); } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java index 711f73fb6..6fddec203 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -13,8 +13,8 @@ public class ConnectionImpl implements Connection, JdbcWrapper { private static final Logger log = LoggerFactory.getLogger(ConnectionImpl.class); - private final Client client; - private final JdbcConfiguration config; + protected final Client client; + protected final JdbcConfiguration config; private boolean closed = false; private String catalog; diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java index 2b856d85e..0d5411e9a 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java @@ -9,7 +9,7 @@ public class PreparedStatementImpl extends StatementImpl implements PreparedStatement, JdbcWrapper { String sql; - public PreparedStatementImpl(Connection connection, String sql) { + public PreparedStatementImpl(ConnectionImpl connection, String sql) { super(connection); this.sql = sql; } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java 
b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java index 243ebbbf6..f8816c114 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java @@ -1,981 +1,1226 @@ package com.clickhouse.jdbc; +import java.io.ByteArrayInputStream; +import java.io.CharArrayReader; import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; +import java.net.MalformedURLException; import java.net.URL; +import java.nio.charset.StandardCharsets; import java.sql.*; +import java.time.LocalDate; import java.util.Calendar; import java.util.Map; +import com.clickhouse.client.api.data_formats.ClickHouseBinaryFormatReader; +import com.clickhouse.client.api.query.QueryResponse; import com.clickhouse.logging.Logger; import com.clickhouse.logging.LoggerFactory; public class ResultSetImpl implements ResultSet, JdbcWrapper { private static final Logger log = LoggerFactory.getLogger(ResultSetImpl.class); + private QueryResponse response; + protected ClickHouseBinaryFormatReader reader; + private final ResultSetMetaData metaData; + private boolean closed; + + public ResultSetImpl(QueryResponse response, ClickHouseBinaryFormatReader reader) { + this.response = response; + this.reader = reader; + this.metaData = new com.clickhouse.jdbc.metadata.ResultSetMetaData(); + this.closed = false; + } + + private void checkClosed() throws SQLException { + if (closed) { + throw new SQLException("ResultSet is closed."); + } + } + @Override public boolean next() throws SQLException { - return false; + checkClosed(); + + Map currentRow = reader.next(); + return currentRow != null; } @Override public void close() throws SQLException { + closed = true; + if (reader != null) { + reader = null; + } + if (response != null) { + try { + response.close(); + } catch (Exception e) { + throw new SQLException(e); + } + response = null; + } } @Override public boolean wasNull() throws SQLException { + checkClosed(); return false; } @Override public String getString(int columnIndex) throws SQLException { - return ""; + checkClosed(); + return reader.getString(columnIndex); } @Override public boolean getBoolean(int columnIndex) throws SQLException { - return false; + checkClosed(); + return reader.getBoolean(columnIndex); } @Override public byte getByte(int columnIndex) throws SQLException { - return 0; + checkClosed(); + return reader.getByte(columnIndex); } @Override public short getShort(int columnIndex) throws SQLException { - return 0; + checkClosed(); + return reader.getShort(columnIndex); } @Override public int getInt(int columnIndex) throws SQLException { - return 0; + checkClosed(); + return reader.getInteger(columnIndex); } @Override public long getLong(int columnIndex) throws SQLException { - return 0; + checkClosed(); + return reader.getLong(columnIndex); } @Override public float getFloat(int columnIndex) throws SQLException { - return 0; + checkClosed(); + return reader.getFloat(columnIndex); } @Override public double getDouble(int columnIndex) throws SQLException { - return 0; + checkClosed(); + return reader.getDouble(columnIndex); } @Override public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { - return null; + checkClosed(); + return reader.getBigDecimal(columnIndex); } @Override public byte[] getBytes(int columnIndex) throws SQLException { - return new byte[0]; + checkClosed(); + return reader.getByteArray(columnIndex); } @Override public Date getDate(int columnIndex) throws 
SQLException { - return null; + checkClosed(); + LocalDate localDate = reader.getLocalDate(columnIndex); + if(localDate == null) { + return null; + } + return Date.valueOf(localDate); } @Override public Time getTime(int columnIndex) throws SQLException { - return null; + checkClosed(); + return Time.valueOf(reader.getLocalDateTime(columnIndex).toLocalTime()); } @Override public Timestamp getTimestamp(int columnIndex) throws SQLException { - return null; + checkClosed(); + return Timestamp.valueOf(reader.getLocalDateTime(columnIndex)); } @Override public InputStream getAsciiStream(int columnIndex) throws SQLException { - return null; + checkClosed(); + return new ByteArrayInputStream(reader.getString(columnIndex).getBytes(StandardCharsets.UTF_8)); } @Override public InputStream getUnicodeStream(int columnIndex) throws SQLException { - return null; + checkClosed(); + return new ByteArrayInputStream(reader.getString(columnIndex).getBytes(StandardCharsets.UTF_8)); } @Override public InputStream getBinaryStream(int columnIndex) throws SQLException { + checkClosed(); + //TODO: implement return null; } @Override public String getString(String columnLabel) throws SQLException { - return ""; + checkClosed(); + return reader.getString(columnLabel); } @Override public boolean getBoolean(String columnLabel) throws SQLException { - return false; + checkClosed(); + return reader.getBoolean(columnLabel); } @Override public byte getByte(String columnLabel) throws SQLException { - return 0; + checkClosed(); + return reader.getByte(columnLabel); } @Override public short getShort(String columnLabel) throws SQLException { - return 0; + checkClosed(); + return reader.getShort(columnLabel); } @Override public int getInt(String columnLabel) throws SQLException { - return 0; + checkClosed(); + return reader.getInteger(columnLabel); } @Override public long getLong(String columnLabel) throws SQLException { - return 0; + checkClosed(); + return reader.getLong(columnLabel); } @Override public float getFloat(String columnLabel) throws SQLException { - return 0; + checkClosed(); + return reader.getFloat(columnLabel); } @Override public double getDouble(String columnLabel) throws SQLException { - return 0; + checkClosed(); + return reader.getDouble(columnLabel); } @Override public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { - return null; + checkClosed(); + return reader.getBigDecimal(columnLabel); } @Override public byte[] getBytes(String columnLabel) throws SQLException { - return new byte[0]; + checkClosed(); + return reader.getByteArray(columnLabel); } @Override public Date getDate(String columnLabel) throws SQLException { - return null; + checkClosed(); + LocalDate localDate = reader.getLocalDate(columnLabel); + if(localDate == null) { + return null; + } + return Date.valueOf(localDate); } @Override public Time getTime(String columnLabel) throws SQLException { - return null; + checkClosed(); + return Time.valueOf(reader.getLocalDateTime(columnLabel).toLocalTime()); } @Override public Timestamp getTimestamp(String columnLabel) throws SQLException { - return null; + checkClosed(); + return Timestamp.valueOf(reader.getLocalDateTime(columnLabel)); } @Override public InputStream getAsciiStream(String columnLabel) throws SQLException { - return null; + checkClosed(); + return new ByteArrayInputStream(reader.getString(columnLabel).getBytes(StandardCharsets.UTF_8)); } @Override public InputStream getUnicodeStream(String columnLabel) throws SQLException { - return null; + checkClosed(); + 
return new ByteArrayInputStream(reader.getString(columnLabel).getBytes(StandardCharsets.UTF_8)); } @Override public InputStream getBinaryStream(String columnLabel) throws SQLException { + checkClosed(); + //TODO: implement return null; } @Override public SQLWarning getWarnings() throws SQLException { + checkClosed(); return null; } @Override public void clearWarnings() throws SQLException { - + checkClosed(); } @Override public String getCursorName() throws SQLException { + checkClosed(); return ""; } @Override public ResultSetMetaData getMetaData() throws SQLException { - return null; + checkClosed(); + return metaData; } @Override public Object getObject(int columnIndex) throws SQLException { + checkClosed(); return null; } @Override public Object getObject(String columnLabel) throws SQLException { + checkClosed(); return null; } @Override public int findColumn(String columnLabel) throws SQLException { + checkClosed(); return 0; } @Override public Reader getCharacterStream(int columnIndex) throws SQLException { + checkClosed(); return null; } @Override public Reader getCharacterStream(String columnLabel) throws SQLException { + checkClosed(); return null; } @Override public BigDecimal getBigDecimal(int columnIndex) throws SQLException { - return null; + checkClosed(); + return reader.getBigDecimal(columnIndex); } @Override public BigDecimal getBigDecimal(String columnLabel) throws SQLException { - return null; + checkClosed(); + return reader.getBigDecimal(columnLabel); } @Override public boolean isBeforeFirst() throws SQLException { + checkClosed(); return false; } @Override public boolean isAfterLast() throws SQLException { + checkClosed(); return false; } @Override public boolean isFirst() throws SQLException { + checkClosed(); return false; } @Override public boolean isLast() throws SQLException { + checkClosed(); return false; } @Override public void beforeFirst() throws SQLException { - + checkClosed(); } @Override public void afterLast() throws SQLException { - + checkClosed(); } @Override public boolean first() throws SQLException { + checkClosed(); return false; } @Override public boolean last() throws SQLException { + checkClosed(); return false; } @Override public int getRow() throws SQLException { + checkClosed(); return 0; } @Override public boolean absolute(int row) throws SQLException { + checkClosed(); return false; } @Override public boolean relative(int rows) throws SQLException { + checkClosed(); return false; } @Override public boolean previous() throws SQLException { + checkClosed(); return false; } @Override public void setFetchDirection(int direction) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("setFetchDirection is not supported."); } @Override public int getFetchDirection() throws SQLException { - return 0; + checkClosed(); + return FETCH_FORWARD; } @Override public void setFetchSize(int rows) throws SQLException { - + checkClosed(); } @Override public int getFetchSize() throws SQLException { + checkClosed(); return 0; } @Override public int getType() throws SQLException { - return 0; + checkClosed(); + return TYPE_FORWARD_ONLY; } @Override public int getConcurrency() throws SQLException { - return 0; + checkClosed(); + return CONCUR_READ_ONLY; } @Override public boolean rowUpdated() throws SQLException { - return false; + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public boolean rowInserted() throws SQLException { - return false; + checkClosed(); + throw new 
SQLFeatureNotSupportedException("Writes are not supported."); } @Override public boolean rowDeleted() throws SQLException { - return false; + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNull(int columnIndex) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBoolean(int columnIndex, boolean x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateByte(int columnIndex, byte x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateShort(int columnIndex, short x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateInt(int columnIndex, int x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateLong(int columnIndex, long x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateFloat(int columnIndex, float x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateDouble(int columnIndex, double x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateString(int columnIndex, String x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBytes(int columnIndex, byte[] x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateDate(int columnIndex, Date x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateTime(int columnIndex, Time x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void 
updateObject(int columnIndex, Object x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNull(String columnLabel) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBoolean(String columnLabel, boolean x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateByte(String columnLabel, byte x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateShort(String columnLabel, short x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateInt(String columnLabel, int x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateLong(String columnLabel, long x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateFloat(String columnLabel, float x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateDouble(String columnLabel, double x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateString(String columnLabel, String x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBytes(String columnLabel, byte[] x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateDate(String columnLabel, Date x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateTime(String columnLabel, Time x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateObject(String columnLabel, 
Object x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void insertRow() throws SQLException { - + checkClosed(); } @Override public void updateRow() throws SQLException { - + checkClosed(); } @Override public void deleteRow() throws SQLException { - + checkClosed(); } @Override public void refreshRow() throws SQLException { - + checkClosed(); } @Override public void cancelRowUpdates() throws SQLException { - + checkClosed(); } @Override public void moveToInsertRow() throws SQLException { - + checkClosed(); } @Override public void moveToCurrentRow() throws SQLException { - + checkClosed(); } @Override public Statement getStatement() throws SQLException { + checkClosed(); return null; } @Override public Object getObject(int columnIndex, Map> map) throws SQLException { + checkClosed(); return null; } @Override public Ref getRef(int columnIndex) throws SQLException { - return null; + checkClosed(); + throw new SQLFeatureNotSupportedException("Ref is not supported."); } @Override public Blob getBlob(int columnIndex) throws SQLException { - return null; + checkClosed(); + throw new SQLFeatureNotSupportedException("Blob is not supported."); } @Override public Clob getClob(int columnIndex) throws SQLException { - return null; + checkClosed(); + throw new SQLFeatureNotSupportedException("Clob is not supported."); } @Override public Array getArray(int columnIndex) throws SQLException { - return null; + checkClosed(); + throw new SQLFeatureNotSupportedException("Array is not supported."); } @Override public Object getObject(String columnLabel, Map> map) throws SQLException { + checkClosed(); return null; } @Override public Ref getRef(String columnLabel) throws SQLException { - return null; + checkClosed(); + throw new SQLFeatureNotSupportedException("Ref is not supported."); } @Override public Blob getBlob(String columnLabel) throws SQLException { - return null; + checkClosed(); + throw new SQLFeatureNotSupportedException("Blob is not supported."); } @Override public Clob getClob(String columnLabel) throws SQLException { - return null; + checkClosed(); + throw new SQLFeatureNotSupportedException("Clob is not supported."); } @Override public Array getArray(String columnLabel) throws SQLException { - return null; + checkClosed(); + throw new SQLFeatureNotSupportedException("Array is not supported."); } @Override public Date getDate(int columnIndex, Calendar cal) throws SQLException { - return null; + checkClosed(); + LocalDate localDate = reader.getLocalDate(columnIndex); + if(localDate == null) { + return null; + } + return Date.valueOf(localDate); } @Override public Date getDate(String columnLabel, Calendar cal) throws SQLException { - return null; + checkClosed(); + LocalDate localDate = reader.getLocalDate(columnLabel); + if(localDate == null) { + return null; + } + return Date.valueOf(localDate); } @Override public Time getTime(int columnIndex, Calendar cal) throws SQLException { - return null; + checkClosed(); + return Time.valueOf(reader.getLocalDateTime(columnIndex).toLocalTime()); } @Override public Time getTime(String columnLabel, Calendar cal) throws SQLException { - return null; + checkClosed(); + return Time.valueOf(reader.getLocalDateTime(columnLabel).toLocalTime()); } @Override public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { - return null; + checkClosed(); + return Timestamp.valueOf(reader.getLocalDateTime(columnIndex)); } @Override public Timestamp 
getTimestamp(String columnLabel, Calendar cal) throws SQLException { - return null; + checkClosed(); + return Timestamp.valueOf(reader.getLocalDateTime(columnLabel)); } @Override public URL getURL(int columnIndex) throws SQLException { - return null; + checkClosed(); + try { + return new URL(reader.getString(columnIndex)); + } catch (MalformedURLException e) { + throw new SQLDataException(e); + } } @Override public URL getURL(String columnLabel) throws SQLException { - return null; + checkClosed(); + try { + return new URL(reader.getString(columnLabel)); + } catch (MalformedURLException e) { + throw new SQLDataException(e); + } } @Override public void updateRef(int columnIndex, Ref x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateRef(String columnLabel, Ref x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBlob(int columnIndex, Blob x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBlob(String columnLabel, Blob x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateClob(int columnIndex, Clob x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateClob(String columnLabel, Clob x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateArray(int columnIndex, Array x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateArray(String columnLabel, Array x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public RowId getRowId(int columnIndex) throws SQLException { + checkClosed(); return null; } @Override public RowId getRowId(String columnLabel) throws SQLException { + checkClosed(); return null; } @Override public void updateRowId(int columnIndex, RowId x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateRowId(String columnLabel, RowId x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public int getHoldability() throws SQLException { - return 0; + checkClosed(); + return HOLD_CURSORS_OVER_COMMIT; } @Override public boolean isClosed() throws SQLException { - return false; + return closed; } @Override public void updateNString(int columnIndex, String nString) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNString(String columnLabel, String nString) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNClob(int columnIndex, NClob nClob) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNClob(String columnLabel, NClob nClob) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are 
not supported."); } @Override public NClob getNClob(int columnIndex) throws SQLException { + checkClosed(); return null; } @Override public NClob getNClob(String columnLabel) throws SQLException { + checkClosed(); return null; } @Override public SQLXML getSQLXML(int columnIndex) throws SQLException { + checkClosed(); return null; } @Override public SQLXML getSQLXML(String columnLabel) throws SQLException { + checkClosed(); return null; } @Override public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public String getNString(int columnIndex) throws SQLException { - return ""; + checkClosed(); + return reader.getString(columnIndex); } @Override public String getNString(String columnLabel) throws SQLException { - return ""; + checkClosed(); + return reader.getString(columnLabel); } @Override public Reader getNCharacterStream(int columnIndex) throws SQLException { - return null; + checkClosed(); + return new CharArrayReader(reader.getString(columnIndex).toCharArray()); } @Override public Reader getNCharacterStream(String columnLabel) throws SQLException { - return null; + checkClosed(); + return new CharArrayReader(reader.getString(columnLabel).toCharArray()); } @Override public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } 
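
[Illustrative sketch, not part of the patch: the V2-backed ResultSetImpl above is forward-only and read-only — reads delegate to the ClickHouseBinaryFormatReader, while every update*/row-mutation method raises SQLFeatureNotSupportedException after the closed check. The snippet below shows what that contract looks like from the caller side through plain java.sql; the endpoint URL and the query literals are placeholders, and it assumes the refactored Driver is registered with DriverManager.]

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLFeatureNotSupportedException;
    import java.sql.Statement;

    public class ReadOnlyResultSetSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder endpoint; any URL the driver accepts ("jdbc:ch://..." or "jdbc:clickhouse://...") works here.
            String url = "jdbc:ch://localhost:8123";
            try (Connection conn = DriverManager.getConnection(url);
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("SELECT 1 AS num, 'Hello' AS words")) {
                while (rs.next()) {                        // forward-only iteration (TYPE_FORWARD_ONLY)
                    int num = rs.getInt("num");            // reads are served by the binary format reader
                    String words = rs.getString("words");
                    System.out.println(num + " -> " + words);
                }
                try {
                    rs.updateInt("num", 42);               // CONCUR_READ_ONLY: mutation attempts are rejected
                } catch (SQLFeatureNotSupportedException expected) {
                    // "Writes are not supported."
                }
            }
        }
    }

[Because ResultSetImpl holds the QueryResponse, closing the ResultSet (here via try-with-resources) also releases the server-side response, so no extra cleanup is needed beyond the blocks above.]
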
@Override public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateClob(int columnIndex, Reader reader) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateClob(String columnLabel, Reader reader) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNClob(int columnIndex, Reader reader) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public void updateNClob(String columnLabel, Reader reader) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Writes are not supported."); } @Override public T getObject(int columnIndex, Class type) throws SQLException { + checkClosed(); return null; 
} @Override public T getObject(String columnLabel, Class type) throws SQLException { + checkClosed(); return null; } @Override public void updateObject(int columnIndex, Object x, SQLType targetSqlType, int scaleOrLength) throws SQLException { + checkClosed(); ResultSet.super.updateObject(columnIndex, x, targetSqlType, scaleOrLength); } @Override public void updateObject(String columnLabel, Object x, SQLType targetSqlType, int scaleOrLength) throws SQLException { + checkClosed(); ResultSet.super.updateObject(columnLabel, x, targetSqlType, scaleOrLength); } @Override public void updateObject(int columnIndex, Object x, SQLType targetSqlType) throws SQLException { + checkClosed(); ResultSet.super.updateObject(columnIndex, x, targetSqlType); } @Override public void updateObject(String columnLabel, Object x, SQLType targetSqlType) throws SQLException { + checkClosed(); ResultSet.super.updateObject(columnLabel, x, targetSqlType); } } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java index 88e042de3..6578f6a3b 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java @@ -1,44 +1,166 @@ package com.clickhouse.jdbc; +import com.clickhouse.client.api.data_formats.ClickHouseBinaryFormatReader; +import com.clickhouse.client.api.metrics.OperationMetrics; +import com.clickhouse.client.api.metrics.ServerMetrics; +import com.clickhouse.client.api.query.QueryResponse; +import com.clickhouse.logging.Logger; +import com.clickhouse.logging.LoggerFactory; + import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLWarning; import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; public class StatementImpl implements Statement, JdbcWrapper { - Connection connection; - public StatementImpl(Connection connection) { + private static final Logger log = LoggerFactory.getLogger(StatementImpl.class); + + ConnectionImpl connection; + private int queryTimeout; + private boolean closed; + private ResultSetImpl currentResultSet; + private OperationMetrics metrics; + private List batch; + + public StatementImpl(ConnectionImpl connection) { this.connection = connection; + this.queryTimeout = 0; + this.closed = false; + this.currentResultSet = null; + this.metrics = null; + this.batch = new ArrayList<>(); + } + + private void checkClosed() throws SQLException { + if (closed) { + throw new SQLException("Statement is closed"); + } + } + + private enum StatementType { + SELECT, INSERT, DELETE, UPDATE, CREATE, DROP, ALTER, TRUNCATE, USE, SHOW, DESCRIBE, EXPLAIN, SET, KILL, OTHER + } + + private StatementType parseStatementType(String sql) { + String[] tokens = sql.trim().split("\\s+"); + if (tokens.length == 0) { + return StatementType.OTHER; + } + + return switch (tokens[0].toUpperCase()) { + case "SELECT" -> StatementType.SELECT; + case "INSERT" -> StatementType.INSERT; + case "DELETE" -> StatementType.DELETE; + case "UPDATE" -> StatementType.UPDATE; + case "CREATE" -> StatementType.CREATE; + case "DROP" -> StatementType.DROP; + case "ALTER" -> StatementType.ALTER; + case "TRUNCATE" -> StatementType.TRUNCATE; + case "USE" -> StatementType.USE; + case "SHOW" -> StatementType.SHOW; + case "DESCRIBE" -> StatementType.DESCRIBE; + case "EXPLAIN" -> StatementType.EXPLAIN; + case 
"SET" -> StatementType.SET; + case "KILL" -> StatementType.KILL; + default -> StatementType.OTHER; + }; + } + + private String parseTableName(String sql) { + String[] tokens = sql.trim().split("\\s+"); + if (tokens.length < 3) { + return null; + } + + return tokens[2]; + } + + public static String parseJdbcEscapeSyntax(String sql) { + log.trace("Original SQL: {}", sql); + // Replace {d 'YYYY-MM-DD'} with corresponding SQL date format + sql = sql.replaceAll("\\{d '([^']*)'\\}", "toDate('$1')"); + + // Replace {ts 'YYYY-MM-DD HH:mm:ss'} with corresponding SQL timestamp format + sql = sql.replaceAll("\\{ts '([^']*)'\\}", "timestamp('$1')"); + + // Replace function escape syntax {fn } (e.g., {fn UCASE(name)}) + sql = sql.replaceAll("\\{fn ([^\\}]*)\\}", "$1"); + + // Handle outer escape syntax + //sql = sql.replaceAll("\\{escape '([^']*)'\\}", "'$1'"); + + // Add more replacements as needed for other JDBC escape sequences + log.trace("Parsed SQL: {}", sql); + return sql; } @Override public ResultSet executeQuery(String sql) throws SQLException { - return null; + checkClosed(); + + try { + sql = parseJdbcEscapeSyntax(sql); + QueryResponse response = connection.client.query(sql).get(queryTimeout, TimeUnit.SECONDS); + ClickHouseBinaryFormatReader reader = connection.client.newBinaryFormatReader(response); + currentResultSet = new ResultSetImpl(response, reader); + metrics = response.getMetrics(); + } catch (Exception e) { + throw new SQLException(e); + } + + return currentResultSet; } @Override public int executeUpdate(String sql) throws SQLException { - return 0; + checkClosed(); + + if (parseStatementType(sql) == StatementType.SELECT) { + throw new SQLException("executeUpdate() cannot be called with a SELECT statement"); + } + + try { + sql = parseJdbcEscapeSyntax(sql); + QueryResponse response = connection.client.query(sql).get(queryTimeout, TimeUnit.SECONDS); + currentResultSet = null; + metrics = response.getMetrics(); + } catch (Exception e) { + throw new RuntimeException(e); + } + + return (int) metrics.getMetric(ServerMetrics.NUM_ROWS_WRITTEN).getLong(); } @Override public void close() throws SQLException { - + closed = true; + connection.close(); + if (currentResultSet != null) { + currentResultSet.close(); + currentResultSet = null; + } } @Override public int getMaxFieldSize() throws SQLException { + checkClosed(); return 0; } @Override public void setMaxFieldSize(int max) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Set max field size is not supported."); } @Override public int getMaxRows() throws SQLException { + checkClosed(); return 0; } @@ -54,17 +176,20 @@ public void setEscapeProcessing(boolean enable) throws SQLException { @Override public int getQueryTimeout() throws SQLException { - return 0; + checkClosed(); + return queryTimeout; } @Override public void setQueryTimeout(int seconds) throws SQLException { - + checkClosed(); + queryTimeout = seconds; } @Override public void cancel() throws SQLException { - + checkClosed(); + throw new UnsupportedOperationException("Cancel is not supported."); } @Override @@ -84,32 +209,53 @@ public void setCursorName(String name) throws SQLException { @Override public boolean execute(String sql) throws SQLException { - return false; + checkClosed(); + StatementType type = parseStatementType(sql); + + if (type == StatementType.SELECT) { + executeQuery(sql); + return true; + } else { + executeUpdate(sql); + return false; + } } @Override public ResultSet getResultSet() throws SQLException { - return null; + 
checkClosed(); + + ResultSet resultSet = currentResultSet; + currentResultSet = null; + return resultSet; } @Override public int getUpdateCount() throws SQLException { - return 0; + checkClosed(); + if (currentResultSet == null) { + return (int) metrics.getMetric(ServerMetrics.NUM_ROWS_WRITTEN).getLong(); + } + + return -1; } @Override public boolean getMoreResults() throws SQLException { + checkClosed(); return false; } @Override public void setFetchDirection(int direction) throws SQLException { - + checkClosed(); + throw new UnsupportedOperationException("Fetch direction is not supported."); } @Override public int getFetchDirection() throws SQLException { - return 0; + checkClosed(); + return ResultSet.FETCH_FORWARD; } @Override @@ -124,32 +270,43 @@ public int getFetchSize() throws SQLException { @Override public int getResultSetConcurrency() throws SQLException { - return 0; + checkClosed(); + return ResultSet.CONCUR_READ_ONLY; } @Override public int getResultSetType() throws SQLException { - return 0; + checkClosed(); + return ResultSet.TYPE_FORWARD_ONLY; } @Override public void addBatch(String sql) throws SQLException { - + checkClosed(); + batch.add(sql); } @Override public void clearBatch() throws SQLException { - + checkClosed(); + batch.clear(); } @Override public int[] executeBatch() throws SQLException { - return new int[0]; + checkClosed(); + List results = new ArrayList<>(); + + for(String sql : batch) { + results.add(executeUpdate(sql)); + } + + return results.stream().mapToInt(i -> i).toArray(); } @Override public Connection getConnection() throws SQLException { - return null; + return connection; } @Override @@ -164,32 +321,32 @@ public ResultSet getGeneratedKeys() throws SQLException { @Override public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { - return 0; + return executeUpdate(sql); } @Override public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { - return 0; + return executeUpdate(sql); } @Override public int executeUpdate(String sql, String[] columnNames) throws SQLException { - return 0; + return executeUpdate(sql); } @Override public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { - return false; + return execute(sql); } @Override public boolean execute(String sql, int[] columnIndexes) throws SQLException { - return false; + return execute(sql); } @Override public boolean execute(String sql, String[] columnNames) throws SQLException { - return false; + return execute(sql); } @Override @@ -199,7 +356,7 @@ public int getResultSetHoldability() throws SQLException { @Override public boolean isClosed() throws SQLException { - return false; + return closed; } @Override @@ -224,61 +381,73 @@ public boolean isCloseOnCompletion() throws SQLException { @Override public long getLargeUpdateCount() throws SQLException { + checkClosed(); return Statement.super.getLargeUpdateCount(); } @Override public void setLargeMaxRows(long max) throws SQLException { + checkClosed(); Statement.super.setLargeMaxRows(max); } @Override public long getLargeMaxRows() throws SQLException { + checkClosed(); return Statement.super.getLargeMaxRows(); } @Override public long[] executeLargeBatch() throws SQLException { + checkClosed(); return Statement.super.executeLargeBatch(); } @Override public long executeLargeUpdate(String sql) throws SQLException { + checkClosed(); return Statement.super.executeLargeUpdate(sql); } @Override public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + 
checkClosed(); return Statement.super.executeLargeUpdate(sql, autoGeneratedKeys); } @Override public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException { + checkClosed(); return Statement.super.executeLargeUpdate(sql, columnIndexes); } @Override public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException { + checkClosed(); return Statement.super.executeLargeUpdate(sql, columnNames); } @Override public String enquoteLiteral(String val) throws SQLException { + checkClosed(); return Statement.super.enquoteLiteral(val); } @Override public String enquoteIdentifier(String identifier, boolean alwaysQuote) throws SQLException { + checkClosed(); return Statement.super.enquoteIdentifier(identifier, alwaysQuote); } @Override public boolean isSimpleIdentifier(String identifier) throws SQLException { + checkClosed(); return Statement.super.isSimpleIdentifier(identifier); } @Override public String enquoteNCharLiteral(String val) throws SQLException { + checkClosed(); return Statement.super.enquoteNCharLiteral(val); } } diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java index 07ec25d36..ba9a2732b 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/DriverTest.java @@ -4,7 +4,6 @@ import java.sql.SQLException; import java.util.Properties; -import com.clickhouse.jdbc.internal.ClickHouseJdbcUrlParser; import org.testng.Assert; import org.testng.annotations.Test; diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java index 0000f9aaf..8bb1141f7 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/JdbcIntegrationTest.java @@ -13,8 +13,9 @@ public abstract class JdbcIntegrationTest extends BaseIntegrationTest { private static final Logger LOGGER = LoggerFactory.getLogger(JdbcIntegrationTest.class); + public String getEndpointString() { - return getEndpointString(false); + return getEndpointString(isCloud()); } public String getEndpointString(boolean includeDbName) { return "jdbc:ch:" + (isCloud() ? "https" : "http") + "://" + @@ -30,4 +31,8 @@ public Connection getJdbcConnection() throws SQLException { return new ConnectionImpl(getEndpointString(), info); //return DriverManager.getConnection(getEndpointString(), "default", ClickHouseServerForTest.getPassword()); } + + protected static String getDatabase() { + return isCloud() ? 
ClickHouseServerForTest.getDatabase() : "default"; + } } diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/StatementTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/StatementTest.java new file mode 100644 index 000000000..178dc2095 --- /dev/null +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/StatementTest.java @@ -0,0 +1,333 @@ +package com.clickhouse.jdbc; + +import com.clickhouse.client.BaseIntegrationTest; +import com.clickhouse.client.ClickHouseServerForTest; +import org.testng.annotations.Test; + +import java.sql.Connection; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +public class StatementTest extends JdbcIntegrationTest { + @Test + public void testExecuteQuerySimpleNumbers() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + try (ResultSet rs = stmt.executeQuery("SELECT 1 AS num")) { + assertTrue(rs.next()); + assertEquals(rs.getByte(1), 1); + assertEquals(rs.getByte("num"), 1); + assertEquals(rs.getShort(1), 1); + assertEquals(rs.getShort("num"), 1); + assertEquals(rs.getInt(1), 1); + assertEquals(rs.getInt("num"), 1); + assertEquals(rs.getLong(1), 1); + assertEquals(rs.getLong("num"), 1); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteQuerySimpleFloats() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + try (ResultSet rs = stmt.executeQuery("SELECT 1.1 AS num")) { + assertTrue(rs.next()); + assertEquals(rs.getFloat(1), 1.1f); + assertEquals(rs.getFloat("num"), 1.1f); + assertEquals(rs.getDouble(1), 1.1); + assertEquals(rs.getDouble("num"), 1.1); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteQueryBooleans() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + try (ResultSet rs = stmt.executeQuery("SELECT true AS flag")) { + assertTrue(rs.next()); + assertTrue(rs.getBoolean(1)); + assertTrue(rs.getBoolean("flag")); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteQueryStrings() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + try (ResultSet rs = stmt.executeQuery("SELECT 'Hello' AS words")) { + assertTrue(rs.next()); + assertEquals(rs.getString(1), "Hello"); + assertEquals(rs.getString("words"), "Hello"); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteQueryNulls() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + try (ResultSet rs = stmt.executeQuery("SELECT NULL AS nothing")) { + assertTrue(rs.next()); + assertNull(rs.getObject(1)); + assertNull(rs.getObject("nothing")); + assertNull(rs.getString(1)); + assertNull(rs.getString("nothing")); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteQueryDates() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + try (ResultSet rs = 
stmt.executeQuery("SELECT toDate('2020-01-01 12:10:07') AS date, toDateTime('2020-01-01 10:11:12', 'Asia/Istanbul') AS datetime")) { + assertTrue(rs.next()); + assertEquals(rs.getDate(1).toString(), "2020-01-01"); + assertEquals(rs.getDate("date").toString(), "2020-01-01"); + assertEquals(rs.getDate(1).toLocalDate().toString(), "2020-01-01"); + assertEquals(rs.getDate("date").toLocalDate().toString(), "2020-01-01"); + assertEquals(rs.getDate(1, null).toLocalDate().toString(), "2020-01-01"); + assertEquals(rs.getDate("date", null).toLocalDate().toString(), "2020-01-01"); + assertEquals(rs.getString(1), "2020-01-01T00:00Z[UTC]"); + assertEquals(rs.getString("date"), "2020-01-01T00:00Z[UTC]"); + assertEquals(rs.getDate(2).toString(), "2020-01-01"); + assertEquals(rs.getDate("datetime").toString(), "2020-01-01"); + assertEquals(rs.getDate(2).toLocalDate().toString(), "2020-01-01"); + assertEquals(rs.getDate("datetime").toLocalDate().toString(), "2020-01-01"); + assertEquals(rs.getDate(2, null).toLocalDate().toString(), "2020-01-01"); + assertEquals(rs.getDate("datetime", null).toLocalDate().toString(), "2020-01-01"); + assertEquals(rs.getString(2), "2020-01-01T10:11:12+03:00[Asia/Istanbul]"); + assertEquals(rs.getString("datetime"), "2020-01-01T10:11:12+03:00[Asia/Istanbul]"); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteUpdateSimpleNumbers() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + assertEquals(stmt.executeUpdate("CREATE TABLE IF NOT EXISTS " + getDatabase() + ".simpleNumbers (num UInt8) ENGINE = Memory"), 0); + assertEquals(stmt.executeUpdate("INSERT INTO " + getDatabase() + ".simpleNumbers VALUES (1), (2), (3)"), 3); + try (ResultSet rs = stmt.executeQuery("SELECT num FROM " + getDatabase() + ".simpleNumbers ORDER BY num")) { + assertTrue(rs.next()); + assertEquals(rs.getShort(1), 1); + assertTrue(rs.next()); + assertEquals(rs.getShort(1), 2); + assertTrue(rs.next()); + assertEquals(rs.getShort(1), 3); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteUpdateSimpleFloats() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + assertEquals(stmt.executeUpdate("CREATE TABLE IF NOT EXISTS " + getDatabase() + ".simpleFloats (num Float32) ENGINE = Memory"), 0); + assertEquals(stmt.executeUpdate("INSERT INTO " + getDatabase() + ".simpleFloats VALUES (1.1), (2.2), (3.3)"), 3); + try (ResultSet rs = stmt.executeQuery("SELECT num FROM " + getDatabase() + ".simpleFloats ORDER BY num")) { + assertTrue(rs.next()); + assertEquals(rs.getFloat(1), 1.1f); + assertTrue(rs.next()); + assertEquals(rs.getFloat(1), 2.2f); + assertTrue(rs.next()); + assertEquals(rs.getFloat(1), 3.3f); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteUpdateBooleans() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + assertEquals(stmt.executeUpdate("CREATE TABLE IF NOT EXISTS " + getDatabase() + ".booleans (id UInt8, flag Boolean) ENGINE = Memory"), 0); + assertEquals(stmt.executeUpdate("INSERT INTO " + getDatabase() + ".booleans VALUES (0, true), (1, false), (2, true)"), 3); + try (ResultSet rs = stmt.executeQuery("SELECT flag FROM " + getDatabase() + ".booleans ORDER BY id")) { + assertTrue(rs.next()); + assertTrue(rs.getBoolean(1)); + assertTrue(rs.next()); + assertFalse(rs.getBoolean(1)); + assertTrue(rs.next()); + 
assertTrue(rs.getBoolean(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteUpdateStrings() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + assertEquals(stmt.executeUpdate("CREATE TABLE IF NOT EXISTS " + getDatabase() + ".strings (id UInt8, words String) ENGINE = Memory"), 0); + assertEquals(stmt.executeUpdate("INSERT INTO " + getDatabase() + ".strings VALUES (0, 'Hello'), (1, 'World'), (2, 'ClickHouse')"), 3); + try (ResultSet rs = stmt.executeQuery("SELECT words FROM " + getDatabase() + ".strings ORDER BY id")) { + assertTrue(rs.next()); + assertEquals(rs.getString(1), "Hello"); + assertTrue(rs.next()); + assertEquals(rs.getString(1), "World"); + assertTrue(rs.next()); + assertEquals(rs.getString(1), "ClickHouse"); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteUpdateNulls() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + assertEquals(stmt.executeUpdate("CREATE TABLE IF NOT EXISTS " + getDatabase() + ".nulls (id UInt8, nothing Nullable(String)) ENGINE = Memory"), 0); + assertEquals(stmt.executeUpdate("INSERT INTO " + getDatabase() + ".nulls VALUES (0, 'Hello'), (1, NULL), (2, 'ClickHouse')"), 3); + try (ResultSet rs = stmt.executeQuery("SELECT nothing FROM " + getDatabase() + ".nulls ORDER BY id")) { + assertTrue(rs.next()); + assertEquals(rs.getString(1), "Hello"); + assertTrue(rs.next()); + assertNull(rs.getString(1)); + assertTrue(rs.next()); + assertEquals(rs.getString(1), "ClickHouse"); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteUpdateDates() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + assertEquals(stmt.executeUpdate("CREATE TABLE IF NOT EXISTS " + getDatabase() + ".dates (id UInt8, date Nullable(Date), datetime Nullable(DateTime)) ENGINE = Memory"), 0); + assertEquals(stmt.executeUpdate("INSERT INTO " + getDatabase() + ".dates VALUES (0, '2020-01-01', '2020-01-01 10:11:12'), (1, NULL, '2020-01-01 12:10:07'), (2, '2020-01-01', NULL)"), 3); + try (ResultSet rs = stmt.executeQuery("SELECT date, datetime FROM " + getDatabase() + ".dates ORDER BY id")) { + assertTrue(rs.next()); + assertEquals(rs.getDate(1).toString(), "2020-01-01"); + assertEquals(rs.getDate(2).toString(), "2020-01-01"); + assertTrue(rs.next()); + assertNull(rs.getDate(1)); + assertEquals(rs.getDate(2).toString(), "2020-01-01"); + assertTrue(rs.next()); + assertEquals(rs.getDate(1).toString(), "2020-01-01"); + assertNull(rs.getDate(2)); + assertFalse(rs.next()); + } + } + } + } + + + @Test + public void testExecuteUpdateBatch() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + assertEquals(stmt.executeUpdate("CREATE TABLE IF NOT EXISTS " + getDatabase() + ".batch (id UInt8, num UInt8) ENGINE = Memory"), 0); + stmt.addBatch("INSERT INTO " + getDatabase() + ".batch VALUES (0, 1)"); + stmt.addBatch("INSERT INTO " + getDatabase() + ".batch VALUES (1, 2)"); + stmt.addBatch("INSERT INTO " + getDatabase() + ".batch VALUES (2, 3), (3, 4)"); + int[] counts = stmt.executeBatch(); + assertEquals(counts.length, 3); + assertEquals(counts[0], 1); + assertEquals(counts[1], 1); + assertEquals(counts[2], 2); + try (ResultSet rs = stmt.executeQuery("SELECT num FROM " + getDatabase() + ".batch ORDER BY id")) { + assertTrue(rs.next()); + 
assertEquals(rs.getShort(1), 1); + assertTrue(rs.next()); + assertEquals(rs.getShort(1), 2); + assertTrue(rs.next()); + assertEquals(rs.getShort(1), 3); + assertTrue(rs.next()); + assertEquals(rs.getShort(1), 4); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testJdbcEscapeSyntax() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + try (ResultSet rs = stmt.executeQuery("SELECT {d '2021-11-01'} AS D, {ts '2021-11-01 12:34:56'} AS TS, " + + "{fn ABS(-1)} AS FNABS, {fn CONCAT('Hello', 'World')} AS FNCONCAT, {fn UCASE('hello')} AS FNUPPER, " + + "{fn LCASE('HELLO')} AS FNLOWER, {fn LTRIM(' Hello ')} AS FNLTRIM, {fn RTRIM(' Hello ')} AS FNRTRIM, " + + "{fn LENGTH('Hello')} AS FNLENGTH, {fn LOCATE('l', 'Hello')} AS FNLOCATE, {fn MOD(10, 3)} AS FNMOD, " + + "{fn SQRT(9)} AS FNSQRT, {fn SUBSTRING('Hello', 3, 2)} AS FNSUBSTRING")) { + assertTrue(rs.next()); + assertEquals(rs.getDate(1), Date.valueOf(LocalDate.of(2021, 11, 1))); + //assertEquals(rs.getTimestamp(2), java.sql.Timestamp.valueOf(LocalDateTime.of(2021, 11, 1, 12, 34, 56))); + assertEquals(rs.getInt(3), 1); + assertEquals(rs.getInt("FNABS"), 1); + assertEquals(rs.getString(4), "HelloWorld"); + assertEquals(rs.getString("FNCONCAT"), "HelloWorld"); + assertEquals(rs.getString(5), "HELLO"); + assertEquals(rs.getString("FNUPPER"), "HELLO"); + assertEquals(rs.getString(6), "hello"); + assertEquals(rs.getString("FNLOWER"), "hello"); + assertEquals(rs.getString(7), "Hello "); + assertEquals(rs.getString("FNLTRIM"), "Hello "); + assertEquals(rs.getString(8), " Hello"); + assertEquals(rs.getString("FNRTRIM"), " Hello"); + assertEquals(rs.getInt(9), 5); + assertEquals(rs.getInt("FNLENGTH"), 5); + assertEquals(rs.getInt(10), 3); + assertEquals(rs.getInt("FNLOCATE"), 3); + assertEquals(rs.getInt(11), 1); + assertEquals(rs.getInt("FNMOD"), 1); + assertEquals(rs.getInt(12), 3); + assertEquals(rs.getInt("FNSQRT"), 3); + assertEquals(rs.getInt(13), 3); + assertEquals(rs.getInt("FNSUBSTRING"), 3); + assertEquals(rs.getString(14), "llo"); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testExecuteQueryTimeout() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + stmt.setQueryTimeout(1); + assertThrows(SQLException.class, () -> { + try (ResultSet rs = stmt.executeQuery("SELECT sleep(2)")) { + assertFalse(rs.next()); + } + }); + } + } + } + +} From f09ca169682b99d7ed48455588ba65ad6da3895b Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Tue, 8 Oct 2024 03:32:09 -0400 Subject: [PATCH 16/21] Update StatementImpl.java --- .../src/main/java/com/clickhouse/jdbc/StatementImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java index 6578f6a3b..641dde355 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java @@ -80,7 +80,7 @@ private String parseTableName(String sql) { return tokens[2]; } - public static String parseJdbcEscapeSyntax(String sql) { + private static String parseJdbcEscapeSyntax(String sql) { log.trace("Original SQL: {}", sql); // Replace {d 'YYYY-MM-DD'} with corresponding SQL date format sql = sql.replaceAll("\\{d '([^']*)'\\}", "toDate('$1')"); From 0afc6126e44566166cb59751b28b6ed720b0c64e Mon Sep 17 00:00:00 2001 From: 
Paultagoras Date: Tue, 8 Oct 2024 03:41:45 -0400 Subject: [PATCH 17/21] Update ResultSetImpl.java --- .../com/clickhouse/jdbc/ResultSetImpl.java | 46 +++++++++++-------- 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java index f8816c114..1bd00b444 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java @@ -10,6 +10,7 @@ import java.nio.charset.StandardCharsets; import java.sql.*; import java.time.LocalDate; +import java.time.LocalDateTime; import java.util.Calendar; import java.util.Map; @@ -143,13 +144,21 @@ public Date getDate(int columnIndex) throws SQLException { @Override public Time getTime(int columnIndex) throws SQLException { checkClosed(); - return Time.valueOf(reader.getLocalDateTime(columnIndex).toLocalTime()); + LocalDateTime localDateTime = reader.getLocalDateTime(columnIndex); + if(localDateTime == null) { + return null; + } + return Time.valueOf(localDateTime.toLocalTime()); } @Override public Timestamp getTimestamp(int columnIndex) throws SQLException { checkClosed(); - return Timestamp.valueOf(reader.getLocalDateTime(columnIndex)); + LocalDateTime localDateTime = reader.getLocalDateTime(columnIndex); + if(localDateTime == null) { + return null; + } + return Timestamp.valueOf(localDateTime); } @Override @@ -244,13 +253,21 @@ public Date getDate(String columnLabel) throws SQLException { @Override public Time getTime(String columnLabel) throws SQLException { checkClosed(); - return Time.valueOf(reader.getLocalDateTime(columnLabel).toLocalTime()); + LocalDateTime localDateTime = reader.getLocalDateTime(columnLabel); + if(localDateTime == null) { + return null; + } + return Time.valueOf(localDateTime.toLocalTime()); } @Override public Timestamp getTimestamp(String columnLabel) throws SQLException { checkClosed(); - return Timestamp.valueOf(reader.getLocalDateTime(columnLabel)); + LocalDateTime localDateTime = reader.getLocalDateTime(columnLabel); + if(localDateTime == null) { + return null; + } + return Timestamp.valueOf(localDateTime); } @Override @@ -732,6 +749,7 @@ public Statement getStatement() throws SQLException { @Override public Object getObject(int columnIndex, Map> map) throws SQLException { checkClosed(); + //TODO: Should we implement? 
return null; } @@ -792,45 +810,37 @@ public Array getArray(String columnLabel) throws SQLException { @Override public Date getDate(int columnIndex, Calendar cal) throws SQLException { checkClosed(); - LocalDate localDate = reader.getLocalDate(columnIndex); - if(localDate == null) { - return null; - } - return Date.valueOf(localDate); + return getDate(columnIndex); } @Override public Date getDate(String columnLabel, Calendar cal) throws SQLException { checkClosed(); - LocalDate localDate = reader.getLocalDate(columnLabel); - if(localDate == null) { - return null; - } - return Date.valueOf(localDate); + return getDate(columnLabel); } @Override public Time getTime(int columnIndex, Calendar cal) throws SQLException { checkClosed(); - return Time.valueOf(reader.getLocalDateTime(columnIndex).toLocalTime()); + return getTime(columnIndex); } @Override public Time getTime(String columnLabel, Calendar cal) throws SQLException { checkClosed(); - return Time.valueOf(reader.getLocalDateTime(columnLabel).toLocalTime()); + return getTime(columnLabel); } @Override public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { checkClosed(); - return Timestamp.valueOf(reader.getLocalDateTime(columnIndex)); + return getTimestamp(columnIndex); } @Override public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { checkClosed(); - return Timestamp.valueOf(reader.getLocalDateTime(columnLabel)); + return getTimestamp(columnLabel); } @Override From f46b7a69dec10c2e1c917513543631a2fbd37b6a Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Sun, 13 Oct 2024 19:52:11 -0400 Subject: [PATCH 18/21] Update ResultSetImpl.java --- .../src/main/java/com/clickhouse/jdbc/ResultSetImpl.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java index 1bd00b444..d6fd533f7 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java @@ -134,6 +134,7 @@ public byte[] getBytes(int columnIndex) throws SQLException { @Override public Date getDate(int columnIndex) throws SQLException { checkClosed(); + //TODO: Add this to ClickHouseBinaryFormatReader LocalDate localDate = reader.getLocalDate(columnIndex); if(localDate == null) { return null; @@ -164,7 +165,8 @@ public Timestamp getTimestamp(int columnIndex) throws SQLException { @Override public InputStream getAsciiStream(int columnIndex) throws SQLException { checkClosed(); - return new ByteArrayInputStream(reader.getString(columnIndex).getBytes(StandardCharsets.UTF_8)); + //TODO: Add this to ClickHouseBinaryFormatReader + throw new SQLFeatureNotSupportedException("AsciiStream is not yet supported."); } @Override @@ -243,6 +245,7 @@ public byte[] getBytes(String columnLabel) throws SQLException { @Override public Date getDate(String columnLabel) throws SQLException { checkClosed(); + //TODO: Add this to ClickHouseBinaryFormatReader LocalDate localDate = reader.getLocalDate(columnLabel); if(localDate == null) { return null; @@ -273,7 +276,8 @@ public Timestamp getTimestamp(String columnLabel) throws SQLException { @Override public InputStream getAsciiStream(String columnLabel) throws SQLException { checkClosed(); - return new ByteArrayInputStream(reader.getString(columnLabel).getBytes(StandardCharsets.UTF_8)); + //TODO: Add this to ClickHouseBinaryFormatReader + throw new 
SQLFeatureNotSupportedException("AsciiStream is not yet supported."); } @Override From 7d905a02b8407589881f5ab5464f543737eb6c27 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Thu, 17 Oct 2024 17:45:29 -0400 Subject: [PATCH 19/21] Stable base for PreparedStatement --- .../com/clickhouse/jdbc/JdbcTypeMapping.java | 338 ------------------ .../jdbc/PreparedStatementImpl.java | 201 ++++++++--- .../com/clickhouse/jdbc/StatementImpl.java | 12 +- .../jdbc/PreparedStatementTest.java | 212 +++++++++++ .../com/clickhouse/jdbc/StatementTest.java | 5 - 5 files changed, 364 insertions(+), 404 deletions(-) delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcTypeMapping.java create mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/PreparedStatementTest.java diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcTypeMapping.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcTypeMapping.java deleted file mode 100644 index eab0509ce..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/JdbcTypeMapping.java +++ /dev/null @@ -1,338 +0,0 @@ -package com.clickhouse.jdbc; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.sql.Date; -import java.sql.JDBCType; -import java.sql.Time; -import java.sql.Timestamp; -import java.sql.Types; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.OffsetDateTime; -import java.time.ZonedDateTime; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; - -import com.clickhouse.data.ClickHouseColumn; -import com.clickhouse.data.ClickHouseDataType; -import com.clickhouse.data.ClickHouseUtils; - -/** - * This class defines mappings among {@link Types}, {@link JDBCType}, - * {@link ClickHouseDataType}, {@link ClickHouseColumn}, and {@link Class}. It - * does not impact serialization and deserialization, which is handled - * separately by {@link com.clickhouse.data.ClickHouseDataProcessor}. - */ -public class JdbcTypeMapping { - /** - * Gets corresponding {@link ClickHouseDataType} of the given {@link Types}. 
- * - * @param sqlType generic SQL types defined in JDBC - * @return non-null ClickHouse data type - */ - protected static ClickHouseDataType getDataType(int sqlType) { - ClickHouseDataType dataType; - - switch (sqlType) { - case Types.BOOLEAN: - dataType = ClickHouseDataType.UInt8; - break; - case Types.TINYINT: - dataType = ClickHouseDataType.Int8; - break; - case Types.SMALLINT: - dataType = ClickHouseDataType.Int16; - break; - case Types.INTEGER: - dataType = ClickHouseDataType.Int32; - break; - case Types.BIGINT: - dataType = ClickHouseDataType.Int64; - break; - case Types.NUMERIC: - dataType = ClickHouseDataType.Int256; - break; - case Types.FLOAT: - case Types.REAL: - dataType = ClickHouseDataType.Float32; - break; - case Types.DOUBLE: - dataType = ClickHouseDataType.Float64; - break; - case Types.DECIMAL: - dataType = ClickHouseDataType.Decimal; - break; - case Types.BIT: - case Types.BLOB: - case Types.BINARY: - case Types.CHAR: - case Types.CLOB: - case Types.JAVA_OBJECT: - case Types.LONGNVARCHAR: - case Types.LONGVARBINARY: - case Types.LONGVARCHAR: - case Types.NCHAR: - case Types.NCLOB: - case Types.NVARCHAR: - case Types.OTHER: - case Types.SQLXML: - case Types.VARBINARY: - case Types.VARCHAR: - dataType = ClickHouseDataType.String; - break; - case Types.DATE: - dataType = ClickHouseDataType.Date; - break; - case Types.TIME: - case Types.TIME_WITH_TIMEZONE: - case Types.TIMESTAMP: - case Types.TIMESTAMP_WITH_TIMEZONE: - dataType = ClickHouseDataType.DateTime; - break; - case Types.ARRAY: - dataType = ClickHouseDataType.Array; - break; - case Types.STRUCT: - dataType = ClickHouseDataType.Tuple; - break; - case Types.DATALINK: - case Types.DISTINCT: - case Types.REF: - case Types.REF_CURSOR: - case Types.ROWID: - case Types.NULL: - default: - dataType = ClickHouseDataType.Nothing; - break; - } - return dataType; - } - - /** - * Gets corresponding {@link Types} for the given Java class. - * - * @param javaClass non-null Java class - * @return generic SQL type defined in JDBC - */ - protected static int getSqlType(Class javaClass) { // and purpose(e.g. for read or write?) 
- final int sqlType; - if (javaClass == boolean.class || javaClass == Boolean.class) { - sqlType = Types.BOOLEAN; - } else if (javaClass == byte.class || javaClass == Byte.class) { - sqlType = Types.TINYINT; - } else if (javaClass == short.class || javaClass == Short.class) { - sqlType = Types.SMALLINT; - } else if (javaClass == int.class || javaClass == Integer.class) { - sqlType = Types.INTEGER; - } else if (javaClass == long.class || javaClass == Long.class) { - sqlType = Types.BIGINT; - } else if (javaClass == float.class || javaClass == Float.class) { - sqlType = Types.FLOAT; - } else if (javaClass == double.class || javaClass == Double.class) { - sqlType = Types.DOUBLE; - } else if (javaClass == BigInteger.class) { - sqlType = Types.NUMERIC; - } else if (javaClass == BigDecimal.class) { - sqlType = Types.DECIMAL; - } else if (javaClass == Date.class || javaClass == LocalDate.class) { - sqlType = Types.DATE; - } else if (javaClass == Time.class || javaClass == LocalTime.class) { - sqlType = Types.TIME; - } else if (javaClass == Timestamp.class || javaClass == LocalDateTime.class) { - sqlType = Types.TIMESTAMP; - } else if (javaClass == OffsetDateTime.class || javaClass == ZonedDateTime.class) { - sqlType = Types.TIMESTAMP_WITH_TIMEZONE; - } else if (javaClass == String.class || javaClass == byte[].class || Enum.class.isAssignableFrom(javaClass)) { - sqlType = Types.VARCHAR; - } else if (javaClass.isArray()) { // could be Nested type - sqlType = Types.ARRAY; - } else if (List.class.isAssignableFrom(javaClass) || Map.class.isAssignableFrom(javaClass)) { - sqlType = Types.STRUCT; - } else { - sqlType = Types.OTHER; - } - return sqlType; - } - - /** - * Converts {@link JDBCType} to ClickHouse column. - * - * @param jdbcType JDBC type - * @param scaleOrLength scale or length - * @return non-null ClickHouse column - */ - public static ClickHouseColumn toColumn(JDBCType jdbcType, int scaleOrLength) { - Integer type = jdbcType.getVendorTypeNumber(); - return toColumn(type != null ? type : Types.OTHER, scaleOrLength); - } - - /** - * Converts {@link Types} to ClickHouse column. - * - * @param sqlType generic SQL types defined in JDBC - * @param scaleOrLength scale or length - * @return non-null ClickHouse column - */ - public static ClickHouseColumn toColumn(int sqlType, int scaleOrLength) { - ClickHouseDataType dataType = getDataType(sqlType); - ClickHouseColumn column = null; - if (scaleOrLength > 0) { - if (sqlType == Types.BIT && scaleOrLength == 1) { - dataType = ClickHouseDataType.UInt8; - } else if (sqlType == Types.NUMERIC || sqlType == Types.DECIMAL) { - for (ClickHouseDataType t : new ClickHouseDataType[] {}) { - if (scaleOrLength <= t.getMaxScale() / 2) { - column = ClickHouseColumn.of("", t, false, t.getMaxPrecision() - t.getMaxScale(), - scaleOrLength); - break; - } - } - } else if (dataType == ClickHouseDataType.Date) { - if (scaleOrLength > 2) { - dataType = ClickHouseDataType.Date32; - } - } else if (dataType == ClickHouseDataType.DateTime) { - column = ClickHouseColumn.of("", ClickHouseDataType.DateTime64, false, 0, scaleOrLength); - } else if (dataType == ClickHouseDataType.String) { - column = ClickHouseColumn.of("", ClickHouseDataType.FixedString, false, scaleOrLength, 0); - } - } - - return column == null ? ClickHouseColumn.of("", dataType, false, false) : column; - } - - /** - * Converts {@link ClickHouseColumn} to {@link Class}. 
- * - * @param column non-null column definition - * @return non-null Java class - */ - public static Class toJavaClass(ClickHouseColumn column) { - Class clazz; - ClickHouseDataType type = column.getDataType(); - switch (type) { - case DateTime: - case DateTime32: - case DateTime64: - clazz = column.getTimeZone() != null ? OffsetDateTime.class : LocalDateTime.class; - break; - default: - clazz = type.getObjectClass(); - break; - } - return clazz; - } - - /** - * Converts {@link ClickHouseColumn} to native type. - * - * @param column non-null column definition - * @return non-null native type - */ - public static String toNativeType(ClickHouseColumn column) { - return column.getOriginalTypeName(); - } - - /** - * Converts {@link ClickHouseColumn} to generic SQL type defined in JDBC. - * - * @param column non-null column definition - * @return generic SQL type defined in JDBC - */ - public static int toSqlType(ClickHouseColumn column) { - int sqlType = Types.OTHER; - switch (column.getDataType()) { - case Bool: - sqlType = Types.BOOLEAN; - break; - case Int8: - sqlType = Types.TINYINT; - break; - case UInt8: - case Int16: - sqlType = Types.SMALLINT; - break; - case UInt16: - case Int32: - sqlType = Types.INTEGER; - break; - case UInt32: - case IntervalYear: - case IntervalQuarter: - case IntervalMonth: - case IntervalWeek: - case IntervalDay: - case IntervalHour: - case IntervalMinute: - case IntervalSecond: - case IntervalMicrosecond: - case IntervalMillisecond: - case IntervalNanosecond: - case Int64: - sqlType = Types.BIGINT; - break; - case UInt64: - case Int128: - case UInt128: - case Int256: - case UInt256: - sqlType = Types.NUMERIC; - break; - case Float32: - sqlType = Types.FLOAT; - break; - case Float64: - sqlType = Types.DOUBLE; - break; - case Decimal: - case Decimal32: - case Decimal64: - case Decimal128: - case Decimal256: - sqlType = Types.DECIMAL; - break; - case Date: - case Date32: - sqlType = Types.DATE; - break; - case DateTime: - case DateTime32: - case DateTime64: - sqlType = column.getTimeZone() != null ? 
Types.TIMESTAMP_WITH_TIMEZONE : Types.TIMESTAMP; - break; - case Enum8: - case Enum16: - case IPv4: - case IPv6: - case FixedString: - case JSON: - case Object: - case String: - case UUID: - sqlType = Types.VARCHAR; - break; - case Point: - case Ring: - case Polygon: - case MultiPolygon: - case Array: - sqlType = Types.ARRAY; - break; - case Map: // Map - case Nested: // Object[][] - case Tuple: // List - sqlType = Types.STRUCT; - break; - case Nothing: - sqlType = Types.NULL; - break; - default: - break; - } - - return sqlType; - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java index 0d5411e9a..8e5af3ebb 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java @@ -1,306 +1,397 @@ package com.clickhouse.jdbc; +import com.clickhouse.logging.Logger; +import com.clickhouse.logging.LoggerFactory; + import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; import java.net.URL; +import java.nio.charset.StandardCharsets; import java.sql.*; import java.util.Calendar; public class PreparedStatementImpl extends StatementImpl implements PreparedStatement, JdbcWrapper { - String sql; + private static final Logger log = LoggerFactory.getLogger(PreparedStatementImpl.class); + + String initialSql; + String [] sqlSegments; + Object [] parameters; public PreparedStatementImpl(ConnectionImpl connection, String sql) { super(connection); - this.sql = sql; + this.initialSql = sql; + //Split the sql string into an array of strings around question mark tokens + this.sqlSegments = sql.split("\\?"); + + //Create an array of objects to store the parameters + if (initialSql.contains("?")) { + this.parameters = new Object[sqlSegments.length]; + } else { + this.parameters = new Object[0]; + } + } + + private String compileSql() { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < sqlSegments.length; i++) { + sb.append(sqlSegments[i]); + if (i < parameters.length) { + sb.append(parameters[i]); + } + } + log.trace("Compiled SQL: {}", sb); + System.out.println("Compiled SQL: " + sb); + return sb.toString(); } @Override public ResultSet executeQuery() throws SQLException { - return null; + checkClosed(); + return executeQuery(compileSql()); } @Override public int executeUpdate() throws SQLException { - return 0; + checkClosed(); + return executeUpdate(compileSql()); } @Override public void setNull(int parameterIndex, int sqlType) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = null; } @Override public void setBoolean(int parameterIndex, boolean x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setByte(int parameterIndex, byte x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setShort(int parameterIndex, short x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setInt(int parameterIndex, int x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setLong(int parameterIndex, long x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setFloat(int parameterIndex, float x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override 
public void setDouble(int parameterIndex, double x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setString(int parameterIndex, String x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = "'" + x + "'"; } @Override public void setBytes(int parameterIndex, byte[] x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = new String(x, StandardCharsets.UTF_8); } @Override public void setDate(int parameterIndex, Date x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setTime(int parameterIndex, Time x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("AsciiStream is not supported."); } @Override public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("UnicodeStream is not supported."); } @Override public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("BinaryStream is not supported."); } @Override public void clearParameters() throws SQLException { - + checkClosed(); + if (initialSql.contains("?")) { + this.parameters = new Object[sqlSegments.length]; + } else { + this.parameters = new Object[0]; + } } @Override public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException { - + checkClosed(); + setObject(parameterIndex, x, targetSqlType, 0); } @Override public void setObject(int parameterIndex, Object x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Object is not supported."); } @Override public boolean execute() throws SQLException { - return false; + checkClosed(); + return execute(compileSql()); } @Override public void addBatch() throws SQLException { - + checkClosed(); + addBatch(compileSql()); } @Override public void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("CharacterStream is not supported."); } @Override public void setRef(int parameterIndex, Ref x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Ref is not supported."); } @Override public void setBlob(int parameterIndex, Blob x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Blob is not supported."); } @Override public void setClob(int parameterIndex, Clob x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Clob is not supported."); } @Override public void setArray(int parameterIndex, Array x) throws SQLException { - + checkClosed(); } @Override public ResultSetMetaData getMetaData() throws SQLException { + checkClosed(); return null; } @Override public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } 
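A note on how value binding works in this WIP PreparedStatementImpl: the constructor splits the statement text on '?' into sqlSegments, each typed setter stores its value in the matching slot of parameters, and compileSql() interleaves the two arrays so the value is spliced into the query text that the plain Statement path then executes (there is no server-side parameter binding yet). The sketch below illustrates that interleaving; the class and method names are hypothetical and not part of the driver.

// Illustrative sketch of the segment/parameter interleaving performed by compileSql().
// Names are hypothetical; only the splitting and appending logic mirrors the patch above.
public class InlineSqlSketch {
    static String compile(String sql, Object... params) {
        String[] segments = sql.split("\\?");      // same split the constructor performs
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < segments.length; i++) {
            sb.append(segments[i]);
            if (i < params.length) {
                sb.append(params[i]);              // values are appended as literal text
            }
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        // setString() in the patch stores its value already wrapped in single quotes,
        // so a bound string reaches the interleaving step pre-quoted.
        System.out.println(compile("SELECT ?, ?", 42, "'test'"));   // prints: SELECT 42, 'test'
    }
}

This client-side splicing is also why the stream, blob and clob setters in this revision throw SQLFeatureNotSupportedException instead of attempting to inline their payloads.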
@Override public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = null; } @Override public void setURL(int parameterIndex, URL x) throws SQLException { - + checkClosed(); + parameters[parameterIndex - 1] = x; } @Override public ParameterMetaData getParameterMetaData() throws SQLException { + checkClosed(); return null; } @Override public void setRowId(int parameterIndex, RowId x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("RowId is not supported."); } @Override public void setNString(int parameterIndex, String value) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("NString is not supported."); } @Override public void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("NCharacterStream is not supported."); } @Override public void setNClob(int parameterIndex, NClob value) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("NClob is not supported."); } @Override public void setClob(int parameterIndex, Reader reader, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Clob is not supported."); } @Override public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Blob is not supported."); } @Override public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("NClob is not supported."); } @Override public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("SQLXML is not supported."); } @Override public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Object is not supported."); } @Override public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("AsciiStream is not supported."); } @Override public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("BinaryStream is not supported."); } @Override public void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("CharacterStream is not supported."); } @Override public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("AsciiStream is not supported."); } @Override public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("BinaryStream is not supported."); } @Override public void setCharacterStream(int parameterIndex, 
Reader reader) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("CharacterStream is not supported."); } @Override public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("NCharacterStream is not supported."); } @Override public void setClob(int parameterIndex, Reader reader) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Clob is not supported."); } @Override public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("Blob is not supported."); } @Override public void setNClob(int parameterIndex, Reader reader) throws SQLException { - + checkClosed(); + throw new SQLFeatureNotSupportedException("NClob is not supported."); } @Override public void setObject(int parameterIndex, Object x, SQLType targetSqlType, int scaleOrLength) throws SQLException { + checkClosed(); PreparedStatement.super.setObject(parameterIndex, x, targetSqlType, scaleOrLength); } @Override public void setObject(int parameterIndex, Object x, SQLType targetSqlType) throws SQLException { + checkClosed(); PreparedStatement.super.setObject(parameterIndex, x, targetSqlType); } @Override public long executeLargeUpdate() throws SQLException { + checkClosed(); return PreparedStatement.super.executeLargeUpdate(); } } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java index 641dde355..e761a6092 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java @@ -22,7 +22,7 @@ public class StatementImpl implements Statement, JdbcWrapper { ConnectionImpl connection; private int queryTimeout; - private boolean closed; + protected boolean closed; private ResultSetImpl currentResultSet; private OperationMetrics metrics; private List batch; @@ -36,17 +36,17 @@ public StatementImpl(ConnectionImpl connection) { this.batch = new ArrayList<>(); } - private void checkClosed() throws SQLException { + protected void checkClosed() throws SQLException { if (closed) { throw new SQLException("Statement is closed"); } } - private enum StatementType { + protected enum StatementType { SELECT, INSERT, DELETE, UPDATE, CREATE, DROP, ALTER, TRUNCATE, USE, SHOW, DESCRIBE, EXPLAIN, SET, KILL, OTHER } - private StatementType parseStatementType(String sql) { + protected StatementType parseStatementType(String sql) { String[] tokens = sql.trim().split("\\s+"); if (tokens.length == 0) { return StatementType.OTHER; @@ -71,7 +71,7 @@ private StatementType parseStatementType(String sql) { }; } - private String parseTableName(String sql) { + protected String parseTableName(String sql) { String[] tokens = sql.trim().split("\\s+"); if (tokens.length < 3) { return null; @@ -80,7 +80,7 @@ private String parseTableName(String sql) { return tokens[2]; } - private static String parseJdbcEscapeSyntax(String sql) { + protected static String parseJdbcEscapeSyntax(String sql) { log.trace("Original SQL: {}", sql); // Replace {d 'YYYY-MM-DD'} with corresponding SQL date format sql = sql.replaceAll("\\{d '([^']*)'\\}", "toDate('$1')"); diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/PreparedStatementTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/PreparedStatementTest.java new file mode 100644 index 
000000000..e73297c91 --- /dev/null +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/PreparedStatementTest.java @@ -0,0 +1,212 @@ +package com.clickhouse.jdbc; + +import org.testng.annotations.Test; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.Types; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + +public class PreparedStatementTest extends JdbcIntegrationTest { + + @Test + public void testSetNull() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setNull(1, Types.INTEGER); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertNull(rs.getObject(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetBoolean() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setBoolean(1, true); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertTrue(rs.getBoolean(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetByte() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setByte(1, (byte) 1); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals((byte) 1, rs.getByte(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetShort() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setShort(1, (short) 1); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals((short) 1, rs.getShort(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetInt() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setInt(1, 1); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals(1, rs.getInt(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetLong() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setLong(1, 1L); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals(1L, rs.getLong(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetFloat() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setFloat(1, 1.0f); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals(1.0f, rs.getFloat(1), 0.0f); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetDouble() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setDouble(1, 1.0); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals(1.0, rs.getDouble(1), 0.0); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetString() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = 
conn.prepareStatement("SELECT ?")) { + stmt.setString(1, "test"); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals("test", rs.getString(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetBytes() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setBytes(1, new byte[] { 1, 2, 3 }); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals(new byte[] { 1, 2, 3 }, rs.getBytes(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetDate() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setDate(1, java.sql.Date.valueOf("2021-01-01")); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals(java.sql.Date.valueOf("2021-01-01"), rs.getDate(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetTime() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setTime(1, java.sql.Time.valueOf("12:34:56")); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals(java.sql.Time.valueOf("12:34:56"), rs.getTime(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testSetTimestamp() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setTimestamp(1, java.sql.Timestamp.valueOf("2021-01-01 12:34:56")); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals(java.sql.Timestamp.valueOf("2021-01-01 12:34:56"), rs.getTimestamp(1)); + assertFalse(rs.next()); + } + } + } + } + + @Test + public void testBigDecimal() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (PreparedStatement stmt = conn.prepareStatement("SELECT ?")) { + stmt.setBigDecimal(1, java.math.BigDecimal.valueOf(1.0)); + try (ResultSet rs = stmt.executeQuery()) { + assertTrue(rs.next()); + assertEquals(java.math.BigDecimal.valueOf(1.0), rs.getBigDecimal(1)); + assertFalse(rs.next()); + } + } + } + } +} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/StatementTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/StatementTest.java index 178dc2095..26e8517b5 100644 --- a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/StatementTest.java +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/StatementTest.java @@ -1,7 +1,5 @@ package com.clickhouse.jdbc; -import com.clickhouse.client.BaseIntegrationTest; -import com.clickhouse.client.ClickHouseServerForTest; import org.testng.annotations.Test; import java.sql.Connection; @@ -9,10 +7,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.sql.Time; import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; From ddde73b059d16fd6dd453b0d28c2a51ff79be07d Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Fri, 18 Oct 2024 05:57:16 -0400 Subject: [PATCH 20/21] PR feedback --- .../clickhouse/jdbc/PreparedStatementImpl.java | 17 ++++++++--------- .../java/com/clickhouse/jdbc/StatementImpl.java | 10 +++++----- 2 files changed, 13 insertions(+), 14 deletions(-) diff --git 
a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java index 8e5af3ebb..769981acc 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/PreparedStatementImpl.java @@ -1,7 +1,7 @@ package com.clickhouse.jdbc; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.InputStream; import java.io.Reader; @@ -12,19 +12,19 @@ import java.util.Calendar; public class PreparedStatementImpl extends StatementImpl implements PreparedStatement, JdbcWrapper { - private static final Logger log = LoggerFactory.getLogger(PreparedStatementImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(PreparedStatementImpl.class); - String initialSql; + String originalSql; String [] sqlSegments; Object [] parameters; public PreparedStatementImpl(ConnectionImpl connection, String sql) { super(connection); - this.initialSql = sql; + this.originalSql = sql; //Split the sql string into an array of strings around question mark tokens this.sqlSegments = sql.split("\\?"); //Create an array of objects to store the parameters - if (initialSql.contains("?")) { + if (originalSql.contains("?")) { this.parameters = new Object[sqlSegments.length]; } else { this.parameters = new Object[0]; @@ -39,8 +39,7 @@ private String compileSql() { sb.append(parameters[i]); } } - log.trace("Compiled SQL: {}", sb); - System.out.println("Compiled SQL: " + sb); + LOG.trace("Compiled SQL: {}", sb); return sb.toString(); } @@ -161,7 +160,7 @@ public void setBinaryStream(int parameterIndex, InputStream x, int length) throw @Override public void clearParameters() throws SQLException { checkClosed(); - if (initialSql.contains("?")) { + if (originalSql.contains("?")) { this.parameters = new Object[sqlSegments.length]; } else { this.parameters = new Object[0]; diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java index e761a6092..51d820d60 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/StatementImpl.java @@ -4,8 +4,8 @@ import com.clickhouse.client.api.metrics.OperationMetrics; import com.clickhouse.client.api.metrics.ServerMetrics; import com.clickhouse.client.api.query.QueryResponse; -import com.clickhouse.logging.Logger; -import com.clickhouse.logging.LoggerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.sql.Connection; import java.sql.ResultSet; @@ -18,7 +18,7 @@ import java.util.concurrent.TimeUnit; public class StatementImpl implements Statement, JdbcWrapper { - private static final Logger log = LoggerFactory.getLogger(StatementImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(StatementImpl.class); ConnectionImpl connection; private int queryTimeout; @@ -81,7 +81,7 @@ protected String parseTableName(String sql) { } protected static String parseJdbcEscapeSyntax(String sql) { - log.trace("Original SQL: {}", sql); + LOG.trace("Original SQL: {}", sql); // Replace {d 'YYYY-MM-DD'} with corresponding SQL date format sql = sql.replaceAll("\\{d '([^']*)'\\}", "toDate('$1')"); @@ -95,7 +95,7 @@ protected static String parseJdbcEscapeSyntax(String sql) { //sql = sql.replaceAll("\\{escape '([^']*)'\\}", 
"'$1'"); // Add more replacements as needed for other JDBC escape sequences - log.trace("Parsed SQL: {}", sql); + LOG.trace("Parsed SQL: {}", sql); return sql; } From 1241e17c1764308f9a38f4d69c1f88720168e797 Mon Sep 17 00:00:00 2001 From: Paultagoras Date: Tue, 29 Oct 2024 18:12:20 -0400 Subject: [PATCH 21/21] Updating the JDBC metadata, and test cases --- .../clickhouse/jdbc/ClickHouseSqlUtils.java | 73 ----- .../com/clickhouse/jdbc/ClickHouseStruct.java | 32 -- .../com/clickhouse/jdbc/ConnectionImpl.java | 49 +++- .../main/java/com/clickhouse/jdbc/Driver.java | 25 +- .../com/clickhouse/jdbc/ResultSetImpl.java | 7 +- .../clickhouse/jdbc/internal/JdbcUtils.java | 55 ++++ .../jdbc/metadata/DatabaseMetaData.java | 276 ++++++++++++------ .../jdbc/metadata/ParameterMetaData.java | 12 +- .../jdbc/metadata/ResultSetMetaData.java | 50 +++- .../jdbc/metadata/DatabaseMetaDataTest.java | 110 +++++++ .../jdbc/metadata/ParameterMetaDataTest.java | 80 +++++ .../jdbc/metadata/ResultSetMetaDataTest.java | 143 +++++++++ 12 files changed, 688 insertions(+), 224 deletions(-) delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseSqlUtils.java delete mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseStruct.java create mode 100644 clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcUtils.java create mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/DatabaseMetaDataTest.java create mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/ParameterMetaDataTest.java create mode 100644 clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/ResultSetMetaDataTest.java diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseSqlUtils.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseSqlUtils.java deleted file mode 100644 index c23ba5638..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseSqlUtils.java +++ /dev/null @@ -1,73 +0,0 @@ -package com.clickhouse.jdbc; - -public final class ClickHouseSqlUtils { - public static boolean isQuote(char ch) { - return ch == '"' || ch == '\'' || ch == '`'; - } - - /** - * Escape quotes in given string. - * - * @param str string - * @param quote quote to escape - * @return escaped string - */ - public static String escape(String str, char quote) { - if (str == null) { - return str; - } - - int len = str.length(); - StringBuilder sb = new StringBuilder(len + 10).append(quote); - - for (int i = 0; i < len; i++) { - char ch = str.charAt(i); - if (ch == quote || ch == '\\') { - sb.append('\\'); - } - sb.append(ch); - } - - return sb.append(quote).toString(); - } - - /** - * Unescape quoted string. 
- * - * @param str quoted string - * @return unescaped string - */ - public static String unescape(String str) { - if (str == null || str.isEmpty()) { - return str; - } - - int len = str.length(); - char quote = str.charAt(0); - if (!isQuote(quote) || quote != str.charAt(len - 1)) { // not a quoted string - return str; - } - - StringBuilder sb = new StringBuilder(len = len - 1); - for (int i = 1; i < len; i++) { - char ch = str.charAt(i); - - if (++i >= len) { - sb.append(ch); - } else { - char nextChar = str.charAt(i); - if (ch == '\\' || (ch == quote && nextChar == quote)) { - sb.append(nextChar); - } else { - sb.append(ch); - i--; - } - } - } - - return sb.toString(); - } - - private ClickHouseSqlUtils() { - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseStruct.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseStruct.java deleted file mode 100644 index af4941e78..000000000 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ClickHouseStruct.java +++ /dev/null @@ -1,32 +0,0 @@ -package com.clickhouse.jdbc; - -import java.sql.SQLException; -import java.sql.Struct; -import java.util.Map; - -import com.clickhouse.data.ClickHouseChecker; - -public class ClickHouseStruct implements Struct { - private final String typeName; - private final Object[] values; - - protected ClickHouseStruct(String typeName, Object[] values) { - this.typeName = ClickHouseChecker.nonNull(typeName, "SQLTypeName"); - this.values = ClickHouseChecker.nonNull(values, "values"); - } - - @Override - public String getSQLTypeName() throws SQLException { - return typeName; - } - - @Override - public Object[] getAttributes() throws SQLException { - return values; - } - - @Override - public Object[] getAttributes(Map> map) throws SQLException { - return getAttributes(); - } -} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java index 6fddec203..0b01e309d 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ConnectionImpl.java @@ -1,6 +1,8 @@ package com.clickhouse.jdbc; import com.clickhouse.client.api.Client; +import com.clickhouse.client.api.data_formats.ClickHouseBinaryFormatReader; +import com.clickhouse.client.api.query.QueryResponse; import com.clickhouse.jdbc.internal.JdbcConfiguration; import com.clickhouse.logging.Logger; import com.clickhouse.logging.LoggerFactory; @@ -9,10 +11,12 @@ import java.util.Map; import java.util.Properties; import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; public class ConnectionImpl implements Connection, JdbcWrapper { private static final Logger log = LoggerFactory.getLogger(ConnectionImpl.class); + protected final String url; protected final Client client; protected final JdbcConfiguration config; @@ -21,6 +25,7 @@ public class ConnectionImpl implements Connection, JdbcWrapper { private String schema; public ConnectionImpl(String url, Properties info) { + this.url = url; this.config = new JdbcConfiguration(url, info); this.client = new Client.Builder() .addEndpoint(config.getProtocol() + "://" + config.getHost() + ":" + config.getPort()) @@ -31,6 +36,46 @@ public ConnectionImpl(String url, Properties info) { .build(); } + public String getUser() { + return config.getUser(); + } + + public String getURL() { + return url; + } + + private String getServerVersion() throws SQLException { + try (QueryResponse response = 
client.query("SELECT version()").get(30, TimeUnit.SECONDS)) { + // Create a reader to access the data in a convenient way + ClickHouseBinaryFormatReader reader = client.newBinaryFormatReader(response); + // Read the next record from stream and parse it as a string + reader.next(); + return reader.getString(0); + } catch (Exception e) { + log.error("Failed to retrieve server version.", e); + throw new SQLException("Failed to retrieve server version.", e); + } + } + + public int getMajorVersion() throws SQLException { + String version = getServerVersion(); + try { + return Integer.parseInt(version.split("\\.")[0]); + } catch (NumberFormatException e) { + log.error("Failed to parse major version from server version: " + version, e); + throw new SQLException("Failed to parse major version from server version: " + version); + } + } + + public int getMinorVersion() throws SQLException { + String version = getServerVersion(); + try { + return Integer.parseInt(version.split("\\.")[1]); + } catch (NumberFormatException e) { + log.error("Failed to parse minor version from server version: " + version, e); + throw new SQLException("Failed to parse minor version from server version: " + version); + } + } @Override public Statement createStatement() throws SQLException { @@ -95,7 +140,7 @@ public boolean isClosed() throws SQLException { @Override public DatabaseMetaData getMetaData() throws SQLException { checkOpen(); - return new com.clickhouse.jdbc.metadata.DatabaseMetaData(); + return new com.clickhouse.jdbc.metadata.DatabaseMetaData(this); } @Override @@ -109,7 +154,7 @@ public void setReadOnly(boolean readOnly) throws SQLException { @Override public boolean isReadOnly() throws SQLException { checkOpen(); - return true; + return false; } @Override diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java index 0c3a04ba1..c1f3bc3b7 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/Driver.java @@ -12,8 +12,19 @@ */ public class Driver implements java.sql.Driver { private static final Logger log = LoggerFactory.getLogger(Driver.class); + public static final String driverVersion; static { + String tempDriverVersion = Driver.class.getPackage().getImplementationVersion(); + //If the version is not available, set it to 1.0 + if (tempDriverVersion == null || tempDriverVersion.isEmpty()) { + log.warn("ClickHouse JDBC driver version is not available"); + tempDriverVersion = "1.0"; + } + + driverVersion = tempDriverVersion; + log.info("ClickHouse JDBC driver version: {}", driverVersion); + try { DriverManager.registerDriver(new Driver()); } catch (SQLException e) { @@ -40,14 +51,24 @@ public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws return new JdbcConfiguration(url, info).getPropertyInfo(); } + public static int getDriverMajorVersion() { + return Integer.parseInt(driverVersion.split("\\.")[0]); + } + @Override public int getMajorVersion() { - return 1; + //Convert the version string to an integer + return Integer.parseInt(driverVersion.split("\\.")[0]); + } + + public static int getDriverMinorVersion() { + return Integer.parseInt(driverVersion.split("\\.")[1]); } @Override public int getMinorVersion() { - return 0; + //Convert the version string to an integer + return Integer.parseInt(driverVersion.split("\\.")[1]); } @Override diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java 
b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java index d6fd533f7..88b4798ad 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/ResultSetImpl.java @@ -15,6 +15,7 @@ import java.util.Map; import com.clickhouse.client.api.data_formats.ClickHouseBinaryFormatReader; +import com.clickhouse.client.api.metadata.TableSchema; import com.clickhouse.client.api.query.QueryResponse; import com.clickhouse.logging.Logger; import com.clickhouse.logging.LoggerFactory; @@ -29,7 +30,7 @@ public class ResultSetImpl implements ResultSet, JdbcWrapper { public ResultSetImpl(QueryResponse response, ClickHouseBinaryFormatReader reader) { this.response = response; this.reader = reader; - this.metaData = new com.clickhouse.jdbc.metadata.ResultSetMetaData(); + this.metaData = new com.clickhouse.jdbc.metadata.ResultSetMetaData(this); this.closed = false; } @@ -39,6 +40,10 @@ private void checkClosed() throws SQLException { } } + public TableSchema getSchema() { + return reader.getSchema(); + } + @Override public boolean next() throws SQLException { diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcUtils.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcUtils.java new file mode 100644 index 000000000..d60e2dea1 --- /dev/null +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/internal/JdbcUtils.java @@ -0,0 +1,55 @@ +package com.clickhouse.jdbc.internal; + +import com.clickhouse.data.ClickHouseDataType; + +import java.sql.Types; +import java.util.HashMap; +import java.util.Map; + +public class JdbcUtils { + //Define a map to store the mapping between ClickHouse data types and SQL data types + private static final Map<ClickHouseDataType, Integer> CLICKHOUSE_TO_SQL_TYPE_MAP = generateTypeMap(); + private static Map<ClickHouseDataType, Integer> generateTypeMap() { + Map<ClickHouseDataType, Integer> map = new HashMap<>(); + map.put(ClickHouseDataType.Int8, Types.TINYINT); + map.put(ClickHouseDataType.UInt8, Types.TINYINT); + map.put(ClickHouseDataType.Int16, Types.SMALLINT); + map.put(ClickHouseDataType.UInt16, Types.SMALLINT); + map.put(ClickHouseDataType.Int32, Types.INTEGER); + map.put(ClickHouseDataType.UInt32, Types.INTEGER); + map.put(ClickHouseDataType.Int64, Types.BIGINT); + map.put(ClickHouseDataType.UInt64, Types.BIGINT); + map.put(ClickHouseDataType.Float32, Types.FLOAT); + map.put(ClickHouseDataType.Float64, Types.DOUBLE); + map.put(ClickHouseDataType.Decimal32, Types.DECIMAL); + map.put(ClickHouseDataType.Decimal64, Types.DECIMAL); + map.put(ClickHouseDataType.Decimal128, Types.DECIMAL); + map.put(ClickHouseDataType.String, Types.CHAR); + map.put(ClickHouseDataType.FixedString, Types.CHAR); + map.put(ClickHouseDataType.Enum8, Types.VARCHAR); + map.put(ClickHouseDataType.Enum16, Types.VARCHAR); + map.put(ClickHouseDataType.Date, Types.DATE); + map.put(ClickHouseDataType.DateTime, Types.TIMESTAMP); + map.put(ClickHouseDataType.Array, Types.ARRAY); + map.put(ClickHouseDataType.Nested, Types.ARRAY); + map.put(ClickHouseDataType.Map, Types.JAVA_OBJECT); + return map; + } + + public static int convertToSqlType(ClickHouseDataType clickhouseType) { + if (clickhouseType == null) { + return Types.NULL; + } + + return CLICKHOUSE_TO_SQL_TYPE_MAP.getOrDefault(clickhouseType, Types.OTHER); + } + + public static String generateSqlTypeEnum(String columnName) { + StringBuilder sql = new StringBuilder("multiIf("); + for (ClickHouseDataType type : CLICKHOUSE_TO_SQL_TYPE_MAP.keySet()) { + sql.append(columnName).append(" == '").append(type.name()).append("', 
").append(CLICKHOUSE_TO_SQL_TYPE_MAP.get(type)).append(", "); + } + sql.append(Types.OTHER + ")"); + return sql.toString(); + } +} diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/DatabaseMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/DatabaseMetaData.java index fcbf5da9b..8be2b9131 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/DatabaseMetaData.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/DatabaseMetaData.java @@ -4,43 +4,53 @@ import java.sql.ResultSet; import java.sql.RowIdLifetime; import java.sql.SQLException; +import java.sql.Statement; +import com.clickhouse.jdbc.ConnectionImpl; +import com.clickhouse.jdbc.Driver; import com.clickhouse.jdbc.JdbcWrapper; +import com.clickhouse.jdbc.ResultSetImpl; +import com.clickhouse.jdbc.internal.JdbcUtils; import com.clickhouse.logging.Logger; import com.clickhouse.logging.LoggerFactory; public class DatabaseMetaData implements java.sql.DatabaseMetaData, JdbcWrapper { private static final Logger log = LoggerFactory.getLogger(DatabaseMetaData.class); + ConnectionImpl connection; + + public DatabaseMetaData(ConnectionImpl connection) { + this.connection = connection; + } @Override public boolean allProceduresAreCallable() throws SQLException { - return false; + return true; } @Override public boolean allTablesAreSelectable() throws SQLException { - return false; + return true; } @Override public String getURL() throws SQLException { - return ""; + return connection.getURL(); } @Override public String getUserName() throws SQLException { - return ""; + return connection.getUser(); } @Override public boolean isReadOnly() throws SQLException { - return false; + return connection.isReadOnly(); } @Override public boolean nullsAreSortedHigh() throws SQLException { - return false; + return true; } @Override @@ -60,7 +70,7 @@ public boolean nullsAreSortedAtEnd() throws SQLException { @Override public String getDatabaseProductName() throws SQLException { - return ""; + return "ClickHouse"; } @Override @@ -70,22 +80,22 @@ public String getDatabaseProductVersion() throws SQLException { @Override public String getDriverName() throws SQLException { - return ""; + return "ClickHouse JDBC Driver"; } @Override public String getDriverVersion() throws SQLException { - return ""; + return Driver.driverVersion; } @Override public int getDriverMajorVersion() { - return 0; + return Driver.getDriverMajorVersion(); } @Override public int getDriverMinorVersion() { - return 0; + return Driver.getDriverMinorVersion(); } @Override @@ -100,7 +110,7 @@ public boolean usesLocalFilePerTable() throws SQLException { @Override public boolean supportsMixedCaseIdentifiers() throws SQLException { - return false; + return true; } @Override @@ -115,12 +125,12 @@ public boolean storesLowerCaseIdentifiers() throws SQLException { @Override public boolean storesMixedCaseIdentifiers() throws SQLException { - return false; + return true; } @Override public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { - return false; + return true; } @Override @@ -135,42 +145,56 @@ public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { @Override public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { - return false; + return true; } @Override public String getIdentifierQuoteString() throws SQLException { - return ""; + return "`"; } @Override public String getSQLKeywords() throws SQLException { - return ""; + return 
"APPLY,ASOF,ATTACH,CLUSTER,DATABASE,DATABASES,DETACH," + + "DICTIONARY,DICTIONARIES,ILIKE,INF,LIMIT,LIVE,KILL,MATERIALIZED," + + "NAN,OFFSET,OPTIMIZE,OUTFILE,POLICY,PREWHERE,PROFILE,QUARTER,QUOTA," + + "RENAME,REPLACE,SAMPLE,SETTINGS,SHOW,TABLES,TIES,TOP,TOTALS,TRUNCATE,USE,WATCH,WEEK"; } @Override public String getNumericFunctions() throws SQLException { - return ""; + // took from below URLs(not from system.functions): + // https://clickhouse.com/docs/en/sql-reference/functions/arithmetic-functions/ + // https://clickhouse.com/docs/en/sql-reference/functions/math-functions/ + return "abs,acos,acosh,asin,asinh,atan,atan2,atanh,cbrt,cos,cosh,divide,e,erf,erfc,exp,exp10,exp2,gcd,hypot,intDiv,intDivOrZero,intExp10,intExp2,lcm,lgamma,ln,log,log10,log1p,log2,minus,modulo,moduloOrZero,multiply,negate,pi,plus,pow,power,sign,sin,sinh,sqrt,tan,tgamma"; } @Override public String getStringFunctions() throws SQLException { - return ""; + // took from below URLs(not from system.functions): + // https://clickhouse.com/docs/en/sql-reference/functions/string-functions/ + // https://clickhouse.com/docs/en/sql-reference/functions/string-search-functions/ + // https://clickhouse.com/docs/en/sql-reference/functions/string-replace-functions/ + return "appendTrailingCharIfAbsent,base64Decode,base64Encode,char_length,CHAR_LENGTH,character_length,CHARACTER_LENGTH,concat,concatAssumeInjective,convertCharset,countMatches,countSubstrings,countSubstringsCaseInsensitive,countSubstringsCaseInsensitiveUTF8,CRC32,CRC32IEEE,CRC64,decodeXMLComponent,empty,encodeXMLComponent,endsWith,extract,extractAll,extractAllGroupsHorizontal,extractAllGroupsVertical,extractTextFromHTML ,format,ilike,isValidUTF8,lcase,leftPad,leftPadUTF8,length,lengthUTF8,like,locate,lower,lowerUTF8,match,mid,multiFuzzyMatchAllIndices,multiFuzzyMatchAny,multiFuzzyMatchAnyIndex,multiMatchAllIndices,multiMatchAny,multiMatchAnyIndex,multiSearchAllPositions,multiSearchAllPositionsUTF8,multiSearchAny,multiSearchFirstIndex,multiSearchFirstPosition,ngramDistance,ngramSearch,normalizedQueryHash,normalizeQuery,notEmpty,notLike,position,positionCaseInsensitive,positionCaseInsensitiveUTF8,positionUTF8,regexpQuoteMeta,repeat,replace,replaceAll,replaceOne,replaceRegexpAll,replaceRegexpOne,reverse,reverseUTF8,rightPad,rightPadUTF8,startsWith,substr,substring,substringUTF8,tokens,toValidUTF8,trim,trimBoth,trimLeft,trimRight,tryBase64Decode,ucase,upper,upperUTF8"; } @Override public String getSystemFunctions() throws SQLException { - return ""; + // took from below URL(not from system.functions): + // https://clickhouse.com/docs/en/sql-reference/functions/other-functions/ + return 
"bar,basename,blockNumber,blockSerializedSize,blockSize,buildId,byteSize,countDigits,currentDatabase,currentProfiles,currentRoles,currentUser,defaultProfiles,defaultRoles,defaultValueOfArgumentType,defaultValueOfTypeName,dumpColumnStructure,enabledProfiles,enabledRoles,errorCodeToName,filesystemAvailable,filesystemCapacity,filesystemFree,finalizeAggregation,formatReadableQuantity,formatReadableSize,formatReadableTimeDelta,FQDN,getMacro,getServerPort,getSetting,getSizeOfEnumType,greatest,hasColumnInTable,hostName,identity,ifNotFinite,ignore,indexHint,initializeAggregation,initialQueryID,isConstant,isDecimalOverflow,isFinite,isInfinite,isNaN,joinGet,least,MACNumToString,MACStringToNum,MACStringToOUI,materialize,modelEvaluate,neighbor,queryID,randomFixedString,randomPrintableASCII,randomString,randomStringUTF8,replicate,rowNumberInAllBlocks,rowNumberInBlock,runningAccumulate,runningConcurrency,runningDifference,runningDifferenceStartingWithFirstValue,shardCount ,shardNum,sleep,sleepEachRow,tcpPort,throwIf,toColumnTypeName,toTypeName,transform,uptime,version,visibleWidth"; } @Override public String getTimeDateFunctions() throws SQLException { - return ""; + // took from below URL(not from system.functions): + // https://clickhouse.com/docs/en/sql-reference/functions/date-time-functions/ + return "addDays,addHours,addMinutes,addMonths,addQuarters,addSeconds,addWeeks,addYears,date_add,date_diff,date_sub,date_trunc,dateName,formatDateTime,FROM_UNIXTIME,fromModifiedJulianDay,fromModifiedJulianDayOrNull,now,subtractDays,subtractHours,subtractMinutes,subtractMonths,subtractQuarters,subtractSeconds,subtractWeeks,subtractYears,timeSlot,timeSlots,timestamp_add,timestamp_sub,timeZone,timeZoneOf,timeZoneOffset,today,toDayOfMonth,toDayOfWeek,toDayOfYear,toHour,toISOWeek,toISOYear,toMinute,toModifiedJulianDay,toModifiedJulianDayOrNull,toMonday,toMonth,toQuarter,toRelativeDayNum,toRelativeHourNum,toRelativeMinuteNum,toRelativeMonthNum,toRelativeQuarterNum,toRelativeSecondNum,toRelativeWeekNum,toRelativeYearNum,toSecond,toStartOfDay,toStartOfFifteenMinutes,toStartOfFiveMinute,toStartOfHour,toStartOfInterval,toStartOfISOYear,toStartOfMinute,toStartOfMonth,toStartOfQuarter,toStartOfSecond,toStartOfTenMinutes,toStartOfWeek,toStartOfYear,toTime,toTimeZone,toUnixTimestamp,toWeek,toYear,toYearWeek,toYYYYMM,toYYYYMMDD,toYYYYMMDDhhmmss,yesterday"; } @Override public String getSearchStringEscape() throws SQLException { - return ""; + return "\\"; } @Override @@ -180,22 +204,22 @@ public String getExtraNameCharacters() throws SQLException { @Override public boolean supportsAlterTableWithAddColumn() throws SQLException { - return false; + return true; } @Override public boolean supportsAlterTableWithDropColumn() throws SQLException { - return false; + return true; } @Override public boolean supportsColumnAliasing() throws SQLException { - return false; + return true; } @Override public boolean nullPlusNonNullIsNull() throws SQLException { - return false; + return true; } @Override @@ -210,7 +234,7 @@ public boolean supportsConvert(int fromType, int toType) throws SQLException { @Override public boolean supportsTableCorrelationNames() throws SQLException { - return false; + return true; } @Override @@ -220,32 +244,32 @@ public boolean supportsDifferentTableCorrelationNames() throws SQLException { @Override public boolean supportsExpressionsInOrderBy() throws SQLException { - return false; + return true; } @Override public boolean supportsOrderByUnrelated() throws SQLException { - return false; + return true; } @Override 
public boolean supportsGroupBy() throws SQLException { - return false; + return true; } @Override public boolean supportsGroupByUnrelated() throws SQLException { - return false; + return true; } @Override public boolean supportsGroupByBeyondSelect() throws SQLException { - return false; + return true; } @Override public boolean supportsLikeEscapeClause() throws SQLException { - return false; + return true; } @Override @@ -260,17 +284,17 @@ public boolean supportsMultipleTransactions() throws SQLException { @Override public boolean supportsNonNullableColumns() throws SQLException { - return false; + return true; } @Override public boolean supportsMinimumSQLGrammar() throws SQLException { - return false; + return true; } @Override public boolean supportsCoreSQLGrammar() throws SQLException { - return false; + return true; } @Override @@ -280,7 +304,7 @@ public boolean supportsExtendedSQLGrammar() throws SQLException { @Override public boolean supportsANSI92EntryLevelSQL() throws SQLException { - return false; + return true; } @Override @@ -300,92 +324,92 @@ public boolean supportsIntegrityEnhancementFacility() throws SQLException { @Override public boolean supportsOuterJoins() throws SQLException { - return false; + return true; } @Override public boolean supportsFullOuterJoins() throws SQLException { - return false; + return true; } @Override public boolean supportsLimitedOuterJoins() throws SQLException { - return false; + return true; } @Override public String getSchemaTerm() throws SQLException { - return ""; + return "database"; } @Override public String getProcedureTerm() throws SQLException { - return ""; + return "procedure"; } @Override public String getCatalogTerm() throws SQLException { - return ""; + return "database"; } @Override public boolean isCatalogAtStart() throws SQLException { - return false; + return true; } @Override public String getCatalogSeparator() throws SQLException { - return ""; + return "."; } @Override public boolean supportsSchemasInDataManipulation() throws SQLException { - return false; + return true; } @Override public boolean supportsSchemasInProcedureCalls() throws SQLException { - return false; + return true; } @Override public boolean supportsSchemasInTableDefinitions() throws SQLException { - return false; + return true; } @Override public boolean supportsSchemasInIndexDefinitions() throws SQLException { - return false; + return true; } @Override public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException { - return false; + return true; } @Override public boolean supportsCatalogsInDataManipulation() throws SQLException { - return false; + return true; } @Override public boolean supportsCatalogsInProcedureCalls() throws SQLException { - return false; + return true; } @Override public boolean supportsCatalogsInTableDefinitions() throws SQLException { - return false; + return true; } @Override public boolean supportsCatalogsInIndexDefinitions() throws SQLException { - return false; + return true; } @Override public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException { - return false; + return true; } @Override @@ -410,7 +434,7 @@ public boolean supportsStoredProcedures() throws SQLException { @Override public boolean supportsSubqueriesInComparisons() throws SQLException { - return false; + return true; } @Override @@ -420,27 +444,27 @@ public boolean supportsSubqueriesInExists() throws SQLException { @Override public boolean supportsSubqueriesInIns() throws SQLException { - return false; + return true; } @Override public 
boolean supportsSubqueriesInQuantifieds() throws SQLException { - return false; + return true; } @Override public boolean supportsCorrelatedSubqueries() throws SQLException { - return false; + return true; } @Override public boolean supportsUnion() throws SQLException { - return false; + return true; } @Override public boolean supportsUnionAll() throws SQLException { - return false; + return true; } @Override @@ -540,7 +564,7 @@ public int getMaxRowSize() throws SQLException { @Override public boolean doesMaxRowSizeIncludeBlobs() throws SQLException { - return false; + return true; } @Override @@ -570,7 +594,7 @@ public int getMaxUserNameLength() throws SQLException { @Override public int getDefaultTransactionIsolation() throws SQLException { - return 0; + return connection.getTransactionIsolation(); } @Override @@ -580,7 +604,7 @@ public boolean supportsTransactions() throws SQLException { @Override public boolean supportsTransactionIsolationLevel(int level) throws SQLException { - return false; + return level == connection.getTransactionIsolation(); } @Override @@ -605,6 +629,7 @@ public boolean dataDefinitionIgnoredInTransactions() throws SQLException { @Override public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern) throws SQLException { + //TODO: Drill into this return null; } @@ -615,67 +640,112 @@ public ResultSet getProcedureColumns(String catalog, String schemaPattern, Strin @Override public ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types) throws SQLException { - return null; + String sql = "SELECT database AS TABLE_CAT, database AS TABLE_SCHEM, name AS TABLE_NAME, engine AS TABLE_TYPE," + + " '' AS REMARKS, '' AS TYPE_CAT, '' AS TYPE_SCHEM, '' AS TYPE_NAME, '' AS SELF_REFERENCING_COL_NAME, '' AS REF_GENERATION" + + " FROM system.tables" + + " WHERE database LIKE '" + (schemaPattern == null ? "%" : schemaPattern) + "'" + + " AND database LIKE '" + (catalog == null ? "%" : catalog) + "'" + + " AND name LIKE '" + (tableNamePattern == null ? 
"%" : tableNamePattern) + "'"; + if (types != null && types.length > 0) { + sql += "AND engine IN ("; + for (String type : types) { + sql += "'" + type + "',"; + } + sql = sql.substring(0, sql.length() - 1) + ") "; + } + return connection.createStatement().executeQuery(sql); } @Override public ResultSet getSchemas() throws SQLException { - return null; + return connection.createStatement().executeQuery("SELECT name AS TABLE_SCHEM, null AS TABLE_CATALOG FROM system.databases ORDER BY name"); } @Override public ResultSet getCatalogs() throws SQLException { - return null; + return connection.createStatement().executeQuery("SELECT name AS TABLE_CAT FROM system.databases ORDER BY name"); } @Override public ResultSet getTableTypes() throws SQLException { - return null; + //https://clickhouse.com/docs/en/engines/table-engines/ + return connection.createStatement().executeQuery("SELECT c1 AS TABLE_TYPE " + + "FROM VALUES ('MergeTree','ReplacingMergeTree','SummingMergeTree','AggregatingMergeTree','CollapsingMergeTree','VersionedCollapsingMergeTree','GraphiteMergeTree'," + + "'TinyLog','StripeLog','Log'," + + "'ODBC','JDBC','MySQL','MongoDB','Redis','HDFS','S3','Kafka','EmbeddedRocksDB','RabbitMQ','PostgreSQL','S3Queue','TimeSeries'," + + "'Distributed','Dictionary','Merge','File','Null','Set','Join','URL','View','Memory','Buffer','KeeperMap')"); } @Override public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException { - return null; + //TODO: Best way to convert type to JDBC data type + String sql = "SELECT system.columns.database AS TABLE_CAT, system.columns.database AS TABLE_SCHEM, system.columns.table AS TABLE_NAME, system.columns.name AS COLUMN_NAME, " + + JdbcUtils.generateSqlTypeEnum("system.columns.type") + " AS DATA_TYPE, system.columns.type AS TYPE_NAME, toInt32(system.columns.numeric_precision) AS COLUMN_SIZE, toInt32(0) AS BUFFER_LENGTH, toInt32(system.columns.numeric_precision) AS DECIMAL_DIGITS," + + " toInt32(system.columns.numeric_precision_radix) AS NUM_PREC_RADIX, toInt32(2) AS NULLABLE, system.columns.comment AS REMARKS, system.columns.default_expression AS COLUMN_DEF, toInt32(0) AS SQL_DATA_TYPE," + + " 0 AS SQL_DATETIME_SUB, 0 AS CHAR_OCTET_LENGTH, toInt32(system.columns.position) AS ORDINAL_POSITION, '' AS IS_NULLABLE," + + " NULL AS SCOPE_CATALOG, NULL AS SCOPE_SCHEMA, NULL AS SCOPE_TABLE, NULL AS SOURCE_DATA_TYPE" + + " FROM system.columns" + + " WHERE database LIKE '" + (schemaPattern == null ? "%" : schemaPattern) + "'" + + " AND database LIKE '" + (catalog == null ? "%" : catalog) + "'" + + " AND table LIKE '" + (tableNamePattern == null ? "%" : tableNamePattern) + "'" + + " AND name LIKE '" + (columnNamePattern == null ? 
"%" : columnNamePattern) + "'"; + return connection.createStatement().executeQuery(sql); } @Override public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getColumnPrivileges is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS TABLE_CAT, NULL AS TABLE_SCHEM, NULL AS TABLE_NAME, NULL AS COLUMN_NAME, NULL AS GRANTOR, NULL AS GRANTEE, NULL AS PRIVILEGE, NULL AS IS_GRANTABLE"); } @Override public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getTablePrivileges is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS TABLE_CAT, NULL AS TABLE_SCHEM, NULL AS TABLE_NAME, NULL AS GRANTOR, NULL AS GRANTEE, NULL AS PRIVILEGE, NULL AS IS_GRANTABLE"); } @Override public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getBestRowIdentifier is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS SCOPE, NULL AS COLUMN_NAME, NULL AS DATA_TYPE, NULL AS TYPE_NAME, NULL AS COLUMN_SIZE, NULL AS BUFFER_LENGTH, NULL AS DECIMAL_DIGITS, NULL AS PSEUDO_COLUMN"); } @Override public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getVersionColumns is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS SCOPE, NULL AS COLUMN_NAME, NULL AS DATA_TYPE, NULL AS TYPE_NAME, NULL AS COLUMN_SIZE, NULL AS BUFFER_LENGTH, NULL AS DECIMAL_DIGITS, NULL AS PSEUDO_COLUMN"); } @Override public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getPrimaryKeys is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS TABLE_CAT, NULL AS TABLE_SCHEM, NULL AS TABLE_NAME, NULL AS COLUMN_NAME, NULL AS KEY_SEQ, NULL AS PK_NAME"); } @Override public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getImportedKeys is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS PKTABLE_CAT, NULL AS PKTABLE_SCHEM, NULL AS PKTABLE_NAME, NULL AS PKCOLUMN_NAME, NULL AS FKTABLE_CAT, NULL AS FKTABLE_SCHEM, NULL AS FKTABLE_NAME, NULL AS FKCOLUMN_NAME, NULL AS KEY_SEQ, NULL AS UPDATE_RULE, NULL AS DELETE_RULE, NULL AS FK_NAME, NULL AS PK_NAME, NULL AS DEFERRABILITY"); } @Override public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getExportedKeys is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS PKTABLE_CAT, NULL AS PKTABLE_SCHEM, NULL AS PKTABLE_NAME, NULL 
AS PKCOLUMN_NAME, NULL AS FKTABLE_CAT, NULL AS FKTABLE_SCHEM, NULL AS FKTABLE_NAME, NULL AS FKCOLUMN_NAME, NULL AS KEY_SEQ, NULL AS UPDATE_RULE, NULL AS DELETE_RULE, NULL AS FK_NAME, NULL AS PK_NAME, NULL AS DEFERRABILITY"); } @Override public ResultSet getCrossReference(String parentCatalog, String parentSchema, String parentTable, String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getCrossReference is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS PKTABLE_CAT, NULL AS PKTABLE_SCHEM, NULL AS PKTABLE_NAME, NULL AS PKCOLUMN_NAME, NULL AS FKTABLE_CAT, NULL AS FKTABLE_SCHEM, NULL AS FKTABLE_NAME, NULL AS FKCOLUMN_NAME, NULL AS KEY_SEQ, NULL AS UPDATE_RULE, NULL AS DELETE_RULE, NULL AS FK_NAME, NULL AS PK_NAME"); } @Override @@ -690,7 +760,7 @@ public ResultSet getIndexInfo(String catalog, String schema, String table, boole @Override public boolean supportsResultSetType(int type) throws SQLException { - return false; + return ResultSet.TYPE_FORWARD_ONLY == type; } @Override @@ -750,12 +820,14 @@ public boolean supportsBatchUpdates() throws SQLException { @Override public ResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getUDTs is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS TYPE_CAT, NULL AS TYPE_SCHEM, NULL AS TYPE_NAME, NULL AS CLASS_NAME, NULL AS DATA_TYPE, NULL AS REMARKS, NULL AS BASE_TYPE"); } @Override public Connection getConnection() throws SQLException { - return null; + return connection; } @Override @@ -780,17 +852,23 @@ public boolean supportsGetGeneratedKeys() throws SQLException { @Override public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getSuperTypes is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS TYPE_CAT, NULL AS TYPE_SCHEM, NULL AS TYPE_NAME, NULL AS SUPERTYPE_CAT, NULL AS SUPERTYPE_SCHEM, NULL AS SUPERTYPE_NAME"); } @Override public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getSuperTables is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS TABLE_CAT, NULL AS TABLE_SCHEM, NULL AS TABLE_NAME, NULL AS SUPERTABLE_NAME"); } @Override public ResultSet getAttributes(String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getAttributes is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS TYPE_CAT, NULL AS TYPE_SCHEM, NULL AS TYPE_NAME, NULL AS ATTR_NAME, NULL AS DATA_TYPE, NULL AS ATTR_TYPE_NAME, NULL AS ATTR_SIZE, NULL AS DECIMAL_DIGITS, NULL AS NUM_PREC_RADIX, NULL AS NULLABLE, NULL AS REMARKS, NULL AS ATTR_DEF, NULL AS SQL_DATA_TYPE, NULL AS SQL_DATETIME_SUB, NULL AS CHAR_OCTET_LENGTH, NULL AS ORDINAL_POSITION, NULL AS IS_NULLABLE, NULL AS 
SCOPE_CATALOG, NULL AS SCOPE_SCHEMA, NULL AS SCOPE_TABLE, NULL AS SOURCE_DATA_TYPE"); } @Override @@ -800,32 +878,32 @@ public boolean supportsResultSetHoldability(int holdability) throws SQLException @Override public int getResultSetHoldability() throws SQLException { - return 0; + return ResultSet.HOLD_CURSORS_OVER_COMMIT; } @Override public int getDatabaseMajorVersion() throws SQLException { - return 0; + return connection.getMajorVersion(); } @Override public int getDatabaseMinorVersion() throws SQLException { - return 0; + return connection.getMinorVersion(); } @Override public int getJDBCMajorVersion() throws SQLException { - return 0; + return Driver.getDriverMajorVersion(); } @Override public int getJDBCMinorVersion() throws SQLException { - return 0; + return Driver.getDriverMinorVersion(); } @Override public int getSQLStateType() throws SQLException { - return 0; + return sqlStateSQL; } @Override @@ -840,12 +918,12 @@ public boolean supportsStatementPooling() throws SQLException { @Override public RowIdLifetime getRowIdLifetime() throws SQLException { - return null; + return RowIdLifetime.ROWID_UNSUPPORTED; } @Override public ResultSet getSchemas(String catalog, String schemaPattern) throws SQLException { - return null; + return connection.createStatement().executeQuery("SELECT name AS TABLE_SCHEM, null AS TABLE_CATALOG FROM system.databases WHERE name LIKE '" + (schemaPattern == null ? "%" : schemaPattern) + "'"); } @Override @@ -860,22 +938,30 @@ public boolean autoCommitFailureClosesAllResultSets() throws SQLException { @Override public ResultSet getClientInfoProperties() throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getClientInfoProperties is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS NAME, NULL AS MAX_LEN, NULL AS DEFAULT_VALUE, NULL AS DESCRIPTION"); } @Override public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getFunctions is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS FUNCTION_CAT, NULL AS FUNCTION_SCHEM, NULL AS FUNCTION_NAME, NULL AS REMARKS, NULL AS FUNCTION_TYPE, NULL AS SPECIFIC_NAME"); } @Override public ResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getFunctionColumns is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS FUNCTION_CAT, NULL AS FUNCTION_SCHEM, NULL AS FUNCTION_NAME, NULL AS COLUMN_NAME, NULL AS COLUMN_TYPE, NULL AS DATA_TYPE, NULL AS TYPE_NAME, NULL AS PRECISION, NULL AS LENGTH, NULL AS SCALE, NULL AS RADIX, NULL AS NULLABLE, NULL AS REMARKS, NULL AS CHAR_OCTET_LENGTH, NULL AS ORDINAL_POSITION, NULL AS IS_NULLABLE, NULL AS SPECIFIC_NAME"); } @Override public ResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException { - return null; + //Return an empty result set with the required columns + log.warn("getPseudoColumns is not supported and may return invalid results"); + return connection.createStatement().executeQuery("SELECT NULL AS TABLE_CAT, NULL AS TABLE_SCHEM, NULL AS TABLE_NAME, 
NULL AS COLUMN_NAME, NULL AS DATA_TYPE, NULL AS COLUMN_SIZE, NULL AS DECIMAL_DIGITS, NULL AS NUM_PREC_RADIX, NULL AS COLUMN_USAGE, NULL AS REMARKS, NULL AS CHAR_OCTET_LENGTH, NULL AS IS_NULLABLE"); } @Override diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ParameterMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ParameterMetaData.java index 41bb69c11..3dcbfba20 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ParameterMetaData.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ParameterMetaData.java @@ -1,7 +1,9 @@ package com.clickhouse.jdbc.metadata; import com.clickhouse.data.ClickHouseColumn; +import com.clickhouse.data.ClickHouseDataType; import com.clickhouse.jdbc.JdbcWrapper; +import com.clickhouse.jdbc.internal.JdbcUtils; import java.sql.SQLException; import java.util.List; @@ -52,21 +54,23 @@ public int getScale(int param) throws SQLException { @Override public int getParameterType(int param) throws SQLException { - return 0; + //TODO: Should we implement .getSQLType()? + return JdbcUtils.convertToSqlType(getParam(param).getDataType()); } @Override public String getParameterTypeName(int param) throws SQLException { - return ""; + return getParam(param).getDataType().name(); } @Override public String getParameterClassName(int param) throws SQLException { - return ""; + //TODO: Should we implement .getClassName()? + return getParam(param).getDataType().getObjectClass().getName(); } @Override public int getParameterMode(int param) throws SQLException { - return 0; + return parameterModeIn; } } diff --git a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ResultSetMetaData.java b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ResultSetMetaData.java index 2183ea97a..55103af1a 100644 --- a/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ResultSetMetaData.java +++ b/clickhouse-jdbc/src/main/java/com/clickhouse/jdbc/metadata/ResultSetMetaData.java @@ -2,13 +2,33 @@ import java.sql.SQLException; +import com.clickhouse.client.api.metadata.TableSchema; +import com.clickhouse.data.ClickHouseColumn; import com.clickhouse.jdbc.JdbcWrapper; +import com.clickhouse.jdbc.ResultSetImpl; +import com.clickhouse.jdbc.internal.JdbcUtils; public class ResultSetMetaData implements java.sql.ResultSetMetaData, JdbcWrapper { + private final ResultSetImpl resultSet; + public ResultSetMetaData(ResultSetImpl resultSet) { + this.resultSet = resultSet; + } + + private ClickHouseColumn getColumn(int column) throws SQLException { + if (column < 1 || column > getColumnCount()) { + throw new SQLException("Column index out of range: " + column); + } + return resultSet.getSchema().getColumns().get(column - 1); + } @Override public int getColumnCount() throws SQLException { - return 0; + try { + TableSchema schema = resultSet.getSchema(); + return schema.getColumns().size(); + } catch (Exception e) { + throw new SQLException(e); + } } @Override @@ -18,12 +38,12 @@ public boolean isAutoIncrement(int column) throws SQLException { @Override public boolean isCaseSensitive(int column) throws SQLException { - return false; + return true; } @Override public boolean isSearchable(int column) throws SQLException { - return false; + return true; } @Override @@ -33,27 +53,27 @@ public boolean isCurrency(int column) throws SQLException { @Override public int isNullable(int column) throws SQLException { - return 0; + return getColumn(column).isNullable() ? 
columnNullable : columnNoNulls; } @Override public boolean isSigned(int column) throws SQLException { - return false; + return getColumn(column).getDataType().isSigned(); } @Override public int getColumnDisplaySize(int column) throws SQLException { - return 0; + return 80; } @Override public String getColumnLabel(int column) throws SQLException { - return ""; + return getColumn(column).getColumnName(); } @Override public String getColumnName(int column) throws SQLException { - return ""; + return getColumn(column).getColumnName(); } @Override @@ -63,17 +83,17 @@ public String getSchemaName(int column) throws SQLException { @Override public int getPrecision(int column) throws SQLException { - return 0; + return getColumn(column).getPrecision(); } @Override public int getScale(int column) throws SQLException { - return 0; + return getColumn(column).getScale(); } @Override public String getTableName(int column) throws SQLException { - return ""; + return resultSet.getSchema().getTableName(); } @Override @@ -83,17 +103,17 @@ public String getCatalogName(int column) throws SQLException { @Override public int getColumnType(int column) throws SQLException { - return 0; + return JdbcUtils.convertToSqlType(getColumn(column).getDataType()); } @Override public String getColumnTypeName(int column) throws SQLException { - return ""; + return getColumn(column).getDataType().name(); } @Override public boolean isReadOnly(int column) throws SQLException { - return false; + return true; } @Override @@ -108,6 +128,6 @@ public boolean isDefinitelyWritable(int column) throws SQLException { @Override public String getColumnClassName(int column) throws SQLException { - return ""; + throw new UnsupportedOperationException("Not implemented yet."); } } diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/DatabaseMetaDataTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/DatabaseMetaDataTest.java new file mode 100644 index 000000000..5ea846391 --- /dev/null +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/DatabaseMetaDataTest.java @@ -0,0 +1,110 @@ +package com.clickhouse.jdbc.metadata; + +import com.clickhouse.jdbc.JdbcIntegrationTest; +import org.testng.annotations.Ignore; +import org.testng.annotations.Test; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.Types; +import java.sql.DatabaseMetaData; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + +public class DatabaseMetaDataTest extends JdbcIntegrationTest { + @Test + public void testGetColumns() throws Exception { + try (Connection conn = getJdbcConnection()) { + DatabaseMetaData dbmd = conn.getMetaData(); + ResultSet rs = dbmd.getColumns("system", null, "numbers", null); + assertTrue(rs.next()); + assertEquals(rs.getString("TABLE_NAME"), "numbers"); + assertEquals(rs.getString("COLUMN_NAME"), "number"); + assertEquals(rs.getInt("DATA_TYPE"), Types.BIGINT); + assertEquals(rs.getString("TYPE_NAME"), "UInt64"); + assertEquals(rs.getInt("COLUMN_SIZE"), 64); + assertEquals(rs.getInt("DECIMAL_DIGITS"), 64); + assertEquals(rs.getInt("NUM_PREC_RADIX"), 2); + assertEquals(rs.getInt("NULLABLE"), DatabaseMetaData.attributeNullableUnknown); + assertEquals(rs.getString("IS_NULLABLE"), ""); + assertEquals(rs.getInt("ORDINAL_POSITION"), 1); + assertFalse(rs.next()); + } + } + + @Test + public void testGetTables() throws Exception { + try (Connection conn = getJdbcConnection()) { + DatabaseMetaData dbmd 
= conn.getMetaData(); + ResultSet rs = dbmd.getTables("system", null, "numbers", null); + assertTrue(rs.next()); + assertEquals(rs.getString("TABLE_NAME"), "numbers"); + assertEquals(rs.getString("TABLE_TYPE"), "SystemNumbers"); + assertFalse(rs.next()); + } + } + + @Ignore("ClickHouse does not support primary keys") + @Test + public void testGetPrimaryKeys() throws Exception { + try (Connection conn = getJdbcConnection()) { + DatabaseMetaData dbmd = conn.getMetaData(); + ResultSet rs = dbmd.getPrimaryKeys("system", null, "numbers"); + assertTrue(rs.next()); + assertEquals(rs.getString("TABLE_NAME"), "numbers"); + assertEquals(rs.getString("COLUMN_NAME"), "number"); + assertEquals(rs.getShort("KEY_SEQ"), 1); + assertFalse(rs.next()); + } + } + + @Test + public void testGetSchemas() throws Exception { + try (Connection conn = getJdbcConnection()) { + DatabaseMetaData dbmd = conn.getMetaData(); + ResultSet rs = dbmd.getSchemas(); + assertTrue(rs.next()); + assertEquals(rs.getString("TABLE_SCHEM"), "INFORMATION_SCHEMA"); + } + } + + @Test + public void testGetCatalogs() throws Exception { + try (Connection conn = getJdbcConnection()) { + DatabaseMetaData dbmd = conn.getMetaData(); + ResultSet rs = dbmd.getCatalogs(); + assertTrue(rs.next()); + assertEquals(rs.getString("TABLE_CAT"), "INFORMATION_SCHEMA"); + } + } + + @Test + public void testGetTableTypes() throws Exception { + try (Connection conn = getJdbcConnection()) { + DatabaseMetaData dbmd = conn.getMetaData(); + ResultSet rs = dbmd.getTableTypes(); + assertTrue(rs.next()); + assertEquals(rs.getString("TABLE_TYPE"), "MergeTree"); + } + } + + @Test + public void testGetColumnsWithEmptyCatalog() throws Exception { + try (Connection conn = getJdbcConnection()) { + DatabaseMetaData dbmd = conn.getMetaData(); + ResultSet rs = dbmd.getColumns("", null, "numbers", null); + assertFalse(rs.next()); + } + } + + @Test + public void testGetColumnsWithEmptySchema() throws Exception { + try (Connection conn = getJdbcConnection()) { + DatabaseMetaData dbmd = conn.getMetaData(); + ResultSet rs = dbmd.getColumns("system", "", "numbers", null); + assertFalse(rs.next()); + } + } +} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/ParameterMetaDataTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/ParameterMetaDataTest.java new file mode 100644 index 000000000..aa7fe3438 --- /dev/null +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/ParameterMetaDataTest.java @@ -0,0 +1,80 @@ +package com.clickhouse.jdbc.metadata; + +import com.clickhouse.data.ClickHouseColumn; +import com.clickhouse.data.ClickHouseDataType; +import com.clickhouse.jdbc.JdbcIntegrationTest; +import org.testng.annotations.Test; + +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + +public class ParameterMetaDataTest extends JdbcIntegrationTest { + @Test + public void testGetParameterCount() throws SQLException { + ParameterMetaData metaData = new ParameterMetaData(Collections.emptyList()); + assertEquals(metaData.getParameterCount(), 0); + + metaData = new ParameterMetaData(List.of(ClickHouseColumn.of("param1", ClickHouseDataType.Int32, false))); + assertEquals(metaData.getParameterCount(), 1); + } + + @Test + public void testIsNullable() throws SQLException { + ClickHouseColumn column = ClickHouseColumn.of("param1", 
ClickHouseDataType.Int32, true); + ParameterMetaData metaData = new ParameterMetaData(Collections.singletonList(column)); + assertEquals(metaData.isNullable(1), ParameterMetaData.parameterNullable); + } + + @Test + public void testIsSigned() throws SQLException { + ClickHouseColumn column = ClickHouseColumn.of("param1", ClickHouseDataType.Int32, false); + ParameterMetaData metaData = new ParameterMetaData(Collections.singletonList(column)); + assertTrue(metaData.isSigned(1)); + + column = ClickHouseColumn.of("param2", ClickHouseDataType.UInt32, false); + metaData = new ParameterMetaData(Collections.singletonList(column)); + assertFalse(metaData.isSigned(1)); + } + + @Test + public void testGetPrecisionAndScale() throws SQLException { + ClickHouseColumn column = ClickHouseColumn.of("param1", ClickHouseDataType.Int32, false, 10, 5); + ParameterMetaData metaData = new ParameterMetaData(Collections.singletonList(column)); + assertEquals(metaData.getPrecision(1), 10); + assertEquals(metaData.getScale(1), 5); + } + + @Test + public void testGetParameterType() throws SQLException { + ClickHouseColumn column = ClickHouseColumn.of("param1", ClickHouseDataType.Int32, false); + ParameterMetaData metaData = new ParameterMetaData(Collections.singletonList(column)); + assertEquals(metaData.getParameterType(1), java.sql.Types.INTEGER); + } + + @Test + public void testGetParameterTypeName() throws SQLException { + ClickHouseColumn column = ClickHouseColumn.of("param1", ClickHouseDataType.Int32, false); + ParameterMetaData metaData = new ParameterMetaData(Collections.singletonList(column)); + assertEquals(metaData.getParameterTypeName(1), "Int32"); + } + + @Test + public void testGetParameterClassName() throws SQLException { + ClickHouseColumn column = ClickHouseColumn.of("param1", ClickHouseDataType.Int32, false); + ParameterMetaData metaData = new ParameterMetaData(Collections.singletonList(column)); + assertEquals(metaData.getParameterClassName(1), "java.lang.Integer"); + } + + @Test + public void testGetParameterMode() throws SQLException { + ClickHouseColumn column = ClickHouseColumn.of("param1", ClickHouseDataType.Int32, false); + ParameterMetaData metaData = new ParameterMetaData(Collections.singletonList(column)); + assertEquals(metaData.getParameterMode(1), ParameterMetaData.parameterModeIn); + } +} diff --git a/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/ResultSetMetaDataTest.java b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/ResultSetMetaDataTest.java new file mode 100644 index 000000000..484ffa3f6 --- /dev/null +++ b/clickhouse-jdbc/src/test/java/com/clickhouse/jdbc/metadata/ResultSetMetaDataTest.java @@ -0,0 +1,143 @@ +package com.clickhouse.jdbc.metadata; + +import com.clickhouse.jdbc.JdbcIntegrationTest; +import org.testng.annotations.Test; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.Statement; +import java.sql.Types; + +import static org.testng.Assert.assertEquals; + +public class ResultSetMetaDataTest extends JdbcIntegrationTest { + @Test + public void testGetColumnCount() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT 1 AS a, 2 AS b, 3 AS c"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals(3, rsmd.getColumnCount()); + } + } + } + + @Test + public void testGetColumnLabel() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = 
conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT 1 AS a"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals("a", rsmd.getColumnLabel(1)); + } + } + } + + @Test + public void testGetColumnName() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT 1 AS a"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals("a", rsmd.getColumnName(1)); + } + } + } + + @Test + public void testGetColumnTypeIntegers() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT toInt8(1), toInt16(1), toInt32(1), toInt64(1) AS a"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals(rsmd.getColumnType(1), Types.TINYINT); + assertEquals(rsmd.getColumnType(2), Types.SMALLINT); + assertEquals(rsmd.getColumnType(3), Types.INTEGER); + assertEquals(rsmd.getColumnType(4), Types.BIGINT); + } + } + } + + @Test + public void testGetColumnTypeFloats() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT toFloat32(1), toFloat64(1) AS a"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals(rsmd.getColumnType(1), Types.FLOAT); + assertEquals(rsmd.getColumnType(2), Types.DOUBLE); + } + } + } + + @Test + public void testGetColumnTypeString() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT toString(1) AS a"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals(rsmd.getColumnType(1), Types.CHAR); + } + } + } + + @Test + public void testGetColumnTypeDateAndTime() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT toDate('2021-01-01') AS a, toDateTime('2021-01-01 00:00:00') AS b"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals(rsmd.getColumnType(1), Types.DATE); + assertEquals(rsmd.getColumnType(2), Types.TIMESTAMP); + } + } + } + + @Test + public void testGetColumnTypeName() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT toInt16(1), toUInt16(1) AS a"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals(rsmd.getColumnTypeName(1), "Int16"); + assertEquals(rsmd.getColumnTypeName(2), "UInt16"); + } + } + } + + @Test + public void testGetColumnDisplaySize() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT 1 AS a"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals(rsmd.getColumnDisplaySize(1), 80); + } + } + } + + @Test + public void testGetColumnPrecision() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT 1 AS a"); + ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals(rsmd.getPrecision(1), 3); + } + } + } + + @Test + public void testGetColumnScale() throws Exception { + try (Connection conn = getJdbcConnection()) { + try (Statement stmt = conn.createStatement()) { + ResultSet rs = stmt.executeQuery("SELECT toInt8(1), toDecimal32(1, 5), toDecimal64(1, 5) AS a"); + 
ResultSetMetaData rsmd = rs.getMetaData(); + assertEquals(rsmd.getScale(1), 0); + assertEquals(rsmd.getScale(2), 5); + assertEquals(rsmd.getScale(3), 5); + } + } + } +}
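
For illustration only (not part of the patch): a minimal sketch of how an application might exercise the metadata surface added above. The JDBC URL shape and the "user"/"password" property names are assumptions here; only the DatabaseMetaData calls mirror what the new DatabaseMetaData implementation and DatabaseMetaDataTest actually cover.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.util.Properties;

public class MetadataExample {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("user", "default");   // assumed property name
        props.setProperty("password", "");      // assumed property name

        // Assumed URL shape; host and port are placeholders for a real server.
        try (Connection conn = DriverManager.getConnection("jdbc:clickhouse://localhost:8123", props)) {
            DatabaseMetaData md = conn.getMetaData();
            // Driver version now comes from the package implementation version (see Driver above).
            System.out.println(md.getDatabaseProductName() + ", driver " + md.getDriverVersion());

            // getColumns is backed by a query against system.columns in this patch.
            try (ResultSet rs = md.getColumns("system", null, "numbers", null)) {
                while (rs.next()) {
                    System.out.printf("%s.%s %s (java.sql.Types=%d)%n",
                            rs.getString("TABLE_NAME"),
                            rs.getString("COLUMN_NAME"),
                            rs.getString("TYPE_NAME"),
                            rs.getInt("DATA_TYPE"));
                }
            }
        }
    }
}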