sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala

@@ -31,7 +31,6 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.SessionCatalog
-import org.apache.spark.sql.hive.thriftserver.ThriftserverShimUtils.toJavaSQLType
 import org.apache.spark.sql.types._
 
 /**
@@ -131,7 +130,8 @@ private[hive] class SparkGetColumnsOperation(
    * For array, map, string, and binaries, the column size is variable, return null as unknown.
    */
   private def getColumnSize(typ: DataType): Option[Int] = typ match {
-    case dt @ (BooleanType | _: NumericType | DateType | TimestampType) => Some(dt.defaultSize)
+    case dt @ (BooleanType | _: NumericType | DateType | TimestampType | CalendarIntervalType) =>
+      Some(dt.defaultSize)
     case StructType(fields) =>
       val sizeArr = fields.map(f => getColumnSize(f.dataType))
       if (sizeArr.contains(None)) {

@@ -164,6 +164,28 @@ private[hive] class SparkGetColumnsOperation(
     case _ => None
   }
 
+  private def toJavaSQLType(typ: DataType): Integer = typ match {
+    case NullType => java.sql.Types.NULL
+    case BooleanType => java.sql.Types.BOOLEAN
+    case ByteType => java.sql.Types.TINYINT
+    case ShortType => java.sql.Types.SMALLINT
+    case IntegerType => java.sql.Types.INTEGER
+    case LongType => java.sql.Types.BIGINT
+    case FloatType => java.sql.Types.FLOAT
+    case DoubleType => java.sql.Types.DOUBLE
+    case _: DecimalType => java.sql.Types.DECIMAL
+    case StringType => java.sql.Types.VARCHAR
+    case BinaryType => java.sql.Types.BINARY
+    case DateType => java.sql.Types.DATE
+    case TimestampType => java.sql.Types.TIMESTAMP
+    case _: ArrayType => java.sql.Types.ARRAY
+    case _: MapType => java.sql.Types.JAVA_OBJECT
+    case _: StructType => java.sql.Types.STRUCT
+    // Hive's year-month and day-time intervals map to java.sql.Types.OTHER
+    case _: CalendarIntervalType => java.sql.Types.OTHER
+    case _ => throw new IllegalArgumentException(s"Unrecognized type name: ${typ.sql}")
+  }
+
   private def addToRowSet(
       columnPattern: Pattern,
       dbName: String,

@@ -177,7 +199,7 @@ private[hive] class SparkGetColumnsOperation(
       dbName, // TABLE_SCHEM
       tableName, // TABLE_NAME
       column.name, // COLUMN_NAME
-      toJavaSQLType(column.dataType.sql).asInstanceOf[AnyRef], // DATA_TYPE
+      toJavaSQLType(column.dataType), // DATA_TYPE
       column.dataType.sql, // TYPE_NAME
       getColumnSize(column.dataType).map(_.asInstanceOf[AnyRef]).orNull, // COLUMN_SIZE
       null, // BUFFER_LENGTH, unused
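The key change above: the old code rendered the Spark DataType to its SQL string and pushed it through the Hive shim's string-based Type.getType lookup, which does not recognize Spark's plain "INTERVAL" name; the new code matches on the DataType directly. A minimal sketch of the interval corner of the new mapping (not part of the PR; the object and method names are invented for illustration):

```scala
import org.apache.spark.sql.types._

object IntervalMappingSketch {
  // Standalone copy of the interval-relevant cases of the new match.
  def dataTypeToJavaSQLType(dt: DataType): Int = dt match {
    case _: CalendarIntervalType => java.sql.Types.OTHER
    case StringType => java.sql.Types.VARCHAR
    case other => throw new IllegalArgumentException(s"Unrecognized type name: ${other.sql}")
  }

  def main(args: Array[String]): Unit = {
    // CalendarIntervalType.sql is "INTERVAL", the string the old code handed to
    // the Hive shim's Type.getType(String), which has no matching type name.
    println(CalendarIntervalType.sql)                     // INTERVAL
    println(dataTypeToJavaSQLType(CalendarIntervalType))  // 1111 == java.sql.Types.OTHER
  }
}
```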
sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala

@@ -686,6 +686,23 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
     }
   }
 
+  test("Query Intervals in VIEWs through thrift server") {
+    val viewName1 = "view_interval_1"
+    val viewName2 = "view_interval_2"
+    val ddl1 = s"CREATE GLOBAL TEMP VIEW $viewName1 AS SELECT INTERVAL 1 DAY AS i"
+    val ddl2 = s"CREATE TEMP VIEW $viewName2 as select * from global_temp.$viewName1"
+    withJdbcStatement(viewName1, viewName2) { statement =>
+      statement.executeQuery(ddl1)
+      statement.executeQuery(ddl2)
+      val rs = statement.executeQuery(s"SELECT v1.i as a, v2.i as b FROM global_temp.$viewName1" +
+        s" v1 join $viewName2 v2 on date_part('DAY', v1.i) = date_part('DAY', v2.i)")
+      while (rs.next()) {
+        assert(rs.getString("a") === "1 days")
+        assert(rs.getString("b") === "1 days")
+      }
+    }
+  }
+
   test("ThriftCLIService FetchResults FETCH_FIRST, FETCH_NEXT, FETCH_PRIOR") {
     def checkResult(rows: RowSet, start: Long, end: Long): Unit = {
       assert(rows.getStartOffset() == start)

Review thread on the interval SELECT line:

Contributor:
What will be returned by rs.getMetaData.getColumnType for the interval column? I believe it will be a string, because we simply make it a string in the output schema in https://github.com/apache/spark/blob/master/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala#L362

There was a discussion last year in #25694 (comment): because the interval is just returned as a string in the query output schema, and is not compliant with standard SQL interval types, it should not be listed by SparkGetTypeInfoOperation and should be treated just as a string.

If we want it returned as java.sql.Types.OTHER, then to make this complete it should be returned as such in the result set metadata as well, and listed in SparkGetTypeInfoOperation. Or we could map it to string.

Member Author:
The ResultSetMetadata describes the return types of the result set; the GetColumnsOperation retrieves the metadata of the original table or view. IMHO, they do not have to be the same.

Member Author:
I will make a follow-up to add it to SparkGetTypeInfoOperation.

Contributor:
It would be most consistent if the table column metadata and the metadata of the result of "select * from table" had consistent types. I don't really have a preference whether it should be string or interval. Last year the discussion leaned towards string, because the intervals are not really standard compliant, but there might indeed be value in marking the column as interval in the schema, so the application might try to parse it as such. Though, e.g., between Spark 2.4 and Spark 3.0 the format of the returned interval changed (see https://issues.apache.org/jira/browse/SPARK-32132), which might have broken however anyone parsed it, so Spark is not really giving any guarantees about the stability of this type's format...

Ultimately, I think this is very rarely used, as intervals didn't work at all until #25277, and it was not raised earlier...

Member Author (@yaooqinn, Aug 27, 2020):
The ResultSetMetadata is used by client-side users to do the right work on the result set they get: when the interval values become strings, they need to follow the rules of String operations in whatever they do next.

The GetColumnsOperation tells client-side users how the data is stored and formed at the server side. If JDBC users know that column A is an interval type, they should notice that they cannot perform string functions, comparators, etc. on column A.

Contributor:
In ResultSetMetadata, would the client-side user not learn that they should follow string rules by using getColumnClassName, which should return String, while it would still be valuable to them to know that the column represents an interval?
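To make the distinction in this thread concrete, a minimal client-side sketch (not from the PR; the connection URL, view name, and object name are illustrative):

```scala
import java.sql.DriverManager

// Contrast of the two metadata paths discussed above: result set metadata
// (interval rendered as a string) vs. catalog metadata via getColumns
// (interval reported as java.sql.Types.OTHER after this PR).
object IntervalMetadataSketch {
  def main(args: Array[String]): Unit = {
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default")
    try {
      val stmt = conn.createStatement()
      stmt.execute("CREATE GLOBAL TEMP VIEW v AS SELECT INTERVAL 1 DAY AS i")

      // Query side: the output schema stringifies intervals, so the column type
      // is expected to be java.sql.Types.VARCHAR and the column class name
      // java.lang.String.
      val rs = stmt.executeQuery("SELECT i FROM global_temp.v")
      println(rs.getMetaData.getColumnType(1))
      println(rs.getMetaData.getColumnClassName(1))

      // Catalog side: getColumns describes the stored column, which with this
      // PR reports DATA_TYPE 1111 (java.sql.Types.OTHER) and TYPE_NAME INTERVAL.
      val cols = conn.getMetaData.getColumns(null, "global_temp", "v", "i")
      while (cols.next()) {
        println(cols.getInt("DATA_TYPE") + " / " + cols.getString("TYPE_NAME"))
      }
    } finally {
      conn.close()
    }
  }
}
```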
sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkMetadataOperationSuite.scala

@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.thriftserver
 
 import java.sql.{DatabaseMetaData, ResultSet}
 
-import org.apache.spark.sql.types.{ArrayType, BinaryType, BooleanType, DecimalType, DoubleType, FloatType, IntegerType, MapType, NumericType, StringType, StructType, TimestampType}
+import org.apache.spark.sql.types.{ArrayType, BinaryType, BooleanType, CalendarIntervalType, DecimalType, DoubleType, FloatType, IntegerType, MapType, NumericType, StringType, StructType, TimestampType}
 
 class SparkMetadataOperationSuite extends HiveThriftJdbcTest {
 
@@ -333,4 +333,31 @@ class SparkMetadataOperationSuite extends HiveThriftJdbcTest {
       assert(pos === 17, "all columns should have been verified")
     }
   }
+
+  test("get columns operation should handle interval column properly") {
+    val viewName = "view_interval"
+    val ddl = s"CREATE GLOBAL TEMP VIEW $viewName as select interval 1 day as i"
+
+    withJdbcStatement(viewName) { statement =>
+      statement.execute(ddl)
+      val data = statement.getConnection.getMetaData
+      val rowSet = data.getColumns("", "global_temp", viewName, null)
+      while (rowSet.next()) {
+        assert(rowSet.getString("TABLE_CAT") === null)
+        assert(rowSet.getString("TABLE_SCHEM") === "global_temp")
+        assert(rowSet.getString("TABLE_NAME") === viewName)
+        assert(rowSet.getString("COLUMN_NAME") === "i")
+        assert(rowSet.getInt("DATA_TYPE") === java.sql.Types.OTHER)
+        assert(rowSet.getString("TYPE_NAME").equalsIgnoreCase(CalendarIntervalType.sql))
+        assert(rowSet.getInt("COLUMN_SIZE") === CalendarIntervalType.defaultSize)
+        assert(rowSet.getInt("DECIMAL_DIGITS") === 0)
+        assert(rowSet.getInt("NUM_PREC_RADIX") === 0)
+        assert(rowSet.getInt("NULLABLE") === 0)
+        assert(rowSet.getString("REMARKS") === "")
+        assert(rowSet.getInt("ORDINAL_POSITION") === 0)
+        assert(rowSet.getString("IS_NULLABLE") === "YES")
+        assert(rowSet.getString("IS_AUTO_INCREMENT") === "NO")
+      }
+    }
+  }
 }

Review thread on the test declaration:

Contributor:
Nit: can we also test an extract query of the table with calendar types?

Member Author:
This PR fixes SparkGetColumnsOperation. As a meta operation, it is not related to the extract queries executed by SparkExecuteStatementOperation. BTW, I notice that test cases for extracting interval values in VIEWs through the thrift server might be missing in the current codebase. Maybe we can make another PR to improve these, WDYT @cloud-fan? Or I can just add some UTs here: https://github.com/yaooqinn/spark/blob/d24d27f1bc39e915df23d65f8fda0d83e716b308/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala#L674

Contributor:
That's why I think we should add a small check here to make sure they actually work.

Contributor:
I'm OK to add the missing test in this PR.

Member Author:
Done, thank you guys for the check~

Review thread on the TYPE_NAME assertion:

Contributor:
If we treat it as string type, can we still report the TYPE_NAME as INTERVAL?

Member Author (@yaooqinn, Aug 27, 2020):
I guess that all the meta operations should do nothing else but specifically describe how the objects (databases, tables, columns, etc.) are stored in the Spark system.

If we change the column meta here from INTERVAL to STRING, the column would appear orderable and comparable (JDBC users may assume so); they would have no idea what they are actually dealing with, and would get a completely different picture of the meta-information compared to users of spark-sql self-contained applications.
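For reference on the COLUMN_SIZE assertion above, a quick illustrative check (the object name is invented) of the DataType.defaultSize values that getColumnSize now returns for fixed-length types:

```scala
import org.apache.spark.sql.types._

// getColumnSize returns defaultSize for fixed-length types; after this PR
// that includes CalendarIntervalType.
object DefaultSizeSketch extends App {
  println(CalendarIntervalType.defaultSize) // 16
  println(TimestampType.defaultSize)        // 8
  println(DateType.defaultSize)             // 4
}
```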
sql/hive-thriftserver/v1.2/src/main/scala/org/apache/spark/sql/hive/thriftserver/ThriftserverShimUtils.scala

@@ -49,8 +49,6 @@ private[thriftserver] object ThriftserverShimUtils {
     RowSetFactory.create(getResultSetSchema, getProtocolVersion)
   }
 
-  private[thriftserver] def toJavaSQLType(s: String): Int = Type.getType(s).toJavaSQLType
-
   private[thriftserver] def supportedType(): Seq[Type] = {
     Seq(NULL_TYPE, BOOLEAN_TYPE, STRING_TYPE, BINARY_TYPE,
       TINYINT_TYPE, SMALLINT_TYPE, INT_TYPE, BIGINT_TYPE,
sql/hive-thriftserver/v2.3/src/main/scala/org/apache/spark/sql/hive/thriftserver/ThriftserverShimUtils.scala

@@ -50,8 +50,6 @@ private[thriftserver] object ThriftserverShimUtils {
     RowSetFactory.create(getResultSetSchema, getProtocolVersion, false)
   }
 
-  private[thriftserver] def toJavaSQLType(s: String): Int = Type.getType(s).toJavaSQLType
-
   private[thriftserver] def supportedType(): Seq[Type] = {
     Seq(NULL_TYPE, BOOLEAN_TYPE, STRING_TYPE, BINARY_TYPE,
       TINYINT_TYPE, SMALLINT_TYPE, INT_TYPE, BIGINT_TYPE,