[SPARK-21739][SQL]Cast expression should initialize timezoneId when it is called statically to convert something into TimestampType #18960
Changes from 3 commits
HiveTableScanExec.scala:

```diff
@@ -37,6 +37,7 @@ import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.metric.SQLMetrics
 import org.apache.spark.sql.hive._
 import org.apache.spark.sql.hive.client.HiveClientImpl
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{BooleanType, DataType}
 import org.apache.spark.util.Utils

@@ -104,7 +105,7 @@ case class HiveTableScanExec(
     hadoopConf)

   private def castFromString(value: String, dataType: DataType) = {
-    Cast(Literal(value), dataType).eval(null)
+    Cast(Literal(value), dataType, Option(SQLConf.get.sessionLocalTimeZone)).eval(null)
   }

   private def addColumnMetadataToConf(hiveConf: Configuration): Unit = {
```
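For context on the change above: `Cast` to `TimestampType` is timezone-aware, and its `timeZoneId` is normally filled in by the analyzer. When the expression is built and evaluated statically, as `castFromString` does, the timezone has to be supplied explicitly. A minimal sketch of the pattern, using the same catalyst API calls as the diff (variable names are illustrative):

```scala
import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.TimestampType

// A statically built Cast to TimestampType: without the third argument the
// expression has no timeZoneId (the analyzer never sees it), so eval() fails.
// Passing the session-local timezone makes the eager evaluation work.
val partitionValue = "2010-01-02 00:00:00.000"
val micros = Cast(Literal(partitionValue), TimestampType,
  Option(SQLConf.get.sessionLocalTimeZone)).eval(null)
// `micros` holds the timestamp in Spark's internal representation
// (microseconds since the epoch).
```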
QueryPartitionSuite.scala:

```diff
@@ -68,4 +68,25 @@ class QueryPartitionSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       sql("DROP TABLE IF EXISTS createAndInsertTest")
     }
   }
+
+  test("SPARK-21739: Cast expression should initialize timezoneId " +
+    "when it is called statically to convert something into TimestampType") {
+    // create table for test
+    sql("CREATE TABLE table_with_timestamp_partition(value int) PARTITIONED by (ts timestamp)")
+    sql("INSERT OVERWRITE TABLE table_with_timestamp_partition " +
+      "partition (ts = '2010-01-01 00:00:00.000') VALUES (1)")
+    sql("INSERT OVERWRITE TABLE table_with_timestamp_partition " +
+      "partition (ts = '2010-01-02 00:00:00.000') VALUES (2)")
+
+    // test for Cast expression in TableReader
+    checkAnswer(sql("select value from table_with_timestamp_partition"),
+      Seq(Row(1), Row(2)))
+
+    // test for Cast expression in HiveTableScanExec
+    checkAnswer(sql("select value from table_with_timestamp_partition " +
+      "where ts = '2010-01-02 00:00:00.000'"), Row(2))
+
+    sql("DROP TABLE IF EXISTS table_with_timestamp_partition")
+  }
 }
```
Review comments:

- "The same here."
- "We also need a test case for verifying this."
- "Do you mean a test case for HadoopTableReader? That's a little confusing."
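As the inline comments in the new test indicate, the unfiltered scan exercises the Cast in TableReader, while the query with the partition filter exercises HiveTableScanExec (the `castFromString` shown above), so one test covers both paths raised in the discussion. Both now read the session-local timezone; a minimal sketch of inspecting or overriding that setting, assuming a plain local SparkSession (names illustrative, not part of this PR):

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf

// Sketch only: a plain local session; the PR itself targets Hive tables,
// but the session timezone setting works the same way.
val spark = SparkSession.builder().master("local[1]").appName("tz-demo").getOrCreate()

// The fix reads SQLConf.get.sessionLocalTimeZone, which defaults to the
// JVM timezone and can be overridden per session via this SQL conf key:
spark.conf.set(SQLConf.SESSION_LOCAL_TIMEZONE.key, "America/Los_Angeles")
println(spark.conf.get(SQLConf.SESSION_LOCAL_TIMEZONE.key))  // America/Los_Angeles
```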