diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala index 40f25d5599d0b..c845dd81f3e32 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala @@ -18,6 +18,7 @@ package org.apache.spark.sql.execution import java.sql.{Date, Timestamp} +import java.time.{Duration, Period} import org.json4s.DefaultFormats import org.json4s.JsonDSL._ @@ -43,6 +44,8 @@ abstract class BaseScriptTransformationSuite extends SparkPlanTest with SQLTestU import testImplicits._ import ScriptTransformationIOSchema._ + protected def defaultSerDe(): String + protected val uncaughtExceptionHandler = new TestUncaughtExceptionHandler private var defaultUncaughtExceptionHandler: Thread.UncaughtExceptionHandler = _ @@ -599,6 +602,37 @@ abstract class BaseScriptTransformationSuite extends SparkPlanTest with SQLTestU 'e.cast("string")).collect()) } } + + test("SPARK-35220: DayTimeIntervalType/YearMonthIntervalType show different " + + "between hive serde and row format delimited") { + assume(TestUtils.testCommandAvailable("/bin/bash")) + withTempView("v") { + val df = Seq( + (Duration.ofDays(1), Period.ofMonths(10)) + ).toDF("a", "b") + df.createTempView("v") + + if (defaultSerDe == "hive-serde") { + checkAnswer(sql( + """ + |SELECT TRANSFORM(a, b) + | USING 'cat' AS (a, b) + |FROM v + |""".stripMargin), + identity, + Row("1 00:00:00.000000000", "0-10") :: Nil) + } else { + checkAnswer(sql( + """ + |SELECT TRANSFORM(a, b) + | USING 'cat' AS (a, b) + |FROM v + |""".stripMargin), + identity, + Row("INTERVAL '1 00:00:00' DAY TO SECOND", "INTERVAL '0-10' YEAR TO MONTH") :: Nil) + } + } + } } case class ExceptionInjectingOperator(child: SparkPlan) extends UnaryExecNode { diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala index f69aea3729daa..5638743b7633d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala @@ -25,6 +25,8 @@ import org.apache.spark.sql.test.SharedSparkSession class SparkScriptTransformationSuite extends BaseScriptTransformationSuite with SharedSparkSession { import testImplicits._ + override protected def defaultSerDe(): String = "row-format-delimited" + override def createScriptTransformationExec( script: String, output: Seq[Attribute], diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationSuite.scala index e4b21e4159fd1..71a745b180494 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationSuite.scala @@ -39,6 +39,8 @@ class HiveScriptTransformationSuite extends BaseScriptTransformationSuite with T import ScriptTransformationIOSchema._ + override protected def defaultSerDe(): String = "hive-serde" + override def createScriptTransformationExec( script: String, output: Seq[Attribute],