diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index a9155e8daf10..6fb3712dd6de 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1235,11 +1235,13 @@ object TryToTimestampExpressionBuilder extends ExpressionBuilder {
   override def build(funcName: String, expressions: Seq[Expression]): Expression = {
     val numArgs = expressions.length
     if (numArgs == 1 || numArgs == 2) {
-      ParseToTimestamp(
+      // The expression ParseToTimestamp will throw a SparkUpgradeException if the input is invalid
+      // even when failOnError is false. We need to catch the exception and return null.
+      TryEval(ParseToTimestamp(
         expressions.head,
         expressions.drop(1).lastOption,
         SQLConf.get.timestampType,
-        failOnError = false)
+        failOnError = false))
     } else {
       throw QueryCompilationErrors.wrongNumArgsError(funcName, Seq(1, 2), numArgs)
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index b261ecfb0cee..07445c8910d0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -1392,4 +1392,9 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
     checkAnswer(df.selectExpr("try_to_timestamp(a)"), Seq(Row(ts)))
     checkAnswer(df.select(try_to_timestamp(col("a"))), Seq(Row(ts)))
   }
+
+  test("try_to_timestamp: return null on SparkUpgradeException") {
+    val df = spark.sql("SELECT try_to_timestamp('2016-12-1', 'yyyy-MM-dd')")
+    checkAnswer(df, Seq(Row(null)))
+  }
 }