Skip to content
Closed
Show file tree
Hide file tree
Changes from 14 commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 11 additions & 5 deletions common/utils/src/main/resources/error/error-conditions.json
Original file line number Diff line number Diff line change
Expand Up @@ -1087,6 +1087,12 @@
],
"sqlState" : "42K03"
},
"DATETIME_ARGUMENT_OUT_OF_RANGE" : {
"message" : [
"<rangeMessage>. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22023"
},
"DATETIME_OVERFLOW" : {
"message" : [
"Datetime operation overflow: <operation>."
Expand Down Expand Up @@ -2832,6 +2838,11 @@
"expects an integer literal, but got <invalidValue>."
]
},
"INTERVAL_WITH_MICROSECONDS" : {
"message" : [
"Cannot add hours, minutes, seconds or microseconds to a date. If necessary set <ansiConfig> to \"false\" to bypass this error."
]
},
"LENGTH" : {
"message" : [
"Expects `length` greater than or equal to 0, but got <length>."
Expand Down Expand Up @@ -6701,11 +6712,6 @@
"Sinks cannot request distribution and ordering in continuous execution mode."
]
},
"_LEGACY_ERROR_TEMP_2000" : {
"message" : [
"<message>. If necessary set <ansiConfig> to false to bypass this error."
]
},
"_LEGACY_ERROR_TEMP_2003" : {
"message" : [
"Unsuccessful try to zip maps with <size> unique keys due to exceeding the array size limit <maxRoundedArrayLength>."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1723,7 +1723,7 @@ case class DateAddInterval(
override def nullSafeEval(start: Any, interval: Any): Any = {
val itvl = interval.asInstanceOf[CalendarInterval]
if (ansiEnabled || itvl.microseconds == 0) {
DateTimeUtils.dateAddInterval(start.asInstanceOf[Int], itvl)
DateTimeUtils.dateAddInterval(start.asInstanceOf[Int], itvl, prettyName)
} else {
val startTs = DateTimeUtils.daysToMicros(start.asInstanceOf[Int], zoneId)
val resultTs = DateTimeUtils.timestampAddInterval(
Expand All @@ -1735,14 +1735,14 @@ case class DateAddInterval(
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
nullSafeCodeGen(ctx, ev, (sd, i) => if (ansiEnabled) {
s"""${ev.value} = $dtu.dateAddInterval($sd, $i);"""
s"""${ev.value} = $dtu.dateAddInterval($sd, $i, "$prettyName");"""
} else {
val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
val startTs = ctx.freshName("startTs")
val resultTs = ctx.freshName("resultTs")
s"""
|if ($i.microseconds == 0) {
| ${ev.value} = $dtu.dateAddInterval($sd, $i);
| ${ev.value} = $dtu.dateAddInterval($sd, $i, "$prettyName");
|} else {
| long $startTs = $dtu.daysToMicros($sd, $zid);
| long $resultTs =
Expand Down Expand Up @@ -2489,14 +2489,14 @@ case class MakeDate(
localDateToDays(ld)
} catch {
case e: java.time.DateTimeException =>
if (failOnError) throw QueryExecutionErrors.ansiDateTimeError(e) else null
if (failOnError) throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e) else null
}
}

override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
val failOnErrorBranch = if (failOnError) {
"throw QueryExecutionErrors.ansiDateTimeError(e);"
"throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e);"
} else {
s"${ev.isNull} = true;"
}
Expand Down Expand Up @@ -2723,7 +2723,7 @@ case class MakeTimestamp(
} catch {
case e: SparkDateTimeException if failOnError => throw e
case e: DateTimeException if failOnError =>
throw QueryExecutionErrors.ansiDateTimeError(e)
throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e)
case _: DateTimeException => null
}
}
Expand Down Expand Up @@ -2754,7 +2754,7 @@ case class MakeTimestamp(
val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
val d = Decimal.getClass.getName.stripSuffix("$")
val failOnErrorBranch = if (failOnError) {
"throw QueryExecutionErrors.ansiDateTimeError(e);"
"throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e);"
} else {
s"${ev.isNull} = true;"
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -302,10 +302,10 @@ object DateTimeUtils extends SparkDateTimeUtils {
*/
def dateAddInterval(
start: Int,
interval: CalendarInterval): Int = {
interval: CalendarInterval,
funcName: String): Int = {
if (interval.microseconds != 0) {
throw QueryExecutionErrors.ansiIllegalArgumentError(
"Cannot add hours, minutes or seconds, milliseconds, microseconds to a date")
throw QueryExecutionErrors.invalidIntervalWithMicrosecondsError(funcName)
}
val ld = daysToLocalDate(start).plusMonths(interval.months).plusDays(interval.days)
localDateToDays(ld)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -277,21 +277,22 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
summary = "")
}

def ansiDateTimeError(e: Exception): SparkDateTimeException = {
def ansiDateTimeArgumentOutOfRange(e: Exception): SparkDateTimeException = {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Changes in this PR clash with #48242 by @itholic

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let's resolve that ticket first, but keep this one open for now, as I am not sure what the complete scope of the other ticket will be, and this PR's goal is to remove _LEGACY_ERROR_TEMP_2000 completely.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@MaxGekk I aligned my changes with the @itholic PR, so we can go on and merge this change, then the other PR can improve the message, while this one stays scoped to assigning proper error classes.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

sgtm

new SparkDateTimeException(
errorClass = "_LEGACY_ERROR_TEMP_2000",
errorClass = "DATETIME_ARGUMENT_OUT_OF_RANGE",
messageParameters = Map(
"message" -> e.getMessage,
"rangeMessage" -> e.getMessage,
"ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
context = Array.empty,
summary = "")
}

def ansiIllegalArgumentError(message: String): SparkIllegalArgumentException = {
def invalidIntervalWithMicrosecondsError(funcName: String): SparkIllegalArgumentException = {
new SparkIllegalArgumentException(
errorClass = "_LEGACY_ERROR_TEMP_2000",
errorClass = "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
messageParameters = Map(
"message" -> message,
"parameter" -> toSQLId("interval"),
"functionName" -> toSQLId(funcName),
"ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -436,9 +436,8 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
checkErrorInExpression[SparkIllegalArgumentException](
DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
"_LEGACY_ERROR_TEMP_2000",
Map("message" ->
"Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
"INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
Map("parameter" -> "`interval`", "functionName" -> "`dateaddinterval`",
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

dateaddinterval — there is no such function. That might confuse users. We could pass + instead, or create an error condition specifically for the DateAddInterval expression (not a sub-class of INVALID_PARAMETER_VALUE).

"ansiConfig" -> "\"spark.sql.ansi.enabled\""))
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -536,15 +536,16 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {

test("date add interval with day precision") {
val input = days(1997, 2, 28)
assert(dateAddInterval(input, new CalendarInterval(36, 0, 0)) === days(2000, 2, 28))
assert(dateAddInterval(input, new CalendarInterval(36, 47, 0)) === days(2000, 4, 15))
assert(dateAddInterval(input, new CalendarInterval(-13, 0, 0)) === days(1996, 1, 28))
assert(dateAddInterval(input, new CalendarInterval(36, 0, 0), "") === days(2000, 2, 28))
assert(dateAddInterval(input, new CalendarInterval(36, 47, 0), "") === days(2000, 4, 15))
assert(dateAddInterval(input, new CalendarInterval(-13, 0, 0), "") === days(1996, 1, 28))
checkError(
exception = intercept[SparkIllegalArgumentException](
dateAddInterval(input, new CalendarInterval(36, 47, 1))),
condition = "_LEGACY_ERROR_TEMP_2000",
dateAddInterval(input, new CalendarInterval(36, 47, 1), "test")),
condition = "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
parameters = Map(
"message" -> "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
"parameter" -> "`interval`",
"functionName" -> "`test`",
"ansiConfig" -> "\"spark.sql.ansi.enabled\""))
}

Expand Down
10 changes: 6 additions & 4 deletions sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
Original file line number Diff line number Diff line change
Expand Up @@ -53,10 +53,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
"rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
}
}

Expand All @@ -68,10 +69,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
"rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,10 +154,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
}
}

Expand Down Expand Up @@ -185,10 +186,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
}
}

Expand All @@ -200,10 +202,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -687,10 +687,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid date 'FEBRUARY 30'"
"rangeMessage" : "Invalid date 'FEBRUARY 30'"
}
}

Expand All @@ -702,10 +703,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
"rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
}
}

Expand All @@ -717,10 +719,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
"rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,10 +154,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
}
}

Expand Down Expand Up @@ -185,10 +186,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
}
}

Expand All @@ -200,10 +202,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
}
}

Expand Down