17 changes: 12 additions & 5 deletions common/utils/src/main/resources/error/error-conditions.json
@@ -1083,6 +1083,12 @@
],
"sqlState" : "22008"
},
"DATE_TIME_FIELD_OUT_OF_BOUNDS" : {
"message" : [
"Invalid value for datetime field. If necessary set <ansiConfig> to false to bypass this error."
],
"sqlState" : "22008"
},
"DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION" : {
"message" : [
"Decimal precision <precision> exceeds max precision <maxPrecision>."
@@ -4782,6 +4788,12 @@
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATETIME_UNIT_ADDITION" : {
"message" : [
"Cannot add hours, minutes or seconds, milliseconds, microseconds to a date. If necessary set <ansiConfig> to false to bypass this error."
],
"sqlState" : "22008"
},
"UNSUPPORTED_DEFAULT_VALUE" : {
"message" : [
"DEFAULT column values is not supported."
@@ -6691,11 +6703,6 @@
"Sinks cannot request distribution and ordering in continuous execution mode."
]
},
"_LEGACY_ERROR_TEMP_2000" : {
"message" : [
"<message>. If necessary set <ansiConfig> to false to bypass this error."
]
},
"_LEGACY_ERROR_TEMP_2003" : {
"message" : [
"Unsuccessful try to zip maps with <size> unique keys due to exceeding the array size limit <maxRoundedArrayLength>."
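As a quick illustration of the first new condition: with ANSI mode on, an out-of-range field in a datetime constructor now surfaces DATE_TIME_FIELD_OUT_OF_BOUNDS instead of the opaque _LEGACY_ERROR_TEMP_2000. A minimal sketch (the local-session setup is illustrative, not part of this PR):

```scala
import org.apache.spark.sql.SparkSession

// Illustrative local session with ANSI mode enabled.
val spark = SparkSession.builder()
  .master("local[*]")
  .config("spark.sql.ansi.enabled", "true")
  .getOrCreate()

// Month 13 is outside the valid range 1 - 12, so this raises
// SparkDateTimeException with errorClass DATE_TIME_FIELD_OUT_OF_BOUNDS.
spark.sql("SELECT make_date(2013, 13, 1)").collect()
```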
23 changes: 20 additions & 3 deletions common/utils/src/main/scala/org/apache/spark/SparkException.scala
@@ -306,8 +306,9 @@ private[spark] class SparkDateTimeException private(
message: String,
errorClass: Option[String],
messageParameters: Map[String, String],
context: Array[QueryContext])
extends DateTimeException(message) with SparkThrowable {
context: Array[QueryContext],
cause: Option[Throwable])
extends DateTimeException(message, cause.orNull) with SparkThrowable {

def this(
errorClass: String,
@@ -318,7 +319,23 @@
SparkThrowableHelper.getMessage(errorClass, messageParameters, summary),
Option(errorClass),
messageParameters,
context
context,
cause = None
)
}

def this(
errorClass: String,
messageParameters: Map[String, String],
context: Array[QueryContext],
summary: String,
cause: Option[Throwable]) = {
this(
SparkThrowableHelper.getMessage(errorClass, messageParameters, summary),
Option(errorClass),
messageParameters,
context,
cause
)
}

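With the extra Option[Throwable] parameter, call sites inside Spark can keep the originating java.time exception attached. A hedged sketch of the new auxiliary constructor in use (the values are illustrative, and SparkDateTimeException is private[spark], so this only compiles inside Spark itself):

```scala
import java.time.DateTimeException
import org.apache.spark.SparkDateTimeException

// Illustrative values; mirrors what ansiDateTimeError does below.
val underlying = new DateTimeException(
  "Invalid value for MonthOfYear (valid values 1 - 12): 13")

val wrapped = new SparkDateTimeException(
  errorClass = "DATE_TIME_FIELD_OUT_OF_BOUNDS",
  messageParameters = Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""),
  context = Array.empty,
  summary = "",
  cause = Some(underlying))

// The root cause is now preserved instead of being flattened into text.
assert(wrapped.getCause eq underlying)
```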
@@ -304,8 +304,7 @@ object DateTimeUtils extends SparkDateTimeUtils {
start: Int,
interval: CalendarInterval): Int = {
if (interval.microseconds != 0) {
throw QueryExecutionErrors.ansiIllegalArgumentError(
"Cannot add hours, minutes or seconds, milliseconds, microseconds to a date")
throw QueryExecutionErrors.ansiIllegalArgumentError()
}
val ld = daysToLocalDate(start).plusMonths(interval.months).plusDays(interval.days)
localDateToDays(ld)
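The guard itself is unchanged: any non-zero microseconds component still rejects the addition; only the error condition it maps to is new. A minimal sketch of a call that trips it (argument values are illustrative, matching the style of DateTimeUtilsSuite below):

```scala
import org.apache.spark.sql.catalyst.util.DateTimeUtils.dateAddInterval
import org.apache.spark.unsafe.types.CalendarInterval

// 36 months and 47 days are legal to add to a date, but the trailing
// 1 microsecond makes the interval carry a sub-day unit, so this throws
// SparkIllegalArgumentException with UNSUPPORTED_DATETIME_UNIT_ADDITION.
val startDays = 0 // days since the epoch, i.e. 1970-01-01
dateAddInterval(startDays, new CalendarInterval(36, 47, 1))
```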
@@ -279,19 +279,18 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE

def ansiDateTimeError(e: Exception): SparkDateTimeException = {
new SparkDateTimeException(
errorClass = "_LEGACY_ERROR_TEMP_2000",
errorClass = "DATE_TIME_FIELD_OUT_OF_BOUNDS",
messageParameters = Map(
"message" -> e.getMessage,
"ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
context = Array.empty,
summary = "")
summary = "",
cause = Some(e))
}

def ansiIllegalArgumentError(message: String): SparkIllegalArgumentException = {
def ansiIllegalArgumentError(): SparkIllegalArgumentException = {
new SparkIllegalArgumentException(
errorClass = "_LEGACY_ERROR_TEMP_2000",
errorClass = "UNSUPPORTED_DATETIME_UNIT_ADDITION",
messageParameters = Map(
"message" -> message,
"ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
}

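Two caller-facing consequences are worth noting: ansiIllegalArgumentError no longer accepts free-form text (the new condition's message is fixed), and ansiDateTimeError now chains the original exception instead of flattening it into a <message> parameter. A hedged sketch of the resulting call-site pattern (the parsing helper is hypothetical):

```scala
import java.time.DateTimeException
import org.apache.spark.sql.errors.QueryExecutionErrors

// Hypothetical helper (only compiles inside Spark's sql module, since
// QueryExecutionErrors is private[sql]): wraps java.time parsing the way
// Spark's datetime code paths do after this change.
def parseToEpochDay(s: String): Long =
  try {
    java.time.LocalDate.parse(s).toEpochDay
  } catch {
    // The original DateTimeException now travels along as the cause.
    case e: DateTimeException => throw QueryExecutionErrors.ansiDateTimeError(e)
  }
```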
@@ -436,7 +436,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
checkErrorInExpression[SparkIllegalArgumentException](
DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
"_LEGACY_ERROR_TEMP_2000",
"DATE_TIME_FIELD_OUT_OF_BOUNDS",
Map("message" ->
"Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
"ansiConfig" -> "\"spark.sql.ansi.enabled\""))
@@ -542,7 +542,7 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
checkError(
exception = intercept[SparkIllegalArgumentException](
dateAddInterval(input, new CalendarInterval(36, 47, 1))),
condition = "_LEGACY_ERROR_TEMP_2000",
condition = "DATE_TIME_FIELD_OUT_OF_BOUNDS",
parameters = Map(
"message" -> "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
"ansiConfig" -> "\"spark.sql.ansi.enabled\""))
12 changes: 6 additions & 6 deletions sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -53,10 +53,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -68,10 +68,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -154,10 +154,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -185,10 +185,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -200,10 +200,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -687,10 +687,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid date 'FEBRUARY 30'"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -702,10 +702,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -717,10 +717,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -154,10 +154,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -185,10 +185,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}

@@ -200,10 +200,10 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATE_TIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22008",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
"ansiConfig" : "\"spark.sql.ansi.enabled\""
}
}
