From ed94384f1db4ae2bb0e5cac21ddb20d224bf4458 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Thu, 3 Jul 2025 18:05:10 +0900 Subject: [PATCH 01/11] temp --- .../resources/error/error-conditions.json | 6 +++++ .../sql/catalyst/expressions/arithmetic.scala | 27 ++++++++++++++----- .../sql/errors/QueryExecutionErrors.scala | 8 ++++++ .../ArithmeticExpressionSuite.scala | 4 +-- .../decimalArithmeticOperations.sql.out | 4 +-- .../QueryExecutionAnsiErrorsSuite.scala | 20 ++++++++++++++ 6 files changed, 59 insertions(+), 10 deletions(-) diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json index 000b1f524f207..b2c4e7f49eb2d 100644 --- a/common/utils/src/main/resources/error/error-conditions.json +++ b/common/utils/src/main/resources/error/error-conditions.json @@ -1366,6 +1366,12 @@ ], "sqlState" : "22012" }, + "MOD_BY_ZERO" : { + "message" : [ + "Mod by zero. Use `try_mod` to tolerate divisor being 0 and return NULL instead. If necessary set to \"false\" to bypass this error." + ], + "sqlState" : "22012" + }, "DUPLICATED_CTE_NAMES" : { "message" : [ "CTE definition can't have duplicate names: ." diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala index f9e8b6a17896f..4c4ed01fc8775 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala @@ -645,7 +645,13 @@ trait DivModLike extends BinaryArithmetic { } else { if (isZero(input2)) { // when we reach here, failOnError must be true. 
- throw QueryExecutionErrors.divideByZeroError(getContextOrNull()) + val context = getContextOrNull() + val ex = this match { + case _: Remainder => QueryExecutionErrors.modByZeroError(context) + case _: Pmod => QueryExecutionErrors.modByZeroError(context) + case _ => QueryExecutionErrors.divideByZeroError(context) + } + throw ex } if (checkDivideOverflow && input1 == Long.MinValue && input2 == -1) { throw QueryExecutionErrors.overflowInIntegralDivideError(getContextOrNull()) @@ -660,6 +666,15 @@ trait DivModLike extends BinaryArithmetic { /** * Special case handling due to division/remainder by 0 => null or ArithmeticException. */ + protected def divideByZeroErrorCode(ctx: CodegenContext): String = { + val errorContextCode = getContextOrNullCode(ctx, failOnError) + this match { + case _: Remainder => s"QueryExecutionErrors.modByZeroError($errorContextCode)" + case _: Pmod => s"QueryExecutionErrors.modByZeroError($errorContextCode)" + case _ => s"QueryExecutionErrors.divideByZeroError($errorContextCode)" + } + } + override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val eval1 = left.genCode(ctx) val eval2 = right.genCode(ctx) @@ -697,7 +712,7 @@ trait DivModLike extends BinaryArithmetic { // evaluate right first as we have a chance to skip left if right is 0 if (!left.nullable && !right.nullable) { val divByZero = if (failOnError) { - s"throw QueryExecutionErrors.divideByZeroError($errorContextCode);" + s"throw ${divideByZeroErrorCode(ctx)};" } else { s"${ev.isNull} = true;" } @@ -715,7 +730,7 @@ trait DivModLike extends BinaryArithmetic { } else { val nullOnErrorCondition = if (failOnError) "" else s" || $isZero" val failOnErrorBranch = if (failOnError) { - s"if ($isZero) throw QueryExecutionErrors.divideByZeroError($errorContextCode);" + s"if ($isZero) throw ${divideByZeroErrorCode(ctx)};" } else { "" } @@ -1038,7 +1053,7 @@ case class Pmod( } else { if (isZero(input2)) { // when we reach here, failOnError must bet true. 
- throw QueryExecutionErrors.divideByZeroError(getContextOrNull()) + throw QueryExecutionErrors.modByZeroError(getContextOrNull()) } pmodFunc(input1, input2) } @@ -1095,7 +1110,7 @@ case class Pmod( // evaluate right first as we have a chance to skip left if right is 0 if (!left.nullable && !right.nullable) { val divByZero = if (failOnError) { - s"throw QueryExecutionErrors.divideByZeroError($errorContext);" + s"throw QueryExecutionErrors.modByZeroError($errorContext);" } else { s"${ev.isNull} = true;" } @@ -1112,7 +1127,7 @@ case class Pmod( } else { val nullOnErrorCondition = if (failOnError) "" else s" || $isZero" val failOnErrorBranch = if (failOnError) { - s"if ($isZero) throw QueryExecutionErrors.divideByZeroError($errorContext);" + s"if ($isZero) throw QueryExecutionErrors.modByZeroError($errorContext);" } else { "" } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 7f623039778ab..ad5cf38e234dc 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -205,6 +205,14 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE summary = getSummary(context)) } + def modByZeroError(context: QueryContext): ArithmeticException = { + new SparkArithmeticException( + errorClass = "MOD_BY_ZERO", + messageParameters = Map("config" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), + context = Array(context), + summary = getSummary(context)) + } + def intervalDividedByZeroError(context: QueryContext): ArithmeticException = { new SparkArithmeticException( errorClass = "INTERVAL_DIVIDED_BY_ZERO", diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala index 89f0b95f5c18f..88234dc0856c6 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala @@ -463,7 +463,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper } withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { checkExceptionInExpression[ArithmeticException]( - Remainder(left, Literal(convert(0))), "Division by zero") + Remainder(left, Literal(convert(0))), "Mod by zero") } } checkEvaluation(Remainder(positiveShortLit, positiveShortLit), 0.toShort) @@ -567,7 +567,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper } withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { checkExceptionInExpression[ArithmeticException]( - Pmod(left, Literal(convert(0))), "Division by zero") + Pmod(left, Literal(convert(0))), "Mod by zero") } } checkEvaluation(Pmod(Literal(-7), Literal(3)), 2) diff --git a/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out index cb52778c420ae..640df3e55e6ec 100644 --- a/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out @@ -36,7 +36,7 @@ struct<> -- !query output org.apache.spark.SparkArithmeticException { - "errorClass" : "DIVIDE_BY_ZERO", + "errorClass" : "MOD_BY_ZERO", "sqlState" : "22012", "messageParameters" : { "config" : "\"spark.sql.ansi.enabled\"" @@ -58,7 +58,7 @@ struct<> -- !query output org.apache.spark.SparkArithmeticException { - "errorClass" : "DIVIDE_BY_ZERO", + "errorClass" : "MOD_BY_ZERO", "sqlState" : "22012", "messageParameters" : { "config" : "\"spark.sql.ansi.enabled\"" diff 
--git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala index 267b633c76760..0110359f5094b 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala @@ -85,6 +85,26 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest callSitePattern = getCurrentClassCallSitePattern)) } + test("MOD_BY_ZERO: can't take modulo of an integer by zero") { + checkError( + exception = intercept[SparkArithmeticException] { + sql("select 6 % 0").collect() + }, + condition = "MOD_BY_ZERO", + sqlState = "22012", + parameters = Map("config" -> ansiConf), + context = ExpectedContext(fragment = "6 % 0", start = 7, stop = 11)) + + checkError( + exception = intercept[SparkArithmeticException] { + sql("select pmod(6, 0)").collect() + }, + condition = "MOD_BY_ZERO", + sqlState = "22012", + parameters = Map("config" -> ansiConf), + context = ExpectedContext(fragment = "pmod(6, 0)", start = 7, stop = 17)) + } + test("INTERVAL_DIVIDED_BY_ZERO: interval divided by zero") { checkError( exception = intercept[SparkArithmeticException] { From a8e1b2d8bd4b4c5372aa15e2fa4270c75da35bfa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Sat, 5 Jul 2025 12:32:44 +0900 Subject: [PATCH 02/11] Change from mod to remainder --- common/utils/src/main/resources/error/error-conditions.json | 2 +- .../sql/catalyst/expressions/ArithmeticExpressionSuite.scala | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json index b2c4e7f49eb2d..cf77fa45d2e6c 100644 --- a/common/utils/src/main/resources/error/error-conditions.json +++ 
b/common/utils/src/main/resources/error/error-conditions.json @@ -1368,7 +1368,7 @@ }, "MOD_BY_ZERO" : { "message" : [ - "Mod by zero. Use `try_mod` to tolerate divisor being 0 and return NULL instead. If necessary set to \"false\" to bypass this error." + "Remainder by zero. Use `try_mod` to tolerate divisor being 0 and return NULL instead. If necessary set to \"false\" to bypass this error." ], "sqlState" : "22012" }, diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala index 88234dc0856c6..84376b2ad2a82 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala @@ -463,7 +463,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper } withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { checkExceptionInExpression[ArithmeticException]( - Remainder(left, Literal(convert(0))), "Mod by zero") + Remainder(left, Literal(convert(0))), "Remainder by zero") } } checkEvaluation(Remainder(positiveShortLit, positiveShortLit), 0.toShort) @@ -567,7 +567,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper } withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { checkExceptionInExpression[ArithmeticException]( - Pmod(left, Literal(convert(0))), "Mod by zero") + Pmod(left, Literal(convert(0))), "Remainder by zero") } } checkEvaluation(Pmod(Literal(-7), Literal(3)), 2) From d378e0af289cbf21779d573b62dd5c9882c7b6e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Sat, 5 Jul 2025 12:41:41 +0900 Subject: [PATCH 03/11] Change from mod to remainder --- common/utils/src/main/resources/error/error-conditions.json | 2 +- 
.../org/apache/spark/sql/errors/QueryExecutionErrors.scala | 2 +- .../sql-tests/results/decimalArithmeticOperations.sql.out | 4 ++-- .../spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala | 6 +++--- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json index cf77fa45d2e6c..c979536a52bb7 100644 --- a/common/utils/src/main/resources/error/error-conditions.json +++ b/common/utils/src/main/resources/error/error-conditions.json @@ -1366,7 +1366,7 @@ ], "sqlState" : "22012" }, - "MOD_BY_ZERO" : { + "REMAINDER_BY_ZERO" : { "message" : [ "Remainder by zero. Use `try_mod` to tolerate divisor being 0 and return NULL instead. If necessary set to \"false\" to bypass this error." ], diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index ad5cf38e234dc..f1a312ce76d12 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -207,7 +207,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE def modByZeroError(context: QueryContext): ArithmeticException = { new SparkArithmeticException( - errorClass = "MOD_BY_ZERO", + errorClass = "REMAINDER_BY_ZERO", messageParameters = Map("config" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = Array(context), summary = getSummary(context)) diff --git a/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out index 640df3e55e6ec..5110af2189cd2 100644 --- a/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out @@ -36,7 +36,7 @@ struct<> -- !query output org.apache.spark.SparkArithmeticException { - "errorClass" : "MOD_BY_ZERO", + "errorClass" : "REMAINDER_BY_ZERO", "sqlState" : "22012", "messageParameters" : { "config" : "\"spark.sql.ansi.enabled\"" @@ -58,7 +58,7 @@ struct<> -- !query output org.apache.spark.SparkArithmeticException { - "errorClass" : "MOD_BY_ZERO", + "errorClass" : "REMAINDER_BY_ZERO", "sqlState" : "22012", "messageParameters" : { "config" : "\"spark.sql.ansi.enabled\"" diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala index 0110359f5094b..abdf5e210ebf7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala @@ -85,12 +85,12 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest callSitePattern = getCurrentClassCallSitePattern)) } - test("MOD_BY_ZERO: can't take modulo of an integer by zero") { + test("REMAINDER_BY_ZERO: can't take modulo of an integer by zero") { checkError( exception = intercept[SparkArithmeticException] { sql("select 6 % 0").collect() }, - condition = "MOD_BY_ZERO", + condition = "REMAINDER_BY_ZERO", sqlState = "22012", parameters = Map("config" -> ansiConf), context = ExpectedContext(fragment = "6 % 0", start = 7, stop = 11)) @@ -99,7 +99,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest exception = intercept[SparkArithmeticException] { sql("select pmod(6, 0)").collect() }, - condition = "MOD_BY_ZERO", + condition = "REMAINDER_BY_ZERO", sqlState = "22012", parameters = Map("config" -> ansiConf), context = ExpectedContext(fragment = "pmod(6, 0)", start = 7, stop = 17)) From 9614eabb69c4d69f8199f5862255644d1daee13e Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Sat, 5 Jul 2025 12:49:34 +0900 Subject: [PATCH 04/11] Change from mod to remainder --- .../sql/catalyst/expressions/arithmetic.scala | 14 +++++++------- .../spark/sql/errors/QueryExecutionErrors.scala | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala index 4c4ed01fc8775..5b31420c10830 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala @@ -647,8 +647,8 @@ trait DivModLike extends BinaryArithmetic { // when we reach here, failOnError must be true. val context = getContextOrNull() val ex = this match { - case _: Remainder => QueryExecutionErrors.modByZeroError(context) - case _: Pmod => QueryExecutionErrors.modByZeroError(context) + case _: Remainder => QueryExecutionErrors.remainderByZeroError(context) + case _: Pmod => QueryExecutionErrors.remainderByZeroError(context) case _ => QueryExecutionErrors.divideByZeroError(context) } throw ex @@ -669,8 +669,8 @@ trait DivModLike extends BinaryArithmetic { protected def divideByZeroErrorCode(ctx: CodegenContext): String = { val errorContextCode = getContextOrNullCode(ctx, failOnError) this match { - case _: Remainder => s"QueryExecutionErrors.modByZeroError($errorContextCode)" - case _: Pmod => s"QueryExecutionErrors.modByZeroError($errorContextCode)" + case _: Remainder => s"QueryExecutionErrors.remainderByZeroError($errorContextCode)" + case _: Pmod => s"QueryExecutionErrors.remainderByZeroError($errorContextCode)" case _ => s"QueryExecutionErrors.divideByZeroError($errorContextCode)" } } @@ -1053,7 +1053,7 @@ case class Pmod( } else { if (isZero(input2)) { // when we reach here, 
failOnError must bet true. - throw QueryExecutionErrors.modByZeroError(getContextOrNull()) + throw QueryExecutionErrors.remainderByZeroError(getContextOrNull()) } pmodFunc(input1, input2) } @@ -1110,7 +1110,7 @@ case class Pmod( // evaluate right first as we have a chance to skip left if right is 0 if (!left.nullable && !right.nullable) { val divByZero = if (failOnError) { - s"throw QueryExecutionErrors.modByZeroError($errorContext);" + s"throw QueryExecutionErrors.remainderByZeroError($errorContext);" } else { s"${ev.isNull} = true;" } @@ -1127,7 +1127,7 @@ case class Pmod( } else { val nullOnErrorCondition = if (failOnError) "" else s" || $isZero" val failOnErrorBranch = if (failOnError) { - s"if ($isZero) throw QueryExecutionErrors.modByZeroError($errorContext);" + s"if ($isZero) throw QueryExecutionErrors.remainderByZeroError($errorContext);" } else { "" } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index f1a312ce76d12..9acabfa05d03c 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -205,7 +205,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE summary = getSummary(context)) } - def modByZeroError(context: QueryContext): ArithmeticException = { + def remainderByZeroError(context: QueryContext): ArithmeticException = { new SparkArithmeticException( errorClass = "REMAINDER_BY_ZERO", messageParameters = Map("config" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), From 72f17aa5a327d83a4c04d424c925beb2ee47b1a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Sat, 5 Jul 2025 13:38:01 +0900 Subject: [PATCH 05/11] test change --- .../ArithmeticExpressionSuite.scala | 47 
+++++++++++++++---- 1 file changed, 37 insertions(+), 10 deletions(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala index 84376b2ad2a82..63864c4d2602a 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala @@ -873,12 +873,11 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper test("SPARK-33008: division by zero on divide-like operations returns incorrect result") { withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { - val operators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = + // Test division operations + val divideOperators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = Seq((Divide(_, _), testDecimalAndDoubleType), - (IntegralDivide(_, _), testDecimalAndLongType), - (Remainder(_, _), testNumericDataTypes), - (Pmod(_, _), testNumericDataTypes)) - operators.foreach { case (operator, testTypesFn) => + (IntegralDivide(_, _), testDecimalAndLongType)) + divideOperators.foreach { case (operator, testTypesFn) => testTypesFn { convert => val one = Literal(convert(1)) val zero = Literal(convert(0)) @@ -887,6 +886,20 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkExceptionInExpression[ArithmeticException](operator(one, zero), "Division by zero") } } + + // Test remainder operations + val remainderOperators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = + Seq((Remainder(_, _), testNumericDataTypes), + (Pmod(_, _), testNumericDataTypes)) + remainderOperators.foreach { case (operator, testTypesFn) => + testTypesFn { convert => + val one = Literal(convert(1)) + val zero = 
Literal(convert(0)) + checkEvaluation(operator(Literal.create(null, one.dataType), zero), null) + checkEvaluation(operator(one, Literal.create(null, zero.dataType)), null) + checkExceptionInExpression[ArithmeticException](operator(one, zero), "Remainder by zero") + } + } } } @@ -931,12 +944,11 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper test("SPARK-34920: error class") { withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { - val operators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = + // Test division operations + val divideOperators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = Seq((Divide(_, _), testDecimalAndDoubleType), - (IntegralDivide(_, _), testDecimalAndLongType), - (Remainder(_, _), testNumericDataTypes), - (Pmod(_, _), testNumericDataTypes)) - operators.foreach { case (operator, testTypesFn) => + (IntegralDivide(_, _), testDecimalAndLongType)) + divideOperators.foreach { case (operator, testTypesFn) => testTypesFn { convert => val one = Literal(convert(1)) val zero = Literal(convert(0)) @@ -946,6 +958,21 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper "Division by zero") } } + + // Test remainder operations + val remainderOperators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = + Seq((Remainder(_, _), testNumericDataTypes), + (Pmod(_, _), testNumericDataTypes)) + remainderOperators.foreach { case (operator, testTypesFn) => + testTypesFn { convert => + val one = Literal(convert(1)) + val zero = Literal(convert(0)) + checkEvaluation(operator(Literal.create(null, one.dataType), zero), null) + checkEvaluation(operator(one, Literal.create(null, zero.dataType)), null) + checkExceptionInExpression[SparkArithmeticException](operator(one, zero), + "Remainder by zero") + } + } } } From ded07e4615ccfd4e1f706cca9f807ede68c801b9 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Wed, 3 Sep 2025 19:00:51 +0900 Subject: [PATCH 06/11] ci: trigger From bf26925a09914684039289f2b4dd596d9cc115e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Thu, 4 Sep 2025 08:46:21 +0900 Subject: [PATCH 07/11] scala style code --- .../ArithmeticExpressionSuite.scala | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala index b4946451b7dc1..649ce2478825e 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala @@ -874,7 +874,9 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper test("SPARK-33008: division by zero on divide-like operations returns incorrect result") { withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { // Test division operations - val divideOperators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = + val divideOperators: Seq[ + ((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit) + ] = Seq((Divide(_, _), testDecimalAndDoubleType), (IntegralDivide(_, _), testDecimalAndLongType)) divideOperators.foreach { case (operator, testTypesFn) => @@ -886,9 +888,11 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkExceptionInExpression[ArithmeticException](operator(one, zero), "Division by zero") } } - + // Test remainder operations - val remainderOperators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = + val 
remainderOperators: Seq[ + ((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit) + ] = Seq((Remainder(_, _), testNumericDataTypes), (Pmod(_, _), testNumericDataTypes)) remainderOperators.foreach { case (operator, testTypesFn) => @@ -945,7 +949,9 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper test("SPARK-34920: error class") { withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { // Test division operations - val divideOperators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = + val divideOperators: Seq[ + ((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit) + ] = Seq((Divide(_, _), testDecimalAndDoubleType), (IntegralDivide(_, _), testDecimalAndLongType)) divideOperators.foreach { case (operator, testTypesFn) => @@ -958,9 +964,11 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper "Division by zero") } } - + // Test remainder operations - val remainderOperators: Seq[((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit)] = + val remainderOperators: Seq[ + ((Expression, Expression) => Expression, ((Int => Any) => Unit) => Unit) + ] = Seq((Remainder(_, _), testNumericDataTypes), (Pmod(_, _), testNumericDataTypes)) remainderOperators.foreach { case (operator, testTypesFn) => From 0cedb9c8b587b4a366c657e4fd6f49c567f56a8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Thu, 4 Sep 2025 11:45:31 +0900 Subject: [PATCH 08/11] fix --- .../apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala index abdf5e210ebf7..d77d4c688aa95 100644 --- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala @@ -102,7 +102,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest condition = "REMAINDER_BY_ZERO", sqlState = "22012", parameters = Map("config" -> ansiConf), - context = ExpectedContext(fragment = "pmod(6, 0)", start = 7, stop = 17)) + context = ExpectedContext(fragment = "pmod(6, 0)", start = 7, stop = 16)) } test("INTERVAL_DIVIDED_BY_ZERO: interval divided by zero") { From 23ecf5f360d420a142df445a55d6f7b34a23e7f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Thu, 4 Sep 2025 11:48:08 +0900 Subject: [PATCH 09/11] fix operators.sql.out --- sql/core/src/test/resources/sql-tests/results/operators.sql.out | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/core/src/test/resources/sql-tests/results/operators.sql.out b/sql/core/src/test/resources/sql-tests/results/operators.sql.out index 356e5eca5feb2..2c14afe8c4832 100644 --- a/sql/core/src/test/resources/sql-tests/results/operators.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/operators.sql.out @@ -496,7 +496,7 @@ struct<> -- !query output org.apache.spark.SparkArithmeticException { - "errorClass" : "DIVIDE_BY_ZERO", + "errorClass" : "REMAINDER_BY_ZERO", "sqlState" : "22012", "messageParameters" : { "config" : "\"spark.sql.ansi.enabled\"" From b0cea7c43790d4d1e5dd44a6e6ba4e02a39e6a73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Thu, 4 Sep 2025 13:23:14 +0900 Subject: [PATCH 10/11] fix operators.sql.out(pmod) --- .../src/test/resources/sql-tests/results/operators.sql.out | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/operators.sql.out 
b/sql/core/src/test/resources/sql-tests/results/operators.sql.out index 2c14afe8c4832..64d2c6f5f2ac8 100644 --- a/sql/core/src/test/resources/sql-tests/results/operators.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/operators.sql.out @@ -566,7 +566,7 @@ struct<> -- !query output org.apache.spark.SparkArithmeticException { - "errorClass" : "DIVIDE_BY_ZERO", + "errorClass" : "REMAINDER_BY_ZERO", "sqlState" : "22012", "messageParameters" : { "config" : "\"spark.sql.ansi.enabled\"" @@ -588,7 +588,7 @@ struct<> -- !query output org.apache.spark.SparkArithmeticException { - "errorClass" : "DIVIDE_BY_ZERO", + "errorClass" : "REMAINDER_BY_ZERO", "sqlState" : "22012", "messageParameters" : { "config" : "\"spark.sql.ansi.enabled\"" From f18422f1e0d8a8a999b270cfbbb04b9735fb99ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=84=80=E1=85=A9=E1=86=BC=E1=84=89=E1=85=A5=E1=86=BC?= =?UTF-8?q?=E1=84=8C=E1=85=A2?= Date: Thu, 4 Sep 2025 14:55:19 +0900 Subject: [PATCH 11/11] error-conditions.json formatting --- .../src/main/resources/error/error-conditions.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json index dc95ef287b478..49ac5bcc7cd40 100644 --- a/common/utils/src/main/resources/error/error-conditions.json +++ b/common/utils/src/main/resources/error/error-conditions.json @@ -1395,12 +1395,6 @@ ], "sqlState" : "22012" }, - "REMAINDER_BY_ZERO" : { - "message" : [ - "Remainder by zero. Use `try_mod` to tolerate divisor being 0 and return NULL instead. If necessary set to \"false\" to bypass this error." - ], - "sqlState" : "22012" - }, "DUPLICATED_CTE_NAMES" : { "message" : [ "CTE definition can't have duplicate names: ." @@ -4926,6 +4920,12 @@ ], "sqlState" : "42601" }, + "REMAINDER_BY_ZERO" : { + "message" : [ + "Remainder by zero. Use `try_mod` to tolerate divisor being 0 and return NULL instead. 
If necessary set to \"false\" to bypass this error." ], "sqlState" : "22012" }, "RENAME_SRC_PATH_NOT_FOUND" : { "message" : [ "Failed to rename as <sourcePath> was not found."