From a93bc3da632e1dac8833fcf5fc4f38ca29c19f55 Mon Sep 17 00:00:00 2001
From: Vitalii Li
Date: Thu, 28 Apr 2022 12:57:22 -0700
Subject: [PATCH] [SPARK-39060][SQL] Typo in error messages of decimal overflow

---
 .../main/scala/org/apache/spark/sql/types/Decimal.scala  | 4 ++--
 .../test/resources/sql-tests/results/ansi/cast.sql.out   | 2 +-
 .../results/ansi/decimalArithmeticOperations.sql.out     | 8 ++++----
 .../resources/sql-tests/results/ansi/interval.sql.out    | 2 +-
 .../spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala | 2 +-
 5 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 12ce7a306012..1eeaa46736e9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -227,9 +227,9 @@ final class Decimal extends Ordered[Decimal] with Serializable {
 
   def toDebugString: String = {
     if (decimalVal.ne(null)) {
-      s"Decimal(expanded,$decimalVal,$precision,$scale})"
+      s"Decimal(expanded, $decimalVal, $precision, $scale)"
     } else {
-      s"Decimal(compact,$longVal,$precision,$scale})"
+      s"Decimal(compact, $longVal, $precision, $scale)"
     }
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index 566e27a0e20e..476ec158f1fc 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -666,7 +666,7 @@ select cast('123.45' as decimal(4, 2))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,123.45,5,2}) cannot be represented as Decimal(4, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 123.45, 5, 2) cannot be represented as Decimal(4, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('123.45' as decimal(4, 2))
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
index 1640875973ea..d4b15d929524 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
@@ -76,7 +76,7 @@ select (5e36BD + 0.1) + 5e36BD
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,10000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 10000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select (5e36BD + 0.1) + 5e36BD
        ^^^^^^^^^^^^^^^^^^^^^^^
@@ -88,7 +88,7 @@ select (-4e36BD - 0.1) - 7e36BD
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,-11000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, -11000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select (-4e36BD - 0.1) - 7e36BD
        ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -100,7 +100,7 @@ select 12345678901234567890.0 * 12345678901234567890.0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,152415787532388367501905199875019052100,39,0}) cannot be represented as Decimal(38, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 152415787532388367501905199875019052100, 39, 0) cannot be represented as Decimal(38, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select 12345678901234567890.0 * 12345678901234567890.0
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -112,7 +112,7 @@ select 1e35BD / 0.1
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,1000000000000000000000000000000000000,37,0}) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1000000000000000000000000000000000000, 37, 0) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select 1e35BD / 0.1
        ^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 3cc089ebbf66..2f20953f6ea2 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -664,7 +664,7 @@ select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,1234567890123456789,20,0}) cannot be represented as Decimal(18, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1234567890123456789, 20, 0) cannot be represented as Decimal(18, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
 == SQL(line 1, position 7) ==
 select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index 220a98e27b9f..635ce61acfc2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -75,7 +75,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
       },
       errorClass = "CANNOT_CHANGE_DECIMAL_PRECISION",
       msg =
-        "Decimal(expanded,66666666666666.666,17,3}) cannot be represented as Decimal(8, 1). " +
+        "Decimal(expanded, 66666666666666.666, 17, 3) cannot be represented as Decimal(8, 1). " +
         s"If necessary set $ansiConf to false to bypass this error." +
         """
           |== SQL(line 1, position 7) ==