Skip to content

Commit a93bc3d

Browse files
committed
[SPARK-39060][SQL] Typo in error messages of decimal overflow
1 parent: 9305cc7 · commit: a93bc3d

File tree

5 files changed

+9
-9
lines changed

5 files changed

+9
-9
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -227,9 +227,9 @@ final class Decimal extends Ordered[Decimal] with Serializable {
227227

228228
def toDebugString: String = {
229229
if (decimalVal.ne(null)) {
230-
s"Decimal(expanded,$decimalVal,$precision,$scale})"
230+
s"Decimal(expanded, $decimalVal, $precision, $scale)"
231231
} else {
232-
s"Decimal(compact,$longVal,$precision,$scale})"
232+
s"Decimal(compact, $longVal, $precision, $scale)"
233233
}
234234
}
235235

sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -666,7 +666,7 @@ select cast('123.45' as decimal(4, 2))
666666
struct<>
667667
-- !query output
668668
org.apache.spark.SparkArithmeticException
669-
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,123.45,5,2}) cannot be represented as Decimal(4, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
669+
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 123.45, 5, 2) cannot be represented as Decimal(4, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
670670
== SQL(line 1, position 7) ==
671671
select cast('123.45' as decimal(4, 2))
672672
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ select (5e36BD + 0.1) + 5e36BD
7676
struct<>
7777
-- !query output
7878
org.apache.spark.SparkArithmeticException
79-
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,10000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
79+
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 10000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
8080
== SQL(line 1, position 7) ==
8181
select (5e36BD + 0.1) + 5e36BD
8282
^^^^^^^^^^^^^^^^^^^^^^^
@@ -88,7 +88,7 @@ select (-4e36BD - 0.1) - 7e36BD
8888
struct<>
8989
-- !query output
9090
org.apache.spark.SparkArithmeticException
91-
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,-11000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
91+
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, -11000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
9292
== SQL(line 1, position 7) ==
9393
select (-4e36BD - 0.1) - 7e36BD
9494
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -100,7 +100,7 @@ select 12345678901234567890.0 * 12345678901234567890.0
100100
struct<>
101101
-- !query output
102102
org.apache.spark.SparkArithmeticException
103-
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,152415787532388367501905199875019052100,39,0}) cannot be represented as Decimal(38, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
103+
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 152415787532388367501905199875019052100, 39, 0) cannot be represented as Decimal(38, 2). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
104104
== SQL(line 1, position 7) ==
105105
select 12345678901234567890.0 * 12345678901234567890.0
106106
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -112,7 +112,7 @@ select 1e35BD / 0.1
112112
struct<>
113113
-- !query output
114114
org.apache.spark.SparkArithmeticException
115-
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,1000000000000000000000000000000000000,37,0}) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
115+
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1000000000000000000000000000000000000, 37, 0) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
116116
== SQL(line 1, position 7) ==
117117
select 1e35BD / 0.1
118118
^^^^^^^^^^^^

sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -664,7 +664,7 @@ select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)
664664
struct<>
665665
-- !query output
666666
org.apache.spark.SparkArithmeticException
667-
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded,1234567890123456789,20,0}) cannot be represented as Decimal(18, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
667+
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1234567890123456789, 20, 0) cannot be represented as Decimal(18, 6). If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
668668
== SQL(line 1, position 7) ==
669669
select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)
670670
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
7575
},
7676
errorClass = "CANNOT_CHANGE_DECIMAL_PRECISION",
7777
msg =
78-
"Decimal(expanded,66666666666666.666,17,3}) cannot be represented as Decimal(8, 1). " +
78+
"Decimal(expanded, 66666666666666.666, 17, 3) cannot be represented as Decimal(8, 1). " +
7979
s"If necessary set $ansiConf to false to bypass this error." +
8080
"""
8181
|== SQL(line 1, position 7) ==

0 commit comments

Comments (0)