core/src/main/resources/error/error-classes.json (125 additions, 0 deletions)
@@ -3558,5 +3558,130 @@
"message" : [
"not support type: <dataType>"
]
},
"_LEGACY_ERROR_TEMP_2101" : {
"message" : [
"Not support non-primitive type now"
]
},
"_LEGACY_ERROR_TEMP_2102" : {
"message" : [
"Unsupported type: <catalogString>"
]
},
"_LEGACY_ERROR_TEMP_2103" : {
"message" : [
"Dictionary encoding should not be used because of dictionary overflow."
]
},
"_LEGACY_ERROR_TEMP_2104" : {
"message" : [
"End of the iterator"
]
},
"_LEGACY_ERROR_TEMP_2105" : {
"message" : [
"Could not allocate memory to grow BytesToBytesMap"
]
},
"_LEGACY_ERROR_TEMP_2106" : {
"message" : [
"Can't acquire <size> bytes memory to build hash relation, got <got> bytes"
]
},
"_LEGACY_ERROR_TEMP_2107" : {
"message" : [
"There is not enough memory to build hash map"
]
},
"_LEGACY_ERROR_TEMP_2108" : {
"message" : [
"Does not support row that is larger than 256M"
]
},
"_LEGACY_ERROR_TEMP_2109" : {
"message" : [
"Cannot build HashedRelation with more than 1/3 billions unique keys"
]
},
"_LEGACY_ERROR_TEMP_2110" : {
"message" : [
"Can not build a HashedRelation that is larger than 8G"
]
},
"_LEGACY_ERROR_TEMP_2111" : {
"message" : [
"failed to push a row into <rowQueue>"
]
},
"_LEGACY_ERROR_TEMP_2112" : {
"message" : [
"Unexpected window function frame <frame>."
]
},
"_LEGACY_ERROR_TEMP_2113" : {
"message" : [
"Unable to parse <stats> as a percentile"
]
},
"_LEGACY_ERROR_TEMP_2114" : {
"message" : [
"<stats> is not a recognised statistic"
]
},
"_LEGACY_ERROR_TEMP_2115" : {
"message" : [
"Unknown column: <unknownColumn>"
]
},
"_LEGACY_ERROR_TEMP_2116" : {
"message" : [
"Unexpected: <o>"
]
},
"_LEGACY_ERROR_TEMP_2117" : {
"message" : [
"Unscaled value too large for precision. If necessary set <ansiConfig> to false to bypass this error."
]
},
"_LEGACY_ERROR_TEMP_2118" : {
"message" : [
"Decimal precision <precision> exceeds max precision <maxPrecision>"
]
},
"_LEGACY_ERROR_TEMP_2119" : {
"message" : [
"out of decimal type range: <str>"
]
},
"_LEGACY_ERROR_TEMP_2120" : {
"message" : [
"Do not support array of type <clazz>."
]
},
"_LEGACY_ERROR_TEMP_2121" : {
"message" : [
"Do not support type <clazz>."
]
},
"_LEGACY_ERROR_TEMP_2122" : {
"message" : [
"Failed parsing <simpleString>: <raw>"
]
},
"_LEGACY_ERROR_TEMP_2123" : {
"message" : [
"Failed to merge fields '<leftName>' and '<rightName>'. <message>"
]
},
"_LEGACY_ERROR_TEMP_2124" : {
"message" : [
"Failed to merge decimal types with incompatible scale <leftScale> and <rightScale>"
]
},
"_LEGACY_ERROR_TEMP_2125" : {
"message" : [
"Failed to merge incompatible data types ${leftCatalogString} and ${rightCatalogString}"
]
}
 }
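
Each entry above is a message template: the `<name>` tokens are placeholders that get filled from the messageParameters map supplied at the throw site (see the Scala changes below). As an illustration only, here is a minimal stand-in for that substitution; the object and method names are ours, not Spark's, whose real rendering lives elsewhere (SparkThrowableHelper):

```scala
// Illustration only: renders an error-class template by replacing each
// <name> placeholder with its value from messageParameters.
object TemplateRenderSketch {
  def render(template: String, params: Map[String, String]): String =
    params.foldLeft(template) { case (msg, (name, value)) =>
      msg.replace(s"<$name>", value)
    }

  def main(args: Array[String]): Unit = {
    val template = "Can't acquire <size> bytes memory to build hash relation, got <got> bytes"
    // Prints: Can't acquire 1024 bytes memory to build hash relation, got 512 bytes
    println(render(template, Map("size" -> "1024", "got" -> "512")))
  }
}
```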
@@ -1156,114 +1156,180 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       cause = null)
   }

-  def notSupportNonPrimitiveTypeError(): Throwable = {
-    new RuntimeException("Not support non-primitive type now")
+  def notSupportNonPrimitiveTypeError(): SparkRuntimeException = {
+    new SparkRuntimeException(
+      errorClass = "_LEGACY_ERROR_TEMP_2101",
+      messageParameters = Map.empty)
   }

   def unsupportedTypeError(dataType: DataType): Throwable = {
-    new Exception(s"Unsupported type: ${dataType.catalogString}")
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2102",
+      messageParameters = Map("catalogString" -> dataType.catalogString),
+      cause = null)
   }

   def useDictionaryEncodingWhenDictionaryOverflowError(): Throwable = {
-    new IllegalStateException(
-      "Dictionary encoding should not be used because of dictionary overflow.")
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2103",
+      messageParameters = Map.empty,
+      cause = null)
   }

   def endOfIteratorError(): Throwable = {
-    new NoSuchElementException("End of the iterator")
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2104",
+      messageParameters = Map.empty,
+      cause = null)
   }

   def cannotAllocateMemoryToGrowBytesToBytesMapError(): Throwable = {
-    new IOException("Could not allocate memory to grow BytesToBytesMap")
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2105",
+      messageParameters = Map.empty,
+      cause = null)
   }

   def cannotAcquireMemoryToBuildLongHashedRelationError(size: Long, got: Long): Throwable = {
-    new SparkException(s"Can't acquire $size bytes memory to build hash relation, " +
-      s"got $got bytes")
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2106",
+      messageParameters = Map("size" -> size.toString(), "got" -> got.toString()),
+      cause = null)
   }

   def cannotAcquireMemoryToBuildUnsafeHashedRelationError(): Throwable = {
-    new SparkOutOfMemoryError("There is not enough memory to build hash map")
+    new SparkOutOfMemoryError(
+      "_LEGACY_ERROR_TEMP_2107")
   }

-  def rowLargerThan256MUnsupportedError(): Throwable = {
-    new UnsupportedOperationException("Does not support row that is larger than 256M")
+  def rowLargerThan256MUnsupportedError(): SparkUnsupportedOperationException = {
+    new SparkUnsupportedOperationException(
+      errorClass = "_LEGACY_ERROR_TEMP_2108",
+      messageParameters = Map.empty)
   }

-  def cannotBuildHashedRelationWithUniqueKeysExceededError(): Throwable = {
-    new UnsupportedOperationException(
-      "Cannot build HashedRelation with more than 1/3 billions unique keys")
+  def cannotBuildHashedRelationWithUniqueKeysExceededError(): SparkUnsupportedOperationException = {
+    new SparkUnsupportedOperationException(
+      errorClass = "_LEGACY_ERROR_TEMP_2109",
+      messageParameters = Map.empty)
   }

-  def cannotBuildHashedRelationLargerThan8GError(): Throwable = {
-    new UnsupportedOperationException(
-      "Can not build a HashedRelation that is larger than 8G")
+  def cannotBuildHashedRelationLargerThan8GError(): SparkUnsupportedOperationException = {
+    new SparkUnsupportedOperationException(
+      errorClass = "_LEGACY_ERROR_TEMP_2110",
+      messageParameters = Map.empty)
   }

   def failedToPushRowIntoRowQueueError(rowQueue: String): Throwable = {
-    new SparkException(s"failed to push a row into $rowQueue")
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2111",
+      messageParameters = Map("rowQueue" -> rowQueue),
+      cause = null)
   }

-  def unexpectedWindowFunctionFrameError(frame: String): Throwable = {
-    new RuntimeException(s"Unexpected window function frame $frame.")
+  def unexpectedWindowFunctionFrameError(frame: String): SparkRuntimeException = {
+    new SparkRuntimeException(
+      errorClass = "_LEGACY_ERROR_TEMP_2112",
+      messageParameters = Map("frame" -> frame))
   }

   def cannotParseStatisticAsPercentileError(
-      stats: String, e: NumberFormatException): Throwable = {
-    new IllegalArgumentException(s"Unable to parse $stats as a percentile", e)
+      stats: String, e: NumberFormatException): SparkIllegalArgumentException = {

Review comment (Member): @itholic You just ignored e? Seems like some kind of regression. Could you add a constructor to SparkIllegalArgumentException which accepts cause too.

Reply (Contributor Author): Thanks for pointing out. Let me address it with #38104 (comment).

+    new SparkIllegalArgumentException(
+      errorClass = "_LEGACY_ERROR_TEMP_2113",
+      messageParameters = Map("stats" -> stats))
   }
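
Picking up the review thread above: the cause-accepting constructor the reviewer asks for could take roughly the following shape. This is a hypothetical sketch, not the actual org.apache.spark.SparkIllegalArgumentException (hence the Sketch suffix), and the simplified message rendering is an assumption:

```scala
// Hypothetical sketch only: a SparkIllegalArgumentException variant whose
// constructor also accepts the underlying cause, so a caller like
// cannotParseStatisticAsPercentileError could pass `e` through instead of
// dropping it. Spark's real message rendering goes through its error-class
// machinery; the string built here is a placeholder.
class SparkIllegalArgumentExceptionSketch(
    errorClass: String,
    messageParameters: Map[String, String],
    cause: Throwable = null)
  extends IllegalArgumentException(
    s"[$errorClass] ${messageParameters.mkString(", ")}", cause)
```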

-  def statisticNotRecognizedError(stats: String): Throwable = {
-    new IllegalArgumentException(s"$stats is not a recognised statistic")
+  def statisticNotRecognizedError(stats: String): SparkIllegalArgumentException = {
+    new SparkIllegalArgumentException(
+      errorClass = "_LEGACY_ERROR_TEMP_2114",
+      messageParameters = Map("stats" -> stats))
   }

-  def unknownColumnError(unknownColumn: String): Throwable = {
-    new IllegalArgumentException(s"Unknown column: $unknownColumn")
+  def unknownColumnError(unknownColumn: String): SparkIllegalArgumentException = {
+    new SparkIllegalArgumentException(
+      errorClass = "_LEGACY_ERROR_TEMP_2115",
+      messageParameters = Map("unknownColumn" -> unknownColumn.toString()))
   }

-  def unexpectedAccumulableUpdateValueError(o: Any): Throwable = {
-    new IllegalArgumentException(s"Unexpected: $o")
+  def unexpectedAccumulableUpdateValueError(o: Any): SparkIllegalArgumentException = {
+    new SparkIllegalArgumentException(
+      errorClass = "_LEGACY_ERROR_TEMP_2116",
+      messageParameters = Map("o" -> o.toString()))
   }

-  def unscaledValueTooLargeForPrecisionError(): Throwable = {
-    new ArithmeticException("Unscaled value too large for precision. " +
-      s"If necessary set ${SQLConf.ANSI_ENABLED.key} to false to bypass this error.")
+  def unscaledValueTooLargeForPrecisionError(): SparkArithmeticException = {
+    new SparkArithmeticException(
+      errorClass = "_LEGACY_ERROR_TEMP_2117",
+      messageParameters = Map("ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
+      context = Array.empty,
+      summary = "")
   }

-  def decimalPrecisionExceedsMaxPrecisionError(precision: Int, maxPrecision: Int): Throwable = {
-    new ArithmeticException(
-      s"Decimal precision $precision exceeds max precision $maxPrecision")
+  def decimalPrecisionExceedsMaxPrecisionError(
+      precision: Int, maxPrecision: Int): SparkArithmeticException = {
+    new SparkArithmeticException(
+      errorClass = "_LEGACY_ERROR_TEMP_2118",
+      messageParameters = Map(
+        "precision" -> precision.toString(),
+        "maxPrecision" -> maxPrecision.toString()),
+      context = Array.empty,
+      summary = "")
   }

-  def outOfDecimalTypeRangeError(str: UTF8String): Throwable = {
-    new ArithmeticException(s"out of decimal type range: $str")
+  def outOfDecimalTypeRangeError(str: UTF8String): SparkArithmeticException = {
+    new SparkArithmeticException(
+      errorClass = "_LEGACY_ERROR_TEMP_2119",
+      messageParameters = Map("str" -> str.toString()),
+      context = Array.empty,
+      summary = "")
   }

-  def unsupportedArrayTypeError(clazz: Class[_]): Throwable = {
-    new RuntimeException(s"Do not support array of type $clazz.")
+  def unsupportedArrayTypeError(clazz: Class[_]): SparkRuntimeException = {
+    new SparkRuntimeException(
+      errorClass = "_LEGACY_ERROR_TEMP_2120",
+      messageParameters = Map("clazz" -> clazz.toString()))
   }

-  def unsupportedJavaTypeError(clazz: Class[_]): Throwable = {
-    new RuntimeException(s"Do not support type $clazz.")
+  def unsupportedJavaTypeError(clazz: Class[_]): SparkRuntimeException = {
+    new SparkRuntimeException(
+      errorClass = "_LEGACY_ERROR_TEMP_2121",
+      messageParameters = Map("clazz" -> clazz.toString()))
   }

-  def failedParsingStructTypeError(raw: String): Throwable = {
-    new RuntimeException(s"Failed parsing ${StructType.simpleString}: $raw")
+  def failedParsingStructTypeError(raw: String): SparkRuntimeException = {
+    new SparkRuntimeException(
+      errorClass = "_LEGACY_ERROR_TEMP_2122",
+      messageParameters = Map("simpleString" -> StructType.simpleString, "raw" -> raw))
   }

   def failedMergingFieldsError(leftName: String, rightName: String, e: Throwable): Throwable = {
-    new SparkException(s"Failed to merge fields '$leftName' and '$rightName'. ${e.getMessage}")
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2123",
+      messageParameters = Map(
+        "leftName" -> leftName,
+        "rightName" -> rightName,
+        "message" -> e.getMessage),
+      cause = null)
   }

   def cannotMergeDecimalTypesWithIncompatibleScaleError(
       leftScale: Int, rightScale: Int): Throwable = {
-    new SparkException("Failed to merge decimal types with incompatible " +
-      s"scale $leftScale and $rightScale")
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2124",
+      messageParameters = Map(
+        "leftScale" -> leftScale.toString(),
+        "rightScale" -> rightScale.toString()),
+      cause = null)
   }

   def cannotMergeIncompatibleDataTypesError(left: DataType, right: DataType): Throwable = {
-    new SparkException(s"Failed to merge incompatible data types ${left.catalogString}" +
-      s" and ${right.catalogString}")
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2125",
+      messageParameters = Map(
+        "leftCatalogString" -> left.catalogString,
+        "rightCatalogString" -> right.catalogString),
+      cause = null)
   }

   def exceedMapSizeLimitError(size: Int): Throwable = {
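
Taken together, the hunk above swaps ad-hoc JDK exceptions for Spark's error-class-aware ones, so the thrown exception now carries a machine-readable error class. A hedged usage sketch of what callers gain (getErrorClass comes from Spark's SparkThrowable interface; QueryExecutionErrors is private[sql], so a snippet like this only compiles inside Spark's own sql package):

```scala
// Sketch under an assumption: this file must live inside Spark's
// org.apache.spark.sql package for the private[sql] call to compile.
package org.apache.spark.sql

import org.apache.spark.SparkException
import org.apache.spark.sql.errors.QueryExecutionErrors

object ErrorClassSketch {
  def main(args: Array[String]): Unit = {
    try {
      throw QueryExecutionErrors.cannotAcquireMemoryToBuildLongHashedRelationError(1024L, 512L)
    } catch {
      case e: SparkException =>
        // Callers can now match on a stable error class instead of
        // parsing the exception message text.
        assert(e.getErrorClass == "_LEGACY_ERROR_TEMP_2106")
    }
  }
}
```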
@@ -24,6 +24,7 @@ import scala.collection.mutable.ArrayBuffer
 import scala.util.Random

 import org.apache.spark.SparkConf
+import org.apache.spark.SparkException
 import org.apache.spark.internal.config._
 import org.apache.spark.internal.config.Kryo._
 import org.apache.spark.memory.{TaskMemoryManager, UnifiedMemoryManager}
@@ -534,7 +535,7 @@ class HashedRelationSuite extends SharedSparkSession {
       buffer.append(keyIterator.next().getLong(0))
     }
     // attempt an illegal next() call
-    val caught = intercept[NoSuchElementException] {
+    val caught = intercept[SparkException] {
       keyIterator.next()
     }
     assert(caught.getLocalizedMessage === "End of the iterator")
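
A note on why the unchanged message assertion still passes: the `_LEGACY_ERROR_TEMP_2104` template has no `<placeholder>` tokens, and the legacy temp classes appear to render as the bare template text, so getLocalizedMessage is presumably still exactly "End of the iterator". If desired, the test could additionally pin down the error class:

```scala
// Possible extra assertion (sketch): SparkException also exposes the
// error class it was constructed with.
assert(caught.getErrorClass === "_LEGACY_ERROR_TEMP_2104")
```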