R/pkg/tests/fulltests/test_sparkSQL.R (3 changes: 2 additions & 1 deletion)
@@ -3961,7 +3961,8 @@ test_that("Call DataFrameWriter.save() API in Java without path and check argume
# It makes sure that we can omit path argument in write.df API and then it calls
# DataFrameWriter.save() without path.
expect_error(write.df(df, source = "csv"),
"Error in save : illegal argument - Expected exactly one path to be specified")
paste("Error in save : org.apache.spark.SparkIllegalArgumentException:",
"Expected exactly one path to be specified"))
expect_error(write.json(df, jsonPath),
"Error in json : analysis error - Path file:.*already exists")
expect_error(write.text(df, jsonPath),
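Note on the updated expectation above: the pattern now includes the exception class name because the JVM side throws org.apache.spark.SparkIllegalArgumentException (constructed from an error class) rather than a plain IllegalArgumentException, and SparkR surfaces the JVM exception's toString(), which has the form "<fully.qualified.ClassName>: <message>". The minimal standalone Scala sketch below only illustrates that toString() behaviour; the exception class in it is a stand-in for demonstration, not Spark's.

// Stand-in exception to show java.lang.Throwable.toString formatting;
// Spark's real class is org.apache.spark.SparkIllegalArgumentException.
package org.example

class DemoIllegalArgumentException(message: String)
  extends IllegalArgumentException(message)

object ToStringDemo extends App {
  val e = new DemoIllegalArgumentException(
    "Expected exactly one path to be specified, but got: ")
  // Prints:
  // org.example.DemoIllegalArgumentException: Expected exactly one path to be specified, but got:
  println(e.toString)
}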
core/src/main/resources/error/error-classes.json (130 changes: 125 additions & 5 deletions)
@@ -3008,11 +3008,6 @@
"<message>. If necessary set <ansiConfig> to false to bypass this error."
]
},
"_LEGACY_ERROR_TEMP_2001" : {
"message" : [
"<message> If necessary set <ansiConfig> to false to bypass this error"
]
},
Comment on lines -3011 to -3015 (Contributor, Author):

Addressed the #38104 (comment).

@MaxGekk FYI
"_LEGACY_ERROR_TEMP_2002" : {
"message" : [
"<message>"
@@ -3132,5 +3127,130 @@
"message" : [
"<className> must override either <m1> or <m2>"
]
},
"_LEGACY_ERROR_TEMP_2026" : {
"message" : [
"Failed to convert value <value> (class of <cls>) with the type of <dataType> to JSON."
]
},
"_LEGACY_ERROR_TEMP_2027" : {
"message" : [
"Unexpected operator <op> in correlated subquery<pos>"
]
},
"_LEGACY_ERROR_TEMP_2028" : {
"message" : [
"This line should be unreachable<err>"
]
},
"_LEGACY_ERROR_TEMP_2029" : {
"message" : [
"Not supported rounding mode: <roundMode>"
]
},
"_LEGACY_ERROR_TEMP_2030" : {
"message" : [
"Can not handle nested schema yet... plan <plan>"
]
},
"_LEGACY_ERROR_TEMP_2031" : {
"message" : [
"The input external row cannot be null."
]
},
"_LEGACY_ERROR_TEMP_2032" : {
"message" : [
"<fieldCannotBeNullMsg>"
]
},
"_LEGACY_ERROR_TEMP_2033" : {
"message" : [
"Unable to create database <name> as failed to create its directory <locationUri>"
]
},
"_LEGACY_ERROR_TEMP_2034" : {
"message" : [
"Unable to drop database <name> as failed to delete its directory <locationUri>"
]
},
"_LEGACY_ERROR_TEMP_2035" : {
"message" : [
"Unable to create table <table> as failed to create its directory <defaultTableLocation>"
]
},
"_LEGACY_ERROR_TEMP_2036" : {
"message" : [
"Unable to delete partition path <partitionPath>"
]
},
"_LEGACY_ERROR_TEMP_2037" : {
"message" : [
"Unable to drop table <table> as failed to delete its directory <dir>"
]
},
"_LEGACY_ERROR_TEMP_2038" : {
"message" : [
"Unable to rename table <oldName> to <newName> as failed to rename its directory <oldDir>"
]
},
"_LEGACY_ERROR_TEMP_2039" : {
"message" : [
"Unable to create partition path <partitionPath>"
]
},
"_LEGACY_ERROR_TEMP_2040" : {
"message" : [
"Unable to rename partition path <oldPartPath>"
]
},
"_LEGACY_ERROR_TEMP_2041" : {
"message" : [
"<methodName> is not implemented"
]
},
"_LEGACY_ERROR_TEMP_2042" : {
"message" : [
"<message>. If necessary set <ansiConfig> to false to bypass this error."
]
},
"_LEGACY_ERROR_TEMP_2043" : {
"message" : [
"- <sqlValue> caused overflow"
]
},
"_LEGACY_ERROR_TEMP_2044" : {
"message" : [
"<sqlValue1> <symbol> <sqlValue2> caused overflow"
]
},
"_LEGACY_ERROR_TEMP_2045" : {
"message" : [
"Unsupported table change: <message>"
]
},
"_LEGACY_ERROR_TEMP_2046" : {
"message" : [
"[BUG] Not a DataSourceRDDPartition: <split>"
]
},
"_LEGACY_ERROR_TEMP_2047" : {
"message" : [
"'path' is not specified"
]
},
"_LEGACY_ERROR_TEMP_2048" : {
"message" : [
"Schema must be specified when creating a streaming source DataFrame. If some files already exist in the directory, then depending on the file format you may be able to create a static DataFrame on that directory with 'spark.read.load(directory)' and infer schema from it."
]
},
"_LEGACY_ERROR_TEMP_2049" : {
"message" : [
"Data source <className> does not support streamed <operator>"
]
},
"_LEGACY_ERROR_TEMP_2050" : {
"message" : [
"Expected exactly one path to be specified, but got: <paths>"
]
}
}
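Each <placeholder> in these message templates is filled from a name-to-value map supplied at the throw site. Spark's actual formatting lives in the JVM-side error framework (which also validates the supplied parameters against this JSON); the short standalone Scala sketch below only illustrates the substitution semantics and is not the real implementation.

// Rough sketch of placeholder substitution for templates such as
// "Expected exactly one path to be specified, but got: <paths>".
def formatErrorMessage(template: String, params: Map[String, String]): String =
  params.foldLeft(template) { case (msg, (name, value)) =>
    msg.replace(s"<$name>", value)
  }

// Example:
// formatErrorMessage(
//   "Expected exactly one path to be specified, but got: <paths>",
//   Map("paths" -> "a.csv, b.csv"))
// returns "Expected exactly one path to be specified, but got: a.csv, b.csv"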