@@ -567,11 +567,11 @@ case class DataSource(
       }
       if (filteredOut.nonEmpty) {
         if (filteredIn.isEmpty) {
-          throw new AnalysisException(
-            s"All paths were ignored:\n${filteredOut.mkString("\n ")}")
+          logWarning(
+            s"All paths were ignored:\n ${filteredOut.mkString("\n ")}")
         } else {
           logDebug(
-            s"Some paths were ignored:\n${filteredOut.mkString("\n ")}")
+            s"Some paths were ignored:\n ${filteredOut.mkString("\n ")}")
         }
       }
     }
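For context on the behavior this hunk restores: when every supplied path is filtered out, the condition is now only logged as a warning instead of raising an AnalysisException, while a partially filtered input is still reported at debug level. Below is a minimal standalone sketch of that branch logic; PathFilterSketch, shouldFilterOut, and the leading "_"/"." rule are illustrative assumptions rather than Spark's actual helper, and println stands in for logWarning/logDebug.

// Minimal sketch only; names and the filter rule are assumptions for illustration.
object PathFilterSketch {

  // Assumed filter: treat file names starting with "_" or "." as hidden.
  private def shouldFilterOut(fileName: String): Boolean =
    fileName.startsWith("_") || fileName.startsWith(".")

  // Returns the paths that survive filtering. When every path is filtered out,
  // this only warns (mirroring the logWarning added in the hunk above) instead
  // of throwing; a partial filter is reported at debug level.
  def filterPaths(paths: Seq[String]): Seq[String] = {
    val (filteredOut, filteredIn) =
      paths.partition(p => shouldFilterOut(p.split('/').last))
    if (filteredOut.nonEmpty) {
      if (filteredIn.isEmpty) {
        println(s"WARN: All paths were ignored:\n  ${filteredOut.mkString("\n  ")}")
      } else {
        println(s"DEBUG: Some paths were ignored:\n  ${filteredOut.mkString("\n  ")}")
      }
    }
    filteredIn
  }

  def main(args: Array[String]): Unit = {
    // Only a filtered-out file: warns and returns Seq(), no exception is thrown.
    println(filterPaths(Seq("test-data/_cars.csv")))
    // Mixed input: keeps cars.csv and reports the ignored file at debug level.
    println(filterPaths(Seq("test-data/cars.csv", "test-data/_cars.csv")))
  }
}

This is also why the two SPARK-26339 tests removed from CSVSuite below no longer apply: with the warning-only behavior there is no AnalysisException to intercept when all of the specified paths are filtered out.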
sql/core/src/test/resources/test-data/_cars.csv (7 changes: 0 additions & 7 deletions)

This file was deleted.

@@ -53,7 +53,6 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils with Te
   private val carsEmptyValueFile = "test-data/cars-empty-value.csv"
   private val carsBlankColName = "test-data/cars-blank-column-name.csv"
   private val carsCrlf = "test-data/cars-crlf.csv"
-  private val carsFilteredOutFile = "test-data/_cars.csv"
   private val emptyFile = "test-data/empty.csv"
   private val commentsFile = "test-data/comments.csv"
   private val disableCommentsFile = "test-data/disable_comments.csv"
@@ -347,25 +346,6 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils with Te
     assert(result.schema.fieldNames.size === 1)
   }

-  test("SPARK-26339 Not throw an exception if some of specified paths are filtered in") {
-    val cars = spark
-      .read
-      .option("header", "false")
-      .csv(testFile(carsFile), testFile(carsFilteredOutFile))
-
-    verifyCars(cars, withHeader = false, checkTypes = false)
-  }
-
-  test("SPARK-26339 Throw an exception only if all of the specified paths are filtered out") {
-    val e = intercept[AnalysisException] {
-      val cars = spark
-        .read
-        .option("header", "false")
-        .csv(testFile(carsFilteredOutFile))
-    }.getMessage
-    assert(e.contains("All paths were ignored:"))
-  }
-
   test("DDL test with empty file") {
     withView("carsTable") {
       spark.sql(