Skip to content
Closed
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
merge with master
  • Loading branch information
windpiger committed Mar 2, 2017
commit a4f771a60f0c716e1811acab5fffead1929d8e80
Original file line number Diff line number Diff line change
Expand Up @@ -1736,4 +1736,103 @@ class HiveDDLSuite
}
}
}

Seq(true, false).foreach { removeDir =>
  // Human-readable label for the test name: when removeDir is true the target
  // directory is deleted before CTAS, exercising the non-existent-location path.
  val locationKind = if (removeDir) "non-existent" else "existed"

  // CTAS into a user-supplied LOCATION for a data source (parquet) table,
  // covering both a pre-existing directory and one removed before the CTAS.
  test(s"CTAS for external data source table with a $locationKind location") {
    withTable("t", "t1") {
      // non-partitioned table
      withTempDir { baseDir =>
        if (removeDir) {
          baseDir.delete()
        }
        spark.sql(
          s"""
             |CREATE TABLE t
             |USING parquet
             |LOCATION '$baseDir'
             |AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
           """.stripMargin)

        val metadata = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
        // NOTE(review): raw string comparison here vs. fs.makeQualified in the
        // hive tests below — confirm data source tables store the unqualified path.
        assert(metadata.location == baseDir.getAbsolutePath)

        checkAnswer(spark.table("t"), Row(3, 4, 1, 2))
      }

      // partitioned table: partition columns (a, b) are placed after the data
      // columns (c, d) in the resulting schema, hence Row(1, 2, 3, 4) below.
      withTempDir { baseDir =>
        if (removeDir) {
          baseDir.delete()
        }
        spark.sql(
          s"""
             |CREATE TABLE t1
             |USING parquet
             |PARTITIONED BY(a, b)
             |LOCATION '$baseDir'
             |AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
           """.stripMargin)

        val metadata = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t1"))
        assert(metadata.location == baseDir.getAbsolutePath)

        // partition data for a=3 must land under the user-supplied location
        val partitionDir = new File(baseDir, "a=3")
        assert(partitionDir.exists())

        checkAnswer(spark.table("t1"), Row(1, 2, 3, 4))
      }
    }
  }

  // Same scenarios for a hive-format table; dynamic partition mode must be
  // nonstrict for the partitioned CTAS to succeed.
  test(s"CTAS for external hive table with a $locationKind location") {
    withTable("t", "t1") {
      withSQLConf("hive.exec.dynamic.partition.mode" -> "nonstrict") {
        // non-partitioned table
        withTempDir { baseDir =>
          if (removeDir) {
            baseDir.delete()
          }
          spark.sql(
            s"""
               |CREATE TABLE t
               |USING hive
               |LOCATION '$baseDir'
               |AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
             """.stripMargin)

          // Hive stores a fully-qualified URI, so compare qualified Paths
          // rather than raw strings.
          val rawPath = new Path(baseDir.getAbsolutePath)
          val fileSystem = rawPath.getFileSystem(spark.sessionState.newHadoopConf())
          val metadata = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
          assert(new Path(metadata.location) == fileSystem.makeQualified(rawPath))

          checkAnswer(spark.table("t"), Row(3, 4, 1, 2))
        }

        // partitioned table
        withTempDir { baseDir =>
          if (removeDir) {
            baseDir.delete()
          }
          spark.sql(
            s"""
               |CREATE TABLE t1
               |USING hive
               |PARTITIONED BY(a, b)
               |LOCATION '$baseDir'
               |AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
             """.stripMargin)

          val rawPath = new Path(baseDir.getAbsolutePath)
          val fileSystem = rawPath.getFileSystem(spark.sessionState.newHadoopConf())
          val metadata = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t1"))
          assert(new Path(metadata.location) == fileSystem.makeQualified(rawPath))

          // partition data for a=3 must land under the user-supplied location
          val partitionDir = new File(baseDir, "a=3")
          assert(partitionDir.exists())

          checkAnswer(spark.table("t1"), Row(1, 2, 3, 4))
        }
      }
    }
  }
}
}
You are viewing a condensed version of this merge commit. You can view the full changes here.