Merge remote-tracking branch 'upstream/master' into refactorDDLSuite
gatorsmile committed Mar 6, 2017
commit b186dcddb800a7e1e42b84fbf90da34a93bf7e84
@@ -89,67 +89,6 @@ class InMemoryCatalogedDDLSuite extends DDLSuite with SharedSQLContext with Befo
}
}

test("select/insert into the managed table") {
val tabName = "tbl"
withTable(tabName) {
sql(s"CREATE TABLE $tabName (i INT, j STRING)")
val catalogTable =
spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
assert(catalogTable.tableType == CatalogTableType.MANAGED)

var message = intercept[AnalysisException] {
sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
}.getMessage
assert(message.contains("Hive support is required to insert into the following tables"))
message = intercept[AnalysisException] {
sql(s"SELECT * FROM $tabName")
}.getMessage
assert(message.contains("Hive support is required to select over the following tables"))
}
}

test("select/insert into external table") {
withTempDir { tempDir =>
val tabName = "tbl"
withTable(tabName) {
sql(
s"""
|CREATE EXTERNAL TABLE $tabName (i INT, j STRING)
|ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
|LOCATION '$tempDir'
""".stripMargin)
val catalogTable =
spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
assert(catalogTable.tableType == CatalogTableType.EXTERNAL)

var message = intercept[AnalysisException] {
sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
}.getMessage
assert(message.contains("Hive support is required to insert into the following tables"))
message = intercept[AnalysisException] {
sql(s"SELECT * FROM $tabName")
}.getMessage
assert(message.contains("Hive support is required to select over the following tables"))
}
}
}

test("Create Hive Table As Select") {
import testImplicits._
withTable("t", "t1") {
var e = intercept[AnalysisException] {
sql("CREATE TABLE t SELECT 1 as a, 1 as b")
}.getMessage
assert(e.contains("Hive support is required to use CREATE Hive TABLE AS SELECT"))

spark.range(1).select('id as 'a, 'id as 'b).write.saveAsTable("t1")
e = intercept[AnalysisException] {
sql("CREATE TABLE t SELECT a, b from t1")
}.getMessage
assert(e.contains("Hive support is required to use CREATE Hive TABLE AS SELECT"))
}
}

test("alter table: set location (datasource table)") {
testSetLocation(isDatasourceTable = true)
}
@@ -189,6 +128,52 @@ class InMemoryCatalogedDDLSuite extends DDLSuite with SharedSQLContext with Befo
test("drop table - data source table") {
testDropTable(isDatasourceTable = true)
}
Member Author (gatorsmile) commented:
The above 10 test cases currently run with InMemoryCatalog only. The reason is that HiveExternalCatalog does not allow users to change the table provider from hive to other providers. We can fix this in future PRs.
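As context for the tests added just below: they are gated to runs without Hive support via assume(...) on CATALOG_IMPLEMENTATION. A minimal, self-contained sketch of that gating pattern (illustrative only, not part of this diff; the suite name and table name are hypothetical):

  import org.apache.spark.sql.{AnalysisException, QueryTest}
  import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
  import org.apache.spark.sql.test.SharedSQLContext

  // Hypothetical suite name; the assume(...) line mirrors the gating used in the diff below.
  class InMemoryOnlyExampleSuite extends QueryTest with SharedSQLContext {
    test("Hive-format CREATE TABLE is rejected without Hive support") {
      // assume() marks the case as skipped (not failed) when the session was built
      // with spark.sql.catalogImplementation = "hive".
      assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
      val e = intercept[AnalysisException] {
        sql("CREATE TABLE gated_tbl (i INT, j STRING)")
      }
      assert(e.getMessage.contains("Hive support is required"))
    }
  }

Because the failure mode only exists for the in-memory catalog, the assume keeps the shared DDLSuite body reusable by both InMemoryCatalogedDDLSuite and HiveCatalogedDDLSuite.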


test("create a managed Hive source table") {
assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
val tabName = "tbl"
withTable(tabName) {
val e = intercept[AnalysisException] {
sql(s"CREATE TABLE $tabName (i INT, j STRING)")
}.getMessage
assert(e.contains("Hive support is required to CREATE Hive TABLE"))
}
}

test("create an external Hive source table") {
assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
withTempDir { tempDir =>
val tabName = "tbl"
withTable(tabName) {
val e = intercept[AnalysisException] {
sql(
s"""
|CREATE EXTERNAL TABLE $tabName (i INT, j STRING)
|ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
|LOCATION '${tempDir.toURI}'
""".stripMargin)
}.getMessage
assert(e.contains("Hive support is required to CREATE Hive TABLE"))
}
}
}

test("Create Hive Table As Select") {
import testImplicits._
withTable("t", "t1") {
var e = intercept[AnalysisException] {
sql("CREATE TABLE t SELECT 1 as a, 1 as b")
}.getMessage
assert(e.contains("Hive support is required to CREATE Hive TABLE (AS SELECT)"))

spark.range(1).select('id as 'a, 'id as 'b).write.saveAsTable("t1")
e = intercept[AnalysisException] {
sql("CREATE TABLE t SELECT a, b from t1")
}.getMessage
assert(e.contains("Hive support is required to CREATE Hive TABLE (AS SELECT)"))
}
}

}

abstract class DDLSuite extends QueryTest with SQLTestUtils {
@@ -883,11 +868,13 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
    testUnsetProperties(isDatasourceTable = false)
  }

  test("alter table: set serde") {
  // TODO: move this test to HiveDDLSuite.scala
  ignore("alter table: set serde") {
    testSetSerde(isDatasourceTable = false)
  }

  test("alter table: set serde partition") {
  // TODO: move this test to HiveDDLSuite.scala
  ignore("alter table: set serde partition") {
    testSetSerdePartition(isDatasourceTable = false)
  }
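An aside on the mechanism used here (illustrative only, not part of the diff): in ScalaTest's FunSuite, swapping test for ignore keeps a case compiled but reports it as ignored at run time, which is how these two serde cases are parked until they move to HiveDDLSuite.scala. A minimal standalone sketch:

  import org.scalatest.FunSuite

  class IgnoreExampleSuite extends FunSuite {
    // Reported as "ignored"; the body still type-checks but never runs.
    ignore("parked until it moves to the Hive suite") {
      assert(1 + 1 == 2)
    }

    // Runs normally.
    test("still executes") {
      assert("spark".nonEmpty)
    }
  }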

@@ -1614,6 +1601,21 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
)
}

test("create a data source table without schema") {
import testImplicits._
withTempPath { tempDir =>
withTable("tab1", "tab2") {
(("a", "b") :: Nil).toDF().write.json(tempDir.getCanonicalPath)

val e = intercept[AnalysisException] { sql("CREATE TABLE tab1 USING json") }.getMessage
assert(e.contains("Unable to infer schema for JSON. It must be specified manually"))

sql(s"CREATE TABLE tab2 using json location '${tempDir.toURI}'")
checkAnswer(spark.table("tab2"), Row("a", "b"))
}
}
}

test("create table using CLUSTERED BY without schema specification") {
import testImplicits._
withTempPath { tempDir =>
@@ -104,7 +104,6 @@ class HiveCatalogedDDLSuite extends DDLSuite with TestHiveSingleton with BeforeA
owner = "",
properties = table.properties.filterKeys(!nondeterministicProps.contains(_)),
// View texts are checked separately
viewOriginalText = None,
viewText = None
)
}