Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
SPARK-15269: set locationURI to the non-hive compatible metastore table
  • Loading branch information
xwu0226 committed May 17, 2016
commit c3027a1c849cc6562eaa8a46d2d0da6768f0b009
Original file line number Diff line number Diff line change
Expand Up @@ -348,13 +348,16 @@ object CreateDataSourceTableUtils extends Logging {
className = provider,
options = options)

def newSparkSQLSpecificMetastoreTable(): CatalogTable = {
def newSparkSQLSpecificMetastoreTable(relation: HadoopFsRelation): CatalogTable = {
CatalogTable(
identifier = tableIdent,
tableType = tableType,
schema = Nil,
storage = CatalogStorageFormat(
locationUri = None,
// We don't want the Hive metastore to implicitly create a table directory,
// which may not be the one the Data Source table actually refers to,
// and which would otherwise be left behind when an external table is dropped
locationUri = Some(relation.location.paths.map(_.toUri.toString).head),
inputFormat = None,
outputFormat = None,
serde = None,
Expand Down Expand Up @@ -458,13 +461,15 @@ object CreateDataSourceTableUtils extends Logging {
s"Could not persist $qualifiedTableName in a Hive compatible way. Persisting " +
s"it into Hive metastore in Spark SQL specific format."
logWarning(warningMessage, e)
val table = newSparkSQLSpecificMetastoreTable()
val table =
newSparkSQLSpecificMetastoreTable(resolvedRelation.asInstanceOf[HadoopFsRelation])
sparkSession.sessionState.catalog.createTable(table, ignoreIfExists = false)
}

case (None, message) =>
logWarning(message)
val table = newSparkSQLSpecificMetastoreTable()
val table =
newSparkSQLSpecificMetastoreTable(resolvedRelation.asInstanceOf[HadoopFsRelation])
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I do not think we can do this cast. There are other kinds of relations.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks! I will update.

sparkSession.sessionState.catalog.createTable(table, ignoreIfExists = false)
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1104,4 +1104,18 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
}
}
}

test("SPARK-15269: non-hive compative table") {
withTempPath { dir =>
val path = dir.getCanonicalPath
spark.range(1).write.json(path)

withTable("ddl_test1") {
sql(s"CREATE TABLE ddl_test1 USING json OPTIONS (PATH '$path')")
sql("DROP TABLE ddl_test1")
sql(s"CREATE TABLE ddl_test1 USING json AS SELECT 10 AS a")
checkAnswer(sql("select * from ddl_test1"), Seq(Row(10)))
}
}
}
}