@@ -271,11 +271,15 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
   * @since 1.3.0
   */
  def refreshTable(tableName: String): Unit = {
-    catalog.refreshTable(catalog.client.currentDatabase, tableName)
+    val dbAndTableName = tableName.split("\\.")
+    catalog.refreshTable(dbAndTableName.lift(dbAndTableName.size - 2)
+      .getOrElse(catalog.client.currentDatabase), dbAndTableName.last)
  }

  protected[hive] def invalidateTable(tableName: String): Unit = {
-    catalog.invalidateTable(catalog.client.currentDatabase, tableName)
+    val dbAndTableName = tableName.split("\\.")
Review comment: Notice the repeated code. IMHO the relative/fully-prefixed tableName logic should be extracted into something like canonicalTableName(relativeOrPrefixedTableName).
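A minimal sketch of such a helper, assuming the reviewer's suggested name and a (db, table) return shape; neither exists in this PR, and passing the current database explicitly is just one possible placement:

    // Hypothetical helper (not part of this PR): resolves an optionally
    // database-qualified table name, falling back to the current database.
    private def canonicalTableName(
        relativeOrPrefixedTableName: String,
        currentDatabase: String): (String, String) = {
      val dbAndTableName = relativeOrPrefixedTableName.split("\\.")
      (dbAndTableName.lift(dbAndTableName.size - 2).getOrElse(currentDatabase),
        dbAndTableName.last)
    }

    canonicalTableName("testdb.tbl", "default")  // ("testdb", "tbl")
    canonicalTableName("tbl", "default")         // ("default", "tbl")

Each call site would then reduce to one line, e.g. val (db, tbl) = canonicalTableName(tableName, catalog.client.currentDatabase).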

+    catalog.invalidateTable(dbAndTableName.lift(dbAndTableName.size - 2)
+      .getOrElse(catalog.client.currentDatabase), dbAndTableName.last)
  }

  /**
@@ -143,7 +143,11 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
      provider: String,
      options: Map[String, String],
      isExternal: Boolean): Unit = {
-    val (dbName, tblName) = processDatabaseAndTableName(client.currentDatabase, tableName)
+    val dbAndTableName = tableName.split("\\.")
+    val (dbName, tblName) = processDatabaseAndTableName(
+      dbAndTableName
+        .lift(dbAndTableName.size - 2)
+        .getOrElse(client.currentDatabase), dbAndTableName.last)
    val tableProperties = new scala.collection.mutable.HashMap[String, String]
    tableProperties.put("spark.sql.sources.provider", provider)

@@ -203,9 +207,11 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive

  def hiveDefaultTableFilePath(tableName: String): String = {
    // Code based on: hiveWarehouse.getTablePath(currentDatabase, tableName)
+    val dbAndTableName = tableName.split("\\.")
    new Path(
-      new Path(client.getDatabase(client.currentDatabase).location),
-      tableName.toLowerCase).toString
+      new Path(client.getDatabase(dbAndTableName.lift(dbAndTableName.size - 2)
+        .getOrElse(client.currentDatabase)).location),
+      dbAndTableName.last.toLowerCase).toString
  }

  def tableExists(tableIdentifier: Seq[String]): Boolean = {
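For illustration: assuming the standard Hive warehouse layout, where a non-default database's location resolves to <warehouse>/<db>.db (an assumption about the deployment, not something shown in this diff), the new hiveDefaultTableFilePath would resolve roughly as:

    hiveDefaultTableFilePath("tbl")           // <warehouse>/tbl          (current database: default)
    hiveDefaultTableFilePath("testdb.MyTbl")  // <warehouse>/testdb.db/mytbl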
@@ -28,6 +28,7 @@ import org.apache.spark.sql._
import org.apache.spark.sql.hive.test.TestHive
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.sql.types._
+import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException

abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils {
  override lazy val sqlContext: SQLContext = TestHive
@@ -609,4 +610,56 @@ class ParquetHadoopFsRelationSuite extends HadoopFsRelationTest {
      }
    }
  }

test("SPARK-7943:DF created by hiveContext can create table to specific db by saveAstable") {

val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
// use dbname.tablename to specific db
sqlContext.sql("""create database if not exists testdb7943""")
df.write
.format("parquet")
.mode(SaveMode.Overwrite)
.saveAsTable("testdb7943.tbl7943_1")

df.write
.format("parquet")
.mode(SaveMode.Overwrite)
.saveAsTable("tbl7943_2")

intercept[NoSuchDatabaseException] {
df.write
.format("parquet")
.mode(SaveMode.Overwrite)
.saveAsTable("testdb7943-2.tbl1")
}

sqlContext.sql("""use testdb7943""")

df.write
.format("parquet")
.mode(SaveMode.Overwrite)
.saveAsTable("tbl7943_3")
df.write
.format("parquet")
.mode(SaveMode.Overwrite)
.saveAsTable("default.tbl7943_4")

checkAnswer(
sqlContext.sql("show TABLES in testdb7943"),
Seq(Row("tbl7943_1", false), Row("tbl7943_3", false)))

val result = sqlContext.sql("show TABLES in default")
checkAnswer(
result.filter("tableName = 'tbl7943_2'"),
Row("tbl7943_2", false))

checkAnswer(
result.filter("tableName = 'tbl7943_4'"),
Row("tbl7943_4", false))

sqlContext.sql("""use default""")
sqlContext.sql("""drop table if exists tbl7943_2 """)
sqlContext.sql("""drop table if exists tbl7943_4 """)
sqlContext.sql("""drop database if exists testdb7943 CASCADE""")
}
}
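With these changes, the touched entry points (saveAsTable via createDataSourceTable, refreshTable, invalidateTable) accept an optional database prefix. A minimal usage sketch, with illustrative names not taken from the PR:

    // Assumes sqlContext is a HiveContext and df is an existing DataFrame.
    sqlContext.sql("create database if not exists mydb")
    df.write
      .format("parquet")
      .mode(SaveMode.Overwrite)
      .saveAsTable("mydb.mytbl")
    sqlContext.refreshTable("mydb.mytbl")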