@@ -28,6 +28,7 @@ import org.apache.spark.sql.execution.RunnableCommand
 import org.apache.spark.sql.hive.HiveContext
 import org.apache.spark.sql.types._
 import org.apache.spark.util.Utils
+import org.apache.hadoop.hive.conf.HiveConf
 
 /**
  * Analyzes the given table in the current database to generate statistics, which will be
@@ -60,6 +61,12 @@ case class DropTable(
     val databaseName = dbAndTableName
       .lift(dbAndTableName.size - 2)
       .getOrElse(hiveContext.catalog.client.currentDatabase)
+    // tempDbname is used to pass the test "drop_partitions_filter":
+    // when hive.exec.drop.ignorenonexistent=false and we run "DROP TABLE dbname.tablename",
+    // Hive throws an exception (this is a Hive bug).
+    val tempDbname =
+      if (hiveContext.hiveconf.getBoolVar(HiveConf.ConfVars.DROPIGNORESNONEXISTENT))
+        s"$databaseName." else ""
     try {
       hiveContext.cacheManager.tryUncacheQuery(hiveContext.table(dbAndTableName.last))
     } catch {
@@ -72,7 +79,7 @@ case class DropTable(
       case e: Throwable => log.warn(s"${e.getMessage}", e)
     }
     hiveContext.invalidateTable(dbAndTableName.last)
-    hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$databaseName.${dbAndTableName.last}")
+    hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$tempDbname${dbAndTableName.last}")
     hiveContext.catalog.unregisterTable(Seq(databaseName, dbAndTableName.last))
     Seq.empty[InternalRow]
   }
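
Not part of the patch: a minimal, self-contained sketch of how the conditional database prefix introduced above behaves, with a boolean parameter standing in for the value of `HiveConf.ConfVars.DROPIGNORESNONEXISTENT` (`hive.exec.drop.ignorenonexistent`); the object and helper names are hypothetical.

```scala
// Sketch of the prefixing rule: qualify the table with its database only when
// Hive ignores non-existent tables; otherwise "DROP TABLE dbname.tablename"
// would hit the Hive bug mentioned in the comment above.
object DropTableSqlSketch {
  def dropTableSql(
      ifExistsClause: String,
      databaseName: String,
      tableName: String,
      dropIgnoresNonExistent: Boolean): String = {
    val dbPrefix = if (dropIgnoresNonExistent) s"$databaseName." else ""
    s"DROP TABLE $ifExistsClause$dbPrefix$tableName"
  }

  def main(args: Array[String]): Unit = {
    // Flag enabled: statement is fully qualified.
    println(dropTableSql("IF EXISTS ", "default", "src", dropIgnoresNonExistent = true))
    // Flag disabled: database prefix is omitted.
    println(dropTableSql("IF EXISTS ", "default", "src", dropIgnoresNonExistent = false))
  }
}
```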