initial commit -a
imback82 committed Dec 7, 2020
commit 4d7525034f35a3d2207a31f9cc1a40da96e4f1be
@@ -277,7 +277,7 @@ class Analyzer(override val catalogManager: CatalogManager)
TypeCoercion.typeCoercionRules ++
extendedResolutionRules : _*),
Batch("Post-Hoc Resolution", Once,
- Seq(ResolveNoopDropTable) ++
+ Seq(ResolveCommandsWithIfExists) ++
postHocResolutionRules: _*),
Batch("Normalize Alter Table", Once, ResolveAlterTableChanges),
Batch("Remove Unresolved Hints", Once,
@@ -887,6 +887,11 @@ class Analyzer(override val catalogManager: CatalogManager)
u.failAnalysis(s"${ident.quoted} is a temp view. '$cmd' expects a table")
}
u
+ case u @ UnresolvedView(ident, cmd) =>
+ lookupTempView(ident).map { _ =>
+ ResolvedView(ident.asIdentifier, isTemp = true)
+ }
+ .getOrElse(u)
case u @ UnresolvedTableOrView(ident, cmd, allowTempView) =>
lookupTempView(ident)
.map { _ =>
@@ -977,6 +982,12 @@ class Analyzer(override val catalogManager: CatalogManager)
.map(ResolvedTable(catalog.asTableCatalog, ident, _))
.getOrElse(u)

+ case u @ UnresolvedView(NonSessionCatalogAndIdentifier(catalog, ident), cmd) =>
+ u.failAnalysis(
+ s"Cannot specify catalog `${catalog.name}` for view ${ident.quoted} " +
+ "because view support in v2 catalog has not been implemented yet. " +
+ s"$cmd expects a view.")
+

Contributor: this seems like something that we should put in CheckAnalysis

Contributor Author: Moved to CheckAnalysis.

case u @ UnresolvedTableOrView(NonSessionCatalogAndIdentifier(catalog, ident), _, _) =>
CatalogV2Util.loadTable(catalog, ident)
.map(ResolvedTable(catalog.asTableCatalog, ident, _))
@@ -1111,6 +1122,12 @@ class Analyzer(override val catalogManager: CatalogManager)
case table => table
}.getOrElse(u)

+ case u @ UnresolvedView(identifier, cmd) =>
+ lookupTableOrView(identifier).map {
+ case v: ResolvedView => v
+ case _ => u.failAnalysis(s"${identifier.quoted} is a table. '$cmd' expects a view.")
+ }.getOrElse(u)
+
case u @ UnresolvedTableOrView(identifier, _, _) =>
lookupTableOrView(identifier).getOrElse(u)
}
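
Taken together, the new Analyzer cases above give UnresolvedView a fixed lookup order: a temp view wins first, a view name qualified with a non-session (v2) catalog fails, and anything else falls back to the session catalog, where a table name is rejected and an unknown name is left unresolved. A compact, self-contained sketch of that order (the types and helper parameters below are illustrative stand-ins, not Spark's internal classes):

  // Simplified stand-ins; not Catalyst types.
  sealed trait ViewResolution
  case class Resolved(name: Seq[String], isTemp: Boolean) extends ViewResolution
  case class Failed(reason: String) extends ViewResolution
  case class Unresolved(name: Seq[String]) extends ViewResolution

  def resolveView(
      name: Seq[String],
      cmd: String,
      isTempView: Seq[String] => Boolean,                 // stand-in for lookupTempView
      isNonSessionCatalog: Seq[String] => Boolean,        // stand-in for NonSessionCatalogAndIdentifier
      sessionCatalogKind: Seq[String] => Option[String]   // stand-in for lookupTableOrView: "view" | "table"
      ): ViewResolution = {
    if (isTempView(name)) {
      Resolved(name, isTemp = true)
    } else if (isNonSessionCatalog(name)) {
      Failed(s"view support in v2 catalog has not been implemented yet; $cmd expects a view")
    } else {
      sessionCatalogKind(name) match {
        case Some("view")  => Resolved(name, isTemp = false)
        case Some("table") => Failed(s"${name.mkString(".")} is a table. '$cmd' expects a view.")
        case _             => Unresolved(name)            // handled later, e.g. by ResolveCommandsWithIfExists
      }
    }
  }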

@@ -193,11 +193,6 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
writeOptions = c.writeOptions,
orCreate = c.orCreate)

- case DropViewStatement(NonSessionCatalogAndTable(catalog, viewName), _) =>
- throw new AnalysisException(
- s"Can not specify catalog `${catalog.name}` for view ${viewName.quoted} " +
- s"because view support in catalog has not been implemented yet")

case c @ CreateNamespaceStatement(CatalogAndNamespace(catalog, ns), _, _)
if !isSessionCatalog(catalog) =>
CreateNamespace(catalog.asNamespaceCatalog, ns, c.ifNotExists, c.properties)

@@ -17,17 +17,19 @@

package org.apache.spark.sql.catalyst.analysis

- import org.apache.spark.sql.catalyst.plans.logical.{DropTable, LogicalPlan, NoopDropTable}
+ import org.apache.spark.sql.catalyst.plans.logical.{DropTable, DropView, LogicalPlan, NoopCommand}
import org.apache.spark.sql.catalyst.rules.Rule

/**
- * A rule for handling [[DropTable]] logical plan when the table or temp view is not resolved.
- * If "ifExists" flag is set to true, the plan is resolved to [[NoopDropTable]],
- * which is a no-op command.
+ * A rule for handling commands when the table or temp view is not resolved.
+ * These commands support a flag, "ifExists", so that they do not fail when a relation is not
+ * resolved. If the "ifExists" flag is set to true, the plan is resolved to [[NoopCommand]].
*/
- object ResolveNoopDropTable extends Rule[LogicalPlan] {
+ object ResolveCommandsWithIfExists extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
case DropTable(u: UnresolvedTableOrView, ifExists, _) if ifExists =>
- NoopDropTable(u.multipartIdentifier)
+ NoopCommand(u.multipartIdentifier)
+ case DropView(u: UnresolvedView, ifExists) if ifExists =>
+ NoopCommand(u.multipartIdentifier)
}
}

Contributor: shall we add cmdName: String to NoopCommand?

Contributor Author: Thanks, added.
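
A self-contained sketch of the rewrite the renamed rule above performs; the case classes are simplified stand-ins rather than Catalyst's types:

  // Only commands whose relation is still unresolved AND that carry IF EXISTS are
  // downgraded to a no-op; everything else passes through untouched.
  sealed trait Plan
  case class UnresolvedViewStub(name: Seq[String]) extends Plan
  case class UnresolvedTableOrViewStub(name: Seq[String]) extends Plan
  case class DropTableStub(child: Plan, ifExists: Boolean, purge: Boolean) extends Plan
  case class DropViewStub(child: Plan, ifExists: Boolean) extends Plan
  case class NoopCommandStub(name: Seq[String]) extends Plan

  def resolveCommandsWithIfExists(plan: Plan): Plan = plan match {
    case DropTableStub(UnresolvedTableOrViewStub(name), true, _) => NoopCommandStub(name)
    case DropViewStub(UnresolvedViewStub(name), true)            => NoopCommandStub(name)
    case other                                                   => other
  }

  // resolveCommandsWithIfExists(DropViewStub(UnresolvedViewStub(Seq("db", "v")), ifExists = true))
  //   returns NoopCommandStub(Seq("db", "v"))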

@@ -45,6 +45,18 @@ case class UnresolvedTable(
override def output: Seq[Attribute] = Nil
}

+ /**
+ * Holds the name of a view that has yet to be looked up in a catalog. It will be resolved to
+ * [[ResolvedView]] during analysis.
+ */
+ case class UnresolvedView(
+ multipartIdentifier: Seq[String],
+ commandName: String) extends LeafNode {
+ override lazy val resolved: Boolean = false
+
+ override def output: Seq[Attribute] = Nil
+ }
+
/**
* Holds the name of a table or view that has yet to be looked up in a catalog. It will
* be resolved to [[ResolvedTable]] or [[ResolvedView]] during analysis.
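
For reference, this is the node the parser is expected to build for a DROP VIEW statement once this patch is applied (see the AstBuilder change below); a minimal, hedged construction:

  // Assumes this patch is applied; field names match the definition above.
  import org.apache.spark.sql.catalyst.analysis.UnresolvedView

  val unresolvedView = UnresolvedView(
    multipartIdentifier = Seq("db", "v"),
    commandName = "DROP VIEW")
  // unresolvedView.resolved is false; analysis later replaces the node with a ResolvedView.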

@@ -3164,11 +3164,11 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
}

/**
- * Create a [[DropViewStatement]] command.
+ * Create a [[DropView]] command.
*/
override def visitDropView(ctx: DropViewContext): AnyRef = withOrigin(ctx) {
- DropViewStatement(
- visitMultipartIdentifier(ctx.multipartIdentifier()),
+ DropView(
+ UnresolvedView(visitMultipartIdentifier(ctx.multipartIdentifier()), "DROP VIEW"),
ctx.EXISTS != null)
}
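
A hedged way to observe the new plan shape is to run the Catalyst parser directly; CatalystSqlParser is the standard entry point, and the expected result below follows the DDLParserSuite change further down (the exact rendering may differ):

  import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

  // With this patch applied, DROP VIEW should parse into the new command wrapping an
  // unresolved child instead of the removed DropViewStatement.
  val plan = CatalystSqlParser.parsePlan("DROP VIEW IF EXISTS db.view")
  println(plan.treeString)
  // Expected shape: DropView(UnresolvedView(Seq("db", "view"), "DROP VIEW"), ifExists = true)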


@@ -346,13 +346,6 @@ case class RenameTableStatement(
newName: Seq[String],
isView: Boolean) extends ParsedStatement

- /**
- * A DROP VIEW statement, as parsed from SQL.
- */
- case class DropViewStatement(
- viewName: Seq[String],
- ifExists: Boolean) extends ParsedStatement
-
/**
* An INSERT INTO statement, as parsed from SQL.
*

@@ -419,9 +419,9 @@ case class DropTable(
}

/**
- * The logical plan for handling non-existing table for DROP TABLE command.
+ * The logical plan for a no-op command handling a non-existing table or view.
*/
- case class NoopDropTable(multipartIdentifier: Seq[String]) extends Command
+ case class NoopCommand(multipartIdentifier: Seq[String]) extends Command

/**
* The logical plan of the ALTER TABLE command.
@@ -706,3 +706,12 @@ case class ShowPartitions(
override val output: Seq[Attribute] = Seq(
AttributeReference("partition", StringType, nullable = false)())
}

+ /**
+ * The logical plan of the DROP VIEW command.
+ */
+ case class DropView(
+ child: LogicalPlan,
+ ifExists: Boolean) extends Command {
+ override def children: Seq[LogicalPlan] = child :: Nil
+ }

@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.parser
import java.util.Locale

import org.apache.spark.sql.AnalysisException
- import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView}
+ import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView, UnresolvedView}
import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, BucketSpec, FileResource, FunctionResource, JarResource}
import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
import org.apache.spark.sql.catalyst.plans.logical._
@@ -722,12 +722,15 @@ class DDLParserSuite extends AnalysisTest {

test("drop view") {
parseCompare(s"DROP VIEW testcat.db.view",
DropViewStatement(Seq("testcat", "db", "view"), ifExists = false))
parseCompare(s"DROP VIEW db.view", DropViewStatement(Seq("db", "view"), ifExists = false))
DropView(UnresolvedView(Seq("testcat", "db", "view"), "DROP VIEW"), ifExists = false))
parseCompare(s"DROP VIEW db.view",
DropView(UnresolvedView(Seq("db", "view"), "DROP VIEW"), ifExists = false))
parseCompare(s"DROP VIEW IF EXISTS db.view",
DropViewStatement(Seq("db", "view"), ifExists = true))
parseCompare(s"DROP VIEW view", DropViewStatement(Seq("view"), ifExists = false))
parseCompare(s"DROP VIEW IF EXISTS view", DropViewStatement(Seq("view"), ifExists = true))
DropView(UnresolvedView(Seq("db", "view"), "DROP VIEW"), ifExists = true))
parseCompare(s"DROP VIEW view",
DropView(UnresolvedView(Seq("view"), "DROP VIEW"), ifExists = false))
parseCompare(s"DROP VIEW IF EXISTS view",
DropView(UnresolvedView(Seq("view"), "DROP VIEW"), ifExists = true))
}

private def testCreateOrReplaceDdl(

@@ -353,9 +353,8 @@ class ResolveSessionCatalog(
}
DropTableCommand(r.identifier.asTableIdentifier, ifExists, isView = false, purge = purge)

- // v1 DROP TABLE supports temp view.
- case DropViewStatement(TempViewOrV1Table(name), ifExists) =>
- DropTableCommand(name.asTableIdentifier, ifExists, isView = true, purge = false)
+ case DropView(r: ResolvedView, ifExists) =>
+ DropTableCommand(r.identifier.asTableIdentifier, ifExists, isView = true, purge = false)

case c @ CreateNamespaceStatement(CatalogAndNamespace(catalog, ns), _, _)
if isSessionCatalog(catalog) =>

@@ -251,7 +251,7 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
case DropTable(r: ResolvedTable, ifExists, purge) =>
DropTableExec(r.catalog, r.identifier, ifExists, purge, invalidateCache(r)) :: Nil

- case _: NoopDropTable =>
+ case _: NoopCommand =>
LocalTableScanExec(Nil, Nil) :: Nil

case AlterTable(catalog, ident, _, changes) =>
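
Putting the pieces together, the user-visible behavior should stay the same as before the refactoring: dropping a missing view with IF EXISTS becomes a NoopCommand, which DataSourceV2Strategy plans as an empty LocalTableScanExec. A hedged spark-shell sketch (the view name is made up for illustration):

  // Assumes a spark-shell session where `spark` is predefined.
  spark.sql("DROP VIEW IF EXISTS no_such_view")   // no-op: NoopCommand -> LocalTableScanExec(Nil, Nil)
  // spark.sql("DROP VIEW no_such_view")          // without IF EXISTS, analysis fails because
  //                                              // the view cannot be resolved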