Commit 4d75250 (parent 119539f)

Commit message: initial commit -a

10 files changed: 64 additions and 34 deletions
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 18 additions & 1 deletion
@@ -277,7 +277,7 @@ class Analyzer(override val catalogManager: CatalogManager)
         TypeCoercion.typeCoercionRules ++
         extendedResolutionRules : _*),
     Batch("Post-Hoc Resolution", Once,
-      Seq(ResolveNoopDropTable) ++
+      Seq(ResolveCommandsWithIfExists) ++
       postHocResolutionRules: _*),
     Batch("Normalize Alter Table", Once, ResolveAlterTableChanges),
     Batch("Remove Unresolved Hints", Once,
@@ -887,6 +887,11 @@ class Analyzer(override val catalogManager: CatalogManager)
           u.failAnalysis(s"${ident.quoted} is a temp view. '$cmd' expects a table")
         }
         u
+      case u @ UnresolvedView(ident, cmd) =>
+        lookupTempView(ident).map { _ =>
+          ResolvedView(ident.asIdentifier, isTemp = true)
+        }
+        .getOrElse(u)
       case u @ UnresolvedTableOrView(ident, cmd, allowTempView) =>
         lookupTempView(ident)
           .map { _ =>
@@ -977,6 +982,12 @@ class Analyzer(override val catalogManager: CatalogManager)
           .map(ResolvedTable(catalog.asTableCatalog, ident, _))
           .getOrElse(u)
 
+      case u @ UnresolvedView(NonSessionCatalogAndIdentifier(catalog, ident), cmd) =>
+        u.failAnalysis(
+          s"Cannot specify catalog `${catalog.name}` for view ${ident.quoted} " +
+            "because view support in v2 catalog has not been implemented yet. " +
+            s"$cmd expects a view.")
+
       case u @ UnresolvedTableOrView(NonSessionCatalogAndIdentifier(catalog, ident), _, _) =>
         CatalogV2Util.loadTable(catalog, ident)
           .map(ResolvedTable(catalog.asTableCatalog, ident, _))
@@ -1111,6 +1122,12 @@ class Analyzer(override val catalogManager: CatalogManager)
           case table => table
         }.getOrElse(u)
 
+      case u @ UnresolvedView(identifier, cmd) =>
+        lookupTableOrView(identifier).map {
+          case v: ResolvedView => v
+          case _ => u.failAnalysis(s"${identifier.quoted} is a table. '$cmd' expects a view.")
+        }.getOrElse(u)
+
       case u @ UnresolvedTableOrView(identifier, _, _) =>
         lookupTableOrView(identifier).getOrElse(u)
     }
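The new UnresolvedView cases mirror the existing UnresolvedTableOrView handling: a temp view resolves to ResolvedView(isTemp = true), a v2 catalog name fails fast because v2 view support does not exist yet, and a session-catalog name that turns out to be a table is rejected with a command-specific message. A minimal sketch of how this surfaces through the SQL API (illustrative only, not part of the commit; it assumes a local SparkSession and made-up object names):

import org.apache.spark.sql.SparkSession

// Illustrative sketch; the session, table, and view names are made up.
val spark = SparkSession.builder().master("local[1]").appName("drop-view-demo").getOrCreate()

spark.range(1).createOrReplaceTempView("tv")
spark.sql("DROP VIEW tv")  // temp view: UnresolvedView resolves to ResolvedView(isTemp = true)

spark.sql("CREATE TABLE t (id INT) USING parquet")
// Fails analysis with "t is a table. 'DROP VIEW' expects a view." per the new case above.
spark.sql("DROP VIEW t")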

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala

Lines changed: 0 additions & 5 deletions
@@ -193,11 +193,6 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
         writeOptions = c.writeOptions,
         orCreate = c.orCreate)
 
-    case DropViewStatement(NonSessionCatalogAndTable(catalog, viewName), _) =>
-      throw new AnalysisException(
-        s"Can not specify catalog `${catalog.name}` for view ${viewName.quoted} " +
-          s"because view support in catalog has not been implemented yet")
-
     case c @ CreateNamespaceStatement(CatalogAndNamespace(catalog, ns), _, _)
         if !isSessionCatalog(catalog) =>
       CreateNamespace(catalog.asNamespaceCatalog, ns, c.ifNotExists, c.properties)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveNoopDropTable.scala renamed to sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala

Lines changed: 8 additions & 6 deletions
@@ -17,17 +17,19 @@
 
 package org.apache.spark.sql.catalyst.analysis
 
-import org.apache.spark.sql.catalyst.plans.logical.{DropTable, LogicalPlan, NoopDropTable}
+import org.apache.spark.sql.catalyst.plans.logical.{DropTable, DropView, LogicalPlan, NoopCommand}
 import org.apache.spark.sql.catalyst.rules.Rule
 
 /**
- * A rule for handling [[DropTable]] logical plan when the table or temp view is not resolved.
- * If "ifExists" flag is set to true, the plan is resolved to [[NoopDropTable]],
- * which is a no-op command.
+ * A rule for handling commands when the table or temp view is not resolved.
+ * These commands support a flag, "ifExists", so that they do not fail when a relation is not
+ * resolved. If the "ifExists" flag is set to true. the plan is resolved to [[NoopCommand]],
 */
-object ResolveNoopDropTable extends Rule[LogicalPlan] {
+object ResolveCommandsWithIfExists extends Rule[LogicalPlan] {
   def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
     case DropTable(u: UnresolvedTableOrView, ifExists, _) if ifExists =>
-      NoopDropTable(u.multipartIdentifier)
+      NoopCommand(u.multipartIdentifier)
+    case DropView(u: UnresolvedView, ifExists) if ifExists =>
+      NoopCommand(u.multipartIdentifier)
   }
 }
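A small sketch (not part of the commit) of the rewrite the renamed rule now performs for DROP VIEW, matching what it already did for DROP TABLE; it assumes spark-catalyst on the classpath and uses a made-up view name:

import org.apache.spark.sql.catalyst.analysis.{ResolveCommandsWithIfExists, UnresolvedView}
import org.apache.spark.sql.catalyst.plans.logical.{DropView, NoopCommand}

// A DROP VIEW IF EXISTS whose view was never resolved collapses into a
// NoopCommand instead of failing analysis.
val unresolved = DropView(UnresolvedView(Seq("db", "missing_view"), "DROP VIEW"), ifExists = true)
val rewritten = ResolveCommandsWithIfExists(unresolved)
assert(rewritten == NoopCommand(Seq("db", "missing_view")))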

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala

Lines changed: 12 additions & 0 deletions
@@ -45,6 +45,18 @@ case class UnresolvedTable(
   override def output: Seq[Attribute] = Nil
 }
 
+/**
+ * Holds the name of a view that has yet to be looked up in a catalog. It will be resolved to
+ * [[ResolvedView]] during analysis.
+ */
+case class UnresolvedView(
+    multipartIdentifier: Seq[String],
+    commandName: String) extends LeafNode {
+  override lazy val resolved: Boolean = false
+
+  override def output: Seq[Attribute] = Nil
+}
+
 /**
  * Holds the name of a table or view that has yet to be looked up in a catalog. It will
  * be resolved to [[ResolvedTable]] or [[ResolvedView]] during analysis.
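A tiny sketch (not from the commit) of the contract this leaf node provides: it always reports itself as unresolved, so the Analyzer cases added above either resolve it or reject it, and commandName only feeds the error messages.

import org.apache.spark.sql.catalyst.analysis.UnresolvedView

val u = UnresolvedView(Seq("db", "v"), commandName = "DROP VIEW")
assert(!u.resolved)       // the analyzer must resolve or reject it
assert(u.output.isEmpty)  // a command placeholder produces no columns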

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 3 additions & 3 deletions
@@ -3164,11 +3164,11 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logging
   }
 
   /**
-   * Create a [[DropViewStatement]] command.
+   * Create a [[DropView]] command.
    */
   override def visitDropView(ctx: DropViewContext): AnyRef = withOrigin(ctx) {
-    DropViewStatement(
-      visitMultipartIdentifier(ctx.multipartIdentifier()),
+    DropView(
+      UnresolvedView(visitMultipartIdentifier(ctx.multipartIdentifier()), "DROP VIEW"),
       ctx.EXISTS != null)
   }
 
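A sketch of the parser output after this change (the view name is illustrative, not from the commit): the statement now parses straight into the DropView command with an UnresolvedView child instead of the removed DropViewStatement.

import org.apache.spark.sql.catalyst.analysis.UnresolvedView
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.plans.logical.DropView

val plan = CatalystSqlParser.parsePlan("DROP VIEW IF EXISTS db.v")
// Expected shape per the new visitDropView above.
assert(plan == DropView(UnresolvedView(Seq("db", "v"), "DROP VIEW"), ifExists = true))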
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala

Lines changed: 0 additions & 7 deletions
@@ -346,13 +346,6 @@ case class RenameTableStatement(
     newName: Seq[String],
     isView: Boolean) extends ParsedStatement
 
-/**
- * A DROP VIEW statement, as parsed from SQL.
- */
-case class DropViewStatement(
-    viewName: Seq[String],
-    ifExists: Boolean) extends ParsedStatement
-
 /**
  * An INSERT INTO statement, as parsed from SQL.
  *

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala

Lines changed: 11 additions & 2 deletions
@@ -419,9 +419,9 @@ case class DropTable(
 }
 
 /**
- * The logical plan for handling non-existing table for DROP TABLE command.
+ * The logical plan for no-op command handling non-existing table.
  */
-case class NoopDropTable(multipartIdentifier: Seq[String]) extends Command
+case class NoopCommand(multipartIdentifier: Seq[String]) extends Command
 
 /**
  * The logical plan of the ALTER TABLE command.
@@ -706,3 +706,12 @@ case class ShowPartitions(
   override val output: Seq[Attribute] = Seq(
     AttributeReference("partition", StringType, nullable = false)())
 }
+
+/**
+ * The logical plan of the DROP VIEW command.
+ */
+case class DropView(
+    child: LogicalPlan,
+    ifExists: Boolean) extends Command {
+  override def children: Seq[LogicalPlan] = child :: Nil
+}
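For reference, a minimal sketch (not from the commit, identifiers made up) of how the two nodes relate: DropView is a unary command whose child the analyzer swaps from UnresolvedView to ResolvedView, while NoopCommand keeps only the identifier of the relation that was not found.

import org.apache.spark.sql.catalyst.analysis.UnresolvedView
import org.apache.spark.sql.catalyst.plans.logical.{DropView, NoopCommand}

// DropView exposes its child so analyzer rules can resolve it in place.
val dropView = DropView(UnresolvedView(Seq("db", "v"), "DROP VIEW"), ifExists = false)
assert(dropView.children == Seq(dropView.child))

// NoopCommand is a leaf: nothing is left to resolve or execute.
val noop = NoopCommand(Seq("db", "missing"))
assert(noop.children.isEmpty)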

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 9 additions & 6 deletions
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.parser
 import java.util.Locale
 
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView}
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView, UnresolvedView}
 import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, BucketSpec, FileResource, FunctionResource, JarResource}
 import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
 import org.apache.spark.sql.catalyst.plans.logical._
@@ -722,12 +722,15 @@ class DDLParserSuite extends AnalysisTest {
 
   test("drop view") {
     parseCompare(s"DROP VIEW testcat.db.view",
-      DropViewStatement(Seq("testcat", "db", "view"), ifExists = false))
-    parseCompare(s"DROP VIEW db.view", DropViewStatement(Seq("db", "view"), ifExists = false))
+      DropView(UnresolvedView(Seq("testcat", "db", "view"), "DROP VIEW"), ifExists = false))
+    parseCompare(s"DROP VIEW db.view",
+      DropView(UnresolvedView(Seq("db", "view"), "DROP VIEW"), ifExists = false))
     parseCompare(s"DROP VIEW IF EXISTS db.view",
-      DropViewStatement(Seq("db", "view"), ifExists = true))
-    parseCompare(s"DROP VIEW view", DropViewStatement(Seq("view"), ifExists = false))
-    parseCompare(s"DROP VIEW IF EXISTS view", DropViewStatement(Seq("view"), ifExists = true))
+      DropView(UnresolvedView(Seq("db", "view"), "DROP VIEW"), ifExists = true))
+    parseCompare(s"DROP VIEW view",
+      DropView(UnresolvedView(Seq("view"), "DROP VIEW"), ifExists = false))
+    parseCompare(s"DROP VIEW IF EXISTS view",
+      DropView(UnresolvedView(Seq("view"), "DROP VIEW"), ifExists = true))
   }
 
   private def testCreateOrReplaceDdl(

sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala

Lines changed: 2 additions & 3 deletions
@@ -353,9 +353,8 @@ class ResolveSessionCatalog(
       }
       DropTableCommand(r.identifier.asTableIdentifier, ifExists, isView = false, purge = purge)
 
-    // v1 DROP TABLE supports temp view.
-    case DropViewStatement(TempViewOrV1Table(name), ifExists) =>
-      DropTableCommand(name.asTableIdentifier, ifExists, isView = true, purge = false)
+    case DropView(r: ResolvedView, ifExists) =>
+      DropTableCommand(r.identifier.asTableIdentifier, ifExists, isView = true, purge = false)
 
     case c @ CreateNamespaceStatement(CatalogAndNamespace(catalog, ns), _, _)
         if isSessionCatalog(catalog) =>
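The session-catalog lowering is unchanged in effect; only its input node differs. A hedged sketch of the v1 command that DROP VIEW db.v still produces (the identifier is illustrative):

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.command.DropTableCommand

// DropView(ResolvedView for "db.v", ifExists = false) is lowered to the same
// v1 command the removed DropViewStatement branch produced; temp views take
// the same path because the analyzer already gave them ResolvedView(isTemp = true).
val lowered = DropTableCommand(TableIdentifier("v", Some("db")), false, isView = true, purge = false)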

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala

Lines changed: 1 addition & 1 deletion
@@ -251,7 +251,7 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with PredicateHelper
     case DropTable(r: ResolvedTable, ifExists, purge) =>
       DropTableExec(r.catalog, r.identifier, ifExists, purge, invalidateCache(r)) :: Nil
 
-    case _: NoopDropTable =>
+    case _: NoopCommand =>
       LocalTableScanExec(Nil, Nil) :: Nil
 
     case AlterTable(catalog, ident, _, changes) =>
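End to end, the IF EXISTS path is now uniform for tables and views: an unresolved name becomes NoopCommand during analysis, and the strategy above plans it as an empty LocalTableScanExec. An illustrative, non-authoritative snippet, assuming a local session and made-up names:

import org.apache.spark.sql.SparkSession

// With IF EXISTS, a missing view (or table) is a no-op rather than an error.
val spark = SparkSession.builder().master("local[1]").appName("noop-demo").getOrCreate()
spark.sql("DROP VIEW IF EXISTS definitely_not_there")
spark.sql("DROP TABLE IF EXISTS also_not_there")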
