Skip to content
Closed
Original file line number Diff line number Diff line change
Expand Up @@ -4231,7 +4231,7 @@
},
"UNSET_NONEXISTENT_PROPERTIES" : {
"message" : [
"Attempted to unset non-existent properties [<properties>] in table <table>."
"Attempted to unset non-existent properties [<properties>] in relation <relationId>."
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I want to reuse it in the namespace scenario

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

but namespace is not a relation... maybe just table or namespace <name>

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yea, okay.

],
"sqlState" : "42K0J"
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,8 @@ statement
(WITH (DBPROPERTIES | PROPERTIES) propertyList))* #createNamespace
| ALTER namespace identifierReference
SET (DBPROPERTIES | PROPERTIES) propertyList #setNamespaceProperties
| ALTER namespace identifierReference
UNSET (DBPROPERTIES | PROPERTIES) (IF EXISTS)? propertyList #unsetNamespaceProperties
| ALTER namespace identifierReference
SET locationSpec #setNamespaceLocation
| DROP namespace (IF EXISTS)? identifierReference
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,24 +53,9 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
case CurrentNamespace =>
ResolvedNamespace(currentCatalog, catalogManager.currentNamespace.toImmutableArraySeq)
case UnresolvedNamespace(Seq(), fetchMetadata) =>
resolveNamespace(currentCatalog, Seq.empty[String], fetchMetadata)
ResolveCatalogs.resolveNamespace(currentCatalog, Seq.empty[String], fetchMetadata)
case UnresolvedNamespace(CatalogAndNamespace(catalog, ns), fetchMetadata) =>
resolveNamespace(catalog, ns, fetchMetadata)
}

private def resolveNamespace(
catalog: CatalogPlugin,
ns: Seq[String],
fetchMetadata: Boolean): ResolvedNamespace = {
catalog match {
case supportsNS: SupportsNamespaces if fetchMetadata =>
ResolvedNamespace(
catalog,
ns,
supportsNS.loadNamespaceMetadata(ns.toArray).asScala.toMap)
case _ =>
ResolvedNamespace(catalog, ns)
}
ResolveCatalogs.resolveNamespace(catalog, ns, fetchMetadata)
}

private def resolveVariableName(nameParts: Seq[String]): ResolvedIdentifier = {
Expand Down Expand Up @@ -98,3 +83,20 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
}
}
}

object ResolveCatalogs {
  /**
   * Resolves a namespace within the given catalog, optionally loading its metadata.
   *
   * Lives on the companion object (rather than the rule class) so it can be reused
   * outside the analyzer rule, e.g. by `UnsetNamespacePropertiesCommand`.
   *
   * @param catalog       the catalog plugin that owns the namespace
   * @param ns            the multi-part namespace name
   * @param fetchMetadata whether to eagerly load namespace metadata; only honored when
   *                      the catalog implements `SupportsNamespaces`, otherwise ignored
   * @return a [[ResolvedNamespace]], with metadata populated only in the
   *         `SupportsNamespaces`-and-`fetchMetadata` case
   */
  def resolveNamespace(
      catalog: CatalogPlugin,
      ns: Seq[String],
      fetchMetadata: Boolean): ResolvedNamespace = {
    catalog match {
      case supportsNS: SupportsNamespaces if fetchMetadata =>
        ResolvedNamespace(
          catalog,
          ns,
          supportsNS.loadNamespaceMetadata(ns.toArray).asScala.toMap)
      case _ =>
        ResolvedNamespace(catalog, ns)
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -3671,7 +3671,7 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging {
}
}

private def cleanNamespaceProperties(
protected def cleanNamespaceProperties(
properties: Map[String, String],
ctx: ParserRuleContext): Map[String, String] = withOrigin(ctx) {
import SupportsNamespaces._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2659,12 +2659,12 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
}

/**
 * Error for `ALTER ... UNSET PROPERTIES` when some of the requested property keys do
 * not exist on the target. Takes generic name parts (instead of a `TableIdentifier`)
 * so the same error can be raised for both tables and namespaces.
 *
 * @param properties the property keys that were not found
 * @param nameParts  the multi-part identifier of the table or namespace
 */
def unsetNonExistentPropertiesError(
    properties: Seq[String], nameParts: Seq[String]): Throwable = {
  new AnalysisException(
    errorClass = "UNSET_NONEXISTENT_PROPERTIES",
    messageParameters = Map(
      "properties" -> properties.map(toSQLId).mkString(", "),
      "relationId" -> toSQLId(nameParts))
  )
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1098,4 +1098,24 @@ class SparkSqlAstBuilder extends AstBuilder {

(ctx.LOCAL != null, finalStorage, Some(DDLUtils.HIVE_PROVIDER))
}

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I have to put the parsing logic for UnsetNamespacePropertiesCommand here, because UnsetNamespacePropertiesCommand can only be defined in the sql/core module, and AstBuilder (which lives in sql/catalyst) cannot reference classes from sql/core

/**
 * Create an [[UnsetNamespacePropertiesCommand]] command.
 *
 * Expected format:
 * {{{
 *   ALTER (DATABASE|SCHEMA|NAMESPACE) database
 *   UNSET (DBPROPERTIES | PROPERTIES) [IF EXISTS] ('comment', 'key');
 * }}}
 */
override def visitUnsetNamespaceProperties(
    ctx: UnsetNamespacePropertiesContext): LogicalPlan = withOrigin(ctx) {
  // visitPropertyKeys rejects entries that specify a value ('k'='v' is illegal here).
  val properties = visitPropertyKeys(ctx.propertyList)
  // cleanNamespaceProperties validates the keys (e.g. reserved namespace properties)
  // against the same rules used by SET (DB)PROPERTIES.
  val cleanedProperties = cleanNamespaceProperties(properties.map(_ -> "").toMap, ctx).keys.toSeq
  val ifExists = ctx.EXISTS != null
  UnsetNamespacePropertiesCommand(
    withIdentClause(ctx.identifierReference(), UnresolvedIdentifier(_)),
    cleanedProperties,
    ifExists)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan, SupervisingCommand}
import org.apache.spark.sql.catalyst.trees.LeafLike
import org.apache.spark.sql.catalyst.trees.{LeafLike, UnaryLike}
import org.apache.spark.sql.connector.ExternalCommandRunner
import org.apache.spark.sql.execution.{CommandExecutionMode, ExplainMode, LeafExecNode, SparkPlan, UnaryExecNode}
import org.apache.spark.sql.execution.metric.SQLMetric
Expand All @@ -51,6 +51,7 @@ trait RunnableCommand extends Command {
}

trait LeafRunnableCommand extends RunnableCommand with LeafLike[LogicalPlan]
trait UnaryRunnableCommand extends RunnableCommand with UnaryLike[LogicalPlan]
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Because RunnableCommand is defined in the module sql/core, it seems that adding UnaryRunnableCommand here is more appropriate.


/**
* A physical operator that executes the run method of a `RunnableCommand` and
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -340,7 +340,7 @@ case class AlterTableUnsetPropertiesCommand(
&& key != TableCatalog.PROP_COMMENT)
if (nonexistentKeys.nonEmpty) {
throw QueryCompilationErrors.unsetNonExistentPropertiesError(
nonexistentKeys, table.identifier)
nonexistentKeys, table.identifier.nameParts)
}
}
// If comment is in the table property, we reset it to None
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command

import org.apache.spark.SparkThrowable
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedIdentifier}
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkSqlParser
import org.apache.spark.sql.test.SharedSparkSession

/**
 * Parser tests for `ALTER (DATABASE|SCHEMA|NAMESPACE) ... UNSET (DBPROPERTIES|PROPERTIES)`.
 */
class AlterNamespaceUnsetPropertiesParserSuite extends AnalysisTest with SharedSparkSession {

  private lazy val parser = new SparkSqlParser()

  // Executes the statement and returns the ParseException it is expected to raise.
  private def parseException(sqlText: String): SparkThrowable =
    intercept[ParseException](sql(sqlText).collect())

  // Parses the statement into an unresolved logical plan without executing it.
  private def parsePlan(sqlText: String): LogicalPlan = parser.parsePlan(sqlText)

  test("unset namespace properties") {
    // Every namespace keyword variant must accept every property keyword variant.
    for {
      nsToken <- Seq("DATABASE", "SCHEMA", "NAMESPACE")
      propToken <- Seq("PROPERTIES", "DBPROPERTIES")
    } {
      val expectedId = UnresolvedIdentifier(Seq("a", "b", "c"))

      comparePlans(
        parsePlan(s"ALTER $nsToken a.b.c UNSET $propToken ('a', 'b', 'c')"),
        UnsetNamespacePropertiesCommand(expectedId, Seq("a", "b", "c"), ifExists = false))

      comparePlans(
        parsePlan(s"ALTER $nsToken a.b.c UNSET $propToken IF EXISTS ('a', 'b', 'c')"),
        UnsetNamespacePropertiesCommand(expectedId, Seq("a", "b", "c"), ifExists = true))

      comparePlans(
        parsePlan(s"ALTER $nsToken a.b.c UNSET $propToken ('a')"),
        UnsetNamespacePropertiesCommand(expectedId, Seq("a"), ifExists = false))
    }
  }

  test("property values must not be set") {
    val sql = "ALTER NAMESPACE my_db UNSET PROPERTIES('key_without_value', 'key_with_value'='x')"
    checkError(
      exception = parseException(sql),
      errorClass = "_LEGACY_ERROR_TEMP_0035",
      parameters = Map("message" -> "Values should not be specified for key(s): [key_with_value]"),
      context = ExpectedContext(
        fragment = sql,
        start = 0,
        stop = 80))
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,127 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command

import org.apache.spark.sql.{AnalysisException, QueryTest}
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.connector.catalog.{CatalogV2Util, SupportsNamespaces}
import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
import org.apache.spark.sql.internal.SQLConf

/**
* This base suite contains unified tests for the `ALTER NAMESPACE ... UNSET PROPERTIES` command
* that check V1 and V2 table catalogs. The tests that cannot run for all supported catalogs are
* located in more specific test suites:
*
* - V2 table catalog tests:
* `org.apache.spark.sql.execution.command.v2.AlterNamespaceUnsetPropertiesSuite`
* - V1 table catalog tests:
* `org.apache.spark.sql.execution.command.v1.AlterNamespaceUnsetPropertiesSuiteBase`
* - V1 In-Memory catalog:
* `org.apache.spark.sql.execution.command.v1.AlterNamespaceUnsetPropertiesSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.AlterNamespaceUnsetPropertiesSuite`
*/
trait AlterNamespaceUnsetPropertiesSuiteBase extends QueryTest with DDLCommandTestUtils {
  override val command = "ALTER NAMESPACE ... UNSET PROPERTIES"

  // Namespace name (without the catalog prefix) that the concrete suite runs against.
  protected def namespace: String

  /**
   * Returns the `Properties` value reported by `DESCRIBE NAMESPACE EXTENDED` for the
   * given namespace, e.g. "((a,a), (b,b))".
   */
  protected def getProperties(namespace: String): String = {
    val propsRow = sql(s"DESCRIBE NAMESPACE EXTENDED $namespace")
      .toDF("key", "value")
      .where("key like 'Properties%'")
      .collect()
    assert(propsRow.length == 1)
    propsRow(0).getString(1)
  }

  test("namespace does not exist") {
    val ns = "not_exist"
    val e = intercept[AnalysisException] {
      sql(s"ALTER NAMESPACE $catalog.$ns UNSET PROPERTIES ('d')")
    }
    checkError(e,
      errorClass = "SCHEMA_NOT_FOUND",
      parameters = Map("schemaName" -> s"`$ns`"))
  }

  test("basic test") {
    val ns = s"$catalog.$namespace"
    withNamespace(ns) {
      sql(s"CREATE NAMESPACE $ns")
      assert(getProperties(ns) === "")
      sql(s"ALTER NAMESPACE $ns SET PROPERTIES ('a'='a', 'b'='b', 'c'='c')")
      assert(getProperties(ns) === "((a,a), (b,b), (c,c))")
      sql(s"ALTER NAMESPACE $ns UNSET PROPERTIES ('b')")
      assert(getProperties(ns) === "((a,a), (c,c))")

      // Unsetting a property that is no longer present fails ...
      checkError(
        exception = intercept[AnalysisException] {
          sql(s"ALTER NAMESPACE $ns UNSET PROPERTIES ('b')")
        },
        errorClass = "UNSET_NONEXISTENT_PROPERTIES",
        parameters = Map("properties" -> "`b`", "relationId" -> toSQLId(namespace)))
      // ... unless IF EXISTS is specified.
      sql(s"ALTER NAMESPACE $ns UNSET PROPERTIES IF EXISTS ('b')")
    }
  }

  test("test reserved properties") {
    import SupportsNamespaces._
    import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
    val ns = s"$catalog.$namespace"
    // With reserved properties enforced, unsetting one is a parse-time error.
    withSQLConf((SQLConf.LEGACY_PROPERTY_NON_RESERVED.key, "false")) {
      CatalogV2Util.NAMESPACE_RESERVED_PROPERTIES.filterNot(_ == PROP_COMMENT).foreach { key =>
        withNamespace(ns) {
          sql(s"CREATE NAMESPACE $ns")
          val sqlText = s"ALTER NAMESPACE $ns UNSET PROPERTIES ('$key')"
          checkErrorMatchPVals(
            exception = intercept[ParseException] {
              sql(sqlText)
            },
            errorClass = "UNSUPPORTED_FEATURE.SET_NAMESPACE_PROPERTY",
            parameters = Map(
              "property" -> key,
              "msg" -> ".*"),
            sqlState = None,
            context = ExpectedContext(
              fragment = sqlText,
              start = 0,
              stop = 37 + ns.length + key.length)
          )
        }
      }
    }
    // In legacy mode the statement parses, but reserved keys are silently ignored.
    withSQLConf((SQLConf.LEGACY_PROPERTY_NON_RESERVED.key, "true")) {
      CatalogV2Util.NAMESPACE_RESERVED_PROPERTIES.filterNot(_ == PROP_COMMENT).foreach { key =>
        withNamespace(ns) {
          // Set the location explicitly because v2 catalog may not set the default location.
          // Without this, `meta.get(key)` below may return null.
          sql(s"CREATE NAMESPACE $ns LOCATION 'tmp/prop_test'")
          assert(getProperties(ns) === "")
          sql(s"ALTER NAMESPACE $ns UNSET PROPERTIES ('$key')")
          assert(getProperties(ns) === "", s"$key is a reserved namespace property and ignored")
          val meta = spark.sessionState.catalogManager.catalog(catalog)
            .asNamespaceCatalog.loadNamespaceMetadata(namespace.split('.'))
          assert(!meta.get(key).contains("foo"),
            "reserved properties should not have side effects")
        }
      }
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -343,7 +343,7 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
errorClass = "UNSET_NONEXISTENT_PROPERTIES",
parameters = Map(
"properties" -> "`test_prop1`, `test_prop2`",
"table" -> "`spark_catalog`.`default`.`test_table`")
"relationId" -> "`spark_catalog`.`default`.`test_table`")
)
}
}
Expand Down Expand Up @@ -1152,7 +1152,7 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
sql("ALTER TABLE tab1 UNSET TBLPROPERTIES ('c', 'xyz')")
},
errorClass = "UNSET_NONEXISTENT_PROPERTIES",
parameters = Map("properties" -> "`xyz`", "table" -> "`spark_catalog`.`dbx`.`tab1`")
parameters = Map("properties" -> "`xyz`", "relationId" -> "`spark_catalog`.`dbx`.`tab1`")
)
// property to unset does not exist, but "IF EXISTS" is specified
sql("ALTER TABLE tab1 UNSET TBLPROPERTIES IF EXISTS ('c', 'xyz')")
Expand Down
Loading