Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1075,19 +1075,11 @@ class DDLParserSuite extends AnalysisTest {
ifExists = true))
}

// ALTER TABLE table_name SET TBLPROPERTIES ('comment' = new_comment);
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Only tests related to SET have been moved, and tests related to UNSET will doing in another PR

// ALTER TABLE table_name UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key');
test("alter table: alter table properties") {
val sql1_table = "ALTER TABLE table_name SET TBLPROPERTIES ('test' = 'test', " +
"'comment' = 'new_comment')"
val sql2_table = "ALTER TABLE table_name UNSET TBLPROPERTIES ('comment', 'test')"
val sql3_table = "ALTER TABLE table_name UNSET TBLPROPERTIES IF EXISTS ('comment', 'test')"

comparePlans(
parsePlan(sql1_table),
SetTableProperties(
UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET TBLPROPERTIES", true),
Map("test" -> "test", "comment" -> "new_comment")))
comparePlans(
parsePlan(sql2_table),
UnsetTableProperties(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command

import org.apache.spark.SparkThrowable
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.logical.SetTableProperties
import org.apache.spark.sql.test.SharedSparkSession

class AlterTableSetTblPropertiesParserSuite extends AnalysisTest with SharedSparkSession {

  /**
   * Runs `sqlText` and returns the [[SparkThrowable]] raised while parsing it.
   * The statement is expected to fail with a [[ParseException]].
   */
  private def parseException(sqlText: String): SparkThrowable = {
    intercept[ParseException](sql(sqlText).collect())
  }

  // ALTER TABLE table_name SET TBLPROPERTIES ('comment' = new_comment);
  test("alter table: alter table properties") {
    val sql1_table = "ALTER TABLE table_name SET TBLPROPERTIES ('test' = 'test', " +
      "'comment' = 'new_comment')"
    // The statement should resolve to a SetTableProperties node over an unresolved table.
    comparePlans(
      parsePlan(sql1_table),
      SetTableProperties(
        UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET TBLPROPERTIES", true),
        Map("test" -> "test", "comment" -> "new_comment")))
  }

  test("alter table - property values must be set") {
    // A key without a value is rejected by the parser with _LEGACY_ERROR_TEMP_0035.
    val sql = "ALTER TABLE my_tab SET TBLPROPERTIES('key_without_value', 'key_with_value'='x')"
    checkError(
      exception = parseException(sql),
      errorClass = "_LEGACY_ERROR_TEMP_0035",
      parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
      context = ExpectedContext(
        fragment = sql,
        start = 0,
        stop = 78))
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command

import org.apache.spark.sql.{AnalysisException, QueryTest}
import org.apache.spark.sql.catalyst.TableIdentifier

/**
 * Unified tests for the `ALTER TABLE .. SET TBLPROPERTIES` command that cover both
 * V1 and V2 table catalogs. Checks that cannot run against every supported catalog
 * live in the more specific suites:
 *
 *   - V2 table catalog tests:
 *     `org.apache.spark.sql.execution.command.v2.AlterTableSetTblPropertiesSuite`
 *   - V1 table catalog tests:
 *     `org.apache.spark.sql.execution.command.v1.AlterTableSetTblPropertiesSuiteBase`
 *   - V1 In-Memory catalog:
 *     `org.apache.spark.sql.execution.command.v1.AlterTableSetTblPropertiesSuite`
 *   - V1 Hive External catalog:
 *     `org.apache.spark.sql.hive.execution.command.AlterTableSetTblPropertiesSuite`
 */
trait AlterTableSetTblPropertiesSuiteBase extends QueryTest with DDLCommandTestUtils {
  override val command = "ALTER TABLE .. SET TBLPROPERTIES"

  /** Asserts that the table `tableIdent` carries exactly `expectedTblProps`. */
  def checkTblProps(tableIdent: TableIdentifier, expectedTblProps: Map[String, String]): Unit

  test("alter table set tblproperties") {
    withNamespaceAndTable("ns", "tbl") { t =>
      sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int) $defaultUsing")
      val ident = TableIdentifier("tbl", Some("ns"), Some(catalog))

      // A freshly created table should expose no user-set properties.
      checkTblProps(ident, Map.empty[String, String])

      val threeProps = Map("k1" -> "v1", "k2" -> "v2", "k3" -> "v3")
      sql(s"ALTER TABLE $t SET TBLPROPERTIES ('k1' = 'v1', 'k2' = 'v2', 'k3' = 'v3')")
      checkTblProps(ident, threeProps)

      // Same command through an unqualified name after switching the current namespace.
      sql(s"USE $catalog.ns")
      sql(s"ALTER TABLE tbl SET TBLPROPERTIES ('k1' = 'v1', 'k2' = 'v2', 'k3' = 'v3')")
      checkTblProps(ident, threeProps)

      // Re-setting an existing key overwrites it; untouched keys are preserved.
      sql(s"ALTER TABLE $t SET TBLPROPERTIES ('k1' = 'v1', 'k2' = 'v8')")
      checkTblProps(ident, threeProps + ("k2" -> "v8"))

      // Altering a non-existent table must surface TABLE_OR_VIEW_NOT_FOUND.
      checkError(
        exception = intercept[AnalysisException] {
          sql("ALTER TABLE does_not_exist SET TBLPROPERTIES ('winner' = 'loser')")
        },
        errorClass = "TABLE_OR_VIEW_NOT_FOUND",
        parameters = Map("relationName" -> "`does_not_exist`"),
        context = ExpectedContext(fragment = "does_not_exist", start = 12, stop = 25)
      )
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -103,18 +103,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
stop = 98))
}

test("alter table - property values must be set") {
val sql = "ALTER TABLE my_tab SET TBLPROPERTIES('key_without_value', 'key_with_value'='x')"
checkError(
exception = parseException(sql),
errorClass = "_LEGACY_ERROR_TEMP_0035",
parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
context = ExpectedContext(
fragment = sql,
start = 0,
stop = 78))
}

test("alter table unset properties - property values must NOT be set") {
val sql = "ALTER TABLE my_tab UNSET TBLPROPERTIES('key_without_value', 'key_with_value'='x')"
checkError(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -327,10 +327,6 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {

protected val reversedProperties = Seq(PROP_OWNER)

test("alter table: set properties (datasource table)") {
testSetProperties(isDatasourceTable = true)
}

test("alter table: unset properties (datasource table)") {
testUnsetProperties(isDatasourceTable = true)
}
Expand Down Expand Up @@ -1117,40 +1113,6 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
)
}

protected def testSetProperties(isDatasourceTable: Boolean): Unit = {
if (!isUsingHiveMetastore) {
assert(isDatasourceTable, "InMemoryCatalog only supports data source tables")
}
val catalog = spark.sessionState.catalog
val tableIdent = TableIdentifier("tab1", Some("dbx"))
createDatabase(catalog, "dbx")
createTable(catalog, tableIdent, isDatasourceTable)
def getProps: Map[String, String] = {
if (isUsingHiveMetastore) {
normalizeCatalogTable(catalog.getTableMetadata(tableIdent)).properties
} else {
catalog.getTableMetadata(tableIdent).properties
}
}
assert(getProps.isEmpty)
// set table properties
sql("ALTER TABLE dbx.tab1 SET TBLPROPERTIES ('andrew' = 'or14', 'kor' = 'bel')")
assert(getProps == Map("andrew" -> "or14", "kor" -> "bel"))
// set table properties without explicitly specifying database
catalog.setCurrentDatabase("dbx")
sql("ALTER TABLE tab1 SET TBLPROPERTIES ('kor' = 'belle', 'kar' = 'bol')")
assert(getProps == Map("andrew" -> "or14", "kor" -> "belle", "kar" -> "bol"))
// table to alter does not exist
checkError(
exception = intercept[AnalysisException] {
sql("ALTER TABLE does_not_exist SET TBLPROPERTIES ('winner' = 'loser')")
},
errorClass = "TABLE_OR_VIEW_NOT_FOUND",
parameters = Map("relationName" -> "`does_not_exist`"),
context = ExpectedContext(fragment = "does_not_exist", start = 12, stop = 25)
)
}

protected def testUnsetProperties(isDatasourceTable: Boolean): Unit = {
if (!isUsingHiveMetastore) {
assert(isDatasourceTable, "InMemoryCatalog only supports data source tables")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command.v1

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.command
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION

/**
 * Unified tests for the `ALTER TABLE .. SET TBLPROPERTIES` command against V1 table
 * catalogs. Checks that cannot run for all V1 catalogs live in the more specific suites:
 *
 *   - V1 In-Memory catalog:
 *     `org.apache.spark.sql.execution.command.v1.AlterTableSetTblPropertiesSuite`
 *   - V1 Hive External catalog:
 *     `org.apache.spark.sql.hive.execution.command.AlterTableSetTblPropertiesSuite`
 */
trait AlterTableSetTblPropertiesSuiteBase extends command.AlterTableSetTblPropertiesSuiteBase {

  private[sql] lazy val sessionCatalog = spark.sessionState.catalog

  // Bookkeeping properties added by the Hive metastore that the tests must ignore.
  private val ignoredProps = Seq("transient_lastDdlTime")

  private def isUsingHiveMetastore: Boolean =
    spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "hive"

  /** Drops metastore-internal keys so only user-set properties are compared. */
  private def normalizeTblProps(props: Map[String, String]): Map[String, String] =
    props.filterNot { case (key, _) => ignoredProps.contains(key) }

  private def getTableProperties(tableIdent: TableIdentifier): Map[String, String] =
    sessionCatalog.getTableMetadata(tableIdent).properties

  override def checkTblProps(
      tableIdent: TableIdentifier,
      expectedTblProps: Map[String, String]): Unit = {
    val actualTblProps = getTableProperties(tableIdent)
    // Hive injects extra properties, so normalize before comparing in that case.
    val comparable =
      if (isUsingHiveMetastore) normalizeTblProps(actualTblProps) else actualTblProps
    assert(comparable == expectedTblProps)
  }
}

// Runs the shared V1 tests against the default session catalog configured by
// `CommandSuiteBase` (In-Memory catalog); all test logic is inherited.
class AlterTableSetTblPropertiesSuite
  extends AlterTableSetTblPropertiesSuiteBase with CommandSuiteBase
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command.v2

import scala.jdk.CollectionConverters._

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.connector.catalog.{Identifier, Table}
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.CatalogHelper
import org.apache.spark.sql.execution.command

/**
 * Tests for the `ALTER TABLE .. SET TBLPROPERTIES` command against V2 table catalogs.
 */
class AlterTableSetTblPropertiesSuite
  extends command.AlterTableSetTblPropertiesSuiteBase with CommandSuiteBase {

  // Properties the V2 catalog maintains itself; the shared tests must not see them.
  private val reservedProps = Set("provider", "owner")

  /** Removes catalog-managed keys so only user-set properties are compared. */
  private def normalizeTblProps(props: Map[String, String]): Map[String, String] =
    props.filterNot { case (key, _) => reservedProps(key) }

  /** Loads the V2 [[Table]] for `tableIdent` via the catalog manager. */
  private def getTableMetadata(tableIdent: TableIdentifier): Table = {
    val parts = tableIdent.nameParts
    val tableCatalog = spark.sessionState.catalogManager.catalog(parts.head).asTableCatalog
    tableCatalog.loadTable(Identifier.of(parts.drop(1).init.toArray, parts.last))
  }

  override def checkTblProps(
      tableIdent: TableIdentifier,
      expectedTblProps: Map[String, String]): Unit = {
    val actualTblProps = getTableMetadata(tableIdent).properties.asScala.toMap
    assert(normalizeTblProps(actualTblProps) === expectedTblProps)
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -154,10 +154,6 @@ class HiveDDLSuite
fs.exists(filesystemPath)
}

test("alter table: set properties") {
testSetProperties(isDatasourceTable = false)
}

test("alter table: unset properties") {
testUnsetProperties(isDatasourceTable = false)
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.hive.execution.command

import org.apache.spark.sql.execution.command.v1

/**
 * The class contains tests for the `ALTER TABLE .. SET TBLPROPERTIES` command to check
 * V1 Hive external table catalog. All test logic is inherited from the shared V1 base;
 * this class only binds the Hive `CommandSuiteBase`.
 */
class AlterTableSetTblPropertiesSuite
  extends v1.AlterTableSetTblPropertiesSuiteBase with CommandSuiteBase