Skip to content

Commit cd8bf11

Browse files
panbingkun authored and cloud-fan committed
[SPARK-48659][SQL][TESTS] Unify v1 and v2 ALTER TABLE .. SET TBLPROPERTIES tests
### What changes were proposed in this pull request? - Move parser tests from `o.a.s.s.c.p.DDLParserSuite` and `o.a.s.s.e.c.DDLParserSuite` to `AlterTableSetTblPropertiesParserSuite`. - Porting and refactoring DS v1 tests from `DDLSuite` and `HiveDDLSuite` to `v1.AlterTableSetTblPropertiesBase` and to `v1.AlterTableSetTblPropertiesSuite`. - Add a test for DSv2 `ALTER TABLE .. RENAME` to `v2.AlterTableSetTblPropertiesSuite`. ### Why are the changes needed? - To improve test coverage. - Align with other similar tests, eg: `AlterTableRename*` ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? - Add new UT & Update existed UT. - By running new test suites: ``` $ build/sbt -Phive -Phive-thriftserver "test:testOnly *AlterTableSetTblPropertiesSuite" $ build/sbt -Phive -Phive-thriftserver "test:testOnly *AlterTableSetTblPropertiesParserSuite" ``` ### Was this patch authored or co-authored using generative AI tooling? No. Closes #47018 from panbingkun/SPARK-48659. Authored-by: panbingkun <[email protected]> Signed-off-by: Wenchen Fan <[email protected]>
1 parent 3469ec6 commit cd8bf11

File tree

9 files changed

+263
-62
lines changed

9 files changed

+263
-62
lines changed

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1075,19 +1075,11 @@ class DDLParserSuite extends AnalysisTest {
10751075
ifExists = true))
10761076
}
10771077

1078-
// ALTER TABLE table_name SET TBLPROPERTIES ('comment' = new_comment);
10791078
// ALTER TABLE table_name UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key');
10801079
test("alter table: alter table properties") {
1081-
val sql1_table = "ALTER TABLE table_name SET TBLPROPERTIES ('test' = 'test', " +
1082-
"'comment' = 'new_comment')"
10831080
val sql2_table = "ALTER TABLE table_name UNSET TBLPROPERTIES ('comment', 'test')"
10841081
val sql3_table = "ALTER TABLE table_name UNSET TBLPROPERTIES IF EXISTS ('comment', 'test')"
10851082

1086-
comparePlans(
1087-
parsePlan(sql1_table),
1088-
SetTableProperties(
1089-
UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET TBLPROPERTIES", true),
1090-
Map("test" -> "test", "comment" -> "new_comment")))
10911083
comparePlans(
10921084
parsePlan(sql2_table),
10931085
UnsetTableProperties(
Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.sql.execution.command
19+
20+
import org.apache.spark.SparkThrowable
21+
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable}
22+
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
23+
import org.apache.spark.sql.catalyst.parser.ParseException
24+
import org.apache.spark.sql.catalyst.plans.logical.SetTableProperties
25+
import org.apache.spark.sql.test.SharedSparkSession
26+
27+
class AlterTableSetTblPropertiesParserSuite extends AnalysisTest with SharedSparkSession {

  /** Runs `sqlText` and returns the [[ParseException]] it is expected to raise. */
  private def parseException(sqlText: String): SparkThrowable =
    intercept[ParseException](sql(sqlText).collect())

  // ALTER TABLE table_name SET TBLPROPERTIES ('comment' = new_comment);
  test("alter table: alter table properties") {
    val setPropsSql = "ALTER TABLE table_name SET TBLPROPERTIES ('test' = 'test', " +
      "'comment' = 'new_comment')"
    // The statement should parse into a SetTableProperties node over an unresolved table.
    val expectedPlan = SetTableProperties(
      UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET TBLPROPERTIES", true),
      Map("test" -> "test", "comment" -> "new_comment"))
    comparePlans(parsePlan(setPropsSql), expectedPlan)
  }

  test("alter table - property values must be set") {
    val invalidSql = "ALTER TABLE my_tab SET TBLPROPERTIES('key_without_value', 'key_with_value'='x')"
    checkError(
      exception = parseException(invalidSql),
      errorClass = "_LEGACY_ERROR_TEMP_0035",
      parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
      // The reported error context spans the whole statement.
      context = ExpectedContext(
        fragment = invalidSql,
        start = 0,
        stop = 78))
  }
}
Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.sql.execution.command
19+
20+
import org.apache.spark.sql.{AnalysisException, QueryTest}
21+
import org.apache.spark.sql.catalyst.TableIdentifier
22+
23+
/**
 * This base suite contains unified tests for the `ALTER TABLE .. SET TBLPROPERTIES`
 * command that check V1 and V2 table catalogs. The tests that cannot run for all supported
 * catalogs are located in more specific test suites:
 *
 * - V2 table catalog tests:
 *   `org.apache.spark.sql.execution.command.v2.AlterTableSetTblPropertiesSuite`
 * - V1 table catalog tests:
 *   `org.apache.spark.sql.execution.command.v1.AlterTableSetTblPropertiesSuiteBase`
 * - V1 In-Memory catalog:
 *   `org.apache.spark.sql.execution.command.v1.AlterTableSetTblPropertiesSuite`
 * - V1 Hive External catalog:
 *   `org.apache.spark.sql.hive.execution.command.AlterTableSetTblPropertiesSuite`
 */
trait AlterTableSetTblPropertiesSuiteBase extends QueryTest with DDLCommandTestUtils {
  override val command = "ALTER TABLE .. SET TBLPROPERTIES"

  /**
   * Asserts that the properties of the table identified by `tableIdent` equal
   * `expectedTblProps`. Each concrete suite implements this against its own catalog.
   */
  def checkTblProps(tableIdent: TableIdentifier, expectedTblProps: Map[String, String]): Unit

  test("alter table set tblproperties") {
    withNamespaceAndTable("ns", "tbl") { t =>
      sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int) $defaultUsing")
      val ident = TableIdentifier("tbl", Some("ns"), Some(catalog))
      // A freshly created table carries no user-defined properties.
      checkTblProps(ident, Map.empty[String, String])

      // Set properties using the fully qualified table name.
      sql(s"ALTER TABLE $t SET TBLPROPERTIES ('k1' = 'v1', 'k2' = 'v2', 'k3' = 'v3')")
      checkTblProps(ident, Map("k1" -> "v1", "k2" -> "v2", "k3" -> "v3"))

      // Setting the same properties via the current namespace resolves to the same table.
      sql(s"USE $catalog.ns")
      sql(s"ALTER TABLE tbl SET TBLPROPERTIES ('k1' = 'v1', 'k2' = 'v2', 'k3' = 'v3')")
      checkTblProps(ident, Map("k1" -> "v1", "k2" -> "v2", "k3" -> "v3"))

      // Setting a subset of keys overwrites only those keys; the rest are preserved.
      sql(s"ALTER TABLE $t SET TBLPROPERTIES ('k1' = 'v1', 'k2' = 'v8')")
      checkTblProps(ident, Map("k1" -> "v1", "k2" -> "v8", "k3" -> "v3"))

      // table to alter does not exist
      checkError(
        exception = intercept[AnalysisException] {
          sql("ALTER TABLE does_not_exist SET TBLPROPERTIES ('winner' = 'loser')")
        },
        errorClass = "TABLE_OR_VIEW_NOT_FOUND",
        parameters = Map("relationName" -> "`does_not_exist`"),
        context = ExpectedContext(fragment = "does_not_exist", start = 12, stop = 25)
      )
    }
  }
}

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -107,18 +107,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
107107
stop = 98))
108108
}
109109

110-
test("alter table - property values must be set") {
111-
val sql = "ALTER TABLE my_tab SET TBLPROPERTIES('key_without_value', 'key_with_value'='x')"
112-
checkError(
113-
exception = parseException(sql),
114-
errorClass = "_LEGACY_ERROR_TEMP_0035",
115-
parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
116-
context = ExpectedContext(
117-
fragment = sql,
118-
start = 0,
119-
stop = 78))
120-
}
121-
122110
test("alter table unset properties - property values must NOT be set") {
123111
val sql = "ALTER TABLE my_tab UNSET TBLPROPERTIES('key_without_value', 'key_with_value'='x')"
124112
checkError(

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala

Lines changed: 0 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -327,10 +327,6 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
327327

328328
protected val reversedProperties = Seq(PROP_OWNER)
329329

330-
test("alter table: set properties (datasource table)") {
331-
testSetProperties(isDatasourceTable = true)
332-
}
333-
334330
test("alter table: unset properties (datasource table)") {
335331
testUnsetProperties(isDatasourceTable = true)
336332
}
@@ -1117,40 +1113,6 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
11171113
)
11181114
}
11191115

1120-
protected def testSetProperties(isDatasourceTable: Boolean): Unit = {
1121-
if (!isUsingHiveMetastore) {
1122-
assert(isDatasourceTable, "InMemoryCatalog only supports data source tables")
1123-
}
1124-
val catalog = spark.sessionState.catalog
1125-
val tableIdent = TableIdentifier("tab1", Some("dbx"))
1126-
createDatabase(catalog, "dbx")
1127-
createTable(catalog, tableIdent, isDatasourceTable)
1128-
def getProps: Map[String, String] = {
1129-
if (isUsingHiveMetastore) {
1130-
normalizeCatalogTable(catalog.getTableMetadata(tableIdent)).properties
1131-
} else {
1132-
catalog.getTableMetadata(tableIdent).properties
1133-
}
1134-
}
1135-
assert(getProps.isEmpty)
1136-
// set table properties
1137-
sql("ALTER TABLE dbx.tab1 SET TBLPROPERTIES ('andrew' = 'or14', 'kor' = 'bel')")
1138-
assert(getProps == Map("andrew" -> "or14", "kor" -> "bel"))
1139-
// set table properties without explicitly specifying database
1140-
catalog.setCurrentDatabase("dbx")
1141-
sql("ALTER TABLE tab1 SET TBLPROPERTIES ('kor' = 'belle', 'kar' = 'bol')")
1142-
assert(getProps == Map("andrew" -> "or14", "kor" -> "belle", "kar" -> "bol"))
1143-
// table to alter does not exist
1144-
checkError(
1145-
exception = intercept[AnalysisException] {
1146-
sql("ALTER TABLE does_not_exist SET TBLPROPERTIES ('winner' = 'loser')")
1147-
},
1148-
errorClass = "TABLE_OR_VIEW_NOT_FOUND",
1149-
parameters = Map("relationName" -> "`does_not_exist`"),
1150-
context = ExpectedContext(fragment = "does_not_exist", start = 12, stop = 25)
1151-
)
1152-
}
1153-
11541116
protected def testUnsetProperties(isDatasourceTable: Boolean): Unit = {
11551117
if (!isUsingHiveMetastore) {
11561118
assert(isDatasourceTable, "InMemoryCatalog only supports data source tables")
Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.sql.execution.command.v1
19+
20+
import org.apache.spark.sql.catalyst.TableIdentifier
21+
import org.apache.spark.sql.execution.command
22+
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
23+
24+
/**
 * This base suite contains unified tests for the `ALTER TABLE .. SET TBLPROPERTIES`
 * command that check V1 table catalogs. The tests that cannot run for all V1 catalogs
 * are located in more specific test suites:
 *
 * - V1 In-Memory catalog:
 *   `org.apache.spark.sql.execution.command.v1.AlterTableSetTblPropertiesSuite`
 * - V1 Hive External catalog:
 *   `org.apache.spark.sql.hive.execution.command.AlterTableSetTblPropertiesSuite`
 */
trait AlterTableSetTblPropertiesSuiteBase extends command.AlterTableSetTblPropertiesSuiteBase {

  // The V1 session catalog that backs property lookups in this suite.
  private[sql] lazy val sessionCatalog = spark.sessionState.catalog

  // Whether the suite runs against a Hive metastore (vs. the in-memory catalog).
  private def isUsingHiveMetastore: Boolean =
    spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "hive"

  // Drops metastore-injected keys (e.g. `transient_lastDdlTime`) before comparison.
  private def normalizeTblProps(props: Map[String, String]): Map[String, String] =
    props.filterNot { case (key, _) => key == "transient_lastDdlTime" }

  private def getTableProperties(tableIdent: TableIdentifier): Map[String, String] =
    sessionCatalog.getTableMetadata(tableIdent).properties

  override def checkTblProps(
      tableIdent: TableIdentifier,
      expectedTblProps: Map[String, String]): Unit = {
    val actualTblProps = getTableProperties(tableIdent)
    if (isUsingHiveMetastore) {
      // Hive adds bookkeeping properties on every DDL; normalize them away first.
      assert(normalizeTblProps(actualTblProps) == expectedTblProps)
    } else {
      assert(actualTblProps == expectedTblProps)
    }
  }
}
60+
61+
/**
 * The class contains tests for the `ALTER TABLE .. SET TBLPROPERTIES` command to check
 * the V1 In-Memory table catalog.
 */
class AlterTableSetTblPropertiesSuite
  extends AlterTableSetTblPropertiesSuiteBase with CommandSuiteBase
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.sql.execution.command.v2
19+
20+
import scala.jdk.CollectionConverters._
21+
22+
import org.apache.spark.sql.catalyst.TableIdentifier
23+
import org.apache.spark.sql.connector.catalog.{Identifier, Table}
24+
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.CatalogHelper
25+
import org.apache.spark.sql.execution.command
26+
27+
/**
 * The class contains tests for the `ALTER TABLE .. SET TBLPROPERTIES` command to
 * check V2 table catalogs.
 */
class AlterTableSetTblPropertiesSuite
  extends command.AlterTableSetTblPropertiesSuiteBase with CommandSuiteBase {

  // Drops catalog-injected keys (`provider`, `owner`) before comparison.
  private def normalizeTblProps(props: Map[String, String]): Map[String, String] =
    props.filterNot { case (key, _) => key == "provider" || key == "owner" }

  // Resolves `tableIdent` via the catalog manager and loads the V2 table metadata.
  private def getTableMetadata(tableIdent: TableIdentifier): Table = {
    val nameParts = tableIdent.nameParts
    val v2Catalog = spark.sessionState.catalogManager.catalog(nameParts.head).asTableCatalog
    val namespace = nameParts.drop(1).init.toArray
    v2Catalog.loadTable(Identifier.of(namespace, nameParts.last))
  }

  override def checkTblProps(
      tableIdent: TableIdentifier,
      expectedTblProps: Map[String, String]): Unit = {
    val actualTblProps = getTableMetadata(tableIdent).properties.asScala.toMap
    assert(normalizeTblProps(actualTblProps) === expectedTblProps)
  }
}

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -154,10 +154,6 @@ class HiveDDLSuite
154154
fs.exists(filesystemPath)
155155
}
156156

157-
test("alter table: set properties") {
158-
testSetProperties(isDatasourceTable = false)
159-
}
160-
161157
test("alter table: unset properties") {
162158
testUnsetProperties(isDatasourceTable = false)
163159
}
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.sql.hive.execution.command
19+
20+
import org.apache.spark.sql.execution.command.v1
21+
22+
/**
 * The class contains tests for the `ALTER TABLE .. SET TBLPROPERTIES` command to check
 * the V1 Hive external table catalog. All test logic lives in the shared V1 base suite;
 * this class only binds it to the Hive command test environment.
 */
class AlterTableSetTblPropertiesSuite
  extends v1.AlterTableSetTblPropertiesSuiteBase with CommandSuiteBase

0 commit comments

Comments
 (0)