-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-19261][SQL] Alter add columns for Hive serde and some datasource tables #16626
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
52ca902
f498fa6
522443e
1af2654
ec57ee9
ec74849
8fca889
4a17529
9699128
9860e5c
dfff364
9f23254
180092f
5a8aa80
d3860e6
55577aa
6fa913a
7231efe
e4e9ecf
75e7441
9847030
1a383bb
f994ce9
5bf7360
599c45e
b3edfea
7d8a515
e895278
e171ac4
4391edd
a3fef12
1eb7cd3
04ce8f4
7d8437d
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
- Loading branch information
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.expressions._ | |
| import org.apache.spark.sql.catalyst.parser.CatalystSqlParser | ||
| import org.apache.spark.sql.catalyst.plans.PlanTest | ||
| import org.apache.spark.sql.catalyst.plans.logical.{Range, SubqueryAlias, View} | ||
| import org.apache.spark.sql.types.IntegerType | ||
| import org.apache.spark.sql.types.{IntegerType, StructField, StructType} | ||
|
|
||
| class InMemorySessionCatalogSuite extends SessionCatalogSuite { | ||
| protected val utils = new CatalogTestUtils { | ||
|
|
@@ -452,14 +452,23 @@ abstract class SessionCatalogSuite extends PlanTest { | |
| } | ||
|
|
||
| test("alter table add columns") { | ||
| val externalCatalog = newBasicCatalog() | ||
| val sessionCatalog = new SessionCatalog(externalCatalog) | ||
| sessionCatalog.createTable(newTable("t1", "default"), ignoreIfExists = false) | ||
| val oldTab = externalCatalog.getTable("default", "t1") | ||
| sessionCatalog.alterTableSchema(TableIdentifier("t1", Some("default")), | ||
| oldTab.schema.add("c3", IntegerType)) | ||
| val newTab = externalCatalog.getTable("default", "t1") | ||
| assert(newTab.schema.equals(oldTab.schema.add("c3", IntegerType))) | ||
| withBasicCatalog { sessionCatalog => | ||
| sessionCatalog.createTable(newTable("t1", "default"), ignoreIfExists = false) | ||
| val oldTab = sessionCatalog.externalCatalog.getTable("default", "t1") | ||
| sessionCatalog.alterTableSchema(TableIdentifier("t1", Some("default")), | ||
| oldTab.schema.add("c3", IntegerType)) | ||
| val newTab = sessionCatalog.externalCatalog.getTable("default", "t1") | ||
| if (sessionCatalog.externalCatalog.isInstanceOf[InMemoryCatalog]) { | ||
| assert(newTab.schema.toString == oldTab.schema.add("c3", IntegerType).toString) | ||
|
||
| } else { | ||
| // HiveExternalCatalog will always arrange the partition columns to the end | ||
|
||
| val oldTabSchema = StructType(oldTab.schema.take( | ||
| oldTab.schema.length - oldTab.partitionColumnNames.length) ++ | ||
| Seq(StructField("c3", IntegerType)) ++ | ||
| oldTab.schema.takeRight(oldTab.partitionColumnNames.length)) | ||
| assert(newTab.schema.toString == oldTabSchema.toString) | ||
| } | ||
| } | ||
| } | ||
|
|
||
| test("get table") { | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -165,6 +165,28 @@ class InMemoryCatalogedDDLSuite extends DDLSuite with SharedSQLContext with Befo | |
| assert(e.contains("Hive support is required to CREATE Hive TABLE (AS SELECT)")) | ||
| } | ||
| } | ||
|
|
||
| Seq("true", "false").foreach { caseSensitive => | ||
| test(s"alter table add columns with existing column name - caseSensitive $caseSensitive") { | ||
| withSQLConf(("spark.sql.caseSensitive", caseSensitive)) { | ||
| withTable("t1") { | ||
| sql("CREATE TABLE t1 (c1 int) USING PARQUET") | ||
| if (caseSensitive == "false") { | ||
| val e = intercept[AnalysisException] { | ||
| sql("ALTER TABLE t1 ADD COLUMNS (C1 string)") | ||
| }.getMessage | ||
| assert(e.contains("Found duplicate column(s)")) | ||
| } else { | ||
| // Hive catalog will still complain that c1 is a duplicate column name because Hive | ||
| // identifiers are case-insensitive. | ||
| sql("ALTER TABLE t1 ADD COLUMNS (C1 string)") | ||
| assert(sql("SELECT * FROM t1").schema | ||
| .equals(new StructType().add("c1", IntegerType).add("C1", StringType))) | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
|
|
||
| abstract class DDLSuite extends QueryTest with SQLTestUtils { | ||
|
|
@@ -2269,26 +2291,6 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils { | |
| } | ||
| } | ||
|
|
||
| Seq("true", "false").foreach { caseSensitive => | ||
| test(s"alter table add columns with existing column name - caseSensitive $caseSensitive") { | ||
| withSQLConf(("spark.sql.caseSensitive", caseSensitive)) { | ||
| withTable("t1") { | ||
| sql("CREATE TABLE t1 (c1 int) USING PARQUET") | ||
| if (caseSensitive == "false") { | ||
| val e = intercept[AnalysisException] { | ||
| sql("ALTER TABLE t1 ADD COLUMNS (C1 string)") | ||
| }.getMessage | ||
| assert(e.contains("Found duplicate column(s)")) | ||
| } else { | ||
| sql("ALTER TABLE t1 ADD COLUMNS (C1 string)") | ||
| assert(sql("SELECT * FROM t1").schema | ||
| .equals(new StructType().add("c1", IntegerType).add("C1", StringType))) | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
|
|
||
| test("alter table add columns to table referenced by a view") { | ||
|
||
| withTable("t1") { | ||
| withView("v1") { | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -1940,4 +1940,25 @@ class HiveDDLSuite | |
| assert(e.contains("Found duplicate column(s)")) | ||
| } | ||
| } | ||
|
|
||
| Seq("true", "false").foreach { caseSensitive => | ||
| test(s"alter table add columns with existing column name - caseSensitive $caseSensitive") { | ||
| withSQLConf(("spark.sql.caseSensitive", caseSensitive)) { | ||
| withTable("t1") { | ||
| sql("CREATE TABLE t1 (c1 int) USING PARQUET") | ||
| if (caseSensitive == "false") { | ||
|
||
| val e = intercept[AnalysisException] { | ||
| sql("ALTER TABLE t1 ADD COLUMNS (C1 string)") | ||
| }.getMessage | ||
| assert(e.contains("Found duplicate column(s)")) | ||
| } else { | ||
| val e = intercept[AnalysisException] { | ||
| sql("ALTER TABLE t1 ADD COLUMNS (C1 string)") | ||
| }.getMessage | ||
| assert(e.contains("HiveException")) | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
|
||
| } | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Also add a negative test case for dropping columns, although we do not support it now.