create abstract class for add/replace columns classes
- extract schema column requirement checks into the method verifyColumnsToAddReplace
- class AlterTableAddColumnsCommand now extends the new abstract base class
- class AlterTableAddColumnsCommand uses verifyColumnsToAddReplace to check column requirements
  - slight change in behaviour here: before, DDLUtils.checkDataColNames (i.e. the column name format check) ran only on the columns to be added; now it runs on all columns, both those being added and those already in the table (a sketch contrasting the two calls follows the commit metadata below)
manu-olx committed Jun 4, 2019
commit b20dc955171223f66aba7dc4580c7edd17140df6
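
To make that behaviour change concrete, here is a minimal sketch contrasting the two calls. It is illustrative only: catalogTable and colsToAdd stand for the bindings in the command's run method shown in the diff, and the pre-existing bad column name is invented for the example.

    // Hypothetical setup, not from the commit: assume the table's existing
    // schema already contains a column name that the format-specific check
    // rejects (Parquet, for instance, rejects field names containing ",").
    //   catalogTable.schema contains StructField("bad,name", StringType)
    val colsToAdd = Seq(StructField("new_col", IntegerType))

    // Before this commit: only the incoming column names were checked, so
    // the pre-existing "bad,name" column slipped past this call.
    DDLUtils.checkDataColNames(catalogTable, colsToAdd.map(_.name))

    // After this commit: the union of incoming and existing columns is
    // checked, so the same ALTER TABLE ... ADD COLUMNS now fails as well.
    DDLUtils.checkDataColNames(
      catalogTable, (colsToAdd ++ catalogTable.schema).map(_.name))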
@@ -181,17 +181,35 @@ case class AlterTableRenameCommand(

}

+abstract class AlterTableAddReplaceColumnsCommandsBase extends RunnableCommand {
+  /**
+   * Ensure the columns to add/replace meet requirements.
+   */
+  protected def verifyColumnsToAddReplace(
+      table: TableIdentifier,
+      catalogTable: CatalogTable,
+      colsToVerify: Seq[StructField]): Unit = {
+
+    SchemaUtils.checkColumnNameDuplication(
+      colsToVerify.map(_.name),
+      "in the table definition of " + table.identifier,
+      conf.caseSensitiveAnalysis)
+
+    DDLUtils.checkDataColNames(catalogTable, colsToVerify.map(_.name))
+  }
+}

/**
 * A command that adds columns to a table.
 * The syntax of using this command in SQL is:
 * {{{
 *    ALTER TABLE table_identifier
 *    ADD COLUMNS (col_name data_type [COMMENT col_comment], ...);
 * }}}
 */
case class AlterTableAddColumnsCommand(
    table: TableIdentifier,
-    colsToAdd: Seq[StructField]) extends RunnableCommand {
+    colsToAdd: Seq[StructField]) extends AlterTableAddReplaceColumnsCommandsBase {
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.sessionState.catalog
    val catalogTable = verifyAlterTableAddColumn(sparkSession.sessionState.conf, catalog, table)
@@ -204,11 +222,7 @@ case class AlterTableAddColumnsCommand(
    }
    catalog.refreshTable(table)

-    SchemaUtils.checkColumnNameDuplication(
-      (colsToAdd ++ catalogTable.schema).map(_.name),
-      "in the table definition of " + table.identifier,
-      conf.caseSensitiveAnalysis)
-    DDLUtils.checkDataColNames(catalogTable, colsToAdd.map(_.name))
+    verifyColumnsToAddReplace(table, catalogTable, colsToAdd ++ catalogTable.schema)

    catalog.alterTableDataSchema(table, StructType(catalogTable.dataSchema ++ colsToAdd))
    Seq.empty[Row]
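
The commit title mentions add/replace columns classes, but only the ADD COLUMNS command is migrated onto the new base class here. For illustration, a hypothetical REPLACE COLUMNS command reusing the extracted check could look like the sketch below; the class name, its replace semantics, and the choice to verify only the incoming columns are assumptions, not part of this commit.

    // Hypothetical follow-up, not in this commit: a REPLACE COLUMNS command
    // built on the same base class.
    case class AlterTableReplaceColumnsCommand(
        table: TableIdentifier,
        colsToReplace: Seq[StructField]) extends AlterTableAddReplaceColumnsCommandsBase {

      override def run(sparkSession: SparkSession): Seq[Row] = {
        val catalog = sparkSession.sessionState.catalog
        val catalogTable = catalog.getTableMetadata(table)

        // Unlike ADD COLUMNS, replacing swaps out the entire data schema, so
        // only the incoming columns are passed to the shared check.
        verifyColumnsToAddReplace(table, catalogTable, colsToReplace)

        catalog.alterTableDataSchema(table, StructType(colsToReplace))
        Seq.empty[Row]
      }
    }

Such a command would get exactly the duplication and name-format checks that AlterTableAddColumnsCommand now inherits through verifyColumnsToAddReplace, which is the point of extracting the base class.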