diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index c4edadba278f..4514675c56f3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -985,7 +985,15 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
       } else {
         CreateTable(tableDescWithPartitionColNames, mode, Some(q))
       }
-      case None => CreateTable(tableDesc, mode, None)
+      case None =>
+        // When creating a partitioned table, we must specify the data type for each partition column.
+        if (Option(ctx.partitionColumnNames).isDefined) {
+          val errorMessage = "Must specify a data type for each partition column while creating " +
+            "Hive partitioned table."
+          operationNotAllowed(errorMessage, ctx)
+        }
+
+        CreateTable(tableDesc, mode, None)
     }
   }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index a907fcae526c..cd8e2eaa2b4d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -548,6 +548,14 @@ class HiveDDLSuite
     assert(e.message == "Found duplicate column(s) in the table definition of `default`.`tbl`: `a`")
   }
 
+  test("create partitioned table without specifying data type for the partition columns") {
+    val e = intercept[AnalysisException] {
+      sql("CREATE TABLE tbl(a int) PARTITIONED BY (b) STORED AS parquet")
+    }
+    assert(e.message.contains("Must specify a data type for each partition column while creating " +
+      "Hive partitioned table."))
+  }
+
   test("add/drop partition with location - managed table") {
     val tab = "tab_with_partitions"
     withTempDir { tmpDir =>