From 5439710a0bea6b927aec533eaa841c4237364841 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sun, 13 Dec 2020 09:52:44 +0300
Subject: [PATCH 1/5] Remove LOCATION from the test "universal type conversions of partition values"

---
 .../sql/execution/command/AlterTableAddPartitionSuiteBase.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
index 9d2c58b7e4351..2457bb9f8b57c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
@@ -154,7 +154,7 @@ trait AlterTableAddPartitionSuiteBase extends QueryTest with SQLTestUtils {
           | part8 = '2020-11-23',
           | part9 = '2020-11-23 22:13:10.123456'
           |""".stripMargin
-      sql(s"ALTER TABLE $t ADD PARTITION ($partSpec) LOCATION 'loc1'")
+      sql(s"ALTER TABLE $t ADD PARTITION ($partSpec)")
       val expected = Map(
         "part0" -> "-1",
         "part1" -> "0",

From 4da34f207eb4a26a7d6295d5ad304707b8e23cd1 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sun, 13 Dec 2020 10:39:07 +0300
Subject: [PATCH 2/5] Fix the test "create/drop partitions in managed tables with location"

---
 .../sql/catalyst/catalog/ExternalCatalogSuite.scala | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
index 55712d0da518d..1db8935706f5a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -408,8 +408,8 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
       partitionColumnNames = Seq("partCol1", "partCol2"))
     catalog.createTable(table, ignoreIfExists = false)
 
-    val newLocationPart1 = newUriForDatabase()
-    val newLocationPart2 = newUriForDatabase()
+    val newLocationPart1 = newUriForPartition(Seq("p1=1", "p2=2"))
+    val newLocationPart2 = newUriForPartition(Seq("p1=3", "p2=4"))
 
     val partition1 =
       CatalogTablePartition(Map("partCol1" -> "1", "partCol2" -> "2"),
@@ -991,6 +991,13 @@ abstract class CatalogTestUtils {
 
   def newUriForDatabase(): URI = new URI(Utils.createTempDir().toURI.toString.stripSuffix("/"))
 
+  def newUriForPartition(parts: Seq[String]): URI = {
+    val path = parts.foldLeft(Utils.createTempDir()) {
+      case (parent, part) => new java.io.File(parent, part)
+    }
+    new URI(path.toURI.toString.stripSuffix("/"))
+  }
+
   def newDb(name: String): CatalogDatabase = {
     CatalogDatabase(name, name + " description", newUriForDatabase(), Map.empty)
   }

From 8916218ed62775dce96f22379a6004618ef4ee76 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sun, 13 Dec 2020 11:04:33 +0300
Subject: [PATCH 3/5] Change paths in "add/drop partition with location - managed table"

---
 .../org/apache/spark/sql/hive/execution/HiveDDLSuite.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 9f75f8797fe37..a6c40851b1c4e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -601,8 +601,8 @@ class HiveDDLSuite
     val tab = "tab_with_partitions"
     withTempDir { tmpDir =>
       val basePath = new File(tmpDir.getCanonicalPath)
-      val part1Path = new File(basePath + "/part1")
-      val part2Path = new File(basePath + "/part2")
+      val part1Path = new File(new File(basePath, "part10"), "part11")
+      val part2Path = new File(new File(basePath, "part20"), "part21")
       val dirSet = part1Path :: part2Path :: Nil
 
       // Before data insertion, all the directory are empty

From 4a9ababfd3fca1890fdf730b949a934ef31d8abd Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sun, 13 Dec 2020 11:54:43 +0300
Subject: [PATCH 4/5] Fix StatisticsSuite

---
 .../org/apache/spark/sql/hive/StatisticsSuite.scala | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
index 2e98a76c52488..5357f4b63d794 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
@@ -983,12 +983,16 @@ class StatisticsSuite extends StatisticsCollectionTestBase with TestHiveSingleto
       assert(fetched1.get.colStats.size == 2)
 
       withTempPaths(numPaths = 2) { case Seq(dir1, dir2) =>
-        val file1 = new File(dir1 + "/data")
+        val partDir1 = new File(new File(dir1, "ds=2008-04-09"), "hr=11")
+        val file1 = new File(partDir1, "data")
+        file1.getParentFile.mkdirs()
         Utils.tryWithResource(new PrintWriter(file1)) { writer =>
           writer.write("1,a")
         }
 
-        val file2 = new File(dir2 + "/data")
+        val partDir2 = new File(new File(dir2, "ds=2008-04-09"), "hr=12")
+        val file2 = new File(partDir2, "data")
+        file2.getParentFile.mkdirs()
         Utils.tryWithResource(new PrintWriter(file2)) { writer =>
           writer.write("1,a")
         }
@@ -997,8 +1001,8 @@
         sql(
           s"""
             |ALTER TABLE $table ADD
-            |PARTITION (ds='2008-04-09', hr='11') LOCATION '${dir1.toURI.toString}'
-            |PARTITION (ds='2008-04-09', hr='12') LOCATION '${dir2.toURI.toString}'
+            |PARTITION (ds='2008-04-09', hr='11') LOCATION '${partDir1.toURI.toString}'
+            |PARTITION (ds='2008-04-09', hr='12') LOCATION '${partDir2.toURI.toString}'
           """.stripMargin)
         if (autoUpdate) {
           val fetched2 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = None)

From e9a613303ef8f4476db464f02578493ada1cf49d Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Mon, 14 Dec 2020 08:09:58 +0300
Subject: [PATCH 5/5] Update sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala

Co-authored-by: Hyukjin Kwon
---
 .../spark/sql/catalyst/catalog/ExternalCatalogSuite.scala | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
index 1db8935706f5a..d310538e302de 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -992,9 +992,7 @@ abstract class CatalogTestUtils {
 
   def newUriForDatabase(): URI = new URI(Utils.createTempDir().toURI.toString.stripSuffix("/"))
 
   def newUriForPartition(parts: Seq[String]): URI = {
-    val path = parts.foldLeft(Utils.createTempDir()) {
-      case (parent, part) => new java.io.File(parent, part)
-    }
+    val path = parts.foldLeft(Utils.createTempDir())(new java.io.File(_, _))
     new URI(path.toURI.toString.stripSuffix("/"))
   }
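
Note: for reference, a standalone sketch of the newUriForPartition helper added in PATCH 2/5 and simplified in PATCH 5/5. Spark's internal Utils.createTempDir() is approximated with java.nio.file.Files.createTempDirectory, and the object/method names are illustrative only; the suite's version differs in those details.

import java.io.File
import java.net.URI
import java.nio.file.Files

object NewUriForPartitionSketch {
  // Build a Hive-style partition URI such as .../p1=1/p2=2 under a fresh temp dir.
  // Only the base temp dir exists on disk; the helper merely composes the URI,
  // and the catalog is expected to create the sub-directories when the partition is added.
  def newUriForPartition(parts: Seq[String]): URI = {
    val base = Files.createTempDirectory("partition-").toFile
    val path = parts.foldLeft(base)(new File(_, _))
    new URI(path.toURI.toString.stripSuffix("/"))
  }

  def main(args: Array[String]): Unit = {
    // Prints something like file:/tmp/partition-123/p1=1/p2=2
    println(newUriForPartition(Seq("p1=1", "p2=2")))
  }
}

The foldLeft form (new java.io.File(_, _)) used after PATCH 5/5 is just the placeholder-syntax shorthand for the case (parent, part) => new java.io.File(parent, part) function it replaces.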
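
Note: similarly, a minimal sketch of the directory layout the PATCH 4/5 change to StatisticsSuite relies on: each partition's data file is written under a ds=.../hr=... sub-directory, and that sub-directory becomes the partition LOCATION. Utils.tryWithResource is approximated with try/finally, and writePartitionFile is a hypothetical helper name.

import java.io.{File, PrintWriter}
import java.nio.file.Files

object PartitionDirLayoutSketch {
  def writePartitionFile(baseDir: File, ds: String, hr: String, row: String): File = {
    val partDir = new File(new File(baseDir, s"ds=$ds"), s"hr=$hr")
    val dataFile = new File(partDir, "data")
    dataFile.getParentFile.mkdirs()                 // create the ds=/hr= directories first
    val writer = new PrintWriter(dataFile)
    try writer.write(row) finally writer.close()
    dataFile
  }

  def main(args: Array[String]): Unit = {
    val dir1 = Files.createTempDirectory("stats-").toFile
    val file1 = writePartitionFile(dir1, "2008-04-09", "11", "1,a")
    // file1.getParentFile is what would be passed as
    // LOCATION for PARTITION (ds='2008-04-09', hr='11') in the ALTER TABLE statement.
    println(file1.getParentFile.toURI)
  }
}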