Fix all other failures
HyukjinKwon committed Sep 27, 2018
commit c4c82ba2e859ea78b51dcb84c67e0d75cac39327
@@ -22,6 +22,7 @@ import java.net.URI
 import java.util.Locale
 
 import org.apache.hadoop.fs.Path
+import org.apache.hadoop.util.VersionInfo
 import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
@@ -2697,6 +2698,11 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
   }
 
   test(s"basic DDL using locale tr - caseSensitive $caseSensitive") {
+    // There seems to be a bug in handling non-ASCII characters in file names
+    // via File.toPath() on a few specific JDKs; HADOOP-12045 appears to have
+    // introduced that usage for local file systems, affecting Hadoop 2.8.0+ per the JIRA.
+    // See https://stackoverflow.com/questions/37409379/invalidpathexception-for-chinese-filename
+    assume(VersionInfo.getVersion < "2.8.0")
     withSQLConf(SQLConf.CASE_SENSITIVE.key -> s"$caseSensitive") {
       withLocale("tr") {
         val dbName = "DaTaBaSe_I"
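
The comment above describes a JDK-level quirk rather than a Spark bug: on JVMs started with a non-UTF-8 locale (for example `LANG=C`, which leaves `sun.jnu.encoding` on an ASCII charset), converting a `File` whose name contains non-ASCII characters into a `Path` can throw `InvalidPathException`. A minimal standalone sketch of that behavior (hypothetical repro, not part of the suite; the path and object name are illustrative):

```scala
import java.io.File
import java.nio.file.InvalidPathException

object ToPathRepro {
  def main(args: Array[String]): Unit = {
    // Non-ASCII file name; on an affected JDK/locale combination,
    // File.toPath() cannot map these characters and throws.
    val f = new File("/tmp/数据库")
    try {
      println(s"converted fine: ${f.toPath()}")
    } catch {
      case e: InvalidPathException =>
        println(s"InvalidPathException: ${e.getMessage}")
    }
  }
}
```

Since HADOOP-12045 appears to route local file system operations through `File.toPath()`, the tests using non-ASCII names are skipped on Hadoop 2.8.0 and later, where that code path is hit.
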
@@ -161,6 +161,7 @@ class HiveSparkSubmitSuite
   }
 
   test("SPARK-9757 Persist Parquet relation with decimal column") {
+    assume(VersionInfo.getVersion < "3.0.0", "Only Hive 2.3+ supports Hadoop 3+. See HIVE-16081.")
     val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
     val args = Seq(
       "--class", SPARK_9757.getClass.getName.stripSuffix("$"),
@@ -252,6 +253,7 @@ class HiveSparkSubmitSuite
   }
 
   test("SPARK-16901: set javax.jdo.option.ConnectionURL") {
+    assume(VersionInfo.getVersion < "3.0.0", "Only Hive 2.3+ supports Hadoop 3+. See HIVE-16081.")
     // In this test, we set javax.jdo.option.ConnectionURL and set metastore version to
     // 0.13. This test will make sure that javax.jdo.option.ConnectionURL will not be
     // overridden by hive's default settings when we create a HiveConf object inside
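
Both HiveSparkSubmitSuite tests above are gated by the same one-liner. ScalaTest's `assume` throws `TestCanceledException` when its predicate is false, so on a Hadoop 3+ build these tests report as canceled rather than failed, with the second argument as the reason. A minimal sketch of the pattern (hypothetical suite; only the `assume` line is taken from this change):

```scala
import org.apache.hadoop.util.VersionInfo
import org.scalatest.FunSuite

class VersionGatedSuite extends FunSuite {
  test("runs only against Hadoop builds below 3.0.0") {
    // Lexicographic string comparison, as in the change above; the second
    // argument becomes the "canceled" message in the test report.
    assume(VersionInfo.getVersion < "3.0.0",
      "Only Hive 2.3+ supports Hadoop 3+. See HIVE-16081.")
    // ... body exercising behavior unsupported on Hadoop 3+ ...
  }
}
```
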
@@ -24,6 +24,7 @@ import java.util.Date
 import scala.language.existentials
 
 import org.apache.hadoop.fs.Path
+import org.apache.hadoop.util.VersionInfo
 import org.apache.parquet.format.converter.ParquetMetadataConverter.NO_FILTER
 import org.apache.parquet.hadoop.ParquetFileReader
 import org.scalatest.BeforeAndAfterEach
@@ -397,6 +398,11 @@ class HiveDDLSuite
   }
 
   test("create Hive-serde table and view with unicode columns and comment") {
+    // There seems to be a bug in handling non-ASCII characters in file names
+    // via File.toPath() on a few specific JDKs; HADOOP-12045 appears to have
+    // introduced that usage for local file systems, affecting Hadoop 2.8.0+ per the JIRA.
+    // See https://stackoverflow.com/questions/37409379/invalidpathexception-for-chinese-filename
+    assume(VersionInfo.getVersion < "2.8.0")
     val catalog = spark.sessionState.catalog
     val tabName = "tab1"
     val viewName = "view1"

@@ -67,13 +67,6 @@ class Hive_2_1_DDLSuite extends SparkFunSuite with TestHiveSingleton with Before
     new HiveExternalCatalog(sparkConf, hadoopConf)
   }
 
-  override def afterEach: Unit = {
-    catalog.listTables("default").foreach { t =>
-      catalog.dropTable("default", t, true, false)
-    }
-    spark.sessionState.catalog.reset()
-  }
-
   override def afterAll(): Unit = {
     try {
       catalog = null
@@ -84,7 +77,14 @@
 
   private def test_2_1(title: String)(func: => Unit): Unit = test(title) {
     assume(VersionInfo.getVersion < "3.0.0", "Only Hive 2.3+ supports Hadoop 3+. See HIVE-16081.")
-    func
+    try {
+      func
+    } finally {
+      catalog.listTables("default").foreach { t =>
+        catalog.dropTable("default", t, true, false)
+      }
+      spark.sessionState.catalog.reset()
+    }
   }
 
   test_2_1("SPARK-21617: ALTER TABLE for non-compatible DataSource tables") {
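
This last hunk is presumably also why the `afterEach` override was removed earlier in the file: `BeforeAndAfterEach` runs `afterEach` even for tests that `assume` has canceled, so on Hadoop 3+ the suite would still attempt catalog cleanup for bodies that never ran. Folding the cleanup into `try`/`finally` after the gate runs it only when the test body actually executed. A reduced sketch of the idea (hypothetical suite and names; the gate is a stand-in for the Hadoop version check):

```scala
import org.scalatest.FunSuite

class GatedCleanupSuite extends FunSuite {
  // Wrapper that gates a test and cleans up only when the gate passed.
  private def gatedTest(title: String)(body: => Unit): Unit = test(title) {
    assume(sys.props.getOrElse("gate.enabled", "true").toBoolean) // hypothetical gate
    try {
      body
    } finally {
      // Per-test cleanup; unlike afterEach, this is never reached
      // when assume() cancels the test above.
      println(s"cleaning up after: $title")
    }
  }

  gatedTest("example") {
    println("test body")
  }
}
```
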