fix test case.
dongjoon-hyun committed Feb 14, 2018
commit 46c8697b1981f57eeacb48bea31dec1e89f4e66a
@@ -20,14 +20,26 @@ package org.apache.spark.sql
import java.io.FileNotFoundException

import org.apache.hadoop.fs.Path
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.SparkException
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext

class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext {

class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext with BeforeAndAfterAll {
import testImplicits._

override def beforeAll(): Unit = {
super.beforeAll()
spark.sessionState.conf.setConf(SQLConf.ORC_IMPLEMENTATION, "native")
}

override def afterAll(): Unit = {
spark.sessionState.conf.unsetConf(SQLConf.ORC_IMPLEMENTATION)
super.afterAll()
}
dongjoon-hyun (Member, Author):

The test coverage is the same.
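
As an illustration of scoping the same setting per test instead of suite-wide, here is a minimal sketch assuming SQLTestUtils' withSQLConf helper is in scope for the suite; the test name and body are hypothetical:

    // Hypothetical per-test alternative (not the committed change): withSQLConf sets
    // the ORC implementation for the block below and restores the previous value afterwards.
    test("read and write ORC with the native implementation") {
      withSQLConf(SQLConf.ORC_IMPLEMENTATION.key -> "native") {
        // ... exercise the ORC read/write path here ...
      }
    }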


private val allFileBasedDataSources = Seq("orc", "parquet", "csv", "json", "text")
private val nameWithSpecialChars = "sp&cial%c hars"

@@ -33,6 +33,16 @@ import org.apache.spark.util.Utils
class FileStreamSinkSuite extends StreamTest {
import testImplicits._

override def beforeAll(): Unit = {
cloud-fan (Contributor), Feb 15, 2018:
nit: a simpler way to fix this

override val conf = super.conf.copy(SQLConf.ORC_IMPLEMENTATION -> "native")

dongjoon-hyun (Member, Author):
Hi, @cloud-fan.
I tested it, but that approach doesn't work in FileStreamSinkSuite.

super.beforeAll()
spark.sessionState.conf.setConf(SQLConf.ORC_IMPLEMENTATION, "native")
}

override def afterAll(): Unit = {
spark.sessionState.conf.unsetConf(SQLConf.ORC_IMPLEMENTATION)
super.afterAll()
}

test("unpartitioned writing and batch reading") {
val inputData = MemoryStream[Int]
val df = inputData.toDF()
@@ -207,6 +207,16 @@ class FileStreamSourceSuite extends FileStreamSourceTest {
.collect { case s @ StreamingRelation(dataSource, _, _) => s.schema }.head
}

override def beforeAll(): Unit = {
super.beforeAll()
spark.sessionState.conf.setConf(SQLConf.ORC_IMPLEMENTATION, "native")
}

override def afterAll(): Unit = {
spark.sessionState.conf.unsetConf(SQLConf.ORC_IMPLEMENTATION)
super.afterAll()
Member suggested:
    try {
      spark.sessionState.conf.unsetConf(SQLConf.ORC_IMPLEMENTATION)
    } finally {
      super.afterAll()
    }

dongjoon-hyun (Member, Author):
Thanks. Yep. It's done.
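
For reference, combining the suggested try/finally with the lines in this diff gives an afterAll of roughly this shape (a sketch, not necessarily the exact committed code):

    override def afterAll(): Unit = {
      try {
        spark.sessionState.conf.unsetConf(SQLConf.ORC_IMPLEMENTATION)
      } finally {
        // Run the parent suite's teardown even if unsetConf throws.
        super.afterAll()
      }
    }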

}

// ============= Basic parameter exists tests ================

test("FileStreamSource schema: no path") {