Skip to content
Closed
Prev Previous commit
Next Next commit
Do not import scala.collection.Map; use the fully qualified name instead
  • Loading branch information
yaooqinn committed Feb 1, 2019
commit 10ebff783f6f68d463b52ad45feba6b84e39a675
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ package org.apache.spark.sql.internal
import java.net.URL
import java.util.Locale

import scala.collection.Map
import scala.reflect.ClassTag
import scala.util.control.NonFatal

Expand All @@ -41,7 +40,9 @@ import org.apache.spark.util.{MutableURLClassLoader, Utils}
/**
* A class that holds all state shared across sessions in a given [[SQLContext]].
*/
private[sql] class SharedState(val sparkContext: SparkContext, initConfig: Map[String, String])
private[sql] class SharedState(
val sparkContext: SparkContext,
initConfig: scala.collection.Map[String, String])
extends Logging {
private val conf = sparkContext.conf.clone()
private val hadoopConf = new Configuration(sparkContext.hadoopConfiguration)
Expand Down Expand Up @@ -121,7 +122,7 @@ private[sql] class SharedState(val sparkContext: SparkContext, initConfig: Map[S
SessionCatalog.DEFAULT_DATABASE,
"default database",
CatalogUtils.stringToURI(warehousePath),
Map.empty[String, String])
Map())
// Create default database if it doesn't exist
if (!externalCatalog.databaseExists(SessionCatalog.DEFAULT_DATABASE)) {
// There may be another Spark application creating default database at the same time, here we
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ class SharedStateSuite extends SparkFunSuite {

test("the catalog should be determined at the very first") {
val conf = new SparkConf().setMaster("local").setAppName("SharedState Test")
val sc = new SparkContext(conf)
val sc = SparkContext.getOrCreate(conf)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this being used?

Copy link
Member

@gatorsmile gatorsmile Feb 1, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

// Create a spark context first
SparkContext.getOrCreate(conf)

val ss = SparkSession.builder().enableHiveSupport().getOrCreate()
assert(ss.sharedState.externalCatalog.unwrapped.getClass.getName ===
"org.apache.spark.sql.hive.HiveExternalCatalog", "The catalog should be hive ")
Expand Down