Skip to content

Commit e017763

Browse files
felixcheung authored and shivaram committed
[SPARK-16184][SPARKR] conf API for SparkSession
## What changes were proposed in this pull request?

Add `conf` method to get Runtime Config from SparkSession

## How was this patch tested?

unit tests, manual tests

This is how it works in sparkR shell:
```
SparkSession available as 'spark'.
> conf()
$hive.metastore.warehouse.dir
[1] "file:/opt/spark-2.0.0-bin-hadoop2.6/R/spark-warehouse"

$spark.app.id
[1] "local-1466749575523"

$spark.app.name
[1] "SparkR"

$spark.driver.host
[1] "10.0.2.1"

$spark.driver.port
[1] "45629"

$spark.executorEnv.LD_LIBRARY_PATH
[1] "$LD_LIBRARY_PATH:/usr/lib/R/lib:/usr/lib/x86_64-linux-gnu:/usr/lib/jvm/default-java/jre/lib/amd64/server"

$spark.executor.id
[1] "driver"

$spark.home
[1] "/opt/spark-2.0.0-bin-hadoop2.6"

$spark.master
[1] "local[*]"

$spark.sql.catalogImplementation
[1] "hive"

$spark.submit.deployMode
[1] "client"

> conf("spark.master")
$spark.master
[1] "local[*]"
```

Author: Felix Cheung <[email protected]>

Closes #13885 from felixcheung/rconf.

(cherry picked from commit 30b182b)
Signed-off-by: Shivaram Venkataraman <[email protected]>
1 parent b03b097 commit e017763

File tree

4 files changed

+57
-10
lines changed

4 files changed

+57
-10
lines changed

R/pkg/NAMESPACE

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ export("sparkR.session")
1010
export("sparkR.init")
1111
export("sparkR.stop")
1212
export("sparkR.session.stop")
13+
export("sparkR.conf")
1314
export("print.jobj")
1415

1516
export("sparkRSQL.init",

R/pkg/R/SQLContext.R

Lines changed: 46 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -110,11 +110,53 @@ infer_type <- function(x) {
110110
}
111111
}
112112

#' Get Runtime Config from the current active SparkSession
#'
#' Get Runtime Config from the current active SparkSession.
#' To change SparkSession Runtime Config, please see `sparkR.session()`.
#'
#' @param key (optional) The key of the config to get, if omitted, all config is returned
#' @param defaultValue (optional) The default value of the config to return if the config is not
#'        set, if omitted, the call fails if the config key is not set
#' @return a list of config values with keys as their names
#' @rdname sparkR.conf
#' @name sparkR.conf
#' @export
#' @examples
#'\dontrun{
#' sparkR.session()
#' allConfigs <- sparkR.conf()
#' masterValue <- unlist(sparkR.conf("spark.master"))
#' namedConfig <- sparkR.conf("spark.executor.memory", "0g")
#' }
#' @note sparkR.conf since 2.0.0
sparkR.conf <- function(key, defaultValue) {
  sparkSession <- getSparkSession()
  if (missing(key)) {
    # No key given: fetch the entire session config map from the JVM side.
    m <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "getSessionConf", sparkSession)
    as.list(m, all.names = TRUE, sorted = TRUE)
  } else {
    conf <- callJMethod(sparkSession, "conf")
    value <- if (missing(defaultValue)) {
      # Without a default, an unset key surfaces as a NoSuchElementException
      # on the JVM side; translate that into a friendly R error. Match the
      # exception name literally (fixed = TRUE) since it contains dots.
      tryCatch(callJMethod(conf, "get", key),
               error = function(e) {
                 if (grepl("java.util.NoSuchElementException", as.character(e), fixed = TRUE)) {
                   stop(paste0("Config '", key, "' is not set"))
                 } else {
                   stop(paste0("Unknown error: ", as.character(e)))
                 }
               })
    } else {
      callJMethod(conf, "get", key, defaultValue)
    }
    # Return a named list so a single-key lookup has the same shape as the
    # all-config result.
    l <- setNames(list(value), key)
    l
  }
}
156+
157+
# Resolve the default data source name from the session runtime config,
# falling back to parquet when "spark.sql.sources.default" is not set.
getDefaultSqlSource <- function() {
  sparkR.conf("spark.sql.sources.default",
              "org.apache.spark.sql.parquet")[["spark.sql.sources.default"]]
}
119161

120162
#' Create a SparkDataFrame

R/pkg/inst/tests/testthat/test_sparkSQL.R

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2365,7 +2365,7 @@ test_that("randomSplit", {
23652365
expect_true(all(sapply(abs(counts / num - weights / sum(weights)), function(e) { e < 0.05 })))
23662366
})
23672367

2368-
test_that("Change config on SparkSession", {
2368+
test_that("Setting and getting config on SparkSession", {
23692369
# first, set it to a random but known value
23702370
conf <- callJMethod(sparkSession, "conf")
23712371
property <- paste0("spark.testing.", as.character(runif(1)))
@@ -2378,17 +2378,17 @@ test_that("Change config on SparkSession", {
23782378
names(l) <- property
23792379
sparkR.session(sparkConfig = l)
23802380

2381-
conf <- callJMethod(sparkSession, "conf")
2382-
newValue <- callJMethod(conf, "get", property, "")
2381+
newValue <- unlist(sparkR.conf(property, ""), use.names = FALSE)
23832382
expect_equal(value2, newValue)
23842383

23852384
value <- as.character(runif(1))
23862385
sparkR.session(spark.app.name = "sparkSession test", spark.testing.r.session.r = value)
2387-
conf <- callJMethod(sparkSession, "conf")
2388-
appNameValue <- callJMethod(conf, "get", "spark.app.name", "")
2389-
testValue <- callJMethod(conf, "get", "spark.testing.r.session.r", "")
2386+
allconf <- sparkR.conf()
2387+
appNameValue <- allconf[["spark.app.name"]]
2388+
testValue <- allconf[["spark.testing.r.session.r"]]
23902389
expect_equal(appNameValue, "sparkSession test")
23912390
expect_equal(testValue, value)
2391+
expect_error(sparkR.conf("completely.dummy"), "Config 'completely.dummy' is not set")
23922392
})
23932393

23942394
test_that("enableHiveSupport on SparkSession", {

sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -71,6 +71,10 @@ private[sql] object SQLUtils extends Logging {
7171
}
7272
}
7373

74+
/** Expose the active session's full runtime config to the R side as a Java map. */
def getSessionConf(spark: SparkSession): JMap[String, String] =
  spark.conf.getAll.asJava
77+
7478
def getJavaSparkContext(spark: SparkSession): JavaSparkContext = {
7579
new JavaSparkContext(spark.sparkContext)
7680
}

0 commit comments

Comments
 (0)