Commit eba4de6

Ilya Ganelin committed
[SPARK-5932] Updated spark.shuffle.file.buffer.kb
1 parent b809a78 commit eba4de6

4 files changed: +11 -5 lines changed


core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 8 additions & 2 deletions
@@ -480,7 +480,11 @@ private[spark] object SparkConf extends Logging {
       DeprecatedConfig("spark.kryoserializer.buffer.mb", "1.4",
         "Please use spark.kryoserializer.buffer instead."),
       DeprecatedConfig("spark.kryoserializer.buffer.max.mb", "1.4",
-        "Please use spark.kryoserializer.buffer.max instead."))
+        "Please use spark.kryoserializer.buffer.max instead."),
+      DeprecatedConfig("spark.shuffle.file.buffer.kb", "1.4",
+        "Please use spark.shuffle.file.buffer instead."))
+
+


     Map(configs.map { cfg => (cfg.key -> cfg) }:_*)
@@ -512,7 +516,9 @@ private[spark] object SparkConf extends Logging {
     "spark.kryoserializer.buffer" -> Seq(
       AlternateConfig("spark.kryoserializer.buffer.mb", "1.4")),
     "spark.kryoserializer.buffer.max" -> Seq(
-      AlternateConfig("spark.kryoserializer.buffer.max.mb", "1.4"))
+      AlternateConfig("spark.kryoserializer.buffer.max.mb", "1.4")),
+    "spark.shuffle.file.buffer" -> Seq(
+      AlternateConfig("spark.shuffle.file.buffer.kb", "1.4"))
   )

   /**
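
With the new DeprecatedConfig and AlternateConfig entries in place, code that reads the new key should still pick up a value supplied under the legacy key. A minimal sketch of that fallback, assuming a standalone SparkConf that does not load system properties (the demo object name is illustrative, not part of the commit):

    import org.apache.spark.SparkConf

    object DeprecatedBufferKeyDemo {
      def main(args: Array[String]): Unit = {
        // Keep the example self-contained: don't pull settings from system properties.
        val conf = new SparkConf(false)

        // Only the legacy key is set, in its old convention of a bare KiB count.
        conf.set("spark.shuffle.file.buffer.kb", "64")

        // Reading through the new key is expected to fall back to the registered
        // alternate; the size-typed getter treats a unitless value as KiB.
        val bufferKb = conf.getSizeAsKb("spark.shuffle.file.buffer", "32k")
        println(s"resolved shuffle file buffer: $bufferKb KiB") // expected: 64
      }
    }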

core/src/main/scala/org/apache/spark/shuffle/FileShuffleBlockManager.scala

Lines changed: 1 addition & 1 deletion
@@ -78,7 +78,7 @@ class FileShuffleBlockManager(conf: SparkConf)
   private val consolidateShuffleFiles =
     conf.getBoolean("spark.shuffle.consolidateFiles", false)

-  private val bufferSize = conf.getInt("spark.shuffle.file.buffer.kb", 32) * 1024
+  private val bufferSize = conf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024

   /**
    * Contains all the state related to a particular shuffle. This includes a pool of unused
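
The replacement getter returns a size in KiB parsed from a human-readable string, so multiplying by 1024 still yields the buffer size in bytes, and the "32k" default preserves the old 32 KiB behavior. A small sketch of that arithmetic, assuming a local SparkConf (the object name is illustrative):

    import org.apache.spark.SparkConf

    object BufferSizeArithmetic {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf(false)

        // Nothing set: the default "32k" parses to 32 KiB, so 32 * 1024 = 32768 bytes,
        // matching the previous getInt("spark.shuffle.file.buffer.kb", 32) * 1024.
        val defaultBytes = conf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024
        println(s"default: $defaultBytes bytes")

        // Values with units parse too: "1m" is 1024 KiB, giving 1048576 bytes.
        conf.set("spark.shuffle.file.buffer", "1m")
        val tunedBytes = conf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024
        println(s"tuned:   $tunedBytes bytes")
      }
    }

The same one-line change is applied to ExternalAppendOnlyMap and ExternalSorter below, so all three spill paths read the buffer size through the unit-aware key.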

core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala

Lines changed: 1 addition & 1 deletion
@@ -90,7 +90,7 @@ class ExternalAppendOnlyMap[K, V, C](
   // Number of bytes spilled in total
   private var _diskBytesSpilled = 0L

-  private val fileBufferSize = sparkConf.getInt("spark.shuffle.file.buffer.kb", 32) * 1024
+  private val fileBufferSize = sparkConf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024

   // Write metrics for current spill
   private var curWriteMetrics: ShuffleWriteMetrics = _

core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala

Lines changed: 1 addition & 1 deletion
@@ -97,7 +97,7 @@ private[spark] class ExternalSorter[K, V, C](

   private val conf = SparkEnv.get.conf
   private val spillingEnabled = conf.getBoolean("spark.shuffle.spill", true)
-  private val fileBufferSize = conf.getInt("spark.shuffle.file.buffer.kb", 32) * 1024
+  private val fileBufferSize = conf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024
   private val transferToEnabled = conf.getBoolean("spark.file.transferTo", true)

   // Size of object batches when reading/writing from serializers.
