diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 3d43e3c367aa..e7320b2c46ad 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -53,7 +53,7 @@ private[spark] class DiskBlockManager(conf: SparkConf, deleteFilesOnStop: Boolea
   /** Looks up a file by hashing it into one of our local subdirectories. */
   // This method should be kept in sync with
   // org.apache.spark.network.shuffle.ExternalShuffleBlockResolver#getFile().
-  def getFile(filename: String): File = {
+  private def getFile(filename: String): File = {
     // Figure out which local directory it hashes to, and which subdirectory in that
     val hash = Utils.nonNegativeHash(filename)
     val dirId = hash % localDirs.length
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
index 95d70479ef01..e0b57bb94a8b 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
@@ -100,7 +100,7 @@ private[spark] class DiskStore(
   }

   def getBytes(blockId: BlockId): BlockData = {
-    val file = diskManager.getFile(blockId.name)
+    val file = diskManager.getFile(blockId)
     val blockSize = getSize(blockId)

     securityManager.getIOEncryptionKey() match {
@@ -116,7 +116,7 @@ private[spark] class DiskStore(
   def remove(blockId: BlockId): Boolean = {
     blockSizes.remove(blockId.name)
-    val file = diskManager.getFile(blockId.name)
+    val file = diskManager.getFile(blockId)
     if (file.exists()) {
       val ret = file.delete()
       if (!ret) {
@@ -129,7 +129,7 @@ private[spark] class DiskStore(
   def contains(blockId: BlockId): Boolean = {
-    val file = diskManager.getFile(blockId.name)
+    val file = diskManager.getFile(blockId)
     file.exists()
   }
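The hunks above narrow the `String`-keyed `DiskBlockManager.getFile` to `private`, while the call sites in `DiskStore` switch to passing the `BlockId` itself. The public `BlockId`-keyed entry point they resolve against is not shown in this diff; the following is a hedged, self-contained sketch of that shape, with stand-in names (`BlockId`, `DiskBlockManagerSketch`, `subDirsPerLocalDir`, the hash expression) rather than the real Spark implementation:

```scala
import java.io.File

// Stand-in for Spark's BlockId; only the canonical file name matters here.
final case class BlockId(name: String)

// Sketch of the pattern in the diff: the String-keyed lookup becomes private,
// so the BlockId overload is the only public way to map a block to a file.
class DiskBlockManagerSketch(localDirs: Array[File], subDirsPerLocalDir: Int = 64) {

  // Private: keeps the hash-to-subdirectory mapping in one place.
  private def getFile(filename: String): File = {
    val hash = filename.hashCode & Integer.MAX_VALUE        // stand-in for Utils.nonNegativeHash
    val dirId = hash % localDirs.length
    val subDirId = (hash / localDirs.length) % subDirsPerLocalDir
    new File(new File(localDirs(dirId), "%02x".format(subDirId)), filename)
  }

  // Public overload keyed by BlockId, matching how DiskStore now calls it.
  def getFile(blockId: BlockId): File = getFile(blockId.name)
}
```

Routing callers through the `BlockId` overload keeps the filename-hashing detail encapsulated in one class, so external code cannot construct file paths from raw strings that might drift out of sync with the block naming scheme.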