Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Avoid per-loop type cast
  • Loading branch information
caneGuy committed Aug 25, 2017
commit a02575e3bb3f5f7f2f2d9f7a0a51a1f162c12c4d
Original file line number Diff line number Diff line change
Expand Up @@ -298,6 +298,8 @@ package object config {
.internal()
.doc("The chunk size during writing out the bytes of ChunkedByteBuffer.")
.bytesConf(ByteUnit.BYTE)
.checkValue(_ <= Int.MaxValue, "The chunk size during writing out the bytes of" +
" ChunkedByteBuffer should not be larger than Int.MaxValue.")
.createWithDefault(64 * 1024 * 1024)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Add a `checkValue` to make sure the configured value does not exceed `Int.MaxValue`.


private[spark] val CHECKPOINT_COMPRESS =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ private[spark] class ChunkedByteBuffer(var chunks: Array[ByteBuffer]) {
// Chunk size in bytes
private val bufferWriteChunkSize =
Option(SparkEnv.get).map(_.conf.get(config.BUFFER_WRITE_CHUNK_SIZE))
.getOrElse(config.BUFFER_WRITE_CHUNK_SIZE.defaultValue.get)
.getOrElse(config.BUFFER_WRITE_CHUNK_SIZE.defaultValue.get).toInt

private[this] var disposed: Boolean = false

Expand All @@ -65,7 +65,7 @@ private[spark] class ChunkedByteBuffer(var chunks: Array[ByteBuffer]) {
for (bytes <- getChunks()) {
while (bytes.remaining() > 0) {
val ioSize = Math.min(bytes.remaining(), bufferWriteChunkSize)
bytes.limit(bytes.position + ioSize.toInt)
bytes.limit(bytes.position + ioSize)
channel.write(bytes)
}
}
Expand Down