Make spill tests wait until job has completed before returning the number of stages that spilled
srowen committed Jun 24, 2016
commit bcc0297e8af639132bebb0703eb7df8c8fea15f9
core/src/main/scala/org/apache/spark/TestUtils.scala (10 additions, 1 deletion)
@@ -22,6 +22,7 @@ import java.net.{URI, URL}
 import java.nio.charset.StandardCharsets
 import java.nio.file.Paths
 import java.util.Arrays
+import java.util.concurrent.CountDownLatch
 import java.util.jar.{JarEntry, JarOutputStream}

 import scala.collection.JavaConverters._
@@ -190,8 +191,12 @@ private[spark] object TestUtils {
 private class SpillListener extends SparkListener {
   private val stageIdToTaskMetrics = new mutable.HashMap[Int, ArrayBuffer[TaskMetrics]]
   private val spilledStageIds = new mutable.HashSet[Int]
+  private val stagesDone = new CountDownLatch(1)

-  def numSpilledStages: Int = spilledStageIds.size
+  def numSpilledStages: Int = {
+    stagesDone.await()
+    spilledStageIds.size
+  }

   override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
     stageIdToTaskMetrics.getOrElseUpdate(
@@ -206,4 +211,8 @@ private class SpillListener extends SparkListener {
       spilledStageIds += stageId
     }
   }
+
+  override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = {
+    stagesDone.countDown()
+  }
 }
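
For context, a rough usage sketch (not part of this commit) of how a spill test might exercise the listener after this change. The SparkContext `sc`, the shuffle job, and the assertion below are illustrative assumptions; only SpillListener, numSpilledStages, and the latch behavior come from the diff above.

// Hedged sketch, assuming a test in package org.apache.spark with a SparkContext `sc`
// in scope (SpillListener is package-private) and memory settings low enough to spill.
val listener = new SpillListener
sc.addSparkListener(listener)

// Some shuffle-heavy job expected to spill; the exact pipeline here is hypothetical.
sc.parallelize(1 to 1000000, 10)
  .map(i => (i % 100, i))
  .groupByKey()
  .count()

// Listener events are delivered asynchronously but in order, so before this commit a
// test could read numSpilledStages while stage-completed events were still in flight
// and see a too-small count. With the latch, this call blocks until onJobEnd fires,
// by which point every earlier stage-completed event has been processed by this listener.
assert(listener.numSpilledStages > 0)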