Changes from 1 commit (36 commits in this pull request)

Commits:
c8e8abe
SPARK-23429: Add executor memory metrics to heartbeat and expose in e…
edwinalu Mar 9, 2018
5d6ae1c
modify MimaExcludes.scala to filter changes to SparkListenerExecutorM…
edwinalu Apr 2, 2018
ad10d28
Address code review comments, change event logging to stage end.
edwinalu Apr 22, 2018
10ed328
Add configuration parameter spark.eventLog.logExecutorMetricsUpdates.…
edwinalu May 15, 2018
2d20367
wip on enum based metrics
squito May 23, 2018
f904f1e
wip ... has both enum and non-enum version
squito May 23, 2018
c502ec4
case objects, mostly complete
squito May 23, 2018
7879e66
Merge pull request #1 from squito/metric_enums
edwinalu Jun 3, 2018
2662f6f
Address comments (move heartbeater from DAGScheduler to SparkContext,…
edwinalu Jun 10, 2018
2871335
SPARK-23429: Add executor memory metrics to heartbeat and expose in e…
edwinalu Mar 9, 2018
da83f2e
modify MimaExcludes.scala to filter changes to SparkListenerExecutorM…
edwinalu Apr 2, 2018
f25a44b
Address code review comments, change event logging to stage end.
edwinalu Apr 22, 2018
ca85c82
Add configuration parameter spark.eventLog.logExecutorMetricsUpdates.…
edwinalu May 15, 2018
8b74ba8
wip on enum based metrics
squito May 23, 2018
036148c
wip ... has both enum and non-enum version
squito May 23, 2018
91fb1db
case objects, mostly complete
squito May 23, 2018
2d8894a
Address comments (move heartbeater from DAGScheduler to SparkContext,…
edwinalu Jun 10, 2018
99044e6
Merge branch 'SPARK-23429.2' of https://github.com/edwinalu/spark int…
edwinalu Jun 14, 2018
263c8c8
code review comments
edwinalu Jun 14, 2018
812fdcf
code review comments:
edwinalu Jun 22, 2018
7ed42a5
Address code review comments. Also make executorUpdates in SparkListe…
edwinalu Jun 28, 2018
8d9acdf
Revert and make executorUpdates in SparkListenerExecutorMetricsUpdate…
edwinalu Jun 29, 2018
20799d2
code review comments: hid array implementation of executor metrics, a…
edwinalu Jul 25, 2018
8905d23
merge with master
edwinalu Jul 25, 2018
04875b8
Integration of ProcessTreeMetrics with PR 21221
Jul 26, 2018
a0eed11
address code review comments
edwinalu Aug 5, 2018
162b9b2
Merge branch 'SPARK-23429.2' of https://github.com/edwinalu/spark int…
Aug 6, 2018
29a44c7
Changing the position of ptree and also make the computation configur…
Aug 7, 2018
3671427
Seperate metrics for jvm, python and others and update the tests
Aug 8, 2018
03cd5bc
code review comments
edwinalu Aug 13, 2018
c79b5ab
Merge branch 'SPARK-23429.2' of https://github.com/edwinalu/spark int…
Aug 14, 2018
10e7f15
Merge branch 'master' into SPARK-23429.2
edwinalu Aug 14, 2018
a14b82a
merge conflicts
edwinalu Aug 14, 2018
2897281
disable stage executor metrics logging by default
edwinalu Aug 16, 2018
8f97b50
Merge branch 'SPARK-23429.2' of https://github.com/rezasafi/spark int…
Aug 17, 2018
b14cebc
Update JsonProtocolSuite with new metrics.
Aug 17, 2018
case objects, mostly complete
squito authored and edwinalu committed Jun 14, 2018
commit 91fb1db09504fc4386477ab51221d28240c3c901
38 changes: 0 additions & 38 deletions core/src/main/java/org/apache/spark/scheduler/MemoryTypes.java

This file was deleted.

core/src/main/scala/org/apache/spark/executor/ExecutorMetrics.scala
@@ -18,7 +18,7 @@
package org.apache.spark.executor

import org.apache.spark.annotation.DeveloperApi
-import org.apache.spark.scheduler.MemoryTypes
+import org.apache.spark.metrics.MetricGetter

/**
* :: DeveloperApi ::
@@ -32,5 +32,5 @@ import org.apache.spark.scheduler.MemoryTypes
*/
@DeveloperApi
class ExecutorMetrics private[spark] (val timestamp: Long) extends Serializable {
-  val metrics = new Array[Long](MemoryTypes.values().length)
+  val metrics = new Array[Long](MetricGetter.values.length)
}
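
As a point of reference for the change above, here is a minimal sketch of how an executor-side caller might fill the new index-based array. The helper itself is hypothetical, not code from this commit, and relies on the private[spark] ExecutorMetrics constructor, so it is illustrative only:

import org.apache.spark.executor.ExecutorMetrics
import org.apache.spark.memory.MemoryManager
import org.apache.spark.metrics.MetricGetter

// Hypothetical helper: snapshot every registered metric into the
// index-based array, in MetricGetter.values order.
def currentMetricsSnapshot(memoryManager: MemoryManager): ExecutorMetrics = {
  val snapshot = new ExecutorMetrics(System.currentTimeMillis())
  MetricGetter.idxAndValues.foreach { case (idx, getter) =>
    snapshot.metrics(idx) = getter.getMetricValue(memoryManager)
  }
  snapshot
}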
core/src/main/scala/org/apache/spark/metrics/MetricGetter.scala
@@ -23,6 +23,7 @@ import org.apache.spark.memory.MemoryManager

sealed trait MetricGetter {
def getMetricValue(memoryManager: MemoryManager): Long
+  val name = getClass().getName().stripSuffix("$")
}

abstract class MemoryManagerMetricGetter(f: MemoryManager => Long) extends MetricGetter {
@@ -32,9 +33,8 @@ abstract class MemoryManagerMetricGetter(f: MemoryManager => Long) extends Metri
}

abstract class MBeanMetricGetter(mBeanName: String) extends MetricGetter {
-  private val name = new ObjectName(mBeanName)
  val bean = ManagementFactory.newPlatformMXBeanProxy(ManagementFactory.getPlatformMBeanServer,
-    name.toString, classOf[BufferPoolMXBean])
+    new ObjectName(mBeanName).toString, classOf[BufferPoolMXBean])

override def getMetricValue(memoryManager: MemoryManager): Long = {
bean.getMemoryUsed
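
To make the case-object pattern concrete, here is a minimal sketch of what the concrete getters and the companion object's values/idxAndValues could look like. Everything beyond the members visible in the diff (the concrete object names, the exact contents of values) is an assumption, and since the trait is sealed, the real implementations have to live in the same file:

import java.lang.management.ManagementFactory
import org.apache.spark.memory.MemoryManager

// Sketch of one MXBean-backed getter and one MemoryManager-backed getter.
case object JVMHeapMemory extends MetricGetter {
  override def getMetricValue(memoryManager: MemoryManager): Long =
    ManagementFactory.getMemoryMXBean.getHeapMemoryUsage.getUsed
}

// Assumes MemoryManagerMetricGetter's hidden body implements
// getMetricValue by applying the constructor function f.
case object OnHeapExecutionMemory
  extends MemoryManagerMetricGetter(_.onHeapExecutionMemoryUsed)

object MetricGetter {
  // A fixed ordering gives every metric a stable index into
  // ExecutorMetrics.metrics; values.length sizes that array.
  val values: IndexedSeq[MetricGetter] =
    IndexedSeq(JVMHeapMemory, OnHeapExecutionMemory)
  val idxAndValues: IndexedSeq[(Int, MetricGetter)] =
    values.zipWithIndex.map(_.swap)
}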
core/src/main/scala/org/apache/spark/scheduler/PeakExecutorMetrics.scala
@@ -18,14 +18,15 @@
package org.apache.spark.scheduler

import org.apache.spark.executor.ExecutorMetrics
+import org.apache.spark.metrics.MetricGetter
import org.apache.spark.status.api.v1.PeakMemoryMetrics

/**
* Records the peak values for executor level metrics. If jvmUsedHeapMemory is -1, then no
* values have been recorded yet.
*/
private[spark] class PeakExecutorMetrics {
-  val metrics = new Array[Long](MemoryTypes.values().length)
+  val metrics = new Array[Long](MetricGetter.values.length)
metrics(0) = -1

/**
@@ -38,7 +39,7 @@ private[spark] class PeakExecutorMetrics {
def compareAndUpdate(executorMetrics: ExecutorMetrics): Boolean = {
var updated: Boolean = false

-    (0 until MemoryTypes.values().length).foreach { metricIdx =>
+    (0 until MetricGetter.values.length).foreach { metricIdx =>
val newVal = executorMetrics.metrics(metricIdx)
if ( newVal > metrics(metricIdx)) {
updated = true
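
A small usage sketch of the peak-tracking logic above, with fabricated heartbeat samples (PeakExecutorMetrics is private[spark], so this is Spark-internal pseudocode rather than user-facing API):

import org.apache.spark.executor.ExecutorMetrics
import org.apache.spark.metrics.MetricGetter
import org.apache.spark.scheduler.PeakExecutorMetrics

// Three fake heartbeats; timestamps and readings are made up.
val samples = Seq(1L, 2L, 3L).map { ts =>
  val m = new ExecutorMetrics(ts)
  (0 until MetricGetter.values.length).foreach { idx =>
    m.metrics(idx) = ts * 100 + idx
  }
  m
}

val peaks = new PeakExecutorMetrics
samples.foreach { sample =>
  if (peaks.compareAndUpdate(sample)) {
    // At least one metric hit a new peak; in this PR the listener
    // eventually logs the peaks at stage end.
  }
}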
4 changes: 2 additions & 2 deletions core/src/main/scala/org/apache/spark/status/api/v1/api.scala
@@ -25,7 +25,7 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties
import com.fasterxml.jackson.databind.annotation.JsonDeserialize

import org.apache.spark.JobExecutionStatus
-import org.apache.spark.scheduler.MemoryTypes
+import org.apache.spark.metrics.MetricGetter

case class ApplicationInfo private[spark](
id: String,
@@ -110,7 +110,7 @@ class MemoryMetrics private[spark](

class PeakMemoryMetrics private[spark]() {
// TODO special json-ification
-  val metrics = new Array[Long](MemoryTypes.values().length)
+  val metrics = new Array[Long](MetricGetter.values.length)
}

class JobData private[spark](
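
One plausible way to discharge the "TODO special json-ification" on PeakMemoryMetrics above is a custom Jackson serializer that writes one numeric field per registered metric. The sketch below is an assumption about how that TODO might be resolved, not code from this commit; the class name PeakMemoryMetricsSerializer is invented:

import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.databind.{JsonSerializer, SerializerProvider}
import org.apache.spark.metrics.MetricGetter
import org.apache.spark.status.api.v1.PeakMemoryMetrics

// Hypothetical serializer: {"<metric name>": <peak value>, ...}
class PeakMemoryMetricsSerializer extends JsonSerializer[PeakMemoryMetrics] {
  override def serialize(
      value: PeakMemoryMetrics,
      gen: JsonGenerator,
      serializers: SerializerProvider): Unit = {
    gen.writeStartObject()
    MetricGetter.idxAndValues.foreach { case (idx, metric) =>
      gen.writeNumberField(metric.name, value.metrics(idx))
    }
    gen.writeEndObject()
  }
}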
38 changes: 13 additions & 25 deletions core/src/main/scala/org/apache/spark/util/JsonProtocol.scala
@@ -31,6 +31,7 @@ import org.json4s.jackson.JsonMethods._

import org.apache.spark._
import org.apache.spark.executor._
+import org.apache.spark.metrics.MetricGetter
import org.apache.spark.rdd.RDDOperationScope
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.ExecutorInfo
@@ -389,17 +390,12 @@ private[spark] object JsonProtocol {
* @return the JSON representation
*/
def executorMetricsToJson(executorMetrics: ExecutorMetrics): JValue = {
("Timestamp" -> executorMetrics.timestamp) ~
("JVM Used Heap Memory" -> executorMetrics.jvmUsedHeapMemory) ~
("JVM Used Nonheap Memory" -> executorMetrics.jvmUsedNonHeapMemory) ~
("Onheap Execution Memory" -> executorMetrics.onHeapExecutionMemory) ~
("Offheap Execution Memory" -> executorMetrics.offHeapExecutionMemory) ~
("Onheap Storage Memory" -> executorMetrics.onHeapStorageMemory) ~
("Offheap Storage Memory" -> executorMetrics.offHeapStorageMemory) ~
("Onheap Unified Memory" -> executorMetrics.onHeapUnifiedMemory) ~
("Offheap Unified Memory" -> executorMetrics.offHeapUnifiedMemory) ~
("Direct Memory" -> executorMetrics.directMemory) ~
("Mapped Memory" -> executorMetrics.mappedMemory)
val metrics = MetricGetter.idxAndValues.map { case (idx, metric) =>
JField(metric.name, executorMetrics.metrics(idx))
}
JObject(
(Seq(JField("Timestamp", executorMetrics.timestamp)) ++ metrics): _*
)
}

def taskEndReasonToJson(taskEndReason: TaskEndReason): JValue = {
@@ -616,20 +612,12 @@
*/
def executorMetricsFromJson(json: JValue): ExecutorMetrics = {
val timeStamp = (json \ "Timestamp").extract[Long]
-    val jvmUsedHeapMemory = (json \ "JVM Used Heap Memory").extract[Long]
-    val jvmUsedNonHeapMemory = (json \ "JVM Used Nonheap Memory").extract[Long]
-    val onHeapExecutionMemory = (json \ "Onheap Execution Memory").extract[Long]
-    val offHeapExecutionMemory = (json \ "Offheap Execution Memory").extract[Long]
-    val onHeapStorageMemory = (json \ "Onheap Storage Memory").extract[Long]
-    val offHeapStorageMemory = (json \ "Offheap Storage Memory").extract[Long]
-    val onHeapUnifiedMemory = (json \ "Onheap Unified Memory").extract[Long]
-    val offHeapUnifiedMemory = (json \ "Offheap Unified Memory").extract[Long]
-    val directMemory = (json \ "Direct Memory").extract[Long]
-    val mappedMemory = (json \ "Mapped Memory").extract[Long]
-    new ExecutorMetrics(timeStamp, jvmUsedHeapMemory, jvmUsedNonHeapMemory,
-      onHeapExecutionMemory, offHeapExecutionMemory, onHeapStorageMemory,
-      offHeapStorageMemory, onHeapUnifiedMemory, offHeapUnifiedMemory, directMemory,
-      mappedMemory)
+    val metrics = new ExecutorMetrics(timeStamp)
+    MetricGetter.idxAndValues.foreach { case (idx, metric) =>
+      val metricValue = (json \ metric.name).extract[Long]
+      metrics.metrics(idx) = metricValue
+    }
+    metrics
}

def taskEndFromJson(json: JValue): SparkListenerTaskEnd = {
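
Taken together, the two rewritten helpers now round-trip through the metric registry instead of a fixed field list. A minimal sanity-check sketch (JsonProtocol is a private[spark] object and the values below are made up, so this is illustrative only):

import org.json4s.jackson.JsonMethods.{compact, render}
import org.apache.spark.executor.ExecutorMetrics
import org.apache.spark.metrics.MetricGetter
import org.apache.spark.util.JsonProtocol

val original = new ExecutorMetrics(1529000000000L)
MetricGetter.idxAndValues.foreach { case (idx, _) =>
  original.metrics(idx) = (idx + 1) * 10L
}

val json = JsonProtocol.executorMetricsToJson(original)
// Prints {"Timestamp":1529000000000, ...} with one field per metric name.
println(compact(render(json)))

val roundTripped = JsonProtocol.executorMetricsFromJson(json)
assert(roundTripped.timestamp == original.timestamp)
assert(roundTripped.metrics.sameElements(original.metrics))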
core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
@@ -34,7 +34,7 @@ import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.executor.{ExecutorMetrics, TaskMetrics}
import org.apache.spark.internal.Logging
import org.apache.spark.io._
-import org.apache.spark.metrics.MetricsSystem
+import org.apache.spark.metrics.{MetricGetter, MetricsSystem}
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.util.{JsonProtocol, Utils}

@@ -412,16 +412,9 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
(executorMetrics1, executorMetrics2) match {
case (Some(e1), Some(e2)) =>
assert(e1.timestamp === e2.timestamp)
-          assert(e1.jvmUsedHeapMemory === e2.jvmUsedHeapMemory)
-          assert(e1.jvmUsedNonHeapMemory === e2.jvmUsedNonHeapMemory)
-          assert(e1.onHeapExecutionMemory === e2.onHeapExecutionMemory)
-          assert(e1.offHeapExecutionMemory === e2.offHeapExecutionMemory)
-          assert(e1.onHeapStorageMemory === e2.onHeapStorageMemory)
-          assert(e1.offHeapStorageMemory === e2.offHeapStorageMemory)
-          assert(e1.onHeapUnifiedMemory === e2.onHeapUnifiedMemory)
-          assert(e1.offHeapUnifiedMemory === e2.offHeapUnifiedMemory)
-          assert(e1.directMemory === e2.directMemory)
-          assert(e1.mappedMemory === e2.mappedMemory)
+          (0 until MetricGetter.values.length).foreach { idx =>
+            assert(e1.metrics(idx) === e2.metrics(idx))
+          }
case (None, None) =>
case _ =>
assert(false)
core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
@@ -30,6 +30,7 @@ import org.scalatest.exceptions.TestFailedException

import org.apache.spark._
import org.apache.spark.executor._
+import org.apache.spark.metrics.MetricGetter
import org.apache.spark.rdd.RDDOperationScope
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.ExecutorInfo
@@ -686,12 +687,9 @@ private[spark] object JsonProtocolSuite extends Assertions {
(metrics1, metrics2) match {
case (Some(m1), Some(m2)) =>
assert(m1.timestamp === m2.timestamp)
-          assert(m1.jvmUsedHeapMemory === m2.jvmUsedHeapMemory)
-          assert(m1.jvmUsedNonHeapMemory === m2.jvmUsedNonHeapMemory)
-          assert(m1.onHeapExecutionMemory === m2.onHeapExecutionMemory)
-          assert(m1.offHeapExecutionMemory === m2.offHeapExecutionMemory)
-          assert(m1.onHeapStorageMemory === m2.onHeapStorageMemory)
-          assert(m1.offHeapStorageMemory === m2.offHeapStorageMemory)
+          (0 until MetricGetter.values.length).foreach { idx =>
+            assert(m1.metrics(idx) === m2.metrics(idx))
+          }
case (None, None) =>
case _ =>
assert(false)