Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
CR, Moved implementations to TaskContextImpl
  • Loading branch information
ScrapCodes committed Oct 14, 2014
commit 7ecc2fe712a40d73192ed9e28d8eca6d48c941f3
115 changes: 12 additions & 103 deletions core/src/main/java/org/apache/spark/TaskContext.java
Original file line number Diff line number Diff line change
Expand Up @@ -39,21 +39,6 @@
@DeveloperApi
public abstract class TaskContext implements Serializable {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd prefer this class not have a constructor (the whole idea here is that we don't want user to instantiate it). Basically we want to move all of the logic here to TaskContextImpl and only define the interface here in terms of abstract methods.


private int stageId;
private int partitionId;
private long attemptId;
private boolean runningLocally;
private TaskMetrics taskMetrics;

TaskContext(int stageId, int partitionId, long attemptId, boolean runningLocally,
TaskMetrics taskMetrics) {
this.attemptId = attemptId;
this.partitionId = partitionId;
this.runningLocally = runningLocally;
this.stageId = stageId;
this.taskMetrics = taskMetrics;
}

private static ThreadLocal<TaskContext> taskContext =
new ThreadLocal<TaskContext>();

Expand All @@ -74,132 +59,56 @@ static void unset() {
taskContext.remove();
}

// List of callback functions to execute when the task completes.
private transient List<TaskCompletionListener> onCompleteCallbacks =
new ArrayList<TaskCompletionListener>();

// Whether the corresponding task has been killed.
private volatile boolean interrupted = false;

// Whether the task has completed.
private volatile boolean completed = false;

/**
* Checks whether the task has completed.
*/
public boolean isCompleted() {
return completed;
}
public abstract boolean isCompleted();
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

//cc @rxin - should this be "isComplete" rather than "isCompleted"?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Actually this looks good - we use isCompleted elsewhere already in user-facing code.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

My understanding is that isCompleted means it has been completed, whereas isComplete means it is "whole".

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah, I think isCompleted is right, since we'd use isFailed instead of isFailure or isFail.


/**
* Checks whether the task has been killed.
*/
public boolean isInterrupted() {
return interrupted;
}
public abstract boolean isInterrupted();

/**
* Add a (Java friendly) listener to be executed on task completion.
* This will be called in all situations - success, failure, or cancellation.
* <p/>
* An example use is for HadoopRDD to register a callback to close the input stream.
*/
public TaskContext addTaskCompletionListener(TaskCompletionListener listener) {
onCompleteCallbacks.add(listener);
return this;
}
public abstract TaskContext addTaskCompletionListener(TaskCompletionListener listener);

/**
* Add a listener in the form of a Scala closure to be executed on task completion.
* This will be called in all situations - success, failure, or cancellation.
* <p/>
* An example use is for HadoopRDD to register a callback to close the input stream.
*/
public TaskContext addTaskCompletionListener(final Function1<TaskContext, Unit> f) {
onCompleteCallbacks.add(new TaskCompletionListener() {
@Override
public void onTaskCompletion(TaskContext context) {
f.apply(context);
}
});
return this;
}
public abstract TaskContext addTaskCompletionListener(final Function1<TaskContext, Unit> f);

/**
* Add a callback function to be executed on task completion. An example use
* is for HadoopRDD to register a callback to close the input stream.
* Will be called in any situation - success, failure, or cancellation.
*
* Deprecated: use addTaskCompletionListener
*
*
* @param f Callback function.
*/
@Deprecated
public void addOnCompleteCallback(final Function0<Unit> f) {
onCompleteCallbacks.add(new TaskCompletionListener() {
@Override
public void onTaskCompletion(TaskContext context) {
f.apply();
}
});
}
public abstract void addOnCompleteCallback(final Function0<Unit> f);

/**
* ::Internal API::
* Marks the task as completed and triggers the listeners.
*/
public void markTaskCompleted() throws TaskCompletionListenerException {
completed = true;
List<String> errorMsgs = new ArrayList<String>(2);
// Process complete callbacks in the reverse order of registration
List<TaskCompletionListener> revlist =
new ArrayList<TaskCompletionListener>(onCompleteCallbacks);
Collections.reverse(revlist);
for (TaskCompletionListener tcl: revlist) {
try {
tcl.onTaskCompletion(this);
} catch (Throwable e) {
errorMsgs.add(e.getMessage());
}
}

if (!errorMsgs.isEmpty()) {
throw new TaskCompletionListenerException(JavaConversions.asScalaBuffer(errorMsgs));
}
}

/**
* ::Internal API::
* Marks the task for interruption, i.e. cancellation.
*/
public void markInterrupted() {
interrupted = true;
}

public int stageId() {
return stageId;
}
public abstract int stageId();

public int partitionId() {
return partitionId;
}
public abstract int partitionId();

public long attemptId() {
return attemptId;
}
public abstract long attemptId();

@Deprecated
/** Deprecated: use isRunningLocally() */
public boolean runningLocally() {
return runningLocally;
}
public abstract boolean runningLocally();

public boolean isRunningLocally() {
return runningLocally;
}
public abstract boolean isRunningLocally();

/** ::Internal API:: */
public TaskMetrics taskMetrics() {
return taskMetrics;
}
public abstract TaskMetrics taskMetrics();
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,5 +22,5 @@ private [spark] object TaskContextHelper {
def setTaskContext(tc: TaskContext): Unit = TaskContext.setTaskContext(tc)

def unset(): Unit = TaskContext.unset()

}
86 changes: 80 additions & 6 deletions core/src/main/scala/org/apache/spark/TaskContextImpl.scala
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,85 @@
package org.apache.spark

import org.apache.spark.executor.TaskMetrics
import org.apache.spark.util.{TaskCompletionListener, TaskCompletionListenerException}

private[spark] class TaskContextImpl(stageId: Int,
partitionId: Int,
attemptId: Long,
runningLocally: Boolean = false,
taskMetrics: TaskMetrics = TaskMetrics.empty)
extends TaskContext(stageId, partitionId, attemptId, runningLocally, taskMetrics);
import scala.collection.mutable.ArrayBuffer

private[spark] class TaskContextImpl(_stageId: Int,
    _partitionId: Int,
    _attemptId: Long,
    _runningLocally: Boolean = false,
    _taskMetrics: TaskMetrics = TaskMetrics.empty)
  extends TaskContext(_stageId, _partitionId, _attemptId, _runningLocally, _taskMetrics)
  with Logging {

  /** Listeners to run when the task finishes, kept in registration order. */
  @transient private val completionListeners = new ArrayBuffer[TaskCompletionListener]

  // Set to true once the task has been marked for cancellation.
  @volatile private var taskInterrupted: Boolean = false

  // Set to true once the task has finished, whether it succeeded or not.
  @volatile private var taskCompleted: Boolean = false

  override def addTaskCompletionListener(listener: TaskCompletionListener): this.type = {
    completionListeners += listener
    this
  }

  override def addTaskCompletionListener(f: TaskContext => Unit): this.type = {
    // Adapt the Scala closure to the listener interface and reuse the listener overload.
    addTaskCompletionListener(new TaskCompletionListener {
      override def onTaskCompletion(context: TaskContext): Unit = f(context)
    })
  }

  @deprecated("use addTaskCompletionListener", "1.1.0")
  override def addOnCompleteCallback(f: () => Unit) {
    completionListeners += new TaskCompletionListener {
      override def onTaskCompletion(context: TaskContext): Unit = f()
    }
  }

  /**
   * Marks the task as completed and invokes every registered listener, most
   * recently registered first. Listener failures are collected (and logged)
   * rather than aborting the loop; if any listener threw, a single
   * TaskCompletionListenerException carrying all messages is raised afterwards.
   */
  private[spark] def markTaskCompleted(): Unit = {
    taskCompleted = true
    val failureMessages = new ArrayBuffer[String](2)
    // Run complete callbacks in the reverse order of registration.
    for (listener <- completionListeners.reverse) {
      try {
        listener.onTaskCompletion(this)
      } catch {
        case e: Throwable =>
          failureMessages += e.getMessage
          logError("Error in TaskCompletionListener", e)
      }
    }
    if (failureMessages.nonEmpty) {
      throw new TaskCompletionListenerException(failureMessages)
    }
  }

  /** Marks the task for interruption, i.e. cancellation. */
  private[spark] def markInterrupted(): Unit = {
    taskInterrupted = true
  }

  override def isCompleted: Boolean = taskCompleted

  override def isInterrupted: Boolean = taskInterrupted

  override def stageId(): Int = _stageId

  override def partitionId(): Int = _partitionId

  override def attemptId(): Long = _attemptId

  override def runningLocally(): Boolean = _runningLocally

  override def isRunningLocally: Boolean = _runningLocally

  /** ::Internal API:: */
  override def taskMetrics(): TaskMetrics = _taskMetrics

}