core/src/main/scala/org/apache/spark/api/python
1 file changed: +3 −3 lines

@@ -202,7 +202,7 @@ private[spark] class PythonRDD(
       this.interrupt()
     }
 
-    override def run(): Unit = Utils.tryLog {
+    override def run(): Unit = Utils.logUncaughtExceptions {
       try {
         val stream = new BufferedOutputStream(worker.getOutputStream, bufferSize)
         val dataOut = new DataOutputStream(stream)
@@ -248,13 +248,13 @@ private[spark] class PythonRDD(
       } catch {
         case e: Exception if context.isCompleted || context.isInterrupted =>
           logDebug("Exception thrown after task completion (likely due to cleanup)", e)
-          worker.shutdownOutput()
+          Utils.tryLog(worker.shutdownOutput())
 
         case e: Exception =>
           // We must avoid throwing exceptions here, because the thread uncaught exception handler
           // will kill the whole executor (see org.apache.spark.executor.Executor).
           _exception = e
-          worker.shutdownOutput()
+          Utils.tryLog(worker.shutdownOutput())
       } finally {
         // Release memory used by this thread for shuffles
         env.shuffleMemoryManager.releaseMemoryForThisThread()
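
The change follows a simple pattern: cleanup work performed inside a catch block (here worker.shutdownOutput()) is itself wrapped in a try-and-log helper, so a second failure during cleanup cannot escape the writer thread and reach the uncaught-exception handler that would otherwise kill the whole executor. Below is a minimal, hypothetical sketch of that pattern; SafeCleanup and its tryLog are illustrative stand-ins, not Spark's actual Utils.tryLog implementation.

import java.util.logging.{Level, Logger}
import scala.util.{Failure, Success, Try}

// Minimal sketch of the try-and-log pattern applied in this diff. SafeCleanup is
// a hypothetical helper, not Spark's org.apache.spark.util.Utils.
object SafeCleanup {
  private val log = Logger.getLogger("SafeCleanup")

  // Evaluate `block`; log and swallow any exception instead of rethrowing, so a
  // failure during cleanup never reaches the thread's uncaught-exception handler.
  def tryLog[T](block: => T): Try[T] = {
    val result = Try(block)
    result match {
      case Failure(e) => log.log(Level.WARNING, "Exception during cleanup", e)
      case Success(_) => // cleanup succeeded, nothing to log
    }
    result
  }
}

object SafeCleanupExample extends App {
  // shutdownOutput() throws on an unconnected socket; the wrapper logs the
  // SocketException and the calling thread keeps running.
  val socket = new java.net.Socket()
  SafeCleanup.tryLog(socket.shutdownOutput())
  println("cleanup attempted; any failure was logged, not rethrown")
}

The same reasoning explains the switch in run() from Utils.tryLog to Utils.logUncaughtExceptions: the body of run() already handles its own exceptions, so the outer wrapper only needs to catch anything that slips through and log it rather than let it propagate.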