From eeafad0c9d2af38732d061d9490ec16bec996a9a Mon Sep 17 00:00:00 2001
From: Cheng Pan
Date: Mon, 17 Jun 2024 14:59:19 +0800
Subject: [PATCH] [SPARK-48642][CORE] False reported SparkOutOfMemoryError caused by killing task on spilling

---
 .../main/java/org/apache/spark/memory/TaskMemoryManager.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
index 7e993c8a2a3a..fe798e40a6ad 100644
--- a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
+++ b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
@@ -18,6 +18,7 @@
 package org.apache.spark.memory;
 
 import javax.annotation.concurrent.GuardedBy;
+import java.io.InterruptedIOException;
 import java.io.IOException;
 import java.nio.channels.ClosedByInterruptException;
 import java.util.Arrays;
@@ -244,7 +245,7 @@ private long trySpillAndAcquire(
         cList.remove(idx);
         return 0;
       }
-    } catch (ClosedByInterruptException e) {
+    } catch (ClosedByInterruptException | InterruptedIOException e) {
      // This called by user to kill a task (e.g: speculative task).
      logger.error("error while calling spill() on {}", e,
        MDC.of(LogKeys.MEMORY_CONSUMER$.MODULE$, consumerToSpill));
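
For context on why the extra catch alternative matters: when a task is killed (for example a speculative task whose sibling already finished), the interrupt can surface from the IO layer during spill() as an InterruptedIOException, which is a subclass of IOException. Without this change it falls through to the generic IOException handler and is reported as a SparkOutOfMemoryError, even though no memory was actually exhausted. The snippet below is a minimal, self-contained sketch of that catch ordering under simplified assumptions; the class and method names (SpillSketch, trySpill, spillToDisk, the plain OutOfMemoryError) are illustrative stand-ins and are not Spark APIs.

    import java.io.IOException;
    import java.io.InterruptedIOException;
    import java.nio.channels.ClosedByInterruptException;

    public class SpillSketch {

      static long trySpill(long required) {
        try {
          return spillToDisk(required);
        } catch (ClosedByInterruptException | InterruptedIOException e) {
          // The task was killed (e.g. a speculative task) and the interrupt
          // surfaced through the IO layer: treat it as a task kill, not as a
          // memory failure.
          throw new RuntimeException("task was interrupted while spilling", e);
        } catch (IOException e) {
          // A genuine IO failure during spilling. Without the branch above,
          // InterruptedIOException (an IOException subclass) would land here
          // and be misreported as an out-of-memory condition.
          throw new OutOfMemoryError("error while spilling: " + e.getMessage());
        }
      }

      // Stand-in for the real spill path; simulates an interrupt during IO.
      static long spillToDisk(long required) throws IOException {
        if (Thread.currentThread().isInterrupted()) {
          throw new InterruptedIOException("interrupted during spill");
        }
        return required;
      }

      public static void main(String[] args) {
        System.out.println("spilled " + trySpill(1024) + " bytes");
      }
    }

Note that the interrupt-related multi-catch must come before the plain IOException handler: both exceptions extend IOException, so reversing the order would make the interrupt branch unreachable.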