Make pyspark.testing compatible with pyspark-connect
HyukjinKwon committed Apr 7, 2024
commit 41a0f597d7a341470392d03462de6e8aa064d1af
11 changes: 9 additions & 2 deletions python/pyspark/testing/connectutils.py
@@ -21,6 +21,7 @@
 import functools
 import unittest
 import uuid
+import contextlib
 
 grpc_requirement_message = None
 try:
@@ -45,6 +46,7 @@
 have_googleapis_common_protos = googleapis_common_protos_requirement_message is None
 
 from pyspark import Row, SparkConf
+from pyspark.util import is_remote_only
 from pyspark.testing.utils import PySparkErrorTestUtils
 from pyspark.testing.sqlutils import (
     have_pandas,
@@ -184,7 +186,9 @@ def setUpClass(cls):
             .remote(cls.master())
             .getOrCreate()
         )
-        cls._legacy_sc = PySparkSession._instantiatedSession._sc
+        cls._legacy_sc = None
+        if not is_remote_only():
+            cls._legacy_sc = PySparkSession._instantiatedSession._sc
         cls.tempdir = tempfile.NamedTemporaryFile(delete=False)
         os.unlink(cls.tempdir.name)
         cls.testData = [Row(key=i, value=str(i)) for i in range(100)]
@@ -203,4 +207,7 @@ def test_assert_remote_mode(self):
     def quiet(self):
         from pyspark.testing.utils import QuietTest
 
-        return QuietTest(self._legacy_sc)
+        if self._legacy_sc is not None:
+            return QuietTest(self._legacy_sc)
+        else:
+            return contextlib.nullcontext()
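
The setUpClass change guards access to the JVM-backed SparkContext behind pyspark.util.is_remote_only(), which the patch imports: under a pyspark-connect-only install there is no JVM, so no legacy SparkContext can be fetched. Below is a minimal sketch of that guard pattern, assuming a PySpark build where is_remote_only is available (as in this commit); the helper name get_legacy_spark_context is hypothetical, while the _instantiatedSession and _sc attributes are the same private hooks the patched test helper uses.

from pyspark.util import is_remote_only


def get_legacy_spark_context():
    # Hypothetical helper mirroring the guard added in setUpClass: under a
    # pyspark-connect-only install is_remote_only() is True and there is no
    # JVM-backed SparkContext to return.
    if is_remote_only():
        return None
    # Classic distribution: reuse the same private hooks as the test helper.
    from pyspark.sql import SparkSession

    active = SparkSession._instantiatedSession
    return active._sc if active is not None else None

Callers then treat None as "no legacy context", exactly like cls._legacy_sc in the patched setUpClass.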
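The quiet() change works because callers only ever use the return value as a context manager (with self.quiet(): ...), so contextlib.nullcontext() is a drop-in no-op when there is no SparkContext whose logs need silencing. A standalone sketch of that shape, using a hypothetical _FakeQuietTest stand-in rather than the real pyspark.testing.utils.QuietTest:

import contextlib


class _FakeQuietTest:
    # Hypothetical stand-in for pyspark.testing.utils.QuietTest, which
    # suppresses JVM log output around a block of test code.
    def __init__(self, sc):
        self.sc = sc

    def __enter__(self):
        print("silencing logs for", self.sc)
        return self

    def __exit__(self, *exc_info):
        print("restoring log level")
        return False


def quiet(legacy_sc):
    # Same shape as the patched helper: a real context manager when a
    # SparkContext is available, a no-op one under pyspark-connect.
    if legacy_sc is not None:
        return _FakeQuietTest(legacy_sc)
    return contextlib.nullcontext()


# The caller is identical in both modes:
with quiet(None):
    pass  # test body runs, just without log suppression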