Skip to content

Commit 8d0466d

Browse files
JoshRosen authored and conviva-zz committed
[SPARK-1550] [PySpark] Allow SparkContext creation after failed attempts
This addresses a PySpark issue where a failed attempt to construct SparkContext would prevent any future SparkContext creation. Author: Josh Rosen <[email protected]> Closes apache#1606 from JoshRosen/SPARK-1550 and squashes the following commits: ec7fadc [Josh Rosen] [SPARK-1550] [PySpark] Allow SparkContext creation after failed attempts
1 parent 10f622c commit 8d0466d

File tree

2 files changed

+18
-6
lines changed

2 files changed

+18
-6
lines changed

python/pyspark/context.py

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,16 @@ def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
100100
tempNamedTuple = namedtuple("Callsite", "function file linenum")
101101
self._callsite = tempNamedTuple(function=None, file=None, linenum=None)
102102
SparkContext._ensure_initialized(self, gateway=gateway)
103-
103+
try:
104+
self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
105+
conf)
106+
except:
107+
# If an error occurs, clean up in order to allow future SparkContext creation:
108+
self.stop()
109+
raise
110+
111+
def _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
112+
conf):
104113
self.environment = environment or {}
105114
self._conf = conf or SparkConf(_jvm=self._jvm)
106115
self._batchSize = batchSize # -1 represents an unlimited batch size
@@ -249,17 +258,14 @@ def defaultMinPartitions(self):
249258
"""
250259
return self._jsc.sc().defaultMinPartitions()
251260

252-
def __del__(self):
253-
self.stop()
254-
255261
def stop(self):
256262
"""
257263
Shut down the SparkContext.
258264
"""
259-
if self._jsc:
265+
if getattr(self, "_jsc", None):
260266
self._jsc.stop()
261267
self._jsc = None
262-
if self._accumulatorServer:
268+
if getattr(self, "_accumulatorServer", None):
263269
self._accumulatorServer.shutdown()
264270
self._accumulatorServer = None
265271
with SparkContext._lock:

python/pyspark/tests.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -209,6 +209,12 @@ def func():
209209

210210
class TestRDDFunctions(PySparkTestCase):
211211

212+
def test_failed_sparkcontext_creation(self):
    """Regression test for SPARK-1550.

    A failed attempt to construct a SparkContext must not prevent a
    later construction attempt from succeeding.
    """
    # Release the fixture's context so we can try (and fail) to make a new one.
    self.sc.stop()
    # Constructing against a bogus master must raise rather than succeed.
    with self.assertRaises(Exception):
        SparkContext("an-invalid-master-name")
    # After the failure, creating a fresh context must work again.
    self.sc = SparkContext("local")
217+
212218
def test_save_as_textfile_with_unicode(self):
213219
# Regression test for SPARK-970
214220
x = u"\u00A1Hola, mundo!"

0 commit comments

Comments (0)