Skip to content

Commit 5cdb6fa

Browse files
committed
Changed to use SCCallSiteSync (replacing _JavaStackTrace)
1 parent e685853 commit 5cdb6fa

File tree

2 files changed

+2
-7
lines changed

2 files changed

+2
-7
lines changed

core/pom.xml

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,11 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
-<<<<<<< HEAD
     <version>1.2.0-SNAPSHOT</version>
-=======
-    <version>1.1.0-SNAPSHOT</version>
->>>>>>> modified the code base on comment in https://github.com/tdas/spark/pull/10
     <relativePath>../pom.xml</relativePath>
   </parent>

python/pyspark/streaming/dstream.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,11 +22,10 @@
 from pyspark.serializers import NoOpSerializer,\
     BatchedSerializer, CloudPickleSerializer, pack_long,\
     CompressedSerializer
-from pyspark.rdd import _JavaStackTrace
 from pyspark.storagelevel import StorageLevel
 from pyspark.resultiterable import ResultIterable
 from pyspark.streaming.util import rddToFileName, RDDFunction
-
+from pyspark.traceback_utils import SCCallSiteSync

 from py4j.java_collections import ListConverter, MapConverter

@@ -187,7 +186,7 @@ def add_shuffle_key(split, iterator):
             yield outputSerializer.dumps(items)
         keyed = PipelinedDStream(self, add_shuffle_key)
         keyed._bypass_serializer = True
-        with _JavaStackTrace(self.ctx) as st:
+        with SCCallSiteSync(self.context) as css:
             partitioner = self.ctx._jvm.PythonPartitioner(numPartitions,
                                                           id(partitionFunc))
         jdstream = self.ctx._jvm.PythonPairwiseDStream(keyed._jdstream.dstream(),

0 commit comments

Comments
 (0)