
Commit aa868a9

Author: Andrew Or (committed)
Ensure that HadoopRDD is actually serializable
1 parent c3bfcae commit aa868a9

File tree

1 file changed (+5, -1 lines changed)

core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala

Lines changed: 5 additions & 1 deletion
@@ -99,7 +99,7 @@ private[spark] class HadoopPartition(rddId: Int, idx: Int, @transient s: InputSp
   */
 @DeveloperApi
 class HadoopRDD[K, V](
-    sc: SparkContext,
+    @transient sc: SparkContext,
     broadcastedConf: Broadcast[SerializableWritable[Configuration]],
     initLocalJobConfFuncOpt: Option[JobConf => Unit],
     inputFormatClass: Class[_ <: InputFormat[K, V]],
@@ -108,6 +108,10 @@ class HadoopRDD[K, V](
     minPartitions: Int)
   extends RDD[(K, V)](sc, Nil) with Logging {
 
+  if (initLocalJobConfFuncOpt.isDefined) {
+    sc.clean(initLocalJobConfFuncOpt.get)
+  }
+
   def this(
       sc: SparkContext,
       conf: JobConf,
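
For context, here is a minimal, self-contained sketch of why the @transient marker matters; it is not Spark code, and the DriverOnlyContext, MiniRDD, and TransientDemo names are hypothetical. Java serialization walks every non-transient field of an object, so a retained reference to a non-serializable, driver-only object (the role SparkContext plays in Spark) would make the whole RDD unserializable. Marking the reference @transient keeps it out of the serialized graph.

import java.io._

// Stand-in for a non-serializable, driver-only object; SparkContext plays this role in Spark.
class DriverOnlyContext

class MiniRDD(val name: String) extends Serializable {
  // Without @transient, writeObject below would throw java.io.NotSerializableException,
  // because Java serialization tries to walk this field. With @transient it is skipped
  // (and is simply null after deserialization, so executors must never touch it).
  @transient val ctx: DriverOnlyContext = new DriverOnlyContext
}

object TransientDemo {
  def main(args: Array[String]): Unit = {
    val rdd = new MiniRDD("hadoop-like")
    val out = new ObjectOutputStream(new ByteArrayOutputStream())
    out.writeObject(rdd)  // succeeds: the transient context reference is not serialized
    println(s"serialized '${rdd.name}' without dragging the context along")
  }
}

In the diff above, the same idea is applied to the constructor parameter (@transient sc: SparkContext), and the new sc.clean(initLocalJobConfFuncOpt.get) call runs Spark's closure cleaner over the user-supplied JobConf function at construction time, so an unserializable function should be caught on the driver rather than failing later on an executor.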

0 commit comments
