2 files changed, 5 insertions(+), 5 deletions(-)

sql/core/src/main/scala/org/apache/spark/sql
@@ -120,7 +120,7 @@ private[sql] class ParquetRelation2(
     private val maybeDataSchema: Option[StructType],
     private val maybePartitionSpec: Option[PartitionSpec],
     parameters: Map[String, String])(
-    @transient val sqlContext: SQLContext)
+    val sqlContext: SQLContext)
   extends HadoopFsRelation(maybePartitionSpec)
   with Logging {
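The only change in this hunk is dropping the @transient annotation from the sqlContext constructor parameter, which means the field now participates in Java serialization instead of being restored as null on deserialization. A minimal, self-contained sketch of that mechanism (the Context/WithTransient/WithoutTransient names are illustrative, not from the Spark code):

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

// Illustrative classes only: a @transient constructor val is skipped by
// Java serialization and comes back as null after a round trip.
class Context extends Serializable
class WithTransient(@transient val ctx: Context) extends Serializable
class WithoutTransient(val ctx: Context) extends Serializable

object TransientDemo extends App {
  // Serialize an object to bytes and read it back.
  def roundTrip[T <: AnyRef](obj: T): T = {
    val buf = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(buf)
    out.writeObject(obj)
    out.close()
    new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
      .readObject().asInstanceOf[T]
  }

  println(roundTrip(new WithTransient(new Context)).ctx)    // null: field was skipped
  println(roundTrip(new WithoutTransient(new Context)).ctx) // Context instance: field survives
}
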
@@ -608,10 +608,10 @@ abstract class HadoopFsRelation private[sql](maybePartitionSpec: Option[PartitionSpec])
    * @since 1.4.0
    */
   def buildScan(
-    requiredColumns: Array[String],
-    filters: Array[Filter],
-    inputFiles: Array[FileStatus],
-    broadcastedConf: Broadcast[SerializableWritable[Configuration]]): RDD[Row] = {
+      requiredColumns: Array[String],
+      filters: Array[Filter],
+      inputFiles: Array[FileStatus],
+      broadcastedConf: Broadcast[SerializableWritable[Configuration]]): RDD[Row] = {
     buildScan(requiredColumns, filters, inputFiles)
   }
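
This second hunk changes only whitespace: the deleted and added parameter lines are textually identical, so the edit re-indents the buildScan parameter list without touching behavior. The four-argument overload still ignores broadcastedConf and delegates to the three-argument buildScan. A minimal, self-contained sketch of that overload-chaining pattern (simplified types, with String standing in for the real Filter/FileStatus/Broadcast/RDD signatures):

// Simplified sketch of the pattern used above: the richer overload delegates
// by default, so a data source only has to implement the simplest variant.
abstract class ScanRelation {
  // Subclasses must implement the basic variant.
  def buildScan(
      requiredColumns: Array[String],
      filters: Array[String],
      inputFiles: Array[String]): Iterator[String]

  // The richer variant ignores the extra broadcast configuration by default
  // and delegates, mirroring the method shown in the diff; a source that can
  // exploit the broadcast Hadoop configuration would override this one instead.
  def buildScan(
      requiredColumns: Array[String],
      filters: Array[String],
      inputFiles: Array[String],
      broadcastedConf: String): Iterator[String] =
    buildScan(requiredColumns, filters, inputFiles)
}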