
Commit e393555

Cheng's comments.
1 parent 2eb53bb commit e393555

2 files changed: +5, -5 lines changed


sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala

Lines changed: 1 addition & 1 deletion
@@ -120,7 +120,7 @@ private[sql] class ParquetRelation2(
     private val maybeDataSchema: Option[StructType],
     private val maybePartitionSpec: Option[PartitionSpec],
     parameters: Map[String, String])(
-    @transient val sqlContext: SQLContext)
+    val sqlContext: SQLContext)
   extends HadoopFsRelation(maybePartitionSpec)
   with Logging {
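
For context on what the removed @transient marker did: a @transient reference field is skipped by Java serialization and is restored as null, while an unmarked field round-trips with its value. A minimal standalone sketch of that behavior (not part of this commit; the Holder and TransientDemo names are made up for illustration):

import java.io._

// Standalone illustration (not Spark code): a @transient constructor val is skipped
// by Java serialization and comes back as null after a round trip, while a plain
// val keeps its value.
class Holder(@transient val note: String, val kept: String) extends Serializable {
  override def toString: String = s"Holder(note=$note, kept=$kept)"
}

object TransientDemo {
  def main(args: Array[String]): Unit = {
    val buffer = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(buffer)
    out.writeObject(new Holder("dropped", "kept"))
    out.close()

    val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
    println(in.readObject()) // prints: Holder(note=null, kept=kept)
  }
}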

sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala

Lines changed: 4 additions & 4 deletions
@@ -608,10 +608,10 @@ abstract class HadoopFsRelation private[sql](maybePartitionSpec: Option[Partitio
    * @since 1.4.0
    */
   def buildScan(
-    requiredColumns: Array[String],
-    filters: Array[Filter],
-    inputFiles: Array[FileStatus],
-    broadcastedConf: Broadcast[SerializableWritable[Configuration]]): RDD[Row] = {
+      requiredColumns: Array[String],
+      filters: Array[Filter],
+      inputFiles: Array[FileStatus],
+      broadcastedConf: Broadcast[SerializableWritable[Configuration]]): RDD[Row] = {
     buildScan(requiredColumns, filters, inputFiles)
   }
