
Commit c3ab36d

baishuoliancheng authored and committed
modify for some bad indentation
1 parent 7ce2d9f commit c3ab36d

File tree

1 file changed: +16 −19 lines changed


sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala

@@ -157,7 +157,7 @@ case class InsertIntoHiveTable(
       }
 
       var count = 0
-        // writer for Dynamic Partition
+      // writer for Dynamic Partition
       var writer2: SparkHiveHadoopWriter = null
       while(iter.hasNext) {
         val record = iter.next()
@@ -201,9 +201,6 @@
         }
         writerMap.clear()
       }
-
-
-
     }
     /*
      * e.g.
@@ -215,9 +212,9 @@
      * return: /part2=val2/part3=val3
      */
   private def getDynamicPartDir(partCols: Array[String],
-                 row: Row,
-                 dynamicPartNum: Int,
-                 defaultPartName: String): String = {
+      row: Row,
+      dynamicPartNum: Int,
+      defaultPartName: String): String = {
     assert(dynamicPartNum > 0)
     partCols
       .takeRight(dynamicPartNum)
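The doc comment above gives the intended mapping: the last dynamicPartNum entries of partCols become a /col=value path suffix. As a minimal, self-contained sketch of that behavior (the rowVals parameter and the inline null handling are simplifications for illustration; the real method takes a Row and delegates to handleNull below):

object DynamicPartDirSketch {
  // Sketch only: builds the dynamic-partition path suffix from the trailing
  // partition columns, substituting defaultPartName for null/empty values.
  def getDynamicPartDir(
      partCols: Array[String],
      rowVals: Seq[Any],        // stand-in for the Row used by the real method
      dynamicPartNum: Int,
      defaultPartName: String): String = {
    assert(dynamicPartNum > 0)
    partCols
      .takeRight(dynamicPartNum)                // dynamic partition columns only
      .zip(rowVals.takeRight(dynamicPartNum))   // pair each column with its value
      .map { case (col, v) =>
        val value = if (v == null || v.toString.isEmpty) defaultPartName else v.toString
        s"/$col=$value"
      }
      .mkString
  }
}

Called as DynamicPartDirSketch.getDynamicPartDir(Array("part1", "part2", "part3"), Seq("val1", "val2", "val3"), 2, "__HIVE_DEFAULT_PARTITION__"), this returns /part2=val2/part3=val3, matching the example in the comment.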
@@ -230,11 +227,11 @@
      * if rowVal is null or "",will return HiveConf.get(hive.exec.default.partition.name) with default
      * */
   private def handleNull(rowVal: Any, defaultPartName: String): String = {
-      if (rowVal == null ||String.valueOf(rowVal).length == 0) {
-        defaultPartName
-      } else {
-        String.valueOf(rowVal)
-      }
+    if (rowVal == null ||String.valueOf(rowVal).length == 0) {
+      defaultPartName
+    } else {
+      String.valueOf(rowVal)
+    }
   }
 
   override def execute() = result
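handleNull above implements the rule from its comment: a null or empty column value falls back to the configured default partition name (hive.exec.default.partition.name, whose stock value is __HIVE_DEFAULT_PARTITION__). A hedged, runnable illustration of that rule; the object name and test values are invented for the example:

object HandleNullSketch {
  // Same rule as the patched handleNull, written with toString to stay
  // self-contained; the null check short-circuits before toString is called.
  def handleNull(rowVal: Any, defaultPartName: String): String =
    if (rowVal == null || rowVal.toString.isEmpty) defaultPartName
    else rowVal.toString

  def main(args: Array[String]): Unit = {
    val default = "__HIVE_DEFAULT_PARTITION__"   // stock hive.exec.default.partition.name
    println(handleNull(null, default))    // prints __HIVE_DEFAULT_PARTITION__
    println(handleNull("", default))      // prints __HIVE_DEFAULT_PARTITION__
    println(handleNull("val2", default))  // prints val2
  }
}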
@@ -326,13 +323,13 @@
         serializer.serialize(outputData, standardOI) -> dynamicPartPath
       }
     }
-      saveAsHiveFile(
-        rdd,
-        outputClass,
-        fileSinkConf,
-        jobConfSer,
-        sc.hiveconf.getBoolean("hive.exec.compress.output", false),
-        dynamicPartNum)
+    saveAsHiveFile(
+      rdd,
+      outputClass,
+      fileSinkConf,
+      jobConfSer,
+      sc.hiveconf.getBoolean("hive.exec.compress.output", false),
+      dynamicPartNum)
 
     val outputPath = FileOutputFormat.getOutputPath(jobConf)
     // Have to construct the format of dbname.tablename.
