@@ -157,7 +157,7 @@ case class InsertIntoHiveTable(
       }

       var count = 0
-       // writer for Dynamic Partition
+      // writer for Dynamic Partition
       var writer2: SparkHiveHadoopWriter = null
       while (iter.hasNext) {
         val record = iter.next()
@@ -201,9 +201,6 @@ case class InsertIntoHiveTable(
         }
         writerMap.clear()
       }
-
-
-
   }
  /*
   * e.g.
@@ -215,9 +212,9 @@ case class InsertIntoHiveTable(
   * return: /part2=val2/part3=val3
   */
  private def getDynamicPartDir(partCols: Array[String],
-                                row: Row,
-                                dynamicPartNum: Int,
-                                defaultPartName: String): String = {
+      row: Row,
+      dynamicPartNum: Int,
+      defaultPartName: String): String = {
    assert(dynamicPartNum > 0)
    partCols
      .takeRight(dynamicPartNum)
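
The doc comment above pins down the intended behavior: with partCols = (part1, part2, part3) and two dynamic partition columns, the method returns /part2=val2/part3=val3. Below is a minimal standalone sketch of that logic; it is illustrative only, and swaps the patch's Row parameter for a plain Array[Any] of row values so it runs without Spark on the classpath. The assumption that dynamic partition values sit at the tail of the row mirrors the takeRight call in the hunk above.

object DynamicPartDirSketch {
  // Mirrors handleNull from the diff below: null/empty becomes the default partition name.
  private def handleNull(rowVal: Any, defaultPartName: String): String =
    if (rowVal == null || String.valueOf(rowVal).isEmpty) defaultPartName
    else String.valueOf(rowVal)

  def getDynamicPartDir(
      partCols: Array[String],
      rowVals: Array[Any],          // stand-in for the patch's Row
      dynamicPartNum: Int,
      defaultPartName: String): String = {
    assert(dynamicPartNum > 0)
    // Only the trailing dynamicPartNum partition columns are dynamic;
    // pair each with the corresponding value at the end of the row.
    partCols.takeRight(dynamicPartNum)
      .zip(rowVals.takeRight(dynamicPartNum))
      .map { case (col, value) => s"/$col=${handleNull(value, defaultPartName)}" }
      .mkString
  }

  def main(args: Array[String]): Unit = {
    val dir = getDynamicPartDir(
      Array("part1", "part2", "part3"),
      Array[Any]("val1", "val2", "val3"),
      dynamicPartNum = 2,
      defaultPartName = "__HIVE_DEFAULT_PARTITION__")
    println(dir) // /part2=val2/part3=val3
  }
}
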
@@ -230,11 +227,11 @@ case class InsertIntoHiveTable(
   * if rowVal is null or "", will return HiveConf.get(hive.exec.default.partition.name) with default
   * */
  private def handleNull(rowVal: Any, defaultPartName: String): String = {
-      if (rowVal == null || String.valueOf(rowVal).length == 0) {
-        defaultPartName
-      } else {
-        String.valueOf(rowVal)
-      }
+    if (rowVal == null || String.valueOf(rowVal).length == 0) {
+      defaultPartName
+    } else {
+      String.valueOf(rowVal)
+    }
  }

  override def execute() = result
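
One detail worth noting in handleNull: Java's String.valueOf(Object) returns the four-character string "null" for a null argument, so the explicit rowVal == null check is what routes nulls to the configured default partition name (hive.exec.default.partition.name, which Hive defaults to __HIVE_DEFAULT_PARTITION__) rather than producing a literal /col=null directory. Expected behavior, shown with the Hive default:

handleNull(null, "__HIVE_DEFAULT_PARTITION__")   // "__HIVE_DEFAULT_PARTITION__"
handleNull("", "__HIVE_DEFAULT_PARTITION__")     // "__HIVE_DEFAULT_PARTITION__"
handleNull(42, "__HIVE_DEFAULT_PARTITION__")     // "42"
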
@@ -326,13 +323,13 @@ case class InsertIntoHiveTable(
          serializer.serialize(outputData, standardOI) -> dynamicPartPath
        }
      }
-      saveAsHiveFile(
-        rdd,
-        outputClass,
-        fileSinkConf,
-        jobConfSer,
-        sc.hiveconf.getBoolean("hive.exec.compress.output", false),
-        dynamicPartNum)
+    saveAsHiveFile(
+      rdd,
+      outputClass,
+      fileSinkConf,
+      jobConfSer,
+      sc.hiveconf.getBoolean("hive.exec.compress.output", false),
+      dynamicPartNum)

    val outputPath = FileOutputFormat.getOutputPath(jobConf)
    // Have to construct the format of dbname.tablename.
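
After this change the rdd handed to saveAsHiveFile pairs each serialized row with its dynamicPartPath, and the extra dynamicPartNum argument signals whether dynamic-partition handling is needed at all. On the write side (the writer2/writerMap bookkeeping in the hunks at lines 157-204 above), each distinct path gets its own writer. A hedged sketch of that dispatch pattern, with PartWriter and openWriter as illustrative stand-ins for the patch's SparkHiveHadoopWriter plumbing:

import scala.collection.mutable

// Stand-in for SparkHiveHadoopWriter; only the calls the sketch needs.
trait PartWriter {
  def write(record: Any): Unit
  def close(): Unit
}

def writeWithDynamicPartitions(
    iter: Iterator[(Any, String)],  // (serialized row, dynamicPartPath)
    openWriter: String => PartWriter): Unit = {
  // One writer per dynamic partition directory, created lazily on first use.
  val writerMap = mutable.HashMap.empty[String, PartWriter]
  while (iter.hasNext) {
    val (record, dynamicPartPath) = iter.next()
    val writer = writerMap.getOrElseUpdate(dynamicPartPath, openWriter(dynamicPartPath))
    writer.write(record)
  }
  // Flush and release every per-partition writer, mirroring writerMap.clear().
  writerMap.values.foreach(_.close())
  writerMap.clear()
}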