@@ -261,7 +261,6 @@ private[hive] case class HiveGenericUdaf(
   // Initialize (reinitialize) the aggregation buffer
   override def reset(buf: MutableRow): Unit = {
     val buffer = evaluator.getNewAggregationBuffer
-      .asInstanceOf[GenericUDAFEvaluator.AbstractAggregationBuffer]
     evaluator.reset(buffer)
     // This is a hack, we never use the mutable row as buffer, but define our own buffer,
     // which is set as the first element of the buffer
@@ -276,27 +275,27 @@ private[hive] case class HiveGenericUdaf(
     }.toArray

     evaluator.iterate(
-      buf.getAs[GenericUDAFEvaluator.AbstractAggregationBuffer](bound.ordinal),
+      buf.getAs[GenericUDAFEvaluator.AggregationBuffer](bound.ordinal),
       args)
   }

   // Merge 2 aggregation buffer, and write back to the later one
   override def merge(value: Row, buf: MutableRow): Unit = {
-    val buffer = buf.getAs[GenericUDAFEvaluator.AbstractAggregationBuffer](bound.ordinal)
+    val buffer = buf.getAs[GenericUDAFEvaluator.AggregationBuffer](bound.ordinal)
     evaluator.merge(buffer, wrap(value.get(bound.ordinal), bufferObjectInspector))
   }

   @deprecated
   override def terminatePartial(buf: MutableRow): Unit = {
-    val buffer = buf.getAs[GenericUDAFEvaluator.AbstractAggregationBuffer](bound.ordinal)
+    val buffer = buf.getAs[GenericUDAFEvaluator.AggregationBuffer](bound.ordinal)
     // this is for serialization
     buf(bound) = unwrap(evaluator.terminatePartial(buffer), bufferObjectInspector)
   }

   // Output the final result by feeding the aggregation buffer
   override def terminate(input: Row): Any = {
     unwrap(evaluator.terminate(
-      input.getAs[GenericUDAFEvaluator.AbstractAggregationBuffer](bound.ordinal)),
+      input.getAs[GenericUDAFEvaluator.AggregationBuffer](bound.ordinal)),
       objectInspector)
   }
 }
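As a side note on why the patch widens the buffer type: GenericUDAFEvaluator.getNewAggregationBuffer() is declared to return the AggregationBuffer interface, and AbstractAggregationBuffer is only one possible implementation of it, so a UDAF whose buffer implements the interface directly would fail the old downcast. Below is a minimal sketch, assuming only the Hive GenericUDAFEvaluator API; the helper name newResetBuffer is illustrative and not part of the patch.

// Sketch only: shows the declared buffer type the patch relies on.
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer

def newResetBuffer(evaluator: GenericUDAFEvaluator): AggregationBuffer = {
  // getNewAggregationBuffer is typed as the AggregationBuffer interface.
  val buffer = evaluator.getNewAggregationBuffer
  // A cast like buffer.asInstanceOf[GenericUDAFEvaluator.AbstractAggregationBuffer]
  // would throw ClassCastException for evaluators whose buffer implements the
  // interface without extending the abstract class, hence the wider type here.
  evaluator.reset(buffer)
  buffer
}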