
Commit 0a827b3

Add comments and doc. Move some classes to the right places.

1 parent a19fea6

File tree: 5 files changed, +278 −225 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate2/aggregates.scala

Lines changed: 0 additions & 72 deletions
@@ -279,77 +279,5 @@ case class Average(child: Expression) extends AlgebraicAggregate {
   override def children: Seq[Expression] = child :: Nil
 }
 
-abstract class AggregationBuffer(
-    toCatalystConverters: Array[Any => Any],
-    toScalaConverters: Array[Any => Any],
-    bufferOffset: Int)
-  extends Row {
 
-  override def length: Int = toCatalystConverters.length
 
-  protected val offsets: Array[Int] = {
-    val newOffsets = new Array[Int](length)
-    var i = 0
-    while (i < newOffsets.length) {
-      newOffsets(i) = bufferOffset + i
-      i += 1
-    }
-    newOffsets
-  }
-}
-
-class MutableAggregationBuffer(
-    toCatalystConverters: Array[Any => Any],
-    toScalaConverters: Array[Any => Any],
-    bufferOffset: Int,
-    var underlyingBuffer: MutableRow)
-  extends AggregationBuffer(toCatalystConverters, toScalaConverters, bufferOffset) {
-
-  override def apply(i: Int): Any = {
-    if (i >= length || i < 0) {
-      throw new IllegalArgumentException(
-        s"Could not access ${i}th value in this buffer because it only has $length values.")
-    }
-    toScalaConverters(i)(underlyingBuffer(offsets(i)))
-  }
-
-  def update(i: Int, value: Any): Unit = {
-    if (i >= length || i < 0) {
-      throw new IllegalArgumentException(
-        s"Could not update ${i}th value in this buffer because it only has $length values.")
-    }
-    underlyingBuffer.update(offsets(i), toCatalystConverters(i)(value))
-  }
-
-  override def copy(): MutableAggregationBuffer = {
-    new MutableAggregationBuffer(
-      toCatalystConverters,
-      toScalaConverters,
-      bufferOffset,
-      underlyingBuffer)
-  }
-}
-
-class InputAggregationBuffer(
-    toCatalystConverters: Array[Any => Any],
-    toScalaConverters: Array[Any => Any],
-    bufferOffset: Int,
-    var underlyingInputBuffer: Row)
-  extends AggregationBuffer(toCatalystConverters, toScalaConverters, bufferOffset) {
-
-  override def apply(i: Int): Any = {
-    if (i >= length || i < 0) {
-      throw new IllegalArgumentException(
-        s"Could not access ${i}th value in this buffer because it only has $length values.")
-    }
-    toScalaConverters(i)(underlyingInputBuffer(offsets(i)))
-  }
-
-  override def copy(): InputAggregationBuffer = {
-    new InputAggregationBuffer(
-      toCatalystConverters,
-      toScalaConverters,
-      bufferOffset,
-      underlyingInputBuffer)
-  }
-}
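For readers skimming the diff: the deleted hierarchy implements a windowed view over a shared row. Below is a minimal, dependency-free sketch of that idea; OffsetBufferView and every name in it are illustrative stand-ins, not Spark API.

// A minimal, dependency-free sketch of the idea behind the deleted classes:
// an aggregation buffer is a window of `length` slots inside a larger shared
// row, starting at `bufferOffset`, with per-slot converter functions applied
// on every read (Catalyst -> Scala) and write (Scala -> Catalyst).
class OffsetBufferView(
    toStorage: Array[Any => Any],    // converters applied when writing a slot
    fromStorage: Array[Any => Any],  // converters applied when reading a slot
    bufferOffset: Int,
    underlying: Array[Any]) {        // stands in for the underlying MutableRow

  def length: Int = toStorage.length

  // Precompute each logical slot's absolute position, as the deleted
  // AggregationBuffer did with its `offsets` array.
  private val offsets: Array[Int] = Array.tabulate(length)(i => bufferOffset + i)

  def apply(i: Int): Any = {
    require(i >= 0 && i < length, s"slot $i out of range (0 until $length)")
    fromStorage(i)(underlying(offsets(i)))
  }

  def update(i: Int, value: Any): Unit = {
    require(i >= 0 && i < length, s"slot $i out of range (0 until $length)")
    underlying(offsets(i)) = toStorage(i)(value)
  }
}

object OffsetBufferViewDemo extends App {
  val id: Any => Any = x => x
  // A shared row: slot 0 holds a grouping key; the buffer owns slots 1 and 2.
  val row = Array[Any]("group-key", 0L, 0L)
  val view = new OffsetBufferView(Array(id, id), Array(id, id), bufferOffset = 1, row)
  view.update(0, 42L)  // writes row(1)
  println(view(0))     // reads row(1) back: 42
}

The offset indirection lets several buffers (for example, grouping keys plus multiple aggregate functions) share one flat row without copying, which is why apply and update translate every index before touching the underlying storage.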

sql/core/src/main/scala/org/apache/spark/sql/UDAFRegistration.scala

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import org.apache.spark.Logging
 import org.apache.spark.sql.catalyst.expressions.{Expression}
-import org.apache.spark.sql.execution.expressions.aggregate2.{ScalaUDAF, UserDefinedAggregateFunction}
+import org.apache.spark.sql.expressions.aggregate2.{ScalaUDAF, UserDefinedAggregateFunction}
 
 
 class UDAFRegistration private[sql] (sqlContext: SQLContext) extends Logging {
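The one-line change reflects the move of the public UDAF interface out of the sql.execution package. For context, a hedged sketch of implementing that interface: method names follow the API as eventually released in Spark 1.5 (org.apache.spark.sql.expressions.UserDefinedAggregateFunction), and importing MutableAggregationBuffer from the aggregate2 package is an assumption, so the in-flight version in this commit may differ in detail.

// Hedged sketch: a sum-of-longs UDAF against the interface this commit moves
// to org.apache.spark.sql.expressions.aggregate2. Method names follow the
// API as released in Spark 1.5; the MutableAggregationBuffer import location
// is an assumption for this in-flight commit.
import org.apache.spark.sql.Row
import org.apache.spark.sql.expressions.aggregate2.{MutableAggregationBuffer, UserDefinedAggregateFunction}
import org.apache.spark.sql.types._

class LongSum extends UserDefinedAggregateFunction {
  override def inputSchema: StructType = StructType(StructField("value", LongType) :: Nil)
  override def bufferSchema: StructType = StructType(StructField("sum", LongType) :: Nil)
  override def dataType: DataType = LongType
  override def deterministic: Boolean = true

  // buffer(0) = ... goes through MutableAggregationBuffer.update, i.e. the
  // offset-translating write shown in the deleted code above.
  override def initialize(buffer: MutableAggregationBuffer): Unit = buffer(0) = 0L

  override def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
    if (!input.isNullAt(0)) buffer(0) = buffer.getLong(0) + input.getLong(0)
  }

  override def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
    buffer1(0) = buffer1.getLong(0) + buffer2.getLong(0)
  }

  override def evaluate(buffer: Row): Any = buffer.getLong(0)
}

Registration would then go through the UDAFRegistration class above; the exact registration method is not shown in this diff, so it is omitted here.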

sql/core/src/main/scala/org/apache/spark/sql/execution/expressions/aggregate2/udaf.scala

Lines changed: 0 additions & 149 deletions
This file was deleted. Consistent with the import change above, its classes (ScalaUDAF and UserDefinedAggregateFunction) now live under org.apache.spark.sql.expressions.aggregate2 rather than org.apache.spark.sql.execution.expressions.aggregate2.
