Skip to content

Commit 6edb5ac

Browse files
committed
Format update.
1 parent 70b169c commit 6edb5ac

File tree

3 files changed: +7 −9 lines changed

3 files changed: +7 −9 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate2/aggregates.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ abstract class AggregateFunction2
110110
def bufferSchema: StructType
111111

112112
/** Attributes of fields in bufferSchema. */
113-
def bufferAttributes: Seq[Attribute]
113+
def bufferAttributes: Seq[AttributeReference]
114114

115115
/** Clones bufferAttributes. */
116116
def cloneBufferAttributes: Seq[Attribute]
@@ -147,7 +147,7 @@ case class MyDoubleSum(child: Expression) extends AggregateFunction2 {
147147
override val bufferSchema: StructType =
148148
StructType(StructField("currentSum", DoubleType, true) :: Nil)
149149

150-
override val bufferAttributes: Seq[Attribute] = bufferSchema.toAttributes
150+
override val bufferAttributes: Seq[AttributeReference] = bufferSchema.toAttributes
151151

152152
override lazy val cloneBufferAttributes = bufferAttributes.map(_.newInstance())
153153

@@ -205,8 +205,8 @@ abstract class AlgebraicAggregate extends AggregateFunction2 with Serializable {
205205
override lazy val cloneBufferAttributes = bufferAttributes.map(_.newInstance())
206206

207207
implicit class RichAttribute(a: AttributeReference) {
208-
def left = a
209-
def right = cloneBufferAttributes(bufferAttributes.indexOf(a))
208+
def left: AttributeReference = a
209+
def right: AttributeReference = cloneBufferAttributes(bufferAttributes.indexOf(a))
210210
}
211211

212212
/** An AlgebraicAggregate's bufferSchema is derived from bufferAttributes. */
@@ -277,4 +277,4 @@ case class Average(child: Expression) extends AlgebraicAggregate {
277277
override def nullable: Boolean = true
278278
override def dataType: DataType = resultType
279279
override def children: Seq[Expression] = child :: Nil
280-
}
280+
}

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,6 @@ import java.beans.Introspector
2121
import java.util.Properties
2222
import java.util.concurrent.atomic.AtomicReference
2323

24-
import org.apache.spark.sql.execution.aggregate2.{CheckAggregateFunction, ConvertAggregateFunction}
25-
2624
import scala.collection.JavaConversions._
2725
import scala.collection.immutable
2826
import scala.language.implicitConversions
@@ -42,6 +40,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan}
4240
import org.apache.spark.sql.catalyst.rules.RuleExecutor
4341
import org.apache.spark.sql.catalyst.{InternalRow, ParserDialect, _}
4442
import org.apache.spark.sql.execution.{Filter, _}
43+
import org.apache.spark.sql.execution.aggregate2.{CheckAggregateFunction, ConvertAggregateFunction}
4544
import org.apache.spark.sql.sources._
4645
import org.apache.spark.sql.types._
4746
import org.apache.spark.unsafe.types.UTF8String

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,6 @@ import java.io.File
2121
import java.net.{URL, URLClassLoader}
2222
import java.sql.Timestamp
2323

24-
import org.apache.spark.sql.execution.aggregate2.{CheckAggregateFunction, ConvertAggregateFunction}
25-
2624
import scala.collection.JavaConversions._
2725
import scala.collection.mutable.HashMap
2826
import scala.language.implicitConversions
@@ -46,6 +44,7 @@ import org.apache.spark.sql.catalyst.ParserDialect
4644
import org.apache.spark.sql.catalyst.analysis._
4745
import org.apache.spark.sql.catalyst.plans.logical._
4846
import org.apache.spark.sql.execution.{ExecutedCommand, ExtractPythonUDFs, SetCommand}
47+
import org.apache.spark.sql.execution.aggregate2.{CheckAggregateFunction, ConvertAggregateFunction}
4948
import org.apache.spark.sql.hive.client._
5049
import org.apache.spark.sql.hive.execution.{DescribeHiveTableCommand, HiveNativeCommand}
5150
import org.apache.spark.sql.sources.DataSourceStrategy

0 commit comments

Comments (0)