sql/core/src/main/scala/org/apache/spark/sql — 1 file changed, +3 −5 lines
@@ -26,7 +26,7 @@ import scala.util.Try
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.sql.catalyst.{SqlParser, ScalaReflection}
 import org.apache.spark.sql.catalyst.analysis.{UnresolvedFunction, Star}
-import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, Encoder}
+import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.aggregate._
 import org.apache.spark.sql.catalyst.plans.logical.BroadcastHint
@@ -83,9 +83,6 @@ object functions extends LegacyFunctions {
     Column(func.toAggregateExpression(isDistinct))
   }
 
-  private implicit def newLongEncoder: Encoder[Long] = ExpressionEncoder[Long](flat = true)
-
-
   /**
    * Returns a [[Column]] based on the given column name.
    *
@@ -269,7 +266,8 @@ object functions extends LegacyFunctions {
    * @group agg_funcs
    * @since 1.3.0
    */
-  def count(columnName: String): TypedColumn[Any, Long] = count(Column(columnName)).as[Long]
+  def count(columnName: String): TypedColumn[Any, Long] =
+    count(Column(columnName)).as(ExpressionEncoder[Long](flat = true))
 
   /**
    * Aggregate function: returns the number of distinct items in a group.
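
In short, the patch drops the private implicit `Encoder[Long]` that existed only so the `.as[Long]` call inside `functions.count(columnName)` could compile, and instead passes the encoder explicitly via `.as(ExpressionEncoder[Long](flat = true))`, so the implicit no longer sits in scope for the whole `functions` object. The sketch below is a minimal, Spark-free illustration of that pattern; `Encoder`, `Column`, and `TypedColumn` here are simplified stand-ins for the real Spark types, not their actual definitions.

// Sketch of the "implicit in scope" vs. "explicit argument" pattern this patch applies.
trait Encoder[T] { def describe: String }

final case class TypedColumn[T](name: String, encoderDescription: String)

final case class Column(name: String) {
  // Explicit-argument form, analogous to `.as(ExpressionEncoder[Long](flat = true))`.
  def as[T](encoder: Encoder[T]): TypedColumn[T] = TypedColumn(name, encoder.describe)
  // Implicit form, analogous to `.as[Long]`; it only compiles if an Encoder[T] is in scope.
  def asImplicit[T](implicit encoder: Encoder[T]): TypedColumn[T] = TypedColumn(name, encoder.describe)
}

object EncoderSketch extends App {
  val longEncoder: Encoder[Long] = new Encoder[Long] { def describe = "flat Long encoder" }

  // Before the patch: a private implicit in scope let the implicit form resolve.
  locally {
    implicit val e: Encoder[Long] = longEncoder
    println(Column("count(x)").asImplicit[Long])
  }

  // After the patch: the encoder is constructed and passed explicitly at the call site.
  println(Column("count(x)").as(longEncoder))
}

Passing the encoder explicitly keeps the dependency visible at the single call site that needs it, rather than leaving an implicit whose resolution is invisible wherever `.as[Long]` happens to be written.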