
Commit dc2fba1

Fix compiler warning
1 parent 8d511ac commit dc2fba1

File tree

5 files changed: +15 additions, -16 deletions

sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/functions/ReducibleFunction.java

Lines changed: 2 additions & 1 deletion
@@ -37,5 +37,6 @@ public interface ReducibleFunction<T, A> extends ScalarFunction<T> {
   * @param otherArgument argument for other function instance
   * @return a reduction function if it is reducible, none if not
   */
-  Option<Reducer<A>> reducer(ReducibleFunction<?, ?> other, Option<?> thisArgument, Option<?> otherArgument);
+  Option<Reducer<A>> reducer(ReducibleFunction<?, ?> other, Option<?> thisArgument,
+      Option<?> otherArgument);
 }
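
To make the reformatted signature concrete, here is a minimal, hypothetical implementation sketch — not code from this commit or from Spark itself — assuming Reducer[A] exposes a single reduce(A): A method, consistent with the reducer.reduce(v) call in partitioning.scala below:

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.connector.catalog.functions.{Reducer, ReducibleFunction}
import org.apache.spark.sql.types.{DataType, IntegerType}

// Hypothetical reducer: coarsens a bucket id from a finer bucketing onto
// a coarser one by taking the value modulo the smaller bucket count.
case class ModReducer(divisor: Int) extends Reducer[Any] {
  override def reduce(value: Any): Any = value.asInstanceOf[Int] % divisor
}

// Hypothetical function: bucket(value, numBuckets) = value % numBuckets.
class BucketByModulo extends ReducibleFunction[Int, Any] {
  override def name(): String = "bucket_by_modulo"
  override def inputTypes(): Array[DataType] = Array(IntegerType, IntegerType)
  override def resultType(): DataType = IntegerType
  override def produceResult(input: InternalRow): Int =
    input.getInt(0) % input.getInt(1)

  // Reducible only against another BucketByModulo whose bucket count
  // evenly divides ours: bucket(8, col) reduces onto bucket(4, col).
  override def reducer(
      other: ReducibleFunction[_, _],
      thisArgument: Option[_],
      otherArgument: Option[_]): Option[Reducer[Any]] =
    (other, thisArgument, otherArgument) match {
      case (_: BucketByModulo, Some(thisBuckets: Int), Some(otherBuckets: Int))
          if thisBuckets % otherBuckets == 0 =>
        Some(ModReducer(otherBuckets))
      case _ => None
    }
}

With two such functions bound as bucket(8, col) and bucket(4, col), reducer would return Some in one direction and None in the other, which is exactly the asymmetry the reducer.isDefined || otherReducer.isDefined check in TransformExpression.scala below accounts for.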

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TransformExpression.scala

Lines changed: 1 addition & 2 deletions
@@ -69,8 +69,7 @@ case class TransformExpression(
       true
     } else {
       (function, other.function) match {
-        case (f: ReducibleFunction[Any, Any] @unchecked,
-            o: ReducibleFunction[Any, Any] @unchecked) =>
+        case (f: ReducibleFunction[_, _], o: ReducibleFunction[_, _]) =>
           val reducer = f.reducer(o, numBucketsOpt, other.numBucketsOpt)
           val otherReducer = o.reducer(f, other.numBucketsOpt, numBucketsOpt)
           reducer.isDefined || otherReducer.isDefined
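
The warning this commit removes is scalac's unchecked-type-pattern warning: the JVM erases type arguments, so a pattern like ReducibleFunction[Any, Any] can only ever test the class, and the @unchecked annotations merely silenced that fact. The wildcard form claims no more than erasure can verify. A standalone illustration, unrelated to Spark:

// scalac warns: "non-variable type argument Int in type pattern Seq[Int]
// is unchecked since it is eliminated by erasure" - only the Seq class
// is actually tested at runtime.
def sumIfInts(x: Any): Int = x match {
  case xs: Seq[Int] => xs.sum
  case _            => 0
}

// Warning-free: the wildcard asserts only "some Seq", which the JVM
// can genuinely check.
def sizeIfSeq(x: Any): Int = x match {
  case xs: Seq[_] => xs.length
  case _          => 0
}

Since both type parameters were erased anyway, switching from Any with @unchecked to wildcards changes no runtime behavior; it only makes the pattern state what is checkable.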

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala

Lines changed: 9 additions & 10 deletions
@@ -651,7 +651,7 @@ trait ShuffleSpec {
    * Returning none also indicates that none of the partition expressions can be reduced on the
    * corresponding expression on the other shuffle spec.
    */
-  def reducers(spec: ShuffleSpec): Option[Seq[Option[Reducer[Any]]]] = None
+  def reducers(spec: ShuffleSpec): Option[Seq[Option[Reducer[_]]]] = None
 }

 case object SinglePartitionShuffleSpec extends ShuffleSpec {
@@ -854,17 +854,16 @@ case class KeyGroupedShuffleSpec(
     KeyGroupedPartitioning(clustering, partitioning.numPartitions, partitioning.partitionValues)
   }

-  override def reducers(other: ShuffleSpec): Option[Seq[Option[Reducer[Any]]]] = {
+  override def reducers(other: ShuffleSpec): Option[Seq[Option[Reducer[_]]]] = {
     other match {
       case otherSpec: KeyGroupedShuffleSpec =>
         val results = partitioning.expressions.zip(otherSpec.partitioning.expressions).map {
           case (e1: TransformExpression, e2: TransformExpression)
-            if e1.function.isInstanceOf[ReducibleFunction[Any, Any]@unchecked]
-              && e2.function.isInstanceOf[ReducibleFunction[Any, Any]@unchecked] =>
-            e1.function.asInstanceOf[ReducibleFunction[Any, Any]].reducer(
-              e2.function.asInstanceOf[ReducibleFunction[Any, Any]],
-              e1.numBucketsOpt.map(a => a.asInstanceOf[Any]),
-              e2.numBucketsOpt.map(a => a.asInstanceOf[Any]))
+            if e1.function.isInstanceOf[ReducibleFunction[_, _]]
+              && e2.function.isInstanceOf[ReducibleFunction[_, _]] =>
+            e1.function.asInstanceOf[ReducibleFunction[_, _]].reducer(
+              e2.function.asInstanceOf[ReducibleFunction[_, _]],
+              e1.numBucketsOpt, e2.numBucketsOpt)
           case (_, _) => None
         }

@@ -892,11 +891,11 @@ case class KeyGroupedShuffleSpec(
 object KeyGroupedShuffleSpec {
   def reducePartitionValue(row: InternalRow,
       expressions: Seq[Expression],
-      reducers: Seq[Option[Reducer[Any]]]):
+      reducers: Seq[Option[Reducer[_]]]):
     InternalRowComparableWrapper = {
     val partitionVals = row.toSeq(expressions.map(_.dataType))
     val reducedRow = partitionVals.zip(reducers).map{
-      case (v, Some(reducer)) => reducer.reduce(v)
+      case (v, Some(reducer: Reducer[Any])) => reducer.reduce(v)
       case (v, _) => v
     }.toArray
     InternalRowComparableWrapper(new GenericInternalRow(reducedRow), expressions)
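
Note the one place a typed pattern survives: reducers is now Seq[Option[Reducer[_]]], so the match recovers a Reducer[Any] before calling reduce(v) on the untyped partition value. The per-column reduction is easy to see standalone; the values and the modulo reducer below are hypothetical, not this file's code:

import org.apache.spark.sql.connector.catalog.functions.Reducer

// Hypothetical reducer coarsening bucket ids from 8 buckets down to 4.
val mod4: Reducer[Any] = new Reducer[Any] {
  override def reduce(value: Any): Any = value.asInstanceOf[Int] % 4
}

// One partition value per key column; only the first column is reducible.
val partitionVals: Seq[Any] = Seq(7, "east")
val reducers: Seq[Option[Reducer[_]]] = Seq(Some(mod4), None)

val reduced = partitionVals.zip(reducers).map {
  case (v, Some(r: Reducer[Any])) => r.reduce(v) // 7 % 4 == 3
  case (v, _)                     => v           // "east" passes through
}
// reduced == Seq(3, "east")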

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/BatchScanExec.scala

Lines changed: 1 addition & 1 deletion
@@ -272,7 +272,7 @@ case class StoragePartitionJoinParams(
     keyGroupedPartitioning: Option[Seq[Expression]] = None,
     joinKeyPositions: Option[Seq[Int]] = None,
     commonPartitionValues: Option[Seq[(InternalRow, Int)]] = None,
-    reducers: Option[Seq[Option[Reducer[Any]]]] = None,
+    reducers: Option[Seq[Option[Reducer[_]]]] = None,
     applyPartialClustering: Boolean = false,
     replicatePartitions: Boolean = false) {
   override def equals(other: Any): Boolean = other match {

sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/EnsureRequirements.scala

Lines changed: 2 additions & 2 deletions
@@ -550,7 +550,7 @@ case class EnsureRequirements(
       plan: SparkPlan,
       values: Seq[(InternalRow, Int)],
       joinKeyPositions: Option[Seq[Int]],
-      reducers: Option[Seq[Option[Reducer[Any]]]],
+      reducers: Option[Seq[Option[Reducer[_]]]],
       applyPartialClustering: Boolean,
       replicatePartitions: Boolean): SparkPlan = plan match {
     case scan: BatchScanExec =>
@@ -570,7 +570,7 @@ case class EnsureRequirements(

   private def reduceCommonPartValues(commonPartValues: Seq[(InternalRow, Int)],
       expressions: Seq[Expression],
-      reducers: Option[Seq[Option[Reducer[Any]]]]) = {
+      reducers: Option[Seq[Option[Reducer[_]]]]) = {
     reducers match {
       case Some(reducers) => commonPartValues.groupBy { case (row, _) =>
         KeyGroupedShuffleSpec.reducePartitionValue(row, expressions, reducers)
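
reduceCommonPartValues groups the common partition values by their reduced key, merging partitions that collapse to the same value. The shape of that merge, sketched with plain Ints standing in for InternalRow and InternalRowComparableWrapper (hypothetical data; summing the group's row counts is an assumption about the elided code after the groupBy):

// (bucketId, rowCount) pairs under a hypothetical 8-bucket layout.
val commonPartValues: Seq[(Int, Int)] = Seq((0, 10), (4, 5), (1, 7), (5, 3))

// Reducing modulo 4 merges buckets {0, 4} and {1, 5}.
val merged = commonPartValues
  .groupBy { case (bucket, _) => bucket % 4 }
  .map { case (bucket, group) => bucket -> group.map(_._2).sum }
// merged == Map(0 -> 15, 1 -> 10)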
