
Commit 4513d13

Use Serializer2 in more places.
1 parent e43803b

1 file changed: 0 additions, 12 deletions


sql/core/src/main/scala/org/apache/spark/sql/execution/Exchange.scala

Lines changed: 0 additions & 12 deletions
@@ -85,17 +85,6 @@ case class Exchange(
       keySchema: Array[DataType],
       valueSchema: Array[DataType],
       numPartitions: Int): Serializer = {
-    // In ExternalSorter's spillToMergeableFile function, key-value pairs are written out
-    // through write(key) and then write(value) instead of write((key, value)). Because
-    // SparkSqlSerializer2 assumes that objects passed in are Product2, we cannot safely use
-    // it when spillToMergeableFile in ExternalSorter will be used.
-    // So, we will not use SparkSqlSerializer2 when
-    //  - Sort-based shuffle is enabled and the number of reducers (numPartitions) is greater
-    //    then the bypassMergeThreshold; or
-    //  - newOrdering is defined.
-    val cannotUseSqlSerializer2 =
-      (sortBasedShuffleOn && numPartitions > bypassMergeThreshold) || newOrdering.nonEmpty
-
     // It is true when there is no field that needs to be write out.
     // For now, we will not use SparkSqlSerializer2 when noField is true.
     val noField =
@@ -104,7 +93,6 @@ case class Exchange(
 
     val useSqlSerializer2 =
       child.sqlContext.conf.useSqlSerializer2 && // SparkSqlSerializer2 is enabled.
-      !cannotUseSqlSerializer2 && // Safe to use Serializer2.
      SparkSqlSerializer2.support(keySchema) && // The schema of key is supported.
      SparkSqlSerializer2.support(valueSchema) && // The schema of value is supported.
      !noField
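
For readers following the selection logic, the sketch below restates the predicate as it stands after this commit: the sort-based-shuffle/newOrdering guard (!cannotUseSqlSerializer2) is gone, so Serializer2 is chosen whenever it is enabled, both schemas are supported, and there is at least one field to write out. This is a minimal, self-contained approximation, not the real Exchange.scala code: the DataType and SparkSqlSerializer2 stubs and the noField check (whose definition falls outside the context lines shown above) are assumptions.

// Sketch only: stubbed stand-ins for Spark SQL types; not the actual Exchange.scala code.
object Serializer2ChoiceSketch {
  sealed trait DataType                       // stand-in for org.apache.spark.sql.types.DataType
  case object IntType extends DataType
  case object StringType extends DataType

  object SparkSqlSerializer2 {
    // Stand-in for SparkSqlSerializer2.support; assume every stubbed type is supported.
    def support(schema: Array[DataType]): Boolean = true
  }

  // The condition left in place by this commit: Serializer2 is used whenever it is
  // enabled, both schemas are supported, and there is at least one field to write out.
  def useSqlSerializer2(
      serializer2Enabled: Boolean,            // stands in for child.sqlContext.conf.useSqlSerializer2
      keySchema: Array[DataType],
      valueSchema: Array[DataType]): Boolean = {
    // Assumption: "noField" is true when neither schema has any field; its real
    // definition is not visible in the hunk above.
    val noField = keySchema.isEmpty && valueSchema.isEmpty
    serializer2Enabled &&
      SparkSqlSerializer2.support(keySchema) &&
      SparkSqlSerializer2.support(valueSchema) &&
      !noField
  }

  def main(args: Array[String]): Unit = {
    println(useSqlSerializer2(true, Array(IntType), Array(StringType)))              // true
    println(useSqlSerializer2(true, Array.empty[DataType], Array.empty[DataType]))   // false (noField)
  }
}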
