2 files changed, +5 −6 lines

core/src/main/scala/org/apache/spark/rdd

@@ -1064,9 +1064,8 @@ abstract class RDD[T: ClassTag](
       // greater than totalParts because we actually cap it at totalParts in runJob.
       var numPartsToTry = 1
       if (partsScanned > 0) {
-        // If we didn't find any rows after the first iteration, just try all partitions next.
-        // Otherwise, interpolate the number of partitions we need to try, but overestimate it
-        // by 50%.
+        // If we didn't find any rows after the previous iteration, double and retry. Otherwise,
+        // interpolate the number of partitions we need to try, but overestimate it by 50%.
         if (buf.size == 0) {
           numPartsToTry = partsScanned * 2
         } else {
@@ -1031,9 +1031,9 @@ def take(self, num):
             # we actually cap it at totalParts in runJob.
             numPartsToTry = 1
             if partsScanned > 0:
-                # If we didn't find any rows after the first iteration, just
-                # try all partitions next. Otherwise, interpolate the number
-                # of partitions we need to try, but overestimate it by 50%.
+                # If we didn't find any rows after the previous iteration,
+                # double and retry. Otherwise, interpolate the number of
+                # partitions we need to try, but overestimate it by 50%.
                 if len(items) == 0:
                     numPartsToTry = partsScanned * 2
                 else:
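Both hunks change only the comments so they match what the surrounding code already does: when the previous scan returned no rows, the number of partitions to scan is doubled; otherwise it is interpolated from the rows found so far and overestimated by 50%. Below is a minimal standalone sketch of that growth rule for reference. The object and method names (TakeGrowthSketch, estimateNumPartsToTry) and the exact interpolation formula are assumptions chosen to match the comments, not code taken from Spark.

object TakeGrowthSketch {
  // Hypothetical helper mirroring the growth rule described in the diff's comments.
  // partsScanned: partitions scanned so far; rowsFound: rows collected so far;
  // rowsWanted: the `num` argument to take().
  def estimateNumPartsToTry(partsScanned: Int, rowsFound: Int, rowsWanted: Int): Int = {
    var numPartsToTry = 1
    if (partsScanned > 0) {
      if (rowsFound == 0) {
        // No rows after the previous iteration: double and retry.
        numPartsToTry = partsScanned * 2
      } else {
        // Interpolate how many partitions should hold rowsWanted rows,
        // then overestimate by 50% (assumed formula consistent with the comment).
        numPartsToTry = (1.5 * rowsWanted * partsScanned / rowsFound).toInt
      }
    }
    numPartsToTry
  }

  def main(args: Array[String]): Unit = {
    println(estimateNumPartsToTry(partsScanned = 4, rowsFound = 0, rowsWanted = 10)) // 8
    println(estimateNumPartsToTry(partsScanned = 4, rowsFound = 5, rowsWanted = 10)) // 12
  }
}

The design point of the new wording: the code grows the scan geometrically (doubling) when nothing has been found yet, rather than jumping straight to all partitions as the old comment claimed.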