
Commit f0a02fd

WIP buildLocationMetadata-show-remaining-number-of-paths
1 parent 44dcf00 commit f0a02fd

3 files changed: 12 additions, 9 deletions


core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 3 additions & 0 deletions
@@ -2989,6 +2989,9 @@ private[spark] object Utils extends Logging {
       metadata.append(paths(index).toString)
       index += 1
     }
+    if (paths.length > index) {
+      metadata.append(s", ... ${paths.length - index} more")
+    }
     metadata.append("]")
     metadata.toString
   }
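
For context, here is a minimal, self-contained sketch of the truncation behavior this hunk adds. The loop surrounding the changed lines is reconstructed from the test expectations below, and plain strings stand in for org.apache.hadoop.fs.Path so the snippet runs without Hadoop on the classpath; it is an illustration of the idea, not the exact Spark method.

object LocationMetadataSketch {
  // Mirrors the shape of Utils.buildLocationMetadata: append paths until the
  // accumulated string reaches the threshold, then (new in this commit)
  // report how many paths were left out instead of truncating silently.
  def buildLocationMetadata(paths: Seq[String], stopAppendingThreshold: Int): String = {
    val metadata = new StringBuilder("[")
    var index = 0
    while (index < paths.length && metadata.length < stopAppendingThreshold) {
      if (index > 0) {
        metadata.append(", ")
      }
      metadata.append(paths(index))
      index += 1
    }
    if (paths.length > index) {
      metadata.append(s", ... ${paths.length - index} more")
    }
    metadata.append("]")
    metadata.toString
  }

  def main(args: Array[String]): Unit = {
    val paths = (0 to 4).map(i => s"path$i")
    println(buildLocationMetadata(paths, 10)) // prints: [path0, path1, ... 3 more]
  }
}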

core/src/test/scala/org/apache/spark/util/UtilsSuite.scala

Lines changed: 6 additions & 6 deletions
@@ -1304,16 +1304,16 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {

   test("pathsToMetadata") {
     val paths = (0 to 4).map(i => new Path(s"path$i"))
-    assert(Utils.buildLocationMetadata(paths, 5) == "[path0]")
-    assert(Utils.buildLocationMetadata(paths, 10) == "[path0, path1]")
-    assert(Utils.buildLocationMetadata(paths, 15) == "[path0, path1, path2]")
-    assert(Utils.buildLocationMetadata(paths, 25) == "[path0, path1, path2, path3]")
+    assert(Utils.buildLocationMetadata(paths, 5) == "[path0, ... 4 more]")
+    assert(Utils.buildLocationMetadata(paths, 10) == "[path0, path1, ... 3 more]")
+    assert(Utils.buildLocationMetadata(paths, 15) == "[path0, path1, path2, ... 2 more]")
+    assert(Utils.buildLocationMetadata(paths, 25) == "[path0, path1, path2, path3, ... 1 more]")

     // edge-case: we should consider the fact non-path chars including '[' and ", " are accounted
     // 1. second path is not added due to the addition of '['
-    assert(Utils.buildLocationMetadata(paths, 6) == "[path0]")
+    assert(Utils.buildLocationMetadata(paths, 6) == "[path0, ... 4 more]")
     // 2. third path is not added due to the addition of ", "
-    assert(Utils.buildLocationMetadata(paths, 13) == "[path0, path1]")
+    assert(Utils.buildLocationMetadata(paths, 13) == "[path0, path1, ... 3 more]")
   }

   test("checkHost supports both IPV4 and IPV6") {

sql/core/src/test/scala/org/apache/spark/sql/execution/DataSourceScanExecRedactionSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -122,8 +122,6 @@ class DataSourceScanExecRedactionSuite extends DataSourceScanRedactionTest {

   test("SPARK-31793: FileSourceScanExec metadata should contain limited file paths") {
     withTempPath { path =>
-      val dir = path.getCanonicalPath
-
       // create a sub-directory with long name so that each root path will always exceed the limit
       // this is to ensure we always test the case for the path truncation
       val dataDirName = Random.alphanumeric.take(100).toList.mkString
@@ -155,7 +153,9 @@ class DataSourceScanExecRedactionSuite extends DataSourceScanRedactionTest {
         location.get.indexOf('[') + 1, location.get.indexOf(']')).split(", ").toSeq

       // the only one path should be available
-      assert(pathsInLocation.size == 1)
+      assert(pathsInLocation.size == 2)
+      // indicator ("... N more") should be available
+      assert(pathsInLocation.exists(_.contains("... ")))
     }
   }
 }
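
To see why the expected size goes from 1 to 2: the test slices the metadata string between '[' and ']' and splits on ", ", so the new ", ... N more" suffix surfaces as one extra element next to the single path that fits under the limit. A hedged illustration follows; the directory name and remaining count are invented for the example only.

// Made-up location string in the shape the test parses.
val location = "[/tmp/spark-test/0123456789abcdef, ... 3 more]"
val pathsInLocation = location.substring(
  location.indexOf('[') + 1, location.indexOf(']')).split(", ").toSeq
assert(pathsInLocation.size == 2)                  // the path plus the "... 3 more" marker
assert(pathsInLocation.exists(_.contains("... "))) // the indicator is present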

0 commit comments
