
Commit 85a5864

Address comments
1 parent efb99da commit 85a5864

3 files changed: +13 -10 lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala

Lines changed: 3 additions & 2 deletions
@@ -376,7 +376,8 @@ object LoadDataCommand {
    * @return qualified path object
    */
   private[sql] def makeQualified(defaultUri: URI, workingDir: Path, path: Path): Path = {
-    val pathUri = if (path.isAbsolute()) path.toUri() else new Path(workingDir, path).toUri()
+    val newPath = new Path(workingDir, path)
+    val pathUri = if (path.isAbsolute()) path.toUri() else newPath.toUri()
     if (pathUri.getScheme == null || pathUri.getAuthority == null &&
         defaultUri.getAuthority != null) {
       val scheme = if (pathUri.getScheme == null) defaultUri.getScheme else pathUri.getScheme
@@ -393,7 +394,7 @@ object LoadDataCommand {
           throw new IllegalArgumentException(e)
       }
     } else {
-      new Path(pathUri)
+      newPath
     }
   }
 }
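With this change, a relative path is resolved against the caller's working directory once, and the resolved newPath is returned directly in the else branch instead of being rebuilt from its URI. A minimal usage sketch on the local filesystem, mirroring the new HiveCommandSuite test below; it assumes it runs from code under the org.apache.spark.sql package (makeQualified is private[sql]), and kv1.txt is just an example file name:

import org.apache.hadoop.fs.{FileContext, FsConstants, Path}
import org.apache.spark.sql.execution.command.LoadDataCommand

// Resolve a relative path against the local filesystem's working directory.
val localFS = FileContext.getLocalFSFileContext()
val workingDir = localFS.getWorkingDirectory
val qualified = LoadDataCommand.makeQualified(
  FsConstants.LOCAL_FS_URI, workingDir, new Path("kv1.txt"))
// Expected: the working directory with the file name appended.
assert(qualified == new Path(s"$workingDir/kv1.txt"))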

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala

Lines changed: 9 additions & 0 deletions
@@ -20,11 +20,13 @@ package org.apache.spark.sql.hive.execution
 import java.io.File
 
 import com.google.common.io.Files
+import org.apache.hadoop.fs.{FileContext, FsConstants, Path}
 
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
+import org.apache.spark.sql.execution.command.LoadDataCommand
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.test.SQLTestUtils
 import org.apache.spark.sql.types.StructType
@@ -439,4 +441,11 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     }
   }
 
+  test("SPARK-25918: LOAD DATA LOCAL INPATH should handle a relative path") {
+    val localFS = FileContext.getLocalFSFileContext()
+    val workingDir = localFS.getWorkingDirectory
+    val r = LoadDataCommand.makeQualified(
+      FsConstants.LOCAL_FS_URI, workingDir, new Path("kv1.txt"))
+    assert(r === new Path(s"$workingDir/kv1.txt"))
+  }
 }

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala

Lines changed: 1 addition & 8 deletions
@@ -24,7 +24,7 @@ import java.sql.{Date, Timestamp}
 import java.util.{Locale, Set}
 
 import com.google.common.io.Files
-import org.apache.hadoop.fs.{FileContext, FileSystem, FsConstants, Path}
+import org.apache.hadoop.fs.{FileSystem, Path}
 
 import org.apache.spark.TestUtils
 import org.apache.spark.sql._
@@ -1987,13 +1987,6 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     }
   }
 
-  test("SPARK-25918: LOAD DATA LOCAL INPATH should handle a relative path") {
-    val localFS = FileContext.getLocalFSFileContext()
-    val r = LoadDataCommand.makeQualified(
-      FsConstants.LOCAL_FS_URI, localFS.getWorkingDirectory, new Path("kv1.txt"))
-    assert(r === new Path(s"${localFS.getWorkingDirectory}/kv1.txt"))
-  }
-
   test("SPARK-25738: defaultFs can have a port") {
     val defaultURI = new URI("hdfs://fizz.buzz.com:8020")
     val r = LoadDataCommand.makeQualified(defaultURI, new Path("/foo/bar"), new Path("/flim/flam"))
