12 changes: 2 additions & 10 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -23,7 +23,7 @@ import java.net._
 import java.nio.ByteBuffer
 import java.nio.channels.Channels
 import java.nio.charset.StandardCharsets
-import java.nio.file.Files
+import java.nio.file.{Files, Paths}
 import java.util.{Locale, Properties, Random, UUID}
 import java.util.concurrent._
 import java.util.concurrent.atomic.AtomicBoolean
@@ -1014,15 +1014,7 @@ private[spark] object Utils extends Logging {
    * Check to see if file is a symbolic link.
    */
   def isSymlink(file: File): Boolean = {
-    if (file == null) throw new NullPointerException("File must not be null")
-    if (isWindows) return false
-    val fileInCanonicalDir = if (file.getParent() == null) {
-      file
-    } else {
-      new File(file.getParentFile().getCanonicalFile(), file.getName())
-    }
-
-    !fileInCanonicalDir.getCanonicalFile().equals(fileInCanonicalDir.getAbsoluteFile())
+    return Files.isSymbolicLink(Paths.get(file.toURI))
   }
 
   /**
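The change above swaps a hand-rolled canonical-path comparison (which additionally hard-coded `false` on Windows) for NIO's `Files.isSymbolicLink`, which understands symlinks on every platform. Below is a standalone sketch, not part of the patch, contrasting the two checks; the object and method names are illustrative only:

```scala
import java.io.File
import java.nio.file.{Files, Paths}

object SymlinkSketch {
  // Old check: canonicalize the parent directory, then compare the canonical
  // file against the absolute file; a mismatch implies the last path element
  // is a symbolic link.
  def isSymlinkOld(file: File): Boolean = {
    val fileInCanonicalDir = if (file.getParent == null) {
      file
    } else {
      new File(file.getParentFile.getCanonicalFile, file.getName)
    }
    !fileInCanonicalDir.getCanonicalFile.equals(fileInCanonicalDir.getAbsoluteFile)
  }

  // New check: ask NIO directly.
  def isSymlinkNew(file: File): Boolean = Files.isSymbolicLink(Paths.get(file.toURI))

  def main(args: Array[String]): Unit = {
    val target = Files.createTempFile("target", ".txt")
    val link = target.resolveSibling("link-to-target")
    Files.createSymbolicLink(link, target) // may require extra privileges on Windows
    println(isSymlinkOld(link.toFile)) // true on POSIX; the old code forced false on Windows
    println(isSymlinkNew(link.toFile)) // true wherever the link could be created
    Files.delete(link)
    Files.delete(target)
  }
}
```

One side effect of the rewrite: a null `file` now fails with an NPE raised by `file.toURI` rather than by the explicit null check the old code carried.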
@@ -649,8 +649,13 @@ class SparkSubmitSuite
   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
   private def runSparkSubmit(args: Seq[String]): Unit = {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
+    val sparkSubmitFile = if (Utils.isWindows) {
+      new File("..\\bin\\spark-submit.cmd")
+    } else {
+      new File("../bin/spark-submit")
+    }
     val process = Utils.executeCommand(
-      Seq("./bin/spark-submit") ++ args,
+      Seq(sparkSubmitFile.getCanonicalPath) ++ args,
       new File(sparkHome),
       Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
 
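The test used to hard-code the POSIX launcher; it now picks `spark-submit.cmd` on Windows and canonicalizes the path before passing it to `Utils.executeCommand`. Canonicalizing matters because the relative `../bin` path is resolved against the JVM's current directory at call time, while the child process itself runs in `sparkHome`. A hedged, self-contained sketch of the same selection pattern, with a local `isWindows` standing in for `Utils.isWindows`:

```scala
import java.io.File
import java.util.Locale

object LauncherPathSketch {
  // Stand-in for Utils.isWindows, using the usual os.name heuristic.
  private val isWindows: Boolean =
    sys.props.getOrElse("os.name", "").toLowerCase(Locale.ROOT).contains("windows")

  // Mirror the test's relative "../bin" layout and resolve it to an absolute path.
  def sparkSubmitScript(): File = {
    val name = if (isWindows) "spark-submit.cmd" else "spark-submit"
    new File("..", "bin/" + name).getCanonicalFile
  }

  def main(args: Array[String]): Unit =
    println(sparkSubmitScript()) // absolute path, safe to run from any working dir
}
```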
@@ -126,6 +126,8 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
   }
 
   test("SPARK-3697: ignore directories that cannot be read.") {
+    // setReadable(...) does not work on Windows. Please refer to JDK-6728842.
+    assume(!Utils.isWindows)
     val logFile1 = newLogFile("new1", None, inProgress = false)
     writeFile(logFile1, true, None,
       SparkListenerApplicationStart("app1-1", Some("app1-1"), 1L, "test", None),
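The new guard exists because `java.io.File.setReadable(false)` is effectively a no-op on Windows (JDK-6728842), so the unreadable-directory precondition this test depends on can never be established there; `assume` makes ScalaTest cancel the test rather than fail it. A small standalone sketch of the platform difference (the comments describe typical, not guaranteed, behavior):

```scala
import java.nio.file.Files

object SetReadableSketch {
  def main(args: Array[String]): Unit = {
    val dir = Files.createTempDirectory("perm-demo").toFile
    // POSIX: clears the read bits and returns true.
    // Windows: typically returns false and the directory stays readable.
    val changed = dir.setReadable(false, false)
    println(s"setReadable returned $changed; canRead is now ${dir.canRead}")
    dir.setReadable(true, false) // restore permissions so cleanup works
    dir.delete()
  }
}
```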
@@ -22,10 +22,14 @@ import java.util.Arrays
 
 import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.util.io.ChunkedByteBuffer
+import org.apache.spark.util.Utils
 
 class DiskStoreSuite extends SparkFunSuite {
 
   test("reads of memory-mapped and non memory-mapped files are equivalent") {
+    // Re-opening the file store fails on Windows when the memory-mapped byte
+    // buffer for the file has not yet been garbage-collected.
+    assume(!Utils.isWindows)
     val confKey = "spark.storage.memoryMapThreshold"
 
     // Create a non-trivial (not all zeros) byte array
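Skipping this test on Windows reflects a JVM limitation rather than a Spark bug: a `MappedByteBuffer` keeps the underlying file open until the buffer is garbage-collected, and the JDK offers no supported way to unmap it eagerly, so Windows refuses to delete or re-open a file whose mapping is still live. A standalone sketch of the pitfall (the final delete typically succeeds on POSIX and fails on Windows):

```scala
import java.io.RandomAccessFile
import java.nio.channels.FileChannel
import java.nio.file.Files

object MmapLockSketch {
  def main(args: Array[String]): Unit = {
    val path = Files.createTempFile("mmap-demo", ".bin")
    val raf = new RandomAccessFile(path.toFile, "rw")
    raf.setLength(4096)
    val mapped = raf.getChannel.map(FileChannel.MapMode.READ_WRITE, 0, 4096)
    raf.close()             // closing the channel does NOT release the mapping
    mapped.put(0, 1.toByte) // the mapping is still usable after close
    // While `mapped` is reachable, Windows keeps the file locked.
    println(s"delete succeeded: ${path.toFile.delete()}")
  }
}
```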
@@ -26,7 +26,6 @@ import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.BucketSpec
 import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.execution.command.DDLUtils
 import org.apache.spark.sql.test.SharedSQLContext
 import org.apache.spark.util.Utils
 
@@ -83,6 +82,8 @@ class CreateTableAsSelectSuite
   }
 
   test("CREATE TABLE USING AS SELECT based on the file without write permission") {
+    // setWritable(...) does not work on Windows. Please refer to JDK-6728842.
+    assume(!Utils.isWindows)
     val childPath = new File(path.toString, "child")
     path.mkdir()
     path.setWritable(false)
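The same JDK-6728842 caveat applies to `setWritable(...)`, hence the guard here too. For completeness, a hedged sketch of how `assume` behaves; it extends ScalaTest's `AnyFunSuite` (the 3.x name) directly rather than Spark's `SparkFunSuite`:

```scala
import java.nio.file.Files
import java.util.Locale
import org.scalatest.funsuite.AnyFunSuite

class AssumeSketch extends AnyFunSuite {
  private val isWindows =
    sys.props.getOrElse("os.name", "").toLowerCase(Locale.ROOT).contains("windows")

  test("write-permission checks run only where permission bits work") {
    assume(!isWindows) // on Windows this cancels the test instead of failing it
    val file = Files.createTempFile("perm", ".txt").toFile
    assert(file.setWritable(false)) // returns false on Windows (JDK-6728842)
    assert(!file.canWrite)
    file.setWritable(true) // restore so the temp file can be deleted
    file.delete()
  }
}
```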