Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 2 additions & 10 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import java.net._
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.{Files, Paths}
import java.util.{Locale, Properties, Random, UUID}
import java.util.concurrent._
import javax.net.ssl.HttpsURLConnection
Expand Down Expand Up @@ -946,15 +946,7 @@ private[spark] object Utils extends Logging {
* Check to see if file is a symbolic link.
*/
/**
 * Check to see if file is a symbolic link.
 *
 * Delegates to `java.nio.file.Files.isSymbolicLink`, which handles platform
 * differences (including Windows) natively. A `null` file still fails fast:
 * `file.toURI` throws a NullPointerException, matching the previous explicit check.
 */
def isSymlink(file: File): Boolean = {
  // No explicit `return`: the last expression is the method's value in Scala.
  Files.isSymbolicLink(Paths.get(file.toURI))
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -575,8 +575,13 @@ class SparkSubmitSuite
// NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
private def runSparkSubmit(args: Seq[String]): Unit = {
val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
val sparkSubmitFile = if (Utils.isWindows) {
new File("..\\bin\\spark-submit.cmd")
} else {
new File("../bin/spark-submit")
}
val process = Utils.executeCommand(
Seq("./bin/spark-submit") ++ args,
Seq(sparkSubmitFile.getCanonicalPath) ++ args,
new File(sparkHome),
Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,8 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
}

test("SPARK-3697: ignore directories that cannot be read.") {
// setReadable doesn't work on Windows for directories
assume(!Utils.isWindows)
val logFile1 = newLogFile("new1", None, inProgress = false)
writeFile(logFile1, true, None,
SparkListenerApplicationStart("app1-1", Some("app1-1"), 1L, "test", None),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,14 @@ import java.util.Arrays

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.io.ChunkedByteBuffer
import org.apache.spark.util.Utils

class DiskStoreSuite extends SparkFunSuite {

test("reads of memory-mapped and non memory-mapped files are equivalent") {
// It will cause an error when we try to re-open the file store and the
// memory-mapped byte buffer to the file has not been GC'd on Windows.
assume(!Utils.isWindows)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why doesn't this one work?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It will cause an error when we try to re-open the file store and the memory-mapped byte buffer to the file has not been GC'd.

Ref: http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4724038
http://stackoverflow.com/questions/3602783/file-access-synchronized-on-java-object

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

(Please add this info as a comment then)

val confKey = "spark.storage.memoryMapThreshold"

// Create a non-trivial (not all zeros) byte array
Expand Down
4 changes: 3 additions & 1 deletion core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -686,7 +686,9 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
val stream = new java.io.PrintStream(buffer, true, "UTF-8")

// scalastyle:off println
stream.println("test circular test circular test circular test circular test circular")
// Note: println appends '\r\n' on Windows,
// so there would be one more byte compared to Unix/Linux.
stream.print("test circular test circular test circular test circular test circular\n")
// scalastyle:on println
assert(buffer.toString === "t circular test circular\n")
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,8 @@ class CreateTableAsSelectSuite extends DataSourceTest with SharedSQLContext with
}

test("CREATE TEMPORARY TABLE AS SELECT based on the file without write permission") {
// setWritable(boolean) doesn't work on Windows
assume(!Utils.isWindows)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why doesn't this one work?

Copy link
Contributor Author

@taoli91 taoli91 Apr 26, 2016

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The setWritable doesn't work on windows
Ref: http://bugs.java.com/bugdatabase/view_bug.do?bug_id=6728842

val childPath = new File(path.toString, "child")
path.mkdir()
childPath.createNewFile()
Expand Down