From 0dd55dbc1693d8e7f5657fe2e235b7315ee2067b Mon Sep 17 00:00:00 2001
From: "U-FAREAST\\tl"
Date: Mon, 25 Apr 2016 00:43:32 -0700
Subject: [PATCH 01/13] Select the correct spark-submit script, and skip a
 test case that cannot work on Windows in FsHistoryProviderSuite

---
 .../scala/org/apache/spark/deploy/SparkSubmitSuite.scala    | 7 ++++++-
 .../spark/deploy/history/FsHistoryProviderSuite.scala       | 2 ++
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 31c8fb26460df..1a9b28eec5710 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -649,8 +649,13 @@ class SparkSubmitSuite
   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
   private def runSparkSubmit(args: Seq[String]): Unit = {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
+    val sparkSubmit = if (Utils.isWindows) {
+      Seq(new File("..\\bin\\spark-submit.cmd").getAbsolutePath) ++ args
+    } else {
+      Seq("./bin/spark-submit") ++ args
+    }
     val process = Utils.executeCommand(
-      Seq("./bin/spark-submit") ++ args,
+      sparkSubmit,
       new File(sparkHome),
       Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
 
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 01bef0a11c124..a83e41a365aba 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -126,6 +126,8 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
   }
 
   test("SPARK-3697: ignore directories that cannot be read.") {
+    // setReadable doesn't work on windows for directories
+    assume(!Utils.isWindows)
     val logFile1 = newLogFile("new1", None, inProgress = false)
     writeFile(logFile1, true, None,
       SparkListenerApplicationStart("app1-1", Some("app1-1"), 1L, "test", None),

From d50b1b0adf2f9d3c8926775180280796c67bd632 Mon Sep 17 00:00:00 2001
From: Tao LI
Date: Mon, 25 Apr 2016 01:10:00 -0700
Subject: [PATCH 02/13] Fix the circular buffer test for Windows.

---
 core/src/test/scala/org/apache/spark/util/UtilsSuite.scala | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index bc28b2d9cb831..e61a0a3a3cf98 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -719,8 +719,9 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     val buffer = new CircularBuffer(25)
     val stream = new PrintStream(buffer, true, "UTF-8")
 
-    stream.print("test circular test circular test circular test circular test circular")
-    assert(buffer.toString === "st circular test circular")
+    // Note: println will append '\r\n' on Windows,
+    // so there will be one more byte compared to Unix/Linux.
+    stream.print("test circular test circular test circular test circular test circular\n")
   }
 
   test("nanSafeCompareDoubles") {
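The two patches above hinge on CircularBuffer keeping only the trailing bytes of whatever is written to it, so a two-byte "\r\n" line separator on Windows shifts the retained window by one byte compared to a one-byte "\n" on Unix. A minimal sketch of that effect (a simplified stand-in, not Spark's actual CircularBuffer class):

    object RingBufferSketch {
      // Keep only the last `capacity` bytes, as a fixed-size circular buffer does.
      def lastBytes(input: String, capacity: Int): String = input.takeRight(capacity)

      def main(args: Array[String]): Unit = {
        val text = "test circular test circular test circular test circular test circular"
        // Unix-style: println appends one byte ("\n").
        println(lastBytes(text + "\n", 25))
        // Windows-style: println appends two bytes ("\r\n"), shifting the window by one.
        println(lastBytes(text + "\r\n", 25))
      }
    }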
From cebc6884d392e36c4d727b6ecf61d96289d22c33 Mon Sep 17 00:00:00 2001
From: Tao LI
Date: Mon, 25 Apr 2016 01:25:12 -0700
Subject: [PATCH 03/13] Work around a Java bug on Windows.

---
 .../org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
index 344d4aa6cfea4..7fa2d73cf98d4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
@@ -83,6 +83,8 @@ class CreateTableAsSelectSuite
   }
 
   test("CREATE TABLE USING AS SELECT based on the file without write permission") {
+    // setWritable(boolean) doesn't work on Windows
+    assume(!Utils.isWindows)
     val childPath = new File(path.toString, "child")
     path.mkdir()
     path.setWritable(false)

From 247c9b69aad53ba1f6346d75b5241fe4fdcd71cb Mon Sep 17 00:00:00 2001
From: "U-FAREAST\\tl"
Date: Mon, 25 Apr 2016 04:22:33 -0700
Subject: [PATCH 04/13] Fix a Windows issue in DiskStoreSuite

---
 .../test/scala/org/apache/spark/storage/DiskStoreSuite.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
index 9ed5016510d56..df37a7953ced0 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
@@ -22,10 +22,12 @@ import java.util.Arrays
 
 import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.util.io.ChunkedByteBuffer
+import org.apache.spark.util.Utils
 
 class DiskStoreSuite extends SparkFunSuite {
 
   test("reads of memory-mapped and non memory-mapped files are equivalent") {
+    assume(!Utils.isWindows)
     val confKey = "spark.storage.memoryMapThreshold"
 
     // Create a non-trivial (not all zeros) byte array

From 47bf9163b2f225cb950d04c6a1b1d9c1ef859162 Mon Sep 17 00:00:00 2001
From: "U-FAREAST\\tl"
Date: Mon, 25 Apr 2016 19:12:07 -0700
Subject: [PATCH 05/13] Check for symlinks properly on Windows

---
 core/src/main/scala/org/apache/spark/util/Utils.scala | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index f3493bd96b1ee..27b589dc04919 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -23,7 +23,7 @@ import java.net._
 import java.nio.ByteBuffer
 import java.nio.channels.Channels
 import java.nio.charset.StandardCharsets
-import java.nio.file.Files
+import java.nio.file.{Files, Paths}
 import java.util.{Locale, Properties, Random, UUID}
 import java.util.concurrent._
 import java.util.concurrent.atomic.AtomicBoolean
@@ -37,7 +37,6 @@ import scala.io.Source
 import scala.reflect.ClassTag
 import scala.util.Try
 import scala.util.control.{ControlThrowable, NonFatal}
-
 import com.google.common.io.{ByteStreams, Files => GFiles}
 import com.google.common.net.InetAddresses
 import org.apache.commons.lang3.SystemUtils
@@ -48,7 +47,6 @@ import org.apache.log4j.PropertyConfigurator
 import org.eclipse.jetty.util.MultiException
 import org.json4s._
 import org.slf4j.Logger
-
 import org.apache.spark._
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.internal.Logging
@@ -1015,7 +1013,9 @@ private[spark] object Utils extends Logging {
    */
   def isSymlink(file: File): Boolean = {
     if (file == null) throw new NullPointerException("File must not be null")
-    if (isWindows) return false
+    if (isWindows) {
+      return Files.isSymbolicLink(Paths.get(file.toURI))
+    }
     val fileInCanonicalDir = if (file.getParent() == null) {
       file
     } else {
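For context on PATCH 05: the old isSymlink short-circuited to false on Windows and otherwise compared canonical and absolute paths, while java.nio.file.Files.isSymbolicLink asks the file system directly and handles NTFS symlinks too. A standalone sketch contrasting the two checks (simplified from the Spark code above, not a drop-in replacement):

    import java.io.File
    import java.nio.file.{Files, Paths}

    object SymlinkCheck {
      // Pre-NIO approach: a symlinked file's canonical path differs from its
      // absolute path once the parent directory is canonicalized.
      def isSymlinkLegacy(file: File): Boolean = {
        val fileInCanonicalDir =
          if (file.getParent == null) file
          else new File(file.getParentFile.getCanonicalFile, file.getName)
        !fileInCanonicalDir.getCanonicalFile.equals(fileInCanonicalDir.getAbsoluteFile)
      }

      // NIO approach used by the patch: works on both platforms.
      def isSymlinkNio(file: File): Boolean =
        Files.isSymbolicLink(Paths.get(file.toURI))
    }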
From 4ce9f18098c47b028cb19623fa2530ac6d9e5679 Mon Sep 17 00:00:00 2001
From: Tao LI
Date: Tue, 3 May 2016 10:35:27 +0800
Subject: [PATCH 06/13] Fix style

---
 core/src/main/scala/org/apache/spark/util/Utils.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 27b589dc04919..56169ccf501b4 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -37,6 +37,7 @@ import scala.io.Source
 import scala.reflect.ClassTag
 import scala.util.Try
 import scala.util.control.{ControlThrowable, NonFatal}
+
 import com.google.common.io.{ByteStreams, Files => GFiles}
 import com.google.common.net.InetAddresses
 import org.apache.commons.lang3.SystemUtils
@@ -48,6 +48,7 @@ import org.apache.log4j.PropertyConfigurator
 import org.eclipse.jetty.util.MultiException
 import org.json4s._
 import org.slf4j.Logger
+
 import org.apache.spark._
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.internal.Logging

From 68a998986c0f564b9565543aeeb75f37a25589ce Mon Sep 17 00:00:00 2001
From: Tao LI
Date: Tue, 3 May 2016 12:53:37 +0800
Subject: [PATCH 07/13] Address the PR review comments

---
 .../src/main/scala/org/apache/spark/util/Utils.scala | 12 +-----------
 .../org/apache/spark/deploy/SparkSubmitSuite.scala   |  7 +------
 .../org/apache/spark/storage/DiskStoreSuite.scala    |  2 ++
 .../scala/org/apache/spark/util/UtilsSuite.scala     |  3 +++
 4 files changed, 7 insertions(+), 17 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 56169ccf501b4..ef832756ce3b7 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1014,17 +1014,7 @@ private[spark] object Utils extends Logging {
    * Check to see if file is a symbolic link.
    */
   def isSymlink(file: File): Boolean = {
-    if (file == null) throw new NullPointerException("File must not be null")
-    if (isWindows) {
-      return Files.isSymbolicLink(Paths.get(file.toURI))
-    }
-    val fileInCanonicalDir = if (file.getParent() == null) {
-      file
-    } else {
-      new File(file.getParentFile().getCanonicalFile(), file.getName())
-    }
-
-    !fileInCanonicalDir.getCanonicalFile().equals(fileInCanonicalDir.getAbsoluteFile())
+    return Files.isSymbolicLink(Paths.get(file.toURI))
   }
 
   /**
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 1a9b28eec5710..31c8fb26460df 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -649,13 +649,8 @@ class SparkSubmitSuite
   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
   private def runSparkSubmit(args: Seq[String]): Unit = {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
-    val sparkSubmit = if (Utils.isWindows) {
-      Seq(new File("..\\bin\\spark-submit.cmd").getAbsolutePath) ++ args
-    } else {
-      Seq("./bin/spark-submit") ++ args
-    }
     val process = Utils.executeCommand(
-      sparkSubmit,
+      Seq("./bin/spark-submit") ++ args,
       new File(sparkHome),
       Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
 
diff --git a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
index df37a7953ced0..9e6b02b9eac4d 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
@@ -27,6 +27,8 @@ import org.apache.spark.util.Utils
 class DiskStoreSuite extends SparkFunSuite {
 
   test("reads of memory-mapped and non memory-mapped files are equivalent") {
+    // Re-opening the file store causes an error on Windows if the memory-mapped
+    // byte buffer to the file has not yet been garbage-collected.
     assume(!Utils.isWindows)
     val confKey = "spark.storage.memoryMapThreshold"
 
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index e61a0a3a3cf98..7cdeb3be88030 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -719,9 +719,12 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     val buffer = new CircularBuffer(25)
     val stream = new PrintStream(buffer, true, "UTF-8")
 
+    // scalastyle:off println
     // Note: println will append '\r\n' on Windows,
     // so there will be one more byte compared to Unix/Linux.
     stream.print("test circular test circular test circular test circular test circular\n")
+    // scalastyle:on println
+    assert(buffer.toString === "t circular test circular\n")
   }
 
   test("nanSafeCompareDoubles") {
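The DiskStoreSuite comment added in PATCH 07 refers to a JVM limitation: a memory-mapped file stays locked on Windows until its MappedByteBuffer is garbage-collected, so deleting or re-opening the file can fail in the meantime. A hedged, standalone sketch of the effect (the temp-file name is arbitrary; the delete behaviour is platform-dependent as noted in the comments):

    import java.io.{File, RandomAccessFile}
    import java.nio.channels.FileChannel

    object MappedFileSketch {
      def main(args: Array[String]): Unit = {
        val file = File.createTempFile("mapped", ".bin")
        val raf = new RandomAccessFile(file, "rw")
        raf.setLength(1024)
        // Map the file; the mapping stays valid even after the channel is closed.
        val buffer = raf.getChannel.map(FileChannel.MapMode.READ_ONLY, 0, 1024)
        raf.close()
        // On Windows this delete typically fails while `buffer` is still reachable,
        // because the mapping holds the file open; on Linux/macOS it succeeds.
        println(s"delete succeeded: ${file.delete()}")
      }
    }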
From a9a205547834e685637d7dd9f8ae811d53ac948a Mon Sep 17 00:00:00 2001
From: "U-FAREAST\\tl"
Date: Mon, 2 May 2016 22:55:02 -0700
Subject: [PATCH 08/13] Revert the SparkSubmitSuite change

---
 .../scala/org/apache/spark/deploy/SparkSubmitSuite.scala | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 31c8fb26460df..1a9b28eec5710 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -649,8 +649,13 @@ class SparkSubmitSuite
   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
   private def runSparkSubmit(args: Seq[String]): Unit = {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
+    val sparkSubmit = if (Utils.isWindows) {
+      Seq(new File("..\\bin\\spark-submit.cmd").getAbsolutePath) ++ args
+    } else {
+      Seq("./bin/spark-submit") ++ args
+    }
     val process = Utils.executeCommand(
-      Seq("./bin/spark-submit") ++ args,
+      sparkSubmit,
       new File(sparkHome),
       Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
 
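The branch restored above follows the usual JVM OS-detection pattern; Spark's Utils.isWindows wraps Commons Lang's SystemUtils.IS_OS_WINDOWS, but an os.name check gives the same result. A self-contained sketch of the command selection (paths taken from the patch; the object and helper names are made up for illustration):

    import java.io.File

    object SubmitCommand {
      // Assumed equivalent of Utils.isWindows (Spark uses SystemUtils.IS_OS_WINDOWS).
      val isWindows: Boolean =
        System.getProperty("os.name").toLowerCase.startsWith("windows")

      // Windows needs the .cmd wrapper and an absolute path; POSIX shells can
      // run the relative script directly.
      def sparkSubmitCommand(args: Seq[String]): Seq[String] =
        if (isWindows) {
          Seq(new File("..\\bin\\spark-submit.cmd").getAbsolutePath) ++ args
        } else {
          Seq("./bin/spark-submit") ++ args
        }
    }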
From cbe5bd3757782ce7949204435203d9add78a5ad8 Mon Sep 17 00:00:00 2001
From: "U-FAREAST\\tl"
Date: Wed, 4 May 2016 20:14:34 -0700
Subject: [PATCH 09/13] Minor code cleanup

---
 .../scala/org/apache/spark/deploy/SparkSubmitSuite.scala | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 1a9b28eec5710..5e86735d985e8 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -649,13 +649,13 @@ class SparkSubmitSuite
   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
   private def runSparkSubmit(args: Seq[String]): Unit = {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
-    val sparkSubmit = if (Utils.isWindows) {
-      Seq(new File("..\\bin\\spark-submit.cmd").getAbsolutePath) ++ args
+    val sparkSubmitFile = if (Utils.isWindows) {
+      new File("..\\bin\\spark-submit.cmd")
     } else {
-      Seq("./bin/spark-submit") ++ args
+      new File("./bin/spark-submit.cmd")
     }
     val process = Utils.executeCommand(
-      sparkSubmit,
+      Seq(sparkSubmitFile.getCanonicalPath) ++ args,
       new File(sparkHome),
       Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
 

From a96a87308c0175cea71151c99be56f3535e4052d Mon Sep 17 00:00:00 2001
From: Tao LI
Date: Thu, 5 May 2016 11:45:54 +0800
Subject: [PATCH 10/13] Fix path issue of SparkSubmitSuite on Linux

---
 .../test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 5e86735d985e8..732cbfaaeea46 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -652,7 +652,7 @@ class SparkSubmitSuite
     val sparkSubmitFile = if (Utils.isWindows) {
       new File("..\\bin\\spark-submit.cmd")
     } else {
-      new File("./bin/spark-submit.cmd")
+      new File("../bin/spark-submit")
     }
     val process = Utils.executeCommand(
       Seq(sparkSubmitFile.getCanonicalPath) ++ args,
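PATCHES 09-10 settle on getCanonicalPath. Unlike getAbsolutePath, which only prefixes the working directory, getCanonicalPath also resolves "." and ".." segments (and, on Windows, case and short-name differences), so the same relative "../bin" path yields a usable executable path on both platforms. A small demonstration (the printed paths depend on where it is run):

    import java.io.File

    object PathForms {
      def main(args: Array[String]): Unit = {
        val f = new File("../bin/spark-submit")
        // e.g. /home/user/spark/core/../bin/spark-submit
        println(f.getAbsolutePath)
        // e.g. /home/user/spark/bin/spark-submit -- ".." is resolved away.
        println(f.getCanonicalPath)
      }
    }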
From cafca626e6ac113c078657506c4283cadfaf91ed Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Fri, 30 Sep 2016 23:51:45 +0900
Subject: [PATCH 11/13] Do not fix the test that was already fixed

---
 core/src/test/scala/org/apache/spark/util/UtilsSuite.scala | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 7cdeb3be88030..bc28b2d9cb831 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -719,12 +719,8 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     val buffer = new CircularBuffer(25)
     val stream = new PrintStream(buffer, true, "UTF-8")
 
-    // scalastyle:off println
-    // Note: println will append '\r\n' on Windows,
-    // so there will be one more byte compared to Unix/Linux.
-    stream.print("test circular test circular test circular test circular test circular\n")
-    // scalastyle:on println
-    assert(buffer.toString === "t circular test circular\n")
+    stream.print("test circular test circular test circular test circular test circular")
+    assert(buffer.toString === "st circular test circular")
   }
 
   test("nanSafeCompareDoubles") {

From 610d6457b6d76430c5117bcb23f56c23d7fc48d9 Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Sat, 1 Oct 2016 15:28:42 +0900
Subject: [PATCH 12/13] Improve comments

---
 .../apache/spark/sql/sources/CreateTableAsSelectSuite.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
index 7fa2d73cf98d4..e0f478902b7f7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
@@ -26,7 +26,6 @@ import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.BucketSpec
 import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.execution.command.DDLUtils
 import org.apache.spark.sql.test.SharedSQLContext
 import org.apache.spark.util.Utils
 
@@ -83,7 +82,7 @@ class CreateTableAsSelectSuite
   }
 
   test("CREATE TABLE USING AS SELECT based on the file without write permission") {
-    // setWritable(boolean) doesn't work on Windows
+    // setWritable(boolean) does not work on Windows. Please refer to JDK-6728842.
     assume(!Utils.isWindows)
     val childPath = new File(path.toString, "child")
     path.mkdir()
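JDK-6728842, cited above, is the long-standing bug that java.io.File read-only flags are not honoured for directories on Windows. A quick probe to observe it (a hedged sketch; the exact return value of setWritable for directories varies by platform and JDK build):

    import java.io.File

    object SetWritableProbe {
      def main(args: Array[String]): Unit = {
        val dir = new File(System.getProperty("java.io.tmpdir"), "setwritable-probe")
        dir.mkdir()
        val reported = dir.setWritable(false)
        // On Linux/macOS: typically reported=true and canWrite=false.
        // On Windows: the flag is not honoured for directories (JDK-6728842),
        // so writes into the directory still succeed.
        println(s"setWritable(false) returned $reported, canWrite=${dir.canWrite}")
        dir.setWritable(true)
        dir.delete()
      }
    }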
From 1ff1d57c085af01bc1626d3980656baa0fd9852a Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Sat, 1 Oct 2016 17:44:34 +0900
Subject: [PATCH 13/13] Fix comments

---
 .../apache/spark/deploy/history/FsHistoryProviderSuite.scala    | 2 +-
 .../org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index a83e41a365aba..a5eda7b5a5a75 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -126,7 +126,7 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
   }
 
   test("SPARK-3697: ignore directories that cannot be read.") {
-    // setReadable doesn't work on windows for directories
+    // setReadable(...) does not work on Windows. Please refer to JDK-6728842.
     assume(!Utils.isWindows)
     val logFile1 = newLogFile("new1", None, inProgress = false)
     writeFile(logFile1, true, None,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
index e0f478902b7f7..c39005f6a1063 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
@@ -82,7 +82,7 @@ class CreateTableAsSelectSuite
   }
 
   test("CREATE TABLE USING AS SELECT based on the file without write permission") {
-    // setWritable(boolean) does not work on Windows. Please refer to JDK-6728842.
+    // setWritable(...) does not work on Windows. Please refer to JDK-6728842.
     assume(!Utils.isWindows)
     val childPath = new File(path.toString, "child")
     path.mkdir()
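Across the series, the common remedy is ScalaTest's assume, which cancels a test instead of failing it when a precondition does not hold. A standalone sketch of the pattern (the import and suite name reflect current ScalaTest and are assumptions; Spark's test suites get assume via SparkFunSuite instead):

    import org.scalatest.funsuite.AnyFunSuite

    class WindowsGuardSuite extends AnyFunSuite {
      private val isWindows =
        System.getProperty("os.name").toLowerCase.startsWith("windows")

      test("behaviour that relies on POSIX file permissions") {
        // assume() throws TestCanceledException rather than a failure,
        // so the suite stays green on Windows instead of reporting a red bar.
        assume(!isWindows)
        // ... body that depends on setReadable/setWritable ...
      }
    }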