From 4e26e43056d00efdc5926c9c17c293ec8b5dcc59 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Tue, 25 Aug 2015 15:24:33 -0700 Subject: [PATCH 01/12] WIP DO NOT MERGE. Testing out test tags. --- dev/run-tests-jenkins | 2 +- dev/run-tests.py | 17 ++++++++++-- dev/sparktestsupport/modules.py | 8 +++++- pom.xml | 2 ++ project/SparkBuild.scala | 5 ++++ .../execution/HiveCompatibilitySuite.scala | 2 ++ .../spark/sql/hive/ExtendedHiveTest.java | 26 +++++++++++++++++++ .../org/apache/spark/sql/hive/TestTags.scala | 22 ++++++++++++++++ .../spark/sql/hive/client/VersionsSuite.scala | 2 ++ 9 files changed, 82 insertions(+), 4 deletions(-) create mode 100644 sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java create mode 100644 sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala diff --git a/dev/run-tests-jenkins b/dev/run-tests-jenkins index c4d39d95d589..f005c4a87447 100755 --- a/dev/run-tests-jenkins +++ b/dev/run-tests-jenkins @@ -49,7 +49,7 @@ SHORT_COMMIT_HASH="${ghprbActualCommit:0:7}" # format: http://linux.die.net/man/1/timeout # must be less than the timeout configured on Jenkins (currently 180m) -TESTS_TIMEOUT="175m" +TESTS_TIMEOUT="200m" # Array to capture all tests to run on the pull request. These tests are held under the #+ dev/tests/ directory. diff --git a/dev/run-tests.py b/dev/run-tests.py index f689425ee40b..f2cdf746f36d 100755 --- a/dev/run-tests.py +++ b/dev/run-tests.py @@ -117,6 +117,13 @@ def determine_modules_to_test(changed_modules): return modules_to_test.union(set(changed_modules)) +def determine_tags_to_exclude(changed_modules): + tags = [ ] + for m in modules.all_modules: + if m not in changed_modules: + tags += m.test_tags + return tags + # ------------------------------------------------------------------------------------------------- # Functions for working with subprocesses and shell tools # ------------------------------------------------------------------------------------------------- @@ -335,6 +342,7 @@ def detect_binary_inop_with_mima(): def run_scala_tests_maven(test_profiles): mvn_test_goals = ["test", "--fail-at-end"] + profiles_and_goals = test_profiles + mvn_test_goals print("[info] Running Spark tests using Maven with these arguments: ", @@ -358,7 +366,7 @@ def run_scala_tests_sbt(test_modules, test_profiles): exec_sbt(profiles_and_goals) -def run_scala_tests(build_tool, hadoop_version, test_modules): +def run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags): """Function to properly execute all tests passed in as a set from the `determine_test_suites` function""" set_title_and_block("Running Spark unit tests", "BLOCK_SPARK_UNIT_TESTS") @@ -367,6 +375,10 @@ def run_scala_tests(build_tool, hadoop_version, test_modules): test_profiles = get_hadoop_profiles(hadoop_version) + \ list(set(itertools.chain.from_iterable(m.build_profile_flags for m in test_modules))) + + if excluded_tags: + test_profiles += [ '-Dtest.exclude.tags=' + ",".join(excluded_tags) ] + if build_tool == "maven": run_scala_tests_maven(test_profiles) else: @@ -476,6 +488,7 @@ def main(): setup_test_environ(test_environ) test_modules = determine_modules_to_test(changed_modules) + excluded_tags = determine_tags_to_exclude(changed_modules) # license checks run_apache_rat_checks() @@ -498,7 +511,7 @@ def main(): detect_binary_inop_with_mima() # run the test suites - run_scala_tests(build_tool, hadoop_version, test_modules) + run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags) modules_with_python_tests = [m for m in 
test_modules if m.python_test_goals] if modules_with_python_tests: diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py index 346452f3174e..079b2a59b22c 100644 --- a/dev/sparktestsupport/modules.py +++ b/dev/sparktestsupport/modules.py @@ -31,7 +31,7 @@ class Module(object): def __init__(self, name, dependencies, source_file_regexes, build_profile_flags=(), environ={}, sbt_test_goals=(), python_test_goals=(), blacklisted_python_implementations=(), - should_run_r_tests=False): + test_tags=(), should_run_r_tests=False): """ Define a new module. @@ -50,6 +50,8 @@ def __init__(self, name, dependencies, source_file_regexes, build_profile_flags= :param blacklisted_python_implementations: A set of Python implementations that are not supported by this module's Python components. The values in this set should match strings returned by Python's `platform.python_implementation()`. + :param test_tags: A set of tags that will be excluded when running unit tests if the module + is not explicitly changed. :param should_run_r_tests: If true, changes in this module will trigger all R tests. """ self.name = name @@ -60,6 +62,7 @@ def __init__(self, name, dependencies, source_file_regexes, build_profile_flags= self.environ = environ self.python_test_goals = python_test_goals self.blacklisted_python_implementations = blacklisted_python_implementations + self.test_tags = test_tags self.should_run_r_tests = should_run_r_tests self.dependent_modules = set() @@ -85,6 +88,9 @@ def contains_file(self, filename): "catalyst/test", "sql/test", "hive/test", + ], + test_tags=[ + "org.apache.spark.sql.hive.ExtendedHiveTest" ] ) diff --git a/pom.xml b/pom.xml index 0716016523ee..450c52dc59bd 100644 --- a/pom.xml +++ b/pom.xml @@ -181,6 +181,7 @@ 0.9.2 ${java.home} + diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index ea52bfd67944..08e34aba7e3c 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -565,6 +565,11 @@ object TestSettings { javaOptions in Test ++= "-Xmx3g -Xss4096k -XX:PermSize=128M -XX:MaxNewSize=256m -XX:MaxPermSize=1g" .split(" ").toSeq, javaOptions += "-Xmx3g", + // Exclude tags defined in a system property + testOptions in Test += Tests.Argument( + sys.props.get("test.exclude.tags").map { tags => + tags.split(",").flatMap { tag => Seq("-l", tag) }.toSeq + }.getOrElse(Nil): _*), // Show full stack trace and duration in test cases. testOptions in Test += Tests.Argument("-oDF"), testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"), diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala index ab309e0a1d36..ffc4c32794ca 100644 --- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala +++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala @@ -24,11 +24,13 @@ import org.apache.spark.sql.catalyst.rules.RuleExecutor import org.scalatest.BeforeAndAfter import org.apache.spark.sql.SQLConf +import org.apache.spark.sql.hive.ExtendedHiveTest import org.apache.spark.sql.hive.test.TestHive /** * Runs the test cases that are included in the hive distribution. */ +@ExtendedHiveTest class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter { // TODO: bundle in jar files...
get from classpath private lazy val hiveQueryDir = TestHive.getHiveFile( diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java new file mode 100644 index 000000000000..e2183183fb55 --- /dev/null +++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.hive; + +import java.lang.annotation.*; +import org.scalatest.TagAnnotation; + +@TagAnnotation +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.METHOD, ElementType.TYPE}) +public @interface ExtendedHiveTest { } diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala new file mode 100644 index 000000000000..4f9a98f96376 --- /dev/null +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.sql.hive + +import org.scalatest.Tag + +object ExtendedHiveTestTag extends Tag("org.apache.spark.sql.hive.ExtendedHiveTest") diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala index f0bb77092c0c..888d1b7b4553 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala @@ -23,6 +23,7 @@ import org.apache.spark.sql.hive.HiveContext import org.apache.spark.{Logging, SparkFunSuite} import org.apache.spark.sql.catalyst.expressions.{NamedExpression, Literal, AttributeReference, EqualTo} import org.apache.spark.sql.catalyst.util.quietly +import org.apache.spark.sql.hive.ExtendedHiveTest import org.apache.spark.sql.types.IntegerType import org.apache.spark.util.Utils @@ -32,6 +33,7 @@ import org.apache.spark.util.Utils * sure that reflective calls are not throwing NoSuchMethod error, but the actually functionality * is not fully tested. */ +@ExtendedHiveTest class VersionsSuite extends SparkFunSuite with Logging { // Do not use a temp path here to speed up subsequent executions of the unit test during From ef737c09fd7d81c26af83efde2c19c42476d1323 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Tue, 25 Aug 2015 15:37:30 -0700 Subject: [PATCH 02/12] Oh great now we have different styles depending on language... --- dev/run-tests.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/dev/run-tests.py b/dev/run-tests.py index f2cdf746f36d..a7f190c31dec 100755 --- a/dev/run-tests.py +++ b/dev/run-tests.py @@ -118,11 +118,12 @@ def determine_modules_to_test(changed_modules): def determine_tags_to_exclude(changed_modules): - tags = [ ] - for m in modules.all_modules: - if m not in changed_modules: - tags += m.test_tags - return tags + tags = [] + for m in modules.all_modules: + if m not in changed_modules: + tags += m.test_tags + return tags + # ------------------------------------------------------------------------------------------------- # Functions for working with subprocesses and shell tools @@ -377,7 +378,7 @@ def run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags): list(set(itertools.chain.from_iterable(m.build_profile_flags for m in test_modules))) if excluded_tags: - test_profiles += [ '-Dtest.exclude.tags=' + ",".join(excluded_tags) ] + test_profiles += ['-Dtest.exclude.tags=' + ",".join(excluded_tags)] if build_tool == "maven": run_scala_tests_maven(test_profiles) From 3a2979f737f34d648d0d72be99f02f50c74800f1 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Tue, 25 Aug 2015 16:15:09 -0700 Subject: [PATCH 03/12] Always run all tests locally (since code doesn't detect changed modules). 
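The net effect in main(), sketched for clarity (this assumes the surrounding code in dev/run-tests.py; the actual hunks are in the diff below):

    changed_modules = determine_modules_for_files(changed_files)
    excluded_tags = determine_tags_to_exclude(changed_modules)
    if not changed_modules:
        # Local runs can't detect changed modules, so treat everything
        # as changed and exclude no tags.
        changed_modules = [modules.root]
        excluded_tags = []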
--- dev/run-tests.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dev/run-tests.py b/dev/run-tests.py index a7f190c31dec..0800d43441a6 100755 --- a/dev/run-tests.py +++ b/dev/run-tests.py @@ -474,8 +474,10 @@ def main(): target_branch = os.environ["ghprbTargetBranch"] changed_files = identify_changed_files_from_git_commits("HEAD", target_branch=target_branch) changed_modules = determine_modules_for_files(changed_files) + excluded_tags = determine_tags_to_exclude(changed_modules) if not changed_modules: changed_modules = [modules.root] + excluded_tags = [] print("[info] Found the following changed modules:", ", ".join(x.name for x in changed_modules)) @@ -489,7 +491,6 @@ def main(): setup_test_environ(test_environ) test_modules = determine_modules_to_test(changed_modules) - excluded_tags = determine_tags_to_exclude(changed_modules) # license checks run_apache_rat_checks() From a02c07a8c64e92ef08d2c616caee6193183f8ae5 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Tue, 25 Aug 2015 16:26:41 -0700 Subject: [PATCH 04/12] Add a tag for yarn integration tests. --- dev/sparktestsupport/modules.py | 12 +++++++++ .../spark/deploy/yarn/ExtendedYarnTest.java | 26 +++++++++++++++++++ .../apache/spark/deploy/yarn/TestTags.scala | 22 ++++++++++++++++ .../spark/deploy/yarn/YarnClusterSuite.scala | 1 + .../yarn/YarnShuffleIntegrationSuite.scala | 1 + 5 files changed, 62 insertions(+) create mode 100644 yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java create mode 100644 yarn/src/test/scala/org/apache/spark/deploy/yarn/TestTags.scala diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py index 079b2a59b22c..854002d9d422 100644 --- a/dev/sparktestsupport/modules.py +++ b/dev/sparktestsupport/modules.py @@ -404,6 +404,18 @@ def contains_file(self, filename): ) +yarn = Module( + name="yarn", + dependencies=[], + source_file_regexes=[ + "yarn/", + "network/yarn/", + ], + test_tags=[ + "org.apache.spark.deploy.yarn.ExtendedYarnTest" + ] +) + # The root module is a dummy module which is used to run all of the tests. # No other modules should directly depend on this module. root = Module( diff --git a/yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java b/yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java new file mode 100644 index 000000000000..7a8f2fe979c1 --- /dev/null +++ b/yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.deploy.yarn; + +import java.lang.annotation.*; +import org.scalatest.TagAnnotation; + +@TagAnnotation +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.METHOD, ElementType.TYPE}) +public @interface ExtendedYarnTest { } diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/TestTags.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/TestTags.scala new file mode 100644 index 000000000000..9bd990bffcf5 --- /dev/null +++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/TestTags.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.deploy.yarn + +import org.scalatest.Tag + +object ExtendedYarnTestTag extends Tag("org.apache.spark.deploy.yarn.ExtendedYarnTest") diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala index 5a4ea2ea2f4f..62052f4722d2 100644 --- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala +++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala @@ -37,6 +37,7 @@ import org.apache.spark.util.Utils * applications, and require the Spark assembly to be built before they can be successfully * run. */ +@ExtendedYarnTest class YarnClusterSuite extends BaseYarnClusterSuite { override def yarnConfig: YarnConfiguration = new YarnConfiguration() diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala index 5e8238822b90..377ca7e9b01d 100644 --- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala +++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala @@ -32,6 +32,7 @@ import org.apache.spark.network.yarn.{YarnShuffleService, YarnTestAccessor} /** * Integration test for the external shuffle service with a yarn mini-cluster */ +@ExtendedYarnTest class YarnShuffleIntegrationSuite extends BaseYarnClusterSuite { override def yarnConfig: YarnConfiguration = { From 05304a6717fe5315c6a0260da488f7a2f0accf73 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Tue, 25 Aug 2015 20:09:28 -0700 Subject: [PATCH 05/12] Add tag exclusion to junit also. 
Based on squito:SPARK-4746 --- pom.xml | 5 +++-- project/SparkBuild.scala | 10 +++++++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index 450c52dc59bd..d11f8a0a852a 100644 --- a/pom.xml +++ b/pom.xml @@ -761,7 +761,7 @@ com.novocode junit-interface - 0.10 + 0.11 test @@ -1910,7 +1910,7 @@ src false - ${test.exclude.tags} + ${test.exclude.tags} @@ -1946,6 +1946,7 @@ __not_used__ + ${test.exclude.tags} diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 08e34aba7e3c..db7054a8f741 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -566,15 +566,19 @@ object TestSettings { .split(" ").toSeq, javaOptions += "-Xmx3g", // Exclude tags defined in a system property - testOptions in Test += Tests.Argument( + testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest, sys.props.get("test.exclude.tags").map { tags => tags.split(",").flatMap { tag => Seq("-l", tag) }.toSeq }.getOrElse(Nil): _*), + testOptions in Test += Tests.Argument(TestFrameworks.JUnit, + sys.props.get("test.exclude.tags").map { tags => + Seq("--exclude-categories=" + tags) + }.getOrElse(Nil): _*), // Show full stack trace and duration in test cases. testOptions in Test += Tests.Argument("-oDF"), - testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"), + testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"), // Enable Junit testing. - libraryDependencies += "com.novocode" % "junit-interface" % "0.9" % "test", + libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test", // Only allow one test at a time, even across projects, since they run in the same JVM parallelExecution in Test := false, // Make sure the test temp directory exists. From ce9226689844c08d7c1c93f7ec3acaa777674b1a Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Tue, 25 Aug 2015 20:11:56 -0700 Subject: [PATCH 06/12] Remove scalatest object tags since they're not used. From squito:SPARK-4746 it seems like they might not even be needed. --- .../org/apache/spark/sql/hive/TestTags.scala | 22 ------------------- .../apache/spark/deploy/yarn/TestTags.scala | 22 ------------------- 2 files changed, 44 deletions(-) delete mode 100644 sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala delete mode 100644 yarn/src/test/scala/org/apache/spark/deploy/yarn/TestTags.scala diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala deleted file mode 100644 index 4f9a98f96376..000000000000 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.spark.sql.hive - -import org.scalatest.Tag - -object ExtendedHiveTestTag extends Tag("org.apache.spark.sql.hive.ExtendedHiveTest") diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/TestTags.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/TestTags.scala deleted file mode 100644 index 9bd990bffcf5..000000000000 --- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/TestTags.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.spark.deploy.yarn - -import org.scalatest.Tag - -object ExtendedYarnTestTag extends Tag("org.apache.spark.deploy.yarn.ExtendedYarnTest") From 27749cfdc7b22e0b90440fa2ea9f33c6acbb78f6 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Wed, 26 Aug 2015 09:48:30 -0700 Subject: [PATCH 07/12] Declare common test dependencies in root pom only. --- core/pom.xml | 10 ---------- external/flume/pom.xml | 10 ---------- external/kafka/pom.xml | 10 ---------- external/mqtt/pom.xml | 10 ---------- external/twitter/pom.xml | 10 ---------- external/zeromq/pom.xml | 10 ---------- extras/java8-tests/pom.xml | 10 ---------- extras/kinesis-asl/pom.xml | 5 ----- launcher/pom.xml | 5 ----- mllib/pom.xml | 10 ---------- network/common/pom.xml | 10 ---------- network/shuffle/pom.xml | 10 ---------- pom.xml | 10 ++++++++++ sql/core/pom.xml | 5 ----- sql/hive/pom.xml | 5 ----- streaming/pom.xml | 10 ---------- unsafe/pom.xml | 10 ---------- 17 files changed, 10 insertions(+), 140 deletions(-) diff --git a/core/pom.xml b/core/pom.xml index 4f79d71bf85f..53e9eb4b081f 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -323,16 +323,6 @@ scalacheck_${scala.binary.version} test - - junit - junit - test - - - com.novocode - junit-interface - test - org.apache.curator curator-test diff --git a/external/flume/pom.xml b/external/flume/pom.xml index 14f7daaf417e..912f36d89b60 100644 --- a/external/flume/pom.xml +++ b/external/flume/pom.xml @@ -66,16 +66,6 @@ scalacheck_${scala.binary.version} test - - junit - junit - test - - - com.novocode - junit-interface - test - target/scala-${scala.binary.version}/classes diff --git a/external/kafka/pom.xml b/external/kafka/pom.xml index ded863bd985e..b895a395f298 100644 --- a/external/kafka/pom.xml +++ b/external/kafka/pom.xml @@ -86,16 +86,6 @@ scalacheck_${scala.binary.version} test - - junit - junit - test - - - com.novocode - junit-interface - test - target/scala-${scala.binary.version}/classes diff --git a/external/mqtt/pom.xml b/external/mqtt/pom.xml index 69b309876a0d..fe9db09e06df 100644 --- a/external/mqtt/pom.xml +++ b/external/mqtt/pom.xml @@ -58,16 +58,6 @@ scalacheck_${scala.binary.version} test - - junit - junit - test - - - com.novocode - junit-interface - test - org.apache.activemq activemq-core diff 
--git a/external/twitter/pom.xml b/external/twitter/pom.xml index 178ae8de13b5..a73b32fbc2b1 100644 --- a/external/twitter/pom.xml +++ b/external/twitter/pom.xml @@ -58,16 +58,6 @@ scalacheck_${scala.binary.version} test - - junit - junit - test - - - com.novocode - junit-interface - test - target/scala-${scala.binary.version}/classes diff --git a/external/zeromq/pom.xml b/external/zeromq/pom.xml index 37bfd10d4366..4091620149fb 100644 --- a/external/zeromq/pom.xml +++ b/external/zeromq/pom.xml @@ -57,16 +57,6 @@ scalacheck_${scala.binary.version} test - - junit - junit - test - - - com.novocode - junit-interface - test - target/scala-${scala.binary.version}/classes diff --git a/extras/java8-tests/pom.xml b/extras/java8-tests/pom.xml index 3636a9037d43..b6534406311b 100644 --- a/extras/java8-tests/pom.xml +++ b/extras/java8-tests/pom.xml @@ -58,16 +58,6 @@ test-jar test - - junit - junit - test - - - com.novocode - junit-interface - test - diff --git a/extras/kinesis-asl/pom.xml b/extras/kinesis-asl/pom.xml index 521b53e230c4..2df94676defc 100644 --- a/extras/kinesis-asl/pom.xml +++ b/extras/kinesis-asl/pom.xml @@ -74,11 +74,6 @@ scalacheck_${scala.binary.version} test - - com.novocode - junit-interface - test - target/scala-${scala.binary.version}/classes diff --git a/launcher/pom.xml b/launcher/pom.xml index 2fd768d8119c..fdfb9f6315b4 100644 --- a/launcher/pom.xml +++ b/launcher/pom.xml @@ -42,11 +42,6 @@ log4j test - - junit - junit - test - org.mockito mockito-core diff --git a/mllib/pom.xml b/mllib/pom.xml index a5db14407b4f..752c59258ef5 100644 --- a/mllib/pom.xml +++ b/mllib/pom.xml @@ -94,16 +94,6 @@ scalacheck_${scala.binary.version} test - - junit - junit - test - - - com.novocode - junit-interface - test - org.mockito mockito-core diff --git a/network/common/pom.xml b/network/common/pom.xml index 7dc3068ab8cb..05cc879b1737 100644 --- a/network/common/pom.xml +++ b/network/common/pom.xml @@ -60,16 +60,6 @@ - - junit - junit - test - - - com.novocode - junit-interface - test - log4j log4j diff --git a/network/shuffle/pom.xml b/network/shuffle/pom.xml index 3d2edf9d9451..9611a29ad1f7 100644 --- a/network/shuffle/pom.xml +++ b/network/shuffle/pom.xml @@ -78,16 +78,6 @@ test-jar test - - junit - junit - test - - - com.novocode - junit-interface - test - log4j log4j diff --git a/pom.xml b/pom.xml index d11f8a0a852a..fdb0a3f4705a 100644 --- a/pom.xml +++ b/pom.xml @@ -340,6 +340,16 @@ scalatest_${scala.binary.version} test + + junit + junit + test + + + com.novocode + junit-interface + test + diff --git a/sql/core/pom.xml b/sql/core/pom.xml index 349007789f63..1988c357d29f 100644 --- a/sql/core/pom.xml +++ b/sql/core/pom.xml @@ -73,11 +73,6 @@ jackson-databind ${fasterxml.jackson.version} - - junit - junit - test - org.scalacheck scalacheck_${scala.binary.version} diff --git a/sql/hive/pom.xml b/sql/hive/pom.xml index be1607476e25..3fb98e958824 100644 --- a/sql/hive/pom.xml +++ b/sql/hive/pom.xml @@ -160,11 +160,6 @@ scalacheck_${scala.binary.version} test - - junit - junit - test - org.apache.spark spark-sql_${scala.binary.version} diff --git a/streaming/pom.xml b/streaming/pom.xml index 697895e72fe5..f6bb1da9f872 100644 --- a/streaming/pom.xml +++ b/streaming/pom.xml @@ -84,21 +84,11 @@ scalacheck_${scala.binary.version} test - - junit - junit - test - org.seleniumhq.selenium selenium-java test - - com.novocode - junit-interface - test - target/scala-${scala.binary.version}/classes diff --git a/unsafe/pom.xml b/unsafe/pom.xml index 89475ee3cf5a..bb45604f269b 100644 --- 
a/unsafe/pom.xml +++ b/unsafe/pom.xml @@ -55,16 +55,6 @@ - - junit - junit - test - - - com.novocode - junit-interface - test - org.mockito mockito-core From b7d05079d36bdc7dbaa98ea29641e1a96dc05866 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Wed, 26 Aug 2015 10:34:14 -0700 Subject: [PATCH 08/12] Tag a random sample of tests in HiveCompatibilitySuite with ExtendedHiveTest. This allows a small set of tests to run for every PR, while running the whole test suite when changes to sql code are made (as decided by run-tests.py). --- .../HashJoinCompatibilitySuite.scala | 2 ++ .../execution/HiveCompatibilitySuite.scala | 2 -- .../org/apache/spark/sql/hive/TestTags.scala | 26 +++++++++++++++++++ .../hive/execution/HiveComparisonTest.scala | 19 +++++++++++--- .../hive/execution/HiveQueryFileTest.scala | 24 ++++++++++++++--- .../sql/hive/execution/HiveSerDeSuite.scala | 2 +- 6 files changed, 66 insertions(+), 9 deletions(-) create mode 100644 sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HashJoinCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HashJoinCompatibilitySuite.scala index 1a5ba20404c4..db79909cfb8d 100644 --- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HashJoinCompatibilitySuite.scala +++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HashJoinCompatibilitySuite.scala @@ -20,11 +20,13 @@ package org.apache.spark.sql.hive.execution import java.io.File import org.apache.spark.sql.SQLConf +import org.apache.spark.sql.hive.ExtendedHiveTest import org.apache.spark.sql.hive.test.TestHive /** * Runs the test cases that are included in the hive distribution with hash joins. */ +@ExtendedHiveTest class HashJoinCompatibilitySuite extends HiveCompatibilitySuite { override def beforeAll() { super.beforeAll() diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala index ffc4c32794ca..ab309e0a1d36 100644 --- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala +++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala @@ -24,13 +24,11 @@ import org.apache.spark.sql.catalyst.rules.RuleExecutor import org.scalatest.BeforeAndAfter import org.apache.spark.sql.SQLConf -import org.apache.spark.sql.hive.ExtendedHiveTest import org.apache.spark.sql.hive.test.TestHive /** * Runs the test cases that are included in the hive distribution. */ -@ExtendedHiveTest class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter { // TODO: bundle in jar files... get from classpath private lazy val hiveQueryDir = TestHive.getHiveFile( diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala new file mode 100644 index 000000000000..ba5a51e5ac91 --- /dev/null +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.hive + +import org.scalatest.Tag + +object TestTags { + + object ExtendedHiveTest extends Tag("org.apache.spark.sql.hive.ExtendedHiveTest") + +} diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala index 4d45249d9c6b..8e31434cdea2 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala @@ -21,7 +21,7 @@ import java.io._ import scala.util.control.NonFatal -import org.scalatest.{BeforeAndAfterAll, GivenWhenThen} +import org.scalatest.{BeforeAndAfterAll, GivenWhenThen, Tag} import org.apache.spark.{Logging, SparkFunSuite} import org.apache.spark.sql.catalyst.planning.PhysicalOperation @@ -209,7 +209,11 @@ abstract class HiveComparisonTest } val installHooksCommand = "(?i)SET.*hooks".r - def createQueryTest(testCaseName: String, sql: String, reset: Boolean = true) { + def createQueryTest( + testCaseName: String, + sql: String, + tag: Option[Tag] = None, + reset: Boolean = true) { // testCaseName must not contain ':', which is not allowed to appear in a filename of Windows assert(!testCaseName.contains(":")) @@ -237,7 +241,16 @@ abstract class HiveComparisonTest return } - test(testCaseName) { + def createTest(name: String)(fn: => Unit): Unit = { + tag match { + case Some(tagValue) => + test(name, tagValue)(fn) + case None => + test(name)(fn) + } + } + + createTest(testCaseName) { logDebug(s"=== HIVE TEST: $testCaseName ===") // Clear old output for this testcase. diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala index f7b37dae0a5f..7e76fff7974c 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala @@ -19,7 +19,10 @@ package org.apache.spark.sql.hive.execution import java.io.File +import scala.util.Random + import org.apache.spark.sql.catalyst.util._ +import org.apache.spark.sql.hive.TestTags.ExtendedHiveTest /** * A framework for running the query tests that are listed as a set of text files. @@ -51,21 +54,36 @@ abstract class HiveQueryFileTest extends HiveComparisonTest { Option(System.getProperty(whiteListProperty)).map(_.split(",").toSeq).getOrElse(whiteList) // Go through all the test cases and add them to scala test. 
- testCases.sorted.foreach { + val testsToRun = testCases.sorted.flatMap { case (testCaseName, testCaseFile) => if (blackList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_)) { logDebug(s"Blacklisted test skipped $testCaseName") + None } else if (realWhiteList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_) || runAll) { // Build a test case and submit it to scala test framework... - val queriesString = fileToString(testCaseFile) - createQueryTest(testCaseName, queriesString) + Some(testCaseName -> testCaseFile) } else { // Only output warnings for the built in whitelist as this clutters the output when the user // trying to execute a single test from the commandline. if (System.getProperty(whiteListProperty) == null && !runAll) { ignore(testCaseName) {} } + None } } + + // Pick a random sample of tests to serve as a "smoke" test. This is used by automated tests when + // the sql/ code hasn't been changed, to avoid running the whole test suite for every PR that + // touches core code. + private val smokeCount = sys.props.getOrElse("spark.hive.smoke.count", "20").toInt + private val smokeSet = Random.shuffle(testsToRun).take(smokeCount) + .map { case (name, _) => name }.toSet + + testsToRun.foreach { case (testCaseName, testCaseFile) => + val queriesString = fileToString(testCaseFile) + val tag = if (!smokeSet.contains(testCaseName)) Some(ExtendedHiveTest) else None + createQueryTest(testCaseName, queriesString, tag = tag) + } + } diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala index 5586a793618b..889288319024 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala @@ -38,7 +38,7 @@ class HiveSerDeSuite extends HiveComparisonTest with BeforeAndAfterAll { } // table sales is not a cache table, and will be clear after reset - createQueryTest("Read with RegexSerDe", "SELECT * FROM sales", false) + createQueryTest("Read with RegexSerDe", "SELECT * FROM sales", reset = false) createQueryTest( "Read and write with LazySimpleSerDe (tab separated)", From 5c107a53832e7b7a5e8ee279b717e94b36d94aa2 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Wed, 26 Aug 2015 10:52:35 -0700 Subject: [PATCH 09/12] Workaround for an ivy issue? --- project/SparkBuild.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index db7054a8f741..65f181701f51 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -579,6 +579,7 @@ object TestSettings { testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"), // Enable Junit testing. libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test", + libraryDependencies += "junit" % "junit" % "4.10" % "test", // Only allow one test at a time, even across projects, since they run in the same JVM parallelExecution in Test := false, // Make sure the test temp directory exists. From 85fed2051574c7628c5d80cea4822e53e535bde8 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Wed, 26 Aug 2015 11:06:22 -0700 Subject: [PATCH 10/12] Use same junit version for maven and sbt. This makes sbt happy now that I put the junit assembly in the shared dependencies in the root pom. 
--- pom.xml | 2 +- project/SparkBuild.scala | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index fdb0a3f4705a..a40fc66ff89e 100644 --- a/pom.xml +++ b/pom.xml @@ -753,7 +753,7 @@ junit junit - 4.10 + 4.11 test diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 65f181701f51..db7054a8f741 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -579,7 +579,6 @@ object TestSettings { testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"), // Enable Junit testing. libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test", - libraryDependencies += "junit" % "junit" % "4.10" % "test", // Only allow one test at a time, even across projects, since they run in the same JVM parallelExecution in Test := false, // Make sure the test temp directory exists. From 83bec7fe9bb68b1fbac97b43efedf45aeb93fc74 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Wed, 26 Aug 2015 12:05:09 -0700 Subject: [PATCH 11/12] Revert "Tag a random sample of tests in HiveCompatibilitySuite with ExtendedHiveTest." This reverts commit b7d05079d36bdc7dbaa98ea29641e1a96dc05866. --- .../HashJoinCompatibilitySuite.scala | 2 -- .../execution/HiveCompatibilitySuite.scala | 2 ++ .../org/apache/spark/sql/hive/TestTags.scala | 26 ------------------- .../hive/execution/HiveComparisonTest.scala | 19 +++----------- .../hive/execution/HiveQueryFileTest.scala | 24 +++-------------- .../sql/hive/execution/HiveSerDeSuite.scala | 2 +- 6 files changed, 9 insertions(+), 66 deletions(-) delete mode 100644 sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HashJoinCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HashJoinCompatibilitySuite.scala index db79909cfb8d..1a5ba20404c4 100644 --- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HashJoinCompatibilitySuite.scala +++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HashJoinCompatibilitySuite.scala @@ -20,13 +20,11 @@ package org.apache.spark.sql.hive.execution import java.io.File import org.apache.spark.sql.SQLConf -import org.apache.spark.sql.hive.ExtendedHiveTest import org.apache.spark.sql.hive.test.TestHive /** * Runs the test cases that are included in the hive distribution with hash joins. */ -@ExtendedHiveTest class HashJoinCompatibilitySuite extends HiveCompatibilitySuite { override def beforeAll() { super.beforeAll() diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala index ab309e0a1d36..ffc4c32794ca 100644 --- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala +++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala @@ -24,11 +24,13 @@ import org.apache.spark.sql.catalyst.rules.RuleExecutor import org.scalatest.BeforeAndAfter import org.apache.spark.sql.SQLConf +import org.apache.spark.sql.hive.ExtendedHiveTest import org.apache.spark.sql.hive.test.TestHive /** * Runs the test cases that are included in the hive distribution. */ +@ExtendedHiveTest class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter { // TODO: bundle in jar files... 
get from classpath private lazy val hiveQueryDir = TestHive.getHiveFile( diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala deleted file mode 100644 index ba5a51e5ac91..000000000000 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/TestTags.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.spark.sql.hive - -import org.scalatest.Tag - -object TestTags { - - object ExtendedHiveTest extends Tag("org.apache.spark.sql.hive.ExtendedHiveTest") - -} diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala index 8e31434cdea2..4d45249d9c6b 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala @@ -21,7 +21,7 @@ import java.io._ import scala.util.control.NonFatal -import org.scalatest.{BeforeAndAfterAll, GivenWhenThen, Tag} +import org.scalatest.{BeforeAndAfterAll, GivenWhenThen} import org.apache.spark.{Logging, SparkFunSuite} import org.apache.spark.sql.catalyst.planning.PhysicalOperation @@ -209,11 +209,7 @@ abstract class HiveComparisonTest } val installHooksCommand = "(?i)SET.*hooks".r - def createQueryTest( - testCaseName: String, - sql: String, - tag: Option[Tag] = None, - reset: Boolean = true) { + def createQueryTest(testCaseName: String, sql: String, reset: Boolean = true) { // testCaseName must not contain ':', which is not allowed to appear in a filename of Windows assert(!testCaseName.contains(":")) @@ -241,16 +237,7 @@ abstract class HiveComparisonTest return } - def createTest(name: String)(fn: => Unit): Unit = { - tag match { - case Some(tagValue) => - test(name, tagValue)(fn) - case None => - test(name)(fn) - } - } - - createTest(testCaseName) { + test(testCaseName) { logDebug(s"=== HIVE TEST: $testCaseName ===") // Clear old output for this testcase. 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala index 7e76fff7974c..f7b37dae0a5f 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala @@ -19,10 +19,7 @@ package org.apache.spark.sql.hive.execution import java.io.File -import scala.util.Random - import org.apache.spark.sql.catalyst.util._ -import org.apache.spark.sql.hive.TestTags.ExtendedHiveTest /** * A framework for running the query tests that are listed as a set of text files. @@ -54,36 +51,21 @@ abstract class HiveQueryFileTest extends HiveComparisonTest { Option(System.getProperty(whiteListProperty)).map(_.split(",").toSeq).getOrElse(whiteList) // Go through all the test cases and add them to scala test. - val testsToRun = testCases.sorted.flatMap { + testCases.sorted.foreach { case (testCaseName, testCaseFile) => if (blackList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_)) { logDebug(s"Blacklisted test skipped $testCaseName") - None } else if (realWhiteList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_) || runAll) { // Build a test case and submit it to scala test framework... - Some(testCaseName -> testCaseFile) + val queriesString = fileToString(testCaseFile) + createQueryTest(testCaseName, queriesString) } else { // Only output warnings for the built in whitelist as this clutters the output when the user // trying to execute a single test from the commandline. if (System.getProperty(whiteListProperty) == null && !runAll) { ignore(testCaseName) {} } - None } } - - // Pick a random sample of tests to serve as a "smoke" test. This is used by automated tests when - // the sql/ code hasn't been changed, to avoid running the whole test suite for every PR that - // touches core code. - private val smokeCount = sys.props.getOrElse("spark.hive.smoke.count", "20").toInt - private val smokeSet = Random.shuffle(testsToRun).take(smokeCount) - .map { case (name, _) => name }.toSet - - testsToRun.foreach { case (testCaseName, testCaseFile) => - val queriesString = fileToString(testCaseFile) - val tag = if (!smokeSet.contains(testCaseName)) Some(ExtendedHiveTest) else None - createQueryTest(testCaseName, queriesString, tag = tag) - } - } diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala index 889288319024..5586a793618b 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala @@ -38,7 +38,7 @@ class HiveSerDeSuite extends HiveComparisonTest with BeforeAndAfterAll { } // table sales is not a cache table, and will be clear after reset - createQueryTest("Read with RegexSerDe", "SELECT * FROM sales", reset = false) + createQueryTest("Read with RegexSerDe", "SELECT * FROM sales", false) createQueryTest( "Read and write with LazySimpleSerDe (tab separated)", From 090e1d4f67c89b9f272ef80e0c1a76a836813bb1 Mon Sep 17 00:00:00 2001 From: Marcelo Vanzin Date: Wed, 26 Aug 2015 17:24:33 -0700 Subject: [PATCH 12/12] Define sbt_test_goals for yarn module. 
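Without explicit goals the yarn module contributes nothing to the sbt invocation when only yarn/ files change. run-tests.py assembles the goal list roughly like this (illustrative sketch, not the verbatim script; test_modules, test_profiles, and exec_sbt come from dev/run-tests.py):

    import itertools

    sbt_test_goals = list(itertools.chain.from_iterable(
        m.sbt_test_goals for m in test_modules))
    # After this change, a yarn-only PR contributes
    # ["yarn/test", "network-yarn/test"] to the goals below.
    exec_sbt(test_profiles + sbt_test_goals)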
--- dev/sparktestsupport/modules.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py index 854002d9d422..65397f1f3e0b 100644 --- a/dev/sparktestsupport/modules.py +++ b/dev/sparktestsupport/modules.py @@ -411,6 +411,10 @@ def contains_file(self, filename): "yarn/", "network/yarn/", ], + sbt_test_goals=[ + "yarn/test", + "network-yarn/test", + ], test_tags=[ "org.apache.spark.deploy.yarn.ExtendedYarnTest" ]
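As an end-to-end illustration of what this series enables (hypothetical names; only the hive and yarn tags above actually exist), a module declared as

    example = Module(
        name="example",
        dependencies=[],
        source_file_regexes=["example/"],
        sbt_test_goals=["example/test"],
        test_tags=["org.apache.spark.example.ExtendedExampleTest"],
    )

makes PRs that do not touch example/ run with -Dtest.exclude.tags=org.apache.spark.example.ExtendedExampleTest, which SparkBuild.scala translates into scalatest's "-l <tag>" and junit-interface's "--exclude-categories=<tag>" arguments, so any suite carrying the matching annotation is skipped.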