Skip to content

Commit db53aa5

Browse files
committed
Investigate PIP package failure
1 parent aa3a742 commit db53aa5

File tree

2 files changed

+15
-9
lines changed

2 files changed

+15
-9
lines changed

dev/run-pip-tests

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,7 @@ for python in "${PYTHON_EXECS[@]}"; do
110110
cd /
111111

112112
echo "Run basic sanity check on pip installed version with spark-submit"
113+
ls -al `which spark-submit`
113114
spark-submit "$FWDIR"/dev/pip-sanity-check.py
114115
echo "Run basic sanity check with import based"
115116
python3 "$FWDIR"/dev/pip-sanity-check.py

dev/run-tests.py

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -618,13 +618,14 @@ def main():
618618
test_modules = determine_modules_to_test(changed_modules)
619619

620620
# license checks
621-
run_apache_rat_checks()
621+
# run_apache_rat_checks()
622622

623623
# style checks
624624
if not changed_files or any(f.endswith(".scala")
625625
or f.endswith("scalastyle-config.xml")
626626
for f in changed_files):
627-
run_scala_style_checks(extra_profiles)
627+
# run_scala_style_checks(extra_profiles)
628+
pass
628629
should_run_java_style_checks = False
629630
if not changed_files or any(f.endswith(".java")
630631
or f.endswith("checkstyle.xml")
@@ -636,20 +637,23 @@ def main():
636637
or f.endswith("tox.ini")
637638
or f.endswith(".py")
638639
for f in changed_files):
639-
run_python_style_checks()
640+
# run_python_style_checks()
641+
pass
640642
if not changed_files or any(f.endswith(".R")
641643
or f.endswith("lint-r")
642644
or f.endswith(".lintr")
643645
for f in changed_files):
644-
run_sparkr_style_checks()
646+
# run_sparkr_style_checks()
647+
pass
645648

646649
# determine if docs were changed and if we're inside the amplab environment
647650
# note - the below commented out until *all* Jenkins workers can get `jekyll` installed
648651
# if "DOCS" in changed_modules and test_env == "amplab_jenkins":
649652
# build_spark_documentation()
650653

651654
if any(m.should_run_build_tests for m in test_modules):
652-
run_build_tests()
655+
# run_build_tests()
656+
pass
653657

654658
# spark build
655659
build_apache_spark(build_tool, extra_profiles)
@@ -663,18 +667,19 @@ def main():
663667
build_spark_assembly_sbt(extra_profiles, should_run_java_style_checks)
664668

665669
# run the test suites
666-
run_scala_tests(build_tool, extra_profiles, test_modules, excluded_tags)
670+
# run_scala_tests(build_tool, extra_profiles, test_modules, excluded_tags)
667671

668672
modules_with_python_tests = [m for m in test_modules if m.python_test_goals]
669673
if modules_with_python_tests:
670674
# We only run PySpark tests with coverage report in one specific job with
671675
# Spark master with SBT in Jenkins.
672676
is_sbt_master_job = "SPARK_MASTER_SBT_HADOOP_2_7" in os.environ
673-
run_python_tests(
674-
modules_with_python_tests, opts.parallelism, with_coverage=is_sbt_master_job)
677+
# run_python_tests(
678+
# modules_with_python_tests, opts.parallelism, with_coverage=is_sbt_master_job)
675679
run_python_packaging_tests()
676680
if any(m.should_run_r_tests for m in test_modules):
677-
run_sparkr_tests()
681+
# run_sparkr_tests()
682+
pass
678683

679684

680685
def _test():

0 commit comments

Comments (0)