@@ -40,6 +40,7 @@ def is_spark_home(path):
4040 paths = ["../" , os .path .dirname (os .path .realpath (__file__ ))]
4141
4242 # Add the path of the PySpark module if it exists
43+ is_error_raised = False
4344 if sys .version < "3" :
4445 import imp
4546 try :
@@ -49,7 +50,7 @@ def is_spark_home(path):
4950 paths .append (os .path .join (module_home , "../../" ))
5051 except ImportError :
5152 # Not pip installed no worries
52- pass
53+ is_error_raised = True
5354 else :
5455 from importlib .util import find_spec
5556 try :
@@ -59,7 +60,7 @@ def is_spark_home(path):
5960 paths .append (os .path .join (module_home , "../../" ))
6061 except ImportError :
6162 # Not pip installed no worries
62- pass
63+ is_error_raised = True
6364
6465 # Normalize the paths
6566 paths = [os .path .abspath (p ) for p in paths ]
@@ -68,6 +69,15 @@ def is_spark_home(path):
6869 return next (path for path in paths if is_spark_home (path ))
6970 except StopIteration :
7071 print ("Could not find valid SPARK_HOME while searching {0}" .format (paths ), file = sys .stderr )
72+ if is_error_raised :
73+ print (
74+ "Did you install PySpark via a package manager such as PIP or Conda? If so, your "
75+ "Python executable does not have the PySpark installed. It is possible your "
76+ "Python executable does not point out your PIP. Please check your default "
77+ "'python' and if you set PYSPARK_PYTHON and/or PYSPARK_DRIVER_PYTHON environment "
78+ "variables, and see if you can import PySpark. If you cannot import, you can "
79+ "install by using Python executable directly, for example, 'python -m pip "
80+ "install pyspark'" , file = sys .stderr )
7181 sys .exit (- 1 )
7282
7383if __name__ == "__main__" :