From f8c09de63aff3bcb220f5fa80926e83f4479c8b1 Mon Sep 17 00:00:00 2001
From: Jeff Zhang
Date: Tue, 26 Jan 2016 12:17:48 +0800
Subject: [PATCH] [SPARK-12993][PYSPARK] Remove usage of ADD_FILES in pyspark

---
 python/pyspark/shell.py | 11 +----------
 1 file changed, 1 insertion(+), 10 deletions(-)

diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 26cafca8b838..7c37f7519347 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -32,15 +32,10 @@
 from pyspark.sql import SQLContext, HiveContext
 from pyspark.storagelevel import StorageLevel
 
-# this is the deprecated equivalent of ADD_JARS
-add_files = None
-if os.environ.get("ADD_FILES") is not None:
-    add_files = os.environ.get("ADD_FILES").split(',')
-
 if os.environ.get("SPARK_EXECUTOR_URI"):
     SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
 
-sc = SparkContext(pyFiles=add_files)
+sc = SparkContext()
 atexit.register(lambda: sc.stop())
 
 try:
@@ -68,10 +63,6 @@
     platform.python_build()[1]))
 print("SparkContext available as sc, %s available as sqlContext." % sqlContext.__class__.__name__)
 
-if add_files is not None:
-    print("Warning: ADD_FILES environment variable is deprecated, use --py-files argument instead")
-    print("Adding files: [%s]" % ", ".join(add_files))
-
 # The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
 # which allows us to execute the user's PYTHONSTARTUP file:
 _pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
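
After this patch the shell no longer reads the ADD_FILES environment variable, so Python dependencies have to be shipped through the --py-files mechanism that the removed warning already pointed to. A minimal sketch of the replacement workflow, assuming a hypothetical dependency archive named deps.zip:

    # At launch time, hand the archive to pyspark/spark-submit:
    #     ./bin/pyspark --py-files deps.zip
    # Or from an already-running shell, where sc is the SparkContext created above:
    sc.addPyFile("deps.zip")  # distributes deps.zip to executors so its modules can be imported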