From a0322e6fffdc7c79d3aec6784ce1dc675a44fc4e Mon Sep 17 00:00:00 2001
From: HyukjinKwon
Date: Wed, 28 Aug 2019 10:03:43 +0900
Subject: [PATCH] Use SparkSession(SparkContext(...)) to prevent Spark conf
 from affecting other tests

---
 python/pyspark/sql/tests/test_arrow.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/python/pyspark/sql/tests/test_arrow.py b/python/pyspark/sql/tests/test_arrow.py
index 50c82b0b5f88a..f32513771cbcf 100644
--- a/python/pyspark/sql/tests/test_arrow.py
+++ b/python/pyspark/sql/tests/test_arrow.py
@@ -22,6 +22,7 @@
 import unittest
 import warnings
 
+from pyspark import SparkContext, SparkConf
 from pyspark.sql import Row, SparkSession
 from pyspark.sql.functions import udf
 from pyspark.sql.types import *
@@ -430,11 +431,8 @@ class MaxResultArrowTests(unittest.TestCase):
 
     @classmethod
     def setUpClass(cls):
-        cls.spark = SparkSession.builder \
-            .master("local[4]") \
-            .appName(cls.__name__) \
-            .config("spark.driver.maxResultSize", "10k") \
-            .getOrCreate()
+        cls.spark = SparkSession(SparkContext(
+            'local[4]', cls.__name__, conf=SparkConf().set("spark.driver.maxResultSize", "10k")))
 
         # Explicitly enable Arrow and disable fallback.
         cls.spark.conf.set("spark.sql.execution.arrow.pyspark.enabled", "true")
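
For context, a minimal sketch of the pattern this patch adopts. SparkSession.builder.getOrCreate() can return a session backed by an already-running SparkContext, in which case a builder-level setting such as spark.driver.maxResultSize is silently ignored, and a session it does create can in turn be picked up by later test classes. Constructing a dedicated SparkContext with its own SparkConf, wrapping it in SparkSession, and stopping it in tearDownClass keeps the conf isolated. The PySpark calls below are real; the test class name and its structure are illustrative, not part of the patch:

# Sketch of the conf-isolation pattern from this patch; MyIsolatedTests is
# a hypothetical test class, the PySpark APIs are the ones used in the diff.
import unittest

from pyspark import SparkContext, SparkConf
from pyspark.sql import SparkSession


class MyIsolatedTests(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        # A fresh SparkContext built from its own SparkConf: the setting
        # cannot be dropped by getOrCreate() reusing an existing session.
        conf = SparkConf().set("spark.driver.maxResultSize", "10k")
        cls.spark = SparkSession(SparkContext('local[4]', cls.__name__, conf=conf))

    @classmethod
    def tearDownClass(cls):
        # Stopping the session also stops the underlying SparkContext, so
        # the conf cannot leak into test classes that run afterwards.
        cls.spark.stop()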