Skip to content

Commit 4651051

Browse files
committed
Make it possible to create Java/Python SQLContexts from an existing Scala SQLContext.
1 parent 52d9052 commit 4651051

File tree

2 files changed: +7 additions, -4 deletions

python/pyspark/sql.py

Lines changed: 5 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -28,7 +28,7 @@ class SQLContext:
2828
register L{SchemaRDD}s as tables, execute sql over tables, cache tables, and read parquet files.
2929
"""
3030

31-
def __init__(self, sparkContext):
31+
def __init__(self, sparkContext, sqlContext = None):
3232
"""
3333
Create a new SQLContext.
3434
@@ -58,10 +58,13 @@ def __init__(self, sparkContext):
5858
self._jvm = self._sc._jvm
5959
self._pythonToJavaMap = self._jvm.PythonRDD.pythonToJavaMap
6060

61+
if sqlContext:
62+
self._scala_SQLContext = sqlContext
63+
6164
@property
6265
def _ssql_ctx(self):
6366
"""
64-
Accessor for the JVM SparkSQL context. Subclasses can overrite this property to provide
67+
Accessor for the JVM SparkSQL context. Subclasses can override this property to provide
6568
their own JVM Contexts.
6669
"""
6770
if not hasattr(self, '_scala_SQLContext'):

sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -33,9 +33,9 @@ import org.apache.spark.util.Utils
3333
/**
3434
* The entry point for executing Spark SQL queries from a Java program.
3535
*/
36-
class JavaSQLContext(sparkContext: JavaSparkContext) {
36+
class JavaSQLContext(val sqlContext: SQLContext) {
3737

38-
val sqlContext = new SQLContext(sparkContext.sc)
38+
def this(sparkContext: JavaSparkContext) = this(new SQLContext(sparkContext.sc))
3939

4040
/**
4141
* Executes a query expressed in SQL, returning the result as a JavaSchemaRDD

Comments (0)