
Commit 8218d0a

Update
1 parent 801009e commit 8218d0a

File tree

2 files changed: +7 -4 lines changed


python/pyspark/sql/dataframe.py

Lines changed: 5 additions & 2 deletions
@@ -1414,11 +1414,14 @@ def between(self, lowerBound, upperBound):
 
     @ignore_unicode_prefix
     def when(self, whenExpr, thenExpr):
-        return self._jc.when(whenExpr, thenExpr)
+        if isinstance(whenExpr, Column):
+            jc = self._jc.when(whenExpr._jc, thenExpr)
+        return Column(jc)
 
     @ignore_unicode_prefix
     def otherwise(self, elseExpr):
-        return self._jc.otherwise(elseExpr)
+        jc = self._jc.otherwise(elseExpr)
+        return Column(jc)
 
     def __repr__(self):
         return 'Column<%s>' % self._jc.toString().encode('utf8')
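
With when() and otherwise() now wrapping the Py4J result in a Column instead of returning the raw JavaObject, the conditional expression composes with the rest of the DataFrame API. A minimal usage sketch, assuming a DataFrame df with an age column and an active SparkContext (neither is part of this diff):

    from pyspark.sql import functions as F

    # F.when(...) yields a Column after this change, so .otherwise() and
    # .alias() can be chained onto it and the result passed to select().
    df.select(df.age, F.when(df.age == 2, 1).otherwise(0).alias('is_two')).show()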

python/pyspark/sql/functions.py

Lines changed: 2 additions & 2 deletions
@@ -27,7 +27,7 @@
 from pyspark.rdd import _prepare_for_python_RDD, ignore_unicode_prefix
 from pyspark.serializers import PickleSerializer, AutoBatchedSerializer
 from pyspark.sql.types import StringType
-from pyspark.sql.dataframe import Column, _to_java_column, _to_seq
+from pyspark.sql.dataframe import Column, _to_java_column, _to_seq, _create_column_from_literal
 
 
 __all__ = [
@@ -154,7 +154,7 @@ def when(whenExpr, thenExpr):
     [Row(age=3), Row(age=None)]
     """
     sc = SparkContext._active_spark_context
-    jc = sc._jvm.functions.when(whenExpr, thenExpr)
+    jc = sc._jvm.functions.when(whenExpr._jc, thenExpr)
     return Column(jc)
 
 def rand(seed=None):
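
Because functions.when() now hands whenExpr._jc to the JVM, the condition argument has to be a Column expression (for example, one built from a comparison) rather than a plain Python value. A short multi-branch sketch under the same assumed df with an age column (hypothetical data, not part of this commit):

    from pyspark.sql.functions import when

    # Each condition below is a Column; its underlying Java column (_jc) is what
    # gets passed to sc._jvm.functions.when / Column.when on the JVM side.
    bucket = when(df.age < 13, 'child').when(df.age < 20, 'teen').otherwise('adult')
    df.select(df.age, bucket.alias('bucket')).show()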
