Commit 37d05ec

Revert "[SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)"
This reverts commit 087bd27.
1 parent 34feea3

2 files changed, +6 -8 lines changed

python/pyspark/sql/tests.py

Lines changed: 0 additions & 5 deletions
@@ -318,11 +318,6 @@ def test_multiple_udfs(self):
         [row] = self.spark.sql("SELECT double(add(1, 2)), add(double(2), 1)").collect()
         self.assertEqual(tuple(row), (6, 5))

-    def test_udf_without_arguments(self):
-        self.sqlCtx.registerFunction("foo", lambda: "bar")
-        [row] = self.sqlCtx.sql("SELECT foo()").collect()
-        self.assertEqual(row[0], "bar")
-
     def test_udf_with_array_type(self):
         d = [Row(l=list(range(3)), d={"key": list(range(5))})]
         rdd = self.sc.parallelize(d)
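
For context, the deleted test above exercised a zero-argument Python UDF, the case SPARK-16086 had addressed. A minimal standalone sketch of that scenario (the local SparkContext setup and app name here are illustrative, not part of the commit; the original test reuses the suite's sqlCtx fixture):

    from pyspark import SparkContext
    from pyspark.sql import SQLContext

    # Illustrative local setup for running the snippet outside the test suite.
    sc = SparkContext("local[1]", "zero-arg-udf-sketch")
    sqlCtx = SQLContext(sc)

    # Register a Python UDF that takes no arguments and call it from SQL,
    # mirroring the test removed by this revert.
    sqlCtx.registerFunction("foo", lambda: "bar")
    [row] = sqlCtx.sql("SELECT foo()").collect()
    print(row[0])  # expected "bar" when the reverted fix is in place

    sc.stop()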

python/pyspark/sql/types.py

Lines changed: 6 additions & 3 deletions
@@ -1401,16 +1401,19 @@ def __new__(self, *args, **kwargs):
         if args and kwargs:
             raise ValueError("Can not use both args "
                              "and kwargs to create Row")
-        if kwargs:
+        if args:
+            # create row class or objects
+            return tuple.__new__(self, args)
+
+        elif kwargs:
             # create row objects
             names = sorted(kwargs.keys())
             row = tuple.__new__(self, [kwargs[n] for n in names])
             row.__fields__ = names
             return row

         else:
-            # create row class or objects
-            return tuple.__new__(self, args)
+            raise ValueError("No args or kwargs")

     def asDict(self, recursive=False):
         """

0 commit comments
