Skip to content

Commit 22649b6

Browse files
JoshRosen authored and mateiz committed
[SPARK-2305] [PySpark] Update Py4J to version 0.8.2.1
Author: Josh Rosen <[email protected]> Closes #1626 from JoshRosen/SPARK-2305 and squashes the following commits: 03fb283 [Josh Rosen] Update Py4J to version 0.8.2.1.
1 parent 86534d0 commit 22649b6

File tree

9 files changed

+8
-8
lines changed

9 files changed

+8
-8
lines changed

LICENSE

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -272,7 +272,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
272272

273273

274274
========================================================================
275-
For Py4J (python/lib/py4j0.7.egg and files in assembly/lib/net/sf/py4j):
275+
For Py4J (python/lib/py4j-0.8.2.1-src.zip)
276276
========================================================================
277277

278278
Copyright (c) 2009-2011, Barthelemy Dagenais All rights reserved.
@@ -532,7 +532,7 @@ The following components are provided under a BSD-style license. See project lin
532532
(New BSD license) Protocol Buffer Java API (org.spark-project.protobuf:protobuf-java:2.4.1-shaded - http://code.google.com/p/protobuf)
533533
(The BSD License) Fortran to Java ARPACK (net.sourceforge.f2j:arpack_combined_all:0.1 - http://f2j.sourceforge.net)
534534
(The BSD License) xmlenc Library (xmlenc:xmlenc:0.52 - http://xmlenc.sourceforge.net)
535-
(The New BSD License) Py4J (net.sf.py4j:py4j:0.8.1 - http://py4j.sourceforge.net/)
535+
(The New BSD License) Py4J (net.sf.py4j:py4j:0.8.2.1 - http://py4j.sourceforge.net/)
536536
(Two-clause BSD-style license) JUnit-Interface (com.novocode:junit-interface:0.10 - http://github.com/szeiger/junit-interface/)
537537
(ISC/BSD License) jbcrypt (org.mindrot:jbcrypt:0.3m - http://www.mindrot.org/)
538538

bin/pyspark

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ export PYSPARK_PYTHON
5252

5353
# Add the PySpark classes to the Python path:
5454
export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
55-
export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
55+
export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
5656

5757
# Load the PySpark shell.py script when ./pyspark is used interactively:
5858
export OLD_PYTHONSTARTUP=$PYTHONSTARTUP

bin/pyspark2.cmd

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ rem Figure out which Python to use.
4545
if [%PYSPARK_PYTHON%] == [] set PYSPARK_PYTHON=python
4646

4747
set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
48-
set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.1-src.zip;%PYTHONPATH%
48+
set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.2.1-src.zip;%PYTHONPATH%
4949

5050
set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
5151
set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py

core/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -275,7 +275,7 @@
275275
<dependency>
276276
<groupId>net.sf.py4j</groupId>
277277
<artifactId>py4j</artifactId>
278-
<version>0.8.1</version>
278+
<version>0.8.2.1</version>
279279
</dependency>
280280
</dependencies>
281281
<build>

core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ private[spark] object PythonUtils {
2929
val pythonPath = new ArrayBuffer[String]
3030
for (sparkHome <- sys.env.get("SPARK_HOME")) {
3131
pythonPath += Seq(sparkHome, "python").mkString(File.separator)
32-
pythonPath += Seq(sparkHome, "python", "lib", "py4j-0.8.1-src.zip").mkString(File.separator)
32+
pythonPath += Seq(sparkHome, "python", "lib", "py4j-0.8.2.1-src.zip").mkString(File.separator)
3333
}
3434
pythonPath ++= SparkContext.jarOfObject(this)
3535
pythonPath.mkString(File.pathSeparator)

python/lib/py4j-0.8.1-src.zip

-36.8 KB
Binary file not shown.

python/lib/py4j-0.8.2.1-src.zip

36.7 KB
Binary file not shown.

sbin/spark-config.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,4 +36,4 @@ export SPARK_HOME=${SPARK_PREFIX}
3636
export SPARK_CONF_DIR="$SPARK_HOME/conf"
3737
# Add the PySpark classes to the PYTHONPATH:
3838
export PYTHONPATH=$SPARK_HOME/python:$PYTHONPATH
39-
export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
39+
export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH

sbin/spark-executor

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
FWDIR="$(cd `dirname $0`/..; pwd)"
2121

2222
export PYTHONPATH=$FWDIR/python:$PYTHONPATH
23-
export PYTHONPATH=$FWDIR/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
23+
export PYTHONPATH=$FWDIR/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
2424

2525
echo "Running spark-executor with framework dir = $FWDIR"
2626
exec $FWDIR/bin/spark-class org.apache.spark.executor.MesosExecutorBackend

0 commit comments

Comments (0)