Skip to content

Commit a6e08b4

Browse files
committed
Merge remote-tracking branch 'upstream/master' into dataTypeAndSchema
Conflicts: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala, sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala, sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala, sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
2 parents c712fbf + 8446746 commit a6e08b4

File tree

73 files changed

+2268
-417
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

73 files changed

+2268
-417
lines changed

LICENSE

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -272,7 +272,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
272272

273273

274274
========================================================================
275-
For Py4J (python/lib/py4j0.7.egg and files in assembly/lib/net/sf/py4j):
275+
For Py4J (python/lib/py4j-0.8.2.1-src.zip)
276276
========================================================================
277277

278278
Copyright (c) 2009-2011, Barthelemy Dagenais All rights reserved.
@@ -532,7 +532,7 @@ The following components are provided under a BSD-style license. See project lin
532532
(New BSD license) Protocol Buffer Java API (org.spark-project.protobuf:protobuf-java:2.4.1-shaded - http://code.google.com/p/protobuf)
533533
(The BSD License) Fortran to Java ARPACK (net.sourceforge.f2j:arpack_combined_all:0.1 - http://f2j.sourceforge.net)
534534
(The BSD License) xmlenc Library (xmlenc:xmlenc:0.52 - http://xmlenc.sourceforge.net)
535-
(The New BSD License) Py4J (net.sf.py4j:py4j:0.8.1 - http://py4j.sourceforge.net/)
535+
(The New BSD License) Py4J (net.sf.py4j:py4j:0.8.2.1 - http://py4j.sourceforge.net/)
536536
(Two-clause BSD-style license) JUnit-Interface (com.novocode:junit-interface:0.10 - http://github.com/szeiger/junit-interface/)
537537
(ISC/BSD License) jbcrypt (org.mindrot:jbcrypt:0.3m - http://www.mindrot.org/)
538538

bin/pyspark

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ export PYSPARK_PYTHON
5252

5353
# Add the PySpark classes to the Python path:
5454
export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
55-
export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
55+
export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
5656

5757
# Load the PySpark shell.py script when ./pyspark is used interactively:
5858
export OLD_PYTHONSTARTUP=$PYTHONSTARTUP

bin/pyspark2.cmd

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ rem Figure out which Python to use.
4545
if [%PYSPARK_PYTHON%] == [] set PYSPARK_PYTHON=python
4646

4747
set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
48-
set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.1-src.zip;%PYTHONPATH%
48+
set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.2.1-src.zip;%PYTHONPATH%
4949

5050
set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
5151
set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py

core/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -275,7 +275,7 @@
275275
<dependency>
276276
<groupId>net.sf.py4j</groupId>
277277
<artifactId>py4j</artifactId>
278-
<version>0.8.1</version>
278+
<version>0.8.2.1</version>
279279
</dependency>
280280
</dependencies>
281281
<build>

core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ private[spark] object PythonUtils {
2929
val pythonPath = new ArrayBuffer[String]
3030
for (sparkHome <- sys.env.get("SPARK_HOME")) {
3131
pythonPath += Seq(sparkHome, "python").mkString(File.separator)
32-
pythonPath += Seq(sparkHome, "python", "lib", "py4j-0.8.1-src.zip").mkString(File.separator)
32+
pythonPath += Seq(sparkHome, "python", "lib", "py4j-0.8.2.1-src.zip").mkString(File.separator)
3333
}
3434
pythonPath ++= SparkContext.jarOfObject(this)
3535
pythonPath.mkString(File.pathSeparator)

pom.xml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,7 @@
114114
<sbt.project.name>spark</sbt.project.name>
115115
<scala.version>2.10.4</scala.version>
116116
<scala.binary.version>2.10</scala.binary.version>
117+
<scala.macros.version>2.0.1</scala.macros.version>
117118
<mesos.version>0.18.1</mesos.version>
118119
<mesos.classifier>shaded-protobuf</mesos.classifier>
119120
<akka.group>org.spark-project.akka</akka.group>
@@ -825,6 +826,15 @@
825826
<javacArg>-target</javacArg>
826827
<javacArg>${java.version}</javacArg>
827828
</javacArgs>
829+
<!-- The following plugin is required to use quasiquotes in Scala 2.10 and is used
830+
by Spark SQL for code generation. -->
831+
<compilerPlugins>
832+
<compilerPlugin>
833+
<groupId>org.scalamacros</groupId>
834+
<artifactId>paradise_${scala.version}</artifactId>
835+
<version>${scala.macros.version}</version>
836+
</compilerPlugin>
837+
</compilerPlugins>
828838
</configuration>
829839
</plugin>
830840
<plugin>

project/SparkBuild.scala

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -167,6 +167,9 @@ object SparkBuild extends PomBuild {
167167
/* Enable unidoc only for the root spark project */
168168
enable(Unidoc.settings)(spark)
169169

170+
/* Catalyst macro settings */
171+
enable(Catalyst.settings)(catalyst)
172+
170173
/* Spark SQL Core console settings */
171174
enable(SQL.settings)(sql)
172175

@@ -189,10 +192,13 @@ object Flume {
189192
lazy val settings = sbtavro.SbtAvro.avroSettings
190193
}
191194

192-
object SQL {
193-
195+
object Catalyst {
194196
lazy val settings = Seq(
197+
addCompilerPlugin("org.scalamacros" % "paradise" % "2.0.1" cross CrossVersion.full))
198+
}
195199

200+
object SQL {
201+
lazy val settings = Seq(
196202
initialCommands in console :=
197203
"""
198204
|import org.apache.spark.sql.catalyst.analysis._
@@ -207,7 +213,6 @@ object SQL {
207213
|import org.apache.spark.sql.test.TestSQLContext._
208214
|import org.apache.spark.sql.parquet.ParquetTestData""".stripMargin
209215
)
210-
211216
}
212217

213218
object Hive {

python/lib/py4j-0.8.1-src.zip

-36.8 KB
Binary file not shown.

python/lib/py4j-0.8.2.1-src.zip

+36.7 KB
Binary file not shown.

sbin/spark-config.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,4 +36,4 @@ export SPARK_HOME=${SPARK_PREFIX}
3636
export SPARK_CONF_DIR="$SPARK_HOME/conf"
3737
# Add the PySpark classes to the PYTHONPATH:
3838
export PYTHONPATH=$SPARK_HOME/python:$PYTHONPATH
39-
export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
39+
export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH

0 commit comments

Comments (0)