
Commit cd97b62

Merge branch 'master' into SPARK-13704_update

2 parents: f5efc74 + 57aff93

220 files changed: +2594 −1104 lines


LICENSE-binary

Lines changed: 25 additions & 25 deletions
@@ -209,34 +209,34 @@ org.apache.zookeeper:zookeeper
 oro:oro
 commons-configuration:commons-configuration
 commons-digester:commons-digester
-com.chuusai:shapeless_2.11
+com.chuusai:shapeless_2.12
 com.googlecode.javaewah:JavaEWAH
 com.twitter:chill-java
-com.twitter:chill_2.11
+com.twitter:chill_2.12
 com.univocity:univocity-parsers
 javax.jdo:jdo-api
 joda-time:joda-time
 net.sf.opencsv:opencsv
 org.apache.derby:derby
 org.objenesis:objenesis
 org.roaringbitmap:RoaringBitmap
-org.scalanlp:breeze-macros_2.11
-org.scalanlp:breeze_2.11
-org.typelevel:macro-compat_2.11
+org.scalanlp:breeze-macros_2.12
+org.scalanlp:breeze_2.12
+org.typelevel:macro-compat_2.12
 org.yaml:snakeyaml
 org.apache.xbean:xbean-asm5-shaded
 com.squareup.okhttp3:logging-interceptor
 com.squareup.okhttp3:okhttp
 com.squareup.okio:okio
-org.apache.spark:spark-catalyst_2.11
-org.apache.spark:spark-kvstore_2.11
-org.apache.spark:spark-launcher_2.11
-org.apache.spark:spark-mllib-local_2.11
-org.apache.spark:spark-network-common_2.11
-org.apache.spark:spark-network-shuffle_2.11
-org.apache.spark:spark-sketch_2.11
-org.apache.spark:spark-tags_2.11
-org.apache.spark:spark-unsafe_2.11
+org.apache.spark:spark-catalyst_2.12
+org.apache.spark:spark-kvstore_2.12
+org.apache.spark:spark-launcher_2.12
+org.apache.spark:spark-mllib-local_2.12
+org.apache.spark:spark-network-common_2.12
+org.apache.spark:spark-network-shuffle_2.12
+org.apache.spark:spark-sketch_2.12
+org.apache.spark:spark-tags_2.12
+org.apache.spark:spark-unsafe_2.12
 commons-httpclient:commons-httpclient
 com.vlkan:flatbuffers
 com.ning:compress-lzf
@@ -284,18 +284,18 @@ org.apache.orc:orc-mapreduce
 org.mortbay.jetty:jetty
 org.mortbay.jetty:jetty-util
 com.jolbox:bonecp
-org.json4s:json4s-ast_2.11
-org.json4s:json4s-core_2.11
-org.json4s:json4s-jackson_2.11
-org.json4s:json4s-scalap_2.11
+org.json4s:json4s-ast_2.12
+org.json4s:json4s-core_2.12
+org.json4s:json4s-jackson_2.12
+org.json4s:json4s-scalap_2.12
 com.carrotsearch:hppc
 com.fasterxml.jackson.core:jackson-annotations
 com.fasterxml.jackson.core:jackson-core
 com.fasterxml.jackson.core:jackson-databind
 com.fasterxml.jackson.dataformat:jackson-dataformat-yaml
 com.fasterxml.jackson.module:jackson-module-jaxb-annotations
 com.fasterxml.jackson.module:jackson-module-paranamer
-com.fasterxml.jackson.module:jackson-module-scala_2.11
+com.fasterxml.jackson.module:jackson-module-scala_2.12
 com.github.mifmif:generex
 com.google.code.findbugs:jsr305
 com.google.code.gson:gson
@@ -412,8 +412,8 @@ com.thoughtworks.paranamer:paranamer
 org.scala-lang:scala-compiler
 org.scala-lang:scala-library
 org.scala-lang:scala-reflect
-org.scala-lang.modules:scala-parser-combinators_2.11
-org.scala-lang.modules:scala-xml_2.11
+org.scala-lang.modules:scala-parser-combinators_2.12
+org.scala-lang.modules:scala-xml_2.12
 org.fusesource.leveldbjni:leveldbjni-all
 net.sourceforge.f2j:arpack_combined_all
 xmlenc:xmlenc
@@ -434,15 +434,15 @@ is distributed under the 3-Clause BSD license.
 MIT License
 -----------
 
-org.spire-math:spire-macros_2.11
-org.spire-math:spire_2.11
-org.typelevel:machinist_2.11
+org.spire-math:spire-macros_2.12
+org.spire-math:spire_2.12
+org.typelevel:machinist_2.12
 net.razorvine:pyrolite
 org.slf4j:jcl-over-slf4j
 org.slf4j:jul-to-slf4j
 org.slf4j:slf4j-api
 org.slf4j:slf4j-log4j12
-com.github.scopt:scopt_2.11
+com.github.scopt:scopt_2.12
 
 core/src/main/resources/org/apache/spark/ui/static/dagre-d3.min.js
 core/src/main/resources/org/apache/spark/ui/static/*dataTables*
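All 25 renames above follow the same convention: Scala libraries are cross-published with the Scala binary version as an artifact-name suffix, because code compiled against 2.11 is not binary-compatible with 2.12, so moving Spark to Scala 2.12 turns every `_2.11` suffix into `_2.12`. A minimal sbt sketch of how that suffix is normally selected (version numbers here are illustrative, not taken from Spark's build):

```scala
// build.sbt -- minimal sketch; version numbers are illustrative.
scalaVersion := "2.12.8"

libraryDependencies ++= Seq(
  // %% appends the Scala binary version automatically, so this
  // resolves to the chill_2.12 artifact under a 2.12.x scalaVersion.
  "com.twitter" %% "chill" % "0.9.3",
  // A single % takes the artifact name verbatim, suffix and all.
  "org.scalanlp" % "breeze_2.12" % "0.13.2"
)
```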

R/CRAN_RELEASE.md

Lines changed: 18 additions & 0 deletions
@@ -1,3 +1,21 @@
+---
+license: |
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+---
+
 # SparkR CRAN Release
 
 To release SparkR as a package to CRAN, we would use the `devtools` package. Please work with the

R/DOCUMENTATION.md

Lines changed: 18 additions & 0 deletions
@@ -1,3 +1,21 @@
+---
+license: |
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+---
+
 # SparkR Documentation
 
 SparkR documentation is generated by using in-source comments and annotated by using

R/WINDOWS.md

Lines changed: 18 additions & 0 deletions
@@ -1,3 +1,21 @@
+---
+license: |
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+---
+
 ## Building SparkR on Windows
 
 To build SparkR on Windows, the following steps are required

R/pkg/DESCRIPTION

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 Package: SparkR
 Type: Package
 Version: 3.0.0
-Title: R Front end for 'Apache Spark'
+Title: R Front End for 'Apache Spark'
 Description: Provides an R Front end for 'Apache Spark' <https://spark.apache.org>.
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),
              email = "[email protected]"),

core/src/main/scala/org/apache/spark/BarrierCoordinator.scala

Lines changed: 3 additions & 5 deletions
@@ -19,7 +19,7 @@ package org.apache.spark
 
 import java.util.{Timer, TimerTask}
 import java.util.concurrent.ConcurrentHashMap
-import java.util.function.{Consumer, Function}
+import java.util.function.Consumer
 
 import scala.collection.mutable.ArrayBuffer
 
@@ -202,10 +202,8 @@ private[spark] class BarrierCoordinator(
     case request @ RequestToSync(numTasks, stageId, stageAttemptId, _, _) =>
       // Get or init the ContextBarrierState correspond to the stage attempt.
       val barrierId = ContextBarrierId(stageId, stageAttemptId)
-      states.computeIfAbsent(barrierId, new Function[ContextBarrierId, ContextBarrierState] {
-        override def apply(key: ContextBarrierId): ContextBarrierState =
-          new ContextBarrierState(key, numTasks)
-      })
+      states.computeIfAbsent(barrierId,
+        (key: ContextBarrierId) => new ContextBarrierState(key, numTasks))
       val barrierState = states.get(barrierId)
 
       barrierState.handleRequest(context, request)
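This refactor, and the similar ones below, lean on Scala 2.12's SAM conversion: a function literal can be passed directly where a Java functional interface such as `java.util.function.Function` is expected, which is why the anonymous class (and the `Function` import) can go. A self-contained sketch of the `computeIfAbsent` case, with illustrative names standing in for `ContextBarrierId` and `ContextBarrierState`:

```scala
import java.util.concurrent.ConcurrentHashMap

// Illustrative stand-in for ContextBarrierState: per-key state with a task count.
final case class State(key: String, numTasks: Int)

object SamConversionDemo {
  def main(args: Array[String]): Unit = {
    val states = new ConcurrentHashMap[String, State]()

    // Scala 2.12: the lambda converts to java.util.function.Function, so no
    // `new Function[...] { override def apply ... }` wrapper is needed.
    states.computeIfAbsent("stage-0", (key: String) => State(key, numTasks = 4))

    // The factory only runs for absent keys; the cached value is returned here.
    val s = states.computeIfAbsent("stage-0", (key: String) => State(key, numTasks = 99))
    println(s) // State(stage-0,4)
  }
}
```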

core/src/main/scala/org/apache/spark/ContextCleaner.scala

Lines changed: 2 additions & 3 deletions
@@ -123,9 +123,8 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
     cleaningThread.setDaemon(true)
     cleaningThread.setName("Spark Context Cleaner")
     cleaningThread.start()
-    periodicGCService.scheduleAtFixedRate(new Runnable {
-      override def run(): Unit = System.gc()
-    }, periodicGCInterval, periodicGCInterval, TimeUnit.SECONDS)
+    periodicGCService.scheduleAtFixedRate(() => System.gc(),
+      periodicGCInterval, periodicGCInterval, TimeUnit.SECONDS)
   }
 
   /**

core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala

Lines changed: 3 additions & 5 deletions
@@ -98,11 +98,9 @@ private[spark] class HeartbeatReceiver(sc: SparkContext, clock: Clock)
   private val killExecutorThread = ThreadUtils.newDaemonSingleThreadExecutor("kill-executor-thread")
 
   override def onStart(): Unit = {
-    timeoutCheckingTask = eventLoopThread.scheduleAtFixedRate(new Runnable {
-      override def run(): Unit = Utils.tryLogNonFatalError {
-        Option(self).foreach(_.ask[Boolean](ExpireDeadHosts))
-      }
-    }, 0, checkTimeoutIntervalMs, TimeUnit.MILLISECONDS)
+    timeoutCheckingTask = eventLoopThread.scheduleAtFixedRate(
+      () => Utils.tryLogNonFatalError { Option(self).foreach(_.ask[Boolean](ExpireDeadHosts)) },
+      0, checkTimeoutIntervalMs, TimeUnit.MILLISECONDS)
   }
 
   override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
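The `ContextCleaner` and `HeartbeatReceiver` changes apply the same conversion to `java.lang.Runnable`: a `() => Unit` literal now satisfies `scheduleAtFixedRate` directly. A minimal sketch of the pattern (the executor and task here are illustrative, not Spark's):

```scala
import java.util.concurrent.{Executors, TimeUnit}

object PeriodicTaskDemo {
  def main(args: Array[String]): Unit = {
    val scheduler = Executors.newSingleThreadScheduledExecutor()

    // Scala 2.12: () => ... converts to java.lang.Runnable, replacing the
    // old `new Runnable { override def run(): Unit = ... }` boilerplate.
    scheduler.scheduleAtFixedRate(() => println("tick"), 0, 1, TimeUnit.SECONDS)

    Thread.sleep(3500) // let a few ticks fire
    scheduler.shutdown()
  }
}
```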

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 2 additions & 5 deletions
@@ -62,9 +62,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
 
   @transient private lazy val reader: ConfigReader = {
     val _reader = new ConfigReader(new SparkConfigProvider(settings))
-    _reader.bindEnv(new ConfigProvider {
-      override def get(key: String): Option[String] = Option(getenv(key))
-    })
+    _reader.bindEnv((key: String) => Option(getenv(key)))
     _reader
   }
 
@@ -392,7 +390,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
 
   /** Get an optional value, applying variable substitution. */
   private[spark] def getWithSubstitution(key: String): Option[String] = {
-    getOption(key).map(reader.substitute(_))
+    getOption(key).map(reader.substitute)
   }
 
   /** Get all parameters as a list of pairs */
@@ -740,7 +738,6 @@ private[spark] object SparkConf extends Logging {
    */
   def isExecutorStartupConf(name: String): Boolean = {
     (name.startsWith("spark.auth") && name != SecurityManager.SPARK_AUTH_SECRET_CONF) ||
-    name.startsWith("spark.ssl") ||
     name.startsWith("spark.rpc") ||
     name.startsWith("spark.network") ||
     isSparkPortConf(name)
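Besides the `bindEnv` SAM conversion and the removal of `spark.ssl` from the executor-startup prefixes, the middle hunk swaps `reader.substitute(_)` for a plain method reference; the two forms are equivalent, with eta-expansion turning the method into a function value. A tiny illustration (the method here is made up):

```scala
object EtaExpansionDemo {
  // An illustrative stand-in for reader.substitute.
  def substitute(s: String): String = s.replace("${user}", "spark")

  def main(args: Array[String]): Unit = {
    val values = List("hello ${user}", "bye ${user}")

    // A placeholder lambda and an eta-expanded method reference
    // produce the same result; the latter is the tidier idiom.
    val a = values.map(substitute(_))
    val b = values.map(substitute)

    println(a == b) // true
  }
}
```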

core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala

Lines changed: 1 addition & 5 deletions
@@ -60,11 +60,7 @@ object PythonRunner {
       .javaAddress(localhost)
       .callbackClient(py4j.GatewayServer.DEFAULT_PYTHON_PORT, localhost, secret)
       .build()
-    val thread = new Thread(new Runnable() {
-      override def run(): Unit = Utils.logUncaughtExceptions {
-        gatewayServer.start()
-      }
-    })
+    val thread = new Thread(() => Utils.logUncaughtExceptions { gatewayServer.start() })
     thread.setName("py4j-gateway-init")
     thread.setDaemon(true)
     thread.start()
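`new Thread(() => ...)` is the same 2.12 conversion again, this time targeting `Thread`'s `Runnable`-taking constructor; overload resolution picks the `Runnable` overload for the function literal. A short sketch (thread name and body are illustrative):

```scala
object BackgroundThreadDemo {
  def main(args: Array[String]): Unit = {
    // Under 2.11 this constructor call needed `new Runnable { ... }`;
    // under 2.12 the function literal converts to java.lang.Runnable.
    val thread = new Thread(() => println(s"running on ${Thread.currentThread().getName}"))
    thread.setName("demo-init")
    thread.setDaemon(true)
    thread.start()
    thread.join()
  }
}
```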
