Commit ecbfb65

Fix spacing and formatting
1 parent: b514bec

12 files changed: +66 −49 lines

core/src/main/scala/org/apache/spark/HttpServer.scala

Lines changed: 2 additions & 1 deletion
@@ -41,7 +41,8 @@ private[spark] class ServerStateException(message: String) extends Exception(message)
  * as well as classes created by the interpreter when the user types in code. This is just a wrapper
  * around a Jetty server.
  */
-private[spark] class HttpServer(resourceBase: File, securityManager: SecurityManager) extends Logging {
+private[spark] class HttpServer(resourceBase: File, securityManager: SecurityManager)
+  extends Logging {
   private var server: Server = null
   private var port: Int = -1
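
The change above only wraps an over-long class declaration, but the class is small enough to sketch: per its scaladoc, HttpServer serves a directory of files (JARs and interpreter-generated classes) through an embedded Jetty server. Below is a minimal standalone sketch of that idea using stock Jetty classes; the port and directory are placeholders, and none of this is Spark's actual wiring:

import java.io.File

import org.eclipse.jetty.server.Server
import org.eclipse.jetty.server.handler.ResourceHandler

object MiniFileServer {
  def main(args: Array[String]): Unit = {
    val resourceBase = new File("/tmp/spark-files")  // placeholder directory to serve

    // Serve the directory's contents over HTTP, as HttpServer's scaladoc describes.
    val handler = new ResourceHandler()
    handler.setResourceBase(resourceBase.getAbsolutePath)

    val server = new Server(8080)  // fixed port for the sketch; the real class picks its own
    server.setHandler(handler)
    server.start()
    server.join()
  }
}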

core/src/main/scala/org/apache/spark/SecurityManager.scala

Lines changed: 12 additions & 12 deletions
@@ -1,13 +1,13 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *    http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -104,10 +104,10 @@ import scala.collection.mutable.ArrayBuffer
  *
  *  - HTTP for the Spark UI -> the UI was changed to use servlets so that javax servlet filters
  *            can be used. Yarn requires a specific AmIpFilter be installed for security to work
- *            properly. For non-Yarn deployments, users can write a filter to go through a companies
- *            normal login service. If an authentication filter is in place then the SparkUI
- *            can be configured to check the logged in user against the list of users who have
- *            view acls to see if that user is authorized.
+ *            properly. For non-Yarn deployments, users can write a filter to go through a
+ *            companies normal login service. If an authentication filter is in place then the
+ *            SparkUI can be configured to check the logged in user against the list of users who
+ *            have view acls to see if that user is authorized.
  *            The filters can also be used for many different purposes. For instance filters
  *            could be used for logging, encypryption, or compression.
  *
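
The reflowed scaladoc describes installing an authentication filter in front of the UI, after which the SparkUI checks the logged-in user against its view acls. As a rough illustration of what such a javax.servlet filter could look like (the X-Authenticated-User header and the trust in an upstream login service are assumptions of this sketch, not anything Spark or Yarn prescribes):

import javax.servlet.{Filter, FilterChain, FilterConfig, ServletRequest, ServletResponse}
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

// Hypothetical filter: trusts a user header set by a fronting login service.
class CompanyLoginFilter extends Filter {
  override def init(config: FilterConfig): Unit = {}

  override def doFilter(req: ServletRequest, res: ServletResponse, chain: FilterChain): Unit = {
    val httpReq = req.asInstanceOf[HttpServletRequest]
    val httpRes = res.asInstanceOf[HttpServletResponse]
    // "X-Authenticated-User" is an assumption for this sketch, not a Spark convention.
    Option(httpReq.getHeader("X-Authenticated-User")) match {
      case Some(user) if user.nonEmpty =>
        chain.doFilter(req, res)  // let the UI check this user against its view acls
      case _ =>
        httpRes.sendError(HttpServletResponse.SC_UNAUTHORIZED, "login required")
    }
  }

  override def destroy(): Unit = {}
}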

core/src/main/scala/org/apache/spark/SparkSaslClient.scala

Lines changed: 7 additions & 8 deletions
@@ -1,13 +1,12 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License") you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *    http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,

core/src/main/scala/org/apache/spark/SparkSaslServer.scala

Lines changed: 8 additions & 8 deletions
@@ -1,20 +1,20 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License") you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *    http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.spark
 
 import javax.security.auth.callback.Callback

core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala

Lines changed: 2 additions & 1 deletion
@@ -60,7 +60,8 @@ abstract class Broadcast[T](val id: Long) extends Serializable {
 }
 
 private[spark]
-class BroadcastManager(val _isDriver: Boolean, conf: SparkConf, securityManager: SecurityManager) extends Logging with Serializable {
+class BroadcastManager(val _isDriver: Boolean, conf: SparkConf, securityManager: SecurityManager)
+  extends Logging with Serializable {
 
   private var initialized = false
   private var broadcastFactory: BroadcastFactory = null
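
The context lines beneath the rewrapped BroadcastManager declaration show an initialized flag next to a factory field that starts out null. For readers unfamiliar with that initialize-once idiom, here is a generic sketch with placeholder names; the real BroadcastManager picks a BroadcastFactory from configuration during its initialization:

// Generic initialize-once holder; the names are placeholders, not Spark's.
class LazyService {
  private var initialized = false
  private var factory: String = null  // stands in for a field like broadcastFactory

  // Synchronized so concurrent callers run the setup at most once.
  def initialize(): Unit = synchronized {
    if (!initialized) {
      factory = "ready"  // expensive setup would happen here
      initialized = true
    }
  }
}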

core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala

Lines changed: 1 addition & 1 deletion
@@ -153,7 +153,7 @@ private object HttpBroadcast extends Logging {
   }
 
   def read[T](id: Long): T = {
-    logDebug("broadcast read server: " + serverUri + " id: broadcast-"+id)
+    logDebug("broadcast read server: " + serverUri + " id: broadcast-" + id)
     val url = serverUri + "/" + BroadcastBlockId(id).name
 
     var uc: URLConnection = null
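
read builds a URL from serverUri plus the broadcast block's name and pulls the value through a URLConnection. A stripped-down sketch of that fetch step follows; plain Java serialization here is a simplification, since the real method goes through Spark's serializer and optional compression:

import java.io.ObjectInputStream
import java.net.URL

object FetchBroadcast {
  // Hypothetical client for an HTTP broadcast server: the URI and block name
  // mirror the serverUri + "/" + BroadcastBlockId(id).name pattern above.
  def read[T](serverUri: String, blockName: String): T = {
    val url = new URL(serverUri + "/" + blockName)
    val in = new ObjectInputStream(url.openConnection().getInputStream)
    try in.readObject().asInstanceOf[T]
    finally in.close()
  }
}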

core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala

Lines changed: 2 additions & 1 deletion
@@ -62,7 +62,8 @@ class MasterWebUI(val master: Master, requestedPort: Int) extends Logging {
 
   val handlers = metricsHandlers ++ Seq[ServletContextHandler](
     createStaticHandler(MasterWebUI.STATIC_RESOURCE_DIR, "/static/*"),
-    createServletHandler("/app/json", (request: HttpServletRequest) => applicationPage.renderJson(request)),
+    createServletHandler("/app/json",
+      (request: HttpServletRequest) => applicationPage.renderJson(request)),
     createServletHandler("/app", (request: HttpServletRequest) => applicationPage.render(request)),
     createServletHandler("/json", (request: HttpServletRequest) => indexPage.renderJson(request)),
     createServletHandler("*", (request: HttpServletRequest) => indexPage.render(request))

core/src/main/scala/org/apache/spark/network/Connection.scala

Lines changed: 7 additions & 4 deletions
@@ -447,8 +447,11 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
 
 
 // Must be created within selector loop - else deadlock
-private[spark] class ReceivingConnection(channel_ : SocketChannel, selector_ : Selector, id_ : ConnectionId)
-  extends Connection(channel_, selector_, id_) {
+private[spark] class ReceivingConnection(
+    channel_ : SocketChannel,
+    selector_ : Selector,
+    id_ : ConnectionId)
+  extends Connection(channel_, selector_, id_) {
 
   def isSaslComplete(): Boolean = {
     if (sparkSaslServer != null) sparkSaslServer.isComplete() else false
@@ -509,7 +512,7 @@ private[spark] class ReceivingConnection(channel_ : SocketChannel, selector_ : Selector, id_ : ConnectionId)
 
   val inbox = new Inbox()
   val headerBuffer: ByteBuffer = ByteBuffer.allocate(MessageChunkHeader.HEADER_SIZE)
-  var onReceiveCallback: (Connection , Message) => Unit = null
+  var onReceiveCallback: (Connection, Message) => Unit = null
   var currentChunk: MessageChunk = null
 
   channel.register(selector, SelectionKey.OP_READ)
@@ -584,7 +587,7 @@ private[spark] class ReceivingConnection(channel_ : SocketChannel, selector_ : Selector, id_ : ConnectionId)
         }
       }
     } catch {
-      case e: Exception => {
+      case e: Exception => {
         logWarning("Error reading from connection to " + getRemoteConnectionManagerId(), e)
         callOnExceptionCallback(e)
         close()
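
ReceivingConnection registers its channel for SelectionKey.OP_READ and hands inbound data to an onReceiveCallback, and the comment above its declaration warns that connections must be created on the selector thread. A self-contained java.nio loop showing that register-and-dispatch shape (the port and the logging callback are placeholders; this is not Spark's Connection hierarchy):

import java.net.InetSocketAddress
import java.nio.ByteBuffer
import java.nio.channels.{SelectionKey, Selector, ServerSocketChannel, SocketChannel}

object SelectorLoop {
  def main(args: Array[String]): Unit = {
    val selector = Selector.open()
    val server = ServerSocketChannel.open()
    server.configureBlocking(false)
    server.socket().bind(new InetSocketAddress(9999))  // placeholder port
    server.register(selector, SelectionKey.OP_ACCEPT)

    // Plays the role of onReceiveCallback in the diff above.
    val onReceive: (SocketChannel, ByteBuffer) => Unit =
      (ch, buf) => println("read " + buf.remaining() + " bytes from " +
        ch.socket().getRemoteSocketAddress)

    while (true) {
      selector.select()
      val keys = selector.selectedKeys().iterator()
      while (keys.hasNext) {
        val key = keys.next()
        keys.remove()
        if (key.isAcceptable) {
          // New channels are registered here, on the selector thread,
          // echoing the "must be created within selector loop" comment.
          val client = server.accept()
          client.configureBlocking(false)
          client.register(selector, SelectionKey.OP_READ)
        } else if (key.isReadable) {
          val ch = key.channel().asInstanceOf[SocketChannel]
          val buf = ByteBuffer.allocate(4096)
          if (ch.read(buf) < 0) ch.close()
          else { buf.flip(); onReceive(ch, buf) }
        }
      }
    }
  }
}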

core/src/main/scala/org/apache/spark/network/ConnectionManager.scala

Lines changed: 16 additions & 8 deletions
@@ -37,7 +37,8 @@ import scala.concurrent.duration._
 
 import org.apache.spark.util.{SystemClock, Utils}
 
-private[spark] class ConnectionManager(port: Int, conf: SparkConf, securityManager: SecurityManager) extends Logging {
+private[spark] class ConnectionManager(port: Int, conf: SparkConf,
+    securityManager: SecurityManager) extends Logging {
 
   class MessageStatus(
       val message: Message,
@@ -54,7 +55,8 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf, securityManager: SecurityManager) extends Logging {
   private val selector = SelectorProvider.provider.openSelector()
 
   // default to 30 second timeout waiting for authentication
-  private val authTimeout= System.getProperty("spark.core.connection.auth.wait.timeout","30000").toInt
+  private val authTimeout = System.getProperty("spark.core.connection.auth.wait.timeout",
+    "30000").toInt
 
   private val handleMessageExecutor = new ThreadPoolExecutor(
     conf.getInt("spark.core.connection.handler.threads.min", 20),
@@ -564,7 +566,8 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf, securityManager: SecurityManager) extends Logging {
         logDebug("Server sasl not completed: " + connection.connectionId)
       }
       if (replyToken != null) {
-        var securityMsgResp = SecurityMessage.fromResponse(replyToken, securityMsg.getConnectionId)
+        var securityMsgResp = SecurityMessage.fromResponse(replyToken,
+          securityMsg.getConnectionId)
         var message = securityMsgResp.toBufferMessage
         if (message == null) throw new Exception("Error creating security Message")
         sendSecurityMessage(connection.getRemoteConnectionManagerId(), message)
@@ -689,7 +692,8 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf, securityManager: SecurityManager) extends Logging {
       var firstResponse: Array[Byte] = null
       try {
         firstResponse = conn.sparkSaslClient.firstToken()
-        var securityMsg = SecurityMessage.fromResponse(firstResponse, conn.connectionId.toString())
+        var securityMsg = SecurityMessage.fromResponse(firstResponse,
+          conn.connectionId.toString())
         var message = securityMsg.toBufferMessage
         if (message == null) throw new Exception("Error creating security message")
         connectionsAwaitingSasl += ((conn.connectionId, conn))
@@ -714,13 +718,15 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf, securityManager: SecurityManager) extends Logging {
     def startNewConnection(): SendingConnection = {
       val inetSocketAddress = new InetSocketAddress(connManagerId.host, connManagerId.port)
       val newConnectionId = new ConnectionId(id, idCount.getAndIncrement.intValue)
-      val newConnection = new SendingConnection(inetSocketAddress, selector, connManagerId, newConnectionId)
+      val newConnection = new SendingConnection(inetSocketAddress, selector, connManagerId,
+        newConnectionId)
       logInfo("creating new sending connection for security! " + newConnectionId )
       registerRequests.enqueue(newConnection)
 
       newConnection
     }
-    // I removed the lookupKey stuff as part of merge ... should I re-add it ? We did not find it useful in our test-env ...
+    // I removed the lookupKey stuff as part of merge ... should I re-add it ?
+    // We did not find it useful in our test-env ...
     // If we do re-add it, we should consistently use it everywhere I guess ?
     message.senderAddress = id.toSocketAddress()
     logDebug("Sending Security [" + message + "] to [" + connManagerId + "]")
@@ -737,7 +743,8 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf, securityManager: SecurityManager) extends Logging {
     val inetSocketAddress = new InetSocketAddress(connectionManagerId.host,
       connectionManagerId.port)
     val newConnectionId = new ConnectionId(id, idCount.getAndIncrement.intValue)
-    val newConnection = new SendingConnection(inetSocketAddress, selector, connectionManagerId, newConnectionId)
+    val newConnection = new SendingConnection(inetSocketAddress, selector, connectionManagerId,
+      newConnectionId)
     logDebug("creating new sending connection: " + newConnectionId)
     registerRequests.enqueue(newConnection)
 
@@ -751,7 +758,8 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf, securityManager: SecurityManager) extends Logging {
       checkSendAuthFirst(connectionManagerId, connection)
     }
     message.senderAddress = id.toSocketAddress()
-    logDebug("Before Sending [" + message + "] to [" + connectionManagerId + "]" + " connectionid: " + connection.connectionId)
+    logDebug("Before Sending [" + message + "] to [" + connectionManagerId + "]" + " " +
+      "connectionid: " + connection.connectionId)
 
     if (authEnabled) {
       // if we aren't authenticated yet lets block the senders until authentication completes
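
The rewrapped lines all sit on the SASL negotiation path: tokens from sparkSaslClient.firstToken() and the server's responses are shuttled back and forth inside SecurityMessages until both sides report completion, with spark.core.connection.auth.wait.timeout bounding the wait. A compressed sketch of that challenge/response loop using javax.security.sasl directly, with both endpoints in one process (the DIGEST-MD5 mechanism, user name, and shared secret are assumptions of the sketch; Spark carries these tokens over its own connections):

import javax.security.auth.callback.{Callback, CallbackHandler, NameCallback, PasswordCallback}
import javax.security.sasl.{AuthorizeCallback, RealmCallback, Sasl}

object SaslHandshakeDemo {
  // Assumed identity and shared secret, standing in for Spark's generated secret.
  val user = "sparkSaslUser"
  val secret = "shared-secret".toCharArray

  // One handler serves both ends: it supplies the name and password and
  // approves authorization when the authenticated and requested ids match.
  val handler = new CallbackHandler {
    def handle(callbacks: Array[Callback]): Unit = callbacks.foreach {
      case nc: NameCallback      => nc.setName(user)
      case pc: PasswordCallback  => pc.setPassword(secret)
      case rc: RealmCallback     => rc.setText(rc.getDefaultText)
      case ac: AuthorizeCallback => ac.setAuthorized(ac.getAuthenticationID == ac.getAuthorizationID)
      case _                     =>
    }
  }

  def main(args: Array[String]): Unit = {
    val server = Sasl.createSaslServer("DIGEST-MD5", "spark", "localhost", null, handler)
    val client = Sasl.createSaslClient(Array("DIGEST-MD5"), null, "spark", "localhost", null, handler)

    // The token ping-pong that ConnectionManager ships inside SecurityMessages.
    var token: Array[Byte] =
      if (client.hasInitialResponse) client.evaluateChallenge(new Array[Byte](0))
      else new Array[Byte](0)
    while (!server.isComplete || !client.isComplete) {
      token = server.evaluateResponse(token)  // server challenge or final verification
      if (token != null && !client.isComplete) token = client.evaluateChallenge(token)
    }
    println("authenticated as " + server.getAuthorizationID)
  }
}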

core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala

Lines changed: 2 additions & 1 deletion
@@ -74,6 +74,7 @@ private[spark] object MessageChunkHeader {
     buffer.get(ipBytes)
     val ip = InetAddress.getByAddress(ipBytes)
     val port = buffer.getInt()
-    new MessageChunkHeader(typ, id, totalSize, chunkSize, other, securityNeg, new InetSocketAddress(ip, port))
+    new MessageChunkHeader(typ, id, totalSize, chunkSize, other, securityNeg,
+      new InetSocketAddress(ip, port))
   }
 }
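
The wrapped constructor call is the tail of MessageChunkHeader's decoder: raw IP bytes and an int port are read from a ByteBuffer and rebuilt into an InetSocketAddress. A round-trip sketch of just that address fragment (the one-byte length prefix is an assumption of the sketch; the real header uses a fixed layout with more fields):

import java.net.{InetAddress, InetSocketAddress}
import java.nio.ByteBuffer

object AddressCodec {
  // Write length-prefixed IP bytes followed by the port, as the decoder above expects.
  def encode(addr: InetSocketAddress): ByteBuffer = {
    val ipBytes = addr.getAddress.getAddress  // 4 bytes for IPv4, 16 for IPv6
    val buffer = ByteBuffer.allocate(1 + ipBytes.length + 4)
    buffer.put(ipBytes.length.toByte)
    buffer.put(ipBytes)
    buffer.putInt(addr.getPort)
    buffer.flip()
    buffer
  }

  def decode(buffer: ByteBuffer): InetSocketAddress = {
    val ipBytes = new Array[Byte](buffer.get())
    buffer.get(ipBytes)
    val ip = InetAddress.getByAddress(ipBytes)
    val port = buffer.getInt()
    new InetSocketAddress(ip, port)
  }

  def main(args: Array[String]): Unit = {
    val original = new InetSocketAddress("127.0.0.1", 7077)
    println(decode(encode(original)) == original)  // true: the round trip preserves the address
  }
}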
