Commit ffb9352

SPARK-11323 style: methods whose arg list is multiline have newline before first arg
1 parent 44ac1ea commit ffb9352

13 files changed: +118 -60 lines
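
The style rule applied throughout this commit: when a method or constructor argument list does not fit on one line, the opening parenthesis is followed by a newline and every argument goes on its own double-indented line. A minimal sketch of the before/after shape (hypothetical object, method, and parameter names, not taken from this commit):

object MultilineArgStyleExample {

  // Before: the first argument shares a line with the method name.
  def lookupReportOld(appId: String,
      attemptId: Option[String],
      timeoutMillis: Long): Option[String] = {
    None
  }

  // After: newline before the first argument; every argument on its own,
  // double-indented line, matching the pattern applied in this commit.
  def lookupReport(
      appId: String,
      attemptId: Option[String],
      timeoutMillis: Long): Option[String] = {
    None
  }
}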

yarn/src/history/main/scala/org/apache/spark/deploy/history/yarn/rest/JerseyBinding.scala

Lines changed: 11 additions & 7 deletions
@@ -77,7 +77,8 @@ private[spark] object JerseyBinding extends Logging {
   * @param thrown exception caught
   * @return an exception to log, ingore, throw...
   */
-  def translateException(verb: String,
+  def translateException(
+      verb: String,
      targetURL: URI,
      thrown: Throwable): Throwable = {
    thrown match {
@@ -110,7 +111,8 @@ private[spark] object JerseyBinding extends Logging {
   * @param exception original exception
   * @return an exception to throw
   */
-  def translateException(verb: String,
+  def translateException(
+      verb: String,
      targetURL: URI,
      exception: ClientHandlerException): IOException = {
    val uri = if (targetURL !=null) targetURL.toString else "unknown URL"
@@ -162,7 +164,8 @@ private[spark] object JerseyBinding extends Logging {
   * @param exception original exception
   * @return a new exception, the original one nested as a cause
   */
-  def translateException(verb: String,
+  def translateException(
+      verb: String,
      targetURL: URI,
      exception: UniformInterfaceException): IOException = {
    var ioe: IOException = null
@@ -215,10 +218,11 @@
   * @param token optional delegation token
   * @return a new client instance
   */
-  def createJerseyClient(conf: Configuration,
-      clientConfig: ClientConfig,
-      token: DelegationTokenAuthenticatedURL.Token = new DelegationTokenAuthenticatedURL.Token):
-      Client = {
+  def createJerseyClient(
+      conf: Configuration,
+      clientConfig: ClientConfig,
+      token: DelegationTokenAuthenticatedURL.Token = new DelegationTokenAuthenticatedURL.Token)
+    : Client = {
    new JerseyBinding(conf, token).createClient(conf, clientConfig)
  }

yarn/src/history/main/scala/org/apache/spark/deploy/history/yarn/rest/LoggingKerberosDelegationTokenAuthenticator.scala

Lines changed: 6 additions & 2 deletions
@@ -56,8 +56,12 @@ private[spark] class LoggingKerberosDelegationTokenAuthenticator
  /**
   * Low level token renewal operation
   */
-  override def renewDelegationToken(url: URL, token: AuthToken,
-      dToken: Token[AbstractDelegationTokenIdentifier], doAsUser: String): Long = {
+  override def renewDelegationToken(
+      url: URL,
+      token: AuthToken,
+      dToken: Token[AbstractDelegationTokenIdentifier],
+      doAsUser: String): Long = {
+
    val orig = dToken.toString
    val user = if (doAsUser != null) s"user=$doAsUser" else ""
    logInfo(s"Renewing token against $url $user")

yarn/src/history/main/scala/org/apache/spark/deploy/history/yarn/rest/SpnegoUrlConnector.scala

Lines changed: 2 additions & 1 deletion
@@ -178,7 +178,8 @@ private[spark] class SpnegoUrlConnector(connConfigurator: ConnectionConfigurator
   * @param payloadContentType content type. Required if payload != null
   * @return the response
   */
-  def execHttpOperation(verb: String,
+  def execHttpOperation(
+      verb: String,
      url: URL,
      payload: Array[Byte] = null,
      payloadContentType: String = ""): HttpOperationResponse = {

yarn/src/history/main/scala/org/apache/spark/deploy/history/yarn/server/ApplicationListingResults.scala

Lines changed: 2 additions & 2 deletions
@@ -86,8 +86,8 @@ private[spark] class ApplicationListingResults(
   * @param attemptId attempt ID
   * @return (app, attempt, attempt) options.
   */
-  def lookupAttempt(appId: String, attemptId: Option[String]):
-      (Option[TimelineApplicationHistoryInfo], Option[TimelineApplicationAttemptInfo],
+  def lookupAttempt(appId: String, attemptId: Option[String])
+    : (Option[TimelineApplicationHistoryInfo], Option[TimelineApplicationAttemptInfo],
      List[TimelineApplicationAttemptInfo] ) = {
    val foundApp = lookup(appId)
    if (foundApp.isEmpty) {

yarn/src/history/main/scala/org/apache/spark/deploy/history/yarn/server/TimelineQueryClient.scala

Lines changed: 5 additions & 4 deletions
@@ -46,7 +46,8 @@ import org.apache.spark.deploy.history.yarn.rest.{HttpRequestException, JerseyBi
 * @param conf Hadoop configuration
 * @param jerseyClientConfig Jersey client configuration
 */
-private[spark] class TimelineQueryClient(timelineURI: URI,
+private[spark] class TimelineQueryClient(
+    timelineURI: URI,
    conf: Configuration,
    jerseyClientConfig: ClientConfig)
  extends Logging with Closeable {
@@ -64,8 +65,7 @@ private[spark] class TimelineQueryClient(timelineURI: URI,
  /**
   * the delegation token (unused until delegation support implemented)
   */
-  private var token: DelegationTokenAuthenticatedURL.Token =
-    new DelegationTokenAuthenticatedURL.Token
+  private var token = new DelegationTokenAuthenticatedURL.Token

  /**
   * The last time there was a token renewal operation.
@@ -311,7 +311,8 @@ private[spark] class TimelineQueryClient(timelineURI: URI,
   * @param fromTs optional timestamp to start from
   * @return a possibly empty list of entities
   */
-  def listEntities(entityType: String,
+  def listEntities(
+      entityType: String,
      primaryFilter: Option[(String, String)] = None,
      secondaryFilters: Map[String, String] = Map(),
      fields: Seq[String] = Nil,

yarn/src/history/main/scala/org/apache/spark/deploy/history/yarn/server/YarnHistoryProvider.scala

Lines changed: 2 additions & 1 deletion
@@ -529,7 +529,8 @@ private[spark] class YarnHistoryProvider(sparkConf: SparkConf)
   * @return the result of the last successful listing operation,
   *         or a listing with no history events if there has been a failure
   */
-  def listApplications(limit: Option[Long] = None,
+  def listApplications(
+      limit: Option[Long] = None,
      windowStart: Option[Long] = None,
      windowEnd: Option[Long] = None): ApplicationListingResults = {
    if (!enabled) {

yarn/src/history/main/scala/org/apache/spark/deploy/history/yarn/server/YarnProviderUtils.scala

Lines changed: 14 additions & 9 deletions
@@ -127,7 +127,8 @@ private[spark] object YarnProviderUtils extends Logging {
   * @param latest later list of entries
   * @return a combined list.
   */
-  def combineResults(original: Seq[TimelineApplicationHistoryInfo],
+  def combineResults(
+      original: Seq[TimelineApplicationHistoryInfo],
      latest: Seq[TimelineApplicationHistoryInfo]): Seq[TimelineApplicationHistoryInfo] = {
    // build map of original
    val results = new scala.collection.mutable.HashMap[String, TimelineApplicationHistoryInfo]
@@ -157,8 +158,8 @@
   * @param latest the latest attempt
   * @return the merged set
   */
-  def mergeAttempts(old: TimelineApplicationHistoryInfo, latest: TimelineApplicationHistoryInfo):
-      TimelineApplicationHistoryInfo = {
+  def mergeAttempts(old: TimelineApplicationHistoryInfo, latest: TimelineApplicationHistoryInfo)
+    : TimelineApplicationHistoryInfo = {
    val oldAttempts = old.attempts
    val latestAttempts = latest.attempts
    new TimelineApplicationHistoryInfo(old.id, old.name,
@@ -173,7 +174,8 @@
   * @return an ordered list of attempts with original attempt entries removed if a later
   *         version updated the event information.
   */
-  def mergeAttemptInfoLists(oldAttempts: List[TimelineApplicationAttemptInfo],
+  def mergeAttemptInfoLists(
+      oldAttempts: List[TimelineApplicationAttemptInfo],
      latestAttempts: List[TimelineApplicationAttemptInfo])
    : List[TimelineApplicationAttemptInfo] = {

@@ -210,7 +212,8 @@
   * @param attempt2 attempt 2
   * @return the preferred outcome
   */
-  def mostRecentAttempt(attempt1: TimelineApplicationAttemptInfo,
+  def mostRecentAttempt
+      (attempt1: TimelineApplicationAttemptInfo,
      attempt2: TimelineApplicationAttemptInfo): TimelineApplicationAttemptInfo = {
    (attempt1, attempt2) match {
      case (a1, a2) if a1.version > 0 && a2.version > 0 =>
@@ -230,7 +233,8 @@
   * @param attempt2 attempt 2
   * @return true if attempt1 is considered newer than attempt2
   */
-  def attemptNewerThan(attempt1: TimelineApplicationAttemptInfo,
+  def attemptNewerThan(
+      attempt1: TimelineApplicationAttemptInfo,
      attempt2: TimelineApplicationAttemptInfo): Boolean = {
    if (attempt1.version > 0 && attempt2.version > 0) {
      attempt1.version > attempt2.version
@@ -318,8 +322,8 @@
   * @param history history to scan (which can be an empty list)
   * @return the latest element in the list, or `None` for no match
   */
-  def findStartOfWindow(history: Seq[TimelineApplicationHistoryInfo]):
-      Option[TimelineApplicationHistoryInfo] = {
+  def findStartOfWindow(history: Seq[TimelineApplicationHistoryInfo])
+    : Option[TimelineApplicationHistoryInfo] = {
    findIncompleteApplications(history) match {
      // no incomplete apps; use latest
      case Nil => findLatestApplication(history)
@@ -439,7 +443,8 @@
   * @param livenessWindow the window in millis within which apps are considered automatically live
   * @return list of apps which are marked as incomplete but no longer running
   */
-  private[yarn] def completeAppsFromYARN(apps: Seq[TimelineApplicationHistoryInfo],
+  private[yarn] def completeAppsFromYARN(
+      apps: Seq[TimelineApplicationHistoryInfo],
      recordMap: Map[String, ApplicationReport],
      currentTime: Long,
      livenessWindow: Long): Seq[TimelineApplicationHistoryInfo] = {

yarn/src/history/test/scala/org/apache/spark/deploy/history/yarn/integration/AbstractHistoryIntegrationTests.scala

Lines changed: 2 additions & 1 deletion
@@ -288,7 +288,8 @@ abstract class AbstractHistoryIntegrationTests
   * @param history service to flush
   * @param delay time to wait for an empty queue
   */
-  def flushHistoryServiceToSuccess(history: YarnHistoryService,
+  def flushHistoryServiceToSuccess(
+      history: YarnHistoryService,
      delay: Int = TEST_STARTUP_DELAY): Unit = {
    assertNotNull(history, "null history queue")
    historyService.asyncFlush()

yarn/src/history/test/scala/org/apache/spark/deploy/history/yarn/integration/AsyncRefreshSuite.scala

Lines changed: 5 additions & 2 deletions
@@ -141,8 +141,11 @@ class AsyncRefreshSuite extends AbstractHistoryIntegrationTests {
   * @param timeout timeout
   * @return the successful listing
   */
-  def awaitRefreshMessageProcessed (provider: TimeManagedHistoryProvider,
-      initialCount: Long, timeout: Long, text: String): Unit = {
+  def awaitRefreshMessageProcessed (
+      provider: TimeManagedHistoryProvider,
+      initialCount: Long,
+      timeout: Long,
+      text: String): Unit = {
    val refresher = provider.refresher

    def listingProbe(): Outcome = {

yarn/src/history/test/scala/org/apache/spark/deploy/history/yarn/integration/IntegrationTestUtils.scala

Lines changed: 35 additions & 16 deletions
@@ -35,17 +35,20 @@ import org.apache.spark.scheduler.cluster.{StubApplicationAttemptId, StubApplica
 */
private[yarn] trait IntegrationTestUtils {

-  def appHistoryInfo(id: String,
+  def appHistoryInfo(
+      id: String,
      attempts: List[TimelineApplicationAttemptInfo]): TimelineApplicationHistoryInfo = {
    new TimelineApplicationHistoryInfo(id, id, attempts)
  }

-  def appHistoryInfo(id: String,
+  def appHistoryInfo(
+      id: String,
      attempt: TimelineApplicationAttemptInfo): TimelineApplicationHistoryInfo = {
    new TimelineApplicationHistoryInfo(id, id, attempt :: Nil)
  }

-  def attempt(id: String,
+  def attempt(
+      id: String,
      startTime: Long,
      endTime: Long,
      lastUpdated: Long,
@@ -64,9 +67,13 @@
   * @param finishTime finish time or 0
   * @return the report
   */
-  def stubApplicationReport(id: Int, clusterTimestamp: Long, attempt: Int,
+  def stubApplicationReport(
+      id: Int,
+      clusterTimestamp: Long,
+      attempt: Int,
      state: YarnApplicationState,
-      startTime: Long, finishTime: Long = 0): ApplicationReport = {
+      startTime: Long,
+      finishTime: Long = 0): ApplicationReport = {
    val yarnId = new StubApplicationId(id, clusterTimestamp)
    // this is tagged as hadoop private. The alternate tactic: create your own implementation,
    // is brittle against Hadoop versions, as new fields are added. Using this
@@ -88,7 +95,10 @@
   * @param updateTime update time, will be taken from report start time otherwise
   * @return
   */
-  def attemptFromAppReport(report: ApplicationReport, endTime: Long, completed: Boolean,
+  def attemptFromAppReport(
+      report: ApplicationReport,
+      endTime: Long,
+      completed: Boolean,
      updateTime: Long = 0): TimelineApplicationAttemptInfo = {

    val entityId = report.getCurrentApplicationAttemptId.toString
@@ -144,7 +154,8 @@
   * @param timeout timeout
   * @return the application details.
   */
-  def awaitListingEntry(provider: YarnHistoryProvider,
+  def awaitListingEntry(
+      provider: YarnHistoryProvider,
      appId: String,
      attempts: Int,
      timeout: Long): TimelineApplicationHistoryInfo = {
@@ -175,7 +186,8 @@
   * @param timeout timeout
   * @return the successful listing
   */
-  def awaitRefreshExecuted(provider: YarnHistoryProvider,
+  def awaitRefreshExecuted(
+      provider: YarnHistoryProvider,
      triggerRefresh: Boolean,
      timeout: Long): Unit = {
    val initialCount = provider.refreshCount
@@ -200,8 +212,12 @@
   * @param text text which must not be present
   * @param timeout timeout in mils
   */
-  def awaitURLDoesNotContainText(connector: SpnegoUrlConnector,
-      url: URL, text: String, timeout: Long, message: String = ""): String = {
+  def awaitURLDoesNotContainText(
+      connector: SpnegoUrlConnector,
+      url: URL,
+      text: String,
+      timeout: Long,
+      message: String = ""): String = {
    def get: String = {
      connector.execHttpOperation("GET", url, null, "").responseBody
    }
@@ -236,8 +252,11 @@
   * @param text text which must be present
   * @param timeout timeout in mils
   */
-  def awaitURLContainsText(connector: SpnegoUrlConnector,
-      url: URL, text: String, timeout: Long): String = {
+  def awaitURLContainsText(
+      connector: SpnegoUrlConnector,
+      url: URL,
+      text: String,
+      timeout: Long): String = {
    def get: String = {
      connector.execHttpOperation("GET", url, null, "").responseBody
    }
@@ -266,13 +285,13 @@
    get
  }

-  def lookupApplication(listing: Seq[TimelineApplicationHistoryInfo], id: ApplicationId):
-      TimelineApplicationHistoryInfo = {
+  def lookupApplication(listing: Seq[TimelineApplicationHistoryInfo], id: ApplicationId)
+    : TimelineApplicationHistoryInfo = {
    lookupApplication(listing, id.toString)
  }

-  def lookupApplication(listing: Seq[TimelineApplicationHistoryInfo], id: String):
-      TimelineApplicationHistoryInfo = {
+  def lookupApplication(listing: Seq[TimelineApplicationHistoryInfo], id: String)
+    : TimelineApplicationHistoryInfo = {
    findAppById(listing, id) match {
      case Some(applicationInfo2) =>
        applicationInfo2

0 commit comments