Skip to content

Commit 14767ed

Browse files
Davies Liu authored and CodingCat committed
[HOTFIX] fix duplicated braces
Author: Davies Liu <[email protected]> Closes apache#8219 from davies/fix_typo.
1 parent de5b331 commit 14767ed

File tree

13 files changed

+15
-15
lines changed

13 files changed

+15
-15
lines changed

core/src/main/scala/org/apache/spark/storage/BlockManager.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -222,7 +222,7 @@ private[spark] class BlockManager(
222222
return
223223
} catch {
224224
case e: Exception if i < MAX_ATTEMPTS =>
225-
logError(s"Failed to connect to external shuffle server, will retry ${MAX_ATTEMPTS - i}}"
225+
logError(s"Failed to connect to external shuffle server, will retry ${MAX_ATTEMPTS - i}"
226226
+ s" more times after waiting $SLEEP_TIME_SECS seconds...", e)
227227
Thread.sleep(SLEEP_TIME_SECS * 1000)
228228
}

core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ class BlockManagerMaster(
103103
val future = driverEndpoint.askWithRetry[Future[Seq[Int]]](RemoveRdd(rddId))
104104
future.onFailure {
105105
case e: Exception =>
106-
logWarning(s"Failed to remove RDD $rddId - ${e.getMessage}}", e)
106+
logWarning(s"Failed to remove RDD $rddId - ${e.getMessage}", e)
107107
}(ThreadUtils.sameThread)
108108
if (blocking) {
109109
timeout.awaitResult(future)
@@ -115,7 +115,7 @@ class BlockManagerMaster(
115115
val future = driverEndpoint.askWithRetry[Future[Seq[Boolean]]](RemoveShuffle(shuffleId))
116116
future.onFailure {
117117
case e: Exception =>
118-
logWarning(s"Failed to remove shuffle $shuffleId - ${e.getMessage}}", e)
118+
logWarning(s"Failed to remove shuffle $shuffleId - ${e.getMessage}", e)
119119
}(ThreadUtils.sameThread)
120120
if (blocking) {
121121
timeout.awaitResult(future)
@@ -129,7 +129,7 @@ class BlockManagerMaster(
129129
future.onFailure {
130130
case e: Exception =>
131131
logWarning(s"Failed to remove broadcast $broadcastId" +
132-
s" with removeFromMaster = $removeFromMaster - ${e.getMessage}}", e)
132+
s" with removeFromMaster = $removeFromMaster - ${e.getMessage}", e)
133133
}(ThreadUtils.sameThread)
134134
if (blocking) {
135135
timeout.awaitResult(future)

core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,7 @@ private[spark] object ClosureCleaner extends Logging {
181181
return
182182
}
183183

184-
logDebug(s"+++ Cleaning closure $func (${func.getClass.getName}}) +++")
184+
logDebug(s"+++ Cleaning closure $func (${func.getClass.getName}) +++")
185185

186186
// A list of classes that represents closures enclosed in the given one
187187
val innerClasses = getInnerClosureClasses(func)

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1366,7 +1366,7 @@ private[spark] object Utils extends Logging {
13661366
file.getAbsolutePath, effectiveStartIndex, effectiveEndIndex))
13671367
}
13681368
sum += fileToLength(file)
1369-
logDebug(s"After processing file $file, string built is ${stringBuffer.toString}}")
1369+
logDebug(s"After processing file $file, string built is ${stringBuffer.toString}")
13701370
}
13711371
stringBuffer.toString
13721372
}

examples/src/main/scala/org/apache/spark/examples/ml/MovieLensALS.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ object MovieLensALS {
7676
.text("path to a MovieLens dataset of movies")
7777
.action((x, c) => c.copy(movies = x))
7878
opt[Int]("rank")
79-
.text(s"rank, default: ${defaultParams.rank}}")
79+
.text(s"rank, default: ${defaultParams.rank}")
8080
.action((x, c) => c.copy(rank = x))
8181
opt[Int]("maxIter")
8282
.text(s"max number of iterations, default: ${defaultParams.maxIter}")

examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeRunner.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,7 @@ object DecisionTreeRunner {
100100
.action((x, c) => c.copy(numTrees = x))
101101
opt[String]("featureSubsetStrategy")
102102
.text(s"feature subset sampling strategy" +
103-
s" (${RandomForest.supportedFeatureSubsetStrategies.mkString(", ")}}), " +
103+
s" (${RandomForest.supportedFeatureSubsetStrategies.mkString(", ")}), " +
104104
s"default: ${defaultParams.featureSubsetStrategy}")
105105
.action((x, c) => c.copy(featureSubsetStrategy = x))
106106
opt[Double]("fracTest")

examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ object MovieLensALS {
5555
val parser = new OptionParser[Params]("MovieLensALS") {
5656
head("MovieLensALS: an example app for ALS on MovieLens data.")
5757
opt[Int]("rank")
58-
.text(s"rank, default: ${defaultParams.rank}}")
58+
.text(s"rank, default: ${defaultParams.rank}")
5959
.action((x, c) => c.copy(rank = x))
6060
opt[Int]("numIterations")
6161
.text(s"number of iterations, default: ${defaultParams.numIterations}")

mllib/src/main/scala/org/apache/spark/mllib/linalg/BLAS.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -469,7 +469,7 @@ private[spark] object BLAS extends Serializable with Logging {
469469
require(A.numCols == x.size,
470470
s"The columns of A don't match the number of elements of x. A: ${A.numCols}, x: ${x.size}")
471471
require(A.numRows == y.size,
472-
s"The rows of A don't match the number of elements of y. A: ${A.numRows}, y:${y.size}}")
472+
s"The rows of A don't match the number of elements of y. A: ${A.numRows}, y:${y.size}")
473473
if (alpha == 0.0) {
474474
logDebug("gemv: alpha is equal to 0. Returning y.")
475475
} else {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ object HiveTypeCoercion {
164164
// Leave the same if the dataTypes match.
165165
case Some(newType) if a.dataType == newType.dataType => a
166166
case Some(newType) =>
167-
logDebug(s"Promoting $a to $newType in ${q.simpleString}}")
167+
logDebug(s"Promoting $a to $newType in ${q.simpleString}")
168168
newType
169169
}
170170
}

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -170,7 +170,7 @@ object JdbcUtils extends Logging {
170170
case BinaryType => "BLOB"
171171
case TimestampType => "TIMESTAMP"
172172
case DateType => "DATE"
173-
case t: DecimalType => s"DECIMAL(${t.precision}},${t.scale}})"
173+
case t: DecimalType => s"DECIMAL(${t.precision},${t.scale})"
174174
case _ => throw new IllegalArgumentException(s"Don't know how to save $field to JDBC")
175175
})
176176
val nullable = if (field.nullable) "" else "NOT NULL"

0 commit comments

Comments
 (0)