
Commit 8cfa218

sarutak authored and rxin committed
[SPARK-12692][BUILD][SQL] Scala style: Fix the style violation (Space before "," or ":")

Fix the style violation (space before "," and ":"). This PR is a follow-up to #10643.

Author: Kousuke Saruta <[email protected]>

Closes #10718 from sarutak/SPARK-12692-followup-sql.
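As a minimal sketch of what the DisallowSpaceBeforeTokenChecker flags and what this commit normalizes to (names are hypothetical, not code from the diff):

object StyleExample {
  import scala.reflect.runtime.universe.TypeTag
  // Flagged by the checker: space before ":" and before ","
  def before[T : TypeTag](a: Int , b: Int): Unit = ()
  // Conforming form after this commit's cleanup:
  def after[T: TypeTag](a: Int, b: Int): Unit = ()
}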
1 parent 112abf9 · commit 8cfa218

File tree

54 files changed: +150, -141 lines

(This is a large commit; only a subset of the 54 changed files is shown below.)

scalastyle-config.xml

Lines changed: 1 addition & 1 deletion

@@ -218,7 +218,7 @@ This file is divided into 3 sections:
 <check level="error" class="org.scalastyle.scalariform.EqualsHashCodeChecker" enabled="false"></check>

 <!-- Should turn this on, but we have a few places that need to be fixed first -->
-<check level="warning" class="org.scalastyle.scalariform.DisallowSpaceBeforeTokenChecker" enabled="true">
+<check customId="whitespacebeforetoken" level="warning" class="org.scalastyle.scalariform.DisallowSpaceBeforeTokenChecker" enabled="true">
 <parameters>
   <parameter name="tokens">COLON, COMMA</parameter>
 </parameters>
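The new customId gives the rule a name that source comments can reference, so the few places that legitimately need a space before ":" can opt out locally while the rule stays on everywhere else. The dsl/package.scala hunk later in this commit uses exactly this mechanism:

// Toggling the named rule in source (as done in dsl/package.scala below):
// scalastyle:off whitespacebeforetoken
def unary_! : Predicate = Not(expr)
// scalastyle:on whitespacebeforetoken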

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala

Lines changed: 3 additions & 3 deletions

@@ -49,7 +49,7 @@ object ScalaReflection extends ScalaReflection {
   * Unlike `schemaFor`, this function doesn't do any massaging of types into the Spark SQL type
   * system. As a result, ObjectType will be returned for things like boxed Integers
   */
-  def dataTypeFor[T : TypeTag]: DataType = dataTypeFor(localTypeOf[T])
+  def dataTypeFor[T: TypeTag]: DataType = dataTypeFor(localTypeOf[T])

  private def dataTypeFor(tpe: `Type`): DataType = ScalaReflectionLock.synchronized {
    tpe match {
@@ -116,7 +116,7 @@ object ScalaReflection extends ScalaReflection {
   * from ordinal 0 (since there are no names to map to). The actual location can be moved by
   * calling resolve/bind with a new schema.
   */
-  def constructorFor[T : TypeTag]: Expression = {
+  def constructorFor[T: TypeTag]: Expression = {
    val tpe = localTypeOf[T]
    val clsName = getClassNameFromType(tpe)
    val walkedTypePath = s"""- root class: "${clsName}"""" :: Nil
@@ -386,7 +386,7 @@ object ScalaReflection extends ScalaReflection {
   * * the element type of [[Array]] or [[Seq]]: `array element class: "abc.xyz.MyClass"`
   * * the field of [[Product]]: `field (class: "abc.xyz.MyClass", name: "myField")`
   */
-  def extractorsFor[T : TypeTag](inputObject: Expression): CreateNamedStruct = {
+  def extractorsFor[T: TypeTag](inputObject: Expression): CreateNamedStruct = {
    val tpe = localTypeOf[T]
    val clsName = getClassNameFromType(tpe)
    val walkedTypePath = s"""- root class: "${clsName}"""" :: Nil
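These `[T : TypeTag]` to `[T: TypeTag]` changes are purely cosmetic: both spellings declare the same context bound, which desugars to an implicit evidence parameter. A minimal sketch with hypothetical names:

object ContextBoundSketch {
  import scala.reflect.runtime.universe.TypeTag
  // `[T: TypeTag]` is a context bound...
  def describe[T: TypeTag]: String = implicitly[TypeTag[T]].tpe.toString
  // ...which desugars to an implicit evidence parameter:
  def describeDesugared[T](implicit ev: TypeTag[T]): String = ev.tpe.toString
}
// ContextBoundSketch.describe[Int] == "Int"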

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala

Lines changed: 1 addition & 1 deletion

@@ -203,7 +203,7 @@ object SqlParser extends AbstractSparkSQLParser with DataTypeParser {
    )

  protected lazy val ordering: Parser[Seq[SortOrder]] =
-    ( rep1sep(expression ~ direction.? , ",") ^^ {
+    ( rep1sep(expression ~ direction.?, ",") ^^ {
      case exps => exps.map(pair => SortOrder(pair._1, pair._2.getOrElse(Ascending)))
    }
    )

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 3 additions & 3 deletions

@@ -84,7 +84,7 @@ class Analyzer(
      ResolveAggregateFunctions ::
      DistinctAggregationRewriter(conf) ::
      HiveTypeCoercion.typeCoercionRules ++
-      extendedResolutionRules : _*),
+      extendedResolutionRules: _*),
    Batch("Nondeterministic", Once,
      PullOutNondeterministic),
    Batch("UDF", Once,
@@ -110,7 +110,7 @@ class Analyzer(
    // Taking into account the reasonableness and the implementation complexity,
    // here use the CTE definition first, check table name only and ignore database name
    // see https://github.com/apache/spark/pull/4929#discussion_r27186638 for more info
-    case u : UnresolvedRelation =>
+    case u: UnresolvedRelation =>
      val substituted = cteRelations.get(u.tableIdentifier.table).map { relation =>
        val withAlias = u.alias.map(Subquery(_, relation))
        withAlias.getOrElse(relation)
@@ -889,7 +889,7 @@ class Analyzer(
      _.transform {
        // Extracts children expressions of a WindowFunction (input parameters of
        // a WindowFunction).
-        case wf : WindowFunction =>
+        case wf: WindowFunction =>
          val newChildren = wf.children.map(extractExpr)
          wf.withNewChildren(newChildren)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala

Lines changed: 2 additions & 2 deletions

@@ -323,13 +323,13 @@ object FunctionRegistry {
    } else {
      // Otherwise, find an ctor method that matches the number of arguments, and use that.
      val params = Seq.fill(expressions.size)(classOf[Expression])
-      val f = Try(tag.runtimeClass.getDeclaredConstructor(params : _*)) match {
+      val f = Try(tag.runtimeClass.getDeclaredConstructor(params: _*)) match {
        case Success(e) =>
          e
        case Failure(e) =>
          throw new AnalysisException(s"Invalid number of arguments for function $name")
      }
-      Try(f.newInstance(expressions : _*).asInstanceOf[Expression]) match {
+      Try(f.newInstance(expressions: _*).asInstanceOf[Expression]) match {
        case Success(e) => e
        case Failure(e) => throw new AnalysisException(e.getMessage)
      }
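The colon in these hunks belongs to Scala's sequence-argument ascription `: _*`, which expands a collection into a varargs parameter; only the space before it changes. A minimal, self-contained sketch with hypothetical names:

object VarargsSketch {
  // `: _*` passes a Seq where individual varargs are expected:
  def makeRow(fields: String*): String = fields.mkString("|")

  val cols = Seq("a", "b", "c")
  val row = makeRow(cols: _*) // "a|b|c"
  // makeRow(cols) would not compile: Seq[String] is not String*
}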

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala

Lines changed: 1 addition & 1 deletion

@@ -529,7 +529,7 @@ object HiveTypeCoercion {
        if falseValues.contains(value) => And(IsNotNull(bool), Not(bool))

      case EqualTo(left @ BooleanType(), right @ NumericType()) =>
-        transform(left , right)
+        transform(left, right)
      case EqualTo(left @ NumericType(), right @ BooleanType()) =>
        transform(right, left)
      case EqualNullSafe(left @ BooleanType(), right @ NumericType()) =>

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala

Lines changed: 3 additions & 1 deletion

@@ -61,9 +61,11 @@ package object dsl {
  trait ImplicitOperators {
    def expr: Expression

+    // scalastyle:off whitespacebeforetoken
    def unary_- : Expression = UnaryMinus(expr)
    def unary_! : Predicate = Not(expr)
    def unary_~ : Expression = BitwiseNot(expr)
+    // scalastyle:on whitespacebeforetoken

    def + (other: Expression): Expression = Add(expr, other)
    def - (other: Expression): Expression = Subtract(expr, other)
@@ -141,7 +143,7 @@ package object dsl {
    // Note that if we make ExpressionConversions an object rather than a trait, we can
    // then make this a value class to avoid the small penalty of runtime instantiation.
    def $(args: Any*): analysis.UnresolvedAttribute = {
-      analysis.UnresolvedAttribute(sc.s(args : _*))
+      analysis.UnresolvedAttribute(sc.s(args: _*))
    }
  }
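The scalastyle:off/on pair is needed because the space before ":" is not stylistic in the unary operator definitions: `:` is a legal operator character in Scala identifiers, so without the space the lexer would read the method name as `unary_-:` rather than `unary_-` followed by a result type. An illustrative sketch (class and names are hypothetical):

object UnaryOpSketch {
  class Num(val n: Int) {
    // The space before ":" is mandatory for symbolic method names;
    // `def unary_-: Num` would declare a method literally named `unary_-:`.
    def unary_- : Num = new Num(-n)
  }
  val x = new Num(3)
  val negated = (-x).n // -3, dispatched through unary_-
}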

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoder.scala

Lines changed: 1 addition & 1 deletion

@@ -44,7 +44,7 @@ import org.apache.spark.util.Utils
  * to the name `value`.
  */
 object ExpressionEncoder {
-  def apply[T : TypeTag](): ExpressionEncoder[T] = {
+  def apply[T: TypeTag](): ExpressionEncoder[T] = {
    // We convert the not-serializable TypeTag into StructType and ClassTag.
    val mirror = typeTag[T].mirror
    val cls = mirror.runtimeClass(typeTag[T].tpe)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/package.scala

Lines changed: 1 addition & 1 deletion

@@ -27,7 +27,7 @@ package object encoders {
  * references from a specific schema.) This requirement allows us to preserve whether a given
  * object type is being bound by name or by ordinal when doing resolution.
  */
-  private[sql] def encoderFor[A : Encoder]: ExpressionEncoder[A] = implicitly[Encoder[A]] match {
+  private[sql] def encoderFor[A: Encoder]: ExpressionEncoder[A] = implicitly[Encoder[A]] match {
    case e: ExpressionEncoder[A] =>
      e.assertUnresolved()
      e

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala

Lines changed: 1 addition & 1 deletion

@@ -164,7 +164,7 @@ abstract class Expression extends TreeNode[Expression] {
   * Returns the hash for this expression. Expressions that compute the same result, even if
   * they differ cosmetically should return the same hash.
   */
-  def semanticHash() : Int = {
+  def semanticHash(): Int = {
    def computeHash(e: Seq[Any]): Int = {
      // See http://stackoverflow.com/questions/113511/hash-code-implementation
      var hash: Int = 17
