Skip to content

Commit dabcaf2

Browse files
committed
Cleaning up more comments and one more test.
1 parent 1414f30 commit dabcaf2

File tree

3 files changed

+5
-13
lines changed

3 files changed

+5
-13
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/timetypes.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
2020
import java.sql.{Date, Timestamp}
2121
import scala.language.implicitConversions
2222

23-
/* *
23+
/**
2424
* Subclass of java.sql.Date which provides the usual comparison
2525
* operators (as required for catalyst expressions) and which can
2626
* be constructed from a string.

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -503,7 +503,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
503503
new SchemaRDD(this, LogicalRDD(schema.toAttributes, rowRdd)(self))
504504
}
505505

506-
/* *
506+
/**
507507
* Make RichDate and RichTimestamp available under the names
508508
* Date and Timestamp when the members of this SQLContext are
509509
* imported.

sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala

Lines changed: 3 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -29,13 +29,6 @@ import org.apache.spark.util.Utils
2929
// Implicits
3030
import org.apache.spark.sql.hive.test.TestHive._
3131

32-
/*
33-
* Note: the DSL conversions collide with the scalatest === operator!
34-
* We can apply the scalatest conversion explicitly:
35-
* assert(X === Y) --> assert(convertToEqualizer(X).===(Y))
36-
* (This file already imports convertToEqualizer)
37-
*/
38-
3932
case class Cases(lower: String, UPPER: String)
4033

4134
class HiveParquetSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAfterEach {
@@ -87,7 +80,7 @@ class HiveParquetSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAft
8780

8881
test("Simple column projection + filter on Parquet table") {
8982
val rdd = sql("SELECT myboolean, mylong FROM testsource WHERE myboolean=true").collect()
90-
assert(convertToEqualizer(rdd.size).===(5), "Filter returned incorrect number of rows")
83+
assert(rdd.size === 5, "Filter returned incorrect number of rows")
9184
assert(rdd.forall(_.getBoolean(0)), "Filter returned incorrect Boolean field value")
9285
}
9386

@@ -109,7 +102,7 @@ class HiveParquetSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAft
109102
sql("INSERT OVERWRITE TABLE ptable SELECT * FROM testsource").collect()
110103
val rddCopy = sql("SELECT * FROM ptable").collect()
111104
val rddOrig = sql("SELECT * FROM testsource").collect()
112-
assert(convertToEqualizer(rddCopy.size).===(rddOrig.size), "INSERT OVERWRITE changed size of table??")
105+
assert(rddCopy.size === rddOrig.size, "INSERT OVERWRITE changed size of table??")
113106
compareRDDs(rddOrig, rddCopy, "testsource", ParquetTestData.testSchemaFieldNames)
114107
}
115108

@@ -118,8 +111,7 @@ class HiveParquetSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAft
118111
(rddOne, rddTwo).zipped.foreach {
119112
(a,b) => (a,b).zipped.toArray.zipWithIndex.foreach {
120113
case ((value_1, value_2), index) =>
121-
assert(convertToEqualizer(value_1).===(value_2),
122-
s"table $tableName row $counter field ${fieldNames(index)} don't match")
114+
assert(value_1 === value_2, s"table $tableName row $counter field ${fieldNames(index)} don't match")
123115
}
124116
counter = counter + 1
125117
}

0 commit comments

Comments (0)