Commit 7e0db5e

sql module
1 parent 04ec7ac

20 files changed, +120 -85 lines

sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala

Lines changed: 2 additions & 1 deletion
@@ -92,7 +92,8 @@ class CachedTableSuite extends QueryTest {
 
   test("too big for memory") {
     val data = "*" * 10000
-    sparkContext.parallelize(1 to 200000, 1).map(_ => BigData(data)).toDF().registerTempTable("bigData")
+    sparkContext.parallelize(1 to 200000, 1).map(_ => BigData(data)).toDF()
+      .registerTempTable("bigData")
     table("bigData").persist(StorageLevel.MEMORY_AND_DISK)
     assert(table("bigData").count() === 200000L)
     table("bigData").unpersist(blocking = true)

sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala

Lines changed: 2 additions & 1 deletion
@@ -329,8 +329,9 @@ class DataFrameSuite extends QueryTest {
     checkAnswer(
       decimalData.agg(avg('a cast DecimalType(10, 2))),
       Row(new java.math.BigDecimal(2.0)))
+    // non-partial
     checkAnswer(
-      decimalData.agg(avg('a cast DecimalType(10, 2)), sumDistinct('a cast DecimalType(10, 2))), // non-partial
+      decimalData.agg(avg('a cast DecimalType(10, 2)), sumDistinct('a cast DecimalType(10, 2))),
       Row(new java.math.BigDecimal(2.0), new java.math.BigDecimal(6)) :: Nil)
   }
 

sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala

Lines changed: 1 addition & 1 deletion
@@ -67,7 +67,7 @@ class QueryTest extends PlanTest {
     checkAnswer(df, Seq(expectedAnswer))
   }
 
-  def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit sqlContext: SQLContext): Unit = {
+  def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit sqlContext: SQLContext) {
     test(sqlString) {
      checkAnswer(sqlContext.sql(sqlString), expectedAnswer)
     }
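
Note: dropping ': Unit =' from sqlTest switches it to Scala's procedure syntax, presumably to bring the signature back under the 100-character limit; both forms declare a method returning Unit. A standalone sketch of the equivalence (names are illustrative):

    object ProcedureSyntaxDemo extends App {
      // Explicit result type: the form most Spark style rules prefer.
      def greetExplicit(name: String): Unit = {
        println(s"hello, $name")
      }

      // Procedure syntax: no '=' and no result type, still returns Unit.
      // Legal in the Scala 2.10/2.11 of this era, deprecated later on.
      def greetProcedure(name: String) {
        println(s"hello, $name")
      }

      greetExplicit("spark")
      greetProcedure("sql")
    }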

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 22 additions & 8 deletions
@@ -182,7 +182,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
       Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.002")))
 
     checkAnswer(sql(
-      "SELECT time FROM timestamps WHERE time IN ('1969-12-31 16:00:00.001','1969-12-31 16:00:00.002')"),
+      """
+        |SELECT time FROM timestamps
+        |WHERE time IN ('1969-12-31 16:00:00.001','1969-12-31 16:00:00.002')
+      """.stripMargin),
       Seq(Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001")),
         Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.002"))))
 
@@ -248,7 +251,7 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
       Row("1"))
   }
 
-  def sortTest() = {
+  def sortTest(): Unit = {
     checkAnswer(
       sql("SELECT * FROM testData2 ORDER BY a ASC, b ASC"),
       Seq(Row(1,1), Row(1,2), Row(2,1), Row(2,2), Row(3,1), Row(3,2)))
@@ -327,7 +330,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
 
   test("from follow multiple brackets") {
     checkAnswer(sql(
-      "select key from ((select * from testData limit 1) union all (select * from testData limit 1)) x limit 1"),
+      """
+        |select key from ((select * from testData limit 1)
+        |  union all (select * from testData limit 1)) x limit 1
+      """.stripMargin),
       Row(1)
     )
 
@@ -337,7 +343,11 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
     )
 
     checkAnswer(sql(
-      "select key from (select * from testData limit 1 union all select * from testData limit 1) x limit 1"),
+      """
+        |select key from
+        |  (select * from testData limit 1 union all select * from testData limit 1) x
+        |  limit 1
+      """.stripMargin),
       Row(1)
     )
   }
@@ -384,7 +394,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
       Seq(Row(1, 0), Row(2, 1)))
 
     checkAnswer(
-      sql("SELECT COUNT(a), COUNT(b), COUNT(1), COUNT(DISTINCT a), COUNT(DISTINCT b) FROM testData3"),
+      sql(
+        """
+          |SELECT COUNT(a), COUNT(b), COUNT(1), COUNT(DISTINCT a), COUNT(DISTINCT b) FROM testData3
+        """.stripMargin),
       Row(2, 1, 2, 2, 1))
   }
 
@@ -997,7 +1010,8 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
   }
 
   test("SPARK-3483 Special chars in column names") {
-    val data = sparkContext.parallelize(Seq("""{"key?number1": "value1", "key.number2": "value2"}"""))
+    val data = sparkContext.parallelize(
+      Seq("""{"key?number1": "value1", "key.number2": "value2"}"""))
     jsonRDD(data).registerTempTable("records")
     sql("SELECT `key?number1` FROM records")
   }
@@ -1082,8 +1096,8 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
   }
 
   test("SPARK-6145: ORDER BY test for nested fields") {
-    jsonRDD(sparkContext.makeRDD(
-      """{"a": {"b": 1, "a": {"a": 1}}, "c": [{"d": 1}]}""" :: Nil)).registerTempTable("nestedOrder")
+    jsonRDD(sparkContext.makeRDD("""{"a": {"b": 1, "a": {"a": 1}}, "c": [{"d": 1}]}""" :: Nil))
+      .registerTempTable("nestedOrder")
 
     checkAnswer(sql("SELECT 1 FROM nestedOrder ORDER BY a.b"), Row(1))
     checkAnswer(sql("SELECT a.b FROM nestedOrder ORDER BY a.b"), Row(1))

sql/core/src/test/scala/org/apache/spark/sql/ScalaReflectionRelationSuite.scala

Lines changed: 3 additions & 2 deletions
@@ -80,7 +80,7 @@ class ScalaReflectionRelationSuite extends FunSuite {
 
   test("query case class RDD") {
     val data = ReflectData("a", 1, 1L, 1.toFloat, 1.toDouble, 1.toShort, 1.toByte, true,
-        new java.math.BigDecimal(1), new Date(12345), new Timestamp(12345), Seq(1,2,3))
+      new java.math.BigDecimal(1), new Date(12345), new Timestamp(12345), Seq(1,2,3))
     val rdd = sparkContext.parallelize(data :: Nil)
     rdd.toDF().registerTempTable("reflectData")
 
@@ -103,7 +103,8 @@ class ScalaReflectionRelationSuite extends FunSuite {
     val rdd = sparkContext.parallelize(data :: Nil)
     rdd.toDF().registerTempTable("reflectOptionalData")
 
-    assert(sql("SELECT * FROM reflectOptionalData").collect().head === Row.fromSeq(Seq.fill(7)(null)))
+    assert(sql("SELECT * FROM reflectOptionalData").collect().head ===
+      Row.fromSeq(Seq.fill(7)(null)))
   }
 
   // Equality is broken for Arrays, so we test that separately.
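
Note: the first hunk is an indentation-only fix (the exact whitespace is not recoverable from this rendering), and the second wraps the long assert after the === operator. The break has to come after the operator rather than before it: Scala 2 only ends a statement at a newline when the preceding token can close a statement, and a trailing infix operator cannot, so the expression continues. A tiny sketch:

    object OperatorWrapDemo extends App {
      val expected = Seq.fill(7)(null)
      // Legal: the trailing '==' tells the parser the expression
      // continues onto the next line.
      val matches = Seq.fill(7)(null) ==
        expected
      println(matches)  // true
    }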

sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ private[sql] class MyDenseVectorUDT extends UserDefinedType[MyDenseVector] {
     }
   }
 
-  override def userClass = classOf[MyDenseVector]
+  override def userClass: Class[MyDenseVector] = classOf[MyDenseVector]
 
   private[spark] override def asNullable: MyDenseVectorUDT = this
 }

sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnarTestUtils.scala

Lines changed: 2 additions & 2 deletions
@@ -27,7 +27,7 @@ import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
 import org.apache.spark.sql.types.{Decimal, DataType, NativeType}
 
 object ColumnarTestUtils {
-  def makeNullRow(length: Int) = {
+  def makeNullRow(length: Int): GenericMutableRow = {
     val row = new GenericMutableRow(length)
     (0 until length).foreach(row.setNullAt)
     row
@@ -93,7 +93,7 @@ object ColumnarTestUtils {
 
   def makeUniqueValuesAndSingleValueRows[T <: NativeType](
       columnType: NativeColumnType[T],
-      count: Int) = {
+      count: Int): (Seq[T#JvmType], Seq[GenericMutableRow]) = {
 
     val values = makeUniqueRandomValues(columnType, count)
     val rows = values.map { value =>
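
Note: both hunks add explicit result types to helpers whose types were previously inferred. The motivation is API stability: with inference, the visible type is whatever the body happens to return, so an implementation change can silently alter every call site. A self-contained illustration of the failure mode (names and types are hypothetical):

    object InferredTypeDemo extends App {
      // Inferred: callers see Vector[Any] today; swap the body to a List
      // and the method's visible type changes without this line changing.
      def makeRowInferred(length: Int) = Vector.fill(length)(null: Any)

      // Explicit: the compiler pins the contract here, and a mismatched
      // body is flagged at the definition site, not at distant call sites.
      def makeRowExplicit(length: Int): Seq[Any] = Vector.fill(length)(null)

      println(makeRowInferred(3).length + makeRowExplicit(3).length)  // 6
    }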

sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnAccessorSuite.scala

Lines changed: 2 additions & 1 deletion
@@ -31,7 +31,8 @@ class TestNullableColumnAccessor[T <: DataType, JvmType](
   with NullableColumnAccessor
 
 object TestNullableColumnAccessor {
-  def apply[T <: DataType, JvmType](buffer: ByteBuffer, columnType: ColumnType[T, JvmType]) = {
+  def apply[T <: DataType, JvmType](buffer: ByteBuffer, columnType: ColumnType[T, JvmType])
+    : TestNullableColumnAccessor[T, JvmType] = {
     // Skips the column type ID
     buffer.getInt()
     new TestNullableColumnAccessor(buffer, columnType)
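
Note: when spelling out the result type would push the signature past 100 characters, the style used in this file and the next wraps the annotation onto its own line, starting with the colon. This parses because ':' cannot begin a new statement, so Scala 2 keeps reading the definition. A minimal sketch with stand-in types:

    object WrappedSignatureDemo extends App {
      // The parameter list closes on line one; ': ResultType =' wraps to
      // line two when the annotated signature would otherwise be too long.
      def pairUp[A, B](left: java.nio.ByteBuffer, right: (A, B))
        : (java.nio.ByteBuffer, (A, B)) = {
        (left, right)
      }
      println(pairUp(java.nio.ByteBuffer.allocate(4), (1, "x"))._2)  // (1,x)
    }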

sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala

Lines changed: 2 additions & 1 deletion
@@ -27,7 +27,8 @@ class TestNullableColumnBuilder[T <: DataType, JvmType](columnType: ColumnType[T
   with NullableColumnBuilder
 
 object TestNullableColumnBuilder {
-  def apply[T <: DataType, JvmType](columnType: ColumnType[T, JvmType], initialSize: Int = 0) = {
+  def apply[T <: DataType, JvmType](columnType: ColumnType[T, JvmType], initialSize: Int = 0)
+    : TestNullableColumnBuilder[T, JvmType] = {
     val builder = new TestNullableColumnBuilder(columnType)
     builder.initialize(initialSize)
     builder

sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/TestCompressibleColumnBuilder.scala

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ object TestCompressibleColumnBuilder {
   def apply[T <: NativeType](
       columnStats: ColumnStats,
       columnType: NativeColumnType[T],
-      scheme: CompressionScheme) = {
+      scheme: CompressionScheme): TestCompressibleColumnBuilder[T] = {
 
     val builder = new TestCompressibleColumnBuilder(columnStats, columnType, Seq(scheme))
     builder.initialize(0, "", useCompression = true)
