
Commit 808380f (parent: 50dd8d1)

Fixes issues introduced while rebasing

6 files changed: 16 additions, 13 deletions


sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTypes.scala (2 additions, 2 deletions)

@@ -545,10 +545,10 @@ private[parquet] object ParquetTypesConverter extends Logging {
     val newFields = ArrayBuffer.empty[StructField]

     leftFields.foreach {
-      case leftField @ StructField(leftName, leftType, leftNullable, leftMetadata) =>
+      case leftField @ StructField(leftName, leftType, leftNullable, _) =>
         rightFields
           .find(_.name == leftName)
-          .map { case rightField @ StructField(_, rightType, rightNullable, rightMeatadata) =>
+          .map { case rightField @ StructField(_, rightType, rightNullable, _) =>
             leftField.copy(
               dataType = mergeCatalystDataTypes(leftType, rightType),
               nullable = leftNullable || rightNullable)
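
The change itself just replaces the unused metadata bindings with `_`, which also removes the misspelled `rightMeatadata`. For context, a minimal standalone sketch of the field-merge pattern this code implements, using simplified stand-ins (the three-field `StructField` and `mergeDataTypes` below are hypothetical placeholders, not Spark's actual API):

    import scala.collection.mutable.ArrayBuffer

    // Hypothetical, simplified stand-in for Catalyst's StructField (metadata omitted).
    case class StructField(name: String, dataType: String, nullable: Boolean)

    // Stand-in for Spark's mergeCatalystDataTypes: only identical types merge here.
    def mergeDataTypes(left: String, right: String): String = {
      require(left == right, s"incompatible types: $left vs. $right")
      left
    }

    def mergeFields(leftFields: Seq[StructField], rightFields: Seq[StructField]): Seq[StructField] = {
      val newFields = ArrayBuffer.empty[StructField]
      leftFields.foreach { case leftField @ StructField(leftName, leftType, leftNullable) =>
        rightFields
          .find(_.name == leftName)
          .map { case StructField(_, rightType, rightNullable) =>
            // Field exists on both sides: merge the types and OR the nullability.
            newFields += leftField.copy(
              dataType = mergeDataTypes(leftType, rightType),
              nullable = leftNullable || rightNullable)
          }
          .getOrElse(newFields += leftField) // Left-only field: keep it unchanged.
      }
      newFields.toSeq
    }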

sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala (4 additions, 1 deletion)

@@ -393,7 +393,10 @@ object ParquetRelation2 {
         // Falls back to Parquet schema if Spark SQL schema is absent.
         StructType.fromAttributes(
           // TODO Really no need to use `Attribute` here, we only need to know the data type.
-          convertToAttributes(parquetSchema, sqlContext.conf.isParquetBinaryAsString))
+          convertToAttributes(
+            parquetSchema,
+            sqlContext.conf.isParquetBinaryAsString,
+            sqlContext.conf.isParquetINT96AsTimestamp))
       }
     }.reduce { (left, right) =>
       try mergeCatalystSchemas(left, right) catch { case e: Throwable =>
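
The rebase had dropped the recently added third argument, so this restores the INT96-as-timestamp flag alongside the existing binary-as-string one. A quick usage sketch of the two session options these accessors read (conf key strings as I understand them for Spark SQL of this vintage; assumes a `SQLContext` named `sqlContext` is in scope):

    // Controls whether Parquet BINARY columns are read back as StringType.
    sqlContext.setConf("spark.sql.parquet.binaryAsString", "true")

    // Controls whether Parquet INT96 columns are read back as TimestampType;
    // this is the flag the restored convertToAttributes argument threads through.
    sqlContext.setConf("spark.sql.parquet.int96AsTimestamp", "true")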

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala (2 additions, 2 deletions)

@@ -312,10 +312,10 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
   }

   withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "true") {
-    run("Enable Parquet data source")
+    run("Parquet data source enabled")
   }

   withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "false") {
-    run("Disable Parquet data source")
+    run("Parquet data source disabled")
   }

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala (2 additions, 2 deletions)

@@ -291,10 +291,10 @@ class ParquetIOSuite extends QueryTest with ParquetTest {
   }

   withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "true") {
-    run("Enable Parquet data source")
+    run("Parquet data source enabled")
   }

   withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "false") {
-    run("Disable Parquet data source")
+    run("Parquet data source disabled")
   }

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala (4 additions, 4 deletions)

@@ -46,15 +46,15 @@ class ParquetQuerySuite extends QueryTest with ParquetTest {

   // This test case will trigger the NPE mentioned in
   // https://issues.apache.org/jira/browse/PARQUET-151.
-  ignore("overwriting") {
+  ignore(s"$prefix: overwriting") {
     val data = (0 until 10).map(i => (i, i.toString))
     withParquetTable(data, "t") {
       sql("INSERT OVERWRITE TABLE t SELECT * FROM t")
       checkAnswer(table("t"), data.map(Row.fromTuple))
     }
   }

-  test("self-join") {
+  test(s"$prefix: self-join") {
     // 4 rows, cells of column 1 of row 2 and row 4 are null
     val data = (1 to 4).map { i =>
       val maybeInt = if (i % 2 == 0) None else Some(i)

@@ -111,10 +111,10 @@ class ParquetQuerySuite extends QueryTest with ParquetTest {
   }

   withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "true") {
-    run("Enable Parquet data source")
+    run("Parquet data source enabled")
   }

   withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "false") {
-    run("Disable Parquet data source")
+    run("Parquet data source disabled")
   }
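
Adding `$prefix` to these two names matters because the suite defines its tests inside a `run(prefix: String)` method and calls it once per setting of PARQUET_USE_DATA_SOURCE_API; ScalaTest registers a test each time `test(...)` executes, so without distinct prefixes the second call would register duplicate test names and fail. A minimal sketch of the pattern (plain ScalaTest, with the Spark-specific config wrapper omitted):

    import org.scalatest.funsuite.AnyFunSuite

    class PrefixedSuite extends AnyFunSuite {
      // Every test case is defined inside `run`, so the whole batch can be
      // registered repeatedly, once per configuration under test.
      def run(prefix: String): Unit = {
        test(s"$prefix: self-join") {
          assert(Seq(1, 2).mkString == "12") // placeholder body
        }
      }

      // Each call registers the tests again under a distinct, prefixed name.
      run("Parquet data source enabled")
      run("Parquet data source disabled")
    }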

sql/hive/src/test/scala/org/apache/spark/sql/parquet/parquetSuites.scala (2 additions, 2 deletions)

@@ -286,8 +286,8 @@ abstract class ParquetPartitioningTest extends QueryTest with BeforeAndAfterAll
   }

   setConf(SQLConf.PARQUET_USE_DATA_SOURCE_API, "false")
-  run("Enable Parquet data source")
+  run("Parquet data source enabled")

   setConf(SQLConf.PARQUET_USE_DATA_SOURCE_API, "true")
-  run("Disable Parquet data source")
+  run("Parquet data source disabled")
 }
