
Commit 8f3cf43

Joseph Batchik committed: merged in changes
1 parent b63d337

File tree

2 files changed: +21 additions, -25 deletions


sql/core/src/main/scala/org/apache/spark/sql/json/JSONRelation.scala

21 additions, 21 deletions
@@ -42,24 +42,24 @@ private[sql] class DefaultSource extends HadoopFsRelationProvider with DataSourc
   def format(): String = "json"
 
   override def createRelation(
-    sqlContext: SQLContext,
-    paths: Array[String],
-    dataSchema: Option[StructType],
-    partitionColumns: Option[StructType],
-    parameters: Map[String, String]): HadoopFsRelation = {
+      sqlContext: SQLContext,
+      paths: Array[String],
+      dataSchema: Option[StructType],
+      partitionColumns: Option[StructType],
+      parameters: Map[String, String]): HadoopFsRelation = {
     val samplingRatio = parameters.get("samplingRatio").map(_.toDouble).getOrElse(1.0)
 
     new JSONRelation(None, samplingRatio, dataSchema, None, partitionColumns, paths)(sqlContext)
   }
 }
 
 private[sql] class JSONRelation(
-  val inputRDD: Option[RDD[String]],
-  val samplingRatio: Double,
-  val maybeDataSchema: Option[StructType],
-  val maybePartitionSpec: Option[PartitionSpec],
-  override val userDefinedPartitionColumns: Option[StructType],
-  override val paths: Array[String] = Array.empty[String])(@transient val sqlContext: SQLContext)
+    val inputRDD: Option[RDD[String]],
+    val samplingRatio: Double,
+    val maybeDataSchema: Option[StructType],
+    val maybePartitionSpec: Option[PartitionSpec],
+    override val userDefinedPartitionColumns: Option[StructType],
+    override val paths: Array[String] = Array.empty[String])(@transient val sqlContext: SQLContext)
   extends HadoopFsRelation(maybePartitionSpec) {
 
   /** Constraints to be imposed on schema to be stored. */
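
For context: the "json" string returned by format() above is the short name a caller passes to the DataFrame reader, and reader options arrive in createRelation as the parameters map; samplingRatio is the one this relation consumes. A minimal usage sketch, assuming a SQLContext named sqlContext and a hypothetical input path:

// Load JSON by short name; "samplingRatio" is forwarded to createRelation
// via the `parameters` map and controls schema-inference sampling.
val people = sqlContext.read
  .format("json")
  .option("samplingRatio", "0.5")  // parsed with _.toDouble, defaults to 1.0
  .load("/tmp/people.json")        // hypothetical path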
@@ -109,9 +109,9 @@ private[sql] class JSONRelation(
   }
 
   override def buildScan(
-    requiredColumns: Array[String],
-    filters: Array[Filter],
-    inputPaths: Array[FileStatus]): RDD[Row] = {
+      requiredColumns: Array[String],
+      filters: Array[Filter],
+      inputPaths: Array[FileStatus]): RDD[Row] = {
     JacksonParser(
       inputRDD.getOrElse(createBaseRdd(inputPaths)),
       StructType(requiredColumns.map(dataSchema(_))),
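
The buildScan body above prunes the full dataSchema down to just the requested columns before parsing; StructType's apply(fieldName) looks each column up by name. A small illustration of that pruning step, using a made-up schema:

import org.apache.spark.sql.types._

val dataSchema = StructType(Seq(
  StructField("name", StringType),
  StructField("age", IntegerType),
  StructField("city", StringType)))

// Mirrors StructType(requiredColumns.map(dataSchema(_))) from buildScan:
val requiredColumns = Array("city", "name")
val prunedSchema = StructType(requiredColumns.map(dataSchema(_)))
// prunedSchema keeps only `city` and `name`, in the requested order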
@@ -142,19 +142,19 @@ private[sql] class JSONRelation(
   override def prepareJobForWrite(job: Job): OutputWriterFactory = {
     new OutputWriterFactory {
       override def newInstance(
-        path: String,
-        dataSchema: StructType,
-        context: TaskAttemptContext): OutputWriter = {
+          path: String,
+          dataSchema: StructType,
+          context: TaskAttemptContext): OutputWriter = {
         new JsonOutputWriter(path, dataSchema, context)
       }
     }
   }
 }
 
 private[json] class JsonOutputWriter(
-  path: String,
-  dataSchema: StructType,
-  context: TaskAttemptContext)
+    path: String,
+    dataSchema: StructType,
+    context: TaskAttemptContext)
   extends OutputWriterInternal with SparkHadoopMapRedUtil with Logging {
 
   val writer = new CharArrayWriter()
@@ -187,4 +187,4 @@ private[json] class JsonOutputWriter(
     gen.close()
     recordWriter.close(context)
   }
-}
+}
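
On the write path, prepareJobForWrite returns the factory whose newInstance builds one JsonOutputWriter per task. From the caller's side this is reached through the DataFrame writer; a minimal sketch, assuming a DataFrame named df and a hypothetical output path:

// Each write task gets its own JsonOutputWriter via the factory above.
df.write
  .format("json")
  .save("/tmp/people_json_out")  // hypothetical path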

sql/hive/src/test/scala/org/apache/spark/sql/sources/SimpleTextRelation.scala

0 additions, 4 deletions
@@ -37,8 +37,6 @@ import org.apache.spark.sql.{Row, SQLContext}
  */
 class SimpleTextSource extends HadoopFsRelationProvider {
 
-  override def format(): String = "simple text source"
-
   override def createRelation(
       sqlContext: SQLContext,
       paths: Array[String],
@@ -138,8 +136,6 @@
  */
 class CommitFailureTestSource extends HadoopFsRelationProvider {
 
-  override def format(): String = "commit failure test"
-
   override def createRelation(
       sqlContext: SQLContext,
       paths: Array[String],
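
With these format() overrides removed, the two test sources no longer advertise a short name, so they would be addressed by their fully qualified class names instead. A sketch under that assumption, using the test package visible in the file path above (schema and input path are made up for illustration):

import org.apache.spark.sql.types._

// Without a registered short name, spell out the provider class:
val df = sqlContext.read
  .format("org.apache.spark.sql.sources.SimpleTextSource")
  .schema(StructType(Seq(StructField("a", IntegerType))))  // assumed schema
  .load("/tmp/simple_text")                                // hypothetical path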

0 commit comments