@@ -42,24 +42,24 @@ private[sql] class DefaultSource extends HadoopFsRelationProvider with DataSourc
   def format(): String = "json"
 
   override def createRelation(
-    sqlContext: SQLContext,
-    paths: Array[String],
-    dataSchema: Option[StructType],
-    partitionColumns: Option[StructType],
-    parameters: Map[String, String]): HadoopFsRelation = {
+      sqlContext: SQLContext,
+      paths: Array[String],
+      dataSchema: Option[StructType],
+      partitionColumns: Option[StructType],
+      parameters: Map[String, String]): HadoopFsRelation = {
     val samplingRatio = parameters.get("samplingRatio").map(_.toDouble).getOrElse(1.0)
 
     new JSONRelation(None, samplingRatio, dataSchema, None, partitionColumns, paths)(sqlContext)
   }
 }
 
 private[sql] class JSONRelation(
-  val inputRDD: Option[RDD[String]],
-  val samplingRatio: Double,
-  val maybeDataSchema: Option[StructType],
-  val maybePartitionSpec: Option[PartitionSpec],
-  override val userDefinedPartitionColumns: Option[StructType],
-  override val paths: Array[String] = Array.empty[String])(@transient val sqlContext: SQLContext)
+    val inputRDD: Option[RDD[String]],
+    val samplingRatio: Double,
+    val maybeDataSchema: Option[StructType],
+    val maybePartitionSpec: Option[PartitionSpec],
+    override val userDefinedPartitionColumns: Option[StructType],
+    override val paths: Array[String] = Array.empty[String])(@transient val sqlContext: SQLContext)
   extends HadoopFsRelation(maybePartitionSpec) {
 
   /** Constraints to be imposed on schema to be stored. */
@@ -109,9 +109,9 @@ private[sql] class JSONRelation(
   }
 
   override def buildScan(
-    requiredColumns: Array[String],
-    filters: Array[Filter],
-    inputPaths: Array[FileStatus]): RDD[Row] = {
+      requiredColumns: Array[String],
+      filters: Array[Filter],
+      inputPaths: Array[FileStatus]): RDD[Row] = {
     JacksonParser(
       inputRDD.getOrElse(createBaseRdd(inputPaths)),
       StructType(requiredColumns.map(dataSchema(_))),
@@ -142,19 +142,19 @@ private[sql] class JSONRelation(
   override def prepareJobForWrite(job: Job): OutputWriterFactory = {
     new OutputWriterFactory {
       override def newInstance(
-        path: String,
-        dataSchema: StructType,
-        context: TaskAttemptContext): OutputWriter = {
+          path: String,
+          dataSchema: StructType,
+          context: TaskAttemptContext): OutputWriter = {
         new JsonOutputWriter(path, dataSchema, context)
       }
     }
   }
 }
 
 private[json] class JsonOutputWriter(
-  path: String,
-  dataSchema: StructType,
-  context: TaskAttemptContext)
+    path: String,
+    dataSchema: StructType,
+    context: TaskAttemptContext)
   extends OutputWriterInternal with SparkHadoopMapRedUtil with Logging {
 
   val writer = new CharArrayWriter()
@@ -187,4 +187,4 @@ private[json] class JsonOutputWriter(
     gen.close()
     recordWriter.close(context)
   }
-}
+}
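
For context, a relation like this is not normally constructed by hand; it is reached through the DataFrame reader/writer API once the source is registered (the diff's DefaultSource exposes the "json" format name). A minimal sketch against the Spark 1.5-era public API; the SparkContext (sc) and the file paths below are assumed purely for illustration:

import org.apache.spark.sql.SQLContext

val sqlContext = new SQLContext(sc)

// Resolving format("json") lands in DefaultSource.createRelation, which builds a JSONRelation.
val people = sqlContext.read.format("json").load("examples/people.json")
people.printSchema()

// Writing back out goes through prepareJobForWrite, with one JsonOutputWriter per task.
people.write.format("json").save("examples/people_out")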