Skip to content

Commit b31917b

Browse files
Author: Nathan Howell (committed)
Rename useJsonRDD2 to useJacksonStreamingAPI
1 parent 15c5d1b commit b31917b

File tree

3 files changed

+8
-8
lines changed

3 files changed

+8
-8
lines changed

sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ private[spark] object SQLConf {
6767

6868
val USE_SQL_SERIALIZER2 = "spark.sql.useSerializer2"
6969

70-
val USE_JSONRDD2 = "spark.sql.json.useJsonRDD2"
70+
val USE_JACKSON_STREAMING_API = "spark.sql.json.useJacksonStreamingAPI"
7171

7272
object Deprecated {
7373
val MAPRED_REDUCE_TASKS = "mapred.reduce.tasks"
@@ -162,7 +162,7 @@ private[sql] class SQLConf extends Serializable {
162162

163163
private[spark] def useSqlSerializer2: Boolean = getConf(USE_SQL_SERIALIZER2, "true").toBoolean
164164

165-
private[spark] def useJsonRDD2: Boolean = getConf(USE_JSONRDD2, "true").toBoolean
165+
private[spark] def useJacksonStreamingAPI: Boolean = getConf(USE_JACKSON_STREAMING_API, "true").toBoolean
166166

167167
/**
168168
* Upper bound on the sizes (in bytes) of the tables qualified for the auto conversion to

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -615,7 +615,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
615615
*/
616616
@Experimental
617617
def jsonRDD(json: RDD[String], schema: StructType): DataFrame = {
618-
if (conf.useJsonRDD2) {
618+
if (conf.useJacksonStreamingAPI) {
619619
baseRelationToDataFrame(new JSONRelation(json, None, 1.0, Some(schema))(this))
620620
} else {
621621
val columnNameOfCorruptJsonRecord = conf.columnNameOfCorruptRecord
@@ -649,7 +649,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
649649
*/
650650
@Experimental
651651
def jsonRDD(json: RDD[String], samplingRatio: Double): DataFrame = {
652-
if (conf.useJsonRDD2) {
652+
if (conf.useJacksonStreamingAPI) {
653653
baseRelationToDataFrame(new JSONRelation(json, None, samplingRatio, None)(this))
654654
} else {
655655
val columnNameOfCorruptJsonRecord = conf.columnNameOfCorruptRecord

sql/core/src/main/scala/org/apache/spark/sql/json/JSONRelation.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -125,12 +125,12 @@ private[sql] class JSONRelation(
125125
samplingRatio,
126126
userSpecifiedSchema)(sqlContext)
127127

128-
private val useJsonRDD2: Boolean = sqlContext.conf.useJsonRDD2
128+
private val useJacksonStreamingAPI: Boolean = sqlContext.conf.useJacksonStreamingAPI
129129

130130
override val needConversion: Boolean = false
131131

132132
override lazy val schema = userSpecifiedSchema.getOrElse {
133-
if (useJsonRDD2) {
133+
if (useJacksonStreamingAPI) {
134134
JsonRDD2.nullTypeToStringType(
135135
JsonRDD2.inferSchema(
136136
baseRDD,
@@ -146,7 +146,7 @@ private[sql] class JSONRelation(
146146
}
147147

148148
override def buildScan(): RDD[Row] = {
149-
if (useJsonRDD2) {
149+
if (useJacksonStreamingAPI) {
150150
JsonRDD2.jsonStringToRow(
151151
baseRDD,
152152
schema,
@@ -160,7 +160,7 @@ private[sql] class JSONRelation(
160160
}
161161

162162
override def buildScan(requiredColumns: Seq[Attribute], filters: Seq[Expression]): RDD[Row] = {
163-
if (useJsonRDD2) {
163+
if (useJacksonStreamingAPI) {
164164
JsonRDD2.jsonStringToRow(
165165
baseRDD,
166166
StructType.fromAttributes(requiredColumns),

0 commit comments

Comments (0)