Skip to content

Commit ce0edda

Browse files
committed
Clean up the test
1 parent 5c53472 commit ce0edda

File tree

1 file changed

+9
-7
lines changed

1 file changed

+9
-7
lines changed

sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala

Lines changed: 9 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -142,15 +142,17 @@ class JsonFunctionsSuite extends QueryTest with SharedSQLContext {
142142
"Unable to convert column a of type calendarinterval to JSON."))
143143
}
144144

145-
test("to_json and from_json roundtrip") {
146-
val dfOne = Seq(Some(Tuple1(Tuple1(1))), None).toDF("a")
147-
val readBackOne = dfOne.select(to_json($"a").as("b"))
148-
.select(from_json($"b", dfOne.schema.head.dataType.asInstanceOf[StructType]))
145+
test("roundtrip in to_json and from_json") {
146+
val dfOne = Seq(Some(Tuple1(Tuple1(1))), None).toDF("struct")
147+
val schemaOne = dfOne.schema(0).dataType.asInstanceOf[StructType]
148+
val readBackOne = dfOne.select(to_json($"struct").as("json"))
149+
.select(from_json($"json", schemaOne).as("struct"))
149150
checkAnswer(dfOne, readBackOne)
150151

151-
val dfTwo = Seq(Some("""{"a":1}"""), None).toDF("value")
152-
val schema = new StructType().add("a", IntegerType)
153-
val readBackTwo = dfTwo.select(from_json($"value", schema).as("b")).select(to_json($"b"))
152+
val dfTwo = Seq(Some("""{"a":1}"""), None).toDF("json")
153+
val schemaTwo = new StructType().add("a", IntegerType)
154+
val readBackTwo = dfTwo.select(from_json($"json", schemaTwo).as("struct"))
155+
.select(to_json($"struct").as("json"))
154156
checkAnswer(dfTwo, readBackTwo)
155157
}
156158
}

0 commit comments

Comments (0)