@@ -2592,44 +2592,53 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
   }
 
   test("inferring timestamp type") {
-    def schemaOf(jsons: String*): StructType = spark.read.json(jsons.toDS).schema
-
-    assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""",
-      """{"a":"2018-12-16T22:23:24.123-02:00"}""") === fromDDL("a timestamp"))
-
-    assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""", """{"a":1}""")
-      === fromDDL("a string"))
-    assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""", """{"a":"123"}""")
-      === fromDDL("a string"))
-
-    assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""", """{"a":null}""")
-      === fromDDL("a timestamp"))
-    assert(schemaOf("""{"a":null}""", """{"a":"2018-12-17T10:11:12.123-01:00"}""")
-      === fromDDL("a timestamp"))
+    Seq(true, false).foreach { legacyParser =>
+      withSQLConf(SQLConf.LEGACY_TIME_PARSER_ENABLED.key -> legacyParser.toString) {
+        def schemaOf(jsons: String*): StructType = spark.read.json(jsons.toDS).schema
+
+        assert(schemaOf(
+          """{"a":"2018-12-17T10:11:12.123-01:00"}""",
+          """{"a":"2018-12-16T22:23:24.123-02:00"}""") === fromDDL("a timestamp"))
+
+        assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""", """{"a":1}""")
+          === fromDDL("a string"))
+        assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""", """{"a":"123"}""")
+          === fromDDL("a string"))
+
+        assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""", """{"a":null}""")
+          === fromDDL("a timestamp"))
+        assert(schemaOf("""{"a":null}""", """{"a":"2018-12-17T10:11:12.123-01:00"}""")
+          === fromDDL("a timestamp"))
+      }
+    }
   }
 
   test("roundtrip for timestamp type inferring") {
-    val customSchema = new StructType(Array(StructField("date", TimestampType, true)))
-    withTempDir { dir =>
-      val timestampsWithFormatPath = s"${dir.getCanonicalPath}/timestampsWithFormat.json"
-      val timestampsWithFormat = spark.read
-        .option("timestampFormat", "dd/MM/yyyy HH:mm")
-        .json(datesRecords)
-      assert(timestampsWithFormat.schema === customSchema)
-
-      timestampsWithFormat.write
-        .format("json")
-        .option("timestampFormat", "yyyy-MM-dd HH:mm:ss")
-        .option(DateTimeUtils.TIMEZONE_OPTION, "UTC")
-        .save(timestampsWithFormatPath)
-
-      val readBack = spark.read
-        .option("timestampFormat", "yyyy-MM-dd HH:mm:ss")
-        .option(DateTimeUtils.TIMEZONE_OPTION, "UTC")
-        .json(timestampsWithFormatPath)
-
-      assert(readBack.schema === customSchema)
-      checkAnswer(readBack, timestampsWithFormat)
+    Seq(true, false).foreach { legacyParser =>
+      withSQLConf(SQLConf.LEGACY_TIME_PARSER_ENABLED.key -> legacyParser.toString) {
+        val customSchema = new StructType().add("date", TimestampType)
+        withTempDir { dir =>
+          val timestampsWithFormatPath = s"${dir.getCanonicalPath}/timestampsWithFormat.json"
+          val timestampsWithFormat = spark.read
+            .option("timestampFormat", "dd/MM/yyyy HH:mm")
+            .json(datesRecords)
+          assert(timestampsWithFormat.schema === customSchema)
+
+          timestampsWithFormat.write
+            .format("json")
+            .option("timestampFormat", "yyyy-MM-dd HH:mm:ss")
+            .option(DateTimeUtils.TIMEZONE_OPTION, "UTC")
+            .save(timestampsWithFormatPath)
+
+          val readBack = spark.read
+            .option("timestampFormat", "yyyy-MM-dd HH:mm:ss")
+            .option(DateTimeUtils.TIMEZONE_OPTION, "UTC")
+            .json(timestampsWithFormatPath)
+
+          assert(readBack.schema === customSchema)
+          checkAnswer(readBack, timestampsWithFormat)
+        }
+      }
     }
   }
 }
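Both tests now wrap their original bodies in `Seq(true, false).foreach { legacyParser => withSQLConf(...) { ... } }`, so every assertion runs once under the legacy time parser and once under the new one. As a rough illustration of what a `withSQLConf`-style helper does (in Spark the real helper comes from the `SQLHelper`/`SQLTestUtils` test traits; the object and method below are only a sketch, not the project's code), it overrides the requested SQL conf keys for the duration of a block and then restores the previous session state:

```scala
import org.apache.spark.sql.SparkSession

// Sketch of the withSQLConf pattern used in the diff above. `SQLConfFixture`
// is a hypothetical name introduced only for this example.
object SQLConfFixture {
  def withSQLConf[T](spark: SparkSession)(pairs: (String, String)*)(body: => T): T = {
    // Remember the current value (if any) of every key we are about to override.
    val previous = pairs.map { case (key, _) => key -> spark.conf.getOption(key) }
    pairs.foreach { case (key, value) => spark.conf.set(key, value) }
    try body
    finally {
      // Restore the originals; unset keys that had no explicit value before,
      // so the override cannot leak into other tests.
      previous.foreach {
        case (key, Some(oldValue)) => spark.conf.set(key, oldValue)
        case (key, None) => spark.conf.unset(key)
      }
    }
  }
}
```

With a helper like this, iterating over `Seq(true, false)` and passing `SQLConf.LEGACY_TIME_PARSER_ENABLED.key -> legacyParser.toString` exercises the same schema-inference and round-trip assertions under both parser implementations, which is exactly what the added wrapper in the two tests achieves.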