From 9ee60a6fe0eed6e6d4e1b4387f51849bda0c6b9c Mon Sep 17 00:00:00 2001
From: "Zhu, Lipeng"
Date: Fri, 15 Mar 2019 09:28:19 +0800
Subject: [PATCH 1/5] Add docker integration test for MsSql server

---
 external/docker-integration-tests/pom.xml  |   8 +
 .../jdbc/MsSqlServerIntegrationSuite.scala | 173 ++++++++++++++++++
 2 files changed, 181 insertions(+)
 create mode 100644 external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala

diff --git a/external/docker-integration-tests/pom.xml b/external/docker-integration-tests/pom.xml
index b39db7540b7d2..bcc4934352395 100644
--- a/external/docker-integration-tests/pom.xml
+++ b/external/docker-integration-tests/pom.xml
@@ -150,5 +150,13 @@
       <version>10.5.0.5</version>
       <type>jar</type>
     </dependency>
+
+    <dependency>
+      <groupId>com.microsoft.sqlserver</groupId>
+      <artifactId>mssql-jdbc</artifactId>
+      <version>7.2.1.jre8</version>
+      <scope>test</scope>
+    </dependency>
+
   </dependencies>
 </project>
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
new file mode 100644
index 0000000000000..d19a14d0efbeb
--- /dev/null
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.jdbc
+
+import java.math.BigDecimal
+import java.sql.{Connection, Date, Timestamp}
+import java.util.Properties
+
+import org.apache.spark.tags.DockerTest
+
+@DockerTest
+class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
+  override val db = new DatabaseOnDocker {
+    override val imageName = "mcr.microsoft.com/mssql/server"
+    override val env = Map(
+      "SA_PASSWORD" -> "Sapass123",
+      "ACCEPT_EULA" -> "Y"
+    )
+    override val usesIpc = false
+    override val jdbcPort: Int = 1433
+
+    override def getJdbcUrl(ip: String, port: Int): String =
+      s"jdbc:sqlserver://$ip:$port;user=sa;password=Sapass123;"
+
+    override def getStartupProcessName: Option[String] = None
+  }
+
+  override def dataPreparation(conn: Connection): Unit = {
+    conn.prepareStatement("CREATE TABLE tbl (x INT, y VARCHAR (50))").executeUpdate()
+    conn.prepareStatement("INSERT INTO tbl VALUES (42,'fred')").executeUpdate()
+    conn.prepareStatement("INSERT INTO tbl VALUES (17,'dave')").executeUpdate()
+
+    conn.prepareStatement("CREATE TABLE numbers (a BIT, " +
+      "b DECIMAL(5,2), c NUMERIC(10,5), " +
+      "d FLOAT, e REAL, " +
+      "f BIGINT, g INT, h SMALLINT, i TINYINT, " +
+      "j MONEY, k SMALLMONEY)").executeUpdate()
+    conn.prepareStatement("INSERT INTO numbers VALUES (0, " +
+      "123, 12345.12, " +
+      "123456789012345.123, 123456789012345.123456789012345, " +
+      "9223372036854775807, 2147483647, 32767, 255, " +
+      "922337203685477.58, 214748.3647)").executeUpdate()
+
+    conn.prepareStatement("CREATE TABLE dates (a DATE, b DATETIME, c DATETIME2, " +
+      "d DATETIMEOFFSET, e SMALLDATETIME, f TIME)").executeUpdate()
+    conn.prepareStatement("INSERT INTO dates VALUES ('1991-11-09', '1999-01-01 13:23:35', " +
+      "'9999-12-31 23:59:59', '1901-05-09 23:59:59 +14:00', " +
+      "'1996-01-01 23:23:45', '13:31:24')").executeUpdate()
+
+    conn.prepareStatement("CREATE TABLE strings (a CHAR(10), b VARCHAR(10), c nchar(10), " +
+      "d nvarchar(10), e binary(4), f varbinary(4), g text, h ntext, i image)").executeUpdate()
+    conn.prepareStatement("INSERT INTO strings VALUES ('the', 'quick', 'brown', 'fox', 123456," +
+      " 123456, 'the', 'lazy', 'dog')").executeUpdate()
+
+  }
+
+  test("Basic test") {
+    val df = sqlContext.read.jdbc(jdbcUrl, "tbl", new Properties)
+    val rows = df.collect()
+    assert(rows.length == 2)
+    val types = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(types.length == 2)
+    assert(types(0).equals("class java.lang.Integer"))
+    assert(types(1).equals("class java.lang.String"))
+
+  }
+
+  test("Numeric types") {
+    val df = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
+    val rows = df.collect()
+    assert(rows.length == 1)
+    val types = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(types.length == 11)
+    assert(types(0).equals("class java.lang.Boolean"))
+    assert(types(1).equals("class java.math.BigDecimal"))
+    assert(types(2).equals("class java.math.BigDecimal"))
+    assert(types(3).equals("class java.lang.Double"))
+    assert(types(4).equals("class java.lang.Double"))
+    assert(types(5).equals("class java.lang.Long"))
+    assert(types(6).equals("class java.lang.Integer"))
+    assert(types(7).equals("class java.lang.Integer"))
+    assert(types(8).equals("class java.lang.Integer"))
+    assert(types(9).equals("class java.math.BigDecimal"))
+    assert(types(10).equals("class java.math.BigDecimal"))
+    assert(rows(0).getBoolean(0) == false)
+    assert(rows(0).getAs[BigDecimal](1).equals(new BigDecimal("123.00")))
+    assert(rows(0).getAs[BigDecimal](2).equals(new BigDecimal("12345.12000")))
+    assert(rows(0).getDouble(3) == 1.2345678901234512E14)
+    assert(rows(0).getDouble(4) == 1.23456788103168E14)
+    assert(rows(0).getLong(5) == 9223372036854775807L)
+    assert(rows(0).getInt(6) == 2147483647)
+    assert(rows(0).getInt(7) == 32767)
+    assert(rows(0).getInt(8) == 255)
+    assert(rows(0).getAs[BigDecimal](9).equals(new BigDecimal("922337203685477.5800")))
+    assert(rows(0).getAs[BigDecimal](10).equals(new BigDecimal("214748.3647")))
+  }
+
+  test("Date types") {
+    val df = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
+    val rows = df.collect()
+    assert(rows.length == 1)
+    val types = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(types.length == 6)
+    assert(types(0).equals("class java.sql.Date"))
+    assert(types(1).equals("class java.sql.Timestamp"))
+    assert(types(2).equals("class java.sql.Timestamp"))
+    assert(types(3).equals("class java.lang.String"))
+    assert(types(4).equals("class java.sql.Timestamp"))
+    assert(types(5).equals("class java.sql.Timestamp"))
+    assert(rows(0).getAs[Date](0).equals(Date.valueOf("1991-11-09")))
+    assert(rows(0).getAs[Timestamp](1).equals(Timestamp.valueOf("1999-01-01 13:23:35.0")))
+    assert(rows(0).getAs[Timestamp](2).equals(Timestamp.valueOf("9999-12-31 23:59:59.0")))
+    assert(rows(0).getString(3).equals("1901-05-09 23:59:59.0000000 +14:00"))
+    assert(rows(0).getAs[Timestamp](4).equals(Timestamp.valueOf("1996-01-01 23:24:00.0")))
+    assert(rows(0).getAs[Timestamp](5).equals(Timestamp.valueOf("1900-01-01 13:31:24.0")))
+  }
+
+
+  test("String types") {
+    val df = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
+    val rows = df.collect()
+    assert(rows.length == 1)
+    val types = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(types.length == 9)
+    assert(types(0).equals("class java.lang.String"))
+    assert(types(1).equals("class java.lang.String"))
+    assert(types(2).equals("class java.lang.String"))
+    assert(types(3).equals("class java.lang.String"))
+    assert(types(4).equals("class [B"))
+    assert(types(5).equals("class [B"))
+    assert(types(6).equals("class java.lang.String"))
+    assert(types(7).equals("class java.lang.String"))
+    assert(types(8).equals("class [B"))
+    assert(rows(0).getString(0).length == 10)
+    assert(rows(0).getString(0).trim.equals("the"))
+    assert(rows(0).getString(1).equals("quick"))
+    assert(rows(0).getString(2).length == 10)
+    assert(rows(0).getString(2).trim.equals("brown"))
+    assert(rows(0).getString(3).equals("fox"))
+    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](4), Array[Byte](0, 1, -30, 64)))
+    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](5), Array[Byte](0, 1, -30, 64)))
+    assert(rows(0).getString(6).equals("the"))
+    assert(rows(0).getString(7).equals("lazy"))
+    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](8), Array[Byte](100, 111, 103)))
+  }
+
+
+  test("Basic write test") {
+    val df1 = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
+    val df2 = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
+    val df3 = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
+
+    df1.write.jdbc(jdbcUrl, "numberscopy", new Properties)
+    df2.write.jdbc(jdbcUrl, "datescopy", new Properties)
+//    df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
+  }
+
+}

From 7a844e1564ca746c33aef6abcf6e0b0b2e167e93 Mon Sep 17 00:00:00 2001
From: "Zhu, Lipeng"
Date: Sat, 16 Mar 2019 18:47:24 +0800
Subject: [PATCH 2/5] Uncomment writing binary data to MsSql server as
 SPARK-27159 is fixed

---
 .../org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
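This patch re-enables the last write in "Basic write test". The "strings" table
holds BINARY(4), VARBINARY(4), and IMAGE columns, and per the commit message the
write only works once SPARK-27159 is fixed. A minimal standalone sketch of the
round trip being re-enabled, not part of the patch; the local SparkSession,
host, port, and SA credentials are assumptions mirroring the suite's Docker
setup:

  import java.util.Properties
  import org.apache.spark.sql.SparkSession

  object StringsCopySketch {
    def main(args: Array[String]): Unit = {
      val spark = SparkSession.builder()
        .appName("stringscopy-sketch")
        .master("local[*]")
        .getOrCreate()
      // Same URL shape as the suite's getJdbcUrl (assumed host/port).
      val jdbcUrl = "jdbc:sqlserver://localhost:1433;user=sa;password=Sapass123;"
      // Read the table with binary columns and write it back under a new name,
      // exactly what the uncommented line in the test now does.
      val df = spark.read.jdbc(jdbcUrl, "strings", new Properties)
      df.write.jdbc(jdbcUrl, "stringscopy", new Properties)
      spark.stop()
    }
  }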
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
index d19a14d0efbeb..a3388485212e8 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
@@ -167,7 +167,7 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
 
     df1.write.jdbc(jdbcUrl, "numberscopy", new Properties)
     df2.write.jdbc(jdbcUrl, "datescopy", new Properties)
-//    df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
+    df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
   }
 
 }

From 1b4bd43774f1dc8f0596d42a23c29ccf98780cc0 Mon Sep 17 00:00:00 2001
From: "Zhu, Lipeng"
Date: Mon, 18 Mar 2019 11:04:18 +0800
Subject: [PATCH 3/5] Add tag for MsSql server docker image

---
 .../org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
index a3388485212e8..e295b4569733d 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
@@ -26,7 +26,7 @@ import org.apache.spark.tags.DockerTest
 @DockerTest
 class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
   override val db = new DatabaseOnDocker {
-    override val imageName = "mcr.microsoft.com/mssql/server"
+    override val imageName = "mcr.microsoft.com/mssql/server:2017-GA-ubuntu"
     override val env = Map(
       "SA_PASSWORD" -> "Sapass123",
       "ACCEPT_EULA" -> "Y"

From ab474a477f3acc400591a4de729171e334387c79 Mon Sep 17 00:00:00 2001
From: "Zhu, Lipeng"
Date: Tue, 19 Mar 2019 09:29:55 +0800
Subject: [PATCH 4/5] Clean up code: remove empty lines

---
 external/docker-integration-tests/pom.xml                   | 2 --
 .../apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala | 6 ------
 2 files changed, 8 deletions(-)

diff --git a/external/docker-integration-tests/pom.xml b/external/docker-integration-tests/pom.xml
index bcc4934352395..a4956ff5ee9cc 100644
--- a/external/docker-integration-tests/pom.xml
+++ b/external/docker-integration-tests/pom.xml
@@ -150,13 +150,11 @@
       <version>10.5.0.5</version>
       <type>jar</type>
     </dependency>
-
     <dependency>
       <groupId>com.microsoft.sqlserver</groupId>
       <artifactId>mssql-jdbc</artifactId>
       <version>7.2.1.jre8</version>
       <scope>test</scope>
     </dependency>
-
   </dependencies>
 </project>
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
index e295b4569733d..47f8cafc1b5ec 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
@@ -66,7 +66,6 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
       "d nvarchar(10), e binary(4), f varbinary(4), g text, h ntext, i image)").executeUpdate()
     conn.prepareStatement("INSERT INTO strings VALUES ('the', 'quick', 'brown', 'fox', 123456," +
       " 123456, 'the', 'lazy', 'dog')").executeUpdate()
-
test("Basic test") { @@ -77,7 +76,6 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite { assert(types.length == 2) assert(types(0).equals("class java.lang.Integer")) assert(types(1).equals("class java.lang.String")) - } test("Numeric types") { @@ -130,7 +128,6 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite { assert(rows(0).getAs[Timestamp](5).equals(Timestamp.valueOf("1900-01-01 13:31:24.0"))) } - test("String types") { val df = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties) val rows = df.collect() @@ -159,15 +156,12 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite { assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](8), Array[Byte](100, 111, 103))) } - test("Basic write test") { val df1 = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties) val df2 = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties) val df3 = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties) - df1.write.jdbc(jdbcUrl, "numberscopy", new Properties) df2.write.jdbc(jdbcUrl, "datescopy", new Properties) df3.write.jdbc(jdbcUrl, "stringscopy", new Properties) } - } From 4bfe70cdb2077cedb0f9135c58e860793e4bd3d6 Mon Sep 17 00:00:00 2001 From: Dongjoon Hyun Date: Mon, 18 Mar 2019 21:58:36 -0700 Subject: [PATCH 5/5] update --- .../jdbc/MsSqlServerIntegrationSuite.scala | 170 +++++++++++------- 1 file changed, 104 insertions(+), 66 deletions(-) diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala index 47f8cafc1b5ec..82ce16c2b7e5a 100644 --- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala +++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala @@ -45,31 +45,64 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite { conn.prepareStatement("INSERT INTO tbl VALUES (42,'fred')").executeUpdate() conn.prepareStatement("INSERT INTO tbl VALUES (17,'dave')").executeUpdate() - conn.prepareStatement("CREATE TABLE numbers (a BIT, " + - "b DECIMAL(5,2), c NUMERIC(10,5), " + - "d FLOAT, e REAL, " + - "f BIGINT, g INT, h SMALLINT, i TINYINT, " + - "j MONEY, k SMALLMONEY)").executeUpdate() - conn.prepareStatement("INSERT INTO numbers VALUES (0, " + - "123, 12345.12, " + - "123456789012345.123, 123456789012345.123456789012345, " + - "9223372036854775807, 2147483647, 32767, 255, " + - "922337203685477.58, 214748.3647)").executeUpdate() + conn.prepareStatement( + """ + |CREATE TABLE numbers ( + |a BIT, + |b TINYINT, c SMALLINT, d INT, e BIGINT, + |f FLOAT, f1 FLOAT(24), + |g REAL, + |h DECIMAL(5,2), i NUMERIC(10,5), + |j MONEY, k SMALLMONEY) + """.stripMargin).executeUpdate() + conn.prepareStatement( + """ + |INSERT INTO numbers VALUES ( + |0, + |255, 32767, 2147483647, 9223372036854775807, + |123456789012345.123456789012345, 123456789012345.123456789012345, + |123456789012345.123456789012345, + |123, 12345.12, + |922337203685477.58, 214748.3647) + """.stripMargin).executeUpdate() - conn.prepareStatement("CREATE TABLE dates (a DATE, b DATETIME, c DATETIME2, " + - "d DATETIMEOFFSET, e SMALLDATETIME, f TIME)").executeUpdate() - conn.prepareStatement("INSERT INTO dates VALUES ('1991-11-09', '1999-01-01 13:23:35', " + - "'9999-12-31 23:59:59', '1901-05-09 23:59:59 +14:00', " + - "'1996-01-01 23:23:45', '13:31:24')").executeUpdate() + 
+      """
+        |CREATE TABLE dates (
+        |a DATE, b DATETIME, c DATETIME2,
+        |d DATETIMEOFFSET, e SMALLDATETIME,
+        |f TIME)
+      """.stripMargin).executeUpdate()
+    conn.prepareStatement(
+      """
+        |INSERT INTO dates VALUES (
+        |'1991-11-09', '1999-01-01 13:23:35', '9999-12-31 23:59:59',
+        |'1901-05-09 23:59:59 +14:00', '1996-01-01 23:23:45',
+        |'13:31:24')
+      """.stripMargin).executeUpdate()
 
-    conn.prepareStatement("CREATE TABLE strings (a CHAR(10), b VARCHAR(10), c nchar(10), " +
-      "d nvarchar(10), e binary(4), f varbinary(4), g text, h ntext, i image)").executeUpdate()
-    conn.prepareStatement("INSERT INTO strings VALUES ('the', 'quick', 'brown', 'fox', 123456," +
-      " 123456, 'the', 'lazy', 'dog')").executeUpdate()
+    conn.prepareStatement(
+      """
+        |CREATE TABLE strings (
+        |a CHAR(10), b VARCHAR(10),
+        |c NCHAR(10), d NVARCHAR(10),
+        |e BINARY(4), f VARBINARY(4),
+        |g TEXT, h NTEXT,
+        |i IMAGE)
+      """.stripMargin).executeUpdate()
+    conn.prepareStatement(
+      """
+        |INSERT INTO strings VALUES (
+        |'the', 'quick',
+        |'brown', 'fox',
+        |123456, 123456,
+        |'the', 'lazy',
+        |'dog')
+      """.stripMargin).executeUpdate()
   }
 
   test("Basic test") {
-    val df = sqlContext.read.jdbc(jdbcUrl, "tbl", new Properties)
+    val df = spark.read.jdbc(jdbcUrl, "tbl", new Properties)
     val rows = df.collect()
     assert(rows.length == 2)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
@@ -79,40 +112,44 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
   }
 
   test("Numeric types") {
-    val df = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
+    val df = spark.read.jdbc(jdbcUrl, "numbers", new Properties)
     val rows = df.collect()
     assert(rows.length == 1)
-    val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types.length == 11)
+    val row = rows(0)
+    val types = row.toSeq.map(x => x.getClass.toString)
+    assert(types.length == 12)
     assert(types(0).equals("class java.lang.Boolean"))
-    assert(types(1).equals("class java.math.BigDecimal"))
-    assert(types(2).equals("class java.math.BigDecimal"))
-    assert(types(3).equals("class java.lang.Double"))
-    assert(types(4).equals("class java.lang.Double"))
-    assert(types(5).equals("class java.lang.Long"))
-    assert(types(6).equals("class java.lang.Integer"))
-    assert(types(7).equals("class java.lang.Integer"))
-    assert(types(8).equals("class java.lang.Integer"))
+    assert(types(1).equals("class java.lang.Integer"))
+    assert(types(2).equals("class java.lang.Integer"))
+    assert(types(3).equals("class java.lang.Integer"))
+    assert(types(4).equals("class java.lang.Long"))
+    assert(types(5).equals("class java.lang.Double"))
+    assert(types(6).equals("class java.lang.Double"))
+    assert(types(7).equals("class java.lang.Double"))
+    assert(types(8).equals("class java.math.BigDecimal"))
     assert(types(9).equals("class java.math.BigDecimal"))
     assert(types(10).equals("class java.math.BigDecimal"))
-    assert(rows(0).getBoolean(0) == false)
-    assert(rows(0).getAs[BigDecimal](1).equals(new BigDecimal("123.00")))
-    assert(rows(0).getAs[BigDecimal](2).equals(new BigDecimal("12345.12000")))
-    assert(rows(0).getDouble(3) == 1.2345678901234512E14)
-    assert(rows(0).getDouble(4) == 1.23456788103168E14)
-    assert(rows(0).getLong(5) == 9223372036854775807L)
-    assert(rows(0).getInt(6) == 2147483647)
-    assert(rows(0).getInt(7) == 32767)
-    assert(rows(0).getInt(8) == 255)
-    assert(rows(0).getAs[BigDecimal](9).equals(new BigDecimal("922337203685477.5800")))
-    assert(rows(0).getAs[BigDecimal](10).equals(new BigDecimal("214748.3647")))
+    assert(types(11).equals("class java.math.BigDecimal"))
java.math.BigDecimal")) + assert(row.getBoolean(0) == false) + assert(row.getInt(1) == 255) + assert(row.getInt(2) == 32767) + assert(row.getInt(3) == 2147483647) + assert(row.getLong(4) == 9223372036854775807L) + assert(row.getDouble(5) == 1.2345678901234512E14) // float = float(53) has 15-digits precision + assert(row.getDouble(6) == 1.23456788103168E14) // float(24) has 7-digits precision + assert(row.getDouble(7) == 1.23456788103168E14) // real = float(24) + assert(row.getAs[BigDecimal](8).equals(new BigDecimal("123.00"))) + assert(row.getAs[BigDecimal](9).equals(new BigDecimal("12345.12000"))) + assert(row.getAs[BigDecimal](10).equals(new BigDecimal("922337203685477.5800"))) + assert(row.getAs[BigDecimal](11).equals(new BigDecimal("214748.3647"))) } test("Date types") { - val df = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties) + val df = spark.read.jdbc(jdbcUrl, "dates", new Properties) val rows = df.collect() assert(rows.length == 1) - val types = rows(0).toSeq.map(x => x.getClass.toString) + val row = rows(0) + val types = row.toSeq.map(x => x.getClass.toString) assert(types.length == 6) assert(types(0).equals("class java.sql.Date")) assert(types(1).equals("class java.sql.Timestamp")) @@ -120,19 +157,20 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite { assert(types(3).equals("class java.lang.String")) assert(types(4).equals("class java.sql.Timestamp")) assert(types(5).equals("class java.sql.Timestamp")) - assert(rows(0).getAs[Date](0).equals(Date.valueOf("1991-11-09"))) - assert(rows(0).getAs[Timestamp](1).equals(Timestamp.valueOf("1999-01-01 13:23:35.0"))) - assert(rows(0).getAs[Timestamp](2).equals(Timestamp.valueOf("9999-12-31 23:59:59.0"))) - assert(rows(0).getString(3).equals("1901-05-09 23:59:59.0000000 +14:00")) - assert(rows(0).getAs[Timestamp](4).equals(Timestamp.valueOf("1996-01-01 23:24:00.0"))) - assert(rows(0).getAs[Timestamp](5).equals(Timestamp.valueOf("1900-01-01 13:31:24.0"))) + assert(row.getAs[Date](0).equals(Date.valueOf("1991-11-09"))) + assert(row.getAs[Timestamp](1).equals(Timestamp.valueOf("1999-01-01 13:23:35.0"))) + assert(row.getAs[Timestamp](2).equals(Timestamp.valueOf("9999-12-31 23:59:59.0"))) + assert(row.getString(3).equals("1901-05-09 23:59:59.0000000 +14:00")) + assert(row.getAs[Timestamp](4).equals(Timestamp.valueOf("1996-01-01 23:24:00.0"))) + assert(row.getAs[Timestamp](5).equals(Timestamp.valueOf("1900-01-01 13:31:24.0"))) } test("String types") { - val df = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties) + val df = spark.read.jdbc(jdbcUrl, "strings", new Properties) val rows = df.collect() assert(rows.length == 1) - val types = rows(0).toSeq.map(x => x.getClass.toString) + val row = rows(0) + val types = row.toSeq.map(x => x.getClass.toString) assert(types.length == 9) assert(types(0).equals("class java.lang.String")) assert(types(1).equals("class java.lang.String")) @@ -143,23 +181,23 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite { assert(types(6).equals("class java.lang.String")) assert(types(7).equals("class java.lang.String")) assert(types(8).equals("class [B")) - assert(rows(0).getString(0).length == 10) - assert(rows(0).getString(0).trim.equals("the")) - assert(rows(0).getString(1).equals("quick")) - assert(rows(0).getString(2).length == 10) - assert(rows(0).getString(2).trim.equals("brown")) - assert(rows(0).getString(3).equals("fox")) - assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](4), Array[Byte](0, 1, -30, 64))) - 
-    assert(rows(0).getString(6).equals("the"))
-    assert(rows(0).getString(7).equals("lazy"))
-    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](8), Array[Byte](100, 111, 103)))
+    assert(row.getString(0).length == 10)
+    assert(row.getString(0).trim.equals("the"))
+    assert(row.getString(1).equals("quick"))
+    assert(row.getString(2).length == 10)
+    assert(row.getString(2).trim.equals("brown"))
+    assert(row.getString(3).equals("fox"))
+    assert(java.util.Arrays.equals(row.getAs[Array[Byte]](4), Array[Byte](0, 1, -30, 64)))
+    assert(java.util.Arrays.equals(row.getAs[Array[Byte]](5), Array[Byte](0, 1, -30, 64)))
+    assert(row.getString(6).equals("the"))
+    assert(row.getString(7).equals("lazy"))
+    assert(java.util.Arrays.equals(row.getAs[Array[Byte]](8), Array[Byte](100, 111, 103)))
   }
 
   test("Basic write test") {
-    val df1 = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
-    val df2 = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
-    val df3 = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
+    val df1 = spark.read.jdbc(jdbcUrl, "numbers", new Properties)
+    val df2 = spark.read.jdbc(jdbcUrl, "dates", new Properties)
+    val df3 = spark.read.jdbc(jdbcUrl, "strings", new Properties)
     df1.write.jdbc(jdbcUrl, "numberscopy", new Properties)
     df2.write.jdbc(jdbcUrl, "datescopy", new Properties)
     df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
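After all five patches, each SQL Server type exercised by the suite has a pinned
Spark-side class. A hypothetical standalone snippet for eyeballing that mapping
against a live server, not part of the series; the local SparkSession, host,
port, and SA credentials are assumptions mirroring the suite's Docker setup:

  import java.util.Properties
  import org.apache.spark.sql.SparkSession

  object MsSqlTypeMappingSketch {
    def main(args: Array[String]): Unit = {
      val spark = SparkSession.builder()
        .appName("mssql-type-mapping-sketch")
        .master("local[*]")
        .getOrCreate()
      // Same URL shape as the suite's getJdbcUrl (assumed host/port).
      val jdbcUrl = "jdbc:sqlserver://localhost:1433;user=sa;password=Sapass123;"
      // Print the schema Spark infers for each test table; per the assertions
      // above: BIT -> boolean, TINYINT/SMALLINT/INT -> integer, BIGINT -> long,
      // FLOAT/REAL -> double, DECIMAL/NUMERIC/MONEY -> decimal, and
      // DATETIMEOFFSET surfaces as a plain string.
      for (table <- Seq("numbers", "dates", "strings")) {
        spark.read.jdbc(jdbcUrl, table, new Properties).printSchema()
      }
      spark.stop()
    }
  }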