Skip to content

Commit 210ed25

Browse files
mihailom-db authored and cloud-fan committed
[SPARK-48172][SQL] Fix escaping issues in JDBCDialects
This PR is a fix of #46437. The previous PR was reverted as `LONGTEXT` is not supported by all dialects. This fix special-cases escaping for MySQL and fixes issues with redundant escaping for the ' character. New changes introduced in the fix include changing `LONGTEXT` -> `VARCHAR(50)` (kept as `LONGTEXT` only for MySQL), as well as a fix for table naming in the tests. When pushing down startsWith, endsWith and contains, they are converted to LIKE. This requires the addition of escape characters for these expressions. Unfortunately, MySQL uses the ESCAPE '\\' syntax instead of ESCAPE '\', which would cause errors when trying to push down. Does this introduce any user-facing change? Yes. How was this patch tested? Tests for each existing dialect. Was this patch authored or co-authored using generative AI tooling? No. Closes #46588 from mihailom-db/SPARK-48172. Authored-by: Mihailo Milosevic <[email protected]> Signed-off-by: Wenchen Fan <[email protected]> (cherry picked from commit 9e386b4) Signed-off-by: Wenchen Fan <[email protected]>
1 parent 07e08c0 commit 210ed25

File tree

12 files changed

+291
-14
lines changed

12 files changed

+291
-14
lines changed

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,12 @@ class DB2IntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest {
8080
connection.prepareStatement(
8181
"CREATE TABLE employee (dept INTEGER, name VARCHAR(10), salary DECIMAL(20, 2), bonus DOUBLE)")
8282
.executeUpdate()
83+
connection.prepareStatement(
84+
s"""CREATE TABLE pattern_testing_table (
85+
|pattern_testing_col VARCHAR(50)
86+
|)
87+
""".stripMargin
88+
).executeUpdate()
8389
}
8490

8591
override def testUpdateColumnType(tbl: String): Unit = {

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DockerJDBCIntegrationV2Suite.scala

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,17 @@ abstract class DockerJDBCIntegrationV2Suite extends DockerJDBCIntegrationSuite {
3838
.executeUpdate()
3939
connection.prepareStatement("INSERT INTO employee VALUES (6, 'jen', 12000, 1200)")
4040
.executeUpdate()
41+
42+
connection.prepareStatement(
43+
s"""
44+
|INSERT INTO pattern_testing_table VALUES
45+
|('special_character_quote''_present'),
46+
|('special_character_quote_not_present'),
47+
|('special_character_percent%_present'),
48+
|('special_character_percent_not_present'),
49+
|('special_character_underscore_present'),
50+
|('special_character_underscorenot_present')
51+
""".stripMargin).executeUpdate()
4152
}
4253

4354
def tablePreparation(connection: Connection): Unit

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,12 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JD
8686
connection.prepareStatement(
8787
"CREATE TABLE employee (dept INT, name VARCHAR(32), salary NUMERIC(20, 2), bonus FLOAT)")
8888
.executeUpdate()
89+
connection.prepareStatement(
90+
s"""CREATE TABLE pattern_testing_table (
91+
|pattern_testing_col VARCHAR(50)
92+
|)
93+
""".stripMargin
94+
).executeUpdate()
8995
}
9096

9197
override def notSupportsTableComment: Boolean = true

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -88,6 +88,12 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest
8888
connection.prepareStatement(
8989
"CREATE TABLE employee (dept INT, name VARCHAR(32), salary DECIMAL(20, 2)," +
9090
" bonus DOUBLE)").executeUpdate()
91+
connection.prepareStatement(
92+
s"""CREATE TABLE pattern_testing_table (
93+
|pattern_testing_col LONGTEXT
94+
|)
95+
""".stripMargin
96+
).executeUpdate()
9197
}
9298

9399
override def testUpdateColumnType(tbl: String): Unit = {

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -106,6 +106,12 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTes
106106
connection.prepareStatement(
107107
"CREATE TABLE employee (dept NUMBER(32), name VARCHAR2(32), salary NUMBER(20, 2)," +
108108
" bonus BINARY_DOUBLE)").executeUpdate()
109+
connection.prepareStatement(
110+
s"""CREATE TABLE pattern_testing_table (
111+
|pattern_testing_col VARCHAR(50)
112+
|)
113+
""".stripMargin
114+
).executeUpdate()
109115
}
110116

111117
override def testUpdateColumnType(tbl: String): Unit = {

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,12 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCT
5959
connection.prepareStatement(
6060
"CREATE TABLE employee (dept INTEGER, name VARCHAR(32), salary NUMERIC(20, 2)," +
6161
" bonus double precision)").executeUpdate()
62+
connection.prepareStatement(
63+
s"""CREATE TABLE pattern_testing_table (
64+
|pattern_testing_col VARCHAR(50)
65+
|)
66+
""".stripMargin
67+
).executeUpdate()
6268
}
6369

6470
override def testUpdateColumnType(tbl: String): Unit = {

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala

Lines changed: 229 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -358,6 +358,235 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
358358
assert(scan.schema.names.sameElements(Seq(col)))
359359
}
360360

361+
test("SPARK-48172: Test CONTAINS") {
362+
val df1 = spark.sql(
363+
s"""
364+
|SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
365+
|WHERE contains(pattern_testing_col, 'quote\\'')""".stripMargin)
366+
df1.explain("formatted")
367+
val rows1 = df1.collect()
368+
assert(rows1.length === 1)
369+
assert(rows1(0).getString(0) === "special_character_quote'_present")
370+
371+
val df2 = spark.sql(
372+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
373+
|WHERE contains(pattern_testing_col, 'percent%')""".stripMargin)
374+
val rows2 = df2.collect()
375+
assert(rows2.length === 1)
376+
assert(rows2(0).getString(0) === "special_character_percent%_present")
377+
378+
val df3 = spark.
379+
sql(
380+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
381+
|WHERE contains(pattern_testing_col, 'underscore_')""".stripMargin)
382+
val rows3 = df3.collect()
383+
assert(rows3.length === 1)
384+
assert(rows3(0).getString(0) === "special_character_underscore_present")
385+
386+
val df4 = spark.
387+
sql(
388+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
389+
|WHERE contains(pattern_testing_col, 'character')
390+
|ORDER BY pattern_testing_col""".stripMargin)
391+
val rows4 = df4.collect()
392+
assert(rows4.length === 6)
393+
assert(rows4(0).getString(0) === "special_character_percent%_present")
394+
assert(rows4(1).getString(0) === "special_character_percent_not_present")
395+
assert(rows4(2).getString(0) === "special_character_quote'_present")
396+
assert(rows4(3).getString(0) === "special_character_quote_not_present")
397+
assert(rows4(4).getString(0) === "special_character_underscore_present")
398+
assert(rows4(5).getString(0) === "special_character_underscorenot_present")
399+
}
400+
401+
test("SPARK-48172: Test ENDSWITH") {
402+
val df1 = spark.sql(
403+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
404+
|WHERE endswith(pattern_testing_col, 'quote\\'_present')""".stripMargin)
405+
val rows1 = df1.collect()
406+
assert(rows1.length === 1)
407+
assert(rows1(0).getString(0) === "special_character_quote'_present")
408+
409+
val df2 = spark.sql(
410+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
411+
|WHERE endswith(pattern_testing_col, 'percent%_present')""".stripMargin)
412+
val rows2 = df2.collect()
413+
assert(rows2.length === 1)
414+
assert(rows2(0).getString(0) === "special_character_percent%_present")
415+
416+
val df3 = spark.
417+
sql(
418+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
419+
|WHERE endswith(pattern_testing_col, 'underscore_present')""".stripMargin)
420+
val rows3 = df3.collect()
421+
assert(rows3.length === 1)
422+
assert(rows3(0).getString(0) === "special_character_underscore_present")
423+
424+
val df4 = spark.
425+
sql(
426+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
427+
|WHERE endswith(pattern_testing_col, 'present')
428+
|ORDER BY pattern_testing_col""".stripMargin)
429+
val rows4 = df4.collect()
430+
assert(rows4.length === 6)
431+
assert(rows4(0).getString(0) === "special_character_percent%_present")
432+
assert(rows4(1).getString(0) === "special_character_percent_not_present")
433+
assert(rows4(2).getString(0) === "special_character_quote'_present")
434+
assert(rows4(3).getString(0) === "special_character_quote_not_present")
435+
assert(rows4(4).getString(0) === "special_character_underscore_present")
436+
assert(rows4(5).getString(0) === "special_character_underscorenot_present")
437+
}
438+
439+
test("SPARK-48172: Test STARTSWITH") {
440+
val df1 = spark.sql(
441+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
442+
|WHERE startswith(pattern_testing_col, 'special_character_quote\\'')""".stripMargin)
443+
val rows1 = df1.collect()
444+
assert(rows1.length === 1)
445+
assert(rows1(0).getString(0) === "special_character_quote'_present")
446+
447+
val df2 = spark.sql(
448+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
449+
|WHERE startswith(pattern_testing_col, 'special_character_percent%')""".stripMargin)
450+
val rows2 = df2.collect()
451+
assert(rows2.length === 1)
452+
assert(rows2(0).getString(0) === "special_character_percent%_present")
453+
454+
val df3 = spark.
455+
sql(
456+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
457+
|WHERE startswith(pattern_testing_col, 'special_character_underscore_')""".stripMargin)
458+
val rows3 = df3.collect()
459+
assert(rows3.length === 1)
460+
assert(rows3(0).getString(0) === "special_character_underscore_present")
461+
462+
val df4 = spark.
463+
sql(
464+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
465+
|WHERE startswith(pattern_testing_col, 'special_character')
466+
|ORDER BY pattern_testing_col""".stripMargin)
467+
val rows4 = df4.collect()
468+
assert(rows4.length === 6)
469+
assert(rows4(0).getString(0) === "special_character_percent%_present")
470+
assert(rows4(1).getString(0) === "special_character_percent_not_present")
471+
assert(rows4(2).getString(0) === "special_character_quote'_present")
472+
assert(rows4(3).getString(0) === "special_character_quote_not_present")
473+
assert(rows4(4).getString(0) === "special_character_underscore_present")
474+
assert(rows4(5).getString(0) === "special_character_underscorenot_present")
475+
}
476+
477+
test("SPARK-48172: Test LIKE") {
478+
// this one should map to contains
479+
val df1 = spark.sql(
480+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
481+
|WHERE pattern_testing_col LIKE '%quote\\'%'""".stripMargin)
482+
val rows1 = df1.collect()
483+
assert(rows1.length === 1)
484+
assert(rows1(0).getString(0) === "special_character_quote'_present")
485+
486+
val df2 = spark.sql(
487+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
488+
|WHERE pattern_testing_col LIKE '%percent\\%%'""".stripMargin)
489+
val rows2 = df2.collect()
490+
assert(rows2.length === 1)
491+
assert(rows2(0).getString(0) === "special_character_percent%_present")
492+
493+
val df3 = spark.
494+
sql(
495+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
496+
|WHERE pattern_testing_col LIKE '%underscore\\_%'""".stripMargin)
497+
val rows3 = df3.collect()
498+
assert(rows3.length === 1)
499+
assert(rows3(0).getString(0) === "special_character_underscore_present")
500+
501+
val df4 = spark.
502+
sql(
503+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
504+
|WHERE pattern_testing_col LIKE '%character%'
505+
|ORDER BY pattern_testing_col""".stripMargin)
506+
val rows4 = df4.collect()
507+
assert(rows4.length === 6)
508+
assert(rows4(0).getString(0) === "special_character_percent%_present")
509+
assert(rows4(1).getString(0) === "special_character_percent_not_present")
510+
assert(rows4(2).getString(0) === "special_character_quote'_present")
511+
assert(rows4(3).getString(0) === "special_character_quote_not_present")
512+
assert(rows4(4).getString(0) === "special_character_underscore_present")
513+
assert(rows4(5).getString(0) === "special_character_underscorenot_present")
514+
515+
// map to startsWith
516+
// this one should map to contains
517+
val df5 = spark.sql(
518+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
519+
|WHERE pattern_testing_col LIKE 'special_character_quote\\'%'""".stripMargin)
520+
val rows5 = df5.collect()
521+
assert(rows5.length === 1)
522+
assert(rows5(0).getString(0) === "special_character_quote'_present")
523+
524+
val df6 = spark.sql(
525+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
526+
|WHERE pattern_testing_col LIKE 'special_character_percent\\%%'""".stripMargin)
527+
val rows6 = df6.collect()
528+
assert(rows6.length === 1)
529+
assert(rows6(0).getString(0) === "special_character_percent%_present")
530+
531+
val df7 = spark.
532+
sql(
533+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
534+
|WHERE pattern_testing_col LIKE 'special_character_underscore\\_%'""".stripMargin)
535+
val rows7 = df7.collect()
536+
assert(rows7.length === 1)
537+
assert(rows7(0).getString(0) === "special_character_underscore_present")
538+
539+
val df8 = spark.
540+
sql(
541+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
542+
|WHERE pattern_testing_col LIKE 'special_character%'
543+
|ORDER BY pattern_testing_col""".stripMargin)
544+
val rows8 = df8.collect()
545+
assert(rows8.length === 6)
546+
assert(rows8(0).getString(0) === "special_character_percent%_present")
547+
assert(rows8(1).getString(0) === "special_character_percent_not_present")
548+
assert(rows8(2).getString(0) === "special_character_quote'_present")
549+
assert(rows8(3).getString(0) === "special_character_quote_not_present")
550+
assert(rows8(4).getString(0) === "special_character_underscore_present")
551+
assert(rows8(5).getString(0) === "special_character_underscorenot_present")
552+
// map to endsWith
553+
// this one should map to contains
554+
val df9 = spark.sql(
555+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
556+
|WHERE pattern_testing_col LIKE '%quote\\'_present'""".stripMargin)
557+
val rows9 = df9.collect()
558+
assert(rows9.length === 1)
559+
assert(rows9(0).getString(0) === "special_character_quote'_present")
560+
561+
val df10 = spark.sql(
562+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
563+
|WHERE pattern_testing_col LIKE '%percent\\%_present'""".stripMargin)
564+
val rows10 = df10.collect()
565+
assert(rows10.length === 1)
566+
assert(rows10(0).getString(0) === "special_character_percent%_present")
567+
568+
val df11 = spark.
569+
sql(
570+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
571+
|WHERE pattern_testing_col LIKE '%underscore\\_present'""".stripMargin)
572+
val rows11 = df11.collect()
573+
assert(rows11.length === 1)
574+
assert(rows11(0).getString(0) === "special_character_underscore_present")
575+
576+
val df12 = spark.
577+
sql(
578+
s"""SELECT * FROM $catalogAndNamespace.${caseConvert("pattern_testing_table")}
579+
|WHERE pattern_testing_col LIKE '%present' ORDER BY pattern_testing_col""".stripMargin)
580+
val rows12 = df12.collect()
581+
assert(rows12.length === 6)
582+
assert(rows12(0).getString(0) === "special_character_percent%_present")
583+
assert(rows12(1).getString(0) === "special_character_percent_not_present")
584+
assert(rows12(2).getString(0) === "special_character_quote'_present")
585+
assert(rows12(3).getString(0) === "special_character_quote_not_present")
586+
assert(rows12(4).getString(0) === "special_character_underscore_present")
587+
assert(rows12(5).getString(0) === "special_character_underscorenot_present")
588+
}
589+
361590
test("SPARK-37038: Test TABLESAMPLE") {
362591
if (supportsTableSample) {
363592
withTable(s"$catalogName.new_table") {

sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -66,9 +66,6 @@ protected String escapeSpecialCharsForLikePattern(String str) {
6666
case '%':
6767
builder.append("\\%");
6868
break;
69-
case '\'':
70-
builder.append("\\\'");
71-
break;
7269
default:
7370
builder.append(c);
7471
}

sql/catalyst/src/main/scala/org/apache/spark/sql/connector/expressions/expressions.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@
1717

1818
package org.apache.spark.sql.connector.expressions
1919

20+
import org.apache.commons.lang3.StringUtils
21+
2022
import org.apache.spark.SparkException
2123
import org.apache.spark.sql.catalyst
2224
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
@@ -350,7 +352,7 @@ private[sql] object HoursTransform {
350352
private[sql] final case class LiteralValue[T](value: T, dataType: DataType) extends Literal[T] {
351353
override def toString: String = {
352354
if (dataType.isInstanceOf[StringType]) {
353-
s"'$value'"
355+
s"'${StringUtils.replace(s"$value", "'", "''")}'"
354356
} else {
355357
s"$value"
356358
}

sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -254,13 +254,6 @@ private[sql] object H2Dialect extends JdbcDialect {
254254
}
255255

256256
class H2SQLBuilder extends JDBCSQLBuilder {
257-
override def escapeSpecialCharsForLikePattern(str: String): String = {
258-
str.map {
259-
case '_' => "\\_"
260-
case '%' => "\\%"
261-
case c => c.toString
262-
}.mkString
263-
}
264257

265258
override def visitAggregateFunction(
266259
funcName: String, isDistinct: Boolean, inputs: Array[String]): String =

0 commit comments

Comments (0)