Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -340,6 +340,24 @@
* <li>Since version: 3.4.0</li>
* </ul>
* </li>
* <li>Name: <code>BIT_LENGTH</code>
* <ul>
* <li>SQL semantic: <code>BIT_LENGTH(src)</code></li>
* <li>Since version: 3.4.0</li>
* </ul>
* </li>
* <li>Name: <code>CHAR_LENGTH</code>
* <ul>
* <li>SQL semantic: <code>CHAR_LENGTH(src)</code></li>
* <li>Since version: 3.4.0</li>
* </ul>
* </li>
* <li>Name: <code>CONCAT</code>
* <ul>
* <li>SQL semantic: <code>CONCAT(col1, col2, ..., colN)</code></li>
* <li>Since version: 3.4.0</li>
* </ul>
* </li>
* <li>Name: <code>OVERLAY</code>
* <ul>
* <li>SQL semantic: <code>OVERLAY(string, replace, position[, length])</code></li>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,9 @@ public String build(Expression expr) {
case "SHA2":
case "MD5":
case "CRC32":
case "BIT_LENGTH":
case "CHAR_LENGTH":
case "CONCAT":
return visitSQLFunction(name,
Arrays.stream(e.children()).map(c -> build(c)).toArray(String[]::new));
case "CASE_WHEN": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import org.apache.spark.sql.connector.expressions.{Cast => V2Cast, Expression =>
import org.apache.spark.sql.connector.expressions.aggregate.{AggregateFunc, Avg, Count, CountStar, GeneralAggregateFunc, Max, Min, Sum, UserDefinedAggregateFunc}
import org.apache.spark.sql.connector.expressions.filter.{AlwaysFalse, AlwaysTrue, And => V2And, Not => V2Not, Or => V2Or, Predicate => V2Predicate}
import org.apache.spark.sql.execution.datasources.PushableExpression
import org.apache.spark.sql.types.{BooleanType, IntegerType}
import org.apache.spark.sql.types.{BooleanType, IntegerType, StringType}

/**
* The builder to generate V2 expressions from catalyst expressions.
Expand Down Expand Up @@ -217,6 +217,11 @@ class V2ExpressionBuilder(e: Expression, isPredicate: Boolean = false) {
generateExpressionWithName("SUBSTRING", children)
case Upper(child) => generateExpressionWithName("UPPER", Seq(child))
case Lower(child) => generateExpressionWithName("LOWER", Seq(child))
case BitLength(child) if child.dataType.isInstanceOf[StringType] =>
generateExpressionWithName("BIT_LENGTH", Seq(child))
case Length(child) if child.dataType.isInstanceOf[StringType] =>
generateExpressionWithName("CHAR_LENGTH", Seq(child))
case concat: Concat => generateExpressionWithName("CONCAT", concat.children)
case translate: StringTranslate => generateExpressionWithName("TRANSLATE", translate.children)
case trim: StringTrim => generateExpressionWithName("TRIM", trim.children)
case trim: StringTrimLeft => generateExpressionWithName("LTRIM", trim.children)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,8 @@ private[sql] object H2Dialect extends JdbcDialect {
Set("ABS", "COALESCE", "GREATEST", "LEAST", "RAND", "LOG", "LOG10", "LN", "EXP",
"POWER", "SQRT", "FLOOR", "CEIL", "ROUND", "SIN", "SINH", "COS", "COSH", "TAN",
"TANH", "COT", "ASIN", "ACOS", "ATAN", "ATAN2", "DEGREES", "RADIANS", "SIGN",
"PI", "SUBSTRING", "UPPER", "LOWER", "TRANSLATE", "TRIM", "MD5", "SHA1", "SHA2")
"PI", "SUBSTRING", "UPPER", "LOWER", "TRANSLATE", "TRIM", "MD5", "SHA1", "SHA2",
"BIT_LENGTH", "CHAR_LENGTH", "CONCAT")

/**
 * Returns true if `funcName` is in this dialect's whitelist of V2 function
 * names that can be pushed down to H2 (see `supportedFunctions` above).
 * Names are matched exactly (case-sensitive, upper-case canonical form).
 */
override def isSupportedFunction(funcName: String): Boolean =
supportedFunctions.contains(funcName)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1449,6 +1449,22 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
"PushedFilters: [NAME IS NOT NULL]"
checkPushedInfo(df5, expectedPlanFragment5)
checkAnswer(df5, Seq(Row(6, "jen", 12000, 1200, true)))

val df6 = sql("SELECT * FROM h2.test.employee WHERE bit_length(name) = 40")
checkFiltersRemoved(df6)
checkPushedInfo(df6, "[NAME IS NOT NULL, BIT_LENGTH(NAME) = 40]")
checkAnswer(df6, Seq(Row(1, "cathy", 9000, 1200, false), Row(2, "david", 10000, 1300, true)))

val df7 = sql("SELECT * FROM h2.test.employee WHERE char_length(name) = 5")
checkFiltersRemoved(df7)
checkPushedInfo(df7, "[NAME IS NOT NULL, CHAR_LENGTH(NAME) = 5]")
checkAnswer(df7, Seq(Row(1, "cathy", 9000, 1200, false), Row(2, "david", 10000, 1300, true)))

val df8 = sql("SELECT * FROM h2.test.employee WHERE " +
"concat(name, ',' , cast(salary as string)) = 'cathy,9000.00'")
checkFiltersRemoved(df8)
checkPushedInfo(df8, "[(CONCAT(NAME, ',', CAST(SALARY AS string))) = 'cathy,9000.00']")
checkAnswer(df8, Seq(Row(1, "cathy", 9000, 1200, false)))
}

test("scan with aggregate push-down: MAX AVG with filter and group by") {
Expand Down