@@ -1509,7 +1509,8 @@ impl<'a> Tokenizer<'a> {
 
         chars.next();
 
-        if let Some('$') = chars.peek() {
+        // If the dialect does not support dollar-quoted strings, then `$$` is rather a placeholder.
+        if matches!(chars.peek(), Some('$')) && !self.dialect.supports_dollar_placeholder() {
             chars.next();
 
             let mut is_terminated = false;
@@ -1543,10 +1544,14 @@ impl<'a> Tokenizer<'a> {
             };
         } else {
             value.push_str(&peeking_take_while(chars, |ch| {
-                ch.is_alphanumeric() || ch == '_'
+                ch.is_alphanumeric()
+                    || ch == '_'
+                    // Allow $ as a placeholder character if the dialect supports it
+                    || matches!(ch, '$' if self.dialect.supports_dollar_placeholder())
             }));
 
-            if let Some('$') = chars.peek() {
+            // If the dialect does not support dollar-quoted strings, don't look for the end delimiter.
+            if matches!(chars.peek(), Some('$')) && !self.dialect.supports_dollar_placeholder() {
                 chars.next();
 
                 'searching_for_end: loop {
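
For contrast, a quick sketch of the path this hunk leaves alone: under a dialect that keeps dollar-quoting (PostgreSqlDialect here), `$$ABC$$` should still take the dollar-quoted-string branch above rather than become a placeholder. The expected shape in the comment is inferred from the `DollarQuotedString` token used elsewhere in this file, not verified output.

use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::tokenizer::Tokenizer;

fn main() {
    // PostgreSqlDialect does not opt into supports_dollar_placeholder(),
    // so `$$ABC$$` goes through the dollar-quoted-string path.
    let sql = "SELECT $$ABC$$";
    let tokens = Tokenizer::new(&PostgreSqlDialect {}, sql).tokenize().unwrap();
    // Assumed shape: [Word("SELECT"), Whitespace,
    // DollarQuotedString { value: "ABC", tag: None }]
    println!("{tokens:?}");
}
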
@@ -2137,7 +2142,7 @@ fn take_char_from_hex_digits(
 mod tests {
     use super::*;
     use crate::dialect::{
-        BigQueryDialect, ClickHouseDialect, HiveDialect, MsSqlDialect, MySqlDialect,
+        BigQueryDialect, ClickHouseDialect, HiveDialect, MsSqlDialect, MySqlDialect, SQLiteDialect,
     };
     use core::fmt::Debug;
 
@@ -2573,6 +2578,30 @@ mod tests {
         );
     }
 
+    #[test]
+    fn tokenize_dollar_placeholder() {
+        let sql = String::from("SELECT $$, $$ABC$$, $ABC$, $ABC");
+        let dialect = SQLiteDialect {};
+        let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap();
+        assert_eq!(
+            tokens,
+            vec![
+                Token::make_keyword("SELECT"),
+                Token::Whitespace(Whitespace::Space),
+                Token::Placeholder("$$".into()),
+                Token::Comma,
+                Token::Whitespace(Whitespace::Space),
+                Token::Placeholder("$$ABC$$".into()),
+                Token::Comma,
+                Token::Whitespace(Whitespace::Space),
+                Token::Placeholder("$ABC$".into()),
+                Token::Comma,
+                Token::Whitespace(Whitespace::Space),
+                Token::Placeholder("$ABC".into()),
+            ]
+        );
+    }
+
     #[test]
     fn tokenize_dollar_quoted_string_untagged() {
         let sql =
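For anyone opting a custom dialect into the new behavior, a minimal sketch; `MyDialect` is hypothetical, while `is_identifier_start`/`is_identifier_part` are the existing required methods of the `Dialect` trait and `supports_dollar_placeholder` is the method this diff adds:

use sqlparser::dialect::Dialect;

// Hypothetical dialect that opts into dollar placeholders.
#[derive(Debug)]
struct MyDialect;

impl Dialect for MyDialect {
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }

    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_alphanumeric() || ch == '_'
    }

    // With this returning true, `$$` and `$ABC$` tokenize as
    // Token::Placeholder instead of dollar-quoted strings.
    fn supports_dollar_placeholder(&self) -> bool {
        true
    }
}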