Skip to content

Commit c54bef6

Browse files
committed
Support for colon-preceded placeholders
1 parent 5ec953b commit c54bef6

2 files changed

Lines changed: 97 additions & 0 deletions

File tree

src/dialect/mod.rs

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -841,6 +841,12 @@ pub trait Dialect: Debug + Any {
841841
false
842842
}
843843

844+
    /// Returns true if this dialect allows colon-preceded placeholders,
    /// e.g. `SELECT :var` (JPA-style named parameters).
    ///
    /// Defaults to `false`; dialects opt in by overriding this method.
    fn supports_colon_placeholder(&self) -> bool {
        false
    }
849+
844850
/// Does the dialect support with clause in create index statement?
845851
/// e.g. `CREATE INDEX idx ON t WITH (key = value, key2)`
846852
fn supports_create_index_with_clause(&self) -> bool {

src/tokenizer.rs

Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1521,6 +1521,11 @@ impl<'a> Tokenizer<'a> {
15211521
match chars.peek() {
15221522
Some(':') => self.consume_and_return(chars, Token::DoubleColon),
15231523
Some('=') => self.consume_and_return(chars, Token::Assignment),
1524+
Some(c)
1525+
if self.dialect.supports_colon_placeholder() && c.is_alphabetic() =>
1526+
{
1527+
self.tokenize_colon_preceeded_placeholder(chars).map(Some)
1528+
}
15241529
_ => Ok(Some(Token::Colon)),
15251530
}
15261531
}
@@ -1756,6 +1761,30 @@ impl<'a> Tokenizer<'a> {
17561761
}
17571762
}
17581763

1764+
/// Tokenizes an identifier followed immediately after a colon,
1765+
/// aka named query parameter, e.g. `:name`. The next char of the
1766+
/// processed char stream is to be an alphabetic - panics otherwise.
1767+
fn tokenize_colon_preceeded_placeholder(
1768+
&self,
1769+
chars: &mut State,
1770+
) -> Result<Token, TokenizerError> {
1771+
let mut s = String::with_capacity(16);
1772+
s.push(':');
1773+
s.push(chars.next().expect("initial character missing"));
1774+
while let Some(&ch) = chars.peek() {
1775+
if ch.is_alphanumeric()
1776+
|| ch == '_'
1777+
|| matches!(ch, '$' if self.dialect.supports_dollar_placeholder())
1778+
{
1779+
s.push(ch);
1780+
chars.next();
1781+
} else {
1782+
break;
1783+
}
1784+
}
1785+
Ok(Token::Placeholder(s))
1786+
}
1787+
17591788
/// Tokenize dollar preceded value (i.e: a string/placeholder)
17601789
fn tokenize_dollar_preceded_value(&self, chars: &mut State) -> Result<Token, TokenizerError> {
17611790
let mut s = String::new();
@@ -2952,6 +2981,68 @@ mod tests {
29522981
);
29532982
}
29542983

2984+
#[test]
2985+
fn tokenize_colon_placeholder() {
2986+
#[derive(Debug)]
2987+
struct TestDialect(bool);
2988+
impl Dialect for TestDialect {
2989+
fn supports_colon_placeholder(&self) -> bool {
2990+
true
2991+
}
2992+
fn supports_dollar_placeholder(&self) -> bool {
2993+
self.0
2994+
}
2995+
fn is_identifier_start(&self, ch: char) -> bool {
2996+
ch.is_alphabetic() || ch == '_'
2997+
}
2998+
fn is_identifier_part(&self, ch: char) -> bool {
2999+
ch.is_alphabetic() || ch.is_ascii_digit() || ch == '_'
3000+
}
3001+
}
3002+
3003+
let sql = "SELECT :foo FROM bar";
3004+
let tokens = Tokenizer::new(&TestDialect(false), sql)
3005+
.tokenize_with_location()
3006+
.unwrap();
3007+
assert_eq!(
3008+
tokens.iter().map(|t| t.token.clone()).collect::<Vec<_>>(),
3009+
vec![
3010+
Token::make_keyword("SELECT"),
3011+
Token::Whitespace(Whitespace::Space),
3012+
Token::Placeholder(":foo".into()),
3013+
Token::Whitespace(Whitespace::Space),
3014+
Token::make_keyword("FROM"),
3015+
Token::Whitespace(Whitespace::Space),
3016+
Token::make_word("bar", None)
3017+
]
3018+
);
3019+
assert_eq!(
3020+
tokens[2].span,
3021+
Span::new(Location::of(1, 8), Location::of(1, 12))
3022+
);
3023+
3024+
let sql = "SELECT :foo$bar FROM bar";
3025+
let tokens = Tokenizer::new(&TestDialect(true), sql)
3026+
.tokenize_with_location()
3027+
.unwrap();
3028+
assert_eq!(
3029+
tokens.iter().map(|t| t.token.clone()).collect::<Vec<_>>(),
3030+
vec![
3031+
Token::make_keyword("SELECT"),
3032+
Token::Whitespace(Whitespace::Space),
3033+
Token::Placeholder(":foo$bar".into()),
3034+
Token::Whitespace(Whitespace::Space),
3035+
Token::make_keyword("FROM"),
3036+
Token::Whitespace(Whitespace::Space),
3037+
Token::make_word("bar", None)
3038+
]
3039+
);
3040+
assert_eq!(
3041+
tokens[2].span,
3042+
Span::new(Location::of(1, 8), Location::of(1, 16))
3043+
);
3044+
}
3045+
29553046
#[test]
29563047
fn tokenize_dollar_placeholder() {
29573048
let sql = String::from("SELECT $$, $$ABC$$, $ABC$, $ABC");

0 commit comments

Comments
 (0)