From 41de3a974105398c8c0f6428b9b38e4feeab671a Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra
Date: Mon, 13 Jun 2022 17:20:39 -0700
Subject: [PATCH] Support more placeholders (#53)

---
 sqltree/tokenizer.py    | 13 +++++++++++++
 tests/test_formatter.py |  9 +++++++++
 2 files changed, 22 insertions(+)

diff --git a/sqltree/tokenizer.py b/sqltree/tokenizer.py
index c3d46c4..65520ac 100644
--- a/sqltree/tokenizer.py
+++ b/sqltree/tokenizer.py
@@ -89,6 +89,16 @@ def tokenize(sql: str, dialect: Dialect) -> Iterable[Token]:
             if next_char is not None and next_char.isalpha():
                 token_type = TokenType.placeholder
                 text = "%" + _consume_identifier(pi)
+            elif next_char == "(":
+                token_type = TokenType.placeholder
+                pi.next()
+                text = "%("
+                text += _consume_identifier(pi)
+                next_char = pi.next()
+                if next_char != ")":
+                    raise TokenizeError(f"expected ')', got {next_char!r}")
+                text += ")"
+                text += _consume_identifier(pi)
             elif next_char == "%":
                 pi.next()
                 token_type = TokenType.punctuation
@@ -96,6 +106,9 @@ def tokenize(sql: str, dialect: Dialect) -> Iterable[Token]:
             else:
                 token_type = TokenType.punctuation
                 text = "%"
+        elif char == "?":
+            token_type = TokenType.placeholder
+            text = char
         elif char in starting_char_to_continuations:
             token_type = TokenType.punctuation
             continuations = starting_char_to_continuations[char]
diff --git a/tests/test_formatter.py b/tests/test_formatter.py
index b5bf086..13c8184 100644
--- a/tests/test_formatter.py
+++ b/tests/test_formatter.py
@@ -738,3 +738,12 @@ def test_create_table() -> None:
 def test_rename_tables() -> None:
     assert format("rename table x to y") == "RENAME TABLE x TO y\n"
     assert format("rename table x to y, z to w") == "RENAME TABLE x TO y, z TO w\n"
+
+
+def test_placeholder() -> None:
+    assert format("select * from x where x = ?") == "SELECT *\nFROM x\nWHERE x = ?\n"
+    assert format("select * from x where x = %s") == "SELECT *\nFROM x\nWHERE x = %s\n"
+    assert (
+        format("select * from x where x = %(x)s")
+        == "SELECT *\nFROM x\nWHERE x = %(x)s\n"
+    )
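
A minimal usage sketch of the placeholder styles this patch covers, for anyone trying it locally. It assumes the `format` helper used in tests/test_formatter.py is `sqltree.formatter.format` (the import is not visible in this diff); the calls simply mirror the new test cases.

    # Usage sketch, not part of the patch. Assumes format() comes from
    # sqltree.formatter, as in the test suite above.
    from sqltree.formatter import format

    print(format("select * from x where x = ?"))      # "?" placeholder, new in this patch
    print(format("select * from x where x = %s"))     # "%s" placeholder, handled by the pre-existing isalpha() branch
    print(format("select * from x where x = %(x)s"))  # named "%(x)s" placeholder, new in this patch
    # Each call returns the reformatted query with the placeholder preserved, e.g.:
    # SELECT *
    # FROM x
    # WHERE x = ?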