diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 54a158c1f..622acfc4a 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1684,7 +1684,7 @@ impl<'a> Tokenizer<'a> {
                             }
                         }
                         Some('#') => self.consume_and_return(chars, Token::QuestionMarkSharp),
-                        _ => self.consume_and_return(chars, Token::Question),
+                        _ => Ok(Some(Token::Question)),
                     }
                 }
                 '?' => {
diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs
index 11512cf80..d5739cf4e 100644
--- a/tests/sqlparser_postgres.rs
+++ b/tests/sqlparser_postgres.rs
@@ -26,7 +26,7 @@ use helpers::attached_token::AttachedToken;
 use sqlparser::ast::{
     DataType, DropBehavior, DropOperator, DropOperatorClass, DropOperatorSignature,
 };
-use sqlparser::tokenizer::Span;
+use sqlparser::tokenizer::{Span, Token, Whitespace};
 use test_utils::*;
 
 use sqlparser::ast::*;
@@ -7492,3 +7492,20 @@ fn parse_create_operator_class() {
     )
     .is_err());
 }
+
+#[test]
+fn tokenize_question_mark() {
+    let sql = "SELECT x ? y";
+    pg().tokenizes_to(
+        sql,
+        vec![
+            Token::make_keyword("SELECT"),
+            Token::Whitespace(Whitespace::Space),
+            Token::make_word("x", None),
+            Token::Whitespace(Whitespace::Space),
+            Token::Question,
+            Token::Whitespace(Whitespace::Space),
+            Token::make_word("y", None),
+        ],
+    )
+}