@@ -20,6 +20,7 @@ use std::iter::Peekable;
 use std::str::Chars;
 
 use super::dialect::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX};
+use super::dialect::BigQueryDialect;
 use super::dialect::Dialect;
 use super::dialect::PostgreSqlDialect;
 use super::dialect::SnowflakeDialect;
@@ -465,7 +466,7 @@ impl<'a> Tokenizer<'a> {
                         chars.next(); // consume the '*', starting a multi-line comment
                         self.tokenize_multiline_comment(chars)
                     }
-                    Some('/') if dialect_of!(self is SnowflakeDialect) => {
+                    Some('/') if dialect_of!(self is SnowflakeDialect | BigQueryDialect) => {
                         chars.next(); // consume the second '/', starting a snowflake single-line comment
                         let comment = self.tokenize_single_line_comment(chars);
                         Ok(Some(Token::Whitespace(Whitespace::SingleLineComment {
@@ -542,7 +543,7 @@ impl<'a> Tokenizer<'a> {
            '^' => self.consume_and_return(chars, Token::Caret),
            '{' => self.consume_and_return(chars, Token::LBrace),
            '}' => self.consume_and_return(chars, Token::RBrace),
-            '#' if dialect_of!(self is SnowflakeDialect) => {
+            '#' if dialect_of!(self is SnowflakeDialect | BigQueryDialect) => {
                chars.next(); // consume the '#', starting a snowflake single-line comment
                let comment = self.tokenize_single_line_comment(chars);
                Ok(Some(Token::Whitespace(Whitespace::SingleLineComment {