Skip to content

Commit 425e73e

Browse files
committed
Fix clippy 1.95 lint errors
1 parent 9f04ebe commit 425e73e

File tree

1 file changed

+79
-90
lines changed

1 file changed

+79
-90
lines changed

src/parser/mod.rs

Lines changed: 79 additions & 90 deletions
Original file line number | Diff line number | Diff line change
@@ -508,10 +508,10 @@ impl<'a> Parser<'a> {
508508
Token::EOF => break,
509509

510510
// end of statement
511-
Token::Word(word) => {
512-
if expecting_statement_delimiter && word.keyword == Keyword::END {
513-
break;
514-
}
511+
Token::Word(word)
512+
if expecting_statement_delimiter && word.keyword == Keyword::END =>
513+
{
514+
break;
515515
}
516516
_ => {}
517517
}
@@ -1305,41 +1305,40 @@ impl<'a> Parser<'a> {
13051305

13061306
let next_token = self.next_token();
13071307
match next_token.token {
1308-
t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
1309-
if self.peek_token_ref().token == Token::Period {
1310-
let mut id_parts: Vec<Ident> = vec![match t {
1311-
Token::Word(w) => w.into_ident(next_token.span),
1312-
Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1313-
_ => {
1314-
return Err(ParserError::ParserError(
1315-
"Internal parser error: unexpected token type".to_string(),
1316-
))
1308+
t @ (Token::Word(_) | Token::SingleQuotedString(_))
1309+
if self.peek_token_ref().token == Token::Period =>
1310+
{
1311+
let mut id_parts: Vec<Ident> = vec![match t {
1312+
Token::Word(w) => w.into_ident(next_token.span),
1313+
Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1314+
_ => {
1315+
return Err(ParserError::ParserError(
1316+
"Internal parser error: unexpected token type".to_string(),
1317+
))
1318+
}
1319+
}];
1320+
1321+
while self.consume_token(&Token::Period) {
1322+
let next_token = self.next_token();
1323+
match next_token.token {
1324+
Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1325+
Token::SingleQuotedString(s) => {
1326+
// SQLite has single-quoted identifiers
1327+
id_parts.push(Ident::with_quote('\'', s))
13171328
}
1318-
}];
1319-
1320-
while self.consume_token(&Token::Period) {
1321-
let next_token = self.next_token();
1322-
match next_token.token {
1323-
Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1324-
Token::SingleQuotedString(s) => {
1325-
// SQLite has single-quoted identifiers
1326-
id_parts.push(Ident::with_quote('\'', s))
1327-
}
1328-
Token::Placeholder(s) => {
1329-
// Snowflake uses $1, $2, etc. for positional column references
1330-
// in staged data queries like: SELECT t.$1 FROM @stage t
1331-
id_parts.push(Ident::new(s))
1332-
}
1333-
Token::Mul => {
1334-
return Ok(Expr::QualifiedWildcard(
1335-
ObjectName::from(id_parts),
1336-
AttachedToken(next_token),
1337-
));
1338-
}
1339-
_ => {
1340-
return self
1341-
.expected("an identifier or a '*' after '.'", next_token);
1342-
}
1329+
Token::Placeholder(s) => {
1330+
// Snowflake uses $1, $2, etc. for positional column references
1331+
// in staged data queries like: SELECT t.$1 FROM @stage t
1332+
id_parts.push(Ident::new(s))
1333+
}
1334+
Token::Mul => {
1335+
return Ok(Expr::QualifiedWildcard(
1336+
ObjectName::from(id_parts),
1337+
AttachedToken(next_token),
1338+
));
1339+
}
1340+
_ => {
1341+
return self.expected("an identifier or a '*' after '.'", next_token);
13431342
}
13441343
}
13451344
}
@@ -5031,10 +5030,10 @@ impl<'a> Parser<'a> {
50315030
loop {
50325031
match &self.peek_nth_token_ref(0).token {
50335032
Token::EOF => break,
5034-
Token::Word(w) => {
5035-
if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
5036-
break;
5037-
}
5033+
Token::Word(w)
5034+
if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) =>
5035+
{
5036+
break;
50385037
}
50395038
_ => {}
50405039
}
@@ -8377,70 +8376,60 @@ impl<'a> Parser<'a> {
83778376
Keyword::LINES,
83788377
Keyword::NULL,
83798378
]) {
8380-
Some(Keyword::FIELDS) => {
8381-
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
8379+
Some(Keyword::FIELDS)
8380+
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
8381+
{
8382+
row_delimiters.push(HiveRowDelimiter {
8383+
delimiter: HiveDelimiter::FieldsTerminatedBy,
8384+
char: self.parse_identifier()?,
8385+
});
8386+
8387+
if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
83828388
row_delimiters.push(HiveRowDelimiter {
8383-
delimiter: HiveDelimiter::FieldsTerminatedBy,
8389+
delimiter: HiveDelimiter::FieldsEscapedBy,
83848390
char: self.parse_identifier()?,
83858391
});
8386-
8387-
if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
8388-
row_delimiters.push(HiveRowDelimiter {
8389-
delimiter: HiveDelimiter::FieldsEscapedBy,
8390-
char: self.parse_identifier()?,
8391-
});
8392-
}
8393-
} else {
8394-
break;
83958392
}
83968393
}
8397-
Some(Keyword::COLLECTION) => {
8394+
Some(Keyword::COLLECTION)
83988395
if self.parse_keywords(&[
83998396
Keyword::ITEMS,
84008397
Keyword::TERMINATED,
84018398
Keyword::BY,
8402-
]) {
8403-
row_delimiters.push(HiveRowDelimiter {
8404-
delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
8405-
char: self.parse_identifier()?,
8406-
});
8407-
} else {
8408-
break;
8409-
}
8399+
]) =>
8400+
{
8401+
row_delimiters.push(HiveRowDelimiter {
8402+
delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
8403+
char: self.parse_identifier()?,
8404+
});
84108405
}
8411-
Some(Keyword::MAP) => {
8406+
Some(Keyword::MAP)
84128407
if self.parse_keywords(&[
84138408
Keyword::KEYS,
84148409
Keyword::TERMINATED,
84158410
Keyword::BY,
8416-
]) {
8417-
row_delimiters.push(HiveRowDelimiter {
8418-
delimiter: HiveDelimiter::MapKeysTerminatedBy,
8419-
char: self.parse_identifier()?,
8420-
});
8421-
} else {
8422-
break;
8423-
}
8411+
]) =>
8412+
{
8413+
row_delimiters.push(HiveRowDelimiter {
8414+
delimiter: HiveDelimiter::MapKeysTerminatedBy,
8415+
char: self.parse_identifier()?,
8416+
});
84248417
}
8425-
Some(Keyword::LINES) => {
8426-
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
8427-
row_delimiters.push(HiveRowDelimiter {
8428-
delimiter: HiveDelimiter::LinesTerminatedBy,
8429-
char: self.parse_identifier()?,
8430-
});
8431-
} else {
8432-
break;
8433-
}
8418+
Some(Keyword::LINES)
8419+
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
8420+
{
8421+
row_delimiters.push(HiveRowDelimiter {
8422+
delimiter: HiveDelimiter::LinesTerminatedBy,
8423+
char: self.parse_identifier()?,
8424+
});
84348425
}
8435-
Some(Keyword::NULL) => {
8436-
if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
8437-
row_delimiters.push(HiveRowDelimiter {
8438-
delimiter: HiveDelimiter::NullDefinedAs,
8439-
char: self.parse_identifier()?,
8440-
});
8441-
} else {
8442-
break;
8443-
}
8426+
Some(Keyword::NULL)
8427+
if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) =>
8428+
{
8429+
row_delimiters.push(HiveRowDelimiter {
8430+
delimiter: HiveDelimiter::NullDefinedAs,
8431+
char: self.parse_identifier()?,
8432+
});
84448433
}
84458434
_ => {
84468435
break;

0 commit comments

Comments
 (0)