Skip to content

Commit 8781633

Browse files
andygrove authored and claude committed
minor: reduce unnecessary string allocations (apache#2178)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
1 parent 7e200b0 commit 8781633

3 files changed

Lines changed: 8 additions & 10 deletions

File tree

src/ast/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4965,9 +4965,9 @@ impl fmt::Display for Statement {
49654965
f,
49664966
"{tables}{read}{export}",
49674967
tables = if !tables.is_empty() {
4968-
" ".to_string() + &display_comma_separated(tables).to_string()
4968+
format!(" {}", display_comma_separated(tables))
49694969
} else {
4970-
"".to_string()
4970+
String::new()
49714971
},
49724972
export = if *export { " FOR EXPORT" } else { "" },
49734973
read = if *read_lock { " WITH READ LOCK" } else { "" }

src/parser/mod.rs

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11180,16 +11180,14 @@ impl<'a> Parser<'a> {
1118011180
/// Parse a single tab-separated value row used by `COPY` payload parsing.
1118111181
pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
1118211182
let mut values = vec![];
11183-
let mut content = String::from("");
11183+
let mut content = String::new();
1118411184
while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
1118511185
match t {
1118611186
Token::Whitespace(Whitespace::Tab) => {
11187-
values.push(Some(content.to_string()));
11188-
content.clear();
11187+
values.push(Some(core::mem::take(&mut content)));
1118911188
}
1119011189
Token::Whitespace(Whitespace::Newline) => {
11191-
values.push(Some(content.to_string()));
11192-
content.clear();
11190+
values.push(Some(core::mem::take(&mut content)));
1119311191
}
1119411192
Token::Backslash => {
1119511193
if self.consume_token(&Token::Period) {
@@ -11314,7 +11312,7 @@ impl<'a> Parser<'a> {
1131411312
Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
1131511313
_ => self.expected("placeholder", next_token),
1131611314
}?;
11317-
Ok(Value::Placeholder(tok.to_string() + &ident.value)
11315+
Ok(Value::Placeholder(format!("{tok}{}", ident.value))
1131811316
.with_span(Span::new(span.start, ident.span.end)))
1131911317
}
1132011318
unexpected => self.expected(

src/tokenizer.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1833,7 +1833,7 @@ impl<'a> Tokenizer<'a> {
18331833
'?' => {
18341834
chars.next();
18351835
let s = peeking_take_while(chars, |ch| ch.is_numeric());
1836-
Ok(Some(Token::Placeholder(String::from("?") + &s)))
1836+
Ok(Some(Token::Placeholder(format!("?{s}"))))
18371837
}
18381838

18391839
// identifier or keyword
@@ -1982,7 +1982,7 @@ impl<'a> Tokenizer<'a> {
19821982
}
19831983
}
19841984
} else {
1985-
return Ok(Token::Placeholder(String::from("$") + &value));
1985+
return Ok(Token::Placeholder(format!("${value}")));
19861986
}
19871987
}
19881988

0 commit comments

Comments (0)