
Commit 0cea12e

fmt
1 parent 671d969 commit 0cea12e

File tree

  • src/query/service/src/sql/planner

1 file changed: +7 -3 lines

src/query/service/src/sql/planner/mod.rs

Lines changed: 7 additions & 3 deletions
@@ -75,14 +75,15 @@ impl Planner {
             .and_then(|token| Some(token.as_ref().ok()?.kind))
             == Some(TokenKind::INSERT);
         // Only tokenize the beginning tokens for `INSERT INTO` statement because it's unnecessary to tokenize tokens for values.
-        //
+        //
         // Stop the tokenizer on unrecognized token because some values inputs (e.g. CSV) may not be recognized by the tokenizer.
         // See also: https://github.com/datafuselabs/databend/issues/6669
         let mut tokens: Vec<Token> = if is_insert_stmt {
             (&mut tokenizer)
                 .take(PROBE_INSERT_INITIAL_TOKENS)
                 .take_while(|token| token.is_ok())
-                .collect::<Result<_>>().unwrap()
+                .collect::<Result<_>>()
+                .unwrap()
         } else {
             (&mut tokenizer).collect::<Result<_>>()?
         };
@@ -119,7 +120,10 @@ impl Planner {
             && matches!(tokenizer.peek(), Some(Ok(_)))
         {
             // Tokenize more and try again.
-            for token in (&mut tokenizer).take(tokens.len() * 2).take_while(|token| token.is_ok()) {
+            for token in (&mut tokenizer)
+                .take(tokens.len() * 2)
+                .take_while(|token| token.is_ok())
+            {
                 tokens.push(token.unwrap());
             }
         } else {
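
For context, the code touched by this formatting change probes only a bounded prefix of the token stream for INSERT statements and stops at the first token the lexer cannot recognize. Below is a minimal, self-contained sketch of that take / take_while pattern; the Token enum, tokenize function, and PROBE_INSERT_INITIAL_TOKENS constant here are simplified stand-ins for illustration, not Databend's actual planner or lexer types.

// A minimal standalone sketch (not Databend's real lexer API) of the probing
// pattern that this commit only reformats: take at most N leading tokens of an
// INSERT statement and stop at the first tokenizer error instead of failing.

#[derive(Debug)]
enum Token {
    Insert,
    Into,
    Values,
    Ident(String),
}

// Hypothetical probe window size, standing in for the real constant.
const PROBE_INSERT_INITIAL_TOKENS: usize = 4;

// Toy tokenizer: splits on whitespace and errors on anything it does not
// recognize, mimicking how raw values (e.g. CSV payloads) may not lex as SQL.
fn tokenize(sql: &str) -> impl Iterator<Item = Result<Token, String>> + '_ {
    sql.split_whitespace()
        .map(|word| match word.to_uppercase().as_str() {
            "INSERT" => Ok(Token::Insert),
            "INTO" => Ok(Token::Into),
            "VALUES" => Ok(Token::Values),
            w if w.chars().all(|c| c.is_alphanumeric() || c == '_') => {
                Ok(Token::Ident(w.to_string()))
            }
            w => Err(format!("unrecognized token: {w}")),
        })
}

fn main() {
    let sql = "INSERT INTO t VALUES a b c x,y";
    let mut tokenizer = tokenize(sql).peekable();

    // Probe only the first few tokens and stop early at the first error,
    // mirroring the `take` / `take_while` / `collect` chain in the diff.
    let mut tokens: Vec<Token> = (&mut tokenizer)
        .take(PROBE_INSERT_INITIAL_TOKENS)
        .take_while(|token| token.is_ok())
        .collect::<Result<_, _>>()
        .unwrap();
    println!("probed tokens: {tokens:?}");

    // If more recognizable tokens remain, tokenize more and try again,
    // widening the window as in the second hunk.
    if matches!(tokenizer.peek(), Some(Ok(_))) {
        for token in (&mut tokenizer)
            .take(tokens.len() * 2)
            .take_while(|token| token.is_ok())
        {
            tokens.push(token.unwrap());
        }
    }
    println!("after tokenizing more: {tokens:?}");
}

The point of the pattern, which the reformatting leaves unchanged, is that the payload after VALUES never has to lex cleanly: the probe is bounded by take, and everything from the first unrecognizable token onward is simply dropped rather than treated as an error.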
