Commit 671d969

address comment
1 parent 8cccb77 commit 671d969

File tree

  • src/query/service/src/sql/planner

1 file changed (+7 −3 lines)

src/query/service/src/sql/planner/mod.rs

Lines changed: 7 additions & 3 deletions
@@ -75,10 +75,14 @@ impl Planner {
             .and_then(|token| Some(token.as_ref().ok()?.kind))
             == Some(TokenKind::INSERT);
         // Only tokenize the beginning tokens for `INSERT INTO` statement because it's unnecessary to tokenize tokens for values.
+        //
+        // Stop the tokenizer on unrecognized token because some values inputs (e.g. CSV) may not be recognized by the tokenizer.
+        // See also: https://github.com/datafuselabs/databend/issues/6669
         let mut tokens: Vec<Token> = if is_insert_stmt {
             (&mut tokenizer)
                 .take(PROBE_INSERT_INITIAL_TOKENS)
-                .collect::<Result<_>>()?
+                .take_while(|token| token.is_ok())
+                .collect::<Result<_>>().unwrap()
         } else {
             (&mut tokenizer).collect::<Result<_>>()?
         };
@@ -115,8 +119,8 @@ impl Planner {
             && matches!(tokenizer.peek(), Some(Ok(_)))
         {
             // Tokenize more and try again.
-            for token in (&mut tokenizer).take(tokens.len() * 2) {
-                tokens.push(token?);
+            for token in (&mut tokenizer).take(tokens.len() * 2).take_while(|token| token.is_ok()) {
+                tokens.push(token.unwrap());
             }
         } else {
             return res;
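
The change hinges on the fact that `take_while(|token| token.is_ok())` truncates the token stream at the first `Err`, so every item that reaches `collect::<Result<_>>()` is `Ok` and the following `unwrap()` cannot panic. Below is a minimal, self-contained sketch of that pattern; `Token` and the error type here are stand-ins, and the `Vec`-backed iterator substitutes for Databend's actual tokenizer.

```rust
// Sketch of the truncate-on-error pattern from the diff above.
// `Token` and the error string are hypothetical stand-ins, not
// Databend's real types.

#[derive(Debug, PartialEq)]
struct Token(&'static str);

fn main() {
    // Simulate tokenizing `INSERT INTO t <raw CSV>`: a few
    // recognized tokens, then values input the lexer rejects.
    let mut tokenizer = vec![
        Ok(Token("INSERT")),
        Ok(Token("INTO")),
        Ok(Token("t")),
        Err("unrecognized token"), // e.g. raw CSV after VALUES
        Ok(Token("never reached")),
    ]
    .into_iter();

    // `take_while` stops at the first `Err`, so every item that
    // reaches `collect` is `Ok` and `unwrap` can never panic.
    let tokens: Vec<Token> = (&mut tokenizer)
        .take_while(|token| token.is_ok())
        .collect::<Result<_, _>>()
        .unwrap();

    assert_eq!(tokens, vec![Token("INSERT"), Token("INTO"), Token("t")]);
}
```

One subtlety worth noting: `take_while` consumes the first failing item from the underlying iterator even though it discards it, so the rejected token itself is gone from the stream once the loop stops.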
