src/query/service/src/sql/planner (1 file changed: 7 additions, 3 deletions)
```diff
@@ -75,14 +75,15 @@ impl Planner {
                 .and_then(|token| Some(token.as_ref().ok()?.kind))
                 == Some(TokenKind::INSERT);
         // Only tokenize the beginning tokens for `INSERT INTO` statement because it's unnecessary to tokenize tokens for values.
-        // 
+        //
         // Stop the tokenizer on unrecognized token because some values inputs (e.g. CSV) may not be recognized by the tokenizer.
         // See also: https://github.com/datafuselabs/databend/issues/6669
         let mut tokens: Vec<Token> = if is_insert_stmt {
             (&mut tokenizer)
                 .take(PROBE_INSERT_INITIAL_TOKENS)
                 .take_while(|token| token.is_ok())
-                .collect::<Result<_>>().unwrap()
+                .collect::<Result<_>>()
+                .unwrap()
         } else {
             (&mut tokenizer).collect::<Result<_>>()?
         };
```
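The chain being reformatted here relies on an invariant worth spelling out: because `take_while(|token| token.is_ok())` stops at the first error, every element that reaches `collect` is `Ok`, so the trailing `unwrap()` cannot panic. A minimal runnable sketch of that probe pattern, using a stand-in fallible iterator rather than Databend's SQL tokenizer (the constant's value here is hypothetical):

```rust
// Hypothetical probe size; the real constant lives in the planner.
const PROBE_INSERT_INITIAL_TOKENS: usize = 8;

fn main() {
    // A token stream that starts failing partway through, like a
    // tokenizer hitting unrecognized CSV values after the SQL prefix.
    let mut tokenizer = (0..20).map(|i| {
        if i < 12 { Ok(i) } else { Err("unrecognized token") }
    });

    // Probe only a bounded prefix and stop at the first error.
    // `take_while` guarantees every collected item is `Ok`, so the
    // final `unwrap()` cannot panic.
    let tokens: Vec<i32> = (&mut tokenizer)
        .take(PROBE_INSERT_INITIAL_TOKENS)
        .take_while(|token| token.is_ok())
        .collect::<Result<_, _>>()
        .unwrap();

    assert_eq!(tokens, vec![0, 1, 2, 3, 4, 5, 6, 7]);
}
```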
```diff
@@ -119,7 +120,10 @@ impl Planner {
             && matches!(tokenizer.peek(), Some(Ok(_)))
         {
             // Tokenize more and try again.
-            for token in (&mut tokenizer).take(tokens.len() * 2).take_while(|token| token.is_ok()) {
+            for token in (&mut tokenizer)
+                .take(tokens.len() * 2)
+                .take_while(|token| token.is_ok())
+            {
                 tokens.push(token.unwrap());
             }
         } else {
```
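The reformatting in this hunk is behavior-preserving: the loop still pulls up to twice the currently buffered token count before the parse is retried. A sketch of that grow-and-retry step, under the same stand-in-tokenizer assumption as above (the retry trigger and counts are illustrative, not Databend's real logic):

```rust
fn main() {
    // A long, fully valid token stream.
    let mut tokenizer = (0..100).map(Ok::<i32, &str>).peekable();

    // Initial probe: buffer a small prefix of the stream.
    let mut tokens: Vec<i32> = (&mut tokenizer)
        .take(4)
        .collect::<Result<_, _>>()
        .unwrap();

    // Suppose the parse failed because the probe was too short, and the
    // stream still has recognizable tokens left: buffer up to twice as
    // many more tokens, after which the caller would re-run the parser.
    if matches!(tokenizer.peek(), Some(Ok(_))) {
        // Tokenize more and try again.
        for token in (&mut tokenizer)
            .take(tokens.len() * 2)
            .take_while(|token| token.is_ok())
        {
            tokens.push(token.unwrap()); // safe: `take_while` kept only `Ok`s
        }
    }

    assert_eq!(tokens.len(), 12); // 4 from the probe + 8 from the retry
}
```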