
Commit 20f065d

fix: update isContainCaret judgment when caret position token is whitespace (#390)
* fix: update isContainCaret judgment when caret position token is whitespace
* fix: remove unnecessary +Infinity
1 parent 05134bc commit 20f065d
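
Why the change matters, as a minimal self-contained TypeScript sketch (mocked token objects only, not the library code): antlr lexes whitespace onto the hidden channel, so when the caret sits on the trailing whitespace of a statement its token index lies past the statement's stop token and the old check wrongly reports the statement as not containing the caret; the fix maps the hidden caret token back to the previous non-hidden token before comparing.

// Mocked token stream for `SELECT id FROM tb WHERE ` with the caret at the very end.
interface MockToken {
    tokenIndex: number;
    text: string;
    hidden: boolean; // whitespace is lexed onto antlr's hidden channel
}

const tokens: MockToken[] = [
    { tokenIndex: 0, text: 'SELECT', hidden: false },
    { tokenIndex: 1, text: ' ', hidden: true },
    { tokenIndex: 2, text: 'id', hidden: false },
    { tokenIndex: 3, text: ' ', hidden: true },
    { tokenIndex: 4, text: 'FROM', hidden: false },
    { tokenIndex: 5, text: ' ', hidden: true },
    { tokenIndex: 6, text: 'tb', hidden: false },
    { tokenIndex: 7, text: ' ', hidden: true },
    { tokenIndex: 8, text: 'WHERE', hidden: false },
    { tokenIndex: 9, text: ' ', hidden: true }, // caret token
];

// The parser never consumes hidden tokens, so the statement context stops at WHERE (index 8),
// while the caret token has index 9.
const stmtStartIndex = 0;
const stmtStopIndex = 8;
const caretTokenIndex = 9;

// Old judgment: the caret index lies past ctx.stop, so isContainCaret comes out false.
const oldIsContainCaret =
    stmtStartIndex <= caretTokenIndex && stmtStopIndex >= caretTokenIndex; // false

// New judgment: the hidden caret token is first mapped back to the previous non-hidden
// token (WHERE, index 8) before comparing against ctx.stop.
const mappedCaretIndex = tokens[8].tokenIndex; // what getPrevNonHiddenTokenIndex(9) yields here
const newIsContainCaret =
    stmtStartIndex <= caretTokenIndex && stmtStopIndex >= mappedCaretIndex; // true

console.log({ oldIsContainCaret, newIsContainCaret });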

File tree

23 files changed (+220 -32 lines changed)


src/parser/common/basicSQL.ts

Lines changed: 2 additions & 1 deletion
@@ -83,6 +83,7 @@ export abstract class BasicSQL<
      */
     protected abstract createEntityCollector(
         input: string,
+        allTokens?: Token[],
         caretTokenIndex?: number
     ): EntityCollector;
 
@@ -378,7 +379,7 @@ export abstract class BasicSQL<
             ? findCaretTokenIndex(caretPosition, allTokens)
             : void 0;
 
-        const collectListener = this.createEntityCollector(input, caretTokenIndex);
+        const collectListener = this.createEntityCollector(input, allTokens, caretTokenIndex);
         // const parser = this.createParserWithCache(input);
 
         // parser.entityCollecting = true;
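
A hedged usage sketch of the path this hunk sits on, assuming the package's exported FlinkSQL class, its getAllEntities(input, caretPosition) entry point, and the { lineNumber, column } caret shape used by the suggestion APIs; adjust names if the actual exports differ:

import { FlinkSQL } from 'dt-sql-parser';

const flink = new FlinkSQL();
const sql = 'SELECT id FROM tb WHERE '; // note the trailing whitespace

// Caret placed just after the trailing space (1-based column).
const entities = flink.getAllEntities(sql, { lineNumber: 1, column: sql.length + 1 });

// With allTokens now threaded into the entity collector, the statement owning these
// entities can be flagged as containing the caret even though the caret token itself
// is hidden whitespace.
console.log(entities);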

src/parser/common/entityCollector.ts

Lines changed: 22 additions & 3 deletions
@@ -1,4 +1,4 @@
-import { ParserRuleContext } from 'antlr4ng';
+import { ParserRuleContext, Token } from 'antlr4ng';
 import { EntityContextType } from './types';
 import { WordPosition, TextPosition } from './textAndWord';
 import { ctxToText, ctxToWord } from './textAndWord';
@@ -96,15 +96,17 @@ export function toEntityContext(
  * @todo: [may be need] Combine the entities in each clause.
  */
 export abstract class EntityCollector {
-    constructor(input: string, caretTokenIndex?: number) {
+    constructor(input: string, allTokens?: Token[], caretTokenIndex?: number) {
         this._input = input;
+        this._allTokens = allTokens || [];
         this._caretTokenIndex = caretTokenIndex ?? -1;
         this._entitiesSet = new Set();
         this._stmtStack = new SimpleStack();
         this._entityStack = new SimpleStack();
         this._rootStmt = null;
     }
     private readonly _input: string;
+    private readonly _allTokens: Token[];
     private readonly _caretTokenIndex: number;
     private readonly _entitiesSet: Set<EntityContext>;
     /** Staging statements that have already entered. */
@@ -136,14 +138,31 @@ export abstract class EntityCollector {
         this._rootStmt = null;
     }
 
+    /**
+     * antlr4 ignores hidden tokens. If whitespace is typed at the end of a statement, the whitespace token
+     * will not be used as the stop token, so we treat the whitespace token as part of the non-hidden token in front of it.
+     */
+    protected getPrevNonHiddenTokenIndex(caretTokenIndex: number) {
+        if (this._allTokens[caretTokenIndex].channel !== Token.HIDDEN_CHANNEL)
+            return caretTokenIndex;
+        for (let i = caretTokenIndex - 1; i >= 0; i--) {
+            const token = this._allTokens[i];
+            if (token.channel !== Token.HIDDEN_CHANNEL) {
+                // If the previous non-hidden token is ';', the current token does not belong to any statement.
+                return token.text === ';' ? Infinity : token.tokenIndex;
+            }
+        }
+        return Infinity;
+    }
+
     protected pushStmt(ctx: ParserRuleContext, type: StmtContextType) {
         let isContainCaret: boolean | undefined;
         if (this._caretTokenIndex >= 0) {
             isContainCaret =
                 !!ctx.start &&
                 !!ctx.stop &&
                 ctx.start.tokenIndex <= this._caretTokenIndex &&
-                ctx.stop.tokenIndex >= this._caretTokenIndex;
+                ctx.stop.tokenIndex >= this.getPrevNonHiddenTokenIndex(this._caretTokenIndex);
         }
         const stmtContext = toStmtContext(
             ctx,
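
To make the three branches of getPrevNonHiddenTokenIndex concrete, here is a standalone mirror over a mocked token stream (illustration only; the real method reads this._allTokens and antlr4ng's Token.HIDDEN_CHANNEL): a non-hidden caret token maps to itself, a whitespace caret maps to the non-hidden token before it, and whitespace after a ';' maps to Infinity so no statement's stop index can satisfy the >= comparison.

type TokenLike = { tokenIndex: number; text: string; hidden: boolean };

// Mocked stream for `SELECT 1; ` with the caret on the space after the semicolon.
const stream: TokenLike[] = [
    { tokenIndex: 0, text: 'SELECT', hidden: false },
    { tokenIndex: 1, text: ' ', hidden: true },
    { tokenIndex: 2, text: '1', hidden: false },
    { tokenIndex: 3, text: ';', hidden: false },
    { tokenIndex: 4, text: ' ', hidden: true },
];

function prevNonHidden(tokens: TokenLike[], caretIndex: number): number {
    if (!tokens[caretIndex].hidden) return caretIndex; // caret on a real token: keep it
    for (let i = caretIndex - 1; i >= 0; i--) {
        if (!tokens[i].hidden) {
            // Whitespace right after ';' belongs to no statement, so return Infinity:
            // ctx.stop.tokenIndex >= Infinity can never hold.
            return tokens[i].text === ';' ? Infinity : tokens[i].tokenIndex;
        }
    }
    return Infinity;
}

console.log(prevNonHidden(stream, 3)); // 3 (caret on ';' itself)
console.log(prevNonHidden(stream, 4)); // Infinity (whitespace after a finished statement)
console.log(prevNonHidden(stream, 1)); // 0 (whitespace after SELECT maps back to SELECT)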

src/parser/flink/index.ts

Lines changed: 2 additions & 2 deletions
@@ -37,8 +37,8 @@ export class FlinkSQL extends BasicSQL<FlinkSqlLexer, ProgramContext, FlinkSqlPa
         return new FlinkSqlSplitListener();
     }
 
-    protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return new FlinkEntityCollector(input, caretTokenIndex);
+    protected createEntityCollector(input: string, allTokens?: Token[], caretTokenIndex?: number) {
+        return new FlinkEntityCollector(input, allTokens, caretTokenIndex);
     }
 
     protected processCandidates(

src/parser/hive/index.ts

Lines changed: 2 additions & 2 deletions
@@ -38,8 +38,8 @@ export class HiveSQL extends BasicSQL<HiveSqlLexer, ProgramContext, HiveSqlParse
         return new HiveSqlSplitListener();
     }
 
-    protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return new HiveEntityCollector(input, caretTokenIndex);
+    protected createEntityCollector(input: string, allTokens?: Token[], caretTokenIndex?: number) {
+        return new HiveEntityCollector(input, allTokens, caretTokenIndex);
     }
 
     protected processCandidates(

src/parser/impala/index.ts

Lines changed: 2 additions & 2 deletions
@@ -36,8 +36,8 @@ export class ImpalaSQL extends BasicSQL<ImpalaSqlLexer, ProgramContext, ImpalaSq
         return new ImpalaSqlSplitListener();
     }
 
-    protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return new ImpalaEntityCollector(input, caretTokenIndex);
+    protected createEntityCollector(input: string, allTokens?: Token[], caretTokenIndex?: number) {
+        return new ImpalaEntityCollector(input, allTokens, caretTokenIndex);
     }
 
     protected processCandidates(

src/parser/mysql/index.ts

Lines changed: 2 additions & 2 deletions
@@ -36,8 +36,8 @@ export class MySQL extends BasicSQL<MySqlLexer, ProgramContext, MySqlParser> {
         return new MysqlSplitListener();
     }
 
-    protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return new MySqlEntityCollector(input, caretTokenIndex);
+    protected createEntityCollector(input: string, allTokens?: Token[], caretTokenIndex?: number) {
+        return new MySqlEntityCollector(input, allTokens, caretTokenIndex);
     }
 
     protected processCandidates(

src/parser/postgresql/index.ts

Lines changed: 2 additions & 2 deletions
@@ -41,8 +41,8 @@ export class PostgreSQL extends BasicSQL<PostgreSqlLexer, ProgramContext, Postgr
         return new PostgreSqlSplitListener();
     }
 
-    protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return new PostgreSqlEntityCollector(input, caretTokenIndex);
+    protected createEntityCollector(input: string, allTokens?: Token[], caretTokenIndex?: number) {
+        return new PostgreSqlEntityCollector(input, allTokens, caretTokenIndex);
     }
 
     protected processCandidates(

src/parser/spark/index.ts

Lines changed: 2 additions & 2 deletions
@@ -36,8 +36,8 @@ export class SparkSQL extends BasicSQL<SparkSqlLexer, ProgramContext, SparkSqlPa
         return new SparkSqlSplitListener();
     }
 
-    protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return new SparkEntityCollector(input, caretTokenIndex);
+    protected createEntityCollector(input: string, allTokens?: Token[], caretTokenIndex?: number) {
+        return new SparkEntityCollector(input, allTokens, caretTokenIndex);
    }
 
     protected processCandidates(

src/parser/trino/index.ts

Lines changed: 2 additions & 2 deletions
@@ -23,8 +23,8 @@ export class TrinoSQL extends BasicSQL<TrinoSqlLexer, ProgramContext, TrinoSqlPa
         return new TrinoSqlSplitListener();
     }
 
-    protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return new TrinoEntityCollector(input, caretTokenIndex);
+    protected createEntityCollector(input: string, allTokens?: Token[], caretTokenIndex?: number) {
+        return new TrinoEntityCollector(input, allTokens, caretTokenIndex);
     }
 
     protected preferredRules: Set<number> = new Set([

test/parser/flink/suggestion/fixtures/suggestionWithEntity.sql

Lines changed: 5 additions & 1 deletion
@@ -8,4 +8,8 @@ INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, sta
 
 CREATE TABLE IF NOT EXISTS derived_table WITH ('connector' = 'kafka') AS SELECT FROM origin_table;
 
-CREATE TABLE IF NOT EXISTS derived_table WITH ('connector' = 'kafka') AS SELECT id, FROM origin_table;
+CREATE TABLE IF NOT EXISTS derived_table WITH ('connector' = 'kafka') AS SELECT id, FROM origin_table;
+
+SELECT id FROM tb WHERE
+
+SELECT id FROM tb GROUP BY ;
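
The two new fixture statements exercise exactly the whitespace-at-caret case. A hedged sketch of how they might be driven, assuming the documented getSuggestionAtCaretPosition(input, caretPosition) API and a { lineNumber, column } caret; the real tests likely load this fixture through a shared helper, and the column values here are illustrative:

import { readFileSync } from 'node:fs';
import { FlinkSQL } from 'dt-sql-parser';

const sql = readFileSync(
    'test/parser/flink/suggestion/fixtures/suggestionWithEntity.sql',
    'utf-8'
);
const flink = new FlinkSQL();

// Caret on the trailing whitespace after WHERE (fixture line 13) and on the space
// before ';' after GROUP BY (fixture line 15).
console.log(flink.getSuggestionAtCaretPosition(sql, { lineNumber: 13, column: 25 }));
console.log(flink.getSuggestionAtCaretPosition(sql, { lineNumber: 15, column: 28 }));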
