Skip to content

Commit 5a736df

Browse files
committed
feat: DTStack#410 optimize processCandidates tokenIndexOffset
1 parent 8dd0e2d commit 5a736df

File tree

8 files changed

+18
-56
lines changed

8 files changed

+18
-56
lines changed

src/parser/common/basicSQL.ts

+4-7
Original file line number | Diff line number | Diff line change
@@ -70,15 +70,13 @@ export abstract class BasicSQL<
7070
/**
7171
* Convert candidates to suggestions
7272
* @param candidates candidate list
73-
* @param allTokens all tokens from input
73+
* @param allTokens all tokens from input, sliced starting at tokenIndexOffset
7474
* @param caretTokenIndex tokenIndex of caretPosition
75-
* @param tokenIndexOffset offset of the tokenIndex in the candidates compared to the tokenIndex in allTokens
7675
*/
7776
protected abstract processCandidates(
7877
candidates: CandidatesCollection,
7978
allTokens: Token[],
80-
caretTokenIndex: number,
81-
tokenIndexOffset: number
79+
caretTokenIndex: number
8280
): Suggestions<Token>;
8381

8482
/**
@@ -420,9 +418,8 @@ export abstract class BasicSQL<
420418
const candidates = core.collectCandidates(newTokenIndex, c3Context);
421419
const originalSuggestions = this.processCandidates(
422420
candidates,
423-
allTokens,
424-
newTokenIndex,
425-
tokenIndexOffset
421+
allTokens.slice(tokenIndexOffset),
422+
newTokenIndex
426423
);
427424

428425
const syntaxSuggestions: SyntaxSuggestion<WordRange>[] = originalSuggestions.syntax.map(

src/parser/flink/index.ts

+2-7
Original file line number | Diff line number | Diff line change
@@ -67,19 +67,14 @@ export class FlinkSQL extends BasicSQL<FlinkSqlLexer, ProgramContext, FlinkSqlPa
6767
protected processCandidates(
6868
candidates: CandidatesCollection,
6969
allTokens: Token[],
70-
caretTokenIndex: number,
71-
tokenIndexOffset: number
70+
caretTokenIndex: number
7271
): Suggestions<Token> {
7372
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
7473
const keywords: string[] = [];
7574

7675
for (let candidate of candidates.rules) {
7776
const [ruleType, candidateRule] = candidate;
78-
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
79-
const tokenRanges = allTokens.slice(
80-
startTokenIndex,
81-
caretTokenIndex + tokenIndexOffset + 1
82-
);
77+
const tokenRanges = allTokens.slice(candidateRule.startTokenIndex, caretTokenIndex + 1);
8378

8479
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
8580
switch (ruleType) {

src/parser/hive/index.ts

+2-7
Original file line number | Diff line number | Diff line change
@@ -68,18 +68,13 @@ export class HiveSQL extends BasicSQL<HiveSqlLexer, ProgramContext, HiveSqlParse
6868
protected processCandidates(
6969
candidates: CandidatesCollection,
7070
allTokens: Token[],
71-
caretTokenIndex: number,
72-
tokenIndexOffset: number
71+
caretTokenIndex: number
7372
): Suggestions<Token> {
7473
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
7574
const keywords: string[] = [];
7675
for (let candidate of candidates.rules) {
7776
const [ruleType, candidateRule] = candidate;
78-
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
79-
const tokenRanges = allTokens.slice(
80-
startTokenIndex,
81-
caretTokenIndex + tokenIndexOffset + 1
82-
);
77+
const tokenRanges = allTokens.slice(candidateRule.startTokenIndex, caretTokenIndex + 1);
8378

8479
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
8580
switch (ruleType) {

src/parser/impala/index.ts

+2-7
Original file line number | Diff line number | Diff line change
@@ -66,18 +66,13 @@ export class ImpalaSQL extends BasicSQL<ImpalaSqlLexer, ProgramContext, ImpalaSq
6666
protected processCandidates(
6767
candidates: CandidatesCollection,
6868
allTokens: Token[],
69-
caretTokenIndex: number,
70-
tokenIndexOffset: number
69+
caretTokenIndex: number
7170
): Suggestions<Token> {
7271
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
7372
const keywords: string[] = [];
7473
for (let candidate of candidates.rules) {
7574
const [ruleType, candidateRule] = candidate;
76-
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
77-
const tokenRanges = allTokens.slice(
78-
startTokenIndex,
79-
caretTokenIndex + tokenIndexOffset + 1
80-
);
75+
const tokenRanges = allTokens.slice(candidateRule.startTokenIndex, caretTokenIndex + 1);
8176

8277
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
8378
switch (ruleType) {

src/parser/mysql/index.ts

+2-7
Original file line number | Diff line number | Diff line change
@@ -66,19 +66,14 @@ export class MySQL extends BasicSQL<MySqlLexer, ProgramContext, MySqlParser> {
6666
protected processCandidates(
6767
candidates: CandidatesCollection,
6868
allTokens: Token[],
69-
caretTokenIndex: number,
70-
tokenIndexOffset: number
69+
caretTokenIndex: number
7170
): Suggestions<Token> {
7271
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
7372
const keywords: string[] = [];
7473

7574
for (const candidate of candidates.rules) {
7675
const [ruleType, candidateRule] = candidate;
77-
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
78-
const tokenRanges = allTokens.slice(
79-
startTokenIndex,
80-
caretTokenIndex + tokenIndexOffset + 1
81-
);
76+
const tokenRanges = allTokens.slice(candidateRule.startTokenIndex, caretTokenIndex + 1);
8277

8378
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
8479
switch (ruleType) {

src/parser/postgresql/index.ts

+2-7
Original file line number | Diff line number | Diff line change
@@ -71,18 +71,13 @@ export class PostgreSQL extends BasicSQL<PostgreSqlLexer, ProgramContext, Postgr
7171
protected processCandidates(
7272
candidates: CandidatesCollection,
7373
allTokens: Token[],
74-
caretTokenIndex: number,
75-
tokenIndexOffset: number
74+
caretTokenIndex: number
7675
): Suggestions<Token> {
7776
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
7877
const keywords: string[] = [];
7978
for (let candidate of candidates.rules) {
8079
const [ruleType, candidateRule] = candidate;
81-
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
82-
const tokenRanges = allTokens.slice(
83-
startTokenIndex,
84-
caretTokenIndex + tokenIndexOffset + 1
85-
);
80+
const tokenRanges = allTokens.slice(candidateRule.startTokenIndex, caretTokenIndex + 1);
8681

8782
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
8883
switch (ruleType) {

src/parser/spark/index.ts

+2-7
Original file line number | Diff line number | Diff line change
@@ -66,19 +66,14 @@ export class SparkSQL extends BasicSQL<SparkSqlLexer, ProgramContext, SparkSqlPa
6666
protected processCandidates(
6767
candidates: CandidatesCollection,
6868
allTokens: Token[],
69-
caretTokenIndex: number,
70-
tokenIndexOffset: number
69+
caretTokenIndex: number
7170
): Suggestions<Token> {
7271
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
7372
const keywords: string[] = [];
7473

7574
for (const candidate of candidates.rules) {
7675
const [ruleType, candidateRule] = candidate;
77-
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
78-
const tokenRanges = allTokens.slice(
79-
startTokenIndex,
80-
caretTokenIndex + tokenIndexOffset + 1
81-
);
76+
const tokenRanges = allTokens.slice(candidateRule.startTokenIndex, caretTokenIndex + 1);
8277

8378
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
8479
switch (ruleType) {

src/parser/trino/index.ts

+2-7
Original file line number | Diff line number | Diff line change
@@ -68,19 +68,14 @@ export class TrinoSQL extends BasicSQL<TrinoSqlLexer, ProgramContext, TrinoSqlPa
6868
protected processCandidates(
6969
candidates: CandidatesCollection,
7070
allTokens: Token[],
71-
caretTokenIndex: number,
72-
tokenIndexOffset: number
71+
caretTokenIndex: number
7372
): Suggestions<Token> {
7473
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
7574
const keywords: string[] = [];
7675

7776
for (let candidate of candidates.rules) {
7877
const [ruleType, candidateRule] = candidate;
79-
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
80-
const tokenRanges = allTokens.slice(
81-
startTokenIndex,
82-
caretTokenIndex + tokenIndexOffset + 1
83-
);
78+
const tokenRanges = allTokens.slice(candidateRule.startTokenIndex, caretTokenIndex + 1);
8479

8580
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
8681
switch (ruleType) {

0 commit comments

Comments
 (0)