Skip to content

Commit cce069d

Browse files
authored
Upgrade to sqldelight 2.1.0 and sql-psi 0.5.2 (#122)
Unfortunately, sql-psi 0.4.9 and sqldelight 2.0.2 have some regressions. sqldelight 2.1.0 and sql-psi 0.5.2 supposedly fix them. Some heavy rebasing of our forked BNF grammar was required in this process, but we have pretty solid test suites (I also added more test cases), so I'm relatively confident. This also marks our 0.6 release.
1 parent 7302555 commit cce069d

File tree

8 files changed

+105
-17
lines changed

8 files changed

+105
-17
lines changed

cockroachdb-dialect/build.gradle.kts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,6 @@ dependencies {
2222
}
2323

2424
testImplementation(libs.sqldelight.compiler.env)
25-
// Remove with next sql-psi release https://github.com/AlecKazakova/sql-psi/pull/619
26-
testImplementation(libs.sql.psi.enviroment)
2725
testFixturesApi(testFixtures(libs.sql.psi))
2826
}
2927

cockroachdb-dialect/src/main/kotlin/com/faire/sqldelight/dialects/cockroachdb/grammar/CockroachDB.bnf

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
{
2-
// Specify the parent parser.
2+
// Specify the parent parser; our grammar hence must base on PostgreSql.bnf from sqldelight.
33
overrides="app.cash.sqldelight.dialects.postgresql.grammar.PostgreSqlParser"
44
elementTypeClass = "com.alecstrong.sql.psi.core.SqlElementType"
55

@@ -104,7 +104,8 @@ overrides ::= table_constraint
104104
index_using_hash ::= USING 'HASH' [ WITH LP 'bucket_count' EQ ansi_signed_number RP ]
105105

106106
table_constraint ::= table_family_constraint | [ CONSTRAINT ansi_identifier ] (
107-
( PRIMARY KEY | [UNIQUE] INDEX | UNIQUE ) [ansi_index_name] LP ansi_indexed_column [ LP ansi_signed_number RP ] ( COMMA ansi_indexed_column [ LP ansi_signed_number RP ] ) * RP [ index_using_hash ] [('STORING' | 'COVERING' | 'INCLUDE') LP ansi_column_name ( COMMA ansi_column_name ) * RP ] [ WHERE <<expr '-1'>> ] ansi_conflict_clause [comment_type] |
107+
( PRIMARY KEY | [UNIQUE] INDEX | UNIQUE ) [ansi_index_name] LP ansi_indexed_column [ LP ansi_signed_number RP ] ( COMMA ansi_indexed_column [ LP ansi_signed_number RP ] ) * RP [ index_using_hash ] [('STORING' | 'COVERING' | 'INCLUDE') LP ansi_column_name ( COMMA ansi_column_name ) * RP ] [ WHERE <<expr '-1'>> ] [ansi_conflict_clause] [comment_type] |
108+
'EXCLUDE' USING {index_method} LP <<expr '-1'>> WITH {constraint_exclude_operators} ( COMMA <<expr '-1'>> WITH {constraint_exclude_operators} ) * RP [ WHERE LP <<expr '-1'>> RP ] |
108109
ansi_check_constraint |
109110
FOREIGN KEY LP ansi_column_name ( COMMA ansi_column_name ) * RP ansi_foreign_key_clause
110111
) {
@@ -116,9 +117,9 @@ table_constraint ::= table_family_constraint | [ CONSTRAINT ansi_identifier ] (
116117
table_family_constraint ::= 'FAMILY' [ ansi_identifier ] LP ansi_column_name ( COMMA ansi_column_name ) * RP
117118

118119
column_constraint ::= alter_table_add_column_family_constraint | [ CONSTRAINT ansi_identifier ] (
119-
PRIMARY KEY [ index_using_hash ] [ ASC | DESC ] ansi_conflict_clause |
120-
[ NOT ] NULL ansi_conflict_clause |
121-
UNIQUE ansi_conflict_clause |
120+
PRIMARY KEY [ index_using_hash ] [ ASC | DESC ] [ansi_conflict_clause] |
121+
[ NOT ] NULL [ansi_conflict_clause] |
122+
UNIQUE [ansi_conflict_clause] |
122123
ansi_check_constraint |
123124
generated_clause |
124125
{default_constraint} |
@@ -169,8 +170,10 @@ blob_data_type ::= 'BYTEA' | 'BLOB' | 'BYTES' {
169170
override = true
170171
}
171172

172-
create_index_stmt ::= CREATE [ UNIQUE ] INDEX [ 'CONCURRENTLY' ] [ IF NOT EXISTS ] [ [ ansi_database_name DOT ] ansi_index_name ] ON ansi_table_name
173-
( USING 'GIN' LP ansi_indexed_column [ {gin_operator_class_stmt} ] ( COMMA ansi_indexed_column [ {gin_operator_class_stmt} ] ) * RP | LP ansi_indexed_column ( COMMA ansi_indexed_column ) * RP [ index_using_hash ] [('STORING' | 'COVERING' | 'INCLUDE') LP ansi_indexed_column ( COMMA ansi_indexed_column ) * RP ] [ WHERE <<expr '-1'>> ] ) {
173+
create_index_stmt ::= CREATE [ UNIQUE ] INDEX [ 'CONCURRENTLY' ] [ IF NOT EXISTS ] [ [ ansi_database_name DOT ] ansi_index_name ] ON ansi_table_name (
174+
USING {index_method} LP ansi_indexed_column [ {operator_class_stmt} ] ( COMMA ansi_indexed_column [ {operator_class_stmt} ] ) * RP [ {with_storage_parameter} ] |
175+
LP ansi_indexed_column [ {operator_class_stmt} ] ( COMMA ansi_indexed_column [ {operator_class_stmt} ] ) * RP [ index_using_hash ] [ ('STORING' | 'COVERING' | 'INCLUDE') LP ansi_indexed_column ( COMMA ansi_indexed_column ) * RP ] [ WHERE <<expr '-1'>> ]
176+
) {
174177
extends = "com.faire.sqldelight.dialects.cockroachdb.grammar.mixins.CreateIndexMixin"
175178
implements = "com.alecstrong.sql.psi.core.psi.SqlCreateIndexStmt"
176179
pin = 5
@@ -208,6 +211,7 @@ alter_table_rules ::= (
208211
ansi_alter_table_add_column
209212
| ansi_alter_table_rename_table
210213
| {alter_table_rename_column}
214+
| {alter_table_drop_constraint}
211215
| {alter_table_drop_column}
212216
| {alter_table_add_constraint}
213217
| alter_table_alter_primary_key

cockroachdb-dialect/src/test/kotlin/com/faire/sqldelight/dialects/cockroachdb/CockroachDBFixturesTest.kt

Lines changed: 14 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ import java.io.File
2424
@RunWith(Parameterized::class)
2525
class CockroachDBFixturesTest(name: String, fixtureRoot: File) : FixturesTest(name, fixtureRoot) {
2626
override val replaceRules = arrayOf(
27+
// TODO: document why
2728
"INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT" to "SERIAL NOT NULL PRIMARY KEY",
2829
"AUTOINCREMENT" to "",
2930
"?1" to "?",
@@ -48,6 +49,15 @@ class CockroachDBFixturesTest(name: String, fixtureRoot: File) : FixturesTest(na
4849
"create-if-not-exists",
4950
// Excluded since we're not validating indices when creating them.
5051
"create-index-collision",
52+
// Excluded since our error message is different;
53+
// we've copied the test case, but without the failure case, into `multiple-column-where-ansi`.
54+
"multiple-column-where",
55+
)
56+
57+
private val excludedPgSqlFixtures = listOf(
58+
// Excluded since we're not validating indices when creating them;
59+
// we've copied the test case, but without error assertions, into `create-index-pgsql`.
60+
"create-index",
5161
)
5262

5363
// Used by Parameterized JUnit runner reflectively.
@@ -57,9 +67,10 @@ class CockroachDBFixturesTest(name: String, fixtureRoot: File) : FixturesTest(na
5767
val extraAnsiFixtures = ansiFixtures
5868
.filter { (it[0] as String) !in excludedAnsiFixtures }
5969

60-
return CockroachDBTestFixtures.fixtures +
61-
PostgresqlTestFixtures.fixtures +
62-
extraAnsiFixtures
70+
val extraPgSqlFixtures = PostgresqlTestFixtures.fixtures
71+
.filter { (it[0] as String) !in excludedPgSqlFixtures }
72+
73+
return CockroachDBTestFixtures.fixtures + extraPgSqlFixtures + extraAnsiFixtures
6374
}
6475
}
6576
}
Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
CREATE TABLE abg (
2+
id INTEGER PRIMARY KEY,
3+
alpha TEXT,
4+
beta TEXT,
5+
gamma TEXT
6+
);
7+
8+
CREATE INDEX CONCURRENTLY beta_gamma_idx ON abg (beta, gamma);
9+
10+
CREATE INDEX gamma_index_name ON abg (gamma) WHERE beta = 'some_value';
11+
12+
CREATE INDEX alpha_index_name ON abg USING BTREE (alpha) WITH (fillfactor = 70, deduplicate_items = on);
13+
14+
CREATE INDEX beta_gamma_index_name ON abg USING HASH (beta) WITH (fillfactor = 20);
15+
CREATE INDEX alpha_index_name_err ON abg USING BTREE (alpha) WITH (deduplicate_items = yes);
16+
CREATE INDEX beta_gamma_index_name_err ON abg USING HASH (beta) WITH (fillfactor = 1);
17+
CREATE INDEX beta_gamma_index_name_err_param ON abg USING HASH (beta) WITH (autosummarize = off);
18+
19+
CREATE TABLE json_gin(
20+
alpha JSONB,
21+
beta JSONB
22+
);
23+
24+
CREATE TABLE json_gist(
25+
alpha JSONB,
26+
beta JSONB
27+
);
28+
29+
CREATE TABLE text_search(
30+
alpha TSVECTOR,
31+
beta TEXT
32+
);
33+
34+
CREATE INDEX gin_alpha_1 ON json_gin USING GIN (alpha);
35+
CREATE INDEX gin_alpha_beta_2 ON json_gin USING GIN (alpha, beta);
36+
CREATE INDEX gin_alpha_beta_3 ON json_gin USING GIN (alpha jsonb_ops, beta);
37+
CREATE INDEX gin_alpha_beta_4 ON json_gin USING GIN (alpha, beta jsonb_path_ops) WITH (fastupdate = off);
38+
CREATE INDEX gin_alpha_beta_5 ON json_gin USING GIN (alpha jsonb_path_ops, beta jsonb_ops) WITH (gin_pending_list_limit = 2048);
39+
40+
CREATE INDEX gist_alpha_1 ON text_search USING GIST (alpha) WITH (fillfactor = 75);
41+
CREATE INDEX gist_alpha_2 ON text_search USING GIST (alpha) WITH (buffering = on);
42+
43+
CREATE INDEX tsv_gist_alpha_1 ON text_search USING GIST (alpha);
44+
CREATE INDEX tsv_gin_alpha_1 ON text_search USING GIN (alpha);
45+
CREATE INDEX trgm_gist_beta_1 ON text_search USING GIST (beta gist_trgm_ops(siglen=32));
46+
CREATE INDEX trgm_gist_beta_2 ON text_search USING GIN (beta gin_trgm_ops);
47+
48+
CREATE INDEX beta_index ON text_search (beta varchar_pattern_ops);
49+
50+
CREATE INDEX ts_brin_beta_1 ON text_search USING BRIN (beta) WITH (autosummarize = on, pages_per_range = 6);
51+
52+
CREATE INDEX gin_alpha_beta_error_1 ON json_gin USING GIN (alpha jsonb_path_ops, beta jsonb_ops) WITH (gin_pending_list_limit = 1);
53+
CREATE INDEX gin_alpha_beta_error_2 ON json_gin USING GIN (alpha, beta jsonb_path_ops) WITH (fastupdate = yes);
54+
CREATE INDEX ts_brin_beta_error_1 ON text_search USING BRIN (beta) WITH (pages_per_range = 0);
55+
CREATE INDEX ts_brin_beta_error_2 ON text_search USING BRIN (beta) WITH (autosummarize=no);
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
CREATE TABLE drop_unique_index(
2+
id INT NOT NULL,
3+
uid INT NOT NULL,
4+
PRIMARY KEY(id),
5+
UNIQUE INDEX idx_uid (uid)
6+
);
7+
8+
DROP INDEX drop_unique_index@idx_uid CASCADE;
9+
DROP INDEX IF EXISTS drop_unique_index@idx_uid CASCADE;
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
CREATE TABLE posts (
2+
id TEXT NOT NULL PRIMARY KEY,
3+
text TEXT,
4+
created_at INTEGER NOT NULL
5+
);
6+
7+
-- works fine.
8+
SELECT *
9+
FROM posts
10+
WHERE (id, created_at) <= (?, ?)
11+
ORDER BY created_at DESC
12+
LIMIT 4;

gradle.properties

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@ org.gradle.parallel=true
22

33
GROUP=com.faire
44
POM_ARTIFACT_ID=sqldelight-cockroachdb-dialect
5-
VERSION_NAME=0.5.0
5+
VERSION_NAME=0.6.0

gradle/libs.versions.toml

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,16 +2,15 @@
22
idea = "231.9392.1"
33
kotlin = "2.0.10"
44
spotless = "7.0.3"
5-
sql-psi = "0.4.9"
6-
sqldelight = "2.0.2"
5+
sql-psi = "0.5.2"
6+
sqldelight = "2.1.0"
77

88
[libraries]
99
assertj-core = { module = "org.assertj:assertj-core", version = "3.27.3" }
1010
intellij-analysis = { module = "com.jetbrains.intellij.platform:analysis-impl", version.ref = "idea" }
1111
postgres-jdbc-driver = { module = "org.postgresql:postgresql", version = "42.7.5" }
1212
slf4j-simple = { module = "org.slf4j:slf4j-simple", version = "2.0.17" }
13-
sql-psi = { module = "com.alecstrong.sql.psi:core", version.ref = "sql-psi" }
14-
sql-psi-enviroment = { module = "com.alecstrong.sql.psi:environment", version.ref = "sql-psi" }
13+
sql-psi = { module = "app.cash.sql-psi:core", version.ref = "sql-psi" }
1514
sqldelight-compiler-env = { module = "app.cash.sqldelight:compiler-env", version.ref = "sqldelight" }
1615
sqldelight-jdbc-driver = { module = "app.cash.sqldelight:jdbc-driver", version.ref = "sqldelight" }
1716
sqldelight-postgresql-dialect = { module = "app.cash.sqldelight:postgresql-dialect", version.ref = "sqldelight" }

0 commit comments

Comments
 (0)