Skip to content

Commit 83c68ae

Browse files
authored
chore: do not drop database first for bench (#15041)
1 parent 4a55ea4 commit 83c68ae

File tree

6 files changed

+13
-124
lines changed

6 files changed

+13
-124
lines changed

.github/workflows/reuse.benchmark.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -168,7 +168,7 @@ jobs:
168168
env:
169169
BENDSQL_DSN: "databend://${{ secrets.BENCHMARK_CLOUD_USER }}:${{ secrets.BENCHMARK_CLOUD_PASSWORD }}@${{ secrets.BENCHMARK_CLOUD_GATEWAY }}:443/?warehouse=default"
170170
run: |
171-
echo "DROP DATABASE IF EXISTS 'load_test_${{ inputs.run_id }}';" | bendsql
171+
echo "DROP DATABASE IF EXISTS load_test_${{ inputs.run_id }};" | bendsql
172172
echo "DROP WAREHOUSE IF EXISTS 'benchmark-${{ inputs.run_id }}';" | bendsql
173173
174174
comment:

benchmark/clickbench/benchmark_cloud.sh

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,6 @@ export BENDSQL_DSN="databend://${CLOUD_USER}:${CLOUD_PASSWORD}@${CLOUD_GATEWAY}:
7575

7676
if [[ "${BENCHMARK_DATASET}" == "load" ]]; then
7777
echo "Creating database..."
78-
echo "DROP DATABASE IF EXISTS ${BENCHMARK_DATABASE};" | bendsql --database default
7978
echo "CREATE DATABASE ${BENCHMARK_DATABASE};" | bendsql --database default
8079
fi
8180

benchmark/clickbench/load/queries/00.sql

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
DROP TABLE IF EXISTS hits_csv;
2-
CREATE TABLE hits_csv (
1+
DROP TABLE IF EXISTS hits_parquet;
2+
CREATE TABLE hits_parquet (
33
WatchID BIGINT NOT NULL,
44
JavaEnable SMALLINT NOT NULL,
55
Title TEXT,
@@ -106,5 +106,5 @@ CREATE TABLE hits_csv (
106106
URLHash BIGINT NOT NULL,
107107
CLID INTEGER NOT NULL
108108
) CLUSTER BY (CounterID, EventDate, UserID, EventTime, WatchID);
109-
COPY INTO hits_csv
110-
FROM 's3://databend-datasets/hits_compatible/hits.csv.gz' CONNECTION = (CONNECTION_NAME = 'repo') FILE_FORMAT = (TYPE = 'CSV', COMPRESSION = AUTO);
109+
COPY INTO hits_parquet
110+
FROM 's3://databend-datasets/hits_compatible/hits.parquet' CONNECTION = (CONNECTION_NAME = 'repo') FILE_FORMAT = (TYPE = 'PARQUET');

benchmark/clickbench/load/queries/01.sql

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
DROP TABLE IF EXISTS hits_tsv;
2-
CREATE TABLE hits_tsv (
1+
DROP TABLE IF EXISTS hits_csv;
2+
CREATE TABLE hits_csv (
33
WatchID BIGINT NOT NULL,
44
JavaEnable SMALLINT NOT NULL,
55
Title TEXT,
@@ -106,5 +106,5 @@ CREATE TABLE hits_tsv (
106106
URLHash BIGINT NOT NULL,
107107
CLID INTEGER NOT NULL
108108
) CLUSTER BY (CounterID, EventDate, UserID, EventTime, WatchID);
109-
COPY INTO hits_tsv
110-
FROM 's3://databend-datasets/hits_compatible/hits.tsv.gz' CONNECTION = (CONNECTION_NAME = 'repo') FILE_FORMAT = (TYPE = 'TSV', COMPRESSION = AUTO);
109+
COPY INTO hits_csv
110+
FROM 's3://databend-datasets/hits_compatible/hits.csv.gz' CONNECTION = (CONNECTION_NAME = 'repo') FILE_FORMAT = (TYPE = 'CSV', COMPRESSION = AUTO);

benchmark/clickbench/load/queries/02.sql

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
DROP TABLE IF EXISTS hits_json;
2-
CREATE TABLE hits_json (
1+
DROP TABLE IF EXISTS hits_tsv;
2+
CREATE TABLE hits_tsv (
33
WatchID BIGINT NOT NULL,
44
JavaEnable SMALLINT NOT NULL,
55
Title TEXT,
@@ -106,5 +106,5 @@ CREATE TABLE hits_json (
106106
URLHash BIGINT NOT NULL,
107107
CLID INTEGER NOT NULL
108108
) CLUSTER BY (CounterID, EventDate, UserID, EventTime, WatchID);
109-
COPY INTO hits_json
110-
FROM 's3://databend-datasets/hits_compatible/hits.json.gz' CONNECTION = (CONNECTION_NAME = 'repo') FILE_FORMAT = (TYPE = 'NDJSON', COMPRESSION = AUTO);
109+
COPY INTO hits_tsv
110+
FROM 's3://databend-datasets/hits_compatible/hits.tsv.gz' CONNECTION = (CONNECTION_NAME = 'repo') FILE_FORMAT = (TYPE = 'TSV', COMPRESSION = AUTO);

benchmark/clickbench/load/queries/03.sql

Lines changed: 0 additions & 110 deletions
This file was deleted.

0 commit comments

Comments (0)