fix: should not modify client query_id. #18322

Merged · 1 commit · Jul 7, 2025

@@ -28,10 +28,7 @@ use crate::servers::flight::v1::packets::QueryEnv;
 
 pub static INIT_QUERY_ENV: &str = "/actions/init_query_env";
 
-pub async fn init_query_env(mut env: QueryEnv) -> Result<()> {
-    // Update query id to make sure they are compatible.
-    env.query_id = env.query_id.replace('-', "");
-
+pub async fn init_query_env(env: QueryEnv) -> Result<()> {
     let mut tracking_workload_group = None;
     let mut parent_mem_stat = ParentMemStat::StaticRef(&GLOBAL_MEM_STAT);
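The lines deleted above normalized every incoming QueryEnv id by stripping dashes "for compatibility", so a client that sent a hyphenated UUID saw a different id come back. A minimal sketch of the two renderings the old code conflated, using the uuid crate (illustration only, not part of this diff):

    use uuid::Uuid;

    fn main() {
        let id = Uuid::new_v4();
        // Hyphenated form, 36 chars, e.g. "67e55044-10b1-426f-9247-bb680e5fe0c8"
        println!("{}", id);
        // Simple form, 32 chars, e.g. "67e5504410b1426f9247bb680e5fe0c8"
        println!("{}", id.simple());
        // The removed line folded the first form into the second:
        assert_eq!(id.to_string().replace('-', ""), id.simple().to_string());
    }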

@@ -21,7 +21,6 @@ use crate::servers::flight::v1::exchange::DataExchangeManager;
 pub static START_PREPARED_QUERY: &str = "/actions/start_prepared_query";
 
 pub async fn start_prepared_query(id: String) -> Result<()> {
-    let id = id.replace('-', "");
     let ctx = DataExchangeManager::instance().get_query_ctx(&id)?;
 
     let mut tracking_payload = ThreadTracker::new_tracking_payload();

2 changes: 1 addition & 1 deletion src/query/service/src/servers/http/middleware/session.rs
@@ -683,7 +683,7 @@ impl<E: Endpoint> Endpoint for HTTPSessionEndpoint<E> {
         let query_id = req
             .headers()
             .get(HEADER_QUERY_ID)
-            .map(|id| id.to_str().unwrap().replace('-', ""))
+            .map(|id| id.to_str().unwrap().to_string())
             .unwrap_or_else(|| Uuid::now_v7().simple().to_string());
 
         let mut login_history = LoginHistory::new();
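With this change the HTTP session middleware keeps a client-supplied x-databend-query-id verbatim and only generates an id when the header is absent. A sketch of the resulting behavior, with the header lookup flattened to an Option<&str> (effective_query_id is a hypothetical helper, not the actual middleware API):

    use uuid::Uuid;

    fn effective_query_id(header: Option<&str>) -> String {
        match header {
            // A client-supplied id now passes through untouched, dashes and all.
            Some(id) => id.to_string(),
            // Server-generated ids keep the 32-char dash-free "simple" form.
            None => Uuid::now_v7().simple().to_string(),
        }
    }

    fn main() {
        assert_eq!(effective_query_id(Some("test-query-id")), "test-query-id");
        assert_eq!(effective_query_id(None).len(), 32);
    }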

2 changes: 1 addition & 1 deletion src/query/service/src/sessions/query_ctx.rs
@@ -851,7 +851,7 @@ impl TableContext for QueryContext {
     }
 
     fn get_id(&self) -> String {
-        self.shared.init_query_id.as_ref().read().replace('-', "")
+        self.shared.init_query_id.as_ref().read().clone()
     }
 
     fn get_current_catalog(&self) -> String {

@@ -460,7 +460,6 @@ async fn test_client_query_id() -> Result<()> {
     Ok(())
 }
 
-// `-` in query id will be trimmed.
 #[tokio::test(flavor = "current_thread")]
 async fn test_client_compatible_query_id() -> Result<()> {
     let _fixture = TestFixture::setup().await?;
@@ -473,7 +472,7 @@ async fn test_client_compatible_query_id() -> Result<()> {
     let (status, result) =
         post_sql_to_endpoint_new_session(&ep, sql, wait_time_secs, headers).await?;
     assert_eq!(status, StatusCode::OK);
-    assert_eq!(result.id, "testqueryid");
+    assert_eq!(result.id, "test-query-id");
 
     Ok(())
 }

2 changes: 1 addition & 1 deletion tests/sqllogictests/suites/stage/ordered_unload.test
@@ -25,7 +25,7 @@ SELECT COUNT(*) FROM (SELECT $1 AS a, rank() OVER (ORDER BY metadata$filename, m
 
 # data_af2ab6dc872546e5a6013dad9c512769_0000_00000770.csv
 query
-SELECT * from list_stage(location => '@s1') where substr(name, 39, 4) != '0000'
+SELECT * from list_stage(location => '@s1') where substr(replace(name, '-', ''), 39, 4) != '0000'
 ----
 
 statement ok
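This test changed because the unload filename embeds the query id (see the data_af2ab6dc872546e5a6013dad9c512769_0000_00000770.csv comment above) and the assertion slices a fixed 4-character window at column 39. With dashes no longer stripped, an id can be 36 characters and the offset drifts, so the test deletes dashes before slicing. A rough Rust mirror of the new predicate (group_segment is a hypothetical helper; the layout is inferred from the comment):

    fn group_segment(name: &str) -> String {
        // Mirrors substr(replace(name, '-', ''), 39, 4): once dashes are
        // removed the query id is always 32 hex chars, so "data_" (5) +
        // id (32) + "_" (1) = 38 chars, putting the group id at 1-based
        // column 39.
        let normalized = name.replace('-', "");
        normalized[38..42].to_string()
    }

    fn main() {
        assert_eq!(
            group_segment("data_af2ab6dc872546e5a6013dad9c512769_0000_00000770.csv"),
            "0000"
        );
    }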

@@ -5,7 +5,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 
 export TEST_USER_PASSWORD="password"
 export TEST_USER_CONNECT="bendsql --user=test-user --password=password --host=${QUERY_MYSQL_HANDLER_HOST} --port ${QUERY_HTTP_HANDLER_PORT}"
-export RM_UUID="sed -E ""s/[a-z0-9]{32}/UUID/g"""
+export RM_UUID="sed -E ""s/[-a-z0-9]{32,36}/UUID/g"""
 
 stmt "drop database if exists db01;"
 stmt "create database db01;"
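The RM_UUID mask here and in the two scripts below widens for the same reason: the old pattern only matched the 32-character simple form, so a preserved hyphenated id would leak into expected output unmasked. A rough Rust equivalent of the before/after patterns, using the regex crate (illustration only):

    use regex::Regex;

    fn main() {
        let old = Regex::new(r"[a-z0-9]{32}").unwrap(); // simple form only
        let new = Regex::new(r"[-a-z0-9]{32,36}").unwrap(); // hyphenated too
        let simple = "af2ab6dc872546e5a6013dad9c512769";
        let hyphenated = "af2ab6dc-8725-46e5-a601-3dad9c512769";
        // The old mask misses hyphenated ids entirely (no 32-char hex run).
        assert!(old.is_match(simple) && !old.is_match(hyphenated));
        // The widened mask catches both renderings.
        assert!(new.is_match(simple) && new.is_match(hyphenated));
    }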

2 changes: 1 addition & 1 deletion tests/suites/1_stateful/00_stage/00_0012_stage_priv.sh
@@ -7,7 +7,7 @@ export TEST_USER_NAME="u1"
 export TEST_USER_PASSWORD="password"
 export TEST_USER_CONNECT="bendsql --user=u1 --password=password --host=${QUERY_MYSQL_HANDLER_HOST} --port ${QUERY_HTTP_HANDLER_PORT}"
 export USER_B_CONNECT="bendsql --user=b --password=password --host=${QUERY_MYSQL_HANDLER_HOST} --port ${QUERY_HTTP_HANDLER_PORT}"
-export RM_UUID="sed -E ""s/[a-z0-9]{32}/UUID/g"""
+export RM_UUID="sed -E ""s/[-a-z0-9]{32,36}/UUID/g"""
 
 echo "drop table if exists test_table;" | $BENDSQL_CLIENT_CONNECT
 echo "drop user if exists u1;" | $BENDSQL_CLIENT_CONNECT

2 changes: 1 addition & 1 deletion tests/suites/1_stateful/00_stage/00_0015_unload_output.sh
@@ -5,7 +5,7 @@
 CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 . "$CURDIR"/../../../shell_env.sh
 
-export RM_UUID="sed -E ""s/[a-z0-9]{32}/UUID/g"""
+export RM_UUID="sed -E ""s/[-a-z0-9]{32,36}/UUID/g"""
 
 stmt "drop table if exists t1"
 stmt "create table t1 (a int)"

@@ -5,7 +5,7 @@
 q1.parquet 624 1
 >>>> streaming load: q1.parquet error :
 + curl -sS -H x-databend-query-id:load-q1 -H 'sql:insert into streaming_load_parquet(c2,c3) from @_databend_load file_format = (type='\''parquet'\'', missing_field_as=error, null_if=())' -F upload=@/tmp/streaming_load_parquet/q1.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadq1","stats":{"rows":1,"bytes":25}}
+{"id":"load-q1","stats":{"rows":1,"bytes":25}}
 <<<<
 >>>> select * from streaming_load_parquet;
 ok 1 2021-01-01
@@ -26,7 +26,7 @@ q2.parquet 426 1
 q3.parquet 426 1
 >>>> streaming load: q3.parquet field_default :
 + curl -sS -H x-databend-query-id:load-q3 -H 'sql:insert into streaming_load_parquet(c2,c3) from @_databend_load file_format = (type='\''parquet'\'', missing_field_as=field_default, null_if=())' -F upload=@/tmp/streaming_load_parquet/q3.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadq3","stats":{"rows":1,"bytes":21}}
+{"id":"load-q3","stats":{"rows":1,"bytes":21}}
 <<<<
 >>>> select * from streaming_load_parquet;
 ok NULL 2021-01-01
@@ -37,7 +37,7 @@ ok NULL 2021-01-01
 q4.parquet 643 1
 >>>> streaming load: q4.parquet error :
 + curl -sS -H x-databend-query-id:load-q4 -H 'sql:insert into streaming_load_parquet(c1,c3) from @_databend_load file_format = (type='\''parquet'\'', missing_field_as=error, null_if=())' -F upload=@/tmp/streaming_load_parquet/q4.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadq4","stats":{"rows":1,"bytes":26}}
+{"id":"load-q4","stats":{"rows":1,"bytes":26}}
 <<<<
 >>>> select * from streaming_load_parquet;
 my_null NULL 2021-01-01
@@ -48,7 +48,7 @@ my_null NULL 2021-01-01
 q5.parquet 643 1
 >>>> streaming load: q5.parquet error 'my_null':
 + curl -sS -H x-databend-query-id:load-q5 -H 'sql:insert into streaming_load_parquet(c1,c3) from @_databend_load file_format = (type='\''parquet'\'', missing_field_as=error, null_if=('\''my_null'\''))' -F upload=@/tmp/streaming_load_parquet/q5.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadq5","stats":{"rows":1,"bytes":7}}
+{"id":"load-q5","stats":{"rows":1,"bytes":7}}
 <<<<
 >>>> select * from streaming_load_parquet;
 NULL NULL 2021-01-01

@@ -4,7 +4,7 @@
 >>>> copy into @streaming_load_07/data.csv from (select '2020-01-02' as c4, 110 as c2) file_format=(type='csv') single=true include_query_id=false use_raw_path=true detailed_output=true overwrite=true;
 data.csv 17 1
 + curl -sS -H x-databend-query-id:load-csv -H 'sql:insert into streaming_load_07(c3, c4, c2) values ('\''a'\'', ?, ?) from @_databend_load file_format = (type=csv)' -F upload=@/tmp/streaming_load_07/data.csv -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadcsv","stats":{"rows":1,"bytes":39}}
+{"id":"load-csv","stats":{"rows":1,"bytes":39}}
 <<<<
 >>>> select * from streaming_load_07;
 ok 110 a 2020-01-02
@@ -14,7 +14,7 @@ ok 110 a 2020-01-02
 >>>> copy into @streaming_load_07/data.tsv from (select '2020-01-02' as c4, 110 as c2) file_format=(type='tsv') single=true include_query_id=false use_raw_path=true detailed_output=true overwrite=true;
 data.tsv 15 1
 + curl -sS -H x-databend-query-id:load-tsv -H 'sql:insert into streaming_load_07(c3, c4, c2) values ('\''a'\'', ?, ?) from @_databend_load file_format = (type=tsv)' -F upload=@/tmp/streaming_load_07/data.tsv -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadtsv","stats":{"rows":1,"bytes":39}}
+{"id":"load-tsv","stats":{"rows":1,"bytes":39}}
 <<<<
 >>>> select * from streaming_load_07;
 ok 110 a 2020-01-02
@@ -24,7 +24,7 @@ ok 110 a 2020-01-02
 >>>> copy into @streaming_load_07/data.ndjson from (select '2020-01-02' as c4, 110 as c2) file_format=(type='ndjson') single=true include_query_id=false use_raw_path=true detailed_output=true overwrite=true;
 data.ndjson 29 1
 + curl -sS -H x-databend-query-id:load-ndjson -H 'sql:insert into streaming_load_07(c3, c4, c2) values ('\''a'\'', ?, ?) from @_databend_load file_format = (type=ndjson)' -F upload=@/tmp/streaming_load_07/data.ndjson -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadndjson","stats":{"rows":1,"bytes":39}}
+{"id":"load-ndjson","stats":{"rows":1,"bytes":39}}
 <<<<
 >>>> select * from streaming_load_07;
 ok 110 a 2020-01-02
@@ -34,7 +34,7 @@ ok 110 a 2020-01-02
 >>>> copy into @streaming_load_07/data.parquet from (select '2020-01-02' as c4, 110 as c2) file_format=(type='parquet') single=true include_query_id=false use_raw_path=true detailed_output=true overwrite=true;
 data.parquet 665 1
 + curl -sS -H x-databend-query-id:load-parquet -H 'sql:insert into streaming_load_07(c3, c4, c2) values ('\''a'\'', ?, ?) from @_databend_load file_format = (type=parquet)' -F upload=@/tmp/streaming_load_07/data.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadparquet","stats":{"rows":1,"bytes":39}}
+{"id":"load-parquet","stats":{"rows":1,"bytes":39}}
 <<<<
 >>>> select * from streaming_load_07;
 ok 110 a 2020-01-02