diff --git a/src/query/service/src/servers/flight/v1/actions/init_query_env.rs b/src/query/service/src/servers/flight/v1/actions/init_query_env.rs
index 93ecb10ed3416..c64119b54c934 100644
--- a/src/query/service/src/servers/flight/v1/actions/init_query_env.rs
+++ b/src/query/service/src/servers/flight/v1/actions/init_query_env.rs
@@ -28,10 +28,7 @@ use crate::servers::flight::v1::packets::QueryEnv;
 
 pub static INIT_QUERY_ENV: &str = "/actions/init_query_env";
 
-pub async fn init_query_env(mut env: QueryEnv) -> Result<()> {
-    // Update query id to make sure they are compatible.
-    env.query_id = env.query_id.replace('-', "");
-
+pub async fn init_query_env(env: QueryEnv) -> Result<()> {
     let mut tracking_workload_group = None;
     let mut parent_mem_stat = ParentMemStat::StaticRef(&GLOBAL_MEM_STAT);
 
diff --git a/src/query/service/src/servers/flight/v1/actions/start_prepared_query.rs b/src/query/service/src/servers/flight/v1/actions/start_prepared_query.rs
index 7a760c33dea65..fbb7e8fce55e7 100644
--- a/src/query/service/src/servers/flight/v1/actions/start_prepared_query.rs
+++ b/src/query/service/src/servers/flight/v1/actions/start_prepared_query.rs
@@ -21,7 +21,6 @@ use crate::servers::flight::v1::exchange::DataExchangeManager;
 pub static START_PREPARED_QUERY: &str = "/actions/start_prepared_query";
 
 pub async fn start_prepared_query(id: String) -> Result<()> {
-    let id = id.replace('-', "");
     let ctx = DataExchangeManager::instance().get_query_ctx(&id)?;
 
     let mut tracking_payload = ThreadTracker::new_tracking_payload();
diff --git a/src/query/service/src/servers/http/middleware/session.rs b/src/query/service/src/servers/http/middleware/session.rs
index 635f4e0660419..93a70f6992e81 100644
--- a/src/query/service/src/servers/http/middleware/session.rs
+++ b/src/query/service/src/servers/http/middleware/session.rs
@@ -683,7 +683,7 @@ impl Endpoint for HTTPSessionEndpoint {
         let query_id = req
             .headers()
             .get(HEADER_QUERY_ID)
-            .map(|id| id.to_str().unwrap().replace('-', ""))
+            .map(|id| id.to_str().unwrap().to_string())
             .unwrap_or_else(|| Uuid::now_v7().simple().to_string());
 
         let mut login_history = LoginHistory::new();
diff --git a/src/query/service/src/sessions/query_ctx.rs b/src/query/service/src/sessions/query_ctx.rs
index 7601e94dbceb4..ea4e9f744fffc 100644
--- a/src/query/service/src/sessions/query_ctx.rs
+++ b/src/query/service/src/sessions/query_ctx.rs
@@ -851,7 +851,7 @@ impl TableContext for QueryContext {
     }
 
     fn get_id(&self) -> String {
-        self.shared.init_query_id.as_ref().read().replace('-', "")
+        self.shared.init_query_id.as_ref().read().clone()
    }
 
     fn get_current_catalog(&self) -> String {
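The four server-side hunks above all remove the same normalization: a client-supplied query id is no longer stripped of `-`, and only the generated fallback id is hyphen-free. A minimal sketch of the resulting behavior (`resolve_query_id` is a hypothetical helper, not repo code; assumes the `uuid` crate with the `v7` feature enabled):

```rust
// Sketch of the new pass-through behavior: keep a client-supplied
// query id verbatim, hyphens included; generate a hyphen-free UUIDv7
// only when no id was supplied.
use uuid::Uuid; // uuid = { version = "1", features = ["v7"] }

fn resolve_query_id(header: Option<&str>) -> String {
    match header {
        // Previously: id.replace('-', "") -- now the id is kept as-is.
        Some(id) => id.to_string(),
        // `simple()` formats the UUID as 32 hex chars without hyphens.
        None => Uuid::now_v7().simple().to_string(),
    }
}

fn main() {
    assert_eq!(resolve_query_id(Some("test-query-id")), "test-query-id");
    assert_eq!(resolve_query_id(None).len(), 32);
}
```

The test and script changes below follow from this: expected ids keep their hyphens, and anything that parses or scrubs ids must now accept both spellings.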
diff --git a/src/query/service/tests/it/servers/http/http_query_handlers.rs b/src/query/service/tests/it/servers/http/http_query_handlers.rs
index 861052d0bc369..28f215384b272 100644
--- a/src/query/service/tests/it/servers/http/http_query_handlers.rs
+++ b/src/query/service/tests/it/servers/http/http_query_handlers.rs
@@ -460,7 +460,6 @@ async fn test_client_query_id() -> Result<()> {
     Ok(())
 }
 
-// `-` in query id will be trimmed.
 #[tokio::test(flavor = "current_thread")]
 async fn test_client_compatible_query_id() -> Result<()> {
     let _fixture = TestFixture::setup().await?;
@@ -473,7 +472,7 @@ async fn test_client_compatible_query_id() -> Result<()> {
     let (status, result) =
         post_sql_to_endpoint_new_session(&ep, sql, wait_time_secs, headers).await?;
     assert_eq!(status, StatusCode::OK);
-    assert_eq!(result.id, "testqueryid");
+    assert_eq!(result.id, "test-query-id");
 
     Ok(())
 }
diff --git a/tests/sqllogictests/suites/stage/ordered_unload.test b/tests/sqllogictests/suites/stage/ordered_unload.test
index 9555bb339c7f7..c1b0b5e3725ab 100644
--- a/tests/sqllogictests/suites/stage/ordered_unload.test
+++ b/tests/sqllogictests/suites/stage/ordered_unload.test
@@ -25,7 +25,7 @@ SELECT COUNT(*) FROM (SELECT $1 AS a, rank() OVER (ORDER BY metadata$filename, m
 
 # data_af2ab6dc872546e5a6013dad9c512769_0000_00000770.csv
 query
-SELECT * from list_stage(location => '@s1') where substr(name, 39, 4) != '0000'
+SELECT * from list_stage(location => '@s1') where substr(replace(name, '-', ''), 39, 4) != '0000'
 ----
 
 statement ok
diff --git a/tests/suites/0_stateless/18_rbac/18_0007_privilege_access.sh b/tests/suites/0_stateless/18_rbac/18_0007_privilege_access.sh
index 1435e651cee31..97450fcd6df86 100755
--- a/tests/suites/0_stateless/18_rbac/18_0007_privilege_access.sh
+++ b/tests/suites/0_stateless/18_rbac/18_0007_privilege_access.sh
@@ -5,7 +5,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 
 export TEST_USER_PASSWORD="password"
 export TEST_USER_CONNECT="bendsql --user=test-user --password=password --host=${QUERY_MYSQL_HANDLER_HOST} --port ${QUERY_HTTP_HANDLER_PORT}"
-export RM_UUID="sed -E ""s/[a-z0-9]{32}/UUID/g"""
+export RM_UUID="sed -E ""s/[-a-z0-9]{32,36}/UUID/g"""
 
 stmt "drop database if exists db01;"
 stmt "create database db01;"
diff --git a/tests/suites/1_stateful/00_stage/00_0012_stage_priv.sh b/tests/suites/1_stateful/00_stage/00_0012_stage_priv.sh
index 95cde6c227ea4..12963959d81c9 100755
--- a/tests/suites/1_stateful/00_stage/00_0012_stage_priv.sh
+++ b/tests/suites/1_stateful/00_stage/00_0012_stage_priv.sh
@@ -7,7 +7,7 @@ export TEST_USER_NAME="u1"
 export TEST_USER_PASSWORD="password"
 export TEST_USER_CONNECT="bendsql --user=u1 --password=password --host=${QUERY_MYSQL_HANDLER_HOST} --port ${QUERY_HTTP_HANDLER_PORT}"
 export USER_B_CONNECT="bendsql --user=b --password=password --host=${QUERY_MYSQL_HANDLER_HOST} --port ${QUERY_HTTP_HANDLER_PORT}"
-export RM_UUID="sed -E ""s/[a-z0-9]{32}/UUID/g"""
+export RM_UUID="sed -E ""s/[-a-z0-9]{32,36}/UUID/g"""
 
 echo "drop table if exists test_table;" | $BENDSQL_CLIENT_CONNECT
 echo "drop user if exists u1;" | $BENDSQL_CLIENT_CONNECT
diff --git a/tests/suites/1_stateful/00_stage/00_0015_unload_output.sh b/tests/suites/1_stateful/00_stage/00_0015_unload_output.sh
index c4545b8646903..cb8fe3cecbae1 100755
--- a/tests/suites/1_stateful/00_stage/00_0015_unload_output.sh
+++ b/tests/suites/1_stateful/00_stage/00_0015_unload_output.sh
@@ -5,7 +5,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 . "$CURDIR"/../../../shell_env.sh
 
-export RM_UUID="sed -E ""s/[a-z0-9]{32}/UUID/g"""
+export RM_UUID="sed -E ""s/[-a-z0-9]{32,36}/UUID/g"""
 
 stmt "drop table if exists t1"
 stmt "create table t1 (a int)"
diff --git a/tests/suites/1_stateful/01_streaming_load/01_0006_streaming_load_parquet.result b/tests/suites/1_stateful/01_streaming_load/01_0006_streaming_load_parquet.result
index 435fcc7d2913a..1d4631e8abd8f 100755
--- a/tests/suites/1_stateful/01_streaming_load/01_0006_streaming_load_parquet.result
+++ b/tests/suites/1_stateful/01_streaming_load/01_0006_streaming_load_parquet.result
@@ -5,7 +5,7 @@
 q1.parquet 624 1
 >>>> streaming load: q1.parquet error :
 + curl -sS -H x-databend-query-id:load-q1 -H 'sql:insert into streaming_load_parquet(c2,c3) from @_databend_load file_format = (type='\''parquet'\'', missing_field_as=error, null_if=())' -F upload=@/tmp/streaming_load_parquet/q1.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadq1","stats":{"rows":1,"bytes":25}}
+{"id":"load-q1","stats":{"rows":1,"bytes":25}}
 <<<<
 >>>> select * from streaming_load_parquet;
 ok 1 2021-01-01
@@ -26,7 +26,7 @@ q2.parquet 426 1
 q3.parquet 426 1
 >>>> streaming load: q3.parquet field_default :
 + curl -sS -H x-databend-query-id:load-q3 -H 'sql:insert into streaming_load_parquet(c2,c3) from @_databend_load file_format = (type='\''parquet'\'', missing_field_as=field_default, null_if=())' -F upload=@/tmp/streaming_load_parquet/q3.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadq3","stats":{"rows":1,"bytes":21}}
+{"id":"load-q3","stats":{"rows":1,"bytes":21}}
 <<<<
 >>>> select * from streaming_load_parquet;
 ok NULL 2021-01-01
@@ -37,7 +37,7 @@ ok NULL 2021-01-01
 q4.parquet 643 1
 >>>> streaming load: q4.parquet error :
 + curl -sS -H x-databend-query-id:load-q4 -H 'sql:insert into streaming_load_parquet(c1,c3) from @_databend_load file_format = (type='\''parquet'\'', missing_field_as=error, null_if=())' -F upload=@/tmp/streaming_load_parquet/q4.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadq4","stats":{"rows":1,"bytes":26}}
+{"id":"load-q4","stats":{"rows":1,"bytes":26}}
 <<<<
 >>>> select * from streaming_load_parquet;
 my_null NULL 2021-01-01
@@ -48,7 +48,7 @@ my_null NULL 2021-01-01
 q5.parquet 643 1
 >>>> streaming load: q5.parquet error 'my_null':
 + curl -sS -H x-databend-query-id:load-q5 -H 'sql:insert into streaming_load_parquet(c1,c3) from @_databend_load file_format = (type='\''parquet'\'', missing_field_as=error, null_if=('\''my_null'\''))' -F upload=@/tmp/streaming_load_parquet/q5.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadq5","stats":{"rows":1,"bytes":7}}
+{"id":"load-q5","stats":{"rows":1,"bytes":7}}
 <<<<
 >>>> select * from streaming_load_parquet;
 NULL NULL 2021-01-01
diff --git a/tests/suites/1_stateful/01_streaming_load/01_0007_streaming_load_placeholder.result b/tests/suites/1_stateful/01_streaming_load/01_0007_streaming_load_placeholder.result
index 0c89f9017983d..a2128e04daf35 100755
--- a/tests/suites/1_stateful/01_streaming_load/01_0007_streaming_load_placeholder.result
+++ b/tests/suites/1_stateful/01_streaming_load/01_0007_streaming_load_placeholder.result
@@ -4,7 +4,7 @@
 >>>> copy into @streaming_load_07/data.csv from (select '2020-01-02' as c4, 110 as c2) file_format=(type='csv') single=true include_query_id=false use_raw_path=true detailed_output=true overwrite=true;
 data.csv 17 1
 + curl -sS -H x-databend-query-id:load-csv -H 'sql:insert into streaming_load_07(c3, c4, c2) values ('\''a'\'', ?, ?) from @_databend_load file_format = (type=csv)' -F upload=@/tmp/streaming_load_07/data.csv -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadcsv","stats":{"rows":1,"bytes":39}}
+{"id":"load-csv","stats":{"rows":1,"bytes":39}}
 <<<<
 >>>> select * from streaming_load_07;
 ok 110 a 2020-01-02
@@ -14,7 +14,7 @@ ok 110 a 2020-01-02
 >>>> copy into @streaming_load_07/data.tsv from (select '2020-01-02' as c4, 110 as c2) file_format=(type='tsv') single=true include_query_id=false use_raw_path=true detailed_output=true overwrite=true;
 data.tsv 15 1
 + curl -sS -H x-databend-query-id:load-tsv -H 'sql:insert into streaming_load_07(c3, c4, c2) values ('\''a'\'', ?, ?) from @_databend_load file_format = (type=tsv)' -F upload=@/tmp/streaming_load_07/data.tsv -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadtsv","stats":{"rows":1,"bytes":39}}
+{"id":"load-tsv","stats":{"rows":1,"bytes":39}}
 <<<<
 >>>> select * from streaming_load_07;
 ok 110 a 2020-01-02
@@ -24,7 +24,7 @@ ok 110 a 2020-01-02
 >>>> copy into @streaming_load_07/data.ndjson from (select '2020-01-02' as c4, 110 as c2) file_format=(type='ndjson') single=true include_query_id=false use_raw_path=true detailed_output=true overwrite=true;
 data.ndjson 29 1
 + curl -sS -H x-databend-query-id:load-ndjson -H 'sql:insert into streaming_load_07(c3, c4, c2) values ('\''a'\'', ?, ?) from @_databend_load file_format = (type=ndjson)' -F upload=@/tmp/streaming_load_07/data.ndjson -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadndjson","stats":{"rows":1,"bytes":39}}
+{"id":"load-ndjson","stats":{"rows":1,"bytes":39}}
 <<<<
 >>>> select * from streaming_load_07;
 ok 110 a 2020-01-02
@@ -34,7 +34,7 @@ ok 110 a 2020-01-02
 >>>> copy into @streaming_load_07/data.parquet from (select '2020-01-02' as c4, 110 as c2) file_format=(type='parquet') single=true include_query_id=false use_raw_path=true detailed_output=true overwrite=true;
 data.parquet 665 1
 + curl -sS -H x-databend-query-id:load-parquet -H 'sql:insert into streaming_load_07(c3, c4, c2) values ('\''a'\'', ?, ?) from @_databend_load file_format = (type=parquet)' -F upload=@/tmp/streaming_load_07/data.parquet -u root: -XPUT http://localhost:8000/v1/streaming_load
-{"id":"loadparquet","stats":{"rows":1,"bytes":39}}
+{"id":"load-parquet","stats":{"rows":1,"bytes":39}}
 <<<<
 >>>> select * from streaming_load_07;
 ok 110 a 2020-01-02
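Two notes on the test-side changes. First, the `replace(name, '-', '')` wrapper in `ordered_unload.test` above matters because the unload filename embeds the query id: only after hyphens are removed does `data_` (5 chars) plus a 32-char id put the writer-id group at the fixed 1-based offset 39 that `substr(..., 39, 4)` inspects. A quick check of that arithmetic (`writer_group` is a hypothetical helper mirroring the SQL expression, not repo code):

```rust
// Mirrors substr(replace(name, '-', ''), 39, 4) from ordered_unload.test.
fn writer_group(name: &str) -> String {
    let normalized: String = name.chars().filter(|c| *c != '-').collect();
    // substr is 1-based, so position 39 is 0-based index 38.
    normalized.chars().skip(38).take(4).collect()
}

fn main() {
    // Hyphenated spelling of the id shown in the test's comment line.
    let name = "data_af2ab6dc-8725-46e5-a601-3dad9c512769_0000_00000770.csv";
    assert_eq!(writer_group(name), "0000");
}
```

Second, the widened `RM_UUID` scrubber in the three shell scripts above now has to match both UUID spellings, the 32-character simple form and the 36-character hyphenated form, hence `[-a-z0-9]{32,36}`. A rough Rust equivalent of the sed substitution (assumes the `regex` crate; a sketch, not repo code):

```rust
// Both UUID spellings collapse to the same placeholder, so the expected
// test output stays stable whichever form a query id takes.
use regex::Regex;

fn main() {
    let rm_uuid = Regex::new(r"[-a-z0-9]{32,36}").unwrap();
    assert_eq!(
        rm_uuid.replace_all("af2ab6dc872546e5a6013dad9c512769", "UUID"),
        "UUID" // 32-char simple form
    );
    assert_eq!(
        rm_uuid.replace_all("af2ab6dc-8725-46e5-a601-3dad9c512769", "UUID"),
        "UUID" // 36-char hyphenated form
    );
}
```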