Commit 5b2ad25

Merge pull request #466 from ydb-platform/query_service_slo
WIP: Query Service SLO
2 parents: 0a13de5 + 197c428

7 files changed: 198 additions & 48 deletions


.github/workflows/slo.yml

Lines changed: 10 additions & 3 deletions
@@ -31,6 +31,7 @@ jobs:
         if: env.DOCKER_REPO != null
         env:
           DOCKER_REPO: ${{ secrets.SLO_DOCKER_REPO }}
+        continue-on-error: true
         with:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           KUBECONFIG_B64: ${{ secrets.SLO_KUBE_CONFIG }}
@@ -48,11 +49,17 @@ jobs:
           timeBetweenPhases: 30
           shutdownTime: 30

-          language_id0: sync
-          language0: python-sync
+          language_id0: sync-python-table
+          language0: Python SDK over Table Service
           workload_path0: tests/slo
           workload_build_context0: ../..
-          workload_build_options0: -f Dockerfile
+          workload_build_options0: -f Dockerfile --build-arg SDK_SERVICE=sync-python-table
+
+          language_id1: sync-python-query
+          language1: Python SDK over Query Service
+          workload_path1: tests/slo
+          workload_build_context1: ../..
+          workload_build_options1: -f Dockerfile --build-arg SDK_SERVICE=sync-python-query

       - uses: actions/upload-artifact@v3
         if: env.DOCKER_REPO != null

tests/slo/Dockerfile

Lines changed: 2 additions & 0 deletions
@@ -3,5 +3,7 @@ COPY . /src
 WORKDIR /src
 RUN python -m pip install --upgrade pip && python -m pip install -e . && python -m pip install -r tests/slo/requirements.txt
 WORKDIR tests/slo
+ARG SDK_SERVICE
+ENV SDK_SERVICE=$SDK_SERVICE

 ENTRYPOINT ["python", "src"]
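
The two added lines only forward the Docker build argument into the container environment; the workload reads it back at start-up. A minimal sketch of the consuming side, using the variable name and default that this commit introduces in tests/slo/src/metrics.py:

    from os import environ

    # SDK_SERVICE is injected via `ARG SDK_SERVICE` / `ENV SDK_SERVICE=$SDK_SERVICE`
    # in tests/slo/Dockerfile; fall back to the table-service workload when unset.
    SDK_SERVICE_NAME = environ.get("SDK_SERVICE", "sync-python-table")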

tests/slo/src/jobs.py

Lines changed: 128 additions & 7 deletions
@@ -3,7 +3,7 @@
 import logging
 import dataclasses
 from random import randint
-from typing import Callable, Tuple
+from typing import Callable, Tuple, Union
 from ratelimiter import RateLimiter

 import threading
@@ -31,12 +31,13 @@
 );
 """

+
 logger = logging.getLogger(__name__)


 @dataclasses.dataclass
 class RequestParams:
-    pool: ydb.SessionPool
+    pool: Union[ydb.SessionPool, ydb.QuerySessionPool]
     query: str
     params: dict
     metrics: Metrics
@@ -56,7 +57,7 @@ def transaction(session):

         result = session.transaction().execute(
             params.query,
-            params.params,
+            parameters=params.params,
             commit_tx=True,
             settings=params.request_settings,
         )
@@ -82,7 +83,7 @@ def transaction(session):
 def run_reads(driver, query, max_id, metrics, limiter, runtime, timeout):
     start_time = time.time()

-    logger.info("Start read workload")
+    logger.info("Start read workload over table service")

     request_settings = ydb.BaseRequestSettings().with_timeout(timeout)
     retry_setting = ydb.RetrySettings(
@@ -116,7 +117,7 @@ def check_result(result):


 def run_read_jobs(args, driver, tb_name, max_id, metrics):
-    logger.info("Start read jobs")
+    logger.info("Start read jobs over table service")

     session = ydb.retry_operation_sync(lambda: driver.table_client.session().create())
     read_q = session.prepare(READ_QUERY_TEMPLATE.format(tb_name))
@@ -135,10 +136,65 @@ def run_read_jobs(args, driver, tb_name, max_id, metrics):
     return futures


+def run_reads_query(driver, query, max_id, metrics, limiter, runtime, timeout):
+    start_time = time.time()
+
+    logger.info("Start read workload over query service")
+
+    request_settings = ydb.BaseRequestSettings().with_timeout(timeout)
+    retry_setting = ydb.RetrySettings(
+        idempotent=True,
+        max_session_acquire_timeout=timeout,
+    )
+
+    with ydb.QuerySessionPool(driver) as pool:
+        logger.info("Session pool for read requests created")
+
+        while time.time() - start_time < runtime:
+            params = {"$object_id": (randint(1, max_id), ydb.PrimitiveType.Uint64)}
+            with limiter:
+
+                def check_result(result):
+                    res = next(result)
+                    assert res.rows[0]
+
+                params = RequestParams(
+                    pool=pool,
+                    query=query,
+                    params=params,
+                    metrics=metrics,
+                    labels=(JOB_READ_LABEL,),
+                    request_settings=request_settings,
+                    retry_settings=retry_setting,
+                    check_result_cb=check_result,
+                )
+                execute_query(params)
+
+    logger.info("Stop read workload")
+
+
+def run_read_jobs_query(args, driver, tb_name, max_id, metrics):
+    logger.info("Start read jobs over query service")
+
+    read_q = READ_QUERY_TEMPLATE.format(tb_name)
+
+    read_limiter = RateLimiter(max_calls=args.read_rps, period=1)
+    futures = []
+    for _ in range(args.read_threads):
+        future = threading.Thread(
+            name="slo_run_read",
+            target=run_reads_query,
+            args=(driver, read_q, max_id, metrics, read_limiter, args.time, args.read_timeout / 1000),
+        )
+        future.start()
+        futures.append(future)
+    return futures
+
+
 def run_writes(driver, query, row_generator, metrics, limiter, runtime, timeout):
     start_time = time.time()

-    logger.info("Start write workload")
+    logger.info("Start write workload over table service")

     request_settings = ydb.BaseRequestSettings().with_timeout(timeout)
     retry_setting = ydb.RetrySettings(
@@ -157,6 +213,7 @@ def run_writes(driver, query, row_generator, metrics, limiter, runtime, timeout)
                 "$payload_double": row.payload_double,
                 "$payload_timestamp": row.payload_timestamp,
             }
+
             with limiter:
                 params = RequestParams(
                     pool=pool,
@@ -173,7 +230,7 @@ def run_writes(driver, query, row_generator, metrics, limiter, runtime, timeout)


 def run_write_jobs(args, driver, tb_name, max_id, metrics):
-    logger.info("Start write jobs")
+    logger.info("Start write jobs over table service")

     session = ydb.retry_operation_sync(lambda: driver.table_client.session().create())
     write_q = session.prepare(WRITE_QUERY_TEMPLATE.format(tb_name))
@@ -194,6 +251,70 @@ def run_write_jobs(args, driver, tb_name, max_id, metrics):
     return futures


+def run_writes_query(driver, query, row_generator, metrics, limiter, runtime, timeout):
+    start_time = time.time()
+
+    logger.info("Start write workload over query service")
+
+    request_settings = ydb.BaseRequestSettings().with_timeout(timeout)
+    retry_setting = ydb.RetrySettings(
+        idempotent=True,
+        max_session_acquire_timeout=timeout,
+    )
+
+    with ydb.QuerySessionPool(driver) as pool:
+        logger.info("Session pool for read requests created")
+
+        while time.time() - start_time < runtime:
+            row = row_generator.get()
+            params = {
+                "$object_id": (row.object_id, ydb.PrimitiveType.Uint64),
+                "$payload_str": (row.payload_str, ydb.PrimitiveType.Utf8),
+                "$payload_double": (row.payload_double, ydb.PrimitiveType.Double),
+                "$payload_timestamp": (row.payload_timestamp, ydb.PrimitiveType.Timestamp),
+            }
+
+            def check_result(result):
+                # we have to close stream by reading it till the end
+                with result:
+                    pass
+
+            with limiter:
+                params = RequestParams(
+                    pool=pool,
+                    query=query,
+                    params=params,
+                    metrics=metrics,
+                    labels=(JOB_WRITE_LABEL,),
+                    request_settings=request_settings,
+                    retry_settings=retry_setting,
+                    check_result_cb=check_result,
+                )
+                execute_query(params)
+
+    logger.info("Stop write workload")
+
+
+def run_write_jobs_query(args, driver, tb_name, max_id, metrics):
+    logger.info("Start write jobs for query service")
+
+    write_q = WRITE_QUERY_TEMPLATE.format(tb_name)
+
+    write_limiter = RateLimiter(max_calls=args.write_rps, period=1)
+    row_generator = RowGenerator(max_id)
+
+    futures = []
+    for _ in range(args.write_threads):
+        future = threading.Thread(
+            name="slo_run_write",
+            target=run_writes_query,
+            args=(driver, write_q, row_generator, metrics, write_limiter, args.time, args.write_timeout / 1000),
+        )
+        future.start()
+        futures.append(future)
+    return futures
+
+
 def push_metric(limiter, runtime, metrics):
     start_time = time.time()
     logger.info("Start push metrics")
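
Note that run_read_jobs_query and run_write_jobs_query repeat the same thread-spawning loop as their table-service counterparts. A hedged sketch of how that loop could be factored out; start_jobs is a hypothetical helper, not part of this commit:

    import threading

    def start_jobs(target, thread_count, name, *job_args):
        # Hypothetical helper: spawn `thread_count` worker threads running `target`
        # and return them so the caller can join() each one, exactly as the
        # run_*_jobs_query functions above do inline.
        futures = []
        for _ in range(thread_count):
            worker = threading.Thread(name=name, target=target, args=job_args)
            worker.start()
            futures.append(worker)
        return futures

With such a helper, run_read_jobs_query would reduce to building read_q and read_limiter and returning start_jobs(run_reads_query, args.read_threads, "slo_run_read", driver, read_q, max_id, metrics, read_limiter, args.time, args.read_timeout / 1000).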

tests/slo/src/metrics.py

Lines changed: 4 additions & 2 deletions
@@ -13,6 +13,8 @@
 JOB_READ_LABEL, JOB_WRITE_LABEL = "read", "write"
 JOB_STATUS_OK, JOB_STATUS_ERR = "ok", "err"

+SDK_SERVICE_NAME = environ.get("SDK_SERVICE", "sync-python-table")
+

 class Metrics:
     def __init__(self, push_gateway):
@@ -102,10 +104,10 @@ def stop(self, labels, start_time, attempts=1, error=None):
     def push(self):
         push_to_gateway(
             self._push_gtw,
-            job="workload-sync",
+            job=f"workload-{SDK_SERVICE_NAME}",
             registry=self._registry,
             grouping_key={
-                "sdk": "python-sync",
+                "sdk": SDK_SERVICE_NAME,
                 "sdkVersion": version("ydb"),
             },
         )
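
The practical effect of this change is that each workload now reports to the Prometheus push gateway under its own job name and `sdk` grouping label. A minimal, self-contained sketch of the resulting call; the gateway address and the version string are placeholders, not values from this commit:

    from os import environ
    from prometheus_client import CollectorRegistry, push_to_gateway

    SDK_SERVICE_NAME = environ.get("SDK_SERVICE", "sync-python-table")
    registry = CollectorRegistry()

    # With SDK_SERVICE=sync-python-query this pushes job "workload-sync-python-query"
    # with grouping key {"sdk": "sync-python-query", ...}, so the table-service and
    # query-service workloads show up as separate series.
    push_to_gateway(
        "localhost:9091",  # placeholder push-gateway address
        job=f"workload-{SDK_SERVICE_NAME}",
        registry=registry,
        grouping_key={"sdk": SDK_SERVICE_NAME, "sdkVersion": "0.0.0"},
    )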

tests/slo/src/runner.py

Lines changed: 23 additions & 9 deletions
@@ -7,8 +7,14 @@
 import concurrent.futures
 from concurrent.futures import ThreadPoolExecutor

-from jobs import run_read_jobs, run_write_jobs, run_metric_job
-from metrics import Metrics
+from jobs import (
+    run_read_jobs,
+    run_write_jobs,
+    run_read_jobs_query,
+    run_write_jobs_query,
+    run_metric_job,
+)
+from metrics import Metrics, SDK_SERVICE_NAME

 logger = logging.getLogger(__name__)

@@ -85,12 +91,20 @@ def run_slo(args, driver, tb_name):
     logger.info("Max ID: %s", max_id)

     metrics = Metrics(args.prom_pgw)
-
-    futures = (
-        *run_read_jobs(args, driver, tb_name, max_id, metrics),
-        *run_write_jobs(args, driver, tb_name, max_id, metrics),
-        run_metric_job(args, metrics),
-    )
+    if SDK_SERVICE_NAME == "sync-python-table":
+        futures = (
+            *run_read_jobs(args, driver, tb_name, max_id, metrics),
+            *run_write_jobs(args, driver, tb_name, max_id, metrics),
+            run_metric_job(args, metrics),
+        )
+    elif SDK_SERVICE_NAME == "sync-python-query":
+        futures = (
+            *run_read_jobs_query(args, driver, tb_name, max_id, metrics),
+            *run_write_jobs_query(args, driver, tb_name, max_id, metrics),
+            run_metric_job(args, metrics),
+        )
+    else:
+        raise ValueError(f"Unsupported service: {SDK_SERVICE_NAME}")

     for future in futures:
         future.join()
@@ -114,7 +128,7 @@ def run_from_args(args):
     table_name = path.join(args.db, args.table_name)

     with ydb.Driver(driver_config) as driver:
-        driver.wait(timeout=5)
+        driver.wait(timeout=300)
         try:
             if args.subcommand == "create":
                 run_create(args, driver, table_name)
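
The new branch in run_slo dispatches on SDK_SERVICE_NAME. An equivalent, slightly more declarative sketch using a dispatch table; this is illustration only, not code from the commit, and it reuses the job factories imported above:

    # Maps each SDK_SERVICE value to its (read, write) job factories.
    JOB_FACTORIES = {
        "sync-python-table": (run_read_jobs, run_write_jobs),
        "sync-python-query": (run_read_jobs_query, run_write_jobs_query),
    }

    def select_futures(args, driver, tb_name, max_id, metrics):
        try:
            read_factory, write_factory = JOB_FACTORIES[SDK_SERVICE_NAME]
        except KeyError:
            raise ValueError(f"Unsupported service: {SDK_SERVICE_NAME}")
        return (
            *read_factory(args, driver, tb_name, max_id, metrics),
            *write_factory(args, driver, tb_name, max_id, metrics),
            run_metric_job(args, metrics),
        )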
