 );
 """
 
-QUERY_READ_QUERY_TEMPLATE = """
-SELECT * FROM `{}` WHERE object_id = $object_id AND object_hash = Digest::NumericHash($object_id);
-"""
-
-QUERY_WRITE_QUERY_TEMPLATE = """
-UPSERT INTO `{}` (
-    object_id, object_hash, payload_str, payload_double, payload_timestamp
-) VALUES (
-    $object_id, Digest::NumericHash($object_id), $payload_str, $payload_double, $payload_timestamp
-);
-"""
+# QUERY_READ_QUERY_TEMPLATE = """
+# SELECT * FROM `{}` WHERE object_id = $object_id AND object_hash = Digest::NumericHash($object_id);
+# """
+
+# QUERY_WRITE_QUERY_TEMPLATE = """
+# UPSERT INTO `{}` (
+#     object_id, object_hash, payload_str, payload_double, payload_timestamp
+# ) VALUES (
+#     $object_id, Digest::NumericHash($object_id), $payload_str, $payload_double, $payload_timestamp
+# );
+# """
 
 logger = logging.getLogger(__name__)
 
@@ -187,7 +187,7 @@ def check_result(result):
 def run_read_jobs_query(args, driver, tb_name, max_id, metrics):
     logger.info("Start read jobs over query service")
 
-    read_q = QUERY_READ_QUERY_TEMPLATE.format(tb_name)
+    read_q = READ_QUERY_TEMPLATE.format(tb_name)
 
     read_limiter = RateLimiter(max_calls=args.read_rps, period=1)
     futures = []
@@ -308,7 +308,7 @@ def check_result(result):
 def run_write_jobs_query(args, driver, tb_name, max_id, metrics):
     logger.info("Start write jobs for query service")
 
-    write_q = QUERY_WRITE_QUERY_TEMPLATE.format(tb_name)
+    write_q = WRITE_QUERY_TEMPLATE.format(tb_name)
 
     write_limiter = RateLimiter(max_calls=args.write_rps, period=1)
     row_generator = RowGenerator(max_id)
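
For context, a minimal sketch of what the deduplication amounts to after this change: the query-service jobs now format the pre-existing READ_QUERY_TEMPLATE and WRITE_QUERY_TEMPLATE instead of the commented-out QUERY_* duplicates. The template bodies, the build_queries helper, and the "slo_table" name below are assumptions for illustration, not taken from this diff.

# Sketch only: the template bodies are assumed to mirror the QUERY_* duplicates
# that this commit comments out.
READ_QUERY_TEMPLATE = """
SELECT * FROM `{}` WHERE object_id = $object_id AND object_hash = Digest::NumericHash($object_id);
"""

WRITE_QUERY_TEMPLATE = """
UPSERT INTO `{}` (
    object_id, object_hash, payload_str, payload_double, payload_timestamp
) VALUES (
    $object_id, Digest::NumericHash($object_id), $payload_str, $payload_double, $payload_timestamp
);
"""


def build_queries(tb_name):
    # Same substitution that run_read_jobs_query / run_write_jobs_query now perform.
    read_q = READ_QUERY_TEMPLATE.format(tb_name)
    write_q = WRITE_QUERY_TEMPLATE.format(tb_name)
    return read_q, write_q


if __name__ == "__main__":
    # "slo_table" is a placeholder table name for illustration.
    read_q, write_q = build_queries("slo_table")
    print(read_q)
    print(write_q)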