Commit b3f16d3

feat(spans): Add timers for process_spans and process_batch (#94159)
Adds more timings to give additional insight into the overhead of loading payloads, evaluating killswitches, and batch processing.
Parent: 71fe86d

2 files changed: 6 additions, 4 deletions
src/sentry/spans/buffer.py

Lines changed: 1 addition & 0 deletions
@@ -170,6 +170,7 @@ def __reduce__(self):
     def _get_span_key(self, project_and_trace: str, span_id: str) -> bytes:
         return f"span-buf:z:{{{project_and_trace}}}:{span_id}".encode("ascii")
 
+    @metrics.wraps("spans.buffer.process_spans")
     def process_spans(self, spans: Sequence[Span], now: int):
         """
         :param spans: List of to-be-ingested spans.
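The decorator added here, together with the `metrics.timer` block added to the consumer below, both come from `sentry.utils.metrics`: `metrics.wraps` reports a timing for every call of the wrapped function, and `metrics.timer` times a single `with` block. A minimal sketch of that pattern, using a print statement as a stand-in for the real metrics backend (illustrative only, not Sentry's actual implementation):

```python
import functools
import time
from contextlib import contextmanager


@contextmanager
def timer(key: str):
    # Time the enclosed block and report the elapsed duration under `key`.
    start = time.monotonic()
    try:
        yield
    finally:
        elapsed_ms = (time.monotonic() - start) * 1000
        print(f"timing {key}: {elapsed_ms:.2f}ms")  # stand-in for the real backend


def wraps(key: str):
    # Decorator form: time every call of the decorated function under `key`.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with timer(key):
                return func(*args, **kwargs)

        return wrapper

    return decorator


@wraps("spans.buffer.process_batch")
def process_batch(payloads):
    # Only the decode step sits inside the inner timer, mirroring the change below.
    with timer("spans.buffer.process_batch.decode"):
        decoded = [p.decode("utf-8") for p in payloads]
    return decoded


if __name__ == "__main__":
    process_batch([b"span-1", b"span-2"])
```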

src/sentry/spans/consumers/process/factory.py

Lines changed: 5 additions & 4 deletions
@@ -18,6 +18,7 @@
 from sentry import killswitches
 from sentry.spans.buffer import Span, SpansBuffer
 from sentry.spans.consumers.process.flusher import SpanFlusher
+from sentry.utils import metrics
 from sentry.utils.arroyo import MultiprocessingPool, run_task_with_multiprocessing
 
 logger = logging.getLogger(__name__)
@@ -122,6 +123,7 @@ def shutdown(self) -> None:
         self.__pool.close()
 
 
+@metrics.wraps("spans.buffer.process_batch")
 def process_batch(
     buffer: SpansBuffer,
     values: Message[ValuesBatch[tuple[int, KafkaPayload]]],
@@ -136,17 +138,16 @@ def process_batch(
         if min_timestamp is None or timestamp < min_timestamp:
            min_timestamp = timestamp
 
-        val = cast(SpanEvent, rapidjson.loads(payload.value))
-
-        partition_id = value.partition.index
+        with metrics.timer("spans.buffer.process_batch.decode"):
+            val = cast(SpanEvent, rapidjson.loads(payload.value))
 
         if killswitches.killswitch_matches_context(
             "spans.drop-in-buffer",
             {
                 "org_id": val.get("organization_id"),
                 "project_id": val.get("project_id"),
                 "trace_id": val.get("trace_id"),
-                "partition_id": partition_id,
+                "partition_id": value.partition.index,
             },
         ):
             continue
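Because `process_batch` is wrapped as a whole while only the `rapidjson` decode inside the loop gets an inner `metrics.timer`, the new `spans.buffer.process_batch.decode` timing can be read against the overall `spans.buffer.process_batch` timing to see how much of the batch overhead is payload decoding versus killswitch checks and buffering; `spans.buffer.process_spans` is reported separately from the buffer side. The `partition_id` local was inlined into the killswitch context so that only the decode call sits inside the timer block.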
