Skip to content

Commit

Permalink
ref(sdk): Remove excessive json.loads spans (#64883)
Browse files Browse the repository at this point in the history
### Summary
Many of our transactions use up their span limit immediately because json.loads creates a span for every field call, etc.

![Screenshot 2024-02-08 at 11 25 04
AM](https://github.com/getsentry/sentry/assets/6111995/cc5181cf-845a-4546-9703-8989c68d0a52)

I ran the devserver, tried a couple of endpoints, and removed spans for
anything I saw spamming. Feel free to re-add a span if you find a Python
profile for your transaction that shows json.loads taking up a
significant amount of time; I'm just trying to get most of the ones that
concern me at the moment without opening a bunch of PRs one by one.

In the future we should probably consider moving these over to a metric,
since the span offers no additional information, so we can avoid this
altogether. At the moment, because json can be called at startup, there is
an issue with config not yet being loaded, which means metrics cannot be
used here; that would need to be resolved first.
  • Loading branch information
k-fish committed Feb 8, 2024
1 parent 6013463 commit 270094f
Show file tree
Hide file tree
Showing 6 changed files with 10 additions and 8 deletions.
6 changes: 3 additions & 3 deletions src/sentry/db/models/fields/jsonfield.py
Expand Up @@ -83,8 +83,8 @@ def get_default(self):
if callable(default):
default = default()
if isinstance(default, str):
return json.loads(default)
return json.loads(json.dumps(default))
return json.loads(default, skip_trace=True)
return json.loads(json.dumps(default), skip_trace=True)
return super().get_default()

def get_internal_type(self):
Expand All @@ -101,7 +101,7 @@ def to_python(self, value):
if self.blank:
return ""
try:
value = json.loads(value)
value = json.loads(value, skip_trace=True)
except ValueError:
msg = self.error_messages["invalid"] % value
raise ValidationError(msg)
Expand Down
2 changes: 1 addition & 1 deletion src/sentry/db/models/fields/picklefield.py
Expand Up @@ -26,6 +26,6 @@ def to_python(self, value):
if value is None:
return None
try:
return json.loads(value)
return json.loads(value, skip_trace=True)
except (ValueError, TypeError):
return super().to_python(value)
4 changes: 3 additions & 1 deletion src/sentry/ingest/billing_metrics_consumer.py
Expand Up @@ -85,7 +85,9 @@ def submit(self, message: Message[KafkaPayload]) -> None:
self.__next_step.submit(message)

def _get_payload(self, message: Message[KafkaPayload]) -> GenericMetric:
payload = json.loads(message.payload.value.decode("utf-8"), use_rapid_json=True)
payload = json.loads(
message.payload.value.decode("utf-8"), use_rapid_json=True, skip_trace=True
)
return cast(GenericMetric, payload)

def _count_processed_items(self, generic_metric: GenericMetric) -> Mapping[DataCategory, int]:
Expand Down
2 changes: 1 addition & 1 deletion src/sentry/ingest/consumer/processors.py
Expand Up @@ -106,7 +106,7 @@ def process_event(message: IngestMessage, project: Project) -> None:
# serializing it again.
# XXX: Do not use CanonicalKeyDict here. This may break preprocess_event
# which assumes that data passed in is a raw dictionary.
data = json.loads(payload, use_rapid_json=True)
data = json.loads(payload, use_rapid_json=True, skip_trace=True)
if project_id == settings.SENTRY_PROJECT:
metrics.incr(
"internal.captured.ingest_consumer.parsed",
Expand Down
2 changes: 1 addition & 1 deletion src/sentry/utils/codecs.py
Expand Up @@ -75,7 +75,7 @@ def encode(self, value: JSONData) -> str:
return str(json.dumps(value))

def decode(self, value: str) -> JSONData:
return json.loads(value)
return json.loads(value, skip_trace=True)


class ZlibCodec(Codec[bytes, bytes]):
Expand Down
2 changes: 1 addition & 1 deletion src/sentry/utils/snuba.py
Expand Up @@ -958,7 +958,7 @@ def _bulk_snuba_query(
for index, item in enumerate(query_results):
response, _, reverse = item
try:
body = json.loads(response.data)
body = json.loads(response.data, skip_trace=True)
if SNUBA_INFO:
if "sql" in body:
print( # NOQA: only prints when an env variable is set
Expand Down

0 comments on commit 270094f

Please sign in to comment.