Commit d209235

fix(issue-platform): Fix get_event_by_id to correctly return occurrence information for transactions (#49059)
While cleaning up some flags I noticed that we weren't properly handling transactions with performance issues in `get_event_by_id`: we'd convert them to a `GroupEvent` but never fetch the occurrence info from Snuba, so the occurrence was unavailable.
1 parent ff94182 commit d209235

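To illustrate the behavior being fixed, here is a minimal test-style sketch; the class name and fixture usage are illustrative and not part of this commit. It stores a performance-issue transaction and fetches it back by ID, expecting the occurrence to be populated on the returned GroupEvent.

from sentry.eventstore.snuba.backend import SnubaEventStorage
from sentry.testutils import SnubaTestCase, TestCase
from sentry.testutils.cases import PerformanceIssueTestCase


class OccurrenceLookupSketch(TestCase, SnubaTestCase, PerformanceIssueTestCase):
    def test_transaction_occurrence_is_loaded(self):
        # Store a transaction that triggers an N+1 performance issue.
        group_event = self.create_performance_issue()

        # Fetch it back by event ID; before this commit the result was a
        # GroupEvent whose occurrence had never been loaded from Snuba.
        event = SnubaEventStorage().get_event_by_id(
            group_event.project_id,
            group_event.event_id,
            group_id=group_event.group.id,
        )
        assert event.occurrence is not None
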
File tree: 6 files changed (+140, -121 lines)


src/sentry/api/endpoints/project_event_details.py
6 additions & 15 deletions

@@ -9,7 +9,6 @@
 from sentry.api.bases.project import ProjectEndpoint
 from sentry.api.serializers import IssueEventSerializer, serialize
 from sentry.eventstore.models import Event
-from sentry.issues.query import apply_performance_conditions
 from sentry.models.project import Project


@@ -21,22 +20,14 @@ def wrap_event_response(request_user: Any, event: Event, project: Project, envir
     prev_event_id = None

     if event.group_id:
-        if event.get_event_type() == "transaction":
-            conditions = apply_performance_conditions([], event.group)
-            _filter = eventstore.Filter(
-                conditions=conditions,
-                project_ids=[event.project_id],
-            )
-        else:
-            conditions = [["event.type", "!=", "transaction"]]
-            _filter = eventstore.Filter(
-                conditions=conditions,
-                project_ids=[event.project_id],
-                group_ids=[event.group_id],
-            )
-
+        conditions = []
         if environments:
             conditions.append(["environment", "IN", environments])
+        _filter = eventstore.Filter(
+            conditions=conditions,
+            project_ids=[event.project_id],
+            group_ids=[event.group_id],
+        )

         prev_ids, next_ids = eventstore.get_adjacent_event_ids(event, filter=_filter)

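The net effect of this hunk is that adjacent-event pagination no longer special-cases transactions: occurrences created through the issue platform now carry a real group_id, so a single group-scoped filter works for every event type. A condensed sketch of the resulting logic; the helper name is mine, the `from sentry import eventstore` import is assumed, and the surrounding endpoint plumbing is omitted:

from sentry import eventstore


def adjacent_event_ids(event, environments):
    # Build one group-scoped filter, regardless of event type.
    conditions = []
    if environments:
        conditions.append(["environment", "IN", environments])
    _filter = eventstore.Filter(
        conditions=conditions,
        project_ids=[event.project_id],
        group_ids=[event.group_id],
    )
    # Returns (prev_ids, next_ids) for issue-detail pagination.
    return eventstore.get_adjacent_event_ids(event, filter=_filter)
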
src/sentry/eventstore/snuba/backend.py
21 additions & 14 deletions

@@ -214,15 +214,12 @@ def get_event_by_id(
         if len(event.data) == 0:
             return None

-        if group_id is not None and event.get_event_type() != "generic":
-            # Set passed group_id if not a transaction
-            if event.get_event_type() == "transaction" and not skip_transaction_groupevent:
-                logger.warning("eventstore.passed-group-id-for-transaction")
-                return event.for_group(Group.objects.get(id=group_id))
-            else:
-                event.group_id = group_id
-
-        elif event.get_event_type() != "transaction":
+        if group_id is not None and (
+            event.get_event_type() == "error"
+            or (event.get_event_type() == "transaction" and skip_transaction_groupevent)
+        ):
+            event.group_id = group_id
+        elif event.get_event_type() != "transaction" or group_id:
             # Load group_id from Snuba if not a transaction
             raw_query_kwargs = {}
             if event.datetime > timezone.now() - timedelta(hours=1):

@@ -233,16 +230,21 @@
                     ["timestamp", ">", datetime.fromtimestamp(random.randint(0, 1000000000))]
                 ]
             dataset = (
-                Dataset.IssuePlatform if event.get_event_type() == "generic" else Dataset.Events
+                Dataset.IssuePlatform
+                if event.get_event_type() in ("transaction", "generic")
+                else Dataset.Events
             )
             try:
                 tenant_ids = tenant_ids or {"organization_id": event.project.organization_id}
+                filter_keys = {"project_id": [project_id], "event_id": [event_id]}
+                if group_id:
+                    filter_keys["group_id"] = [group_id]
                 result = snuba.raw_query(
                     dataset=dataset,
                     selected_columns=self.__get_columns(dataset),
                     start=event.datetime,
                     end=event.datetime + timedelta(seconds=1),
-                    filter_keys={"project_id": [project_id], "event_id": [event_id]},
+                    filter_keys=filter_keys,
                     limit=1,
                     referrer="eventstore.get_event_by_id_nodestore",
                     tenant_ids=tenant_ids,

@@ -274,13 +276,18 @@ def get_event_by_id(
                 # Inject the snuba data here to make sure any snuba columns are available
                 event._snuba_data = result["data"][0]

+        # Set passed group_id if not a transaction
+        if event.get_event_type() == "transaction" and not skip_transaction_groupevent and group_id:
+            logger.warning("eventstore.passed-group-id-for-transaction")
+            return event.for_group(Group.objects.get(id=group_id))
+
         return event

     def _get_dataset_for_event(self, event):
-        if event.get_event_type() == "transaction":
-            return snuba.Dataset.Transactions
-        elif event.get_event_type() == "generic":
+        if getattr(event, "occurrence", None) or event.get_event_type() == "generic":
             return snuba.Dataset.IssuePlatform
+        elif event.get_event_type() == "transaction":
+            return snuba.Dataset.Transactions
         else:
             return snuba.Dataset.Discover

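In short, get_event_by_id now resolves the dataset from the event type (transactions and generic events read from Dataset.IssuePlatform), scopes the Snuba lookup by group_id when one is passed, and only wraps a transaction into a GroupEvent after the Snuba row, including its occurrence columns, has been injected. A hedged usage sketch of the two call shapes; the wrapper function and the placeholder IDs are mine, not part of the commit:

from sentry.eventstore.snuba.backend import SnubaEventStorage


def fetch_transaction(project_id: int, event_id: str, group_id: int):
    backend = SnubaEventStorage()

    # With a group_id, a performance-issue transaction comes back as a GroupEvent
    # whose occurrence data was looked up in the IssuePlatform dataset.
    group_event = backend.get_event_by_id(project_id, event_id, group_id=group_id)

    # skip_transaction_groupevent=True keeps the plain Event and simply sets
    # event.group_id instead of wrapping it.
    plain_event = backend.get_event_by_id(
        project_id, event_id, group_id=group_id, skip_transaction_groupevent=True
    )
    return group_event, plain_event
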
src/sentry/testutils/cases.py
40 additions & 13 deletions

@@ -83,6 +83,7 @@
 from sentry.auth.superuser import ORG_ID as SU_ORG_ID
 from sentry.auth.superuser import Superuser
 from sentry.event_manager import EventManager
+from sentry.eventstore.models import Event
 from sentry.eventstream.snuba import SnubaEventStream
 from sentry.issues.grouptype import NoiseConfig, PerformanceNPlusOneGroupType
 from sentry.issues.ingest import send_issue_occurrence_to_eventstream

@@ -106,6 +107,7 @@
     IdentityStatus,
     NotificationSetting,
     Organization,
+    Project,
     ProjectOption,
     Release,
     ReleaseCommit,

@@ -136,6 +138,7 @@
 from sentry.utils import json
 from sentry.utils.auth import SsoSession
 from sentry.utils.json import dumps_htmlsafe
+from sentry.utils.performance_issues.performance_detection import detect_performance_problems
 from sentry.utils.pytest.selenium import Browser
 from sentry.utils.retries import TimedRetryPolicy
 from sentry.utils.samples import load_data

@@ -486,32 +489,55 @@ class TransactionTestCase(BaseTestCase, TransactionTestCase):

 class PerformanceIssueTestCase(BaseTestCase):
     def create_performance_issue(
-        self, tags=None, contexts=None, fingerprint="group1", transaction=None
+        self,
+        tags=None,
+        contexts=None,
+        fingerprint=None,
+        transaction=None,
+        event_data=None,
+        issue_type=None,
+        noise_limit=0,
+        project_id=None,
+        detector_option="performance.issues.n_plus_one_db.problem-creation",
     ):
-        event_data = load_data(
-            "transaction-n-plus-one",
-            timestamp=before_now(minutes=10),
-            fingerprint=[f"{PerformanceNPlusOneGroupType.type_id}-{fingerprint}"],
-        )
+        if issue_type is None:
+            issue_type = PerformanceNPlusOneGroupType
+        if event_data is None:
+            event_data = load_data(
+                "transaction-n-plus-one",
+                timestamp=before_now(minutes=10),
+            )
         if tags is not None:
             event_data["tags"] = tags
         if contexts is not None:
             event_data["contexts"] = contexts
         if transaction:
             event_data["transaction"] = transaction
+        if project_id is None:
+            project_id = self.project.id

         perf_event_manager = EventManager(event_data)
         perf_event_manager.normalize()

+        def detect_performance_problems_interceptor(data: Event, project: Project):
+            perf_problems = detect_performance_problems(data, project)
+            if fingerprint:
+                for perf_problem in perf_problems:
+                    perf_problem.fingerprint = fingerprint
+            return perf_problems
+
         with mock.patch(
             "sentry.issues.ingest.send_issue_occurrence_to_eventstream",
             side_effect=send_issue_occurrence_to_eventstream,
-        ) as mock_eventstream, mock.patch.object(
-            PerformanceNPlusOneGroupType, "noise_config", new=NoiseConfig(0, timedelta(minutes=1))
+        ) as mock_eventstream, mock.patch(
+            "sentry.event_manager.detect_performance_problems",
+            side_effect=detect_performance_problems_interceptor,
+        ), mock.patch.object(
+            issue_type, "noise_config", new=NoiseConfig(noise_limit, timedelta(minutes=1))
         ), override_options(
             {
                 "performance.issues.all.problem-detection": 1.0,
-                "performance.issues.n_plus_one_db.problem-creation": 1.0,
+                detector_option: 1.0,
                 "performance.issues.send_to_issues_platform": True,
                 "performance.issues.create_issues_through_platform": True,
             }

@@ -520,10 +546,11 @@ def create_performance_issue(
                 "projects:performance-suspect-spans-ingestion",
             ]
         ):
-            event = perf_event_manager.save(self.project.id)
-            group_event = event.for_group(mock_eventstream.call_args[0][2].group)
-            group_event.occurrence = mock_eventstream.call_args[0][1]
-            return group_event
+            event = perf_event_manager.save(project_id)
+            if mock_eventstream.call_args:
+                event = event.for_group(mock_eventstream.call_args[0][2].group)
+                event.occurrence = mock_eventstream.call_args[0][1]
+            return event


 class APITestCase(BaseTestCase, BaseAPITestCase):

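create_performance_issue is now parameterized: a test can supply its own event payload, target project, issue type, fingerprint override, noise limit, and detector option, with the fingerprint override applied by intercepting detect_performance_problems. A hedged usage sketch modeled on the test changes further down; the class name is mine, and the fingerprint suffix and event ID are illustrative values taken from those tests:

from sentry.issues.grouptype import PerformanceNPlusOneGroupType
from sentry.testutils import SnubaTestCase, TestCase
from sentry.testutils.cases import PerformanceIssueTestCase
from sentry.testutils.helpers.datetime import before_now
from sentry.utils.samples import load_data


class CreatePerformanceIssueSketch(TestCase, SnubaTestCase, PerformanceIssueTestCase):
    def test_custom_payload(self):
        # Supply a custom payload and project instead of the old fixed defaults.
        event_data = load_data(
            "transaction-n-plus-one",
            timestamp=before_now(minutes=10),
            fingerprint=[f"{PerformanceNPlusOneGroupType.type_id}-group3"],
        )
        event_data["event_id"] = "e" * 32

        group_event = self.create_performance_issue(
            event_data=event_data, project_id=self.project.id
        )
        # Still returns a GroupEvent with its occurrence attached once the
        # eventstream has been called for the detected problem.
        assert group_event.occurrence is not None
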
src/sentry/utils/samples.py
4 additions & 0 deletions

@@ -114,6 +114,7 @@ def load_data(
     spans=None,
     trace_context=None,
     fingerprint=None,
+    event_id=None,
 ):
     # NOTE: Before editing this data, make sure you understand the context
     # in which its being used. It is NOT only used for local development and

@@ -256,6 +257,9 @@

         data["fingerprint"] = fingerprint

+    if event_id is not None:
+        data["event_id"] = event_id
+
     data["platform"] = platform
     # XXX: Message is a legacy alias for logentry. Do not overwrite if set.
     if "message" not in data:

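load_data now accepts an explicit event_id, which the updated tests use to store transactions with predictable IDs. A small sketch; the hex ID is arbitrary:

from sentry.utils.samples import load_data

# Pin the event ID so a test can later fetch the exact same event back.
data = load_data("transaction-n-plus-one", event_id="e" * 32)
assert data["event_id"] == "e" * 32
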
tests/sentry/eventstore/snuba/test_backend.py
28 additions & 23 deletions

@@ -3,20 +3,17 @@
 from sentry.eventstore.base import Filter
 from sentry.eventstore.models import Event
 from sentry.eventstore.snuba.backend import SnubaEventStorage
-from sentry.issues.grouptype import (
-    PerformanceRenderBlockingAssetSpanGroupType,
-    PerformanceSlowDBQueryGroupType,
-)
-from sentry.issues.query import apply_performance_conditions
+from sentry.issues.grouptype import PerformanceNPlusOneGroupType
 from sentry.testutils import SnubaTestCase, TestCase
+from sentry.testutils.cases import PerformanceIssueTestCase
 from sentry.testutils.helpers.datetime import before_now, iso_format
 from sentry.testutils.silo import region_silo_test
 from sentry.utils import snuba
 from sentry.utils.samples import load_data


 @region_silo_test(stable=True)
-class SnubaEventStorageTest(TestCase, SnubaTestCase):
+class SnubaEventStorageTest(TestCase, SnubaTestCase, PerformanceIssueTestCase):
     def setUp(self):
         super().setUp()
         self.min_ago = iso_format(before_now(minutes=1))

@@ -66,23 +63,27 @@ def setUp(self):
         self.transaction_event = self.store_event(data=event_data, project_id=self.project1.id)

         event_data_2 = load_data(
-            platform="transaction",
-            fingerprint=[f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group3"],
+            platform="transaction-n-plus-one",
+            fingerprint=[f"{PerformanceNPlusOneGroupType.type_id}-group3"],
         )
         event_data_2["timestamp"] = iso_format(before_now(seconds=30))
         event_data_2["start_timestamp"] = iso_format(before_now(seconds=31))
         event_data_2["event_id"] = "e" * 32

-        self.transaction_event_2 = self.store_event(data=event_data_2, project_id=self.project2.id)
+        self.transaction_event_2 = self.create_performance_issue(
+            event_data=event_data_2, project_id=self.project2.id
+        )

         event_data_3 = load_data(
-            "transaction", fingerprint=[f"{PerformanceSlowDBQueryGroupType.type_id}-group3"]
+            "transaction-n-plus-one", fingerprint=[f"{PerformanceNPlusOneGroupType.type_id}-group3"]
         )
         event_data_3["timestamp"] = iso_format(before_now(seconds=30))
         event_data_3["start_timestamp"] = iso_format(before_now(seconds=31))
         event_data_3["event_id"] = "f" * 32

-        self.transaction_event_3 = self.store_event(data=event_data_3, project_id=self.project2.id)
+        self.transaction_event_3 = self.create_performance_issue(
+            event_data=event_data_3, project_id=self.project2.id
+        )

         """
         event_data_4 = load_data("transaction")

@@ -182,23 +183,23 @@ def test_get_event_by_id_cached(self):
         with mock.patch("sentry.eventstore.snuba.backend.Event") as mock_event:
             dummy_event = Event(
                 project_id=self.project2.id,
-                event_id="f" * 32,
-                data={"something": "hi", "timestamp": self.min_ago},
+                event_id="1" * 32,
+                data={"something": "hi", "timestamp": self.min_ago, "type": "error"},
             )
             mock_event.return_value = dummy_event
-            event = self.eventstore.get_event_by_id(self.project2.id, "f" * 32)
+            event = self.eventstore.get_event_by_id(self.project2.id, "1" * 32)
             # Result of query should be None
             assert event is None

         # Now we store the event properly, so it will exist in Snuba.
         self.store_event(
-            data={"event_id": "f" * 32, "timestamp": self.min_ago},
+            data={"event_id": "1" * 32, "timestamp": self.min_ago, "type": "error"},
             project_id=self.project2.id,
         )

         # Make sure that the negative cache isn't causing the event to not show up
-        event = self.eventstore.get_event_by_id(self.project2.id, "f" * 32)
-        assert event.event_id == "f" * 32
+        event = self.eventstore.get_event_by_id(self.project2.id, "1" * 32)
+        assert event.event_id == "1" * 32
         assert event.project_id == self.project2.id
         assert event.group_id == event.group.id

@@ -299,19 +300,23 @@ def test_adjacent_event_ids_same_timestamp(self):
         assert next_event is None

     def test_transaction_get_next_prev_event_id(self):
-        group = self.transaction_event_2.groups[0]
+        group = self.transaction_event_2.group
         _filter = Filter(
             project_ids=[self.project2.id],
-            conditions=apply_performance_conditions([], group),
+            group_ids=[group.id],
+        )
+        event = self.eventstore.get_event_by_id(
+            self.project2.id, self.transaction_event_3.event_id, group_id=group.id
         )
-        event = self.eventstore.get_event_by_id(self.project2.id, "f" * 32)
         prev_event, next_event = self.eventstore.get_adjacent_event_ids(event, filter=_filter)

-        assert prev_event == (str(self.project2.id), "e" * 32)
+        assert prev_event == (str(self.project2.id), self.transaction_event_2.event_id)
         assert next_event is None

-        event = self.eventstore.get_event_by_id(self.project2.id, "e" * 32)
+        event = self.eventstore.get_event_by_id(
+            self.project2.id, self.transaction_event_2.event_id, group_id=group.id
+        )
         prev_event, next_event = self.eventstore.get_adjacent_event_ids(event, filter=_filter)

         assert prev_event is None
-        assert next_event == (str(self.project2.id), "f" * 32)
+        assert next_event == (str(self.project2.id), self.transaction_event_3.event_id)
