Add Event.grouping field and fill it

An event always has a single (automatically calculated) Grouping associated with it.
We add this info to the Event model: we'll soon display it in the UI, and, as per the
now-removed comment, it's simply the consistent thing to do.
This commit is contained in:
Klaas van Schelven
2025-01-31 15:51:24 +01:00
parent c42aa9118a
commit 9ee623de6b
6 changed files with 72 additions and 4 deletions

View File

@@ -25,9 +25,14 @@ def create_event(project=None, issue=None, timestamp=None, event_data=None):
Max("digest_order"))["digest_order__max"]
issue_digest_order = max_current + 1 if max_current is not None else 1
# we get this via issue because we don't have manual merging yet; once we do, the following is more appropriate:
# Grouping.objects.filter(project=project, grouping_key=grouping_key).get()
grouping = issue.grouping_set.first()
return Event.objects.create(
project=project,
issue=issue,
grouping=grouping,
ingested_at=timestamp,
digested_at=timestamp,
timestamp=timestamp,

View File

@@ -0,0 +1,24 @@
# Generated by Django 4.2.18 on 2025-01-31 14:31
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Schema migration: add the nullable Event.grouping FK to issues.Grouping."""

    dependencies = [
        # The FK targets issues.Grouping, so the issues app's state must be loaded.
        ("issues", "0007_alter_turningpoint_options"),
        ("events", "0013_harmonize_foogested_at"),
    ]

    operations = [
        migrations.AddField(
            model_name="event",
            name="grouping",
            # null=True so pre-existing Event rows remain valid until the
            # follow-up data migration backfills this field; SET_NULL keeps
            # events around when a Grouping is deleted (matching the
            # null-on-delete style of the Event.issue FK).
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="issues.grouping",
            ),
        ),
    ]

View File

@@ -0,0 +1,35 @@
# Generated by Django 4.2.18 on 2025-01-31 14:40
from django.db import migrations
def set_event_grouping(apps, schema_editor):
    """Backfill Event.grouping from each event's issue.

    We can just deduce the grouping from the issue, because manual merging of
    issues has no UI yet, i.e. as it stands an issue always has exactly one
    grouping.

    This is slightly optimized under the assumption that in cases where there
    are very many events, there are orders of magnitude fewer issues, so we
    iterate over issues and bulk-update the events per issue, rather than
    iterating over events and getting the issue for each event.

    Naive impl.::

        Event = apps.get_model("events", "Event")
        for event in Event.objects.all():
            event.grouping = event.issue.grouping_set.first()
            event.save()
    """
    Event = apps.get_model("events", "Event")
    Issue = apps.get_model("issues", "Issue")

    for issue in Issue.objects.all():
        grouping = issue.grouping_set.first()
        # One UPDATE ... WHERE issue_id = ? per issue, instead of one save() per event.
        Event.objects.filter(issue=issue).update(grouping=grouping)
class Migration(migrations.Migration):
    """Data migration: fill the Event.grouping FK introduced in events 0014."""

    dependencies = [
        # The grouping column must exist before we can write to it.
        ("events", "0014_event_grouping"),
        # Grouping (reached via issue.grouping_set) lives in the issues app.
        ("issues", "0007_alter_turningpoint_options"),
    ]

    operations = [
        # reverse_code=noop makes this migration reversible: undoing the
        # backfill needs no work of its own, since reversing 0014 removes the
        # grouping column (and its data) anyway.
        migrations.RunPython(set_event_grouping, migrations.RunPython.noop),
    ]

View File

@@ -61,6 +61,9 @@ class Event(models.Model):
# not actually expected to be null, but we want to be able to delete issues without deleting events (cleanup later)
issue = models.ForeignKey("issues.Issue", blank=False, null=True, on_delete=models.SET_NULL)
# not actually expected to be null
grouping = models.ForeignKey("issues.Grouping", blank=False, null=True, on_delete=models.SET_NULL)
# The docs say:
# > Required. Hexadecimal string representing a uuid4 value. The length is exactly 32 characters. Dashes are not
# > allowed. Has to be lowercase.
@@ -183,7 +186,7 @@ class Event(models.Model):
return get_title_for_exception_type_and_value(self.calculated_type, self.calculated_value)
@classmethod
def from_ingested(cls, event_metadata, digested_at, digest_order, stored_event_count, issue, parsed_data,
def from_ingested(cls, event_metadata, digested_at, digest_order, stored_event_count, issue, grouping, parsed_data,
denormalized_fields):
# 'from_ingested' may be a bit of a misnomer... the full 'from_ingested' is done in 'digest_event' in the views.
@@ -197,6 +200,7 @@ class Event(models.Model):
event_id=event_metadata["event_id"], # the metadata is the envelope's event_id, which takes precedence
project_id=event_metadata["project_id"],
issue=issue,
grouping=grouping,
ingested_at=event_metadata["ingested_at"],
digested_at=digested_at,
data=json.dumps(parsed_data),

View File

@@ -270,14 +270,13 @@ class BaseIngestAPIView(View):
# "what is a limit anyway, if you can go either over it, or work is done before the limit is reached")
evict_for_max_events(project, digested_at, project_stored_event_count)
# NOTE: an event always has a single (automatically calculated) Grouping associated with it. Since we have that
# information available here, we could add it to the Event model.
event, event_created = Event.from_ingested(
event_metadata,
ingested_at,
issue.digested_event_count,
issue_stored_event_count,
issue,
grouping,
event_data,
denormalized_fields,
)

View File

@@ -22,6 +22,7 @@ from bsmain.management.commands.send_json import Command as SendJsonCommand
from compat.dsn import get_header_value
from events.models import Event
from ingest.views import BaseIngestAPIView
from issues.factories import get_or_create_issue
from .models import Issue, IssueStateManager
from .regressions import is_regression, is_regression_2, issue_is_regression
@@ -398,7 +399,7 @@ class ViewTests(TransactionTestCase):
self.user = User.objects.create_user(username='test', password='test')
self.project = Project.objects.create()
ProjectMembership.objects.create(project=self.project, user=self.user)
self.issue = Issue.objects.create(project=self.project, **denormalized_issue_fields())
self.issue, _ = get_or_create_issue(self.project)
self.event = create_event(self.project, self.issue)
self.client.force_login(self.user)