Merge pull request #211 from bugsink/canonical-api

"Canonical" (Bugsink-specific) API: first version
This commit is contained in:
Klaas van Schelven
2025-09-15 16:41:25 +02:00
committed by GitHub
37 changed files with 2021 additions and 24 deletions

View File

@@ -1,5 +1,6 @@
from django.db.backends.signals import connection_created
from django.contrib.auth.management.commands.createsuperuser import Command as CreateSuperUserCommand
from drf_spectacular.extensions import OpenApiAuthenticationExtension
def set_pragmas(sender, connection, **kwargs):
@@ -40,3 +41,16 @@ def _get_input_message(self, field, default=None):
# Monkeypatch Django's createsuperuser prompt: keep a reference to the original
# implementation, then swap in our own _get_input_message (defined above).
unpatched_get_input_message = CreateSuperUserCommand._get_input_message
CreateSuperUserCommand._get_input_message = _get_input_message
class BearerTokenAuthenticationExtension(OpenApiAuthenticationExtension):
    # Will be auto-discovered b/c in __init__.py and subclass of OpenApiAuthenticationExtension
    # Teaches drf-spectacular how to document BearerTokenAuthentication in the
    # generated OpenAPI schema (it cannot introspect custom auth classes).
    target_class = 'bugsink.authentication.BearerTokenAuthentication'
    name = 'BearerAuth'

    def get_security_definition(self, auto_schema):
        # Standard OpenAPI 3 "http bearer" security scheme definition.
        return {
            'type': 'http',
            'scheme': 'bearer',
            'bearerFormat': 'token',
        }

22
bugsink/api_fields.py Normal file
View File

@@ -0,0 +1,22 @@
from rest_framework import serializers
def make_enum_field(enum_cls, *, name=None):
    """Build a ChoiceField subclass mapping lowercased member names <-> values.

    The API exposes lowercased enum member names (e.g. "joinable"); internally
    the field converts those to/from the enum member's value. The generated
    class is named `name`, or "<EnumName>Field" by default (this matters for
    drf-spectacular's enum naming).
    """
    class EnumChoiceField(serializers.ChoiceField):
        _enum_cls = enum_cls

        def __init__(self, **kwargs):
            # lowercase-name -> value, and the inverse for serialization
            self._to_value = {member.name.lower(): member.value for member in enum_cls}
            self._to_name = {value: key for key, value in self._to_value.items()}
            super().__init__(choices=self._to_value, **kwargs)

        def to_representation(self, value):
            # value -> lowercased member name
            return self._to_name[value]

        def to_internal_value(self, data):
            # ChoiceField validates membership; then map name -> value
            return self._to_value[super().to_internal_value(data)]

    EnumChoiceField.__name__ = name or f"{enum_cls.__name__}Field"
    return EnumChoiceField

50
bugsink/api_mixins.py Normal file
View File

@@ -0,0 +1,50 @@
from rest_framework.exceptions import ValidationError
from bugsink.decorators import atomic_for_request_method
class AtomicRequestMixin:
    """Run the whole request/response cycle of a DRF view in a transaction.

    Reuses bugsink's atomic_for_request_method decorator (default DB alias,
    i.e. using=None) so API views get the same atomicity semantics as the
    regular web views.
    """
    def dispatch(self, request, *args, **kwargs):
        # wrap per-call: super().dispatch is a bound method of this instance
        wrapped = atomic_for_request_method(super().dispatch, using=None)
        return wrapped(request, *args, **kwargs)
class ExpandableSerializerMixin:
    """Serializer mixin: inline related objects named in the `expand` kwarg.

    Subclasses declare `expandable_fields`, a mapping of field name to the
    serializer class used to render the related object when expansion is
    requested.
    """
    expandable_fields = {}

    def __init__(self, *args, **kwargs):
        # consume the (non-DRF) "expand" kwarg before DRF sees it
        self._expand = set(kwargs.pop("expand", []))
        super().__init__(*args, **kwargs)

    def to_representation(self, instance):
        rendered = super().to_representation(instance)
        for field_name, related_serializer in self.expandable_fields.items():
            if field_name not in self._expand:
                continue
            rendered[field_name] = related_serializer(getattr(instance, field_name)).data
        return rendered
class ExpandViewSetMixin:
    """
    Mixin for ViewSets that support ?expand=...
    Requires the serializer class to define expandable_fields.
    """
    def get_serializer(self, *args, **kwargs):
        requested = self.request.query_params.getlist("expand")
        if requested:
            # accept both ?expand=a&expand=b and the comma form ?expand=a,b
            if len(requested) == 1 and "," in requested[0]:
                requested = requested[0].split(",")

            expandable = getattr(self.get_serializer_class(), "expandable_fields", None)
            if expandable is None:
                raise ValidationError({"expand": ["Expansions are not supported on this endpoint."]})

            unknown = [field for field in requested if field not in expandable]
            if unknown:
                raise ValidationError({"expand": [f"Unknown field: {name}" for name in unknown]})

            kwargs["expand"] = requested
        return super().get_serializer(*args, **kwargs)

39
bugsink/api_pagination.py Normal file
View File

@@ -0,0 +1,39 @@
from rest_framework.pagination import CursorPagination
from rest_framework.exceptions import ValidationError
class AscDescCursorPagination(CursorPagination):
    """
    Cursor-based paginator that supports `?order=asc|desc`.
    Each view sets:
        base_ordering = ("field",) or ("field1", "field2")
        default_direction = "asc" | "desc"
        page_size = <int>
    """
    # note to self: CursorPagination is the "obviously right" choice for navigating large datasets because it scales
    # well; I'm not entirely sure why I didn't use the non-API equivalent of this for the web UI (in issues/views.py)
    # when I ran into performance problems in the past. I suspect it's (at least partially) because the "cursor"
    # approach precludes jumping to arbitrary pages; another part might be that I assumed that "endless scrolling" (by
    # clicking 'next page' repeatedly) is an unlikely use case anyway, especially since I already generally have very
    # large page sizes; in short, I probably didn't think that the performance problem of "navigating to a large offset"
    # was likely to happen in practice (as opposed to: count breaking down at scale, which I did see in practice and
    # solved). For now: we'll keep this for the API only, and see how it goes.
    base_ordering = None  # must be overridden by subclasses (see RuntimeError below)
    default_direction = "desc"

    def get_ordering(self, request, queryset, view):
        # Returns the ordering tuple used by CursorPagination; direction flips
        # every field in base_ordering, not just the primary one.
        order_param = request.query_params.get("order")
        if order_param and order_param not in ("asc", "desc"):
            raise ValidationError({"order": ["Must be 'asc' or 'desc'."]})
        direction = order_param or self.default_direction
        if self.base_ordering is None:
            # programming error (misconfigured subclass), not client error
            raise RuntimeError("AscDescCursorPagination requires base_ordering to be set.")
        ordering = []
        for field in self.base_ordering:
            ordering.append(f"-{field}" if direction == "desc" else field)
        return ordering

31
bugsink/authentication.py Normal file
View File

@@ -0,0 +1,31 @@
from django.contrib.auth.models import AnonymousUser
from rest_framework.authentication import BaseAuthentication
from rest_framework import exceptions
from bsmain.models import AuthToken
class BearerTokenAuthentication(BaseAuthentication):
    """
    Accepts: Authorization: Bearer <40-hex>
    Returns (AnonymousUser, AuthToken) on success; leaves request.user anonymous.
    """
    keyword = "Bearer"

    def authenticate(self, request):
        prefix = f"{self.keyword} "
        header = request.headers.get("Authorization")
        if not header or not header.startswith(prefix):
            # not our scheme: decline so other authenticators may handle it
            return None

        candidate = header[len(prefix):].strip()
        looks_valid = len(candidate) == 40 and all(c in "0123456789abcdef" for c in candidate)
        if not looks_valid:
            raise exceptions.AuthenticationFailed("Invalid Bearer token.")

        token_obj = AuthToken.objects.filter(token=candidate).first()
        if not token_obj:
            raise exceptions.AuthenticationFailed("Invalid Bearer token.")

        return (AnonymousUser(), token_obj)

    def authenticate_header(self, request):
        # tells DRF what to send in WWW-Authenticate on 401 responses, hinting the required auth scheme
        return self.keyword

10
bugsink/permissions.py Normal file
View File

@@ -0,0 +1,10 @@
from rest_framework.permissions import BasePermission
from bsmain.models import AuthToken
class IsGlobalAuthenticated(BasePermission):
    """Allows access only to authenticated users with a valid (global) AuthToken."""
    def has_permission(self, request, view):
        # request.auth is the AuthToken instance set by BearerTokenAuthentication;
        # anything else (None, other auth objects) is rejected.
        return isinstance(request.auth, AuthToken)

View File

@@ -66,8 +66,46 @@ INSTALLED_APPS = [
'tailwind', # As currently set up, this is also needed in production (templatetags)
'admin_auto_filters',
'rest_framework',
'drf_spectacular',
'drf_spectacular_sidecar', # this brings the swagger-ui
]
REST_FRAMEWORK = {
    # NOTE(review): this default paginator is largely unused — the API views
    # each set a cursor-based pagination_class; confirm whether this can go.
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',  # from the tutorial
    'PAGE_SIZE': 10,
    "DEFAULT_AUTHENTICATION_CLASSES": [
        "bugsink.authentication.BearerTokenAuthentication",
    ],
    "DEFAULT_PERMISSION_CLASSES": [
        "bugsink.permissions.IsGlobalAuthenticated",
    ],
    # JSON only: no browsable API, no form parsing
    "DEFAULT_RENDERER_CLASSES": [
        "rest_framework.renderers.JSONRenderer",
    ],
    "DEFAULT_PARSER_CLASSES": [
        "rest_framework.parsers.JSONParser",
    ],
    "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
}
SPECTACULAR_SETTINGS = {
    'TITLE': 'Bugsink',
    'DESCRIPTION': 'Bugsink API Documentation',
    'VERSION': '1.0.0',
    'SERVE_INCLUDE_SCHEMA': False,  # keep the docs clean and not document the docs endpoint itself.
    # apply the bearer scheme globally to all documented endpoints
    "SECURITY": [
        {"bearerAuth": []}
    ],
    # disambiguate enums that drf-spectacular would otherwise auto-name by
    # their (colliding) value sets
    "ENUM_NAME_OVERRIDES": {
        "TeamVisibilityEnum": ["joinable", "discoverable", "hidden"],
        "ProjectVisibilityEnum": ["joinable", "discoverable", "team_members"],
    },
}
BUGSINK_APPS = [
'bsmain',
'phonehome',

24
bugsink/test_api.py Normal file
View File

@@ -0,0 +1,24 @@
import unittest
from django.urls import reverse
from rest_framework.test import APIClient
from bsmain.models import AuthToken
class BearerAuthRouterTests(unittest.TestCase):
    """Smoke tests for bearer-token auth wiring on the API router.

    NOTE(review): this uses unittest.TestCase but creates DB rows
    (AuthToken.objects.create) — there is no per-test DB isolation/rollback
    as with django.test.TestCase; presumably intentional — confirm.
    """
    def setUp(self):
        self.client = APIClient()

    def test_ok_on_event_list(self):
        # valid token: the required ?issue filter gets us past auth to a 200
        token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {token.token}")
        resp = self.client.get(reverse("api:event-list"), {"issue": "00000000-0000-0000-0000-000000000000"})
        self.assertEqual(resp.status_code, 200)

    def test_missing_on_event_list(self):
        # no credentials: DRF may answer 401 or 403 depending on auth classes
        resp = self.client.get(reverse("api:event-list"))
        self.assertIn(resp.status_code, (401, 403))

    def test_invalid_on_event_list(self):
        # well-formed (40 hex chars) but unknown token: explicit 401
        self.client.credentials(HTTP_AUTHORIZATION="Bearer " + "a" * 40)
        resp = self.client.get(reverse("api:event-list"))
        self.assertEqual(resp.status_code, 401)

View File

@@ -5,6 +5,9 @@ from django.urls import include, path
from django.contrib.auth import views as auth_views
from django.views.generic import RedirectView, TemplateView
from rest_framework import routers
from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView
from alerts.views import debug_email as debug_alerts_email
from users.views import debug_email as debug_users_email
from teams.views import debug_email as debug_teams_email
@@ -14,6 +17,12 @@ from ingest.views import download_envelope
from files.views import chunk_upload, artifact_bundle_assemble, api_root, api_catch_all
from bugsink.decorators import login_exempt
from events.api_views import EventViewSet
from issues.api_views import IssueViewSet
from projects.api_views import ProjectViewSet
from releases.api_views import ReleaseViewSet
from teams.api_views import TeamViewSet
from .views import home, trigger_error, favicon, settings_view, silence_email_system_warning, counts, health_check_ready
from .debug_views import csrf_debug
@@ -23,6 +32,14 @@ admin.site.site_title = get_settings().SITE_TITLE
admin.site.index_title = "Admin" # everyone calls this the "admin" anyway. Let's set the title accordingly.
api_router = routers.DefaultRouter()
api_router.register(r'events', EventViewSet)
api_router.register(r'issues', IssueViewSet)
api_router.register(r'projects', ProjectViewSet)
api_router.register(r'releases', ReleaseViewSet)
api_router.register(r'teams', TeamViewSet)
urlpatterns = [
path('', home, name='home'),
@@ -43,6 +60,10 @@ urlpatterns = [
# many user-related views are directly exposed above (/accounts/), the rest is here:
path("users/", include("users.urls")),
path("api/canonical/0/", include((api_router.urls, "api"), namespace="api")),
path("api/canonical/0/schema/", SpectacularAPIView.as_view(), name="schema"),
path("api/canonical/0/schema/swagger-ui/", SpectacularSwaggerView.as_view(url_name="schema"), name="swagger-ui"),
# these are sentry-cli endpoint for uploading; they're unrelated to e.g. the ingestion API.
# the /api/0/ is just a hard prefix (for the ingest API, that position indicates the project id, but here it's just
# a prefix)

108
events/api_views.py Normal file
View File

@@ -0,0 +1,108 @@
from django.shortcuts import get_object_or_404
from rest_framework import viewsets
from rest_framework.exceptions import ValidationError
from rest_framework.decorators import action
from rest_framework.response import Response
from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiTypes, OpenApiResponse
from bugsink.utils import assert_
from bugsink.api_pagination import AscDescCursorPagination
from bugsink.api_mixins import AtomicRequestMixin
from .models import Event
from .serializers import EventListSerializer, EventDetailSerializer
from .markdown_stacktrace import render_stacktrace_md
from .renderers import MarkdownRenderer
class EventPagination(AscDescCursorPagination):
    # Cursor pagination requires an indexed, mostly-stable ordering field. We use `digest_order`: we require
    # ?issue=<uuid> and have a composite (issue_id, digest_order) index, so ORDER BY digest_order after filtering by
    # issue is fast and cursor-stable. (also note that digest_order comes in in-order).
    base_ordering = ("digest_order",)
    page_size = 250
    default_direction = "desc"  # newest first by default, aligned with UI
class EventViewSet(AtomicRequestMixin, viewsets.ReadOnlyModelViewSet):
    """
    LIST requires: ?issue=<uuid>
    Optional: ?order=asc|desc (default: desc)
    LIST omits `data`, ordered by digest_order
    RETRIEVE includes `data` (pure PK lookup; no filters/order applied)
    """
    queryset = Event.objects.all()  # router requirement for basename inference
    serializer_class = EventListSerializer
    pagination_class = EventPagination

    def filter_queryset(self, queryset):
        # list must be scoped to a single issue: together with the
        # (issue_id, digest_order) index this keeps cursor pagination fast.
        # Detail routes never reach this: get_object() below bypasses it.
        query_params = self.request.query_params
        if "issue" not in query_params:
            raise ValidationError({"issue": ["This field is required."]})
        return queryset.filter(issue=query_params["issue"])

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="issue",
                type=OpenApiTypes.UUID,
                location=OpenApiParameter.QUERY,
                required=True,
                description="Filter events by issue UUID (required).",
            ),
            OpenApiParameter(
                name="order",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                required=False,
                enum=["asc", "desc"],
                description="Sort order of digest_order (default: desc).",
            ),
        ]
    )
    def list(self, request, *args, **kwargs):
        # override exists only to attach the schema annotations above
        return super().list(request, *args, **kwargs)

    def get_object(self):
        """
        DRF's get_object(), but we intentionally bypass filter_queryset for detail routes to keep PK lookups
        db-index-friendly (no WHERE filters other than the PK which is already indexed).
        """
        queryset = self.get_queryset()  # no filter_queryset() here
        lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
        assert_(lookup_url_kwarg in self.kwargs, (
            'Expected view %s to be called with a URL keyword argument '
            'named "%s". Fix your URL conf, or set the `.lookup_field` '
            'attribute on the view correctly.' %
            (self.__class__.__name__, lookup_url_kwarg)
        ))
        filter_kwargs = {self.lookup_field: self.kwargs[lookup_url_kwarg]}
        obj = get_object_or_404(queryset, **filter_kwargs)
        # May raise a permission denied
        self.check_object_permissions(self.request, obj)
        return obj

    def get_serializer_class(self):
        # detail gets the heavy serializer (includes `data`), list the light one
        return EventDetailSerializer if self.action == "retrieve" else EventListSerializer

    @extend_schema(
        description="Render the event's stacktrace (frames, source, locals) as Markdown-like text.",
        responses={200: OpenApiResponse(response=str, description="Stacktrace as Markdown")},
    )
    @action(
        detail=True,
        methods=["get"],
        url_path="stacktrace",
        renderer_classes=[MarkdownRenderer],
    )
    def stacktrace(self, request, pk=None):
        # extra detail route: GET .../events/<pk>/stacktrace/ as text/markdown
        event = self.get_object()
        text = render_stacktrace_md(event, frames="in_app", include_locals=True)
        return Response(text)

View File

@@ -0,0 +1,186 @@
# This module is almost entirely written by a chatbot, with heavy guidance in terms of desired outcome, but very little
# code review. It's smoke-tested against all sample events and char-for-char tested for a single representative event.
#
# Large parts mirror (have stolen from) existing stacktrace-rendering logic from our views/templates, trimmed down for a
# Markdown/LLM audience.
#
# Purpose: expose event stacktraces (frames, source, locals) as clean, low-maintenance text for humans and machine
# tools. As in the UI: focus on the stacktrace rather than the event metadata.
#
# The provided markdown is not a stable interface; it's intended to be useful but not something you'd parse
# programmatically (just use the event data instead).
import logging
from django.conf import settings
from events.utils import apply_sourcemaps
from sentry_sdk_extensions import capture_or_log_exception
logger = logging.getLogger("bugsink.issues")
def _code_segments(frame):
pre = frame.get("pre_context") or []
ctx = frame.get("context_line")
post = frame.get("post_context") or []
pre = [("" if l is None else str(l)) for l in pre]
post = [("" if l is None else str(l)) for l in post]
if ctx is not None:
ctx = str(ctx)
return pre, ctx, post
def _code_lines(frame):
    """All source lines of *frame* (pre + context + post) as one flat list."""
    pre, context, post = _code_segments(frame)
    middle = [] if context is None else [context]
    return pre + middle + post
def _iter_exceptions(parsed):
exc = parsed.get("exception")
if not exc:
return []
if isinstance(exc, dict):
return list(exc.get("values") or [])
if isinstance(exc, (list, tuple)):
return list(exc)
return []
def _frames_for_exception(exc):
st = exc.get("stacktrace") or {}
return list(st.get("frames") or [])
def _header_lines(event, exc):
etype = exc.get("type") or "Exception"
val = exc.get("value") or ""
# Two-line title; no platform/event_id/timestamp clutter.
return [f"# {etype}", val]
def _format_frame_header(frame):
fn = frame.get("filename") or frame.get("abs_path") or "<unknown>"
func = frame.get("function") or ""
lineno = frame.get("lineno")
in_app = frame.get("in_app") is True
scope = "in-app" if in_app else "external"
header = f"### {fn}"
if lineno is not None:
header += f":{lineno}"
if func:
header += f" in `{func}`"
header += f" [{scope}]"
debug_id = frame.get("debug_id")
if debug_id and not frame.get("mapped"):
header += f" (no sourcemap for debug_id {debug_id})"
return [header]
def _format_code_gutter(frame):
    """Render the frame's source context as gutter-numbered lines.

    The context line gets a distinct gutter prefix so it stands out from the
    pre/post lines. Returns [] when the frame carries no source context.
    """
    pre, ctx, post = _code_segments(frame)
    if not pre and ctx is None and not post:
        return []
    # line numbering starts so that `lineno` lands on the context line;
    # clamped to 1 for frames whose pre_context would run past the file start
    lineno = frame.get("lineno")
    if lineno is not None:
        start = max(1, int(lineno) - len(pre))
    else:
        start = 1
    lines = list(pre)
    ctx_index = None
    if ctx is not None:
        ctx_index = len(lines)
        lines.append(ctx)
    lines.extend(post)
    # gutter width fits the largest line number (min 2)
    last_no = start + len(lines) - 1
    width = max(2, len(str(last_no)))
    out = []
    for i, text in enumerate(lines):
        n = start + i
        if ctx_index is not None and i == ctx_index:
            out.append(f"{str(n).rjust(width)} | {text}")
        else:
            out.append(f" {str(n).rjust(width)} | {text}")
    return out
def _format_locals(frame):
vars_ = frame.get("vars") or {}
if not vars_:
return []
lines = ["", "#### Locals", ""]
for k, v in vars_.items():
lines.append(f"* `{k}` = `{v}`")
return lines
def _select_frames(frames, in_app_only):
if not in_app_only:
return frames
filtered = [f for f in frames if f.get("in_app") is True]
return filtered if filtered else frames
def render_stacktrace_md(event, frames="in_app", include_locals=True):
    """Render *event*'s stacktrace(s) as Markdown text.

    frames: "in_app" filters to in-app frames (with fallback to all when none
    qualify); any other value keeps all frames.
    include_locals: append a "Locals" section per frame when vars are present.
    Returns "_No stacktrace available._" when the event has no exceptions.
    """
    parsed = event.get_parsed_data()
    try:
        apply_sourcemaps(parsed)
    except Exception as e:
        if settings.DEBUG or settings.I_AM_RUNNING == "TEST":
            # when developing/testing, I _do_ want to get notified
            raise
        # sourcemaps are still experimental; we don't want to fail on them, so we just log the error and move on.
        capture_or_log_exception(e, logger)
    excs = _iter_exceptions(parsed)
    if not excs:
        return "_No stacktrace available._"
    # non-python platforms get exceptions/frames reversed so the output reads
    # like a Python traceback (most recent call last) — presumably because
    # other SDKs report newest-first; confirm against SDK payloads.
    stack_of_plates = getattr(event, "platform", None) != "python"
    if stack_of_plates:
        excs = list(reversed(excs))
    lines = []
    for i, exc in enumerate(excs):
        if i > 0:
            # chained-exception separator, mirroring CPython's wording
            lines += ["", "**During handling of the above exception, another exception occurred:**", ""]
        lines += _header_lines(event, exc)
        frames_list = _frames_for_exception(exc) or []
        if stack_of_plates and frames_list:
            frames_list = list(reversed(frames_list))
        in_app_only = frames == "in_app"
        frames_list = _select_frames(frames_list, in_app_only)
        for frame in frames_list:
            # spacer above every frame header
            lines.append("")
            lines += _format_frame_header(frame)
            code_listing = _format_code_gutter(frame)
            if code_listing:
                lines += code_listing
            else:
                # brief mention when no source context is available
                lines.append("_no source context available_")
            if include_locals:
                loc_lines = _format_locals(frame)
                if loc_lines:
                    lines += loc_lines
    # strip trailing whitespace per-line and surrounding blank lines
    return "\n".join([s.rstrip() for s in lines]).strip()

10
events/renderers.py Normal file
View File

@@ -0,0 +1,10 @@
from rest_framework.renderers import BaseRenderer
class MarkdownRenderer(BaseRenderer):
    """Render plain Markdown text as a text/markdown response body."""
    media_type = "text/markdown"
    format = "md"
    charset = "utf-8"

    def render(self, data, accepted_media_type=None, renderer_context=None):
        # `data` is normally the Markdown string from the view. On error paths
        # DRF may hand this renderer a non-str payload (e.g. an error-detail
        # dict from an exception response), which previously crashed with
        # AttributeError on .encode — coerce defensively instead.
        if data is None:
            return b""
        if not isinstance(data, str):
            data = str(data)
        return data.encode("utf-8")

49
events/serializers.py Normal file
View File

@@ -0,0 +1,49 @@
from rest_framework import serializers
from drf_spectacular.utils import extend_schema_field
from .markdown_stacktrace import render_stacktrace_md
from .models import Event
class EventListSerializer(serializers.ModelSerializer):
    """Lightweight list view: excludes the (potentially large) `data` field."""
    class Meta:
        model = Event
        # minimal metadata for listings; `data`/`stacktrace_md` live on the
        # detail serializer only
        fields = [
            "id",
            "ingested_at",
            "digested_at",
            "issue",
            "grouping",
            "event_id",
            "project",
            "timestamp",
            "digest_order",
        ]
class EventDetailSerializer(serializers.ModelSerializer):
    """Detail view: includes full `data` payload."""
    # NOTE as with Issue.grouping_keys: check viewset for prefetching
    # grouping_key = serializers.CharField(source="grouping.grouping_key", read_only=True)
    data = serializers.SerializerMethodField()
    stacktrace_md = serializers.SerializerMethodField()

    class Meta:
        model = Event
        # list fields plus the heavy payload fields
        fields = EventListSerializer.Meta.fields + [
            "data",
            "stacktrace_md",
            # "grouping_key"  # TODO (likely) once we have the "expand" idea implemented
        ]

    @extend_schema_field(serializers.JSONField)
    def get_data(self, obj):
        # we override `data` to return the parsed version (which may come from the file store rather than the DB)
        return obj.get_parsed_data()

    @extend_schema_field(serializers.CharField)
    def get_stacktrace_md(self, obj):
        # same rendering as the /stacktrace action, inlined into the detail payload
        return render_stacktrace_md(obj, frames="in_app", include_locals=True)

141
events/test_api.py Normal file
View File

@@ -0,0 +1,141 @@
from bugsink.test_utils import TransactionTestCase25251 as TransactionTestCase
from django.urls import reverse
from rest_framework.test import APIClient
from projects.models import Project
from bsmain.models import AuthToken
from events.factories import create_event
from events.api_views import EventViewSet
from issues.factories import get_or_create_issue
from events.factories import create_event_data
class EventApiTests(TransactionTestCase):
    """End-to-end tests for /events list, detail, and stacktrace endpoints."""
    def setUp(self):
        # authenticated client + one project/issue/event fixture
        self.client = APIClient()
        token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {token.token}")
        self.project = Project.objects.create(name="Test Project")
        self.issue, _ = get_or_create_issue(project=self.project)
        self.event = create_event(issue=self.issue)

    def test_list_requires_scope(self):
        # list without ?issue is a 400, not an unfiltered dump
        response = self.client.get(reverse("api:event-list"))
        self.assertEqual(response.status_code, 400)
        self.assertEqual({'issue': ['This field is required.']}, response.json())

    def test_detail_by_id(self):
        url = reverse("api:event-detail", args=[self.event.id])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        detail = response.json()
        self.assertEqual(detail["id"], str(self.event.id))
        self.assertIn("data", detail)
        self.assertTrue("event_id" in detail["data"])

    def test_detail_includes_stacktrace_md_field(self):
        # fixture event has no exception, so the placeholder text is expected
        url = reverse("api:event-detail", args=[self.event.id])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        detail = response.json()
        self.assertIn("stacktrace_md", detail)
        self.assertIsInstance(detail["stacktrace_md"], str)
        self.assertTrue(len(detail["stacktrace_md"]) > 0)
        self.assertEqual("_No stacktrace available._", detail["stacktrace_md"])

    def test_stacktrace_action_returns_markdown(self):
        url = reverse("api:event-stacktrace", args=[self.event.id])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(response["Content-Type"].startswith("text/markdown"))
        body = response.content.decode("utf-8")
        self.assertTrue(len(body) > 0)
        self.assertEqual("_No stacktrace available._", body)

    def test_list_by_issue_is_light_payload(self):
        # list serializer must not include the heavy `data` field
        response = self.client.get(reverse("api:event-list"), {"issue": str(self.issue.id)})
        self.assertEqual(response.status_code, 200)
        self.assertNotIn("data", response.json()["results"][0])

    def test_detail_not_found_is_404(self):
        url = reverse("api:event-detail", args=["00000000-0000-0000-0000-000000000000"])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)

    def test_list_rejects_bad_order(self):
        response = self.client.get(reverse("api:event-list"), {"issue": str(self.issue.id), "order": "sideways"})
        self.assertEqual(response.status_code, 400)
        self.assertEqual({'order': ["Must be 'asc' or 'desc'."]}, response.json())

    def test_list_order_default_desc(self):
        e0 = self.event
        e1 = create_event(issue=self.issue)
        response = self.client.get(reverse("api:event-list"), {"issue": str(self.issue.id)})
        self.assertEqual(response.status_code, 200)
        ids = [item["id"] for item in response.json()["results"]]
        self.assertEqual(ids[0], str(e1.id))
        self.assertEqual(ids[1], str(e0.id))

    def test_list_order_asc(self):
        e0 = self.event
        e1 = create_event(issue=self.issue)
        response = self.client.get(reverse("api:event-list"), {"issue": str(self.issue.id), "order": "asc"})
        self.assertEqual(response.status_code, 200)
        ids = [item["id"] for item in response.json()["results"]]
        self.assertEqual(ids[0], str(e0.id))
        self.assertEqual(ids[1], str(e1.id))
class EventPaginationTests(TransactionTestCase):
    """Cursor pagination across pages, in both sort directions."""
    def setUp(self):
        self.client = APIClient()
        token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {token.token}")
        # shrink the page size class attribute so 5 events span 3 pages;
        # restored in tearDown since this mutates shared class state
        self.old_size = EventViewSet.pagination_class.page_size
        EventViewSet.pagination_class.page_size = 2

    def tearDown(self):
        EventViewSet.pagination_class.page_size = self.old_size

    def _make_events(self, issue, n=5):
        # n events on *issue*, in digest order
        events = []
        for i in range(n):
            ev = create_event(issue=issue)
            events.append(ev)
        return events

    def _ids(self, resp):
        # ids of the current page's results
        return [row["id"] for row in resp.json()["results"]]

    def test_digest_order_desc_two_pages(self):
        proj = Project.objects.create(name="P")
        issue = get_or_create_issue(project=proj, event_data=create_event_data(exception_type="root"))[0]
        events = self._make_events(issue, 5)
        # default (desc) → events 5,4 on page 1; 3,2 on page 2
        r1 = self.client.get(reverse("api:event-list"), {"issue": str(issue.id)})
        self.assertEqual(self._ids(r1), [str(events[4].id), str(events[3].id)])
        r2 = self.client.get(r1.json()["next"])
        self.assertEqual(self._ids(r2), [str(events[2].id), str(events[1].id)])

    def test_digest_order_asc_two_pages(self):
        proj = Project.objects.create(name="P2")
        issue = get_or_create_issue(project=proj, event_data=create_event_data(exception_type="root2"))[0]
        events = self._make_events(issue, 5)
        # asc → events 1,2 on page 1; 3,4 on page 2
        r1 = self.client.get(reverse("api:event-list"),
                             {"issue": str(issue.id), "order": "asc"})
        self.assertEqual(self._ids(r1), [str(events[0].id), str(events[1].id)])
        r2 = self.client.get(r1.json()["next"])
        self.assertEqual(self._ids(r2), [str(events[2].id), str(events[3].id)])

View File

@@ -1,6 +1,6 @@
from django.urls import path
from .views import event_download, event_plaintext
from .views import event_download, event_plaintext, event_markdown
urlpatterns = [
@@ -8,4 +8,5 @@ urlpatterns = [
path('event/<uuid:event_pk>/raw/', event_download, kwargs={"as_attachment": False}),
path('event/<uuid:event_pk>/download/', event_download, kwargs={"as_attachment": True}),
path('event/<uuid:event_pk>/plain/', event_plaintext),
path('event/<uuid:event_pk>/md/', event_markdown),
]

View File

@@ -5,6 +5,8 @@ from django.shortcuts import render
from bugsink.decorators import event_membership_required, atomic_for_request_method
from issues.utils import get_values
from .markdown_stacktrace import render_stacktrace_md
@atomic_for_request_method
@event_membership_required
@@ -25,3 +27,15 @@ def event_plaintext(request, event):
"event": event,
"exceptions": exceptions,
}, content_type="text/plain")
@atomic_for_request_method
@event_membership_required
def event_markdown(request, event, as_attachment=False):
    """Serve the event's stacktrace as a text/markdown response.

    as_attachment: when True, add a Content-Disposition header so browsers
    download "<event_id_hex>.md" instead of displaying it.
    NOTE(review): the visible URLconf maps /md/ without passing as_attachment,
    so the attachment branch looks currently unreachable from urls — confirm.
    """
    text = render_stacktrace_md(event, frames="in_app", include_locals=True)
    result = HttpResponse(text, content_type="text/markdown; charset=utf-8")
    if as_attachment:
        result["Content-Disposition"] = content_disposition_header(
            as_attachment=True, filename=event.id.hex + ".md"
        )
    return result

View File

@@ -369,7 +369,7 @@ class BaseIngestAPIView(View):
# multiple events with the same event_id "don't happen" (i.e. are the result of badly misbehaving clients)
raise ValidationError("Event already exists", code="event_already_exists")
release = create_release_if_needed(project, event.release, event, issue)
release, _ = create_release_if_needed(project, event.release, event.ingested_at, issue)
if issue_created:
TurningPoint.objects.create(

133
issues/api_views.py Normal file
View File

@@ -0,0 +1,133 @@
from django.shortcuts import get_object_or_404
from rest_framework import viewsets
from rest_framework.pagination import CursorPagination
from rest_framework.exceptions import ValidationError
from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiTypes
from bugsink.api_mixins import AtomicRequestMixin
from bugsink.utils import assert_
from .models import Issue
from .serializers import IssueSerializer
class IssuesCursorPagination(CursorPagination):
    """
    Cursor paginator for /issues supporting ?sort=… and ?order=asc|desc.
    Sort modes are named after the *primary* column:
    - sort=digest_order → unique per project → no tie-breakers needed
    - sort=last_seen → timestamp → tie-breaker on id
    Direction applies to primary *and beyond* (i.e. all fields in the list).
    The view MUST filter by project; ordering is handled here.
    """
    # Cursor pagination requires an indexed, mostly-stable ordering. Stable mode: sort=digest_order (default). We
    # require ?project=<uuid> and have a composite (project_id, digest_order) index, so ORDER BY digest_order after
    # filtering by project is fast and cursor-stable.
    # We also offer a "recent" mode: sort=last_seen. This is not stable, as new events can come in mid-cursor, and
    # reshuffle things causing misses or duplicates. However, this is the desired UX for a "recent activity" view.
    # i.e. the typical usage would in fact just be to get the "first page" of recent activity.
    page_size = 250
    default_direction = "asc"
    default_sort = "digest_order"
    VALID_SORTS = ("digest_order", "last_seen")
    VALID_ORDERS = ("asc", "desc")

    def get_ordering(self, request, queryset, view):
        # Validate both params up front so bad input is a clean 400.
        sort = request.query_params.get("sort", self.default_sort)
        if sort not in self.VALID_SORTS:
            raise ValidationError({"sort": ["Must be 'digest_order' or 'last_seen'."]})
        order = request.query_params.get("order", self.default_direction)
        if order not in self.VALID_ORDERS:
            raise ValidationError({"order": ["Must be 'asc' or 'desc'."]})
        desc = (order == "desc")
        if sort == "digest_order":
            # Unique per project; stable cursor once filtered by project.
            return ["-digest_order" if desc else "digest_order"]
        # sort == "last_seen": timestamp needs a deterministic tie-breaker.
        if desc:
            return ["-last_seen", "-id"]
        return ["last_seen", "id"]
class IssueViewSet(AtomicRequestMixin, viewsets.ReadOnlyModelViewSet):
    """
    LIST requires: ?project=<id>
    Optional: ?sort=digest_order|last_seen (default: digest_order)
    Optional: ?order=asc|desc (default: asc)
    RETRIEVE is a pure PK lookup (soft-deletes implied)
    """
    queryset = Issue.objects.filter(is_deleted=False)  # hide soft-deleted issues; also satisfies router
    serializer_class = IssueSerializer
    pagination_class = IssuesCursorPagination

    def get_queryset(self):
        # explicit override: always the soft-delete-filtered base queryset
        return self.queryset

    @extend_schema(
        parameters=[
            OpenApiParameter(
                # NOTE(review): declared INT here while the events endpoint
                # uses UUID for its filter — confirm the project pk type.
                name="project",
                type=OpenApiTypes.INT,
                location=OpenApiParameter.QUERY,
                required=True,
                description="Filter issues by project id (required).",
            ),
            OpenApiParameter(
                name="sort",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                required=False,
                enum=["digest_order", "last_seen"],
                description="Sort mode (default: digest_order).",
            ),
            OpenApiParameter(
                name="order",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                required=False,
                enum=["asc", "desc"],
                description="Sort order (default: asc).",
            ),
        ]
    )
    def list(self, request, *args, **kwargs):
        # override exists only to attach the schema annotations above
        return super().list(request, *args, **kwargs)

    def filter_queryset(self, queryset):
        queryset = super().filter_queryset(queryset)
        if self.action != "list":
            # detail routes are pure PK lookups; no project scoping
            return queryset
        project = self.request.query_params.get("project")
        if not project:
            # the below at least until we have a UI for cross-project Issue listing, i.e. #190
            raise ValidationError({"project": ["This field is required."]})
        return queryset.filter(project=project)

    def get_object(self):
        """
        DRF's get_object(), but bypass filter_queryset for detail.
        """
        # TODO: copy/paste from events/api_views.py
        queryset = self.get_queryset()
        lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
        assert_(
            lookup_url_kwarg in self.kwargs,
            'Expected view %s to be called with a URL keyword argument named "%s".'
            % (self.__class__.__name__, lookup_url_kwarg)
        )
        filter_kwargs = {self.lookup_field: self.kwargs[lookup_url_kwarg]}
        obj = get_object_or_404(queryset, **filter_kwargs)
        self.check_object_permissions(self.request, obj)
        return obj

42
issues/serializers.py Normal file
View File

@@ -0,0 +1,42 @@
from rest_framework import serializers
from .models import Issue
class IssueSerializer(serializers.ModelSerializer):
    """Minimal, deliberately curated read-only representation of an Issue."""

    # grouping_keys = serializers.SerializerMethodField()  # read-only list of strings

    class Meta:
        model = Issue

        # First attempt at getting the field list right: a minimal set of user-facing,
        # denormalized attributes. Fields considered too "raw" / implementation-tied are
        # kept below as comments to document that their exclusion was deliberate.
        # NOTE(review): the original comment here discussed an event-level `data` field,
        # which Issue does not expose — presumably copy/pasted from the Event serializer.
        fields = [
            "id",
            "project",
            "is_deleted",
            "digest_order",
            "last_seen",
            "first_seen",
            "digested_event_count",
            "stored_event_count",
            "calculated_type",
            "calculated_value",
            "transaction",
            # "last_frame_filename",
            # "last_frame_module",
            # "last_frame_function",
            "is_resolved",
            "is_resolved_by_next_release",
            # "fixed_at", too "raw"? i.e. too implementation-tied?
            # "events_at", too "raw"? i.e. too implementation-tied?
            "is_muted",
            # "unmute_on_volume_based_conditions", too "raw"? i.e. too implementation-tied?
            # "grouping_keys", TODO (likely) once we have the "expand" idea implemented
        ]

    # def get_grouping_keys(self, obj):
    #     # TODO: prefetch grouping_key in IssueViewSet
    #     return list(obj.grouping_set.values_list("grouping_key", flat=True))

View File

@@ -129,6 +129,7 @@
{% if is_event_page %}
<a href="{{ script_prefix }}/events/event/{{ event.id }}/download/">{% translate "Download" %}</a>
| <a href="{{ script_prefix }}/events/event/{{ event.id }}/raw/" >{% translate "JSON" %}</a>
| <a href="{{ script_prefix }}/events/event/{{ event.id }}/md/" >{% translate "Markdown" %}</a>
| <a href="{{ script_prefix }}/events/event/{{ event.id }}/plain/" >{% translate "Plain" %}</a>
{% endif %}

168
issues/test_api.py Normal file
View File

@@ -0,0 +1,168 @@
from bugsink.test_utils import TransactionTestCase25251 as TransactionTestCase
from django.urls import reverse
from django.utils import timezone
from rest_framework.test import APIClient
from bsmain.models import AuthToken
from projects.models import Project
from issues.models import Issue
from issues.factories import get_or_create_issue
from events.factories import create_event_data
from issues.api_views import IssueViewSet
class IssueApiTests(TransactionTestCase):
    """Endpoint tests for the read-only Issue API: list filtering/ordering and detail lookup."""

    def setUp(self):
        self.client = APIClient()
        token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {token.token}")
        self.project = Project.objects.create(name="Test Project")

        # create two distinct issues for ordering tests (different grouping keys)
        data0 = create_event_data(exception_type="E0")
        data1 = create_event_data(exception_type="E1")
        self.issue0, _ = get_or_create_issue(project=self.project, event_data=data0)
        self.issue1, _ = get_or_create_issue(project=self.project, event_data=data1)

        # ensure deterministic last_seen ordering: issue1 strictly later than issue0
        now = timezone.now()
        Issue.objects.filter(id=self.issue0.id).update(last_seen=now)
        Issue.objects.filter(id=self.issue1.id).update(last_seen=now + timezone.timedelta(seconds=1))
        self.issue0.refresh_from_db()
        self.issue1.refresh_from_db()

    def test_list_requires_project(self):
        response = self.client.get(reverse("api:issue-list"))
        self.assertEqual(response.status_code, 400)
        self.assertEqual({"project": ["This field is required."]}, response.json())

    def test_list_by_project_default_asc(self):
        # no explicit ?order: the default direction is ascending
        response = self.client.get(reverse("api:issue-list"), {"project": str(self.project.id)})
        self.assertEqual(response.status_code, 200)
        ids = [row["id"] for row in response.json()["results"]]
        self.assertEqual(ids[0], str(self.issue0.id))
        self.assertEqual(ids[1], str(self.issue1.id))

    def test_list_by_project_order_desc(self):
        response = self.client.get(reverse("api:issue-list"), {"project": str(self.project.id), "order": "desc"})
        self.assertEqual(response.status_code, 200)
        ids = [row["id"] for row in response.json()["results"]]
        self.assertEqual(ids[0], str(self.issue1.id))
        self.assertEqual(ids[1], str(self.issue0.id))

    def test_list_rejects_bad_order(self):
        response = self.client.get(reverse("api:issue-list"), {"project": str(self.project.id), "order": "sideways"})
        self.assertEqual(response.status_code, 400)
        self.assertEqual({"order": ["Must be 'asc' or 'desc'."]}, response.json())

    def test_detail_by_id(self):
        url = reverse("api:issue-detail", args=[self.issue0.id])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()["id"], str(self.issue0.id))

    def test_detail_ignores_query_filters(self):
        # detail is a pure PK lookup: list-only params (project/order) must not narrow it
        url = reverse("api:issue-detail", args=[self.issue0.id])
        response = self.client.get(url, {"project": "00000000-0000-0000-0000-000000000000", "order": "asc"})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()["id"], str(self.issue0.id))

    def test_detail_404_on_is_deleted(self):
        # soft-deleted issues are hidden by the base queryset, so detail 404s (not 200)
        Issue.objects.filter(id=self.issue0.id).update(is_deleted=True)
        url = reverse("api:issue-detail", args=[self.issue0.id])
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)

    def test_list_rejects_bad_sort(self):
        r = self.client.get(
            reverse("api:issue-list"),
            {"project": str(self.project.id), "sort": "nope"},
        )
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json(), {"sort": ["Must be 'digest_order' or 'last_seen'."]})
class IssuePaginationTests(TransactionTestCase):
    """Cursor-pagination tests for both sort modes, both directions, with page_size forced to 2."""

    # per-issue last_seen offsets in minutes; list index == creation order (== digest_order - 1)
    last_seen_deltas = [3, 1, 4, 0, 2]

    def setUp(self):
        self.client = APIClient()
        token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {token.token}")
        # NOTE(review): mutates a class attribute on the shared pagination class;
        # restored in tearDown — not safe under parallel test execution.
        self.old_size = IssueViewSet.pagination_class.page_size
        IssueViewSet.pagination_class.page_size = 2

    def tearDown(self):
        IssueViewSet.pagination_class.page_size = self.old_size

    def _make_issues(self):
        # Build 5 issues with known digest_order (1..5) and shuffled last_seen values.
        proj = Project.objects.create(name="P")
        base = timezone.now().replace(microsecond=0)
        issues = []
        for i, delta in enumerate(self.last_seen_deltas):
            data = create_event_data(exception_type=f"E{i}")
            iss = get_or_create_issue(project=proj, event_data=data)[0]
            iss.digest_order = i + 1
            iss.last_seen = base + timezone.timedelta(minutes=delta)
            iss.save(update_fields=["digest_order", "last_seen"])
            issues.append(iss)
        return proj, issues

    def _ids(self, resp):
        # ids of the current result page, in response order
        return [row["id"] for row in resp.json()["results"]]

    def _idx_by_last_seen(self, issues, minutes):
        # the issue whose last_seen offset is `minutes`
        return issues[self.last_seen_deltas.index(minutes)].id

    def _idx_by_digest(self, issues, n):
        return issues[n - 1].id  # digest_order = n

    def test_digest_order_asc(self):
        proj, issues = self._make_issues()
        r1 = self.client.get(
            reverse("api:issue-list"),
            {"project": str(proj.id), "sort": "digest_order", "order": "asc"})
        self.assertEqual(self._ids(r1), [str(self._idx_by_digest(issues, 1)), str(self._idx_by_digest(issues, 2))])
        # follow the cursor to the second page
        r2 = self.client.get(r1.json()["next"])
        self.assertEqual(self._ids(r2), [str(self._idx_by_digest(issues, 3)), str(self._idx_by_digest(issues, 4))])

    def test_digest_order_desc(self):
        proj, issues = self._make_issues()
        r1 = self.client.get(
            reverse("api:issue-list"), {"project": str(proj.id), "sort": "digest_order", "order": "desc"})
        self.assertEqual(self._ids(r1), [str(self._idx_by_digest(issues, 5)), str(self._idx_by_digest(issues, 4))])
        r2 = self.client.get(r1.json()["next"])
        self.assertEqual(self._ids(r2), [str(self._idx_by_digest(issues, 3)), str(self._idx_by_digest(issues, 2))])

    def test_last_seen_asc(self):
        proj, issues = self._make_issues()
        r1 = self.client.get(
            reverse("api:issue-list"), {"project": str(proj.id), "sort": "last_seen", "order": "asc"})
        self.assertEqual(
            self._ids(r1), [str(self._idx_by_last_seen(issues, 0)), str(self._idx_by_last_seen(issues, 1))])
        r2 = self.client.get(r1.json()["next"])
        self.assertEqual(self._ids(r2),
                         [str(self._idx_by_last_seen(issues, 2)), str(self._idx_by_last_seen(issues, 3))])

    def test_last_seen_desc(self):
        proj, issues = self._make_issues()
        r1 = self.client.get(
            reverse("api:issue-list"), {"project": str(proj.id), "sort": "last_seen", "order": "desc"})
        self.assertEqual(
            self._ids(r1), [str(self._idx_by_last_seen(issues, 4)), str(self._idx_by_last_seen(issues, 3))])
        r2 = self.client.get(r1.json()["next"])
        self.assertEqual(
            self._ids(r2), [str(self._idx_by_last_seen(issues, 2)), str(self._idx_by_last_seen(issues, 1))])

View File

@@ -27,6 +27,7 @@ from ingest.views import BaseIngestAPIView
from issues.factories import get_or_create_issue
from tags.models import store_tags
from tags.tasks import vacuum_tagvalues
from events.markdown_stacktrace import render_stacktrace_md
from .models import Issue, IssueStateManager, TurningPoint, TurningPointKind
from .regressions import is_regression, is_regression_2, issue_is_regression
@@ -193,7 +194,8 @@ class RegressionIssueTestCase(DjangoTestCase):
def test_issue_is_regression_no_releases(self):
project = Project.objects.create()
create_release_if_needed(fresh(project), "", create_event(project))
timestamp = datetime(2020, 1, 1, tzinfo=timezone.utc)
create_release_if_needed(fresh(project), "", timestamp)
# new issue is not a regression
issue = Issue.objects.create(project=project, **denormalized_issue_fields())
@@ -212,7 +214,8 @@ class RegressionIssueTestCase(DjangoTestCase):
def test_issue_had_no_releases_but_now_does(self):
project = Project.objects.create()
create_release_if_needed(fresh(project), "", create_event(project))
timestamp = datetime(2020, 1, 1, tzinfo=timezone.utc)
create_release_if_needed(fresh(project), "", timestamp)
# new issue is not a regression
issue = Issue.objects.create(project=project, **denormalized_issue_fields())
@@ -223,15 +226,16 @@ class RegressionIssueTestCase(DjangoTestCase):
issue.save()
# a new release happens
create_release_if_needed(fresh(project), "1.0.0", create_event(project))
create_release_if_needed(fresh(project), "1.0.0", timestamp)
self.assertTrue(issue_is_regression(fresh(issue), "1.0.0"))
def test_issue_is_regression_with_releases_resolve_by_latest(self):
project = Project.objects.create()
timestamp = datetime(2020, 1, 1, tzinfo=timezone.utc)
create_release_if_needed(fresh(project), "1.0.0", create_event(project))
create_release_if_needed(fresh(project), "2.0.0", create_event(project))
create_release_if_needed(fresh(project), "1.0.0", timestamp)
create_release_if_needed(fresh(project), "2.0.0", timestamp)
# new issue is not a regression
issue = Issue.objects.create(project=project, **denormalized_issue_fields())
@@ -244,7 +248,7 @@ class RegressionIssueTestCase(DjangoTestCase):
self.assertTrue(issue_is_regression(fresh(issue), "2.0.0"))
# a new release happens, and the issue is seen there: also a regression
create_release_if_needed(fresh(project), "3.0.0", create_event(project))
create_release_if_needed(fresh(project), "3.0.0", timestamp)
self.assertTrue(issue_is_regression(fresh(issue), "3.0.0"))
# reopen the issue (as is done when a real regression is seen; or as would be done manually); nothing is a
@@ -256,9 +260,10 @@ class RegressionIssueTestCase(DjangoTestCase):
def test_issue_is_regression_with_releases_resolve_by_next(self):
project = Project.objects.create()
timestamp = datetime(2020, 1, 1, tzinfo=timezone.utc)
create_release_if_needed(fresh(project), "1.0.0", create_event(project))
create_release_if_needed(fresh(project), "2.0.0", create_event(project))
create_release_if_needed(fresh(project), "1.0.0", timestamp)
create_release_if_needed(fresh(project), "2.0.0", timestamp)
# new issue is not a regression
issue = Issue.objects.create(project=project, **denormalized_issue_fields())
@@ -271,11 +276,11 @@ class RegressionIssueTestCase(DjangoTestCase):
self.assertFalse(issue_is_regression(fresh(issue), "2.0.0"))
# a new release appears (as part of a new event); this is a regression
create_release_if_needed(fresh(project), "3.0.0", create_event(project))
create_release_if_needed(fresh(project), "3.0.0", timestamp)
self.assertTrue(issue_is_regression(fresh(issue), "3.0.0"))
# first-seen at any later release: regression
create_release_if_needed(fresh(project), "4.0.0", create_event(project))
create_release_if_needed(fresh(project), "4.0.0", timestamp)
self.assertTrue(issue_is_regression(fresh(issue), "4.0.0"))
@@ -442,6 +447,7 @@ class IntegrationTest(TransactionTestCase):
def setUp(self):
super().setUp()
self.verbosity = self.get_verbosity()
self.maxDiff = None # show full diff on assertEqual failures
def get_verbosity(self):
# https://stackoverflow.com/a/27457315/339144
@@ -523,6 +529,8 @@ class IntegrationTest(TransactionTestCase):
filename, response.content if response.status_code != 302 else response.url))
for event in Event.objects.all():
render_stacktrace_md(event) # just make sure this doesn't crash
urls = [
f'/issues/issue/{ event.issue.id }/event/{ event.id }/',
f'/issues/issue/{ event.issue.id }/event/{ event.id }/details/',
@@ -548,6 +556,90 @@ class IntegrationTest(TransactionTestCase):
# we want to know _which_ event failed, hence the raise-from-e here
raise AssertionError("Error rendering event %s" % event.debug_info) from e
    def test_render_stacktrace_md(self):
        """Ingest a known sample event end-to-end, then pin the exact Markdown stacktrace rendering."""
        user = User.objects.create_user(username='test', password='test')
        project = Project.objects.create(name="test")
        ProjectMembership.objects.create(project=project, user=user)
        self.client.force_login(user)

        sentry_auth_header = get_header_value(f"http://{ project.sentry_key }@hostisignored/{ project.id }")

        # event through the ingestion pipeline
        command = SendJsonCommand()
        command.stdout = StringIO()
        command.stderr = StringIO()

        SAMPLES_DIR = os.getenv("SAMPLES_DIR", "../event-samples")

        # a nice example because it has 4 kinds of frames (some missing source context, some missing local vars)
        filename = SAMPLES_DIR + "/bugsink/frames-with-missing-info.json"
        with open(filename) as f:
            data = json.loads(f.read())

        # leave as-is for reproducibility of the test
        # data["event_id"] =

        if not command.is_valid(data, filename):
            raise Exception("validatity check in %s: %s" % (filename, command.stderr.getvalue()))

        response = self.client.post(
            f"/api/{ project.id }/store/",
            json.dumps(data),
            content_type="application/json",
            headers={
                "X-Sentry-Auth": sentry_auth_header,
                "X-BugSink-DebugInfo": filename,
            },
        )

        self.assertEqual(
            200, response.status_code, "Error in %s: %s" % (
                filename, response.content if response.status_code != 302 else response.url))

        event = Event.objects.get(issue__project=project, event_id=data["event_id"])
        md = render_stacktrace_md(event, frames="all", include_locals=True)

        # expected output pinned verbatim: one heading per frame, source context where
        # available, locals where available, and explicit "_no source context_" fallbacks
        self.assertEqual('''# CapturedStacktraceFo
4 kinds of frames
### manage.py:22 in `complete_with_both` [in-app]
17 | ) from exc
18 | execute_from_command_line(sys.argv)
19 |
20 |
21 | if __name__ == '__main__':
▶ 22 | main()
#### Locals
* `__name__` = `'__main__'`
* `__doc__` = `"Django's command-line utility for administrative tasks."`
* `__package__` = `None`
* `__loader__` = `<_frozen_importlib_external.SourceFileLoader object at 0x7fe00fb21810>`
* `__spec__` = `None`
* `__annotations__` = `{}`
* `__builtins__` = `<module 'builtins' (built-in)>`
* `__file__` = `'/mnt/datacrypt/dev/bugsink/manage.py'`
* `__cached__` = `None`
* `os` = `<module 'os' from '/usr/lib/python3.10/os.py'>`
### manage.py in `missing_code` [in-app]
_no source context available_
#### Locals
* `execute_from_command_line` = `<function execute_from_command_line at 0x7fe00ec72f80>`
### django/core/management/__init__.py:442 in `missing_vars` [in-app]
437 |
438 |
439 | def execute_from_command_line(argv=None):
440 | """Run a ManagementUtility."""
441 | utility = ManagementUtility(argv)
▶ 442 | utility.execute()
### django/core/management/__init__.py in `missing_everything` [in-app]
_no source context available_''', md)
class GroupingUtilsTestCase(DjangoTestCase):

View File

@@ -78,6 +78,7 @@ class EagerPaginator(Paginator):
class KnownCountPaginator(EagerPaginator):
"""optimization: we know the total count of the queryset, so we can avoid a count() query"""
# see also: bugsink/api_pagination.py for an alternative approach
def __init__(self, *args, **kwargs):
self._count = kwargs.pop("count")
@@ -103,6 +104,7 @@ class UncountablePage(Page):
class UncountablePaginator(EagerPaginator):
"""optimization: counting is too expensive; to be used in a template w/o .count and .last"""
# see also: bugsink/api_pagination.py for an alternative approach
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)

79
projects/api_views.py Normal file
View File

@@ -0,0 +1,79 @@
from django.shortcuts import get_object_or_404
from rest_framework import viewsets
from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiTypes
from bugsink.api_pagination import AscDescCursorPagination
from bugsink.api_mixins import ExpandViewSetMixin, AtomicRequestMixin
from .models import Project
from .serializers import (
ProjectListSerializer,
ProjectDetailSerializer,
ProjectCreateUpdateSerializer,
)
class ProjectPagination(AscDescCursorPagination):
    # Cursor pagination requires an indexed, mostly-stable ordering field. We use `name`, which is indexed.
    # NOTE(review): the original comment argued "for Teams, updates are rare and the table is small" —
    # presumably copy/pasted from a Team pagination class; the same argument is assumed to hold for
    # Projects ("requirement met in practice though not in theory") — confirm.
    base_ordering = ("name",)
    page_size = 250
    default_direction = "asc"
class ProjectViewSet(AtomicRequestMixin, ExpandViewSetMixin, viewsets.ModelViewSet):
    """
    /api/canonical/0/projects/

    GET /projects/ → list ordered by name ASC, hides soft-deleted, optional ?team=<uuid> filter
    GET /projects/{pk}/ → detail (pure PK)
    POST /projects/ → create {team, name, visibility?}
    PATCH /projects/{pk}/ → minimal updates
    DELETE → 405 (deliberately absent from http_method_names)
    """

    queryset = Project.objects.all()
    # no "delete"/"put": deletion and full replacement are intentionally not exposed
    http_method_names = ["get", "post", "patch", "head", "options"]
    pagination_class = ProjectPagination

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="team",
                type=OpenApiTypes.UUID,
                location=OpenApiParameter.QUERY,
                required=False,
                description="Optional filter by team UUID.",
            ),
        ]
    )
    def list(self, request, *args, **kwargs):
        # Override exists only to attach the @extend_schema documentation above.
        return super().list(request, *args, **kwargs)

    def filter_queryset(self, queryset):
        if self.action != "list":
            # detail/write actions operate on the unfiltered queryset
            return queryset

        query_params = self.request.query_params

        # Hide soft-deleted in lists
        qs = queryset.filter(is_deleted=False)

        # Optional team filter (no hard requirement; avoids guessing UI rules)
        team_id = query_params.get("team")
        if team_id:
            qs = qs.filter(team=team_id)

        return qs

    def get_object(self):
        # Pure PK lookup (bypass filter_queryset)
        queryset = self.get_queryset()
        lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
        obj = get_object_or_404(queryset, **{self.lookup_field: self.kwargs[lookup_url_kwarg]})
        self.check_object_permissions(self.request, obj)
        return obj

    def get_serializer_class(self):
        # write actions get the restricted create/update shape; detail gets the expandable shape
        if self.action in ("create", "partial_update"):
            return ProjectCreateUpdateSerializer
        if self.action == "retrieve":
            return ProjectDetailSerializer
        return ProjectListSerializer

95
projects/serializers.py Normal file
View File

@@ -0,0 +1,95 @@
from rest_framework import serializers
from bugsink.api_fields import make_enum_field
from teams.models import Team
from bugsink.api_mixins import ExpandableSerializerMixin
from teams.serializers import TeamDetailSerializer
from .models import Project, ProjectVisibility
# Serializer field mapping ProjectVisibility enum members to/from lowercase names in the API.
ProjectVisibilityField = make_enum_field(ProjectVisibility)


class ProjectListSerializer(serializers.ModelSerializer):
    """List-shape Project serializer: flat fields only, no expansion support."""

    visibility = ProjectVisibilityField()
    dsn = serializers.CharField(read_only=True)  # derived value; never client-writable

    class Meta:
        model = Project
        fields = [
            "id",
            "team",
            "name",
            "slug",
            "is_deleted",
            "dsn",
            "digested_event_count",
            "stored_event_count",
            "alert_on_new_issue",
            "alert_on_regression",
            "alert_on_unmute",
            "visibility",
            "retention_max_event_count",
        ]
class ProjectDetailSerializer(ExpandableSerializerMixin, serializers.ModelSerializer):
    """Detail-shape Project serializer; same field list as the list shape, plus ?expand=team support."""

    # fields listed here may be expanded from a PK reference into a nested object via ?expand=
    expandable_fields = {"team": TeamDetailSerializer}

    visibility = ProjectVisibilityField()
    dsn = serializers.CharField(read_only=True)  # derived value; never client-writable

    class Meta:
        model = Project
        fields = [
            "id",
            "team",
            "name",
            "slug",
            "is_deleted",
            "dsn",
            "digested_event_count",
            "stored_event_count",
            "alert_on_new_issue",
            "alert_on_regression",
            "alert_on_unmute",
            "visibility",
            "retention_max_event_count",
        ]
class ProjectCreateUpdateSerializer(serializers.ModelSerializer):
    """Write-shape Project serializer: only client-settable fields; system-managed fields excluded below."""

    id = serializers.UUIDField(read_only=True)
    team = serializers.PrimaryKeyRelatedField(queryset=Team.objects.all())
    visibility = ProjectVisibilityField(required=False)

    class Meta:
        model = Project
        fields = [
            "id",
            "team",
            "name",
            "visibility",
            "alert_on_new_issue",
            "alert_on_regression",
            "alert_on_unmute",
            "retention_max_event_count",
            # the fields below are deliberately not writable; kept as comments to document why:
            # "slug", auto-generated for uniqueness
            # "is_deleted", must go through delete_deferred()
            # "digested_event_count", system-managed counter
            # "stored_event_count", system-managed counter
            # "has_releases", system-managed flag
            # "dsn", derived from base_url + ids + key
            # "sentry_key", server-generated, not client-writable
            # "quota_exceeded_until", system-managed quota state
            # "next_quota_check", system-managed quota scheduler
        ]

        # extra_kwargs: mark alert/retention fields optional on write (they have defaults)
        extra_kwargs = {
            "alert_on_new_issue": {"required": False},
            "alert_on_regression": {"required": False},
            "alert_on_unmute": {"required": False},
            "retention_max_event_count": {"required": False},
        }

138
projects/test_api.py Normal file
View File

@@ -0,0 +1,138 @@
from bugsink.test_utils import TransactionTestCase25251 as TransactionTestCase
from django.urls import reverse
from rest_framework.test import APIClient
from bsmain.models import AuthToken
from teams.models import Team
from projects.models import Project
class ProjectApiTests(TransactionTestCase):
    """Endpoint tests for the Project API: list/filter, create, patch, and the 405 on DELETE."""

    def setUp(self):
        self.client = APIClient()
        token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {token.token}")
        self.team = Team.objects.create(name="Engineering")

    def test_list_orders_by_name_and_hides_deleted(self):
        Project.objects.create(team=self.team, name="Zebra")
        Project.objects.create(team=self.team, name="Alpha")
        Project.objects.create(team=self.team, name="Gamma", is_deleted=True)

        r = self.client.get(reverse("api:project-list"))
        self.assertEqual(r.status_code, 200)
        names = [row["name"] for row in r.json()["results"]]
        # name ASC; "Gamma" is absent because it is soft-deleted
        self.assertEqual(names, ["Alpha", "Zebra"])

    def test_optional_team_filter(self):
        other = Team.objects.create(name="Ops")
        Project.objects.create(team=self.team, name="A1")
        Project.objects.create(team=other, name="B1")

        r = self.client.get(reverse("api:project-list"), {"team": str(self.team.id)})
        self.assertEqual(r.status_code, 200)
        names = [row["name"] for row in r.json()["results"]]
        self.assertEqual(names, ["A1"])

    def test_create_requires_team_and_name(self):
        # missing team
        r1 = self.client.post(reverse("api:project-list"), {"name": "ProjOnly"}, format="json")
        self.assertEqual(r1.status_code, 400)
        self.assertIn("team", r1.json())

        # missing name
        r2 = self.client.post(reverse("api:project-list"), {"team": str(self.team.id)}, format="json")
        self.assertEqual(r2.status_code, 400)
        self.assertIn("name", r2.json())

    def test_create_and_retrieve(self):
        r = self.client.post(
            reverse("api:project-list"),
            {"team": str(self.team.id), "name": "Core", "visibility": "team_members"},
            format="json",
        )
        self.assertEqual(r.status_code, 201)
        pid = r.json()["id"]

        r2 = self.client.get(reverse("api:project-detail", args=[pid]))
        self.assertEqual(r2.status_code, 200)
        body = r2.json()
        self.assertEqual(body["name"], "Core")
        self.assertEqual(body["visibility"], "team_members")
        self.assertIn("dsn", body)  # read-only; present on detail

    def test_patch_minimal(self):
        p = Project.objects.create(team=self.team, name="Old")
        r = self.client.patch(
            reverse("api:project-detail", args=[p.id]),
            {"name": "New", "alert_on_unmute": False},
            format="json",
        )
        self.assertEqual(r.status_code, 200)
        body = r.json()
        self.assertEqual(body["name"], "New")
        self.assertFalse(body["alert_on_unmute"])

    def test_delete_not_allowed(self):
        # DELETE is excluded from http_method_names on the viewset
        p = Project.objects.create(team=self.team, name="Temp")
        r = self.client.delete(reverse("api:project-detail", args=[p.id]))
        self.assertEqual(r.status_code, 405)
class ExpansionTests(TransactionTestCase):
    """
    Expansion tests are exercised via ProjectViewSet, but the intent is to validate the
    generic ExpandableSerializerMixin infrastructure.
    """

    def setUp(self):
        self.client = APIClient()
        token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {token.token}")
        self.team = Team.objects.create(name="T")
        self.project = Project.objects.create(name="P", team=self.team)

    def _get(self, expand=None):
        # GET the project detail, optionally with an ?expand= query parameter
        url = reverse("api:project-detail", args=[self.project.id])
        qp = {"expand": expand} if expand else {}
        return self.client.get(url, qp)

    def test_default_no_expand(self):
        r = self._get()
        self.assertEqual(r.status_code, 200)
        data = r.json()
        # team is just rendered as a reference, not expanded
        self.assertEqual(data["team"], str(self.team.id))

    def test_with_valid_expand(self):
        r = self._get("team")
        self.assertEqual(r.status_code, 200)
        data = r.json()
        # team is fully expanded into object
        self.assertEqual(data["team"]["id"], str(self.team.id))
        self.assertEqual(data["team"]["name"], self.team.name)

    def test_with_invalid_expand(self):
        r = self._get("not_a_field")
        self.assertEqual(r.status_code, 400)
        self.assertEqual(
            r.json(),
            {"expand": ["Unknown field: not_a_field"]},
        )

    def test_with_comma_separated_expands(self):
        # only 'team' is valid, 'not_a_field' should trigger 400
        r = self._get("team,not_a_field")
        self.assertEqual(r.status_code, 400)
        self.assertEqual(
            r.json(),
            {"expand": ["Unknown field: not_a_field"]},
        )

    def test_expand_rejected_when_not_supported(self):
        # ProjectListSerializer does not support expand
        url = reverse("api:project-list")
        r = self.client.get(url, {"expand": "team"})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(
            r.json(),
            {"expand": ["Expansions are not supported on this endpoint."]},
        )

63
releases/api_views.py Normal file
View File

@@ -0,0 +1,63 @@
from rest_framework import viewsets
from rest_framework.exceptions import ValidationError
from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiTypes
from bugsink.api_pagination import AscDescCursorPagination
from bugsink.api_mixins import AtomicRequestMixin
from .models import Release
from .serializers import ReleaseListSerializer, ReleaseDetailSerializer, ReleaseCreateSerializer
class ReleasePagination(AscDescCursorPagination):
    # Cursor pagination requires an indexed, mostly-stable ordering field. We order by `date_released`:
    # we require ?project=<id> and have a composite (project_id, date_released) index, so ORDER BY
    # date_released after filtering by project is fast and cursor-stable. (Also note that date_released
    # generally comes in in-order.)
    # NOTE(review): the original comment said "We use `digest_order`", which contradicts base_ordering
    # below — presumably copy/pasted from the Issue pagination class.
    base_ordering = ("date_released",)
    page_size = 250
    default_direction = "desc"
class ReleaseViewSet(AtomicRequestMixin, viewsets.ModelViewSet):
    """
    LIST requires: ?project=<id>
    Ordered by date_released (see ReleasePagination.base_ordering).
    CREATE allowed. DELETE potential TODO.
    """

    queryset = Release.objects.all()
    serializer_class = ReleaseListSerializer
    # no "delete"/"put"/"patch": releases are create-only through this API for now
    http_method_names = ["get", "post", "head", "options"]
    pagination_class = ReleasePagination

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="project",
                # NOTE(review): declared INT, but sibling endpoints use UUID project ids — confirm PK type.
                type=OpenApiTypes.INT,
                location=OpenApiParameter.QUERY,
                required=True,
                description="Filter releases by project id (required).",
            ),
        ]
    )
    def list(self, request, *args, **kwargs):
        # Override exists only to attach the @extend_schema documentation above.
        return super().list(request, *args, **kwargs)

    def filter_queryset(self, queryset):
        queryset = super().filter_queryset(queryset)
        if self.action != "list":
            # detail/create are not narrowed by the list-only ?project param
            return queryset

        query_params = self.request.query_params
        project_id = query_params.get("project")
        if not project_id:
            raise ValidationError({"project": ["This field is required."]})
        return queryset.filter(project=project_id)

    def get_serializer_class(self):
        if self.action == "create":
            return ReleaseCreateSerializer
        if self.action == "retrieve":
            return ReleaseDetailSerializer
        return ReleaseListSerializer

View File

@@ -0,0 +1,29 @@
from django.db import migrations, models
class Migration(migrations.Migration):
    # Replace the single-column sort_epoch index on Release with composite indexes
    # (project, sort_epoch) and (project, date_released): API release listings always
    # filter by project first, so project-prefixed indexes serve those queries.

    dependencies = [
        ("projects", "0014_alter_projectmembership_project"),
        ("releases", "0003_alter_release_project"),
    ]

    operations = [
        # drop the old single-column index (covered by the composite replacement below)
        migrations.RemoveIndex(
            model_name="release",
            name="releases_re_sort_ep_5c07c8_idx",
        ),
        migrations.AddIndex(
            model_name="release",
            index=models.Index(
                fields=["project", "sort_epoch"], name="releases_re_project_1ceb8b_idx"
            ),
        ),
        # supports cursor pagination ordered by date_released within a project
        migrations.AddIndex(
            model_name="release",
            index=models.Index(
                fields=["project", "date_released"],
                name="releases_re_project_b17273_idx",
            ),
        ),
    ]

View File

@@ -83,7 +83,8 @@ class Release(models.Model):
unique_together = ("project", "version")
indexes = [
models.Index(fields=["sort_epoch"]),
models.Index(fields=["project", "sort_epoch"]),
models.Index(fields=["project", "date_released"]),
]
def get_short_version(self):
@@ -100,7 +101,7 @@ class Release(models.Model):
return self.version[:12]
def create_release_if_needed(project, version, event, issue=None):
def create_release_if_needed(project, version, timestamp, issue=None):
if version is None:
# because `create_release_if_needed` is called with Issue.release (non-nullable), the below "won't happen"
raise ValueError('The None-like version must be the empty string')
@@ -110,7 +111,9 @@ def create_release_if_needed(project, version, event, issue=None):
version = sanitize_version(version)
release, release_created = Release.objects.get_or_create(project=project, version=version)
release, release_created = Release.objects.get_or_create(project=project, version=version, defaults={
"date_released": timestamp,
})
if release_created and version != "":
if not project.has_releases:
project.has_releases = True
@@ -119,16 +122,14 @@ def create_release_if_needed(project, version, event, issue=None):
if release == project.get_latest_release():
resolved_by_next_qs = Issue.objects.filter(project=project, is_resolved_by_next_release=True)
# NOTE: once we introduce an explicit way of creating releases (not event-based) we can not rely on a
# triggering event anymore for our timestamp.
TurningPoint.objects.bulk_create([TurningPoint(
project=project,
issue=issue, kind=TurningPointKind.NEXT_MATERIALIZED, triggering_event=event,
metadata=json.dumps({"actual_release": release.version}), timestamp=event.ingested_at)
issue=issue, kind=TurningPointKind.NEXT_MATERIALIZED,
# the detection of a new release through an event does not imply a triggering of a TurningPoint:
triggering_event=None,
metadata=json.dumps({"actual_release": release.version}), timestamp=timestamp)
for issue in resolved_by_next_qs
])
event.never_evict = True # .save() will be called by the caller of this function
resolved_by_next_qs.update(
fixed_at=Concat("fixed_at", Value(release.version + "\n")),
@@ -140,7 +141,7 @@ def create_release_if_needed(project, version, event, issue=None):
issue.fixed_at = issue.fixed_at + release.version + "\n"
issue.is_resolved_by_next_release = False
return release
return release, release_created
def sanitize_version(version):

39
releases/serializers.py Normal file
View File

@@ -0,0 +1,39 @@
from django.utils import timezone
from rest_framework import serializers
from projects.models import Project
from rest_framework.exceptions import ValidationError
from .models import Release, create_release_if_needed
class ReleaseListSerializer(serializers.ModelSerializer):
    """Compact Release representation, used for list responses."""

    class Meta:
        model = Release
        fields = [
            "id",
            "project",
            "version",
            "date_released",
        ]
class ReleaseDetailSerializer(serializers.ModelSerializer):
    """Full Release representation, including the derived version-ordering fields."""

    class Meta:
        model = Release
        fields = [
            "id",
            "project",
            "version",
            "date_released",
            "semver",
            "is_semver",
            "sort_epoch",
        ]
        # derived on the server; never client-writable
        read_only_fields = ["semver", "is_semver", "sort_epoch"]
class ReleaseCreateSerializer(serializers.Serializer):
    """Input serializer for release creation; responses use the detail shape.

    Accepts a project PK, a (possibly blank) version string and an optional
    timestamp (defaults to "now"). Duplicate (project, version) pairs are
    rejected with a field-level validation error on `version`.
    """

    project = serializers.PrimaryKeyRelatedField(queryset=Project.objects.all())
    version = serializers.CharField(allow_blank=True)
    timestamp = serializers.DateTimeField(required=False)

    def create(self, validated_data):
        when = validated_data.get("timestamp") or timezone.now()
        release, created = create_release_if_needed(
            project=validated_data["project"],
            version=validated_data["version"],
            timestamp=when,
        )
        if created:
            return release
        # the release already existed: surface this as a validation error rather than silently reusing it
        raise ValidationError({"version": ["Release with this version already exists for the project."]})

    def to_representation(self, instance):
        # present the (newly created) release using the full detail shape
        return ReleaseDetailSerializer(instance).data

136
releases/test_api.py Normal file
View File

@@ -0,0 +1,136 @@
from bugsink.test_utils import TransactionTestCase25251 as TransactionTestCase
from django.urls import reverse
from django.utils import timezone
from rest_framework.test import APIClient
from bsmain.models import AuthToken
from projects.models import Project
from releases.models import Release
from releases.api_views import ReleaseViewSet
class ReleaseApiTests(TransactionTestCase):
    """End-to-end tests for the canonical /releases/ API endpoints."""

    def setUp(self):
        self.client = APIClient()
        auth_token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {auth_token.token}")
        self.project = Project.objects.create(name="RelProj")

    def _create(self, version, **extra):
        # POST a release for self.project; extra kwargs are merged into the payload.
        data = {"project": self.project.id, "version": version, **extra}
        return self.client.post(reverse("api:release-list"), data, format="json")

    def test_list_requires_project(self):
        response = self.client.get(reverse("api:release-list"))
        self.assertEqual(response.status_code, 400)
        self.assertEqual({"project": ["This field is required."]}, response.json())

    def test_create_new_returns_201_and_detail_shape(self):
        response = self._create("1.2.3", timestamp="2024-01-01T00:00:00Z")
        self.assertEqual(response.status_code, 201)
        body = response.json()
        # id plus the model-computed fields must be present in the response:
        for field in ("id", "semver", "is_semver", "sort_epoch"):
            self.assertIn(field, body)

    def test_create_duplicate_returns_400(self):
        first = self._create("2.0.0")
        self.assertEqual(first.status_code, 201)
        second = self._create("2.0.0")  # same project+version
        self.assertEqual(second.status_code, 400)
        self.assertIn("version", second.json())

    def test_create_allows_empty_version(self):
        self.assertEqual(self._create("").status_code, 201)

    def test_create_without_timestamp_is_allowed(self):
        self.assertEqual(self._create("3.0.0").status_code, 201)

    def test_detail_returns_readonly_fields(self):
        creation = self._create("4.5.6")
        self.assertEqual(creation.status_code, 201)
        detail = self.client.get(reverse("api:release-detail", args=[creation.json()["id"]]))
        self.assertEqual(detail.status_code, 200)
        for field in ("semver", "is_semver", "sort_epoch"):
            self.assertIn(field, detail.json())

    def test_update_and_delete_methods_not_allowed(self):
        creation = self._create("9.9.9")
        self.assertEqual(creation.status_code, 201)
        detail_url = reverse("api:release-detail", args=[creation.json()["id"]])
        # the viewset is create/read-only: mutation verbs must 405
        self.assertEqual(self.client.put(detail_url, {"version": "X"}, format="json").status_code, 405)
        self.assertEqual(self.client.patch(detail_url, {"version": "X"}, format="json").status_code, 405)
        self.assertEqual(self.client.delete(detail_url).status_code, 405)
class ReleasePaginationTests(TransactionTestCase):
    """Exercises cursor pagination of the release list endpoint in both directions."""

    def setUp(self):
        self.client = APIClient()
        token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {token.token}")
        # Shrink the page size so pagination triggers with only a handful of rows.
        # NOTE(review): this mutates shared state on the pagination class itself;
        # it is restored in tearDown, but tests relying on the default size must not run interleaved.
        self.old_size = ReleaseViewSet.pagination_class.page_size
        ReleaseViewSet.pagination_class.page_size = 2

    def tearDown(self):
        # restore the class-wide page size changed in setUp
        ReleaseViewSet.pagination_class.page_size = self.old_size

    def _make_releases(self, project, deltas):
        # Create one release per delta with date_released = base + delta; microseconds
        # are zeroed so the ordering field round-trips stably through the cursor.
        base = timezone.now().replace(microsecond=0)
        releases = []
        for i, delta in enumerate(deltas):
            rel = Release.objects.create(
                project=project,
                version=f"v{i}",
                date_released=base + delta,
            )
            releases.append(rel)
        return releases

    def _ids(self, resp):
        # Extract the "id" of each row on the current result page.
        return [row["id"] for row in resp.json()["results"]]

    def test_date_released_desc_two_pages(self):
        # Newest-first: page 1 holds the two latest releases, page 2 the next two.
        proj = Project.objects.create(name="P")
        releases = self._make_releases(
            proj, [timezone.timedelta(days=i) for i in range(5)]
        )
        r1 = self.client.get(
            reverse("api:release-list"), {"project": str(proj.id), "order": "desc"}
        )
        self.assertEqual(self._ids(r1), [str(releases[4].id), str(releases[3].id)])
        r2 = self.client.get(r1.json()["next"])
        self.assertEqual(self._ids(r2), [str(releases[2].id), str(releases[1].id)])

    def test_date_released_asc_two_pages(self):
        # Oldest-first: pages walk forward through the releases.
        proj = Project.objects.create(name="P2")
        releases = self._make_releases(
            proj, [timezone.timedelta(days=i) for i in range(5)]
        )
        r1 = self.client.get(
            reverse("api:release-list"), {"project": str(proj.id), "order": "asc"}
        )
        self.assertEqual(self._ids(r1), [str(releases[0].id), str(releases[1].id)])
        r2 = self.client.get(r1.json()["next"])
        self.assertEqual(self._ids(r2), [str(releases[2].id), str(releases[3].id)])

View File

@@ -1,8 +1,16 @@
import json
from django.test import TestCase as DjangoTestCase
from datetime import timedelta
from projects.models import Project
from .models import Release, ordered_releases, RE_PACKAGE_VERSION
from django.test import TestCase
from django.utils import timezone
from issues.models import TurningPoint, TurningPointKind
from issues.factories import get_or_create_issue
from .models import Release, ordered_releases, RE_PACKAGE_VERSION, create_release_if_needed
class ReleaseTestCase(DjangoTestCase):
@@ -61,3 +69,69 @@ class ReleaseTestCase(DjangoTestCase):
self.assertEqual(
{"package": "@mypac@kage", "version": "1.2.3"},
RE_PACKAGE_VERSION.match("@mypac@kage@1.2.3").groupdict())
class CreateReleaseIfNeededTests(TestCase):
    """Unit tests for create_release_if_needed: creation, idempotence and side effects."""

    def setUp(self):
        # Three strictly increasing timestamps, five seconds apart.
        self.timestamp0 = timezone.now()
        self.timestamp1 = self.timestamp0 + timedelta(seconds=5)
        self.timestamp2 = self.timestamp1 + timedelta(seconds=5)

    def test_empty_version_creates_release_without_side_effects(self):
        project = Project.objects.create()
        release, created = create_release_if_needed(project, "", self.timestamp0)
        self.assertTrue(created)
        self.assertEqual(release.version, "")
        self.assertEqual(release.date_released, self.timestamp0)
        project.refresh_from_db()
        # an empty version must not flip the project into "has releases" mode,
        # nor record any TurningPoints
        self.assertFalse(getattr(project, "has_releases", False))
        self.assertEqual(TurningPoint.objects.count(), 0)

    def test_turning_point_metadata_contains_actual_release(self):
        project = Project.objects.create()
        issue, _ = get_or_create_issue(project=project)
        issue.is_resolved_by_next_release = True
        issue.save()

        create_release_if_needed(project, "1.2.3", self.timestamp0)

        materialized = TurningPoint.objects.filter(
            kind=TurningPointKind.NEXT_MATERIALIZED, project=project).first()
        self.assertIsNotNone(materialized)
        self.assertEqual(json.loads(materialized.metadata).get("actual_release"), "1.2.3")

    def test_idempotent_when_release_exists(self):
        project = Project.objects.create()
        create_release_if_needed(project, "2.0.0", self.timestamp0)
        count_before = TurningPoint.objects.count()
        flag_before = getattr(project, "has_releases", False)

        # same version again: nothing new is created, no side effects repeat
        _, created = create_release_if_needed(project, "2.0.0", self.timestamp1)

        self.assertFalse(created)
        self.assertEqual(TurningPoint.objects.count(), count_before)
        project.refresh_from_db()
        self.assertEqual(getattr(project, "has_releases", False), flag_before)

    def test_next_release_materialization_transforms_issue(self):
        project = Project.objects.create()
        issue, _ = get_or_create_issue(project=project)
        issue.is_resolved = True
        issue.is_resolved_by_next_release = True
        issue.fixed_at = ""
        issue.save()

        create_release_if_needed(project, "1.0.0", self.timestamp0)

        issue.refresh_from_db()
        self.assertTrue(issue.is_resolved)
        self.assertFalse(issue.is_resolved_by_next_release)
        self.assertEqual(issue.fixed_at, "1.0.0\n")
        materializations = TurningPoint.objects.filter(
            project=project, issue=issue, kind=TurningPointKind.NEXT_MATERIALIZED)
        self.assertEqual(materializations.count(), 1)

View File

@@ -16,3 +16,5 @@ user-agents==2.2.*
fastjsonschema==2.21.*
verbose_csrf_middleware==1.0.*
ecma426>=0.2.0
djangorestframework==3.16.*
drf-spectacular[sidecar]

49
teams/api_views.py Normal file
View File

@@ -0,0 +1,49 @@
from django.shortcuts import get_object_or_404
from rest_framework import viewsets
from bugsink.api_pagination import AscDescCursorPagination
from bugsink.api_mixins import AtomicRequestMixin
from .models import Team
from .serializers import (
TeamListSerializer,
TeamDetailSerializer,
TeamCreateUpdateSerializer,
)
class TeamPagination(AscDescCursorPagination):
    """Cursor pagination for teams, ordered by name (ascending by default)."""

    # Cursor pagination requires an indexed, mostly-stable ordering field. We use `name`, which is indexed; for Teams,
    # updates are rare and the table is small, so "requirement met in practice though not in theory".
    base_ordering = ("name",)
    page_size = 250
    default_direction = "asc"
class TeamViewSet(AtomicRequestMixin, viewsets.ModelViewSet):
    """
    /api/canonical/0/teams/

    GET    /teams/       → list ordered by name ASC
    GET    /teams/{pk}/  → detail (pure PK)
    POST   /teams/       → create {name, visibility?}
    PATCH  /teams/{pk}/  → minimal updates
    DELETE               → 405
    """
    queryset = Team.objects.all()
    http_method_names = ["get", "post", "patch", "head", "options"]
    pagination_class = TeamPagination

    def get_object(self):
        # Pure PK lookup (bypass filter_queryset)
        url_kwarg = self.lookup_url_kwarg or self.lookup_field
        instance = get_object_or_404(
            self.get_queryset(), **{self.lookup_field: self.kwargs[url_kwarg]})
        self.check_object_permissions(self.request, instance)
        return instance

    def get_serializer_class(self):
        # write actions share one serializer, retrieve has its own, list is the fallback
        serializer_by_action = {
            "create": TeamCreateUpdateSerializer,
            "partial_update": TeamCreateUpdateSerializer,
            "retrieve": TeamDetailSerializer,
        }
        return serializer_by_action.get(self.action, TeamListSerializer)

31
teams/serializers.py Normal file
View File

@@ -0,0 +1,31 @@
from rest_framework import serializers
from bugsink.api_fields import make_enum_field
from .models import Team, TeamVisibility
# Serializer field translating TeamVisibility enum values to/from their lowercase member names.
TeamVisibilityField = make_enum_field(TeamVisibility)
class TeamListSerializer(serializers.ModelSerializer):
    """Representation of a Team for list endpoints."""

    # enum exposed as its lowercase name (e.g. "discoverable") rather than the raw int
    visibility = TeamVisibilityField()

    class Meta:
        model = Team
        fields = ["id", "name", "visibility"]
class TeamDetailSerializer(serializers.ModelSerializer):
    """Representation of a single Team; currently identical in shape to the list serializer,
    kept separate so detail-only fields can be added without touching list output."""

    visibility = TeamVisibilityField()

    class Meta:
        model = Team
        fields = ["id", "name", "visibility"]
class TeamCreateUpdateSerializer(serializers.ModelSerializer):
    """Input serializer for POST/PATCH on teams; `name` is required, `visibility` optional."""

    # id is server-assigned and echoed back, never accepted as input
    id = serializers.UUIDField(read_only=True)
    visibility = TeamVisibilityField(required=False)

    class Meta:
        model = Team
        fields = ["id", "name", "visibility"]

65
teams/test_api.py Normal file
View File

@@ -0,0 +1,65 @@
from bugsink.test_utils import TransactionTestCase25251 as TransactionTestCase
from django.urls import reverse
from rest_framework.test import APIClient
from bsmain.models import AuthToken
from teams.models import Team
class TeamApiTests(TransactionTestCase):
    """End-to-end tests for the canonical /teams/ API."""

    def setUp(self):
        self.client = APIClient()
        auth_token = AuthToken.objects.create()
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {auth_token.token}")

    def test_list_ordering_by_name(self):
        # created out of order on purpose; the API must return them name-sorted
        for team_name in ("Zeta", "Alpha", "Gamma"):
            Team.objects.create(name=team_name)
        response = self.client.get(reverse("api:team-list"))
        self.assertEqual(response.status_code, 200)
        listed_names = [row["name"] for row in response.json()["results"]]
        self.assertEqual(listed_names, ["Alpha", "Gamma", "Zeta"])

    def test_create_requires_name(self):
        response = self.client.post(reverse("api:team-list"), {"visibility": "discoverable"}, format="json")
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.json(), {"name": ["This field is required."]})

    def test_create_minimal_and_retrieve(self):
        create_response = self.client.post(
            reverse("api:team-list"),
            {"name": "Core Team", "visibility": "discoverable"},
            format="json",
        )
        self.assertEqual(create_response.status_code, 201)
        detail_response = self.client.get(
            reverse("api:team-detail", args=[create_response.json()["id"]]))
        self.assertEqual(detail_response.status_code, 200)
        body = detail_response.json()
        self.assertEqual(body["name"], "Core Team")
        self.assertEqual(body["visibility"], "discoverable")

    def test_patch_minimal(self):
        team = Team.objects.create(name="Old Name")
        response = self.client.patch(
            reverse("api:team-detail", args=[team.id]),
            {"name": "New Name"},
            format="json",
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()["name"], "New Name")

    def test_delete_not_allowed(self):
        team = Team.objects.create(name="Temp")
        response = self.client.delete(reverse("api:team-detail", args=[team.id]))
        self.assertEqual(response.status_code, 405)

    def test_create_rejects_invalid_visibility(self):
        response = self.client.post(
            reverse("api:team-list"),
            {"name": "Bad", "visibility": "nope"},
            format="json",
        )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.json(), {"visibility": ['"nope" is not a valid choice.']})

View File

@@ -68,7 +68,7 @@ class TestPygmentizeLineLineCountHandling(RegularTestCase):
_pygmentize_lines(["\n", "\n", "\n"])
class TestChooseLexerForPatter(RegularTestCase):
class TestChooseLexerForPattern(RegularTestCase):
def test_choose_lexer_for_pattern(self):
# simple 'does it not crash' test: