diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c2980c..2932685 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,130 @@ # Changes +## 1.7.3 (17 July 2025) + +Migration fix: delete TurningPoints w/ project=None (Fix #155) + +## 1.7.2 (17 July 2025) + +Various fixes: + +* Dark mode: use monokai style from pygments (Fix #152) +* add `vacuum_files` command (Fix #129) +* Artifact Bundle upload: clean up after extract (See #129) +* Add API catch-all endpoint for logging (Fix #153) +* File-upload: chunk-size of 2MiB (Fix #147) +* Sourcemaps upload: max file size 2GiB (See #147) +* Auto-clean binlogs on docker compose (sample) for mysql (See #149) +* Remove platform 'choices' from Event.model (See 403e28adb410) +* Better `ALLOWED_HOSTS` misconfig error-message (Fix #148) +* As per the "little red box on" #120 +* Fix wasted space at certain width in stacktrace UI (See #120) +* Fixed command's 'running in background' output (See 770ccb16225e) +* Project-edit: redirect to list on-save (See 2b46bfe9a114) +* `cleanup_eventstorage` command: be more clear when no `event_storage` is actually configured (See b2769d7202b6) +* Don't crash on illegal values for platform (See #143, #145) +* Support 'crystal' platform (Fix #145) +* Support 'powershell' platform (Fix #143) + +## 1.7.1 (10 July 2025) + +Fix: user-related forms broken by unclosed link + +## 1.7.0 (9 July 2025) + +Bugsink 1.7.0 introduces Dark Mode (See #40, #125) + +### Housekeeping + +A number of options to clean up unwanted or unneeded data have been added: + +* Project Deletion (See #50, #137) +* Issue Deletion (See #50) +* Vacuum Tags command (See #135) +* `vacuum_eventless_issuetags` command (see #134, #142) + +How these commands/tools relate to each other and may be used is [documented on +the website](https://www.bugsink.com/docs/housekeeping/) + +### Various small fixes + +* Skip `ALLOWED_HOSTS` validation for /health/ endpoints (see #140) +* `get_system_warnings` as a callable (see c2bc2e417475) +* 
`store_tags`: support 'very many' (~500) tags (see d62e53fdf8e7) +* Snappea: refuse to start in `TASK_ALWAYS_EAGER` mode (see aa255978b776) +* Sentry-SDK requirement, unpin minor version (see a91fdcd65673) + +## 1.6.3 (27 June 2025) + +* fix `make_consistent` on mysql (Fix #132) +* Tags in `event_data` can be lists; deal with that (Fix #130) + +## 1.6.2 (19 June 2025) + +* Too many quotes in local-vars display (Fix #119) + +## 1.6.1 (11 June 2025) + +Remove hard-coded slack `webhook_url` from the "test this connector" loop. + +## 1.6.0 (10 June 2025) + +### Slack Alerts + +Bugsink 1.6.0 introduces Slack Alerts (through webhooks); see #3. + +### Backwards-incompatible changes + +* The default number of web processes (gunicorn server workers) in the + dockerized setup is now equal to `min(cpu_count, 4)`; (it used to be 10). + + set `GUNICORN_CMD_ARGS="--workers=10"` to restore the previous behavior or + choose a custom number. + +### Various Features & Fixes + +* Display formatted log message when available (see #111) +* Add 2 env variables to compose-sample.yaml (See #110) +* Add delete functionality for users (See #108) +* Multi-file sourcemaps (See #87) +* Lookup by `debug_id` in dicts: use UUID (See #105) +* Add robots.txt that disallows crawling +* Add HEALTHCHECK command to Dockerfiles (See #98) +* Fingerprint: convert to string before concatenating (See #102) +* Add /health/ready endpoint (See #98) + +## 1.5.4 (12 May 2025) + +* Add bugsink-util script to allow settings-independent commands to be run +* UX of the `stress_test` command (param cleanup) +* checks on `settings.BASE_URL` +* Show _all_ Request Headers in `CSRF_DEBUG` view (see #100) +* Fix obj not found when visiting project as a non-member superuser + +## 1.5.3 (7 May 2025) + +* Performance fixes of the issue-list when there are many (millions) of _issues_ (rather than just events) in the + database; see aad0f624f904 & 0dfd01db9b38. 
+ +* Fix: `different_runtime_limit` applying to the wrong DB alias, see 699f6e587d28 + +* `CREATE_SUPERUSER` shortcut: robust for ':' in password, see 9b0f0e04f4e4 + +## 1.5.2 (6 May 2025) + +Various performance fixes when there are many (millions) of _issues_ +(rather than just events) in the database: + +* Add index for `Grouping.grouping_key` (and project), see 392f5a30be18, 49e6700d4a81 +* Digest: check Grouping.exists only once (save a query) +* Remove `open_issue_count` from homepage; it's too expensive +* Issue Paginator: don't attempt to count the Issues, see 378366105496 +* Stress test command: more fat-tailed randomness (d5a449020d03) + +Compatibility fix: + +* `format_exception` in `capture_or_log_exception`: python 3.9 compatible + ## 1.5.1 (24 April 2025) Various fixes and improvements: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 87457a8..44f966e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,7 +18,8 @@ Code contributions are welcome! We use the GitHub PR process to review and merge #### Tailwind -Bugsink uses tailwind for styling. +Bugsink uses tailwind for styling, and [django-tailwind](https://github.com/timonweb/django-tailwind/) +to "do tailwind stuff from the Django world". If you're working on HTML, you should probably develop while running the following somewhere: diff --git a/Dockerfile b/Dockerfile index 656f271..cc52519 100644 --- a/Dockerfile +++ b/Dockerfile @@ -49,4 +49,6 @@ RUN pip install -e . 
RUN ["bugsink-manage", "migrate", "snappea", "--database=snappea"] -CMD [ "monofy", "bugsink-manage", "check", "--deploy", "--fail-level", "WARNING", "&&", "bugsink-manage", "migrate", "&&", "bugsink-manage", "prestart", "&&", "gunicorn", "--bind=0.0.0.0:$PORT", "--workers=10", "--access-logfile", "-", "bugsink.wsgi", "|||", "bugsink-runsnappea"] +HEALTHCHECK CMD python -c 'import requests; requests.get("http://localhost:8000/health/ready").raise_for_status()' + +CMD [ "monofy", "bugsink-manage", "check", "--deploy", "--fail-level", "WARNING", "&&", "bugsink-manage", "migrate", "&&", "bugsink-manage", "prestart", "&&", "gunicorn", "--config", "gunicorn.docker.conf.py", "--bind=0.0.0.0:$PORT", "--access-logfile", "-", "bugsink.wsgi", "|||", "bugsink-runsnappea"] diff --git a/Dockerfile.fromwheel b/Dockerfile.fromwheel index 9dc3962..69c5aad 100644 --- a/Dockerfile.fromwheel +++ b/Dockerfile.fromwheel @@ -71,7 +71,10 @@ RUN --mount=type=cache,target=/var/cache/buildkit/pip \ pip install /wheels/$WHEEL_FILE COPY bugsink/conf_templates/docker.py.template bugsink_conf.py +COPY gunicorn.docker.conf.py /app/ RUN ["bugsink-manage", "migrate", "snappea", "--database=snappea"] -CMD [ "monofy", "bugsink-manage", "check", "--deploy", "--fail-level", "WARNING", "&&", "bugsink-manage", "migrate", "&&", "bugsink-manage", "prestart", "&&", "gunicorn", "--bind=0.0.0.0:$PORT", "--workers=10", "--access-logfile", "-", "bugsink.wsgi", "|||", "bugsink-runsnappea"] +HEALTHCHECK CMD python -c 'import requests; requests.get("http://localhost:8000/health/ready").raise_for_status()' + +CMD [ "monofy", "bugsink-manage", "check", "--deploy", "--fail-level", "WARNING", "&&", "bugsink-manage", "migrate", "&&", "bugsink-manage", "prestart", "&&", "gunicorn", "--config", "gunicorn.docker.conf.py", "--bind=0.0.0.0:$PORT", "--access-logfile", "-", "bugsink.wsgi", "|||", "bugsink-runsnappea"] diff --git a/README.md b/README.md index 3eca414..b713152 100644 --- a/README.md +++ b/README.md @@ -1,16 
+1,12 @@ # Bugsink: Self-hosted Error Tracking -[Bugsink](https://www.bugsink.com/) offers [Error Tracking](https://www.bugsink.com/error-tracking/) for your applications with full control -through self-hosting. - +* [Error Tracking](https://www.bugsink.com/error-tracking/) * [Built to self-host](https://www.bugsink.com/built-to-self-host/) * [Sentry-SDK compatible](https://www.bugsink.com/connect-any-application/) * [Scalable and reliable](https://www.bugsink.com/scalable-and-reliable/) ### Screenshot -This is what you'll get: - ![Screenshot](https://www.bugsink.com/static/images/JsonSchemaDefinitionException.5e02c1544273.png) @@ -22,7 +18,7 @@ The **quickest way to evaluate Bugsink** is to spin up a throw-away instance usi docker pull bugsink/bugsink:latest docker run \ - -e SECRET_KEY={{ random_secret }} \ + -e SECRET_KEY=PUT_AN_ACTUAL_RANDOM_SECRET_HERE_OF_AT_LEAST_50_CHARS \ -e CREATE_SUPERUSER=admin:admin \ -e PORT=8000 \ -p 8000:8000 \ diff --git a/alerts/forms.py b/alerts/forms.py new file mode 100644 index 0000000..3afe99a --- /dev/null +++ b/alerts/forms.py @@ -0,0 +1,21 @@ +from django.forms import ModelForm + +from .models import MessagingServiceConfig + + +class MessagingServiceConfigForm(ModelForm): + + def __init__(self, project, *args, **kwargs): + super().__init__(*args, **kwargs) + self.project = project + + class Meta: + model = MessagingServiceConfig + fields = ["display_name", "kind"] + + def save(self, commit=True): + instance = super().save(commit=False) + instance.project = self.project + if commit: + instance.save() + return instance diff --git a/alerts/migrations/0001_initial.py b/alerts/migrations/0001_initial.py new file mode 100644 index 0000000..8585973 --- /dev/null +++ b/alerts/migrations/0001_initial.py @@ -0,0 +1,52 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ("projects", "0011_fill_stored_event_count"), + ] + + 
operations = [ + migrations.CreateModel( + name="MessagingServiceConfig", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "display_name", + models.CharField( + help_text='For display in the UI, e.g. "#general on company Slack"', + max_length=100, + ), + ), + ( + "kind", + models.CharField( + choices=[("slack", "Slack (or compatible)")], + default="slack", + max_length=20, + ), + ), + ("config", models.TextField()), + ( + "project", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="service_configs", + to="projects.project", + ), + ), + ], + ), + ] diff --git a/alerts/migrations/0002_alter_messagingserviceconfig_project.py b/alerts/migrations/0002_alter_messagingserviceconfig_project.py new file mode 100644 index 0000000..dad1812 --- /dev/null +++ b/alerts/migrations/0002_alter_messagingserviceconfig_project.py @@ -0,0 +1,23 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + # Django came up with 0014, whatever the reason, I'm sure that 0013 is at least required (as per comments there) + ("projects", "0014_alter_projectmembership_project"), + ("alerts", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="messagingserviceconfig", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, + related_name="service_configs", + to="projects.project", + ), + ), + ] diff --git a/alerts/migrations/__init__.py b/alerts/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/alerts/models.py b/alerts/models.py index 6b20219..e7eec32 100644 --- a/alerts/models.py +++ b/alerts/models.py @@ -1 +1,18 @@ -# Create your models here. 
+from django.db import models +from projects.models import Project + +from .service_backends.slack import SlackBackend + + +class MessagingServiceConfig(models.Model): + project = models.ForeignKey(Project, on_delete=models.DO_NOTHING, related_name="service_configs") + display_name = models.CharField(max_length=100, blank=False, + help_text='For display in the UI, e.g. "#general on company Slack"') + + kind = models.CharField(choices=[("slack", "Slack (or compatible)"), ], max_length=20, default="slack") + + config = models.TextField(blank=False) + + def get_backend(self): + # once we have multiple backends: lookup by kind. + return SlackBackend(self) diff --git a/alerts/service_backends/__init__.py b/alerts/service_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/alerts/service_backends/slack.py b/alerts/service_backends/slack.py new file mode 100644 index 0000000..be4c9ad --- /dev/null +++ b/alerts/service_backends/slack.py @@ -0,0 +1,164 @@ +import json +import requests + +from django import forms +from django.template.defaultfilters import truncatechars + +from snappea.decorators import shared_task +from bugsink.app_settings import get_settings + +from issues.models import Issue + + +class SlackConfigForm(forms.Form): + webhook_url = forms.URLField(required=True) + + def __init__(self, *args, **kwargs): + config = kwargs.pop("config", None) + + super().__init__(*args, **kwargs) + if config: + self.fields["webhook_url"].initial = config.get("webhook_url", "") + + def get_config(self): + return { + "webhook_url": self.cleaned_data.get("webhook_url"), + } + + +def _safe_markdown(text): + # Slack assigns a special meaning to some characters, so we need to escape them + # to prevent them from being interpreted as formatting/special characters. 
+ return text.replace("&", "&").replace("<", "<").replace(">", ">").replace("*", "\\*").replace("_", "\\_") + + +@shared_task +def slack_backend_send_test_message(webhook_url, project_name, display_name): + # See Slack's Block Kit Builder + + data = {"blocks": [ + { + "type": "header", + "text": { + "type": "plain_text", + "text": "TEST issue", + }, + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "Test message by Bugsink to test the webhook setup.", + }, + }, + { + "type": "section", + "fields": [ + { + "type": "mrkdwn", + "text": "*project*: " + _safe_markdown(project_name), + }, + { + "type": "mrkdwn", + "text": "*message backend*: " + _safe_markdown(display_name), + }, + ] + } + + ]} + + result = requests.post( + webhook_url, + data=json.dumps(data), + headers={"Content-Type": "application/json"}, + ) + + result.raise_for_status() + + +@shared_task +def slack_backend_send_alert(webhook_url, issue_id, state_description, alert_article, alert_reason, unmute_reason=None): + issue = Issue.objects.get(id=issue_id) + + issue_url = get_settings().BASE_URL + issue.get_absolute_url() + link = f"<{issue_url}|" + _safe_markdown(truncatechars(issue.title().replace("|", ""), 200)) + ">" + + sections = [ + { + "type": "header", + "text": { + "type": "plain_text", + "text": f"{alert_reason} issue", + }, + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": link, + }, + }, + ] + + if unmute_reason: + sections.append({ + "type": "section", + "text": { + "type": "mrkdwn", + "text": unmute_reason, + }, + }) + + # assumption: visavis email, project.name is of less importance, because in slack-like things you may (though not + # always) do one-channel per project. 
more so for site_title (if you have multiple Bugsinks, you'll surely have + # multiple slack channels) + fields = { + "project": issue.project.name + } + + # left as a (possible) TODO, because the amount of refactoring (passing event to this function) is too big for now + # if event.release: + # fields["release"] = event.release + # if event.environment: + # fields["environment"] = event.environment + + data = {"blocks": sections + [ + { + "type": "section", + "fields": [ + { + "type": "mrkdwn", + "text": f"*{field}*: " + _safe_markdown(value), + } for field, value in fields.items() + ] + }, + ]} + + result = requests.post( + webhook_url, + data=json.dumps(data), + headers={"Content-Type": "application/json"}, + ) + + result.raise_for_status() + + +class SlackBackend: + + def __init__(self, service_config): + self.service_config = service_config + + def get_form_class(self): + return SlackConfigForm + + def send_test_message(self): + slack_backend_send_test_message.delay( + json.loads(self.service_config.config)["webhook_url"], + self.service_config.project.name, + self.service_config.display_name, + ) + + def send_alert(self, issue_id, state_description, alert_article, alert_reason, **kwargs): + slack_backend_send_alert.delay( + json.loads(self.service_config.config)["webhook_url"], + issue_id, state_description, alert_article, alert_reason, **kwargs) diff --git a/alerts/tasks.py b/alerts/tasks.py index f8000cd..153b1be 100644 --- a/alerts/tasks.py +++ b/alerts/tasks.py @@ -63,9 +63,21 @@ def send_unmute_alert(issue_id, unmute_reason): def _send_alert(issue_id, state_description, alert_article, alert_reason, **kwargs): + # NOTE: as it stands, there is a bit of asymmetry here: _send_alert is always called in delayed fashion; it delays + # some work itself (message backends) though not all (emails). I kept it like this to be able to add functionality + # without breaking too much (in particular, I like the 3 entry points (send_xx_alert) in the current setup). 
The + # present solution at least has the advantage that possibly frickle external calls don't break each other. + # The way forward is probably to keep the single 3-way callpoint, but make that non-delayed, and do the calls of + # both message-service and email based alerts in delayed fashion. + from issues.models import Issue # avoid circular import issue = Issue.objects.get(id=issue_id) + + for service in issue.project.service_configs.all(): + service_backend = service.get_backend() + service_backend.send_alert(issue_id, state_description, alert_article, alert_reason, **kwargs) + for user in _get_users_for_email_alert(issue): send_rendered_email( subject=f'"{truncatechars(issue.title(), 80)}" in "{issue.project.name}" ({state_description})', diff --git a/bsmain/__init__.py b/bsmain/__init__.py index 8f7e757..5d473fe 100644 --- a/bsmain/__init__.py +++ b/bsmain/__init__.py @@ -1,3 +1,6 @@ +import os +import urllib.parse + from django.core.checks import Warning, register from django.conf import settings @@ -31,3 +34,45 @@ def check_event_storage_properly_configured(app_configs, **kwargs): id="bsmain.W002", )) return errors + + +@register("bsmain") +def check_base_url_is_url(app_configs, **kwargs): + try: + parts = urllib.parse.urlsplit(str(get_settings().BASE_URL)) + except ValueError as e: + return [Warning( + str(e), + id="bsmain.W003", + )] + + if parts.scheme not in ["http", "https"]: + return [Warning( + "The BASE_URL setting must be a valid URL (starting with http or https).", + id="bsmain.W003", + )] + + if not parts.hostname: + return [Warning( + "The BASE_URL setting must be a valid URL. The hostname must be set.", + id="bsmain.W003", + )] + + return [] + + +@register("bsmain") +def check_proxy_env_vars_consistency(app_configs, **kwargs): + # in this check we straight-up check the os.environ: we can't rely on settings.BEHIND_HTTPS_PROXY to have been set + # since it's Docker-only. 
+ + if ( + os.getenv("BEHIND_HTTPS_PROXY", "False").lower() in ("true", "1", "yes") and + os.getenv("BEHIND_PLAIN_HTTP_PROXY", "False").lower() in ("true", "1", "yes") + ): + return [Warning( + "BEHIND_HTTPS_PROXY and BEHIND_PLAIN_HTTP_PROXY are mutually exclusive.", + id="bsmain.W004", + )] + + return [] diff --git a/bsmain/management/commands/prestart.py b/bsmain/management/commands/prestart.py index f9ae97b..837ce59 100644 --- a/bsmain/management/commands/prestart.py +++ b/bsmain/management/commands/prestart.py @@ -19,7 +19,7 @@ class Command(BaseCommand): if ":" not in os.getenv("CREATE_SUPERUSER"): raise ValueError("CREATE_SUPERUSER should be in the format 'username:password'") - username, password = os.getenv("CREATE_SUPERUSER").split(":") + username, password = os.getenv("CREATE_SUPERUSER").split(":", 1) if User.objects.all().exists(): print( diff --git a/bsmain/management/commands/send_json.py b/bsmain/management/commands/send_json.py index 39bbdb8..826f4e4 100644 --- a/bsmain/management/commands/send_json.py +++ b/bsmain/management/commands/send_json.py @@ -28,7 +28,7 @@ class Command(BaseCommand): parser.add_argument("--fresh-trace", action="store_true") parser.add_argument("--tag", nargs="*", action="append") parser.add_argument("--compress", action="store", choices=["gzip", "deflate", "br"], default=None) - parser.add_argument("--use-envelope", action="store_true") + parser.add_argument("--use-store-api", action="store_true", help="Use (deprecated) /api//store/") parser.add_argument("--chunked-encoding", action="store_true") parser.add_argument( "--x-forwarded-for", action="store", @@ -60,7 +60,7 @@ class Command(BaseCommand): def handle(self, *args, **options): compress = options['compress'] - use_envelope = options['use_envelope'] + use_envelope = not options['use_store_api'] dsn = options['dsn'] successfully_sent = [] diff --git a/bsmain/management/commands/stress_test.py b/bsmain/management/commands/stress_test.py index 8cc6456..2380881 100644 --- 
a/bsmain/management/commands/stress_test.py +++ b/bsmain/management/commands/stress_test.py @@ -11,11 +11,24 @@ import requests from django.core.management.base import BaseCommand -from compat.dsn import get_store_url, get_envelope_url, get_header_value +from compat.dsn import get_envelope_url, get_header_value from bugsink.streams import compress_with_zlib, WBITS_PARAM_FOR_GZIP, WBITS_PARAM_FOR_DEFLATE from issues.utils import get_values +def random_postfix(): + # avoids numbers, because when usedd in the type I imagine numbers may at some point be ignored in the grouping. + random_number = random.random() + + if random_number < 0.1: + # 10% of the time we simply sample from 1M to create a "fat tail". + unevenly_distributed_number = int(random.random() * 1_000_000) + else: + unevenly_distributed_number = int(1 / random_number) + + return "".join([chr(ord("A") + int(c)) for c in str(unevenly_distributed_number)]) + + class Command(BaseCommand): def add_arguments(self, parser): @@ -23,12 +36,10 @@ class Command(BaseCommand): parser.add_argument("--requests", type=int, default=1) parser.add_argument("--dsn", nargs="+", action="extend") - parser.add_argument("--fresh-id", action="store_true") parser.add_argument("--fresh-timestamp", action="store_true") parser.add_argument("--fresh-trace", action="store_true") parser.add_argument("--tag", nargs="*", action="append") parser.add_argument("--compress", action="store", choices=["gzip", "deflate", "br"], default=None) - parser.add_argument("--use-envelope", action="store_true") parser.add_argument("--random-type", action="store_true", default=False) # generate random exception type parser.add_argument("filename") @@ -38,12 +49,7 @@ class Command(BaseCommand): signal.signal(signal.SIGINT, self.handle_signal) compress = options['compress'] - use_envelope = options['use_envelope'] - # non-envelope mode is deprecated by Sentry; we only implement DIGEST_IMMEDIATELY=True for that mode which is - # usually not what we want to do 
our stress-tests for. (if this assumption is still true later in 2024, we can - # just remove the non-envelope mode support completely.) - assert use_envelope, "Only envelope mode is supported" dsns = options['dsn'] json_filename = options["filename"] @@ -57,7 +63,7 @@ class Command(BaseCommand): prepared_data[i_thread] = {} for i_request in range(options["requests"]): prepared_data[i_thread][i_request] = self.prepare( - data, options, i_thread, i_request, compress, use_envelope) + data, options, i_thread, i_request, compress) timings[i_thread] = [] @@ -65,7 +71,7 @@ class Command(BaseCommand): t0 = time.time() for i in range(options["threads"]): t = threading.Thread(target=self.loop_send_to_server, args=( - dsns, options, use_envelope, compress, prepared_data[i], timings[i])) + dsns, options, compress, prepared_data[i], timings[i])) t.start() print("waiting for threads to finish") @@ -77,7 +83,7 @@ class Command(BaseCommand): self.print_stats(options["threads"], options["requests"], total_time, timings) print("done") - def prepare(self, data, options, i_thread, i_request, compress, use_envelope): + def prepare(self, data, options, i_thread, i_request, compress): if "timestamp" not in data or options["fresh_timestamp"]: # weirdly enough a large numer of sentry test data don't actually have this required attribute set. # thus, we set it to something arbitrary on the sending side rather than have our server be robust @@ -88,8 +94,8 @@ class Command(BaseCommand): data["timestamp"] = time.time() - if options["fresh_id"]: - data["event_id"] = uuid.uuid4().hex + # in stress tests, we generally send many events, so they must be unique to be meaningful. + data["event_id"] = uuid.uuid4().hex if options["fresh_trace"]: if "contexts" not in data: @@ -112,28 +118,19 @@ class Command(BaseCommand): k, v = tag.split(":", 1) if v == "RANDOM": - # avoids numbers in the type because I imagine numbers may at some point be ignored in the grouping. 
- into_chars = lambda i: "".join([chr(ord("A") + int(c)) for c in str(i)]) # noqa - - unevenly_distributed_number = int(1 / (random.random() + 0.0000001)) - v = "value-" + into_chars(unevenly_distributed_number) + v = "value-" + random_postfix() data["tags"][k] = v if options["random_type"]: - # avoids numbers in the type because I imagine numbers may at some point be ignored in the grouping. - into_chars = lambda i: "".join([chr(ord("A") + int(c)) for c in str(i)]) # noqa - - unevenly_distributed_number = int(1 / (random.random() + 0.0000001)) values = get_values(data["exception"]) - values[0]["type"] = "Exception" + into_chars(unevenly_distributed_number) + values[0]["type"] = "Exception" + random_postfix() data_bytes = json.dumps(data).encode("utf-8") - if use_envelope: - # the smallest possible envelope: - data_bytes = (b'{"event_id": "%s"}\n{"type": "event"}\n' % (data["event_id"]).encode("utf-8") + - data_bytes) + # the smallest possible envelope: + data_bytes = (b'{"event_id": "%s"}\n{"type": "event"}\n' % (data["event_id"]).encode("utf-8") + + data_bytes) if compress in ["gzip", "deflate"]: if compress == "gzip": @@ -152,19 +149,19 @@ class Command(BaseCommand): return compressed_data - def loop_send_to_server(self, dsns, options, use_envelope, compress, compressed_datas, timings): + def loop_send_to_server(self, dsns, options, compress, compressed_datas, timings): for compressed_data in compressed_datas.values(): if self.stopping: return dsn = random.choice(dsns) t0 = time.time() - success = Command.send_to_server(dsn, options, use_envelope, compress, compressed_data) + success = Command.send_to_server(dsn, options, compress, compressed_data) taken = time.time() - t0 timings.append((success, taken)) @staticmethod - def send_to_server(dsn, options, use_envelope, compress, compressed_data): + def send_to_server(dsn, options, compress, compressed_data): try: headers = { "Content-Type": "application/json", @@ -179,7 +176,7 @@ class Command(BaseCommand): 
headers["Content-Encoding"] = "deflate" response = requests.post( - get_envelope_url(dsn) if use_envelope else get_store_url(dsn), + get_envelope_url(dsn), headers=headers, data=compressed_data, ) @@ -187,13 +184,13 @@ class Command(BaseCommand): elif compress == "br": headers["Content-Encoding"] = "br" response = requests.post( - get_envelope_url(dsn) if use_envelope else get_store_url(dsn), + get_envelope_url(dsn), headers=headers, data=compressed_data, ) response = requests.post( - get_envelope_url(dsn) if use_envelope else get_store_url(dsn), + get_envelope_url(dsn), headers=headers, data=compressed_data, ) diff --git a/bsmain/templates/bsmain/auth_token_list.html b/bsmain/templates/bsmain/auth_token_list.html index 611371e..1c069a6 100644 --- a/bsmain/templates/bsmain/auth_token_list.html +++ b/bsmain/templates/bsmain/auth_token_list.html @@ -13,7 +13,7 @@ {% endif %} @@ -24,9 +24,9 @@
{% csrf_token %} {# margins display slightly different from the Add Token + -
+
@@ -36,12 +36,12 @@ - + {% for auth_token in auth_tokens %} - + {% empty %} - + {% for breadcrumb in breadcrumbs %} - + - {% comment %} - {# not _that_ useful + {# not _that_ useful @@ -48,7 +48,7 @@ - diff --git a/issues/templates/issues/event_404.html b/issues/templates/issues/event_404.html index 12ddaf4..366861c 100644 --- a/issues/templates/issues/event_404.html +++ b/issues/templates/issues/event_404.html @@ -16,34 +16,34 @@
{# copy/paste of _event_nav, but not based on any event (we have none), prev/next are meaningless also; first/last only when we have an event_qs to navigate through #}
{# nav="last": when doing a new search on an event-page, you want the most recent matching event to show up #} - + {% if event_qs_count %} - + {% else %} -
+
{% endif %} -
+
-
+
{% if event_qs_count %} - + {% else %} -
+
diff --git a/issues/templates/issues/event_details.html b/issues/templates/issues/event_details.html index 939b2fc..249f9bb 100644 --- a/issues/templates/issues/event_details.html +++ b/issues/templates/issues/event_details.html @@ -7,7 +7,7 @@
-
{{ event.ingested_at|date:"j M G:i T" }} (Event {{ event.digest_order|intcomma }} of {{ issue.digested_event_count|intcomma }} total{% if q %} — {{ event_qs_count|intcomma }} found by search{% endif %})
+
{{ event.ingested_at|date:"j M G:i T" }} (Event {{ event.digest_order|intcomma }} of {{ issue.digested_event_count|intcomma }} total{% if q %} — {{ event_qs_count|intcomma }} found by search{% endif %})
@@ -24,21 +24,21 @@
{% for key, value in key_info %} -
-
{{ key }}
-
{{ value|linebreaks }}
+
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %}
{% if logentry_info %}

Log Entry

- +
{% for key, value in logentry_info %} -
-
{{ key }}
-
{{ value|linebreaks }}
+
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %}
@@ -47,12 +47,12 @@ {% if deployment_info %}

Deployment

- +
{% for key, value in deployment_info %} -
-
{{ key }}
-
{{ value|linebreaks }}
+
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %}
@@ -60,12 +60,12 @@ {% if event.get_tags %}

Tags

- +
{% for tag in event.get_tags %} -
-
{{ tag.value.key.key }}
-
{{ tag.value.value|linebreaks }}
+
+
{{ tag.value.key.key }}
+
{{ tag.value.value|linebreaks }}
{% endfor %}
@@ -75,12 +75,12 @@ {# note: in the (September 2024) sentry.io interface, user info is displayed under 'contexts', but in the data it simply lives top-level as #} {# is implied by parsed_data.user -- I checked in a recent (September 2024) event.schema.json #}

User

- +
{% for key, value in parsed_data.user|items %} -
-
{{ key }}
-
{{ value|linebreaks }}
+
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %}
@@ -89,13 +89,13 @@ {% if parsed_data.request %}

Request

- +
{% for key, value in parsed_data.request|items %} {% if key != "headers" and key != "env" %}{# we deal with these below #} -
-
{{ key }}
-
{{ value|linebreaks }}
{# forloop.last doesn't work given the if-statement; we can fix that by pre-processing in the view #} +
+
{{ key }}
+
{{ value|linebreaks }}
{# forloop.last doesn't work given the if-statement; we can fix that by pre-processing in the view #}
{% endif %} {% endfor %} @@ -105,9 +105,9 @@

REQUEST HEADERS

{% for key, value in parsed_data.request.headers.items %} -
-
{{ key }}
-
{{ value|linebreaks }}
+
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %}
@@ -118,9 +118,9 @@
{% for key, value in parsed_data.request.env.items %} -
-
{{ key }}
-
{{ value|linebreaks }}
+
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %} @@ -133,14 +133,14 @@ {% if contexts %}

Contexts

- +
{% for context_key, context in contexts|items %}

{{ context_key|upper }}

{% for key, value in context|items %} -
-
{{ key }}
-
{{ value|linebreaks }}
+
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %} {% endfor %} @@ -150,7 +150,7 @@ {% comment %} earlier I said about "tracing": I don't believe much in this whole business of tracing, so I'm not going to display the associated data either -now that we "just display all contexts" this is no longer true... some of the feeling persists, but I don't think +now that we "just display all contexts" this is no longer true... some of the feeling persists, but I don't think that I'm so much anti-tracing that I want specifically exclude it from a generic loop. The data's there, let's just show it (in a non-special way) {% endcomment %} @@ -163,12 +163,12 @@ the fact that we commented-out rather than clobbered reveals a small amount of d {% if parsed_data.contexts.runtime %} {# sentry gives this prime location (even a picture)... but why... it's kinda obvious what you're working in right? Maybe I could put it at the top of the modules list instead. And check if there's any other relevant info in that runtime context (RTFM) #}

Runtime

- +
{% for key, value in parsed_data.contexts.runtime|items %} -
-
{{ key }}
-
{{ value|linebreaks }}
+
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %}
@@ -177,14 +177,14 @@ the fact that we commented-out rather than clobbered reveals a small amount of d {% if parsed_data.modules %}

Modules

- +
{# we have observed that (emperically) the keys in most of the above are sorted in some kind of meaningful way from important to non-important #} {# however, for modules I'd rather just have an alphabetical list. #} {% for key, value in parsed_data.modules|sorted_items %} -
-
{{ key }}
-
{{ value|linebreaks }}
+
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %}
@@ -192,12 +192,25 @@ the fact that we commented-out rather than clobbered reveals a small amount of d {% if parsed_data.sdk %}

SDK

- +
{% for key, value in parsed_data.sdk|items %} -
-
{{ key }}
-
{{ value|linebreaks }}
{# the actual value may be a dict/list, but we'll just print it as a string; this is plenty of space for something as (hopefully) irrelevant as the SDK #} +
+
{{ key }}
+
{{ value|linebreaks }}
{# the actual value may be a dict/list, but we'll just print it as a string; this is plenty of space for something as (hopefully) irrelevant as the SDK #} +
+ {% endfor %} +
+{% endif %} + +{% if sourcemaps_images %} +

Sourcemap IDs

+ +
+ {% for key, value in sourcemaps_images %} +
+
{{ key }}
+
{{ value|linebreaks }}
{% endfor %}
@@ -205,12 +218,12 @@ the fact that we commented-out rather than clobbered reveals a small amount of d {% if parsed_data.extra %}

Extra

- +
{% for key, value in parsed_data.extra|items %} -
-
{{ key }}
-
{{ value|linebreaks }}
{# the actual value may be a dict/list, but we'll just print it as a string; this is plenty of space for something as (hopefully) irrelevant #} +
+
{{ key }}
+
{{ value|linebreaks }}
{# the actual value may be a dict/list, but we'll just print it as a string; this is plenty of space for something as (hopefully) irrelevant #}
{% endfor %}
diff --git a/issues/templates/issues/event_list.html b/issues/templates/issues/event_list.html index 3116a59..dca22b9 100644 --- a/issues/templates/issues/event_list.html +++ b/issues/templates/issues/event_list.html @@ -9,7 +9,7 @@
- Showing {{ page_obj.start_index|intcomma }} - {{ page_obj.end_index|intcomma }} of + Showing {{ page_obj.start_index|intcomma }} - {{ page_obj.end_index|intcomma }} of {% if page_obj.paginator.count == issue.stored_event_count and issue.stored_event_count == issue.digested_event_count %} {# all equal #} {{ page_obj.paginator.count|intcomma }} total events. {% elif page_obj.paginator.count == issue.stored_event_count and issue.stored_event_count != issue.digested_event_count %} {# evictions applied #} @@ -27,44 +27,44 @@ {# adapted copy/pasta from _event_nav #}
- + {% if page_obj.has_previous %} {# no need for 'is_first': if you can go to the left, you can go all the way to the left too #} - + {% else %} -
+
{% endif %} {% if page_obj.has_previous %} - + {% else %} -
+
{% endif %} {% if page_obj.has_next %} - + {% else %} -
+
{% endif %} {% if page_obj.has_next %} - + {% else %} -
+
@@ -79,27 +79,27 @@
- - - - - - @@ -116,13 +116,13 @@ TODO {% endcomment %} {% for event in page_obj %} - + - - @@ -152,7 +152,7 @@ TODO {% empty %} - diff --git a/issues/templates/issues/history.html b/issues/templates/issues/history.html index a20a672..2c78725 100644 --- a/issues/templates/issues/history.html +++ b/issues/templates/issues/history.html @@ -11,15 +11,15 @@
- {{ request.user|best_displayname }} + {{ request.user|best_displayname }}
-
+
- Add comment as manual annotation + Add comment as manual annotation
- +
Now @@ -27,12 +27,12 @@
-
{# 'body' part of the balloon (separated by a line) #} +
{# 'body' part of the balloon (separated by a line) #}
{% csrf_token %} - - + +
{# 'body' part of the balloon #} @@ -45,39 +45,39 @@
{% if turningpoint.user_id %} - {{ turningpoint.user|best_displayname }} + {{ turningpoint.user|best_displayname }} {% else %} - Bugsink + Bugsink {% endif %}
-
+
- {{ turningpoint.get_kind_display }} by - {% if turningpoint.user_id %}{{ turningpoint.user|best_displayname }}{% else %}Bugsink{% endif %} + {{ turningpoint.get_kind_display }} by + {% if turningpoint.user_id %}{{ turningpoint.user|best_displayname }}{% else %}Bugsink{% endif %} {% if turningpoint.user_id == request.user.id %} - + {% if turningpoint.kind == 100 %} - + - + {% endif %} {% endif %}
- +
@@ -87,7 +87,7 @@
{% if turningpoint.parsed_metadata or turningpoint.triggering_event_id or turningpoint.comment or turningpoint.user_id == request.user.id %} {# the last clause means: editable, hence space must be reserved #} -
{# 'body' part of the balloon (separated by a line) #} +
{# 'body' part of the balloon (separated by a line) #}
{{ turningpoint.comment|linebreaksbr }} @@ -97,8 +97,8 @@ {% endif %} @@ -150,7 +150,7 @@ {% if turningpoint.triggering_event_id %} {% endif %} diff --git a/issues/templates/issues/issue_list.html b/issues/templates/issues/issue_list.html index 68eb6e3..b603ac8 100644 --- a/issues/templates/issues/issue_list.html +++ b/issues/templates/issues/issue_list.html @@ -7,6 +7,23 @@ {% block content %} + +
@@ -18,33 +35,35 @@
{% endif %} -
+
-
+ {% csrf_token %}
Auth Tokens
{{ auth_token.token }} @@ -50,13 +50,13 @@
- -
+ +
No Auth Tokens. diff --git a/bugsink/app_settings.py b/bugsink/app_settings.py index d6e217f..0374017 100644 --- a/bugsink/app_settings.py +++ b/bugsink/app_settings.py @@ -44,6 +44,8 @@ DEFAULTS = { "DIGEST_IMMEDIATELY": True, "VALIDATE_ON_DIGEST": "none", # other legal values are "warn" and "strict" "KEEP_ENVELOPES": 0, # set to a number to store that many; 0 means "store none". This is for debugging. + "API_LOG_UNIMPLEMENTED_CALLS": False, # if True, log unimplemented API calls; see #153 + "KEEP_ARTIFACT_BUNDLES": False, # if True, artifact bundles are kept in the database on-upload (for debugging) # MAX* below mirror the (current) values for the Sentry Relay "MAX_EVENT_SIZE": _MEBIBYTE, @@ -95,6 +97,14 @@ def _sanitize(settings): if settings["BASE_URL"].endswith("/"): settings["BASE_URL"] = settings["BASE_URL"][:-1] + if settings["SINGLE_USER"]: + # this is implemented as a "hard imply". Pro: 'it just works' even when configurations are half-baked; con: may + # be confusing if you run into the "I thought I set that like so" case. On balance: I'd rather "just fix it" + # than raise some warning/error and have people deal with that. + settings["SINGLE_TEAM"] = True + settings["USER_REGISTRATION"] = CB_NOBODY + settings["TEAM_CREATION"] = CB_NOBODY + def get_settings(): global _settings diff --git a/bugsink/conf_templates/docker.py.template b/bugsink/conf_templates/docker.py.template index 3a2aa9c..c7c4760 100644 --- a/bugsink/conf_templates/docker.py.template +++ b/bugsink/conf_templates/docker.py.template @@ -20,14 +20,17 @@ DEBUG_CSRF = "USE_DEBUG" if os.getenv("DEBUG_CSRF") == "USE_DEBUG" else os.geten SECRET_KEY = os.getenv("SECRET_KEY") -if os.getenv("BEHIND_HTTPS_PROXY", "False").lower() in ("true", "1", "yes"): - # We hard-tie the ideas of "behind a proxy" and "use https" together: there is no reason to go through the trouble - # of setting up a proxy if you're not using https. 
We also hard-code some choices of proxy headers; which means you - # have to match those on the proxy side (but that's easy enough). +BEHIND_HTTPS_PROXY = os.getenv("BEHIND_HTTPS_PROXY", "False").lower() in ("true", "1", "yes") +BEHIND_PLAIN_HTTP_PROXY = os.getenv("BEHIND_PLAIN_HTTP_PROXY", "False").lower() in ("true", "1", "yes") + +if BEHIND_HTTPS_PROXY or BEHIND_PLAIN_HTTP_PROXY: + # We hard-code the choice of proxy headers; which means you have to match those on the proxy side (easy enough). SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Note: slightly redundant, Gunicorn also does this. + USE_X_REAL_IP = True + +if BEHIND_HTTPS_PROXY: SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True - USE_X_REAL_IP = True else: # We don't warn about SESSION_COOKIE_SECURE and CSRF_COOKIE_SECURE; we can't set them to True because some browsers # interpret that as "use https for cookies, even on localhost", and there is no https (BEHIND_HTTPS_PROXY is False). @@ -38,6 +41,7 @@ else: ] + # The time-zone here is the default for display purposes (when no project/user configuration is used). 
# https://docs.djangoproject.com/en/4.2/ref/settings/#std-setting-TIME_ZONE TIME_ZONE = os.getenv("TIME_ZONE", "UTC") @@ -159,6 +163,10 @@ BUGSINK = { # Settings that help with debugging and development ("why isn't Bugsink doing what I expect?") "VALIDATE_ON_DIGEST": os.getenv("VALIDATE_ON_DIGEST", "none").lower(), # other legal values are "warn" and "strict" "KEEP_ENVELOPES": int(os.getenv("KEEP_ENVELOPES", 0)), # keep this many in the database; 0 means "don't keep" + + "API_LOG_UNIMPLEMENTED_CALLS": os.getenv("API_LOG_UNIMPLEMENTED_CALLS", "false").lower() in ("true", "1", "yes"), + "KEEP_ARTIFACT_BUNDLES": os.getenv("KEEP_ARTIFACT_BUNDLES", "false").lower() in ("true", "1", "yes"), + "MINIMIZE_INFORMATION_EXPOSURE": os.getenv("MINIMIZE_INFORMATION_EXPOSURE", "false").lower() in ("true", "1", "yes"), diff --git a/bugsink/context_processors.py b/bugsink/context_processors.py index 7093b8f..6c96408 100644 --- a/bugsink/context_processors.py +++ b/bugsink/context_processors.py @@ -21,15 +21,10 @@ from phonehome.models import Installation SystemWarning = namedtuple('SystemWarning', ['message', 'ignore_url']) -FREE_VERSION_WARNING = mark_safe( - """This is the free version of Bugsink; usage is limited to a single user for local development only. - Using this software in production requires a - paid licence.""") - EMAIL_BACKEND_WARNING = mark_safe( """Email is not set up, emails won't be sent. To get the most out of Bugsink, please - set up - email.""") + set up email.""") def get_snappea_warnings(): @@ -97,35 +92,40 @@ def get_snappea_warnings(): def useful_settings_processor(request): - # name is misnomer, but "who cares". + """Adds useful settings (and more) to the context.""" - installation = Installation.objects.get() + def get_system_warnings(): + # implemented as an inner function to avoid calculating this when it's not actually needed. (i.e. anything + # except "the UI", e.g. ingest, API, admin, 404s). 
Actual 'cache' behavior is not needed, because this is called + # at most once per request (at the top of the template) + installation = Installation.objects.get() - system_warnings = [] + system_warnings = [] - # This list does not include e.g. the dummy.EmailBackend; intentional, because setting _that_ is always an - # indication of intentional "shut up I don't want to send emails" (and we don't want to warn about that). (as - # opposed to QuietConsoleEmailBackend, which is the default for the Docker "no EMAIL_HOST set" situation) - if settings.EMAIL_BACKEND in [ - 'bugsink.email_backends.QuietConsoleEmailBackend'] and not installation.silence_email_system_warning: + # This list does not include e.g. the dummy.EmailBackend; intentional, because setting _that_ is always an + # indication of intentional "shut up I don't want to send emails" (and we don't want to warn about that). (as + # opposed to QuietConsoleEmailBackend, which is the default for the Docker "no EMAIL_HOST set" situation) + if settings.EMAIL_BACKEND in [ + 'bugsink.email_backends.QuietConsoleEmailBackend'] and not installation.silence_email_system_warning: - if getattr(request, "user", AnonymousUser()).is_superuser: - ignore_url = reverse("silence_email_system_warning") - else: - # not a superuser, so can't silence the warning. I'm applying some heuristics here; - # * superusers (and only those) will be able to deal with this (have access to EMAIL_BACKEND) - # * better to still show (though not silencable) the message to non-superusers. - # this will not always be so, but it's a good start. - ignore_url = None + if getattr(request, "user", AnonymousUser()).is_superuser: + ignore_url = reverse("silence_email_system_warning") + else: + # not a superuser, so can't silence the warning. I'm applying some heuristics here; + # * superusers (and only those) will be able to deal with this (have access to EMAIL_BACKEND) + # * better to still show (though not silencable) the message to non-superusers. 
+ # this will not always be so, but it's a good start. + ignore_url = None - system_warnings.append(SystemWarning(EMAIL_BACKEND_WARNING, ignore_url)) + system_warnings.append(SystemWarning(EMAIL_BACKEND_WARNING, ignore_url)) + + return system_warnings + get_snappea_warnings() return { - # Note: no way to actually set the license key yet, so nagging always happens for now. 'site_title': get_settings().SITE_TITLE, 'registration_enabled': get_settings().USER_REGISTRATION == CB_ANYBODY, 'app_settings': get_settings(), - 'system_warnings': system_warnings + get_snappea_warnings(), + 'system_warnings': get_system_warnings, } diff --git a/bugsink/debug_views.py b/bugsink/debug_views.py index 4f6b567..014d656 100644 --- a/bugsink/debug_views.py +++ b/bugsink/debug_views.py @@ -217,11 +217,10 @@ def csrf_debug(request): "posted": True, "POST": request.POST, "META": { - k: request.META.get(k) for k in [ - "HTTP_ORIGIN", - "HTTP_REFERER", - ] + k: request.META.get(k) for k in request.META.keys() if k.startswith("HTTP_") }, + "SECURE_PROXY_SSL_HEADER": settings.SECURE_PROXY_SSL_HEADER[0] if settings.SECURE_PROXY_SSL_HEADER else None, + "process_view": _process_view_steps(middleware, request, context), }) diff --git a/bugsink/decorators.py b/bugsink/decorators.py index ae3d3fb..cb07865 100644 --- a/bugsink/decorators.py +++ b/bugsink/decorators.py @@ -39,7 +39,7 @@ def issue_membership_required(function): if "issue_pk" not in kwargs: raise TypeError("issue_pk must be passed as a keyword argument") issue_pk = kwargs.pop("issue_pk") - issue = get_object_or_404(Issue, pk=issue_pk) + issue = get_object_or_404(Issue, pk=issue_pk, is_deleted=False) kwargs["issue"] = issue if request.user.is_superuser: return function(request, *args, **kwargs) diff --git a/bugsink/middleware.py b/bugsink/middleware.py index 2d5674e..3578fd3 100644 --- a/bugsink/middleware.py +++ b/bugsink/middleware.py @@ -89,6 +89,11 @@ class SetRemoteAddrMiddleware: @staticmethod def 
parse_x_forwarded_for(header_value): + # NOTE: our method parsing _does not_ remove port numbers from the X-Forwarded-For header; such setups are rare + # (but legal according to the spec) but [1] we don't recommend them and [2] we recommend X-Real-IP over + # X-Forwarded-For anyway. + # https://serverfault.com/questions/753682/iis-server-farm-with-arr-why-does-http-x-forwarded-for-have-a-port-nu + if header_value in [None, ""]: # The most typical misconfiguration is to forget to set the header at all, or to have it be empty. In that # case, we'll just set the IP to None, which will mean some data will be missing from your events (but @@ -116,6 +121,7 @@ class SetRemoteAddrMiddleware: def __call__(self, request): if settings.USE_X_REAL_IP: + # NOTE: X-Real-IP never contains a port number AFAICT by searching online so the below is IP-only: request.META["REMOTE_ADDR"] = request.META.get("HTTP_X_REAL_IP", None) elif settings.USE_X_FORWARDED_FOR: # elif: X-Real-IP / X-Forwarded-For are mutually exclusive diff --git a/bugsink/pygments_extensions.py b/bugsink/pygments_extensions.py index bc94bd0..4b255e7 100644 --- a/bugsink/pygments_extensions.py +++ b/bugsink/pygments_extensions.py @@ -16,6 +16,7 @@ from os.path import basename from pygments.lexers import ( ActionScript3Lexer, CLexer, ColdfusionHtmlLexer, CSharpLexer, HaskellLexer, GoLexer, GroovyLexer, JavaLexer, JavascriptLexer, ObjectiveCLexer, PerlLexer, PhpLexer, PythonLexer, RubyLexer, TextLexer, XmlPhpLexer, + PowerShellLexer, CrystalLexer ) _all_lexers = None @@ -105,7 +106,7 @@ def guess_lexer_for_filename(_fn, platform, code=None, **options): def lexer_for_platform(platform, **options): # We can depend on platform having been set: it's a required attribute as per Sentry's docs. - # The LHS in the table below is a fixed list of available platforms, as per the Sentry docs. + # The LHS in the table below is a fixed list of available platforms, as per the Sentry docs. 
(but: #143, #145) # The RHS is my educated guess for what these platforms map to in Pygments. clz = { @@ -114,6 +115,7 @@ def lexer_for_platform(platform, **options): "cfml": ColdfusionHtmlLexer, "cocoa": TextLexer, # I couldn't find the Cocoa lexer in Pygments, this will do for now. "csharp": CSharpLexer, + "crystal": CrystalLexer, # _not_ in the list of "acceptable platforms", but "seen in the wild" (#145) "elixir": TextLexer, # I couldn't find the Elixir lexer in Pygments, this will do for now. "haskell": HaskellLexer, "go": GoLexer, @@ -131,9 +133,10 @@ def lexer_for_platform(platform, **options): "other": TextLexer, # "other" by definition implies that nothing is known. "perl": PerlLexer, # or Perl6Lexer... "php": PhpLexer, + "powershell": PowerShellLexer, # _not_ in the list of "acceptable platforms", but "seen in the wild" (#143) "python": PythonLexer, "ruby": RubyLexer, - }[platform] + }.get(platform, TextLexer) # default to TextLexer if not found; see #143 and #145 for why we fall back at all options = _custom_options(clz, options) return clz(**options) diff --git a/bugsink/scripts/util.py b/bugsink/scripts/util.py new file mode 100644 index 0000000..5d69842 --- /dev/null +++ b/bugsink/scripts/util.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +"""A copy of the Django-generated manage.py, but: + +* in the bugsink.scripts package, such that it can be wrapped by a setuptools-installable script +* with the DJANGO_SETTINGS_MODULE set to `bugsink.settings.default` by default. + +This script can be used to run Django management commands for which the settings _don't matter_. + +Such commands "should probably" be extracted to be Django-independent, but that incurs its own extra work (as well as +future maintenance burden): some utility code is shared, the command utilizes the Django argv parsing, and a separate +repo _always_ brings extra overhead (e.g. for testing, CI, etc.). So this is a pragmatic solution to the problem. 
+""" +import os +import sys + + +def find_commands(management_dir): + # explicitly enumerate Django (settings)-independent commands here (for --help) + if 'bsmain' in management_dir: + return ["stress_test", "send_json"] + return [] + + +def main(): + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bugsink.settings.default') + try: + # we just monkeypatch the find_commands function to return the commands which are actually settings-independent. + import django.core.management + django.core.management.find_commands = find_commands + + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/bugsink/settings/default.py b/bugsink/settings/default.py index 2064246..40fcbc7 100644 --- a/bugsink/settings/default.py +++ b/bugsink/settings/default.py @@ -188,10 +188,17 @@ DATABASES = { }, # This is a "database as message queue" setup; If you're reading this and are thinking of replacing this particular - # DB with mysql/postgres, know that you "probably shouldn't". https://www.bugsink.com/blog/snappea-design/ + # DB with mysql/postgres, know that you "probably shouldn't". + # + # Regarding the location (NAME) of the file: the expectation is that this is local to your Bugsink instance, and + # that there is a your setup has exactly an equal number of [a] gunicorn webservers, [b] snappea foremans, and [c] + # snappea databases; [d] machines/containers (with a strong preference for that number being 1). In short: you + # should probably not touch this, and if you're thinking of pointing to a mounted volume you probably are + # misunderstanding what this is for. 
+ # https://www.bugsink.com/blog/snappea-design/ "snappea": { 'ENGINE': 'bugsink.timed_sqlite_backend', - 'NAME': os.getenv("SNAPPEA_DATABASE_PATH", 'snappea.sqlite3'), + 'NAME': os.getenv("SNAPPEA_DATABASE_PATH", 'snappea.sqlite3'), # NOTE: read the above comment # 'TEST': { postponed, for starters we'll do something like SNAPPEA_ALWAYS_EAGER 'OPTIONS': { 'timeout': 5, diff --git a/bugsink/settings/development.py b/bugsink/settings/development.py index b6b23dc..a9c2c26 100644 --- a/bugsink/settings/development.py +++ b/bugsink/settings/development.py @@ -1,10 +1,9 @@ from .default import * # noqa -from .default import BASE_DIR, INSTALLED_APPS, MIDDLEWARE, LOGGING, DATABASES, I_AM_RUNNING +from .default import BASE_DIR, LOGGING, DATABASES, I_AM_RUNNING import os from sentry_sdk_extensions.transport import MoreLoudlyFailingTransport -from debug_toolbar.middleware import show_toolbar from bugsink.utils import deduce_allowed_hosts, eat_your_own_dogfood @@ -13,33 +12,6 @@ SECRET_KEY = 'django-insecure-$@clhhieazwnxnha-_zah&(bieq%yux7#^07&xsvhn58t)8@xw DEBUG = True -# > The Debug Toolbar is shown only if your IP address is listed in Django’s INTERNAL_IPS setting. This means that for -# > local development, you must add "127.0.0.1" to INTERNAL_IPS. 
-INTERNAL_IPS = [ - "127.0.0.1", -] - -if not I_AM_RUNNING == "TEST": - INSTALLED_APPS += [ - "debug_toolbar", - ] - -MIDDLEWARE = [ - "debug_toolbar.middleware.DebugToolbarMiddleware", -] + MIDDLEWARE - - -def show_toolbar_for_queryparam(request): - if "__debug__" not in request.path and not request.GET.get("debug", ""): - return False - return show_toolbar(request) - - -DEBUG_TOOLBAR_CONFIG = { - "SHOW_TOOLBAR_CALLBACK": show_toolbar_for_queryparam, -} - - # this way of configuring (DB, DB_USER, DB_PASSWORD) is specific to the development environment if os.getenv("DB", "sqlite") == "mysql": DATABASES['default'] = { @@ -91,15 +63,13 @@ SNAPPEA = { "NUM_WORKERS": 1, } -POSTMARK_API_KEY = os.getenv('POSTMARK_API_KEY') - -EMAIL_HOST = 'smtp.postmarkapp.com' -EMAIL_HOST_USER = POSTMARK_API_KEY -EMAIL_HOST_PASSWORD = POSTMARK_API_KEY +EMAIL_HOST = os.getenv("EMAIL_HOST") +EMAIL_HOST_USER = os.getenv("EMAIL_HOST_USER") +EMAIL_HOST_PASSWORD = os.getenv("EMAIL_HOST_PASSWORD") EMAIL_PORT = 587 EMAIL_USE_TLS = True -SERVER_EMAIL = DEFAULT_FROM_EMAIL = 'Klaas van Schelven ' +SERVER_EMAIL = DEFAULT_FROM_EMAIL = 'Klaas van Schelven ' BUGSINK = { @@ -122,10 +92,13 @@ BUGSINK = { "VALIDATE_ON_DIGEST": "warn", # "KEEP_ENVELOPES": 10, + "API_LOG_UNIMPLEMENTED_CALLS": True, # set MAX_EVENTS* very high to be able to do serious performance testing (which I do often in my dev environment) "MAX_EVENTS_PER_PROJECT_PER_5_MINUTES": 1_000_000, "MAX_EVENTS_PER_PROJECT_PER_HOUR": 50_000_000, + + "KEEP_ARTIFACT_BUNDLES": True, # in development: useful to preserve sourcemap uploads } @@ -167,3 +140,6 @@ LOGGING["loggers"]["snappea"]["level"] = "DEBUG" LOGGING["formatters"]["snappea"]["format"] = "{asctime} - {threadName} - {levelname:7} - {message}" ALLOWED_HOSTS = deduce_allowed_hosts(BUGSINK["BASE_URL"]) + +# django-tailwind setting; the below allows for environment-variable overriding of the npm binary path. 
+NPM_BIN_PATH = os.getenv("NPM_BIN_PATH", "npm") diff --git a/bugsink/tests.py b/bugsink/tests.py index d2d4f5a..7dbcb02 100644 --- a/bugsink/tests.py +++ b/bugsink/tests.py @@ -7,6 +7,7 @@ from unittest import TestCase as RegularTestCase from django.test import TestCase as DjangoTestCase from django.test import override_settings from django.core.exceptions import SuspiciousOperation +from .wsgi import allowed_hosts_error_message from .volume_based_condition import VolumeBasedCondition from .streams import ( @@ -376,3 +377,47 @@ class SetRemoteAddrMiddlewareTestCase(RegularTestCase): with self.assertRaises(SuspiciousOperation): SetRemoteAddrMiddleware.parse_x_forwarded_for("123.123.123.123,1.2.3.4") + + +class AllowedHostsMsgTestCase(DjangoTestCase): + + def test_allowed_hosts_error_message(self): + self.maxDiff = None + + # Note: cases for ALLOWED_HOSTS=[] are redundant because Django will refuse to start in that case. + + # ALLOWED_HOST only contains non-production domains that we typically _do not_ want to suggest in the msg + self.assertEqual( + "'Host: foobar' as sent by browser/proxy not in ALLOWED_HOSTS=['localhost', '127.0.0.1']. " + "Add 'foobar' to ALLOWED_HOSTS or configure proxy to use 'Host: your.host.example'.", + allowed_hosts_error_message("foobar", ["localhost", "127.0.0.1"])) + + # proxy misconfig: proxy speaks to "localhost" + self.assertEqual( + "'Host: localhost' as sent by browser/proxy not in ALLOWED_HOSTS=['testserver']. " + "Configure proxy to use 'Host: testserver' or add the desired host to ALLOWED_HOSTS.", + allowed_hosts_error_message("localhost", ["testserver"])) + + # proxy misconfig: proxy speaks (local) IP + self.assertEqual( + "'Host: 127.0.0.1' as sent by browser/proxy not in ALLOWED_HOSTS=['testserver']. 
" + "Configure proxy to use 'Host: testserver' or add the desired host to ALLOWED_HOSTS.", + allowed_hosts_error_message("127.0.0.1", ["testserver"])) + + # proxy misconfig: proxy speaks (remote) IP + self.assertEqual( + "'Host: 123.123.123.123' as sent by browser/proxy not in ALLOWED_HOSTS=['testserver']. " + "Configure proxy to use 'Host: testserver' or add the desired host to ALLOWED_HOSTS.", + allowed_hosts_error_message("123.123.123.123", ["testserver"])) + + # plain old typo ALLOWED_HOSTS-side + self.assertEqual( + "'Host: testserver' as sent by browser/proxy not in ALLOWED_HOSTS=['teeestserver']. " + "Add 'testserver' to ALLOWED_HOSTS or configure proxy to use 'Host: teeestserver'.", + allowed_hosts_error_message("testserver", ["teeestserver"])) + + # plain old typo proxy-config-side + self.assertEqual( + "'Host: teeestserver' as sent by browser/proxy not in ALLOWED_HOSTS=['testserver']. " + "Add 'teeestserver' to ALLOWED_HOSTS or configure proxy to use 'Host: testserver'.", + allowed_hosts_error_message("teeestserver", ["testserver"])) diff --git a/bugsink/timed_sqlite_backend/base.py b/bugsink/timed_sqlite_backend/base.py index df76a4e..9d07718 100644 --- a/bugsink/timed_sqlite_backend/base.py +++ b/bugsink/timed_sqlite_backend/base.py @@ -1,3 +1,4 @@ +import logging from collections import namedtuple from copy import deepcopy import time @@ -10,6 +11,9 @@ from django.db.backends.sqlite3.base import ( DatabaseWrapper as UnpatchedDatabaseWrapper, SQLiteCursorWrapper as UnpatchedSQLiteCursorWrapper, ) + +logger = logging.getLogger("bugsink") + # We disinguish between the default runtime limit for a connection (set in the settings) and a runtime limit set by the # "with different_runtime_limit" idiom, i.e. temporarily. 
The reason we need to distinguish these two concepts (and keep # track of their values) explicitly, and provide the fallback getter mechanism (cm if available, otherwise @@ -42,7 +46,7 @@ def _set_runtime_limit(using, is_default_for_connection, seconds): ) -def _get_runtime_limit(using=None): +def _get_runtime_limit(using): if using is None: using = DEFAULT_DB_ALIAS @@ -76,11 +80,12 @@ def different_runtime_limit(seconds, using=None): @contextmanager -def limit_runtime(conn): +def limit_runtime(alias, conn, query=None, params=None): + # query & params are only used for logging purposes; they are not used to actually limit the runtime. start = time.time() def check_time(): - if time.time() > start + _get_runtime_limit(): + if time.time() > start + _get_runtime_limit(alias): return 1 return 0 @@ -93,6 +98,18 @@ def limit_runtime(conn): yield + if time.time() > start + _get_runtime_limit(alias) + 0.01: + # https://sqlite.org/forum/forumpost/fa65709226 to see why we need this. + # + # Doing an actual timeout _now_ doesn't achieve anything (the goal is generally to avoid things taking too long, + # once you're here only time-travel can help you). So `logger.error()` rather than `raise OperationalError`. 
+ # + # + 0.05s to avoid false positives like so: the query completing in exactly runtime_limit with the final check + # coming a fraction of a second later (0.01s is assumed to be well on the "avoid false positives" side of the + # trade-off) + took = time.time() - start + logger.error("limit_runtime miss (%.3fs): %s %s", took, query, params) + conn.set_progress_handler(None, 0) @@ -139,23 +156,29 @@ class DatabaseWrapper(UnpatchedDatabaseWrapper): # return PrintOnClose(result) def create_cursor(self, name=None): - return self.connection.cursor(factory=SQLiteCursorWrapper) + return self.connection.cursor(factory=get_sqlite_cursor_wrapper(self.alias)) -class SQLiteCursorWrapper(UnpatchedSQLiteCursorWrapper): +def get_sqlite_cursor_wrapper(alias): + if alias is None: + alias = DEFAULT_DB_ALIAS - def execute(self, query, params=None): - if settings.I_AM_RUNNING == "MIGRATE": - # migrations in Sqlite are often slow (drop/recreate tables, etc); so we don't want to limit them - return super().execute(query, params) + class SQLiteCursorWrapper(UnpatchedSQLiteCursorWrapper): - with limit_runtime(self.connection): - return super().execute(query, params) + def execute(self, query, params=None): + if settings.I_AM_RUNNING == "MIGRATE": + # migrations in Sqlite are often slow (drop/recreate tables, etc); so we don't want to limit them + return super().execute(query, params) - def executemany(self, query, param_list): - if settings.I_AM_RUNNING == "MIGRATE": - # migrations in Sqlite are often slow (drop/recreate tables, etc); so we don't want to limit them - return super().executemany(query, param_list) + with limit_runtime(alias, self.connection, query=query, params=params): + return super().execute(query, params) - with limit_runtime(self.connection): - return super().executemany(query, param_list) + def executemany(self, query, param_list): + if settings.I_AM_RUNNING == "MIGRATE": + # migrations in Sqlite are often slow (drop/recreate tables, etc); so we don't want to limit 
them + return super().executemany(query, param_list) + + with limit_runtime(alias, self.connection, query=query, params=param_list): + return super().executemany(query, param_list) + + return SQLiteCursorWrapper diff --git a/bugsink/transaction.py b/bugsink/transaction.py index 23fda45..5129e58 100644 --- a/bugsink/transaction.py +++ b/bugsink/transaction.py @@ -7,6 +7,9 @@ import threading from django.db import transaction as django_db_transaction from django.db import DEFAULT_DB_ALIAS +from django.conf import settings + +from snappea.settings import get_settings as get_snappea_settings performance_logger = logging.getLogger("bugsink.performance.db") local_storage = threading.local() @@ -153,7 +156,7 @@ class ImmediateAtomic(SuperDurableAtomic): connection = django_db_transaction.get_connection(self.using) if hasattr(connection, "_start_transaction_under_autocommit"): - connection._start_transaction_under_autocommit_original = connection._start_transaction_under_autocommit + self._start_transaction_under_autocommit_original = connection._start_transaction_under_autocommit connection._start_transaction_under_autocommit = types.MethodType( _start_transaction_under_autocommit_patched, connection) @@ -183,9 +186,9 @@ class ImmediateAtomic(SuperDurableAtomic): performance_logger.info(f"{took * 1000:6.2f}ms IMMEDIATE transaction{using_clause}") connection = django_db_transaction.get_connection(self.using) - if hasattr(connection, "_start_transaction_under_autocommit"): - connection._start_transaction_under_autocommit = connection._start_transaction_under_autocommit_original - del connection._start_transaction_under_autocommit_original + if hasattr(self, "_start_transaction_under_autocommit_original"): + connection._start_transaction_under_autocommit = self._start_transaction_under_autocommit_original + del self._start_transaction_under_autocommit_original @contextmanager @@ -206,10 +209,32 @@ def immediate_atomic(using=None, savepoint=True, durable=True): else: 
immediate_atomic = ImmediateAtomic(using, savepoint, durable) - # https://stackoverflow.com/a/45681273/339144 provides some context on nesting context managers; and how to proceed - # if you want to do this with an arbitrary number of context managers. - with SemaphoreContext(using), immediate_atomic: - yield + if get_snappea_settings().TASK_ALWAYS_EAGER: + # In ALWAYS_EAGER mode we cannot use SemaphoreContext as the outermost context, because any delay_on_commit + # tasks that are triggered on __exit__ of the (in that case, inner) immediate_atomic, when themselves initiating + # a new task-with-transaction, will not be able to acquire the semaphore (it's not been released yet). + # Fundamentally the solution would be to push the "on commit" logic onto the outermost context, but that seems + # fragile (monkeypatching/heavy overriding) and since the whole SemaphoreContext is only needed as an extra + # guard against WAL growth (not something we care about in the non-production setup), we just simplify for that + # case. + with immediate_atomic: + yield + + elif "sqlite" not in settings.DATABASES[using]["ENGINE"]: + # The SemaphoreContext was added specifically to address the WAL growth issue in sqlite; better not to use it + # for other database backends; in particular, if such databases have longer default timeouts, then the error + # message may be confusing (semaphore timeout after 10s throws an error... while the thread that hogs the DB + # is _not_ (yet) timed out) + # + # in-string matching matches both our 'timed' backend and the django default. + with immediate_atomic: + yield + + else: + # https://stackoverflow.com/a/45681273/339144 provides some context on nesting context managers; and how to + # proceed if you want to do this with an arbitrary number of context managers. 
+ with SemaphoreContext(using), immediate_atomic: + yield def delay_on_commit(function, *args, **kwargs): diff --git a/bugsink/urls.py b/bugsink/urls.py index 122e6c2..560272b 100644 --- a/bugsink/urls.py +++ b/bugsink/urls.py @@ -3,7 +3,7 @@ from django.conf import settings from django.contrib import admin from django.urls import include, path from django.contrib.auth import views as auth_views -from django.views.generic import RedirectView +from django.views.generic import RedirectView, TemplateView from alerts.views import debug_email as debug_alerts_email from users.views import debug_email as debug_users_email @@ -11,9 +11,10 @@ from teams.views import debug_email as debug_teams_email from bugsink.app_settings import get_settings from users.views import signup, confirm_email, resend_confirmation, request_reset_password, reset_password, preferences from ingest.views import download_envelope -from files.views import chunk_upload, artifact_bundle_assemble +from files.views import chunk_upload, artifact_bundle_assemble, api_catch_all +from bugsink.decorators import login_exempt -from .views import home, trigger_error, favicon, settings_view, silence_email_system_warning, counts +from .views import home, trigger_error, favicon, settings_view, silence_email_system_warning, counts, health_check_ready from .debug_views import csrf_debug @@ -25,6 +26,8 @@ admin.site.index_title = "Admin" # everyone calls this the "admin" anyway. 
Let' urlpatterns = [ path('', home, name='home'), + path("health/ready", health_check_ready, name="health_check_ready"), + path("accounts/signup/", signup, name="signup"), path("accounts/resend-confirmation/", resend_confirmation, name="resend_confirmation"), path("accounts/confirm-email//", confirm_email, name="confirm_email"), @@ -49,6 +52,8 @@ urlpatterns = [ path('api/', include('ingest.urls')), + path('api/', api_catch_all, name='api_catch_all'), + # not in /api/ because it's not part of the ingest API, but still part of the ingest app path('ingest/envelope//', download_envelope, name='download_envelope'), @@ -73,6 +78,7 @@ urlpatterns = [ path('debug/csrf/', csrf_debug, name='csrf_debug'), path("favicon.ico", favicon), + path("robots.txt", login_exempt(TemplateView.as_view(template_name="robots.txt", content_type="text/plain"))), ] if settings.DEBUG: @@ -83,14 +89,6 @@ if settings.DEBUG: path('trigger-error/', trigger_error), ] - try: - import debug_toolbar # noqa - urlpatterns = [ - path('__debug__/', include('debug_toolbar.urls')), - ] + urlpatterns - except ImportError: - pass - handler400 = "bugsink.views.bad_request" handler403 = "bugsink.views.permission_denied" diff --git a/bugsink/utils.py b/bugsink/utils.py index 2682820..def5795 100644 --- a/bugsink/utils.py +++ b/bugsink/utils.py @@ -1,7 +1,10 @@ +from collections import defaultdict from urllib.parse import urlparse from django.core.mail import EmailMultiAlternatives from django.template.loader import get_template +from django.apps import apps +from django.db.models import ForeignKey, F from .version import version @@ -161,3 +164,150 @@ def eat_your_own_dogfood(sentry_dsn, **kwargs): sentry_sdk.init( **default_kwargs, ) + + +def get_model_topography(): + """ + Returns a dependency graph mapping: + referenced_model_key -> [ + (referrer_model_class, fk_name), + ... 
+ ] + """ + dep_graph = defaultdict(list) + for model in apps.get_models(): + for field in model._meta.get_fields(include_hidden=True): + if isinstance(field, ForeignKey): + referenced_model = field.related_model + referenced_key = f"{referenced_model._meta.app_label}.{referenced_model.__name__}" + dep_graph[referenced_key].append((model, field.name)) + return dep_graph + + +def fields_for_prune_orphans(model): + if model.__name__ == "IssueTag": + return ("value_id",) + return () + + +def prune_orphans(model, d_ids_to_check): + """For some model, does dangling-model-cleanup. + + In a sense the oposite of delete_deps; delete_deps takes care of deleting the recursive closure of things that point + to some root. The present function cleans up things that are being pointed to (and, after some other thing is + deleted, potentially are no longer being pointed to, hence 'orphaned'). + + This is the hardcoded edition (IssueTag only); we _could_ try to think about doing this generically based on the + dependency graph, but it's quite questionably whether a combination of generic & performant is easy to arrive at and + worth it. + + pruning of TagValue is done "inline" (as opposed to using a GC-like vacuum "later") because, whatever the exact + performance trade-offs may be, the following holds true: + + 1. the inline version is easier to reason about, it "just happens ASAP", and in the context of a given issue; + vacuum-based has to take into consideration the full DB including non-orphaned values. + 2. repeated work is somewhat minimalized b/c of the IssueTag/EventTag relationship as described in prune_tagvalues. 
+ """ + + from tags.models import prune_tagvalues # avoid circular import + + if model.__name__ != "IssueTag": + return # we only prune IssueTag orphans + + ids_to_check = [d["value_id"] for d in d_ids_to_check] # d_ids_to_check: mirrors fields_for_prune_orphans(model) + + prune_tagvalues(ids_to_check) + + +def do_pre_delete(project_id, model, pks_to_delete, is_for_project): + "More model-specific cleanup, if needed; only for Event model at the moment." + + if model.__name__ != "Event": + return # we only do more cleanup for Event + + from projects.models import Project + from events.models import Event + from events.retention import cleanup_events_on_storage + + cleanup_events_on_storage( + Event.objects.filter(pk__in=pks_to_delete).exclude(storage_backend=None) + .values_list("id", "storage_backend") + ) + + if is_for_project: + # no need to update the stored_event_count for the project, because the project is being deleted + return + + # Update project stored_event_count to reflect the deletion of the events. note: alternatively, we could do this + # on issue-delete (issue.stored_event_count is known too); potato, potato though. + # note: don't bother to do the same thing for Issue.stored_event_count, since we're in the process of deleting Issue + Project.objects.filter(id=project_id).update(stored_event_count=F('stored_event_count') - len(pks_to_delete)) + + +def delete_deps_with_budget(project_id, referring_model, fk_name, referred_ids, budget, dep_graph, is_for_project): + r""" + Deletes all objects of type referring_model that refer to any of the referred_ids via fk_name. + Returns the number of deleted objects. + And does this recursively (i.e. if there are further dependencies, it will delete those as well). + + Caller This Func + | | + V V + referring_model + ^ / + \-------fk_name---- + + referred_ids relevant_ids (deduced using a query) + """ + num_deleted = 0 + + # Fetch ids of referring objects and their referred ids. 
Note that an index of fk_name can be assumed to exist, + # because fk_name is a ForeignKey field, and Django automatically creates an index for ForeignKey fields unless + # instructed otherwise: https://github.com/django/django/blob/7feafd79a481/django/db/models/fields/related.py#L1025 + relevant_ids = list( + referring_model.objects.filter(**{f"{fk_name}__in": referred_ids}).order_by(f"{fk_name}_id", 'pk').values( + *(('pk',) + fields_for_prune_orphans(referring_model)) + )[:budget] + ) + + if not relevant_ids: + # we didn't find any referring objects. optimization: skip any recursion and referring_model.delete() + return 0 + + # The recursing bit: + for_recursion = dep_graph.get(f"{referring_model._meta.app_label}.{referring_model.__name__}", []) + + for model_for_recursion, fk_name_for_recursion in for_recursion: + num_deleted += delete_deps_with_budget( + project_id, + model_for_recursion, + fk_name_for_recursion, + [d["pk"] for d in relevant_ids], + budget - num_deleted, + dep_graph, + is_for_project, + ) + + if num_deleted >= budget: + return num_deleted + + # If this point is reached: we have deleted all referring objects that we could delete, and we still have budget + # left. We can now delete the referring objects themselves (limited by budget). + relevant_ids_after_rec = relevant_ids[:budget - num_deleted] + + do_pre_delete(project_id, referring_model, [d['pk'] for d in relevant_ids_after_rec], is_for_project) + + my_num_deleted, del_d = referring_model.objects.filter(pk__in=[d['pk'] for d in relevant_ids_after_rec]).delete() + num_deleted += my_num_deleted + assert set(del_d.keys()) == {referring_model._meta.label} # assert no-cascading (we do that ourselves) + + if is_for_project: + # short-circuit: project-deletion implies "no orphans" because the project kill everything with it. + return num_deleted + + # Note that prune_orphans doesn't respect the budget. 
Reason: it's not easy to do, b/c the order is reversed (we + # would need to predict somehow at the previous step how much budget to leave unused) and we don't care _that much_ + # about a precise budget "at the edges of our algo", as long as we don't have a "single huge blocking thing". + prune_orphans(referring_model, relevant_ids_after_rec) + + return num_deleted diff --git a/bugsink/views.py b/bugsink/views.py index e94320c..884b05a 100644 --- a/bugsink/views.py +++ b/bugsink/views.py @@ -98,6 +98,20 @@ def home(request): return redirect("team_list") +@login_exempt +def health_check_ready(request): + """ + A simple health check that returns 200 if the server is up and running. To be used in containerized environments + in a way that 'makes sense to you', e.g. as a readiness probe in Kubernetes. + + What this "proves" is that the application server is up and accepting requests. + + By design, this health check does not check the database connection; we only make a statement about _our own + health_; this is to avoid killing the app-server if the database is down. 
+ """ + return HttpResponse("OK", content_type="text/plain") + + @login_exempt def trigger_error(request): raise Exception("Exception triggered on purpose to debug error handling") diff --git a/bugsink/wsgi.py b/bugsink/wsgi.py index 65d3892..b527904 100644 --- a/bugsink/wsgi.py +++ b/bugsink/wsgi.py @@ -13,15 +13,46 @@ import django from django.core.handlers.wsgi import WSGIHandler, WSGIRequest from django.core.exceptions import DisallowedHost +from django.http.request import split_domain_port, validate_host +from django.core.validators import validate_ipv46_address +from django.core.exceptions import ValidationError os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bugsink_conf') +def is_ip_address(value): + try: + validate_ipv46_address(value) + return True + except ValidationError: + return False + + +def allowed_hosts_error_message(domain, allowed_hosts): + # Start with the plain statement of fact: x not in y. + msg = "'Host: %s' as sent by browser/proxy not in ALLOWED_HOSTS=%s. " % (domain, allowed_hosts) + + suggestable_allowed_hosts = [host for host in allowed_hosts if host not in ["localhost", ".localhost", "127.0.0.1"]] + if len(suggestable_allowed_hosts) == 0: + proxy_suggestion = "your.host.example" + else: + proxy_suggestion = " | ".join(suggestable_allowed_hosts) + + if domain == "localhost" or is_ip_address(domain): + # in these cases Proxy misconfig is the more likely culprit. Point to that _first_ and (while still mentioning + # ALLOWED_HOSTS); don't mention the specific domain that was used as a likely "good value" for ALLLOWED_HOSTS. + return msg + "Configure proxy to use 'Host: %s' or add the desired host to ALLOWED_HOSTS." % proxy_suggestion + + # the domain looks "pretty good"; be verbose/explicit about the 2 possible changes in config. + return msg + "Add '%s' to ALLOWED_HOSTS or configure proxy to use 'Host: %s'." 
% (domain, proxy_suggestion) + + class CustomWSGIRequest(WSGIRequest): """ - Custom WSQIRequest subclass with 2 fixes: + Custom WSGIRequest subclass with 3 fixes/changes: * Chunked Transfer Encoding (Django's behavior is broken) + * Skip ALLOWED_HOSTS validation for /health/ endpoints (see #140) * Better error message for disallowed hosts Note: used in all servers (in gunicorn through wsgi.py; in Django's runserver through WSGI_APPLICATION) @@ -49,27 +80,33 @@ class CustomWSGIRequest(WSGIRequest): We're leaking a bit of information here, but I don't think it's too much TBH -- especially in the light of ssl certificates being specifically tied to the domain name. """ + if self.path.startswith("/health/"): + # For /health/ endpoints, we skip the ALLOWED_HOSTS validation (see #140). + return self._get_raw_host() - # Import pushed down to make it absolutely clear we avoid circular importing/loading the wrong thing: + # copied from HttpRequest.get_host() in Django 4.2, with modifications. + + host = self._get_raw_host() + + # Allow variants of localhost if ALLOWED_HOSTS is empty and DEBUG=True. from django.conf import settings + allowed_hosts = settings.ALLOWED_HOSTS + if settings.DEBUG and not allowed_hosts: + allowed_hosts = [".localhost", "127.0.0.1", "[::1]"] - try: - return super().get_host() - except DisallowedHost as e: - message = str(e) + domain, port = split_domain_port(host) + if domain and validate_host(domain, allowed_hosts): + return host + else: + if domain: + msg = allowed_hosts_error_message(domain, allowed_hosts) - if "ALLOWED_HOSTS" in message: - # The following 3 lines are copied from HttpRequest.get_host() in Django 4.2 - allowed_hosts = settings.ALLOWED_HOSTS - if settings.DEBUG and not allowed_hosts: - allowed_hosts = [".localhost", "127.0.0.1", "[::1]"] - - message = message[:-1 * len(".")] - message += ", which is currently set to %s." 
% repr(allowed_hosts) - - # from None, because our DisallowedHost is so directly caused by super()'s DisallowedHost that cause and - # effect are the same, i.e. cause must be hidden from the stacktrace for the sake of clarity. - raise DisallowedHost(message) from None + else: + msg = "Invalid HTTP_HOST header: %r." % host + msg += ( + " The domain name provided is not valid according to RFC 1034/1035." + ) + raise DisallowedHost(msg) class CustomWSGIHandler(WSGIHandler): diff --git a/compat/dsn.py b/compat/dsn.py index 19f2bb2..561ebaa 100644 --- a/compat/dsn.py +++ b/compat/dsn.py @@ -7,6 +7,10 @@ def _colon_port(port): def build_dsn(base_url, project_id, public_key): parts = urllib.parse.urlsplit(base_url) + + assert parts.scheme in ("http", "https"), "The BASE_URL setting must be a valid URL (starting with http or https)." + assert parts.hostname, "The BASE_URL setting must be a valid URL. The hostname must be set." + return (f"{ parts.scheme }://{ public_key }@{ parts.hostname }{ _colon_port(parts.port) }" + f"{ parts.path }/{ project_id }") diff --git a/compose-sample.yaml b/compose-sample.yaml index 9944ebf..767daad 100644 --- a/compose-sample.yaml +++ b/compose-sample.yaml @@ -2,6 +2,8 @@ services: mysql: image: mysql:latest restart: unless-stopped + command : "--binlog_expire_logs_seconds=3600" + environment: MYSQL_ROOT_PASSWORD: change_your_passwords_for_real_usage # TODO: Change this MYSQL_DATABASE: bugsink @@ -26,6 +28,8 @@ services: CREATE_SUPERUSER: admin:admin # Change this (or remove it and execute 'createsuperuser' against the running container) PORT: 8000 DATABASE_URL: mysql://root:change_your_passwords_for_real_usage@mysql:3306/bugsink + BEHIND_HTTPS_PROXY: "false" # Change this for setups behind a proxy w/ ssl enabled + BASE_URL: "http://localhost:8000" healthcheck: test: ["CMD-SHELL", "python -c 'import requests; requests.get(\"http://localhost:8000/\").raise_for_status()'"] interval: 5s diff --git a/events/admin.py b/events/admin.py index 
d9a4fb4..73783c3 100644 --- a/events/admin.py +++ b/events/admin.py @@ -1,11 +1,17 @@ +import json + from django.utils.html import escape, mark_safe from django.contrib import admin +from django.views.decorators.csrf import csrf_protect +from django.utils.decorators import method_decorator -import json +from bugsink.transaction import immediate_atomic from projects.admin import ProjectFilter from .models import Event +csrf_protect_m = method_decorator(csrf_protect) + @admin.register(Event) class EventAdmin(admin.ModelAdmin): @@ -90,3 +96,28 @@ class EventAdmin(admin.ModelAdmin): def on_site(self, obj): return mark_safe('View') + + def get_deleted_objects(self, objs, request): + to_delete = list(objs) + ["...all its related objects... (delayed)"] + model_count = { + Event: len(objs), + } + perms_needed = set() + protected = [] + return to_delete, model_count, perms_needed, protected + + def delete_queryset(self, request, queryset): + # NOTE: not the most efficient; it will do for a first version. 
+ with immediate_atomic(): + for obj in queryset: + obj.delete_deferred() + + def delete_model(self, request, obj): + with immediate_atomic(): + obj.delete_deferred() + + @csrf_protect_m + def delete_view(self, request, object_id, extra_context=None): + # the superclass version, but with the transaction.atomic context manager commented out (we do this ourselves) + # with transaction.atomic(using=router.db_for_write(self.model)): + return self._delete_view(request, object_id, extra_context) diff --git a/events/factories.py b/events/factories.py index bd3a9ef..18567e6 100644 --- a/events/factories.py +++ b/events/factories.py @@ -43,11 +43,24 @@ def create_event(project=None, issue=None, timestamp=None, event_data=None): ) -def create_event_data(): +def create_event_data(exception_type=None): # create minimal event data that is valid as per from_json() - return { + result = { "event_id": uuid.uuid4().hex, "timestamp": timezone.now().isoformat(), "platform": "python", } + + if exception_type is not None: + # allow for a specific exception type to get unique groupers/issues + result["exception"] = { + "values": [ + { + "type": exception_type, + "value": "This is a test exception", + } + ] + } + + return result diff --git a/events/management/commands/cleanup_eventstorage.py b/events/management/commands/cleanup_eventstorage.py index 3391873..862a353 100644 --- a/events/management/commands/cleanup_eventstorage.py +++ b/events/management/commands/cleanup_eventstorage.py @@ -16,15 +16,31 @@ class Command(BaseCommand): # are practice and theory the same. In practice, they are not. def add_arguments(self, parser): - parser.add_argument('storage_name', type=str, help='The name of the storage to clean up') + storage_names = get_settings().EVENT_STORAGES.keys() + available_storages = ", ".join(storage_names) + + if storage_names: + help_text = f'Name of the storage to clean up (one of: {available_storages})' + else: + help_text = 'Name of the storage to clean up. 
You have not configured any event storages, so storage ' \ + 'cleanup is not possible.' + + parser.add_argument('storage_name', type=str, help=help_text) def handle(self, *args, **options): self.stopped = False signal.signal(signal.SIGINT, self.handle_sigint) - storage_names = ",".join(get_settings().EVENT_STORAGES.keys()) + storage_names = get_settings().EVENT_STORAGES.keys() + available_storages = ", ".join(storage_names) + if options['storage_name'] not in storage_names: - print(f"Storage name {options['storage_name']} not found. Available storage names: {storage_names}") + if not storage_names: + print(f"Storage name {options['storage_name']} not found because you have not configured any event " + "storage at all so cleanup of event-storage doesn't really make sense.") + sys.exit(1) + + print(f"Storage name {options['storage_name']} not found. Available storage names: {available_storages}") sys.exit(1) storage = get_storage(options['storage_name']) diff --git a/events/management/commands/make_consistent.py b/events/management/commands/make_consistent.py index 52935bc..053c23c 100644 --- a/events/management/commands/make_consistent.py +++ b/events/management/commands/make_consistent.py @@ -11,6 +11,9 @@ from bugsink.transaction import immediate_atomic from bugsink.timed_sqlite_backend.base import allow_long_running_queries from bugsink.moreiterutils import batched +from projects.tasks import delete_project_deps +from issues.tasks import delete_issue_deps + class DryRunException(Exception): # transaction.rollback doesn't work in atomic blocks; a poor man's substitute is to just raise something specific. @@ -23,7 +26,7 @@ def _delete_for_missing_fk(clazz, field_name): ## Dangling FKs: Non-existing objects may come into being when people muddle in the database directly with foreign key checks turned - off (note that fk checks are turned off by default in SQLite for backwards compatibility reasons). 
+ off (note that fk checks are turned off by default in sqlite's CLI for backwards compatibility reasons). In the future it's further possible that there will be pieces the actual Bugsink code where FK-checks are turned off temporarily (e.g. when deleting a project with very many related objects). (In March 2025 there was no such code @@ -76,6 +79,8 @@ def make_consistent(): _delete_for_missing_fk(Release, 'project') + _delete_for_missing_fk(EventTag, 'issue') # See #132 for the ordering of this statement + _delete_for_missing_fk(Event, 'project') _delete_for_missing_fk(Event, 'issue') @@ -138,7 +143,7 @@ class Command(BaseCommand): # In theory, this command should not be required, because Bugsink _should_ leave itself in a consistent state after # every operation. However, in practice Bugsink may not always do as promised, people reach into the database for - # whatever reason, or things go out of whack during development. + # whatever reason, things go out of whack during development, or a crash of snappea leaves half-finished work. def add_arguments(self, parser): parser.add_argument('--dry-run', action='store_true', help="Roll back all changes after making them.") @@ -151,5 +156,32 @@ class Command(BaseCommand): make_consistent() if options['dry_run']: raise DryRunException("Dry run requested; rolling back changes.") + + if not options['dry_run']: + # for is_deleted objects, we enqueue deletion. + # + # such objects may remain dangling forever because the "enqueue if work remains" is not robust for + # snappea-shutdown (snappea will not detect remaining work on-restartup). 
+ # + # doing this as an "enqueue work in snappea" solution is somewhat unsatisfying, because: + # * it means more stuff will happen _after_ make_consistent is done running; + # * it means that inconstencies created during the deferred process are not made consistent + # * because we cannot detect what snappea is currently doing (at least not without hacks) we might + # doubly-enqueue (it will still work, but one process will have a NoSuchObjectError at the end) + # + # I still picked this, because the alternative of doing it inline has its own problems: + # * we may easily exhaust the stack when calling this on lots of objects; bigger batches isn't a + # solution for this because it has its own problems (we don't have batches for nothing). + # * our tasks have "immediate atomic" on the inside, not a happy marriage with the approach here + # (including dry-run) + + for obj in Project.objects.filter(is_deleted=True): + print("Enqueuing deletion of project dependencies for %s" % obj) + delete_project_deps.delay(str(obj.pk)) + + for obj in Issue.objects.filter(is_deleted=True): + print("Enqueuing deletion of issue dependencies for %s" % obj) + delete_issue_deps.delay(str(obj.project_id), str(obj.pk)) + except DryRunException: print("Changes have been rolled back (dry-run)") diff --git a/events/migrations/0001_initial.py b/events/migrations/0001_initial.py index 23a2a28..086b3b0 100644 --- a/events/migrations/0001_initial.py +++ b/events/migrations/0001_initial.py @@ -18,7 +18,7 @@ class Migration(migrations.Migration): ('event_id', models.UUIDField(editable=False, help_text='As per the sent data')), ('data', models.TextField()), ('timestamp', models.DateTimeField(db_index=True)), - ('platform', models.CharField(choices=[('as3', 'As3'), ('c', 'C'), ('cfml', 'Cfml'), ('cocoa', 'Cocoa'), ('csharp', 'Csharp'), ('elixir', 'Elixir'), ('haskell', 'Haskell'), ('go', 'Go'), ('groovy', 'Groovy'), ('java', 'Java'), ('javascript', 'Javascript'), ('native', 'Native'), ('node', 
'Node'), ('objc', 'Objc'), ('other', 'Other'), ('perl', 'Perl'), ('php', 'Php'), ('python', 'Python'), ('ruby', 'Ruby')], max_length=64)), + ('platform', models.CharField(max_length=64)), ('level', models.CharField(blank=True, choices=[('fatal', 'Fatal'), ('error', 'Error'), ('warning', 'Warning'), ('info', 'Info'), ('debug', 'Debug')], max_length=7)), ('logger', models.CharField(blank=True, default='', max_length=64)), ('transaction', models.CharField(blank=True, default='', max_length=200)), diff --git a/events/migrations/0020_remove_events_with_null_issue_or_grouping.py b/events/migrations/0020_remove_events_with_null_issue_or_grouping.py new file mode 100644 index 0000000..888bb1e --- /dev/null +++ b/events/migrations/0020_remove_events_with_null_issue_or_grouping.py @@ -0,0 +1,34 @@ +# Generated by Django 4.2.21 on 2025-07-03 08:30 + +from django.db import migrations + + +def remove_events_with_null_fks(apps, schema_editor): + # Up until now, we have various models w/ .issue=FK(null=True, on_delete=models.SET_NULL) + # Although it is "not expected" in the interface, issue-deletion would have led to those + # objects with a null issue. We're about to change that to .issue=FK(null=False, ...) which + # would crash if we don't remove those objects first. Object-removal is "fine" though, because + # as per the meaning of the SET_NULL, these objects were "dangling" anyway. 
+ + Event = apps.get_model("events", "Event") + + Event.objects.filter(issue__isnull=True).delete() + + # overcomplete b/c .issue would imply this, done anyway in the name of "defensive programming" + Event.objects.filter(grouping__isnull=True).delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("events", "0019_event_storage_backend"), + + # "in principle" order shouldn't matter, because the various objects that are being deleted here are all "fully + # contained" by the .issue; to be safe, however, we depend on the below, because of Grouping.objects.delete() + # (which would set Event.grouping=NULL, which the present migration takes into account). + ("issues", "0020_remove_objects_with_null_issue"), + ] + + operations = [ + migrations.RunPython(remove_events_with_null_fks, reverse_code=migrations.RunPython.noop), + ] diff --git a/events/migrations/0021_alter_do_nothing.py b/events/migrations/0021_alter_do_nothing.py new file mode 100644 index 0000000..a8a9c34 --- /dev/null +++ b/events/migrations/0021_alter_do_nothing.py @@ -0,0 +1,27 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", "0021_alter_do_nothing"), + ("events", "0020_remove_events_with_null_issue_or_grouping"), + ] + + operations = [ + migrations.AlterField( + model_name="event", + name="grouping", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="issues.grouping" + ), + ), + migrations.AlterField( + model_name="event", + name="issue", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="issues.issue" + ), + ), + ] diff --git a/events/migrations/0022_alter_event_project.py b/events/migrations/0022_alter_event_project.py new file mode 100644 index 0000000..86a04e9 --- /dev/null +++ b/events/migrations/0022_alter_event_project.py @@ -0,0 +1,21 @@ +from django.db import migrations, models +import 
django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + # Django came up with 0014, whatever the reason, I'm sure that 0013 is at least required (as per comments there) + ("projects", "0014_alter_projectmembership_project"), + ("events", "0021_alter_do_nothing"), + ] + + operations = [ + migrations.AlterField( + model_name="event", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + ] diff --git a/events/migrations/0023_event_remote_addr.py b/events/migrations/0023_event_remote_addr.py new file mode 100644 index 0000000..ab256bf --- /dev/null +++ b/events/migrations/0023_event_remote_addr.py @@ -0,0 +1,16 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('events', '0022_alter_event_project'), + ] + + operations = [ + migrations.AddField( + model_name='event', + name='remote_addr', + field=models.GenericIPAddressField(blank=True, default=None, null=True), + ), + ] diff --git a/events/models.py b/events/models.py index 974108b..e06115c 100644 --- a/events/models.py +++ b/events/models.py @@ -8,33 +8,14 @@ from django.utils.functional import cached_property from projects.models import Project from compat.timestamp import parse_timestamp +from bugsink.transaction import delay_on_commit from issues.utils import get_title_for_exception_type_and_value from .retention import get_random_irrelevance from .storage_registry import get_write_storage, get_storage - -class Platform(models.TextChoices): - AS3 = "as3" - C = "c" - CFML = "cfml" - COCOA = "cocoa" - CSHARP = "csharp" - ELIXIR = "elixir" - HASKELL = "haskell" - GO = "go" - GROOVY = "groovy" - JAVA = "java" - JAVASCRIPT = "javascript" - NATIVE = "native" - NODE = "node" - OBJC = "objc" - OTHER = "other" - PERL = "perl" - PHP = "php" - PYTHON = "python" - RUBY = "ruby" +from .tasks import delete_event_deps class Level(models.TextChoices): @@ 
-71,12 +52,10 @@ class Event(models.Model): ingested_at = models.DateTimeField(blank=False, null=False) digested_at = models.DateTimeField(db_index=True, blank=False, null=False) + remote_addr = models.GenericIPAddressField(blank=True, null=True, default=None) - # not actually expected to be null, but we want to be able to delete issues without deleting events (cleanup later) - issue = models.ForeignKey("issues.Issue", blank=False, null=True, on_delete=models.SET_NULL) - - # not actually expected to be null - grouping = models.ForeignKey("issues.Grouping", blank=False, null=True, on_delete=models.SET_NULL) + issue = models.ForeignKey("issues.Issue", blank=False, null=False, on_delete=models.DO_NOTHING) + grouping = models.ForeignKey("issues.Grouping", blank=False, null=False, on_delete=models.DO_NOTHING) # The docs say: # > Required. Hexadecimal string representing a uuid4 value. The length is exactly 32 characters. Dashes are not @@ -85,7 +64,7 @@ class Event(models.Model): # uuid4 clientside". In any case, we just rely on the envelope's event_id (required per the envelope spec). # Not a primary key: events may be duplicated across projects event_id = models.UUIDField(primary_key=False, null=False, editable=False, help_text="As per the sent data") - project = models.ForeignKey(Project, blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' + project = models.ForeignKey(Project, blank=False, null=False, on_delete=models.DO_NOTHING) data = models.TextField(blank=False, null=False) @@ -93,8 +72,10 @@ class Event(models.Model): # > a numeric (integer or float) value representing the number of seconds that have elapsed since the Unix epoch. timestamp = models.DateTimeField(db_index=True, blank=False, null=False) - # > A string representing the platform the SDK is submitting from. [..] 
Acceptable values are [as defined below] - platform = models.CharField(max_length=64, blank=False, null=False, choices=Platform.choices) + # > A string representing the platform the SDK is submitting from. [..] + # (the list of supported platforms is ~700 items long, and since we don't actually depend on this value to be any + # item from that list, we don't force it to be one of them) + platform = models.CharField(max_length=64, blank=False, null=False) # > ### Optional Attributes @@ -257,6 +238,11 @@ class Event(models.Model): debug_info=event_metadata["debug_info"][:255], + # just getting from the dict would be more precise, since we always add this info, but doing the .get() + # allows for backwards compatability (digesting events for which the info was not added on-ingest) so + # we'll take the defensive approach "for now" (until most everyone is on >= 1.7.4) + remote_addr=event_metadata.get("remote_addr"), + digest_order=digest_order, irrelevance_for_retention=irrelevance_for_retention, @@ -285,3 +271,13 @@ class Event(models.Model): return list( self.tags.all().select_related("value", "value__key").order_by("value__key__key") ) + + def delete_deferred(self): + """Schedules deletion of all related objects""" + # NOTE: for such a small closure, I couldn't be bothered to have an .is_deleted field and deal with it. (the + # idea being that the deletion will be relatively quick anyway). We still need "something" though, since we've + # set DO_NOTHING everywhere. An alternative would be the "full inline", i.e. delete everything right in the + # request w/o any delay. That diverges even more from the approach for Issue/Project, making such things a + # "design decision needed". Maybe if we get more `delete_deferred` impls. we'll have a bit more info to figure + # out if we can harmonize on (e.g.) 2 approaches. 
+ delay_on_commit(delete_event_deps, str(self.project_id), str(self.id)) diff --git a/events/retention.py b/events/retention.py index d3a7799..53c1dad 100644 --- a/events/retention.py +++ b/events/retention.py @@ -376,7 +376,7 @@ def evict_for_epoch_and_irrelevance(project, max_epoch, max_irrelevance, max_eve Event.objects.filter(pk__in=pks_to_delete).exclude(storage_backend=None) .values_list("id", "storage_backend") ) - issue_deletions = { + deletions_per_issue = { d['issue_id']: d['count'] for d in Event.objects.filter(pk__in=pks_to_delete).values("issue_id").annotate(count=Count("issue_id"))} @@ -387,9 +387,9 @@ def evict_for_epoch_and_irrelevance(project, max_epoch, max_irrelevance, max_eve nr_of_deletions = Event.objects.filter(pk__in=pks_to_delete).delete()[1].get("events.Event", 0) else: nr_of_deletions = 0 - issue_deletions = {} + deletions_per_issue = {} - return EvictionCounts(nr_of_deletions, issue_deletions) + return EvictionCounts(nr_of_deletions, deletions_per_issue) def cleanup_events_on_storage(todos): diff --git a/events/tasks.py b/events/tasks.py new file mode 100644 index 0000000..c88a006 --- /dev/null +++ b/events/tasks.py @@ -0,0 +1,53 @@ +from snappea.decorators import shared_task + +from bugsink.utils import get_model_topography, delete_deps_with_budget +from bugsink.transaction import immediate_atomic, delay_on_commit + + +@shared_task +def delete_event_deps(project_id, event_id): + from .models import Event # avoid circular import + with immediate_atomic(): + # matches what we do in events/retention.py (and for which argumentation exists); in practive I have seen _much_ + # faster deletion times (in the order of .03s per task on my local laptop) when using a budget of 500, _but_ + # it's not a given those were for "expensive objects" (e.g. events); and I'd rather err on the side of caution + # (worst case we have a bit of inefficiency; in any case this avoids hogging the global write lock / timeouts). 
+        budget = 500
+        num_deleted = 0
+
+        # NOTE: for this delete_x_deps, we didn't bother optimizing the topography graph (the dependency-graph of a
+        # single event is believed to be small enough to not warrant further optimization).
+        dep_graph = get_model_topography()
+
+        for model_for_recursion, fk_name_for_recursion in dep_graph["events.Event"]:
+            this_num_deleted = delete_deps_with_budget(
+                project_id,
+                model_for_recursion,
+                fk_name_for_recursion,
+                [event_id],
+                budget - num_deleted,
+                dep_graph,
+                is_for_project=False,
+            )
+
+            num_deleted += this_num_deleted
+
+            if num_deleted >= budget:
+                delay_on_commit(delete_event_deps, project_id, event_id)
+                return
+
+        if budget - num_deleted <= 0:
+            # no more budget for the self-delete.
+            delay_on_commit(delete_event_deps, project_id, event_id)
+
+        else:
+            # final step: delete the event itself
+            issue = Event.objects.get(pk=event_id).issue
+
+            Event.objects.filter(pk=event_id).delete()
+
+            # manual (outside of delete_deps_with_budget) b/c the special-case in that function is (ATM) specific to
+            # project (it was built around Issue-deletion initially, so Issue outliving the event-deletion was not
+            # part of that functionality). we might refactor this at some point.
+ issue.stored_event_count -= 1 + issue.save(update_fields=["stored_event_count"]) diff --git a/events/tests.py b/events/tests.py index 82ac2be..672414c 100644 --- a/events/tests.py +++ b/events/tests.py @@ -9,8 +9,7 @@ from django.utils import timezone from bugsink.test_utils import TransactionTestCase25251 as TransactionTestCase from projects.models import Project, ProjectMembership -from issues.models import Issue -from issues.factories import denormalized_issue_fields +from issues.factories import get_or_create_issue from .factories import create_event from .retention import ( @@ -28,7 +27,7 @@ class ViewTests(TransactionTestCase): self.user = User.objects.create_user(username='test', password='test') self.project = Project.objects.create() ProjectMembership.objects.create(project=self.project, user=self.user) - self.issue = Issue.objects.create(project=self.project, **denormalized_issue_fields()) + self.issue, _ = get_or_create_issue(project=self.project) self.event = create_event(self.project, self.issue) self.client.force_login(self.user) @@ -154,7 +153,7 @@ class RetentionTestCase(DjangoTestCase): digested_at = timezone.now() self.project = Project.objects.create(retention_max_event_count=5) - self.issue = Issue.objects.create(project=self.project, **denormalized_issue_fields()) + self.issue, _ = get_or_create_issue(project=self.project) for digest_order in range(1, 7): project_stored_event_count += 1 # +1 pre-create, as in the ingestion view @@ -180,7 +179,7 @@ class RetentionTestCase(DjangoTestCase): project_stored_event_count = 0 self.project = Project.objects.create(retention_max_event_count=999) - self.issue = Issue.objects.create(project=self.project, **denormalized_issue_fields()) + self.issue, _ = get_or_create_issue(project=self.project) current_timestamp = datetime.datetime(2022, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc) diff --git a/events/utils.py b/events/utils.py index 47f81e2..382b2fd 100644 --- a/events/utils.py +++ b/events/utils.py @@ 
-1,8 +1,16 @@ +from os.path import basename +from datetime import datetime, timezone +from uuid import UUID import json import sourcemap from issues.utils import get_values +from bugsink.transaction import delay_on_commit + +from compat.timestamp import format_timestamp + from files.models import FileMetadata +from files.tasks import record_file_accesses # Dijkstra, Sourcemaps and Python lists start at 0, but editors and our UI show lines starting at 1. @@ -104,17 +112,20 @@ def apply_sourcemaps(event_data): return debug_id_for_filename = { - image["code_file"]: image["debug_id"] + image["code_file"]: UUID(image["debug_id"]) for image in images if "debug_id" in image and "code_file" in image and image["type"] == "sourcemap" } metadata_obj_lookup = { - str(metadata_obj.debug_id): metadata_obj + metadata_obj.debug_id: metadata_obj for metadata_obj in FileMetadata.objects.filter( debug_id__in=debug_id_for_filename.values(), file_type="source_map").select_related("file") } + metadata_ids = [metadata_obj.id for metadata_obj in metadata_obj_lookup.values()] + delay_on_commit(record_file_accesses, metadata_ids, format_timestamp(datetime.now(timezone.utc))) + filenames_with_metas = [ (filename, metadata_obj_lookup[debug_id]) for (filename, debug_id) in debug_id_for_filename.items() @@ -129,26 +140,60 @@ def apply_sourcemaps(event_data): source_for_filename = {} for filename, meta in filenames_with_metas: sm_data = json.loads(_postgres_fix(meta.file.data)) - if "sourcesContent" not in sm_data or len(sm_data["sourcesContent"]) != 1: - # our assumption is: 1 sourcemap, 1 source. The fact that both "sources" (a list of filenames) and - # "sourcesContent" are lists seems to indicate that this assumption does not generally hold. But it not - # holding does not play well with the id of debug_id, I think? 
- continue - source_for_filename[filename] = sm_data["sourcesContent"][0].splitlines() + sources = sm_data.get("sources", []) + sources_content = sm_data.get("sourcesContent", []) + + for (source_file_name, source_file) in zip(sources, sources_content): + source_for_filename[source_file_name] = source_file.splitlines() for exception in get_values(event_data.get("exception", {})): for frame in exception.get("stacktrace", {}).get("frames", []): # NOTE: try/except in the loop would allow us to selectively skip frames that we fail to process - if frame.get("filename") in sourcemap_for_filename and frame["filename"] in source_for_filename: + if frame.get("filename") in sourcemap_for_filename: sm = sourcemap_for_filename[frame["filename"]] - lines = source_for_filename[frame["filename"]] token = sm.lookup(frame["lineno"] + FROM_DISPLAY, frame["colno"]) - frame["pre_context"] = lines[max(0, token.src_line - 5):token.src_line] - frame["context_line"] = lines[token.src_line] - frame["post_context"] = lines[token.src_line + 1:token.src_line + 5] - frame["lineno"] = token.src_line + TO_DISPLAY - # frame["colno"] = token.src_col + TO_DISPLAY not actually used + if token.src in source_for_filename: + lines = source_for_filename[token.src] + + frame["pre_context"] = lines[max(0, token.src_line - 5):token.src_line] + frame["context_line"] = lines[token.src_line] + frame["post_context"] = lines[token.src_line + 1:token.src_line + 5] + frame["lineno"] = token.src_line + TO_DISPLAY + frame['filename'] = token.src + frame['function'] = token.name + # frame["colno"] = token.src_col + TO_DISPLAY not actually used + + elif frame.get("filename") in debug_id_for_filename: + # The event_data reports that a debug_id is available for this filename, but we don't have it; this + # could be because the sourcemap was not uploaded. We want to show the debug_id in the stacktrace as + # a hint to the user that they should upload the sourcemap. 
+ frame["debug_id"] = str(debug_id_for_filename[frame["filename"]]) + + +def get_sourcemap_images(event_data): + # NOTE: butchered copy/paste of apply_sourcemaps; refactoring for DRY is a TODO + images = event_data.get("debug_meta", {}).get("images", []) + if not images: + return [] + + debug_id_for_filename = { + image["code_file"]: UUID(image["debug_id"]) + for image in images + if "debug_id" in image and "code_file" in image and image["type"] == "sourcemap" + } + + metadata_obj_lookup = { + metadata_obj.debug_id: metadata_obj + for metadata_obj in FileMetadata.objects.filter( + debug_id__in=debug_id_for_filename.values(), file_type="source_map").select_related("file") + } + + return [ + (basename(filename), + f"{debug_id} " + (" (uploaded)" if debug_id in metadata_obj_lookup else " (not uploaded)")) + for filename, debug_id in debug_id_for_filename.items() + ] diff --git a/files/admin.py b/files/admin.py index 198bdf5..8093068 100644 --- a/files/admin.py +++ b/files/admin.py @@ -7,14 +7,14 @@ from .models import Chunk, File, FileMetadata @admin.register(Chunk) class ChunkAdmin(admin.ModelAdmin): - list_display = ('checksum', 'size') + list_display = ('checksum', 'size', 'created_at') search_fields = ('checksum',) readonly_fields = ('data',) @admin.register(File) class FileAdmin(admin.ModelAdmin): - list_display = ('filename', 'checksum', 'size', 'download_link') + list_display = ('filename', 'checksum', 'size', 'download_link', 'created_at', 'accessed_at') search_fields = ('checksum',) readonly_fields = ('data', 'download_link') @@ -27,5 +27,6 @@ class FileAdmin(admin.ModelAdmin): @admin.register(FileMetadata) class FileMetadataAdmin(admin.ModelAdmin): - list_display = ('debug_id', 'file_type', 'file') + list_display = ('debug_id', 'file_type', 'file', 'created_at') search_fields = ('file__checksum', 'debug_id', 'file_type') + readonly_fields = ('file', 'debug_id', 'file_type', 'data', 'created_at') diff --git a/files/management/__init__.py 
b/files/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/files/management/commands/__init__.py b/files/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/files/management/commands/vacuum_files.py b/files/management/commands/vacuum_files.py new file mode 100644 index 0000000..5161c85 --- /dev/null +++ b/files/management/commands/vacuum_files.py @@ -0,0 +1,10 @@ +from django.core.management.base import BaseCommand +from files.tasks import vacuum_files + + +class Command(BaseCommand): + help = "Kick off (sourcemaps-)files cleanup by vacuuming old entries." + + def handle(self, *args, **options): + vacuum_files.delay() + self.stdout.write("Called vacuum_files.delay(); the task will run in the background (snapea).") diff --git a/files/migrations/0002_chunk_created_at_file_accessed_at_file_created_at_and_more.py b/files/migrations/0002_chunk_created_at_file_accessed_at_file_created_at_and_more.py new file mode 100644 index 0000000..3585433 --- /dev/null +++ b/files/migrations/0002_chunk_created_at_file_accessed_at_file_created_at_and_more.py @@ -0,0 +1,44 @@ +from django.db import migrations, models +import django.utils.timezone + + +class Migration(migrations.Migration): + + dependencies = [ + ("files", "0001_initial"), + ] + + operations = [ + migrations.AddField( + model_name="chunk", + name="created_at", + field=models.DateTimeField( + auto_now_add=True, db_index=True, default=django.utils.timezone.now + ), + preserve_default=False, + ), + migrations.AddField( + model_name="file", + name="accessed_at", + field=models.DateTimeField( + auto_now_add=True, db_index=True, default=django.utils.timezone.now + ), + preserve_default=False, + ), + migrations.AddField( + model_name="file", + name="created_at", + field=models.DateTimeField( + auto_now_add=True, db_index=True, default=django.utils.timezone.now + ), + preserve_default=False, + ), + migrations.AddField( + model_name="filemetadata", + 
name="created_at", + field=models.DateTimeField( + auto_now_add=True, db_index=True, default=django.utils.timezone.now + ), + preserve_default=False, + ), + ] diff --git a/files/models.py b/files/models.py index 91ba42c..0011df7 100644 --- a/files/models.py +++ b/files/models.py @@ -5,6 +5,7 @@ class Chunk(models.Model): checksum = models.CharField(max_length=40, unique=True) # unique implies index, which we also use for lookups size = models.PositiveIntegerField() data = models.BinaryField(null=False) # as with Events, we can "eventually" move this out of the database + created_at = models.DateTimeField(auto_now_add=True, editable=False, db_index=True) def __str__(self): return self.checksum @@ -23,6 +24,8 @@ class File(models.Model): size = models.PositiveIntegerField() data = models.BinaryField(null=False) # as with Events, we can "eventually" move this out of the database + created_at = models.DateTimeField(auto_now_add=True, editable=False, db_index=True) + accessed_at = models.DateTimeField(auto_now_add=True, editable=False, db_index=True) def __str__(self): return self.filename @@ -36,6 +39,7 @@ class FileMetadata(models.Model): debug_id = models.UUIDField(max_length=40, null=True, blank=True) file_type = models.CharField(max_length=255, null=True, blank=True) data = models.TextField() # we just dump the rest in here; let's see how much we really need. 
+ created_at = models.DateTimeField(auto_now_add=True, editable=False, db_index=True) def __str__(self): # somewhat useless when debug_id is None; but that's not the case we care about ATM diff --git a/files/tasks.py b/files/tasks.py index 71f9c97..bc52941 100644 --- a/files/tasks.py +++ b/files/tasks.py @@ -1,14 +1,31 @@ +import re +import logging +from datetime import timedelta from zipfile import ZipFile import json from hashlib import sha1 from io import BytesIO from os.path import basename +from django.utils import timezone +from compat.timestamp import parse_timestamp from snappea.decorators import shared_task -from bugsink.transaction import immediate_atomic + +from bugsink.transaction import immediate_atomic, delay_on_commit +from bugsink.app_settings import get_settings from .models import Chunk, File, FileMetadata +logger = logging.getLogger("bugsink.api") + + +# "In the wild", we have run into non-unique debug IDs (one in code, one in comment-at-bottom). This regex matches a +# known pattern for "one in code", such that we can at least warn if it's not the same at the actually reported one. +# See #157 +IN_CODE_DEBUG_ID_REGEX = re.compile( + r'e\._sentryDebugIds\[.*?\]\s*=\s*["\']([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})["\']' +) + @shared_task def assemble_artifact_bundle(bundle_checksum, chunk_checksums): @@ -44,7 +61,16 @@ def assemble_artifact_bundle(bundle_checksum, chunk_checksums): debug_id = manifest_entry.get("headers", {}).get("debug-id", None) file_type = manifest_entry.get("type", None) if debug_id is None or file_type is None: - # such records exist and we could store them, but we don't, since we don't have a purpose for them. 
+ because = ( + "it has neither Debug ID nor file-type" if debug_id is None and file_type is None else + "it has no Debug ID" if debug_id is None else "it has no file-type") + + logger.warning( + "Uploaded file %s will be ignored by Bugsink because %s.", + filename, + because, + ) + continue FileMetadata.objects.get_or_create( @@ -56,7 +82,21 @@ def assemble_artifact_bundle(bundle_checksum, chunk_checksums): } ) - # NOTE we _could_ get rid of the file at this point (but we don't). Ties in to broader questions of retention. + # the in-code regexes show up in the _minified_ source only (the sourcemap's original source code will not + # have been "polluted" with it yet, since it's the original). + if file_type == "minified_source": + mismatches = set(IN_CODE_DEBUG_ID_REGEX.findall(file_data.decode("utf-8"))) - {debug_id} + if mismatches: + logger.warning( + "Uploaded file %s contains multiple Debug IDs. Uploaded as %s, but also found: %s.", + filename, + debug_id, + ", ".join(sorted(mismatches)), + ) + + if not get_settings().KEEP_ARTIFACT_BUNDLES: + # delete the bundle file after processing, since we don't need it anymore. + bundle_file.delete() def assemble_file(checksum, chunk_checksums, filename): @@ -75,10 +115,67 @@ def assemble_file(checksum, chunk_checksums, filename): if sha1(data).hexdigest() != checksum: raise Exception("checksum mismatch") - return File.objects.get_or_create( + result = File.objects.get_or_create( checksum=checksum, defaults={ "size": len(data), "data": data, "filename": filename, }) + + # the assumption here is: chunks are basically use-once, so we can delete them after use. "in theory" a chunk may + # be used in multiple files (which are still being assembled) but with chunksizes in the order of 1MiB, I'd say this + # is unlikely. 
+    chunks.delete()
+    return result
+
+
+@shared_task
+def record_file_accesses(metadata_ids, accessed_at):
+    # implemented as a task to get around the fact that file-access happens in an otherwise read-only view (and the fact
+    # that the access happened is a write to the DB).
+
+    # a few thoughts on the context of "doing this as a task": [1] the expected throughput is relatively low (UI) so the
+    # task overhead should be OK [2] it's not "absolutely critical" to always record this (99% is enough) and [3] it's
+    # not related to the reading transaction _at all_ (all we need to record is the fact that it happened).
+    #
+    # thought on instead pulling it to the top of the UI's view: code-wise, it's annoying but doable (annoying b/c
+    # 'for_request_method' won't work anymore). But this would still make this key UI view depend on the write lock
+    # which is such a shame for responsiveness so we'll stick with task-based.
+
+    with immediate_atomic():
+        parsed_accessed_at = parse_timestamp(accessed_at)
+
+        # note: filtering on IDs comes with "robust for deletions" out-of-the-box (and: 2 queries only)
+        file_ids = FileMetadata.objects.filter(id__in=metadata_ids).values_list("file_id", flat=True)
+        File.objects.filter(id__in=file_ids).update(accessed_at=parsed_accessed_at)
+
+
+@shared_task
+def vacuum_files():
+    now = timezone.now()
+    with immediate_atomic():
+        # budget is not yet tuned; reasons for high values: we're dealing with "leaves in the model-dep-tree here";
+        # reasons for low values: deletion of files might just be expensive.
+        budget = 500
+        num_deleted = 0
+
+        for model, field_name, max_days in [
+            (Chunk, 'created_at', 1,),  # 1 is already quite long... Chunks are used immediately, or not at all.
+            (File, 'accessed_at', 90),
+            # for FileMetadata we rely on cascading from File (which will always happen "eventually")
+        ]:
+
+            while num_deleted < budget:
+                ids = (model.objects.filter(**{f"{field_name}__lt": now - timedelta(days=max_days)})[:budget].
+ values_list('id', flat=True)) + + if len(ids) == 0: + break + + model.objects.filter(id__in=ids).delete() + num_deleted += len(ids) + + if num_deleted == budget: + # budget exhausted but possibly more to delete, so we re-schedule the task + delay_on_commit(vacuum_files) diff --git a/files/tests.py b/files/tests.py index bfc222e..cf63729 100644 --- a/files/tests.py +++ b/files/tests.py @@ -1,3 +1,5 @@ +from hashlib import sha1 +from uuid import UUID import json import gzip from io import BytesIO @@ -10,6 +12,9 @@ from bugsink.test_utils import TransactionTestCase25251 as TransactionTestCase from projects.models import Project, ProjectMembership from events.models import Event from bsmain.models import AuthToken +from bugsink.moreiterutils import batched + +from .models import File, FileMetadata User = get_user_model() @@ -47,14 +52,41 @@ class FilesTests(TransactionTestCase): self.assertEqual(401, response.status_code) self.assertEqual({"error": "Invalid token"}, response.json()) + def test_uuid_behavior_of_django(self): + # test to check Django is doing the thing of converting various UUID-like things on "both sides" before + # comparing. "this probably shouldn't be necessary" to test, but I'd rather have a test that proves it works + # than to have to reason about it. 
Context: https://github.com/bugsink/bugsink/issues/105 + + uuids = [ + "12345678123456781234567812345678", # uuid_str_no_dashes + "12345678-1234-5678-1234-567812345678", # uuid_str_with_dashes + UUID("12345678-1234-5678-1234-567812345678"), # uuid_object + ] + + file = File.objects.create(size=0) + for create_with in uuids: + FileMetadata.objects.all().delete() # clean up before each test + FileMetadata.objects.create( + debug_id=create_with, + file_type="source_map", + file=file, + ) + + for test_with in uuids: + fms = FileMetadata.objects.filter(debug_id__in=[test_with]) + self.assertEqual(1, fms.count()) + def test_assemble_artifact_bundle(self): SAMPLES_DIR = os.getenv("SAMPLES_DIR", "../event-samples") - event_samples = [SAMPLES_DIR + fn for fn in ["/bugsink/uglifyjs-minified-sourcemaps-in-bundle.json"]] + event_samples = [SAMPLES_DIR + fn for fn in [ + "/bugsink/uglifyjs-minified-sourcemaps-in-bundle.json", + "/bugsink/uglifyjs-minified-sourcemaps-in-bundle-multi-file.json", + ]] artifact_bundles = glob(SAMPLES_DIR + "/*/artifact_bundles/*.zip") - if len(artifact_bundles) == 0: - raise Exception(f"No artifact bundles found in {SAMPLES_DIR}; I insist on having some to test with.") + if len(artifact_bundles) != 2: + raise Exception(f"Not all artifact bundles found in {SAMPLES_DIR}; I insist on having some to test with.") for filename in artifact_bundles: with open(filename, 'rb') as f: @@ -116,19 +148,73 @@ class FilesTests(TransactionTestCase): 200, response.status_code, "Error in %s: %s" % ( filename, response.content if response.status_code != 302 else response.url)) - for event in Event.objects.all(): + for event_id, key_phrase in [ + ("af4d4093e2d548bea61683abecb8ee95", 'captureException.js in foo line 15'), # noqa + ("ed483af389554d9cac475049ed9f560f", 'captureException.js in foo line 10'), # noqa + ]: + + event = Event.objects.get(event_id=event_id) + url = f'/issues/issue/{ event.issue.id }/event/{ event.id }/' try: - # we just check for a 200; this at 
least makes sure we have no failing template rendering response = self.client.get(url) self.assertEqual( 200, response.status_code, response.content if response.status_code != 302 else response.url) - # we could/should make this more general later; this is great for example nr.1: - key_phrase = 'captureException line 15' self.assertTrue(key_phrase in response.content.decode('utf-8')) except Exception as e: # we want to know _which_ event failed, hence the raise-from-e here - raise AssertionError("Error rendering event %s" % event.debug_info) from e + raise AssertionError("Error rendering event %s" % event.event_id) from e + + def test_assemble_artifact_bundle_small_chunks(self): + # Copy-paste of test_assemble_artifact_bundle, but checking _only_ that bundle assembly works with small chunks. + SAMPLES_DIR = os.getenv("SAMPLES_DIR", "../event-samples") + + filename = SAMPLES_DIR + "/bugsink/artifact_bundles/51a5a327666cf1d11e23adfd55c3becad27ae769.zip" + with open(filename, 'rb') as f: + all_data = f.read() + + seen_checksums = [] + for data in batched(all_data, len(all_data) // 10): + data = bytes(data) + checksum = sha1(data).hexdigest() + + gzipped_file = BytesIO(gzip.compress(data)) + gzipped_file.name = checksum + + # 1. chunk-upload + response = self.client.post( + "/api/0/organizations/anyorg/chunk-upload/", + data={"file_gzip": gzipped_file}, + headers=self.token_headers, + ) + + self.assertEqual( + 200, response.status_code, "Error in %s: %s" % ( + filename, response.content if response.status_code != 302 else response.url)) + + seen_checksums.append(checksum) + + checksum = os.path.basename(filename).split(".")[0] + + # 2. 
artifactbundle/assemble + data = { + "checksum": checksum, + "chunks": seen_checksums, + "projects": [ + "unused_for_now" + ] + } + + response = self.client.post( + "/api/0/organizations/anyorg/artifactbundle/assemble/", + json.dumps(data), + content_type="application/json", + headers=self.token_headers, + ) + + self.assertEqual( + 200, response.status_code, "Error in %s: %s" % ( + filename, response.content if response.status_code != 302 else response.url)) diff --git a/files/views.py b/files/views.py index 4f4e4a8..0508d71 100644 --- a/files/views.py +++ b/files/views.py @@ -2,10 +2,12 @@ import json from hashlib import sha1 from gzip import GzipFile from io import BytesIO +import logging from django.http import JsonResponse, HttpResponse from django.views.decorators.csrf import csrf_exempt from django.contrib.auth.decorators import user_passes_test +from django.http import Http404 from sentry.assemble import ChunkFileState @@ -16,9 +18,12 @@ from bsmain.models import AuthToken from .models import Chunk, File from .tasks import assemble_artifact_bundle +logger = logging.getLogger("bugsink.api") + _KIBIBYTE = 1024 _MEBIBYTE = 1024 * _KIBIBYTE +_GIBIBYTE = 1024 * _MEBIBYTE class NamedBytesIO(BytesIO): @@ -39,26 +44,32 @@ def get_chunk_upload_settings(request, organization_slug): # * https://github.com/getsentry/sentry/pull/29347 url = get_settings().BASE_URL + "/api/0/organizations/" + organization_slug + "/chunk-upload/" - # Our "chunk_upload" is chunked in name only; i.e. we only "speak chunked" for the purpose of API-compatability with - # sentry-cli, but we provide params here such that that cli will only send a single chunk. - return JsonResponse({ "url": url, - # For now, staying close to the default MAX_ENVELOPE_COMPRESSED_SIZE, which is 20MiB; - # I _think_ I saw a note somewhere on (one of) these values having to be a power of 2; hence 32 here. - # - # When implementing uploading, it was done to support sourcemaps. 
It seems that over at Sentry, the reason they - # went so complicated in the first place was to enable DIF support (hunderds of MiB regularly). - "chunkSize": 32 * _MEBIBYTE, - "maxRequestSize": 32 * _MEBIBYTE, + # We pick a "somewhat arbitrary" value between 1MiB and 16MiB to balance between "works reliably" and "lower + # overhead", erring on the "works reliably" side of that spectrum. There's really no lower bound technically, + # I've played with 32-byte requests. + # note: sentry-cli <= v2.39.1 requires a power of 2 here. + # chunkSize == maxRequestSize per the comments on `chunksPerRequest: 1`. + "chunkSize": 2 * _MEBIBYTE, + "maxRequestSize": 2 * _MEBIBYTE, - # I didn't check the supposed relationship between maxRequestSize and maxFileSize, but assume something similar - # to what happens w/ envelopes; hence harmonizing with MAX_ENVELOPE_SIZE (and rounding up to a power of 2) here - "maxFileSize": 128 * _MEBIBYTE, + # The limit here is _actually storing this_. For now "just picking a high limit" assuming that we'll have decent + # storage (#151) for the files eventually. + "maxFileSize": 2 * _GIBIBYTE, - # force single-chunk by setting these to 1. + # In our current setup increasing concurrency doesn't help (single-writer architecture) while coming at the cost + # of potential reliability issues. Current codebase has works just fine with it _in principle_ (tested by + # setting concurrency=10, chunkSize=32, maxRequestSize=32 and adding a sleep(random(..)) in chunk_upload (right + # before return, and seeing that sentry-cli fires a bunch of things in parallel and artifact_bundle_assemble as + # a final step. "concurrency": 1, + + # There _may_ be good reasons to support multiple chunks per request, but I haven't found a reason to + # distinguish between chunkSize and maxRequestSize yet, so I'd rather keep them synced for easier reasoning. 
+ # Current codebase has been observed to work just fine with it though (tested w/ chunkSize=32 and + # chunksPerRequest=100 and seeing sentry-cli do a single request with many small chunks). "chunksPerRequest": 1, "hashAlgorithm": "sha1", @@ -193,3 +204,40 @@ def download_file(request, checksum): response = HttpResponse(file.data, content_type="application/octet-stream") response["Content-Disposition"] = f"attachment; filename={file.filename}" return response + + +@csrf_exempt +def api_catch_all(request, subpath): + if not get_settings().API_LOG_UNIMPLEMENTED_CALLS: + raise Http404("Unimplemented API endpoint: /api/" + subpath) + + lines = [ + "Unimplemented API usage:", + f" Path: /api/{subpath}", + f" Method: {request.method}", + ] + + if request.GET: + lines.append(f" GET: {request.GET.dict()}") + + if request.POST: + lines.append(f" POST: {request.POST.dict()}") + + body = request.body + if body: + try: + decoded = body.decode("utf-8", errors="replace").strip() + lines.append(" Body:") + lines.append(f" {decoded[:500]}") + try: + parsed = json.loads(decoded) + pretty = json.dumps(parsed, indent=2)[:10_000] + lines.append(" JSON body:") + lines.extend(f" {line}" for line in pretty.splitlines()) + except json.JSONDecodeError: + pass + except Exception as e: + lines.append(f" Body: ") + + logger.info("\n".join(lines)) + raise Http404("Unimplemented API endpoint: /api/" + subpath) diff --git a/gunicorn.docker.conf.py b/gunicorn.docker.conf.py new file mode 100644 index 0000000..cddc72f --- /dev/null +++ b/gunicorn.docker.conf.py @@ -0,0 +1,4 @@ +# gunicorn config file for Docker deployments +import multiprocessing + +workers = min(multiprocessing.cpu_count(), 4) diff --git a/ingest/tests.py b/ingest/tests.py index e23c0fa..1ebbfe9 100644 --- a/ingest/tests.py +++ b/ingest/tests.py @@ -296,7 +296,7 @@ class IngestViewTestCase(TransactionTestCase): SAMPLES_DIR = os.getenv("SAMPLES_DIR", "../event-samples") - event_samples = glob(SAMPLES_DIR + "/*/*.json") + 
event_samples = glob(SAMPLES_DIR + "/sentry/mobile1-xen.json") # pick a fixed one for reproducibility known_broken = [SAMPLES_DIR + "/" + s.strip() for s in _readlines(SAMPLES_DIR + "/KNOWN-BROKEN")] if len(event_samples) == 0: @@ -436,7 +436,8 @@ class IngestViewTestCase(TransactionTestCase): SAMPLES_DIR = os.getenv("SAMPLES_DIR", "../event-samples") - event_samples = glob(SAMPLES_DIR + "/*/*.json") + event_samples = glob(SAMPLES_DIR + "/sentry/mobile1-xen.json") # this one has 'exception.values[0].type' + known_broken = [SAMPLES_DIR + "/" + s.strip() for s in _readlines(SAMPLES_DIR + "/KNOWN-BROKEN")] if len(event_samples) == 0: diff --git a/ingest/views.py b/ingest/views.py index 6a9e034..0601208 100644 --- a/ingest/views.py +++ b/ingest/views.py @@ -1,3 +1,4 @@ +import hashlib import os import logging import io @@ -130,7 +131,7 @@ class BaseIngestAPIView(View): @classmethod def get_project(cls, project_pk, sentry_key): try: - return Project.objects.get(pk=project_pk, sentry_key=sentry_key) + return Project.objects.get(pk=project_pk, sentry_key=sentry_key, is_deleted=False) except Project.DoesNotExist: # We don't distinguish between "project not found" and "key incorrect"; there's no real value in that from # the user perspective (they deal in dsns). Additional advantage: no need to do constant-time-comp on @@ -170,11 +171,17 @@ class BaseIngestAPIView(View): # Meta means: not part of the event data. Basically: information that is available at the time of ingestion, and # that must be passed to digest() in a serializable form. debug_info = request.META.get("HTTP_X_BUGSINK_DEBUGINFO", "") + + # .get(..) 
-- don't want to crash on this and it's non-trivial to find a source that tells me with certainty + # that the REMOTE_ADDR is always in request.META (it probably is in practice) + remote_addr = request.META.get("REMOTE_ADDR") + return { "event_id": event_id, "project_id": project.id, "ingested_at": format_timestamp(ingested_at), "debug_info": debug_info, + "remote_addr": remote_addr, } @classmethod @@ -250,7 +257,12 @@ class BaseIngestAPIView(View): ingested_at = parse_timestamp(event_metadata["ingested_at"]) digested_at = datetime.now(timezone.utc) if digested_at is None else digested_at # explicit passing: test only - project = Project.objects.get(pk=event_metadata["project_id"]) + try: + project = Project.objects.get(pk=event_metadata["project_id"], is_deleted=False) + except Project.DoesNotExist: + # we may get here if the project was deleted after the event was ingested, but before it was digested + # (covers both "deletion in progress (is_deleted=True)" and "fully deleted"). + return if not cls.count_project_periods_and_act_on_it(project, digested_at): return # if over-quota: just return (any cleanup is done calling-side) @@ -269,7 +281,19 @@ class BaseIngestAPIView(View): grouping_key = get_issue_grouper_for_data(event_data, calculated_type, calculated_value) - if not Grouping.objects.filter(project_id=event_metadata["project_id"], grouping_key=grouping_key).exists(): + try: + grouping = Grouping.objects.get( + project_id=event_metadata["project_id"], grouping_key=grouping_key, + grouping_key_hash=hashlib.sha256(grouping_key.encode()).hexdigest()) + + issue = grouping.issue + issue_created = False + + # update the denormalized fields + issue.last_seen = ingested_at + issue.digested_event_count += 1 + + except Grouping.DoesNotExist: # we don't have Project.issue_count here ('premature optimization') so we just do an aggregate instead. 
max_current = Issue.objects.filter(project_id=event_metadata["project_id"]).aggregate( Max("digest_order"))["digest_order__max"] @@ -291,18 +315,10 @@ class BaseIngestAPIView(View): grouping = Grouping.objects.create( project_id=event_metadata["project_id"], grouping_key=grouping_key, + grouping_key_hash=hashlib.sha256(grouping_key.encode()).hexdigest(), issue=issue, ) - else: - grouping = Grouping.objects.get(project_id=event_metadata["project_id"], grouping_key=grouping_key) - issue = grouping.issue - issue_created = False - - # update the denormalized fields - issue.last_seen = ingested_at - issue.digested_event_count += 1 - # +1 because we're about to add one event. project_stored_event_count = project.stored_event_count + 1 @@ -355,6 +371,7 @@ class BaseIngestAPIView(View): if issue_created: TurningPoint.objects.create( + project=project, issue=issue, triggering_event=event, timestamp=ingested_at, kind=TurningPointKind.FIRST_SEEN) event.never_evict = True @@ -366,6 +383,7 @@ class BaseIngestAPIView(View): # new issues cannot be regressions by definition, hence this is in the 'else' branch if issue_is_regression(issue, event.release): TurningPoint.objects.create( + project=project, issue=issue, triggering_event=event, timestamp=ingested_at, kind=TurningPointKind.REGRESSED) event.never_evict = True diff --git a/issues/admin.py b/issues/admin.py index 67508f7..cc9402c 100644 --- a/issues/admin.py +++ b/issues/admin.py @@ -1,8 +1,14 @@ from django.contrib import admin +from bugsink.transaction import immediate_atomic +from django.utils.decorators import method_decorator +from django.views.decorators.csrf import csrf_protect + from .models import Issue, Grouping, TurningPoint from .forms import IssueAdminForm +csrf_protect_m = method_decorator(csrf_protect) + class GroupingInline(admin.TabularInline): model = Grouping @@ -79,3 +85,28 @@ class IssueAdmin(admin.ModelAdmin): 'digested_event_count', 'stored_event_count', ] + + def get_deleted_objects(self, objs, 
request): + to_delete = list(objs) + ["...all its related objects... (delayed)"] + model_count = { + Issue: len(objs), + } + perms_needed = set() + protected = [] + return to_delete, model_count, perms_needed, protected + + def delete_queryset(self, request, queryset): + # NOTE: not the most efficient; it will do for a first version. + with immediate_atomic(): + for obj in queryset: + obj.delete_deferred() + + def delete_model(self, request, obj): + with immediate_atomic(): + obj.delete_deferred() + + @csrf_protect_m + def delete_view(self, request, object_id, extra_context=None): + # the superclass version, but with the transaction.atomic context manager commented out (we do this ourselves) + # with transaction.atomic(using=router.db_for_write(self.model)): + return self._delete_view(request, object_id, extra_context) diff --git a/issues/factories.py b/issues/factories.py index d04d1bc..4b9efd6 100644 --- a/issues/factories.py +++ b/issues/factories.py @@ -1,3 +1,4 @@ +import hashlib from django.utils import timezone from projects.models import Project @@ -11,6 +12,7 @@ def get_or_create_issue(project=None, event_data=None): if event_data is None: from events.factories import create_event_data event_data = create_event_data() + if project is None: project = Project.objects.create(name="Test project") @@ -26,6 +28,7 @@ def get_or_create_issue(project=None, event_data=None): grouping = Grouping.objects.create( project=project, grouping_key=grouping_key, + grouping_key_hash=hashlib.sha256(grouping_key.encode()).hexdigest(), issue=issue, ) diff --git a/issues/migrations/0014_grouping_grouping_key_hash.py b/issues/migrations/0014_grouping_grouping_key_hash.py new file mode 100644 index 0000000..a9638a5 --- /dev/null +++ b/issues/migrations/0014_grouping_grouping_key_hash.py @@ -0,0 +1,17 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", "0013_fix_issue_stored_event_counts"), + ] + + operations = [ 
+ migrations.AddField( + model_name="grouping", + name="grouping_key_hash", + field=models.CharField(default="", max_length=64), + preserve_default=False, + ), + ] diff --git a/issues/migrations/0015_set_grouping_hash.py b/issues/migrations/0015_set_grouping_hash.py new file mode 100644 index 0000000..326f4f5 --- /dev/null +++ b/issues/migrations/0015_set_grouping_hash.py @@ -0,0 +1,20 @@ +import hashlib +from django.db import migrations + + +def set_grouping_hash(apps, schema_editor): + Grouping = apps.get_model("issues", "Grouping") + for grouping in Grouping.objects.all(): + grouping.grouping_key_hash = hashlib.sha256(grouping.grouping_key.encode()).hexdigest() + grouping.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", "0014_grouping_grouping_key_hash"), + ] + + operations = [ + migrations.RunPython(set_grouping_hash), + ] diff --git a/issues/migrations/0016_alter_grouping_unique_together.py b/issues/migrations/0016_alter_grouping_unique_together.py new file mode 100644 index 0000000..19eb884 --- /dev/null +++ b/issues/migrations/0016_alter_grouping_unique_together.py @@ -0,0 +1,16 @@ +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0011_fill_stored_event_count"), + ("issues", "0015_set_grouping_hash"), + ] + + operations = [ + migrations.AlterUniqueTogether( + name="grouping", + unique_together={("project", "grouping_key_hash")}, + ), + ] diff --git a/issues/migrations/0017_issue_list_indexes_must_start_with_project.py b/issues/migrations/0017_issue_list_indexes_must_start_with_project.py new file mode 100644 index 0000000..96133c5 --- /dev/null +++ b/issues/migrations/0017_issue_list_indexes_must_start_with_project.py @@ -0,0 +1,55 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", "0016_alter_grouping_unique_together"), + ] + + operations = [ + migrations.RemoveIndex( + 
model_name="issue", + name="issues_issu_first_s_9fb0f9_idx", + ), + migrations.RemoveIndex( + model_name="issue", + name="issues_issu_last_se_400a05_idx", + ), + migrations.RemoveIndex( + model_name="issue", + name="issues_issu_is_reso_eaf32b_idx", + ), + migrations.RemoveIndex( + model_name="issue", + name="issues_issu_is_mute_6fe7fc_idx", + ), + migrations.RemoveIndex( + model_name="issue", + name="issues_issu_is_reso_0b6923_idx", + ), + migrations.AddIndex( + model_name="issue", + index=models.Index( + fields=["project", "is_resolved", "is_muted", "last_seen"], + name="issue_list_open", + ), + ), + migrations.AddIndex( + model_name="issue", + index=models.Index( + fields=["project", "is_muted", "last_seen"], name="issue_list_muted" + ), + ), + migrations.AddIndex( + model_name="issue", + index=models.Index( + fields=["project", "is_resolved", "last_seen"], + name="issue_list_resolved", + ), + ), + migrations.AddIndex( + model_name="issue", + index=models.Index(fields=["project", "last_seen"], name="issue_list_all"), + ), + ] diff --git a/issues/migrations/0018_issue_is_deleted.py b/issues/migrations/0018_issue_is_deleted.py new file mode 100644 index 0000000..42f8a92 --- /dev/null +++ b/issues/migrations/0018_issue_is_deleted.py @@ -0,0 +1,16 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", "0017_issue_list_indexes_must_start_with_project"), + ] + + operations = [ + migrations.AddField( + model_name="issue", + name="is_deleted", + field=models.BooleanField(default=False), + ), + ] diff --git a/issues/migrations/0019_alter_grouping_grouping_key_hash.py b/issues/migrations/0019_alter_grouping_grouping_key_hash.py new file mode 100644 index 0000000..fb6955f --- /dev/null +++ b/issues/migrations/0019_alter_grouping_grouping_key_hash.py @@ -0,0 +1,16 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", 
"0018_issue_is_deleted"), + ] + + operations = [ + migrations.AlterField( + model_name="grouping", + name="grouping_key_hash", + field=models.CharField(max_length=64, null=True), + ), + ] diff --git a/issues/migrations/0020_remove_objects_with_null_issue.py b/issues/migrations/0020_remove_objects_with_null_issue.py new file mode 100644 index 0000000..cfb26ce --- /dev/null +++ b/issues/migrations/0020_remove_objects_with_null_issue.py @@ -0,0 +1,26 @@ +from django.db import migrations + + +def remove_objects_with_null_issue(apps, schema_editor): + # Up until now, we have various models w/ .issue=FK(null=True, on_delete=models.SET_NULL) + # Although it is "not expected" in the interface, issue-deletion would have led to those + # objects with a null issue. We're about to change that to .issue=FK(null=False, ...) which + # would crash if we don't remove those objects first. Object-removal is "fine" though, because + # as per the meaning of the SET_NULL, these objects were "dangling" anyway. 
+ + Grouping = apps.get_model("issues", "Grouping") + TurningPoint = apps.get_model("issues", "TurningPoint") + + Grouping.objects.filter(issue__isnull=True).delete() + TurningPoint.objects.filter(issue__isnull=True).delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", "0019_alter_grouping_grouping_key_hash"), + ] + + operations = [ + migrations.RunPython(remove_objects_with_null_issue, reverse_code=migrations.RunPython.noop), + ] diff --git a/issues/migrations/0021_alter_do_nothing.py b/issues/migrations/0021_alter_do_nothing.py new file mode 100644 index 0000000..1824073 --- /dev/null +++ b/issues/migrations/0021_alter_do_nothing.py @@ -0,0 +1,26 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", "0020_remove_objects_with_null_issue"), + ] + + operations = [ + migrations.AlterField( + model_name="grouping", + name="issue", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="issues.issue" + ), + ), + migrations.AlterField( + model_name="turningpoint", + name="issue", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="issues.issue" + ), + ), + ] diff --git a/issues/migrations/0022_turningpoint_project.py b/issues/migrations/0022_turningpoint_project.py new file mode 100644 index 0000000..1ce2bc0 --- /dev/null +++ b/issues/migrations/0022_turningpoint_project.py @@ -0,0 +1,22 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0012_project_is_deleted"), + ("issues", "0021_alter_do_nothing"), + ] + + operations = [ + migrations.AddField( + model_name="turningpoint", + name="project", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.DO_NOTHING, + to="projects.project", + ), + ), + ] diff --git 
a/issues/migrations/0023_turningpoint_set_project.py b/issues/migrations/0023_turningpoint_set_project.py new file mode 100644 index 0000000..c5d6845 --- /dev/null +++ b/issues/migrations/0023_turningpoint_set_project.py @@ -0,0 +1,36 @@ +from django.db import migrations + + +def turningpoint_set_project(apps, schema_editor): + TurningPoint = apps.get_model("issues", "TurningPoint") + + # TurningPoint.objects.update(project=F("issue__project")) + # fails with 'Joined field references are not permitted in this query" + + # This one's elegant and works in sqlite but not in MySQL: + # TurningPoint.objects.update( + # project=Subquery( + # TurningPoint.objects + # .filter(pk=OuterRef('pk')) + # .values('issue__project')[:1] + # ) + # ) + # django.db.utils.OperationalError: (1093, "You can't specify target table 'issues_turningpoint' for update in FROM + # clause") + + # so in the end we'll just loop: + + for turningpoint in TurningPoint.objects.all(): + turningpoint.project = turningpoint.issue.project + turningpoint.save(update_fields=["project"]) + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", "0022_turningpoint_project"), + ] + + operations = [ + migrations.RunPython(turningpoint_set_project, migrations.RunPython.noop), + ] diff --git a/issues/migrations/0024_turningpoint_project_alter_not_null.py b/issues/migrations/0024_turningpoint_project_alter_not_null.py new file mode 100644 index 0000000..9462a3e --- /dev/null +++ b/issues/migrations/0024_turningpoint_project_alter_not_null.py @@ -0,0 +1,36 @@ +from django.db import migrations, models +import django.db.models.deletion + + +def delete_turningpoints_pointing_to_null_project(apps, schema_editor): + # In 0023_turningpoint_set_project, we set the project field for TurningPoint to the associated Issue's project. 
+ # _However_, at that point in time in our migration-history, Issue's project field was still nullable, and the big + # null-project-fk-deleting migration (projects/migrations/0013_delete_objects_pointing_to_null_project.py) is _sure_ + # not to have run yet (it depends on the present migration). (it wouldn't delete TurningPoints anyway, but it would + # delete project-less Issues). Anyway, we just take care of the TurningPoints here (that's ok as per 0013_delete_... + # logic, i.e. no-project means no way to access) and it's also possible since they are on the edge of our object + # graph. + TurningPoint = apps.get_model("issues", "TurningPoint") + TurningPoint.objects.filter(project__isnull=True).delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0012_project_is_deleted"), + ("issues", "0023_turningpoint_set_project"), + ] + + operations = [ + migrations.RunPython( + delete_turningpoints_pointing_to_null_project, + migrations.RunPython.noop, + ), + migrations.AlterField( + model_name="turningpoint", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + ] diff --git a/issues/migrations/0025_alter_grouping_project_alter_issue_project.py b/issues/migrations/0025_alter_grouping_project_alter_issue_project.py new file mode 100644 index 0000000..5c49f63 --- /dev/null +++ b/issues/migrations/0025_alter_grouping_project_alter_issue_project.py @@ -0,0 +1,28 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + # Django came up with 0014, whatever the reason, I'm sure that 0013 is at least required (as per comments there) + ("projects", "0014_alter_projectmembership_project"), + ("issues", "0024_turningpoint_project_alter_not_null"), + ] + + operations = [ + migrations.AlterField( + model_name="grouping", + name="project", + field=models.ForeignKey( + 
on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + migrations.AlterField( + model_name="issue", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + ] diff --git a/issues/models.py b/issues/models.py index 69a320c..7bf0003 100644 --- a/issues/models.py +++ b/issues/models.py @@ -10,6 +10,7 @@ from django.conf import settings from django.utils.functional import cached_property from bugsink.volume_based_condition import VolumeBasedCondition +from bugsink.transaction import delay_on_commit from alerts.tasks import send_unmute_alert from compat.timestamp import parse_timestamp, format_timestamp from tags.models import IssueTag, TagValue @@ -18,6 +19,8 @@ from .utils import ( parse_lines, serialize_lines, filter_qs_for_fixed_at, exclude_qs_for_fixed_at, get_title_for_exception_type_and_value) +from .tasks import delete_issue_deps + class IncongruentStateException(Exception): pass @@ -32,7 +35,9 @@ class Issue(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) project = models.ForeignKey( - "projects.Project", blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' + "projects.Project", blank=False, null=False, on_delete=models.DO_NOTHING) + + is_deleted = models.BooleanField(default=False) # 1-based for the same reasons as Event.digest_order digest_order = models.PositiveIntegerField(blank=False, null=False) @@ -72,6 +77,21 @@ class Issue(models.Model): self.digest_order = max_current + 1 if max_current is not None else 1 super().save(*args, **kwargs) + def delete_deferred(self): + """Marks the issue as deleted, and schedules deletion of all related objects""" + self.is_deleted = True + self.save(update_fields=["is_deleted"]) + + # we set grouping_key_hash to None to ensure that event digests that happen simultaneously with the delayed + # cleanup will get their own fresh Grouping and hence Issue. 
This matches with the behavior that would happen + # if Issue deletion would have been instantaneous (i.e. it's the least surprising behavior). + # + # `issue=None` is explicitly _not_ part of this update, such that the actual deletion of the Groupings will be + # picked up as part of the delete_issue_deps task. + self.grouping_set.all().update(grouping_key_hash=None) + + delay_on_commit(delete_issue_deps, str(self.project_id), str(self.id)) + def friendly_id(self): return f"{ self.project.slug.upper() }-{ self.digest_order }" @@ -176,13 +196,11 @@ class Issue(models.Model): ("project", "digest_order"), ] indexes = [ - models.Index(fields=["first_seen"]), - models.Index(fields=["last_seen"]), - - # 3 indexes for the list view (state_filter) - models.Index(fields=["is_resolved", "is_muted", "last_seen"]), # filter on resolved/muted - models.Index(fields=["is_muted", "last_seen"]), # filter on muted - models.Index(fields=["is_resolved", "last_seen"]), # filter on resolved + # 4 indexes for the list view (state_filter) + models.Index(fields=["project", "is_resolved", "is_muted", "last_seen"], name="issue_list_open"), + models.Index(fields=["project", "is_muted", "last_seen"], name="issue_list_muted"), + models.Index(fields=["project", "is_resolved", "last_seen"], name="issue_list_resolved"), # and unresolved + models.Index(fields=["project", "last_seen"], name="issue_list_all"), # all ] @@ -195,18 +213,25 @@ class Grouping(models.Model): into a single issue. (such manual merging is not yet implemented, but the data-model is already prepared for it) """ project = models.ForeignKey( - "projects.Project", blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' + "projects.Project", blank=False, null=False, on_delete=models.DO_NOTHING) - # NOTE: I don't want to have any principled maximum on the grouping key, nor do I want to prematurely optimize the - # lookup. 
If lookups are slow, we _could_ examine whether manually hashing these values and matching on the hash - # helps. grouping_key = models.TextField(blank=False, null=False) - issue = models.ForeignKey("Issue", blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' + # we hash the key to make it indexable on MySQL, see https://code.djangoproject.com/ticket/2495 + grouping_key_hash = models.CharField(max_length=64, blank=False, null=True) + + issue = models.ForeignKey("Issue", blank=False, null=False, on_delete=models.DO_NOTHING) def __str__(self): return self.grouping_key + class Meta: + unique_together = [ + # principled: grouping _key_ is a _key_ for a reason (within a project). This also implies the main way of + # looking up groupings has an appropriate index. + ("project", "grouping_key_hash"), + ] + def format_unmute_reason(unmute_metadata): if "mute_until" in unmute_metadata: @@ -323,10 +348,15 @@ class IssueStateManager(object): # path is never reached via UI-based paths (because those are by definition not event-triggered); thus # the 2 ways of creating TurningPoints do not collide. TurningPoint.objects.create( + project_id=issue.project_id, issue=issue, triggering_event=triggering_event, timestamp=triggering_event.ingested_at, kind=TurningPointKind.UNMUTED, metadata=json.dumps(unmute_metadata)) triggering_event.never_evict = True # .save() will be called by the caller of this function + @staticmethod + def delete(issue): + issue.delete_deferred() + @staticmethod def get_unmute_thresholds(issue): unmute_vbcs = [ @@ -445,6 +475,11 @@ class IssueQuerysetStateManager(object): for issue in issue_qs: IssueStateManager.unmute(issue, triggering_event) + @staticmethod + def delete(issue_qs): + for issue in issue_qs: + issue.delete_deferred() + class TurningPointKind(models.IntegerChoices): # The language of the kinds reflects a historic view of the system, e.g. 
"first seen" as opposed to "new issue"; an @@ -466,7 +501,8 @@ class TurningPoint(models.Model): # basically: an Event, but that name was already taken in our system :-) alternative names I considered: # "milestone", "state_change", "transition", "annotation", "episode" - issue = models.ForeignKey("Issue", blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' + project = models.ForeignKey("projects.Project", blank=False, null=False, on_delete=models.DO_NOTHING) + issue = models.ForeignKey("Issue", blank=False, null=False, on_delete=models.DO_NOTHING) triggering_event = models.ForeignKey("events.Event", blank=True, null=True, on_delete=models.DO_NOTHING) # null: the system-user diff --git a/issues/tasks.py b/issues/tasks.py new file mode 100644 index 0000000..ba6fe9b --- /dev/null +++ b/issues/tasks.py @@ -0,0 +1,82 @@ +from snappea.decorators import shared_task + +from bugsink.utils import get_model_topography, delete_deps_with_budget +from bugsink.transaction import immediate_atomic, delay_on_commit + + +def get_model_topography_with_issue_override(): + """ + Returns the model topography with ordering adjusted to prefer deletions via .issue, when available. + + This assumes that Issue is not only the root of the dependency graph, but also that if a model has an .issue + ForeignKey, deleting it via that path is sufficient, meaning we can safely avoid visiting the same model again + through other ForeignKey routes (e.g. Event.grouping or TurningPoint.triggering_event). + + The preference is encoded via an explicit list of models, which are visited early and only via their .issue path. 
+ """ + from issues.models import TurningPoint, Grouping + from events.models import Event + from tags.models import IssueTag, EventTag + + preferred = [ + TurningPoint, # above Event, to avoid deletions via .triggering_event + EventTag, # above Event, to avoid deletions via .event + Event, # above Grouping, to avoid deletions via .grouping + Grouping, + IssueTag, + ] + + def as_preferred(lst): + """ + Sorts the list of (model, fk_name) tuples such that the models are in the preferred order as indicated above, + and models which occur with another fk_name are pruned + """ + return sorted( + [(model, fk_name) for model, fk_name in lst if fk_name == "issue" or model not in preferred], + key=lambda x: preferred.index(x[0]) if x[0] in preferred else len(preferred), + ) + + topo = get_model_topography() + for k, lst in topo.items(): + topo[k] = as_preferred(lst) + + return topo + + +@shared_task +def delete_issue_deps(project_id, issue_id): + from .models import Issue # avoid circular import + with immediate_atomic(): + # matches what we do in events/retention.py (and for which argumentation exists); in practive I have seen _much_ + # faster deletion times (in the order of .03s per task on my local laptop) when using a budget of 500, _but_ + # it's not a given those were for "expensive objects" (e.g. events); and I'd rather err on the side of caution + # (worst case we have a bit of inefficiency; in any case this avoids hogging the global write lock / timeouts). 
+ budget = 500 + num_deleted = 0 + + dep_graph = get_model_topography_with_issue_override() + + for model_for_recursion, fk_name_for_recursion in dep_graph["issues.Issue"]: + this_num_deleted = delete_deps_with_budget( + project_id, + model_for_recursion, + fk_name_for_recursion, + [issue_id], + budget - num_deleted, + dep_graph, + is_for_project=False, + ) + + num_deleted += this_num_deleted + + if num_deleted >= budget: + delay_on_commit(delete_issue_deps, project_id, issue_id) + return + + if budget - num_deleted <= 0: + # no more budget for the self-delete. + delay_on_commit(delete_issue_deps, project_id, issue_id) + + else: + # final step: delete the issue itself + Issue.objects.filter(pk=issue_id).delete() diff --git a/issues/templates/issues/_event_nav.html b/issues/templates/issues/_event_nav.html index 92e78d8..5d7e65d 100644 --- a/issues/templates/issues/_event_nav.html +++ b/issues/templates/issues/_event_nav.html @@ -1,45 +1,45 @@ {% load add_to_qs %}
{# nav="last": when doing a new search on an event-page, you want the most recent matching event to show up #} - +
{% if has_prev %} {# no need for 'is_first': if you can go to the left, you can go all the way to the left too #} - + {% else %} -
+
{% endif %} {% if has_prev %} - + {% else %} -
+
{% endif %} {% if has_next %} - + {% else %} -
+
{% endif %} {% if has_next %} - + {% else %} -
+
diff --git a/issues/templates/issues/base.html b/issues/templates/issues/base.html index 11f2a12..9beecc8 100644 --- a/issues/templates/issues/base.html +++ b/issues/templates/issues/base.html @@ -14,16 +14,16 @@ {% csrf_token %} {% if issue.is_resolved %}{# i.e. buttons disabled #} {# see issues/tests.py for why this is turned off ATM #} - {# #} + {# #} {% spaceless %}{# needed to avoid whitespace between the looks-like-one-buttons #} {% if issue.project.has_releases %} - + - + {# we just hide the whole dropdown; this is the easiest implementation of not-showing the dropdown #} {% else %} - + {% endif %} {% endspaceless %} @@ -31,27 +31,27 @@ {% spaceless %}{# needed to avoid whitespace between the looks-like-one-buttons #} {% if issue.project.has_releases %} - {# 'by next' is shown even if 'by current' is also shown: just because you haven't seen 'by current' doesn't mean it's actually already solved; and in fact we show this option first precisely because we can always show it #} - + {# 'by next' is shown even if 'by current' is also shown: just because you haven't seen 'by current' doesn't mean it's actually already solved; and in fact we show this option first precisely because we can always show it #} + +
{% else %} - + {% endif %} {% endspaceless %} @@ -59,33 +59,33 @@ {% spaceless %}{# needed to avoid whitespace between the looks-like-one-buttons #} {% if not issue.is_muted and not issue.is_resolved %} - + {% else %} - + {% endif %} +
{% if issue.is_muted and not issue.is_resolved %} - + {% else %} - + {% endif %} {% endspaceless %} @@ -106,14 +106,14 @@ {# overflow-x-auto is needed at the level of the flex item such that it works at the level where we need it (the code listings)#}
{# 96rem is 1536px, which matches the 2xl class; this is no "must" but eyeballing revealed: good result #} -
-
Stacktrace
-
Event Details
-
Breadcrumbs
-
Event List
-
Tags
-
Grouping
-
History
+
@@ -121,9 +121,9 @@ {% endblock %}
-
+
{% if is_event_page %}
Event {{ event.digest_order|intcomma }} of {{ issue.digested_event_count|intcomma }} which occurred at {{ event.ingested_at|date:"j M G:i T" }}
{% endif %} -
+
{% if is_event_page %} Download | JSON @@ -131,7 +131,7 @@ {% endif %} {% if app_settings.USE_ADMIN and user.is_staff %} - {% if is_event_page %} + {% if is_event_page %} | Event Admin | {% endif %} Issue Admin @@ -145,19 +145,19 @@
-
{# div-in-div to match the spacing of the tabs, which is caused by the hover-thick-line; we use border-2 on both sides rather than border-b-4 to get the text aligned centeredly #} +
{# div-in-div to match the spacing of the tabs, which is caused by the hover-thick-line; we use border-2 on both sides rather than border-b-4 to get the text aligned centeredly #} Issue Key Info
-
Issue #
+
Issue #
{{ issue.friendly_id }}
-
State
+
State
{% if issue.is_resolved %} Resolved @@ -185,7 +185,7 @@
-
Nr. of events:
+
Nr. of events:
{{ issue.digested_event_count|intcomma }} {% if issue.digested_event_count != issue.stored_event_count %} total seen
{{ issue.stored_event_count|intcomma }} available
@@ -196,24 +196,24 @@ {% if issue.digested_event_count > 1 %}
-
First seen:
+
First seen:
{{ issue.first_seen|date:"j M G:i T" }}
-
Last seen:
+
Last seen:
{{ issue.last_seen|date:"j M G:i T" }}
{% else %}
-
Seen at:
+
Seen at:
{{ issue.first_seen|date:"j M G:i T" }}
{% endif %} {% if issue.get_events_at_2 %}
-
Seen in releases:
+
Seen in releases:
{% for version in issue.get_events_at_2 %} {{ version|shortsha }}{% if not forloop.last %},{% endif %} @@ -228,16 +228,16 @@ {% if tab != "tags" and issue.tags_summary %}
-
+
Issue Tags
{% for issuetags in issue.tags_summary %}
-
{{ issuetags.0.key.key }}:
+
{{ issuetags.0.key.key }}:
- {% for issuetag in issuetags %} + {% for issuetag in issuetags %} {{ issuetag.value.value }} ({{ issuetag.pct }}%){% if not forloop.last %},{% endif %} {% endfor %}
diff --git a/issues/templates/issues/breadcrumbs.html b/issues/templates/issues/breadcrumbs.html index daf107c..48f6143 100644 --- a/issues/templates/issues/breadcrumbs.html +++ b/issues/templates/issues/breadcrumbs.html @@ -8,7 +8,7 @@
-
{{ event.ingested_at|date:"j M G:i T" }} (Event {{ event.digest_order|intcomma }} of {{ issue.digested_event_count|intcomma }} total{% if q %} — {{ event_qs_count|intcomma }} found by search{% endif %})
+
{{ event.ingested_at|date:"j M G:i T" }} (Event {{ event.digest_order|intcomma }} of {{ issue.digested_event_count|intcomma }} total{% if q %} — {{ event_qs_count|intcomma }} found by search{% endif %})
@@ -32,14 +32,14 @@
+ {{ breadcrumb.category }} {{ breadcrumb.type }} {{ breadcrumb.message }} + {{ breadcrumb.timestamp }} {# {{ breadcrumb.timestamp|date:"G:i T" and milis }} #}
+ # + ID + Timestamp + Title + Release + Environment
+ {{ event.digest_order }} {# how useful is this really? #} + {# how useful is this really? #} {{ event.id|truncatechars:9 }}
+ No events found{% if q %} for "{{ q }}"{% endif %}.
- {# I briefly considered hiding this thead 'if not issue_list' but it actually looks worse; instead, we just hide that one checkbox #} + {# I briefly considered hiding this thead if there are no items but it actually looks worse; instead, we just hide that one checkbox #} - + @@ -132,15 +161,15 @@ {% for issue in page_obj %} - + {% empty %} - +
-
- {% if issue_list %}{% endif %} +
+ + {# the below sounds expensive, but this list is cached #} + {% if page_obj.object_list|length > 0 %}{% endif %}
@@ -52,16 +71,16 @@ {% if disable_resolve_buttons %} {# see issues/tests.py for why this is turned off ATM #} - {# #} + {# #} {% spaceless %}{# needed to avoid whitespace between the looks-like-one-buttons #} {% if project.has_releases %} - + - + {# we just hide the whole dropdown; this is the easiest implementation of not-showing the dropdown #} {% else %} - + {% endif %} {% endspaceless %} @@ -69,24 +88,24 @@ {% spaceless %}{# needed to avoid whitespace between the looks-like-one-buttons #} {% if project.has_releases %} - {# 'by next' is shown even if 'by current' is also shown: just because you haven't seen 'by current' doesn't mean it's actually already solved; and in fact we show this option first precisely because we can always show it #} - + {# 'by next' is shown even if 'by current' is also shown: just because you haven't seen 'by current' doesn't mean it's actually already solved; and in fact we show this option first precisely because we can always show it #} + + {% else %} - + {% endif %} {% endspaceless %} @@ -94,36 +113,46 @@ {% spaceless %}{# needed to avoid whitespace between the looks-like-one-buttons #} {% if not disable_mute_buttons %} - + {% else %} - + {% endif %} + {% if not disable_unmute_buttons %} - + {% else %} - + {% endif %} + + {% endspaceless %} + + {# NOTE: "reopen" is not available in the UI as per the notes in issue_detail #} - {# only for resolved/muted items #} + {# only for resolved/muted items #}
-
- +
+
-
+
{% if q %}{# a single text is the catch-all for searching w/o results; 'seems enough' because one would generally only search after already having seen some issues (or not), i.e. having seen the relevant message as per below #} No {{ state_filter }} issues found for "{{ q }}" {% else %} {% if state_filter == "open" %} Congratulations! You have no open issues. {% if project.digested_event_count == 0 %} - This might mean you have not yet set up your SDK. + This might mean you have not yet set up your SDK. {% endif %} {% else %} No {{ state_filter }} issues found. @@ -200,29 +229,29 @@ {% endif %} - {% if page_obj.paginator.num_pages > 1 %} - Issues {{ page_obj.start_index|intcomma }}–{{ page_obj.end_index|intcomma }} of {{ page_obj.paginator.count|intcomma }} - {% elif page_obj.paginator.count > 0 %} - {{ page_obj.paginator.count|intcomma }} Issues + {% if page_obj.object_list|length > 0 %}{# sounds expensive, but this list is cached #} + Issues {{ page_obj.start_index|intcomma }} – {{ page_obj.end_index|intcomma }} + {% else %} + {% if page_obj.number > 1 %} + Less than {{ page_obj.start_index }} Issues {# corresponds to the 1/250 case of having an exactly full page and navigating to an empty page after that #} + {% else %} + 0 Issues + {% endif %} {% endif %} {% if page_obj.has_next %} - - - {% else %} - {% endif %}
{# the div with a few project-related icons (pjt-members, pjt-settings, my settings, dsn) on the lower RHS #} {% if not app_settings.SINGLE_USER %}{% if member.is_admin or request.user.is_superuser %} -
+
@@ -232,7 +261,7 @@ {% endif %}{% endif %} {% if member.is_admin or request.user.is_superuser %} -
+
@@ -243,7 +272,7 @@ {% endif %} {# member-existance is implied if you can see this page #} -
+ {# member-existance is implied if you can see this page #} -
+
{% endblock %} diff --git a/issues/templates/issues/stacktrace.html b/issues/templates/issues/stacktrace.html index 4af6e04..6036c72 100644 --- a/issues/templates/issues/stacktrace.html +++ b/issues/templates/issues/stacktrace.html @@ -28,23 +28,25 @@ {% for exception in exceptions %} -
+
{% if forloop.counter0 == 0 %} -
{{ event.ingested_at|date:"j M G:i T" }} (Event {{ event.digest_order|intcomma }} of {{ issue.digested_event_count|intcomma }} total{% if q %} — {{ event_qs_count|intcomma }} found by search{% endif %})
+
{{ event.ingested_at|date:"j M G:i T" }} (Event {{ event.digest_order|intcomma }} of {{ issue.digested_event_count|intcomma }} total{% if q %} — {{ event_qs_count|intcomma }} found by search{% endif %})
{% endif %}

{{ exception.type }}

{{ exception.value }}
{% if forloop.counter0 == 0 %} -
-
- - - - +
{# container of 2 divs: one for buttons, one for event-nav; on smaller screens these are 2 rows; on bigger they are side-by-side #} +
+ + + + +
+
{% include "issues/_event_nav.html" %}
@@ -53,15 +55,15 @@ {% for frame in exception.stacktrace.frames %} {% with frame=frame|pygmentize:event.platform %} - -
{# per frame div #} + +
{# per frame div #} {% if frame.raise_point %}{% endif %} {% if frame.in_app %}{% endif %} {% if forloop.first and forloop.parentloop.first %}{% endif %} -
{# per frame header div #} +
{# per frame header div #} -
{# filename, function, lineno #} +
{# filename, function, lineno #} {% if frame.in_app %} {{ frame.filename }}{% if frame.function %} in {{ frame.function }}{% endif %}{% if frame.lineno %} line {{ frame.lineno }}{% endif %}. {% else %} @@ -72,13 +74,13 @@
{# indicator for frame's position in stacktrace #} {% if stack_of_plates and forloop.first or not stack_of_plates and forloop.last %} {% if stack_of_plates and forloop.parentloop.first or not stack_of_plates and forloop.parentloop.last %} - raise {{ exception.type }} + raise {{ exception.type }} {% else %} - raise {{ exception.type }} (handled) + raise {{ exception.type }} (handled) {% endif %} {% elif stack_of_plates and forloop.last or not stack_of_plates and forloop.first %} {# strictly speaking, not actually "else", but to avoid clutter we hide 'outermost' info when this is also the raise-point #} {% if stack_of_plates and forloop.parentloop.first or not stack_of_plates and forloop.parentloop.last %} - → begin + → begin {% else %} {% comment %}I find it (quite too) hard to come up with a good name for this type of frame that is both short and clear. Thoughts so fare were: * try... @@ -90,7 +92,7 @@ * "divergence w/ main exception" * first unique frame {% endcomment %} - try… + try… {% endif %} {% endif %} @@ -103,18 +105,18 @@
{# per frame header div #} -
{# collapsable part #}
{# convience div for padding & border; the border is basically the top-border of the next header #} {% if "context_line" in frame and frame.context_line is not None %} -
{# code listing #} +
{# code listing #} {# the spread-out pX-6 in this code is intentional to ensure the padding is visible when scrolling to the right, and not visible when scrolling is possible (i.e. the text is cut-off awkwardly to hint at scrolling #}
    {% for line in frame.pre_context %}
  1. {{ line }} {# leave space to avoid collapse #}
  2. {% endfor %} {# the gradient is a workaround, because I can't get a full-width elem going here inside the overflow #} {# when some other line is overflowing. Using the gradient hides this fact (it happens to also look good) #} -
  3. {{ frame.context_line }} {# leave space to avoid collapse #}
  4. +
  5. {{ frame.context_line }} {# leave space to avoid collapse #}
  6. {% for line in frame.post_context %}
  7. {{ line }} {# leave space to avoid collapse #}
  8. {% endfor %}
@@ -123,13 +125,13 @@ {% if frame.vars %}
{# variables #}
-
Variable
-
Value
+
Variable
+
Value
{% for var, value in frame.vars|items %}
-
{{ var }}
-
{{ value|format_var }}
+
{{ var }}
+
{{ value|format_var }}
{% endfor %} {% if frame.vars|incomplete %} @@ -144,7 +146,11 @@ {% if "context_line" not in frame or frame.context_line is None %}{% if not frame.vars %}{# nested ifs as a subsitute for brackets-in-templates #}
- No code context or variables available for this frame. + {% if frame.debug_id %}{# only in the no-vars-either case to avoid excessive if-nesting (at the cost of completeness, but "will yes-vars, broken debug_id even be a case? For now we hope not) #} + No sourcemaps found for Debug ID {{ frame.debug_id }} + {% else %} + No code context or variables available for this frame. + {% endif %}
{% endif %}{% endif %} diff --git a/issues/templates/issues/tags.html b/issues/templates/issues/tags.html index 16d2a6c..1e9a588 100644 --- a/issues/templates/issues/tags.html +++ b/issues/templates/issues/tags.html @@ -8,11 +8,11 @@

{{ issuetags.0.key.key }}:

- {% for issuetag in issuetags %} -
-
{{ issuetag.value.value }}
-
{{ issuetag.pct }}%
-
{{ issuetag.count }} events
+ {% for issuetag in issuetags %} +
+
{{ issuetag.value.value }}
+
{{ issuetag.pct }}%
+
{{ issuetag.count }} events
{% endfor %}
diff --git a/issues/tests.py b/issues/tests.py index 3698ff5..0e04e7b 100644 --- a/issues/tests.py +++ b/issues/tests.py @@ -13,8 +13,10 @@ from django.test import TestCase as DjangoTestCase from django.contrib.auth import get_user_model from django.test import tag from django.conf import settings +from django.apps import apps from bugsink.test_utils import TransactionTestCase25251 as TransactionTestCase +from bugsink.utils import get_model_topography from projects.models import Project, ProjectMembership from releases.models import create_release_if_needed from events.factories import create_event @@ -23,11 +25,14 @@ from compat.dsn import get_header_value from events.models import Event from ingest.views import BaseIngestAPIView from issues.factories import get_or_create_issue +from tags.models import store_tags +from tags.tasks import vacuum_tagvalues -from .models import Issue, IssueStateManager +from .models import Issue, IssueStateManager, TurningPoint, TurningPointKind from .regressions import is_regression, is_regression_2, issue_is_regression from .factories import denormalized_issue_fields from .utils import get_issue_grouper_for_data +from .tasks import get_model_topography_with_issue_override User = get_user_model() @@ -351,12 +356,11 @@ class MuteUnmuteTestCase(TransactionTestCase): def test_unmute_simple_case(self, send_unmute_alert): project = Project.objects.create() - issue = Issue.objects.create( - project=project, - unmute_on_volume_based_conditions='[{"period": "day", "nr_of_periods": 1, "volume": 1}]', - is_muted=True, - **denormalized_issue_fields(), - ) + issue, _ = get_or_create_issue(project) + + issue.unmute_on_volume_based_conditions = '[{"period": "day", "nr_of_periods": 1, "volume": 1}]' + issue.is_muted = True + issue.save() event = create_event(project, issue) BaseIngestAPIView.count_issue_periods_and_act_on_it(issue, event, datetime.now(timezone.utc)) @@ -371,15 +375,14 @@ class MuteUnmuteTestCase(TransactionTestCase): def 
test_unmute_two_simultaneously_should_lead_to_one_alert(self, send_unmute_alert): project = Project.objects.create() - issue = Issue.objects.create( - project=project, - unmute_on_volume_based_conditions='''[ + issue, _ = get_or_create_issue(project) + + issue. unmute_on_volume_based_conditions = '''[ {"period": "day", "nr_of_periods": 1, "volume": 1}, {"period": "month", "nr_of_periods": 1, "volume": 1} -]''', - is_muted=True, - **denormalized_issue_fields(), - ) +]''' + issue.is_muted = True + issue.save() event = create_event(project, issue) BaseIngestAPIView.count_issue_periods_and_act_on_it(issue, event, datetime.now(timezone.utc)) @@ -665,3 +668,98 @@ class GroupingUtilsTestCase(DjangoTestCase): def test_fingerprint_with_default(self): self.assertEqual("Log Message: ⋄ ⋄ fixed string", get_issue_grouper_for_data({"fingerprint": ["{{ default }}", "fixed string"]})) + + +class IssueDeletionTestCase(TransactionTestCase): + + def setUp(self): + super().setUp() + self.project = Project.objects.create(name="Test Project", stored_event_count=1) # 1, in prep. of the below + self.issue, _ = get_or_create_issue(self.project) + self.event = create_event(self.project, issue=self.issue) + + TurningPoint.objects.create( + project=self.project, + issue=self.issue, triggering_event=self.event, timestamp=self.event.ingested_at, + kind=TurningPointKind.FIRST_SEEN) + + self.event.never_evict = True + self.event.save() + + store_tags(self.event, self.issue, {"foo": "bar"}) + + def test_delete_issue(self): + models = [apps.get_model(app_label=s.split('.')[0], model_name=s.split('.')[1].lower()) for s in [ + 'events.Event', 'issues.Grouping', 'issues.TurningPoint', 'tags.EventTag', 'issues.Issue', 'tags.IssueTag', + 'tags.TagValue', # TagValue 'feels like' a vacuum_model (FKs reversed) but is cleaned up in `prune_orphans` + ]] + + # see the note in `prune_orphans` about TagKey to understand why it's special. 
+ vacuum_models = [apps.get_model(app_label=s.split('.')[0], model_name=s.split('.')[1].lower()) + for s in ['tags.TagKey',]] + + for model in models + vacuum_models: + # test-the-test: make sure some instances of the models actually exist after setup + self.assertTrue(model.objects.exists(), f"Some {model.__name__} should exist") + + # assertNumQueries() is brittle and opaque. But at least the brittle part is quick to fix (a single number) and + # provides a canary for performance regressions. + + # correct for bugsink/transaction.py's select_for_update for non-sqlite databases + correct_for_select_for_update = 1 if 'sqlite' not in settings.DATABASES['default']['ENGINE'] else 0 + + with self.assertNumQueries(19 + correct_for_select_for_update): + self.issue.delete_deferred() + + # tests run w/ TASK_ALWAYS_EAGER, so in the below we can just check the database directly + for model in models: + self.assertFalse(model.objects.exists(), f"No {model.__name__}s should exist after issue deletion") + + for model in vacuum_models: + # 'should' in quotes because this isn't so because we believe it's better if they did, but because the + # code currently does not delete them. + self.assertTrue(model.objects.exists(), f"Some {model.__name__}s 'should' exist after issue deletion") + + self.assertEqual(0, Project.objects.get().stored_event_count) + + vacuum_tagvalues() + # tests run w/ TASK_ALWAYS_EAGER, so any "delayed" (recursive) calls can be expected to have run + + for model in vacuum_models: + self.assertFalse(model.objects.exists(), f"No {model.__name__}s should exist after vacuuming") + + def test_dependency_graphs(self): + # tests for an implementation detail of defered deletion, namely 1 test that asserts what the actual + # model-topography is, and one test that shows how we manually override it; this is to trigger a failure when + # the topology changes (and forces us to double-check that the override is still correct). 
+ + orig = get_model_topography() + override = get_model_topography_with_issue_override() + + def walk(topo, model_name): + results = [] + for model, fk_name in topo[model_name]: + results.append((model, fk_name)) + results.extend(walk(topo, model._meta.label)) + return results + + self.assertEqual(walk(orig, 'issues.Issue'), [ + (apps.get_model('issues', 'Grouping'), 'issue'), + (apps.get_model('events', 'Event'), 'grouping'), + (apps.get_model('issues', 'TurningPoint'), 'triggering_event'), + (apps.get_model('tags', 'EventTag'), 'event'), + (apps.get_model('issues', 'TurningPoint'), 'issue'), + (apps.get_model('events', 'Event'), 'issue'), + (apps.get_model('issues', 'TurningPoint'), 'triggering_event'), + (apps.get_model('tags', 'EventTag'), 'event'), + (apps.get_model('tags', 'EventTag'), 'issue'), + (apps.get_model('tags', 'IssueTag'), 'issue'), + ]) + + self.assertEqual(walk(override, 'issues.Issue'), [ + (apps.get_model('issues', 'TurningPoint'), 'issue'), + (apps.get_model('tags', 'EventTag'), 'issue'), + (apps.get_model('events', 'Event'), 'issue'), + (apps.get_model('issues', 'Grouping'), 'issue'), + (apps.get_model('tags', 'IssueTag'), 'issue'), + ]) diff --git a/issues/urls.py b/issues/urls.py index 2d8a93f..cf26d43 100644 --- a/issues/urls.py +++ b/issues/urls.py @@ -54,7 +54,8 @@ urlpatterns = [ path('issue//event//', issue_event_stacktrace, name="event_stacktrace"), path('issue//event//details/', issue_event_details, name="event_details"), - path('issue//event//breadcrumbs/', issue_event_details, name="event_breadcrumbs"), + path( + 'issue//event//breadcrumbs/', issue_event_breadcrumbs, name="event_breadcrumbs"), path('issue//tags/', issue_tags), path('issue//history/', issue_history), diff --git a/issues/utils.py b/issues/utils.py index aa5431b..58a210a 100644 --- a/issues/utils.py +++ b/issues/utils.py @@ -151,7 +151,9 @@ def get_issue_grouper_for_data(data, calculated_type=None, calculated_value=None if fingerprint: return " ⋄ ".join([ - 
default_issue_grouper(calculated_type, calculated_value, transaction) if part == "{{ default }}" else part + (default_issue_grouper(calculated_type, calculated_value, transaction) + if part == "{{ default }}" + else str(part)) for part in fingerprint ]) diff --git a/issues/views.py b/issues/views.py index 13b3a77..990793a 100644 --- a/issues/views.py +++ b/issues/views.py @@ -15,6 +15,7 @@ from django.http import Http404 from django.core.paginator import Paginator, Page from django.db.utils import OperationalError from django.conf import settings +from django.utils.functional import cached_property from sentry.utils.safe import get_path from sentry_sdk_extensions import capture_or_log_exception @@ -34,7 +35,7 @@ from tags.search import search_issues, search_events, search_events_optimized from .models import Issue, IssueQuerysetStateManager, IssueStateManager, TurningPoint, TurningPointKind from .forms import CommentForm from .utils import get_values, get_main_exception -from events.utils import annotate_with_meta, apply_sourcemaps +from events.utils import annotate_with_meta, apply_sourcemaps, get_sourcemap_images logger = logging.getLogger("bugsink.issues") @@ -87,6 +88,35 @@ class KnownCountPaginator(EagerPaginator): return self._count +class UncountablePage(Page): + """The Page subclass to be used with UncountablePaginator.""" + + @cached_property + def has_next(self): + # hack that works 249/250 times: if the current page is full, we have a next page + return len(self.object_list) == self.paginator.per_page + + @cached_property + def end_index(self): + return (self.paginator.per_page * (self.number - 1)) + len(self.object_list) + + +class UncountablePaginator(EagerPaginator): + """optimization: counting is too expensive; to be used in a template w/o .count and .last""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _get_page(self, *args, **kwargs): + object_list = args[0] + object_list = list(object_list) + return 
UncountablePage(object_list, *(args[1:]), **kwargs) + + @property + def count(self): + return 1_000_000_000 # big enough to be bigger than what you can click through or store in the DB. + + def _request_repr(parsed_data): if "request" not in parsed_data: return "" @@ -98,6 +128,10 @@ def _is_valid_action(action, issue): """We take the 'strict' approach of complaining even when the action is simply a no-op, because you're already in the desired state.""" + if action == "delete": + # any type of issue can be deleted + return True + if issue.is_resolved: # any action is illegal on resolved issues (as per our current UI) return False @@ -123,6 +157,10 @@ def _is_valid_action(action, issue): def _q_for_invalid_for_action(action): """returns a Q obj of issues for which the action is not valid.""" + if action == "delete": + # delete is always valid, so we don't want any issues to be returned, https://stackoverflow.com/a/39001190 + return Q(pk__in=[]) + illegal_conditions = Q(is_resolved=True) # any action is illegal on resolved issues (as per our current UI) if action.startswith("resolved_release:"): @@ -139,7 +177,10 @@ def _q_for_invalid_for_action(action): def _make_history(issue_or_qs, action, user): - if action == "resolve": + if action == "delete": + return # we're about to delete the issue, so no history is needed (nor possible) + + elif action == "resolve": kind = TurningPointKind.RESOLVED elif action.startswith("resolved"): kind = TurningPointKind.RESOLVED @@ -180,10 +221,13 @@ def _make_history(issue_or_qs, action, user): now = timezone.now() if isinstance(issue_or_qs, Issue): TurningPoint.objects.create( + project=issue_or_qs.project, issue=issue_or_qs, kind=kind, user=user, metadata=json.dumps(metadata), timestamp=now) else: TurningPoint.objects.bulk_create([ - TurningPoint(issue=issue, kind=kind, user=user, metadata=json.dumps(metadata), timestamp=now) + TurningPoint( + project_id=issue.project_id, issue=issue, kind=kind, user=user, 
metadata=json.dumps(metadata), + timestamp=now) for issue in issue_or_qs ]) @@ -219,6 +263,8 @@ def _apply_action(manager, issue_or_qs, action, user): }])) elif action == "unmute": manager.unmute(issue_or_qs) + elif action == "delete": + manager.delete(issue_or_qs) def issue_list(request, project_pk, state_filter="open"): @@ -262,20 +308,24 @@ def _issue_list_pt_2(request, project, state_filter, unapplied_issue_ids): } issue_list = d_state_filter[state_filter]( - Issue.objects.filter(project=project) + Issue.objects.filter(project=project, is_deleted=False) ).order_by("-last_seen") if request.GET.get("q"): issue_list = search_issues(project, issue_list, request.GET["q"]) - paginator = EagerPaginator(issue_list, 250) + paginator = UncountablePaginator(issue_list, 250) page_number = request.GET.get("page") page_obj = paginator.get_page(page_number) + try: + member = ProjectMembership.objects.get(project=project, user=request.user) + except ProjectMembership.DoesNotExist: + member = None # this can happen if the user is superuser (as per `project_membership_required` decorator) + return render(request, "issues/issue_list.html", { "project": project, - "member": ProjectMembership.objects.get(project=project, user=request.user), - "issue_list": issue_list, + "member": member, "state_filter": state_filter, "mute_options": GLOBAL_MUTE_OPTIONS, @@ -558,6 +608,7 @@ def issue_event_details(request, issue, event_pk=None, digest_order=None, nav=No ("ingested at", _date_with_milis_html(event.ingested_at)), ("digested at", _date_with_milis_html(event.digested_at)), ("digest order", event.digest_order), + ("remote_addr", event.remote_addr), ] logentry_info = [] @@ -578,6 +629,11 @@ def issue_event_details(request, issue, event_pk=None, digest_order=None, nav=No logentry_key = "logentry" if "logentry" in parsed_data else "message" if isinstance(parsed_data.get(logentry_key), dict): + # NOTE: event.schema.json says "If `message` and `params` are given, Sentry will attempt to 
backfill + # `formatted` if empty." but we don't do that yet. + if parsed_data.get(logentry_key, {}).get("formatted"): + logentry_info.append(("formatted", parsed_data[logentry_key]["formatted"])) + if parsed_data.get(logentry_key, {}).get("message"): logentry_info.append(("message", parsed_data[logentry_key]["message"])) @@ -589,7 +645,7 @@ def issue_event_details(request, issue, event_pk=None, digest_order=None, nav=No for param_k, param_v in params.items(): logentry_info.append((param_k, param_v)) - elif isinstance(parsed_data.get(logentry_key), str): + elif isinstance(parsed_data.get(logentry_key), str): # robust for top-level as str (see #55) logentry_info.append(("message", parsed_data[logentry_key])) key_info += [ @@ -603,6 +659,15 @@ def issue_event_details(request, issue, event_pk=None, digest_order=None, nav=No contexts = get_contexts_enriched_with_ua(parsed_data) + try: + sourcemaps_images = get_sourcemap_images(parsed_data) + except Exception as e: + if settings.DEBUG or settings.I_AM_RUNNING == "TEST": + # when developing/testing, I _do_ want to get notified + raise + # sourcemaps are still experimental; we don't want to fail on them, so we just log the error and move on. 
+ capture_or_log_exception(e, logger) + return render(request, "issues/event_details.html", { "tab": "event-details", "this_view": "event_details", @@ -616,6 +681,7 @@ def issue_event_details(request, issue, event_pk=None, digest_order=None, nav=No "logentry_info": logentry_info, "deployment_info": deployment_info, "contexts": contexts, + "sourcemaps_images": sourcemaps_images, "mute_options": GLOBAL_MUTE_OPTIONS, "q": request.GET.get("q", ""), # event_qs_count is not used when there is no q, so no need to calculate it in that case @@ -728,6 +794,7 @@ def history_comment_new(request, issue): # think that's amount of magic to have: it still allows one to erase comments (possibly for non-manual # kinds) but it saves you from what is obviously a mistake (without complaining with a red box or something) TurningPoint.objects.create( + project=issue.project, issue=issue, kind=TurningPointKind.MANUAL_ANNOTATION, user=request.user, comment=form.cleaned_data["comment"], timestamp=timezone.now()) diff --git a/projects/admin.py b/projects/admin.py index dccd827..2b823f7 100644 --- a/projects/admin.py +++ b/projects/admin.py @@ -1,9 +1,17 @@ from django.contrib import admin +from django.utils.decorators import method_decorator +from django.views.decorators.csrf import csrf_protect + from admin_auto_filters.filters import AutocompleteFilter +from bugsink.transaction import immediate_atomic + from .models import Project, ProjectMembership +csrf_protect_m = method_decorator(csrf_protect) + + class ProjectFilter(AutocompleteFilter): title = 'Project' field_name = 'project' @@ -31,9 +39,8 @@ class ProjectAdmin(admin.ModelAdmin): list_display = [ 'name', 'dsn', - 'alert_on_new_issue', - 'alert_on_regression', - 'alert_on_unmute', + 'digested_event_count', + 'stored_event_count', ] readonly_fields = [ @@ -47,6 +54,31 @@ class ProjectAdmin(admin.ModelAdmin): 'slug': ['name'], } + def get_deleted_objects(self, objs, request): + to_delete = list(objs) + ["...all its related objects... 
(delayed)"] + model_count = { + Project: len(objs), + } + perms_needed = set() + protected = [] + return to_delete, model_count, perms_needed, protected + + def delete_queryset(self, request, queryset): + # NOTE: not the most efficient; it will do for a first version. + with immediate_atomic(): + for obj in queryset: + obj.delete_deferred() + + def delete_model(self, request, obj): + with immediate_atomic(): + obj.delete_deferred() + + @csrf_protect_m + def delete_view(self, request, object_id, extra_context=None): + # the superclass version, but with the transaction.atomic context manager commented out (we do this ourselves) + # with transaction.atomic(using=router.db_for_write(self.model)): + return self._delete_view(request, object_id, extra_context) + # the preferred way to deal with ProjectMembership is actually through the inline above; however, because this may prove # to not scale well with (very? more than 50?) memberships per project, we've left the separate admin interface here for diff --git a/projects/migrations/0012_project_is_deleted.py b/projects/migrations/0012_project_is_deleted.py new file mode 100644 index 0000000..6e3625a --- /dev/null +++ b/projects/migrations/0012_project_is_deleted.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.21 on 2025-07-03 13:57 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0011_fill_stored_event_count"), + ] + + operations = [ + migrations.AddField( + model_name="project", + name="is_deleted", + field=models.BooleanField(default=False), + ), + ] diff --git a/projects/migrations/0013_delete_objects_pointing_to_null_project.py b/projects/migrations/0013_delete_objects_pointing_to_null_project.py new file mode 100644 index 0000000..aee41be --- /dev/null +++ b/projects/migrations/0013_delete_objects_pointing_to_null_project.py @@ -0,0 +1,48 @@ +from django.db import migrations + + +def delete_objects_pointing_to_null_project(apps, 
schema_editor): + # Up until now, we have various models w/ .project=FK(null=True, on_delete=models.SET_NULL) + # Although it is "not expected" in the interface, project-deletion would have led to those + # objects with a null project. We're about to change that to .project=FK(null=False, ...) which + # would crash if we don't remove those objects first. Object-removal is "fine" though, because + # as per the meaning of the SET_NULL, these objects were "dangling" anyway. + + # We implement this as a _single_ cross-app migration so that reasoning about the order of deletions is easy (and + # we can just copy the correct order from the project/tasks.py `preferred` variable. This cross-appness does mean + # that we must specify all dependencies here, and all the set-null migrations (from various apps) must point at this + # migration as their dependency. + + # from tasks.py, but in "strings" form + preferred = [ + 'tags.EventTag', + 'tags.IssueTag', + 'tags.TagValue', + 'tags.TagKey', + # 'issues.TurningPoint', # not needed, .project is already not-null (we just added it) + 'events.Event', + 'issues.Grouping', + # 'alerts.MessagingServiceConfig', was CASCADE (not null), so no deletion needed + # 'projects.ProjectMembership', was CASCADE (not null), so no deletion needed + 'releases.Release', + 'issues.Issue', + ] + + for model_name in preferred: + model = apps.get_model(*model_name.split('.')) + model.objects.filter(project__isnull=True).delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0012_project_is_deleted"), + ("issues", "0024_turningpoint_project_alter_not_null"), + ("tags", "0004_alter_do_nothing"), + ("releases", "0002_release_releases_re_sort_ep_5c07c8_idx"), + ("events", "0021_alter_do_nothing"), + ] + + operations = [ + migrations.RunPython(delete_objects_pointing_to_null_project, reverse_code=migrations.RunPython.noop), + ] diff --git a/projects/migrations/0014_alter_projectmembership_project.py 
b/projects/migrations/0014_alter_projectmembership_project.py new file mode 100644 index 0000000..cd13986 --- /dev/null +++ b/projects/migrations/0014_alter_projectmembership_project.py @@ -0,0 +1,19 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0013_delete_objects_pointing_to_null_project"), + ] + + operations = [ + migrations.AlterField( + model_name="projectmembership", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + ] diff --git a/projects/models.py b/projects/models.py index 73f9b71..ac8c367 100644 --- a/projects/models.py +++ b/projects/models.py @@ -5,11 +5,14 @@ from django.conf import settings from django.utils.text import slugify from bugsink.app_settings import get_settings +from bugsink.transaction import delay_on_commit from compat.dsn import build_dsn from teams.models import TeamMembership +from .tasks import delete_project_deps + # ## Visibility/Access-design # @@ -74,6 +77,7 @@ class Project(models.Model): name = models.CharField(max_length=255, blank=False, null=False, unique=True) slug = models.SlugField(max_length=50, blank=False, null=False, unique=True) + is_deleted = models.BooleanField(default=False) # sentry_key mirrors the "public" part of the sentry DSN. 
As of late 2023 Sentry's docs say the this about DSNs: # @@ -143,6 +147,13 @@ class Project(models.Model): super().save(*args, **kwargs) + def delete_deferred(self): + """Marks the project as deleted, and schedules deletion of all related objects""" + self.is_deleted = True + self.save(update_fields=["is_deleted"]) + + delay_on_commit(delete_project_deps, str(self.id)) + def is_joinable(self, user=None): if user is not None: # take the user's team membership into account @@ -164,7 +175,7 @@ class Project(models.Model): class ProjectMembership(models.Model): - project = models.ForeignKey(Project, on_delete=models.CASCADE) + project = models.ForeignKey(Project, on_delete=models.DO_NOTHING) user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) send_email_alerts = models.BooleanField(default=None, null=True) diff --git a/projects/tasks.py b/projects/tasks.py index 219320e..5b51b28 100644 --- a/projects/tasks.py +++ b/projects/tasks.py @@ -4,12 +4,13 @@ from snappea.decorators import shared_task from bugsink.app_settings import get_settings from bugsink.utils import send_rendered_email - -from .models import Project +from bugsink.transaction import immediate_atomic, delay_on_commit +from bugsink.utils import get_model_topography, delete_deps_with_budget @shared_task def send_project_invite_email_new_user(email, project_pk, token): + from .models import Project # avoid circular import project = Project.objects.get(pk=project_pk) send_rendered_email( @@ -30,6 +31,7 @@ def send_project_invite_email_new_user(email, project_pk, token): @shared_task def send_project_invite_email(email, project_pk): + from .models import Project # avoid circular import project = Project.objects.get(pk=project_pk) send_rendered_email( @@ -45,3 +47,95 @@ def send_project_invite_email(email, project_pk): }), }, ) + + +def get_model_topography_with_project_override(): + """ + Returns the model topography with ordering adjusted to prefer deletions via .project, when 
available. + + This assumes that Project is not only the root of the dependency graph, but also that if a model has a .project + ForeignKey, deleting it via that path is sufficient, meaning we can safely avoid visiting the same model again + through other ForeignKey routes (e.g. any of the .issue paths). + + The preference is encoded via an explicit list of models, which are visited early and only via their .project path. + """ + from issues.models import Issue, TurningPoint, Grouping + from events.models import Event + from tags.models import IssueTag, EventTag, TagValue, TagKey + from alerts.models import MessagingServiceConfig + from releases.models import Release + from projects.models import ProjectMembership + + preferred = [ + # Tag-related: remove the "depending" models first and the most depended on last. + EventTag, # above Event, to avoid deletions via .event + IssueTag, + TagValue, + TagKey, + + TurningPoint, # above Event, to avoid deletions via .triggering_event + Event, # above Grouping, to avoid deletions via .grouping + Grouping, + + # these things "could be anywhere" in the ordering; they're not that connected; we put them at the end.
+ MessagingServiceConfig, + ProjectMembership, + Release, + + Issue, # at the bottom, most everything points to this, we'd rather delete those things via .project + ] + + def as_preferred(lst): + """ + Sorts the list of (model, fk_name) tuples such that the models are in the preferred order as indicated above, + and models which occur with another fk_name are pruned + """ + return sorted( + [(model, fk_name) for model, fk_name in lst if fk_name == "project" or model not in preferred], + key=lambda x: preferred.index(x[0]) if x[0] in preferred else len(preferred), + ) + + topo = get_model_topography() + for k, lst in topo.items(): + topo[k] = as_preferred(lst) + + return topo + + +@shared_task +def delete_project_deps(project_id): + from .models import Project # avoid circular import + with immediate_atomic(): + # matches what we do in events/retention.py (and for which argumentation exists); in practice I have seen _much_ + # faster deletion times (in the order of .03s per task on my local laptop) when using a budget of 500, _but_ + # it's not a given those were for "expensive objects" (e.g. events); and I'd rather err on the side of caution + # (worst case we have a bit of inefficiency; in any case this avoids hogging the global write lock / timeouts). + budget = 500 + num_deleted = 0 + + dep_graph = get_model_topography_with_project_override() + + for model_for_recursion, fk_name_for_recursion in dep_graph["projects.Project"]: + this_num_deleted = delete_deps_with_budget( + project_id, + model_for_recursion, + fk_name_for_recursion, + [project_id], + budget - num_deleted, + dep_graph, + is_for_project=True, + ) + + num_deleted += this_num_deleted + + if num_deleted >= budget: + delay_on_commit(delete_project_deps, project_id) + return + + if budget - num_deleted <= 0: + # no more budget for the self-delete.
+ delay_on_commit(delete_project_deps, project_id) + + else: + # final step: delete the project itself + Project.objects.filter(pk=project_id).delete() diff --git a/projects/templates/projects/project_alerts_setup.html b/projects/templates/projects/project_alerts_setup.html new file mode 100644 index 0000000..fb1de1f --- /dev/null +++ b/projects/templates/projects/project_alerts_setup.html @@ -0,0 +1,82 @@ +{% extends "base.html" %} +{% load static %} + +{% block title %}Alerts · {{ project.name }} · {{ site_title }}{% endblock %} + +{% block content %} + + + +
+ + + +{% endblock %} diff --git a/projects/templates/projects/project_edit.html b/projects/templates/projects/project_edit.html index ac0510c..a83c738 100644 --- a/projects/templates/projects/project_edit.html +++ b/projects/templates/projects/project_edit.html @@ -5,13 +5,32 @@ {% block title %}Edit {{ project.name }} · {{ site_title }}{% endblock %} {% block content %} -{# div class="text-cyan-800" here in an attempt to trigger tailwind, which does not pick up Pyhton code #} + +
-
-
+ {% csrf_token %}
@@ -27,11 +46,20 @@ {% tailwind_formfield form.retention_max_event_count %} {% tailwind_formfield form.dsn %} - - Cancel +
+ + Cancel + +
-
{% endblock %} + +{% block extra_js %} + + +{% endblock %} diff --git a/projects/templates/projects/project_list.html b/projects/templates/projects/project_list.html index ec0dd98..86e79f4 100644 --- a/projects/templates/projects/project_list.html +++ b/projects/templates/projects/project_list.html @@ -17,7 +17,7 @@
{% if can_create %} {% endif %}
{# top, RHS (buttons) #} @@ -27,17 +27,26 @@
-
+ {% if messages %} +
    + {% for message in messages %} + {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} +
  • {{ message }}
  • + {% endfor %} +
+ {% endif %} + +
-
My Projects
+
My Projects
{% if not app_settings.SINGLE_USER %} -
Team Projects
-
Other Projects
+
Team Projects
+
Other Projects
{% endif %}
{% comment %}
- +
{% endcomment %}
@@ -50,37 +59,37 @@ {% for project in project_list %} - + + + {% empty %} - + diff --git a/projects/templates/projects/project_member_settings.html b/projects/templates/projects/project_member_settings.html index 1e6eb35..64a3bf5 100644 --- a/projects/templates/projects/project_member_settings.html +++ b/projects/templates/projects/project_member_settings.html @@ -17,7 +17,7 @@
    {% for message in messages %} {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} -
  • {{ message }}
  • +
  • {{ message }}
  • {% endfor %}
{% endif %} @@ -37,11 +37,11 @@ {% tailwind_formfield form.role %} {% tailwind_formfield form.send_email_alerts %} - + {% if this_is_you %} - Cancel {# not quite perfect, because "you" can also click on yourself in the member list #} + Cancel {# not quite perfect, because "you" can also click on yourself in the member list #} {% else %} - Cancel + Cancel {% endif %} diff --git a/projects/templates/projects/project_members.html b/projects/templates/projects/project_members.html index da21a48..1a283de 100644 --- a/projects/templates/projects/project_members.html +++ b/projects/templates/projects/project_members.html @@ -15,7 +15,7 @@
    {% for message in messages %} {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} -
  • {{ message }}
  • +
  • {{ message }}
  • {% endfor %}
{% endif %} @@ -24,8 +24,8 @@

Project Members

+ Invite Member +
@@ -35,19 +35,19 @@
{% if project.member or request.user.is_superuser %} - {{ project.name }} + {{ project.name }} {% else %} - {{ project.name }} + {{ project.name }} {% endif %}
{{ project.team.name }} | {{ project.member_count }} members - | {{ project.open_issue_count }} open issues + {# | {{ project.open_issue_count }} open issues #} {% if project.member %} - | my settings + | my settings {% endif %}
{% if project.member %} {% if not project.member.accepted %} - You're invited! + You're invited! {% elif project.member.is_admin %} {# NOTE: we intentionally hide admin-ness for non-accepted users; #} - Admin + Admin {% endif %} {% endif %} {% if not app_settings.SINGLE_USER %}{% if project.member.is_admin or request.user.is_superuser %} - + {% if project.member or request.user.is_superuser %} + {% else %}
- -
+ + {% endif %} {% else %} {% if ownership_filter == "teams" or project.is_joinable or request.user.is_superuser %}{# ownership_filter check: you can always join your own team's projects, so if you're looking at a list of them... #}
- +
{% endif %} {% endif %} @@ -147,7 +168,7 @@
No projects found.
- + {% for member in members %} - + @@ -55,23 +55,23 @@ {% empty %} - + @@ -85,8 +85,8 @@ diff --git a/projects/templates/projects/project_members_accept.html b/projects/templates/projects/project_members_accept.html index 6306200..d08aacd 100644 --- a/projects/templates/projects/project_members_accept.html +++ b/projects/templates/projects/project_members_accept.html @@ -21,8 +21,8 @@ You have been invited to join the project "{{ project.name }}" in the role of "{{ membership.get_role_display }}". Please confirm by clicking the button below. - - + + diff --git a/projects/templates/projects/project_members_invite.html b/projects/templates/projects/project_members_invite.html index 525c1a3..57968b6 100644 --- a/projects/templates/projects/project_members_invite.html +++ b/projects/templates/projects/project_members_invite.html @@ -16,7 +16,7 @@
    {% for message in messages %} {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} -
  • {{ message }}
  • +
  • {{ message }}
  • {% endfor %}
{% endif %} @@ -32,23 +32,23 @@ {% tailwind_formfield_implicit form.email %}
{# ml-1 is strictly speaking not aligned, but visually it looks better "to me"; perhaps because of all of the round elements? #} -
{{ form.role.label }}
+
{{ form.role.label }}
{{ form.role }}
{% if form.role.errors %} {% for error in form.role.errors %} -
{{ error }}
+
{{ error }}
{% endfor %} {% elif form.role.help_text %} -
{{ form.role.help_text|safe }}
+
{{ form.role.help_text|safe }}
{% endif %}
- - - Cancel + + + Cancel diff --git a/projects/templates/projects/project_messaging_service_edit.html b/projects/templates/projects/project_messaging_service_edit.html new file mode 100644 index 0000000..b181bbb --- /dev/null +++ b/projects/templates/projects/project_messaging_service_edit.html @@ -0,0 +1,44 @@ +{% extends "base.html" %} +{% load static %} +{% load tailwind_forms %} + +{% block title %}Messaging Service · {{ project.name }} · {{ site_title }}{% endblock %} + +{% block content %} + +
+ +
+
+ {% csrf_token %} + + {% if messages %} +
    + {% for message in messages %} + {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} +
  • {{ message }}
  • + {% endfor %} +
+ {% endif %} + +
+

Messaging Service | {{ project.name }}

+
+ + {% for field in form %} + {% tailwind_formfield field %} + {% endfor %} + + {% for field in config_form %} + {% tailwind_formfield field %} + {% endfor %} + + + Cancel + + + +
+
+ +{% endblock %} diff --git a/projects/templates/projects/project_new.html b/projects/templates/projects/project_new.html index a9cc0d3..bc7fcd1 100644 --- a/projects/templates/projects/project_new.html +++ b/projects/templates/projects/project_new.html @@ -22,8 +22,8 @@ {% tailwind_formfield form.visibility %} {% tailwind_formfield form.retention_max_event_count %} - - Cancel + + Cancel diff --git a/projects/templates/projects/project_sdk_setup.html b/projects/templates/projects/project_sdk_setup.html index d29065a..28288f0 100644 --- a/projects/templates/projects/project_sdk_setup.html +++ b/projects/templates/projects/project_sdk_setup.html @@ -26,30 +26,30 @@
- Not listed? It will probably work anyway! Just use the DSN above, and let us know if you run into any issues (on GitHub or Discord). + Not listed? It will probably work anyway! Just use the DSN above, and let us know if you run into any issues (on GitHub or Discord).
-
- After triggering an event on purpose, it should now appear in the list of open issues. +
+ After triggering an event on purpose, it should now appear in the list of open issues.

Further reading

-
- For more information on how to use the SDK, check the Bugsink-specific SDK recommendations. +
+ For more information on how to use the SDK, check the Bugsink-specific SDK recommendations.
- +
diff --git a/projects/templates/projects/project_sdk_setup_javascript.html b/projects/templates/projects/project_sdk_setup_javascript.html index 1b79e19..1993028 100644 --- a/projects/templates/projects/project_sdk_setup_javascript.html +++ b/projects/templates/projects/project_sdk_setup_javascript.html @@ -13,8 +13,8 @@
Connect your JavaScript application to Bugsink to start tracking errors. - Bugsink is compatible with the Sentry SDK. - Detailed instructions per framework are in the Sentry SDK Documentation. In the below we provide an overview, zoom in on the differences between Bugsink and Sentry, and provide a snippet with the correct DSN set. + Bugsink is compatible with the Sentry SDK. + Detailed instructions per framework are in the Sentry SDK Documentation. In the below we provide an overview, zoom in on the differences between Bugsink and Sentry, and provide a snippet with the correct DSN set.

Step 1: Install the SDK

@@ -67,16 +67,16 @@ Sentry.init({ throw new Error("Error Thrown on purpose to send it to Bugsink"); {% endcode %} -
- Your event should now appear in the list of open issues. +
+ Your event should now appear in the list of open issues.

Further reading

-
- For more information on how to use the SDK, check the Bugsink-specific SDK recommendations. +
+ For more information on how to use the SDK, check the Bugsink-specific SDK recommendations.
- +
diff --git a/projects/templates/projects/project_sdk_setup_php.html b/projects/templates/projects/project_sdk_setup_php.html index f56768f..dae0c34 100644 --- a/projects/templates/projects/project_sdk_setup_php.html +++ b/projects/templates/projects/project_sdk_setup_php.html @@ -13,8 +13,8 @@
Connect your PHP application to Bugsink to start tracking errors. - Bugsink is compatible with the Sentry SDK. - Note that the instructions for Laravel and Symfony are quite different from plain PHP. + Bugsink is compatible with the Sentry SDK. + Note that the instructions for Laravel and Symfony are quite different from plain PHP. In the below we provide an overview, zoom in on the differences between Bugsink and Sentry, and provide a snippet with the correct DSN set.
@@ -64,16 +64,16 @@ try { } ?>{% endcode %} -
- Your event should now appear in the list of open issues. +
+ Your event should now appear in the list of open issues.

Further reading

-
- For more information on how to use the SDK, check the Bugsink-specific SDK recommendations. +
+ For more information on how to use the SDK, check the Bugsink-specific SDK recommendations.
- +
diff --git a/projects/templates/projects/project_sdk_setup_python.html b/projects/templates/projects/project_sdk_setup_python.html index bafe8a1..931a20f 100644 --- a/projects/templates/projects/project_sdk_setup_python.html +++ b/projects/templates/projects/project_sdk_setup_python.html @@ -13,7 +13,7 @@
Connect your Python application to Bugsink to start tracking errors. - Bugsink is compatible with the Sentry SDK. A basic setup is the following: + Bugsink is compatible with the Sentry SDK. A basic setup is the following:

Step 1: Install the SDK

@@ -63,16 +63,16 @@ sentry_sdk.init( raise Exception("Raised Exception on purpose to send it to Bugsink") {% endcode %} -
- Your event should now appear in the list of open issues. +
+ Your event should now appear in the list of open issues.

Further reading

-
- For more information on how to use the SDK, check the Bugsink-specific SDK recommendations. +
+ For more information on how to use the SDK, check the Bugsink-specific SDK recommendations.
- +
diff --git a/projects/tests.py b/projects/tests.py index 38a61ce..986209a 100644 --- a/projects/tests.py +++ b/projects/tests.py @@ -1 +1,138 @@ -# from django.test import TestCase as DjangoTestCase +from django.conf import settings +from django.apps import apps +from django.contrib.auth import get_user_model + +from bugsink.test_utils import TransactionTestCase25251 as TransactionTestCase +from bugsink.utils import get_model_topography +from projects.models import Project, ProjectMembership +from events.factories import create_event +from issues.factories import get_or_create_issue +from tags.models import store_tags +from issues.models import TurningPoint, TurningPointKind +from alerts.models import MessagingServiceConfig +from releases.models import Release + +from .tasks import get_model_topography_with_project_override + +User = get_user_model() + + +class ProjectDeletionTestCase(TransactionTestCase): + + def setUp(self): + super().setUp() + self.project = Project.objects.create(name="Test Project", stored_event_count=1) # 1, in prep. 
of the below + self.issue, _ = get_or_create_issue(self.project) + self.event = create_event(self.project, issue=self.issue) + self.user = User.objects.create_user(username='test', password='test') + + TurningPoint.objects.create( + project=self.project, + issue=self.issue, triggering_event=self.event, timestamp=self.event.ingested_at, + kind=TurningPointKind.FIRST_SEEN) + + MessagingServiceConfig.objects.create(project=self.project) + ProjectMembership.objects.create(project=self.project, user=self.user) + Release.objects.create(project=self.project, version="1.0.0") + + self.event.never_evict = True + self.event.save() + + store_tags(self.event, self.issue, {"foo": "bar"}) + + def test_delete_project(self): + models = [apps.get_model(app_label=s.split('.')[0], model_name=s.split('.')[1].lower()) for s in [ + "tags.EventTag", + "tags.IssueTag", + "tags.TagValue", + "tags.TagKey", + "issues.TurningPoint", + "events.Event", + "issues.Grouping", + "alerts.MessagingServiceConfig", + "projects.ProjectMembership", + "releases.Release", + "issues.Issue", + "projects.Project", + ]] + + for model in models: + # test-the-test: make sure some instances of the models actually exist after setup + self.assertTrue(model.objects.exists(), f"Some {model.__name__} should exist") + + # assertNumQueries() is brittle and opaque. But at least the brittle part is quick to fix (a single number) and + # provides a canary for performance regressions. 
+ + # correct for bugsink/transaction.py's select_for_update for non-sqlite databases + correct_for_select_for_update = 1 if 'sqlite' not in settings.DATABASES['default']['ENGINE'] else 0 + + with self.assertNumQueries(27 + correct_for_select_for_update): + self.project.delete_deferred() + + # tests run w/ TASK_ALWAYS_EAGER, so in the below we can just check the database directly + for model in models: + self.assertFalse(model.objects.exists(), f"No {model.__name__}s should exist after issue deletion") + + def test_dependency_graphs(self): + # tests for an implementation detail of deferred deletion, namely 1 test that asserts what the actual + # model-topography is, and one test that shows how we manually override it; this is to trigger a failure when + # the topology changes (and forces us to double-check that the override is still correct). + + orig = get_model_topography() + override = get_model_topography_with_project_override() + + def walk(topo, model_name): + results = [] + for model, fk_name in topo[model_name]: + results.append((model, fk_name)) + results.extend(walk(topo, model._meta.label)) + return results + + self.assertEqual(walk(orig, 'projects.Project'), [ + (apps.get_model('projects', 'ProjectMembership'), 'project'), + (apps.get_model('releases', 'Release'), 'project'), + (apps.get_model('issues', 'Issue'), 'project'), + (apps.get_model('issues', 'Grouping'), 'issue'), + (apps.get_model('events', 'Event'), 'grouping'), + (apps.get_model('issues', 'TurningPoint'), 'triggering_event'), + (apps.get_model('tags', 'EventTag'), 'event'), + (apps.get_model('issues', 'TurningPoint'), 'issue'), + (apps.get_model('events', 'Event'), 'issue'), + (apps.get_model('issues', 'TurningPoint'), 'triggering_event'), + (apps.get_model('tags', 'EventTag'), 'event'), + (apps.get_model('tags', 'EventTag'), 'issue'), + (apps.get_model('tags', 'IssueTag'), 'issue'), + (apps.get_model('issues', 'Grouping'), 'project'), + (apps.get_model('events', 'Event'), 'grouping'), + 
(apps.get_model('issues', 'TurningPoint'), 'triggering_event'), + (apps.get_model('tags', 'EventTag'), 'event'), + (apps.get_model('issues', 'TurningPoint'), 'project'), + (apps.get_model('events', 'Event'), 'project'), + (apps.get_model('issues', 'TurningPoint'), 'triggering_event'), + (apps.get_model('tags', 'EventTag'), 'event'), + (apps.get_model('tags', 'TagKey'), 'project'), + (apps.get_model('tags', 'TagValue'), 'key'), + (apps.get_model('tags', 'EventTag'), 'value'), + (apps.get_model('tags', 'IssueTag'), 'value'), + (apps.get_model('tags', 'IssueTag'), 'key'), + (apps.get_model('tags', 'TagValue'), 'project'), + (apps.get_model('tags', 'EventTag'), 'value'), + (apps.get_model('tags', 'IssueTag'), 'value'), + (apps.get_model('tags', 'EventTag'), 'project'), + (apps.get_model('tags', 'IssueTag'), 'project'), + (apps.get_model('alerts', 'MessagingServiceConfig'), 'project'), + ]) + + self.assertEqual(walk(override, 'projects.Project'), [ + (apps.get_model('tags', 'EventTag'), 'project'), + (apps.get_model('tags', 'IssueTag'), 'project'), + (apps.get_model('tags', 'TagValue'), 'project'), + (apps.get_model('tags', 'TagKey'), 'project'), + (apps.get_model('issues', 'TurningPoint'), 'project'), + (apps.get_model('events', 'Event'), 'project'), + (apps.get_model('issues', 'Grouping'), 'project'), + (apps.get_model('alerts', 'MessagingServiceConfig'), 'project'), + (apps.get_model('projects', 'ProjectMembership'), 'project'), + (apps.get_model('releases', 'Release'), 'project'), + (apps.get_model('issues', 'Issue'), 'project') + ]) diff --git a/projects/urls.py b/projects/urls.py index 6b08424..e9677fc 100644 --- a/projects/urls.py +++ b/projects/urls.py @@ -2,7 +2,8 @@ from django.urls import path from .views import ( project_list, project_members, project_members_accept, project_member_settings, project_members_invite, - project_members_accept_new_user, project_new, project_edit, project_sdk_setup) + project_members_accept_new_user, project_new, project_edit, 
project_sdk_setup, project_alerts_setup, + project_messaging_service_add, project_messaging_service_edit) urlpatterns = [ path('', project_list, name="project_list"), @@ -21,4 +22,10 @@ urlpatterns = [ path('/sdk-setup/', project_sdk_setup, name="project_sdk_setup"), path('/sdk-setup//', project_sdk_setup, name="project_sdk_setup_platform"), + + path('/alerts/', project_alerts_setup, name="project_alerts_setup"), + path('/alerts/service/add/', project_messaging_service_add, name="project_messaging_service_add"), + path( + '/alerts/service//edit/', project_messaging_service_edit, + name="project_messaging_service_edit"), ] diff --git a/projects/views.py b/projects/views.py index 30c29ae..541aa6f 100644 --- a/projects/views.py +++ b/projects/views.py @@ -1,3 +1,4 @@ +import json from datetime import timedelta from django.shortcuts import render @@ -17,6 +18,10 @@ from teams.models import TeamMembership, Team, TeamRole from bugsink.app_settings import get_settings, CB_ANYBODY, CB_MEMBERS, CB_ADMINS from bugsink.decorators import login_exempt, atomic_for_request_method +from alerts.models import MessagingServiceConfig +from alerts.forms import MessagingServiceConfigForm +from alerts.service_backends.slack import SlackConfigForm + from .models import Project, ProjectMembership, ProjectRole, ProjectVisibility from .forms import ProjectMembershipForm, MyProjectMembershipForm, ProjectMemberInviteForm, ProjectForm from .tasks import send_project_invite_email, send_project_invite_email_new_user @@ -30,21 +35,24 @@ def project_list(request, ownership_filter=None): my_memberships = ProjectMembership.objects.filter(user=request.user) my_team_memberships = TeamMembership.objects.filter(user=request.user) - my_projects = Project.objects.filter(projectmembership__in=my_memberships).order_by('name').distinct() + my_projects = Project.objects.filter( + projectmembership__in=my_memberships, is_deleted=False).order_by('name').distinct() my_teams_projects = \ Project.objects \ - 
.filter(team__teammembership__in=my_team_memberships) \ + .filter(team__teammembership__in=my_team_memberships, is_deleted=False) \ .exclude(projectmembership__in=my_memberships) \ .order_by('name').distinct() if request.user.is_superuser: # superusers can see all project, even hidden ones other_projects = Project.objects \ + .filter(is_deleted=False) \ .exclude(projectmembership__in=my_memberships) \ .exclude(team__teammembership__in=my_team_memberships) \ .order_by('name').distinct() else: other_projects = Project.objects \ + .filter(is_deleted=False) \ .exclude(projectmembership__in=my_memberships) \ .exclude(team__teammembership__in=my_team_memberships) \ .exclude(visibility=ProjectVisibility.TEAM_MEMBERS) \ @@ -84,7 +92,8 @@ def project_list(request, ownership_filter=None): raise ValueError(f"Invalid ownership_filter: {ownership_filter}") project_list = base_qs.annotate( - open_issue_count=models.Count('issue', filter=models.Q(issue__is_resolved=False, issue__is_muted=False)), + # open_issue_count disabled, it's too expensive + # open_issue_count=models.Count('issue', filter=models.Q(issue__is_resolved=False, issue__is_muted=False)), member_count=models.Count( 'projectmembership', distinct=True, filter=models.Q(projectmembership__accepted=True)), ).select_related('team') @@ -152,16 +161,32 @@ def _check_project_admin(project, user): @atomic_for_request_method def project_edit(request, project_pk): - project = Project.objects.get(id=project_pk) + project = Project.objects.get(id=project_pk, is_deleted=False) _check_project_admin(project, request.user) if request.method == 'POST': - form = ProjectForm(request.POST, instance=project) + action = request.POST.get('action') + if action == 'delete': + # Double-check that the user is an admin or superuser + if (not request.user.is_superuser + and not ProjectMembership.objects.filter( + project=project, user=request.user, role=ProjectRole.ADMIN, accepted=True).exists() + and not TeamMembership.objects.filter( + 
team=project.team, user=request.user, role=TeamRole.ADMIN, accepted=True).exists()): + raise PermissionDenied("Only project or team admins can delete projects") + + # Delete the project + project.delete_deferred() + messages.success(request, f'Project "{project.name}" has been deleted successfully.') + return redirect('project_list') + + form = ProjectForm(request.POST, instance=project) if form.is_valid(): form.save() - return redirect('project_members', project_pk=project.id) + messages.success(request, 'Project settings updated successfully.') + return redirect('project_list') else: form = ProjectForm(instance=project) @@ -174,7 +199,7 @@ def project_edit(request, project_pk): @atomic_for_request_method def project_members(request, project_pk): - project = Project.objects.get(id=project_pk) + project = Project.objects.get(id=project_pk, is_deleted=False) _check_project_admin(project, request.user) if request.method == 'POST': @@ -209,7 +234,7 @@ def project_members_invite(request, project_pk): # NOTE: project-member invite is just that: a direct invite to a project. If you want to also/instead invite someone # to a team, you need to just do that instead. 
- project = Project.objects.get(id=project_pk) + project = Project.objects.get(id=project_pk, is_deleted=False) _check_project_admin(project, request.user) @@ -271,7 +296,7 @@ def project_member_settings(request, project_pk, user_pk): this_is_you = str(user_pk) == str(request.user.id) if not this_is_you: - _check_project_admin(Project.objects.get(id=project_pk), request.user) + _check_project_admin(Project.objects.get(id=project_pk, is_deleted=False), request.user) membership = ProjectMembership.objects.get(project=project_pk, user=user_pk) create_form = lambda data: ProjectMembershipForm(data, instance=membership) # noqa @@ -296,7 +321,7 @@ def project_member_settings(request, project_pk, user_pk): return render(request, 'projects/project_member_settings.html', { 'this_is_you': this_is_you, 'user': User.objects.get(id=user_pk), - 'project': Project.objects.get(id=project_pk), + 'project': Project.objects.get(id=project_pk, is_deleted=False), 'form': form, }) @@ -356,7 +381,7 @@ def project_members_accept(request, project_pk): # invited as user B. Security-wise this is fine, but UX-wise it could be confusing. However, I'm in the assumption # here that normal people (i.e. not me) don't have multiple accounts, so I'm not going to bother with this. 
- project = Project.objects.get(id=project_pk) + project = Project.objects.get(id=project_pk, is_deleted=False) membership = ProjectMembership.objects.get(project=project, user=request.user) if membership.accepted: @@ -381,7 +406,7 @@ def project_members_accept(request, project_pk): @atomic_for_request_method def project_sdk_setup(request, project_pk, platform=""): - project = Project.objects.get(id=project_pk) + project = Project.objects.get(id=project_pk, is_deleted=False) if not request.user.is_superuser and not ProjectMembership.objects.filter(project=project, user=request.user, accepted=True).exists(): @@ -398,3 +423,84 @@ def project_sdk_setup(request, project_pk, platform=""): "project": project, "dsn": project.dsn, }) + + +@atomic_for_request_method +def project_alerts_setup(request, project_pk): + project = Project.objects.get(id=project_pk, is_deleted=False) + _check_project_admin(project, request.user) + + if request.method == 'POST': + full_action_str = request.POST.get('action') + action, service_id = full_action_str.split(":", 1) + if action == "remove": + MessagingServiceConfig.objects.filter(project=project_pk, id=service_id).delete() + elif action == "test": + service = MessagingServiceConfig.objects.get(project=project_pk, id=service_id) + service_backend = service.get_backend() + service_backend.send_test_message() + messages.success( + request, "Test message sent; check the configured service to see if it arrived.") + + return render(request, 'projects/project_alerts_setup.html', { + 'project': project, + 'service_configs': project.service_configs.all(), + }) + + +@atomic_for_request_method +def project_messaging_service_add(request, project_pk): + project = Project.objects.get(id=project_pk, is_deleted=False) + _check_project_admin(project, request.user) + + if request.method == 'POST': + form = MessagingServiceConfigForm(project, request.POST) + config_form = SlackConfigForm(data=request.POST) + + if form.is_valid() and config_form.is_valid(): 
+ service = form.save(commit=False) + service.config = json.dumps(config_form.get_config()) + service.save() + + messages.success(request, "Messaging service added successfully.") + return redirect('project_alerts_setup', project_pk=project_pk) + + else: + form = MessagingServiceConfigForm(project) + config_form = SlackConfigForm() + + return render(request, 'projects/project_messaging_service_edit.html', { + 'project': project, + 'form': form, + 'config_form': config_form, + }) + + +@atomic_for_request_method +def project_messaging_service_edit(request, project_pk, service_pk): + project = Project.objects.get(id=project_pk, is_deleted=False) + _check_project_admin(project, request.user) + + instance = project.service_configs.get(id=service_pk) + + if request.method == 'POST': + form = MessagingServiceConfigForm(project, request.POST, instance=instance) + config_form = SlackConfigForm(data=request.POST) + + if form.is_valid() and config_form.is_valid(): + service = form.save(commit=False) + service.config = json.dumps(config_form.get_config()) + service.save() + + messages.success(request, "Messaging service updated successfully.") + return redirect('project_alerts_setup', project_pk=project_pk) + + else: + form = MessagingServiceConfigForm(project, instance=instance) + config_form = SlackConfigForm(config=json.loads(instance.config)) + + return render(request, 'projects/project_messaging_service_edit.html', { + 'project': project, + 'form': form, + 'config_form': config_form, + }) diff --git a/pyproject.toml b/pyproject.toml index 6efdb85..82cecf0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ dynamic = ["version", "dependencies"] [project.scripts] bugsink-show-version = "bugsink.scripts.show_version:main" bugsink-manage = "bugsink.scripts.manage:main" +bugsink-util = "bugsink.scripts.util:main" bugsink-create-conf = "bugsink.scripts.create_conf:main" bugsink-runsnappea = "bugsink.scripts.runsnappea:main" diff --git 
a/releases/migrations/0003_alter_release_project.py b/releases/migrations/0003_alter_release_project.py new file mode 100644 index 0000000..b2f3ed3 --- /dev/null +++ b/releases/migrations/0003_alter_release_project.py @@ -0,0 +1,21 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + # Django came up with 0014, whatever the reason, I'm sure that 0013 is at least required (as per comments there) + ("projects", "0014_alter_projectmembership_project"), + ("releases", "0002_release_releases_re_sort_ep_5c07c8_idx"), + ] + + operations = [ + migrations.AlterField( + model_name="release", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + ] diff --git a/releases/models.py b/releases/models.py index 872f49f..5d57288 100644 --- a/releases/models.py +++ b/releases/models.py @@ -44,8 +44,7 @@ class Release(models.Model): # sentry does releases per-org; we don't follow that example. our belief is basically: [1] in reality releases are # per software package and a software package is basically a bugsink project and [2] any cross-project-per-org # analysis you might do is more likely to be in the realm of "transactions", something we don't want to support. - project = models.ForeignKey( - "projects.Project", blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' + project = models.ForeignKey("projects.Project", blank=False, null=False, on_delete=models.DO_NOTHING) # full version as provided by either implicit (per-event) or explicit (some API) means, including package name # max_length matches Even.release (which is deduced from Sentry) @@ -124,6 +123,7 @@ def create_release_if_needed(project, version, event, issue=None): # triggering event anymore for our timestamp. 
TurningPoint.objects.bulk_create([TurningPoint( + project=project, issue=issue, kind=TurningPointKind.NEXT_MATERIALIZED, triggering_event=event, metadata=json.dumps({"actual_release": release.version}), timestamp=event.ingested_at) for issue in resolved_by_next_qs diff --git a/releases/tests.py b/releases/tests.py index 89bdc31..b91652e 100644 --- a/releases/tests.py +++ b/releases/tests.py @@ -1,13 +1,16 @@ from django.test import TestCase as DjangoTestCase from datetime import timedelta +from projects.models import Project from .models import Release, ordered_releases, RE_PACKAGE_VERSION class ReleaseTestCase(DjangoTestCase): def test_create_and_order(self): - r0 = Release.objects.create(version="e80f98923f7426a8087009f4c629d25a35565a6a") + project = Project.objects.create(name="Test Project") + + r0 = Release.objects.create(project=project, version="e80f98923f7426a8087009f4c629d25a35565a6a") self.assertFalse(r0.is_semver) self.assertEqual(0, r0.sort_epoch) @@ -17,6 +20,7 @@ class ReleaseTestCase(DjangoTestCase): # real usage too) # * it ensures that dates are ignored when comparing r1 and r2 (r2 has a smaller date than r1, but comes later) r1 = Release.objects.create( + project=project, version="2a678dbbbecd2978ccaa76c326a0fb2e70073582", date_released=r0.date_released + timedelta(seconds=10), ) @@ -24,17 +28,17 @@ class ReleaseTestCase(DjangoTestCase): self.assertEqual(0, r1.sort_epoch) # switch to semver, epoch 1 - r2 = Release.objects.create(version="1.0.0") + r2 = Release.objects.create(project=project, version="1.0.0") self.assertTrue(r2.is_semver) self.assertEqual(1, r2.sort_epoch) # stick with semver, but use a lower version - r3 = Release.objects.create(version="0.1.0") + r3 = Release.objects.create(project=project, version="0.1.0") self.assertTrue(r3.is_semver) self.assertEqual(1, r3.sort_epoch) # put in package name; this is basically ignored for ordering purposes - r4 = Release.objects.create(version="package@2.0.0") + r4 = 
Release.objects.create(project=project, version="package@2.0.0") self.assertTrue(r4.is_semver) self.assertEqual(ordered_releases(), [r0, r1, r3, r2, r4]) diff --git a/requirements.development.txt b/requirements.development.txt index c3fb503..de895d2 100644 --- a/requirements.development.txt +++ b/requirements.development.txt @@ -1,3 +1,2 @@ # testing/development only: -django-debug-toolbar coverage diff --git a/requirements.txt b/requirements.txt index 2db4aea..f0b2b7d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,8 @@ gunicorn==23.0.* Django==5.2.* -sentry-sdk==2.27.* +sentry-sdk==2.* django-tailwind==3.6.* -jsonschema==4.23.* +jsonschema==4.25.* semver==3.0.* django-admin-autocomplete-filter==0.7.* pygments==2.19.* diff --git a/snappea/foreman.py b/snappea/foreman.py index 3f7f6e4..6217342 100644 --- a/snappea/foreman.py +++ b/snappea/foreman.py @@ -97,6 +97,12 @@ class Foreman: logger.info(" ========= SNAPPEA =========") + if self.settings.TASK_ALWAYS_EAGER: + # Exiting _mostly_ because of a matter of intent: this combination of settings/actions makes so little sense + # that it can't really be intentional. It's also probably broken (or in any case: not tested to work) + logger.info("Startup: Can't run Foreman in TASK_ALWAYS_EAGER mode, EXIT") + sys.exit(1) + # if the PID_FILE already exists, read it to see whether snappea is already running. # this implementation is not supposed to be bullet-proof for race conditions (nor is it cross-platform)... 
it's # just a small check to prevent the regularly occurring cases: diff --git a/static/images/bugsink-logo-dark.png b/static/images/bugsink-logo-dark.png new file mode 100644 index 0000000..eaf0f78 Binary files /dev/null and b/static/images/bugsink-logo-dark.png differ diff --git a/static/js/entity_edit.js b/static/js/entity_edit.js new file mode 100644 index 0000000..3613372 --- /dev/null +++ b/static/js/entity_edit.js @@ -0,0 +1,27 @@ +"use strict"; + +/** + * Initializes delete functionality for entity edit pages + */ +function initializeDeleteModal() { + const modal = document.getElementById('deleteModal'); + const deleteBtn = document.getElementById('deleteButton'); + const cancelBtn = document.getElementById('cancelDelete'); + + if (!modal || !deleteBtn || !cancelBtn) { + console.error('One or more required elements not found'); + return; + } + + deleteBtn.addEventListener('click', () => { + modal.classList.remove('hidden'); + }); + + cancelBtn.addEventListener('click', () => { + modal.classList.add('hidden'); + }); +} + +document.addEventListener('DOMContentLoaded', function() { + initializeDeleteModal(); +}); diff --git a/static/js/user_list.js b/static/js/user_list.js new file mode 100644 index 0000000..deea734 --- /dev/null +++ b/static/js/user_list.js @@ -0,0 +1,32 @@ +"use strict"; + +/** + * Initializes delete functionality for user list page + */ +function initializeDeleteModal() { + const modal = document.getElementById('deleteModal'); + const deleteButtons = document.querySelectorAll('.delete-button'); + const cancelBtn = document.getElementById('cancelDelete'); + const deleteActionInput = document.getElementById('deleteAction'); + + if (!modal || deleteButtons.length === 0 || !cancelBtn || !deleteActionInput) { + console.error('One or more required elements not found'); + return; + } + + deleteButtons.forEach(button => { + button.addEventListener('click', () => { + const userId = button.getAttribute('data-user-id'); + deleteActionInput.value = 
'delete:' + userId; + modal.classList.remove('hidden'); + }); + }); + + cancelBtn.addEventListener('click', () => { + modal.classList.add('hidden'); + }); +} + +document.addEventListener('DOMContentLoaded', function() { + initializeDeleteModal(); +}); diff --git a/tags/management/commands/vacuum_eventless_issuetags.py b/tags/management/commands/vacuum_eventless_issuetags.py new file mode 100644 index 0000000..b835c48 --- /dev/null +++ b/tags/management/commands/vacuum_eventless_issuetags.py @@ -0,0 +1,10 @@ +from django.core.management.base import BaseCommand +from tags.tasks import vacuum_eventless_issuetags + + +class Command(BaseCommand): + help = "Kick off tag cleanup by vacuuming IssueTag objects for which there is no EventTag equivalent" + + def handle(self, *args, **options): + vacuum_eventless_issuetags.delay() + self.stdout.write("Called vacuum_eventless_issuetags.delay(); the task will run in the background (snapea).") diff --git a/tags/management/commands/vacuum_tags.py b/tags/management/commands/vacuum_tags.py new file mode 100644 index 0000000..d5b592b --- /dev/null +++ b/tags/management/commands/vacuum_tags.py @@ -0,0 +1,10 @@ +from django.core.management.base import BaseCommand +from tags.tasks import vacuum_tagvalues + + +class Command(BaseCommand): + help = "Kick off tag cleanup by vacuuming orphaned TagValue and TagKey entries." 
+ + def handle(self, *args, **options): + vacuum_tagvalues.delay() + self.stdout.write("Called vacuum_tagvalues.delay(); the task will run in the background (snapea).") diff --git a/tags/migrations/0002_no_cascade.py b/tags/migrations/0002_no_cascade.py new file mode 100644 index 0000000..26af011 --- /dev/null +++ b/tags/migrations/0002_no_cascade.py @@ -0,0 +1,40 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("tags", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="eventtag", + name="value", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="tags.tagvalue" + ), + ), + migrations.AlterField( + model_name="issuetag", + name="key", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="tags.tagkey" + ), + ), + migrations.AlterField( + model_name="issuetag", + name="value", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="tags.tagvalue" + ), + ), + migrations.AlterField( + model_name="tagvalue", + name="key", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="tags.tagkey" + ), + ), + ] diff --git a/tags/migrations/0003_remove_objects_with_null_issue.py b/tags/migrations/0003_remove_objects_with_null_issue.py new file mode 100644 index 0000000..a28d427 --- /dev/null +++ b/tags/migrations/0003_remove_objects_with_null_issue.py @@ -0,0 +1,26 @@ +from django.db import migrations + + +def remove_objects_with_null_issue(apps, schema_editor): + # Up until now, we have various models w/ .issue=FK(null=True, on_delete=models.SET_NULL) + # Although it is "not expected" in the interface, issue-deletion would have led to those + # objects with a null issue. We're about to change that to .issue=FK(null=False, ...) which + # would crash if we don't remove those objects first. 
Object-removal is "fine" though, because + # as per the meaning of the SET_NULL, these objects were "dangling" anyway. + + EventTag = apps.get_model("tags", "EventTag") + IssueTag = apps.get_model("tags", "IssueTag") + + EventTag.objects.filter(issue__isnull=True).delete() + IssueTag.objects.filter(issue__isnull=True).delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("tags", "0002_no_cascade"), + ] + + operations = [ + migrations.RunPython(remove_objects_with_null_issue, reverse_code=migrations.RunPython.noop), + ] diff --git a/tags/migrations/0004_alter_do_nothing.py b/tags/migrations/0004_alter_do_nothing.py new file mode 100644 index 0000000..f48e77e --- /dev/null +++ b/tags/migrations/0004_alter_do_nothing.py @@ -0,0 +1,31 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("issues", "0021_alter_do_nothing"), + ("tags", "0003_remove_objects_with_null_issue"), + ] + + operations = [ + migrations.AlterField( + model_name="eventtag", + name="issue", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, + related_name="event_tags", + to="issues.issue", + ), + ), + migrations.AlterField( + model_name="issuetag", + name="issue", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, + related_name="tags", + to="issues.issue", + ), + ), + ] diff --git a/tags/migrations/0005_alter_eventtag_project_alter_issuetag_project_and_more.py b/tags/migrations/0005_alter_eventtag_project_alter_issuetag_project_and_more.py new file mode 100644 index 0000000..d04aba4 --- /dev/null +++ b/tags/migrations/0005_alter_eventtag_project_alter_issuetag_project_and_more.py @@ -0,0 +1,42 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + # Django came up with 0014, whatever the reason, I'm sure that 0013 is at least required (as per 
comments there) + ("projects", "0014_alter_projectmembership_project"), + ("tags", "0004_alter_do_nothing"), + ] + + operations = [ + migrations.AlterField( + model_name="eventtag", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + migrations.AlterField( + model_name="issuetag", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + migrations.AlterField( + model_name="tagkey", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + migrations.AlterField( + model_name="tagvalue", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, to="projects.project" + ), + ), + ] diff --git a/tags/models.py b/tags/models.py index a2c4db7..37a9754 100644 --- a/tags/models.py +++ b/tags/models.py @@ -23,6 +23,7 @@ from django.db.models import Q, F from projects.models import Project from tags.utils import deduce_tags, is_mostly_unique +from bugsink.moreiterutils import batched # Notes on .project as it lives on TagValue, IssueTag and EventTag: # In all cases, project could be derived through other means: for TagValue it's implied by TagKey.project; for IssueTag @@ -36,7 +37,7 @@ from tags.utils import deduce_tags, is_mostly_unique class TagKey(models.Model): - project = models.ForeignKey(Project, blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' + project = models.ForeignKey(Project, blank=False, null=False, on_delete=models.DO_NOTHING) key = models.CharField(max_length=32, blank=False, null=False) # Tags that are "mostly unique" are not displayed in the issue tag counts, because the distribution of values is @@ -52,10 +53,13 @@ class TagKey(models.Model): # the obvious constraint, which doubles as a lookup index for store_tags and search. 
unique_together = ('project', 'key') + def __str__(self): + return f"{self.key}" + class TagValue(models.Model): - project = models.ForeignKey(Project, blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' - key = models.ForeignKey(TagKey, blank=False, null=False, on_delete=models.CASCADE) + project = models.ForeignKey(Project, blank=False, null=False, on_delete=models.DO_NOTHING) + key = models.ForeignKey(TagKey, blank=False, null=False, on_delete=models.DO_NOTHING) value = models.CharField(max_length=200, blank=False, null=False, db_index=True) class Meta: @@ -69,22 +73,21 @@ class TagValue(models.Model): class EventTag(models.Model): - project = models.ForeignKey(Project, blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' + project = models.ForeignKey(Project, blank=False, null=False, on_delete=models.DO_NOTHING) # value already implies key in our current setup. - value = models.ForeignKey(TagValue, blank=False, null=False, on_delete=models.CASCADE) + value = models.ForeignKey(TagValue, blank=False, null=False, on_delete=models.DO_NOTHING) - # issue is a denormalization that allows for a single-table-index for efficient search. - # SET_NULL: Issue deletion is not actually possible yet, so this is moot (for now). + # issue is a denormalization that allows for a single-table-index for efficient search/vacuum_eventless_issuetags. issue = models.ForeignKey( - 'issues.Issue', blank=False, null=True, on_delete=models.SET_NULL, related_name="event_tags") + 'issues.Issue', blank=False, null=False, on_delete=models.DO_NOTHING, related_name="event_tags") # digest_order is a denormalization that allows for a single-table-index for efficient search. digest_order = models.PositiveIntegerField(blank=False, null=False) # DO_NOTHING: we manually implement CASCADE (i.e. when an event is cleaned up, clean up associated tags) in the # eviction process. Why CASCADE? 
[1] you'll have to do it "at some point", so you might as well do it right when - # evicting (async in the 'most resilient setup' anyway, b/c that happens when ingesting) [2] the order of magnitude + # evicting (async in the 'most resilient setup' anyway, b/c that happens when digesting) [2] the order of magnitude # is "tens of deletions per event", so that's no reason to postpone. "Why manually" is explained in events/retention event = models.ForeignKey('events.Event', blank=False, null=False, on_delete=models.DO_NOTHING, related_name='tags') @@ -98,6 +101,7 @@ class EventTag(models.Model): # for search, which filters a list of EventTag down to those matching certain values and a given issue. # (both orderings of the (value, issue) would work for the current search query; if we ever introduce # "search across issues" the below would work for that too (but the reverse wouldn't)) + # also used by vacuum_eventless_issuetags (ORed Q(issue_id, value_id)) models.Index(fields=['value', 'issue', 'digest_order']), ] @@ -107,16 +111,15 @@ class EventTag(models.Model): class IssueTag(models.Model): - project = models.ForeignKey(Project, blank=False, null=True, on_delete=models.SET_NULL) # SET_NULL: cleanup 'later' + project = models.ForeignKey(Project, blank=False, null=False, on_delete=models.DO_NOTHING) # denormalization that allows for a single-table-index for efficient search. - key = models.ForeignKey(TagKey, blank=False, null=False, on_delete=models.CASCADE) + key = models.ForeignKey(TagKey, blank=False, null=False, on_delete=models.DO_NOTHING) # value already implies key in our current setup. - value = models.ForeignKey(TagValue, blank=False, null=False, on_delete=models.CASCADE) + value = models.ForeignKey(TagValue, blank=False, null=False, on_delete=models.DO_NOTHING) - # SET_NULL: Issue deletion is not actually possible yet, so this is moot (for now). 
def store_tags(event, issue, tags):
    """Store the given tags for an event/issue pair, handing the work to _store_tags() in chunks.

    A single-pass implementation was observed to crash on large tag dicts (e.g. in sqlite:
    "Expression tree is too large (maximum depth 1000)"), hence the batching.

    Chunk-size rationale: powers of 2 were tried locally on sqlite; 256 was the largest
    non-failing one, and a factor-4 safety margin gives 64. Realistically, 64 tags _per
    event_ "should be enough for anyone".
    """
    chunk_size = 64
    for chunk in batched(tags.items(), chunk_size):
        _store_tags(event, issue, dict(chunk))
# NOTE: prune_tagvalues belongs in tags/models.py; the vacuum_* tasks below belong in tags/tasks.py.
# Together they form the incremental tag-cleanup toolbox.

def prune_tagvalues(ids_to_check):
    """Delete those TagValue rows among `ids_to_check` that no IssueTag references anymore.

    A used-in-EventTag check is not needed, because non-existence of an IssueTag always implies
    non-existence of an EventTag, since [1] EventTag creation implies IssueTag creation and
    [2] in the cleanup code EventTag is deleted first.
    """
    if not ids_to_check:
        return  # short-circuit: don't issue queries for the (easy to hit from loops) empty case

    used_in_issuetag = set(
        IssueTag.objects.filter(value_id__in=ids_to_check).values_list('value_id', flat=True)
    )
    unused = [pk for pk in ids_to_check if pk not in used_in_issuetag]

    if unused:
        TagValue.objects.filter(id__in=unused).delete()

    # The principled approach would be to clean up TagKeys as well at this point, but in practice there will be
    # orders of magnitude fewer TagKey objects, and they are much less likely to become dangling, so the GC-like
    # algo of "just vacuuming once in a while" is a much better fit for that.


BATCH_SIZE = 10_000


@shared_task
def vacuum_tagvalues(min_id=0):
    """Clean up unused TagValue rows in batches; re-schedules itself for the next batch.

    A TagValue can become unused when the IssueTag/EventTag entries referencing it are deleted;
    cleanup is avoided at that moment (to avoid repeated checks), so this task reconciles
    eventually, to avoid bloating the database.

    Impl. notes:

    * select ids_to_check first, and then check which of those are used in EventTag or IssueTag. This avoids doing
      TagValue.exclude(some_usage_pattern), which may be slow / for which reasoning about performance is hard.
    * batched to allow for incremental cleanup, using a defer-with-min-id pattern to implement the batching.

    Known limitation: with _many_ TagValues (whether used or not) and when running in EAGER mode, this thing
    overflows the stack, because the "delayed recursion" is then not actually delayed. Answer: for "big things"
    (basically: serious setups) set up snappea.
    """
    with immediate_atomic():
        # candidate TagValue ids above min_id
        ids_to_check = list(
            TagValue.objects
            .filter(id__gt=min_id)
            .order_by('id')
            .values_list('id', flat=True)[:BATCH_SIZE]
        )

        if not ids_to_check:
            # done with TagValues; continue with TagKey cleanup
            delay_on_commit(vacuum_tagkeys, 0)
            return

        # determine which of ids_to_check are still referenced
        used_in_event = set(
            EventTag.objects.filter(value_id__in=ids_to_check).values_list('value_id', flat=True)
        )
        used_in_issue = set(
            IssueTag.objects.filter(value_id__in=ids_to_check).values_list('value_id', flat=True)
        )
        unused = [pk for pk in ids_to_check if pk not in used_in_event and pk not in used_in_issue]

        if unused:
            TagValue.objects.filter(id__in=unused).delete()

        # defer the next batch
        vacuum_tagvalues.delay(ids_to_check[-1])


@shared_task
def vacuum_tagkeys(min_id=0):
    """Clean up TagKey rows that no TagValue references, in batches; re-schedules itself for the next batch."""
    with immediate_atomic():
        # candidate TagKey ids above min_id
        ids_to_check = list(
            TagKey.objects
            .filter(id__gt=min_id)
            .order_by('id')
            .values_list('id', flat=True)[:BATCH_SIZE]
        )

        if not ids_to_check:
            return  # done

        # determine which of ids_to_check are still referenced
        used = set(
            TagValue.objects.filter(key_id__in=ids_to_check).values_list('key_id', flat=True)
        )
        unused = [pk for pk in ids_to_check if pk not in used]

        if unused:
            TagKey.objects.filter(id__in=unused).delete()

        # defer the next batch
        vacuum_tagkeys.delay(ids_to_check[-1])


@shared_task
def vacuum_eventless_issuetags(min_id=0):
    """Remove IssueTag entries no longer backed by any EventTag on the same Issue; delayed and resumable.

    Under normal operation, we evict Events and their EventTags. However, we do not track how many EventTags back
    an IssueTag, so we have historically chosen not to clean up IssueTags during Event deletion (see #134).

    This has the upside of being cheap and preserving all known values for an Issue (e.g. all environments/releases
    ever seen). But it comes with downsides:

    * stale IssueTags remain for deleted Events
    * search-by-tag may return Issues without matching Events
    * TagValues will not be vacuumed as long as they're still referenced by an IssueTag

    This task aims to reconcile that.
    """
    # Empirically determined: at this size, each batch is approx .3s (local dev, sqlite); we're "nearer to the edge
    # of the object-graph" than for e.g. event-retention, so we can both afford bigger batches (less cascading
    # effects per item) as well as need bigger batches (more expected items in a fanning-out object-graph).
    # (lower-cased w.r.t. the original to avoid shadowing the module-level BATCH_SIZE.)
    batch_size = 2048

    # Community wisdom (says ChatGPT, w/o source): queries with dozens of OR clauses can slow down significantly.
    # 64 is a safe batch size that avoids planner overhead and keeps things fast across databases.
    inner_batch_size = 64

    with immediate_atomic():
        issue_tag_infos = list(
            IssueTag.objects
            .filter(id__gt=min_id)
            .order_by('id')
            .values('id', 'issue_id', 'value_id')[:batch_size]
        )

        for infos_chunk in batched(issue_tag_infos, inner_batch_size):
            matching_eventtags = _or_join([
                Q(issue_id=info['issue_id'], value_id=info['value_id']) for info in infos_chunk
            ])

            if matching_eventtags:
                in_use_issue_value_pairs = set(
                    EventTag.objects
                    .filter(matching_eventtags)
                    .values_list('issue_id', 'value_id')
                )
            else:
                in_use_issue_value_pairs = set()

            stale_issuetags = [
                info
                for info in infos_chunk
                if (info['issue_id'], info['value_id']) not in in_use_issue_value_pairs
            ]

            if stale_issuetags:
                IssueTag.objects.filter(id__in=[info['id'] for info in stale_issuetags]).delete()

                # inline pruning of TagValue (as opposed to using "vacuum later") following the same reasoning as
                # in prune_orphans.
                prune_tagvalues([info['value_id'] for info in stale_issuetags])

        if not issue_tag_infos:
            # We don't have a continuation for the "done" case. One could argue: kick off vacuum_tagvalues there,
            # but I'd rather build the toolbox of cleanup tasks first and see how they might fit together later,
            # because the downside of triggering the next vacuum command would be that "more things might happen
            # too soon".
            return

        vacuum_eventless_issuetags.delay(issue_tag_infos[-1]['id'])
in sqlite: Expression tree is too + # large (maximum depth 1000)); if the below doesn't crash, we've got a batched implementation that works + event = create_event(self.project, issue=self.issue) + store_tags(event, self.issue, {f"key-{i}": f"value-{i}" for i in range(512)}) + self.assertEqual(IssueTag.objects.filter(issue=self.issue).count(), 512) + class SearchParserTestCase(RegularTestCase): @@ -179,12 +197,12 @@ class SearchTestCase(DjangoTestCase): # scenario (in which there would be some relation between the tags of issues and events), but it allows us to # test event_search more easily (if each event is tied to a different issue, searching for tags is meaningless, # since you always search within the context of an issue). - self.global_issue = Issue.objects.create(project=self.project, **denormalized_issue_fields()) + self.global_issue, _ = get_or_create_issue(project=self.project, event_data=create_event_data("global")) - issue_with_tags_and_text = Issue.objects.create(project=self.project, **denormalized_issue_fields()) + issue_with_tags_and_text, _ = get_or_create_issue(project=self.project, event_data=create_event_data("tag_txt")) event_with_tags_and_text = create_event(self.project, issue=self.global_issue) - issue_with_tags_no_text = Issue.objects.create(project=self.project, **denormalized_issue_fields()) + issue_with_tags_no_text, _ = get_or_create_issue(project=self.project, event_data=create_event_data("no_text")) event_with_tags_no_text = create_event(self.project, issue=self.global_issue) store_tags(event_with_tags_and_text, issue_with_tags_and_text, {f"k-{i}": f"v-{i}" for i in range(5)}) @@ -192,7 +210,7 @@ class SearchTestCase(DjangoTestCase): # fix the EventTag objects' issue, which is broken per the non-real-world setup (see above) EventTag.objects.all().update(issue=self.global_issue) - issue_without_tags = Issue.objects.create(project=self.project, **denormalized_issue_fields()) + issue_without_tags, _ = 
class VacuumEventlessIssueTagsTestCase(TransactionTestCase):
    # NOTE: this test depends on EAGER mode twice: in the setup (delete_deferred must trigger its cascading
    # deletes synchronously) and for the thing under test itself (vacuum_eventless_issuetags).

    def setUp(self):
        self.project = Project.objects.create(name="T")
        self.issue, _ = get_or_create_issue(self.project)

    def test_no_eventtags_means_vacuum(self):
        evt = create_event(self.project, issue=self.issue)
        store_tags(evt, self.issue, {"foo": "bar"})
        evt.delete_deferred()

        self.assertEqual(1, IssueTag.objects.count())
        vacuum_eventless_issuetags()
        # the EventTag was deleted above, so the IssueTag must be gone after vacuuming
        self.assertEqual(0, IssueTag.objects.count())

    def test_one_eventtag_preserves_issuetag(self):
        evt = create_event(self.project, issue=self.issue)
        store_tags(evt, self.issue, {"foo": "bar"})

        self.assertEqual(1, IssueTag.objects.count())
        vacuum_eventless_issuetags()
        # no EventTag was deleted, so the IssueTag must survive the vacuum
        self.assertEqual(1, IssueTag.objects.count())

    def test_other_event_same_tag_same_issue_preserves(self):
        first = create_event(self.project, issue=self.issue)
        second = create_event(self.project, issue=self.issue)
        store_tags(first, self.issue, {"foo": "bar"})
        store_tags(second, self.issue, {"foo": "bar"})
        first.delete_deferred()

        self.assertEqual(1, IssueTag.objects.count())
        vacuum_eventless_issuetags()
        # first's EventTag is gone, but second still carries the same tag on this Issue: preserved
        self.assertEqual(1, IssueTag.objects.count())

    def test_other_event_same_tag_other_issue_does_not_preserve(self):
        evt = create_event(self.project, issue=self.issue)
        store_tags(evt, self.issue, {"foo": "bar"})

        unrelated_issue, _ = get_or_create_issue(self.project, event_data=create_event_data("other_issue"))
        unrelated_event = create_event(self.project, issue=unrelated_issue)
        store_tags(unrelated_event, unrelated_issue, {"foo": "bar"})

        evt.delete_deferred()

        self.assertEqual(1, IssueTag.objects.filter(issue=self.issue).count())
        vacuum_eventless_issuetags()
        # the same tag living on a *different* issue does not preserve this issue's IssueTag
        self.assertEqual(0, IssueTag.objects.filter(issue=self.issue).count())

    def test_many_tags_spanning_chunks(self):
        how_many = 2048 + 1  # one more than the task's batch size, to force a second chunk
        evt = create_event(self.project, issue=self.issue)
        store_tags(evt, self.issue, {f"key-{i}": f"value-{i}" for i in range(how_many)})

        # sanity-check the setup: every issue tag was created
        self.assertEqual(how_many, IssueTag.objects.filter(issue=self.issue).count())

        evt.delete_deferred()
        vacuum_eventless_issuetags()

        # after vacuuming, none remain
        self.assertEqual(0, IssueTag.objects.filter(issue=self.issue).count())

    def test_tagvalue_is_pruned(self):
        evt = create_event(self.project, issue=self.issue)
        store_tags(evt, self.issue, {"foo": "bar"})
        evt.delete_deferred()

        vacuum_eventless_issuetags()
        self.assertEqual(0, TagValue.objects.all().count())
{% if can_create %} {% endif %}
{# top, RHS (buttons) #} @@ -26,14 +26,14 @@
-
+
{% comment %}
- +
{% endcomment %}
@@ -46,16 +46,16 @@
{{ project.name }}
{{ member.user.email }} {# "best name" perhaps later? #} {% if not member.accepted %} - Invitation pending + Invitation pending {% elif member.is_admin %} {# NOTE: we intentionally hide admin-ness for non-accepted users #} - Admin + Admin {% endif %}
{% if not member.accepted %} - + {% endif %} {% if request.user == member.user %} - + {% else %} {# NOTE: in our setup request_user_is_admin is implied because only admins may view the membership page #} - + {% endif %} -
+
{# Note: this is already somewhat exceptional, because the usually you'll at least see yourself here (unless you're a superuser and a project has become memberless) #} - No members yet. Invite someone. + No members yet. Invite someone.
{% for team in team_list %} - + @@ -63,9 +63,9 @@ @@ -73,7 +73,7 @@ {# strictly: if not app_settings.SINGLE_USER; in practice it won't matter much, because in SINGLE_USER mode you won't end up in a team overview anyway #} - {% empty %} - + diff --git a/teams/templates/teams/team_member_settings.html b/teams/templates/teams/team_member_settings.html index e6c0e96..dd4a187 100644 --- a/teams/templates/teams/team_member_settings.html +++ b/teams/templates/teams/team_member_settings.html @@ -17,7 +17,7 @@
    {% for message in messages %} {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} -
  • {{ message }}
  • +
  • {{ message }}
  • {% endfor %}
{% endif %} @@ -37,11 +37,11 @@ {% tailwind_formfield form.role %} {% tailwind_formfield form.send_email_alerts %} - + {% if this_is_you %} - Cancel {# not quite perfect, because "you" can also click on yourself in the member list #} + Cancel {# not quite perfect, because "you" can also click on yourself in the member list #} {% else %} - Cancel + Cancel {% endif %} diff --git a/teams/templates/teams/team_members.html b/teams/templates/teams/team_members.html index 1d0f982..ac69725 100644 --- a/teams/templates/teams/team_members.html +++ b/teams/templates/teams/team_members.html @@ -15,7 +15,7 @@
    {% for message in messages %} {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} -
  • {{ message }}
  • +
  • {{ message }}
  • {% endfor %}
{% endif %} @@ -24,8 +24,8 @@

Team Members

+ Invite Member +
@@ -35,19 +35,19 @@
-
+
{{ team.name }}
{{ team.project_count }} projects | {{ team.member_count }} members {% if team.member %} - | my settings + | my settings {% endif %}
{% if team.member %} {% if not team.member.accepted %} - You're invited! + You're invited! {% elif team.member.is_admin %} {# NOTE: we intentionally hide admin-ness for non-accepted users #} - Admin + Admin {% endif %} {% endif %} {% if team.member.is_admin or request.user.is_superuser %} - + {% if team.member %} {% if not team.member.accepted %} + Invitation + {% else %}
- -
+ + {% endif %} {% else %} {% if team.is_joinable or request.user.is_superuser %}
- +
{% endif %} {% endif %} @@ -118,7 +118,7 @@
No teams found.
- + {% for member in members %} - + @@ -55,23 +55,23 @@ {% empty %} - + @@ -85,7 +85,7 @@ diff --git a/teams/templates/teams/team_members_accept.html b/teams/templates/teams/team_members_accept.html index 0d22ed7..8fb965f 100644 --- a/teams/templates/teams/team_members_accept.html +++ b/teams/templates/teams/team_members_accept.html @@ -21,8 +21,8 @@ You have been invited to join the team "{{ team.name }}" in the role of "{{ membership.get_role_display }}". Please confirm by clicking the button below. - - + + diff --git a/teams/templates/teams/team_members_invite.html b/teams/templates/teams/team_members_invite.html index a83f8ac..c72bbb7 100644 --- a/teams/templates/teams/team_members_invite.html +++ b/teams/templates/teams/team_members_invite.html @@ -18,7 +18,7 @@
    {% for message in messages %} {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} -
  • {{ message }}
  • +
  • {{ message }}
  • {% endfor %}
{% endif %} @@ -34,23 +34,23 @@ {% tailwind_formfield_implicit form.email %}
{# ml-1 is strictly speaking not aligned, but visually it looks better "to me"; perhaps because of all of the round elements? #} -
{{ form.role.label }}
+
{{ form.role.label }}
{{ form.role }}
{% if form.role.errors %} {% for error in form.role.errors %} -
{{ error }}
+
{{ error }}
{% endfor %} {% elif form.role.help_text %} -
{{ form.role.help_text|safe }}
+
{{ form.role.help_text|safe }}
{% endif %}
- - - Cancel + + + Cancel diff --git a/teams/templates/teams/team_new.html b/teams/templates/teams/team_new.html index c6ac517..3be81cc 100644 --- a/teams/templates/teams/team_new.html +++ b/teams/templates/teams/team_new.html @@ -20,8 +20,8 @@ {% tailwind_formfield form.name %} {% tailwind_formfield form.visibility %} - - Cancel + + Cancel diff --git a/teams/views.py b/teams/views.py index fdf0b33..19e5d4a 100644 --- a/teams/views.py +++ b/teams/views.py @@ -122,8 +122,22 @@ def team_edit(request, team_pk): raise PermissionDenied("You are not an admin of this team") if request.method == 'POST': - form = TeamForm(request.POST, instance=team) + action = request.POST.get('action') + if action == 'delete': + # Double-check that the user is an admin or superuser + if not (TeamMembership.objects.filter(team=team, user=request.user, role=TeamRole.ADMIN, accepted=True).exists() or + request.user.is_superuser): + raise PermissionDenied("Only team admins can delete teams") + + # Delete all associated projects first + team.project_set.all().delete() + # Delete the team itself + team.delete() + messages.success(request, f'Team "{team.name}" has been deleted successfully.') + return redirect('team_list') + + form = TeamForm(request.POST, instance=team) if form.is_valid(): form.save() return redirect('team_members', team_pk=team.id) diff --git a/templates/500.html b/templates/500.html index 4190b48..e9ad3a3 100644 --- a/templates/500.html +++ b/templates/500.html @@ -14,7 +14,7 @@ You will find more information in: diff --git a/templates/bugsink/counts.html b/templates/bugsink/counts.html index 9f6a399..ac41c09 100644 --- a/templates/bugsink/counts.html +++ b/templates/bugsink/counts.html @@ -18,11 +18,11 @@
{% for key, annotated_count in model_counts|items %} -
-
{{ key }}
-
{{ annotated_count.count|intcomma }}
-
{% if annotated_count.timestamp %}cached {{ annotated_count.timestamp|date:"G:i T" }}{% else %} {% endif %}
-
 
+
+
{{ key }}
+
{{ annotated_count.count|intcomma }}
+
{% if annotated_count.timestamp %}cached {{ annotated_count.timestamp|date:"G:i T" }}{% else %} {% endif %}
+
 
{% endfor %} diff --git a/templates/bugsink/csrf_debug.html b/templates/bugsink/csrf_debug.html index 9a3cbdc..feaaa13 100644 --- a/templates/bugsink/csrf_debug.html +++ b/templates/bugsink/csrf_debug.html @@ -15,9 +15,9 @@

Relevant settings

{% for key, value in relevant_settings|items %} -
-
{{ key }}
-
{{ value }}
+
+
{{ key }}
+
{{ value }}
{% endfor %} @@ -25,32 +25,32 @@

POST Data

{% csrf_token %} - + {% else %} -

META

+

Request Headers (META)

{% for key, value in META|items %} -
-
{{ key }}
-
{{ value }}
+
+
{{ key }}
+
{{ value }}
{% endfor %}

Middleware.process_view

{% for key, value in process_view|items %} -
-
{{ key }}
-
{{ value }}
+
+
{{ key }}
+
{{ value }}
{% endfor %} {% if origin_verified_steps %}

_origin_verified: steps

{% for key, value in origin_verified_steps|items %} -
-
{{ key }}
-
{{ value }}
+
+
{{ key }}
+
{{ value }}
{% endfor %} {% endif %} @@ -58,18 +58,18 @@ {% if check_referer_steps %}

_check_referer: steps

{% for key, value in check_referer_steps|items %} -
-
{{ key }}
-
{{ value }}
+
+
{{ key }}
+
{{ value }}
{% endfor %} {% endif %} - +

POST data

{% for key, value in POST|items %} -
-
{{ key }}
-
{{ value }}
+
+
{{ key }}
+
{{ value }}
{% endfor %} @@ -80,7 +80,7 @@
To try again with the least risk of confusion, reload this page without reposting it first, and then POST again using the button.
- + {% endif %} diff --git a/templates/bugsink/login.html b/templates/bugsink/login.html index a137aff..c31a4e0 100644 --- a/templates/bugsink/login.html +++ b/templates/bugsink/login.html @@ -5,16 +5,16 @@ {% block content %} -
{# the cyan background #} -
{# the centered box #} -
{# the logo #} - Bugsink logo +
{# the cyan background #} +
{# the centered box #} +
{# the logo #} + Bugsink logo
{% if form.errors %} -
Your username and password didn't match. Please try again.
+
Your username and password didn't match. Please try again.
{% elif next %} {% if user.is_authenticated %} @@ -30,13 +30,13 @@ - +
- +
@@ -44,8 +44,8 @@
- Forgot password? - {% if registration_enabled %}Create an account{% endif %} + Forgot password? + {% if registration_enabled %}Create an account{% endif %}
diff --git a/templates/bugsink/settings.html b/templates/bugsink/settings.html index 678cf4a..ecfba1d 100644 --- a/templates/bugsink/settings.html +++ b/templates/bugsink/settings.html @@ -17,40 +17,40 @@
{% for key, value in bugsink_settings|items %} -
-
{{ key }}
-
{{ value }}
+
+
{{ key }}
+
{{ value }}
{% endfor %}

Snappea

- +
{% for key, value in snappea_settings|items %} -
-
{{ key }}
-
{{ value }}
+
+
{{ key }}
+
{{ value }}
{% endfor %}

Misc

- +
{% for key, value in misc_settings|items %} -
-
{{ key }}
-
{{ value }}
+
+
{{ key }}
+
{{ value }}
{% endfor %}

System info

- +
-
+
Version
{{ version }}
diff --git a/templates/robots.txt b/templates/robots.txt new file mode 100644 index 0000000..1f53798 --- /dev/null +++ b/templates/robots.txt @@ -0,0 +1,2 @@ +User-agent: * +Disallow: / diff --git a/templates/signup.html b/templates/signup.html index 0f50756..ccf12a7 100644 --- a/templates/signup.html +++ b/templates/signup.html @@ -6,10 +6,10 @@ {% block content %} -
{# the cyan background #} -
{# the centered box #} -
{# the logo #} - Bugsink logo +
{# the cyan background #} +
{# the centered box #} +
{# the logo #} + Bugsink logo
diff --git a/theme/static/css/dist/styles.css b/theme/static/css/dist/styles.css index 1e62467..16ef526 100644 --- a/theme/static/css/dist/styles.css +++ b/theme/static/css/dist/styles.css @@ -1 +1 @@ -*,:after,:before{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }::backdrop{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: 
;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }/*! tailwindcss v3.4.13 | MIT License | https://tailwindcss.com*/*,:after,:before{box-sizing:border-box;border:0 solid #e5e7eb}:after,:before{--tw-content:""}:host,html{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:IBM Plex Sans,ui-sans-serif,system-ui,sans-serif,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color Emoji;font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:IBM Plex Mono,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier 
New,monospace;font-feature-settings:normal;font-variation-settings:normal;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}fieldset{margin:0}fieldset,legend{padding:0}menu,ol,ul{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}[role=button],button{cursor:pointer}:disabled{cursor:default}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]{display:none}[multiple],[type=date],[type=datetime-local],[type=email],[type=month],[type=number],[type=password],[type=search],[type=tel],[type=text],[type=time],[type=url],[type=week],input:where(:not([type])),select,textarea{-webkit-appearance:none;-moz-appearance:none;appearance:none;background-color:#fff;border-color:#6b7280;border-width:1px;border-radius:0;padding:.5rem 
.75rem;font-size:1rem;line-height:1.5rem;--tw-shadow:0 0 #0000}[multiple]:focus,[type=date]:focus,[type=datetime-local]:focus,[type=email]:focus,[type=month]:focus,[type=number]:focus,[type=password]:focus,[type=search]:focus,[type=tel]:focus,[type=text]:focus,[type=time]:focus,[type=url]:focus,[type=week]:focus,input:where(:not([type])):focus,select:focus,textarea:focus{outline:2px solid transparent;outline-offset:2px;--tw-ring-inset:var(--tw-empty,/*!*/ /*!*/);--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:#2563eb;--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow);border-color:#2563eb}input::-moz-placeholder,textarea::-moz-placeholder{color:#6b7280;opacity:1}input::placeholder,textarea::placeholder{color:#6b7280;opacity:1}::-webkit-datetime-edit-fields-wrapper{padding:0}::-webkit-date-and-time-value{min-height:1.5em;text-align:inherit}::-webkit-datetime-edit{display:inline-flex}::-webkit-datetime-edit,::-webkit-datetime-edit-day-field,::-webkit-datetime-edit-hour-field,::-webkit-datetime-edit-meridiem-field,::-webkit-datetime-edit-millisecond-field,::-webkit-datetime-edit-minute-field,::-webkit-datetime-edit-month-field,::-webkit-datetime-edit-second-field,::-webkit-datetime-edit-year-field{padding-top:0;padding-bottom:0}select{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3E%3Cpath stroke='%236b7280' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='m6 8 4 4 4-4'/%3E%3C/svg%3E");background-position:right .5rem center;background-repeat:no-repeat;background-size:1.5em 
1.5em;padding-right:2.5rem;-webkit-print-color-adjust:exact;print-color-adjust:exact}[multiple],[size]:where(select:not([size="1"])){background-image:none;background-position:0 0;background-repeat:unset;background-size:initial;padding-right:.75rem;-webkit-print-color-adjust:unset;print-color-adjust:unset}[type=checkbox],[type=radio]{-webkit-appearance:none;-moz-appearance:none;appearance:none;padding:0;-webkit-print-color-adjust:exact;print-color-adjust:exact;display:inline-block;vertical-align:middle;background-origin:border-box;-webkit-user-select:none;-moz-user-select:none;user-select:none;flex-shrink:0;height:1rem;width:1rem;color:#2563eb;background-color:#fff;border-color:#6b7280;border-width:1px;--tw-shadow:0 0 #0000}[type=checkbox]{border-radius:0}[type=radio]{border-radius:100%}[type=checkbox]:focus,[type=radio]:focus{outline:2px solid transparent;outline-offset:2px;--tw-ring-inset:var(--tw-empty,/*!*/ /*!*/);--tw-ring-offset-width:2px;--tw-ring-offset-color:#fff;--tw-ring-color:#2563eb;--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}[type=checkbox]:checked,[type=radio]:checked{border-color:transparent;background-color:currentColor;background-size:100% 100%;background-position:50%;background-repeat:no-repeat}[type=checkbox]:checked{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='%23fff' viewBox='0 0 16 16'%3E%3Cpath d='M12.207 4.793a1 1 0 0 1 0 1.414l-5 5a1 1 0 0 1-1.414 0l-2-2a1 1 0 0 1 1.414-1.414L6.5 9.086l4.293-4.293a1 1 0 0 1 1.414 0'/%3E%3C/svg%3E")}@media (forced-colors:active) {[type=checkbox]:checked{-webkit-appearance:auto;-moz-appearance:auto;appearance:auto}}[type=radio]:checked{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg 
xmlns='http://www.w3.org/2000/svg' fill='%23fff' viewBox='0 0 16 16'%3E%3Ccircle cx='8' cy='8' r='3'/%3E%3C/svg%3E")}@media (forced-colors:active) {[type=radio]:checked{-webkit-appearance:auto;-moz-appearance:auto;appearance:auto}}[type=checkbox]:checked:focus,[type=checkbox]:checked:hover,[type=radio]:checked:focus,[type=radio]:checked:hover{border-color:transparent;background-color:currentColor}[type=checkbox]:indeterminate{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 16 16'%3E%3Cpath stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M4 8h8'/%3E%3C/svg%3E");border-color:transparent;background-color:currentColor;background-size:100% 100%;background-position:50%;background-repeat:no-repeat}@media (forced-colors:active) {[type=checkbox]:indeterminate{-webkit-appearance:auto;-moz-appearance:auto;appearance:auto}}[type=checkbox]:indeterminate:focus,[type=checkbox]:indeterminate:hover{border-color:transparent;background-color:currentColor}[type=file]{background:unset;border-color:inherit;border-width:0;border-radius:0;padding:0;font-size:unset;line-height:inherit}[type=file]:focus{outline:1px solid ButtonText;outline:1px auto -webkit-focus-ring-color}.container{width:100%}@media (min-width:640px){.container{max-width:640px}}@media (min-width:768px){.container{max-width:768px}}@media (min-width:1024px){.container{max-width:1024px}}@media (min-width:1280px){.container{max-width:1280px}}@media 
(min-width:1536px){.container{max-width:1536px}}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);white-space:nowrap;border-width:0}.visible{visibility:visible}.collapse{visibility:collapse}.static{position:static}.fixed{position:fixed}.absolute{position:absolute}.relative{position:relative}.sticky{position:sticky}.left-1\/2{left:50%}.float-right{float:right}.m-1{margin:.25rem}.m-4{margin:1rem}.mx-4{margin-left:1rem;margin-right:1rem}.mx-auto{margin-left:auto;margin-right:auto}.my-2{margin-top:.5rem;margin-bottom:.5rem}.my-4{margin-top:1rem;margin-bottom:1rem}.mb-2{margin-bottom:.5rem}.mb-4{margin-bottom:1rem}.mb-6{margin-bottom:1.5rem}.mb-8{margin-bottom:2rem}.ml-0{margin-left:0}.ml-1{margin-left:.25rem}.ml-2{margin-left:.5rem}.ml-3{margin-left:.75rem}.ml-4{margin-left:1rem}.ml-auto{margin-left:auto}.mr-2{margin-right:.5rem}.mr-4{margin-right:1rem}.mt-2{margin-top:.5rem}.mt-4{margin-top:1rem}.mt-6{margin-top:1.5rem}.mt-8{margin-top:2rem}.block{display:block}.inline{display:inline}.flex{display:flex}.inline-flex{display:inline-flex}.table{display:table}.grid{display:grid}.contents{display:contents}.hidden{display:none}.size-6{width:1.5rem;height:1.5rem}.size-8{width:2rem;height:2rem}.h-12{height:3rem}.h-32{height:8rem}.h-4{height:1rem}.h-5{height:1.25rem}.h-6{height:1.5rem}.h-8{height:2rem}.h-screen{height:100vh}.w-1\/2{width:50%}.w-1\/3{width:33.333333%}.w-1\/4{width:25%}.w-1\/6{width:16.666667%}.w-10\/12{width:83.333333%}.w-12{width:3rem}.w-128{width:32rem}.w-2\/3{width:66.666667%}.w-24{width:6rem}.w-3\/4{width:75%}.w-4{width:1rem}.w-5{width:1.25rem}.w-6{width:1.5rem}.w-8{width:2rem}.w-full{width:100%}.max-w-4xl{max-width:56rem}.flex-\[2_1_96rem\]{flex:2 1 96rem}.flex-auto{flex:1 1 auto}.flex-none{flex:none}.border-collapse{border-collapse:collapse}.-translate-x-1\/2{--tw-translate-x:-50%}.-translate-x-1\/2,.-translate-y-1\/2{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) 
rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.-translate-y-1\/2{--tw-translate-y:-50%}.rotate-180{--tw-rotate:180deg}.rotate-180,.transform{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.cursor-pointer{cursor:pointer}.list-inside{list-style-position:inside}.list-decimal{list-style-type:decimal}.list-disc{list-style-type:disc}.list-none{list-style-type:none}.flex-row{flex-direction:row}.flex-col{flex-direction:column}.place-content-end{place-content:end}.content-start{align-content:flex-start}.items-start{align-items:flex-start}.items-end{align-items:flex-end}.items-center{align-items:center}.justify-end{justify-content:flex-end}.justify-center{justify-content:center}.self-stretch{align-self:stretch}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-scroll{overflow-y:scroll}.text-ellipsis{text-overflow:ellipsis}.whitespace-nowrap{white-space:nowrap}.whitespace-pre{white-space:pre}.rounded-2xl{border-radius:1rem}.rounded-full{border-radius:9999px}.rounded-lg{border-radius:.5rem}.rounded-md{border-radius:.375rem}.rounded-xl{border-radius:.75rem}.rounded-e-md{border-start-end-radius:.375rem;border-end-end-radius:.375rem}.rounded-s-md{border-start-start-radius:.375rem;border-end-start-radius:.375rem}.border{border-width:1px}.border-2{border-width:2px}.border-b-2{border-bottom-width:2px}.border-b-4{border-bottom-width:4px}.border-l-2{border-left-width:2px}.border-r-2{border-right-width:2px}.border-t-2{border-top-width:2px}.border-dotted{border-style:dotted}.border-cyan-500{--tw-border-opacity:1;border-color:rgb(6 182 212/var(--tw-border-opacity))}.border-cyan-800{--tw-border-opacity:1;border-color:rgb(21 94 117/var(--tw-border-opacity))}.border-red-800{--tw-border-opacity:1;border-color:rgb(153 27 
27/var(--tw-border-opacity))}.border-slate-200{--tw-border-opacity:1;border-color:rgb(226 232 240/var(--tw-border-opacity))}.border-slate-300{--tw-border-opacity:1;border-color:rgb(203 213 225/var(--tw-border-opacity))}.border-slate-400{--tw-border-opacity:1;border-color:rgb(148 163 184/var(--tw-border-opacity))}.border-slate-50{--tw-border-opacity:1;border-color:rgb(248 250 252/var(--tw-border-opacity))}.border-slate-500{--tw-border-opacity:1;border-color:rgb(100 116 139/var(--tw-border-opacity))}.border-yellow-200{--tw-border-opacity:1;border-color:rgb(254 240 138/var(--tw-border-opacity))}.bg-cyan-100{--tw-bg-opacity:1;background-color:rgb(207 250 254/var(--tw-bg-opacity))}.bg-cyan-200{--tw-bg-opacity:1;background-color:rgb(165 243 252/var(--tw-bg-opacity))}.bg-cyan-50{--tw-bg-opacity:1;background-color:rgb(236 254 255/var(--tw-bg-opacity))}.bg-gray-50{--tw-bg-opacity:1;background-color:rgb(249 250 251/var(--tw-bg-opacity))}.bg-red-100{--tw-bg-opacity:1;background-color:rgb(254 226 226/var(--tw-bg-opacity))}.bg-slate-100{--tw-bg-opacity:1;background-color:rgb(241 245 249/var(--tw-bg-opacity))}.bg-slate-200{--tw-bg-opacity:1;background-color:rgb(226 232 240/var(--tw-bg-opacity))}.bg-slate-50{--tw-bg-opacity:1;background-color:rgb(248 250 252/var(--tw-bg-opacity))}.bg-slate-800{--tw-bg-opacity:1;background-color:rgb(30 41 59/var(--tw-bg-opacity))}.bg-white{--tw-bg-opacity:1;background-color:rgb(255 255 255/var(--tw-bg-opacity))}.bg-yellow-100{--tw-bg-opacity:1;background-color:rgb(254 249 195/var(--tw-bg-opacity))}.bg-gradient-to-r{background-image:linear-gradient(to right,var(--tw-gradient-stops))}.from-slate-300{--tw-gradient-from:#cbd5e1 var(--tw-gradient-from-position);--tw-gradient-to:rgba(203,213,225,0) 
var(--tw-gradient-to-position);--tw-gradient-stops:var(--tw-gradient-from),var(--tw-gradient-to)}.fill-cyan-500{fill:#06b6d4}.fill-slate-300{fill:#cbd5e1}.fill-slate-500{fill:#64748b}.fill-slate-800{fill:#1e293b}.stroke-slate-300{stroke:#cbd5e1}.p-12{padding:3rem}.p-2{padding:.5rem}.p-4{padding:1rem}.px-2{padding-left:.5rem;padding-right:.5rem}.px-4{padding-left:1rem;padding-right:1rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.py-8{padding-top:2rem;padding-bottom:2rem}.pb-1{padding-bottom:.25rem}.pb-2{padding-bottom:.5rem}.pb-4{padding-bottom:1rem}.pb-6{padding-bottom:1.5rem}.pl-1{padding-left:.25rem}.pl-12{padding-left:3rem}.pl-2{padding-left:.5rem}.pl-4{padding-left:1rem}.pl-6{padding-left:1.5rem}.pr-2{padding-right:.5rem}.pr-4{padding-right:1rem}.pr-6{padding-right:1.5rem}.pt-1{padding-top:.25rem}.pt-2{padding-top:.5rem}.pt-4{padding-top:1rem}.pt-6{padding-top:1.5rem}.pt-8{padding-top:2rem}.text-left{text-align:left}.text-center{text-align:center}.text-right{text-align:right}.align-top{vertical-align:top}.font-mono{font-family:IBM Plex Mono,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace}.font-serif{font-family:ui-serif,Georgia,Cambria,Times New Roman,Times,serif}.text-2xl{font-size:1.5rem;line-height:2rem}.text-4xl{font-size:2.25rem;line-height:2.5rem}.text-5xl{font-size:3rem;line-height:1}.text-lg{font-size:1.125rem;line-height:1.75rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xl{font-size:1.25rem;line-height:1.75rem}.text-xs{font-size:.75rem;line-height:1rem}.font-bold{font-weight:700}.font-medium{font-weight:500}.uppercase{text-transform:uppercase}.italic{font-style:italic}.not-italic{font-style:normal}.leading-normal{line-height:1.5}.tracking-normal{letter-spacing:0}.text-cyan-500{--tw-text-opacity:1;color:rgb(6 182 212/var(--tw-text-opacity))}.text-cyan-800{--tw-text-opacity:1;color:rgb(21 94 117/var(--tw-text-opacity))}.text-gray-500{--tw-text-opacity:1;color:rgb(107 114 
128/var(--tw-text-opacity))}.text-red-500{--tw-text-opacity:1;color:rgb(239 68 68/var(--tw-text-opacity))}.text-slate-200{--tw-text-opacity:1;color:rgb(226 232 240/var(--tw-text-opacity))}.text-slate-300{--tw-text-opacity:1;color:rgb(203 213 225/var(--tw-text-opacity))}.text-slate-500{--tw-text-opacity:1;color:rgb(100 116 139/var(--tw-text-opacity))}.text-slate-700{--tw-text-opacity:1;color:rgb(51 65 85/var(--tw-text-opacity))}.text-slate-800{--tw-text-opacity:1;color:rgb(30 41 59/var(--tw-text-opacity))}.text-white{--tw-text-opacity:1;color:rgb(255 255 255/var(--tw-text-opacity))}.underline{text-decoration-line:underline}.decoration-dotted{text-decoration-style:dotted}.filter{filter:var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow)}.transition-all{transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.ease-in-out{transition-timing-function:cubic-bezier(.4,0,.2,1)}@font-face{font-display:swap;font-family:IBM Plex Sans;font-style:normal;font-weight:400;src:url(../../fonts/ibm-plex-sans-v19-cyrillic_cyrillic-ext_greek_latin_latin-ext_vietnamese-regular.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Sans;font-style:italic;font-weight:400;src:url(../../fonts/ibm-plex-sans-v19-cyrillic_cyrillic-ext_greek_latin_latin-ext_vietnamese-italic.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Sans;font-style:normal;font-weight:700;src:url(../../fonts/ibm-plex-sans-v19-cyrillic_cyrillic-ext_greek_latin_latin-ext_vietnamese-700.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Sans;font-style:italic;font-weight:700;src:url(../../fonts/ibm-plex-sans-v19-cyrillic_cyrillic-ext_greek_latin_latin-ext_vietnamese-700italic.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex 
Mono;font-style:normal;font-weight:400;src:url(../../fonts/ibm-plex-mono-v19-cyrillic_cyrillic-ext_latin_latin-ext_vietnamese-regular.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Mono;font-style:italic;font-weight:400;src:url(../../fonts/ibm-plex-mono-v19-cyrillic_cyrillic-ext_latin_latin-ext_vietnamese-italic.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Mono;font-style:normal;font-weight:700;src:url(../../fonts/ibm-plex-mono-v19-cyrillic_cyrillic-ext_latin_latin-ext_vietnamese-700.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Mono;font-style:italic;font-weight:700;src:url(../../fonts/ibm-plex-mono-v19-cyrillic_cyrillic-ext_latin_latin-ext_vietnamese-700italic.woff2) format("woff2")}.dropdown{position:relative;display:inline-block}.dropdown-content-right{display:none;position:absolute;z-index:1;margin-left:auto;right:0}.dropdown-content-left{display:none;position:absolute;z-index:1;left:0}.dropdown:hover .dropdown-content-left,.dropdown:hover .dropdown-content-right{display:flex}.triangle-left{position:relative}.triangle-left:before{content:"";border-color:transparent #cbd5e1 transparent transparent;border-style:solid;border-width:9px 8px 9px 0;position:absolute;left:-8px;top:20px}.triangle-left:after{content:"";border-color:transparent #fff transparent transparent;border-style:solid;border-width:9px 8px 9px 0;position:absolute;left:-6px;top:20px}pre{line-height:125%}.syntax-coloring .c{color:#3d7b7b;font-style:italic}.syntax-coloring .err{border:1px solid red}.syntax-coloring .k{color:green;font-weight:700}.syntax-coloring .o{color:#666}.syntax-coloring .ch,.syntax-coloring .cm{color:#3d7b7b;font-style:italic}.syntax-coloring .cp{color:#9c6500}.syntax-coloring .c1,.syntax-coloring .cpf,.syntax-coloring .cs{color:#3d7b7b;font-style:italic}.syntax-coloring .gd{color:#a00000}.syntax-coloring .ge{font-style:italic}.syntax-coloring .ges{font-weight:700;font-style:italic}.syntax-coloring 
.gr{color:#e40000}.syntax-coloring .gh{color:navy;font-weight:700}.syntax-coloring .gi{color:#008400}.syntax-coloring .go{color:#717171}.syntax-coloring .gp{color:navy;font-weight:700}.syntax-coloring .gs{font-weight:700}.syntax-coloring .gu{color:purple;font-weight:700}.syntax-coloring .gt{color:#04d}.syntax-coloring .kc,.syntax-coloring .kd,.syntax-coloring .kn{color:green;font-weight:700}.syntax-coloring .kp{color:green}.syntax-coloring .kr{color:green;font-weight:700}.syntax-coloring .kt{color:#b00040}.syntax-coloring .m{color:#666}.syntax-coloring .s{color:#ba2121}.syntax-coloring .na{color:#687822}.syntax-coloring .nb{color:green}.syntax-coloring .nc{color:#00f;font-weight:700}.syntax-coloring .no{color:#800}.syntax-coloring .nd{color:#a2f}.syntax-coloring .ni{color:#717171;font-weight:700}.syntax-coloring .ne{color:#cb3f38;font-weight:700}.syntax-coloring .nf{color:#00f}.syntax-coloring .nl{color:#767600}.syntax-coloring .nn{color:#00f;font-weight:700}.syntax-coloring .nt{color:green;font-weight:700}.syntax-coloring .nv{color:#19177c}.syntax-coloring .ow{color:#a2f;font-weight:700}.syntax-coloring .w{color:#bbb}.syntax-coloring .mb,.syntax-coloring .mf,.syntax-coloring .mh,.syntax-coloring .mi,.syntax-coloring .mo{color:#666}.syntax-coloring .dl,.syntax-coloring .sa,.syntax-coloring .sb,.syntax-coloring .sc{color:#ba2121}.syntax-coloring .sd{color:#ba2121;font-style:italic}.syntax-coloring .s2{color:#ba2121}.syntax-coloring .se{color:#aa5d1f;font-weight:700}.syntax-coloring .sh{color:#ba2121}.syntax-coloring .si{color:#a45a77;font-weight:700}.syntax-coloring .sx{color:green}.syntax-coloring .sr{color:#a45a77}.syntax-coloring .s1{color:#ba2121}.syntax-coloring .ss{color:#19177c}.syntax-coloring .bp{color:green}.syntax-coloring .fm{color:#00f}.syntax-coloring .vc,.syntax-coloring .vg,.syntax-coloring .vi,.syntax-coloring .vm{color:#19177c}.syntax-coloring 
.il{color:#666}input[type=radio]{color:#06b6d4}.hover\:border-b-4:hover{border-bottom-width:4px}.hover\:border-slate-400:hover{--tw-border-opacity:1;border-color:rgb(148 163 184/var(--tw-border-opacity))}.hover\:bg-cyan-400:hover{--tw-bg-opacity:1;background-color:rgb(34 211 238/var(--tw-bg-opacity))}.hover\:bg-slate-100:hover{--tw-bg-opacity:1;background-color:rgb(241 245 249/var(--tw-bg-opacity))}.hover\:bg-slate-200:hover{--tw-bg-opacity:1;background-color:rgb(226 232 240/var(--tw-bg-opacity))}.hover\:bg-slate-300:hover{--tw-bg-opacity:1;background-color:rgb(203 213 225/var(--tw-bg-opacity))}.focus\:border-cyan-500:focus{--tw-border-opacity:1;border-color:rgb(6 182 212/var(--tw-border-opacity))}.focus\:outline-none:focus{outline:2px solid transparent;outline-offset:2px}.focus\:ring-cyan-200:focus{--tw-ring-opacity:1;--tw-ring-color:rgb(165 243 252/var(--tw-ring-opacity))}.active\:ring:active{--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(3px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}@media (min-width:768px){.md\:mb-8{margin-bottom:2rem}.md\:h-16{height:4rem}.md\:w-16{width:4rem}.md\:p-4{padding:1rem}.md\:p-8{padding:2rem}.md\:py-4{padding-top:1rem;padding-bottom:1rem}.md\:pb-16{padding-bottom:4rem}.md\:pl-24{padding-left:6rem}.md\:pr-24{padding-right:6rem}.md\:pt-24{padding-top:6rem}}@media (min-width:1024px){.lg\:w-5\/12{width:41.666667%}.lg\:flex-row-reverse{flex-direction:row-reverse}.lg\:pb-0{padding-bottom:0}}@media (min-width:1280px){.xl\:flex{display:flex}} \ No newline at end of file +*,:after,:before{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: 
;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }::backdrop{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: 
;--tw-contain-style: }/*! tailwindcss v3.4.17 | MIT License | https://tailwindcss.com*/*,:after,:before{box-sizing:border-box;border:0 solid #e5e7eb}:after,:before{--tw-content:""}:host,html{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:IBM Plex Sans,ui-sans-serif,system-ui,sans-serif,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color Emoji;font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:IBM Plex Mono,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-feature-settings:normal;font-variation-settings:normal;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:lis
t-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}fieldset{margin:0}fieldset,legend{padding:0}menu,ol,ul{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}[role=button],button{cursor:pointer}:disabled{cursor:default}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]:where(:not([hidden=until-found])){display:none}[multiple],[type=date],[type=datetime-local],[type=email],[type=month],[type=number],[type=password],[type=search],[type=tel],[type=text],[type=time],[type=url],[type=week],input:where(:not([type])),select,textarea{-webkit-appearance:none;-moz-appearance:none;appearance:none;background-color:#fff;border-color:#6b7280;border-width:1px;border-radius:0;padding:.5rem .75rem;font-size:1rem;line-height:1.5rem;--tw-shadow:0 0 #0000}[multiple]:focus,[type=date]:focus,[type=datetime-local]:focus,[type=email]:focus,[type=month]:focus,[type=number]:focus,[type=password]:focus,[type=search]:focus,[type=tel]:focus,[type=text]:focus,[type=time]:focus,[type=url]:focus,[type=week]:focus,input:where(:not([type])):focus,select:focus,textarea:focus{outline:2px solid transparent;outline-offset:2px;--tw-ring-inset:var(--tw-empty,/*!*/ /*!*/);--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:#2563eb;--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(1px + var(--tw-ring-offset-width)) 
var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow);border-color:#2563eb}input::-moz-placeholder,textarea::-moz-placeholder{color:#6b7280;opacity:1}input::placeholder,textarea::placeholder{color:#6b7280;opacity:1}::-webkit-datetime-edit-fields-wrapper{padding:0}::-webkit-date-and-time-value{min-height:1.5em;text-align:inherit}::-webkit-datetime-edit{display:inline-flex}::-webkit-datetime-edit,::-webkit-datetime-edit-day-field,::-webkit-datetime-edit-hour-field,::-webkit-datetime-edit-meridiem-field,::-webkit-datetime-edit-millisecond-field,::-webkit-datetime-edit-minute-field,::-webkit-datetime-edit-month-field,::-webkit-datetime-edit-second-field,::-webkit-datetime-edit-year-field{padding-top:0;padding-bottom:0}select{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3E%3Cpath stroke='%236b7280' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='m6 8 4 4 4-4'/%3E%3C/svg%3E");background-position:right .5rem center;background-repeat:no-repeat;background-size:1.5em 1.5em;padding-right:2.5rem;-webkit-print-color-adjust:exact;print-color-adjust:exact}[multiple],[size]:where(select:not([size="1"])){background-image:none;background-position:0 0;background-repeat:unset;background-size:initial;padding-right:.75rem;-webkit-print-color-adjust:unset;print-color-adjust:unset}[type=checkbox],[type=radio]{-webkit-appearance:none;-moz-appearance:none;appearance:none;padding:0;-webkit-print-color-adjust:exact;print-color-adjust:exact;display:inline-block;vertical-align:middle;background-origin:border-box;-webkit-user-select:none;-moz-user-select:none;user-select:none;flex-shrink:0;height:1rem;width:1rem;color:#2563eb;background-color:#fff;border-color:#6b7280;border-width:1px;--tw-shadow:0 0 #0000}[type=checkbox]{border-radius:0}[type=radio]{border-radius:100%}[type=checkbox]:focus,[type=radio]:focus{outline:2px solid 
transparent;outline-offset:2px;--tw-ring-inset:var(--tw-empty,/*!*/ /*!*/);--tw-ring-offset-width:2px;--tw-ring-offset-color:#fff;--tw-ring-color:#2563eb;--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}[type=checkbox]:checked,[type=radio]:checked{border-color:transparent;background-color:currentColor;background-size:100% 100%;background-position:50%;background-repeat:no-repeat}[type=checkbox]:checked{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='%23fff' viewBox='0 0 16 16'%3E%3Cpath d='M12.207 4.793a1 1 0 0 1 0 1.414l-5 5a1 1 0 0 1-1.414 0l-2-2a1 1 0 0 1 1.414-1.414L6.5 9.086l4.293-4.293a1 1 0 0 1 1.414 0'/%3E%3C/svg%3E")}@media (forced-colors:active) {[type=checkbox]:checked{-webkit-appearance:auto;-moz-appearance:auto;appearance:auto}}[type=radio]:checked{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='%23fff' viewBox='0 0 16 16'%3E%3Ccircle cx='8' cy='8' r='3'/%3E%3C/svg%3E")}@media (forced-colors:active) {[type=radio]:checked{-webkit-appearance:auto;-moz-appearance:auto;appearance:auto}}[type=checkbox]:checked:focus,[type=checkbox]:checked:hover,[type=radio]:checked:focus,[type=radio]:checked:hover{border-color:transparent;background-color:currentColor}[type=checkbox]:indeterminate{background-image:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 16 16'%3E%3Cpath stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M4 8h8'/%3E%3C/svg%3E");border-color:transparent;background-color:currentColor;background-size:100% 100%;background-position:50%;background-repeat:no-repeat}@media (forced-colors:active) 
{[type=checkbox]:indeterminate{-webkit-appearance:auto;-moz-appearance:auto;appearance:auto}}[type=checkbox]:indeterminate:focus,[type=checkbox]:indeterminate:hover{border-color:transparent;background-color:currentColor}[type=file]{background:unset;border-color:inherit;border-width:0;border-radius:0;padding:0;font-size:unset;line-height:inherit}[type=file]:focus{outline:1px solid ButtonText;outline:1px auto -webkit-focus-ring-color}.container{width:100%}@media (min-width:640px){.container{max-width:640px}}@media (min-width:768px){.container{max-width:768px}}@media (min-width:1024px){.container{max-width:1024px}}@media (min-width:1280px){.container{max-width:1280px}}@media (min-width:1536px){.container{max-width:1536px}}@media (min-width:1920px){.container{max-width:1920px}}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);white-space:nowrap;border-width:0}.visible{visibility:visible}.collapse{visibility:collapse}.static{position:static}.fixed{position:fixed}.absolute{position:absolute}.relative{position:relative}.sticky{position:sticky}.inset-0{inset:0}.left-1\/2{left:50%}.z-50{z-index:50}.float-right{float:right}.m-1{margin:.25rem}.m-4{margin:1rem}.mx-4{margin-left:1rem;margin-right:1rem}.mx-auto{margin-left:auto;margin-right:auto}.my-2{margin-top:.5rem;margin-bottom:.5rem}.my-4{margin-top:1rem;margin-bottom:1rem}.mb-2{margin-bottom:.5rem}.mb-4{margin-bottom:1rem}.mb-6{margin-bottom:1.5rem}.mb-8{margin-bottom:2rem}.ml-0{margin-left:0}.ml-1{margin-left:.25rem}.ml-2{margin-left:.5rem}.ml-3{margin-left:.75rem}.ml-4{margin-left:1rem}.ml-auto{margin-left:auto}.mr-2{margin-right:.5rem}.mr-4{margin-right:1rem}.mt-2{margin-top:.5rem}.mt-3{margin-top:.75rem}.mt-4{margin-top:1rem}.mt-6{margin-top:1.5rem}.mt-8{margin-top:2rem}.block{display:block}.inline{display:inline}.flex{display:flex}.inline-flex{display:inline-flex}.table{display:table}.grid{display:grid}.contents{display:contents}.hidden{display:none}.size-6{width:1
.5rem;height:1.5rem}.size-8{width:2rem;height:2rem}.h-12{height:3rem}.h-32{height:8rem}.h-4{height:1rem}.h-5{height:1.25rem}.h-6{height:1.5rem}.h-8{height:2rem}.h-full{height:100%}.h-screen{height:100vh}.w-1\/2{width:50%}.w-1\/3{width:33.333333%}.w-1\/4{width:25%}.w-1\/6{width:16.666667%}.w-10\/12{width:83.333333%}.w-12{width:3rem}.w-128{width:32rem}.w-2\/3{width:66.666667%}.w-24{width:6rem}.w-3\/4{width:75%}.w-4{width:1rem}.w-5{width:1.25rem}.w-6{width:1.5rem}.w-8{width:2rem}.w-96{width:24rem}.w-full{width:100%}.max-w-4xl{max-width:56rem}.flex-\[2_1_96rem\]{flex:2 1 96rem}.flex-auto{flex:1 1 auto}.flex-none{flex:none}.border-collapse{border-collapse:collapse}.-translate-x-1\/2{--tw-translate-x:-50%}.-translate-x-1\/2,.-translate-y-1\/2{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.-translate-y-1\/2{--tw-translate-y:-50%}.rotate-180{--tw-rotate:180deg}.rotate-180,.transform{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.cursor-pointer{cursor:pointer}.list-inside{list-style-position:inside}.list-decimal{list-style-type:decimal}.list-disc{list-style-type:disc}.list-none{list-style-type:none}.flex-row{flex-direction:row}.flex-col{flex-direction:column}.flex-col-reverse{flex-direction:column-reverse}.place-content-end{place-content:end}.content-start{align-content:flex-start}.items-start{align-items:flex-start}.items-end{align-items:flex-end}.items-center{align-items:center}.justify-end{justify-content:flex-end}.justify-center{justify-content:center}.space-x-4>:not([hidden])~:not([hidden]){--tw-space-x-reverse:0;margin-right:calc(1rem*var(--tw-space-x-reverse));margin-left:calc(1rem*(1 - 
var(--tw-space-x-reverse)))}.self-stretch{align-self:stretch}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-auto{overflow-y:auto}.overflow-y-scroll{overflow-y:scroll}.text-ellipsis{text-overflow:ellipsis}.whitespace-nowrap{white-space:nowrap}.whitespace-pre{white-space:pre}.rounded{border-radius:.25rem}.rounded-2xl{border-radius:1rem}.rounded-full{border-radius:9999px}.rounded-lg{border-radius:.5rem}.rounded-md{border-radius:.375rem}.rounded-xl{border-radius:.75rem}.rounded-e-md{border-start-end-radius:.375rem;border-end-end-radius:.375rem}.rounded-s-md{border-start-start-radius:.375rem;border-end-start-radius:.375rem}.border{border-width:1px}.border-2{border-width:2px}.border-b-2{border-bottom-width:2px}.border-b-4{border-bottom-width:4px}.border-l-2{border-left-width:2px}.border-r-2{border-right-width:2px}.border-t-2{border-top-width:2px}.border-dotted{border-style:dotted}.border-cyan-500{--tw-border-opacity:1;border-color:rgb(6 182 212/var(--tw-border-opacity,1))}.border-cyan-800{--tw-border-opacity:1;border-color:rgb(21 94 117/var(--tw-border-opacity,1))}.border-red-600{--tw-border-opacity:1;border-color:rgb(220 38 38/var(--tw-border-opacity,1))}.border-red-800{--tw-border-opacity:1;border-color:rgb(153 27 27/var(--tw-border-opacity,1))}.border-slate-200{--tw-border-opacity:1;border-color:rgb(226 232 240/var(--tw-border-opacity,1))}.border-slate-300{--tw-border-opacity:1;border-color:rgb(203 213 225/var(--tw-border-opacity,1))}.border-slate-400{--tw-border-opacity:1;border-color:rgb(148 163 184/var(--tw-border-opacity,1))}.border-slate-50{--tw-border-opacity:1;border-color:rgb(248 250 252/var(--tw-border-opacity,1))}.border-slate-500{--tw-border-opacity:1;border-color:rgb(100 116 139/var(--tw-border-opacity,1))}.border-yellow-200{--tw-border-opacity:1;border-color:rgb(254 240 138/var(--tw-border-opacity,1))}.bg-cyan-100{--tw-bg-opacity:1;background-color:rgb(207 250 
254/var(--tw-bg-opacity,1))}.bg-cyan-200{--tw-bg-opacity:1;background-color:rgb(165 243 252/var(--tw-bg-opacity,1))}.bg-cyan-50{--tw-bg-opacity:1;background-color:rgb(236 254 255/var(--tw-bg-opacity,1))}.bg-gray-50{--tw-bg-opacity:1;background-color:rgb(249 250 251/var(--tw-bg-opacity,1))}.bg-red-100{--tw-bg-opacity:1;background-color:rgb(254 226 226/var(--tw-bg-opacity,1))}.bg-red-50{--tw-bg-opacity:1;background-color:rgb(254 242 242/var(--tw-bg-opacity,1))}.bg-red-500{--tw-bg-opacity:1;background-color:rgb(239 68 68/var(--tw-bg-opacity,1))}.bg-slate-100{--tw-bg-opacity:1;background-color:rgb(241 245 249/var(--tw-bg-opacity,1))}.bg-slate-200{--tw-bg-opacity:1;background-color:rgb(226 232 240/var(--tw-bg-opacity,1))}.bg-slate-50{--tw-bg-opacity:1;background-color:rgb(248 250 252/var(--tw-bg-opacity,1))}.bg-slate-600{--tw-bg-opacity:1;background-color:rgb(71 85 105/var(--tw-bg-opacity,1))}.bg-slate-800{--tw-bg-opacity:1;background-color:rgb(30 41 59/var(--tw-bg-opacity,1))}.bg-white{--tw-bg-opacity:1;background-color:rgb(255 255 255/var(--tw-bg-opacity,1))}.bg-yellow-100{--tw-bg-opacity:1;background-color:rgb(254 249 195/var(--tw-bg-opacity,1))}.bg-opacity-50{--tw-bg-opacity:0.5}.bg-gradient-to-r{background-image:linear-gradient(to right,var(--tw-gradient-stops))}.from-slate-300{--tw-gradient-from:#cbd5e1 var(--tw-gradient-from-position);--tw-gradient-to:rgba(203,213,225,0) 
var(--tw-gradient-to-position);--tw-gradient-stops:var(--tw-gradient-from),var(--tw-gradient-to)}.fill-cyan-500{fill:#06b6d4}.fill-slate-300{fill:#cbd5e1}.fill-slate-500{fill:#64748b}.fill-slate-800{fill:#1e293b}.stroke-slate-300{stroke:#cbd5e1}.p-12{padding:3rem}.p-2{padding:.5rem}.p-4{padding:1rem}.p-6{padding:1.5rem}.px-2{padding-left:.5rem;padding-right:.5rem}.px-4{padding-left:1rem;padding-right:1rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.py-8{padding-top:2rem;padding-bottom:2rem}.pb-1{padding-bottom:.25rem}.pb-2{padding-bottom:.5rem}.pb-4{padding-bottom:1rem}.pb-6{padding-bottom:1.5rem}.pl-1{padding-left:.25rem}.pl-12{padding-left:3rem}.pl-2{padding-left:.5rem}.pl-4{padding-left:1rem}.pl-6{padding-left:1.5rem}.pr-2{padding-right:.5rem}.pr-4{padding-right:1rem}.pr-6{padding-right:1.5rem}.pt-1{padding-top:.25rem}.pt-2{padding-top:.5rem}.pt-4{padding-top:1rem}.pt-6{padding-top:1.5rem}.pt-8{padding-top:2rem}.text-left{text-align:left}.text-center{text-align:center}.text-right{text-align:right}.align-top{vertical-align:top}.font-mono{font-family:IBM Plex Mono,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace}.font-serif{font-family:ui-serif,Georgia,Cambria,Times New Roman,Times,serif}.text-2xl{font-size:1.5rem;line-height:2rem}.text-4xl{font-size:2.25rem;line-height:2.5rem}.text-5xl{font-size:3rem;line-height:1}.text-lg{font-size:1.125rem;line-height:1.75rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xl{font-size:1.25rem;line-height:1.75rem}.text-xs{font-size:.75rem;line-height:1rem}.font-bold{font-weight:700}.font-medium{font-weight:500}.font-semibold{font-weight:600}.uppercase{text-transform:uppercase}.italic{font-style:italic}.not-italic{font-style:normal}.leading-normal{line-height:1.5}.tracking-normal{letter-spacing:0}.text-black{--tw-text-opacity:1;color:rgb(0 0 0/var(--tw-text-opacity,1))}.text-cyan-500{--tw-text-opacity:1;color:rgb(6 182 
212/var(--tw-text-opacity,1))}.text-gray-500{--tw-text-opacity:1;color:rgb(107 114 128/var(--tw-text-opacity,1))}.text-red-500{--tw-text-opacity:1;color:rgb(239 68 68/var(--tw-text-opacity,1))}.text-slate-200{--tw-text-opacity:1;color:rgb(226 232 240/var(--tw-text-opacity,1))}.text-slate-300{--tw-text-opacity:1;color:rgb(203 213 225/var(--tw-text-opacity,1))}.text-slate-500{--tw-text-opacity:1;color:rgb(100 116 139/var(--tw-text-opacity,1))}.text-slate-700{--tw-text-opacity:1;color:rgb(51 65 85/var(--tw-text-opacity,1))}.text-slate-800{--tw-text-opacity:1;color:rgb(30 41 59/var(--tw-text-opacity,1))}.text-white{--tw-text-opacity:1;color:rgb(255 255 255/var(--tw-text-opacity,1))}.underline{text-decoration-line:underline}.decoration-dotted{text-decoration-style:dotted}.shadow-lg{--tw-shadow:0 10px 15px -3px rgba(0,0,0,.1),0 4px 6px -4px rgba(0,0,0,.1);--tw-shadow-colored:0 10px 15px -3px var(--tw-shadow-color),0 4px 6px -4px var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.filter{filter:var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow)}.transition-all{transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.ease-in-out{transition-timing-function:cubic-bezier(.4,0,.2,1)}@font-face{font-display:swap;font-family:IBM Plex Sans;font-style:normal;font-weight:400;src:url(../../fonts/ibm-plex-sans-v19-cyrillic_cyrillic-ext_greek_latin_latin-ext_vietnamese-regular.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Sans;font-style:italic;font-weight:400;src:url(../../fonts/ibm-plex-sans-v19-cyrillic_cyrillic-ext_greek_latin_latin-ext_vietnamese-italic.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex 
Sans;font-style:normal;font-weight:700;src:url(../../fonts/ibm-plex-sans-v19-cyrillic_cyrillic-ext_greek_latin_latin-ext_vietnamese-700.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Sans;font-style:italic;font-weight:700;src:url(../../fonts/ibm-plex-sans-v19-cyrillic_cyrillic-ext_greek_latin_latin-ext_vietnamese-700italic.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Mono;font-style:normal;font-weight:400;src:url(../../fonts/ibm-plex-mono-v19-cyrillic_cyrillic-ext_latin_latin-ext_vietnamese-regular.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Mono;font-style:italic;font-weight:400;src:url(../../fonts/ibm-plex-mono-v19-cyrillic_cyrillic-ext_latin_latin-ext_vietnamese-italic.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Mono;font-style:normal;font-weight:700;src:url(../../fonts/ibm-plex-mono-v19-cyrillic_cyrillic-ext_latin_latin-ext_vietnamese-700.woff2) format("woff2")}@font-face{font-display:swap;font-family:IBM Plex Mono;font-style:italic;font-weight:700;src:url(../../fonts/ibm-plex-mono-v19-cyrillic_cyrillic-ext_latin_latin-ext_vietnamese-700italic.woff2) format("woff2")}.dropdown{position:relative;display:inline-block}.dropdown-content-right{display:none;position:absolute;z-index:1;margin-left:auto;right:0}.dropdown-content-left{display:none;position:absolute;z-index:1;left:0}.dropdown:hover .dropdown-content-left,.dropdown:hover .dropdown-content-right{display:flex}.triangle-left{position:relative}.triangle-left:before{content:"";border-color:transparent #cbd5e1 transparent transparent;border-style:solid;border-width:9px 8px 9px 0;position:absolute;left:-8px;top:20px}.triangle-left:after{content:"";border-color:transparent #fff transparent transparent;border-style:solid;border-width:9px 8px 9px 0;position:absolute;left:-6px;top:20px}[data-theme=dark] .triangle-left:before{border-color:transparent #475569 transparent transparent}[data-theme=dark] 
.triangle-left:after{border-color:transparent #334155 transparent transparent}pre{line-height:125%}.syntax-coloring .c{color:#3d7b7b;font-style:italic}.syntax-coloring .err{border:1px solid red}.syntax-coloring .k{color:green;font-weight:700}.syntax-coloring .o{color:#666}.syntax-coloring .ch,.syntax-coloring .cm{color:#3d7b7b;font-style:italic}.syntax-coloring .cp{color:#9c6500}.syntax-coloring .c1,.syntax-coloring .cpf,.syntax-coloring .cs{color:#3d7b7b;font-style:italic}.syntax-coloring .gd{color:#a00000}.syntax-coloring .ge{font-style:italic}.syntax-coloring .ges{font-weight:700;font-style:italic}.syntax-coloring .gr{color:#e40000}.syntax-coloring .gh{color:navy;font-weight:700}.syntax-coloring .gi{color:#008400}.syntax-coloring .go{color:#717171}.syntax-coloring .gp{color:navy;font-weight:700}.syntax-coloring .gs{font-weight:700}.syntax-coloring .gu{color:purple;font-weight:700}.syntax-coloring .gt{color:#04d}.syntax-coloring .kc,.syntax-coloring .kd,.syntax-coloring .kn{color:green;font-weight:700}.syntax-coloring .kp{color:green}.syntax-coloring .kr{color:green;font-weight:700}.syntax-coloring .kt{color:#b00040}.syntax-coloring .m{color:#666}.syntax-coloring .s{color:#ba2121}.syntax-coloring .na{color:#687822}.syntax-coloring .nb{color:green}.syntax-coloring .nc{color:#00f;font-weight:700}.syntax-coloring .no{color:#800}.syntax-coloring .nd{color:#a2f}.syntax-coloring .ni{color:#717171;font-weight:700}.syntax-coloring .ne{color:#cb3f38;font-weight:700}.syntax-coloring .nf{color:#00f}.syntax-coloring .nl{color:#767600}.syntax-coloring .nn{color:#00f;font-weight:700}.syntax-coloring .nt{color:green;font-weight:700}.syntax-coloring .nv{color:#19177c}.syntax-coloring .ow{color:#a2f;font-weight:700}.syntax-coloring .w{color:#bbb}.syntax-coloring .mb,.syntax-coloring .mf,.syntax-coloring .mh,.syntax-coloring .mi,.syntax-coloring .mo{color:#666}.syntax-coloring .dl,.syntax-coloring .sa,.syntax-coloring .sb,.syntax-coloring .sc{color:#ba2121}.syntax-coloring 
.sd{color:#ba2121;font-style:italic}.syntax-coloring .s2{color:#ba2121}.syntax-coloring .se{color:#aa5d1f;font-weight:700}.syntax-coloring .sh{color:#ba2121}.syntax-coloring .si{color:#a45a77;font-weight:700}.syntax-coloring .sx{color:green}.syntax-coloring .sr{color:#a45a77}.syntax-coloring .s1{color:#ba2121}.syntax-coloring .ss{color:#19177c}.syntax-coloring .bp{color:green}.syntax-coloring .fm{color:#00f}.syntax-coloring .vc,.syntax-coloring .vg,.syntax-coloring .vi,.syntax-coloring .vm{color:#19177c}.syntax-coloring .il{color:#666}[data-theme=dark] span.linenos,[data-theme=dark] td.linenos .normal{color:inherit;background-color:transparent;padding-left:5px;padding-right:5px}[data-theme=dark] span.linenos.special,[data-theme=dark] td.linenos .special{color:#000;background-color:#ffffc0;padding-left:5px;padding-right:5px}[data-theme=dark] .syntax-coloring .hll{background-color:#49483e}[data-theme=dark] .syntax-coloring{background:#272822;color:#f8f8f2}[data-theme=dark] .syntax-coloring .c{color:#959077}[data-theme=dark] .syntax-coloring .err{color:#ed007e;background-color:#1e0010}[data-theme=dark] .syntax-coloring .esc,[data-theme=dark] .syntax-coloring .g{color:#f8f8f2}[data-theme=dark] .syntax-coloring .k{color:#66d9ef}[data-theme=dark] .syntax-coloring .l{color:#ae81ff}[data-theme=dark] .syntax-coloring .n{color:#f8f8f2}[data-theme=dark] .syntax-coloring .o{color:#ff4689}[data-theme=dark] .syntax-coloring .p,[data-theme=dark] .syntax-coloring .x{color:#f8f8f2}[data-theme=dark] .syntax-coloring .c1,[data-theme=dark] .syntax-coloring .ch,[data-theme=dark] .syntax-coloring .cm,[data-theme=dark] .syntax-coloring .cp,[data-theme=dark] .syntax-coloring .cpf,[data-theme=dark] .syntax-coloring .cs{color:#959077}[data-theme=dark] .syntax-coloring .gd{color:#ff4689}[data-theme=dark] .syntax-coloring .ge{color:#f8f8f2;font-style:italic}[data-theme=dark] .syntax-coloring .ges{color:#f8f8f2;font-weight:700;font-style:italic}[data-theme=dark] .syntax-coloring 
.gh,[data-theme=dark] .syntax-coloring .gr{color:#f8f8f2}[data-theme=dark] .syntax-coloring .gi{color:#a6e22e}[data-theme=dark] .syntax-coloring .go{color:#66d9ef}[data-theme=dark] .syntax-coloring .gp{color:#ff4689;font-weight:700}[data-theme=dark] .syntax-coloring .gs{color:#f8f8f2;font-weight:700}[data-theme=dark] .syntax-coloring .gu{color:#959077}[data-theme=dark] .syntax-coloring .gt{color:#f8f8f2}[data-theme=dark] .syntax-coloring .kc,[data-theme=dark] .syntax-coloring .kd{color:#66d9ef}[data-theme=dark] .syntax-coloring .kn{color:#ff4689}[data-theme=dark] .syntax-coloring .kp,[data-theme=dark] .syntax-coloring .kr,[data-theme=dark] .syntax-coloring .kt{color:#66d9ef}[data-theme=dark] .syntax-coloring .ld{color:#e6db74}[data-theme=dark] .syntax-coloring .m{color:#ae81ff}[data-theme=dark] .syntax-coloring .s{color:#e6db74}[data-theme=dark] .syntax-coloring .na{color:#a6e22e}[data-theme=dark] .syntax-coloring .nb{color:#f8f8f2}[data-theme=dark] .syntax-coloring .nc{color:#a6e22e}[data-theme=dark] .syntax-coloring .no{color:#66d9ef}[data-theme=dark] .syntax-coloring .nd{color:#a6e22e}[data-theme=dark] .syntax-coloring .ni{color:#f8f8f2}[data-theme=dark] .syntax-coloring .ne,[data-theme=dark] .syntax-coloring .nf{color:#a6e22e}[data-theme=dark] .syntax-coloring .nl,[data-theme=dark] .syntax-coloring .nn{color:#f8f8f2}[data-theme=dark] .syntax-coloring .nx{color:#a6e22e}[data-theme=dark] .syntax-coloring .py{color:#f8f8f2}[data-theme=dark] .syntax-coloring .nt{color:#ff4689}[data-theme=dark] .syntax-coloring .nv{color:#f8f8f2}[data-theme=dark] .syntax-coloring .ow{color:#ff4689}[data-theme=dark] .syntax-coloring .pm,[data-theme=dark] .syntax-coloring .w{color:#f8f8f2}[data-theme=dark] .syntax-coloring .mb,[data-theme=dark] .syntax-coloring .mf,[data-theme=dark] .syntax-coloring .mh,[data-theme=dark] .syntax-coloring .mi,[data-theme=dark] .syntax-coloring .mo{color:#ae81ff}[data-theme=dark] .syntax-coloring .dl,[data-theme=dark] .syntax-coloring 
.s2,[data-theme=dark] .syntax-coloring .sa,[data-theme=dark] .syntax-coloring .sb,[data-theme=dark] .syntax-coloring .sc,[data-theme=dark] .syntax-coloring .sd{color:#e6db74}[data-theme=dark] .syntax-coloring .se{color:#ae81ff}[data-theme=dark] .syntax-coloring .s1,[data-theme=dark] .syntax-coloring .sh,[data-theme=dark] .syntax-coloring .si,[data-theme=dark] .syntax-coloring .sr,[data-theme=dark] .syntax-coloring .ss,[data-theme=dark] .syntax-coloring .sx{color:#e6db74}[data-theme=dark] .syntax-coloring .bp{color:#f8f8f2}[data-theme=dark] .syntax-coloring .fm{color:#a6e22e}[data-theme=dark] .syntax-coloring .vc,[data-theme=dark] .syntax-coloring .vg,[data-theme=dark] .syntax-coloring .vi,[data-theme=dark] .syntax-coloring .vm{color:#f8f8f2}[data-theme=dark] .syntax-coloring .il{color:#ae81ff}input[type=radio]{color:#06b6d4}.hover\:border-b-4:hover{border-bottom-width:4px}.hover\:border-slate-400:hover{--tw-border-opacity:1;border-color:rgb(148 163 184/var(--tw-border-opacity,1))}.hover\:bg-cyan-400:hover{--tw-bg-opacity:1;background-color:rgb(34 211 238/var(--tw-bg-opacity,1))}.hover\:bg-red-50:hover{--tw-bg-opacity:1;background-color:rgb(254 242 242/var(--tw-bg-opacity,1))}.hover\:bg-red-600:hover{--tw-bg-opacity:1;background-color:rgb(220 38 38/var(--tw-bg-opacity,1))}.hover\:bg-slate-100:hover{--tw-bg-opacity:1;background-color:rgb(241 245 249/var(--tw-bg-opacity,1))}.hover\:bg-slate-200:hover{--tw-bg-opacity:1;background-color:rgb(226 232 240/var(--tw-bg-opacity,1))}.hover\:bg-slate-300:hover{--tw-bg-opacity:1;background-color:rgb(203 213 225/var(--tw-bg-opacity,1))}.focus\:border-cyan-500:focus{--tw-border-opacity:1;border-color:rgb(6 182 212/var(--tw-border-opacity,1))}.focus\:outline-none:focus{outline:2px solid transparent;outline-offset:2px}.focus\:ring-cyan-200:focus{--tw-ring-opacity:1;--tw-ring-color:rgb(165 243 252/var(--tw-ring-opacity,1))}.active\:ring:active{--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) 
var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(3px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}@media (min-width:768px){.md\:mb-8{margin-bottom:2rem}.md\:h-16{height:4rem}.md\:w-16{width:4rem}.md\:p-4{padding:1rem}.md\:p-8{padding:2rem}.md\:py-4{padding-top:1rem;padding-bottom:1rem}.md\:pb-16{padding-bottom:4rem}.md\:pl-24{padding-left:6rem}.md\:pr-24{padding-right:6rem}.md\:pt-24{padding-top:6rem}}@media (min-width:1024px){.lg\:w-5\/12{width:41.666667%}.lg\:flex-row{flex-direction:row}.lg\:flex-row-reverse{flex-direction:row-reverse}.lg\:pb-0{padding-bottom:0}}@media (min-width:1280px){.xl\:flex{display:flex}}@media (min-width:1920px){.\33xl\:flex-row{flex-direction:row}.\33xl\:pt-0{padding-top:0}}.dark\:block:where([data-theme=dark],[data-theme=dark] *){display:block}.dark\:hidden:where([data-theme=dark],[data-theme=dark] *){display:none}.dark\:border-amber-900:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(120 53 15/var(--tw-border-opacity,1))}.dark\:border-cyan-400:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(34 211 238/var(--tw-border-opacity,1))}.dark\:border-red-400:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(248 113 113/var(--tw-border-opacity,1))}.dark\:border-slate-400:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(148 163 184/var(--tw-border-opacity,1))}.dark\:border-slate-600:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(71 85 105/var(--tw-border-opacity,1))}.dark\:border-slate-700:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(51 65 85/var(--tw-border-opacity,1))}.dark\:border-slate-900:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(15 23 
42/var(--tw-border-opacity,1))}.dark\:border-yellow-700:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(161 98 7/var(--tw-border-opacity,1))}.dark\:bg-amber-800:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(146 64 14/var(--tw-bg-opacity,1))}.dark\:bg-cyan-700:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(14 116 144/var(--tw-bg-opacity,1))}.dark\:bg-cyan-900:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(22 78 99/var(--tw-bg-opacity,1))}.dark\:bg-red-700:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(185 28 28/var(--tw-bg-opacity,1))}.dark\:bg-red-900:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(127 29 29/var(--tw-bg-opacity,1))}.dark\:bg-slate-700:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(51 65 85/var(--tw-bg-opacity,1))}.dark\:bg-slate-800:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(30 41 59/var(--tw-bg-opacity,1))}.dark\:bg-slate-900:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(15 23 42/var(--tw-bg-opacity,1))}.dark\:bg-opacity-50:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:0.5}.dark\:from-slate-950:where([data-theme=dark],[data-theme=dark] *){--tw-gradient-from:#020617 var(--tw-gradient-from-position);--tw-gradient-to:rgba(2,6,23,0) var(--tw-gradient-to-position);--tw-gradient-stops:var(--tw-gradient-from),var(--tw-gradient-to)}.dark\:fill-slate-100:where([data-theme=dark],[data-theme=dark] *){fill:#f1f5f9}.dark\:fill-slate-500:where([data-theme=dark],[data-theme=dark] *){fill:#64748b}.dark\:fill-slate-600:where([data-theme=dark],[data-theme=dark] *){fill:#475569}.dark\:stroke-slate-600:where([data-theme=dark],[data-theme=dark] 
*){stroke:#475569}.dark\:text-cyan-300:where([data-theme=dark],[data-theme=dark] *){--tw-text-opacity:1;color:rgb(103 232 249/var(--tw-text-opacity,1))}.dark\:text-cyan-400:where([data-theme=dark],[data-theme=dark] *){--tw-text-opacity:1;color:rgb(34 211 238/var(--tw-text-opacity,1))}.dark\:text-gray-400:where([data-theme=dark],[data-theme=dark] *){--tw-text-opacity:1;color:rgb(156 163 175/var(--tw-text-opacity,1))}.dark\:text-red-400:where([data-theme=dark],[data-theme=dark] *){--tw-text-opacity:1;color:rgb(248 113 113/var(--tw-text-opacity,1))}.dark\:text-slate-100:where([data-theme=dark],[data-theme=dark] *){--tw-text-opacity:1;color:rgb(241 245 249/var(--tw-text-opacity,1))}.dark\:text-slate-300:where([data-theme=dark],[data-theme=dark] *){--tw-text-opacity:1;color:rgb(203 213 225/var(--tw-text-opacity,1))}.dark\:text-slate-600:where([data-theme=dark],[data-theme=dark] *){--tw-text-opacity:1;color:rgb(71 85 105/var(--tw-text-opacity,1))}.dark\:hover\:border-slate-500:hover:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(100 116 139/var(--tw-border-opacity,1))}.dark\:hover\:bg-cyan-600:hover:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(8 145 178/var(--tw-bg-opacity,1))}.dark\:hover\:bg-red-800:hover:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(153 27 27/var(--tw-bg-opacity,1))}.dark\:hover\:bg-slate-700:hover:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(51 65 85/var(--tw-bg-opacity,1))}.dark\:hover\:bg-slate-800:hover:where([data-theme=dark],[data-theme=dark] *){--tw-bg-opacity:1;background-color:rgb(30 41 59/var(--tw-bg-opacity,1))}.dark\:focus\:border-cyan-400:focus:where([data-theme=dark],[data-theme=dark] *){--tw-border-opacity:1;border-color:rgb(34 211 238/var(--tw-border-opacity,1))}.dark\:focus\:ring-cyan-700:focus:where([data-theme=dark],[data-theme=dark] *){--tw-ring-opacity:1;--tw-ring-color:rgb(14 116 
144/var(--tw-ring-opacity,1))} \ No newline at end of file diff --git a/theme/static_src/src/styles.css b/theme/static_src/src/styles.css index 5533e24..1839a8a 100644 --- a/theme/static_src/src/styles.css +++ b/theme/static_src/src/styles.css @@ -126,6 +126,15 @@ top: 20px; } +/* Tailwind dark mode support */ +[data-theme="dark"] .triangle-left:before { + border-color: transparent rgb(71 85 105) transparent transparent; +} + +[data-theme="dark"] .triangle-left:after { + border-color: transparent rgb(51 65 85) transparent transparent; +} + /* The below is the output of: @@ -205,6 +214,91 @@ pre { line-height: 125%; } .syntax-coloring .vm { color: #19177C } /* Name.Variable.Magic */ .syntax-coloring .il { color: #666666 } /* Literal.Number.Integer.Long */ +[data-theme="dark"] td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +[data-theme="dark"] span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +[data-theme="dark"] td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +[data-theme="dark"] span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +[data-theme="dark"] .syntax-coloring .hll { background-color: #49483e } +[data-theme="dark"] .syntax-coloring { background: #272822; color: #F8F8F2 } +[data-theme="dark"] .syntax-coloring .c { color: #959077 } /* Comment */ +[data-theme="dark"] .syntax-coloring .err { color: #ED007E; background-color: #1E0010 } /* Error */ +[data-theme="dark"] .syntax-coloring .esc { color: #F8F8F2 } /* Escape */ +[data-theme="dark"] .syntax-coloring .g { color: #F8F8F2 } /* Generic */ +[data-theme="dark"] .syntax-coloring .k { color: #66D9EF } /* Keyword */ +[data-theme="dark"] .syntax-coloring .l { color: #AE81FF } /* Literal */ +[data-theme="dark"] .syntax-coloring .n { color: #F8F8F2 } /* Name */ +[data-theme="dark"] 
.syntax-coloring .o { color: #FF4689 } /* Operator */ +[data-theme="dark"] .syntax-coloring .x { color: #F8F8F2 } /* Other */ +[data-theme="dark"] .syntax-coloring .p { color: #F8F8F2 } /* Punctuation */ +[data-theme="dark"] .syntax-coloring .ch { color: #959077 } /* Comment.Hashbang */ +[data-theme="dark"] .syntax-coloring .cm { color: #959077 } /* Comment.Multiline */ +[data-theme="dark"] .syntax-coloring .cp { color: #959077 } /* Comment.Preproc */ +[data-theme="dark"] .syntax-coloring .cpf { color: #959077 } /* Comment.PreprocFile */ +[data-theme="dark"] .syntax-coloring .c1 { color: #959077 } /* Comment.Single */ +[data-theme="dark"] .syntax-coloring .cs { color: #959077 } /* Comment.Special */ +[data-theme="dark"] .syntax-coloring .gd { color: #FF4689 } /* Generic.Deleted */ +[data-theme="dark"] .syntax-coloring .ge { color: #F8F8F2; font-style: italic } /* Generic.Emph */ +[data-theme="dark"] .syntax-coloring .ges { color: #F8F8F2; font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +[data-theme="dark"] .syntax-coloring .gr { color: #F8F8F2 } /* Generic.Error */ +[data-theme="dark"] .syntax-coloring .gh { color: #F8F8F2 } /* Generic.Heading */ +[data-theme="dark"] .syntax-coloring .gi { color: #A6E22E } /* Generic.Inserted */ +[data-theme="dark"] .syntax-coloring .go { color: #66D9EF } /* Generic.Output */ +[data-theme="dark"] .syntax-coloring .gp { color: #FF4689; font-weight: bold } /* Generic.Prompt */ +[data-theme="dark"] .syntax-coloring .gs { color: #F8F8F2; font-weight: bold } /* Generic.Strong */ +[data-theme="dark"] .syntax-coloring .gu { color: #959077 } /* Generic.Subheading */ +[data-theme="dark"] .syntax-coloring .gt { color: #F8F8F2 } /* Generic.Traceback */ +[data-theme="dark"] .syntax-coloring .kc { color: #66D9EF } /* Keyword.Constant */ +[data-theme="dark"] .syntax-coloring .kd { color: #66D9EF } /* Keyword.Declaration */ +[data-theme="dark"] .syntax-coloring .kn { color: #FF4689 } /* Keyword.Namespace */ +[data-theme="dark"] 
.syntax-coloring .kp { color: #66D9EF } /* Keyword.Pseudo */ +[data-theme="dark"] .syntax-coloring .kr { color: #66D9EF } /* Keyword.Reserved */ +[data-theme="dark"] .syntax-coloring .kt { color: #66D9EF } /* Keyword.Type */ +[data-theme="dark"] .syntax-coloring .ld { color: #E6DB74 } /* Literal.Date */ +[data-theme="dark"] .syntax-coloring .m { color: #AE81FF } /* Literal.Number */ +[data-theme="dark"] .syntax-coloring .s { color: #E6DB74 } /* Literal.String */ +[data-theme="dark"] .syntax-coloring .na { color: #A6E22E } /* Name.Attribute */ +[data-theme="dark"] .syntax-coloring .nb { color: #F8F8F2 } /* Name.Builtin */ +[data-theme="dark"] .syntax-coloring .nc { color: #A6E22E } /* Name.Class */ +[data-theme="dark"] .syntax-coloring .no { color: #66D9EF } /* Name.Constant */ +[data-theme="dark"] .syntax-coloring .nd { color: #A6E22E } /* Name.Decorator */ +[data-theme="dark"] .syntax-coloring .ni { color: #F8F8F2 } /* Name.Entity */ +[data-theme="dark"] .syntax-coloring .ne { color: #A6E22E } /* Name.Exception */ +[data-theme="dark"] .syntax-coloring .nf { color: #A6E22E } /* Name.Function */ +[data-theme="dark"] .syntax-coloring .nl { color: #F8F8F2 } /* Name.Label */ +[data-theme="dark"] .syntax-coloring .nn { color: #F8F8F2 } /* Name.Namespace */ +[data-theme="dark"] .syntax-coloring .nx { color: #A6E22E } /* Name.Other */ +[data-theme="dark"] .syntax-coloring .py { color: #F8F8F2 } /* Name.Property */ +[data-theme="dark"] .syntax-coloring .nt { color: #FF4689 } /* Name.Tag */ +[data-theme="dark"] .syntax-coloring .nv { color: #F8F8F2 } /* Name.Variable */ +[data-theme="dark"] .syntax-coloring .ow { color: #FF4689 } /* Operator.Word */ +[data-theme="dark"] .syntax-coloring .pm { color: #F8F8F2 } /* Punctuation.Marker */ +[data-theme="dark"] .syntax-coloring .w { color: #F8F8F2 } /* Text.Whitespace */ +[data-theme="dark"] .syntax-coloring .mb { color: #AE81FF } /* Literal.Number.Bin */ +[data-theme="dark"] .syntax-coloring .mf { color: #AE81FF } /* 
Literal.Number.Float */ +[data-theme="dark"] .syntax-coloring .mh { color: #AE81FF } /* Literal.Number.Hex */ +[data-theme="dark"] .syntax-coloring .mi { color: #AE81FF } /* Literal.Number.Integer */ +[data-theme="dark"] .syntax-coloring .mo { color: #AE81FF } /* Literal.Number.Oct */ +[data-theme="dark"] .syntax-coloring .sa { color: #E6DB74 } /* Literal.String.Affix */ +[data-theme="dark"] .syntax-coloring .sb { color: #E6DB74 } /* Literal.String.Backtick */ +[data-theme="dark"] .syntax-coloring .sc { color: #E6DB74 } /* Literal.String.Char */ +[data-theme="dark"] .syntax-coloring .dl { color: #E6DB74 } /* Literal.String.Delimiter */ +[data-theme="dark"] .syntax-coloring .sd { color: #E6DB74 } /* Literal.String.Doc */ +[data-theme="dark"] .syntax-coloring .s2 { color: #E6DB74 } /* Literal.String.Double */ +[data-theme="dark"] .syntax-coloring .se { color: #AE81FF } /* Literal.String.Escape */ +[data-theme="dark"] .syntax-coloring .sh { color: #E6DB74 } /* Literal.String.Heredoc */ +[data-theme="dark"] .syntax-coloring .si { color: #E6DB74 } /* Literal.String.Interpol */ +[data-theme="dark"] .syntax-coloring .sx { color: #E6DB74 } /* Literal.String.Other */ +[data-theme="dark"] .syntax-coloring .sr { color: #E6DB74 } /* Literal.String.Regex */ +[data-theme="dark"] .syntax-coloring .s1 { color: #E6DB74 } /* Literal.String.Single */ +[data-theme="dark"] .syntax-coloring .ss { color: #E6DB74 } /* Literal.String.Symbol */ +[data-theme="dark"] .syntax-coloring .bp { color: #F8F8F2 } /* Name.Builtin.Pseudo */ +[data-theme="dark"] .syntax-coloring .fm { color: #A6E22E } /* Name.Function.Magic */ +[data-theme="dark"] .syntax-coloring .vc { color: #F8F8F2 } /* Name.Variable.Class */ +[data-theme="dark"] .syntax-coloring .vg { color: #F8F8F2 } /* Name.Variable.Global */ +[data-theme="dark"] .syntax-coloring .vi { color: #F8F8F2 } /* Name.Variable.Instance */ +[data-theme="dark"] .syntax-coloring .vm { color: #F8F8F2 } /* Name.Variable.Magic */ +[data-theme="dark"] 
.syntax-coloring .il { color: #AE81FF } /* Literal.Number.Integer.Long */ + input[type='radio'] { /* I wanted to style the whole of the radio button in a non-navy color (something that fits more with what we do generally but I didn't manage to get it done in the self-allotted time. I'm still seeing a navy outer ring */ diff --git a/theme/static_src/tailwind.config.js b/theme/static_src/tailwind.config.js index 5048748..b0ceda2 100644 --- a/theme/static_src/tailwind.config.js +++ b/theme/static_src/tailwind.config.js @@ -8,6 +8,7 @@ const defaultTheme = require("tailwindcss/defaultTheme"); module.exports = { + darkMode: ['selector', '[data-theme="dark"]'], content: [ /** * HTML. Paths to Django template files that will contain Tailwind CSS classes. @@ -44,9 +45,13 @@ module.exports = { // '../../**/*.py' "../../issues/views.py", "../../theme/templatetags/code.py", + "../../theme/templatetags/tailwind_forms.py", ], theme: { extend: { + screens: { + '3xl': '1920px', + }, spacing: { '128': '32rem', }, diff --git a/theme/templates/bare_base.html b/theme/templates/bare_base.html index 3e1ffae..588ddbc 100644 --- a/theme/templates/bare_base.html +++ b/theme/templates/bare_base.html @@ -1,18 +1,32 @@ {% load static tailwind_tags %}{# copy of base.html, but without variables (and hence no menu), for use in contextless templates (e.g. 500.html) #} - + {% block title %}Bugsink{% endblock %} - + {% tailwind_preload_css %} {% tailwind_css %} + - +
-
- Bugsink logo +
diff --git a/theme/templates/barest_base.html b/theme/templates/barest_base.html index 19b089a..b57f0ea 100644 --- a/theme/templates/barest_base.html +++ b/theme/templates/barest_base.html @@ -1,15 +1,29 @@ {% load static tailwind_tags %}{# copy of bare_base.html, but without even a menu bar #} - + {% block title %}Bugsink{% endblock %} - + {% tailwind_preload_css %} {% tailwind_css %} + - +
{% block content %}{% endblock %} diff --git a/theme/templates/base.html b/theme/templates/base.html index a97f937..c313e1a 100644 --- a/theme/templates/base.html +++ b/theme/templates/base.html @@ -1,53 +1,67 @@ {% load static tailwind_tags version add_to_qs %} - + {% block title %}{{ site_title }}{% endblock %} - + {% tailwind_preload_css %} {% tailwind_css %} + - +
-
- Bugsink logo -
{{ site_title }}
+
+ Bugsink logo +
{{ site_title }}
{% if not app_settings.SINGLE_TEAM %} -
Teams
+
Teams
{% endif %} -
Projects
+
Projects
{% if project %} -
Issues ({{ project.name }})
+
Issues ({{ project.name }})
{% endif %}
{% if app_settings.USE_ADMIN and user.is_staff %} -
Admin
+
Admin
{% endif %} {% if user.is_superuser %} -
Users
-
Tokens
+
Users
+
Tokens
{% endif %} {% if logged_in_user.is_anonymous %} -
Login
{# I don't think this is actually ever shown in practice, because you must always be logged in #} +
Login
{# I don't think this is actually ever shown in practice, because you must always be logged in #} {% else %} -
Preferences
-
{% csrf_token %}
+
Preferences
+
{% csrf_token %}
{% endif %}
{% for system_warning in system_warnings %} -
-
+
+
{{ system_warning.message }}
{% if system_warning.ignore_url %} @@ -57,7 +71,7 @@ {% endfor %} {% comment %} {# for use when we introduce a notification system #} -
+
Foo bar baz
diff --git a/theme/templates/tailwind_forms/formfield.html b/theme/templates/tailwind_forms/formfield.html index 1c349be..4e34cac 100644 --- a/theme/templates/tailwind_forms/formfield.html +++ b/theme/templates/tailwind_forms/formfield.html @@ -2,17 +2,17 @@
{% if not implicit %} -
{{ formfield.label }}:
+
{{ formfield.label }}:
{% endif %} {{ formfield }} {% if formfield.errors %} {% for error in formfield.errors %} -
{{ error }}
+
{{ error }}
{% endfor %} {% elif formfield.help_text %} -
{{ formfield.help_text|safe }}
+
{{ formfield.help_text|safe }}
{% endif %}
{% endif %} diff --git a/theme/templatetags/code.py b/theme/templatetags/code.py index a6b7140..e222675 100644 --- a/theme/templatetags/code.py +++ b/theme/templatetags/code.py @@ -37,4 +37,4 @@ class CodeNode(template.Node): lexer = get_lexer_by_name(lang, stripall=True) - return highlight(code, lexer, formatter).replace("highlight", "p-4 mt-4 bg-slate-50 syntax-coloring") + return highlight(code, lexer, formatter).replace("highlight", "p-4 mt-4 bg-slate-50 dark:bg-slate-800 syntax-coloring") diff --git a/theme/templatetags/issues.py b/theme/templatetags/issues.py index 48cfa64..98e67b1 100644 --- a/theme/templatetags/issues.py +++ b/theme/templatetags/issues.py @@ -152,7 +152,7 @@ def format_var(value): return get def gen_base(obj): - yield escape(repr(obj)), None + yield escape(str(obj)), None def bracket_wrap(gen, b_open, sep, b_close): yield b_open, None diff --git a/theme/templatetags/tailwind_forms.py b/theme/templatetags/tailwind_forms.py index 94bae85..b8fd899 100644 --- a/theme/templatetags/tailwind_forms.py +++ b/theme/templatetags/tailwind_forms.py @@ -11,9 +11,9 @@ def tailwind_formfield(formfield, implicit=False): return {"formfield": None} if formfield.errors: - formfield.field.widget.attrs['class'] = "bg-red-50" + formfield.field.widget.attrs['class'] = "bg-red-50 dark:bg-red-900" else: - formfield.field.widget.attrs['class'] = "bg-slate-50" + formfield.field.widget.attrs['class'] = "bg-slate-50 dark:bg-slate-800" formfield.field.widget.attrs['class'] += " pl-4 py-2 md:py-4 focus:outline-none w-full" if implicit: diff --git a/theme/tests.py b/theme/tests.py index f8e6cc1..cd6e168 100644 --- a/theme/tests.py +++ b/theme/tests.py @@ -80,6 +80,14 @@ class TestFormatVar(RegularTestCase): return format_var(var).replace("'", "'") def test_format_var_none(self): + # This is how we've actually observed None values in the SDKs, so we should also handle it + self.assertEqual( + "None", + self._format_var("None"), + ) + + # I _think_ SDKs generally don't send 
null (None) as a value, but if/when they do we should handle it + # gracefully. See #119 self.assertEqual( "None", self._format_var(None), @@ -92,11 +100,12 @@ class TestFormatVar(RegularTestCase): "c": {"d": 4}, "d": [], "e": {}, - "f": None, + "f": "None", + "g": "", } self.assertEqual( - "{'a': 1, 'b': [2, 3], 'c': {'d': 4}, 'd': [], 'e': {}, 'f': None}", + "{'a': 1, 'b': [2, 3], 'c': {'d': 4}, 'd': [], 'e': {}, 'f': None, 'g': <python obj>}", self._format_var(var), ) diff --git a/users/forms.py b/users/forms.py index 311b522..190719a 100644 --- a/users/forms.py +++ b/users/forms.py @@ -140,7 +140,13 @@ class PreferencesForm(ModelForm): # I haven't gotten a decent display for checkboxes in forms yet; the quickest hack around this is a ChoiceField send_email_alerts = forms.ChoiceField( label=_("Send email alerts"), choices=TRUE_FALSE_CHOICES, required=False, widget=forms.Select()) + theme_preference = forms.ChoiceField( + label=_("Theme preference"), + choices=User.THEME_CHOICES, + required=True, + widget=forms.Select(), + ) class Meta: model = User - fields = ("send_email_alerts",) + fields = ("send_email_alerts", "theme_preference",) diff --git a/users/migrations/0002_user_theme_preference.py b/users/migrations/0002_user_theme_preference.py new file mode 100644 index 0000000..ac57bee --- /dev/null +++ b/users/migrations/0002_user_theme_preference.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.23 on 2025-06-16 08:10 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0001_initial'), + ] + + operations = [ + migrations.AddField( + model_name='user', + name='theme_preference', + field=models.CharField(choices=[('system', 'System Default'), ('light', 'Light'), ('dark', 'Dark')], default='system', max_length=10), + ), + ] diff --git a/users/models.py b/users/models.py index 89edc8e..68c4960 100644 --- a/users/models.py +++ b/users/models.py @@ -17,6 +17,18 @@ class User(AbstractUser): 
send_email_alerts = models.BooleanField(default=True, blank=True) + THEME_CHOICES = [ + ("system", "System Default"), + ("light", "Light"), + ("dark", "Dark"), + ] + theme_preference = models.CharField( + max_length=10, + choices=THEME_CHOICES, + default="system", + blank=False, + ) + class Meta: db_table = 'auth_user' diff --git a/users/templates/users/confirm_email.html b/users/templates/users/confirm_email.html index b8b49e4..446757a 100644 --- a/users/templates/users/confirm_email.html +++ b/users/templates/users/confirm_email.html @@ -5,14 +5,14 @@ {% block content %} -
{# the cyan background #} -
{# the centered box #} -
{# the logo #} - Bugsink logo +
{# the cyan background #} +
{# the centered box #} +
{# the logo #} + Bugsink logo
- +
Confirm your email address by clicking the button below.
diff --git a/users/templates/users/confirm_email_sent.html b/users/templates/users/confirm_email_sent.html index e44cc37..6cfaec0 100644 --- a/users/templates/users/confirm_email_sent.html +++ b/users/templates/users/confirm_email_sent.html @@ -5,10 +5,10 @@ {% block content %} -
{# the cyan background #} -
{# the centered box #} -
{# the logo #} - Bugsink logo +
{# the cyan background #} +
{# the centered box #} +
{# the logo #} + Bugsink logo
diff --git a/users/templates/users/logged_out.html b/users/templates/users/logged_out.html index 7aa276f..d71f3f8 100644 --- a/users/templates/users/logged_out.html +++ b/users/templates/users/logged_out.html @@ -5,14 +5,14 @@ {% block content %} -
{# the cyan background #} -
{# the centered box #} -
{# the logo #} - Bugsink logo +
{# the cyan background #} +
{# the centered box #} +
{# the logo #} + Bugsink logo
- You have been logged out. Log in again. + You have been logged out. Log in again.
diff --git a/users/templates/users/preferences.html b/users/templates/users/preferences.html index e5e7822..bc04472 100644 --- a/users/templates/users/preferences.html +++ b/users/templates/users/preferences.html @@ -17,7 +17,7 @@
    {% for message in messages %} {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} -
  • {{ message }}
  • +
  • {{ message }}
  • {% endfor %}
{% endif %} @@ -27,8 +27,9 @@
{% tailwind_formfield form.send_email_alerts %} + {% tailwind_formfield form.theme_preference %} - +
diff --git a/users/templates/users/request_reset_password.html b/users/templates/users/request_reset_password.html index d6dce67..770d592 100644 --- a/users/templates/users/request_reset_password.html +++ b/users/templates/users/request_reset_password.html @@ -6,10 +6,10 @@ {% block content %} -
{# the cyan background #} -
{# the centered box #} -
{# the logo #} - Bugsink logo +
{# the cyan background #} +
{# the centered box #} +
{# the logo #} + Bugsink logo
@@ -23,7 +23,7 @@
diff --git a/users/templates/users/resend_confirmation.html b/users/templates/users/resend_confirmation.html index 0a820dc..a5471b9 100644 --- a/users/templates/users/resend_confirmation.html +++ b/users/templates/users/resend_confirmation.html @@ -6,10 +6,10 @@ {% block content %} -
{# the cyan background #} -
{# the centered box #} -
{# the logo #} - Bugsink logo +
{# the cyan background #} +
{# the centered box #} +
{# the logo #} + Bugsink logo
diff --git a/users/templates/users/reset_password.html b/users/templates/users/reset_password.html index e41753e..165b76b 100644 --- a/users/templates/users/reset_password.html +++ b/users/templates/users/reset_password.html @@ -6,10 +6,11 @@ {% block content %} -
{# the cyan background #} -
{# the centered box #} -
{# the logo #} - Bugsink logo +
{# the cyan background #} +
{# the centered box #} +
{# the logo #} + Bugsink logo +
@@ -27,7 +28,7 @@
diff --git a/users/templates/users/reset_password_email_sent.html b/users/templates/users/reset_password_email_sent.html index 740fafe..b0347bc 100644 --- a/users/templates/users/reset_password_email_sent.html +++ b/users/templates/users/reset_password_email_sent.html @@ -5,10 +5,10 @@ {% block content %} -
{# the cyan background #} -
{# the centered box #} -
{# the logo #} - Bugsink logo +
{# the cyan background #} +
{# the centered box #} +
{# the logo #} + Bugsink logo
diff --git a/users/templates/users/user_edit.html b/users/templates/users/user_edit.html index 1ab0c4d..ea06be5 100644 --- a/users/templates/users/user_edit.html +++ b/users/templates/users/user_edit.html @@ -23,8 +23,8 @@ {% tailwind_formfield form.username %} - - Cancel + + Cancel
diff --git a/users/templates/users/user_list.html b/users/templates/users/user_list.html index b9d93d6..29ac354 100644 --- a/users/templates/users/user_list.html +++ b/users/templates/users/user_list.html @@ -5,6 +5,28 @@ {% block content %} + + +
@@ -13,19 +35,18 @@
    {% for message in messages %} {# if we introduce different levels we can use{% message.level == DEFAULT_MESSAGE_LEVELS.SUCCESS %} #} -
  • {{ message }}
  • +
  • {{ message }}
  • {% endfor %}
{% endif %}

Users

- {% comment %} Our current invite-system is tied to either a team or a project; no "global" invites (yet). + Invite Member +
{% endcomment %}
@@ -36,18 +57,17 @@
{{ team.name }}
- {{ member.user.email }} {# "best name" perhaps later? #} + {{ member.user.email }} {# "best name" perhaps later? #} {% if not member.accepted %} - Invitation pending + Invitation pending {% elif member.is_admin %} {# NOTE: we intentionally hide admin-ness for non-accepted users #} - Admin + Admin {% endif %}
{% if not member.accepted %} - + {% endif %} {% if request.user == member.user %} - + {% else %} {# NOTE: in our setup request_user_is_admin is implied because only admins may view the membership page #} - + {% endif %} -
+
{# Note: this is already somewhat exceptional, because the usually you'll at least see yourself here (unless you're a superuser and a team has become memberless) #} - No members yet. Invite someone. + No members yet. Invite someone.
- + {% for user in users %} - + @@ -56,18 +76,18 @@
{% if not request.user == user %} {% if user.is_active %} - + {% else %} - + + {% endif %} {% endif %} -
+ {% endfor %} - {#% empty %} not needed, a site without users cannot be visited by a user #}
Users
- {{ user.username }} {# "best name" perhaps later? #} - {# Invitation pending #} {# perhaps useful for "not active"? #} + {{ user.username }} {% if member.is_superuser %} - Superuser + Superuser {% endif %}
@@ -77,10 +97,17 @@ {% comment %}
- Back to Xxxx {# perhaps once this is part of some other flow #} + Back to Xxxx {# perhaps once this is part of some other flow #}
{% endcomment %}
{% endblock %} + +{% block extra_js %} + + +{% endblock %} diff --git a/users/views.py b/users/views.py index 7876d05..78d26fb 100644 --- a/users/views.py +++ b/users/views.py @@ -44,6 +44,16 @@ def user_list(request): messages.success(request, 'User %s activated' % user.username) return redirect('user_list') + if action == "delete": + user = User.objects.get(pk=user_pk) + if user.is_active: + messages.error(request, 'Cannot delete active user %s' % user.username) + else: + username = user.username + user.delete() + messages.success(request, 'User %s deleted' % username) + return redirect('user_list') + return render(request, 'users/user_list.html', { 'users': users, })