From 9770711823c94a41249dd5485024f9bf10cc0ec9 Mon Sep 17 00:00:00 2001
From: Klaas van Schelven
Date: Sun, 18 May 2025 09:33:43 +0200
Subject: [PATCH] Add robots.txt that disallows crawling

There's not much to crawl, but if you're running Bugsink somewhere and
search engines 'somehow find out about it', you probably don't want
randos on the internet to find it
---
 bugsink/urls.py      | 4 +++-
 templates/robots.txt | 2 ++
 2 files changed, 5 insertions(+), 1 deletion(-)
 create mode 100644 templates/robots.txt

diff --git a/bugsink/urls.py b/bugsink/urls.py
index d12aaa4..44804f4 100644
--- a/bugsink/urls.py
+++ b/bugsink/urls.py
@@ -3,7 +3,7 @@ from django.conf import settings
 from django.contrib import admin
 from django.urls import include, path
 from django.contrib.auth import views as auth_views
-from django.views.generic import RedirectView
+from django.views.generic import RedirectView, TemplateView
 
 from alerts.views import debug_email as debug_alerts_email
 from users.views import debug_email as debug_users_email
@@ -12,6 +12,7 @@ from bugsink.app_settings import get_settings
 from users.views import signup, confirm_email, resend_confirmation, request_reset_password, reset_password, preferences
 from ingest.views import download_envelope
 from files.views import chunk_upload, artifact_bundle_assemble
+from bugsink.decorators import login_exempt
 
 from .views import home, trigger_error, favicon, settings_view, silence_email_system_warning, counts, health_check_ready
 from .debug_views import csrf_debug
@@ -75,6 +76,7 @@ urlpatterns = [
     path('debug/csrf/', csrf_debug, name='csrf_debug'),
 
     path("favicon.ico", favicon),
+    path("robots.txt", login_exempt(TemplateView.as_view(template_name="robots.txt", content_type="text/plain"))),
 ]
 
 if settings.DEBUG:
diff --git a/templates/robots.txt b/templates/robots.txt
new file mode 100644
index 0000000..1f53798
--- /dev/null
+++ b/templates/robots.txt
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
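
A quick way to sanity-check the new route is a small test against Django's
test client. This is a sketch only, not part of the diff above; the test
module path and function name are hypothetical, and it assumes a standard
Django test setup for the project:

    # tests/test_robots.py (hypothetical module)
    from django.test import Client

    def test_robots_txt_is_public_and_plaintext():
        # The view is wrapped in login_exempt, so an anonymous client
        # should get the rendered template back rather than a redirect
        # to the login page.
        response = Client().get("/robots.txt")
        assert response.status_code == 200
        assert response["Content-Type"].startswith("text/plain")
        assert b"Disallow: /" in response.content

The template contains no template tags, so TemplateView serves it verbatim,
and content_type="text/plain" keeps clients from treating it as HTML.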