From e2d9a76109a9d1e5d3defa4b9e791c8153a58144 Mon Sep 17 00:00:00 2001
From: Dalai Felinto
Date: Mon, 21 Oct 2024 15:41:28 +0200
Subject: [PATCH 1/2] Implement basic robots.txt

It basically makes sure that the approval queue does not get crawled.
---
 blender_extensions/urls.py      | 3 ++-
 extensions/templates/robots.txt | 5 +++++
 2 files changed, 7 insertions(+), 1 deletion(-)
 create mode 100644 extensions/templates/robots.txt

diff --git a/blender_extensions/urls.py b/blender_extensions/urls.py
index 7c64a3a1..b82ed668 100644
--- a/blender_extensions/urls.py
+++ b/blender_extensions/urls.py
@@ -18,7 +18,7 @@ from django.contrib import admin
 from django.contrib.flatpages import views as flatpages_views
 from django.urls import path, include, re_path
 from django.views.static import serve
-from django.views.generic.base import RedirectView
+from django.views.generic.base import RedirectView, TemplateView
 
 from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView
 
@@ -44,6 +44,7 @@ urlpatterns = [
     path('api/swagger/', RedirectView.as_view(url='/api/v1/swagger/')),
     path('api/v1/', SpectacularAPIView.as_view(), name='schema_v1'),
     path('api/v1/swagger/', SpectacularSwaggerView.as_view(url_name='schema_v1'), name='swagger'),
+    path('robots.txt', TemplateView.as_view(template_name='robots.txt', content_type='text/plain')),
     # Flatpages
     path('about/', flatpages_views.flatpage, {'url': '/about/'}, name='flatpage-about'),
     re_path(r'^(?P<url>.*/)$', flatpages_views.flatpage),
diff --git a/extensions/templates/robots.txt b/extensions/templates/robots.txt
new file mode 100644
index 00000000..2e4c2e54
--- /dev/null
+++ b/extensions/templates/robots.txt
@@ -0,0 +1,5 @@
+User-Agent: *
+Disallow: /approval-queue/
+
+User-agent: GPTBot
+Disallow: /
--
2.30.2


From f1c0157d3b43ced6ab419b4b3cc2b000bbbccb3a Mon Sep 17 00:00:00 2001
From: Dalai Felinto
Date: Mon, 21 Oct 2024 15:50:50 +0200
Subject: [PATCH 2/2] Include simple unittest

---
 extensions/tests/test_views.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/extensions/tests/test_views.py b/extensions/tests/test_views.py
index ea1dad50..fe865823 100644
--- a/extensions/tests/test_views.py
+++ b/extensions/tests/test_views.py
@@ -309,3 +309,11 @@ class MyExtensionsTest(_BaseTestCase):
         self.client.force_login(user)
         response = self.client.get(reverse('extensions:manage-list'))
         self.assertContains(response, extension.name)
+
+
+class RobotsTxtTests(_BaseTestCase):
+    def test_get(self):
+        response = self.client.get("/robots.txt")
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(response["content-type"], "text/plain")
--
2.30.2
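
The unittest in PATCH 2/2 checks only the status code and content type. A natural follow-up would be to also assert on the rendered template body, so that a rename of the approval queue path shows up as a test failure. A minimal sketch, assuming the project's standard Django test settings; the class and test names below are hypothetical and not part of the patch:

from django.test import TestCase


class RobotsTxtContentTests(TestCase):
    # Hypothetical follow-up test, not part of the patch above.
    def test_disallow_rules_are_served(self):
        response = self.client.get("/robots.txt")

        # assertContains also verifies the default 200 status code.
        self.assertContains(response, "Disallow: /approval-queue/")
        self.assertContains(response, "User-agent: GPTBot")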