Implement basic robots.txt #264

Merged
Dalai Felinto merged 2 commits from robots into main 2024-10-21 16:49:20 +02:00
3 changed files with 15 additions and 1 deletions

View File

@@ -18,7 +18,7 @@ from django.contrib import admin
from django.contrib.flatpages import views as flatpages_views
from django.urls import path, include, re_path
from django.views.static import serve
from django.views.generic.base import RedirectView
from django.views.generic.base import RedirectView, TemplateView
from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView
@@ -44,6 +44,7 @@ urlpatterns = [
path('api/swagger/', RedirectView.as_view(url='/api/v1/swagger/')),
path('api/v1/', SpectacularAPIView.as_view(), name='schema_v1'),
path('api/v1/swagger/', SpectacularSwaggerView.as_view(url_name='schema_v1'), name='swagger'),
path('robots.txt', TemplateView.as_view(template_name='robots.txt', content_type='text/plain')),
# Flatpages
path('about/', flatpages_views.flatpage, {'url': '/about/'}, name='flatpage-about'),
re_path(r'^(?P<url>.*/)$', flatpages_views.flatpage),

View File

@ -0,0 +1,5 @@
User-agent: *
Disallow: /approval-queue/
User-agent: GPTBot
Disallow: /

View File

@@ -309,3 +309,11 @@ class MyExtensionsTest(_BaseTestCase):
self.client.force_login(user)
response = self.client.get(reverse('extensions:manage-list'))
self.assertContains(response, extension.name)
class RobotsTxtTests(_BaseTestCase):
    """Smoke tests for the static robots.txt view wired up in urls.py."""

    def test_get(self):
        """robots.txt is served as plain text and contains crawler rules."""
        response = self.client.get("/robots.txt")
        self.assertEqual(response.status_code, 200)
        # TemplateView is configured with content_type='text/plain'.
        self.assertEqual(response["content-type"], "text/plain")
        # Verify the template actually rendered rules, not an empty body.
        self.assertContains(response, "Disallow:")