Add a robots.txt

Gabriel Augendre 2020-09-05 09:04:45 +02:00
parent 3cff6619cb
commit 603cd31691
7 changed files with 26 additions and 4 deletions

@@ -1,13 +1,23 @@
 from articles.models import Article, Page
 
+IGNORED_PATHS = [
+    "/robots.txt",
+]
+
 
 def pages(request):
+    if request.path in IGNORED_PATHS:
+        return {}
     return {"pages": Page.objects.filter(status=Article.PUBLISHED)}
 
 
 def drafts_count(request):
+    if request.path in IGNORED_PATHS:
+        return {}
     return {"drafts_count": Article.objects.filter(status=Article.DRAFT).count()}
 
 
 def date_format(request):
+    if request.path in IGNORED_PATHS:
+        return {}
     return {"CUSTOM_ISO": r"Y-m-d\TH:i:sO"}

@@ -1,3 +0,0 @@
-from django.test import TestCase
-
-# Create your tests here.

blog/settings.py

@@ -88,7 +88,7 @@ ROOT_URLCONF = "blog.urls"
 TEMPLATES = [
     {
         "BACKEND": "django.template.backends.django.DjangoTemplates",
-        "DIRS": [],
+        "DIRS": ["blog/templates"],
         "APP_DIRS": True,
         "OPTIONS": {
             "context_processors": [

blog/templates/blog/robots.txt

@@ -0,0 +1,2 @@
+User-Agent: *
+Disallow: /admin/
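The policy is deliberately small: every crawler may fetch anything except /admin/. The rules can be sanity-checked with the standard library's parser, as a sketch:

# Sketch: verify both rules with Python's stdlib robots.txt parser.
from urllib.robotparser import RobotFileParser

rp = RobotFileParser()
rp.parse(["User-Agent: *", "Disallow: /admin/"])
assert rp.can_fetch("anybot", "https://example.com/")            # allowed
assert not rp.can_fetch("anybot", "https://example.com/admin/")  # blocked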

blog/tests/__init__.py (new, empty file)

@@ -0,0 +1,6 @@
+def test_robots_txt(client):
+    res = client.get("/robots.txt")
+    assert res.status_code == 200
+    assert res["Content-Type"] == "text/plain"
+    content = res.content.decode("utf-8")
+    assert "User-Agent" in content

blog/urls.py

@@ -16,11 +16,18 @@ Including another URLconf
 from django.conf.urls.static import static
 from django.contrib import admin
 from django.urls import path
+from django.views.generic import TemplateView
 
 from articles.views import feeds, html
 from blog import settings
 
 urlpatterns = [
+    path(
+        "robots.txt",
+        TemplateView.as_view(
+            template_name="blog/robots.txt", content_type="text/plain"
+        ),
+    ),
     path("admin/", admin.site.urls),
     path("", html.ArticlesListView.as_view(), name="articles-list"),
     path("drafts/", html.DraftsListView.as_view(), name="drafts-list"),