update ruff rules

parent 963c0fdf69
commit f30b8ec975

31 changed files with 125 additions and 106 deletions
@@ -25,11 +25,6 @@ repos:
- --markdown-linebreak-ext=md
- id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable
- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
hooks:
- id: pyupgrade
args: [--py311-plus]
- repo: https://github.com/adamchainz/django-upgrade
rev: 1.13.0
hooks:

@@ -36,57 +36,18 @@ ignore_missing_imports = true
[tool.ruff]
src = ["src"]
target-version = "py311"
select = [
"F", # pyflakes
"E", "W", # pycodestyle
"C90", # mccabe
"I", # isort
"N", # pep8-naming
"D", # pydocstyle
"S", # flake8-bandit
"FBT", # flake8-boolean-trap
"B", # flake8-bugbear
"A", # flake8-builtins
"C4", # flake8-comprehensions
"DTZ", # flake8-datetimez
"T10", # flake8-debugger
"EXE", # flake8-executable
"ISC", # flake8-implicit-str-concat
"ICN", # flake8-import-conventions
"G", # flake8-logging-format
"INP", # flake8-no-pep420
"PIE", # flake8-pie
"T20", # flake8-print
"PT", # flake8-pytest-style
"RET", # flake8-return
"SIM", # flake8-simplify
"TID", # flake8-tidy-imports
"ARG", # flake8-unused-arguments
"PTH", # flake8-use-pathlib
"ERA", # eradicate
"PD", # pandas-vet
"PGH", # pygrep-hooks
"PL", # pylint
"TRY", # tryceratops
"RUF", # ruff-specific rules
]
select = ["ALL"]
unfixable = ["T20", "RUF001", "RUF002", "RUF003"]

ignore = [
"UP", # pyupgrade
"YTT", # flake8-2020
"ANN", # flake8-annotations
"BLE", # flake8-blind-except
"COM", # flake8-commas
"EM", # flake8-errmsg
"Q", # flake8-quotes
"TCH", # flake8-type-checking / TODO: revisit later ?

"E501", # long lines
"D1", # missing docstring
"TRY003", # Avoid specifying long messages outside the exception class
]

[tool.ruff.per-file-ignores]
"**/tests/*" = [
"S101", # Use of assert detected.

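S101 flags any use of `assert`, which pytest-style tests are built on, hence the per-file exception above. A minimal, hypothetical test module of the kind that exception keeps passing:

# Hypothetical file matching "**/tests/*": S101 (use of assert) is ignored here.
def test_addition() -> None:
    total = 1 + 1
    assert total == 2  # a bare pytest-style assert; S101 would flag it anywhere else
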
@@ -48,7 +48,7 @@ class ArticleAdmin(admin.ModelAdmin):
("created_at", "updated_at"),
("views_count",),
("has_code", "has_custom_css"),
]
],
},
),
(

@@ -116,11 +116,13 @@ class ArticleAdmin(admin.ModelAdmin):
"all": (
"vendor/fonts/fira-code.css",
"admin_articles.css",
)
),
}

def response_post_save_add(
self, request: WSGIRequest, obj: Article
self,
request: WSGIRequest,
obj: Article,
) -> HttpResponseRedirect:
if "_preview" in request.POST:
return cast(HttpResponseRedirect, redirect("article-detail", slug=obj.slug))

@@ -27,7 +27,9 @@ class LazyImageReferenceInlineProcessor(ImageReferenceInlineProcessor):
class LazyLoadingImageExtension(Extension):
def extendMarkdown(self, md: Markdown) -> None: # noqa: N802
md.inlinePatterns.register(
LazyImageInlineProcessor(IMAGE_LINK_RE, md), "image_link", 150
LazyImageInlineProcessor(IMAGE_LINK_RE, md),
"image_link",
150,
)
md.inlinePatterns.register(
LazyImageReferenceInlineProcessor(IMAGE_REFERENCE_RE, md),

@@ -30,7 +30,9 @@ class Migration(migrations.Migration):
(
"last_login",
models.DateTimeField(
blank=True, null=True, verbose_name="last login"
blank=True,
null=True,
verbose_name="last login",
),
),
(

@@ -45,13 +47,13 @@ class Migration(migrations.Migration):
"username",
models.CharField(
error_messages={
"unique": "A user with that username already exists."
"unique": "A user with that username already exists.",
},
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
max_length=150,
unique=True,
validators=[
django.contrib.auth.validators.UnicodeUsernameValidator()
django.contrib.auth.validators.UnicodeUsernameValidator(),
],
verbose_name="username",
),

@@ -59,19 +61,25 @@ class Migration(migrations.Migration):
(
"first_name",
models.CharField(
blank=True, max_length=150, verbose_name="first name"
blank=True,
max_length=150,
verbose_name="first name",
),
),
(
"last_name",
models.CharField(
blank=True, max_length=150, verbose_name="last name"
blank=True,
max_length=150,
verbose_name="last name",
),
),
(
"email",
models.EmailField(
blank=True, max_length=254, verbose_name="email address"
blank=True,
max_length=254,
verbose_name="email address",
),
),
(

@@ -93,7 +101,8 @@ class Migration(migrations.Migration):
(
"date_joined",
models.DateTimeField(
default=django.utils.timezone.now, verbose_name="date joined"
default=django.utils.timezone.now,
verbose_name="date joined",
),
),
(

@@ -10,9 +10,12 @@ class Migration(migrations.Migration):

operations = [
migrations.AlterModelOptions(
name="page", options={"ordering": ["position", "-published_at"]}
name="page",
options={"ordering": ["position", "-published_at"]},
),
migrations.AddField(
model_name="page", name="position", field=models.IntegerField(default=0)
model_name="page",
name="position",
field=models.IntegerField(default=0),
),
]

@@ -25,7 +25,8 @@ class Migration(migrations.Migration):
(
"username",
models.CharField(
help_text="Will be displayed with your comment.", max_length=255
help_text="Will be displayed with your comment.",
max_length=255,
),
),
(

@@ -10,7 +10,8 @@ class Migration(migrations.Migration):

operations = [
migrations.AlterModelOptions(
name="comment", options={"ordering": ["-created_at"]}
name="comment",
options={"ordering": ["-created_at"]},
),
migrations.AddField(
model_name="comment",

@@ -10,6 +10,8 @@ class Migration(migrations.Migration):

operations = [
migrations.RenameField(
model_name="comment", old_name="active", new_name="approved"
model_name="comment",
old_name="active",
new_name="approved",
),
]

@@ -10,7 +10,8 @@ class Migration(migrations.Migration):

operations = [
migrations.AlterModelOptions(
name="comment", options={"ordering": ["created_at"]}
name="comment",
options={"ordering": ["created_at"]},
),
migrations.AlterField(
model_name="comment",

@@ -10,6 +10,7 @@ class Migration(migrations.Migration):

operations = [
migrations.AlterModelOptions(
name="comment", options={"ordering": ["-created_at"]}
name="comment",
options={"ordering": ["-created_at"]},
),
]

@@ -10,6 +10,7 @@ class Migration(migrations.Migration):

operations = [
migrations.AlterModelOptions(
name="comment", options={"ordering": ["created_at"]}
name="comment",
options={"ordering": ["created_at"]},
),
]

@@ -13,7 +13,7 @@ def forwards(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None:
tags = []
keyword: str
for keyword in list(
filter(None, (keyword.strip() for keyword in article.keywords.split(",")))
filter(None, (keyword.strip() for keyword in article.keywords.split(","))),
):
tag = Tag.objects.using(db_alias).filter(name__iexact=keyword).first()
if tag is None:

@@ -13,7 +13,7 @@ class Migration(migrations.Migration):
model_name="article",
name="content",
field=models.TextField(
default='!!! warning "Draft"\n This article is still a draft. It may appear by error in your feed if I click on the "publish" button too early 😊'
default='!!! warning "Draft"\n This article is still a draft. It may appear by error in your feed if I click on the "publish" button too early 😊',
),
),
]

@@ -13,7 +13,9 @@ class Migration(migrations.Migration):
model_name="article",
name="tags",
field=models.ManyToManyField(
blank=True, related_name="articles", to="articles.Tag"
blank=True,
related_name="articles",
to="articles.Tag",
),
),
]

@@ -6,7 +6,8 @@ from django.urls import reverse


def replace_with_wrapper_url(
apps: Apps, schema_editor: BaseDatabaseSchemaEditor
apps: Apps,
schema_editor: BaseDatabaseSchemaEditor,
) -> None:
Attachment = apps.get_model("attachments", "Attachment")
Article = apps.get_model("articles", "Article")

@@ -140,10 +140,10 @@ class Article(models.Model):
def get_related_articles(self) -> Sequence[Article]:
related_articles = set()
published_articles = Article.objects.filter(status=Article.PUBLISHED).exclude(
pk=self.pk
pk=self.pk,
)
for tag in self.tags.all().prefetch_related(
Prefetch("articles", published_articles, to_attr="published_articles")
Prefetch("articles", published_articles, to_attr="published_articles"),
):
related_articles.update(tag.published_articles)
sample_size = min([len(related_articles), 3])

@@ -9,7 +9,8 @@ from articles.utils import format_article_content

@pytest.mark.django_db()
def test_unauthenticated_render_redirects(
published_article: Article, client: Client
published_article: Article,
client: Client,
) -> None:
api_res = client.post(
reverse("api-render-article", kwargs={"article_pk": published_article.pk}),

@@ -20,12 +21,13 @@ def test_unauthenticated_render_redirects(

@pytest.mark.django_db()
def test_render_article_same_content(
published_article: Article, client: Client
published_article: Article,
client: Client,
) -> None:
client.force_login(published_article.author)
api_res = post_article(client, published_article, published_article.content)
standard_res = client.get(
reverse("article-detail", kwargs={"slug": published_article.slug})
reverse("article-detail", kwargs={"slug": published_article.slug}),
)
assert api_res.status_code == 200
assert standard_res.status_code == 200

@@ -42,7 +44,8 @@ def test_render_article_same_content(

@pytest.mark.django_db()
def test_render_article_change_content(
published_article: Article, client: Client
published_article: Article,
client: Client,
) -> None:
client.force_login(published_article.author)
preview_content = "This is a different content **with strong emphasis**"

@@ -80,7 +83,7 @@ def post_article(client: Client, article: Article, content: str) -> HttpResponse
data={
"content": content,
"tag_ids": ",".join(
map(str, article.tags.all().values_list("pk", flat=True))
map(str, article.tags.all().values_list("pk", flat=True)),
),
},
)

@@ -57,28 +57,32 @@ def _assert_article_is_rendered(item: Article, res: HttpResponse) -> None:

@pytest.mark.django_db()
def test_anonymous_cant_access_draft_detail(
client: Client, unpublished_article: Article
client: Client,
unpublished_article: Article,
) -> None:
res = client.get(
reverse("article-detail", kwargs={"slug": unpublished_article.slug})
reverse("article-detail", kwargs={"slug": unpublished_article.slug}),
)
assert res.status_code == 404


@pytest.mark.django_db()
def test_anonymous_can_access_draft_detail_with_key(
client: Client, unpublished_article: Article
client: Client,
unpublished_article: Article,
) -> None:
res = client.get(
reverse("article-detail", kwargs={"slug": unpublished_article.slug})
+ f"?draft_key={unpublished_article.draft_key}"
+ f"?draft_key={unpublished_article.draft_key}",
)
_assert_article_is_rendered(unpublished_article, res)


@pytest.mark.django_db()
def test_user_can_access_draft_detail(
client: Client, author: User, unpublished_article: Article
client: Client,
author: User,
unpublished_article: Article,
) -> None:
client.force_login(author)
_test_access_article_by_slug(client, unpublished_article)

@@ -86,7 +90,8 @@ def test_user_can_access_draft_detail(

@pytest.mark.django_db()
def test_anonymous_cant_access_drafts_list(
client: Client, unpublished_article: Article
client: Client,
unpublished_article: Article,
) -> None:
res = client.get(reverse("drafts-list"))
assert res.status_code == 302

@@ -94,7 +99,9 @@ def test_anonymous_cant_access_drafts_list(

@pytest.mark.django_db()
def test_user_can_access_drafts_list(
client: Client, author: User, unpublished_article: Article
client: Client,
author: User,
unpublished_article: Article,
) -> None:
client.force_login(author)
res = client.get(reverse("drafts-list"))

@@ -114,7 +121,8 @@ def test_has_goatcounter_if_set(client: Client, settings: SettingsWrapper) -> No

@pytest.mark.django_db()
def test_doesnt_have_goatcounter_if_unset(
client: Client, settings: SettingsWrapper
client: Client,
settings: SettingsWrapper,
) -> None:
settings.GOATCOUNTER_DOMAIN = None
res = client.get(reverse("articles-list"))

@@ -125,7 +133,9 @@ def test_doesnt_have_goatcounter_if_unset(

@pytest.mark.django_db()
def test_logged_in_user_doesnt_have_goatcounter(
client: Client, author: User, settings: SettingsWrapper
client: Client,
author: User,
settings: SettingsWrapper,
) -> None:
client.force_login(author)
settings.GOATCOUNTER_DOMAIN = "gc.gabnotes.org"

@@ -25,7 +25,7 @@ def test_unpublish_article(published_article: Article) -> None:
def test_save_article_adds_missing_slug(author: User) -> None:
# Explicitly calling bulk_create with one article because it doesn't call save().
articles = Article.objects.bulk_create(
[Article(author=author, title="noice title", slug="", status=Article.DRAFT)]
[Article(author=author, title="noice title", slug="", status=Article.DRAFT)],
)
article = articles[0]
assert article.slug == ""

@@ -10,7 +10,9 @@ urlpatterns = [
path("tag/<slug:slug>/", views.TagArticlesListView.as_view(), name="tag"),
path("feed/", views.CompleteFeed(), name="complete-feed"),
path(
"api/render/<int:article_pk>/", views.render_article, name="api-render-article"
"api/render/<int:article_pk>/",
views.render_article,
name="api-render-article",
),
path("<slug:slug>/", views.view_article, name="article-detail"),
]

@@ -24,7 +24,7 @@ def format_article_content(content: str) -> str:
TocExtension(anchorlink=True),
CodeHiliteExtension(linenums=False, guess_lang=False),
LazyLoadingImageExtension(),
]
],
)
content = re.sub(r"(\s)#(\w+)", r"\1\#\2", content)
return md.convert(content)

@@ -22,7 +22,7 @@ class BaseFeed(Feed):

def _get_queryset(self) -> QuerySet[Article]:
return Article.objects.filter(status=Article.PUBLISHED).order_by(
"-published_at"
"-published_at",
)

@@ -31,7 +31,7 @@ class BaseArticleListView(generic.ListView):
context["next_page_querystring"] = querystring
if page_obj.has_previous():
querystring = self.build_querystring(
{"page": page_obj.previous_page_number()}
{"page": page_obj.previous_page_number()},
)
context["previous_page_querystring"] = querystring
return context

@@ -55,7 +55,8 @@ class ArticlesListView(PublicArticleListView):
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
context = super().get_context_data(**kwargs)
home_article = Article.objects.filter(
status=Article.PUBLISHED, is_home=True
status=Article.PUBLISHED,
is_home=True,
).first()
context["article"] = home_article
return context

@@ -80,11 +81,13 @@ class SearchArticlesListView(PublicArticleListView):
return queryset.filter(
reduce(operator.and_, (Q(title__icontains=term) for term in search_terms))
| reduce(
operator.and_, (Q(content__icontains=term) for term in search_terms)
operator.and_,
(Q(content__icontains=term) for term in search_terms),
)
| reduce(
operator.and_, (Q(tags__name__icontains=term) for term in search_terms)
)
operator.and_,
(Q(tags__name__icontains=term) for term in search_terms),
),
).distinct()

def get_additional_querystring_params(self) -> dict[str, str]:

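Pieced together, the reformatted search filter above presumably reads as the sketch below; the function name is hypothetical, while queryset, search_terms, and the Q/reduce combination come straight from the hunk.

import operator
from functools import reduce

from django.db.models import Q, QuerySet


def filter_by_terms(queryset: QuerySet, search_terms: list[str]) -> QuerySet:
    # Every term must match the title, or every term the content,
    # or every term a tag name; duplicate rows are collapsed with distinct().
    return queryset.filter(
        reduce(operator.and_, (Q(title__icontains=term) for term in search_terms))
        | reduce(
            operator.and_,
            (Q(content__icontains=term) for term in search_terms),
        )
        | reduce(
            operator.and_,
            (Q(tags__name__icontains=term) for term in search_terms),
        ),
    ).distinct()
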
@@ -100,7 +103,10 @@ class TagArticlesListView(PublicArticleListView):
html_title = ""

def dispatch(
self, request: WSGIRequest, *args: Any, **kwargs: Any
self,
request: WSGIRequest,
*args: Any,
**kwargs: Any,
) -> HttpResponseBase:
self.tag = get_object_or_404(Tag, slug=self.kwargs.get("slug"))
self.main_title = self.html_title = f"{self.tag.name} articles"

@@ -67,7 +67,9 @@ class AttachmentAdmin(admin.ModelAdmin):

@admin.action(description="Reprocess selected attachments")
def reprocess_selected_attachments(
self, request: WSGIRequest, queryset: QuerySet
self,
request: WSGIRequest,
queryset: QuerySet,
) -> None:
if len(queryset) == 0:
messages.error(request, "You must select at least one attachment")

@@ -20,7 +20,9 @@ class Migration(migrations.Migration):
model_name="attachment",
name="processed_file",
field=attachments.models.AbsoluteUrlFileField(
blank=True, null=True, upload_to=""
blank=True,
null=True,
upload_to="",
),
),
]

@@ -83,7 +83,7 @@ class Attachment(models.Model):
"resize_height": settings.SHORTPIXEL_RESIZE_HEIGHT,
"keep_exif": 1,
"file_paths": json.dumps(
[f"{self.original_file.name}:{self.original_file.path}"]
[f"{self.original_file.name}:{self.original_file.path}"],
),
}
data = {**base_data, **post_data}

@@ -151,7 +151,7 @@ CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.db.DatabaseCache",
"LOCATION": "cache",
}
},
}

# Database

@@ -169,7 +169,7 @@ INTERNAL_IPS = [

AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},

@@ -26,7 +26,8 @@ urlpatterns = [
path(
"robots.txt",
TemplateView.as_view(
template_name="blog/robots.txt", content_type="text/plain"
template_name="blog/robots.txt",
content_type="text/plain",
),
),
path("admin/", admin.site.urls),

@@ -10,11 +10,12 @@ def main() -> None:
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
msg = (
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
)
raise ImportError(msg) from exc
execute_from_command_line(sys.argv)

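Assembled from the added lines, the refactored import guard in manage.py presumably ends up as this sketch (the message is now bound to a variable before being raised):

import sys


def main() -> None:
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # The message is built first and then passed to the exception by name.
        msg = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(msg) from exc
    execute_from_command_line(sys.argv)
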
tasks.py
@@ -16,7 +16,10 @@ def update_dependencies(ctx: Context, *, sync: bool = True) -> None:

@task
def compile_dependencies(
ctx: Context, *, update: bool = False, sync: bool = False
ctx: Context,
*,
update: bool = False,
sync: bool = False,
) -> None:
common_args = "-q --allow-unsafe --resolver=backtracking"
if update:

@@ -85,7 +88,10 @@ def check(_ctx: Context) -> None:
def build(ctx: Context) -> None:
with ctx.cd(BASE_DIR):
ctx.run(
"docker-compose build django", pty=True, echo=True, env=COMPOSE_BUILD_ENV
"docker-compose build django",
pty=True,
echo=True,
env=COMPOSE_BUILD_ENV,
)

@@ -93,7 +99,10 @@ def build(ctx: Context) -> None:
def publish(ctx: Context) -> None:
with ctx.cd(BASE_DIR):
ctx.run(
"docker-compose push django", pty=True, echo=True, env=COMPOSE_BUILD_ENV
"docker-compose push django",
pty=True,
echo=True,
env=COMPOSE_BUILD_ENV,
)

@@ -117,7 +126,8 @@ def check_alive(_ctx: Context) -> None:
else:
print("Server is up & running") # noqa: T201
return
raise RuntimeError("Failed to reach the server") from exception
msg = "Failed to reach the server"
raise RuntimeError(msg) from exception


@task(pre=[check, build, publish, deploy], post=[check_alive])