Compare commits
2 Commits

Author  SHA1        Message                                       Date
Luca    bef4f433fc  chore(pages): add title for team faq          2024-05-27 18:33:31 +02:00
                    (continuous-integration/drone/push: build passing)
Luca    28d905a24c  refactor(pages): unify code for page import   2024-05-27 18:33:04 +02:00
4 changed files with 61 additions and 37 deletions

View File

@@ -1,16 +1,26 @@
 from pathlib import Path
-from django.contrib import admin
+from django.contrib import admin, messages
 from .models import Page
+from .parser import import_pages


 def reimport(modeladmin, request, queryset):
-    for page in queryset:
-        path = Path(__file__).resolve().parent / "default_content" / f"{page.url}.html"
-        if path.exists():
-            page.content = path.read_text()
-            page.save()
+    num_pages = len(list(import_pages(True, set(page.url for page in queryset))))
+
+    if num_pages == 0:
+        modeladmin.message_user(
+            request, "Es wurden keine Seiten reimportiert.", messages.WARNING
+        )
+    elif num_pages == 1:
+        modeladmin.message_user(
+            request, "Eine Seite wurde reimportiert.", messages.SUCCESS
+        )
+    else:
+        modeladmin.message_user(
+            request, f"{num_pages} Seiten wurden reimportiert.", messages.SUCCESS
+        )


 @admin.register(Page)
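Note: the hunk cuts off at the @admin.register(Page) decorator, so the wiring that exposes reimport as an admin action is not visible here. A minimal sketch of how such an action is typically attached; the PageAdmin class body is an assumption, not taken from this diff:

@admin.register(Page)
class PageAdmin(admin.ModelAdmin):
    # Hypothetical: only the decorator line appears in the diff. "actions"
    # is the standard Django hook that exposes the reimport function in
    # the changelist action dropdown.
    actions = [reimport]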

View File

@@ -1,3 +1,4 @@
+<title>Team-FAQ</title>
 <div class="content">
 <h2 id="Muss-das-sein">Muss das sein?</h2>
 <p>A: Alle Teammitglieder bekommen Schichten. Selbst AK Sicherheit. Es gibt keine Ausnahmen. Alle packen an.</p>
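The new <title> element is what the refactored parser turns into Page.title and then strips from the stored markup. A minimal, self-contained illustration of that round trip (the HTML snippet is abbreviated, not the full file):

from bs4 import BeautifulSoup

html = '<title>Team-FAQ</title>\n<div class="content">...</div>'
soup = BeautifulSoup(html, "html.parser")
print(soup.title.string)   # Team-FAQ  -> becomes Page.title
soup.title.decompose()     # the tag is removed before the content is saved
print(str(soup).strip())   # <div class="content">...</div>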

View File

@@ -1,9 +1,6 @@
-from pathlib import Path
-from bs4 import BeautifulSoup
-from django.core.management.base import BaseCommand, CommandError
-from ...models import Page
+from django.core.management.base import BaseCommand
+from ...parser import import_pages


 class Command(BaseCommand):
@@ -16,30 +13,9 @@ class Command(BaseCommand):
         parser.add_argument("pages", help="Specify which pages to import", nargs="*")

     def handle(self, *args, **options):
-        content_path = Path(__file__).resolve().parent.parent.parent / "default_content"
-        for file in content_path.iterdir():
-            if (pages := options["pages"]) and file.stem not in pages:
-                continue
-            slug = file.stem
-            p, created = Page.objects.get_or_create(url=slug)
-            if (not created) and (not options["force"]):
-                continue
-            soup = BeautifulSoup(file.read_text(), "html.parser")
-            if soup.title:
-                p.title = soup.title.string
-                soup.title.decompose()
-            else:
-                p.title = slug.title()
-            if visible := soup.find("meta", attrs={"name": "visible"}):
-                p.visible = "content" not in visible.attrs or visible.attrs[
-                    "content"
-                ].lower() in ("1", "true", "yes")
-                visible.decompose()
-            p.content = str(soup).strip()
-            p.save()
-            print(f'created new page "{p.title}" for slug {slug}')
+        pages = import_pages(options["force"], options["pages"])
+
+        for p in pages:
+            self.stderr.write(
+                self.style.SUCCESS(f'created new page "{p.title}" for slug {p.url}')
+            )
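The command's file name is not visible in this view (the file header was lost in extraction), so the name below is only a guess from the module's import path. A hedged sketch of exercising the refactored command programmatically:

from django.core.management import call_command

# "import_pages" is an assumed command name. force=True maps to the
# options["force"] flag the handler reads (defined outside this hunk),
# and positional args fill the nargs="*" "pages" argument.
call_command("import_pages", "team-faq", force=True)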

View File

@@ -0,0 +1,37 @@
+from pathlib import Path
+
+from bs4 import BeautifulSoup
+
+from .models import Page
+
+content_path = Path(__file__).resolve().parent / "default_content"
+
+
+def import_pages(force, pages):
+    for file in content_path.iterdir():
+        if pages and file.stem not in pages:
+            continue
+
+        slug = file.stem
+        p, created = Page.objects.get_or_create(url=slug)
+        if not created and not force:
+            continue
+
+        soup = BeautifulSoup(file.read_text(), "html.parser")
+        if soup.title:
+            p.title = soup.title.string
+            soup.title.decompose()
+        else:
+            p.title = slug.title()
+
+        if visible := soup.find("meta", attrs={"name": "visible"}):
+            p.visible = "content" not in visible.attrs or visible.attrs[
+                "content"
+            ].lower() in ("1", "true", "yes")
+            visible.decompose()
+
+        p.content = str(soup).strip()
+        p.save()
+
+        yield p
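Note that the unified import_pages is a generator: it does no work until it is iterated, which is why the admin action above wraps it in len(list(...)) while the management command simply loops over it. A minimal illustration of that contract:

pages = import_pages(force=True, pages={"team-faq"})  # nothing has run yet
num_pages = len(list(pages))  # iterating performs the imports and saves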