Compare commits
No commits in common. "bef4f433fcc22cfe0ad840d3b53500164753fd1a" and "77a984170cf7532753b51e99334853c41085381e" have entirely different histories.
bef4f433fc ... 77a984170c

@@ -1,26 +1,16 @@
 from pathlib import Path
 
-from django.contrib import admin, messages
+from django.contrib import admin
 
 from .models import Page
-from .parser import import_pages
 
 
 def reimport(modeladmin, request, queryset):
-    num_pages = len(list(import_pages(True, set(page.url for page in queryset))))
-
-    if num_pages == 0:
-        modeladmin.message_user(
-            request, "Es wurden keine Seiten reimportiert.", messages.WARNING
-        )
-    elif num_pages == 1:
-        modeladmin.message_user(
-            request, "Eine Seite wurde reimportiert.", messages.SUCCESS
-        )
-    else:
-        modeladmin.message_user(
-            request, f"{num_pages} Seiten wurden reimportiert.", messages.SUCCESS
-        )
+    for page in queryset:
+        path = Path(__file__).resolve().parent / "default_content" / f"{page.url}.html"
+        if path.exists():
+            page.content = path.read_text()
+            page.save()
 
 
 @admin.register(Page)
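
From the imports and the @admin.register(Page) context line, this first hunk appears to be the app's admin module: one revision delegates to import_pages and reports the count back to the user, while the other copies each selected page's default_content file straight into the database. The hunk ends before the ModelAdmin body, so how the reimport action is attached is not shown. A minimal sketch of the usual wiring, continuing the module above and assuming the action goes into the ModelAdmin's actions list (the PageAdmin name is illustrative, not from the diff):

# Hypothetical wiring; the actual ModelAdmin body lies outside the hunk above.
@admin.register(Page)
class PageAdmin(admin.ModelAdmin):
    # Exposes the reimport function in the admin changelist's action dropdown.
    actions = [reimport]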

@@ -1,4 +1,3 @@
-<title>Team-FAQ</title>
 <div class="content">
 <h2 id="Muss-das-sein">Muss das sein?</h2>
 <p>A: Alle Teammitglieder bekommen Schichten. Selbst AK Sicherheit. Es gibt keine Ausnahmen. Alle packen an.</p>

@@ -1,6 +1,9 @@
-from django.core.management.base import BaseCommand
+from pathlib import Path
 
-from ...parser import import_pages
+from bs4 import BeautifulSoup
+from django.core.management.base import BaseCommand, CommandError
+
+from ...models import Page
 
 
 class Command(BaseCommand):
@@ -13,9 +16,30 @@ class Command(BaseCommand):
         parser.add_argument("pages", help="Specify which pages to import", nargs="*")
 
     def handle(self, *args, **options):
-        pages = import_pages(options["force"], options["pages"])
+        content_path = Path(__file__).resolve().parent.parent.parent / "default_content"
+        for file in content_path.iterdir():
+            if (pages := options["pages"]) and file.stem not in pages:
+                continue
 
-        for p in pages:
-            self.stderr.write(
-                self.style.SUCCESS(f'created new page "{p.title}" for slug {p.url}')
-            )
+            slug = file.stem
+            p, created = Page.objects.get_or_create(url=slug)
+            if (not created) and (not options["force"]):
+                continue
+
+            soup = BeautifulSoup(file.read_text(), "html.parser")
+
+            if soup.title:
+                p.title = soup.title.string
+                soup.title.decompose()
+            else:
+                p.title = slug.title()
+
+            if visible := soup.find("meta", attrs={"name": "visible"}):
+                p.visible = "content" not in visible.attrs or visible.attrs[
+                    "content"
+                ].lower() in ("1", "true", "yes")
+                visible.decompose()
+
+            p.content = str(soup).strip()
+            p.save()
+            print(f'created new page "{p.title}" for slug {slug}')
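
These hunks belong to a management command; its file name is not part of this view, so the name used below is an assumption. The positional pages argument restricts the import to specific slugs, and the reads of options["force"] imply a --force flag defined just above the visible lines. A minimal usage sketch via Django's call_command, assuming the command file is named import_pages.py:

from django.core.management import call_command

# "import_pages" is an assumed command name; equivalent to
#   python manage.py import_pages --force team-faq
# where --force re-imports pages that already exist in the database.
call_command("import_pages", "team-faq", force=True)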

@@ -1,37 +0,0 @@
-from pathlib import Path
-
-from bs4 import BeautifulSoup
-
-from .models import Page
-
-content_path = Path(__file__).resolve().parent / "default_content"
-
-
-def import_pages(force, pages):
-    for file in content_path.iterdir():
-        if pages and file.stem not in pages:
-            continue
-
-        slug = file.stem
-        p, created = Page.objects.get_or_create(url=slug)
-        if not created and not force:
-            continue
-
-        soup = BeautifulSoup(file.read_text(), "html.parser")
-
-        if soup.title:
-            p.title = soup.title.string
-            soup.title.decompose()
-        else:
-            p.title = slug.title()
-
-        if visible := soup.find("meta", attrs={"name": "visible"}):
-            p.visible = "content" not in visible.attrs or visible.attrs[
-                "content"
-            ].lower() in ("1", "true", "yes")
-            visible.decompose()
-
-        p.content = str(soup).strip()
-        p.save()
-
-        yield p
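
The last hunk removes the parser module whose import_pages generator the admin action and the management command shared. Per the code above, it derives the page title from the document's <title> tag (falling back to a title-cased slug), reads visibility from a <meta name="visible"> tag, strips both tags, and stores the rest as content. A self-contained sketch of that extraction, using an illustrative sample document rather than a real default_content file:

from bs4 import BeautifulSoup

# Illustrative input; real files live under default_content/<slug>.html.
sample = """<title>Team-FAQ</title>
<meta name="visible" content="false">
<div class="content"><p>Muss das sein?</p></div>"""

soup = BeautifulSoup(sample, "html.parser")

# Title comes from <title> when present; import_pages falls back to slug.title().
title = soup.title.string if soup.title else "team-faq".title()
if soup.title:
    soup.title.decompose()

# A missing content attribute counts as visible; otherwise "1"/"true"/"yes" enables it.
visible = True
if meta := soup.find("meta", attrs={"name": "visible"}):
    visible = "content" not in meta.attrs or meta.attrs["content"].lower() in ("1", "true", "yes")
    meta.decompose()

content = str(soup).strip()
print(title, visible)  # -> Team-FAQ False
print(content)         # -> the remaining markup with both tags removed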