feat: initialize blog app with blueprints and templates
Some checks failed
Build and Deploy / build-and-deploy (push) Has been cancelled
Some checks failed
Build and Deploy / build-and-deploy (push) Has been cancelled
Extract blog-specific code from the coop monolith into a standalone repository. Includes auth, blog, post, admin, menu_items, snippets blueprints, associated templates, Dockerfile (APP_MODULE=app:app), entrypoint, and Gitea CI workflow. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
7
bp/blog/__init__.py
Normal file
7
bp/blog/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from __future__ import annotations

# NOTE(review): the original comments here look stale — no `browse_bp`
# Blueprint is created in this module; `register` is a factory function
# imported from .routes. Confirm against bp/blog/routes.py before relying
# on the ordering claims below.
from .routes import register  # blueprint factory (callers invoke register())

# Side-effect import; presumably routes attaches handlers at import time.
# Kept for backward compatibility — verify before removing.
from . import routes  # noqa: F401
|
||||
0
bp/blog/admin/__init__.py
Normal file
0
bp/blog/admin/__init__.py
Normal file
173
bp/blog/admin/routes.py
Normal file
173
bp/blog/admin/routes.py
Normal file
@@ -0,0 +1,173 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from quart import (
|
||||
render_template,
|
||||
make_response,
|
||||
Blueprint,
|
||||
redirect,
|
||||
url_for,
|
||||
request,
|
||||
g,
|
||||
)
|
||||
from sqlalchemy import select, delete
|
||||
|
||||
from suma_browser.app.authz import require_admin
|
||||
from suma_browser.app.utils.htmx import is_htmx_request
|
||||
from suma_browser.app.redis_cacher import invalidate_tag_cache
|
||||
|
||||
from models.tag_group import TagGroup, TagGroupTag
|
||||
from models.ghost_content import Tag
|
||||
|
||||
|
||||
def _slugify(name: str) -> str:
    """Convert *name* into a URL-safe slug: lowercase, hyphen-separated."""
    cleaned = name.strip().lower()
    # drop everything except word characters, whitespace and hyphens
    cleaned = re.sub(r"[^\w\s-]", "", cleaned)
    # collapse runs of whitespace/underscores into single hyphens
    return re.sub(r"[\s_]+", "-", cleaned).strip("-")
|
||||
|
||||
|
||||
async def _unassigned_tags(session):
    """Return public, non-deleted tags not assigned to any group.

    Results are ordered by tag name.
    """
    # IDs of every tag that already belongs to some TagGroup
    assigned_sq = select(TagGroupTag.tag_id).subquery()
    q = (
        select(Tag)
        .where(
            Tag.deleted_at.is_(None),
            # NULL visibility is treated the same as "public"
            (Tag.visibility == "public") | (Tag.visibility.is_(None)),
            Tag.id.notin_(select(assigned_sq)),
        )
        .order_by(Tag.name)
    )
    return list((await session.execute(q)).scalars())
|
||||
|
||||
|
||||
def register():
    """Build and return the tag-groups admin blueprint.

    Routes (all admin-only): list/create groups, edit/save a group's
    metadata and tag assignments, and delete a group. Mutations flush the
    session (g.s) and invalidate the "blog" tag cache.
    """
    bp = Blueprint("tag_groups_admin", __name__, url_prefix="/settings/tag-groups")

    @bp.get("/")
    @require_admin
    async def index():
        # All groups in display order, plus tags not yet in any group.
        groups = list(
            (await g.s.execute(
                select(TagGroup).order_by(TagGroup.sort_order, TagGroup.name)
            )).scalars()
        )
        unassigned = await _unassigned_tags(g.s)

        ctx = {"groups": groups, "unassigned_tags": unassigned}

        # Full page on normal navigation; out-of-band fragment for htmx swaps.
        if not is_htmx_request():
            return await render_template("_types/blog/admin/tag_groups/index.html", **ctx)
        else:
            return await render_template("_types/blog/admin/tag_groups/_oob_elements.html", **ctx)

    @bp.post("/")
    @require_admin
    async def create():
        form = await request.form
        name = (form.get("name") or "").strip()
        if not name:
            # Nothing to create — bounce back to the list.
            return redirect(url_for("blog.tag_groups_admin.index"))

        slug = _slugify(name)
        feature_image = (form.get("feature_image") or "").strip() or None
        colour = (form.get("colour") or "").strip() or None
        # NOTE(review): int() raises ValueError on non-numeric input → 500;
        # consider guarding like the tag_ids parsing in save().
        sort_order = int(form.get("sort_order") or 0)

        tg = TagGroup(
            name=name, slug=slug,
            feature_image=feature_image, colour=colour,
            sort_order=sort_order,
        )
        g.s.add(tg)
        await g.s.flush()

        await invalidate_tag_cache("blog")
        return redirect(url_for("blog.tag_groups_admin.index"))

    @bp.get("/<int:id>/")
    @require_admin
    async def edit(id: int):
        tg = await g.s.get(TagGroup, id)
        if not tg:
            # Unknown id — fall back to the list rather than 404.
            return redirect(url_for("blog.tag_groups_admin.index"))

        # Assigned tag IDs for this group
        assigned_rows = list(
            (await g.s.execute(
                select(TagGroupTag.tag_id).where(TagGroupTag.tag_group_id == id)
            )).scalars()
        )
        assigned_tag_ids = set(assigned_rows)

        # All public, non-deleted tags
        all_tags = list(
            (await g.s.execute(
                select(Tag).where(
                    Tag.deleted_at.is_(None),
                    (Tag.visibility == "public") | (Tag.visibility.is_(None)),
                ).order_by(Tag.name)
            )).scalars()
        )

        ctx = {
            "group": tg,
            "all_tags": all_tags,
            "assigned_tag_ids": assigned_tag_ids,
        }

        if not is_htmx_request():
            return await render_template("_types/blog/admin/tag_groups/edit.html", **ctx)
        else:
            return await render_template("_types/blog/admin/tag_groups/_edit_oob.html", **ctx)

    @bp.post("/<int:id>/")
    @require_admin
    async def save(id: int):
        tg = await g.s.get(TagGroup, id)
        if not tg:
            return redirect(url_for("blog.tag_groups_admin.index"))

        form = await request.form
        name = (form.get("name") or "").strip()
        if name:
            # Renaming also regenerates the slug.
            tg.name = name
            tg.slug = _slugify(name)
        tg.feature_image = (form.get("feature_image") or "").strip() or None
        tg.colour = (form.get("colour") or "").strip() or None
        # NOTE(review): unguarded int() — non-numeric sort_order raises here.
        tg.sort_order = int(form.get("sort_order") or 0)

        # Update tag assignments
        selected_tag_ids = set()
        for val in form.getlist("tag_ids"):
            try:
                selected_tag_ids.add(int(val))
            except (ValueError, TypeError):
                # Silently skip malformed ids from the form.
                pass

        # Remove old assignments
        await g.s.execute(
            delete(TagGroupTag).where(TagGroupTag.tag_group_id == id)
        )
        await g.s.flush()

        # Add new assignments
        for tid in selected_tag_ids:
            g.s.add(TagGroupTag(tag_group_id=id, tag_id=tid))
        await g.s.flush()

        await invalidate_tag_cache("blog")
        return redirect(url_for("blog.tag_groups_admin.edit", id=id))

    @bp.post("/<int:id>/delete/")
    @require_admin
    async def delete_group(id: int):
        tg = await g.s.get(TagGroup, id)
        if tg:
            await g.s.delete(tg)
            await g.s.flush()
            await invalidate_tag_cache("blog")
        # Deleting an unknown id is a silent no-op.
        return redirect(url_for("blog.tag_groups_admin.index"))

    return bp
|
||||
120
bp/blog/filters/qs.py
Normal file
120
bp/blog/filters/qs.py
Normal file
@@ -0,0 +1,120 @@
|
||||
from quart import request
|
||||
|
||||
from typing import Iterable, Optional, Union
|
||||
|
||||
from suma_browser.app.filters.qs_base import (
|
||||
KEEP, _norm, make_filter_set, build_qs,
|
||||
)
|
||||
from suma_browser.app.filters.query_types import BlogQuery
|
||||
|
||||
|
||||
def decode() -> BlogQuery:
    """Decode the current request's query string into a BlogQuery.

    Fix: a malformed ``?page=abc`` used to raise ValueError (→ 500);
    it now falls back to page 1.

    Tag/author/group filters are single-select: only the first non-empty
    value of each is kept. Authors are lower-cased.
    """
    try:
        page = int(request.args.get("page", 1))
    except (TypeError, ValueError):
        page = 1
    search = request.args.get("search")
    sort = request.args.get("sort")
    liked = request.args.get("liked")
    drafts = request.args.get("drafts")

    # single-select filters: keep at most one value each
    selected_tags = tuple(s.strip() for s in request.args.getlist("tag") if s.strip())[:1]
    selected_authors = tuple(s.strip().lower() for s in request.args.getlist("author") if s.strip())[:1]
    selected_groups = tuple(s.strip() for s in request.args.getlist("group") if s.strip())[:1]
    view = request.args.get("view") or None

    return BlogQuery(page, search, sort, selected_tags, selected_authors, liked, view, drafts, selected_groups)
|
||||
|
||||
|
||||
def makeqs_factory():
    """
    Build a makeqs(...) that starts from the current filters + page.
    Auto-resets page to 1 when filters change unless you pass page explicitly.

    NOTE(review): the auto-reset only fires when ``page=KEEP`` is passed;
    the parameter's default is ``None``, which omits ``page`` from the
    query string entirely — confirm callers rely on that.
    """
    # Snapshot the current request's filters as the baseline.
    q = decode()
    base_tags = [s for s in q.selected_tags if (s or "").strip()]
    base_authors = [s for s in q.selected_authors if (s or "").strip()]
    base_groups = [s for s in q.selected_groups if (s or "").strip()]
    base_search = q.search or None
    base_liked = q.liked or None
    base_sort = q.sort or None
    base_page = int(q.page or 1)
    base_view = q.view or None
    base_drafts = q.drafts or None

    def makeqs(
        *,
        clear_filters: bool = False,
        add_tag: Union[str, Iterable[str], None] = None,
        remove_tag: Union[str, Iterable[str], None] = None,
        add_author: Union[str, Iterable[str], None] = None,
        remove_author: Union[str, Iterable[str], None] = None,
        add_group: Union[str, Iterable[str], None] = None,
        remove_group: Union[str, Iterable[str], None] = None,
        search: Union[str, None, object] = KEEP,
        sort: Union[str, None, object] = KEEP,
        page: Union[int, None, object] = None,
        extra: Optional[Iterable[tuple]] = None,
        leading_q: bool = True,
        liked: Union[bool, None, object] = KEEP,
        view: Union[str, None, object] = KEEP,
        drafts: Union[str, None, object] = KEEP,
    ) -> str:
        # KEEP is a sentinel meaning "inherit the baseline value".
        groups = make_filter_set(base_groups, add_group, remove_group, clear_filters, single_select=True)
        tags = make_filter_set(base_tags, add_tag, remove_tag, clear_filters, single_select=True)
        authors = make_filter_set(base_authors, add_author, remove_author, clear_filters, single_select=True)

        # Mutual exclusion: selecting a group clears tags, selecting a tag clears groups
        if add_group is not None:
            tags = []
        if add_tag is not None:
            groups = []

        # clear_filters wins; otherwise KEEP inherits, anything else replaces.
        final_search = None if clear_filters else base_search if search is KEEP else ((search or "").strip() or None)
        final_sort = base_sort if sort is KEEP else (sort or None)
        final_liked = None if clear_filters else base_liked if liked is KEEP else liked
        final_view = base_view if view is KEEP else (view or None)
        final_drafts = None if clear_filters else base_drafts if drafts is KEEP else (drafts or None)

        # Did filters change?
        # NOTE(review): `view` is deliberately absent here, so switching the
        # view does not reset the page — confirm that is intended.
        filters_changed = (
            set(map(_norm, tags)) != set(map(_norm, base_tags))
            or set(map(_norm, authors)) != set(map(_norm, base_authors))
            or set(map(_norm, groups)) != set(map(_norm, base_groups))
            or final_search != base_search
            or final_sort != base_sort
            or final_liked != base_liked
            or final_drafts != base_drafts
        )

        # Page logic
        if page is KEEP:
            final_page = 1 if filters_changed else base_page
        else:
            final_page = page

        # Build params in a stable order: filters, then flags, then page.
        params = []
        for s in groups:
            params.append(("group", s))
        for s in tags:
            params.append(("tag", s))
        for s in authors:
            params.append(("author", s))
        if final_search:
            params.append(("search", final_search))
        if final_liked is not None:
            params.append(("liked", final_liked))
        if final_sort:
            params.append(("sort", final_sort))
        if final_view:
            params.append(("view", final_view))
        if final_drafts:
            params.append(("drafts", final_drafts))
        if final_page is not None:
            params.append(("page", str(final_page)))
        if extra:
            # Caller-supplied extra pairs; None values are dropped.
            for k, v in extra:
                if v is not None:
                    params.append((k, str(v)))

        return build_qs(params, leading_q=leading_q)

    return makeqs
|
||||
256
bp/blog/ghost/editor_api.py
Normal file
256
bp/blog/ghost/editor_api.py
Normal file
@@ -0,0 +1,256 @@
|
||||
"""
|
||||
Editor API proxy – image/media/file uploads and oembed.
|
||||
|
||||
Forwards requests to the Ghost Admin API with JWT auth so the browser
|
||||
never needs direct Ghost access.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import httpx
|
||||
from quart import Blueprint, request, jsonify, g
|
||||
from sqlalchemy import select, or_
|
||||
|
||||
from suma_browser.app.authz import require_admin, require_login
|
||||
from models import Snippet
|
||||
from .ghost_admin_token import make_ghost_admin_jwt
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Required at import time — the app cannot proxy to Ghost without it.
GHOST_ADMIN_API_URL = os.environ["GHOST_ADMIN_API_URL"]
# Upload size ceilings enforced before forwarding to Ghost.
MAX_IMAGE_SIZE = 10 * 1024 * 1024  # 10 MB
MAX_MEDIA_SIZE = 100 * 1024 * 1024  # 100 MB
MAX_FILE_SIZE = 50 * 1024 * 1024  # 50 MB

# Content types accepted by the image upload proxy.
ALLOWED_IMAGE_MIMETYPES = frozenset({
    "image/jpeg", "image/png", "image/gif", "image/webp", "image/svg+xml",
})
# Content types accepted by the audio/video upload proxy.
ALLOWED_MEDIA_MIMETYPES = frozenset({
    "audio/mpeg", "audio/ogg", "audio/wav", "audio/mp4", "audio/aac",
    "video/mp4", "video/webm", "video/ogg",
})

editor_api_bp = Blueprint("editor_api", __name__, url_prefix="/editor-api")
|
||||
|
||||
|
||||
def _auth_header() -> dict[str, str]:
    """Build the ``Authorization: Ghost <jwt>`` header for Admin API calls."""
    token = make_ghost_admin_jwt()
    return {"Authorization": f"Ghost {token}"}
|
||||
|
||||
|
||||
@editor_api_bp.post("/images/upload/")
@require_admin
async def upload_image():
    """Proxy image upload to Ghost Admin API.

    Validates presence, size (MAX_IMAGE_SIZE) and content type before
    forwarding. Fix: a non-JSON upstream body (e.g. an HTML error page
    from a proxy) no longer crashes with an unhandled decode error — the
    status code is surfaced as a JSON error instead.
    """
    files = await request.files
    uploaded = files.get("file")
    if not uploaded:
        return jsonify({"errors": [{"message": "No file provided"}]}), 400

    content = uploaded.read()
    if len(content) > MAX_IMAGE_SIZE:
        return jsonify({"errors": [{"message": "File too large (max 10 MB)"}]}), 413

    if uploaded.content_type not in ALLOWED_IMAGE_MIMETYPES:
        return jsonify({"errors": [{"message": f"Unsupported file type: {uploaded.content_type}"}]}), 415

    url = f"{GHOST_ADMIN_API_URL}/images/upload/"
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.post(
            url,
            headers=_auth_header(),
            files={"file": (uploaded.filename, content, uploaded.content_type)},
        )

    if not resp.is_success:
        log.error("Ghost image upload failed %s: %s", resp.status_code, resp.text[:500])

    try:
        body = resp.json()
    except ValueError:
        # Upstream returned a non-JSON body; pass through the status code
        # with a JSON-shaped error instead of raising during decode.
        body = {"errors": [{"message": f"Upstream error ({resp.status_code})"}]}
    return body, resp.status_code
|
||||
|
||||
|
||||
@editor_api_bp.post("/media/upload/")
@require_admin
async def upload_media():
    """Proxy audio/video upload to Ghost Admin API.

    Validates presence, size (MAX_MEDIA_SIZE) and content type, then
    forwards the file (plus an optional video thumbnail) to Ghost.
    """
    files = await request.files
    uploaded = files.get("file")
    if not uploaded:
        return jsonify({"errors": [{"message": "No file provided"}]}), 400

    content = uploaded.read()
    if len(content) > MAX_MEDIA_SIZE:
        return jsonify({"errors": [{"message": "File too large (max 100 MB)"}]}), 413

    if uploaded.content_type not in ALLOWED_MEDIA_MIMETYPES:
        return jsonify({"errors": [{"message": f"Unsupported media type: {uploaded.content_type}"}]}), 415

    ghost_files = {"file": (uploaded.filename, content, uploaded.content_type)}

    # Optional video thumbnail
    # NOTE(review): thumbnail size/type are not validated — confirm Ghost
    # enforces its own limits here.
    thumbnail = files.get("thumbnail")
    if thumbnail:
        thumb_content = thumbnail.read()
        ghost_files["thumbnail"] = (thumbnail.filename, thumb_content, thumbnail.content_type)

    url = f"{GHOST_ADMIN_API_URL}/media/upload/"
    async with httpx.AsyncClient(timeout=60) as client:
        resp = await client.post(url, headers=_auth_header(), files=ghost_files)

    if not resp.is_success:
        log.error("Ghost media upload failed %s: %s", resp.status_code, resp.text[:500])

    # NOTE(review): resp.json() raises if the upstream body is not JSON
    # (e.g. an HTML gateway error) — consider guarding the decode.
    return resp.json(), resp.status_code
|
||||
|
||||
|
||||
@editor_api_bp.post("/files/upload/")
@require_admin
async def upload_file():
    """Proxy file upload to Ghost Admin API.

    Validates presence and size (MAX_FILE_SIZE) only; unlike the image and
    media endpoints there is no content-type allowlist here.
    """
    files = await request.files
    uploaded = files.get("file")
    if not uploaded:
        return jsonify({"errors": [{"message": "No file provided"}]}), 400

    content = uploaded.read()
    if len(content) > MAX_FILE_SIZE:
        return jsonify({"errors": [{"message": "File too large (max 50 MB)"}]}), 413

    url = f"{GHOST_ADMIN_API_URL}/files/upload/"
    async with httpx.AsyncClient(timeout=60) as client:
        resp = await client.post(
            url,
            headers=_auth_header(),
            files={"file": (uploaded.filename, content, uploaded.content_type)},
        )

    if not resp.is_success:
        log.error("Ghost file upload failed %s: %s", resp.status_code, resp.text[:500])

    # NOTE(review): resp.json() raises on a non-JSON upstream body.
    return resp.json(), resp.status_code
|
||||
|
||||
|
||||
@editor_api_bp.get("/oembed/")
@require_admin
async def oembed_proxy():
    """Proxy oembed lookups to Ghost Admin API.

    Requires a ``url`` query parameter; all query params are forwarded
    unchanged to Ghost.
    """
    params = dict(request.args)
    if not params.get("url"):
        return jsonify({"errors": [{"message": "url parameter required"}]}), 400

    url = f"{GHOST_ADMIN_API_URL}/oembed/"
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.get(url, headers=_auth_header(), params=params)

    if not resp.is_success:
        log.error("Ghost oembed failed %s: %s", resp.status_code, resp.text[:500])

    # NOTE(review): resp.json() raises on a non-JSON upstream body.
    return resp.json(), resp.status_code
|
||||
|
||||
|
||||
# ── Snippets ────────────────────────────────────────────────────────

# Allowed snippet visibility levels. Per the handlers below, non-admins
# are limited to "private"; "shared"/"admin" require admin rights.
VALID_VISIBILITY = frozenset({"private", "shared", "admin"})
|
||||
|
||||
|
||||
@editor_api_bp.get("/snippets/")
@require_login
async def list_snippets():
    """Return snippets visible to the current user.

    Visible = own snippets + everyone's "shared" snippets; admins
    additionally see "admin" snippets. Ordered by name.
    """
    uid = g.user.id
    is_admin = g.rights.get("admin")

    # OR of visibility filters; admin adds one more clause.
    filters = [Snippet.user_id == uid, Snippet.visibility == "shared"]
    if is_admin:
        filters.append(Snippet.visibility == "admin")

    rows = (await g.s.execute(
        select(Snippet).where(or_(*filters)).order_by(Snippet.name)
    )).scalars().all()

    return jsonify([
        {"id": s.id, "name": s.name, "value": s.value, "visibility": s.visibility}
        for s in rows
    ])
|
||||
|
||||
|
||||
@editor_api_bp.post("/snippets/")
@require_login
async def create_snippet():
    """Create or upsert a snippet by (user_id, name).

    Returns 201 on create, 200 on update of an existing snippet.
    Non-admins are silently downgraded to "private" visibility.
    """
    data = await request.get_json(force=True)
    name = (data.get("name") or "").strip()
    value = data.get("value")
    visibility = data.get("visibility", "private")

    if not name or value is None:
        return jsonify({"error": "name and value are required"}), 400
    if visibility not in VALID_VISIBILITY:
        return jsonify({"error": f"visibility must be one of {sorted(VALID_VISIBILITY)}"}), 400
    # Silent downgrade rather than 403: only admins may share snippets.
    if visibility != "private" and not g.rights.get("admin"):
        visibility = "private"

    uid = g.user.id

    # Upsert keyed on (user_id, name).
    # NOTE(review): no unique constraint visible here — concurrent creates
    # with the same name could race; confirm the DB enforces uniqueness.
    existing = (await g.s.execute(
        select(Snippet).where(Snippet.user_id == uid, Snippet.name == name)
    )).scalar_one_or_none()

    if existing:
        existing.value = value
        existing.visibility = visibility
        snippet = existing
    else:
        snippet = Snippet(user_id=uid, name=name, value=value, visibility=visibility)
        g.s.add(snippet)

    await g.s.flush()
    return jsonify({
        "id": snippet.id, "name": snippet.name,
        "value": snippet.value, "visibility": snippet.visibility,
    }), 200 if existing else 201
|
||||
|
||||
|
||||
@editor_api_bp.patch("/snippets/<int:snippet_id>/")
@require_login
async def patch_snippet(snippet_id: int):
    """Update snippet visibility. Only admins may set shared/admin.

    Owners may patch their own snippets; admins may patch any.
    Returns 404 if unknown, 403 if not permitted, 400 on bad visibility.
    """
    snippet = await g.s.get(Snippet, snippet_id)
    if not snippet:
        return jsonify({"error": "not found"}), 404

    is_admin = g.rights.get("admin")

    # Ownership check first — non-owners need admin rights at all.
    if snippet.user_id != g.user.id and not is_admin:
        return jsonify({"error": "forbidden"}), 403

    data = await request.get_json(force=True)
    visibility = data.get("visibility")
    if visibility is not None:
        if visibility not in VALID_VISIBILITY:
            return jsonify({"error": f"visibility must be one of {sorted(VALID_VISIBILITY)}"}), 400
        # Unlike create (silent downgrade), patching rejects with 403.
        if visibility != "private" and not is_admin:
            return jsonify({"error": "only admins may set shared/admin visibility"}), 403
        snippet.visibility = visibility

    await g.s.flush()
    return jsonify({
        "id": snippet.id, "name": snippet.name,
        "value": snippet.value, "visibility": snippet.visibility,
    })
|
||||
|
||||
|
||||
@editor_api_bp.delete("/snippets/<int:snippet_id>/")
@require_login
async def delete_snippet(snippet_id: int):
    """Delete a snippet. Owners can delete their own; admins can delete any."""
    snippet = await g.s.get(Snippet, snippet_id)
    if not snippet:
        return jsonify({"error": "not found"}), 404

    is_owner = snippet.user_id == g.user.id
    if not (is_owner or g.rights.get("admin")):
        return jsonify({"error": "forbidden"}), 403

    await g.s.delete(snippet)
    await g.s.flush()
    return jsonify({"ok": True})
|
||||
46
bp/blog/ghost/ghost_admin_token.py
Normal file
46
bp/blog/ghost/ghost_admin_token.py
Normal file
@@ -0,0 +1,46 @@
|
||||
import os
|
||||
import time
|
||||
import jwt # PyJWT
|
||||
from typing import Tuple
|
||||
|
||||
|
||||
def _split_key(raw_key: str) -> Tuple[str, bytes]:
    """Split Ghost's ``id:secret`` admin key into ``(id, secret_bytes)``.

    The secret portion is hex-encoded; it is decoded to raw bytes.
    """
    key_id, hex_secret = raw_key.split(":", 1)
    return key_id, bytes.fromhex(hex_secret)
|
||||
|
||||
|
||||
def make_ghost_admin_jwt() -> str:
    """
    Generate a short-lived JWT suitable for Authorization: Ghost <token>
    """
    key_id, secret_bytes = _split_key(os.environ["GHOST_ADMIN_API_KEY"])

    issued = int(time.time())

    # Ghost requires iat/exp plus the fixed "/admin/" audience, and
    # identifies the signing key via the `kid` header.
    claims = {
        "iat": issued,
        "exp": issued + 5 * 60,  # now + 5 minutes
        "aud": "/admin/",
    }
    jwt_headers = {
        "alg": "HS256",
        "kid": key_id,
        "typ": "JWT",
    }

    # PyJWT returns str in recent versions; Ghost expects bare token string
    return jwt.encode(claims, secret_bytes, algorithm="HS256", headers=jwt_headers)
|
||||
170
bp/blog/ghost/ghost_posts.py
Normal file
170
bp/blog/ghost/ghost_posts.py
Normal file
@@ -0,0 +1,170 @@
|
||||
"""
|
||||
Ghost Admin API – post CRUD.
|
||||
|
||||
Uses the same JWT auth and httpx patterns as ghost_sync.py.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import httpx
|
||||
|
||||
from .ghost_admin_token import make_ghost_admin_jwt
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
GHOST_ADMIN_API_URL = os.environ["GHOST_ADMIN_API_URL"]
|
||||
|
||||
|
||||
def _auth_header() -> dict[str, str]:
    """Authorization header carrying a fresh short-lived Ghost admin JWT."""
    return {"Authorization": "Ghost " + make_ghost_admin_jwt()}
|
||||
|
||||
|
||||
def _check(resp: httpx.Response) -> None:
    """Raise with the Ghost error body so callers see what went wrong.

    No-op on success; on error, log a truncated body and re-raise via
    ``raise_for_status``.
    """
    if not resp.is_success:
        log.error(
            "Ghost API %s %s → %s: %s",
            resp.request.method,
            resp.request.url,
            resp.status_code,
            resp.text[:2000],
        )
        resp.raise_for_status()
|
||||
|
||||
|
||||
async def get_post_for_edit(ghost_id: str) -> dict | None:
    """Fetch a single post by Ghost ID, including lexical source.

    Returns None when Ghost reports 404; raises on other errors.
    """
    endpoint = (
        f"{GHOST_ADMIN_API_URL}/posts/{ghost_id}/"
        "?formats=lexical,html,mobiledoc&include=newsletters"
    )
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.get(endpoint, headers=_auth_header())

    if resp.status_code == 404:
        return None
    _check(resp)
    return resp.json()["posts"][0]
|
||||
|
||||
|
||||
async def create_post(
    title: str,
    lexical_json: str,
    status: str = "draft",
    feature_image: str | None = None,
    custom_excerpt: str | None = None,
    feature_image_caption: str | None = None,
) -> dict:
    """Create a new post in Ghost. Returns the created post dict.

    ``lexical_json`` is the serialized Lexical document; ``mobiledoc`` is
    explicitly nulled so Ghost treats the post as lexical-sourced.
    Optional fields are only included when provided (note: the caption
    uses an ``is not None`` check, so an empty string is still sent).
    """
    post_body: dict = {
        "title": title,
        "lexical": lexical_json,
        "mobiledoc": None,
        "status": status,
    }
    if feature_image:
        post_body["feature_image"] = feature_image
    if custom_excerpt:
        post_body["custom_excerpt"] = custom_excerpt
    if feature_image_caption is not None:
        post_body["feature_image_caption"] = feature_image_caption
    payload = {"posts": [post_body]}
    url = f"{GHOST_ADMIN_API_URL}/posts/"
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.post(url, json=payload, headers=_auth_header())
        _check(resp)
        return resp.json()["posts"][0]
|
||||
|
||||
|
||||
async def update_post(
    ghost_id: str,
    lexical_json: str,
    title: str | None,
    updated_at: str,
    feature_image: str | None = None,
    custom_excerpt: str | None = None,
    feature_image_caption: str | None = None,
    status: str | None = None,
    newsletter_slug: str | None = None,
    email_segment: str | None = None,
    email_only: bool | None = None,
) -> dict:
    """Update an existing Ghost post. Returns the updated post dict.

    ``updated_at`` is Ghost's optimistic-locking token – pass the value
    you received from ``get_post_for_edit``.

    When ``newsletter_slug`` is set the publish request also triggers an
    email send via Ghost's query-parameter API:
    ``?newsletter={slug}&email_segment={segment}``.
    """
    post_body: dict = {
        "lexical": lexical_json,
        "mobiledoc": None,
        "updated_at": updated_at,
    }
    # None means "leave unchanged"; `or None` turns "" into an explicit clear.
    if title is not None:
        post_body["title"] = title
    if feature_image is not None:
        post_body["feature_image"] = feature_image or None
    if custom_excerpt is not None:
        post_body["custom_excerpt"] = custom_excerpt or None
    if feature_image_caption is not None:
        post_body["feature_image_caption"] = feature_image_caption
    if status is not None:
        post_body["status"] = status
    if email_only:
        post_body["email_only"] = True
    payload = {"posts": [post_body]}

    url = f"{GHOST_ADMIN_API_URL}/posts/{ghost_id}/"
    # email_segment is only meaningful together with a newsletter slug.
    # NOTE(review): slug/segment are interpolated without URL-encoding —
    # confirm they are always URL-safe.
    if newsletter_slug:
        url += f"?newsletter={newsletter_slug}"
        if email_segment:
            url += f"&email_segment={email_segment}"
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.put(url, json=payload, headers=_auth_header())
        _check(resp)
        return resp.json()["posts"][0]
|
||||
|
||||
|
||||
# Whitelist of keyword arguments accepted by update_post_settings();
# anything not listed here is silently ignored.
_SETTINGS_FIELDS = (
    "slug",
    "published_at",
    "featured",
    "visibility",
    "email_only",
    "custom_template",
    "meta_title",
    "meta_description",
    "canonical_url",
    "og_image",
    "og_title",
    "og_description",
    "twitter_image",
    "twitter_title",
    "twitter_description",
    "tags",
    "feature_image_alt",
)
|
||||
|
||||
|
||||
async def update_post_settings(
    ghost_id: str,
    updated_at: str,
    **kwargs,
) -> dict:
    """Update Ghost post settings (slug, tags, SEO, social, etc.).

    Only non-None keyword args are included in the PUT payload.
    Accepts any key from ``_SETTINGS_FIELDS``.
    """
    post_body: dict = {"updated_at": updated_at}
    post_body.update({
        field: kwargs[field]
        for field in _SETTINGS_FIELDS
        if kwargs.get(field) is not None
    })

    payload = {"posts": [post_body]}
    url = f"{GHOST_ADMIN_API_URL}/posts/{ghost_id}/"
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.put(url, json=payload, headers=_auth_header())
        _check(resp)
        return resp.json()["posts"][0]
|
||||
1069
bp/blog/ghost/ghost_sync.py
Normal file
1069
bp/blog/ghost/ghost_sync.py
Normal file
File diff suppressed because it is too large
Load Diff
668
bp/blog/ghost/lexical_renderer.py
Normal file
668
bp/blog/ghost/lexical_renderer.py
Normal file
@@ -0,0 +1,668 @@
|
||||
"""
|
||||
Lexical JSON → HTML renderer.
|
||||
|
||||
Produces HTML matching Ghost's ``kg-*`` class conventions so the existing
|
||||
``cards.css`` stylesheet works unchanged.
|
||||
|
||||
Public API
|
||||
----------
|
||||
render_lexical(doc) – Lexical JSON (dict or string) → HTML string
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import html
|
||||
import json
|
||||
from typing import Callable
|
||||
|
||||
import mistune
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Registry
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# node type → renderer function; populated via the @_renderer decorator
_RENDERERS: dict[str, Callable[[dict], str]] = {}


def _renderer(node_type: str):
    """Decorator — register a function as the renderer for *node_type*."""
    def _register(fn: Callable[[dict], str]) -> Callable[[dict], str]:
        _RENDERERS[node_type] = fn
        return fn

    return _register
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public entry point
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def render_lexical(doc: dict | str) -> str:
    """Render a Lexical JSON document to an HTML string.

    Accepts either a parsed dict or a JSON string; documents without a
    top-level "root" key are treated as the root themselves.
    """
    parsed = json.loads(doc) if isinstance(doc, str) else doc
    root = parsed.get("root", parsed)
    return _render_children(root.get("children", []))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Core dispatch
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _render_node(node: dict) -> str:
    """Dispatch *node* to its registered renderer; unknown types render as ''."""
    fn = _RENDERERS.get(node.get("type", ""))
    return fn(node) if fn else ""
|
||||
|
||||
|
||||
def _render_children(children: list[dict]) -> str:
    """Concatenate the rendered HTML of *children* in document order."""
    parts = [_render_node(child) for child in children]
    return "".join(parts)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Text formatting
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Lexical format bitmask
|
||||
# Lexical format bitmask — one bit per inline style.
_FORMAT_BOLD = 1
_FORMAT_ITALIC = 2
_FORMAT_STRIKETHROUGH = 4
_FORMAT_UNDERLINE = 8
_FORMAT_CODE = 16
_FORMAT_SUBSCRIPT = 32
_FORMAT_SUPERSCRIPT = 64
_FORMAT_HIGHLIGHT = 128

# (bit, open tag, close tag) — applied in this order, so earlier entries
# end up as the innermost tags when multiple bits are set.
_FORMAT_TAGS: list[tuple[int, str, str]] = [
    (_FORMAT_BOLD, "<strong>", "</strong>"),
    (_FORMAT_ITALIC, "<em>", "</em>"),
    (_FORMAT_STRIKETHROUGH, "<s>", "</s>"),
    (_FORMAT_UNDERLINE, "<u>", "</u>"),
    (_FORMAT_CODE, "<code>", "</code>"),
    (_FORMAT_SUBSCRIPT, "<sub>", "</sub>"),
    (_FORMAT_SUPERSCRIPT, "<sup>", "</sup>"),
    (_FORMAT_HIGHLIGHT, "<mark>", "</mark>"),
]

# Element-level alignment from ``format`` field
# (integer codes; string values are handled directly in _align_style)
_ALIGN_MAP = {
    1: "text-align: left",
    2: "text-align: center",
    3: "text-align: right",
    4: "text-align: justify",
}
|
||||
|
||||
|
||||
def _align_style(node: dict) -> str:
    """Return an inline ``style`` attribute for the node's alignment, or ''.

    The Lexical ``format`` field may be an integer code (mapped via
    _ALIGN_MAP) or a CSS alignment keyword string.
    """
    fmt = node.get("format")
    if isinstance(fmt, int):
        css = _ALIGN_MAP.get(fmt)
        return f' style="{css}"' if css else ""
    if isinstance(fmt, str) and fmt:
        return f' style="text-align: {fmt}"'
    return ""
|
||||
|
||||
|
||||
def _wrap_format(text: str, fmt: int) -> str:
    """Wrap *text* in the HTML tags selected by the Lexical format bitmask."""
    wrapped = text
    for mask, open_tag, close_tag in _FORMAT_TAGS:
        if fmt & mask:
            wrapped = f"{open_tag}{wrapped}{close_tag}"
    return wrapped
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tier 1 — text nodes
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@_renderer("text")
def _text(node: dict) -> str:
    """Render a text node: HTML-escape it, then apply the format bitmask."""
    escaped = html.escape(node.get("text", ""))
    fmt = node.get("format", 0)
    if not isinstance(fmt, int) or fmt == 0:
        return escaped
    return _wrap_format(escaped, fmt)
|
||||
|
||||
|
||||
@_renderer("linebreak")
def _linebreak(_node: dict) -> str:
    """A hard line break renders as a bare ``<br>``; the node has no payload."""
    return "<br>"
|
||||
|
||||
|
||||
@_renderer("tab")
def _tab(_node: dict) -> str:
    """A tab node renders as a literal tab character (not &nbsp; padding)."""
    return "\t"
|
||||
|
||||
|
||||
@_renderer("paragraph")
def _paragraph(node: dict) -> str:
    """Render a paragraph; an empty paragraph becomes ``<p><br></p>`` so it
    still occupies a visible line, matching Ghost's renderer."""
    body = _render_children(node.get("children", [])) or "<br>"
    return f"<p{_align_style(node)}>{body}</p>"
|
||||
|
||||
|
||||
@_renderer("extended-text")
def _extended_text(node: dict) -> str:
    """Ghost's ``extended-text`` variant renders identically to a paragraph."""
    return _paragraph(node)
|
||||
|
||||
|
||||
@_renderer("heading")
def _heading(node: dict) -> str:
    """Render a heading node as ``<h1>``–``<h6>``, honoring element alignment.

    The ``tag`` value comes from client-supplied JSON and is interpolated
    into raw HTML, so it is whitelisted; anything unexpected falls back to
    the default ``h2``.
    """
    tag = node.get("tag", "h2")
    if tag not in {"h1", "h2", "h3", "h4", "h5", "h6"}:
        tag = "h2"
    inner = _render_children(node.get("children", []))
    style = _align_style(node)
    return f"<{tag}{style}>{inner}</{tag}>"
|
||||
|
||||
|
||||
@_renderer("extended-heading")
def _extended_heading(node: dict) -> str:
    """Ghost's ``extended-heading`` variant renders identically to a heading."""
    return _heading(node)
|
||||
|
||||
|
||||
@_renderer("quote")
def _quote(node: dict) -> str:
    """Render a quote node as a plain ``<blockquote>`` (no alignment)."""
    rendered = _render_children(node.get("children", []))
    return "<blockquote>" + rendered + "</blockquote>"
|
||||
|
||||
|
||||
@_renderer("extended-quote")
def _extended_quote(node: dict) -> str:
    """Ghost's ``extended-quote`` variant renders identically to a quote."""
    return _quote(node)
|
||||
|
||||
|
||||
@_renderer("aside")
def _aside(node: dict) -> str:
    """Render an aside node as an ``<aside>`` element."""
    rendered = _render_children(node.get("children", []))
    return "<aside>" + rendered + "</aside>"
|
||||
|
||||
|
||||
@_renderer("link")
def _link(node: dict) -> str:
    """Render an anchor; ``href``/``target``/``rel`` are attribute-escaped,
    and ``target``/``rel`` are emitted only when non-empty."""
    attrs = [f' href="{html.escape(node.get("url", ""), quote=True)}"']
    for attr_name in ("target", "rel"):
        value = node.get(attr_name, "")
        if value:
            attrs.append(f' {attr_name}="{html.escape(value, quote=True)}"')
    inner = _render_children(node.get("children", []))
    return f"<a{''.join(attrs)}>{inner}</a>"
|
||||
|
||||
|
||||
@_renderer("autolink")
def _autolink(node: dict) -> str:
    """Autolink nodes carry the same fields as links; render identically."""
    return _link(node)
|
||||
|
||||
|
||||
@_renderer("at-link")
def _at_link(node: dict) -> str:
    """Ghost @-mention links carry the same fields as links; render identically."""
    return _link(node)
|
||||
|
||||
|
||||
@_renderer("list")
def _list(node: dict) -> str:
    """Render a list node; numbered lists become ``<ol>`` (keeping a
    non-default ``start``), everything else becomes ``<ul>``."""
    ordered = node.get("listType") == "number"
    tag = "ol" if ordered else "ul"
    start = node.get("start")
    attrs = f' start="{start}"' if ordered and start and start != 1 else ""
    inner = _render_children(node.get("children", []))
    return f"<{tag}{attrs}>{inner}</{tag}>"
|
||||
|
||||
|
||||
@_renderer("listitem")
def _listitem(node: dict) -> str:
    """Render a list item as ``<li>``."""
    rendered = _render_children(node.get("children", []))
    return "<li>" + rendered + "</li>"
|
||||
|
||||
|
||||
@_renderer("horizontalrule")
def _horizontalrule(_node: dict) -> str:
    """A horizontal-rule node renders as a bare ``<hr>``; no payload."""
    return "<hr>"
|
||||
|
||||
|
||||
@_renderer("code")
def _code(node: dict) -> str:
    """Inline code node from Lexical — render children and wrap in ``<code>``."""
    rendered = _render_children(node.get("children", []))
    return "<code>" + rendered + "</code>"
|
||||
|
||||
|
||||
@_renderer("codeblock")
def _codeblock(node: dict) -> str:
    """Render a fenced code block; the language (when set) becomes a
    Prism-style ``language-*`` class on the ``<code>`` element."""
    language = node.get("language", "")
    escaped = html.escape(node.get("code", ""))
    if language:
        return f'<pre><code class="language-{html.escape(language)}">{escaped}</code></pre>'
    return f"<pre><code>{escaped}</code></pre>"
|
||||
|
||||
|
||||
@_renderer("code-highlight")
def _code_highlight(node: dict) -> str:
    """Render a syntax-highlight token; escaped plain text when the node
    has no ``highlightType``."""
    escaped = html.escape(node.get("text", ""))
    token = node.get("highlightType", "")
    if not token:
        return escaped
    return f'<span class="token {html.escape(token)}">{escaped}</span>'
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tier 2 — common cards
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@_renderer("image")
def _image(node: dict) -> str:
    """Render an image card as a ``kg-image-card`` figure.

    Supports Ghost width modifiers (wide/full), an optional wrapping link,
    and an optional figcaption. The caption is inserted as-is (it may
    contain editor-produced markup), while URL/alt values are escaped.
    """
    src = node.get("src", "")
    alt = node.get("alt", "")
    caption = node.get("caption", "")
    width = node.get("cardWidth", "") or node.get("width", "")
    href = node.get("href", "")

    width_class = {"wide": " kg-width-wide", "full": " kg-width-full"}.get(width, "")

    img_tag = (
        f'<img src="{html.escape(src, quote=True)}" '
        f'alt="{html.escape(alt, quote=True)}" loading="lazy">'
    )
    if href:
        img_tag = f'<a href="{html.escape(href, quote=True)}">{img_tag}</a>'

    caption_html = f"<figcaption>{caption}</figcaption>" if caption else ""
    return (
        f'<figure class="kg-card kg-image-card{width_class}">'
        f"{img_tag}{caption_html}</figure>"
    )
|
||||
|
||||
|
||||
@_renderer("gallery")
def _gallery(node: dict) -> str:
    """Render a gallery card: images grouped three per row inside a
    ``kg-gallery-container``, with optional per-image and figure captions.

    Returns "" for an empty gallery so no empty figure is emitted.
    """
    images = node.get("images", [])
    if not images:
        return ""

    rows = []
    for i in range(0, len(images), 3):
        imgs_html = []
        for img in images[i:i + 3]:
            src = img.get("src", "")
            alt = img.get("alt", "")
            caption = img.get("caption", "")
            img_tag = (
                f'<img src="{html.escape(src, quote=True)}" '
                f'alt="{html.escape(alt, quote=True)}" loading="lazy">'
            )
            fig = f'<figure class="kg-gallery-image">{img_tag}'
            if caption:
                fig += f"<figcaption>{caption}</figcaption>"
            fig += "</figure>"
            imgs_html.append(fig)
        # Every row uses the same class — the original had a no-op conditional
        # whose branches both yielded "kg-gallery-row"; it is removed here.
        rows.append(f'<div class="kg-gallery-row">{"".join(imgs_html)}</div>')

    caption = node.get("caption", "")
    caption_html = f"<figcaption>{caption}</figcaption>" if caption else ""
    return (
        f'<figure class="kg-card kg-gallery-card kg-width-wide">'
        f'<div class="kg-gallery-container">{"".join(rows)}</div>'
        f"{caption_html}</figure>"
    )
|
||||
|
||||
|
||||
@_renderer("html")
def _html_card(node: dict) -> str:
    """Pass a raw-HTML card through unchanged, wrapped in Ghost's kg-card
    comment markers (used by Ghost to round-trip the card)."""
    raw = node.get("html", "")
    return "<!--kg-card-begin: html-->" + raw + "<!--kg-card-end: html-->"
|
||||
|
||||
|
||||
@_renderer("markdown")
def _markdown(node: dict) -> str:
    """Render a markdown card via mistune, wrapped in Ghost kg-card markers."""
    md_text = node.get("markdown", "")
    rendered = mistune.html(md_text)
    return f"<!--kg-card-begin: markdown-->{rendered}<!--kg-card-end: markdown-->"
|
||||
|
||||
|
||||
@_renderer("embed")
def _embed(node: dict) -> str:
    """Render an embed card: provider HTML plus an optional caption.

    The embed HTML is inserted unescaped — presumably it is produced by the
    editor's oEmbed flow; confirm upstream sanitization.

    (The original also read ``node["url"]`` into an unused local; removed.)
    """
    embed_html = node.get("html", "")
    caption = node.get("caption", "")
    caption_html = f"<figcaption>{caption}</figcaption>" if caption else ""
    return (
        f'<figure class="kg-card kg-embed-card">'
        f"{embed_html}{caption_html}</figure>"
    )
|
||||
|
||||
|
||||
@_renderer("bookmark")
def _bookmark(node: dict) -> str:
    """Render a bookmark card in Ghost's kg-bookmark markup.

    Each field may live under ``node["metadata"]`` or directly on the node;
    the nested value wins when both are present. The metadata dict is
    fetched once (the original re-fetched it for every field).
    """
    meta = node.get("metadata", {})

    def _field(key: str) -> str:
        # Nested metadata value, falling back to the flat node field.
        return meta.get(key, "") or node.get(key, "")

    url = node.get("url", "")
    title = html.escape(_field("title"))
    description = html.escape(_field("description"))
    icon = _field("icon")
    author = html.escape(_field("author"))
    publisher = html.escape(_field("publisher"))
    thumbnail = _field("thumbnail")
    caption = node.get("caption", "")

    icon_html = f'<img class="kg-bookmark-icon" src="{html.escape(icon, quote=True)}" alt="">' if icon else ""
    thumbnail_html = (
        f'<div class="kg-bookmark-thumbnail">'
        f'<img src="{html.escape(thumbnail, quote=True)}" alt=""></div>'
    ) if thumbnail else ""

    meta_parts = []
    if icon_html:
        meta_parts.append(icon_html)
    if author:
        meta_parts.append(f'<span class="kg-bookmark-author">{author}</span>')
    if publisher:
        meta_parts.append(f'<span class="kg-bookmark-publisher">{publisher}</span>')
    metadata_html = f'<span class="kg-bookmark-metadata">{"".join(meta_parts)}</span>' if meta_parts else ""

    caption_html = f"<figcaption>{caption}</figcaption>" if caption else ""

    return (
        f'<figure class="kg-card kg-bookmark-card">'
        f'<a class="kg-bookmark-container" href="{html.escape(url, quote=True)}">'
        f'<div class="kg-bookmark-content">'
        f'<div class="kg-bookmark-title">{title}</div>'
        f'<div class="kg-bookmark-description">{description}</div>'
        f'{metadata_html}'
        f'</div>'
        f'{thumbnail_html}'
        f'</a>'
        f'{caption_html}'
        f'</figure>'
    )
|
||||
|
||||
|
||||
@_renderer("callout")
def _callout(node: dict) -> str:
    """Render a callout card: optional emoji column plus rich-text body,
    colored via the ``kg-callout-card-{color}`` modifier class."""
    color = node.get("backgroundColor", "grey")
    emoji = node.get("calloutEmoji", "")
    body = _render_children(node.get("children", []))

    pieces = [f'<div class="kg-card kg-callout-card kg-callout-card-{html.escape(color)}">']
    if emoji:
        pieces.append(f'<div class="kg-callout-emoji">{emoji}</div>')
    pieces.append(f'<div class="kg-callout-text">{body}</div>')
    pieces.append("</div>")
    return "".join(pieces)
|
||||
|
||||
|
||||
@_renderer("button")
def _button(node: dict) -> str:
    """Render a button card as a ``kg-button-card``.

    ``alignment`` is interpolated into the class attribute and comes from
    client-supplied JSON, so it is escaped like the text and URL (the
    original left it unescaped).
    """
    text = html.escape(node.get("buttonText", ""))
    url = html.escape(node.get("buttonUrl", ""), quote=True)
    alignment = html.escape(node.get("alignment", "center"), quote=True)
    return (
        f'<div class="kg-card kg-button-card kg-align-{alignment}">'
        f'<a href="{url}" class="kg-btn kg-btn-accent">{text}</a>'
        f'</div>'
    )
|
||||
|
||||
|
||||
@_renderer("toggle")
def _toggle(node: dict) -> str:
    """Render a toggle (accordion) card, initially closed; the open/close
    behavior is wired up client-side via ``data-kg-toggle-state``.

    NOTE(review): ``heading`` is interpolated unescaped — presumably it may
    contain editor-produced inline HTML; confirm upstream sanitization.
    """
    heading = node.get("heading", "")
    # Toggle content is in children
    inner = _render_children(node.get("children", []))
    return (
        f'<div class="kg-card kg-toggle-card" data-kg-toggle-state="close">'
        f'<div class="kg-toggle-heading">'
        f'<h4 class="kg-toggle-heading-text">{heading}</h4>'
        f'<button class="kg-toggle-card-icon">'
        f'<svg viewBox="0 0 14 14"><path d="M7 0a.5.5 0 0 1 .5.5v6h6a.5.5 0 1 1 0 1h-6v6a.5.5 0 1 1-1 0v-6h-6a.5.5 0 0 1 0-1h6v-6A.5.5 0 0 1 7 0Z" fill="currentColor"/></svg>'
        f'</button>'
        f'</div>'
        f'<div class="kg-toggle-content">{inner}</div>'
        f'</div>'
    )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tier 3 — media & remaining cards
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@_renderer("audio")
def _audio(node: dict) -> str:
    """Render an audio card: thumbnail (or placeholder icon), title and a
    static player skeleton; the controls are wired up client-side.

    ``duration`` is assumed to be a number of seconds — TODO confirm.
    """
    src = node.get("src", "")
    title = html.escape(node.get("title", ""))
    duration = node.get("duration", 0)
    thumbnail = node.get("thumbnailSrc", "")

    # Pre-format the duration as m:ss for the player's time display.
    duration_min = int(duration) // 60
    duration_sec = int(duration) % 60
    duration_str = f"{duration_min}:{duration_sec:02d}"

    if thumbnail:
        thumb_html = (
            f'<img src="{html.escape(thumbnail, quote=True)}" alt="audio-thumbnail" '
            f'class="kg-audio-thumbnail">'
        )
    else:
        # No thumbnail: render a play-icon placeholder instead.
        thumb_html = (
            '<div class="kg-audio-thumbnail placeholder">'
            '<svg viewBox="0 0 24 24"><path d="M2 12C2 6.48 6.48 2 12 2s10 4.48 10 10-4.48 10-10 10S2 17.52 2 12zm7.5 5.25L16 12 9.5 6.75v10.5z" fill="currentColor"/></svg>'
            '</div>'
        )

    return (
        f'<div class="kg-card kg-audio-card">'
        f'{thumb_html}'
        f'<div class="kg-audio-player-container">'
        f'<div class="kg-audio-title">{title}</div>'
        f'<div class="kg-audio-player">'
        f'<button class="kg-audio-play-icon"><svg viewBox="0 0 24 24"><path d="M8 5v14l11-7z" fill="currentColor"/></svg></button>'
        f'<div class="kg-audio-current-time">0:00</div>'
        f'<div class="kg-audio-time">/ {duration_str}</div>'
        f'<input type="range" class="kg-audio-seek-slider" max="100" value="0">'
        f'<button class="kg-audio-playback-rate">1×</button>'
        f'<button class="kg-audio-unmute-icon"><svg viewBox="0 0 24 24"><path d="M3 9v6h4l5 5V4L7 9H3zm13.5 3c0-1.77-1.02-3.29-2.5-4.03v8.05c1.48-.73 2.5-2.25 2.5-4.02zM14 3.23v2.06c2.89.86 5 3.54 5 6.71s-2.11 5.85-5 6.71v2.06c4.01-.91 7-4.49 7-8.77s-2.99-7.86-7-8.77z" fill="currentColor"/></svg></button>'
        f'<input type="range" class="kg-audio-volume-slider" max="100" value="100">'
        f'</div>'
        f'</div>'
        f'<audio src="{html.escape(src, quote=True)}" preload="metadata"></audio>'
        f'</div>'
    )
|
||||
|
||||
|
||||
@_renderer("video")
def _video(node: dict) -> str:
    """Render a video card with native controls, an optional poster frame,
    loop flag, caption and Ghost width modifiers."""
    src = node.get("src", "")
    caption = node.get("caption", "")
    width = node.get("cardWidth", "")
    thumbnail = node.get("thumbnailSrc", "") or node.get("customThumbnailSrc", "")
    loop = node.get("loop", False)

    width_class = {"wide": " kg-width-wide", "full": " kg-width-full"}.get(width, "")
    loop_attr = " loop" if loop else ""
    poster_attr = f' poster="{html.escape(thumbnail, quote=True)}"' if thumbnail else ""
    caption_html = f"<figcaption>{caption}</figcaption>" if caption else ""

    video_tag = (
        f'<video src="{html.escape(src, quote=True)}" controls '
        f'preload="metadata"{poster_attr}{loop_attr}></video>'
    )
    return (
        f'<figure class="kg-card kg-video-card{width_class}">'
        f'<div class="kg-video-container">{video_tag}</div>'
        f"{caption_html}</figure>"
    )
|
||||
|
||||
|
||||
@_renderer("file")
def _file(node: dict) -> str:
    """Render a downloadable-file card: title, human-readable size, a
    download link and an optional caption.

    ``fileSize`` is assumed to be bytes — TODO confirm against the uploader.
    """
    src = node.get("src", "")
    # Title falls back to the file name when no explicit title is set.
    title = html.escape(node.get("fileName", "") or node.get("title", ""))
    caption = node.get("caption", "")
    file_size = node.get("fileSize", 0)
    file_name = html.escape(node.get("fileName", ""))

    # Format size
    if file_size:
        kb = file_size / 1024
        if kb < 1024:
            size_str = f"{kb:.0f} KB"
        else:
            size_str = f"{kb / 1024:.1f} MB"
    else:
        size_str = ""

    caption_html = f'<div class="kg-file-card-caption">{caption}</div>' if caption else ""
    size_html = f'<div class="kg-file-card-filesize">{size_str}</div>' if size_str else ""

    return (
        f'<div class="kg-card kg-file-card">'
        f'<a class="kg-file-card-container" href="{html.escape(src, quote=True)}" download="{file_name}">'
        f'<div class="kg-file-card-contents">'
        f'<div class="kg-file-card-title">{title}</div>'
        f'{size_html}'
        f'</div>'
        f'<div class="kg-file-card-icon">'
        f'<svg viewBox="0 0 24 24"><path d="M19 9h-4V3H9v6H5l7 7 7-7zM5 18v2h14v-2H5z" fill="currentColor"/></svg>'
        f'</div>'
        f'</a>'
        f'{caption_html}'
        f'</div>'
    )
|
||||
|
||||
|
||||
@_renderer("paywall")
def _paywall(_node: dict) -> str:
    """Render Ghost's paywall marker comment; content after it is
    members-only (the split happens elsewhere, not in this renderer)."""
    return "<!--members-only-->"
|
||||
|
||||
|
||||
@_renderer("header")
def _header(node: dict) -> str:
    """Render a header card: heading/subheading over an optional background
    image, plus an optional CTA button; styled via kg-style-*/kg-size-*.

    NOTE(review): ``heading``/``subheading`` are interpolated unescaped —
    presumably they may contain editor-produced HTML; confirm upstream
    sanitization.
    """
    heading = node.get("heading", "")
    subheading = node.get("subheading", "")
    size = node.get("size", "small")
    style = node.get("style", "dark")
    bg_image = node.get("backgroundImageSrc", "")
    button_text = node.get("buttonText", "")
    button_url = node.get("buttonUrl", "")

    bg_style = f' style="background-image: url({html.escape(bg_image, quote=True)})"' if bg_image else ""
    heading_html = f"<h2>{heading}</h2>" if heading else ""
    subheading_html = f"<p>{subheading}</p>" if subheading else ""
    # The button is emitted only when BOTH text and URL are present.
    button_html = (
        f'<a class="kg-header-card-button" href="{html.escape(button_url, quote=True)}">{html.escape(button_text)}</a>'
        if button_text and button_url else ""
    )

    return (
        f'<div class="kg-card kg-header-card kg-style-{html.escape(style)} kg-size-{html.escape(size)}"{bg_style}>'
        f'{heading_html}{subheading_html}{button_html}'
        f'</div>'
    )
|
||||
|
||||
|
||||
@_renderer("signup")
def _signup(node: dict) -> str:
    """Render a newsletter signup card with an email form wired to Ghost's
    members flow via ``data-members-form``.

    NOTE(review): ``backgroundColor``/``buttonColor`` are interpolated into
    style attributes unescaped, and heading/subheading/disclaimer are raw —
    presumably editor-controlled values; confirm upstream sanitization.
    """
    heading = node.get("heading", "")
    subheading = node.get("subheading", "")
    disclaimer = node.get("disclaimer", "")
    button_text = html.escape(node.get("buttonText", "Subscribe"))
    button_color = node.get("buttonColor", "")
    bg_color = node.get("backgroundColor", "")
    bg_image = node.get("backgroundImageSrc", "")
    style = node.get("style", "dark")

    # Build the inline style from whichever background options are set.
    bg_style_parts = []
    if bg_color:
        bg_style_parts.append(f"background-color: {bg_color}")
    if bg_image:
        bg_style_parts.append(f"background-image: url({html.escape(bg_image, quote=True)})")
    style_attr = f' style="{"; ".join(bg_style_parts)}"' if bg_style_parts else ""

    heading_html = f"<h2>{heading}</h2>" if heading else ""
    subheading_html = f"<p>{subheading}</p>" if subheading else ""
    disclaimer_html = f'<p class="kg-signup-card-disclaimer">{disclaimer}</p>' if disclaimer else ""
    btn_style = f' style="background-color: {button_color}"' if button_color else ""

    return (
        f'<div class="kg-card kg-signup-card kg-style-{html.escape(style)}"{style_attr}>'
        f'{heading_html}{subheading_html}'
        f'<form class="kg-signup-card-form" data-members-form>'
        f'<input type="email" placeholder="Your email" required>'
        f'<button type="submit" class="kg-signup-card-button"{btn_style}>{button_text}</button>'
        f'</form>'
        f'{disclaimer_html}'
        f'</div>'
    )
|
||||
|
||||
|
||||
@_renderer("product")
def _product(node: dict) -> str:
    """Render a product card: image, title, star rating, description and a
    CTA button.

    NOTE(review): ``description`` is inserted unescaped while the title is
    escaped — presumably the description holds editor-produced HTML;
    confirm upstream sanitization.
    """
    title = html.escape(node.get("productTitle", "") or node.get("title", ""))
    description = node.get("productDescription", "") or node.get("description", "")
    img_src = node.get("productImageSrc", "")
    button_text = html.escape(node.get("buttonText", ""))
    button_url = node.get("buttonUrl", "")
    rating = node.get("rating", 0)

    img_html = (
        f'<img class="kg-product-card-image" src="{html.escape(img_src, quote=True)}" alt="">'
        if img_src else ""
    )
    # The button is emitted only when BOTH text and URL are present.
    button_html = (
        f'<a class="kg-product-card-button kg-btn kg-btn-accent" href="{html.escape(button_url, quote=True)}">{button_text}</a>'
        if button_text and button_url else ""
    )
    # Five stars; the first int(rating) are marked active (fraction truncated).
    stars = ""
    if rating:
        active = int(rating)
        stars_html = []
        for i in range(5):
            cls = "kg-product-card-rating-active" if i < active else ""
            stars_html.append(
                f'<svg class="kg-product-card-rating-star {cls}" viewBox="0 0 24 24">'
                f'<path d="M12 .587l3.668 7.568 8.332 1.151-6.064 5.828 1.48 8.279L12 19.771l-7.416 3.642 1.48-8.279L0 9.306l8.332-1.151z" fill="currentColor"/>'
                f'</svg>'
            )
        stars = f'<div class="kg-product-card-rating">{"".join(stars_html)}</div>'

    return (
        f'<div class="kg-card kg-product-card">'
        f'{img_html}'
        f'<div class="kg-product-card-container">'
        f'<h4 class="kg-product-card-title">{title}</h4>'
        f'{stars}'
        f'<div class="kg-product-card-description">{description}</div>'
        f'{button_html}'
        f'</div>'
        f'</div>'
    )
|
||||
|
||||
|
||||
@_renderer("email")
def _email(node: dict) -> str:
    """Pass the email card's raw HTML through inside Ghost kg-card markers."""
    raw = node.get("html", "")
    return "<!--kg-card-begin: email-->" + raw + "<!--kg-card-end: email-->"
|
||||
|
||||
|
||||
@_renderer("email-cta")
def _email_cta(node: dict) -> str:
    """Pass the email-CTA card's raw HTML through inside kg-card markers."""
    raw_html = node.get("html", "")
    return f"<!--kg-card-begin: email-cta-->{raw_html}<!--kg-card-end: email-cta-->"
|
||||
|
||||
|
||||
@_renderer("call-to-action")
def _call_to_action(node: dict) -> str:
    """Render a CTA card: an optional escaped sponsor label followed by the
    card's raw HTML body."""
    raw_html = node.get("html", "")
    sponsor_label = node.get("sponsorLabel", "")
    if sponsor_label:
        label_html = f'<span class="kg-cta-sponsor-label">{html.escape(sponsor_label)}</span>'
    else:
        label_html = ""
    return f'<div class="kg-card kg-cta-card">{label_html}{raw_html}</div>'
|
||||
86
bp/blog/ghost/lexical_validator.py
Normal file
86
bp/blog/ghost/lexical_validator.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""
|
||||
Server-side validation for Lexical editor JSON.
|
||||
|
||||
Walk the document tree and reject any node whose ``type`` is not in
|
||||
ALLOWED_NODE_TYPES. This is a belt-and-braces check: the Lexical
|
||||
client already restricts which nodes can be created, but we validate
|
||||
server-side too.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
# Whitelist of node types the editor is allowed to persist. A document is
# rejected if any node's ``type`` falls outside this set.
ALLOWED_NODE_TYPES: frozenset[str] = frozenset(
    {
        # Standard Lexical nodes
        "root",
        "paragraph",
        "heading",
        "quote",
        "list",
        "listitem",
        "link",
        "autolink",
        "code",
        "code-highlight",
        "linebreak",
        "text",
        "horizontalrule",
        "image",
        "tab",
        # Ghost "extended-*" variants
        "extended-text",
        "extended-heading",
        "extended-quote",
        # Ghost card types
        "html",
        "gallery",
        "embed",
        "bookmark",
        "markdown",
        "email",
        "email-cta",
        "button",
        "callout",
        "toggle",
        "video",
        "audio",
        "file",
        "product",
        "header",
        "signup",
        "aside",
        "codeblock",
        "call-to-action",
        "at-link",
        "paywall",
    }
)


def validate_lexical(doc: dict) -> tuple[bool, str | None]:
    """Recursively validate a Lexical JSON document.

    Returns ``(True, None)`` for a valid document, otherwise
    ``(False, reason)`` describing the first problem found.
    """
    if not isinstance(doc, dict):
        return False, "Document must be a JSON object"

    root = doc.get("root")
    if not isinstance(root, dict):
        return False, "Document must contain a 'root' object"

    return _walk(root)


def _walk(node: dict) -> tuple[bool, str | None]:
    """Depth-first check that every typed node uses an allowed type.

    Nodes without a ``type`` key are accepted; non-dict children are skipped.
    """
    node_type = node.get("type")
    if node_type is not None and node_type not in ALLOWED_NODE_TYPES:
        return False, f"Disallowed node type: {node_type}"

    for child in node.get("children", []):
        if not isinstance(child, dict):
            continue
        ok, reason = _walk(child)
        if not ok:
            return False, reason

    return True, None
|
||||
559
bp/blog/ghost_db.py
Normal file
559
bp/blog/ghost_db.py
Normal file
@@ -0,0 +1,559 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, List, Optional, Sequence, Tuple
|
||||
from sqlalchemy import select, func, asc, desc, and_, or_
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload, joinedload
|
||||
|
||||
from models.ghost_content import Post, Author, Tag, PostTag
|
||||
from models.tag_group import TagGroup, TagGroupTag
|
||||
|
||||
|
||||
class DBAPIError(Exception):
    """Raised when our local DB returns something unexpected."""
    # Intentionally empty: the type itself is the signal; callers catch it
    # at the blueprint boundary.
|
||||
|
||||
|
||||
def _author_to_public(a: Optional[Author]) -> Optional[Dict[str, Any]]:
|
||||
if a is None:
|
||||
return None
|
||||
if a.deleted_at is not None:
|
||||
# treat deleted authors as missing
|
||||
return None
|
||||
return {
|
||||
"id": a.ghost_id,
|
||||
"slug": a.slug,
|
||||
"name": a.name,
|
||||
"profile_image": a.profile_image,
|
||||
"cover_image": a.cover_image,
|
||||
# expose more (bio, etc.) if needed
|
||||
}
|
||||
|
||||
|
||||
def _tag_to_public(t: Tag) -> Dict[str, Any]:
|
||||
return {
|
||||
"id": t.ghost_id,
|
||||
"slug": t.slug,
|
||||
"name": t.name,
|
||||
"description": t.description,
|
||||
"feature_image": t.feature_image, # fixed key
|
||||
"visibility": t.visibility,
|
||||
"deleted_at": t.deleted_at,
|
||||
}
|
||||
|
||||
|
||||
def _post_to_public(p: Post) -> Dict[str, Any]:
    """
    Shape a Post to the public JSON used by the app, mirroring GhostClient._normalise_post.

    Hidden and soft-deleted tags/authors are filtered out of the ``tags`` /
    ``authors`` lists; a missing explicit primary author/tag falls back to
    the first eligible entry. Relationships must already be loaded (the
    DBClient queries eager-load them) — lazy loading here would fail under
    the async session.
    """
    # Primary author: explicit or first available
    primary_author = p.primary_author or (p.authors[0] if p.authors else None)

    # Primary tag: prefer explicit relationship, otherwise first public/non-deleted tag
    primary_tag = getattr(p, "primary_tag", None)
    if primary_tag is None:
        public_tags = [
            t for t in (p.tags or [])
            # NULL visibility is treated as public
            if t.deleted_at is None and (t.visibility or "public") == "public"
        ]
        primary_tag = public_tags[0] if public_tags else None

    return {
        "id": p.id,
        "ghost_id": p.ghost_id,
        "slug": p.slug,
        "title": p.title,
        "html": p.html,
        "is_page": p.is_page,
        # Prefer the author-written excerpt over the generated one.
        "excerpt": p.custom_excerpt or p.excerpt,
        "custom_excerpt": p.custom_excerpt,
        "published_at": p.published_at,
        "updated_at": p.updated_at,
        "visibility": p.visibility,
        "status": p.status,
        "deleted_at": p.deleted_at,
        "feature_image": p.feature_image,
        "user_id": p.user_id,
        "publish_requested": p.publish_requested,
        "primary_author": _author_to_public(primary_author),
        "primary_tag": _tag_to_public(primary_tag) if primary_tag else None,
        "tags": [
            _tag_to_public(t)
            for t in (p.tags or [])
            if t.deleted_at is None and (t.visibility or "public") == "public"
        ],
        "authors": [
            _author_to_public(a)
            for a in (p.authors or [])
            if a and a.deleted_at is None
        ],
    }
|
||||
|
||||
|
||||
class DBClient:
|
||||
"""
|
||||
Drop-in replacement for GhostClient, but served from our mirrored tables.
|
||||
Call methods with an AsyncSession.
|
||||
"""
|
||||
|
||||
    def __init__(self, session: AsyncSession):
        # All queries run through the caller-supplied async session; the
        # client does not manage the session's lifecycle (no commit/close).
        self.sess = session
|
||||
|
||||
    async def list_posts(
        self,
        limit: int = 10,
        page: int = 1,
        selected_tags: Optional[Sequence[str]] = None,
        selected_authors: Optional[Sequence[str]] = None,
        search: Optional[str] = None,
        drafts: bool = False,
        drafts_user_id: Optional[int] = None,
        exclude_covered_tag_ids: Optional[Sequence[int]] = None,
    ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
        """
        List published posts, optionally filtered by tags/authors and a search term.
        When drafts=True, lists draft posts instead (filtered by drafts_user_id if given).
        Returns (posts, pagination).

        Args:
            limit: page size.
            page: 1-based page number (values < 1 are clamped to 1).
            selected_tags: tag slugs; a post matches if ANY of its tags match.
            selected_authors: author slugs; matches primary OR any author.
            search: case-insensitive substring over title/excerpt/plaintext.
            drafts: list drafts (ordered by updated_at) instead of published
                posts (ordered by published_at).
            drafts_user_id: restrict drafts to this owner when given.
            exclude_covered_tag_ids: "etc" mode — exclude posts carrying any
                of these tag ids.
        """

        # ---- base visibility filters
        if drafts:
            base_filters = [
                Post.deleted_at.is_(None),
                Post.status == "draft",
                Post.is_page.is_(False),
            ]
            if drafts_user_id is not None:
                base_filters.append(Post.user_id == drafts_user_id)
        else:
            base_filters = [
                Post.deleted_at.is_(None),
                Post.status == "published",
                Post.is_page.is_(False),
            ]

        q = select(Post).where(*base_filters)

        # ---- TAG FILTER (matches any tag on the post)
        if selected_tags:
            tag_slugs = list(selected_tags)
            q = q.where(
                Post.tags.any(
                    and_(
                        Tag.slug.in_(tag_slugs),
                        Tag.deleted_at.is_(None),
                    )
                )
            )

        # ---- EXCLUDE-COVERED FILTER ("etc" mode: posts NOT covered by any group)
        if exclude_covered_tag_ids:
            covered_sq = (
                select(PostTag.post_id)
                .join(Tag, Tag.id == PostTag.tag_id)
                .where(
                    Tag.id.in_(list(exclude_covered_tag_ids)),
                    Tag.deleted_at.is_(None),
                )
            )
            q = q.where(Post.id.notin_(covered_sq))

        # ---- AUTHOR FILTER (matches primary or any author)
        if selected_authors:
            author_slugs = list(selected_authors)
            q = q.where(
                or_(
                    Post.primary_author.has(
                        and_(
                            Author.slug.in_(author_slugs),
                            Author.deleted_at.is_(None),
                        )
                    ),
                    Post.authors.any(
                        and_(
                            Author.slug.in_(author_slugs),
                            Author.deleted_at.is_(None),
                        )
                    ),
                )
            )

        # ---- SEARCH FILTER (title OR excerpt OR plaintext contains)
        if search:
            term = f"%{search.strip().lower()}%"
            q = q.where(
                or_(
                    func.lower(func.coalesce(Post.title, "")).like(term),
                    func.lower(func.coalesce(Post.excerpt, "")).like(term),
                    func.lower(func.coalesce(Post.plaintext, "")).like(term),
                )
            )

        # ---- ordering
        if drafts:
            q = q.order_by(desc(Post.updated_at))
        else:
            q = q.order_by(desc(Post.published_at))

        # ---- pagination math
        if page < 1:
            page = 1
        offset_val = (page - 1) * limit

        # ---- total count with SAME filters (including tag/author/search);
        # ordering is stripped and only ids selected so the count stays cheap.
        q_no_limit = q.with_only_columns(Post.id).order_by(None)
        count_q = select(func.count()).select_from(q_no_limit.subquery())
        total = int((await self.sess.execute(count_q)).scalar() or 0)

        # ---- eager load relationships to avoid N+1 / greenlet issues
        q = (
            q.options(
                joinedload(Post.primary_author),
                joinedload(Post.primary_tag),
                selectinload(Post.authors),
                selectinload(Post.tags),
            )
            .limit(limit)
            .offset(offset_val)
        )

        rows: List[Post] = list((await self.sess.execute(q)).scalars())
        posts = [_post_to_public(p) for p in rows]

        # ---- search_count: reflect same filters + search (i.e., equals total once filters applied)
        search_count = total

        # Ceiling division; guard against limit == 0.
        pages_total = (total + limit - 1) // limit if limit else 1
        pagination = {
            "page": page,
            "limit": limit,
            "pages": pages_total,
            "total": total,
            "search_count": search_count,
            "next": page + 1 if page < pages_total else None,
            "prev": page - 1 if page > 1 else None,
        }

        return posts, pagination
|
||||
|
||||
async def posts_by_slug(
|
||||
self,
|
||||
slug: str,
|
||||
include: Sequence[str] = ("tags", "authors"),
|
||||
fields: Sequence[str] = (
|
||||
"id",
|
||||
"slug",
|
||||
"title",
|
||||
"html",
|
||||
"excerpt",
|
||||
"custom_excerpt",
|
||||
"published_at",
|
||||
"feature_image",
|
||||
),
|
||||
include_drafts: bool = False,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Return posts (usually 1) matching this slug.
|
||||
|
||||
Only returns published, non-deleted posts by default.
|
||||
When include_drafts=True, also returns draft posts (for admin access).
|
||||
|
||||
Eager-load related objects via selectinload/joinedload so we don't N+1 when
|
||||
serializing in _post_to_public().
|
||||
"""
|
||||
|
||||
# Build .options(...) dynamically based on `include`
|
||||
load_options = []
|
||||
|
||||
# Tags
|
||||
if "tags" in include:
|
||||
load_options.append(selectinload(Post.tags))
|
||||
if hasattr(Post, "primary_tag"):
|
||||
# joinedload is fine too; selectin keeps a single extra roundtrip
|
||||
load_options.append(selectinload(Post.primary_tag))
|
||||
|
||||
# Authors
|
||||
if "authors" in include:
|
||||
if hasattr(Post, "primary_author"):
|
||||
load_options.append(selectinload(Post.primary_author))
|
||||
if hasattr(Post, "authors"):
|
||||
load_options.append(selectinload(Post.authors))
|
||||
|
||||
filters = [Post.deleted_at.is_(None), Post.slug == slug]
|
||||
if not include_drafts:
|
||||
filters.append(Post.status == "published")
|
||||
|
||||
q = (
|
||||
select(Post)
|
||||
.where(*filters)
|
||||
.order_by(desc(Post.published_at))
|
||||
.options(*load_options)
|
||||
)
|
||||
|
||||
result = await self.sess.execute(q)
|
||||
rows: List[Post] = list(result.scalars())
|
||||
|
||||
return [(_post_to_public(p), p) for p in rows]
|
||||
|
||||
async def list_tags(
    self,
    limit: int = 5000,
    page: int = 1,
    is_page=False,
) -> List[Dict[str, Any]]:
    """Return public, not-soft-deleted tags that have published posts.

    Each tag dict carries ``published_post_count`` — the number of
    non-deleted posts of the requested kind (post vs page, per *is_page*)
    with a non-null ``published_at`` that use the tag. Tags whose count is
    zero are filtered out. Ordered by count (desc), then name (asc).
    """
    page = max(page, 1)
    row_offset = (page - 1) * limit

    # Per-tag count of published, non-deleted posts of the right kind.
    counts_sq = (
        select(
            PostTag.tag_id.label("tag_id"),
            func.count().label("published_post_count"),
        )
        .select_from(PostTag)
        .join(Post, Post.id == PostTag.post_id)
        .where(
            Post.deleted_at.is_(None),
            Post.published_at.is_not(None),
            Post.is_page.is_(is_page),
        )
        .group_by(PostTag.tag_id)
        .subquery()
    )

    # Tags without a counts row get 0 via the outer join + coalesce.
    published_count = func.coalesce(counts_sq.c.published_post_count, 0)

    stmt = (
        select(Tag, published_count.label("published_post_count"))
        .outerjoin(counts_sq, counts_sq.c.tag_id == Tag.id)
        .where(
            Tag.deleted_at.is_(None),
            (Tag.visibility == "public") | (Tag.visibility.is_(None)),
            published_count > 0,
        )
        .order_by(desc(published_count), asc(Tag.name))
        .limit(limit)
        .offset(row_offset)
    )

    result = await self.sess.execute(stmt)

    # Rows come back as (Tag, published_post_count) pairs.
    return [
        {
            "id": tag.ghost_id,
            "slug": tag.slug,
            "name": tag.name,
            "description": tag.description,
            "feature_image": tag.feature_image,
            "visibility": tag.visibility,
            "published_post_count": count,
        }
        for tag, count in result.all()
    ]
|
||||
|
||||
async def list_authors(
    self,
    limit: int = 5000,
    page: int = 1,
    is_page=False,
) -> List[Dict[str, Any]]:
    """Return non-deleted authors, ordered by name.

    Each author dict carries ``published_post_count`` — the number of
    published (not deleted) posts of the requested kind where the author
    is the primary author (counted via ``Post.primary_author_id``).
    Authors with zero published posts are still included (count 0).
    """
    page = max(page, 1)
    row_offset = (page - 1) * limit

    # Per-author count of published posts (primary authorship only).
    counts_sq = (
        select(
            Post.primary_author_id.label("author_id"),
            func.count().label("published_post_count"),
        )
        .where(
            Post.deleted_at.is_(None),
            Post.published_at.is_not(None),
            Post.is_page.is_(is_page),
        )
        .group_by(Post.primary_author_id)
        .subquery()
    )

    stmt = (
        select(
            Author,
            func.coalesce(counts_sq.c.published_post_count, 0).label(
                "published_post_count"
            ),
        )
        .outerjoin(counts_sq, counts_sq.c.author_id == Author.id)
        .where(Author.deleted_at.is_(None))
        .order_by(asc(Author.name))
        .limit(limit)
        .offset(row_offset)
    )

    result = await self.sess.execute(stmt)

    # Rows are (Author, published_post_count) pairs.
    return [
        {
            "id": author.ghost_id,
            "slug": author.slug,
            "name": author.name,
            "bio": author.bio,
            "profile_image": author.profile_image,
            "cover_image": author.cover_image,
            "website": author.website,
            "location": author.location,
            "facebook": author.facebook,
            "twitter": author.twitter,
            "published_post_count": count,
        }
        for author, count in result.all()
    ]
|
||||
|
||||
async def count_drafts(self, user_id: Optional[int] = None) -> int:
    """Count draft (non-page, non-deleted) posts, optionally for one user.

    When *user_id* is None, counts drafts across all users.
    """
    conditions = [
        Post.deleted_at.is_(None),
        Post.status == "draft",
        Post.is_page.is_(False),
    ]
    if user_id is not None:
        conditions.append(Post.user_id == user_id)

    stmt = select(func.count()).select_from(Post).where(*conditions)
    # scalar() may be None for an empty result; normalize to int.
    return int((await self.sess.execute(stmt)).scalar() or 0)
|
||||
|
||||
async def list_tag_groups_with_counts(self) -> List[Dict[str, Any]]:
    """
    Return all tag groups with aggregated published post counts.

    Each group dict includes a `tag_slugs` list and `tag_ids` list of its
    public, non-deleted member tags.
    Count = distinct published (non-deleted, non-page) posts having ANY
    member tag. Ordered by sort_order, name.

    FIX: member tags were previously fetched with one query *per group*
    (an N+1 pattern); they are now fetched in a single query and bucketed
    by group id in Python.
    """
    # Subquery: distinct published post count per tag group
    post_count_sq = (
        select(
            TagGroupTag.tag_group_id.label("group_id"),
            func.count(func.distinct(PostTag.post_id)).label("post_count"),
        )
        .select_from(TagGroupTag)
        .join(PostTag, PostTag.tag_id == TagGroupTag.tag_id)
        .join(Post, Post.id == PostTag.post_id)
        .where(
            Post.deleted_at.is_(None),
            Post.published_at.is_not(None),
            Post.is_page.is_(False),
        )
        .group_by(TagGroupTag.tag_group_id)
        .subquery()
    )

    q = (
        select(
            TagGroup,
            func.coalesce(post_count_sq.c.post_count, 0).label("post_count"),
        )
        .outerjoin(post_count_sq, post_count_sq.c.group_id == TagGroup.id)
        .order_by(asc(TagGroup.sort_order), asc(TagGroup.name))
    )

    rows = list((await self.sess.execute(q)).all())

    # Fetch ALL public member tags once and bucket by group id.
    tag_rows = list(
        (await self.sess.execute(
            select(TagGroupTag.tag_group_id, Tag.slug, Tag.id)
            .join(Tag, TagGroupTag.tag_id == Tag.id)
            .where(
                Tag.deleted_at.is_(None),
                (Tag.visibility == "public") | (Tag.visibility.is_(None)),
            )
        )).all()
    )
    tags_by_group: Dict[Any, List[Any]] = {}
    for group_id, tag_slug, tag_id in tag_rows:
        tags_by_group.setdefault(group_id, []).append((tag_slug, tag_id))

    groups = []
    for tg, count in rows:
        members = tags_by_group.get(tg.id, [])
        groups.append({
            "id": tg.id,
            "name": tg.name,
            "slug": tg.slug,
            "feature_image": tg.feature_image,
            "colour": tg.colour,
            "sort_order": tg.sort_order,
            "post_count": count,
            "tag_slugs": [s for s, _ in members],
            "tag_ids": [i for _, i in members],
        })

    return groups
|
||||
|
||||
async def count_etc_posts(self, assigned_tag_ids: List[int]) -> int:
    """
    Count published posts not covered by any tag group.
    Includes posts with no tags and posts whose tags are all unassigned.
    """
    conditions = [
        Post.deleted_at.is_(None),
        Post.published_at.is_not(None),
        Post.is_page.is_(False),
    ]

    if assigned_tag_ids:
        # Posts reachable through any assigned (non-deleted) tag are
        # "covered" by some group and must be excluded.
        covered_posts = (
            select(PostTag.post_id)
            .join(Tag, Tag.id == PostTag.tag_id)
            .where(
                Tag.id.in_(assigned_tag_ids),
                Tag.deleted_at.is_(None),
            )
        )
        conditions.append(Post.id.notin_(covered_posts))

    stmt = select(func.count()).select_from(Post).where(*conditions)
    return int((await self.sess.execute(stmt)).scalar() or 0)
|
||||
|
||||
async def list_drafts(self) -> List[Dict[str, Any]]:
    """Return all draft (non-page, non-deleted) posts, newest-updated first.

    Relationships used by ``_post_to_public`` are eager-loaded to avoid
    per-post queries during serialization.
    """
    stmt = (
        select(Post)
        .where(
            Post.deleted_at.is_(None),
            Post.status == "draft",
            Post.is_page.is_(False),
        )
        .order_by(desc(Post.updated_at))
        .options(
            joinedload(Post.primary_author),
            joinedload(Post.primary_tag),
            selectinload(Post.authors),
            selectinload(Post.tags),
        )
    )

    result = await self.sess.execute(stmt)
    return [_post_to_public(draft) for draft in result.scalars()]
|
||||
203
bp/blog/routes.py
Normal file
203
bp/blog/routes.py
Normal file
@@ -0,0 +1,203 @@
|
||||
from __future__ import annotations
|
||||
|
||||
#from quart import Blueprint, g
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from quart import (
|
||||
request,
|
||||
render_template,
|
||||
make_response,
|
||||
g,
|
||||
Blueprint,
|
||||
redirect,
|
||||
url_for,
|
||||
)
|
||||
from .ghost_db import DBClient # adjust import path
|
||||
from db.session import get_session
|
||||
from .filters.qs import makeqs_factory, decode
|
||||
from .services.posts_data import posts_data
|
||||
|
||||
from suma_browser.app.redis_cacher import cache_page, invalidate_tag_cache
|
||||
from suma_browser.app.utils.htmx import is_htmx_request
|
||||
from suma_browser.app.authz import require_admin
|
||||
from utils import host_url
|
||||
|
||||
def register(url_prefix, title):
    """Build and return the blog blueprint mounted at *url_prefix*.

    Registers the Ghost webhook and editor-API sub-blueprints, the post
    and tag-group-admin blueprints, a startup sync from Ghost, and the
    blog listing / post-creation routes. *title* is exposed to templates
    as ``blog_title``.
    """
    # BUG FIX: Blueprint's third positional parameter is ``static_folder``,
    # not ``url_prefix`` — passing the prefix positionally silently set the
    # static folder instead of mounting the blueprint under the prefix.
    blogs_bp = Blueprint("blog", __name__, url_prefix=url_prefix)

    from .web_hooks.routes import ghost_webhooks
    blogs_bp.register_blueprint(ghost_webhooks)

    from .ghost.editor_api import editor_api_bp
    blogs_bp.register_blueprint(editor_api_bp)

    from ..post.routes import register as register_blog
    blogs_bp.register_blueprint(register_blog())

    from .admin.routes import register as register_tag_groups_admin
    blogs_bp.register_blueprint(register_tag_groups_admin())

    @blogs_bp.before_app_serving
    async def init():
        # One-off full sync of content + membership from Ghost into the
        # local mirror before the app starts serving.
        from .ghost.ghost_sync import (
            sync_all_content_from_ghost,
            sync_all_membership_from_ghost,
        )

        async with get_session() as s:
            await sync_all_content_from_ghost(s)
            await sync_all_membership_from_ghost(s)
            await s.commit()

    @blogs_bp.before_request
    def route():
        # Expose the query-string helper factory to downstream handlers.
        g.makeqs_factory = makeqs_factory

    @blogs_bp.context_processor
    async def inject_root():
        # Values available in every template rendered under this blueprint.
        return {
            "blog_title": title,
            "qs": makeqs_factory()(),
            "unsplash_api_key": os.environ.get("UNSPLASH_ACCESS_KEY", ""),
        }

    # NOTE(review): SORT_MAP is not referenced anywhere in this function —
    # confirm whether it is still needed (kept for backward compatibility).
    SORT_MAP = {
        "newest": "published_at DESC",
        "oldest": "published_at ASC",
        "az": "title ASC",
        "za": "title DESC",
        "featured": "featured DESC, published_at DESC",
    }

    @blogs_bp.get("/")
    async def home():
        """Blog listing page with search, tag/author/group filters."""
        q = decode()

        # Drafts filter requires login; ignore if not logged in
        show_drafts = bool(q.drafts and g.user)
        is_admin = bool((g.get("rights") or {}).get("admin"))
        drafts_user_id = None if (not show_drafts or is_admin) else g.user.id

        # Draft count badge: admin sees all drafts (None), a logged-in
        # non-admin sees their own, anonymous users none (False sentinel).
        count_drafts_uid = None if (g.user and is_admin) else (g.user.id if g.user else False)

        data = await posts_data(
            g.s, q.page, q.search, q.sort, q.selected_tags, q.selected_authors, q.liked,
            drafts=show_drafts, drafts_user_id=drafts_user_id,
            count_drafts_for_user_id=count_drafts_uid,
            selected_groups=q.selected_groups,
        )

        context = {
            **data,
            "selected_tags": q.selected_tags,
            "selected_authors": q.selected_authors,
            "selected_groups": q.selected_groups,
            "sort": q.sort,
            "search": q.search,
            "view": q.view,
            "drafts": q.drafts if show_drafts else None,
        }

        # Template choice: full page for normal requests, card fragment for
        # HTMX pagination, out-of-band swap fragment for HTMX navigation.
        if not is_htmx_request():
            html = await render_template("_types/blog/index.html", **context)
        elif q.page > 1:
            html = await render_template("_types/blog/_cards.html", **context)
        else:
            html = await render_template("_types/blog/_oob_elements.html", **context)

        return await make_response(html)

    @blogs_bp.get("/new/")
    @require_admin
    async def new_post():
        """Render the new-post editor (full page or HTMX fragment)."""
        if not is_htmx_request():
            html = await render_template("_types/blog_new/index.html")
        else:
            html = await render_template("_types/blog_new/_oob_elements.html")
        return await make_response(html)

    @blogs_bp.post("/new/")
    @require_admin
    async def new_post_save():
        """Validate the editor form, create the post in Ghost, mirror it
        locally, and redirect to the admin edit page."""
        from .ghost.ghost_posts import create_post
        from .ghost.lexical_validator import validate_lexical
        from .ghost.ghost_sync import sync_single_post

        form = await request.form
        # Renamed from `title` to avoid shadowing the blueprint-level
        # `title` closure parameter.
        post_title = form.get("title", "").strip() or "Untitled"
        lexical_raw = form.get("lexical", "")
        status = form.get("status", "draft")
        feature_image = form.get("feature_image", "").strip()
        custom_excerpt = form.get("custom_excerpt", "").strip()
        feature_image_caption = form.get("feature_image_caption", "").strip()

        # Validate the Lexical document before touching Ghost.
        try:
            lexical_doc = json.loads(lexical_raw)
        except (json.JSONDecodeError, TypeError):
            html = await render_template(
                "_types/blog_new/index.html",
                save_error="Invalid JSON in editor content.",
            )
            return await make_response(html, 400)

        ok, reason = validate_lexical(lexical_doc)
        if not ok:
            html = await render_template(
                "_types/blog_new/index.html",
                save_error=reason,
            )
            return await make_response(html, 400)

        # Create in Ghost
        ghost_post = await create_post(
            title=post_title,
            lexical_json=lexical_raw,
            status=status,
            feature_image=feature_image or None,
            custom_excerpt=custom_excerpt or None,
            feature_image_caption=feature_image_caption or None,
        )

        # Sync to local DB
        await sync_single_post(g.s, ghost_post["id"])
        await g.s.flush()

        # Set user_id on the newly created post so draft ownership works.
        from models.ghost_content import Post
        from sqlalchemy import select
        local_post = (await g.s.execute(
            select(Post).where(Post.ghost_id == ghost_post["id"])
        )).scalar_one_or_none()
        if local_post and local_post.user_id is None:
            local_post.user_id = g.user.id
            await g.s.flush()

        # Clear blog listing cache
        await invalidate_tag_cache("blog")

        # Redirect to the edit page (post is likely a draft, so the public
        # detail page would 404).
        return redirect(host_url(url_for("blog.post.admin.edit", slug=ghost_post["slug"])))

    @blogs_bp.get("/drafts/")
    async def drafts():
        """Convenience redirect: home page with the drafts filter enabled."""
        return redirect(host_url(url_for("blog.home")) + "?drafts=1")

    return blogs_bp
|
||||
137
bp/blog/services/posts_data.py
Normal file
137
bp/blog/services/posts_data.py
Normal file
@@ -0,0 +1,137 @@
|
||||
from ..ghost_db import DBClient # adjust import path
|
||||
from sqlalchemy import select
|
||||
from models.ghost_content import PostLike
|
||||
from models.calendars import CalendarEntry, CalendarEntryPost
|
||||
from quart import g
|
||||
|
||||
async def posts_data(
    session,
    page, search, sort, selected_tags, selected_authors, liked,
    drafts=False, drafts_user_id=None, count_drafts_for_user_id=None,
    selected_groups=(),
):
    """Assemble the context dict for the blog listing page.

    Resolves tag-group filtering, fetches one page of posts, annotates each
    post with ``is_liked`` (for the current user) and ``associated_entries``
    (confirmed calendar entries), and returns posts plus sidebar data
    (tags, authors, tag groups, draft count) and pagination info.

    NOTE(review): the ``sort`` and ``liked`` parameters are accepted but
    never used in this function body — confirm whether list_posts should
    receive them or whether callers can drop them.

    ``count_drafts_for_user_id`` semantics: ``None`` → count all drafts
    (admin view), an int → count that user's drafts, ``False`` → skip the
    draft count entirely.
    """
    client = DBClient(session)

    # --- Tag-group resolution ---
    tag_groups = await client.list_tag_groups_with_counts()

    # Collect all assigned tag IDs across groups
    all_assigned_tag_ids = []
    for grp in tag_groups:
        all_assigned_tag_ids.extend(grp["tag_ids"])

    # Build slug-lookup for groups
    group_by_slug = {grp["slug"]: grp for grp in tag_groups}

    # Resolve selected group → post filtering
    # Groups and tags are mutually exclusive — groups override tags when set
    effective_tags = selected_tags
    etc_mode_tag_ids = None  # set when "etc" is selected
    if selected_groups:
        # Only the first selected group is honored.
        group_slug = selected_groups[0]
        if group_slug == "etc":
            # etc = posts NOT covered by any group (includes untagged)
            etc_mode_tag_ids = all_assigned_tag_ids
            effective_tags = ()
        elif group_slug in group_by_slug:
            effective_tags = tuple(group_by_slug[group_slug]["tag_slugs"])

    # Compute "etc" virtual group (shown only when non-empty or selected)
    etc_count = await client.count_etc_posts(all_assigned_tag_ids)
    if etc_count > 0 or (selected_groups and selected_groups[0] == "etc"):
        tag_groups.append({
            "id": None,
            "name": "etc",
            "slug": "etc",
            "feature_image": None,
            "colour": None,
            "sort_order": 999999,
            "post_count": etc_count,
            "tag_slugs": [],
            "tag_ids": [],
        })

    posts, pagination = await client.list_posts(
        limit=10,
        page=page,
        selected_tags=effective_tags,
        selected_authors=selected_authors,
        search=search,
        drafts=drafts,
        drafts_user_id=drafts_user_id,
        exclude_covered_tag_ids=etc_mode_tag_ids,
    )

    # Get all post IDs in this batch
    post_ids = [p["id"] for p in posts]

    # Add is_liked field to each post for current user
    if g.user:
        # Fetch all likes for this user and these posts in one query
        liked_posts = await session.execute(
            select(PostLike.post_id).where(
                PostLike.user_id == g.user.id,
                PostLike.post_id.in_(post_ids),
                PostLike.deleted_at.is_(None),
            )
        )
        liked_post_ids = {row[0] for row in liked_posts}

        # Add is_liked to each post
        for post in posts:
            post["is_liked"] = post["id"] in liked_post_ids
    else:
        # Not logged in - no posts are liked
        for post in posts:
            post["is_liked"] = False

    # Fetch associated entries for each post
    # Get all confirmed entries associated with these posts
    from sqlalchemy.orm import selectinload
    entries_result = await session.execute(
        select(CalendarEntry, CalendarEntryPost.post_id)
        .join(CalendarEntryPost, CalendarEntry.id == CalendarEntryPost.entry_id)
        .options(selectinload(CalendarEntry.calendar))  # Eagerly load calendar
        .where(
            CalendarEntryPost.post_id.in_(post_ids),
            CalendarEntryPost.deleted_at.is_(None),
            CalendarEntry.deleted_at.is_(None),
            CalendarEntry.state == "confirmed"
        )
        .order_by(CalendarEntry.start_at.asc())
    )

    # Group entries by post_id
    entries_by_post = {}
    for entry, post_id in entries_result:
        if post_id not in entries_by_post:
            entries_by_post[post_id] = []
        entries_by_post[post_id].append(entry)

    # Add associated_entries to each post
    for post in posts:
        post["associated_entries"] = entries_by_post.get(post["id"], [])

    # Sidebar data: full tag/author lists (effectively unpaginated).
    tags=await client.list_tags(
        limit=50000
    )
    authors=await client.list_authors(
        limit=50000
    )

    # Draft count for the logged-in user (None → admin sees all)
    draft_count = 0
    if count_drafts_for_user_id is not False:
        draft_count = await client.count_drafts(user_id=count_drafts_for_user_id)

    return {
        "posts": posts,
        "page": pagination.get("page", page),
        "total_pages": pagination.get("pages", 1),
        "search_count": pagination.get("search_count"),
        "tags": tags,
        "authors": authors,
        "draft_count": draft_count,
        "tag_groups": tag_groups,
        "selected_groups": selected_groups,
    }
|
||||
120
bp/blog/web_hooks/routes.py
Normal file
120
bp/blog/web_hooks/routes.py
Normal file
@@ -0,0 +1,120 @@
|
||||
# suma_browser/webhooks.py
|
||||
from __future__ import annotations
|
||||
import os
|
||||
from quart import Blueprint, request, abort, Response, g
|
||||
|
||||
from ..ghost.ghost_sync import (
|
||||
sync_single_member,
|
||||
sync_single_page,
|
||||
sync_single_post,
|
||||
sync_single_author,
|
||||
sync_single_tag,
|
||||
)
|
||||
from suma_browser.app.redis_cacher import clear_cache
|
||||
from suma_browser.app.csrf import csrf_exempt
|
||||
|
||||
ghost_webhooks = Blueprint("ghost_webhooks", __name__, url_prefix="/__ghost-webhook")
|
||||
|
||||
def _check_secret(req) -> None:
    """Abort with 401 unless the request carries the configured secret.

    The secret may arrive as a ``secret`` query argument or an
    ``X-Webhook-Secret`` header. When GHOST_WEBHOOK_SECRET is unset,
    every request is allowed (dev mode).
    """
    expected = os.getenv("GHOST_WEBHOOK_SECRET")
    if not expected:
        # No secret configured: allow anything (dev mode).
        return
    supplied = req.args.get("secret") or req.headers.get("X-Webhook-Secret")
    if supplied != expected:
        abort(401)
|
||||
|
||||
def _extract_id(data: dict, key: str) -> str | None:
|
||||
"""
|
||||
key is "post", "tag", or "user"/"author".
|
||||
Ghost usually sends { key: { current: { id: ... }, previous: { id: ... } } }
|
||||
We'll try current first, then previous.
|
||||
"""
|
||||
block = data.get(key) or {}
|
||||
cur = block.get("current") or {}
|
||||
prev = block.get("previous") or {}
|
||||
return cur.get("id") or prev.get("id")
|
||||
|
||||
|
||||
@csrf_exempt
@ghost_webhooks.post("/member/")
async def webhook_member() -> Response:
    """Sync a single Ghost member into the local DB when its webhook fires.

    CONSISTENCY: switched from ``@route(..., methods=["POST"])`` to the
    ``.post()`` shorthand used by every sibling webhook handler (same
    behavior), and dropped the dead commented-out decorator.
    """
    _check_secret(request)

    data = await request.get_json(force=True, silent=True) or {}
    ghost_id = _extract_id(data, "member")
    if not ghost_id:
        abort(400, "no member id")

    await sync_single_member(g.s, ghost_id)
    return Response(status=204)
|
||||
|
||||
@csrf_exempt
@ghost_webhooks.post("/post/")
@clear_cache(tag='blog')
async def webhook_post() -> Response:
    """Re-sync one post from Ghost when its webhook fires.

    Clears the 'blog' cache tag so listings pick up the change.
    """
    _check_secret(request)

    payload = await request.get_json(force=True, silent=True) or {}
    ghost_id = _extract_id(payload, "post")
    if not ghost_id:
        abort(400, "no post id")

    await sync_single_post(g.s, ghost_id)
    return Response(status=204)
|
||||
|
||||
@csrf_exempt
@ghost_webhooks.post("/page/")
@clear_cache(tag='blog')
async def webhook_page() -> Response:
    """Re-sync one page from Ghost when its webhook fires.

    Clears the 'blog' cache tag so rendered pages pick up the change.
    """
    _check_secret(request)

    payload = await request.get_json(force=True, silent=True) or {}
    ghost_id = _extract_id(payload, "page")
    if not ghost_id:
        abort(400, "no page id")

    await sync_single_page(g.s, ghost_id)
    return Response(status=204)
|
||||
|
||||
@csrf_exempt
@ghost_webhooks.post("/author/")
@clear_cache(tag='blog')
async def webhook_author() -> Response:
    """Re-sync one author from Ghost when its webhook fires.

    Ghost webhook payloads name authors "user" in many versions, so the
    id is looked up under both "user" and "author" keys.
    """
    _check_secret(request)

    payload = await request.get_json(force=True, silent=True) or {}
    ghost_id = _extract_id(payload, "user") or _extract_id(payload, "author")
    if not ghost_id:
        abort(400, "no author id")

    await sync_single_author(g.s, ghost_id)
    return Response(status=204)
|
||||
|
||||
@csrf_exempt
@ghost_webhooks.post("/tag/")
@clear_cache(tag='blog')
async def webhook_tag() -> Response:
    """Re-sync one tag from Ghost when its webhook fires.

    Clears the 'blog' cache tag so tag listings pick up the change.
    """
    _check_secret(request)

    payload = await request.get_json(force=True, silent=True) or {}
    ghost_id = _extract_id(payload, "tag")
    if not ghost_id:
        abort(400, "no tag id")

    await sync_single_tag(g.s, ghost_id)
    return Response(status=204)
|
||||
Reference in New Issue
Block a user