8 Commits

Author SHA1 Message Date
e7d5c6734b Fix renderDOM swallowing pre-rendered DOM nodes as empty dicts
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 2m20s
renderComponentDOM now eagerly renders kwarg values that are render
expressions (HTML tags, <>, ~components) into DOM nodes. But renderDOM
treated any non-array object as a dict and returned an empty fragment,
silently discarding pre-rendered content. Add a nodeType check to pass
DOM nodes through unchanged.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 14:41:51 +00:00
e4a6d2dfc8 Fix renderStrComponent with same eager-eval pattern as renderComponentDOM
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 3m18s
The string renderer's component call had the same deferred-evaluation
bug — and this is the path actually used for blog card rendering via
renderToString. Apply the same _isRenderExpr check to route render-only
forms through renderStr while data expressions go through sxEval.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 13:50:42 +00:00
0a5562243b Fix renderComponentDOM: route render-only forms through renderDOM
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 2m19s
The previous fix eagerly evaluated all kwarg expressions via sxEval,
which broke render-only forms (<>, raw!, HTML tags, ~components) that
only exist in the render pipeline. Now detect render expressions by
checking if the head symbol is an HTML/SVG tag, <>, raw!, or ~component,
and route those through renderDOM while data expressions still go
through sxEval for correct scope resolution.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 13:45:43 +00:00
2b41aaa6ce Fix renderComponentDOM evaluating kwarg expressions in wrong scope
renderComponentDOM was deferring evaluation of complex expressions
(arrays) passed as component kwargs, storing raw AST instead.  When the
component body later used these values as attributes, the caller's env
(with lambda params like t, a) was no longer available, producing
stringified arrays like "get,t,src" as attribute values — which browsers
interpreted as relative URLs.

Evaluate all non-literal kwarg values eagerly in the caller's env,
matching the behavior of callComponent and the Python-side renderer.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 13:40:50 +00:00
cfe66e5342 Fix back_populates typo in Post.authors relationship
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 13:36:18 +00:00
382d1b7c7a Decouple blog models and BlogService from shared layer
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 2m20s
Move Post/Author/Tag/PostAuthor/PostTag/PostUser models from
shared/models/ghost_content.py to blog/models/content.py so blog-domain
models no longer live in the shared layer. Replace the shared
SqlBlogService + BlogService protocol with a blog-local singleton
(blog_service), and switch entry_associations.py from direct DB access
to HTTP fetch_data("blog", "post-by-id") to respect the inter-service
boundary.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 13:28:11 +00:00
a580a53328 Fix alembic revision IDs to match existing naming convention
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 1m46s
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 12:38:43 +00:00
0f9af31ffe Phase 0+1: native post writes, Ghost no longer write-primary
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 1m50s
- Final sync script with HTML verification + author→user migration
- Make ghost_id nullable on posts/authors/tags, add UUID/timestamp defaults
- Add user profile fields (bio, slug, profile_image, etc.) to User model
- New PostUser M2M table (replaces post_authors for new posts)
- PostWriter service: direct DB CRUD with Lexical rendering, optimistic
  locking, AP federation, tag upsert
- Rewrite create/edit/settings routes to use PostWriter (no Ghost API calls)
- Neuter Ghost webhooks (post/page/author/tag → 204 no-op)
- Disable Ghost startup sync

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 12:33:37 +00:00
27 changed files with 1326 additions and 352 deletions

View File

@@ -0,0 +1,43 @@
"""Add author profile fields to users table.

Merges Ghost Author profile data into User — bio, profile_image, cover_image,
website, location, facebook, twitter, slug, is_admin.

Revision ID: acct_0003
Revises: acct_0002 (hash_oauth_tokens)
"""
from alembic import op
import sqlalchemy as sa

# Alembic identifiers — the "acct_" prefix follows the account service's
# existing revision naming convention.
revision = "acct_0003"
down_revision = "acct_0002"
branch_labels = None
depends_on = None
def upgrade():
    """Add author-profile columns and an admin flag to the ``users`` table."""
    # Every profile field is nullable, so existing rows simply get NULL.
    profile_columns = (
        sa.Column("slug", sa.String(191), nullable=True),
        sa.Column("bio", sa.Text(), nullable=True),
        sa.Column("profile_image", sa.Text(), nullable=True),
        sa.Column("cover_image", sa.Text(), nullable=True),
        sa.Column("website", sa.Text(), nullable=True),
        sa.Column("location", sa.Text(), nullable=True),
        sa.Column("facebook", sa.Text(), nullable=True),
        sa.Column("twitter", sa.Text(), nullable=True),
    )
    for column in profile_columns:
        op.add_column("users", column)
    # is_admin is NOT NULL with a DB-side default so existing rows get a value.
    op.add_column("users", sa.Column(
        "is_admin", sa.Boolean(), nullable=False, server_default=sa.text("false"),
    ))
    # Unique index on slug — one slug maps to at most one user.
    op.create_index("ix_users_slug", "users", ["slug"], unique=True)
def downgrade():
    """Remove the author-profile columns and admin flag added in upgrade().

    The slug index is dropped first (it depends on the slug column); the
    columns are then dropped in reverse order of creation.
    """
    # Pass table_name explicitly: some dialects (e.g. MySQL) require the
    # owning table to drop an index.
    op.drop_index("ix_users_slug", table_name="users")
    for column_name in (
        "is_admin",
        "twitter",
        "facebook",
        "location",
        "website",
        "cover_image",
        "profile_image",
        "bio",
        "slug",
    ):
        op.drop_column("users", column_name)

View File

@@ -2,7 +2,7 @@ from alembic import context
from shared.db.alembic_env import run_alembic from shared.db.alembic_env import run_alembic
MODELS = [ MODELS = [
"shared.models.ghost_content", "blog.models.content",
"shared.models.kv", "shared.models.kv",
"shared.models.menu_item", "shared.models.menu_item",
"shared.models.menu_node", "shared.models.menu_node",
@@ -13,6 +13,7 @@ MODELS = [
TABLES = frozenset({ TABLES = frozenset({
"posts", "authors", "post_authors", "tags", "post_tags", "posts", "authors", "post_authors", "tags", "post_tags",
"post_users",
"snippets", "tag_groups", "tag_group_tags", "snippets", "tag_groups", "tag_group_tags",
"menu_items", "menu_nodes", "kv", "menu_items", "menu_nodes", "kv",
"page_configs", "page_configs",

View File

@@ -0,0 +1,67 @@
"""Make ghost_id nullable, add defaults, create post_users M2M table.

Revision ID: blog_0004
Revises: blog_0003 (add_page_configs)
"""
from alembic import op
import sqlalchemy as sa

# Alembic identifiers — the "blog_" prefix follows the blog service's
# existing revision naming convention.
revision = "blog_0004"
down_revision = "blog_0003"
branch_labels = None
depends_on = None
def upgrade():
    """Loosen the Ghost coupling on content tables and add post_users.

    - ghost_id becomes nullable on posts/authors/tags, so rows can exist
      without a Ghost-side counterpart.
    - posts.uuid / updated_at / created_at gain DB-side defaults, so direct
      inserts no longer need to supply these values.
    - A post_users M2M table is created and backfilled from posts.user_id.

    NOTE(review): gen_random_uuid()/now() in the server defaults assume a
    PostgreSQL backend — confirm no other dialect runs this migration.
    """
    # Make ghost_id nullable
    op.alter_column("posts", "ghost_id", existing_type=sa.String(64), nullable=True)
    op.alter_column("authors", "ghost_id", existing_type=sa.String(64), nullable=True)
    op.alter_column("tags", "ghost_id", existing_type=sa.String(64), nullable=True)
    # Add server defaults for Post
    op.alter_column(
        "posts", "uuid",
        existing_type=sa.String(64),
        server_default=sa.text("gen_random_uuid()"),
    )
    op.alter_column(
        "posts", "updated_at",
        existing_type=sa.DateTime(timezone=True),
        server_default=sa.text("now()"),
    )
    op.alter_column(
        "posts", "created_at",
        existing_type=sa.DateTime(timezone=True),
        server_default=sa.text("now()"),
    )
    # Create post_users M2M table (replaces post_authors for new posts).
    # user_id has no FK constraint here — users live in a different database.
    op.create_table(
        "post_users",
        sa.Column("post_id", sa.Integer, sa.ForeignKey("posts.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("user_id", sa.Integer, primary_key=True),
        sa.Column("sort_order", sa.Integer, nullable=False, server_default="0"),
    )
    op.create_index("ix_post_users_user_id", "post_users", ["user_id"])
    # Backfill post_users from post_authors for posts that already have user_id.
    # This maps each post's authors to the post's user_id (primary author).
    # Multi-author mapping requires the full sync script.
    # ON CONFLICT DO NOTHING makes the backfill idempotent on re-run.
    op.execute("""
        INSERT INTO post_users (post_id, user_id, sort_order)
        SELECT p.id, p.user_id, 0
        FROM posts p
        WHERE p.user_id IS NOT NULL
          AND p.deleted_at IS NULL
        ON CONFLICT DO NOTHING
    """)
def downgrade():
    """Reverse upgrade(): drop post_users, strip defaults, re-require ghost_id."""
    op.drop_table("post_users")
    # Remove the DB-side defaults added by upgrade().
    for ts_column in ("created_at", "updated_at"):
        op.alter_column(
            "posts", ts_column,
            existing_type=sa.DateTime(timezone=True),
            server_default=None,
        )
    op.alter_column("posts", "uuid", existing_type=sa.String(64), server_default=None)
    # Re-impose NOT NULL on ghost_id (will fail if NULL ghost_ids now exist).
    for table_name in ("tags", "authors", "posts"):
        op.alter_column(table_name, "ghost_id", existing_type=sa.String(64), nullable=False)

View File

@@ -134,7 +134,7 @@ def create_app() -> "Quart":
async def oembed(): async def oembed():
from urllib.parse import urlparse from urllib.parse import urlparse
from quart import jsonify from quart import jsonify
from shared.services.registry import services from services import blog_service
from shared.infrastructure.urls import blog_url from shared.infrastructure.urls import blog_url
from shared.infrastructure.oembed import build_oembed_response from shared.infrastructure.oembed import build_oembed_response
@@ -147,7 +147,7 @@ def create_app() -> "Quart":
if not slug: if not slug:
return jsonify({"error": "could not extract slug"}), 404 return jsonify({"error": "could not extract slug"}), 404
post = await services.blog.get_post_by_slug(g.s, slug) post = await blog_service.get_post_by_slug(g.s, slug)
if not post: if not post:
return jsonify({"error": "not found"}), 404 return jsonify({"error": "not found"}), 404

View File

@@ -14,7 +14,6 @@ from quart import (
url_for, url_for,
) )
from .ghost_db import DBClient # adjust import path from .ghost_db import DBClient # adjust import path
from shared.db.session import get_session
from .filters.qs import makeqs_factory, decode from .filters.qs import makeqs_factory, decode
from .services.posts_data import posts_data from .services.posts_data import posts_data
from .services.pages_data import pages_data from .services.pages_data import pages_data
@@ -47,33 +46,9 @@ def register(url_prefix, title):
@blogs_bp.before_app_serving @blogs_bp.before_app_serving
async def init(): async def init():
from .ghost.ghost_sync import sync_all_content_from_ghost # Ghost startup sync disabled (Phase 1) — blog service owns content
from sqlalchemy import text # directly. The final_ghost_sync.py script was run before cutover.
import logging pass
logger = logging.getLogger(__name__)
# Advisory lock prevents multiple Hypercorn workers from
# running the sync concurrently (which causes PK conflicts).
async with get_session() as s:
got_lock = await s.scalar(text("SELECT pg_try_advisory_lock(900001)"))
if not got_lock:
await s.rollback() # clean up before returning connection to pool
return
try:
await sync_all_content_from_ghost(s)
await s.commit()
except Exception:
logger.exception("Ghost sync failed — will retry on next deploy")
try:
await s.rollback()
except Exception:
pass
finally:
try:
await s.execute(text("SELECT pg_advisory_unlock(900001)"))
await s.commit()
except Exception:
pass # lock auto-releases when session closes
@blogs_bp.before_request @blogs_bp.before_request
def route(): def route():
@@ -258,9 +233,8 @@ def register(url_prefix, title):
@blogs_bp.post("/new/") @blogs_bp.post("/new/")
@require_admin @require_admin
async def new_post_save(): async def new_post_save():
from .ghost.ghost_posts import create_post
from .ghost.lexical_validator import validate_lexical from .ghost.lexical_validator import validate_lexical
from .ghost.ghost_sync import sync_single_post from services.post_writer import create_post as writer_create
form = await request.form form = await request.form
title = form.get("title", "").strip() or "Untitled" title = form.get("title", "").strip() or "Untitled"
@@ -290,35 +264,24 @@ def register(url_prefix, title):
html = await render_new_post_page(tctx) html = await render_new_post_page(tctx)
return await make_response(html, 400) return await make_response(html, 400)
# Create in Ghost # Create directly in db_blog
ghost_post = await create_post( post = await writer_create(
g.s,
title=title, title=title,
lexical_json=lexical_raw, lexical_json=lexical_raw,
status=status, status=status,
user_id=g.user.id,
feature_image=feature_image or None, feature_image=feature_image or None,
custom_excerpt=custom_excerpt or None, custom_excerpt=custom_excerpt or None,
feature_image_caption=feature_image_caption or None, feature_image_caption=feature_image_caption or None,
) )
# Sync to local DB
await sync_single_post(g.s, ghost_post["id"])
await g.s.flush() await g.s.flush()
# Set user_id on the newly created post
from models.ghost_content import Post
from sqlalchemy import select
local_post = (await g.s.execute(
select(Post).where(Post.ghost_id == ghost_post["id"])
)).scalar_one_or_none()
if local_post and local_post.user_id is None:
local_post.user_id = g.user.id
await g.s.flush()
# Clear blog listing cache # Clear blog listing cache
await invalidate_tag_cache("blog") await invalidate_tag_cache("blog")
# Redirect to the edit page (post is likely a draft, so public detail would 404) # Redirect to the edit page
return redirect(host_url(url_for("blog.post.admin.edit", slug=ghost_post["slug"]))) return redirect(host_url(url_for("blog.post.admin.edit", slug=post.slug)))
@blogs_bp.get("/new-page/") @blogs_bp.get("/new-page/")
@@ -340,9 +303,8 @@ def register(url_prefix, title):
@blogs_bp.post("/new-page/") @blogs_bp.post("/new-page/")
@require_admin @require_admin
async def new_page_save(): async def new_page_save():
from .ghost.ghost_posts import create_page
from .ghost.lexical_validator import validate_lexical from .ghost.lexical_validator import validate_lexical
from .ghost.ghost_sync import sync_single_page from services.post_writer import create_page as writer_create_page
form = await request.form form = await request.form
title = form.get("title", "").strip() or "Untitled" title = form.get("title", "").strip() or "Untitled"
@@ -374,35 +336,24 @@ def register(url_prefix, title):
html = await render_new_post_page(tctx) html = await render_new_post_page(tctx)
return await make_response(html, 400) return await make_response(html, 400)
# Create in Ghost (as page) # Create directly in db_blog
ghost_page = await create_page( page = await writer_create_page(
g.s,
title=title, title=title,
lexical_json=lexical_raw, lexical_json=lexical_raw,
status=status, status=status,
user_id=g.user.id,
feature_image=feature_image or None, feature_image=feature_image or None,
custom_excerpt=custom_excerpt or None, custom_excerpt=custom_excerpt or None,
feature_image_caption=feature_image_caption or None, feature_image_caption=feature_image_caption or None,
) )
# Sync to local DB (uses pages endpoint)
await sync_single_page(g.s, ghost_page["id"])
await g.s.flush() await g.s.flush()
# Set user_id on the newly created page
from models.ghost_content import Post
from sqlalchemy import select
local_post = (await g.s.execute(
select(Post).where(Post.ghost_id == ghost_page["id"])
)).scalar_one_or_none()
if local_post and local_post.user_id is None:
local_post.user_id = g.user.id
await g.s.flush()
# Clear blog listing cache # Clear blog listing cache
await invalidate_tag_cache("blog") await invalidate_tag_cache("blog")
# Redirect to the page admin # Redirect to the page admin
return redirect(host_url(url_for("blog.post.admin.edit", slug=ghost_page["slug"]))) return redirect(host_url(url_for("blog.post.admin.edit", slug=page.slug)))
@blogs_bp.get("/drafts/") @blogs_bp.get("/drafts/")

View File

@@ -1,15 +1,11 @@
# suma_browser/webhooks.py # Ghost webhooks — neutered (Phase 1).
#
# Post/page/author/tag handlers return 204 no-op.
# Member webhook remains active (membership sync handled by account service).
from __future__ import annotations from __future__ import annotations
import os import os
from quart import Blueprint, request, abort, Response, g from quart import Blueprint, request, abort, Response
from ..ghost.ghost_sync import (
sync_single_page,
sync_single_post,
sync_single_author,
sync_single_tag,
)
from shared.browser.app.redis_cacher import clear_cache
from shared.browser.app.csrf import csrf_exempt from shared.browser.app.csrf import csrf_exempt
ghost_webhooks = Blueprint("ghost_webhooks", __name__, url_prefix="/__ghost-webhook") ghost_webhooks = Blueprint("ghost_webhooks", __name__, url_prefix="/__ghost-webhook")
@@ -32,6 +28,7 @@ def _extract_id(data: dict, key: str) -> str | None:
@csrf_exempt @csrf_exempt
@ghost_webhooks.route("/member/", methods=["POST"]) @ghost_webhooks.route("/member/", methods=["POST"])
async def webhook_member() -> Response: async def webhook_member() -> Response:
"""Member webhook still active — delegates to account service."""
_check_secret(request) _check_secret(request)
data = await request.get_json(force=True, silent=True) or {} data = await request.get_json(force=True, silent=True) or {}
@@ -39,7 +36,6 @@ async def webhook_member() -> Response:
if not ghost_id: if not ghost_id:
abort(400, "no member id") abort(400, "no member id")
# Delegate to account service (membership data lives in db_account)
from shared.infrastructure.actions import call_action from shared.infrastructure.actions import call_action
try: try:
await call_action( await call_action(
@@ -52,61 +48,25 @@ async def webhook_member() -> Response:
logging.getLogger(__name__).error("Member sync via account failed: %s", e) logging.getLogger(__name__).error("Member sync via account failed: %s", e)
return Response(status=204) return Response(status=204)
# --- Neutered handlers: Ghost no longer writes content ---
@csrf_exempt @csrf_exempt
@ghost_webhooks.post("/post/") @ghost_webhooks.post("/post/")
@clear_cache(tag='blog')
async def webhook_post() -> Response: async def webhook_post() -> Response:
_check_secret(request)
data = await request.get_json(force=True, silent=True) or {}
ghost_id = _extract_id(data, "post")
if not ghost_id:
abort(400, "no post id")
await sync_single_post(g.s, ghost_id)
return Response(status=204) return Response(status=204)
@csrf_exempt @csrf_exempt
@ghost_webhooks.post("/page/") @ghost_webhooks.post("/page/")
@clear_cache(tag='blog')
async def webhook_page() -> Response: async def webhook_page() -> Response:
_check_secret(request)
data = await request.get_json(force=True, silent=True) or {}
ghost_id = _extract_id(data, "page")
if not ghost_id:
abort(400, "no page id")
await sync_single_page(g.s, ghost_id)
return Response(status=204) return Response(status=204)
@csrf_exempt @csrf_exempt
@ghost_webhooks.post("/author/") @ghost_webhooks.post("/author/")
@clear_cache(tag='blog')
async def webhook_author() -> Response: async def webhook_author() -> Response:
_check_secret(request)
data = await request.get_json(force=True, silent=True) or {}
ghost_id = _extract_id(data, "user") or _extract_id(data, "author")
if not ghost_id:
abort(400, "no author id")
await sync_single_author(g.s, ghost_id)
return Response(status=204) return Response(status=204)
@csrf_exempt @csrf_exempt
@ghost_webhooks.post("/tag/") @ghost_webhooks.post("/tag/")
@clear_cache(tag='blog')
async def webhook_tag() -> Response: async def webhook_tag() -> Response:
_check_secret(request)
data = await request.get_json(force=True, silent=True) or {}
ghost_id = _extract_id(data, "tag")
if not ghost_id:
abort(400, "no tag id")
await sync_single_tag(g.s, ghost_id)
return Response(status=204) return Response(status=204)

View File

@@ -9,7 +9,7 @@ from quart import Blueprint, g, jsonify, request
from shared.infrastructure.data_client import DATA_HEADER from shared.infrastructure.data_client import DATA_HEADER
from shared.contracts.dtos import dto_to_dict from shared.contracts.dtos import dto_to_dict
from shared.services.registry import services from services import blog_service
def register() -> Blueprint: def register() -> Blueprint:
@@ -36,7 +36,7 @@ def register() -> Blueprint:
# --- post-by-slug --- # --- post-by-slug ---
async def _post_by_slug(): async def _post_by_slug():
slug = request.args.get("slug", "") slug = request.args.get("slug", "")
post = await services.blog.get_post_by_slug(g.s, slug) post = await blog_service.get_post_by_slug(g.s, slug)
if not post: if not post:
return None return None
return dto_to_dict(post) return dto_to_dict(post)
@@ -46,7 +46,7 @@ def register() -> Blueprint:
# --- post-by-id --- # --- post-by-id ---
async def _post_by_id(): async def _post_by_id():
post_id = int(request.args.get("id", 0)) post_id = int(request.args.get("id", 0))
post = await services.blog.get_post_by_id(g.s, post_id) post = await blog_service.get_post_by_id(g.s, post_id)
if not post: if not post:
return None return None
return dto_to_dict(post) return dto_to_dict(post)
@@ -59,7 +59,7 @@ def register() -> Blueprint:
if not ids_raw: if not ids_raw:
return [] return []
ids = [int(x.strip()) for x in ids_raw.split(",") if x.strip()] ids = [int(x.strip()) for x in ids_raw.split(",") if x.strip()]
posts = await services.blog.get_posts_by_ids(g.s, ids) posts = await blog_service.get_posts_by_ids(g.s, ids)
return [dto_to_dict(p) for p in posts] return [dto_to_dict(p) for p in posts]
_handlers["posts-by-ids"] = _posts_by_ids _handlers["posts-by-ids"] = _posts_by_ids
@@ -69,7 +69,7 @@ def register() -> Blueprint:
query = request.args.get("query", "") query = request.args.get("query", "")
page = int(request.args.get("page", 1)) page = int(request.args.get("page", 1))
per_page = int(request.args.get("per_page", 10)) per_page = int(request.args.get("per_page", 10))
posts, total = await services.blog.search_posts(g.s, query, page, per_page) posts, total = await blog_service.search_posts(g.s, query, page, per_page)
return {"posts": [dto_to_dict(p) for p in posts], "total": total} return {"posts": [dto_to_dict(p) for p in posts], "total": total}
_handlers["search-posts"] = _search_posts _handlers["search-posts"] = _search_posts

View File

@@ -125,7 +125,7 @@ def register():
data_app="blog") data_app="blog")
async def _link_card_handler(): async def _link_card_handler():
from shared.services.registry import services from services import blog_service
from shared.infrastructure.urls import blog_url from shared.infrastructure.urls import blog_url
slug = request.args.get("slug", "") slug = request.args.get("slug", "")
@@ -137,7 +137,7 @@ def register():
parts = [] parts = []
for s in slugs: for s in slugs:
parts.append(f"<!-- fragment:{s} -->") parts.append(f"<!-- fragment:{s} -->")
post = await services.blog.get_post_by_slug(g.s, s) post = await blog_service.get_post_by_slug(g.s, s)
if post: if post:
parts.append(_blog_link_card_sx(post, blog_url(f"/{post.slug}"))) parts.append(_blog_link_card_sx(post, blog_url(f"/{post.slug}")))
return "\n".join(parts) return "\n".join(parts)
@@ -145,7 +145,7 @@ def register():
# Single mode # Single mode
if not slug: if not slug:
return "" return ""
post = await services.blog.get_post_by_slug(g.s, slug) post = await blog_service.get_post_by_slug(g.s, slug)
if not post: if not post:
return "" return ""
return _blog_link_card_sx(post, blog_url(f"/{post.slug}")) return _blog_link_card_sx(post, blog_url(f"/{post.slug}"))

View File

@@ -15,6 +15,43 @@ from shared.browser.app.utils.htmx import is_htmx_request
from shared.sx.helpers import sx_response from shared.sx.helpers import sx_response
from shared.utils import host_url from shared.utils import host_url
def _post_to_edit_dict(post) -> dict:
"""Convert an ORM Post to a dict matching the shape templates expect.
The templates were written for Ghost Admin API responses, so we mimic
that structure (dot-access on dicts via Jinja) from ORM columns.
"""
d: dict = {}
for col in (
"id", "slug", "title", "html", "plaintext", "lexical", "mobiledoc",
"feature_image", "feature_image_alt", "feature_image_caption",
"excerpt", "custom_excerpt", "visibility", "status", "featured",
"is_page", "email_only", "canonical_url",
"meta_title", "meta_description",
"og_image", "og_title", "og_description",
"twitter_image", "twitter_title", "twitter_description",
"custom_template", "reading_time", "comment_id",
):
d[col] = getattr(post, col, None)
# Timestamps as ISO strings (templates do [:16] slicing)
for ts in ("published_at", "updated_at", "created_at"):
val = getattr(post, ts, None)
d[ts] = val.isoformat() if val else ""
# Tags as list of dicts with .name (for Jinja map(attribute='name'))
if hasattr(post, "tags") and post.tags:
d["tags"] = [{"name": t.name, "slug": t.slug, "id": t.id} for t in post.tags]
else:
d["tags"] = []
# email/newsletter — not available without Ghost, set safe defaults
d["email"] = None
d["newsletter"] = None
return d
def register(): def register():
bp = Blueprint("admin", __name__, url_prefix='/admin') bp = Blueprint("admin", __name__, url_prefix='/admin')
@@ -227,7 +264,7 @@ def register():
# Get associated entry IDs for this post # Get associated entry IDs for this post
post_id = g.post_data["post"]["id"] post_id = g.post_data["post"]["id"]
associated_entry_ids = await get_post_entry_ids(g.s, post_id) associated_entry_ids = await get_post_entry_ids(post_id)
html = await render_template( html = await render_template(
"_types/post/admin/_calendar_view.html", "_types/post/admin/_calendar_view.html",
@@ -256,7 +293,7 @@ def register():
from sqlalchemy import select from sqlalchemy import select
post_id = g.post_data["post"]["id"] post_id = g.post_data["post"]["id"]
associated_entry_ids = await get_post_entry_ids(g.s, post_id) associated_entry_ids = await get_post_entry_ids(post_id)
# Load ALL calendars (not just this post's calendars) # Load ALL calendars (not just this post's calendars)
result = await g.s.execute( result = await g.s.execute(
@@ -295,7 +332,7 @@ def register():
from quart import jsonify from quart import jsonify
post_id = g.post_data["post"]["id"] post_id = g.post_data["post"]["id"]
is_associated, error = await toggle_entry_association(g.s, post_id, entry_id) is_associated, error = await toggle_entry_association(post_id, entry_id)
if error: if error:
return jsonify({"message": error, "errors": {}}), 400 return jsonify({"message": error, "errors": {}}), 400
@@ -303,7 +340,7 @@ def register():
await g.s.flush() await g.s.flush()
# Return updated association status # Return updated association status
associated_entry_ids = await get_post_entry_ids(g.s, post_id) associated_entry_ids = await get_post_entry_ids(post_id)
# Load ALL calendars # Load ALL calendars
result = await g.s.execute( result = await g.s.execute(
@@ -318,7 +355,7 @@ def register():
await g.s.refresh(calendar, ["entries", "post"]) await g.s.refresh(calendar, ["entries", "post"])
# Fetch associated entries for nav display # Fetch associated entries for nav display
associated_entries = await get_associated_entries(g.s, post_id) associated_entries = await get_associated_entries(post_id)
# Load calendars for this post (for nav display) # Load calendars for this post (for nav display)
calendars = ( calendars = (
@@ -341,11 +378,18 @@ def register():
@bp.get("/settings/") @bp.get("/settings/")
@require_post_author @require_post_author
async def settings(slug: str): async def settings(slug: str):
from ...blog.ghost.ghost_posts import get_post_for_edit from models.ghost_content import Post
from sqlalchemy import select as sa_select
from sqlalchemy.orm import selectinload
ghost_id = g.post_data["post"]["ghost_id"] post_id = g.post_data["post"]["id"]
is_page = bool(g.post_data["post"].get("is_page")) post = (await g.s.execute(
ghost_post = await get_post_for_edit(ghost_id, is_page=is_page) sa_select(Post)
.where(Post.id == post_id)
.options(selectinload(Post.tags))
)).scalar_one_or_none()
ghost_post = _post_to_edit_dict(post) if post else {}
save_success = request.args.get("saved") == "1" save_success = request.args.get("saved") == "1"
from shared.sx.page import get_template_context from shared.sx.page import get_template_context
@@ -368,12 +412,10 @@ def register():
@bp.post("/settings/") @bp.post("/settings/")
@require_post_author @require_post_author
async def settings_save(slug: str): async def settings_save(slug: str):
from ...blog.ghost.ghost_posts import update_post_settings from services.post_writer import update_post_settings, OptimisticLockError
from ...blog.ghost.ghost_sync import sync_single_post, sync_single_page
from shared.browser.app.redis_cacher import invalidate_tag_cache from shared.browser.app.redis_cacher import invalidate_tag_cache
ghost_id = g.post_data["post"]["ghost_id"] post_id = g.post_data["post"]["id"]
is_page = bool(g.post_data["post"].get("is_page"))
form = await request.form form = await request.form
updated_at = form.get("updated_at", "") updated_at = form.get("updated_at", "")
@@ -406,49 +448,55 @@ def register():
kwargs["featured"] = form.get("featured") == "on" kwargs["featured"] = form.get("featured") == "on"
kwargs["email_only"] = form.get("email_only") == "on" kwargs["email_only"] = form.get("email_only") == "on"
# Tags — comma-separated string → list of {"name": "..."} dicts # Tags — comma-separated string → list of names
tags_str = form.get("tags", "").strip() tags_str = form.get("tags", "").strip()
if tags_str: tag_names = [t.strip() for t in tags_str.split(",") if t.strip()] if tags_str else []
kwargs["tags"] = [{"name": t.strip()} for t in tags_str.split(",") if t.strip()]
else:
kwargs["tags"] = []
# Update in Ghost try:
await update_post_settings( post = await update_post_settings(
ghost_id=ghost_id, g.s,
updated_at=updated_at, post_id=post_id,
is_page=is_page, expected_updated_at=updated_at,
**kwargs, tag_names=tag_names,
) **kwargs,
)
except OptimisticLockError:
from urllib.parse import quote
return redirect(
host_url(url_for("blog.post.admin.settings", slug=slug))
+ "?error=" + quote("Someone else edited this post. Please reload and try again.")
)
# Sync to local DB
if is_page:
await sync_single_page(g.s, ghost_id)
else:
await sync_single_post(g.s, ghost_id)
await g.s.flush() await g.s.flush()
# Clear caches # Clear caches
await invalidate_tag_cache("blog") await invalidate_tag_cache("blog")
await invalidate_tag_cache("post.post_detail") await invalidate_tag_cache("post.post_detail")
return redirect(host_url(url_for("blog.post.admin.settings", slug=slug)) + "?saved=1") # Redirect using the (possibly new) slug
return redirect(host_url(url_for("blog.post.admin.settings", slug=post.slug)) + "?saved=1")
@bp.get("/edit/") @bp.get("/edit/")
@require_post_author @require_post_author
async def edit(slug: str): async def edit(slug: str):
from ...blog.ghost.ghost_posts import get_post_for_edit from models.ghost_content import Post
from sqlalchemy import select as sa_select
from sqlalchemy.orm import selectinload
from shared.infrastructure.data_client import fetch_data from shared.infrastructure.data_client import fetch_data
ghost_id = g.post_data["post"]["ghost_id"] post_id = g.post_data["post"]["id"]
is_page = bool(g.post_data["post"].get("is_page")) post = (await g.s.execute(
ghost_post = await get_post_for_edit(ghost_id, is_page=is_page) sa_select(Post)
.where(Post.id == post_id)
.options(selectinload(Post.tags))
)).scalar_one_or_none()
ghost_post = _post_to_edit_dict(post) if post else {}
save_success = request.args.get("saved") == "1" save_success = request.args.get("saved") == "1"
save_error = request.args.get("error", "") save_error = request.args.get("error", "")
# Newsletters live in db_account — fetch via HTTP # Newsletters live in db_account — fetch via HTTP
raw_newsletters = await fetch_data("account", "newsletters", required=False) or [] raw_newsletters = await fetch_data("account", "newsletters", required=False) or []
# Convert dicts to objects with .name/.ghost_id attributes for template compat
from types import SimpleNamespace from types import SimpleNamespace
newsletters = [SimpleNamespace(**nl) for nl in raw_newsletters] newsletters = [SimpleNamespace(**nl) for nl in raw_newsletters]
@@ -475,20 +523,16 @@ def register():
@require_post_author @require_post_author
async def edit_save(slug: str): async def edit_save(slug: str):
import json import json
from ...blog.ghost.ghost_posts import update_post
from ...blog.ghost.lexical_validator import validate_lexical from ...blog.ghost.lexical_validator import validate_lexical
from ...blog.ghost.ghost_sync import sync_single_post, sync_single_page from services.post_writer import update_post as writer_update, OptimisticLockError
from shared.browser.app.redis_cacher import invalidate_tag_cache from shared.browser.app.redis_cacher import invalidate_tag_cache
ghost_id = g.post_data["post"]["ghost_id"] post_id = g.post_data["post"]["id"]
is_page = bool(g.post_data["post"].get("is_page"))
form = await request.form form = await request.form
title = form.get("title", "").strip() title = form.get("title", "").strip()
lexical_raw = form.get("lexical", "") lexical_raw = form.get("lexical", "")
updated_at = form.get("updated_at", "") updated_at = form.get("updated_at", "")
status = form.get("status", "draft") status = form.get("status", "draft")
publish_mode = form.get("publish_mode", "web")
newsletter_slug = form.get("newsletter_slug", "").strip() or None
feature_image = form.get("feature_image", "").strip() feature_image = form.get("feature_image", "").strip()
custom_excerpt = form.get("custom_excerpt", "").strip() custom_excerpt = form.get("custom_excerpt", "").strip()
feature_image_caption = form.get("feature_image_caption", "").strip() feature_image_caption = form.get("feature_image_caption", "").strip()
@@ -504,76 +548,51 @@ def register():
if not ok: if not ok:
return redirect(host_url(url_for("blog.post.admin.edit", slug=slug)) + "?error=" + quote(reason)) return redirect(host_url(url_for("blog.post.admin.edit", slug=slug)) + "?error=" + quote(reason))
# Update in Ghost (content save — no status change yet)
ghost_post = await update_post(
ghost_id=ghost_id,
lexical_json=lexical_raw,
title=title or None,
updated_at=updated_at,
feature_image=feature_image,
custom_excerpt=custom_excerpt,
feature_image_caption=feature_image_caption,
is_page=is_page,
)
# Publish workflow # Publish workflow
is_admin = bool((g.get("rights") or {}).get("admin")) is_admin = bool((g.get("rights") or {}).get("admin"))
publish_requested_msg = None publish_requested_msg = None
# Guard: if already emailed, force publish_mode to "web" to prevent re-send # Determine effective status
already_emailed = bool(ghost_post.get("email") and ghost_post["email"].get("status")) effective_status: str | None = None
if already_emailed and publish_mode in ("email", "both"): current_status = g.post_data["post"].get("status", "draft")
publish_mode = "web"
if status == "published" and ghost_post.get("status") != "published" and not is_admin: if status == "published" and current_status != "published" and not is_admin:
# Non-admin requesting publish: don't send status to Ghost, set local flag # Non-admin requesting publish: keep as draft, set local flag
publish_requested_msg = "Publish requested — an admin will review." publish_requested_msg = "Publish requested — an admin will review."
elif status and status != ghost_post.get("status"): elif status and status != current_status:
# Status is changing — determine email params based on publish_mode effective_status = status
email_kwargs: dict = {}
if status == "published" and publish_mode in ("email", "both") and newsletter_slug:
email_kwargs["newsletter_slug"] = newsletter_slug
email_kwargs["email_segment"] = "all"
if publish_mode == "email":
email_kwargs["email_only"] = True
from ...blog.ghost.ghost_posts import update_post as _up try:
ghost_post = await _up( post = await writer_update(
ghost_id=ghost_id, g.s,
post_id=post_id,
lexical_json=lexical_raw, lexical_json=lexical_raw,
title=None, title=title or None,
updated_at=ghost_post["updated_at"], expected_updated_at=updated_at,
status=status, feature_image=feature_image or None,
is_page=is_page, custom_excerpt=custom_excerpt or None,
**email_kwargs, feature_image_caption=feature_image_caption or None,
status=effective_status,
)
except OptimisticLockError:
return redirect(
host_url(url_for("blog.post.admin.edit", slug=slug))
+ "?error=" + quote("Someone else edited this post. Please reload and try again.")
) )
# Sync to local DB # Handle publish_requested flag
if is_page: if publish_requested_msg:
await sync_single_page(g.s, ghost_id) post.publish_requested = True
else: elif status == "published" and is_admin:
await sync_single_post(g.s, ghost_id) post.publish_requested = False
await g.s.flush() await g.s.flush()
# Handle publish_requested flag on the local post
from models.ghost_content import Post
from sqlalchemy import select as sa_select
local_post = (await g.s.execute(
sa_select(Post).where(Post.ghost_id == ghost_id)
)).scalar_one_or_none()
if local_post:
if publish_requested_msg:
local_post.publish_requested = True
elif status == "published" and is_admin:
local_post.publish_requested = False
await g.s.flush()
# Clear caches # Clear caches
await invalidate_tag_cache("blog") await invalidate_tag_cache("blog")
await invalidate_tag_cache("post.post_detail") await invalidate_tag_cache("post.post_detail")
# Redirect to GET to avoid resubmit warning on refresh (PRG pattern) # Redirect to GET (PRG pattern) — use post.slug in case it changed
redirect_url = host_url(url_for("blog.post.admin.edit", slug=slug)) + "?saved=1" redirect_url = host_url(url_for("blog.post.admin.edit", slug=post.slug)) + "?saved=1"
if publish_requested_msg: if publish_requested_msg:
redirect_url += "&publish_requested=1" redirect_url += "&publish_requested=1"
return redirect(redirect_url) return redirect(redirect_url)

View File

@@ -7,7 +7,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from shared.contracts.dtos import MarketPlaceDTO from shared.contracts.dtos import MarketPlaceDTO
from shared.infrastructure.actions import call_action, ActionError from shared.infrastructure.actions import call_action, ActionError
from shared.services.registry import services from services import blog_service
class MarketError(ValueError): class MarketError(ValueError):
@@ -33,7 +33,7 @@ async def create_market(sess: AsyncSession, post_id: int, name: str) -> MarketPl
raise MarketError("Market name must not be empty.") raise MarketError("Market name must not be empty.")
slug = slugify(name) slug = slugify(name)
post = await services.blog.get_post_by_id(sess, post_id) post = await blog_service.get_post_by_id(sess, post_id)
if not post: if not post:
raise MarketError(f"Post {post_id} does not exist.") raise MarketError(f"Post {post_id} does not exist.")
@@ -57,7 +57,7 @@ async def create_market(sess: AsyncSession, post_id: int, name: str) -> MarketPl
async def soft_delete_market(sess: AsyncSession, post_slug: str, market_slug: str) -> bool: async def soft_delete_market(sess: AsyncSession, post_slug: str, market_slug: str) -> bool:
post = await services.blog.get_post_by_slug(sess, post_slug) post = await blog_service.get_post_by_slug(sess, post_slug)
if not post: if not post:
return False return False

View File

@@ -1,4 +1,4 @@
from .ghost_content import Post, Author, Tag, PostAuthor, PostTag from .content import Post, Author, Tag, PostAuthor, PostTag, PostUser
from .snippet import Snippet from .snippet import Snippet
from .tag_group import TagGroup, TagGroupTag from .tag_group import TagGroup, TagGroupTag

View File

@@ -19,7 +19,7 @@ class Tag(Base):
__tablename__ = "tags" __tablename__ = "tags"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False) ghost_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, unique=True, nullable=True)
slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False) slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False)
name: Mapped[str] = mapped_column(String(255), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False)
@@ -50,8 +50,8 @@ class Post(Base):
__tablename__ = "posts" __tablename__ = "posts"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False) ghost_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, unique=True, nullable=True)
uuid: Mapped[str] = mapped_column(String(64), unique=True, nullable=False) uuid: Mapped[str] = mapped_column(String(64), unique=True, nullable=False, server_default=func.gen_random_uuid())
slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False) slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False)
title: Mapped[str] = mapped_column(String(500), nullable=False) title: Mapped[str] = mapped_column(String(500), nullable=False)
@@ -89,8 +89,8 @@ class Post(Base):
comment_id: Mapped[Optional[str]] = mapped_column(String(191)) comment_id: Mapped[Optional[str]] = mapped_column(String(191))
published_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) published_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), server_default=func.now())
created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), server_default=func.now())
deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
user_id: Mapped[Optional[int]] = mapped_column( user_id: Mapped[Optional[int]] = mapped_column(
@@ -136,7 +136,7 @@ class Author(Base):
__tablename__ = "authors" __tablename__ = "authors"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False) ghost_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, unique=True, nullable=True)
slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False) slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False)
name: Mapped[str] = mapped_column(String(255), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False)
@@ -192,3 +192,15 @@ class PostTag(Base):
sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False) sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
class PostUser(Base):
"""Multi-author M2M: links posts to users (cross-DB, no FK on user_id)."""
__tablename__ = "post_users"
post_id: Mapped[int] = mapped_column(
ForeignKey("posts.id", ondelete="CASCADE"),
primary_key=True,
)
user_id: Mapped[int] = mapped_column(
Integer, primary_key=True, index=True,
)
sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)

View File

@@ -1,3 +1,3 @@
from shared.models.ghost_content import ( # noqa: F401 from .content import ( # noqa: F401
Tag, Post, Author, PostAuthor, PostTag, Tag, Post, Author, PostAuthor, PostTag, PostUser,
) )

View File

@@ -0,0 +1,469 @@
#!/usr/bin/env python3
"""Final Ghost → db_blog sync with HTML verification + author→user migration.
Run once before cutting over to native writes (Phase 1).
Usage:
cd blog && python -m scripts.final_ghost_sync
Requires GHOST_ADMIN_API_URL, GHOST_ADMIN_API_KEY, DATABASE_URL,
and DATABASE_URL_ACCOUNT env vars.
"""
from __future__ import annotations
import asyncio
import difflib
import logging
import os
import re
import sys
import httpx
from sqlalchemy import select, func, delete
# Ensure project root is importable
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
from shared.db.base import Base # noqa: E402
from shared.db.session import get_session, get_account_session, _engine # noqa: E402
from shared.infrastructure.ghost_admin_token import make_ghost_admin_jwt # noqa: E402
from blog.models.content import Post, Author, Tag, PostUser, PostAuthor # noqa: E402
from shared.models.user import User # noqa: E402
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s [%(levelname)s] %(message)s",
)
log = logging.getLogger("final_ghost_sync")
GHOST_ADMIN_API_URL = os.environ["GHOST_ADMIN_API_URL"]
def _auth_header() -> dict[str, str]:
    """Build the Authorization header for Ghost Admin API requests."""
    token = make_ghost_admin_jwt()
    return {"Authorization": f"Ghost {token}"}
def _slugify(name: str) -> str:
s = name.strip().lower()
s = re.sub(r"[^\w\s-]", "", s)
s = re.sub(r"[\s_]+", "-", s)
return s.strip("-")
# ---------------------------------------------------------------------------
# Ghost API fetch
# ---------------------------------------------------------------------------
async def _fetch_all(endpoint: str) -> list[dict]:
    """Fetch every item from a Ghost Admin API endpoint ("posts" or "pages")."""
    request_url = (
        f"{GHOST_ADMIN_API_URL}/{endpoint}/"
        "?include=authors,tags&limit=all"
        "&formats=html,plaintext,mobiledoc,lexical"
    )
    async with httpx.AsyncClient(timeout=60) as client:
        response = await client.get(request_url, headers=_auth_header())
        response.raise_for_status()
        payload = response.json()
    # Ghost keys the response body by the endpoint name itself.
    return payload.get(endpoint, [])
async def fetch_all_ghost_content() -> list[dict]:
    """Fetch all posts and pages from Ghost; pages get marked with page=True."""
    results = await asyncio.gather(_fetch_all("posts"), _fetch_all("pages"))
    posts, pages = results
    # Tag pages so downstream steps can distinguish them from posts.
    for page_item in pages:
        page_item["page"] = True
    return [*posts, *pages]
# ---------------------------------------------------------------------------
# Author → User migration
# ---------------------------------------------------------------------------
async def migrate_authors_to_users(author_bucket: dict[str, dict]) -> dict[str, int]:
    """Ensure every Ghost author has a corresponding User in db_account.

    Authors are matched to users by lowercased email; authors without an
    email are skipped with a warning (and get no mapping entry).  Missing
    users are auto-created with is_admin=True; for existing users only
    *blank* profile fields are filled in from the Ghost author — values
    already set on the user are never overwritten.

    Returns mapping of ghost_author_id → user_id.
    """
    ghost_id_to_user_id: dict[str, int] = {}
    async with get_account_session() as sess:
        async with sess.begin():
            for ghost_author_id, ga in author_bucket.items():
                # Email is the join key between Ghost authors and Users.
                email = (ga.get("email") or "").strip().lower()
                if not email:
                    log.warning(
                        "Author %s (%s) has no email — skipping user creation",
                        ghost_author_id, ga.get("name"),
                    )
                    continue
                # Find existing user by email
                user = (await sess.execute(
                    select(User).where(User.email == email)
                )).scalar_one_or_none()
                if user is None:
                    # Auto-create user for this Ghost author
                    user = User(
                        email=email,
                        name=ga.get("name"),
                        slug=ga.get("slug") or _slugify(ga.get("name") or email.split("@")[0]),
                        bio=ga.get("bio"),
                        profile_image=ga.get("profile_image"),
                        cover_image=ga.get("cover_image"),
                        website=ga.get("website"),
                        location=ga.get("location"),
                        facebook=ga.get("facebook"),
                        twitter=ga.get("twitter"),
                        is_admin=True,  # Ghost authors are admins
                    )
                    sess.add(user)
                    # Flush so the autogenerated user.id is available below.
                    await sess.flush()
                    log.info("Created user %d for author %s (%s)", user.id, ga.get("name"), email)
                else:
                    # Update profile fields from Ghost author (fill in blanks)
                    if not user.slug:
                        user.slug = ga.get("slug") or _slugify(ga.get("name") or email.split("@")[0])
                    if not user.name and ga.get("name"):
                        user.name = ga["name"]
                    if not user.bio and ga.get("bio"):
                        user.bio = ga["bio"]
                    if not user.profile_image and ga.get("profile_image"):
                        user.profile_image = ga["profile_image"]
                    if not user.cover_image and ga.get("cover_image"):
                        user.cover_image = ga["cover_image"]
                    if not user.website and ga.get("website"):
                        user.website = ga["website"]
                    if not user.location and ga.get("location"):
                        user.location = ga["location"]
                    if not user.facebook and ga.get("facebook"):
                        user.facebook = ga["facebook"]
                    if not user.twitter and ga.get("twitter"):
                        user.twitter = ga["twitter"]
                    await sess.flush()
                    log.info("Updated user %d profile from author %s", user.id, ga.get("name"))
                ghost_id_to_user_id[ghost_author_id] = user.id
    return ghost_id_to_user_id
async def populate_post_users(
    data: list[dict],
    ghost_author_to_user: dict[str, int],
) -> int:
    """Populate post_users M2M and set user_id on all posts from author mapping.

    For each Ghost post found locally (matched on ghost_id): set the primary
    user_id from the primary author, then rebuild the post_users rows from
    the full authors list (deleting any existing rows first).  Authors with
    no user mapping (e.g. no email) are skipped; duplicate user ids keep
    only their first occurrence, with sort_order taken from the original
    author index.

    Returns number of post_users rows created.
    """
    rows_created = 0
    async with get_session() as sess:
        async with sess.begin():
            for gp in data:
                ghost_post_id = gp["id"]
                post = (await sess.execute(
                    select(Post).where(Post.ghost_id == ghost_post_id)
                )).scalar_one_or_none()
                if not post:
                    # Post not synced locally — nothing to link.
                    continue
                # Set primary user_id from primary author
                pa = gp.get("primary_author")
                if pa and pa["id"] in ghost_author_to_user:
                    post.user_id = ghost_author_to_user[pa["id"]]
                # Build post_users from all authors
                await sess.execute(
                    delete(PostUser).where(PostUser.post_id == post.id)
                )
                seen_user_ids: set[int] = set()
                for idx, a in enumerate(gp.get("authors") or []):
                    uid = ghost_author_to_user.get(a["id"])
                    if uid and uid not in seen_user_ids:
                        seen_user_ids.add(uid)
                        sess.add(PostUser(post_id=post.id, user_id=uid, sort_order=idx))
                        rows_created += 1
            await sess.flush()
    return rows_created
# ---------------------------------------------------------------------------
# Content sync (reuse ghost_sync upsert logic)
# ---------------------------------------------------------------------------
async def run_sync() -> dict:
    """Run full Ghost content sync and author→user migration.

    Three steps: (1) upsert all Ghost authors/tags/posts into db_blog via
    the existing ghost_sync upsert helpers, (2) map Ghost authors to Users
    in db_account, (3) populate the post_users M2M and posts.user_id.

    Returns a stats dict with counts for posts, pages, authors, tags,
    users_mapped, and post_users_rows.
    """
    # Imported lazily so the script can set up sys.path first.
    from bp.blog.ghost.ghost_sync import (
        _upsert_author,
        _upsert_tag,
        _upsert_post,
    )
    log.info("Fetching all content from Ghost...")
    data = await fetch_all_ghost_content()
    log.info("Received %d posts/pages from Ghost", len(data))
    # Collect authors and tags (deduped by Ghost id across all posts).
    author_bucket: dict[str, dict] = {}
    tag_bucket: dict[str, dict] = {}
    for p in data:
        for a in p.get("authors") or []:
            author_bucket[a["id"]] = a
        if p.get("primary_author"):
            author_bucket[p["primary_author"]["id"]] = p["primary_author"]
        for t in p.get("tags") or []:
            tag_bucket[t["id"]] = t
        if p.get("primary_tag"):
            tag_bucket[p["primary_tag"]["id"]] = p["primary_tag"]
    # Step 1: Upsert content into db_blog (existing Ghost sync logic)
    async with get_session() as sess:
        async with sess.begin():
            author_map: dict[str, Author] = {}
            for ga in author_bucket.values():
                a = await _upsert_author(sess, ga)
                author_map[ga["id"]] = a
            log.info("Upserted %d authors (legacy table)", len(author_map))
            tag_map: dict[str, Tag] = {}
            for gt in tag_bucket.values():
                t = await _upsert_tag(sess, gt)
                tag_map[gt["id"]] = t
            log.info("Upserted %d tags", len(tag_map))
            for gp in data:
                await _upsert_post(sess, gp, author_map, tag_map)
            log.info("Upserted %d posts/pages", len(data))
    # Step 2: Migrate authors → users in db_account
    log.info("")
    log.info("--- Migrating Ghost authors → Users ---")
    ghost_author_to_user = await migrate_authors_to_users(author_bucket)
    log.info("Mapped %d Ghost authors to User records", len(ghost_author_to_user))
    # Step 3: Populate post_users M2M and set user_id
    log.info("")
    log.info("--- Populating post_users M2M ---")
    pu_rows = await populate_post_users(data, ghost_author_to_user)
    log.info("Created %d post_users rows", pu_rows)
    # Pages were tagged with page=True in fetch_all_ghost_content().
    n_posts = sum(1 for p in data if not p.get("page"))
    n_pages = sum(1 for p in data if p.get("page"))
    return {
        "posts": n_posts,
        "pages": n_pages,
        "authors": len(author_bucket),
        "tags": len(tag_bucket),
        "users_mapped": len(ghost_author_to_user),
        "post_users_rows": pu_rows,
    }
# ---------------------------------------------------------------------------
# HTML rendering verification
# ---------------------------------------------------------------------------
def _normalize_html(html: str | None) -> str:
if not html:
return ""
return re.sub(r"\s+", " ", html.strip())
async def verify_html_rendering() -> dict:
    """Re-render all posts from lexical and compare with stored HTML.

    Only published, non-deleted posts are checked.  Posts without lexical
    are counted separately; a render exception counts as a diff.  Diffs are
    logged as truncated unified diffs for manual review.

    Returns {"checked": n, "no_lexical": n, "diffs": n}.
    """
    from bp.blog.ghost.lexical_renderer import render_lexical
    import json
    diffs_found = 0
    posts_checked = 0
    posts_no_lexical = 0
    async with get_session() as sess:
        result = await sess.execute(
            select(Post).where(
                Post.deleted_at.is_(None),
                Post.status == "published",
            )
        )
        posts = result.scalars().all()
        for post in posts:
            if not post.lexical:
                posts_no_lexical += 1
                continue
            posts_checked += 1
            try:
                rendered = render_lexical(json.loads(post.lexical))
            except Exception as e:
                # A render crash is as bad as a mismatch — count and move on.
                log.error(
                    "Render failed for post %d (%s): %s",
                    post.id, post.slug, e,
                )
                diffs_found += 1
                continue
            # Whitespace-insensitive comparison of Ghost HTML vs our render.
            ghost_html = _normalize_html(post.html)
            our_html = _normalize_html(rendered)
            if ghost_html != our_html:
                diffs_found += 1
                diff = difflib.unified_diff(
                    ghost_html.splitlines(keepends=True),
                    our_html.splitlines(keepends=True),
                    fromfile=f"ghost/{post.slug}",
                    tofile=f"rendered/{post.slug}",
                    n=2,
                )
                diff_text = "".join(diff)
                # Keep log output readable for very large posts.
                if len(diff_text) > 2000:
                    diff_text = diff_text[:2000] + "\n... (truncated)"
                log.warning(
                    "HTML diff for post %d (%s):\n%s",
                    post.id, post.slug, diff_text,
                )
    return {
        "checked": posts_checked,
        "no_lexical": posts_no_lexical,
        "diffs": diffs_found,
    }
# ---------------------------------------------------------------------------
# Verification queries
# ---------------------------------------------------------------------------
async def run_verification() -> dict:
    """Run sanity-check counts against db_blog before cutover.

    Counts non-deleted posts/pages overall, published posts missing html or
    lexical, and posts with/without a user_id assigned.
    """
    async with get_session() as sess:
        total_posts = await sess.scalar(
            select(func.count(Post.id)).where(Post.deleted_at.is_(None))
        )
        null_html = await sess.scalar(
            select(func.count(Post.id)).where(
                Post.deleted_at.is_(None),
                Post.status == "published",
                Post.html.is_(None),
            )
        )
        null_lexical = await sess.scalar(
            select(func.count(Post.id)).where(
                Post.deleted_at.is_(None),
                Post.status == "published",
                Post.lexical.is_(None),
            )
        )
        # Posts with user_id set
        has_user = await sess.scalar(
            select(func.count(Post.id)).where(
                Post.deleted_at.is_(None),
                Post.user_id.isnot(None),
            )
        )
        no_user = await sess.scalar(
            select(func.count(Post.id)).where(
                Post.deleted_at.is_(None),
                Post.user_id.is_(None),
            )
        )
    return {
        "total_posts": total_posts,
        "published_null_html": null_html,
        "published_null_lexical": null_lexical,
        "posts_with_user": has_user,
        "posts_without_user": no_user,
    }
# ---------------------------------------------------------------------------
# Main
# ---------------------------------------------------------------------------
async def main():
    """Run the full cutover-preparation pipeline and log a summary.

    Step 1 syncs all Ghost content and migrates authors to users; step 2
    runs count-based verification queries; step 3 re-renders every post
    from lexical and diffs against stored HTML.  Ends with a pass/review
    verdict based on diffs, null HTML, and unassigned posts.
    """
    log.info("=" * 60)
    log.info("Final Ghost Sync — Cutover Preparation")
    log.info("=" * 60)
    # Step 1: Full sync + author→user migration
    log.info("")
    log.info("--- Step 1: Full sync from Ghost + author→user migration ---")
    stats = await run_sync()
    log.info(
        "Sync complete: %d posts, %d pages, %d authors, %d tags, %d users mapped",
        stats["posts"], stats["pages"], stats["authors"], stats["tags"],
        stats["users_mapped"],
    )
    # Step 2: Verification queries
    log.info("")
    log.info("--- Step 2: Verification queries ---")
    vq = await run_verification()
    log.info("Total non-deleted posts/pages: %d", vq["total_posts"])
    log.info("Published with NULL html: %d", vq["published_null_html"])
    log.info("Published with NULL lexical: %d", vq["published_null_lexical"])
    log.info("Posts with user_id: %d", vq["posts_with_user"])
    log.info("Posts WITHOUT user_id: %d", vq["posts_without_user"])
    if vq["published_null_html"] > 0:
        log.warning("WARN: Some published posts have no HTML!")
    if vq["published_null_lexical"] > 0:
        log.warning("WARN: Some published posts have no Lexical JSON!")
    if vq["posts_without_user"] > 0:
        log.warning("WARN: Some posts have no user_id — authors may lack email!")
    # Step 3: HTML rendering verification
    log.info("")
    log.info("--- Step 3: HTML rendering verification ---")
    html_stats = await verify_html_rendering()
    log.info(
        "Checked %d posts, %d with diffs, %d without lexical",
        html_stats["checked"], html_stats["diffs"], html_stats["no_lexical"],
    )
    if html_stats["diffs"] > 0:
        log.warning(
            "WARN: %d posts have HTML rendering differences — "
            "review diffs above before cutover",
            html_stats["diffs"],
        )
    # Summary
    log.info("")
    log.info("=" * 60)
    log.info("SUMMARY")
    log.info(" Posts synced: %d", stats["posts"])
    log.info(" Pages synced: %d", stats["pages"])
    log.info(" Authors synced: %d", stats["authors"])
    log.info(" Tags synced: %d", stats["tags"])
    log.info(" Users mapped: %d", stats["users_mapped"])
    log.info(" post_users rows: %d", stats["post_users_rows"])
    log.info(" HTML diffs: %d", html_stats["diffs"])
    log.info(" Published null HTML: %d", vq["published_null_html"])
    log.info(" Published null lex: %d", vq["published_null_lexical"])
    log.info(" Posts without user: %d", vq["posts_without_user"])
    log.info("=" * 60)
    # Overall verdict: all three gating metrics must be zero to cut over.
    if (html_stats["diffs"] == 0
            and vq["published_null_html"] == 0
            and vq["posts_without_user"] == 0):
        log.info("All checks passed — safe to proceed with cutover.")
    else:
        log.warning("Review warnings above before proceeding.")
    # Dispose the blog engine so the event loop can shut down cleanly.
    await _engine.dispose()
# Script entry point: run the full sync + verification pipeline once.
if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -1,6 +1,68 @@
"""Blog app service registration.""" """Blog app service registration."""
from __future__ import annotations from __future__ import annotations
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession
from shared.contracts.dtos import PostDTO
from models.content import Post
def _post_to_dto(post: Post) -> PostDTO:
    """Translate a Post ORM row into the cross-app PostDTO contract."""
    dto_fields = {
        "id": post.id,
        "slug": post.slug,
        "title": post.title,
        "status": post.status,
        "visibility": post.visibility,
        "is_page": post.is_page,
        "feature_image": post.feature_image,
        "html": post.html,
        "excerpt": post.excerpt,
        "custom_excerpt": post.custom_excerpt,
        "published_at": post.published_at,
    }
    return PostDTO(**dto_fields)
class SqlBlogService:
    """SQL-backed implementation of the blog service contract.

    All lookups run against the local Post table and return cross-app
    PostDTO objects — never ORM rows.
    """

    @staticmethod
    async def _one(session: AsyncSession, stmt) -> PostDTO | None:
        # Run a single-row query and convert the hit (if any) to a DTO.
        row = (await session.execute(stmt)).scalar_one_or_none()
        return _post_to_dto(row) if row else None

    async def get_post_by_slug(self, session: AsyncSession, slug: str) -> PostDTO | None:
        """Look up a single post by its slug."""
        return await self._one(session, select(Post).where(Post.slug == slug))

    async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None:
        """Look up a single post by primary key."""
        return await self._one(session, select(Post).where(Post.id == id))

    async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]:
        """Fetch DTOs for every existing post id in *ids*."""
        if not ids:
            return []
        rows = (await session.execute(select(Post).where(Post.id.in_(ids)))).scalars().all()
        return [_post_to_dto(row) for row in rows]

    async def search_posts(
        self, session: AsyncSession, query: str, page: int = 1, per_page: int = 10,
    ) -> tuple[list[PostDTO], int]:
        """Paginated title search (ILIKE); empty query lists newest first.

        Returns (dtos_for_page, total_match_count).
        """
        if query:
            count_stmt = select(func.count(Post.id)).where(Post.title.ilike(f"%{query}%"))
            posts_stmt = select(Post).where(Post.title.ilike(f"%{query}%")).order_by(Post.title)
        else:
            count_stmt = select(func.count(Post.id))
            posts_stmt = select(Post).order_by(Post.published_at.desc().nullslast())
        total = (await session.execute(count_stmt)).scalar() or 0
        window = posts_stmt.limit(per_page).offset((page - 1) * per_page)
        rows = (await session.execute(window)).scalars().all()
        return [_post_to_dto(row) for row in rows], total
# Module-level singleton — import this in blog code.
# The service holds no state, so one shared instance is safe everywhere.
blog_service = SqlBlogService()
def register_domain_services() -> None: def register_domain_services() -> None:
"""Register services for the blog app. """Register services for the blog app.
@@ -8,12 +70,8 @@ def register_domain_services() -> None:
Blog owns: Post, Tag, Author, PostAuthor, PostTag. Blog owns: Post, Tag, Author, PostAuthor, PostTag.
Cross-app calls go over HTTP via call_action() / fetch_data(). Cross-app calls go over HTTP via call_action() / fetch_data().
""" """
from shared.services.registry import services
from shared.services.blog_impl import SqlBlogService
services.blog = SqlBlogService()
# Federation needed for AP shared infrastructure (activitypub blueprint) # Federation needed for AP shared infrastructure (activitypub blueprint)
from shared.services.registry import services
if not services.has("federation"): if not services.has("federation"):
from shared.services.federation_impl import SqlFederationService from shared.services.federation_impl import SqlFederationService
services.federation = SqlFederationService() services.federation = SqlFederationService()

View File

@@ -0,0 +1,457 @@
"""Native post/page CRUD — replaces Ghost Admin API writes.
All operations go directly to db_blog. Ghost is never called.
"""
from __future__ import annotations
import json
import logging
import re
from datetime import datetime
from typing import Any, Optional
import nh3
from sqlalchemy import select, delete, func
from sqlalchemy.ext.asyncio import AsyncSession
from models.ghost_content import Post, Tag, PostTag, PostUser
from shared.browser.app.utils import utcnow
log = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _slugify(name: str) -> str:
s = name.strip().lower()
s = re.sub(r"[^\w\s-]", "", s)
s = re.sub(r"[\s_]+", "-", s)
return s.strip("-")
def _reading_time(plaintext: str | None) -> int:
"""Estimate reading time in minutes (word count / 265, min 1)."""
if not plaintext:
return 0
words = len(plaintext.split())
return max(1, round(words / 265))
def _extract_plaintext(html: str) -> str:
"""Strip HTML tags to get plaintext."""
text = re.sub(r"<[^>]+>", "", html)
text = re.sub(r"\s+", " ", text).strip()
return text
def _sanitize_html(html: str | None) -> str | None:
    """Sanitize rendered HTML with nh3 (Rust ammonia bindings).

    Falsy input (None or "") is returned unchanged.  The allow-lists below
    define the only tags/attributes that survive; everything else is
    stripped.  Links get rel="noopener noreferrer" and only http/https/
    mailto URLs are kept.

    NOTE(review): iframe with allowfullscreen is permitted — presumably for
    embeds (e.g. video players); confirm this matches the card renderer.
    """
    if not html:
        return html
    return nh3.clean(
        html,
        tags={
            "a", "abbr", "acronym", "b", "blockquote", "br", "code",
            "div", "em", "figcaption", "figure", "h1", "h2", "h3",
            "h4", "h5", "h6", "hr", "i", "img", "li", "ol", "p",
            "pre", "span", "strong", "sub", "sup", "table", "tbody",
            "td", "th", "thead", "tr", "ul", "video", "source",
            "picture", "iframe", "audio",
        },
        attributes={
            # "*" applies to every allowed tag.
            "*": {"class", "id", "style"},
            "a": {"href", "title", "target"},
            "img": {"src", "alt", "title", "width", "height", "loading"},
            "video": {"src", "controls", "width", "height", "poster"},
            "audio": {"src", "controls"},
            "source": {"src", "type"},
            "iframe": {"src", "width", "height", "frameborder", "allowfullscreen"},
            "td": {"colspan", "rowspan"},
            "th": {"colspan", "rowspan"},
        },
        link_rel="noopener noreferrer",
        url_schemes={"http", "https", "mailto"},
    )
def _render_and_extract(lexical_json: str) -> tuple[str, str, int]:
    """Render HTML from Lexical JSON, extract plaintext, compute reading time.

    Accepts either a JSON string or an already-parsed document.  The rendered
    HTML is sanitized before plaintext extraction, so reading time is based
    on the sanitized content.

    Returns (html, plaintext, reading_time).
    """
    # Imported lazily to avoid a module-level dependency cycle with bp.blog.
    from bp.blog.ghost.lexical_renderer import render_lexical
    doc = json.loads(lexical_json) if isinstance(lexical_json, str) else lexical_json
    html = render_lexical(doc)
    html = _sanitize_html(html)
    plaintext = _extract_plaintext(html or "")
    rt = _reading_time(plaintext)
    return html, plaintext, rt
async def _ensure_slug_unique(sess: AsyncSession, slug: str, exclude_post_id: int | None = None) -> str:
    """Return *slug*, or the first "-2"/"-3"… variant not used by a live post.

    exclude_post_id lets an existing post keep its own slug during updates.
    Soft-deleted posts do not block a slug.
    """
    base_slug = slug
    candidate = slug
    suffix = 1
    while True:
        stmt = select(Post.id).where(Post.slug == candidate, Post.deleted_at.is_(None))
        if exclude_post_id:
            stmt = stmt.where(Post.id != exclude_post_id)
        if await sess.scalar(stmt) is None:
            return candidate
        suffix += 1
        candidate = f"{base_slug}-{suffix}"
async def _upsert_tags_by_name(sess: AsyncSession, tag_names: list[str]) -> list[Tag]:
    """Find or create tags by name. Returns Tag objects in input order.

    Names are stripped; blank names are skipped.  Lookup matches on exact
    name among non-deleted tags; new tags get a slugified name and public
    visibility.  Flushes after each create so tag.id is populated.
    """
    tags: list[Tag] = []
    for name in tag_names:
        name = name.strip()
        if not name:
            continue
        tag = (await sess.execute(
            select(Tag).where(Tag.name == name, Tag.deleted_at.is_(None))
        )).scalar_one_or_none()
        if tag is None:
            tag = Tag(
                name=name,
                slug=_slugify(name),
                visibility="public",
            )
            sess.add(tag)
            await sess.flush()
        tags.append(tag)
    return tags
async def _rebuild_post_tags(sess: AsyncSession, post_id: int, tags: list[Tag]) -> None:
    """Replace every post_tags row for *post_id* with the given tags, in order."""
    await sess.execute(delete(PostTag).where(PostTag.post_id == post_id))
    linked: set[int] = set()
    for position, tag in enumerate(tags):
        if tag.id in linked:
            continue  # skip duplicates; first occurrence keeps its position
        linked.add(tag.id)
        sess.add(PostTag(post_id=post_id, tag_id=tag.id, sort_order=position))
    await sess.flush()
async def _rebuild_post_users(sess: AsyncSession, post_id: int, user_ids: list[int]) -> None:
    """Replace all post_users rows for a post (delete-then-insert).

    Duplicate user ids are dropped; sort_order keeps the original list position.
    """
    await sess.execute(delete(PostUser).where(PostUser.post_id == post_id))
    inserted: set[int] = set()
    for position, uid in enumerate(user_ids):
        if uid in inserted:
            continue
        inserted.add(uid)
        sess.add(PostUser(post_id=post_id, user_id=uid, sort_order=position))
    await sess.flush()
async def _fire_ap_publish(
    sess: AsyncSession,
    post: Post,
    old_status: str | None,
    tag_objs: list[Tag],
) -> None:
    """Fire an ActivityPub federation activity on publish-status transitions.

    Publishing sends Create (first publish) or Update (re-publish); dropping
    out of published sends Delete with a Tombstone.  Pages and posts without
    an owning user are skipped entirely.
    """
    if post.is_page or not post.user_id:
        return
    from bp.blog.ghost.ghost_sync import _build_ap_post_data
    from shared.services.federation_publish import try_publish
    from shared.infrastructure.urls import app_url

    post_url = app_url("blog", f"/{post.slug}/")
    was_published = old_status == "published"
    if post.status == "published":
        await try_publish(
            sess,
            user_id=post.user_id,
            activity_type="Update" if was_published else "Create",
            object_type="Note",
            object_data=_build_ap_post_data(post, post_url, tag_objs),
            source_type="Post",
            source_id=post.id,
        )
    elif was_published:
        # Was published, no longer is: federate a deletion tombstone.
        await try_publish(
            sess,
            user_id=post.user_id,
            activity_type="Delete",
            object_type="Tombstone",
            object_data={"id": post_url, "formerType": "Note"},
            source_type="Post",
            source_id=post.id,
        )
# ---------------------------------------------------------------------------
# Public API
# ---------------------------------------------------------------------------
async def create_post(
    sess: AsyncSession,
    *,
    title: str,
    lexical_json: str,
    status: str = "draft",
    user_id: int,
    feature_image: str | None = None,
    custom_excerpt: str | None = None,
    feature_image_caption: str | None = None,
    tag_names: list[str] | None = None,
    is_page: bool = False,
) -> Post:
    """Create a new post or page directly in db_blog.

    Renders the Lexical document, assigns a unique slug, attaches tags and the
    author, creates a PageConfig for pages, and fires AP federation when the
    post is created already-published.

    Returns the flushed Post (id populated).
    """
    html, plaintext, reading_time = _render_and_extract(lexical_json)
    slug = await _ensure_slug_unique(sess, _slugify(title or "untitled"))
    now = utcnow()
    post = Post(
        title=title or "Untitled",
        slug=slug,
        lexical=lexical_json if isinstance(lexical_json, str) else json.dumps(lexical_json),
        html=html,
        plaintext=plaintext,
        reading_time=reading_time,
        status=status,
        is_page=is_page,
        feature_image=feature_image,
        feature_image_caption=_sanitize_html(feature_image_caption),
        custom_excerpt=custom_excerpt,
        user_id=user_id,
        visibility="public",
        created_at=now,
        updated_at=now,
        published_at=now if status == "published" else None,
    )
    sess.add(post)
    await sess.flush()
    # Tags — upsert once and reuse below (previously re-upserted for AP publish,
    # causing redundant DB roundtrips).
    tags: list[Tag] = []
    if tag_names:
        tags = await _upsert_tags_by_name(sess, tag_names)
        await _rebuild_post_tags(sess, post.id, tags)
        if tags:
            post.primary_tag_id = tags[0].id
    # Post users (author)
    await _rebuild_post_users(sess, post.id, [user_id])
    # PageConfig for pages (idempotent: only created if missing)
    if is_page:
        from shared.models.page_config import PageConfig
        existing = (await sess.execute(
            select(PageConfig).where(
                PageConfig.container_type == "page",
                PageConfig.container_id == post.id,
            )
        )).scalar_one_or_none()
        if existing is None:
            sess.add(PageConfig(
                container_type="page",
                container_id=post.id,
                features={},
            ))
            await sess.flush()
    # AP federation for posts born published (old_status=None → Create)
    if status == "published":
        await _fire_ap_publish(sess, post, None, tags)
    return post
async def create_page(
    sess: AsyncSession,
    *,
    title: str,
    lexical_json: str,
    status: str = "draft",
    user_id: int,
    feature_image: str | None = None,
    custom_excerpt: str | None = None,
    feature_image_caption: str | None = None,
    tag_names: list[str] | None = None,
) -> Post:
    """Create a new page. Thin convenience wrapper: create_post with is_page=True."""
    forwarded = dict(
        title=title,
        lexical_json=lexical_json,
        status=status,
        user_id=user_id,
        feature_image=feature_image,
        custom_excerpt=custom_excerpt,
        feature_image_caption=feature_image_caption,
        tag_names=tag_names,
    )
    return await create_post(sess, is_page=True, **forwarded)
async def update_post(
    sess: AsyncSession,
    *,
    post_id: int,
    lexical_json: str,
    title: str | None = None,
    expected_updated_at: datetime | str,
    feature_image: str | None = ...,  # type: ignore[assignment]
    custom_excerpt: str | None = ...,  # type: ignore[assignment]
    feature_image_caption: str | None = ...,  # type: ignore[assignment]
    status: str | None = None,
) -> Post:
    """Update post content. Optimistic lock via expected_updated_at.

    Fields left at the ``...`` sentinel are unchanged; passing None clears them.

    Raises:
        ValueError: if the post does not exist.
        OptimisticLockError: on optimistic lock conflict (stale updated_at → 409).
    """
    _SENTINEL = ...
    post = await sess.get(Post, post_id)
    if post is None:
        raise ValueError(f"Post {post_id} not found")
    # Optimistic lock: tolerate up to 1s of timestamp drift.
    if isinstance(expected_updated_at, str):
        # NOTE(review): fromisoformat with "+00:00" yields a tz-aware datetime;
        # assumes post.updated_at is also tz-aware — confirm, else the
        # subtraction below raises TypeError.
        expected_updated_at = datetime.fromisoformat(
            expected_updated_at.replace("Z", "+00:00")
        )
    if post.updated_at and abs((post.updated_at - expected_updated_at).total_seconds()) > 1:
        raise OptimisticLockError(
            f"Post was modified at {post.updated_at}, expected {expected_updated_at}"
        )
    old_status = post.status
    # Re-render the content pipeline (html, plaintext, reading time).
    html, plaintext, reading_time = _render_and_extract(lexical_json)
    post.lexical = lexical_json if isinstance(lexical_json, str) else json.dumps(lexical_json)
    post.html = html
    post.plaintext = plaintext
    post.reading_time = reading_time
    if title is not None:
        post.title = title
    if feature_image is not _SENTINEL:
        post.feature_image = feature_image
    if custom_excerpt is not _SENTINEL:
        post.custom_excerpt = custom_excerpt
    if feature_image_caption is not _SENTINEL:
        post.feature_image_caption = _sanitize_html(feature_image_caption)
    if status is not None:
        post.status = status
        # First publish stamps published_at; re-publishing keeps the original.
        if status == "published" and not post.published_at:
            post.published_at = utcnow()
    post.updated_at = utcnow()
    await sess.flush()
    # AP federation on status change
    tags = list(post.tags) if hasattr(post, "tags") and post.tags else []
    await _fire_ap_publish(sess, post, old_status, tags)
    return post
# Whitelist of kwargs that update_post_settings will copy onto a Post.
# Any other kwarg is silently ignored; None values leave the field unchanged.
_SETTINGS_FIELDS = (
    "slug", "published_at", "featured", "visibility", "email_only",
    "custom_template", "meta_title", "meta_description", "canonical_url",
    "og_image", "og_title", "og_description",
    "twitter_image", "twitter_title", "twitter_description",
    "feature_image_alt",
)
async def update_post_settings(
    sess: AsyncSession,
    *,
    post_id: int,
    expected_updated_at: datetime | str,
    tag_names: list[str] | None = None,
    **kwargs: Any,
) -> Post:
    """Update post settings (slug, tags, SEO, social, etc.).

    Optimistic lock via expected_updated_at.  Only fields listed in
    _SETTINGS_FIELDS are applied; None-valued kwargs leave the field unchanged.
    """
    post = await sess.get(Post, post_id)
    if post is None:
        raise ValueError(f"Post {post_id} not found")
    # Optimistic lock: tolerate up to 1s of timestamp drift.
    if isinstance(expected_updated_at, str):
        expected_updated_at = datetime.fromisoformat(
            expected_updated_at.replace("Z", "+00:00")
        )
    if post.updated_at and abs((post.updated_at - expected_updated_at).total_seconds()) > 1:
        raise OptimisticLockError(
            f"Post was modified at {post.updated_at}, expected {expected_updated_at}"
        )
    old_status = post.status
    for name in _SETTINGS_FIELDS:
        value = kwargs.get(name)
        if value is None:
            continue
        # Per-field normalization before assignment.
        if name == "slug":
            value = await _ensure_slug_unique(sess, value, exclude_post_id=post.id)
        elif name in ("featured", "email_only"):
            value = bool(value)
        elif name == "published_at" and isinstance(value, str):
            value = datetime.fromisoformat(value.replace("Z", "+00:00"))
        setattr(post, name, value)
    # Tags: None means "leave alone", an empty list clears them.
    if tag_names is not None:
        tags = await _upsert_tags_by_name(sess, tag_names)
        await _rebuild_post_tags(sess, post.id, tags)
        post.primary_tag_id = tags[0].id if tags else None
    post.updated_at = utcnow()
    await sess.flush()
    # AP federation if visibility/status changed
    tag_objs = list(post.tags) if hasattr(post, "tags") and post.tags else []
    await _fire_ap_publish(sess, post, old_status, tag_objs)
    return post
async def delete_post(sess: AsyncSession, post_id: int) -> None:
    """Soft-delete a post by stamping deleted_at and setting status='deleted'.

    Missing posts are ignored.  A previously-published post also federates
    an AP Delete activity.
    """
    post = await sess.get(Post, post_id)
    if post is None:
        return
    previous_status = post.status
    post.deleted_at = utcnow()
    post.status = "deleted"
    await sess.flush()
    # Fire AP Delete if was published
    if previous_status == "published" and post.user_id:
        tag_objs = list(post.tags) if hasattr(post, "tags") and post.tags else []
        await _fire_ap_publish(sess, post, previous_status, tag_objs)
# ---------------------------------------------------------------------------
# Exceptions
# ---------------------------------------------------------------------------
class OptimisticLockError(Exception):
    """Raised when an optimistic lock check fails (stale updated_at)."""

View File

@@ -235,7 +235,7 @@ def register():
) )
).scalars().all() ).scalars().all()
associated_entries = await get_associated_entries(g.s, post_id) associated_entries = await get_associated_entries(post_id)
nav_oob = render_post_nav_entries_oob(associated_entries, cals, post_data["post"]) nav_oob = render_post_nav_entries_oob(associated_entries, cals, post_data["post"])
html = html + nav_oob html = html + nav_oob

View File

@@ -135,7 +135,7 @@ def register():
for post in entry_posts: for post in entry_posts:
# Get associated entries for this post # Get associated entries for this post
from shared.services.entry_associations import get_associated_entries from shared.services.entry_associations import get_associated_entries
associated_entries = await get_associated_entries(g.s, post.id) associated_entries = await get_associated_entries(post.id)
# Load calendars for this post # Load calendars for this post
from models.calendars import Calendar from models.calendars import Calendar

View File

@@ -84,7 +84,7 @@ def register():
) )
).scalars().all() ).scalars().all()
associated_entries = await get_associated_entries(g.s, post_id) associated_entries = await get_associated_entries(post_id)
nav_oob = render_post_nav_entries_oob(associated_entries, cals, post_data["post"]) nav_oob = render_post_nav_entries_oob(associated_entries, cals, post_data["post"])
html = html + nav_oob html = html + nav_oob

View File

@@ -10,7 +10,6 @@ from .dtos import (
CartSummaryDTO, CartSummaryDTO,
) )
from .protocols import ( from .protocols import (
BlogService,
CalendarService, CalendarService,
MarketService, MarketService,
CartService, CartService,
@@ -24,7 +23,6 @@ __all__ = [
"ProductDTO", "ProductDTO",
"CartItemDTO", "CartItemDTO",
"CartSummaryDTO", "CartSummaryDTO",
"BlogService",
"CalendarService", "CalendarService",
"MarketService", "MarketService",
"CartService", "CartService",

View File

@@ -11,7 +11,6 @@ from typing import Protocol, runtime_checkable
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from .dtos import ( from .dtos import (
PostDTO,
CalendarDTO, CalendarDTO,
CalendarEntryDTO, CalendarEntryDTO,
TicketDTO, TicketDTO,
@@ -29,17 +28,6 @@ from .dtos import (
) )
@runtime_checkable
class BlogService(Protocol):
async def get_post_by_slug(self, session: AsyncSession, slug: str) -> PostDTO | None: ...
async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None: ...
async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]: ...
async def search_posts(
self, session: AsyncSession, query: str, page: int = 1, per_page: int = 10,
) -> tuple[list[PostDTO], int]: ...
@runtime_checkable @runtime_checkable
class CalendarService(Protocol): class CalendarService(Protocol):
async def calendars_for_container( async def calendars_for_container(

View File

@@ -10,7 +10,6 @@ from .ghost_membership_entities import (
GhostNewsletter, UserNewsletter, GhostNewsletter, UserNewsletter,
GhostTier, GhostSubscription, GhostTier, GhostSubscription,
) )
from .ghost_content import Tag, Post, Author, PostAuthor, PostTag
from .page_config import PageConfig from .page_config import PageConfig
from .order import Order, OrderItem from .order import Order, OrderItem
from .market import ( from .market import (

View File

@@ -23,6 +23,17 @@ class User(Base):
stripe_customer_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True) stripe_customer_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
ghost_raw: Mapped[dict | None] = mapped_column(JSONB, nullable=True) ghost_raw: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
# Author profile fields (merged from Ghost Author)
slug: Mapped[str | None] = mapped_column(String(191), unique=True, index=True, nullable=True)
bio: Mapped[str | None] = mapped_column(Text, nullable=True)
profile_image: Mapped[str | None] = mapped_column(Text, nullable=True)
cover_image: Mapped[str | None] = mapped_column(Text, nullable=True)
website: Mapped[str | None] = mapped_column(Text, nullable=True)
location: Mapped[str | None] = mapped_column(Text, nullable=True)
facebook: Mapped[str | None] = mapped_column(Text, nullable=True)
twitter: Mapped[str | None] = mapped_column(Text, nullable=True)
is_admin: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default=func.false())
# Relationships to Ghost-related entities # Relationships to Ghost-related entities
user_newsletters = relationship("UserNewsletter", back_populates="user", cascade="all, delete-orphan", lazy="selectin") user_newsletters = relationship("UserNewsletter", back_populates="user", cascade="all, delete-orphan", lazy="selectin")

View File

@@ -1,65 +0,0 @@
"""SQL-backed BlogService implementation.
Queries ``shared.models.ghost_content.Post`` — only this module may read
blog-domain tables on behalf of other domains.
"""
from __future__ import annotations
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession
from shared.models.ghost_content import Post
from shared.contracts.dtos import PostDTO
def _post_to_dto(post: Post) -> PostDTO:
return PostDTO(
id=post.id,
slug=post.slug,
title=post.title,
status=post.status,
visibility=post.visibility,
is_page=post.is_page,
feature_image=post.feature_image,
html=post.html,
excerpt=post.excerpt,
custom_excerpt=post.custom_excerpt,
published_at=post.published_at,
)
class SqlBlogService:
async def get_post_by_slug(self, session: AsyncSession, slug: str) -> PostDTO | None:
post = (
await session.execute(select(Post).where(Post.slug == slug))
).scalar_one_or_none()
return _post_to_dto(post) if post else None
async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None:
post = (
await session.execute(select(Post).where(Post.id == id))
).scalar_one_or_none()
return _post_to_dto(post) if post else None
async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]:
if not ids:
return []
result = await session.execute(select(Post).where(Post.id.in_(ids)))
return [_post_to_dto(p) for p in result.scalars().all()]
async def search_posts(
self, session: AsyncSession, query: str, page: int = 1, per_page: int = 10,
) -> tuple[list[PostDTO], int]:
"""Search posts by title with pagination. Not part of the Protocol
(admin-only use in events), but provided for convenience."""
if query:
count_stmt = select(func.count(Post.id)).where(Post.title.ilike(f"%{query}%"))
posts_stmt = select(Post).where(Post.title.ilike(f"%{query}%")).order_by(Post.title)
else:
count_stmt = select(func.count(Post.id))
posts_stmt = select(Post).order_by(Post.published_at.desc().nullslast())
total = (await session.execute(count_stmt)).scalar() or 0
offset = (page - 1) * per_page
result = await session.execute(posts_stmt.limit(per_page).offset(offset))
return [_post_to_dto(p) for p in result.scalars().all()], total

View File

@@ -4,16 +4,12 @@ Only uses HTTP-based fetch_data/call_action, no direct DB access.
""" """
from __future__ import annotations from __future__ import annotations
from sqlalchemy.ext.asyncio import AsyncSession
from shared.infrastructure.actions import call_action, ActionError from shared.infrastructure.actions import call_action, ActionError
from shared.infrastructure.data_client import fetch_data from shared.infrastructure.data_client import fetch_data
from shared.contracts.dtos import CalendarEntryDTO, dto_from_dict from shared.contracts.dtos import CalendarEntryDTO, dto_from_dict
from shared.services.registry import services
async def toggle_entry_association( async def toggle_entry_association(
session: AsyncSession,
post_id: int, post_id: int,
entry_id: int entry_id: int
) -> tuple[bool, str | None]: ) -> tuple[bool, str | None]:
@@ -21,7 +17,7 @@ async def toggle_entry_association(
Toggle association between a post and calendar entry. Toggle association between a post and calendar entry.
Returns (is_now_associated, error_message). Returns (is_now_associated, error_message).
""" """
post = await services.blog.get_post_by_id(session, post_id) post = await fetch_data("blog", "post-by-id", params={"id": post_id}, required=False)
if not post: if not post:
return False, "Post not found" return False, "Post not found"
@@ -35,7 +31,6 @@ async def toggle_entry_association(
async def get_post_entry_ids( async def get_post_entry_ids(
session: AsyncSession,
post_id: int post_id: int
) -> set[int]: ) -> set[int]:
""" """
@@ -49,7 +44,6 @@ async def get_post_entry_ids(
async def get_associated_entries( async def get_associated_entries(
session: AsyncSession,
post_id: int, post_id: int,
page: int = 1, page: int = 1,
per_page: int = 10 per_page: int = 10

View File

@@ -8,15 +8,14 @@ Usage::
from shared.services.registry import services from shared.services.registry import services
# Register at app startup (own domain only) # Register at app startup (own domain only)
services.blog = SqlBlogService() services.calendar = SqlCalendarService()
# Use locally within the owning app # Use locally within the owning app
post = await services.blog.get_post_by_slug(session, slug) cals = await services.calendar.calendars_for_container(session, "page", page_id)
""" """
from __future__ import annotations from __future__ import annotations
from shared.contracts.protocols import ( from shared.contracts.protocols import (
BlogService,
CalendarService, CalendarService,
MarketService, MarketService,
CartService, CartService,
@@ -33,23 +32,11 @@ class _ServiceRegistry:
""" """
def __init__(self) -> None: def __init__(self) -> None:
self._blog: BlogService | None = None
self._calendar: CalendarService | None = None self._calendar: CalendarService | None = None
self._market: MarketService | None = None self._market: MarketService | None = None
self._cart: CartService | None = None self._cart: CartService | None = None
self._federation: FederationService | None = None self._federation: FederationService | None = None
# -- blog -----------------------------------------------------------------
@property
def blog(self) -> BlogService:
if self._blog is None:
raise RuntimeError("BlogService not registered")
return self._blog
@blog.setter
def blog(self, impl: BlogService) -> None:
self._blog = impl
# -- calendar ------------------------------------------------------------- # -- calendar -------------------------------------------------------------
@property @property
def calendar(self) -> CalendarService: def calendar(self) -> CalendarService:

View File

@@ -691,6 +691,9 @@
// Keyword → text // Keyword → text
if (isKw(expr)) return document.createTextNode(expr.name); if (isKw(expr)) return document.createTextNode(expr.name);
// Pre-rendered DOM node → return as-is
if (expr && expr.nodeType) return expr;
// Dict → empty // Dict → empty
if (expr && typeof expr === "object" && !Array.isArray(expr)) return document.createDocumentFragment(); if (expr && typeof expr === "object" && !Array.isArray(expr)) return document.createDocumentFragment();
@@ -808,18 +811,32 @@
return renderDOM(fn.body, local); return renderDOM(fn.body, local);
} }
/** True when the array expr is a render-only form (HTML tag, <>, raw!, ~comp). */
function _isRenderExpr(v) {
if (!Array.isArray(v) || !v.length) return false;
var h = v[0];
if (!isSym(h)) return false;
var n = h.name;
return !!(HTML_TAGS[n] || SVG_TAGS[n] || n === "<>" || n === "raw!" || n.charAt(0) === "~");
}
function renderComponentDOM(comp, args, env) { function renderComponentDOM(comp, args, env) {
var kwargs = {}, children = []; var kwargs = {}, children = [];
var i = 0; var i = 0;
while (i < args.length) { while (i < args.length) {
if (isKw(args[i]) && i + 1 < args.length) { if (isKw(args[i]) && i + 1 < args.length) {
// Keep kwarg values as AST — renderDOM will handle them when the // Evaluate kwarg values eagerly in the caller's env so expressions
// component body references the param symbol. Simple literals are // like (get t "src") resolve while lambda params are still bound.
// eval'd so strings/numbers resolve immediately. // Render-only forms (HTML tags, <>, ~comp) go through renderDOM instead.
var v = args[i + 1]; var v = args[i + 1];
kwargs[args[i].name] = (typeof v === "string" || typeof v === "number" || if (typeof v === "string" || typeof v === "number" ||
typeof v === "boolean" || isNil(v) || isKw(v)) typeof v === "boolean" || isNil(v)) {
? v : (isSym(v) ? sxEval(v, env) : v); kwargs[args[i].name] = v;
} else if (_isRenderExpr(v)) {
kwargs[args[i].name] = renderDOM(v, env);
} else {
kwargs[args[i].name] = sxEval(v, env);
}
i += 2; i += 2;
} else { } else {
children.push(args[i]); children.push(args[i]);
@@ -1096,10 +1113,18 @@
var i = 0; var i = 0;
while (i < args.length) { while (i < args.length) {
if (isKw(args[i]) && i + 1 < args.length) { if (isKw(args[i]) && i + 1 < args.length) {
// Evaluate kwarg values eagerly in the caller's env so expressions
// like (get t "src") resolve while lambda params are still bound.
// Render-only forms (HTML tags, <>, ~comp) go through renderStr.
var v = args[i + 1]; var v = args[i + 1];
kwargs[args[i].name] = (typeof v === "string" || typeof v === "number" || if (typeof v === "string" || typeof v === "number" ||
typeof v === "boolean" || isNil(v) || isKw(v)) typeof v === "boolean" || isNil(v)) {
? v : (isSym(v) ? sxEval(v, env) : v); kwargs[args[i].name] = v;
} else if (_isRenderExpr(v)) {
kwargs[args[i].name] = new RawHTML(renderStr(v, env));
} else {
kwargs[args[i].name] = sxEval(v, env);
}
i += 2; i += 2;
} else { children.push(args[i]); i++; } } else { children.push(args[i]); i++; }
} }