Unify domain_events + ap_activities into AP-shaped event bus

All cross-service events now flow through ap_activities with a unified
EventProcessor. Internal events use visibility="internal"; federation
activities use visibility="public" and get delivered by a wildcard handler.

- Add processing columns to APActivity (process_state, actor_uri, etc.)
- New emit_activity() / register_activity_handler() API
- EventProcessor polls ap_activities instead of domain_events
- Rewrite all handlers to accept APActivity
- Migrate all 7 emit_event call sites to emit_activity
- publish_activity() sets process_state=pending directly (no emit_event bridge)
- Migration to drop domain_events table

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
giles
2026-02-22 16:19:29 +00:00
parent d697709f60
commit 2e9db11925
15 changed files with 389 additions and 168 deletions

View File

@@ -12,7 +12,7 @@ shared/
models/ # Canonical domain models models/ # Canonical domain models
user.py # User user.py # User
magic_link.py # MagicLink (auth tokens) magic_link.py # MagicLink (auth tokens)
domain_event.py # DomainEvent (transactional outbox) domain_event.py # DomainEvent (legacy — being removed)
kv.py # KeyValue (key-value store) kv.py # KeyValue (key-value store)
menu_item.py # MenuItem (deprecated — use MenuNode) menu_item.py # MenuItem (deprecated — use MenuNode)
menu_node.py # MenuNode (navigation tree) menu_node.py # MenuNode (navigation tree)
@@ -48,13 +48,13 @@ shared/
user_loader.py # Load current user from session user_loader.py # Load current user from session
http_utils.py # HTTP utility functions http_utils.py # HTTP utility functions
events/ events/
bus.py # emit_event(), register_handler() bus.py # emit_activity(), register_activity_handler()
processor.py # EventProcessor (polls domain_events, runs handlers) processor.py # EventProcessor (polls ap_activities, runs handlers)
handlers/ # Shared event handlers handlers/ # Shared activity handlers
container_handlers.py # Navigation rebuild on attach/detach container_handlers.py # Navigation rebuild on attach/detach
login_handlers.py # Cart/entry adoption on login login_handlers.py # Cart/entry adoption on login
order_handlers.py # Order lifecycle events order_handlers.py # Order lifecycle events
ap_delivery_handler.py # AP activity delivery to follower inboxes ap_delivery_handler.py # AP activity delivery to follower inboxes (wildcard)
utils/ utils/
__init__.py __init__.py
calendar_helpers.py # Calendar period/entry utilities calendar_helpers.py # Calendar period/entry utilities
@@ -78,7 +78,7 @@ shared/
- **App factory:** All apps call `create_base_app()` which sets up DB sessions, CSRF, error handling, event processing, logging, widget registration, and domain service wiring. - **App factory:** All apps call `create_base_app()` which sets up DB sessions, CSRF, error handling, event processing, logging, widget registration, and domain service wiring.
- **Service contracts:** Cross-domain communication via typed Protocols + frozen DTO dataclasses. Apps call `services.calendar.method()`, never import models from other domains. - **Service contracts:** Cross-domain communication via typed Protocols + frozen DTO dataclasses. Apps call `services.calendar.method()`, never import models from other domains.
- **Service registry:** Typed singleton (`services.blog`, `.calendar`, `.market`, `.cart`, `.federation`). Apps wire their own domain + stubs for others via `register_domain_services()`. - **Service registry:** Typed singleton (`services.blog`, `.calendar`, `.market`, `.cart`, `.federation`). Apps wire their own domain + stubs for others via `register_domain_services()`.
- **Event bus:** `emit_event()` writes to `domain_events` table in the caller's transaction. `EventProcessor` polls and dispatches to registered handlers. - **Activity bus:** `emit_activity()` writes to `ap_activities` table in the caller's transaction. `EventProcessor` polls pending activities and dispatches to registered handlers. Internal events use `visibility="internal"`; federation activities use `visibility="public"` and are delivered to follower inboxes by the wildcard delivery handler.
- **Widget registry:** Domain services register widgets (nav, card, account); templates consume via `widgets.container_nav`, `widgets.container_cards`. - **Widget registry:** Domain services register widgets (nav, card, account); templates consume via `widgets.container_nav`, `widgets.container_cards`.
- **Cart identity:** `current_cart_identity()` returns `{"user_id": int|None, "session_id": str|None}` from the request session. - **Cart identity:** `current_cart_identity()` returns `{"user_id": int|None, "session_id": str|None}` from the request session.

View File

@@ -0,0 +1,113 @@
"""add unified event bus columns to ap_activities
Revision ID: m3k1h7i9j0
Revises: l2j0g6h8i9
Create Date: 2026-02-22
Adds processing and visibility columns so ap_activities can serve as the
unified event bus for both internal domain events and federation delivery.
"""
revision = "m3k1h7i9j0"
down_revision = "l2j0g6h8i9"
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade() -> None:
    """Add event-bus processing columns, backfill actor_uri, and add the poll index."""
    # Every new column carries a server default so rows that already exist
    # stay valid: prior federation rows are treated as already-processed
    # public activities.
    bus_columns = [
        sa.Column("actor_uri", sa.String(512), nullable=True),
        sa.Column(
            "visibility", sa.String(20),
            nullable=False, server_default="public",
        ),
        sa.Column(
            "process_state", sa.String(20),
            nullable=False, server_default="completed",
        ),
        sa.Column(
            "process_attempts", sa.Integer(),
            nullable=False, server_default="0",
        ),
        sa.Column(
            "process_max_attempts", sa.Integer(),
            nullable=False, server_default="5",
        ),
        sa.Column("process_error", sa.Text(), nullable=True),
        sa.Column(
            "processed_at", sa.DateTime(timezone=True), nullable=True,
        ),
    ]
    for column in bus_columns:
        op.add_column("ap_activities", column)

    # Backfill actor_uri from the related actor_profile. The domain comes
    # from the app.ap_domain setting when present, else the default host.
    op.execute(
        """
        UPDATE ap_activities a
        SET actor_uri = CONCAT(
            'https://',
            COALESCE(current_setting('app.ap_domain', true), 'rose-ash.com'),
            '/users/',
            p.preferred_username
        )
        FROM ap_actor_profiles p
        WHERE a.actor_profile_id = p.id
        AND a.actor_uri IS NULL
        """
    )

    # Internal events carry no actor profile, so actor_profile_id must
    # become nullable.
    op.alter_column(
        "ap_activities", "actor_profile_id",
        existing_type=sa.Integer(),
        nullable=True,
    )

    # The EventProcessor polls on process_state; index it.
    op.create_index(
        "ix_ap_activity_process", "ap_activities", ["process_state"],
    )
def downgrade() -> None:
    """Remove the event-bus columns and restore the original constraints."""
    op.drop_index("ix_ap_activity_process", table_name="ap_activities")

    # actor_profile_id goes back to NOT NULL; internal-event rows have no
    # profile, so they must be purged before the constraint can apply.
    op.execute(
        "DELETE FROM ap_activities WHERE actor_profile_id IS NULL"
    )
    op.alter_column(
        "ap_activities", "actor_profile_id",
        existing_type=sa.Integer(),
        nullable=False,
    )

    # Drop the added columns in reverse order of creation.
    for column_name in (
        "processed_at",
        "process_error",
        "process_max_attempts",
        "process_attempts",
        "process_state",
        "visibility",
        "actor_uri",
    ):
        op.drop_column("ap_activities", column_name)

View File

@@ -0,0 +1,46 @@
"""drop domain_events table
Revision ID: n4l2i8j0k1
Revises: m3k1h7i9j0
Create Date: 2026-02-22
The domain_events table is no longer used — all events now flow through
ap_activities with the unified activity bus.
"""
revision = "n4l2i8j0k1"
down_revision = "m3k1h7i9j0"
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB
def upgrade() -> None:
    """Remove the legacy outbox table now that ap_activities is the bus."""
    # Drop the indexes explicitly before the table itself.
    for index_name in ("ix_domain_events_state", "ix_domain_events_event_type"):
        op.drop_index(index_name, table_name="domain_events")
    op.drop_table("domain_events")
def downgrade() -> None:
    """Recreate domain_events with its original schema.

    Only the structure comes back — event rows dropped by upgrade()
    are not restored.
    """
    op.create_table(
        "domain_events",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("event_type", sa.String(128), nullable=False),
        sa.Column("aggregate_type", sa.String(64), nullable=False),
        sa.Column("aggregate_id", sa.Integer(), nullable=False),
        sa.Column("payload", JSONB(), nullable=True),
        sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
        sa.Column("attempts", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("max_attempts", sa.Integer(), nullable=False, server_default="5"),
        sa.Column("last_error", sa.Text(), nullable=True),
        sa.Column(
            "created_at", sa.DateTime(timezone=True),
            nullable=False, server_default=sa.func.now(),
        ),
        sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True),
    )
    for column_name, index_name in (
        ("event_type", "ix_domain_events_event_type"),
        ("state", "ix_domain_events_state"),
    ):
        op.create_index(index_name, "domain_events", [column_name])

View File

@@ -1,4 +1,9 @@
from .bus import emit_event, register_handler from .bus import emit_activity, register_activity_handler, get_activity_handlers
from .processor import EventProcessor from .processor import EventProcessor
__all__ = ["emit_event", "register_handler", "EventProcessor"] __all__ = [
"emit_activity",
"register_activity_handler",
"get_activity_handlers",
"EventProcessor",
]

View File

@@ -1,56 +1,109 @@
""" """
Transactional outbox event bus. Unified activity bus.
emit_event() writes to the domain_events table within the caller's existing emit_activity() writes an APActivity row with process_state='pending' within
DB transaction — atomic with whatever domain change triggered the event. the caller's existing DB transaction — atomic with the domain change.
register_handler() registers async handler functions that the EventProcessor register_activity_handler() registers async handler functions that the
will call when processing events of a given type. EventProcessor dispatches when processing pending activities.
""" """
from __future__ import annotations from __future__ import annotations
import uuid
from collections import defaultdict from collections import defaultdict
from typing import Any, Awaitable, Callable, Dict, List from typing import Awaitable, Callable, Dict, List, Tuple
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from shared.models.domain_event import DomainEvent from shared.models.federation import APActivity
# handler signature: async def handler(event: DomainEvent, session: AsyncSession) -> None # ---------------------------------------------------------------------------
HandlerFn = Callable[[DomainEvent, AsyncSession], Awaitable[None]] # Activity-handler registry
# ---------------------------------------------------------------------------
# Handler signature: async def handler(activity: APActivity, session: AsyncSession) -> None
ActivityHandlerFn = Callable[[APActivity, AsyncSession], Awaitable[None]]
_handlers: Dict[str, List[HandlerFn]] = defaultdict(list) # Keyed by (activity_type, object_type). object_type="*" is wildcard.
_activity_handlers: Dict[Tuple[str, str], List[ActivityHandlerFn]] = defaultdict(list)
async def emit_event( def register_activity_handler(
activity_type: str,
fn: ActivityHandlerFn,
*,
object_type: str | None = None,
) -> None:
"""Register an async handler for an activity type + optional object type.
Use ``activity_type="*"`` as a wildcard that fires for every activity
(e.g. federation delivery handler).
"""
key = (activity_type, object_type or "*")
_activity_handlers[key].append(fn)
def get_activity_handlers(
activity_type: str,
object_type: str | None = None,
) -> List[ActivityHandlerFn]:
"""Return all matching handlers for an activity.
Matches in order:
1. Exact (activity_type, object_type)
2. (activity_type, "*") — type-level wildcard
3. ("*", "*") — global wildcard (e.g. delivery)
"""
handlers: List[ActivityHandlerFn] = []
ot = object_type or "*"
# Exact match
if ot != "*":
handlers.extend(_activity_handlers.get((activity_type, ot), []))
# Type-level wildcard
handlers.extend(_activity_handlers.get((activity_type, "*"), []))
# Global wildcard
if activity_type != "*":
handlers.extend(_activity_handlers.get(("*", "*"), []))
return handlers
# ---------------------------------------------------------------------------
# emit_activity — the primary way to emit events
# ---------------------------------------------------------------------------
async def emit_activity(
session: AsyncSession, session: AsyncSession,
event_type: str, *,
aggregate_type: str, activity_type: str,
aggregate_id: int, actor_uri: str,
payload: Dict[str, Any] | None = None, object_type: str,
) -> DomainEvent: object_data: dict | None = None,
source_type: str | None = None,
source_id: int | None = None,
visibility: str = "internal",
actor_profile_id: int | None = None,
) -> APActivity:
""" """
Write a domain event to the outbox table in the current transaction. Write an AP-shaped activity to ap_activities with process_state='pending'.
Call this inside your service function, using the same session that Called inside a service function using the same session that performs the
performs the domain change. The event and the change commit together. domain change. The activity and the change commit together.
""" """
event = DomainEvent( activity_uri = f"internal:{uuid.uuid4()}" if visibility == "internal" else f"urn:uuid:{uuid.uuid4()}"
event_type=event_type,
aggregate_type=aggregate_type, activity = APActivity(
aggregate_id=aggregate_id, activity_id=activity_uri,
payload=payload or {}, activity_type=activity_type,
actor_profile_id=actor_profile_id,
actor_uri=actor_uri,
object_type=object_type,
object_data=object_data or {},
is_local=True,
source_type=source_type,
source_id=source_id,
visibility=visibility,
process_state="pending",
) )
session.add(event) session.add(activity)
await session.flush() # assign event.id await session.flush()
return event return activity
def register_handler(event_type: str, fn: HandlerFn) -> None:
"""Register an async handler for a given event type."""
_handlers[event_type].append(fn)
def get_handlers(event_type: str) -> List[HandlerFn]:
"""Return all registered handlers for an event type."""
return _handlers.get(event_type, [])

View File

@@ -1,7 +1,7 @@
"""Deliver AP activities to remote followers. """Deliver AP activities to remote followers.
On ``federation.activity_created`` → load activity + actor + followers → Registered as a wildcard handler — fires for every activity. Skips
sign with HTTP Signatures → POST to each follower inbox. non-public activities and those without an actor profile.
""" """
from __future__ import annotations from __future__ import annotations
@@ -11,7 +11,7 @@ import httpx
from sqlalchemy import select from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from shared.events.bus import register_handler, DomainEvent from shared.events.bus import register_activity_handler
from shared.models.federation import ActorProfile, APActivity, APFollower from shared.models.federation import ActorProfile, APActivity, APFollower
from shared.services.registry import services from shared.services.registry import services
@@ -33,12 +33,9 @@ def _build_activity_json(activity: APActivity, actor: ActorProfile, domain: str)
object_id = activity.activity_id + "/object" object_id = activity.activity_id + "/object"
if activity.activity_type == "Delete": if activity.activity_type == "Delete":
# Delete: object is a Tombstone with just id + type
obj.setdefault("id", object_id) obj.setdefault("id", object_id)
obj.setdefault("type", "Tombstone") obj.setdefault("type", "Tombstone")
else: else:
# Create/Update: full object with attribution
# Prefer stable id from object_data (set by try_publish), fall back to activity-derived
obj.setdefault("id", object_id) obj.setdefault("id", object_id)
obj.setdefault("type", activity.object_type) obj.setdefault("type", activity.object_type)
obj.setdefault("attributedTo", actor_url) obj.setdefault("attributedTo", actor_url)
@@ -105,30 +102,20 @@ async def _deliver_to_inbox(
return False return False
async def on_activity_created(event: DomainEvent, session: AsyncSession) -> None: async def on_any_activity(activity: APActivity, session: AsyncSession) -> None:
"""Deliver a newly created activity to all followers.""" """Deliver a public activity to all followers of its actor."""
import os import os
# Only deliver public activities that have an actor profile
if activity.visibility != "public":
return
if activity.actor_profile_id is None:
return
if not services.has("federation"): if not services.has("federation"):
return return
payload = event.payload
activity_id_uri = payload.get("activity_id")
if not activity_id_uri:
return
domain = os.getenv("AP_DOMAIN", "rose-ash.com") domain = os.getenv("AP_DOMAIN", "rose-ash.com")
# Load the activity
activity = (
await session.execute(
select(APActivity).where(APActivity.activity_id == activity_id_uri)
)
).scalar_one_or_none()
if not activity:
log.warning("Activity not found: %s", activity_id_uri)
return
# Load actor with private key # Load actor with private key
actor = ( actor = (
await session.execute( await session.execute(
@@ -136,7 +123,7 @@ async def on_activity_created(event: DomainEvent, session: AsyncSession) -> None
) )
).scalar_one_or_none() ).scalar_one_or_none()
if not actor or not actor.private_key_pem: if not actor or not actor.private_key_pem:
log.warning("Actor not found or missing key for activity %s", activity_id_uri) log.warning("Actor not found or missing key for activity %s", activity.activity_id)
return return
# Load followers # Load followers
@@ -147,14 +134,13 @@ async def on_activity_created(event: DomainEvent, session: AsyncSession) -> None
).scalars().all() ).scalars().all()
if not followers: if not followers:
log.debug("No followers to deliver to for %s", activity_id_uri) log.debug("No followers to deliver to for %s", activity.activity_id)
return return
# Build activity JSON # Build activity JSON
activity_json = _build_activity_json(activity, actor, domain) activity_json = _build_activity_json(activity, actor, domain)
# Deliver to each follower inbox # Deduplicate inboxes
# Deduplicate inboxes (multiple followers might share a shared inbox)
inboxes = {f.follower_inbox for f in followers if f.follower_inbox} inboxes = {f.follower_inbox for f in followers if f.follower_inbox}
log.info( log.info(
@@ -167,4 +153,5 @@ async def on_activity_created(event: DomainEvent, session: AsyncSession) -> None
await _deliver_to_inbox(client, inbox_url, activity_json, actor, domain) await _deliver_to_inbox(client, inbox_url, activity_json, actor, domain)
register_handler("federation.activity_created", on_activity_created) # Wildcard: fires for every activity
register_activity_handler("*", on_any_activity)

View File

@@ -2,18 +2,18 @@ from __future__ import annotations
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from shared.events import register_handler from shared.events import register_activity_handler
from shared.models.domain_event import DomainEvent from shared.models.federation import APActivity
from shared.services.navigation import rebuild_navigation from shared.services.navigation import rebuild_navigation
async def on_child_attached(event: DomainEvent, session: AsyncSession) -> None: async def on_child_attached(activity: APActivity, session: AsyncSession) -> None:
await rebuild_navigation(session) await rebuild_navigation(session)
async def on_child_detached(event: DomainEvent, session: AsyncSession) -> None: async def on_child_detached(activity: APActivity, session: AsyncSession) -> None:
await rebuild_navigation(session) await rebuild_navigation(session)
register_handler("container.child_attached", on_child_attached) register_activity_handler("Add", on_child_attached, object_type="rose:ContainerRelation")
register_handler("container.child_detached", on_child_detached) register_activity_handler("Remove", on_child_detached, object_type="rose:ContainerRelation")

View File

@@ -2,24 +2,22 @@ from __future__ import annotations
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from shared.events import register_handler from shared.events import register_activity_handler
from shared.models.domain_event import DomainEvent from shared.models.federation import APActivity
from shared.services.registry import services from shared.services.registry import services
async def on_user_logged_in(event: DomainEvent, session: AsyncSession) -> None: async def on_user_logged_in(activity: APActivity, session: AsyncSession) -> None:
payload = event.payload data = activity.object_data
user_id = payload["user_id"] user_id = data["user_id"]
session_id = payload["session_id"] session_id = data["session_id"]
# Adopt cart items (if cart service is registered)
if services.has("cart"): if services.has("cart"):
await services.cart.adopt_cart_for_user(session, user_id, session_id) await services.cart.adopt_cart_for_user(session, user_id, session_id)
# Adopt calendar entries and tickets (if calendar service is registered)
if services.has("calendar"): if services.has("calendar"):
await services.calendar.adopt_entries_for_user(session, user_id, session_id) await services.calendar.adopt_entries_for_user(session, user_id, session_id)
await services.calendar.adopt_tickets_for_user(session, user_id, session_id) await services.calendar.adopt_tickets_for_user(session, user_id, session_id)
register_handler("user.logged_in", on_user_logged_in) register_activity_handler("rose:Login", on_user_logged_in)

View File

@@ -4,19 +4,19 @@ import logging
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from shared.events import register_handler from shared.events import register_activity_handler
from shared.models.domain_event import DomainEvent from shared.models.federation import APActivity
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
async def on_order_created(event: DomainEvent, session: AsyncSession) -> None: async def on_order_created(activity: APActivity, session: AsyncSession) -> None:
log.info("order.created: order_id=%s", event.payload.get("order_id")) log.info("order.created: order_id=%s", activity.object_data.get("order_id"))
async def on_order_paid(event: DomainEvent, session: AsyncSession) -> None: async def on_order_paid(activity: APActivity, session: AsyncSession) -> None:
log.info("order.paid: order_id=%s", event.payload.get("order_id")) log.info("order.paid: order_id=%s", activity.object_data.get("order_id"))
register_handler("order.created", on_order_created) register_activity_handler("Create", on_order_created, object_type="rose:Order")
register_handler("order.paid", on_order_paid) register_activity_handler("rose:OrderPaid", on_order_paid)

View File

@@ -1,6 +1,6 @@
""" """
Event processor — polls the domain_events outbox table and dispatches Event processor — polls the ap_activities table and dispatches to registered
to registered handlers. activity handlers.
Runs as an asyncio background task within each app process. Runs as an asyncio background task within each app process.
Uses SELECT ... FOR UPDATE SKIP LOCKED for safe concurrent processing. Uses SELECT ... FOR UPDATE SKIP LOCKED for safe concurrent processing.
@@ -11,16 +11,16 @@ import asyncio
import traceback import traceback
from datetime import datetime, timezone from datetime import datetime, timezone
from sqlalchemy import select, update from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from shared.db.session import get_session from shared.db.session import get_session
from shared.models.domain_event import DomainEvent from shared.models.federation import APActivity
from .bus import get_handlers from .bus import get_activity_handlers
class EventProcessor: class EventProcessor:
"""Background event processor that polls the outbox table.""" """Background event processor that polls the ap_activities table."""
def __init__( def __init__(
self, self,
@@ -64,54 +64,52 @@ class EventProcessor:
await asyncio.sleep(self._poll_interval) await asyncio.sleep(self._poll_interval)
async def _process_batch(self) -> int: async def _process_batch(self) -> int:
"""Fetch and process a batch of pending events. Returns count processed.""" """Fetch and process a batch of pending activities. Returns count processed."""
processed = 0 processed = 0
async with get_session() as session: async with get_session() as session:
# FOR UPDATE SKIP LOCKED: safe for concurrent processors
stmt = ( stmt = (
select(DomainEvent) select(APActivity)
.where( .where(
DomainEvent.state == "pending", APActivity.process_state == "pending",
DomainEvent.attempts < DomainEvent.max_attempts, APActivity.process_attempts < APActivity.process_max_attempts,
) )
.order_by(DomainEvent.created_at) .order_by(APActivity.created_at)
.limit(self._batch_size) .limit(self._batch_size)
.with_for_update(skip_locked=True) .with_for_update(skip_locked=True)
) )
result = await session.execute(stmt) result = await session.execute(stmt)
events = result.scalars().all() activities = result.scalars().all()
for event in events: for activity in activities:
await self._process_one(session, event) await self._process_one(session, activity)
processed += 1 processed += 1
await session.commit() await session.commit()
return processed return processed
async def _process_one(self, session: AsyncSession, event: DomainEvent) -> None: async def _process_one(self, session: AsyncSession, activity: APActivity) -> None:
"""Run all handlers for a single event.""" """Run all handlers for a single activity."""
handlers = get_handlers(event.event_type) handlers = get_activity_handlers(activity.activity_type, activity.object_type)
now = datetime.now(timezone.utc) now = datetime.now(timezone.utc)
event.state = "processing" activity.process_state = "processing"
event.attempts += 1 activity.process_attempts += 1
await session.flush() await session.flush()
if not handlers: if not handlers:
# No handlers registered — mark completed (nothing to do) activity.process_state = "completed"
event.state = "completed" activity.processed_at = now
event.processed_at = now
return return
try: try:
for handler in handlers: for handler in handlers:
await handler(event, session) await handler(activity, session)
event.state = "completed" activity.process_state = "completed"
event.processed_at = now activity.processed_at = now
except Exception as exc: except Exception as exc:
event.last_error = f"{exc.__class__.__name__}: {exc}" activity.process_error = f"{exc.__class__.__name__}: {exc}"
if event.attempts >= event.max_attempts: if activity.process_attempts >= activity.process_max_attempts:
event.state = "failed" activity.process_state = "failed"
event.processed_at = now activity.processed_at = now
else: else:
event.state = "pending" # retry activity.process_state = "pending" # retry

View File

@@ -8,8 +8,6 @@ from .ghost_membership_entities import (
GhostNewsletter, UserNewsletter, GhostNewsletter, UserNewsletter,
GhostTier, GhostSubscription, GhostTier, GhostSubscription,
) )
from .domain_event import DomainEvent
from .ghost_content import Tag, Post, Author, PostAuthor, PostTag, PostLike from .ghost_content import Tag, Post, Author, PostAuthor, PostTag, PostLike
from .page_config import PageConfig from .page_config import PageConfig
from .order import Order, OrderItem from .order import Order, OrderItem

View File

@@ -50,14 +50,19 @@ class ActorProfile(Base):
class APActivity(Base): class APActivity(Base):
"""An ActivityPub activity (local or remote).""" """An ActivityPub activity (local or remote).
Also serves as the unified event bus: internal domain events and public
federation activities both live here, distinguished by ``visibility``.
The ``EventProcessor`` polls rows with ``process_state='pending'``.
"""
__tablename__ = "ap_activities" __tablename__ = "ap_activities"
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
activity_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False) activity_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
activity_type: Mapped[str] = mapped_column(String(64), nullable=False) activity_type: Mapped[str] = mapped_column(String(64), nullable=False)
actor_profile_id: Mapped[int] = mapped_column( actor_profile_id: Mapped[int | None] = mapped_column(
Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False, Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True,
) )
object_type: Mapped[str | None] = mapped_column(String(64), nullable=True) object_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
object_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True) object_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
@@ -83,6 +88,27 @@ class APActivity(Base):
DateTime(timezone=True), nullable=False, server_default=func.now(), DateTime(timezone=True), nullable=False, server_default=func.now(),
) )
# --- Unified event-bus columns ---
actor_uri: Mapped[str | None] = mapped_column(
String(512), nullable=True,
)
visibility: Mapped[str] = mapped_column(
String(20), nullable=False, default="public", server_default="public",
)
process_state: Mapped[str] = mapped_column(
String(20), nullable=False, default="completed", server_default="completed",
)
process_attempts: Mapped[int] = mapped_column(
Integer, nullable=False, default=0, server_default="0",
)
process_max_attempts: Mapped[int] = mapped_column(
Integer, nullable=False, default=5, server_default="5",
)
process_error: Mapped[str | None] = mapped_column(Text, nullable=True)
processed_at: Mapped[datetime | None] = mapped_column(
DateTime(timezone=True), nullable=True,
)
# Relationships # Relationships
actor_profile = relationship("ActorProfile", back_populates="activities") actor_profile = relationship("ActorProfile", back_populates="activities")
@@ -90,6 +116,7 @@ class APActivity(Base):
Index("ix_ap_activity_actor", "actor_profile_id"), Index("ix_ap_activity_actor", "actor_profile_id"),
Index("ix_ap_activity_source", "source_type", "source_id"), Index("ix_ap_activity_source", "source_type", "source_id"),
Index("ix_ap_activity_published", "published"), Index("ix_ap_activity_published", "published"),
Index("ix_ap_activity_process", "process_state"),
) )
def __repr__(self) -> str: def __repr__(self) -> str:

View File

@@ -183,16 +183,21 @@ class SqlFederationService:
now = datetime.now(timezone.utc) now = datetime.now(timezone.utc)
actor_url = f"https://{domain}/users/{username}"
activity = APActivity( activity = APActivity(
activity_id=activity_uri, activity_id=activity_uri,
activity_type=activity_type, activity_type=activity_type,
actor_profile_id=actor.id, actor_profile_id=actor.id,
actor_uri=actor_url,
object_type=object_type, object_type=object_type,
object_data=object_data, object_data=object_data,
published=now, published=now,
is_local=True, is_local=True,
source_type=source_type, source_type=source_type,
source_id=source_id, source_id=source_id,
visibility="public",
process_state="pending",
) )
session.add(activity) session.add(activity)
await session.flush() await session.flush()
@@ -208,7 +213,7 @@ class SqlFederationService:
], ],
"id": activity_uri, "id": activity_uri,
"type": activity_type, "type": activity_type,
"actor": f"https://{domain}/users/{username}", "actor": actor_url,
"published": now.isoformat(), "published": now.isoformat(),
"object": { "object": {
"type": object_type, "type": object_type,
@@ -221,21 +226,6 @@ class SqlFederationService:
except Exception: except Exception:
pass # IPFS failure is non-fatal pass # IPFS failure is non-fatal
# Emit domain event for downstream processing (delivery)
from shared.events import emit_event
await emit_event(
session,
"federation.activity_created",
"APActivity",
activity.id,
{
"activity_id": activity.activity_id,
"activity_type": activity_type,
"actor_username": username,
"object_type": object_type,
},
)
return _activity_to_dto(activity) return _activity_to_dto(activity)
# -- Queries -------------------------------------------------------------- # -- Queries --------------------------------------------------------------

View File

@@ -1,9 +1,9 @@
"""Inline federation publication — called at write time, not via async handler. """Inline federation publication — called at write time, not via async handler.
Replaces the old pattern where emit_event("post.published") → async handler → The originating service calls try_publish() directly, which creates the
publish_activity(). Now the originating service calls try_publish() directly, APActivity (with process_state='pending') in the same DB transaction.
which creates the APActivity in the same DB transaction. AP delivery The EventProcessor picks it up and the delivery wildcard handler POSTs
(federation.activity_created → inbox POST) stays async. to follower inboxes.
""" """
from __future__ import annotations from __future__ import annotations

View File

@@ -3,7 +3,7 @@ from __future__ import annotations
from sqlalchemy import select, func from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from shared.events import emit_event from shared.events import emit_activity
from shared.models.container_relation import ContainerRelation from shared.models.container_relation import ContainerRelation
@@ -40,17 +40,19 @@ async def attach_child(
if label is not None: if label is not None:
existing.label = label existing.label = label
await session.flush() await session.flush()
await emit_event( await emit_activity(
session, session,
event_type="container.child_attached", activity_type="Add",
aggregate_type="container_relation", actor_uri="internal:system",
aggregate_id=existing.id, object_type="rose:ContainerRelation",
payload={ object_data={
"parent_type": parent_type, "parent_type": parent_type,
"parent_id": parent_id, "parent_id": parent_id,
"child_type": child_type, "child_type": child_type,
"child_id": child_id, "child_id": child_id,
}, },
source_type="container_relation",
source_id=existing.id,
) )
return existing return existing
# Already attached and active — no-op # Already attached and active — no-op
@@ -77,17 +79,19 @@ async def attach_child(
session.add(rel) session.add(rel)
await session.flush() await session.flush()
await emit_event( await emit_activity(
session, session,
event_type="container.child_attached", activity_type="Add",
aggregate_type="container_relation", actor_uri="internal:system",
aggregate_id=rel.id, object_type="rose:ContainerRelation",
payload={ object_data={
"parent_type": parent_type, "parent_type": parent_type,
"parent_id": parent_id, "parent_id": parent_id,
"child_type": child_type, "child_type": child_type,
"child_id": child_id, "child_id": child_id,
}, },
source_type="container_relation",
source_id=rel.id,
) )
return rel return rel
@@ -139,17 +143,19 @@ async def detach_child(
rel.deleted_at = func.now() rel.deleted_at = func.now()
await session.flush() await session.flush()
await emit_event( await emit_activity(
session, session,
event_type="container.child_detached", activity_type="Remove",
aggregate_type="container_relation", actor_uri="internal:system",
aggregate_id=rel.id, object_type="rose:ContainerRelation",
payload={ object_data={
"parent_type": parent_type, "parent_type": parent_type,
"parent_id": parent_id, "parent_id": parent_id,
"child_type": child_type, "child_type": child_type,
"child_id": child_id, "child_id": child_id,
}, },
source_type="container_relation",
source_id=rel.id,
) )
return True return True