Compare commits
60 Commits
widget-pha
...
b882770828
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b882770828 | ||
|
|
e7d180912b | ||
|
|
beac1b3dab | ||
|
|
25ac3db644 | ||
|
|
d9b51b1f84 | ||
|
|
61ad2db2f3 | ||
|
|
dd9cb9f5f2 | ||
|
|
bbc376aebc | ||
|
|
14fbd59e7b | ||
|
|
856ba94f3b | ||
|
|
1bb19c96ed | ||
|
|
f2262f702b | ||
|
|
001cbffd74 | ||
|
|
38233279a2 | ||
|
|
9cb8cf9e1d | ||
|
|
8951a62b90 | ||
|
|
7b878a501b | ||
|
|
748d28e657 | ||
|
|
cad528d732 | ||
|
|
c4590d1442 | ||
|
|
38a2023ca3 | ||
|
|
de93dfdc73 | ||
|
|
6bb26522a1 | ||
|
|
9a637c6227 | ||
|
|
a93a456ac5 | ||
|
|
223491fad5 | ||
|
|
dfc41ada7d | ||
|
|
60cd08adc9 | ||
|
|
d50f01d41f | ||
|
|
bfd8d55f27 | ||
|
|
d0a5170cd9 | ||
|
|
8323c45711 | ||
|
|
16df62e2c4 | ||
|
|
ea35e040e7 | ||
|
|
46f44f6171 | ||
|
|
326b380135 | ||
|
|
ea8e7da9d4 | ||
|
|
bd18d0befc | ||
|
|
5bed4a6c78 | ||
|
|
798087de9a | ||
|
|
cc22b21b18 | ||
|
|
f085d4a8d0 | ||
|
|
b16ba34b40 | ||
|
|
16e4d3aa57 | ||
|
|
6e438dbfdc | ||
|
|
7316dc6eac | ||
|
|
a3a41dbefd | ||
|
|
30b5a1438b | ||
|
|
0e89dbee55 | ||
|
|
86ccfd25c5 | ||
|
|
b42f5d63db | ||
|
|
2e48760b38 | ||
|
|
9cdd2195df | ||
|
|
46f6ca4a0f | ||
|
|
7de4a2e40e | ||
|
|
1c1ab3576f | ||
|
|
07aa2e2be9 | ||
|
|
2e9db11925 | ||
|
|
d697709f60 | ||
|
|
04f7c5e85c |
82
README.md
82
README.md
@@ -1,6 +1,6 @@
|
||||
# Shared
|
||||
|
||||
Shared infrastructure, models, templates, and configuration used by all four Rose Ash microservices (blog, market, cart, events). Included as a git submodule in each app.
|
||||
Shared infrastructure, models, contracts, services, and templates used by all five Rose Ash microservices (blog, market, cart, events, federation). Included as a git submodule in each app.
|
||||
|
||||
## Structure
|
||||
|
||||
@@ -8,53 +8,78 @@ Shared infrastructure, models, templates, and configuration used by all four Ros
|
||||
shared/
|
||||
db/
|
||||
base.py # SQLAlchemy declarative Base
|
||||
session.py # Async session factory (get_session)
|
||||
models/ # Shared domain models
|
||||
session.py # Async session factory (get_session, register_db)
|
||||
models/ # Canonical domain models
|
||||
user.py # User
|
||||
magic_link.py # MagicLink (auth tokens)
|
||||
domain_event.py # DomainEvent (transactional outbox)
|
||||
(domain_event.py removed — table dropped, see migration n4l2i8j0k1)
|
||||
kv.py # KeyValue (key-value store)
|
||||
menu_item.py # MenuItem
|
||||
menu_item.py # MenuItem (deprecated — use MenuNode)
|
||||
menu_node.py # MenuNode (navigation tree)
|
||||
container_relation.py # ContainerRelation (parent-child content)
|
||||
ghost_membership_entities.py # GhostNewsletter, UserNewsletter
|
||||
federation.py # ActorProfile, APActivity, APFollower, APFollowing,
|
||||
# RemoteActor, APRemotePost, APLocalPost,
|
||||
# APInteraction, APNotification, APAnchor, IPFSPin
|
||||
contracts/
|
||||
dtos.py # Frozen dataclasses for cross-domain data transfer
|
||||
protocols.py # Service protocols (Blog, Calendar, Market, Cart, Federation)
|
||||
widgets.py # Widget types (NavWidget, CardWidget, AccountPageWidget)
|
||||
services/
|
||||
registry.py # Typed singleton: services.blog, .calendar, .market, .cart, .federation
|
||||
blog_impl.py # SqlBlogService
|
||||
calendar_impl.py # SqlCalendarService
|
||||
market_impl.py # SqlMarketService
|
||||
cart_impl.py # SqlCartService
|
||||
federation_impl.py # SqlFederationService
|
||||
federation_publish.py # try_publish() — inline AP publication helper
|
||||
stubs.py # No-op stubs for absent domains
|
||||
navigation.py # get_navigation_tree()
|
||||
relationships.py # attach_child, get_children, detach_child
|
||||
widget_registry.py # Widget registry singleton
|
||||
widgets/ # Per-domain widget registration
|
||||
infrastructure/
|
||||
factory.py # create_base_app() — Quart app factory
|
||||
cart_identity.py # current_cart_identity() (user_id or session_id)
|
||||
cart_loader.py # Cart data loader for context processors
|
||||
context.py # Jinja2 context processors
|
||||
internal_api.py # Inter-app HTTP client (get/post via httpx)
|
||||
jinja_setup.py # Jinja2 template environment setup
|
||||
urls.py # URL helpers (coop_url, market_url, etc.)
|
||||
urls.py # URL helpers (blog_url, market_url, etc.)
|
||||
user_loader.py # Load current user from session
|
||||
http_utils.py # HTTP utility functions
|
||||
events/
|
||||
bus.py # emit_event(), register_handler()
|
||||
processor.py # EventProcessor (polls domain_events, runs handlers)
|
||||
browser/app/
|
||||
csrf.py # CSRF protection
|
||||
errors.py # Error handlers
|
||||
middleware.py # Request/response middleware
|
||||
redis_cacher.py # Tag-based Redis page caching
|
||||
authz.py # Authorization helpers
|
||||
filters/ # Jinja2 template filters (currency, truncate, etc.)
|
||||
utils/ # HTMX helpers, UTC time, parsing
|
||||
payments/sumup.py # SumUp checkout API integration
|
||||
browser/templates/ # ~300 Jinja2 templates shared across all apps
|
||||
config.py # YAML config loader
|
||||
bus.py # emit_activity(), register_activity_handler()
|
||||
processor.py # EventProcessor (polls ap_activities, runs handlers)
|
||||
handlers/ # Shared activity handlers
|
||||
container_handlers.py # Navigation rebuild on attach/detach
|
||||
login_handlers.py # Cart/entry adoption on login
|
||||
order_handlers.py # Order lifecycle events
|
||||
ap_delivery_handler.py # AP activity delivery to follower inboxes (wildcard)
|
||||
utils/
|
||||
__init__.py
|
||||
calendar_helpers.py # Calendar period/entry utilities
|
||||
http_signatures.py # RSA keypair generation, HTTP signature signing/verification
|
||||
ipfs_client.py # Async IPFS client (add_bytes, add_json, pin_cid)
|
||||
anchoring.py # Merkle trees + OpenTimestamps Bitcoin anchoring
|
||||
webfinger.py # WebFinger actor resolution
|
||||
browser/
|
||||
app/ # Middleware, CSRF, errors, Redis caching, authz, filters
|
||||
templates/ # ~300 Jinja2 templates shared across all apps
|
||||
containers.py # ContainerType, container_filter, content_filter helpers
|
||||
config.py # YAML config loader
|
||||
log_config/setup.py # Logging configuration (JSON formatter)
|
||||
utils.py # host_url and other shared utilities
|
||||
static/ # Shared static assets (CSS, JS, images, FontAwesome)
|
||||
editor/ # Koenig (Ghost) rich text editor build
|
||||
alembic/ # Database migrations (25 versions)
|
||||
env.py # Imports models from all apps (with try/except guards)
|
||||
versions/ # Migration files — single head: j0h8e4f6g7
|
||||
alembic/ # Database migrations
|
||||
```
|
||||
|
||||
## Key Patterns
|
||||
|
||||
- **App factory:** All apps call `create_base_app()` which sets up DB sessions, CSRF, error handling, event processing, logging, and the glue handler registry.
|
||||
- **Event bus:** `emit_event()` writes to `domain_events` table in the caller's transaction. `EventProcessor` polls and dispatches to registered handlers.
|
||||
- **Inter-app HTTP:** `internal_api.get/post("cart", "/internal/cart/summary")` for cross-app reads. URLs resolved from `app-config.yaml`.
|
||||
- **App factory:** All apps call `create_base_app()` which sets up DB sessions, CSRF, error handling, event processing, logging, widget registration, and domain service wiring.
|
||||
- **Service contracts:** Cross-domain communication via typed Protocols + frozen DTO dataclasses. Apps call `services.calendar.method()`, never import models from other domains.
|
||||
- **Service registry:** Typed singleton (`services.blog`, `.calendar`, `.market`, `.cart`, `.federation`). Apps wire their own domain + stubs for others via `register_domain_services()`.
|
||||
- **Activity bus:** `emit_activity()` writes to `ap_activities` table in the caller's transaction. `EventProcessor` polls pending activities and dispatches to registered handlers. Internal events use `visibility="internal"`; federation activities use `visibility="public"` and are delivered to follower inboxes by the wildcard delivery handler.
|
||||
- **Widget registry:** Domain services register widgets (nav, card, account); templates consume via `widgets.container_nav`, `widgets.container_cards`.
|
||||
- **Cart identity:** `current_cart_identity()` returns `{"user_id": int|None, "session_id": str|None}` from the request session.
|
||||
|
||||
## Alembic Migrations
|
||||
@@ -62,8 +87,5 @@ shared/
|
||||
All apps share one PostgreSQL database. Migrations are managed here and run from the blog app's entrypoint (other apps skip migrations on startup).
|
||||
|
||||
```bash
|
||||
# From any app directory (shared/ must be on sys.path)
|
||||
alembic -c shared/alembic.ini upgrade head
|
||||
```
|
||||
|
||||
Current head: `j0h8e4f6g7` (drop cross-domain FK constraints).
|
||||
|
||||
@@ -1 +1 @@
|
||||
# shared package — extracted from blog/shared_lib/
|
||||
# shared package — infrastructure, models, contracts, and services
|
||||
|
||||
@@ -19,7 +19,7 @@ from shared.db.base import Base
|
||||
|
||||
# Import ALL models so Base.metadata sees every table
|
||||
import shared.models # noqa: F401 User, KV, MagicLink, MenuItem, Ghost*
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models", "glue.models"):
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models"):
|
||||
try:
|
||||
__import__(_mod)
|
||||
except ImportError:
|
||||
|
||||
113
alembic/versions/m3k1h7i9j0_add_activity_bus_columns.py
Normal file
113
alembic/versions/m3k1h7i9j0_add_activity_bus_columns.py
Normal file
@@ -0,0 +1,113 @@
|
||||
"""add unified event bus columns to ap_activities
|
||||
|
||||
Revision ID: m3k1h7i9j0
|
||||
Revises: l2j0g6h8i9
|
||||
Create Date: 2026-02-22
|
||||
|
||||
Adds processing and visibility columns so ap_activities can serve as the
|
||||
unified event bus for both internal domain events and federation delivery.
|
||||
"""
|
||||
|
||||
revision = "m3k1h7i9j0"
|
||||
down_revision = "l2j0g6h8i9"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# Add new columns with defaults so existing rows stay valid
|
||||
op.add_column(
|
||||
"ap_activities",
|
||||
sa.Column("actor_uri", sa.String(512), nullable=True),
|
||||
)
|
||||
op.add_column(
|
||||
"ap_activities",
|
||||
sa.Column(
|
||||
"visibility", sa.String(20),
|
||||
nullable=False, server_default="public",
|
||||
),
|
||||
)
|
||||
op.add_column(
|
||||
"ap_activities",
|
||||
sa.Column(
|
||||
"process_state", sa.String(20),
|
||||
nullable=False, server_default="completed",
|
||||
),
|
||||
)
|
||||
op.add_column(
|
||||
"ap_activities",
|
||||
sa.Column(
|
||||
"process_attempts", sa.Integer(),
|
||||
nullable=False, server_default="0",
|
||||
),
|
||||
)
|
||||
op.add_column(
|
||||
"ap_activities",
|
||||
sa.Column(
|
||||
"process_max_attempts", sa.Integer(),
|
||||
nullable=False, server_default="5",
|
||||
),
|
||||
)
|
||||
op.add_column(
|
||||
"ap_activities",
|
||||
sa.Column("process_error", sa.Text(), nullable=True),
|
||||
)
|
||||
op.add_column(
|
||||
"ap_activities",
|
||||
sa.Column(
|
||||
"processed_at", sa.DateTime(timezone=True), nullable=True,
|
||||
),
|
||||
)
|
||||
|
||||
# Backfill actor_uri from the related actor_profile
|
||||
op.execute(
|
||||
"""
|
||||
UPDATE ap_activities a
|
||||
SET actor_uri = CONCAT(
|
||||
'https://',
|
||||
COALESCE(current_setting('app.ap_domain', true), 'rose-ash.com'),
|
||||
'/users/',
|
||||
p.preferred_username
|
||||
)
|
||||
FROM ap_actor_profiles p
|
||||
WHERE a.actor_profile_id = p.id
|
||||
AND a.actor_uri IS NULL
|
||||
"""
|
||||
)
|
||||
|
||||
# Make actor_profile_id nullable (internal events have no actor profile)
|
||||
op.alter_column(
|
||||
"ap_activities", "actor_profile_id",
|
||||
existing_type=sa.Integer(),
|
||||
nullable=True,
|
||||
)
|
||||
|
||||
# Index for processor polling
|
||||
op.create_index(
|
||||
"ix_ap_activity_process", "ap_activities", ["process_state"],
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_index("ix_ap_activity_process", table_name="ap_activities")
|
||||
|
||||
# Restore actor_profile_id NOT NULL (remove any rows without it first)
|
||||
op.execute(
|
||||
"DELETE FROM ap_activities WHERE actor_profile_id IS NULL"
|
||||
)
|
||||
op.alter_column(
|
||||
"ap_activities", "actor_profile_id",
|
||||
existing_type=sa.Integer(),
|
||||
nullable=False,
|
||||
)
|
||||
|
||||
op.drop_column("ap_activities", "processed_at")
|
||||
op.drop_column("ap_activities", "process_error")
|
||||
op.drop_column("ap_activities", "process_max_attempts")
|
||||
op.drop_column("ap_activities", "process_attempts")
|
||||
op.drop_column("ap_activities", "process_state")
|
||||
op.drop_column("ap_activities", "visibility")
|
||||
op.drop_column("ap_activities", "actor_uri")
|
||||
46
alembic/versions/n4l2i8j0k1_drop_domain_events_table.py
Normal file
46
alembic/versions/n4l2i8j0k1_drop_domain_events_table.py
Normal file
@@ -0,0 +1,46 @@
|
||||
"""drop domain_events table
|
||||
|
||||
Revision ID: n4l2i8j0k1
|
||||
Revises: m3k1h7i9j0
|
||||
Create Date: 2026-02-22
|
||||
|
||||
The domain_events table is no longer used — all events now flow through
|
||||
ap_activities with the unified activity bus.
|
||||
"""
|
||||
|
||||
revision = "n4l2i8j0k1"
|
||||
down_revision = "m3k1h7i9j0"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.drop_index("ix_domain_events_state", table_name="domain_events")
|
||||
op.drop_index("ix_domain_events_event_type", table_name="domain_events")
|
||||
op.drop_table("domain_events")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.create_table(
|
||||
"domain_events",
|
||||
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
|
||||
sa.Column("event_type", sa.String(128), nullable=False),
|
||||
sa.Column("aggregate_type", sa.String(64), nullable=False),
|
||||
sa.Column("aggregate_id", sa.Integer(), nullable=False),
|
||||
sa.Column("payload", JSONB(), nullable=True),
|
||||
sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
|
||||
sa.Column("attempts", sa.Integer(), nullable=False, server_default="0"),
|
||||
sa.Column("max_attempts", sa.Integer(), nullable=False, server_default="5"),
|
||||
sa.Column("last_error", sa.Text(), nullable=True),
|
||||
sa.Column(
|
||||
"created_at", sa.DateTime(timezone=True),
|
||||
nullable=False, server_default=sa.func.now(),
|
||||
),
|
||||
sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True),
|
||||
)
|
||||
op.create_index("ix_domain_events_event_type", "domain_events", ["event_type"])
|
||||
op.create_index("ix_domain_events_state", "domain_events", ["state"])
|
||||
35
alembic/versions/o5m3j9k1l2_add_origin_app_column.py
Normal file
35
alembic/versions/o5m3j9k1l2_add_origin_app_column.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""Add origin_app column to ap_activities
|
||||
|
||||
Revision ID: o5m3j9k1l2
|
||||
Revises: n4l2i8j0k1
|
||||
Create Date: 2026-02-22
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import inspect as sa_inspect
|
||||
|
||||
revision = "o5m3j9k1l2"
|
||||
down_revision = "n4l2i8j0k1"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = sa_inspect(conn)
|
||||
columns = [c["name"] for c in inspector.get_columns("ap_activities")]
|
||||
if "origin_app" not in columns:
|
||||
op.add_column(
|
||||
"ap_activities",
|
||||
sa.Column("origin_app", sa.String(64), nullable=True),
|
||||
)
|
||||
# Index is idempotent with if_not_exists
|
||||
op.create_index(
|
||||
"ix_ap_activity_origin_app", "ap_activities", ["origin_app"],
|
||||
if_not_exists=True,
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_index("ix_ap_activity_origin_app", table_name="ap_activities")
|
||||
op.drop_column("ap_activities", "origin_app")
|
||||
37
alembic/versions/p6n4k0l2m3_add_oauth_codes_table.py
Normal file
37
alembic/versions/p6n4k0l2m3_add_oauth_codes_table.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""Add oauth_codes table
|
||||
|
||||
Revision ID: p6n4k0l2m3
|
||||
Revises: o5m3j9k1l2
|
||||
Create Date: 2026-02-23
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "p6n4k0l2m3"
|
||||
down_revision = "o5m3j9k1l2"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"oauth_codes",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("code", sa.String(128), nullable=False),
|
||||
sa.Column("user_id", sa.Integer(), nullable=False),
|
||||
sa.Column("client_id", sa.String(64), nullable=False),
|
||||
sa.Column("redirect_uri", sa.String(512), nullable=False),
|
||||
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("used_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
|
||||
)
|
||||
op.create_index("ix_oauth_code_code", "oauth_codes", ["code"], unique=True)
|
||||
op.create_index("ix_oauth_code_user", "oauth_codes", ["user_id"])
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_index("ix_oauth_code_user", table_name="oauth_codes")
|
||||
op.drop_index("ix_oauth_code_code", table_name="oauth_codes")
|
||||
op.drop_table("oauth_codes")
|
||||
41
alembic/versions/q7o5l1m3n4_add_oauth_grants_table.py
Normal file
41
alembic/versions/q7o5l1m3n4_add_oauth_grants_table.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""Add oauth_grants table
|
||||
|
||||
Revision ID: q7o5l1m3n4
|
||||
Revises: p6n4k0l2m3
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "q7o5l1m3n4"
|
||||
down_revision = "p6n4k0l2m3"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.create_table(
|
||||
"oauth_grants",
|
||||
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
|
||||
sa.Column("token", sa.String(128), unique=True, nullable=False),
|
||||
sa.Column("user_id", sa.Integer, sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
|
||||
sa.Column("client_id", sa.String(64), nullable=False),
|
||||
sa.Column("issuer_session", sa.String(128), nullable=False),
|
||||
sa.Column("device_id", sa.String(128), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.Column("revoked_at", sa.DateTime(timezone=True), nullable=True),
|
||||
)
|
||||
op.create_index("ix_oauth_grant_token", "oauth_grants", ["token"], unique=True)
|
||||
op.create_index("ix_oauth_grant_issuer", "oauth_grants", ["issuer_session"])
|
||||
op.create_index("ix_oauth_grant_user", "oauth_grants", ["user_id"])
|
||||
op.create_index("ix_oauth_grant_device", "oauth_grants", ["device_id", "client_id"])
|
||||
|
||||
# Add grant_token column to oauth_codes to link code → grant
|
||||
op.add_column("oauth_codes", sa.Column("grant_token", sa.String(128), nullable=True))
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_column("oauth_codes", "grant_token")
|
||||
op.drop_index("ix_oauth_grant_user", table_name="oauth_grants")
|
||||
op.drop_index("ix_oauth_grant_issuer", table_name="oauth_grants")
|
||||
op.drop_index("ix_oauth_grant_token", table_name="oauth_grants")
|
||||
op.drop_table("oauth_grants")
|
||||
29
alembic/versions/r8p6m2n4o5_add_device_id_to_oauth_grants.py
Normal file
29
alembic/versions/r8p6m2n4o5_add_device_id_to_oauth_grants.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""Add device_id column to oauth_grants
|
||||
|
||||
Revision ID: r8p6m2n4o5
|
||||
Revises: q7o5l1m3n4
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "r8p6m2n4o5"
|
||||
down_revision = "q7o5l1m3n4"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# device_id was added to the create_table migration after it had already
|
||||
# run, so the column is missing from the live DB. Add it now.
|
||||
op.add_column(
|
||||
"oauth_grants",
|
||||
sa.Column("device_id", sa.String(128), nullable=True),
|
||||
)
|
||||
op.create_index(
|
||||
"ix_oauth_grant_device", "oauth_grants", ["device_id", "client_id"]
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_index("ix_oauth_grant_device", table_name="oauth_grants")
|
||||
op.drop_column("oauth_grants", "device_id")
|
||||
30
alembic/versions/s9q7n3o5p6_add_ap_delivery_log_table.py
Normal file
30
alembic/versions/s9q7n3o5p6_add_ap_delivery_log_table.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""Add ap_delivery_log table for idempotent federation delivery
|
||||
|
||||
Revision ID: s9q7n3o5p6
|
||||
Revises: r8p6m2n4o5
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "s9q7n3o5p6"
|
||||
down_revision = "r8p6m2n4o5"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.create_table(
|
||||
"ap_delivery_log",
|
||||
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
|
||||
sa.Column("activity_id", sa.Integer, sa.ForeignKey("ap_activities.id", ondelete="CASCADE"), nullable=False),
|
||||
sa.Column("inbox_url", sa.String(512), nullable=False),
|
||||
sa.Column("status_code", sa.Integer, nullable=True),
|
||||
sa.Column("delivered_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.UniqueConstraint("activity_id", "inbox_url", name="uq_delivery_activity_inbox"),
|
||||
)
|
||||
op.create_index("ix_ap_delivery_activity", "ap_delivery_log", ["activity_id"])
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_index("ix_ap_delivery_activity", table_name="ap_delivery_log")
|
||||
op.drop_table("ap_delivery_log")
|
||||
@@ -0,0 +1,51 @@
|
||||
"""Add app_domain to ap_followers for per-app AP actors
|
||||
|
||||
Revision ID: t0r8n4o6p7
|
||||
Revises: s9q7n3o5p6
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "t0r8n4o6p7"
|
||||
down_revision = "s9q7n3o5p6"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# Add column as nullable first so we can backfill
|
||||
op.add_column(
|
||||
"ap_followers",
|
||||
sa.Column("app_domain", sa.String(64), nullable=True),
|
||||
)
|
||||
# Backfill existing rows: all current followers are aggregate
|
||||
op.execute("UPDATE ap_followers SET app_domain = 'federation' WHERE app_domain IS NULL")
|
||||
# Now make it NOT NULL with a default
|
||||
op.alter_column(
|
||||
"ap_followers", "app_domain",
|
||||
nullable=False, server_default="federation",
|
||||
)
|
||||
# Replace old unique constraint with one that includes app_domain
|
||||
op.drop_constraint("uq_follower_acct", "ap_followers", type_="unique")
|
||||
op.create_unique_constraint(
|
||||
"uq_follower_acct_app",
|
||||
"ap_followers",
|
||||
["actor_profile_id", "follower_acct", "app_domain"],
|
||||
)
|
||||
op.create_index(
|
||||
"ix_ap_follower_app_domain",
|
||||
"ap_followers",
|
||||
["actor_profile_id", "app_domain"],
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_index("ix_ap_follower_app_domain", table_name="ap_followers")
|
||||
op.drop_constraint("uq_follower_acct_app", "ap_followers", type_="unique")
|
||||
op.create_unique_constraint(
|
||||
"uq_follower_acct",
|
||||
"ap_followers",
|
||||
["actor_profile_id", "follower_acct"],
|
||||
)
|
||||
op.alter_column("ap_followers", "app_domain", nullable=True, server_default=None)
|
||||
op.drop_column("ap_followers", "app_domain")
|
||||
@@ -0,0 +1,33 @@
|
||||
"""Add app_domain to ap_delivery_log for per-domain idempotency
|
||||
|
||||
Revision ID: u1s9o5p7q8
|
||||
Revises: t0r8n4o6p7
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "u1s9o5p7q8"
|
||||
down_revision = "t0r8n4o6p7"
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.add_column(
|
||||
"ap_delivery_log",
|
||||
sa.Column("app_domain", sa.String(128), nullable=False, server_default="federation"),
|
||||
)
|
||||
op.drop_constraint("uq_delivery_activity_inbox", "ap_delivery_log", type_="unique")
|
||||
op.create_unique_constraint(
|
||||
"uq_delivery_activity_inbox_domain",
|
||||
"ap_delivery_log",
|
||||
["activity_id", "inbox_url", "app_domain"],
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_constraint("uq_delivery_activity_inbox_domain", "ap_delivery_log", type_="unique")
|
||||
op.drop_column("ap_delivery_log", "app_domain")
|
||||
op.create_unique_constraint(
|
||||
"uq_delivery_activity_inbox",
|
||||
"ap_delivery_log",
|
||||
["activity_id", "inbox_url"],
|
||||
)
|
||||
@@ -1,9 +1,9 @@
|
||||
# The monolith has been split into three apps (apps/coop, apps/market, apps/cart).
|
||||
# The monolith has been split into three apps (apps/blog, apps/market, apps/cart).
|
||||
# This package remains for shared infrastructure modules (middleware, redis_cacher,
|
||||
# csrf, errors, authz, filters, utils, bp/*).
|
||||
#
|
||||
# To run individual apps:
|
||||
# hypercorn apps.coop.app:app --bind 0.0.0.0:8000
|
||||
# hypercorn apps.blog.app:app --bind 0.0.0.0:8000
|
||||
# hypercorn apps.market.app:app --bind 0.0.0.0:8001
|
||||
# hypercorn apps.cart.app:app --bind 0.0.0.0:8002
|
||||
#
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
<form action="{{ coop_url('/auth/logout/') }}" method="post">
|
||||
<form action="/auth/logout/" method="post">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<button
|
||||
type="submit"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{% import 'macros/links.html' as links %}
|
||||
{% call links.link(coop_url('/auth/newsletters/'), hx_select_search, select_colours, True, aclass=styles.nav_button) %}
|
||||
{% call links.link(account_url('/newsletters/'), hx_select_search, select_colours, True, aclass=styles.nav_button) %}
|
||||
newsletters
|
||||
{% endcall %}
|
||||
{% for link in account_nav_links %}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
<div id="nl-{{ un.newsletter_id }}" class="flex items-center">
|
||||
<button
|
||||
hx-post="{{ coop_url('/auth/newsletter/' ~ un.newsletter_id ~ '/toggle/') }}"
|
||||
hx-post="{{ account_url('/newsletter/' ~ un.newsletter_id ~ '/toggle/') }}"
|
||||
hx-headers='{"X-CSRFToken": "{{ csrf_token() }}"}'
|
||||
hx-target="#nl-{{ un.newsletter_id }}"
|
||||
hx-swap="outerHTML"
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
{# No subscription row yet — show an off toggle that will create one #}
|
||||
<div id="nl-{{ item.newsletter.id }}" class="flex items-center">
|
||||
<button
|
||||
hx-post="{{ coop_url('/auth/newsletter/' ~ item.newsletter.id ~ '/toggle/') }}"
|
||||
hx-post="{{ account_url('/newsletter/' ~ item.newsletter.id ~ '/toggle/') }}"
|
||||
hx-headers='{"X-CSRFToken": "{{ csrf_token() }}"}'
|
||||
hx-target="#nl-{{ item.newsletter.id }}"
|
||||
hx-swap="outerHTML"
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
|
||||
<p class="mt-6 text-sm">
|
||||
<a
|
||||
href="{{ coop_url('/auth/login/') }}"
|
||||
href="{{ blog_url('/auth/login/') }}"
|
||||
class="text-stone-600 dark:text-stone-300 hover:underline"
|
||||
>
|
||||
← Back
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{% import 'macros/links.html' as links %}
|
||||
{% macro header_row(oob=False) %}
|
||||
{% call links.menu_row(id='auth-row', oob=oob) %}
|
||||
{% call links.link(coop_url('/auth/account/'), hx_select_search ) %}
|
||||
{% call links.link(account_url('/'), hx_select_search ) %}
|
||||
<i class="fa-solid fa-user"></i>
|
||||
<div>account</div>
|
||||
{% endcall %}
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
{% endif %}
|
||||
|
||||
<form
|
||||
method="post" action="{{ coop_url('/auth/start/') }}"
|
||||
method="post" action="{{ blog_url('/auth/start/') }}"
|
||||
class="mt-6 space-y-5"
|
||||
>
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
|
||||
@@ -8,8 +8,8 @@
|
||||
The post "{{ slug }}" could not be found.
|
||||
</p>
|
||||
<a
|
||||
href="{{ url_for('blog.home')|host }}"
|
||||
hx-get="{{ url_for('blog.home')|host }}"
|
||||
href="{{ url_for('blog.index')|host }}"
|
||||
hx-get="{{ url_for('blog.index')|host }}"
|
||||
hx-target="#main-panel"
|
||||
hx-select="{{ hx_select }}"
|
||||
hx-swap="outerHTML"
|
||||
|
||||
@@ -1,169 +0,0 @@
|
||||
{% macro show_cart(oob=False) %}
|
||||
<div id="cart" {% if oob %} hx-swap-oob="{{oob}}" {% endif%}>
|
||||
{# Empty cart #}
|
||||
{% if not cart and not calendar_cart_entries %}
|
||||
<div class="rounded-2xl border border-dashed border-stone-300 bg-white/80 p-6 sm:p-8 text-center">
|
||||
<div class="inline-flex h-10 w-10 sm:h-12 sm:w-12 items-center justify-center rounded-full bg-stone-100 mb-3">
|
||||
<i class="fa fa-shopping-cart text-stone-500 text-sm sm:text-base" aria-hidden="true"></i>
|
||||
</div>
|
||||
<p class="text-base sm:text-lg font-medium text-stone-800">
|
||||
Your cart is empty
|
||||
</p>
|
||||
{#
|
||||
<p class="mt-1 text-xs sm:text-sm text-stone-600">
|
||||
Add some items from the shop to see them here.
|
||||
</p>
|
||||
<div class="mt-4">
|
||||
<a
|
||||
href="{{ market_url('/') }}"
|
||||
class="inline-flex items-center px-4 py-2 text-sm font-semibold rounded-full bg-emerald-600 text-white hover:bg-emerald-700"
|
||||
>
|
||||
Browse products
|
||||
</a>
|
||||
</div> #}
|
||||
</div>
|
||||
|
||||
{% else %}
|
||||
|
||||
<div _class="grid gap-y-6 lg:gap-8 lg:grid-cols-[minmax(0,2fr),minmax(0,1fr)]">
|
||||
{# Items list #}
|
||||
<section class="space-y-3 sm:space-y-4">
|
||||
{% for item in cart %}
|
||||
{% from '_types/product/_cart.html' import cart_item with context %}
|
||||
{{ cart_item()}}
|
||||
{% endfor %}
|
||||
{% if calendar_cart_entries %}
|
||||
<div class="mt-6 border-t border-stone-200 pt-4">
|
||||
<h2 class="text-base font-semibold mb-2">
|
||||
Calendar bookings
|
||||
</h2>
|
||||
|
||||
<ul class="space-y-2">
|
||||
{% for entry in calendar_cart_entries %}
|
||||
<li class="flex items-start justify-between text-sm">
|
||||
<div>
|
||||
<div class="font-medium">
|
||||
{{ entry.name or entry.calendar_name }}
|
||||
</div>
|
||||
<div class="text-xs text-stone-500">
|
||||
{{ entry.start_at }}
|
||||
{% if entry.end_at %}
|
||||
– {{ entry.end_at }}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="ml-4 font-medium">
|
||||
£{{ "%.2f"|format(entry.cost or 0) }}
|
||||
</div>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
{% endif %}
|
||||
</section>
|
||||
{{summary(cart, total, calendar_total, calendar_cart_entries,)}}
|
||||
|
||||
</div>
|
||||
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro summary(cart, total, calendar_total, calendar_cart_entries, oob=False) %}
|
||||
<aside id="cart-summary" class="lg:pl-2" {% if oob %} hx-swap-oob="{{oob}}" {% endif %}>
|
||||
<div class="rounded-2xl bg-white shadow-sm border border-stone-200 p-4 sm:p-5">
|
||||
<h2 class="text-sm sm:text-base font-semibold text-stone-900 mb-3 sm:mb-4">
|
||||
Order summary
|
||||
</h2>
|
||||
|
||||
<dl class="space-y-2 text-xs sm:text-sm">
|
||||
<div class="flex items-center justify-between">
|
||||
<dt class="text-stone-600">Items</dt>
|
||||
<dd class="text-stone-900">
|
||||
{{ cart | sum(attribute="quantity") }}
|
||||
</dd>
|
||||
</div>
|
||||
<div class="flex items-center justify-between">
|
||||
<dt class="text-stone-600">Subtotal</dt>
|
||||
<dd class="text-stone-900">
|
||||
{{ cart_grand_total(cart, total, calendar_total, calendar_cart_entries ) }}
|
||||
</dd>
|
||||
</div>
|
||||
</dl>
|
||||
<div class="flex flex-col items-center w-full">
|
||||
<h1 class="text-5xl mt-2">
|
||||
This is a test - it will not take actual money
|
||||
</h1>
|
||||
<div>
|
||||
use dummy card number: 5555 5555 5555 4444
|
||||
</div>
|
||||
</div>
|
||||
<div class="mt-4 sm:mt-5">
|
||||
{% if g.user %}
|
||||
<form
|
||||
method="post"
|
||||
action="{{ page_cart_url(page_post.slug, '/checkout/') if page_post is defined and page_post else cart_url('/checkout/') }}"
|
||||
class="w-full"
|
||||
>
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<button
|
||||
type="submit"
|
||||
class="w-full inline-flex items-center justify-center px-4 py-2 text-xs sm:text-sm rounded-full border border-emerald-600 bg-emerald-600 text-white hover:bg-emerald-700 transition"
|
||||
>
|
||||
<i class="fa-solid fa-credit-card mr-2" aria-hidden="true"></i>
|
||||
Checkout as {{g.user.email}}
|
||||
</button>
|
||||
</form>
|
||||
{% else %}
|
||||
{% set href=login_url(request.url) %}
|
||||
<div
|
||||
class="w-full flex"
|
||||
>
|
||||
<a
|
||||
href="{{ href }}"
|
||||
hx-get="{{ href }}"
|
||||
hx-target="#main-panel"
|
||||
hx-select ="{{hx_select_search}}"
|
||||
hx-swap="outerHTML"
|
||||
hx-push-url="true"
|
||||
aria-selected="{{ 'true' if local_href == request.path else 'false' }}"
|
||||
class="w-full cursor-pointer flex flex-row items-center justify-center p-3 gap-2 rounded bg-stone-200 text-black {{select_colours}}"
|
||||
data-close-details
|
||||
>
|
||||
<i class="fa-solid fa-key"></i>
|
||||
<span>sign in or register to checkout</span>
|
||||
|
||||
</a>
|
||||
</div>
|
||||
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</aside>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro cart_total(cart, total) %}
|
||||
{% set cart_total = total(cart) %}
|
||||
{% if cart_total %}
|
||||
{% set symbol = "£" if cart[0].product.regular_price_currency == "GBP" else cart[0].product.regular_price_currency %}
|
||||
{{ symbol }}{{ "%.2f"|format(cart_total) }}
|
||||
{% else %}
|
||||
–
|
||||
{% endif %}
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro cart_grand_total(cart, total, calendar_total, calendar_cart_entries) %}
|
||||
{% set product_total = total(cart) or 0 %}
|
||||
{% set cal_total = calendar_total(calendar_cart_entries) or 0 %}
|
||||
{% set grand = product_total + cal_total %}
|
||||
|
||||
{% if cart and cart[0].product.regular_price_currency %}
|
||||
{% set symbol = "£" if cart[0].product.regular_price_currency == "GBP" else cart[0].product.regular_price_currency %}
|
||||
{% else %}
|
||||
{% set symbol = "£" %}
|
||||
{% endif %}
|
||||
|
||||
{{ symbol }}{{ "%.2f"|format(grand) }}
|
||||
{% endmacro %}
|
||||
@@ -17,7 +17,7 @@
|
||||
{% if _count == 0 %}
|
||||
<div class="h-12 w-12 rounded-full overflow-hidden border border-stone-300 flex-shrink-0">
|
||||
<a
|
||||
href="{{ coop_url('/') }}"
|
||||
href="{{ blog_url('/') }}"
|
||||
class="h-full w-full font-bold text-5xl flex-shrink-0 flex flex-row items-center gap-1"
|
||||
>
|
||||
<img
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
{% from 'macros/scrolling_menu.html' import scrolling_menu with context %}
|
||||
{% call(entry_post) scrolling_menu('entry-posts-container', entry_posts) %}
|
||||
<a
|
||||
href="{{ coop_url('/' + entry_post.slug + '/') }}"
|
||||
href="{{ blog_url('/' + entry_post.slug + '/') }}"
|
||||
class="flex items-center gap-2 px-3 py-2 hover:bg-stone-100 rounded transition text-sm border sm:whitespace-nowrap sm:flex-shrink-0">
|
||||
{% if entry_post.feature_image %}
|
||||
<img src="{{ entry_post.feature_image }}"
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
{% from 'macros/scrolling_menu.html' import scrolling_menu with context %}
|
||||
{% call(entry_post) scrolling_menu('entry-posts-container', entry_posts) %}
|
||||
<a
|
||||
href="{{ coop_url('/' + entry_post.slug + '/') }}"
|
||||
href="{{ blog_url('/' + entry_post.slug + '/') }}"
|
||||
class="{{styles.nav_button}}"
|
||||
>
|
||||
{% if entry_post.feature_image %}
|
||||
|
||||
19
browser/templates/_types/home/_oob_elements.html
Normal file
19
browser/templates/_types/home/_oob_elements.html
Normal file
@@ -0,0 +1,19 @@
|
||||
{% extends 'oob_elements.html' %}
|
||||
|
||||
{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %}
|
||||
{% from '_types/root/_oob_menu.html' import mobile_menu with context %}
|
||||
|
||||
{% block oobs %}
|
||||
{% from '_types/root/header/_header.html' import header_row with context %}
|
||||
{{ header_row(oob=True) }}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<article class="relative">
|
||||
<div class="blog-content p-2">
|
||||
{% if post.html %}
|
||||
{{post.html|safe}}
|
||||
{% endif %}
|
||||
</div>
|
||||
</article>
|
||||
{% endblock %}
|
||||
14
browser/templates/_types/home/index.html
Normal file
14
browser/templates/_types/home/index.html
Normal file
@@ -0,0 +1,14 @@
|
||||
{% extends '_types/root/_index.html' %}
|
||||
{% block meta %}
|
||||
{% include '_types/post/_meta.html' %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<article class="relative">
|
||||
<div class="blog-content p-2">
|
||||
{% if post.html %}
|
||||
{{post.html|safe}}
|
||||
{% endif %}
|
||||
</div>
|
||||
</article>
|
||||
{% endblock %}
|
||||
@@ -2,7 +2,7 @@
|
||||
class="font-bold text-xl flex-shrink-0 flex gap-2 items-center">
|
||||
<div>
|
||||
<i class="fa fa-shop"></i>
|
||||
{{ coop_title }}
|
||||
{{ market_title }}
|
||||
</div>
|
||||
<div class="flex flex-col md:flex-row md:gap-2 text-xs">
|
||||
<div>
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
hx-swap-oob="outerHTML">
|
||||
{% from 'macros/scrolling_menu.html' import scrolling_menu with context %}
|
||||
{% call(item) scrolling_menu('menu-items-container', menu_items) %}
|
||||
{% set _href = _app_slugs.get(item.slug, coop_url('/' + item.slug + '/')) %}
|
||||
{% set _href = _app_slugs.get(item.slug, blog_url('/' + item.slug + '/')) %}
|
||||
<a
|
||||
href="{{ _href }}"
|
||||
{% if item.slug not in _app_slugs %}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{% import 'macros/links.html' as links %}
|
||||
{% macro header_row(oob=False) %}
|
||||
{% call links.menu_row(id='post-row', oob=oob) %}
|
||||
<a href="{{ coop_url('/' + post.slug + '/') }}" class="flex items-center gap-2 px-3 py-2 rounded whitespace-normal text-center break-words leading-snug">
|
||||
<a href="{{ blog_url('/' + post.slug + '/') }}" class="flex items-center gap-2 px-3 py-2 rounded whitespace-normal text-center break-words leading-snug">
|
||||
{% if post.feature_image %}
|
||||
<img
|
||||
src="{{ post.feature_image }}"
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
|
||||
{% block filter %}
|
||||
{% call layout.details() %}
|
||||
{% call layout.summary('coop-child-header') %}
|
||||
{% call layout.summary('blog-child-header') %}
|
||||
{% endcall %}
|
||||
{% call layout.menu('blog-child-menu') %}
|
||||
{% endcall %}
|
||||
|
||||
@@ -30,8 +30,8 @@
|
||||
{% block filter %}
|
||||
|
||||
{% call layout.details() %}
|
||||
{% call layout.summary('coop-child-header') %}
|
||||
{% block coop_child_summary %}
|
||||
{% call layout.summary('blog-child-header') %}
|
||||
{% block blog_child_summary %}
|
||||
{% endblock %}
|
||||
{% endcall %}
|
||||
{% call layout.menu('blog-child-menu') %}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
{% set href=coop_url('/auth/account/') %}
|
||||
{% set href=account_url('/') %}
|
||||
<a
|
||||
href="{{ href }}"
|
||||
class="justify-center cursor-pointer flex flex-row items-center p-3 gap-2 rounded bg-stone-200 text-black {{select_colours}}"
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
id="menu-items-nav-wrapper">
|
||||
{% from 'macros/scrolling_menu.html' import scrolling_menu with context %}
|
||||
{% call(item) scrolling_menu('menu-items-container', menu_items) %}
|
||||
{% set _href = _app_slugs.get(item.slug, coop_url('/' + item.slug + '/')) %}
|
||||
{% set _href = _app_slugs.get(item.slug, blog_url('/' + item.slug + '/')) %}
|
||||
<a
|
||||
href="{{ _href }}"
|
||||
aria-selected="{{ 'true' if (item.slug == _first_seg or item.slug == app_name) else 'false' }}"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{% import 'macros/links.html' as links %}
|
||||
{% if g.rights.admin %}
|
||||
<a href="{{ coop_url('/settings/') }}" class="{{styles.nav_button}}">
|
||||
<a href="{{ blog_url('/settings/') }}" class="{{styles.nav_button}}">
|
||||
<i class="fa fa-cog" aria-hidden="true"></i>
|
||||
</a>
|
||||
{% endif %}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
|
||||
<a
|
||||
href="{{ login_url(request.url) }}"
|
||||
href="{{ account_url('/') }}"
|
||||
aria-selected="{{ 'true' if '/auth/login' in request.path else 'false' }}"
|
||||
class="justify-center cursor-pointer flex flex-row items-center p-3 gap-2 rounded bg-stone-200 text-black {{select_colours}}"
|
||||
data-close-details
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
← Go Back
|
||||
</button>
|
||||
<a
|
||||
href="{{ coop_url('/') }}"
|
||||
href="{{ blog_url('/') }}"
|
||||
class="px-4 py-2 bg-stone-800 text-white rounded hover:bg-stone-700 transition-colors text-center"
|
||||
>
|
||||
Home
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
{% set href=coop_url('/auth/account/') %}
|
||||
{% set href=account_url('/') %}
|
||||
<a
|
||||
href="{{ href }}"
|
||||
data-close-details
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
|
||||
<a
|
||||
href="{{ login_url(request.url) }}"
|
||||
href="{{ account_url('/') }}"
|
||||
aria-selected="{{ 'true' if '/auth/login' in request.path else 'false' }}"
|
||||
>
|
||||
<i class="fa-solid fa-key"></i>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{% macro title(_class='') %}
|
||||
<a
|
||||
href="{{ coop_url('/') }}"
|
||||
href="{{ blog_url('/') }}"
|
||||
class="{{_class}}"
|
||||
>
|
||||
<h1>
|
||||
|
||||
@@ -176,6 +176,7 @@ class APFollowerDTO:
|
||||
follower_inbox: str
|
||||
follower_actor_url: str
|
||||
created_at: datetime | None = None
|
||||
app_domain: str = "federation"
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
|
||||
@@ -129,6 +129,12 @@ class CalendarService(Protocol):
|
||||
self, session: AsyncSession, content_type: str, content_id: int,
|
||||
) -> set[int]: ...
|
||||
|
||||
async def upcoming_entries_for_container(
|
||||
self, session: AsyncSession,
|
||||
container_type: str | None = None, container_id: int | None = None,
|
||||
*, page: int = 1, per_page: int = 20,
|
||||
) -> tuple[list[CalendarEntryDTO], bool]: ...
|
||||
|
||||
async def visible_entries_for_period(
|
||||
self, session: AsyncSession, calendar_id: int,
|
||||
period_start: datetime, period_end: datetime,
|
||||
@@ -149,6 +155,12 @@ class MarketService(Protocol):
|
||||
name: str, slug: str,
|
||||
) -> MarketPlaceDTO: ...
|
||||
|
||||
async def list_marketplaces(
|
||||
self, session: AsyncSession,
|
||||
container_type: str | None = None, container_id: int | None = None,
|
||||
*, page: int = 1, per_page: int = 20,
|
||||
) -> tuple[list[MarketPlaceDTO], bool]: ...
|
||||
|
||||
async def soft_delete_marketplace(
|
||||
self, session: AsyncSession, container_type: str, container_id: int,
|
||||
slug: str,
|
||||
@@ -210,25 +222,39 @@ class FederationService(Protocol):
|
||||
async def get_outbox(
|
||||
self, session: AsyncSession, username: str,
|
||||
page: int = 1, per_page: int = 20,
|
||||
origin_app: str | None = None,
|
||||
) -> tuple[list[APActivityDTO], int]: ...
|
||||
|
||||
async def get_activity_for_source(
|
||||
self, session: AsyncSession, source_type: str, source_id: int,
|
||||
) -> APActivityDTO | None: ...
|
||||
|
||||
async def count_activities_for_source(
|
||||
self, session: AsyncSession, source_type: str, source_id: int,
|
||||
*, activity_type: str,
|
||||
) -> int: ...
|
||||
|
||||
# -- Followers ------------------------------------------------------------
|
||||
async def get_followers(
|
||||
self, session: AsyncSession, username: str,
|
||||
app_domain: str | None = None,
|
||||
) -> list[APFollowerDTO]: ...
|
||||
|
||||
async def get_followers_paginated(
|
||||
self, session: AsyncSession, username: str,
|
||||
page: int = 1, per_page: int = 20,
|
||||
) -> tuple[list[RemoteActorDTO], int]: ...
|
||||
|
||||
async def add_follower(
|
||||
self, session: AsyncSession, username: str,
|
||||
follower_acct: str, follower_inbox: str, follower_actor_url: str,
|
||||
follower_public_key: str | None = None,
|
||||
app_domain: str = "federation",
|
||||
) -> APFollowerDTO: ...
|
||||
|
||||
async def remove_follower(
|
||||
self, session: AsyncSession, username: str, follower_acct: str,
|
||||
app_domain: str = "federation",
|
||||
) -> bool: ...
|
||||
|
||||
# -- Remote actors --------------------------------------------------------
|
||||
@@ -240,6 +266,10 @@ class FederationService(Protocol):
|
||||
self, session: AsyncSession, acct: str,
|
||||
) -> RemoteActorDTO | None: ...
|
||||
|
||||
async def search_actors(
|
||||
self, session: AsyncSession, query: str, page: int = 1, limit: int = 20,
|
||||
) -> tuple[list[RemoteActorDTO], int]: ...
|
||||
|
||||
# -- Following (outbound) -------------------------------------------------
|
||||
async def send_follow(
|
||||
self, session: AsyncSession, local_username: str, remote_actor_url: str,
|
||||
@@ -283,6 +313,11 @@ class FederationService(Protocol):
|
||||
before: datetime | None = None, limit: int = 20,
|
||||
) -> list[TimelineItemDTO]: ...
|
||||
|
||||
async def get_actor_timeline(
|
||||
self, session: AsyncSession, remote_actor_id: int,
|
||||
before: datetime | None = None, limit: int = 20,
|
||||
) -> list[TimelineItemDTO]: ...
|
||||
|
||||
# -- Local posts ----------------------------------------------------------
|
||||
async def create_local_post(
|
||||
self, session: AsyncSession, actor_profile_id: int,
|
||||
|
||||
@@ -7,7 +7,7 @@ from quart import Quart, g
|
||||
DATABASE_URL = (
|
||||
os.getenv("DATABASE_URL_ASYNC")
|
||||
or os.getenv("DATABASE_URL")
|
||||
or "postgresql+asyncpg://localhost/coop"
|
||||
or "postgresql+asyncpg://localhost/blog"
|
||||
)
|
||||
|
||||
_engine = create_async_engine(
|
||||
@@ -15,7 +15,7 @@ _engine = create_async_engine(
|
||||
future=True,
|
||||
echo=False,
|
||||
pool_pre_ping=True,
|
||||
pool_size=-1 # ned to look at this!!!
|
||||
pool_size=0, # 0 = unlimited (NullPool equivalent for asyncpg)
|
||||
)
|
||||
|
||||
_Session = async_sessionmaker(
|
||||
@@ -34,43 +34,42 @@ async def get_session():
|
||||
await sess.close()
|
||||
|
||||
|
||||
|
||||
def register_db(app: Quart):
|
||||
|
||||
@app.before_request
|
||||
async def open_session():
|
||||
g.s = _Session()
|
||||
g.tx = await g.s.begin()
|
||||
g.had_error = False
|
||||
@app.before_request
|
||||
async def open_session():
|
||||
g.s = _Session()
|
||||
g.tx = await g.s.begin()
|
||||
g.had_error = False
|
||||
|
||||
@app.after_request
|
||||
async def maybe_commit(response):
|
||||
# Runs BEFORE bytes are sent.
|
||||
if not g.had_error and 200 <= response.status_code < 400:
|
||||
try:
|
||||
if hasattr(g, "tx"):
|
||||
await g.tx.commit()
|
||||
except Exception as e:
|
||||
print(f'commit failed {e}')
|
||||
if hasattr(g, "tx"):
|
||||
await g.tx.rollback()
|
||||
from quart import make_response
|
||||
return await make_response("Commit failed", 500)
|
||||
return response
|
||||
@app.after_request
|
||||
async def maybe_commit(response):
|
||||
# Runs BEFORE bytes are sent.
|
||||
if not g.had_error and 200 <= response.status_code < 400:
|
||||
try:
|
||||
if hasattr(g, "tx"):
|
||||
await g.tx.commit()
|
||||
except Exception as e:
|
||||
print(f'commit failed {e}')
|
||||
if hasattr(g, "tx"):
|
||||
await g.tx.rollback()
|
||||
from quart import make_response
|
||||
return await make_response("Commit failed", 500)
|
||||
return response
|
||||
|
||||
@app.teardown_request
|
||||
async def finish(exc):
|
||||
try:
|
||||
# If an exception occurred OR we didn't commit (still in txn), roll back.
|
||||
if hasattr(g, "s"):
|
||||
if exc is not None or g.s.in_transaction():
|
||||
if hasattr(g, "tx"):
|
||||
await g.tx.rollback()
|
||||
finally:
|
||||
if hasattr(g, "s"):
|
||||
await g.s.close()
|
||||
@app.teardown_request
|
||||
async def finish(exc):
|
||||
try:
|
||||
# If an exception occurred OR we didn't commit (still in txn), roll back.
|
||||
if hasattr(g, "s"):
|
||||
if exc is not None or g.s.in_transaction():
|
||||
if hasattr(g, "tx"):
|
||||
await g.tx.rollback()
|
||||
finally:
|
||||
if hasattr(g, "s"):
|
||||
await g.s.close()
|
||||
|
||||
@app.errorhandler(Exception)
|
||||
async def mark_error(e):
|
||||
g.had_error = True
|
||||
raise
|
||||
@app.errorhandler(Exception)
|
||||
async def mark_error(e):
|
||||
g.had_error = True
|
||||
raise
|
||||
|
||||
@@ -1,4 +1,9 @@
|
||||
from .bus import emit_event, register_handler
|
||||
from .bus import emit_activity, register_activity_handler, get_activity_handlers
|
||||
from .processor import EventProcessor
|
||||
|
||||
__all__ = ["emit_event", "register_handler", "EventProcessor"]
|
||||
__all__ = [
|
||||
"emit_activity",
|
||||
"register_activity_handler",
|
||||
"get_activity_handlers",
|
||||
"EventProcessor",
|
||||
]
|
||||
|
||||
144
events/bus.py
144
events/bus.py
@@ -1,56 +1,126 @@
|
||||
"""
|
||||
Transactional outbox event bus.
|
||||
Unified activity bus.
|
||||
|
||||
emit_event() writes to the domain_events table within the caller's existing
|
||||
DB transaction — atomic with whatever domain change triggered the event.
|
||||
emit_activity() writes an APActivity row with process_state='pending' within
|
||||
the caller's existing DB transaction — atomic with the domain change.
|
||||
|
||||
register_handler() registers async handler functions that the EventProcessor
|
||||
will call when processing events of a given type.
|
||||
register_activity_handler() registers async handler functions that the
|
||||
EventProcessor dispatches when processing pending activities.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
from collections import defaultdict
|
||||
from typing import Any, Awaitable, Callable, Dict, List
|
||||
from typing import Awaitable, Callable, Dict, List, Tuple
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models.domain_event import DomainEvent
|
||||
from shared.models.federation import APActivity
|
||||
|
||||
# handler signature: async def handler(event: DomainEvent, session: AsyncSession) -> None
|
||||
HandlerFn = Callable[[DomainEvent, AsyncSession], Awaitable[None]]
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_handlers: Dict[str, List[HandlerFn]] = defaultdict(list)
|
||||
# ---------------------------------------------------------------------------
|
||||
# Activity-handler registry
|
||||
# ---------------------------------------------------------------------------
|
||||
# Handler signature: async def handler(activity: APActivity, session: AsyncSession) -> None
|
||||
ActivityHandlerFn = Callable[[APActivity, AsyncSession], Awaitable[None]]
|
||||
|
||||
# Keyed by (activity_type, object_type). object_type="*" is wildcard.
|
||||
_activity_handlers: Dict[Tuple[str, str], List[ActivityHandlerFn]] = defaultdict(list)
|
||||
|
||||
|
||||
async def emit_event(
|
||||
def register_activity_handler(
|
||||
activity_type: str,
|
||||
fn: ActivityHandlerFn,
|
||||
*,
|
||||
object_type: str | None = None,
|
||||
) -> None:
|
||||
"""Register an async handler for an activity type + optional object type.
|
||||
|
||||
Use ``activity_type="*"`` as a wildcard that fires for every activity
|
||||
(e.g. federation delivery handler).
|
||||
"""
|
||||
key = (activity_type, object_type or "*")
|
||||
_activity_handlers[key].append(fn)
|
||||
log.info("Registered activity handler %s.%s for key %s", fn.__module__, fn.__qualname__, key)
|
||||
|
||||
|
||||
def get_activity_handlers(
|
||||
activity_type: str,
|
||||
object_type: str | None = None,
|
||||
) -> List[ActivityHandlerFn]:
|
||||
"""Return all matching handlers for an activity.
|
||||
|
||||
Matches in order:
|
||||
1. Exact (activity_type, object_type)
|
||||
2. (activity_type, "*") — type-level wildcard
|
||||
3. ("*", "*") — global wildcard (e.g. delivery)
|
||||
"""
|
||||
handlers: List[ActivityHandlerFn] = []
|
||||
ot = object_type or "*"
|
||||
|
||||
# Exact match
|
||||
if ot != "*":
|
||||
handlers.extend(_activity_handlers.get((activity_type, ot), []))
|
||||
# Type-level wildcard
|
||||
handlers.extend(_activity_handlers.get((activity_type, "*"), []))
|
||||
# Global wildcard
|
||||
if activity_type != "*":
|
||||
handlers.extend(_activity_handlers.get(("*", "*"), []))
|
||||
|
||||
return handlers
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# emit_activity — the primary way to emit events
|
||||
# ---------------------------------------------------------------------------
|
||||
async def emit_activity(
|
||||
session: AsyncSession,
|
||||
event_type: str,
|
||||
aggregate_type: str,
|
||||
aggregate_id: int,
|
||||
payload: Dict[str, Any] | None = None,
|
||||
) -> DomainEvent:
|
||||
*,
|
||||
activity_type: str,
|
||||
actor_uri: str,
|
||||
object_type: str,
|
||||
object_data: dict | None = None,
|
||||
source_type: str | None = None,
|
||||
source_id: int | None = None,
|
||||
visibility: str = "internal",
|
||||
actor_profile_id: int | None = None,
|
||||
origin_app: str | None = None,
|
||||
) -> APActivity:
|
||||
"""
|
||||
Write a domain event to the outbox table in the current transaction.
|
||||
Write an AP-shaped activity to ap_activities with process_state='pending'.
|
||||
|
||||
Call this inside your service function, using the same session that
|
||||
performs the domain change. The event and the change commit together.
|
||||
Called inside a service function using the same session that performs the
|
||||
domain change. The activity and the change commit together.
|
||||
"""
|
||||
event = DomainEvent(
|
||||
event_type=event_type,
|
||||
aggregate_type=aggregate_type,
|
||||
aggregate_id=aggregate_id,
|
||||
payload=payload or {},
|
||||
if not origin_app:
|
||||
try:
|
||||
from quart import current_app
|
||||
origin_app = current_app.name
|
||||
except (ImportError, RuntimeError):
|
||||
pass
|
||||
|
||||
activity_uri = f"internal:{uuid.uuid4()}" if visibility == "internal" else f"urn:uuid:{uuid.uuid4()}"
|
||||
|
||||
activity = APActivity(
|
||||
activity_id=activity_uri,
|
||||
activity_type=activity_type,
|
||||
actor_profile_id=actor_profile_id,
|
||||
actor_uri=actor_uri,
|
||||
object_type=object_type,
|
||||
object_data=object_data or {},
|
||||
is_local=True,
|
||||
source_type=source_type,
|
||||
source_id=source_id,
|
||||
visibility=visibility,
|
||||
process_state="pending",
|
||||
origin_app=origin_app,
|
||||
)
|
||||
session.add(event)
|
||||
await session.flush() # assign event.id
|
||||
return event
|
||||
|
||||
|
||||
def register_handler(event_type: str, fn: HandlerFn) -> None:
|
||||
"""Register an async handler for a given event type."""
|
||||
_handlers[event_type].append(fn)
|
||||
|
||||
|
||||
def get_handlers(event_type: str) -> List[HandlerFn]:
|
||||
"""Return all registered handlers for an event type."""
|
||||
return _handlers.get(event_type, [])
|
||||
session.add(activity)
|
||||
await session.flush()
|
||||
# Wake any listening EventProcessor as soon as this transaction commits.
|
||||
# NOTIFY is transactional — delivered only after commit.
|
||||
await session.execute(text("NOTIFY ap_activity_pending"))
|
||||
return activity
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Shared event handlers (replaces glue.setup.register_glue_handlers)."""
|
||||
"""Shared event handlers."""
|
||||
|
||||
|
||||
def register_shared_handlers():
|
||||
@@ -6,5 +6,5 @@ def register_shared_handlers():
|
||||
import shared.events.handlers.container_handlers # noqa: F401
|
||||
import shared.events.handlers.login_handlers # noqa: F401
|
||||
import shared.events.handlers.order_handlers # noqa: F401
|
||||
# federation_handlers removed — publication is now inline at write sites
|
||||
import shared.events.handlers.ap_delivery_handler # noqa: F401
|
||||
import shared.events.handlers.external_delivery_handler # noqa: F401
|
||||
|
||||
@@ -1,18 +1,29 @@
|
||||
"""Deliver AP activities to remote followers.
|
||||
|
||||
On ``federation.activity_created`` → load activity + actor + followers →
|
||||
sign with HTTP Signatures → POST to each follower inbox.
|
||||
Registered as a wildcard handler — fires for every activity. Skips
|
||||
non-public activities and those without an actor profile.
|
||||
|
||||
Per-app delivery: activities are delivered using the domain that matches
|
||||
the follower's subscription. A follower of ``@alice@blog.rose-ash.com``
|
||||
receives activities with ``actor: https://blog.rose-ash.com/users/alice``
|
||||
and signatures using that domain's key_id. Aggregate followers
|
||||
(``app_domain='federation'``) receive the federation domain identity.
|
||||
|
||||
Idempotent: successful deliveries are recorded in ap_delivery_log.
|
||||
On retry (at-least-once reaper), already-delivered inboxes are skipped.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from collections import defaultdict
|
||||
|
||||
import httpx
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy import select, or_
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events.bus import register_handler, DomainEvent
|
||||
from shared.models.federation import ActorProfile, APActivity, APFollower
|
||||
from shared.events.bus import register_activity_handler
|
||||
from shared.models.federation import ActorProfile, APActivity, APFollower, APDeliveryLog
|
||||
from shared.services.registry import services
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@@ -21,6 +32,12 @@ AP_CONTENT_TYPE = "application/activity+json"
|
||||
DELIVERY_TIMEOUT = 15 # seconds per request
|
||||
|
||||
|
||||
def _domain_for_app(app_name: str) -> str:
|
||||
"""Resolve the public AP domain for an app name."""
|
||||
from shared.infrastructure.activitypub import _ap_domain
|
||||
return _ap_domain(app_name)
|
||||
|
||||
|
||||
def _build_activity_json(activity: APActivity, actor: ActorProfile, domain: str) -> dict:
|
||||
"""Build the full AP activity JSON-LD for delivery."""
|
||||
username = actor.preferred_username
|
||||
@@ -28,30 +45,45 @@ def _build_activity_json(activity: APActivity, actor: ActorProfile, domain: str)
|
||||
|
||||
obj = dict(activity.object_data or {})
|
||||
|
||||
# Object id MUST be on the actor's domain (Mastodon origin check).
|
||||
# The post URL (e.g. coop.rose-ash.com/slug/) goes in "url" only.
|
||||
object_id = activity.activity_id + "/object"
|
||||
# Rewrite all URLs from the federation domain to the delivery domain
|
||||
# so Mastodon's origin check passes (all IDs must match actor host).
|
||||
import re
|
||||
fed_domain = os.getenv("AP_DOMAIN", "federation.rose-ash.com")
|
||||
|
||||
def _rewrite(url: str) -> str:
|
||||
if isinstance(url, str) and fed_domain in url:
|
||||
return url.replace(f"https://{fed_domain}", f"https://{domain}")
|
||||
return url
|
||||
|
||||
activity_id = _rewrite(activity.activity_id)
|
||||
object_id = activity_id + "/object"
|
||||
|
||||
# Rewrite any federation-domain URLs in object_data
|
||||
if "id" in obj:
|
||||
obj["id"] = _rewrite(obj["id"])
|
||||
if "attributedTo" in obj:
|
||||
obj["attributedTo"] = _rewrite(obj["attributedTo"])
|
||||
|
||||
if activity.activity_type == "Delete":
|
||||
# Delete: object is a Tombstone with just id + type
|
||||
obj.setdefault("id", object_id)
|
||||
obj.setdefault("type", "Tombstone")
|
||||
else:
|
||||
# Create/Update: full object with attribution
|
||||
# Prefer stable id from object_data (set by try_publish), fall back to activity-derived
|
||||
obj.setdefault("id", object_id)
|
||||
obj.setdefault("type", activity.object_type)
|
||||
obj.setdefault("attributedTo", actor_url)
|
||||
obj.setdefault("published", activity.published.isoformat() if activity.published else None)
|
||||
obj.setdefault("to", ["https://www.w3.org/ns/activitystreams#Public"])
|
||||
obj.setdefault("cc", [f"{actor_url}/followers"])
|
||||
if activity.activity_type == "Update":
|
||||
from datetime import datetime, timezone
|
||||
obj["updated"] = datetime.now(timezone.utc).isoformat()
|
||||
|
||||
return {
|
||||
"@context": [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"https://w3id.org/security/v1",
|
||||
],
|
||||
"id": activity.activity_id,
|
||||
"id": activity_id,
|
||||
"type": activity.activity_type,
|
||||
"actor": actor_url,
|
||||
"published": activity.published.isoformat() if activity.published else None,
|
||||
@@ -67,8 +99,8 @@ async def _deliver_to_inbox(
|
||||
body: dict,
|
||||
actor: ActorProfile,
|
||||
domain: str,
|
||||
) -> bool:
|
||||
"""POST signed activity to a single inbox. Returns True on success."""
|
||||
) -> int | None:
|
||||
"""POST signed activity to a single inbox. Returns status code or None on error."""
|
||||
from shared.utils.http_signatures import sign_request
|
||||
from urllib.parse import urlparse
|
||||
import json
|
||||
@@ -96,39 +128,25 @@ async def _deliver_to_inbox(
|
||||
)
|
||||
if resp.status_code < 300:
|
||||
log.info("Delivered to %s → %d", inbox_url, resp.status_code)
|
||||
return True
|
||||
else:
|
||||
log.warning("Delivery to %s → %d: %s", inbox_url, resp.status_code, resp.text[:200])
|
||||
return False
|
||||
return resp.status_code
|
||||
except Exception:
|
||||
log.exception("Delivery failed for %s", inbox_url)
|
||||
return False
|
||||
return None
|
||||
|
||||
|
||||
async def on_activity_created(event: DomainEvent, session: AsyncSession) -> None:
|
||||
"""Deliver a newly created activity to all followers."""
|
||||
import os
|
||||
async def on_any_activity(activity: APActivity, session: AsyncSession) -> None:
|
||||
"""Deliver a public activity to all matching followers of its actor."""
|
||||
|
||||
# Only deliver public activities that have an actor profile
|
||||
if activity.visibility != "public":
|
||||
return
|
||||
if activity.actor_profile_id is None:
|
||||
return
|
||||
if not services.has("federation"):
|
||||
return
|
||||
|
||||
payload = event.payload
|
||||
activity_id_uri = payload.get("activity_id")
|
||||
if not activity_id_uri:
|
||||
return
|
||||
|
||||
domain = os.getenv("AP_DOMAIN", "rose-ash.com")
|
||||
|
||||
# Load the activity
|
||||
activity = (
|
||||
await session.execute(
|
||||
select(APActivity).where(APActivity.activity_id == activity_id_uri)
|
||||
)
|
||||
).scalar_one_or_none()
|
||||
if not activity:
|
||||
log.warning("Activity not found: %s", activity_id_uri)
|
||||
return
|
||||
|
||||
# Load actor with private key
|
||||
actor = (
|
||||
await session.execute(
|
||||
@@ -136,35 +154,97 @@ async def on_activity_created(event: DomainEvent, session: AsyncSession) -> None
|
||||
)
|
||||
).scalar_one_or_none()
|
||||
if not actor or not actor.private_key_pem:
|
||||
log.warning("Actor not found or missing key for activity %s", activity_id_uri)
|
||||
log.warning("Actor not found or missing key for activity %s", activity.activity_id)
|
||||
return
|
||||
|
||||
# Load followers
|
||||
# Load matching followers.
|
||||
# Aggregate followers (app_domain='federation') always get everything.
|
||||
# Per-app followers only get activities from their app.
|
||||
origin_app = activity.origin_app
|
||||
follower_filters = [APFollower.actor_profile_id == actor.id]
|
||||
|
||||
if origin_app and origin_app != "federation":
|
||||
follower_filters.append(
|
||||
or_(
|
||||
APFollower.app_domain == "federation",
|
||||
APFollower.app_domain == origin_app,
|
||||
)
|
||||
)
|
||||
|
||||
followers = (
|
||||
await session.execute(
|
||||
select(APFollower).where(APFollower.actor_profile_id == actor.id)
|
||||
select(APFollower).where(*follower_filters)
|
||||
)
|
||||
).scalars().all()
|
||||
|
||||
if not followers:
|
||||
log.debug("No followers to deliver to for %s", activity_id_uri)
|
||||
log.debug("No followers to deliver to for %s", activity.activity_id)
|
||||
return
|
||||
|
||||
# Build activity JSON
|
||||
activity_json = _build_activity_json(activity, actor, domain)
|
||||
# Check delivery log — skip (inbox, domain) pairs already delivered (idempotency)
|
||||
existing = (
|
||||
await session.execute(
|
||||
select(APDeliveryLog.inbox_url, APDeliveryLog.app_domain).where(
|
||||
APDeliveryLog.activity_id == activity.id,
|
||||
APDeliveryLog.status_code < 300,
|
||||
)
|
||||
)
|
||||
).all()
|
||||
already_delivered: set[tuple[str, str]] = {(r[0], r[1]) for r in existing}
|
||||
|
||||
# Deliver to each follower inbox
|
||||
# Deduplicate inboxes (multiple followers might share a shared inbox)
|
||||
inboxes = {f.follower_inbox for f in followers if f.follower_inbox}
|
||||
# Collect all (inbox, app_domain) pairs to deliver to.
|
||||
# Each follower subscription gets its own delivery with the correct
|
||||
# actor identity, so followers of @user@blog and @user@federation
|
||||
# both see posts on their respective actor profiles.
|
||||
delivery_pairs: set[tuple[str, str]] = set()
|
||||
for f in followers:
|
||||
if not f.follower_inbox:
|
||||
continue
|
||||
app_dom = f.app_domain or "federation"
|
||||
pair = (f.follower_inbox, app_dom)
|
||||
if pair not in already_delivered:
|
||||
delivery_pairs.add(pair)
|
||||
|
||||
if not delivery_pairs:
|
||||
if already_delivered:
|
||||
log.info("All deliveries already done for %s", activity.activity_id)
|
||||
return
|
||||
|
||||
if already_delivered:
|
||||
log.info(
|
||||
"Skipping %d already-delivered, delivering to %d remaining",
|
||||
len(already_delivered), len(delivery_pairs),
|
||||
)
|
||||
|
||||
# Group by domain to reuse activity JSON per domain
|
||||
domain_inboxes: dict[str, list[str]] = defaultdict(list)
|
||||
for inbox_url, app_dom in delivery_pairs:
|
||||
domain_inboxes[app_dom].append(inbox_url)
|
||||
|
||||
log.info(
|
||||
"Delivering %s to %d inbox(es) for @%s",
|
||||
activity.activity_type, len(inboxes), actor.preferred_username,
|
||||
"Delivering %s to %d target(s) for @%s across %d domain(s)",
|
||||
activity.activity_type, len(delivery_pairs),
|
||||
actor.preferred_username, len(domain_inboxes),
|
||||
)
|
||||
|
||||
async with httpx.AsyncClient() as client:
|
||||
for inbox_url in inboxes:
|
||||
await _deliver_to_inbox(client, inbox_url, activity_json, actor, domain)
|
||||
for app_dom, inboxes in domain_inboxes.items():
|
||||
domain = _domain_for_app(app_dom)
|
||||
activity_json = _build_activity_json(activity, actor, domain)
|
||||
|
||||
for inbox_url in inboxes:
|
||||
status_code = await _deliver_to_inbox(
|
||||
client, inbox_url, activity_json, actor, domain
|
||||
)
|
||||
if status_code is not None and status_code < 300:
|
||||
session.add(APDeliveryLog(
|
||||
activity_id=activity.id,
|
||||
inbox_url=inbox_url,
|
||||
app_domain=app_dom,
|
||||
status_code=status_code,
|
||||
))
|
||||
await session.flush()
|
||||
|
||||
|
||||
register_handler("federation.activity_created", on_activity_created)
|
||||
# Wildcard: fires for every activity
|
||||
register_activity_handler("*", on_any_activity)
|
||||
|
||||
@@ -2,18 +2,18 @@ from __future__ import annotations
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events import register_handler
|
||||
from shared.models.domain_event import DomainEvent
|
||||
from shared.events import register_activity_handler
|
||||
from shared.models.federation import APActivity
|
||||
from shared.services.navigation import rebuild_navigation
|
||||
|
||||
|
||||
async def on_child_attached(event: DomainEvent, session: AsyncSession) -> None:
|
||||
async def on_child_attached(activity: APActivity, session: AsyncSession) -> None:
|
||||
await rebuild_navigation(session)
|
||||
|
||||
|
||||
async def on_child_detached(event: DomainEvent, session: AsyncSession) -> None:
|
||||
async def on_child_detached(activity: APActivity, session: AsyncSession) -> None:
|
||||
await rebuild_navigation(session)
|
||||
|
||||
|
||||
register_handler("container.child_attached", on_child_attached)
|
||||
register_handler("container.child_detached", on_child_detached)
|
||||
register_activity_handler("Add", on_child_attached, object_type="rose:ContainerRelation")
|
||||
register_activity_handler("Remove", on_child_detached, object_type="rose:ContainerRelation")
|
||||
|
||||
101
events/handlers/external_delivery_handler.py
Normal file
101
events/handlers/external_delivery_handler.py
Normal file
@@ -0,0 +1,101 @@
|
||||
"""Deliver activities to external service inboxes via signed HTTP POST.
|
||||
|
||||
External services (like artdag) that don't share the coop database receive
|
||||
activities via HTTP, authenticated with the same HTTP Signatures used for
|
||||
ActivityPub federation.
|
||||
|
||||
Config via env: EXTERNAL_INBOXES=name|url,name2|url2,...
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import httpx
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events.bus import register_activity_handler
|
||||
from shared.models.federation import ActorProfile, APActivity
|
||||
from shared.utils.http_signatures import sign_request
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Activity types to deliver externally
|
||||
_DELIVERABLE_TYPES = {"rose:DeviceAuth"}
|
||||
|
||||
|
||||
def _get_external_inboxes() -> list[tuple[str, str]]:
|
||||
"""Parse EXTERNAL_INBOXES env var into [(name, url), ...]."""
|
||||
raw = os.environ.get("EXTERNAL_INBOXES", "")
|
||||
if not raw:
|
||||
return []
|
||||
result = []
|
||||
for entry in raw.split(","):
|
||||
entry = entry.strip()
|
||||
if "|" in entry:
|
||||
name, url = entry.split("|", 1)
|
||||
result.append((name.strip(), url.strip()))
|
||||
return result
|
||||
|
||||
|
||||
def _get_ap_domain() -> str:
|
||||
return os.environ.get("AP_DOMAIN", "federation.rose-ash.com")
|
||||
|
||||
|
||||
async def on_external_activity(activity: APActivity, session: AsyncSession) -> None:
|
||||
"""Deliver matching activities to configured external inboxes."""
|
||||
if activity.activity_type not in _DELIVERABLE_TYPES:
|
||||
return
|
||||
|
||||
inboxes = _get_external_inboxes()
|
||||
if not inboxes:
|
||||
return
|
||||
|
||||
# Get the first actor profile for signing
|
||||
actor = await session.scalar(select(ActorProfile).limit(1))
|
||||
if not actor:
|
||||
log.warning("No ActorProfile available for signing external deliveries")
|
||||
return
|
||||
|
||||
domain = _get_ap_domain()
|
||||
key_id = f"https://{domain}/users/{actor.preferred_username}#main-key"
|
||||
|
||||
payload = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
"type": activity.activity_type,
|
||||
"actor": activity.actor_uri,
|
||||
"object": activity.object_data,
|
||||
}
|
||||
if activity.published:
|
||||
payload["published"] = activity.published.isoformat()
|
||||
|
||||
body_bytes = json.dumps(payload).encode()
|
||||
|
||||
for name, inbox_url in inboxes:
|
||||
parsed = urlparse(inbox_url)
|
||||
headers = sign_request(
|
||||
private_key_pem=actor.private_key_pem,
|
||||
key_id=key_id,
|
||||
method="POST",
|
||||
path=parsed.path,
|
||||
host=parsed.netloc,
|
||||
body=body_bytes,
|
||||
)
|
||||
headers["Content-Type"] = "application/activity+json"
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=3) as client:
|
||||
resp = await client.post(inbox_url, content=body_bytes, headers=headers)
|
||||
log.info(
|
||||
"External delivery to %s: %d",
|
||||
name, resp.status_code,
|
||||
)
|
||||
except Exception:
|
||||
log.warning("External delivery to %s failed", name, exc_info=True)
|
||||
|
||||
|
||||
# Register for all deliverable types
|
||||
for _t in _DELIVERABLE_TYPES:
|
||||
register_activity_handler(_t, on_external_activity)
|
||||
@@ -1,8 +0,0 @@
|
||||
"""Federation event handlers — REMOVED.
|
||||
|
||||
Federation publication is now inline at the write site (ghost_sync, entries,
|
||||
market routes) via shared.services.federation_publish.try_publish().
|
||||
|
||||
AP delivery (federation.activity_created → inbox POST) remains async via
|
||||
ap_delivery_handler.
|
||||
"""
|
||||
@@ -2,24 +2,22 @@ from __future__ import annotations
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events import register_handler
|
||||
from shared.models.domain_event import DomainEvent
|
||||
from shared.events import register_activity_handler
|
||||
from shared.models.federation import APActivity
|
||||
from shared.services.registry import services
|
||||
|
||||
|
||||
async def on_user_logged_in(event: DomainEvent, session: AsyncSession) -> None:
|
||||
payload = event.payload
|
||||
user_id = payload["user_id"]
|
||||
session_id = payload["session_id"]
|
||||
async def on_user_logged_in(activity: APActivity, session: AsyncSession) -> None:
|
||||
data = activity.object_data
|
||||
user_id = data["user_id"]
|
||||
session_id = data["session_id"]
|
||||
|
||||
# Adopt cart items (if cart service is registered)
|
||||
if services.has("cart"):
|
||||
await services.cart.adopt_cart_for_user(session, user_id, session_id)
|
||||
|
||||
# Adopt calendar entries and tickets (if calendar service is registered)
|
||||
if services.has("calendar"):
|
||||
await services.calendar.adopt_entries_for_user(session, user_id, session_id)
|
||||
await services.calendar.adopt_tickets_for_user(session, user_id, session_id)
|
||||
|
||||
|
||||
register_handler("user.logged_in", on_user_logged_in)
|
||||
register_activity_handler("rose:Login", on_user_logged_in)
|
||||
|
||||
@@ -4,19 +4,19 @@ import logging
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events import register_handler
|
||||
from shared.models.domain_event import DomainEvent
|
||||
from shared.events import register_activity_handler
|
||||
from shared.models.federation import APActivity
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def on_order_created(event: DomainEvent, session: AsyncSession) -> None:
|
||||
log.info("order.created: order_id=%s", event.payload.get("order_id"))
|
||||
async def on_order_created(activity: APActivity, session: AsyncSession) -> None:
|
||||
log.info("order.created: order_id=%s", activity.object_data.get("order_id"))
|
||||
|
||||
|
||||
async def on_order_paid(event: DomainEvent, session: AsyncSession) -> None:
|
||||
log.info("order.paid: order_id=%s", event.payload.get("order_id"))
|
||||
async def on_order_paid(activity: APActivity, session: AsyncSession) -> None:
|
||||
log.info("order.paid: order_id=%s", activity.object_data.get("order_id"))
|
||||
|
||||
|
||||
register_handler("order.created", on_order_created)
|
||||
register_handler("order.paid", on_order_paid)
|
||||
register_activity_handler("Create", on_order_created, object_type="rose:Order")
|
||||
register_activity_handler("rose:OrderPaid", on_order_paid)
|
||||
|
||||
@@ -1,48 +1,80 @@
|
||||
"""
|
||||
Event processor — polls the domain_events outbox table and dispatches
|
||||
to registered handlers.
|
||||
Event processor — polls the ap_activities table and dispatches to registered
|
||||
activity handlers.
|
||||
|
||||
Runs as an asyncio background task within each app process.
|
||||
Uses SELECT ... FOR UPDATE SKIP LOCKED for safe concurrent processing.
|
||||
|
||||
A dedicated asyncpg LISTEN connection wakes the poll loop immediately when
|
||||
emit_activity() fires NOTIFY ap_activity_pending, so latency drops from
|
||||
~2 seconds (poll interval) to sub-100 ms. The fixed-interval poll remains
|
||||
as a safety-net fallback.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import traceback
|
||||
from datetime import datetime, timezone
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import asyncpg
|
||||
from sqlalchemy import select, update
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.db.session import get_session
|
||||
from shared.models.domain_event import DomainEvent
|
||||
from .bus import get_handlers
|
||||
from shared.db.session import get_session, DATABASE_URL
|
||||
from shared.models.federation import APActivity
|
||||
from .bus import get_activity_handlers
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EventProcessor:
|
||||
"""Background event processor that polls the outbox table."""
|
||||
"""Background event processor that polls the ap_activities table."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
app_name: str | None = None,
|
||||
poll_interval: float = 2.0,
|
||||
batch_size: int = 10,
|
||||
stuck_timeout: float = 300.0,
|
||||
):
|
||||
self._app_name = app_name
|
||||
self._poll_interval = poll_interval
|
||||
self._batch_size = batch_size
|
||||
self._stuck_timeout = stuck_timeout # seconds before "processing" → "pending"
|
||||
self._task: asyncio.Task | None = None
|
||||
self._listen_task: asyncio.Task | None = None
|
||||
self._listen_conn: asyncpg.Connection | None = None
|
||||
self._wake = asyncio.Event()
|
||||
self._running = False
|
||||
self._reap_counter = 0
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Lifecycle
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
async def start(self) -> None:
|
||||
"""Start the background polling loop."""
|
||||
if self._task is not None:
|
||||
return
|
||||
self._running = True
|
||||
self._listen_task = asyncio.create_task(self._listen_for_notify())
|
||||
self._task = asyncio.create_task(self._poll_loop())
|
||||
|
||||
async def stop(self) -> None:
|
||||
"""Stop the background polling loop gracefully."""
|
||||
self._running = False
|
||||
if self._listen_task is not None:
|
||||
self._listen_task.cancel()
|
||||
try:
|
||||
await self._listen_task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
self._listen_task = None
|
||||
if self._listen_conn is not None and not self._listen_conn.is_closed():
|
||||
await self._listen_conn.close()
|
||||
self._listen_conn = None
|
||||
if self._task is not None:
|
||||
self._task.cancel()
|
||||
try:
|
||||
@@ -51,67 +83,161 @@ class EventProcessor:
|
||||
pass
|
||||
self._task = None
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# LISTEN — wake poll loop on NOTIFY
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
async def _listen_for_notify(self) -> None:
|
||||
"""Maintain a LISTEN connection and wake the poll loop on NOTIFY."""
|
||||
dsn = DATABASE_URL.replace("+asyncpg", "")
|
||||
while self._running:
|
||||
try:
|
||||
self._listen_conn = await asyncpg.connect(dsn)
|
||||
await self._listen_conn.add_listener(
|
||||
"ap_activity_pending", self._on_notify
|
||||
)
|
||||
log.info("LISTEN ap_activity_pending active")
|
||||
# Keep alive with periodic health check
|
||||
while self._running:
|
||||
await asyncio.sleep(30)
|
||||
await self._listen_conn.execute("SELECT 1")
|
||||
except asyncio.CancelledError:
|
||||
break
|
||||
except Exception:
|
||||
log.warning("LISTEN connection lost, reconnecting…", exc_info=True)
|
||||
await asyncio.sleep(2)
|
||||
finally:
|
||||
if self._listen_conn is not None and not self._listen_conn.is_closed():
|
||||
await self._listen_conn.close()
|
||||
self._listen_conn = None
|
||||
|
||||
def _on_notify(self, conn, pid, channel, payload) -> None:
|
||||
"""Called by asyncpg when a NOTIFY arrives."""
|
||||
self._wake.set()
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Poll loop
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
async def _poll_loop(self) -> None:
|
||||
while self._running:
|
||||
try:
|
||||
# Periodically recover stuck activities (~every 30 cycles)
|
||||
self._reap_counter += 1
|
||||
if self._reap_counter >= 30:
|
||||
self._reap_counter = 0
|
||||
await self._recover_stuck()
|
||||
|
||||
# Clear before processing so any NOTIFY that arrives during
|
||||
# _process_batch sets the event and we loop immediately.
|
||||
self._wake.clear()
|
||||
processed = await self._process_batch()
|
||||
if processed == 0:
|
||||
await asyncio.sleep(self._poll_interval)
|
||||
try:
|
||||
await asyncio.wait_for(
|
||||
self._wake.wait(), timeout=self._poll_interval
|
||||
)
|
||||
except asyncio.TimeoutError:
|
||||
pass
|
||||
# processed > 0 → loop immediately to drain the queue
|
||||
except asyncio.CancelledError:
|
||||
break
|
||||
except Exception:
|
||||
traceback.print_exc()
|
||||
await asyncio.sleep(self._poll_interval)
|
||||
|
||||
async def _recover_stuck(self) -> None:
|
||||
"""Reset activities stuck in 'processing' back to 'pending'.
|
||||
|
||||
This handles the case where a process crashed mid-handler.
|
||||
Combined with idempotent handlers, this gives at-least-once delivery.
|
||||
"""
|
||||
cutoff = datetime.now(timezone.utc) - timedelta(seconds=self._stuck_timeout)
|
||||
try:
|
||||
async with get_session() as session:
|
||||
filters = [
|
||||
APActivity.process_state == "processing",
|
||||
APActivity.created_at < cutoff,
|
||||
]
|
||||
if self._app_name:
|
||||
filters.append(APActivity.origin_app == self._app_name)
|
||||
result = await session.execute(
|
||||
update(APActivity)
|
||||
.where(*filters)
|
||||
.values(process_state="pending")
|
||||
.returning(APActivity.id)
|
||||
)
|
||||
recovered = result.scalars().all()
|
||||
await session.commit()
|
||||
if recovered:
|
||||
log.warning(
|
||||
"Recovered %d stuck activities: %s",
|
||||
len(recovered), recovered,
|
||||
)
|
||||
except Exception:
|
||||
log.exception("Failed to recover stuck activities")
|
||||
|
||||
async def _process_batch(self) -> int:
|
||||
"""Fetch and process a batch of pending events. Returns count processed."""
|
||||
"""Fetch and process a batch of pending activities. Returns count processed."""
|
||||
processed = 0
|
||||
async with get_session() as session:
|
||||
# FOR UPDATE SKIP LOCKED: safe for concurrent processors
|
||||
filters = [
|
||||
APActivity.process_state == "pending",
|
||||
APActivity.process_attempts < APActivity.process_max_attempts,
|
||||
]
|
||||
if self._app_name:
|
||||
filters.append(APActivity.origin_app == self._app_name)
|
||||
stmt = (
|
||||
select(DomainEvent)
|
||||
.where(
|
||||
DomainEvent.state == "pending",
|
||||
DomainEvent.attempts < DomainEvent.max_attempts,
|
||||
)
|
||||
.order_by(DomainEvent.created_at)
|
||||
select(APActivity)
|
||||
.where(*filters)
|
||||
.order_by(APActivity.created_at)
|
||||
.limit(self._batch_size)
|
||||
.with_for_update(skip_locked=True)
|
||||
)
|
||||
result = await session.execute(stmt)
|
||||
events = result.scalars().all()
|
||||
activities = result.scalars().all()
|
||||
|
||||
for event in events:
|
||||
await self._process_one(session, event)
|
||||
for activity in activities:
|
||||
await self._process_one(session, activity)
|
||||
processed += 1
|
||||
|
||||
await session.commit()
|
||||
return processed
|
||||
|
||||
async def _process_one(self, session: AsyncSession, event: DomainEvent) -> None:
|
||||
"""Run all handlers for a single event."""
|
||||
handlers = get_handlers(event.event_type)
|
||||
async def _process_one(self, session: AsyncSession, activity: APActivity) -> None:
|
||||
"""Run all handlers for a single activity."""
|
||||
handlers = get_activity_handlers(activity.activity_type, activity.object_type)
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
event.state = "processing"
|
||||
event.attempts += 1
|
||||
log.info(
|
||||
"Processing activity %s: type=%s object_type=%s visibility=%s actor_profile_id=%s — %d handler(s) found",
|
||||
activity.id, activity.activity_type, activity.object_type,
|
||||
activity.visibility, activity.actor_profile_id, len(handlers),
|
||||
)
|
||||
for h in handlers:
|
||||
log.info(" handler: %s.%s", h.__module__, h.__qualname__)
|
||||
|
||||
activity.process_state = "processing"
|
||||
activity.process_attempts += 1
|
||||
await session.flush()
|
||||
|
||||
if not handlers:
|
||||
# No handlers registered — mark completed (nothing to do)
|
||||
event.state = "completed"
|
||||
event.processed_at = now
|
||||
activity.process_state = "completed"
|
||||
activity.processed_at = now
|
||||
return
|
||||
|
||||
try:
|
||||
for handler in handlers:
|
||||
await handler(event, session)
|
||||
event.state = "completed"
|
||||
event.processed_at = now
|
||||
log.info(" calling %s.%s …", handler.__module__, handler.__qualname__)
|
||||
await handler(activity, session)
|
||||
log.info(" done %s.%s", handler.__module__, handler.__qualname__)
|
||||
activity.process_state = "completed"
|
||||
activity.processed_at = now
|
||||
except Exception as exc:
|
||||
event.last_error = f"{exc.__class__.__name__}: {exc}"
|
||||
if event.attempts >= event.max_attempts:
|
||||
event.state = "failed"
|
||||
event.processed_at = now
|
||||
log.exception("Handler failed for activity %s", activity.id)
|
||||
activity.process_error = f"{exc.__class__.__name__}: {exc}"
|
||||
if activity.process_attempts >= activity.process_max_attempts:
|
||||
activity.process_state = "failed"
|
||||
activity.processed_at = now
|
||||
else:
|
||||
event.state = "pending" # retry
|
||||
activity.process_state = "pending" # retry
|
||||
|
||||
454
infrastructure/activitypub.py
Normal file
454
infrastructure/activitypub.py
Normal file
@@ -0,0 +1,454 @@
|
||||
"""Per-app ActivityPub blueprint.
|
||||
|
||||
Factory function ``create_activitypub_blueprint(app_name)`` returns a
|
||||
Blueprint with WebFinger, host-meta, nodeinfo, actor profile, inbox,
|
||||
outbox, and followers endpoints.
|
||||
|
||||
Per-app actors are *virtual projections* of the same ``ActorProfile``.
|
||||
Same keypair, same ``preferred_username`` — the only differences are:
|
||||
- the domain in URLs (e.g. blog.rose-ash.com vs federation.rose-ash.com)
|
||||
- which activities are served in the outbox (filtered by ``origin_app``)
|
||||
- which followers are returned (filtered by ``app_domain``)
|
||||
- Follow requests create ``APFollower(app_domain=app_name)``
|
||||
|
||||
Federation app acts as the aggregate: no origin_app filter, app_domain=NULL.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from quart import Blueprint, request, abort, Response, g
|
||||
from sqlalchemy import select
|
||||
|
||||
from shared.services.registry import services
|
||||
from shared.models.federation import ActorProfile, APInboxItem
|
||||
from shared.browser.app.csrf import csrf_exempt
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
AP_CONTENT_TYPE = "application/activity+json"
|
||||
|
||||
# Apps that serve per-app AP actors
|
||||
AP_APPS = {"blog", "market", "events", "federation"}
|
||||
|
||||
|
||||
def _ap_domain(app_name: str) -> str:
|
||||
"""Return the public domain for this app's AP identity."""
|
||||
env_key = f"AP_DOMAIN_{app_name.upper()}"
|
||||
env_val = os.getenv(env_key)
|
||||
if env_val:
|
||||
return env_val
|
||||
# Default: {app}.rose-ash.com, except federation uses AP_DOMAIN
|
||||
if app_name == "federation":
|
||||
return os.getenv("AP_DOMAIN", "federation.rose-ash.com")
|
||||
return f"{app_name}.rose-ash.com"
|
||||
|
||||
|
||||
def _federation_domain() -> str:
|
||||
"""The aggregate federation domain (for alsoKnownAs links)."""
|
||||
return os.getenv("AP_DOMAIN", "federation.rose-ash.com")
|
||||
|
||||
|
||||
def _is_aggregate(app_name: str) -> bool:
|
||||
"""Federation serves the aggregate actor (no per-app filter)."""
|
||||
return app_name == "federation"
|
||||
|
||||
|
||||
def create_activitypub_blueprint(app_name: str) -> Blueprint:
|
||||
"""Return a Blueprint with AP endpoints for *app_name*."""
|
||||
bp = Blueprint("activitypub", __name__)
|
||||
|
||||
domain = _ap_domain(app_name)
|
||||
fed_domain = _federation_domain()
|
||||
aggregate = _is_aggregate(app_name)
|
||||
# For per-app follows, store app_domain; for federation, "federation"
|
||||
follower_app_domain: str = app_name
|
||||
# For per-app outboxes, filter by origin_app; for federation, show all
|
||||
outbox_origin_app: str | None = None if aggregate else app_name
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Well-known endpoints
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@bp.get("/.well-known/webfinger")
|
||||
async def webfinger():
|
||||
resource = request.args.get("resource", "")
|
||||
if not resource.startswith("acct:"):
|
||||
abort(400, "Invalid resource format")
|
||||
|
||||
parts = resource[5:].split("@")
|
||||
if len(parts) != 2:
|
||||
abort(400, "Invalid resource format")
|
||||
|
||||
username, res_domain = parts
|
||||
if res_domain != domain:
|
||||
abort(404, "User not on this server")
|
||||
|
||||
actor = await services.federation.get_actor_by_username(g.s, username)
|
||||
if not actor:
|
||||
abort(404, "User not found")
|
||||
|
||||
actor_url = f"https://{domain}/users/{username}"
|
||||
return Response(
|
||||
response=json.dumps({
|
||||
"subject": resource,
|
||||
"aliases": [actor_url],
|
||||
"links": [
|
||||
{
|
||||
"rel": "self",
|
||||
"type": AP_CONTENT_TYPE,
|
||||
"href": actor_url,
|
||||
},
|
||||
{
|
||||
"rel": "http://webfinger.net/rel/profile-page",
|
||||
"type": "text/html",
|
||||
"href": actor_url,
|
||||
},
|
||||
],
|
||||
}),
|
||||
content_type="application/jrd+json",
|
||||
)
|
||||
|
||||
@bp.get("/.well-known/nodeinfo")
|
||||
async def nodeinfo_index():
|
||||
return Response(
|
||||
response=json.dumps({
|
||||
"links": [
|
||||
{
|
||||
"rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
|
||||
"href": f"https://{domain}/nodeinfo/2.0",
|
||||
}
|
||||
]
|
||||
}),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
@bp.get("/nodeinfo/2.0")
|
||||
async def nodeinfo():
|
||||
stats = await services.federation.get_stats(g.s)
|
||||
return Response(
|
||||
response=json.dumps({
|
||||
"version": "2.0",
|
||||
"software": {
|
||||
"name": "rose-ash",
|
||||
"version": "1.0.0",
|
||||
},
|
||||
"protocols": ["activitypub"],
|
||||
"usage": {
|
||||
"users": {
|
||||
"total": stats.get("actors", 0),
|
||||
"activeMonth": stats.get("actors", 0),
|
||||
},
|
||||
"localPosts": stats.get("activities", 0),
|
||||
},
|
||||
"openRegistrations": False,
|
||||
"metadata": {
|
||||
"nodeName": f"Rose Ash ({app_name})",
|
||||
"nodeDescription": f"Rose Ash {app_name} — ActivityPub federation",
|
||||
},
|
||||
}),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
@bp.get("/.well-known/host-meta")
|
||||
async def host_meta():
|
||||
xml = (
|
||||
'<?xml version="1.0" encoding="UTF-8"?>\n'
|
||||
'<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">\n'
|
||||
f' <Link rel="lrdd" type="application/xrd+xml" '
|
||||
f'template="https://{domain}/.well-known/webfinger?resource={{uri}}"/>\n'
|
||||
'</XRD>'
|
||||
)
|
||||
return Response(response=xml, content_type="application/xrd+xml")
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Actor profile
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@bp.get("/users/<username>")
|
||||
async def actor_profile(username: str):
|
||||
actor = await services.federation.get_actor_by_username(g.s, username)
|
||||
if not actor:
|
||||
abort(404)
|
||||
|
||||
accept_header = request.headers.get("accept", "")
|
||||
|
||||
if "application/activity+json" in accept_header or "application/ld+json" in accept_header:
|
||||
actor_url = f"https://{domain}/users/{username}"
|
||||
actor_json = {
|
||||
"@context": [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"https://w3id.org/security/v1",
|
||||
],
|
||||
"type": "Person",
|
||||
"id": actor_url,
|
||||
"name": actor.display_name or username,
|
||||
"preferredUsername": username,
|
||||
"summary": actor.summary or "",
|
||||
"manuallyApprovesFollowers": False,
|
||||
"inbox": f"{actor_url}/inbox",
|
||||
"outbox": f"{actor_url}/outbox",
|
||||
"followers": f"{actor_url}/followers",
|
||||
"following": f"{actor_url}/following",
|
||||
"publicKey": {
|
||||
"id": f"{actor_url}#main-key",
|
||||
"owner": actor_url,
|
||||
"publicKeyPem": actor.public_key_pem,
|
||||
},
|
||||
"url": actor_url,
|
||||
}
|
||||
|
||||
if aggregate:
|
||||
# Aggregate actor advertises all per-app actors
|
||||
also_known = [
|
||||
f"https://{_ap_domain(a)}/users/{username}"
|
||||
for a in AP_APPS if a != "federation"
|
||||
]
|
||||
if also_known:
|
||||
actor_json["alsoKnownAs"] = also_known
|
||||
else:
|
||||
# Per-app actors link back to the aggregate federation actor
|
||||
actor_json["alsoKnownAs"] = [
|
||||
f"https://{fed_domain}/users/{username}",
|
||||
]
|
||||
|
||||
return Response(
|
||||
response=json.dumps(actor_json),
|
||||
content_type=AP_CONTENT_TYPE,
|
||||
)
|
||||
|
||||
# HTML: federation renders its own profile; other apps redirect there
|
||||
if aggregate:
|
||||
from quart import render_template
|
||||
activities, total = await services.federation.get_outbox(
|
||||
g.s, username, page=1, per_page=20,
|
||||
)
|
||||
return await render_template(
|
||||
"federation/profile.html",
|
||||
actor=actor,
|
||||
activities=activities,
|
||||
total=total,
|
||||
)
|
||||
from quart import redirect
|
||||
return redirect(f"https://{fed_domain}/users/{username}")
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Inbox
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@csrf_exempt
@bp.post("/users/<username>/inbox")
async def inbox(username: str):
    """ActivityPub server-to-server inbox for a local actor.

    Flow: resolve the local actor, parse the activity JSON, verify the
    HTTP signature best-effort, persist the raw item, dispatch it to the
    shared handlers, then mark the item processed. Returns 202 Accepted.
    """
    actor = await services.federation.get_actor_by_username(g.s, username)
    if not actor:
        abort(404)

    body = await request.get_json()
    if not body:
        abort(400, "Invalid JSON")

    activity_type = body.get("type", "")
    from_actor_url = body.get("actor", "")

    # Verify HTTP signature (best-effort): fetch the sender's actor
    # document to obtain its public key, then check the Signature header.
    sig_valid = False
    try:
        from shared.utils.http_signatures import verify_request_signature
        from shared.infrastructure.ap_inbox_handlers import fetch_remote_actor

        req_headers = dict(request.headers)
        sig_header = req_headers.get("Signature", "")

        remote_actor = await fetch_remote_actor(from_actor_url)
        if remote_actor and sig_header:
            pub_key_pem = (remote_actor.get("publicKey") or {}).get("publicKeyPem")
            if pub_key_pem:
                sig_valid = verify_request_signature(
                    public_key_pem=pub_key_pem,
                    signature_header=sig_header,
                    method="POST",
                    path=f"/users/{username}/inbox",
                    headers=req_headers,
                )
    except Exception:
        # Verification is advisory here; failures are logged, not fatal.
        log.debug("Signature verification failed for %s", from_actor_url, exc_info=True)

    if not sig_valid:
        # NOTE(review): unverified activities are still accepted — this is a
        # deliberate lenient mode per the log message; tighten once
        # signature verification is trusted in production.
        log.warning(
            "Unverified inbox POST from %s (%s) on %s — accepting anyway for now",
            from_actor_url, activity_type, domain,
        )

    # Load actor row for DB operations (existence was checked above, so
    # scalar_one() is safe here).
    actor_row = (
        await g.s.execute(
            select(ActorProfile).where(
                ActorProfile.preferred_username == username
            )
        )
    ).scalar_one()

    # Store raw inbox item before dispatch so the payload is kept even if
    # a handler fails.
    item = APInboxItem(
        actor_profile_id=actor_row.id,
        raw_json=body,
        activity_type=activity_type,
        from_actor=from_actor_url,
    )
    g.s.add(item)
    await g.s.flush()

    # Dispatch to shared handlers
    from shared.infrastructure.ap_inbox_handlers import dispatch_inbox_activity
    await dispatch_inbox_activity(
        g.s, actor_row, body, from_actor_url,
        domain=domain,
        app_domain=follower_app_domain,
    )

    # Mark as processed
    item.state = "processed"
    item.processed_at = datetime.now(timezone.utc)
    await g.s.flush()

    return Response(status=202)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Outbox
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@bp.get("/users/<username>/outbox")
async def outbox(username: str):
    """Serve the actor's outbox as an AP OrderedCollection (paged)."""
    actor = await services.federation.get_actor_by_username(g.s, username)
    if not actor:
        abort(404)

    actor_url = f"https://{domain}/users/{username}"
    page_param = request.args.get("page")

    def _ap_json(payload: dict) -> Response:
        # Every outbox response is serialized the same way.
        return Response(
            response=json.dumps(payload),
            content_type=AP_CONTENT_TYPE,
        )

    if not page_param:
        # Collection summary: just the total plus a pointer to page 1.
        _, total = await services.federation.get_outbox(
            g.s, username, page=1, per_page=1,
            origin_app=outbox_origin_app,
        )
        return _ap_json({
            "@context": "https://www.w3.org/ns/activitystreams",
            "type": "OrderedCollection",
            "id": f"{actor_url}/outbox",
            "totalItems": total,
            "first": f"{actor_url}/outbox?page=1",
        })

    page_num = int(page_param)
    activities, total = await services.federation.get_outbox(
        g.s, username, page=page_num, per_page=20,
        origin_app=outbox_origin_app,
    )

    ordered_items = [
        {
            "@context": "https://www.w3.org/ns/activitystreams",
            "type": a.activity_type,
            "id": a.activity_id,
            "actor": actor_url,
            "published": a.published.isoformat() if a.published else None,
            "object": {
                "type": a.object_type,
                **(a.object_data or {}),
            },
        }
        for a in activities
    ]

    return _ap_json({
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "OrderedCollectionPage",
        "id": f"{actor_url}/outbox?page={page_num}",
        "partOf": f"{actor_url}/outbox",
        "totalItems": total,
        "orderedItems": ordered_items,
    })
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Followers / following collections
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@bp.get("/users/<username>/followers")
async def followers(username: str):
    """Expose the actor's follower list as an AP OrderedCollection."""
    actor = await services.federation.get_actor_by_username(g.s, username)
    if not actor:
        abort(404)

    collection_id = f"https://{domain}/users/{username}/followers"
    follower_list = await services.federation.get_followers(
        g.s, username, app_domain=follower_app_domain,
    )

    if request.args.get("page"):
        # Single page holding every follower actor URL.
        payload = {
            "@context": "https://www.w3.org/ns/activitystreams",
            "type": "OrderedCollectionPage",
            "id": f"{collection_id}?page=1",
            "partOf": collection_id,
            "totalItems": len(follower_list),
            "orderedItems": [f.follower_actor_url for f in follower_list],
        }
    else:
        # Collection summary pointing at page 1.
        payload = {
            "@context": "https://www.w3.org/ns/activitystreams",
            "type": "OrderedCollection",
            "id": collection_id,
            "totalItems": len(follower_list),
            "first": f"{collection_id}?page=1",
        }

    return Response(
        response=json.dumps(payload),
        content_type=AP_CONTENT_TYPE,
    )
|
||||
|
||||
@bp.get("/users/<username>/following")
async def following(username: str):
    """Expose the actors this user follows as an AP OrderedCollection."""
    actor = await services.federation.get_actor_by_username(g.s, username)
    if not actor:
        abort(404)

    collection_id = f"https://{domain}/users/{username}/following"
    following_list, total = await services.federation.get_following(g.s, username)

    if request.args.get("page"):
        # Single page with the followed actor URLs.
        payload = {
            "@context": "https://www.w3.org/ns/activitystreams",
            "type": "OrderedCollectionPage",
            "id": f"{collection_id}?page=1",
            "partOf": collection_id,
            "totalItems": total,
            "orderedItems": [f.actor_url for f in following_list],
        }
    else:
        # Collection summary pointing at page 1.
        payload = {
            "@context": "https://www.w3.org/ns/activitystreams",
            "type": "OrderedCollection",
            "id": collection_id,
            "totalItems": total,
            "first": f"{collection_id}?page=1",
        }

    return Response(
        response=json.dumps(payload),
        content_type=AP_CONTENT_TYPE,
    )
|
||||
|
||||
return bp
|
||||
564
infrastructure/ap_inbox_handlers.py
Normal file
564
infrastructure/ap_inbox_handlers.py
Normal file
@@ -0,0 +1,564 @@
|
||||
"""Reusable AP inbox handlers for all apps.
|
||||
|
||||
Extracted from federation/bp/actors/routes.py so that every app's
|
||||
shared AP blueprint can process Follow, Undo, Accept, Create, etc.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import httpx
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models.federation import (
|
||||
ActorProfile, APInboxItem, APInteraction, APNotification,
|
||||
APRemotePost, APActivity, RemoteActor,
|
||||
)
|
||||
from shared.services.registry import services
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
AP_CONTENT_TYPE = "application/activity+json"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def fetch_remote_actor(actor_url: str) -> dict | None:
    """Fetch the JSON-LD actor document at *actor_url*.

    Returns the parsed document, or ``None`` on any network / HTTP /
    decode failure (failures are logged, never raised).
    """
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            response = await client.get(actor_url, headers={"Accept": AP_CONTENT_TYPE})
            if response.status_code == 200:
                return response.json()
    except Exception:
        log.exception("Failed to fetch remote actor: %s", actor_url)
    # Non-200 responses and errors both fall through to None.
    return None
|
||||
|
||||
|
||||
async def send_accept(
    actor: ActorProfile,
    follow_activity: dict,
    follower_inbox: str,
    domain: str,
) -> None:
    """Deliver a signed Accept(Follow) activity to *follower_inbox*.

    Builds the Accept wrapping the original Follow, signs it with the
    local actor's key, and POSTs it. Delivery failures are logged only.
    """
    from shared.utils.http_signatures import sign_request
    from urllib.parse import urlparse

    username = actor.preferred_username
    actor_url = f"https://{domain}/users/{username}"

    accept = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "id": f"{actor_url}/activities/{uuid.uuid4()}",
        "type": "Accept",
        "actor": actor_url,
        # Echo the original Follow so the remote side can match it up.
        "object": follow_activity,
    }

    payload = json.dumps(accept).encode()
    target = urlparse(follower_inbox)

    signed_headers = sign_request(
        private_key_pem=actor.private_key_pem,
        key_id=f"{actor_url}#main-key",
        method="POST",
        path=target.path,
        host=target.netloc,
        body=payload,
    )
    signed_headers["Content-Type"] = AP_CONTENT_TYPE

    log.info("Accept payload → %s: %s", follower_inbox, json.dumps(accept)[:500])

    try:
        async with httpx.AsyncClient(timeout=15) as client:
            resp = await client.post(
                follower_inbox,
                content=payload,
                headers=signed_headers,
            )
            log.info("Accept → %s: %d %s", follower_inbox, resp.status_code, resp.text[:200])
    except Exception:
        log.exception("Failed to send Accept to %s", follower_inbox)
|
||||
|
||||
|
||||
async def backfill_follower(
    session: AsyncSession,
    actor: ActorProfile,
    follower_inbox: str,
    domain: str,
    origin_app: str | None = None,
) -> None:
    """Deliver recent *current* Create activities to a new follower's inbox.

    Skips Creates whose source was later Deleted, and uses the latest
    Update data when available (so the follower sees the current version).

    Args:
        session: Open async DB session.
        actor: Local actor whose posts are backfilled.
        follower_inbox: Inbox URL of the new follower.
        domain: Public domain used when building activity JSON.
        origin_app: When set, restrict to Creates from that app
            (used for per-app follows).
    """
    from shared.events.handlers.ap_delivery_handler import (
        _build_activity_json, _deliver_to_inbox,
    )

    # Fetch up to 40 candidate Creates (newest first); more than needed so
    # deleted ones can be dropped and 20 current posts still remain.
    filters = [
        APActivity.actor_profile_id == actor.id,
        APActivity.is_local == True,  # noqa: E712
        APActivity.activity_type == "Create",
        APActivity.source_type.isnot(None),
        APActivity.source_id.isnot(None),
    ]
    if origin_app is not None:
        filters.append(APActivity.origin_app == origin_app)

    creates = (
        await session.execute(
            select(APActivity).where(*filters)
            .order_by(APActivity.published.desc())
            .limit(40)
        )
    ).scalars().all()

    if not creates:
        return

    # Collect source keys that have been Deleted
    source_keys = {(c.source_type, c.source_id) for c in creates}
    deleted_keys: set[tuple[str | None, int | None]] = set()
    if source_keys:
        deletes = (
            await session.execute(
                select(APActivity.source_type, APActivity.source_id).where(
                    APActivity.actor_profile_id == actor.id,
                    APActivity.activity_type == "Delete",
                    APActivity.is_local == True,  # noqa: E712
                )
            )
        ).all()
        deleted_keys = {(d[0], d[1]) for d in deletes}

    # For sources with Updates, grab the latest Update's object_data.
    # Updates are ordered newest-first, so the first hit per key wins.
    updated_data: dict[tuple[str | None, int | None], dict] = {}
    if source_keys:
        updates = (
            await session.execute(
                select(APActivity).where(
                    APActivity.actor_profile_id == actor.id,
                    APActivity.activity_type == "Update",
                    APActivity.is_local == True,  # noqa: E712
                ).order_by(APActivity.published.desc())
            )
        ).scalars().all()
        for u in updates:
            key = (u.source_type, u.source_id)
            if key not in updated_data and key in source_keys:
                updated_data[key] = u.object_data or {}

    # Filter to current, non-deleted Creates (limit 20)
    activities = []
    for c in creates:
        key = (c.source_type, c.source_id)
        if key in deleted_keys:
            continue
        # Apply latest Update data if available
        # NOTE(review): this mutates the ORM instance in memory; presumably
        # the session is not flushed afterwards, so the DB row is untouched
        # — confirm callers don't commit this change.
        if key in updated_data:
            c.object_data = updated_data[key]
        activities.append(c)
        if len(activities) >= 20:
            break

    if not activities:
        return

    log.info(
        "Backfilling %d posts to %s for @%s",
        len(activities), follower_inbox, actor.preferred_username,
    )

    # Deliver sequentially, oldest first, so the follower's timeline
    # arrives in chronological order.
    async with httpx.AsyncClient() as client:
        for activity in reversed(activities):  # oldest first
            activity_json = _build_activity_json(activity, actor, domain)
            await _deliver_to_inbox(client, follower_inbox, activity_json, actor, domain)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Inbox activity handlers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def handle_follow(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
    domain: str,
    app_domain: str = "federation",
) -> None:
    """Process a Follow activity: add follower, send Accept, backfill.

    Args:
        session: Open async DB session.
        actor_row: Local actor being followed.
        body: The raw Follow activity (echoed back inside the Accept).
        from_actor_url: URL of the remote actor who sent the Follow.
        domain: Public domain of this app (for Accept/backfill URLs).
        app_domain: Logical app namespace for follower bookkeeping.
    """
    # The remote actor document is required for its inbox and public key;
    # without it the Follow cannot be acknowledged.
    remote_actor = await fetch_remote_actor(from_actor_url)
    if not remote_actor:
        log.warning("Could not fetch remote actor for Follow: %s", from_actor_url)
        return

    follower_inbox = remote_actor.get("inbox")
    if not follower_inbox:
        log.warning("Remote actor has no inbox: %s", from_actor_url)
        return

    # Build a "user@domain" handle; fall back to the raw URL when the
    # remote document lacks preferredUsername.
    remote_username = remote_actor.get("preferredUsername", "")
    from urllib.parse import urlparse
    remote_domain = urlparse(from_actor_url).netloc
    follower_acct = f"{remote_username}@{remote_domain}" if remote_username else from_actor_url

    pub_key = (remote_actor.get("publicKey") or {}).get("publicKeyPem")

    await services.federation.add_follower(
        session,
        actor_row.preferred_username,
        follower_acct=follower_acct,
        follower_inbox=follower_inbox,
        follower_actor_url=from_actor_url,
        follower_public_key=pub_key,
        app_domain=app_domain,
    )

    log.info(
        "New follower: %s → @%s (app_domain=%s)",
        follower_acct, actor_row.preferred_username, app_domain,
    )

    # Notification: needs a RemoteActor row; create one via the service
    # if it doesn't exist yet, then re-query for the ORM instance.
    ra = (
        await session.execute(
            select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
        )
    ).scalar_one_or_none()
    if not ra:
        ra_dto = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
        if ra_dto:
            ra = (await session.execute(
                select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
            )).scalar_one_or_none()

    if ra:
        notif = APNotification(
            actor_profile_id=actor_row.id,
            notification_type="follow",
            from_remote_actor_id=ra.id,
        )
        session.add(notif)

    # Send Accept
    await send_accept(actor_row, body, follower_inbox, domain)

    # Backfill: deliver recent posts (filtered by origin_app for per-app follows)
    backfill_origin = app_domain if app_domain != "federation" else None
    await backfill_follower(session, actor_row, follower_inbox, domain, origin_app=backfill_origin)
|
||||
|
||||
|
||||
async def handle_undo(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
    app_domain: str = "federation",
) -> None:
    """Process an Undo activity; only Undo(Follow) is acted on.

    An Undo(Follow) removes the sender from the actor's follower list.
    Any other inner type is logged at debug level and ignored.
    """
    inner = body.get("object")
    if not inner:
        return

    inner_type = inner.get("type") if isinstance(inner, dict) else None
    if inner_type != "Follow":
        log.debug("Undo for %s — not handled", inner_type)
        return

    # Reconstruct the follower's account handle the same way handle_follow
    # built it, so the stored row can be matched.
    from urllib.parse import urlparse
    remote_domain = urlparse(from_actor_url).netloc
    remote_actor = await fetch_remote_actor(from_actor_url)
    remote_username = remote_actor.get("preferredUsername", "") if remote_actor else ""
    follower_acct = f"{remote_username}@{remote_domain}" if remote_username else from_actor_url

    removed = await services.federation.remove_follower(
        session, actor_row.preferred_username, follower_acct,
        app_domain=app_domain,
    )
    if removed:
        log.info("Unfollowed: %s → @%s (app_domain=%s)", follower_acct, actor_row.preferred_username, app_domain)
    else:
        log.debug("Undo Follow: follower not found: %s", follower_acct)
|
||||
|
||||
|
||||
async def handle_accept(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Process an Accept activity — mark our outbound Follow as accepted.

    Only Accept(Follow) is recognised; anything else is ignored.
    """
    inner = body.get("object")
    if not inner:
        return
    if not (isinstance(inner, dict) and inner.get("type") == "Follow"):
        return

    await services.federation.accept_follow_response(
        session, actor_row.preferred_username, from_actor_url,
    )
    log.info("Follow accepted by %s for @%s", from_actor_url, actor_row.preferred_username)
|
||||
|
||||
|
||||
async def handle_create(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
    federation_domain: str,
) -> None:
    """Process Create(Note/Article) — ingest remote post.

    After ingesting, scans the object for Mention tags and an inReplyTo
    pointing at local actors/activities, and queues APNotification rows
    for any matches.
    """
    obj = body.get("object")
    if not obj or not isinstance(obj, dict):
        return

    obj_type = obj.get("type", "")
    if obj_type not in ("Note", "Article"):
        log.debug("Create with type %s — skipping", obj_type)
        return

    remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
    if not remote:
        log.warning("Could not resolve remote actor for Create: %s", from_actor_url)
        return

    await services.federation.ingest_remote_post(session, remote.id, body, obj)
    log.info("Ingested %s from %s", obj_type, from_actor_url)

    # Mention notification: notify each local actor mentioned in the tags.
    tags = obj.get("tag", [])
    if isinstance(tags, list):
        for tag in tags:
            if not isinstance(tag, dict):
                continue
            if tag.get("type") != "Mention":
                continue
            href = tag.get("href", "")
            # Only hrefs pointing at this federation domain are local mentions.
            if f"https://{federation_domain}/users/" in href:
                mentioned_username = href.rsplit("/", 1)[-1]
                mentioned = await services.federation.get_actor_by_username(
                    session, mentioned_username,
                )
                if mentioned:
                    # Link the notification to the ingested post and sender
                    # rows when they exist; both are optional.
                    rp = (await session.execute(
                        select(APRemotePost).where(
                            APRemotePost.object_id == obj.get("id")
                        )
                    )).scalar_one_or_none()

                    ra = (await session.execute(
                        select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
                    )).scalar_one_or_none()

                    notif = APNotification(
                        actor_profile_id=mentioned.id,
                        notification_type="mention",
                        from_remote_actor_id=ra.id if ra else None,
                        target_remote_post_id=rp.id if rp else None,
                    )
                    session.add(notif)

    # Reply notification: fires when the post replies to one of our
    # locally published activities.
    in_reply_to = obj.get("inReplyTo")
    if in_reply_to and f"https://{federation_domain}/users/" in str(in_reply_to):
        local_activity = (await session.execute(
            select(APActivity).where(
                APActivity.activity_id == in_reply_to,
            )
        )).scalar_one_or_none()
        if local_activity:
            ra = (await session.execute(
                select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
            )).scalar_one_or_none()
            rp = (await session.execute(
                select(APRemotePost).where(
                    APRemotePost.object_id == obj.get("id")
                )
            )).scalar_one_or_none()

            notif = APNotification(
                actor_profile_id=local_activity.actor_profile_id,
                notification_type="reply",
                from_remote_actor_id=ra.id if ra else None,
                target_remote_post_id=rp.id if rp else None,
            )
            session.add(notif)
|
||||
|
||||
|
||||
async def handle_update(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Process an Update activity — re-ingest the remote post.

    Only Note/Article objects are handled; other types are ignored.
    """
    obj = body.get("object")
    if not isinstance(obj, dict) or not obj:
        return

    obj_type = obj.get("type", "")
    if obj_type not in ("Note", "Article"):
        return

    remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
    if remote:
        # ingest_remote_post upserts, so re-ingesting applies the edit.
        await services.federation.ingest_remote_post(session, remote.id, body, obj)
        log.info("Updated %s from %s", obj_type, from_actor_url)
|
||||
|
||||
|
||||
async def handle_delete(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Process a Delete activity — remove the referenced remote post.

    The object may be either a bare object-id string or an embedded
    object dict; anything else is ignored.
    """
    obj = body.get("object")
    if not isinstance(obj, (str, dict)):
        return

    object_id = obj if isinstance(obj, str) else obj.get("id", "")
    if object_id:
        await services.federation.delete_remote_post(session, object_id)
        log.info("Deleted remote post %s from %s", object_id, from_actor_url)
|
||||
|
||||
|
||||
async def handle_like(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Record an incoming Like on a local activity and notify its owner."""
    liked_id = body.get("object", "")
    if isinstance(liked_id, dict):
        liked_id = liked_id.get("id", "")
    if not liked_id:
        return

    # Make sure the remote actor is known locally before recording.
    remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
    if not remote:
        return

    ra = (await session.execute(
        select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
    )).scalar_one_or_none()

    target = (await session.execute(
        select(APActivity).where(APActivity.activity_id == liked_id)
    )).scalar_one_or_none()
    if not target:
        log.info("Like from %s for %s (target not found locally)", from_actor_url, liked_id)
        return

    # Interaction row + notification for the target's owner.
    session.add(APInteraction(
        remote_actor_id=ra.id if ra else None,
        post_type="local",
        post_id=target.id,
        interaction_type="like",
        activity_id=body.get("id"),
    ))
    session.add(APNotification(
        actor_profile_id=target.actor_profile_id,
        notification_type="like",
        from_remote_actor_id=ra.id if ra else None,
        target_activity_id=target.id,
    ))
    log.info("Like from %s on activity %s", from_actor_url, liked_id)
|
||||
|
||||
|
||||
async def handle_announce(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Record an incoming Announce (boost) on a local activity and notify."""
    boosted_id = body.get("object", "")
    if isinstance(boosted_id, dict):
        boosted_id = boosted_id.get("id", "")
    if not boosted_id:
        return

    # Make sure the remote actor is known locally before recording.
    remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
    if not remote:
        return

    ra = (await session.execute(
        select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
    )).scalar_one_or_none()

    target = (await session.execute(
        select(APActivity).where(APActivity.activity_id == boosted_id)
    )).scalar_one_or_none()
    if not target:
        log.info("Announce from %s for %s (target not found locally)", from_actor_url, boosted_id)
        return

    # Interaction row + notification for the target's owner.
    session.add(APInteraction(
        remote_actor_id=ra.id if ra else None,
        post_type="local",
        post_id=target.id,
        interaction_type="boost",
        activity_id=body.get("id"),
    ))
    session.add(APNotification(
        actor_profile_id=target.actor_profile_id,
        notification_type="boost",
        from_remote_actor_id=ra.id if ra else None,
        target_activity_id=target.id,
    ))
    log.info("Announce from %s on activity %s", from_actor_url, boosted_id)
|
||||
|
||||
|
||||
async def dispatch_inbox_activity(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
    domain: str,
    app_domain: str = "federation",
) -> None:
    """Route an inbox activity to the correct handler.

    Args:
        session: Open async DB session the handlers write through.
        actor_row: Local actor whose inbox received the activity.
        body: Decoded ActivityPub activity JSON.
        from_actor_url: The activity's ``actor`` field (the sender).
        domain: Public domain of the receiving app (used for Accept
            delivery and backfill URLs by the Follow handler).
        app_domain: Logical app namespace for follower bookkeeping.
    """
    activity_type = body.get("type", "")

    if activity_type == "Follow":
        await handle_follow(session, actor_row, body, from_actor_url, domain, app_domain=app_domain)
    elif activity_type == "Undo":
        await handle_undo(session, actor_row, body, from_actor_url, app_domain=app_domain)
    elif activity_type == "Accept":
        await handle_accept(session, actor_row, body, from_actor_url)
    elif activity_type == "Create":
        await handle_create(session, actor_row, body, from_actor_url, domain)
    elif activity_type == "Update":
        await handle_update(session, actor_row, body, from_actor_url)
    elif activity_type == "Delete":
        await handle_delete(session, actor_row, body, from_actor_url)
    elif activity_type == "Like":
        await handle_like(session, actor_row, body, from_actor_url)
    elif activity_type == "Announce":
        await handle_announce(session, actor_row, body, from_actor_url)
    else:
        # Fix: previously unknown types were dropped with no trace. Log so
        # operators can see what federation traffic is being ignored
        # (e.g. Reject, Move, Block, Flag).
        log.debug("Unhandled inbox activity type %r from %s", activity_type, from_actor_url)
|
||||
@@ -2,16 +2,17 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import secrets
|
||||
from pathlib import Path
|
||||
from typing import Callable, Awaitable, Sequence
|
||||
|
||||
from quart import Quart, request, g, send_from_directory
|
||||
from quart import Quart, request, g, redirect, send_from_directory
|
||||
|
||||
from shared.config import init_config, config, pretty
|
||||
from shared.models import KV # ensure shared models imported
|
||||
# Register all app model classes with SQLAlchemy so cross-domain
|
||||
# relationship() string references resolve correctly.
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models"):
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models", "account.models"):
|
||||
try:
|
||||
__import__(_mod)
|
||||
except ImportError:
|
||||
@@ -54,7 +55,7 @@ def create_base_app(
|
||||
context_fn:
|
||||
Async function returning a dict for template context.
|
||||
Each app provides its own — the cart app queries locally,
|
||||
while coop/market apps fetch via internal API.
|
||||
while blog/market apps fetch via internal API.
|
||||
If not provided, a minimal default context is used.
|
||||
before_request_fns:
|
||||
Extra before-request hooks (e.g. cart_loader for the cart app).
|
||||
@@ -80,11 +81,10 @@ def create_base_app(
|
||||
|
||||
app.secret_key = os.getenv("SECRET_KEY", "dev-secret-key-change-me-777")
|
||||
|
||||
# Session cookie shared across subdomains
|
||||
cookie_domain = os.getenv("SESSION_COOKIE_DOMAIN") # e.g. ".rose-ash.com"
|
||||
if cookie_domain:
|
||||
app.config["SESSION_COOKIE_DOMAIN"] = cookie_domain
|
||||
app.config["SESSION_COOKIE_NAME"] = "coop_session"
|
||||
# Per-app first-party session cookie (no shared domain — avoids Safari ITP)
|
||||
app.config["SESSION_COOKIE_NAME"] = f"{name}_session"
|
||||
app.config["SESSION_COOKIE_SAMESITE"] = "Lax"
|
||||
app.config["SESSION_COOKIE_SECURE"] = True
|
||||
|
||||
# Ghost / Redis config
|
||||
app.config["GHOST_API_URL"] = os.getenv("GHOST_API_URL")
|
||||
@@ -102,6 +102,41 @@ def create_base_app(
|
||||
setup_jinja(app)
|
||||
errors(app)
|
||||
|
||||
# Auto-register OAuth client blueprint for non-account apps
|
||||
# (account is the OAuth authorization server)
|
||||
if name != "account":
|
||||
from shared.infrastructure.oauth import create_oauth_blueprint
|
||||
app.register_blueprint(create_oauth_blueprint(name))
|
||||
|
||||
# Auto-register ActivityPub blueprint for AP-enabled apps
|
||||
from shared.infrastructure.activitypub import AP_APPS
|
||||
if name in AP_APPS:
|
||||
from shared.infrastructure.activitypub import create_activitypub_blueprint
|
||||
app.register_blueprint(create_activitypub_blueprint(name))
|
||||
|
||||
# --- device id (all apps, including account) ---
|
||||
_did_cookie = f"{name}_did"
|
||||
|
||||
@app.before_request
|
||||
async def _init_device_id():
|
||||
did = request.cookies.get(_did_cookie)
|
||||
if did:
|
||||
g.device_id = did
|
||||
g._new_device_id = False
|
||||
else:
|
||||
g.device_id = secrets.token_urlsafe(32)
|
||||
g._new_device_id = True
|
||||
|
||||
@app.after_request
|
||||
async def _set_device_cookie(response):
|
||||
if getattr(g, "_new_device_id", False):
|
||||
response.set_cookie(
|
||||
_did_cookie, g.device_id,
|
||||
max_age=30 * 24 * 3600,
|
||||
secure=True, samesite="Lax", httponly=True,
|
||||
)
|
||||
return response
|
||||
|
||||
# --- before-request hooks ---
|
||||
@app.before_request
|
||||
async def _route_log():
|
||||
@@ -118,11 +153,98 @@ def create_base_app(
|
||||
for fn in before_request_fns:
|
||||
app.before_request(fn)
|
||||
|
||||
# Auth state check via grant verification + silent OAuth handshake
|
||||
    # Auth state check via grant verification + silent OAuth handshake.
    # The account app IS the OAuth server, so it never runs this hook.
    if name != "account":

        @app.before_request
        async def _check_auth_state():
            """Keep the local session in sync with the account app's auth state.

            Two cases per request:
              1. Session claims a login (uid + grant_token): verify the grant
                 is still valid (Redis-cached, falling back to a direct DB read).
              2. No login: silently probe account with a prompt=none OAuth
                 redirect, rate-limited via Redis and the ``_pnone_at`` stamp.
            """
            from quart import session as qs
            from urllib.parse import quote as _quote

            # Never interfere with auth endpoints, static assets, or
            # federation/well-known routes (machine clients, no session).
            if request.path.startswith(("/auth/", "/static/", "/.well-known/", "/users/", "/nodeinfo/")):
                return

            uid = qs.get("uid")
            grant_token = qs.get("grant_token")

            from shared.browser.app.redis_cacher import get_redis
            redis = get_redis()

            # Case 1: logged in — verify grant still valid (direct DB, cached)
            if uid and grant_token:
                cache_key = f"grant:{grant_token}"
                if redis:
                    # Quick check: if did_auth was cleared (logout), skip cache
                    device_id = g.device_id
                    did_auth_present = await redis.get(f"did_auth:{device_id}") if device_id else True
                    cached = await redis.get(cache_key)
                    if cached == b"ok" and did_auth_present:
                        return
                    if cached == b"revoked":
                        # Grant known-revoked: drop all login-derived session keys.
                        qs.pop("uid", None)
                        qs.pop("grant_token", None)
                        qs.pop("cart_sid", None)
                        return

                from sqlalchemy import select
                from shared.db.session import get_session
                from shared.models.oauth_grant import OAuthGrant
                try:
                    async with get_session() as s:
                        grant = await s.scalar(
                            select(OAuthGrant).where(OAuthGrant.token == grant_token)
                        )
                        valid = grant is not None and grant.revoked_at is None
                except Exception:
                    return  # DB error — don't log user out

                if redis:
                    # Cache the verdict briefly; 60s bounds revocation latency.
                    await redis.set(cache_key, b"ok" if valid else b"revoked", ex=60)
                if not valid:
                    qs.pop("uid", None)
                    qs.pop("grant_token", None)
                    qs.pop("cart_sid", None)
                return

            # Case 2: not logged in — prompt=none OAuth (GET, non-HTMX only)
            if not uid and request.method == "GET":
                if request.headers.get("HX-Request"):
                    # HTMX partials must not be answered with a redirect dance.
                    return
                import time as _time
                now = _time.time()
                pnone_at = qs.get("_pnone_at")
                device_id = g.device_id

                # Check if account signalled a login after we cached "not logged in"
                # (blog_did == account_did — same value set during OAuth callback)
                if device_id and redis and pnone_at:
                    auth_ts = await redis.get(f"did_auth:{device_id}")
                    if auth_ts:
                        try:
                            if float(auth_ts) > pnone_at:
                                qs.pop("_pnone_at", None)
                                return redirect(f"/auth/login?prompt=none&next={_quote(request.url, safe='')}")
                        except (ValueError, TypeError):
                            # Malformed timestamp in Redis — ignore the signal.
                            pass

                # Back off: a failed silent probe suppresses retries for 5 min.
                if pnone_at and (now - pnone_at) < 300:
                    return
                if device_id and redis:
                    cached = await redis.get(f"prompt:{name}:{device_id}")
                    if cached == b"none":
                        return
                return redirect(f"/auth/login?prompt=none&next={_quote(request.url, safe='')}")
|
||||
|
||||
    @app.before_request
    async def _csrf_protect():
        """Run the shared CSRF check on every request before routing."""
        await protect()
|
||||
|
||||
# --- after-request hooks ---
|
||||
# Clear old shared-domain session cookie (migration from .rose-ash.com)
|
||||
@app.after_request
|
||||
async def _clear_old_shared_cookie(response):
|
||||
if request.cookies.get("blog_session"):
|
||||
response.delete_cookie("blog_session", domain=".rose-ash.com", path="/")
|
||||
return response
|
||||
|
||||
@app.after_request
|
||||
async def _add_hx_preserve_search_header(response):
|
||||
value = request.headers.get("X-Search")
|
||||
@@ -144,7 +266,7 @@ def create_base_app(
|
||||
return await base_context()
|
||||
|
||||
# --- event processor ---
|
||||
_event_processor = EventProcessor()
|
||||
_event_processor = EventProcessor(app_name=name)
|
||||
|
||||
# --- startup ---
|
||||
@app.before_serving
|
||||
|
||||
160
infrastructure/fragments.py
Normal file
160
infrastructure/fragments.py
Normal file
@@ -0,0 +1,160 @@
|
||||
"""
|
||||
Server-side fragment composition client.
|
||||
|
||||
Each coop app exposes HTML fragments at ``/internal/fragments/{type}``.
|
||||
This module provides helpers to fetch and cache those fragments so that
|
||||
consuming apps can compose cross-app UI without shared templates.
|
||||
|
||||
All functions return ``""`` on error (graceful degradation — a missing
|
||||
fragment simply means a section is absent from the page).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
from typing import Sequence
|
||||
|
||||
import httpx
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Re-usable async client (created lazily, one per process)
|
||||
_client: httpx.AsyncClient | None = None
|
||||
|
||||
# Default request timeout (seconds)
|
||||
_DEFAULT_TIMEOUT = 2.0
|
||||
|
||||
# Header sent on every fragment request so providers can distinguish
|
||||
# fragment fetches from normal browser traffic.
|
||||
FRAGMENT_HEADER = "X-Fragment-Request"
|
||||
|
||||
|
||||
def _get_client() -> httpx.AsyncClient:
    """Return the lazily-created, process-wide async HTTP client.

    A new client is built on first use, or again if the previous
    one has been closed.
    """
    global _client
    needs_fresh = _client is None or _client.is_closed
    if needs_fresh:
        _client = httpx.AsyncClient(
            follow_redirects=False,
            timeout=httpx.Timeout(_DEFAULT_TIMEOUT),
        )
    return _client
|
||||
|
||||
|
||||
def _internal_url(app_name: str) -> str:
|
||||
"""Resolve the Docker-internal base URL for *app_name*.
|
||||
|
||||
Looks up ``INTERNAL_URL_{APP}`` first, falls back to
|
||||
``http://{app}:8000``.
|
||||
"""
|
||||
env_key = f"INTERNAL_URL_{app_name.upper()}"
|
||||
return os.getenv(env_key, f"http://{app_name}:8000").rstrip("/")
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Public API
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
async def fetch_fragment(
    app_name: str,
    fragment_type: str,
    *,
    params: dict | None = None,
    timeout: float = _DEFAULT_TIMEOUT,
) -> str:
    """Fetch an HTML fragment from another app.

    Returns the raw HTML string, or ``""`` on any error.
    """
    endpoint = f"{_internal_url(app_name)}/internal/fragments/{fragment_type}"
    try:
        response = await _get_client().get(
            endpoint,
            params=params,
            headers={FRAGMENT_HEADER: "1"},
            timeout=timeout,
        )
        if response.status_code == 200:
            return response.text
        # Non-200 is expected degradation, not an error — log quietly.
        log.debug("Fragment %s/%s returned %s", app_name, fragment_type, response.status_code)
        return ""
    except Exception:
        # Network/timeout/etc.: the caller just renders without this section.
        log.debug("Fragment %s/%s failed", app_name, fragment_type, exc_info=True)
        return ""
|
||||
|
||||
|
||||
async def fetch_fragments(
    requests: Sequence[tuple[str, str, dict | None]],
    *,
    timeout: float = _DEFAULT_TIMEOUT,
) -> list[str]:
    """Fetch multiple fragments concurrently.

    *requests* is a sequence of ``(app_name, fragment_type, params)`` tuples.
    Returns a list of HTML strings in the same order. Failed fetches
    produce ``""``.
    """
    coros = [
        fetch_fragment(app, ftype, params=params, timeout=timeout)
        for app, ftype, params in requests
    ]
    gathered = await asyncio.gather(*coros)
    return list(gathered)
|
||||
|
||||
|
||||
async def fetch_fragment_cached(
    app_name: str,
    fragment_type: str,
    *,
    params: dict | None = None,
    ttl: int = 30,
    timeout: float = _DEFAULT_TIMEOUT,
) -> str:
    """Fetch a fragment with a Redis cache layer.

    Cache key: ``frag:{app}:{type}:{sorted_params}``.
    Returns ``""`` on error (cache miss + fetch failure).
    """
    # Stable cache key: sort params so argument order never changes the key.
    cache_key = f"frag:{app_name}:{fragment_type}"
    if params:
        cache_key += ":" + "&".join(f"{k}={v}" for k, v in sorted(params.items()))

    redis = _get_redis()

    # 1) Cache lookup (best-effort — Redis failure falls through to fetch).
    if redis:
        hit = None
        try:
            hit = await redis.get(cache_key)
        except Exception:
            pass
        if hit is not None:
            return hit.decode() if isinstance(hit, bytes) else hit

    # 2) Miss — fetch from the provider app.
    html = await fetch_fragment(
        app_name, fragment_type, params=params, timeout=timeout,
    )

    # 3) Store even an empty string, so a down service isn't hammered.
    if redis and ttl > 0:
        try:
            await redis.set(cache_key, html.encode(), ex=ttl)
        except Exception:
            pass

    return html
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_redis():
|
||||
"""Return the current app's Redis connection, or None."""
|
||||
try:
|
||||
from quart import current_app
|
||||
r = current_app.redis
|
||||
return r if r else None
|
||||
except Exception:
|
||||
return None
|
||||
@@ -13,7 +13,7 @@ from shared.browser.app.csrf import generate_csrf_token
|
||||
from shared.browser.app.authz import has_access
|
||||
from shared.browser.app.filters import register as register_filters
|
||||
|
||||
from .urls import coop_url, market_url, cart_url, events_url, login_url, page_cart_url, market_product_url
|
||||
from .urls import blog_url, market_url, cart_url, events_url, federation_url, account_url, login_url, page_cart_url, market_product_url
|
||||
|
||||
|
||||
def setup_jinja(app: Quart) -> None:
|
||||
@@ -93,10 +93,12 @@ def setup_jinja(app: Quart) -> None:
|
||||
app.jinja_env.globals["site"] = site
|
||||
|
||||
# cross-app URL helpers available in all templates
|
||||
app.jinja_env.globals["coop_url"] = coop_url
|
||||
app.jinja_env.globals["blog_url"] = blog_url
|
||||
app.jinja_env.globals["market_url"] = market_url
|
||||
app.jinja_env.globals["cart_url"] = cart_url
|
||||
app.jinja_env.globals["events_url"] = events_url
|
||||
app.jinja_env.globals["federation_url"] = federation_url
|
||||
app.jinja_env.globals["account_url"] = account_url
|
||||
app.jinja_env.globals["login_url"] = login_url
|
||||
app.jinja_env.globals["page_cart_url"] = page_cart_url
|
||||
app.jinja_env.globals["market_product_url"] = market_product_url
|
||||
@@ -105,5 +107,14 @@ def setup_jinja(app: Quart) -> None:
|
||||
from shared.services.widget_registry import widgets as _widget_registry
|
||||
app.jinja_env.globals["widgets"] = _widget_registry
|
||||
|
||||
# fragment composition helper — fetch HTML from another app's fragment API
|
||||
from shared.infrastructure.fragments import fetch_fragment_cached
|
||||
|
||||
async def _fragment(app_name: str, fragment_type: str, ttl: int = 30, **params) -> str:
|
||||
p = params if params else None
|
||||
return await fetch_fragment_cached(app_name, fragment_type, params=p, ttl=ttl)
|
||||
|
||||
app.jinja_env.globals["fragment"] = _fragment
|
||||
|
||||
# register jinja filters
|
||||
register_filters(app)
|
||||
|
||||
183
infrastructure/oauth.py
Normal file
183
infrastructure/oauth.py
Normal file
@@ -0,0 +1,183 @@
|
||||
"""OAuth2 client blueprint for non-account apps.
|
||||
|
||||
Each client app gets /auth/login, /auth/callback, /auth/logout.
|
||||
Account is the OAuth authorization server.
|
||||
|
||||
Device cookie ({app}_did) ties the browser to its auth state so
|
||||
client apps can detect login/logout without cross-domain cookies.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import secrets
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from quart import (
|
||||
Blueprint,
|
||||
redirect,
|
||||
request,
|
||||
session as qsession,
|
||||
g,
|
||||
current_app,
|
||||
make_response,
|
||||
)
|
||||
from sqlalchemy import select
|
||||
|
||||
from shared.db.session import get_session
|
||||
from shared.models.oauth_code import OAuthCode
|
||||
from shared.infrastructure.urls import account_url, app_url
|
||||
from shared.infrastructure.cart_identity import current_cart_identity
|
||||
from shared.events import emit_activity
|
||||
|
||||
SESSION_USER_KEY = "uid"
|
||||
GRANT_TOKEN_KEY = "grant_token"
|
||||
|
||||
|
||||
def create_oauth_blueprint(app_name: str) -> Blueprint:
    """Return an OAuth client blueprint for *app_name*.

    Registers four routes under ``/auth``:

    * ``GET /login``    — start the authorization-code flow against account.
    * ``GET /callback`` — redeem the code, establish the local session.
    * ``GET /clear``    — one-time cookie-migration helper.
    * ``POST /logout``  — drop the local session, then SSO-logout on account.
    """
    bp = Blueprint("oauth_auth", __name__, url_prefix="/auth")

    @bp.get("/login")
    @bp.get("/login/")
    async def login():
        """Redirect the browser to account's authorize endpoint with CSRF state."""
        next_url = request.args.get("next", "/")
        prompt = request.args.get("prompt", "")
        # Random state stored in the session guards the callback against CSRF.
        state = secrets.token_urlsafe(32)
        qsession["oauth_state"] = state
        qsession["oauth_next"] = next_url

        device_id = g.device_id
        redirect_uri = app_url(app_name, "/auth/callback")
        params = (
            f"?client_id={app_name}"
            f"&redirect_uri={redirect_uri}"
            f"&device_id={device_id}"
            f"&state={state}"
        )
        if prompt:
            # e.g. prompt=none for the silent login probe.
            params += f"&prompt={prompt}"
        authorize_url = account_url(f"/auth/oauth/authorize{params}")
        return redirect(authorize_url)

    @bp.get("/callback")
    @bp.get("/callback/")
    async def callback():
        """Redeem the authorization code and establish the local session."""
        # Adopt account's device id as our own — one identity across all apps
        account_did = request.args.get("account_did", "")
        if account_did:
            qsession["_account_did"] = account_did
            # Overwrite this app's device cookie with account's device id
            g.device_id = account_did
            g._new_device_id = True  # factory after_request will set the cookie

        # Handle prompt=none error (user not logged in on account)
        error = request.args.get("error")
        if error == "login_required":
            next_url = qsession.pop("oauth_next", "/")
            qsession.pop("oauth_state", None)
            import time as _time
            # Stamp the failed silent probe so it is not retried immediately.
            qsession["_pnone_at"] = _time.time()
            device_id = g.device_id
            if device_id:
                from shared.browser.app.redis_cacher import get_redis
                _redis = get_redis()
                if _redis:
                    # Cross-session negative cache: "this device is not logged in".
                    await _redis.set(
                        f"prompt:{app_name}:{device_id}", b"none", ex=300
                    )
            return redirect(next_url)

        code = request.args.get("code")
        state = request.args.get("state")
        expected_state = qsession.pop("oauth_state", None)
        next_url = qsession.pop("oauth_next", "/")

        if not code or not state or state != expected_state:
            current_app.logger.warning("OAuth callback: bad state or missing code")
            return redirect("/")

        expected_redirect = app_url(app_name, "/auth/callback")
        now = datetime.now(timezone.utc)

        # Redeem inside one transaction with a row lock so a code can only
        # ever be used once, even under concurrent callbacks.
        async with get_session() as s:
            async with s.begin():
                result = await s.execute(
                    select(OAuthCode)
                    .where(OAuthCode.code == code)
                    .with_for_update()
                )
                oauth_code = result.scalar_one_or_none()

                if not oauth_code:
                    current_app.logger.warning("OAuth callback: code not found")
                    return redirect("/")

                if oauth_code.used_at is not None:
                    current_app.logger.warning("OAuth callback: code already used")
                    return redirect("/")

                if oauth_code.expires_at < now:
                    current_app.logger.warning("OAuth callback: code expired")
                    return redirect("/")

                if oauth_code.client_id != app_name:
                    current_app.logger.warning("OAuth callback: client_id mismatch")
                    return redirect("/")

                if oauth_code.redirect_uri != expected_redirect:
                    current_app.logger.warning("OAuth callback: redirect_uri mismatch")
                    return redirect("/")

                # Mark spent and capture what we need before the session closes.
                oauth_code.used_at = now
                user_id = oauth_code.user_id
                grant_token = oauth_code.grant_token

        # Set local session with grant token for revocation checking
        qsession[SESSION_USER_KEY] = user_id
        if grant_token:
            qsession[GRANT_TOKEN_KEY] = grant_token
        qsession.pop("_pnone_at", None)

        # Emit login activity for cart adoption
        ident = current_cart_identity()
        anon_session_id = ident.get("session_id")
        if anon_session_id:
            try:
                async with get_session() as s:
                    async with s.begin():
                        await emit_activity(
                            s,
                            activity_type="rose:Login",
                            actor_uri="internal:system",
                            object_type="Person",
                            object_data={
                                "user_id": user_id,
                                "session_id": anon_session_id,
                            },
                        )
            except Exception:
                # Best-effort: a failed event must not break the login itself.
                current_app.logger.exception("OAuth: failed to emit login activity")

        # 303 forces a GET on the target after this GET callback.
        return redirect(next_url, 303)

    @bp.get("/clear")
    @bp.get("/clear/")
    async def clear():
        """One-time migration helper: clear all session cookies."""
        qsession.clear()
        resp = await make_response(redirect("/"))
        resp.delete_cookie("blog_session", domain=".rose-ash.com", path="/")
        resp.delete_cookie(f"{app_name}_did", path="/")
        return resp

    @bp.post("/logout")
    @bp.post("/logout/")
    async def logout():
        """Drop every login-derived session key, then revoke on account."""
        qsession.pop(SESSION_USER_KEY, None)
        qsession.pop(GRANT_TOKEN_KEY, None)
        qsession.pop("cart_sid", None)
        qsession.pop("_pnone_at", None)
        qsession.pop("_account_did", None)
        # Redirect through account to revoke grants + clear account session
        return redirect(account_url("/auth/sso-logout/"))

    return bp
|
||||
@@ -21,8 +21,8 @@ def app_url(app_name: str, path: str = "/") -> str:
|
||||
return base + path
|
||||
|
||||
|
||||
def coop_url(path: str = "/") -> str:
|
||||
return app_url("coop", path)
|
||||
def blog_url(path: str = "/") -> str:
|
||||
return app_url("blog", path)
|
||||
|
||||
|
||||
def market_url(path: str = "/") -> str:
|
||||
@@ -41,6 +41,14 @@ def federation_url(path: str = "/") -> str:
|
||||
return app_url("federation", path)
|
||||
|
||||
|
||||
def account_url(path: str = "/") -> str:
    """Absolute URL into the account app (the OAuth authorization server)."""
    return app_url("account", path)
||||
|
||||
|
||||
def artdag_url(path: str = "/") -> str:
    """Absolute URL into the artdag app."""
    return app_url("artdag", path)
||||
|
||||
|
||||
def page_cart_url(page_slug: str, path: str = "/") -> str:
|
||||
if not path.startswith("/"):
|
||||
path = "/" + path
|
||||
@@ -66,9 +74,24 @@ def market_product_url(product_slug: str, suffix: str = "", market_place=None) -
|
||||
|
||||
|
||||
def login_url(next_url: str = "") -> str:
|
||||
# Auth lives in blog (coop) for now. Set AUTH_APP=federation to switch.
|
||||
auth_app = os.getenv("AUTH_APP", "coop")
|
||||
base = app_url(auth_app, "/auth/login/")
|
||||
from quart import current_app
|
||||
|
||||
# Account handles login directly (magic link flow — it's the OAuth server)
|
||||
if current_app.name == "account":
|
||||
base = "/auth/login/"
|
||||
params: list[str] = []
|
||||
if next_url:
|
||||
params.append(f"next={quote(next_url, safe='')}")
|
||||
from quart import session as qsession
|
||||
cart_sid = qsession.get("cart_sid")
|
||||
if cart_sid:
|
||||
params.append(f"cart_sid={quote(cart_sid, safe='')}")
|
||||
if params:
|
||||
return f"{base}?{'&'.join(params)}"
|
||||
return base
|
||||
|
||||
# Client apps: local /auth/login triggers OAuth redirect to account
|
||||
base = "/auth/login/"
|
||||
if next_url:
|
||||
return f"{base}?next={quote(next_url, safe='')}"
|
||||
return base
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
from .user import User
|
||||
from .kv import KV
|
||||
from .magic_link import MagicLink
|
||||
from .oauth_code import OAuthCode
|
||||
from .oauth_grant import OAuthGrant
|
||||
from .menu_item import MenuItem
|
||||
|
||||
from .ghost_membership_entities import (
|
||||
@@ -8,8 +10,6 @@ from .ghost_membership_entities import (
|
||||
GhostNewsletter, UserNewsletter,
|
||||
GhostTier, GhostSubscription,
|
||||
)
|
||||
from .domain_event import DomainEvent
|
||||
|
||||
from .ghost_content import Tag, Post, Author, PostAuthor, PostTag, PostLike
|
||||
from .page_config import PageConfig
|
||||
from .order import Order, OrderItem
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from sqlalchemy import String, Integer, DateTime, Text, func
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
class DomainEvent(Base):
|
||||
__tablename__ = "domain_events"
|
||||
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
|
||||
event_type: Mapped[str] = mapped_column(String(128), nullable=False, index=True)
|
||||
aggregate_type: Mapped[str] = mapped_column(String(64), nullable=False)
|
||||
aggregate_id: Mapped[int] = mapped_column(Integer, nullable=False)
|
||||
payload: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
|
||||
state: Mapped[str] = mapped_column(
|
||||
String(20), nullable=False, default="pending", server_default="pending", index=True
|
||||
)
|
||||
attempts: Mapped[int] = mapped_column(Integer, nullable=False, default=0, server_default="0")
|
||||
max_attempts: Mapped[int] = mapped_column(Integer, nullable=False, default=5, server_default="5")
|
||||
last_error: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
DateTime(timezone=True), nullable=False, server_default=func.now()
|
||||
)
|
||||
processed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<DomainEvent {self.id} {self.event_type} [{self.state}]>"
|
||||
@@ -50,14 +50,19 @@ class ActorProfile(Base):
|
||||
|
||||
|
||||
class APActivity(Base):
|
||||
"""An ActivityPub activity (local or remote)."""
|
||||
"""An ActivityPub activity (local or remote).
|
||||
|
||||
Also serves as the unified event bus: internal domain events and public
|
||||
federation activities both live here, distinguished by ``visibility``.
|
||||
The ``EventProcessor`` polls rows with ``process_state='pending'``.
|
||||
"""
|
||||
__tablename__ = "ap_activities"
|
||||
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
|
||||
activity_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
|
||||
activity_type: Mapped[str] = mapped_column(String(64), nullable=False)
|
||||
actor_profile_id: Mapped[int] = mapped_column(
|
||||
Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
|
||||
actor_profile_id: Mapped[int | None] = mapped_column(
|
||||
Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True,
|
||||
)
|
||||
object_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
|
||||
object_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
|
||||
@@ -83,6 +88,30 @@ class APActivity(Base):
|
||||
DateTime(timezone=True), nullable=False, server_default=func.now(),
|
||||
)
|
||||
|
||||
# --- Unified event-bus columns ---
|
||||
actor_uri: Mapped[str | None] = mapped_column(
|
||||
String(512), nullable=True,
|
||||
)
|
||||
visibility: Mapped[str] = mapped_column(
|
||||
String(20), nullable=False, default="public", server_default="public",
|
||||
)
|
||||
process_state: Mapped[str] = mapped_column(
|
||||
String(20), nullable=False, default="completed", server_default="completed",
|
||||
)
|
||||
process_attempts: Mapped[int] = mapped_column(
|
||||
Integer, nullable=False, default=0, server_default="0",
|
||||
)
|
||||
process_max_attempts: Mapped[int] = mapped_column(
|
||||
Integer, nullable=False, default=5, server_default="5",
|
||||
)
|
||||
process_error: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
processed_at: Mapped[datetime | None] = mapped_column(
|
||||
DateTime(timezone=True), nullable=True,
|
||||
)
|
||||
origin_app: Mapped[str | None] = mapped_column(
|
||||
String(64), nullable=True,
|
||||
)
|
||||
|
||||
# Relationships
|
||||
actor_profile = relationship("ActorProfile", back_populates="activities")
|
||||
|
||||
@@ -90,6 +119,7 @@ class APActivity(Base):
|
||||
Index("ix_ap_activity_actor", "actor_profile_id"),
|
||||
Index("ix_ap_activity_source", "source_type", "source_id"),
|
||||
Index("ix_ap_activity_published", "published"),
|
||||
Index("ix_ap_activity_process", "process_state"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
@@ -97,7 +127,12 @@ class APActivity(Base):
|
||||
|
||||
|
||||
class APFollower(Base):
|
||||
"""A remote follower of a local actor."""
|
||||
"""A remote follower of a local actor.
|
||||
|
||||
``app_domain`` scopes the follow to a specific app (e.g. "blog",
|
||||
"market", "events"). "federation" means the aggregate — the
|
||||
follower subscribes to all activities.
|
||||
"""
|
||||
__tablename__ = "ap_followers"
|
||||
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
|
||||
@@ -108,6 +143,9 @@ class APFollower(Base):
|
||||
follower_inbox: Mapped[str] = mapped_column(String(512), nullable=False)
|
||||
follower_actor_url: Mapped[str] = mapped_column(String(512), nullable=False)
|
||||
follower_public_key: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
app_domain: Mapped[str] = mapped_column(
|
||||
String(64), nullable=False, default="federation", server_default="federation",
|
||||
)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
DateTime(timezone=True), nullable=False, server_default=func.now(),
|
||||
)
|
||||
@@ -116,8 +154,12 @@ class APFollower(Base):
|
||||
actor_profile = relationship("ActorProfile", back_populates="followers")
|
||||
|
||||
__table_args__ = (
|
||||
UniqueConstraint("actor_profile_id", "follower_acct", name="uq_follower_acct"),
|
||||
UniqueConstraint(
|
||||
"actor_profile_id", "follower_acct", "app_domain",
|
||||
name="uq_follower_acct_app",
|
||||
),
|
||||
Index("ix_ap_follower_actor", "actor_profile_id"),
|
||||
Index("ix_ap_follower_app_domain", "actor_profile_id", "app_domain"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
@@ -397,3 +439,28 @@ class APNotification(Base):
|
||||
Index("ix_ap_notification_read", "actor_profile_id", "read"),
|
||||
Index("ix_ap_notification_created", "created_at"),
|
||||
)
|
||||
|
||||
|
||||
class APDeliveryLog(Base):
    """Tracks successful deliveries of activities to remote inboxes.

    Used for idempotency: the delivery handler skips inboxes that already
    have a success row, so retries after a crash never send duplicates.
    """
    __tablename__ = "ap_delivery_log"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # FK to the local APActivity row (not the remote activity URI).
    activity_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_activities.id", ondelete="CASCADE"), nullable=False,
    )
    inbox_url: Mapped[str] = mapped_column(String(512), nullable=False)
    # App scope of the delivering actor ("blog", "market", ...); "federation"
    # is the aggregate — mirrors APFollower.app_domain.
    app_domain: Mapped[str] = mapped_column(String(128), nullable=False, server_default="federation")
    # HTTP status the remote inbox returned, when recorded.
    status_code: Mapped[int | None] = mapped_column(Integer, nullable=True)
    delivered_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    __table_args__ = (
        # One row per (activity, inbox, app scope) — the idempotency key.
        UniqueConstraint("activity_id", "inbox_url", "app_domain", name="uq_delivery_activity_inbox_domain"),
        Index("ix_ap_delivery_activity", "activity_id"),
    )
|
||||
|
||||
@@ -6,7 +6,7 @@ from shared.db.base import Base
|
||||
|
||||
|
||||
class MenuItem(Base):
|
||||
"""Deprecated — kept so the table isn't dropped. Use glue.models.MenuNode."""
|
||||
"""Deprecated — kept so the table isn't dropped. Use shared.models.menu_node.MenuNode."""
|
||||
__tablename__ = "menu_items"
|
||||
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
|
||||
|
||||
26
models/oauth_code.py
Normal file
26
models/oauth_code.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from __future__ import annotations
|
||||
from datetime import datetime
|
||||
from sqlalchemy import String, Integer, DateTime, ForeignKey, func, Index
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
class OAuthCode(Base):
    """Short-lived, single-use OAuth2 authorization code.

    Issued by the account app's authorize endpoint and redeemed by a client
    app's ``/auth/callback``; ``used_at`` marks redemption so a code can
    never be replayed.
    """
    __tablename__ = "oauth_codes"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Opaque code value handed to the client via the redirect.
    code: Mapped[str] = mapped_column(String(128), unique=True, index=True, nullable=False)
    user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
    # Client app the code was issued to (e.g. "blog"); checked on redemption.
    client_id: Mapped[str] = mapped_column(String(64), nullable=False)
    # Must match the redirect URI presented at redemption time.
    redirect_uri: Mapped[str] = mapped_column(String(512), nullable=False)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    # Non-NULL once redeemed — the code is spent.
    used_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    # Long-lived OAuthGrant token passed into the client session for
    # later revocation checks.
    grant_token: Mapped[str | None] = mapped_column(String(128), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())

    user = relationship("User", backref="oauth_codes")

    __table_args__ = (
        # NOTE(review): `code` and `user_id` already declare column-level
        # indexes above, so these named indexes look redundant — confirm
        # before removing, since dropping them changes the DB schema.
        Index("ix_oauth_code_code", "code", unique=True),
        Index("ix_oauth_code_user", "user_id"),
    )
|
||||
32
models/oauth_grant.py
Normal file
32
models/oauth_grant.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
from datetime import datetime
|
||||
from sqlalchemy import String, Integer, DateTime, ForeignKey, func, Index
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
class OAuthGrant(Base):
    """Long-lived grant tracking each client-app session authorization.

    Created when the OAuth authorize endpoint issues a code. Tied to the
    account session that issued it (``issuer_session``) so that logging out
    on one device revokes only that device's grants.
    """
    __tablename__ = "oauth_grants"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Opaque token carried in the client app's session; checked per request.
    token: Mapped[str] = mapped_column(String(128), unique=True, nullable=False)
    user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
    # Client app this grant authorizes (e.g. "blog").
    client_id: Mapped[str] = mapped_column(String(64), nullable=False)
    # Account session that issued the grant — the per-device revocation handle.
    issuer_session: Mapped[str] = mapped_column(String(128), nullable=False, index=True)
    device_id: Mapped[str | None] = mapped_column(String(128), nullable=True, index=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Non-NULL means the grant is revoked; clients must drop their session.
    revoked_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)

    user = relationship("User", backref="oauth_grants")

    __table_args__ = (
        # NOTE(review): `token` and `issuer_session` also declare column-level
        # indexes above — these named indexes may be redundant; confirm before
        # changing, since dropping them alters the schema.
        Index("ix_oauth_grant_token", "token", unique=True),
        Index("ix_oauth_grant_issuer", "issuer_session"),
        # Composite lookup used when revoking a specific device+client pair.
        Index("ix_oauth_grant_device", "device_id", "client_id"),
    )
|
||||
@@ -1,5 +1,6 @@
|
||||
starlette>=0.37,<0.39
|
||||
aiofiles==25.1.0
|
||||
aiohttp>=3.9
|
||||
aiosmtplib==5.0.0
|
||||
alembic==1.17.0
|
||||
anyio==4.11.0
|
||||
|
||||
@@ -239,6 +239,45 @@ class SqlCalendarService:
|
||||
merged = sorted(entries_by_id.values(), key=lambda e: e.start_at or period_start)
|
||||
return [_entry_to_dto(e) for e in merged]
|
||||
|
||||
async def upcoming_entries_for_container(
|
||||
self, session: AsyncSession,
|
||||
container_type: str | None = None, container_id: int | None = None,
|
||||
*, page: int = 1, per_page: int = 20,
|
||||
) -> tuple[list[CalendarEntryDTO], bool]:
|
||||
"""Upcoming confirmed entries. Optionally scoped to a container."""
|
||||
filters = [
|
||||
CalendarEntry.state == "confirmed",
|
||||
CalendarEntry.deleted_at.is_(None),
|
||||
CalendarEntry.start_at >= func.now(),
|
||||
]
|
||||
|
||||
if container_type is not None and container_id is not None:
|
||||
cal_ids = select(Calendar.id).where(
|
||||
Calendar.container_type == container_type,
|
||||
Calendar.container_id == container_id,
|
||||
Calendar.deleted_at.is_(None),
|
||||
).scalar_subquery()
|
||||
filters.append(CalendarEntry.calendar_id.in_(cal_ids))
|
||||
else:
|
||||
# Still exclude entries from deleted calendars
|
||||
cal_ids = select(Calendar.id).where(
|
||||
Calendar.deleted_at.is_(None),
|
||||
).scalar_subquery()
|
||||
filters.append(CalendarEntry.calendar_id.in_(cal_ids))
|
||||
|
||||
offset = (page - 1) * per_page
|
||||
result = await session.execute(
|
||||
select(CalendarEntry)
|
||||
.where(*filters)
|
||||
.order_by(CalendarEntry.start_at.asc())
|
||||
.limit(per_page)
|
||||
.offset(offset)
|
||||
.options(selectinload(CalendarEntry.calendar))
|
||||
)
|
||||
entries = result.scalars().all()
|
||||
has_more = len(entries) == per_page
|
||||
return [_entry_to_dto(e) for e in entries], has_more
|
||||
|
||||
async def associated_entries(
|
||||
self, session: AsyncSession, content_type: str, content_id: int, page: int,
|
||||
) -> tuple[list[CalendarEntryDTO], bool]:
|
||||
@@ -371,7 +410,7 @@ class SqlCalendarService:
|
||||
entries_by_post.setdefault(post_id, []).append(_entry_to_dto(entry))
|
||||
return entries_by_post
|
||||
|
||||
# -- writes (absorb glue lifecycle) ---------------------------------------
|
||||
# -- writes ---------------------------------------------------------------
|
||||
|
||||
async def adopt_entries_for_user(
|
||||
self, session: AsyncSession, user_id: int, session_id: str,
|
||||
|
||||
@@ -24,7 +24,15 @@ from shared.contracts.dtos import (
|
||||
|
||||
|
||||
def _domain() -> str:
|
||||
return os.getenv("AP_DOMAIN", "rose-ash.com")
|
||||
return os.getenv("AP_DOMAIN", "federation.rose-ash.com")
|
||||
|
||||
|
||||
def _get_origin_app() -> str | None:
|
||||
try:
|
||||
from quart import current_app
|
||||
return current_app.name
|
||||
except (ImportError, RuntimeError):
|
||||
return None
|
||||
|
||||
|
||||
def _actor_to_dto(actor: ActorProfile) -> ActorProfileDTO:
|
||||
@@ -67,6 +75,7 @@ def _follower_to_dto(f: APFollower) -> APFollowerDTO:
|
||||
follower_inbox=f.follower_inbox,
|
||||
follower_actor_url=f.follower_actor_url,
|
||||
created_at=f.created_at,
|
||||
app_domain=f.app_domain,
|
||||
)
|
||||
|
||||
|
||||
@@ -183,16 +192,22 @@ class SqlFederationService:
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
actor_url = f"https://{domain}/users/{username}"
|
||||
|
||||
activity = APActivity(
|
||||
activity_id=activity_uri,
|
||||
activity_type=activity_type,
|
||||
actor_profile_id=actor.id,
|
||||
actor_uri=actor_url,
|
||||
object_type=object_type,
|
||||
object_data=object_data,
|
||||
published=now,
|
||||
is_local=True,
|
||||
source_type=source_type,
|
||||
source_id=source_id,
|
||||
visibility="public",
|
||||
process_state="pending",
|
||||
origin_app=_get_origin_app(),
|
||||
)
|
||||
session.add(activity)
|
||||
await session.flush()
|
||||
@@ -208,7 +223,7 @@ class SqlFederationService:
|
||||
],
|
||||
"id": activity_uri,
|
||||
"type": activity_type,
|
||||
"actor": f"https://{domain}/users/{username}",
|
||||
"actor": actor_url,
|
||||
"published": now.isoformat(),
|
||||
"object": {
|
||||
"type": object_type,
|
||||
@@ -221,21 +236,6 @@ class SqlFederationService:
|
||||
except Exception:
|
||||
pass # IPFS failure is non-fatal
|
||||
|
||||
# Emit domain event for downstream processing (delivery)
|
||||
from shared.events import emit_event
|
||||
await emit_event(
|
||||
session,
|
||||
"federation.activity_created",
|
||||
"APActivity",
|
||||
activity.id,
|
||||
{
|
||||
"activity_id": activity.activity_id,
|
||||
"activity_type": activity_type,
|
||||
"actor_username": username,
|
||||
"object_type": object_type,
|
||||
},
|
||||
)
|
||||
|
||||
return _activity_to_dto(activity)
|
||||
|
||||
# -- Queries --------------------------------------------------------------
|
||||
@@ -253,6 +253,7 @@ class SqlFederationService:
|
||||
async def get_outbox(
|
||||
self, session: AsyncSession, username: str,
|
||||
page: int = 1, per_page: int = 20,
|
||||
origin_app: str | None = None,
|
||||
) -> tuple[list[APActivityDTO], int]:
|
||||
actor = (
|
||||
await session.execute(
|
||||
@@ -262,22 +263,23 @@ class SqlFederationService:
|
||||
if actor is None:
|
||||
return [], 0
|
||||
|
||||
filters = [
|
||||
APActivity.actor_profile_id == actor.id,
|
||||
APActivity.is_local == True, # noqa: E712
|
||||
]
|
||||
if origin_app is not None:
|
||||
filters.append(APActivity.origin_app == origin_app)
|
||||
|
||||
total = (
|
||||
await session.execute(
|
||||
select(func.count(APActivity.id)).where(
|
||||
APActivity.actor_profile_id == actor.id,
|
||||
APActivity.is_local == True, # noqa: E712
|
||||
)
|
||||
select(func.count(APActivity.id)).where(*filters)
|
||||
)
|
||||
).scalar() or 0
|
||||
|
||||
offset = (page - 1) * per_page
|
||||
result = await session.execute(
|
||||
select(APActivity)
|
||||
.where(
|
||||
APActivity.actor_profile_id == actor.id,
|
||||
APActivity.is_local == True, # noqa: E712
|
||||
)
|
||||
.where(*filters)
|
||||
.order_by(APActivity.published.desc())
|
||||
.limit(per_page)
|
||||
.offset(offset)
|
||||
@@ -298,10 +300,25 @@ class SqlFederationService:
|
||||
).scalars().first()
|
||||
return _activity_to_dto(a) if a else None
|
||||
|
||||
async def count_activities_for_source(
|
||||
self, session: AsyncSession, source_type: str, source_id: int,
|
||||
*, activity_type: str,
|
||||
) -> int:
|
||||
from sqlalchemy import func
|
||||
result = await session.execute(
|
||||
select(func.count()).select_from(APActivity).where(
|
||||
APActivity.source_type == source_type,
|
||||
APActivity.source_id == source_id,
|
||||
APActivity.activity_type == activity_type,
|
||||
)
|
||||
)
|
||||
return result.scalar_one()
|
||||
|
||||
# -- Followers ------------------------------------------------------------
|
||||
|
||||
async def get_followers(
|
||||
self, session: AsyncSession, username: str,
|
||||
app_domain: str | None = None,
|
||||
) -> list[APFollowerDTO]:
|
||||
actor = (
|
||||
await session.execute(
|
||||
@@ -311,15 +328,18 @@ class SqlFederationService:
|
||||
if actor is None:
|
||||
return []
|
||||
|
||||
result = await session.execute(
|
||||
select(APFollower).where(APFollower.actor_profile_id == actor.id)
|
||||
)
|
||||
q = select(APFollower).where(APFollower.actor_profile_id == actor.id)
|
||||
if app_domain is not None:
|
||||
q = q.where(APFollower.app_domain == app_domain)
|
||||
|
||||
result = await session.execute(q)
|
||||
return [_follower_to_dto(f) for f in result.scalars().all()]
|
||||
|
||||
async def add_follower(
|
||||
self, session: AsyncSession, username: str,
|
||||
follower_acct: str, follower_inbox: str, follower_actor_url: str,
|
||||
follower_public_key: str | None = None,
|
||||
app_domain: str = "federation",
|
||||
) -> APFollowerDTO:
|
||||
actor = (
|
||||
await session.execute(
|
||||
@@ -329,12 +349,13 @@ class SqlFederationService:
|
||||
if actor is None:
|
||||
raise ValueError(f"Actor not found: {username}")
|
||||
|
||||
# Upsert: update if already following, insert if new
|
||||
# Upsert: update if already following this (actor, acct, app_domain)
|
||||
existing = (
|
||||
await session.execute(
|
||||
select(APFollower).where(
|
||||
APFollower.actor_profile_id == actor.id,
|
||||
APFollower.follower_acct == follower_acct,
|
||||
APFollower.app_domain == app_domain,
|
||||
)
|
||||
)
|
||||
).scalar_one_or_none()
|
||||
@@ -352,6 +373,7 @@ class SqlFederationService:
|
||||
follower_inbox=follower_inbox,
|
||||
follower_actor_url=follower_actor_url,
|
||||
follower_public_key=follower_public_key,
|
||||
app_domain=app_domain,
|
||||
)
|
||||
session.add(follower)
|
||||
await session.flush()
|
||||
@@ -359,6 +381,7 @@ class SqlFederationService:
|
||||
|
||||
async def remove_follower(
|
||||
self, session: AsyncSession, username: str, follower_acct: str,
|
||||
app_domain: str = "federation",
|
||||
) -> bool:
|
||||
actor = (
|
||||
await session.execute(
|
||||
@@ -372,10 +395,70 @@ class SqlFederationService:
|
||||
delete(APFollower).where(
|
||||
APFollower.actor_profile_id == actor.id,
|
||||
APFollower.follower_acct == follower_acct,
|
||||
APFollower.app_domain == app_domain,
|
||||
)
|
||||
)
|
||||
return result.rowcount > 0
|
||||
|
||||
async def get_followers_paginated(
|
||||
self, session: AsyncSession, username: str,
|
||||
page: int = 1, per_page: int = 20,
|
||||
) -> tuple[list[RemoteActorDTO], int]:
|
||||
actor = (
|
||||
await session.execute(
|
||||
select(ActorProfile).where(ActorProfile.preferred_username == username)
|
||||
)
|
||||
).scalar_one_or_none()
|
||||
if actor is None:
|
||||
return [], 0
|
||||
|
||||
total = (
|
||||
await session.execute(
|
||||
select(func.count(APFollower.id)).where(
|
||||
APFollower.actor_profile_id == actor.id,
|
||||
)
|
||||
)
|
||||
).scalar() or 0
|
||||
|
||||
offset = (page - 1) * per_page
|
||||
followers = (
|
||||
await session.execute(
|
||||
select(APFollower)
|
||||
.where(APFollower.actor_profile_id == actor.id)
|
||||
.order_by(APFollower.created_at.desc())
|
||||
.limit(per_page)
|
||||
.offset(offset)
|
||||
)
|
||||
).scalars().all()
|
||||
|
||||
results: list[RemoteActorDTO] = []
|
||||
for f in followers:
|
||||
# Try to resolve from cached remote actors first
|
||||
remote = (
|
||||
await session.execute(
|
||||
select(RemoteActor).where(
|
||||
RemoteActor.actor_url == f.follower_actor_url,
|
||||
)
|
||||
)
|
||||
).scalar_one_or_none()
|
||||
if remote:
|
||||
results.append(_remote_actor_to_dto(remote))
|
||||
else:
|
||||
# Synthesise a minimal DTO from follower data
|
||||
from urllib.parse import urlparse
|
||||
domain = urlparse(f.follower_actor_url).netloc
|
||||
results.append(RemoteActorDTO(
|
||||
id=0,
|
||||
actor_url=f.follower_actor_url,
|
||||
inbox_url=f.follower_inbox,
|
||||
preferred_username=f.follower_acct.split("@")[0] if "@" in f.follower_acct else f.follower_acct,
|
||||
domain=domain,
|
||||
display_name=None,
|
||||
summary=None,
|
||||
icon_url=None,
|
||||
))
|
||||
return results, total
|
||||
|
||||
# -- Remote actors --------------------------------------------------------
|
||||
|
||||
async def get_or_fetch_remote_actor(
|
||||
@@ -469,6 +552,92 @@ class SqlFederationService:
|
||||
|
||||
return await self._upsert_remote_actor(session, actor_url, data)
|
||||
|
||||
async def search_actors(
|
||||
self, session: AsyncSession, query: str, page: int = 1, limit: int = 20,
|
||||
) -> tuple[list[RemoteActorDTO], int]:
|
||||
from sqlalchemy import or_
|
||||
|
||||
pattern = f"%{query}%"
|
||||
offset = (page - 1) * limit
|
||||
|
||||
# WebFinger resolve for @user@domain queries (first page only)
|
||||
webfinger_result: RemoteActorDTO | None = None
|
||||
if page == 1 and "@" in query:
|
||||
webfinger_result = await self.search_remote_actor(session, query)
|
||||
|
||||
# Search cached remote actors
|
||||
remote_filter = or_(
|
||||
RemoteActor.preferred_username.ilike(pattern),
|
||||
RemoteActor.display_name.ilike(pattern),
|
||||
RemoteActor.domain.ilike(pattern),
|
||||
)
|
||||
remote_total = (
|
||||
await session.execute(
|
||||
select(func.count(RemoteActor.id)).where(remote_filter)
|
||||
)
|
||||
).scalar() or 0
|
||||
|
||||
# Search local actor profiles
|
||||
local_filter = or_(
|
||||
ActorProfile.preferred_username.ilike(pattern),
|
||||
ActorProfile.display_name.ilike(pattern),
|
||||
)
|
||||
local_total = (
|
||||
await session.execute(
|
||||
select(func.count(ActorProfile.id)).where(local_filter)
|
||||
)
|
||||
).scalar() or 0
|
||||
|
||||
total = remote_total + local_total
|
||||
|
||||
# Fetch remote actors page
|
||||
remote_rows = (
|
||||
await session.execute(
|
||||
select(RemoteActor)
|
||||
.where(remote_filter)
|
||||
.order_by(RemoteActor.preferred_username)
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
)
|
||||
).scalars().all()
|
||||
|
||||
results: list[RemoteActorDTO] = [_remote_actor_to_dto(r) for r in remote_rows]
|
||||
|
||||
# Fill remaining slots with local actors
|
||||
remaining = limit - len(results)
|
||||
local_offset = max(0, offset - remote_total)
|
||||
if remaining > 0 and offset + len(results) >= remote_total:
|
||||
domain = _domain()
|
||||
local_rows = (
|
||||
await session.execute(
|
||||
select(ActorProfile)
|
||||
.where(local_filter)
|
||||
.order_by(ActorProfile.preferred_username)
|
||||
.limit(remaining)
|
||||
.offset(local_offset)
|
||||
)
|
||||
).scalars().all()
|
||||
for lp in local_rows:
|
||||
results.append(RemoteActorDTO(
|
||||
id=0,
|
||||
actor_url=f"https://{domain}/users/{lp.preferred_username}",
|
||||
inbox_url=f"https://{domain}/users/{lp.preferred_username}/inbox",
|
||||
preferred_username=lp.preferred_username,
|
||||
domain=domain,
|
||||
display_name=lp.display_name,
|
||||
summary=lp.summary,
|
||||
icon_url=None,
|
||||
))
|
||||
|
||||
# Prepend WebFinger result (deduped)
|
||||
if webfinger_result:
|
||||
existing_urls = {r.actor_url for r in results}
|
||||
if webfinger_result.actor_url not in existing_urls:
|
||||
results.insert(0, webfinger_result)
|
||||
total += 1
|
||||
|
||||
return results, total
|
||||
|
||||
# -- Following (outbound) -------------------------------------------------
|
||||
|
||||
async def send_follow(
|
||||
@@ -966,6 +1135,46 @@ class SqlFederationService:
|
||||
))
|
||||
return items
|
||||
|
||||
async def get_actor_timeline(
|
||||
self, session: AsyncSession, remote_actor_id: int,
|
||||
before: datetime | None = None, limit: int = 20,
|
||||
) -> list[TimelineItemDTO]:
|
||||
remote_actor = (
|
||||
await session.execute(
|
||||
select(RemoteActor).where(RemoteActor.id == remote_actor_id)
|
||||
)
|
||||
).scalar_one_or_none()
|
||||
if not remote_actor:
|
||||
return []
|
||||
|
||||
q = (
|
||||
select(APRemotePost)
|
||||
.where(APRemotePost.remote_actor_id == remote_actor_id)
|
||||
)
|
||||
if before:
|
||||
q = q.where(APRemotePost.published < before)
|
||||
q = q.order_by(APRemotePost.published.desc()).limit(limit)
|
||||
|
||||
posts = (await session.execute(q)).scalars().all()
|
||||
return [
|
||||
TimelineItemDTO(
|
||||
id=f"remote:{p.id}",
|
||||
post_type="remote",
|
||||
content=p.content or "",
|
||||
published=p.published,
|
||||
actor_name=remote_actor.display_name or remote_actor.preferred_username,
|
||||
actor_username=remote_actor.preferred_username,
|
||||
object_id=p.object_id,
|
||||
summary=p.summary,
|
||||
url=p.url,
|
||||
actor_domain=remote_actor.domain,
|
||||
actor_icon=remote_actor.icon_url,
|
||||
actor_url=remote_actor.actor_url,
|
||||
author_inbox=remote_actor.inbox_url,
|
||||
)
|
||||
for p in posts
|
||||
]
|
||||
|
||||
# -- Local posts ----------------------------------------------------------
|
||||
|
||||
async def create_local_post(
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
"""Inline federation publication — called at write time, not via async handler.
|
||||
|
||||
Replaces the old pattern where emit_event("post.published") → async handler →
|
||||
publish_activity(). Now the originating service calls try_publish() directly,
|
||||
which creates the APActivity in the same DB transaction. AP delivery
|
||||
(federation.activity_created → inbox POST) stays async.
|
||||
The originating service calls try_publish() directly, which creates the
|
||||
APActivity (with process_state='pending') in the same DB transaction.
|
||||
The EventProcessor picks it up and the delivery wildcard handler POSTs
|
||||
to follower inboxes.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -49,20 +49,30 @@ async def try_publish(
|
||||
if existing:
|
||||
if activity_type == "Create" and existing.activity_type != "Delete":
|
||||
return # already published (allow re-Create after Delete/unpublish)
|
||||
if activity_type == "Update" and existing.activity_type == "Update":
|
||||
return # already updated (Ghost fires duplicate webhooks)
|
||||
if activity_type == "Delete" and existing.activity_type == "Delete":
|
||||
return # already deleted
|
||||
elif activity_type in ("Delete", "Update"):
|
||||
return # never published, nothing to delete/update
|
||||
|
||||
# Stable object ID: same source always gets the same object id so
|
||||
# Mastodon treats Create/Update/Delete as the same post.
|
||||
domain = os.getenv("AP_DOMAIN", "rose-ash.com")
|
||||
object_data["id"] = (
|
||||
# Stable object ID within a publish cycle. After Delete + re-Create
|
||||
# we append a version suffix so remote servers (Mastodon) treat it as
|
||||
# a brand-new post rather than ignoring the tombstoned ID.
|
||||
domain = os.getenv("AP_DOMAIN", "federation.rose-ash.com")
|
||||
base_object_id = (
|
||||
f"https://{domain}/users/{actor.preferred_username}"
|
||||
f"/objects/{source_type.lower()}/{source_id}"
|
||||
)
|
||||
if activity_type == "Create" and existing and existing.activity_type == "Delete":
|
||||
# Count prior Creates to derive a version number
|
||||
create_count = await services.federation.count_activities_for_source(
|
||||
session, source_type, source_id, activity_type="Create",
|
||||
)
|
||||
object_data["id"] = f"{base_object_id}/v{create_count + 1}"
|
||||
elif activity_type in ("Update", "Delete") and existing and existing.object_data:
|
||||
# Use the same object ID as the most recent activity
|
||||
object_data["id"] = existing.object_data.get("id", base_object_id)
|
||||
else:
|
||||
object_data["id"] = base_object_id
|
||||
|
||||
try:
|
||||
await services.federation.publish_activity(
|
||||
|
||||
@@ -52,6 +52,23 @@ class SqlMarketService:
|
||||
)
|
||||
return [_mp_to_dto(mp) for mp in result.scalars().all()]
|
||||
|
||||
async def list_marketplaces(
|
||||
self, session: AsyncSession,
|
||||
container_type: str | None = None, container_id: int | None = None,
|
||||
*, page: int = 1, per_page: int = 20,
|
||||
) -> tuple[list[MarketPlaceDTO], bool]:
|
||||
stmt = select(MarketPlace).where(MarketPlace.deleted_at.is_(None))
|
||||
if container_type is not None and container_id is not None:
|
||||
stmt = stmt.where(
|
||||
MarketPlace.container_type == container_type,
|
||||
MarketPlace.container_id == container_id,
|
||||
)
|
||||
stmt = stmt.order_by(MarketPlace.name.asc())
|
||||
stmt = stmt.offset((page - 1) * per_page).limit(per_page + 1)
|
||||
rows = (await session.execute(stmt)).scalars().all()
|
||||
has_more = len(rows) > per_page
|
||||
return [_mp_to_dto(mp) for mp in rows[:per_page]], has_more
|
||||
|
||||
async def product_by_id(self, session: AsyncSession, product_id: int) -> ProductDTO | None:
|
||||
product = (
|
||||
await session.execute(select(Product).where(Product.id == product_id))
|
||||
|
||||
@@ -3,7 +3,7 @@ from __future__ import annotations
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events import emit_event
|
||||
from shared.events import emit_activity
|
||||
from shared.models.container_relation import ContainerRelation
|
||||
|
||||
|
||||
@@ -40,17 +40,19 @@ async def attach_child(
|
||||
if label is not None:
|
||||
existing.label = label
|
||||
await session.flush()
|
||||
await emit_event(
|
||||
await emit_activity(
|
||||
session,
|
||||
event_type="container.child_attached",
|
||||
aggregate_type="container_relation",
|
||||
aggregate_id=existing.id,
|
||||
payload={
|
||||
activity_type="Add",
|
||||
actor_uri="internal:system",
|
||||
object_type="rose:ContainerRelation",
|
||||
object_data={
|
||||
"parent_type": parent_type,
|
||||
"parent_id": parent_id,
|
||||
"child_type": child_type,
|
||||
"child_id": child_id,
|
||||
},
|
||||
source_type="container_relation",
|
||||
source_id=existing.id,
|
||||
)
|
||||
return existing
|
||||
# Already attached and active — no-op
|
||||
@@ -77,17 +79,19 @@ async def attach_child(
|
||||
session.add(rel)
|
||||
await session.flush()
|
||||
|
||||
await emit_event(
|
||||
await emit_activity(
|
||||
session,
|
||||
event_type="container.child_attached",
|
||||
aggregate_type="container_relation",
|
||||
aggregate_id=rel.id,
|
||||
payload={
|
||||
activity_type="Add",
|
||||
actor_uri="internal:system",
|
||||
object_type="rose:ContainerRelation",
|
||||
object_data={
|
||||
"parent_type": parent_type,
|
||||
"parent_id": parent_id,
|
||||
"child_type": child_type,
|
||||
"child_id": child_id,
|
||||
},
|
||||
source_type="container_relation",
|
||||
source_id=rel.id,
|
||||
)
|
||||
|
||||
return rel
|
||||
@@ -139,17 +143,19 @@ async def detach_child(
|
||||
rel.deleted_at = func.now()
|
||||
await session.flush()
|
||||
|
||||
await emit_event(
|
||||
await emit_activity(
|
||||
session,
|
||||
event_type="container.child_detached",
|
||||
aggregate_type="container_relation",
|
||||
aggregate_id=rel.id,
|
||||
payload={
|
||||
activity_type="Remove",
|
||||
actor_uri="internal:system",
|
||||
object_type="rose:ContainerRelation",
|
||||
object_data={
|
||||
"parent_type": parent_type,
|
||||
"parent_id": parent_id,
|
||||
"child_type": child_type,
|
||||
"child_id": child_id,
|
||||
},
|
||||
source_type="container_relation",
|
||||
source_id=rel.id,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@@ -140,6 +140,9 @@ class StubCalendarService:
|
||||
) -> int:
|
||||
return 0
|
||||
|
||||
async def upcoming_entries_for_container(self, session, container_type, container_id, *, page=1, per_page=20):
|
||||
return [], False
|
||||
|
||||
async def entry_ids_for_content(self, session, content_type, content_id):
|
||||
return set()
|
||||
|
||||
@@ -153,6 +156,13 @@ class StubMarketService:
|
||||
) -> list[MarketPlaceDTO]:
|
||||
return []
|
||||
|
||||
async def list_marketplaces(
|
||||
self, session: AsyncSession,
|
||||
container_type: str | None = None, container_id: int | None = None,
|
||||
*, page: int = 1, per_page: int = 20,
|
||||
) -> tuple[list[MarketPlaceDTO], bool]:
|
||||
return [], False
|
||||
|
||||
async def product_by_id(self, session: AsyncSession, product_id: int) -> ProductDTO | None:
|
||||
return None
|
||||
|
||||
@@ -211,20 +221,24 @@ class StubFederationService:
|
||||
async def get_activity(self, session, activity_id):
|
||||
return None
|
||||
|
||||
async def get_outbox(self, session, username, page=1, per_page=20):
|
||||
async def get_outbox(self, session, username, page=1, per_page=20, origin_app=None):
|
||||
return [], 0
|
||||
|
||||
async def get_activity_for_source(self, session, source_type, source_id):
|
||||
return None
|
||||
|
||||
async def get_followers(self, session, username):
|
||||
async def count_activities_for_source(self, session, source_type, source_id, *, activity_type):
|
||||
return 0
|
||||
|
||||
async def get_followers(self, session, username, app_domain=None):
|
||||
return []
|
||||
|
||||
async def add_follower(self, session, username, follower_acct, follower_inbox,
|
||||
follower_actor_url, follower_public_key=None):
|
||||
follower_actor_url, follower_public_key=None,
|
||||
app_domain="federation"):
|
||||
raise RuntimeError("FederationService not available")
|
||||
|
||||
async def remove_follower(self, session, username, follower_acct):
|
||||
async def remove_follower(self, session, username, follower_acct, app_domain="federation"):
|
||||
return False
|
||||
|
||||
async def get_or_fetch_remote_actor(self, session, actor_url):
|
||||
@@ -233,12 +247,18 @@ class StubFederationService:
|
||||
async def search_remote_actor(self, session, acct):
|
||||
return None
|
||||
|
||||
async def search_actors(self, session, query, page=1, limit=20):
|
||||
return [], 0
|
||||
|
||||
async def send_follow(self, session, local_username, remote_actor_url):
|
||||
raise RuntimeError("FederationService not available")
|
||||
|
||||
async def get_following(self, session, username, page=1, per_page=20):
|
||||
return [], 0
|
||||
|
||||
async def get_followers_paginated(self, session, username, page=1, per_page=20):
|
||||
return [], 0
|
||||
|
||||
async def accept_follow_response(self, session, local_username, remote_actor_url):
|
||||
pass
|
||||
|
||||
@@ -260,6 +280,9 @@ class StubFederationService:
|
||||
async def get_public_timeline(self, session, before=None, limit=20):
|
||||
return []
|
||||
|
||||
async def get_actor_timeline(self, session, remote_actor_id, before=None, limit=20):
|
||||
return []
|
||||
|
||||
async def create_local_post(self, session, actor_profile_id, content, visibility="public", in_reply_to=None):
|
||||
raise RuntimeError("FederationService not available")
|
||||
|
||||
|
||||
@@ -48,8 +48,8 @@ class _WidgetRegistry:
|
||||
slug = w.slug
|
||||
|
||||
def _href(s=slug):
|
||||
from shared.infrastructure.urls import coop_url
|
||||
return coop_url(f"/auth/{s}/")
|
||||
from shared.infrastructure.urls import account_url
|
||||
return account_url(f"/{s}/")
|
||||
|
||||
self._account_nav.append(AccountNavLink(
|
||||
label=w.label,
|
||||
|
||||
@@ -145,7 +145,7 @@ async def upgrade_ots_proof(proof_bytes: bytes) -> tuple[bytes, bool]:
|
||||
"""
|
||||
# OpenTimestamps upgrade is done via the `ots` CLI or their calendar API.
|
||||
# For now, return the proof as-is with is_confirmed=False.
|
||||
# TODO: Implement calendar-based upgrade polling.
|
||||
# Calendar-based upgrade polling not yet implemented.
|
||||
return proof_bytes, False
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user