Monorepo: consolidate 7 repos into one
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 1m5s
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 1m5s
Combines shared, blog, market, cart, events, federation, and account into a single repository. Eliminates submodule sync, sibling model copying at build time, and per-app CI orchestration. Changes: - Remove per-app .git, .gitmodules, .gitea, submodule shared/ dirs - Remove stale sibling model copies from each app - Update all 6 Dockerfiles for monorepo build context (root = .) - Add build directives to docker-compose.yml - Add single .gitea/workflows/ci.yml with change detection - Add .dockerignore for monorepo build context - Create __init__.py for federation and account (cross-app imports)
This commit is contained in:
1
shared/infrastructure/__init__.py
Normal file
1
shared/infrastructure/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
454
shared/infrastructure/activitypub.py
Normal file
454
shared/infrastructure/activitypub.py
Normal file
@@ -0,0 +1,454 @@
|
||||
"""Per-app ActivityPub blueprint.
|
||||
|
||||
Factory function ``create_activitypub_blueprint(app_name)`` returns a
|
||||
Blueprint with WebFinger, host-meta, nodeinfo, actor profile, inbox,
|
||||
outbox, and followers endpoints.
|
||||
|
||||
Per-app actors are *virtual projections* of the same ``ActorProfile``.
|
||||
Same keypair, same ``preferred_username`` — the only differences are:
|
||||
- the domain in URLs (e.g. blog.rose-ash.com vs federation.rose-ash.com)
|
||||
- which activities are served in the outbox (filtered by ``origin_app``)
|
||||
- which followers are returned (filtered by ``app_domain``)
|
||||
- Follow requests create ``APFollower(app_domain=app_name)``
|
||||
|
||||
Federation app acts as the aggregate: no origin_app filter, app_domain=NULL.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from quart import Blueprint, request, abort, Response, g
|
||||
from sqlalchemy import select
|
||||
|
||||
from shared.services.registry import services
|
||||
from shared.models.federation import ActorProfile, APInboxItem
|
||||
from shared.browser.app.csrf import csrf_exempt
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
AP_CONTENT_TYPE = "application/activity+json"
|
||||
|
||||
# Apps that serve per-app AP actors
|
||||
AP_APPS = {"blog", "market", "events", "federation"}
|
||||
|
||||
|
||||
def _ap_domain(app_name: str) -> str:
|
||||
"""Return the public domain for this app's AP identity."""
|
||||
env_key = f"AP_DOMAIN_{app_name.upper()}"
|
||||
env_val = os.getenv(env_key)
|
||||
if env_val:
|
||||
return env_val
|
||||
# Default: {app}.rose-ash.com, except federation uses AP_DOMAIN
|
||||
if app_name == "federation":
|
||||
return os.getenv("AP_DOMAIN", "federation.rose-ash.com")
|
||||
return f"{app_name}.rose-ash.com"
|
||||
|
||||
|
||||
def _federation_domain() -> str:
|
||||
"""The aggregate federation domain (for alsoKnownAs links)."""
|
||||
return os.getenv("AP_DOMAIN", "federation.rose-ash.com")
|
||||
|
||||
|
||||
def _is_aggregate(app_name: str) -> bool:
|
||||
"""Federation serves the aggregate actor (no per-app filter)."""
|
||||
return app_name == "federation"
|
||||
|
||||
|
||||
def create_activitypub_blueprint(app_name: str) -> Blueprint:
    """Return a Blueprint with AP endpoints for *app_name*.

    Registers WebFinger, host-meta, nodeinfo, actor profile, inbox,
    outbox, followers, and following routes. All routes close over
    *domain*, *fed_domain*, *aggregate*, *follower_app_domain*, and
    *outbox_origin_app*, which are computed once per blueprint.
    """
    bp = Blueprint("activitypub", __name__)

    domain = _ap_domain(app_name)
    fed_domain = _federation_domain()
    aggregate = _is_aggregate(app_name)
    # For per-app follows, store app_domain; for federation, "federation"
    follower_app_domain: str = app_name
    # For per-app outboxes, filter by origin_app; for federation, show all
    outbox_origin_app: str | None = None if aggregate else app_name

    # ------------------------------------------------------------------
    # Well-known endpoints
    # ------------------------------------------------------------------

    @bp.get("/.well-known/webfinger")
    async def webfinger():
        """RFC 7033 WebFinger: resolve acct:user@domain to the actor URL."""
        resource = request.args.get("resource", "")
        if not resource.startswith("acct:"):
            abort(400, "Invalid resource format")

        # Strip the "acct:" prefix and split into (username, domain)
        parts = resource[5:].split("@")
        if len(parts) != 2:
            abort(400, "Invalid resource format")

        username, res_domain = parts
        if res_domain != domain:
            abort(404, "User not on this server")

        actor = await services.federation.get_actor_by_username(g.s, username)
        if not actor:
            abort(404, "User not found")

        actor_url = f"https://{domain}/users/{username}"
        return Response(
            response=json.dumps({
                "subject": resource,
                "aliases": [actor_url],
                "links": [
                    {
                        "rel": "self",
                        "type": AP_CONTENT_TYPE,
                        "href": actor_url,
                    },
                    {
                        "rel": "http://webfinger.net/rel/profile-page",
                        "type": "text/html",
                        "href": actor_url,
                    },
                ],
            }),
            content_type="application/jrd+json",
        )

    @bp.get("/.well-known/nodeinfo")
    async def nodeinfo_index():
        """NodeInfo discovery document pointing at /nodeinfo/2.0."""
        return Response(
            response=json.dumps({
                "links": [
                    {
                        "rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
                        "href": f"https://{domain}/nodeinfo/2.0",
                    }
                ]
            }),
            content_type="application/json",
        )

    @bp.get("/nodeinfo/2.0")
    async def nodeinfo():
        """NodeInfo 2.0 instance metadata with usage stats."""
        stats = await services.federation.get_stats(g.s)
        return Response(
            response=json.dumps({
                "version": "2.0",
                "software": {
                    "name": "rose-ash",
                    "version": "1.0.0",
                },
                "protocols": ["activitypub"],
                "usage": {
                    "users": {
                        "total": stats.get("actors", 0),
                        # NOTE(review): activeMonth reuses the total actor
                        # count — no per-month activity stat is available here.
                        "activeMonth": stats.get("actors", 0),
                    },
                    "localPosts": stats.get("activities", 0),
                },
                "openRegistrations": False,
                "metadata": {
                    "nodeName": f"Rose Ash ({app_name})",
                    "nodeDescription": f"Rose Ash {app_name} — ActivityPub federation",
                },
            }),
            content_type="application/json",
        )

    @bp.get("/.well-known/host-meta")
    async def host_meta():
        """XRD host-meta pointing lrdd lookups at the WebFinger endpoint."""
        xml = (
            '<?xml version="1.0" encoding="UTF-8"?>\n'
            '<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">\n'
            f' <Link rel="lrdd" type="application/xrd+xml" '
            f'template="https://{domain}/.well-known/webfinger?resource={{uri}}"/>\n'
            '</XRD>'
        )
        return Response(response=xml, content_type="application/xrd+xml")

    # ------------------------------------------------------------------
    # Actor profile
    # ------------------------------------------------------------------

    @bp.get("/users/<username>")
    async def actor_profile(username: str):
        """Serve the actor document (AP JSON) or an HTML profile/redirect."""
        actor = await services.federation.get_actor_by_username(g.s, username)
        if not actor:
            abort(404)

        accept_header = request.headers.get("accept", "")

        # Content negotiation: AP clients get the JSON-LD actor document.
        if "application/activity+json" in accept_header or "application/ld+json" in accept_header:
            actor_url = f"https://{domain}/users/{username}"
            actor_json = {
                "@context": [
                    "https://www.w3.org/ns/activitystreams",
                    "https://w3id.org/security/v1",
                ],
                "type": "Person",
                "id": actor_url,
                "name": actor.display_name or username,
                "preferredUsername": username,
                "summary": actor.summary or "",
                "manuallyApprovesFollowers": False,
                "inbox": f"{actor_url}/inbox",
                "outbox": f"{actor_url}/outbox",
                "followers": f"{actor_url}/followers",
                "following": f"{actor_url}/following",
                "publicKey": {
                    "id": f"{actor_url}#main-key",
                    "owner": actor_url,
                    "publicKeyPem": actor.public_key_pem,
                },
                "url": actor_url,
            }

            if aggregate:
                # Aggregate actor advertises all per-app actors
                also_known = [
                    f"https://{_ap_domain(a)}/users/{username}"
                    for a in AP_APPS if a != "federation"
                ]
                if also_known:
                    actor_json["alsoKnownAs"] = also_known
            else:
                # Per-app actors link back to the aggregate federation actor
                actor_json["alsoKnownAs"] = [
                    f"https://{fed_domain}/users/{username}",
                ]

            return Response(
                response=json.dumps(actor_json),
                content_type=AP_CONTENT_TYPE,
            )

        # HTML: federation renders its own profile; other apps redirect there
        if aggregate:
            from quart import render_template
            activities, total = await services.federation.get_outbox(
                g.s, username, page=1, per_page=20,
            )
            return await render_template(
                "federation/profile.html",
                actor=actor,
                activities=activities,
                total=total,
            )
        from quart import redirect
        return redirect(f"https://{fed_domain}/users/{username}")

    # ------------------------------------------------------------------
    # Inbox
    # ------------------------------------------------------------------

    # NOTE(review): csrf_exempt is applied above bp.post — this is fine if
    # csrf_exempt only marks/returns the same function object; confirm it
    # does not wrap, or the registered handler would miss the exemption.
    @csrf_exempt
    @bp.post("/users/<username>/inbox")
    async def inbox(username: str):
        """Accept an incoming activity: verify (best-effort), store, dispatch."""
        actor = await services.federation.get_actor_by_username(g.s, username)
        if not actor:
            abort(404)

        body = await request.get_json()
        if not body:
            abort(400, "Invalid JSON")

        activity_type = body.get("type", "")
        from_actor_url = body.get("actor", "")

        # Verify HTTP signature (best-effort)
        sig_valid = False
        try:
            from shared.utils.http_signatures import verify_request_signature
            from shared.infrastructure.ap_inbox_handlers import fetch_remote_actor

            req_headers = dict(request.headers)
            sig_header = req_headers.get("Signature", "")

            remote_actor = await fetch_remote_actor(from_actor_url)
            if remote_actor and sig_header:
                pub_key_pem = (remote_actor.get("publicKey") or {}).get("publicKeyPem")
                if pub_key_pem:
                    sig_valid = verify_request_signature(
                        public_key_pem=pub_key_pem,
                        signature_header=sig_header,
                        method="POST",
                        path=f"/users/{username}/inbox",
                        headers=req_headers,
                    )
        except Exception:
            log.debug("Signature verification failed for %s", from_actor_url, exc_info=True)

        # Deliberately permissive: unverified activities are logged but
        # still processed (signature enforcement not yet enabled).
        if not sig_valid:
            log.warning(
                "Unverified inbox POST from %s (%s) on %s — accepting anyway for now",
                from_actor_url, activity_type, domain,
            )

        # Load actor row for DB operations
        actor_row = (
            await g.s.execute(
                select(ActorProfile).where(
                    ActorProfile.preferred_username == username
                )
            )
        ).scalar_one()

        # Store raw inbox item
        item = APInboxItem(
            actor_profile_id=actor_row.id,
            raw_json=body,
            activity_type=activity_type,
            from_actor=from_actor_url,
        )
        g.s.add(item)
        await g.s.flush()

        # Dispatch to shared handlers
        from shared.infrastructure.ap_inbox_handlers import dispatch_inbox_activity
        await dispatch_inbox_activity(
            g.s, actor_row, body, from_actor_url,
            domain=domain,
            app_domain=follower_app_domain,
        )

        # Mark as processed
        item.state = "processed"
        item.processed_at = datetime.now(timezone.utc)
        await g.s.flush()

        return Response(status=202)

    # ------------------------------------------------------------------
    # Outbox
    # ------------------------------------------------------------------

    @bp.get("/users/<username>/outbox")
    async def outbox(username: str):
        """OrderedCollection summary, or an OrderedCollectionPage when ?page= is set."""
        actor = await services.federation.get_actor_by_username(g.s, username)
        if not actor:
            abort(404)

        actor_url = f"https://{domain}/users/{username}"
        page_param = request.args.get("page")

        if not page_param:
            # Summary request: only the total count is needed (per_page=1).
            _, total = await services.federation.get_outbox(
                g.s, username, page=1, per_page=1,
                origin_app=outbox_origin_app,
            )
            return Response(
                response=json.dumps({
                    "@context": "https://www.w3.org/ns/activitystreams",
                    "type": "OrderedCollection",
                    "id": f"{actor_url}/outbox",
                    "totalItems": total,
                    "first": f"{actor_url}/outbox?page=1",
                }),
                content_type=AP_CONTENT_TYPE,
            )

        # NOTE(review): a non-numeric ?page= raises ValueError here (HTTP 500);
        # consider rejecting with 400 instead.
        page_num = int(page_param)
        activities, total = await services.federation.get_outbox(
            g.s, username, page=page_num, per_page=20,
            origin_app=outbox_origin_app,
        )

        items = []
        for a in activities:
            items.append({
                "@context": "https://www.w3.org/ns/activitystreams",
                "type": a.activity_type,
                "id": a.activity_id,
                "actor": actor_url,
                "published": a.published.isoformat() if a.published else None,
                "object": {
                    "type": a.object_type,
                    **(a.object_data or {}),
                },
            })

        return Response(
            response=json.dumps({
                "@context": "https://www.w3.org/ns/activitystreams",
                "type": "OrderedCollectionPage",
                "id": f"{actor_url}/outbox?page={page_num}",
                "partOf": f"{actor_url}/outbox",
                "totalItems": total,
                "orderedItems": items,
            }),
            content_type=AP_CONTENT_TYPE,
        )

    # ------------------------------------------------------------------
    # Followers / following collections
    # ------------------------------------------------------------------

    @bp.get("/users/<username>/followers")
    async def followers(username: str):
        """Followers collection, filtered to this app's app_domain."""
        actor = await services.federation.get_actor_by_username(g.s, username)
        if not actor:
            abort(404)

        collection_id = f"https://{domain}/users/{username}/followers"
        follower_list = await services.federation.get_followers(
            g.s, username, app_domain=follower_app_domain,
        )
        page_param = request.args.get("page")

        if not page_param:
            return Response(
                response=json.dumps({
                    "@context": "https://www.w3.org/ns/activitystreams",
                    "type": "OrderedCollection",
                    "id": collection_id,
                    "totalItems": len(follower_list),
                    "first": f"{collection_id}?page=1",
                }),
                content_type=AP_CONTENT_TYPE,
            )

        # Single-page collection: every follower is returned on page 1.
        return Response(
            response=json.dumps({
                "@context": "https://www.w3.org/ns/activitystreams",
                "type": "OrderedCollectionPage",
                "id": f"{collection_id}?page=1",
                "partOf": collection_id,
                "totalItems": len(follower_list),
                "orderedItems": [f.follower_actor_url for f in follower_list],
            }),
            content_type=AP_CONTENT_TYPE,
        )

    @bp.get("/users/<username>/following")
    async def following(username: str):
        """Following collection (not filtered per-app, unlike followers)."""
        actor = await services.federation.get_actor_by_username(g.s, username)
        if not actor:
            abort(404)

        collection_id = f"https://{domain}/users/{username}/following"
        following_list, total = await services.federation.get_following(g.s, username)
        page_param = request.args.get("page")

        if not page_param:
            return Response(
                response=json.dumps({
                    "@context": "https://www.w3.org/ns/activitystreams",
                    "type": "OrderedCollection",
                    "id": collection_id,
                    "totalItems": total,
                    "first": f"{collection_id}?page=1",
                }),
                content_type=AP_CONTENT_TYPE,
            )

        # Single-page collection: every followed actor on page 1.
        return Response(
            response=json.dumps({
                "@context": "https://www.w3.org/ns/activitystreams",
                "type": "OrderedCollectionPage",
                "id": f"{collection_id}?page=1",
                "partOf": collection_id,
                "totalItems": total,
                "orderedItems": [f.actor_url for f in following_list],
            }),
            content_type=AP_CONTENT_TYPE,
        )

    return bp
|
||||
564
shared/infrastructure/ap_inbox_handlers.py
Normal file
564
shared/infrastructure/ap_inbox_handlers.py
Normal file
@@ -0,0 +1,564 @@
|
||||
"""Reusable AP inbox handlers for all apps.
|
||||
|
||||
Extracted from federation/bp/actors/routes.py so that every app's
|
||||
shared AP blueprint can process Follow, Undo, Accept, Create, etc.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import httpx
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models.federation import (
|
||||
ActorProfile, APInboxItem, APInteraction, APNotification,
|
||||
APRemotePost, APActivity, RemoteActor,
|
||||
)
|
||||
from shared.services.registry import services
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
AP_CONTENT_TYPE = "application/activity+json"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def fetch_remote_actor(actor_url: str) -> dict | None:
    """Retrieve the JSON-LD actor document at *actor_url*.

    Returns the parsed dict on HTTP 200, or ``None`` on any failure
    (network error, non-200 status, bad JSON) — errors are logged.
    """
    try:
        async with httpx.AsyncClient(timeout=10) as http:
            response = await http.get(
                actor_url,
                headers={"Accept": AP_CONTENT_TYPE},
            )
            if response.status_code == 200:
                return response.json()
    except Exception:
        log.exception("Failed to fetch remote actor: %s", actor_url)
    return None
|
||||
|
||||
|
||||
async def send_accept(
    actor: ActorProfile,
    follow_activity: dict,
    follower_inbox: str,
    domain: str,
) -> None:
    """Send an Accept activity back to the follower.

    Builds an Accept wrapping the original *follow_activity*, signs the
    POST with the local actor's private key (HTTP Signatures), and
    delivers it to *follower_inbox*. Delivery failures are logged, not
    raised.
    """
    from shared.utils.http_signatures import sign_request
    from urllib.parse import urlparse

    username = actor.preferred_username
    actor_url = f"https://{domain}/users/{username}"

    # Fresh unique id for the Accept; the original Follow is embedded whole.
    accept_id = f"{actor_url}/activities/{uuid.uuid4()}"
    accept = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "id": accept_id,
        "type": "Accept",
        "actor": actor_url,
        "object": follow_activity,
    }

    body_bytes = json.dumps(accept).encode()
    key_id = f"{actor_url}#main-key"

    # Sign over the exact bytes we will POST, for the target host/path.
    parsed = urlparse(follower_inbox)
    headers = sign_request(
        private_key_pem=actor.private_key_pem,
        key_id=key_id,
        method="POST",
        path=parsed.path,
        host=parsed.netloc,
        body=body_bytes,
    )
    headers["Content-Type"] = AP_CONTENT_TYPE

    log.info("Accept payload → %s: %s", follower_inbox, json.dumps(accept)[:500])

    try:
        async with httpx.AsyncClient(timeout=15) as client:
            resp = await client.post(
                follower_inbox,
                content=body_bytes,
                headers=headers,
            )
            log.info("Accept → %s: %d %s", follower_inbox, resp.status_code, resp.text[:200])
    except Exception:
        log.exception("Failed to send Accept to %s", follower_inbox)
|
||||
|
||||
|
||||
async def backfill_follower(
    session: AsyncSession,
    actor: ActorProfile,
    follower_inbox: str,
    domain: str,
    origin_app: str | None = None,
) -> None:
    """Deliver recent *current* Create activities to a new follower's inbox.

    Skips Creates whose source was later Deleted, and uses the latest
    Update data when available (so the follower sees the current version).

    Fetches up to 40 candidate Creates and delivers at most 20, oldest
    first. *origin_app* restricts the backfill to one app's activities
    (None = all, used by the aggregate federation actor).
    """
    from shared.events.handlers.ap_delivery_handler import (
        _build_activity_json, _deliver_to_inbox,
    )

    # Candidate set: local Creates by this actor that have a source record.
    filters = [
        APActivity.actor_profile_id == actor.id,
        APActivity.is_local == True,  # noqa: E712
        APActivity.activity_type == "Create",
        APActivity.source_type.isnot(None),
        APActivity.source_id.isnot(None),
    ]
    if origin_app is not None:
        filters.append(APActivity.origin_app == origin_app)

    creates = (
        await session.execute(
            select(APActivity).where(*filters)
            .order_by(APActivity.published.desc())
            .limit(40)
        )
    ).scalars().all()

    if not creates:
        return

    # Collect source keys that have been Deleted
    source_keys = {(c.source_type, c.source_id) for c in creates}
    deleted_keys: set[tuple[str | None, int | None]] = set()
    if source_keys:
        deletes = (
            await session.execute(
                select(APActivity.source_type, APActivity.source_id).where(
                    APActivity.actor_profile_id == actor.id,
                    APActivity.activity_type == "Delete",
                    APActivity.is_local == True,  # noqa: E712
                )
            )
        ).all()
        deleted_keys = {(d[0], d[1]) for d in deletes}

    # For sources with Updates, grab the latest Update's object_data.
    # Updates come back newest-first, so the first hit per key wins.
    updated_data: dict[tuple[str | None, int | None], dict] = {}
    if source_keys:
        updates = (
            await session.execute(
                select(APActivity).where(
                    APActivity.actor_profile_id == actor.id,
                    APActivity.activity_type == "Update",
                    APActivity.is_local == True,  # noqa: E712
                ).order_by(APActivity.published.desc())
            )
        ).scalars().all()
        for u in updates:
            key = (u.source_type, u.source_id)
            if key not in updated_data and key in source_keys:
                updated_data[key] = u.object_data or {}

    # Filter to current, non-deleted Creates (limit 20)
    activities = []
    for c in creates:
        key = (c.source_type, c.source_id)
        if key in deleted_keys:
            continue
        # Apply latest Update data if available
        # NOTE(review): this mutates the ORM object's object_data in the
        # session; presumably harmless because nothing commits it — confirm.
        if key in updated_data:
            c.object_data = updated_data[key]
        activities.append(c)
        if len(activities) >= 20:
            break

    if not activities:
        return

    log.info(
        "Backfilling %d posts to %s for @%s",
        len(activities), follower_inbox, actor.preferred_username,
    )

    # Deliver sequentially over one client connection, oldest first so
    # the follower's timeline arrives in chronological order.
    async with httpx.AsyncClient() as client:
        for activity in reversed(activities):  # oldest first
            activity_json = _build_activity_json(activity, actor, domain)
            await _deliver_to_inbox(client, follower_inbox, activity_json, actor, domain)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Inbox activity handlers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def handle_follow(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
    domain: str,
    app_domain: str = "federation",
) -> None:
    """Process a Follow activity: add follower, send Accept, backfill.

    *app_domain* tags the follower row with the app whose actor was
    followed; "federation" (the default) marks an aggregate follow.
    Aborts silently (with a warning) if the remote actor can't be
    fetched or has no inbox.
    """
    remote_actor = await fetch_remote_actor(from_actor_url)
    if not remote_actor:
        log.warning("Could not fetch remote actor for Follow: %s", from_actor_url)
        return

    follower_inbox = remote_actor.get("inbox")
    if not follower_inbox:
        log.warning("Remote actor has no inbox: %s", from_actor_url)
        return

    # Build the canonical user@host account string; fall back to the
    # bare actor URL when the remote profile lacks preferredUsername.
    remote_username = remote_actor.get("preferredUsername", "")
    from urllib.parse import urlparse
    remote_domain = urlparse(from_actor_url).netloc
    follower_acct = f"{remote_username}@{remote_domain}" if remote_username else from_actor_url

    pub_key = (remote_actor.get("publicKey") or {}).get("publicKeyPem")

    await services.federation.add_follower(
        session,
        actor_row.preferred_username,
        follower_acct=follower_acct,
        follower_inbox=follower_inbox,
        follower_actor_url=from_actor_url,
        follower_public_key=pub_key,
        app_domain=app_domain,
    )

    log.info(
        "New follower: %s → @%s (app_domain=%s)",
        follower_acct, actor_row.preferred_username, app_domain,
    )

    # Notification: needs a RemoteActor row; create it via the service
    # layer if this is the first time we see this actor, then re-query.
    ra = (
        await session.execute(
            select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
        )
    ).scalar_one_or_none()
    if not ra:
        ra_dto = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
        if ra_dto:
            ra = (await session.execute(
                select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
            )).scalar_one_or_none()

    if ra:
        notif = APNotification(
            actor_profile_id=actor_row.id,
            notification_type="follow",
            from_remote_actor_id=ra.id,
        )
        session.add(notif)

    # Send Accept
    await send_accept(actor_row, body, follower_inbox, domain)

    # Backfill: deliver recent posts (filtered by origin_app for per-app follows)
    backfill_origin = app_domain if app_domain != "federation" else None
    await backfill_follower(session, actor_row, follower_inbox, domain, origin_app=backfill_origin)
|
||||
|
||||
|
||||
async def handle_undo(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
    app_domain: str = "federation",
) -> None:
    """Process an Undo activity (typically Undo Follow).

    Only Undo-of-Follow is handled: the follower row matching the
    sender's account is removed for this *app_domain*. Other wrapped
    types are logged at debug level and ignored.
    """
    wrapped = body.get("object")
    if not wrapped:
        return

    wrapped_type = wrapped.get("type") if isinstance(wrapped, dict) else None
    if wrapped_type != "Follow":
        log.debug("Undo for %s — not handled", wrapped_type)
        return

    from urllib.parse import urlparse
    host = urlparse(from_actor_url).netloc
    # Re-fetch the remote profile to rebuild the user@host account key.
    profile = await fetch_remote_actor(from_actor_url)
    handle = profile.get("preferredUsername", "") if profile else ""
    follower_acct = f"{handle}@{host}" if handle else from_actor_url

    was_removed = await services.federation.remove_follower(
        session, actor_row.preferred_username, follower_acct,
        app_domain=app_domain,
    )
    if was_removed:
        log.info("Unfollowed: %s → @%s (app_domain=%s)", follower_acct, actor_row.preferred_username, app_domain)
    else:
        log.debug("Undo Follow: follower not found: %s", follower_acct)
|
||||
|
||||
|
||||
async def handle_accept(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Process Accept activity — update outbound follow state.

    Only Accept-of-Follow is acted on; anything else is ignored.
    """
    wrapped = body.get("object")
    if not wrapped:
        return

    is_follow = isinstance(wrapped, dict) and wrapped.get("type") == "Follow"
    if not is_follow:
        return

    await services.federation.accept_follow_response(
        session, actor_row.preferred_username, from_actor_url,
    )
    log.info("Follow accepted by %s for @%s", from_actor_url, actor_row.preferred_username)
|
||||
|
||||
|
||||
async def handle_create(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
    federation_domain: str,
) -> None:
    """Process Create(Note/Article) — ingest remote post.

    After ingesting, scans the object for Mention tags and inReplyTo
    links that target local actors on *federation_domain* and queues
    the corresponding APNotification rows.
    """
    obj = body.get("object")
    if not obj or not isinstance(obj, dict):
        return

    obj_type = obj.get("type", "")
    if obj_type not in ("Note", "Article"):
        log.debug("Create with type %s — skipping", obj_type)
        return

    remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
    if not remote:
        log.warning("Could not resolve remote actor for Create: %s", from_actor_url)
        return

    await services.federation.ingest_remote_post(session, remote.id, body, obj)
    log.info("Ingested %s from %s", obj_type, from_actor_url)

    # Mention notification: notify each local actor mentioned in the tags.
    tags = obj.get("tag", [])
    if isinstance(tags, list):
        for tag in tags:
            if not isinstance(tag, dict):
                continue
            if tag.get("type") != "Mention":
                continue
            href = tag.get("href", "")
            # Only hrefs pointing at our own /users/ namespace are local.
            if f"https://{federation_domain}/users/" in href:
                mentioned_username = href.rsplit("/", 1)[-1]
                mentioned = await services.federation.get_actor_by_username(
                    session, mentioned_username,
                )
                if mentioned:
                    # Link the notification to the ingested remote post row
                    # and the sender's RemoteActor row, when present.
                    rp = (await session.execute(
                        select(APRemotePost).where(
                            APRemotePost.object_id == obj.get("id")
                        )
                    )).scalar_one_or_none()

                    ra = (await session.execute(
                        select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
                    )).scalar_one_or_none()

                    notif = APNotification(
                        actor_profile_id=mentioned.id,
                        notification_type="mention",
                        from_remote_actor_id=ra.id if ra else None,
                        target_remote_post_id=rp.id if rp else None,
                    )
                    session.add(notif)

    # Reply notification: fires when the object replies to a local activity.
    in_reply_to = obj.get("inReplyTo")
    if in_reply_to and f"https://{federation_domain}/users/" in str(in_reply_to):
        local_activity = (await session.execute(
            select(APActivity).where(
                APActivity.activity_id == in_reply_to,
            )
        )).scalar_one_or_none()
        if local_activity:
            ra = (await session.execute(
                select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
            )).scalar_one_or_none()
            rp = (await session.execute(
                select(APRemotePost).where(
                    APRemotePost.object_id == obj.get("id")
                )
            )).scalar_one_or_none()

            notif = APNotification(
                actor_profile_id=local_activity.actor_profile_id,
                notification_type="reply",
                from_remote_actor_id=ra.id if ra else None,
                target_remote_post_id=rp.id if rp else None,
            )
            session.add(notif)
|
||||
|
||||
|
||||
async def handle_update(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Process Update — re-ingest remote post.

    Only Note/Article objects are handled; ingest overwrites the stored
    copy so the local cache reflects the latest version.
    """
    obj = body.get("object")
    if not obj or not isinstance(obj, dict):
        return

    obj_type = obj.get("type", "")
    if obj_type not in ("Note", "Article"):
        return

    remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
    if not remote:
        return

    await services.federation.ingest_remote_post(session, remote.id, body, obj)
    log.info("Updated %s from %s", obj_type, from_actor_url)
|
||||
|
||||
|
||||
async def handle_delete(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Process Delete — remove remote post.

    The object may be a bare id string or an embedded object with an
    ``id``; anything else (or a missing id) is ignored.
    """
    target = body.get("object")
    if isinstance(target, dict):
        object_id = target.get("id", "")
    elif isinstance(target, str):
        object_id = target
    else:
        return

    if not object_id:
        return

    await services.federation.delete_remote_post(session, object_id)
    log.info("Deleted remote post %s from %s", object_id, from_actor_url)
|
||||
|
||||
|
||||
async def handle_like(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Process an incoming Like: record an interaction and queue a notification.

    Likes whose target activity is not stored locally are logged and dropped.
    """
    object_id = body.get("object", "")
    # The liked object may be embedded as a dict; reduce to its id.
    if isinstance(object_id, dict):
        object_id = object_id.get("id", "")
    if not object_id:
        return

    # Ensure the remote actor is known (or fetchable) before recording anything.
    remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
    if not remote:
        return

    # NOTE(review): this re-queries the actor that get_or_fetch_remote_actor
    # presumably just returned — confirm whether `remote` could be used directly.
    ra = (await session.execute(
        select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
    )).scalar_one_or_none()

    target = (await session.execute(
        select(APActivity).where(APActivity.activity_id == object_id)
    )).scalar_one_or_none()
    if target is None:
        log.info("Like from %s for %s (target not found locally)", from_actor_url, object_id)
        return

    liker_id = ra.id if ra else None
    session.add(APInteraction(
        remote_actor_id=liker_id,
        post_type="local",
        post_id=target.id,
        interaction_type="like",
        activity_id=body.get("id"),
    ))
    session.add(APNotification(
        actor_profile_id=target.actor_profile_id,
        notification_type="like",
        from_remote_actor_id=liker_id,
        target_activity_id=target.id,
    ))
    log.info("Like from %s on activity %s", from_actor_url, object_id)
|
||||
|
||||
|
||||
async def handle_announce(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
) -> None:
    """Process an incoming Announce (boost): record interaction and notify.

    Boosts whose target activity is not stored locally are logged and dropped.
    """
    object_id = body.get("object", "")
    # The boosted object may be embedded as a dict; reduce to its id.
    if isinstance(object_id, dict):
        object_id = object_id.get("id", "")
    if not object_id:
        return

    # Ensure the remote actor is known (or fetchable) before recording anything.
    remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url)
    if not remote:
        return

    # NOTE(review): this re-queries the actor that get_or_fetch_remote_actor
    # presumably just returned — confirm whether `remote` could be used directly.
    ra = (await session.execute(
        select(RemoteActor).where(RemoteActor.actor_url == from_actor_url)
    )).scalar_one_or_none()

    target = (await session.execute(
        select(APActivity).where(APActivity.activity_id == object_id)
    )).scalar_one_or_none()
    if target is None:
        log.info("Announce from %s for %s (target not found locally)", from_actor_url, object_id)
        return

    booster_id = ra.id if ra else None
    session.add(APInteraction(
        remote_actor_id=booster_id,
        post_type="local",
        post_id=target.id,
        interaction_type="boost",
        activity_id=body.get("id"),
    ))
    session.add(APNotification(
        actor_profile_id=target.actor_profile_id,
        notification_type="boost",
        from_remote_actor_id=booster_id,
        target_activity_id=target.id,
    ))
    log.info("Announce from %s on activity %s", from_actor_url, object_id)
|
||||
|
||||
|
||||
async def dispatch_inbox_activity(
    session: AsyncSession,
    actor_row: ActorProfile,
    body: dict,
    from_actor_url: str,
    domain: str,
    app_domain: str = "federation",
) -> None:
    """Route an inbox activity to the matching handler by its ``type``.

    Unrecognised activity types are ignored. ``domain`` and ``app_domain``
    are forwarded only to the handlers that need them (Follow/Undo/Create).
    """
    activity_type = body.get("type", "")
    # All handlers share the same leading positional arguments.
    common = (session, actor_row, body, from_actor_url)

    if activity_type == "Follow":
        await handle_follow(*common, domain, app_domain=app_domain)
    elif activity_type == "Undo":
        await handle_undo(*common, app_domain=app_domain)
    elif activity_type == "Accept":
        await handle_accept(*common)
    elif activity_type == "Create":
        await handle_create(*common, domain)
    elif activity_type == "Update":
        await handle_update(*common)
    elif activity_type == "Delete":
        await handle_delete(*common)
    elif activity_type == "Like":
        await handle_like(*common)
    elif activity_type == "Announce":
        await handle_announce(*common)
|
||||
34
shared/infrastructure/cart_identity.py
Normal file
34
shared/infrastructure/cart_identity.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""
|
||||
Cart identity resolution — shared across all apps that need to know
|
||||
who the current cart owner is (user_id or anonymous session_id).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import secrets
|
||||
from typing import TypedDict, Optional
|
||||
|
||||
from quart import g, session as qsession
|
||||
|
||||
|
||||
class CartIdentity(TypedDict):
    """Resolved owner of the current cart.

    Exactly one of the two fields is non-None: ``user_id`` for a
    logged-in user, ``session_id`` for an anonymous browser session.
    """
    # Logged-in user's id, or None for anonymous visitors.
    user_id: Optional[int]
    # Anonymous cart session token, or None when logged in.
    session_id: Optional[str]
|
||||
|
||||
|
||||
def current_cart_identity() -> CartIdentity:
    """
    Resolve the identity that owns the current cart.

    Logged-in users (``g.user`` with a non-None ``id``) are keyed by
    ``user_id``; everyone else gets a persistent anonymous session token
    stored in Quart's session under ``cart_sid``.
    """
    user = getattr(g, "user", None)
    user_id = getattr(user, "id", None) if user is not None else None
    if user_id is not None:
        return {"user_id": user_id, "session_id": None}

    sid = qsession.get("cart_sid")
    if not sid:
        # First anonymous visit — mint a token and persist it in the session.
        sid = secrets.token_hex(16)
        qsession["cart_sid"] = sid
    return {"user_id": None, "session_id": sid}
|
||||
58
shared/infrastructure/context.py
Normal file
58
shared/infrastructure/context.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""
|
||||
Base template context shared by all apps.
|
||||
|
||||
This module no longer imports cart or menu_items services directly.
|
||||
Each app provides its own context_fn that calls this base and adds
|
||||
app-specific variables (cart data, menu_items, etc.).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from quart import request, g, current_app
|
||||
|
||||
from shared.config import config
|
||||
from shared.utils import host_url
|
||||
from shared.browser.app.utils import current_route_relative_path
|
||||
|
||||
|
||||
async def base_context() -> dict:
|
||||
"""
|
||||
Common template variables available in every app.
|
||||
|
||||
Does NOT include cart, calendar_cart_entries, total, calendar_total,
|
||||
or menu_items — those are added by each app's context_fn.
|
||||
"""
|
||||
is_htmx = request.headers.get("HX-Request") == "true"
|
||||
search = request.headers.get("X-Search", "")
|
||||
zap_filter = is_htmx and search == ""
|
||||
|
||||
def base_url():
|
||||
return host_url()
|
||||
|
||||
hx_select = "#main-panel"
|
||||
hx_select_search = (
|
||||
hx_select
|
||||
+ ", #search-mobile, #search-count-mobile, #search-desktop, #search-count-desktop, #menu-items-nav-wrapper"
|
||||
)
|
||||
|
||||
return {
|
||||
"is_htmx": is_htmx,
|
||||
"request": request,
|
||||
"now": datetime.now(),
|
||||
"current_local_href": current_route_relative_path(),
|
||||
"config": config(),
|
||||
"asset_url": current_app.jinja_env.globals.get("asset_url", lambda p: ""),
|
||||
"sort_options": [
|
||||
("az", "A\u2013Z", "order/a-z.svg"),
|
||||
("za", "Z\u2013A", "order/z-a.svg"),
|
||||
("price-asc", "\u00a3 low\u2192high", "order/l-h.svg"),
|
||||
("price-desc", "\u00a3 high\u2192low", "order/h-l.svg"),
|
||||
],
|
||||
"zap_filter": zap_filter,
|
||||
"print": print,
|
||||
"base_url": base_url,
|
||||
"base_title": config()["title"],
|
||||
"hx_select": hx_select,
|
||||
"hx_select_search": hx_select_search,
|
||||
}
|
||||
289
shared/infrastructure/factory.py
Normal file
289
shared/infrastructure/factory.py
Normal file
@@ -0,0 +1,289 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import secrets
|
||||
from pathlib import Path
|
||||
from typing import Callable, Awaitable, Sequence
|
||||
|
||||
from quart import Quart, request, g, redirect, send_from_directory
|
||||
|
||||
from shared.config import init_config, config, pretty
|
||||
from shared.models import KV # ensure shared models imported
|
||||
# Register all app model classes with SQLAlchemy so cross-domain
# relationship() string references resolve correctly.
# NOTE(review): ImportError is deliberately swallowed — presumably so an
# image that ships only a subset of the monorepo's apps still boots; a
# genuinely broken models module would also be silenced here. Confirm.
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models", "account.models"):
    try:
        __import__(_mod)
    except ImportError:
        pass
|
||||
from shared.log_config import configure_logging
|
||||
from shared.events import EventProcessor
|
||||
|
||||
from shared.db.session import register_db
|
||||
from shared.browser.app.middleware import register as register_middleware
|
||||
from shared.browser.app.redis_cacher import register as register_redis
|
||||
from shared.browser.app.csrf import protect
|
||||
from shared.browser.app.errors import errors
|
||||
|
||||
from .jinja_setup import setup_jinja
|
||||
from .user_loader import load_current_user
|
||||
|
||||
|
||||
# Async init of config (runs once at import)
|
||||
asyncio.run(init_config())
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
STATIC_DIR = str(BASE_DIR / "static")
|
||||
TEMPLATE_DIR = str(BASE_DIR / "browser" / "templates")
|
||||
|
||||
|
||||
def create_base_app(
    name: str,
    *,
    context_fn: Callable[[], Awaitable[dict]] | None = None,
    before_request_fns: Sequence[Callable[[], Awaitable[None]]] | None = None,
    domain_services_fn: Callable[[], None] | None = None,
) -> Quart:
    """
    Create a Quart app with shared infrastructure.

    Parameters
    ----------
    name:
        Application name (also used as CACHE_APP_PREFIX).
    context_fn:
        Async function returning a dict for template context.
        Each app provides its own — the cart app queries locally,
        while blog/market apps fetch via internal API.
        If not provided, a minimal default context is used.
    before_request_fns:
        Extra before-request hooks (e.g. cart_loader for the cart app).
    domain_services_fn:
        Callable that registers domain services on the shared registry.
        Each app provides its own — registering real impls for owned
        domains and stubs (or real impls) for others.

    Returns
    -------
    Quart
        A fully wired app: middleware, DB, Redis, Jinja, error pages,
        OAuth/ActivityPub blueprints (where applicable), request hooks,
        context processor, event processor, and favicon route.
    """
    # Domain services must be registered before widgets, which may depend on them.
    if domain_services_fn is not None:
        domain_services_fn()

    from shared.services.widgets import register_all_widgets
    register_all_widgets()

    app = Quart(
        name,
        static_folder=STATIC_DIR,
        static_url_path="/static",
        template_folder=TEMPLATE_DIR,
    )

    configure_logging(name)

    # NOTE(review): fallback secret key is only safe for local dev; deploys
    # must set SECRET_KEY.
    app.secret_key = os.getenv("SECRET_KEY", "dev-secret-key-change-me-777")

    # Per-app first-party session cookie (no shared domain — avoids Safari ITP)
    app.config["SESSION_COOKIE_NAME"] = f"{name}_session"
    app.config["SESSION_COOKIE_SAMESITE"] = "Lax"
    app.config["SESSION_COOKIE_SECURE"] = True

    # Ghost / Redis config
    app.config["GHOST_API_URL"] = os.getenv("GHOST_API_URL")
    app.config["GHOST_PUBLIC_URL"] = os.getenv("GHOST_PUBLIC_URL")
    app.config["GHOST_CONTENT_KEY"] = os.getenv("GHOST_CONTENT_API_KEY")
    app.config["REDIS_URL"] = os.getenv("REDIS_URL")

    # Cache app prefix for key namespacing
    app.config["CACHE_APP_PREFIX"] = name

    # --- infrastructure ---
    register_middleware(app)
    register_db(app)
    register_redis(app)
    setup_jinja(app)
    errors(app)

    # Auto-register OAuth client blueprint for non-account apps
    # (account is the OAuth authorization server)
    if name != "account":
        from shared.infrastructure.oauth import create_oauth_blueprint
        app.register_blueprint(create_oauth_blueprint(name))

    # Auto-register ActivityPub blueprint for AP-enabled apps
    from shared.infrastructure.activitypub import AP_APPS
    if name in AP_APPS:
        from shared.infrastructure.activitypub import create_activitypub_blueprint
        app.register_blueprint(create_activitypub_blueprint(name))

    # --- device id (all apps, including account) ---
    # Stable per-browser identifier; used below to correlate auth state
    # across apps without a shared-domain cookie.
    _did_cookie = f"{name}_did"

    @app.before_request
    async def _init_device_id():
        # Reuse the existing device cookie, or mint a new id and flag it
        # so _set_device_cookie persists it on the way out.
        did = request.cookies.get(_did_cookie)
        if did:
            g.device_id = did
            g._new_device_id = False
        else:
            g.device_id = secrets.token_urlsafe(32)
            g._new_device_id = True

    @app.after_request
    async def _set_device_cookie(response):
        if getattr(g, "_new_device_id", False):
            response.set_cookie(
                _did_cookie, g.device_id,
                max_age=30 * 24 * 3600,  # 30 days
                secure=True, samesite="Lax", httponly=True,
            )
        return response

    # --- before-request hooks ---
    @app.before_request
    async def _route_log():
        # Expose reverse-proxy routing info to later hooks and templates.
        g.root = request.headers.get("x-forwarded-prefix", "/")
        g.scheme = request.scheme
        g.host = request.host

    @app.before_request
    async def _load_user():
        await load_current_user()

    # Register any app-specific before-request hooks (e.g. cart loader)
    if before_request_fns:
        for fn in before_request_fns:
            app.before_request(fn)

    # Auth state check via grant verification + silent OAuth handshake
    if name != "account":
        @app.before_request
        async def _check_auth_state():
            from quart import session as qs
            from urllib.parse import quote as _quote
            # Skip auth machinery for auth endpoints, static assets and
            # machine-to-machine routes.
            if request.path.startswith(("/auth/", "/static/", "/.well-known/", "/users/", "/nodeinfo/", "/internal/")):
                return

            uid = qs.get("uid")
            grant_token = qs.get("grant_token")

            from shared.browser.app.redis_cacher import get_redis
            redis = get_redis()

            # Case 1: logged in — verify grant still valid (direct DB, cached)
            if uid and grant_token:
                cache_key = f"grant:{grant_token}"
                if redis:
                    # Quick check: if did_auth was cleared (logout), skip cache
                    device_id = g.device_id
                    did_auth_present = await redis.get(f"did_auth:{device_id}") if device_id else True
                    cached = await redis.get(cache_key)
                    if cached == b"ok" and did_auth_present:
                        return
                    if cached == b"revoked":
                        # Grant revoked — drop the local login state.
                        qs.pop("uid", None)
                        qs.pop("grant_token", None)
                        qs.pop("cart_sid", None)
                        return

                from sqlalchemy import select
                from shared.db.session import get_session
                from shared.models.oauth_grant import OAuthGrant
                try:
                    async with get_session() as s:
                        grant = await s.scalar(
                            select(OAuthGrant).where(OAuthGrant.token == grant_token)
                        )
                    valid = grant is not None and grant.revoked_at is None
                except Exception:
                    return  # DB error — don't log user out

                if redis:
                    # Cache the verdict for 60s to avoid a DB hit per request.
                    await redis.set(cache_key, b"ok" if valid else b"revoked", ex=60)
                if not valid:
                    qs.pop("uid", None)
                    qs.pop("grant_token", None)
                    qs.pop("cart_sid", None)
                return

            # Case 2: not logged in — prompt=none OAuth (GET, non-HTMX only)
            if not uid and request.method == "GET":
                if request.headers.get("HX-Request"):
                    return
                import time as _time
                now = _time.time()
                # NOTE(review): "_pnone_at" is read here but set elsewhere
                # (presumably by the OAuth callback) — confirm it is a float
                # timestamp when present.
                pnone_at = qs.get("_pnone_at")
                device_id = g.device_id

                # Check if account signalled a login after we cached "not logged in"
                # (blog_did == account_did — same value set during OAuth callback)
                if device_id and redis and pnone_at:
                    auth_ts = await redis.get(f"did_auth:{device_id}")
                    if auth_ts:
                        try:
                            if float(auth_ts) > pnone_at:
                                qs.pop("_pnone_at", None)
                                return redirect(f"/auth/login?prompt=none&next={_quote(request.url, safe='')}")
                        except (ValueError, TypeError):
                            pass

                # Back off for 5 minutes after a failed silent attempt.
                if pnone_at and (now - pnone_at) < 300:
                    return
                if device_id and redis:
                    cached = await redis.get(f"prompt:{name}:{device_id}")
                    if cached == b"none":
                        return
                return redirect(f"/auth/login?prompt=none&next={_quote(request.url, safe='')}")

    @app.before_request
    async def _csrf_protect():
        await protect()

    # --- after-request hooks ---
    # Clear old shared-domain session cookie (migration from .rose-ash.com)
    @app.after_request
    async def _clear_old_shared_cookie(response):
        if request.cookies.get("blog_session"):
            response.delete_cookie("blog_session", domain=".rose-ash.com", path="/")
        return response

    @app.after_request
    async def _add_hx_preserve_search_header(response):
        # Echo the search term back so HTMX swaps keep the search box state.
        value = request.headers.get("X-Search")
        if value is not None:
            response.headers["HX-Preserve-Search"] = value
        return response

    # --- context processor ---
    if context_fn is not None:
        @app.context_processor
        async def _inject_base():
            return await context_fn()
    else:
        # Minimal fallback (no cart, no menu_items)
        from .context import base_context

        @app.context_processor
        async def _inject_base():
            return await base_context()

    # --- event processor ---
    _event_processor = EventProcessor(app_name=name)

    # --- startup ---
    @app.before_serving
    async def _startup():
        from shared.events.handlers import register_shared_handlers
        register_shared_handlers()
        await init_config()
        print(pretty())
        await _event_processor.start()

    @app.after_serving
    async def _stop_event_processor():
        await _event_processor.stop()

    # --- favicon ---
    @app.get("/favicon.ico")
    async def favicon():
        return await send_from_directory("static", "favicon.ico")

    return app
|
||||
193
shared/infrastructure/fragments.py
Normal file
193
shared/infrastructure/fragments.py
Normal file
@@ -0,0 +1,193 @@
|
||||
"""
|
||||
Server-side fragment composition client.
|
||||
|
||||
Each coop app exposes HTML fragments at ``/internal/fragments/{type}``.
|
||||
This module provides helpers to fetch and cache those fragments so that
|
||||
consuming apps can compose cross-app UI without shared templates.
|
||||
|
||||
Failures raise ``FragmentError`` by default so broken fragments are
|
||||
immediately visible rather than silently missing from the page.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
from typing import Sequence
|
||||
|
||||
import httpx
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Re-usable async client (created lazily, one per process)
|
||||
_client: httpx.AsyncClient | None = None
|
||||
|
||||
# Default request timeout (seconds)
|
||||
_DEFAULT_TIMEOUT = 2.0
|
||||
|
||||
# Header sent on every fragment request so providers can distinguish
|
||||
# fragment fetches from normal browser traffic.
|
||||
FRAGMENT_HEADER = "X-Fragment-Request"
|
||||
|
||||
|
||||
class FragmentError(Exception):
    """Raised when a fragment fetch fails (network error or non-200 response)."""
|
||||
|
||||
|
||||
def _get_client() -> httpx.AsyncClient:
    """Return the process-wide async HTTP client, (re)creating it as needed."""
    global _client
    if _client is not None and not _client.is_closed:
        return _client
    # Lazily build one shared client so connections are pooled per process.
    _client = httpx.AsyncClient(
        timeout=httpx.Timeout(_DEFAULT_TIMEOUT),
        follow_redirects=False,
    )
    return _client
|
||||
|
||||
|
||||
def _internal_url(app_name: str) -> str:
|
||||
"""Resolve the Docker-internal base URL for *app_name*.
|
||||
|
||||
Looks up ``INTERNAL_URL_{APP}`` first, falls back to
|
||||
``http://{app}:8000``.
|
||||
"""
|
||||
env_key = f"INTERNAL_URL_{app_name.upper()}"
|
||||
return os.getenv(env_key, f"http://{app_name}:8000").rstrip("/")
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Public API
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _is_fragment_request() -> bool:
|
||||
"""True when the current request is itself a fragment fetch."""
|
||||
try:
|
||||
from quart import request as _req
|
||||
return bool(_req.headers.get(FRAGMENT_HEADER))
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
async def fetch_fragment(
    app_name: str,
    fragment_type: str,
    *,
    params: dict | None = None,
    timeout: float = _DEFAULT_TIMEOUT,
    required: bool = True,
) -> str:
    """Fetch an HTML fragment from another app.

    Returns the raw HTML string. With *required* True (default), network
    errors and non-200 responses raise ``FragmentError``; with *required*
    False they are logged as warnings and ``""`` is returned.

    Always returns ``""`` when the current request is itself a fragment
    fetch, breaking circular dependencies between apps.
    """
    if _is_fragment_request():
        return ""

    endpoint = f"{_internal_url(app_name)}/internal/fragments/{fragment_type}"
    try:
        response = await _get_client().get(
            endpoint,
            params=params,
            headers={FRAGMENT_HEADER: "1"},
            timeout=timeout,
        )
        if response.status_code == 200:
            return response.text
        failure = f"Fragment {app_name}/{fragment_type} returned {response.status_code}"
        if not required:
            log.warning(failure)
            return ""
        log.error(failure)
        raise FragmentError(failure)
    except FragmentError:
        raise
    except Exception as exc:
        failure = f"Fragment {app_name}/{fragment_type} failed: {exc}"
        if not required:
            log.warning(failure)
            return ""
        log.error(failure)
        raise FragmentError(failure) from exc
|
||||
|
||||
|
||||
async def fetch_fragments(
    requests: Sequence[tuple[str, str, dict | None]],
    *,
    timeout: float = _DEFAULT_TIMEOUT,
    required: bool = True,
) -> list[str]:
    """Fetch multiple fragments concurrently.

    *requests* is a sequence of ``(app_name, fragment_type, params)``
    tuples; the returned HTML strings preserve that order. When
    *required* is True, a single failure raises ``FragmentError``.
    """
    coros = [
        fetch_fragment(app, ftype, params=params, timeout=timeout, required=required)
        for app, ftype, params in requests
    ]
    return list(await asyncio.gather(*coros))
|
||||
|
||||
|
||||
async def fetch_fragment_cached(
    app_name: str,
    fragment_type: str,
    *,
    params: dict | None = None,
    ttl: int = 30,
    timeout: float = _DEFAULT_TIMEOUT,
    required: bool = True,
) -> str:
    """Fetch a fragment with a Redis cache layer in front.

    Cache key: ``frag:{app}:{type}:{sorted_params}``. Cache failures are
    swallowed — a broken Redis degrades to a plain fetch.
    """
    # Stable key: params are sorted so dict ordering doesn't fragment the cache.
    key = f"frag:{app_name}:{fragment_type}"
    if params:
        key += ":" + "&".join(f"{k}={v}" for k, v in sorted(params.items()))

    redis = _get_redis()
    if redis:
        try:
            hit = await redis.get(key)
            if hit is not None:
                return hit.decode() if isinstance(hit, bytes) else hit
        except Exception:
            pass

    # Cache miss — fetch from provider
    html = await fetch_fragment(
        app_name, fragment_type, params=params, timeout=timeout, required=required,
    )

    # Store in cache (even empty string — avoids hammering a down service)
    if redis and ttl > 0:
        try:
            await redis.set(key, html.encode(), ex=ttl)
        except Exception:
            pass

    return html
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_redis():
|
||||
"""Return the current app's Redis connection, or None."""
|
||||
try:
|
||||
from quart import current_app
|
||||
r = current_app.redis
|
||||
return r if r else None
|
||||
except Exception:
|
||||
return None
|
||||
49
shared/infrastructure/http_utils.py
Normal file
49
shared/infrastructure/http_utils.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""
|
||||
HTTP utility helpers shared across apps.
|
||||
|
||||
Extracted from browse/services/services.py so order/orders blueprints
|
||||
(which live in the cart app) don't need to import from the browse blueprint.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from quart import g, request
|
||||
from shared.utils import host_url
|
||||
|
||||
|
||||
def vary(resp):
    """
    Ensure ``HX-Request`` and ``X-Origin`` appear in the response's Vary
    header so caches distinguish HTMX swaps from full-page requests.
    Returns *resp* for chaining.
    """
    existing = [p.strip() for p in resp.headers.get("Vary", "").split(",") if p.strip()]
    missing = [h for h in ("HX-Request", "X-Origin") if h not in existing]
    combined = existing + missing
    if combined:
        resp.headers["Vary"] = ", ".join(combined)
    return resp
||||
|
||||
|
||||
def current_url_without_page():
    """
    Return the current URL with the ``page`` query-string parameter removed.
    Used for Hx-Push-Url headers on paginated routes.

    The path is made relative to the app's mount prefix (``g.root``) and
    re-absolutised via ``host_url`` before the query string is re-attached.
    """
    # Fix: removed a leftover no-op expression statement
    # `(request.script_root or "").rstrip("/")` whose result was discarded.
    # NOTE(review): g.root defaults to "/" (from x-forwarded-prefix), making
    # root2 "//" so the startswith branch rarely fires — confirm whether the
    # prefix header is expected without a leading slash.
    root2 = "/" + g.root
    path_only = request.path

    if root2 and path_only.startswith(root2):
        rel = path_only[len(root2):]
        rel = rel if rel.startswith("/") else "/" + rel
    else:
        rel = path_only
    base = host_url(rel)

    params = request.args.to_dict(flat=False)
    params.pop("page", None)  # strip pagination; keep every other filter
    qs = urlencode(params, doseq=True)
    return f"{base}?{qs}" if qs else base
|
||||
120
shared/infrastructure/jinja_setup.py
Normal file
120
shared/infrastructure/jinja_setup.py
Normal file
@@ -0,0 +1,120 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
from quart import Quart, g, url_for
|
||||
|
||||
from shared.config import config
|
||||
from shared.utils import host_url
|
||||
|
||||
from shared.browser.app.csrf import generate_csrf_token
|
||||
from shared.browser.app.authz import has_access
|
||||
from shared.browser.app.filters import register as register_filters
|
||||
|
||||
from .urls import blog_url, market_url, cart_url, events_url, federation_url, account_url, login_url, page_cart_url, market_product_url
|
||||
|
||||
|
||||
def setup_jinja(app: Quart) -> None:
    """Configure the app's Jinja environment: globals, style constants,
    asset URL helper, cross-app URL helpers, widgets and filters."""
    app.jinja_env.add_extension("jinja2.ext.do")

    # --- template globals ---
    app.add_template_global(generate_csrf_token, "csrf_token")
    app.add_template_global(has_access, "has_access")

    # Per-request nesting counter for templates (stored on quart's g).
    def level():
        if not hasattr(g, "_level_counter"):
            g._level_counter = 0
        return g._level_counter

    def level_up():
        if not hasattr(g, "_level_counter"):
            g._level_counter = 0
        g._level_counter += 1
        return ""  # empty string so it can be called inline in templates

    app.jinja_env.globals["level"] = level
    app.jinja_env.globals["level_up"] = level_up
    app.jinja_env.globals["menu_colour"] = "sky"
    app.jinja_env.globals["app_name"] = app.name

    # Tailwind class bundles shared by templates.
    select_colours = """
    [.hover-capable_&]:hover:bg-yellow-300
    aria-selected:bg-stone-500 aria-selected:text-white
    [.hover-capable_&[aria-selected=true]:hover]:bg-orange-500"""
    app.jinja_env.globals["select_colours"] = select_colours

    nav_button = f"""justify-center cursor-pointer flex flex-row items-center gap-2 rounded bg-stone-200 text-black
    {select_colours}"""

    styles = {
        "pill": """
        inline-flex items-center px-3 py-1 rounded-full bg-stone-200 text-stone-700 text-sm
        hover:bg-stone-300 hover:text-stone-900
        focus:outline-none focus-visible:ring-2 focus-visible:ring-stone-400
        """,
        "tr": "odd:bg-slate-50 even:bg-white hover:bg-slate-100",
        "action_button": "px-2 py-1 border rounded text-sm bg-sky-300 hover:bg-sky-400 flex gap-1 items-center",
        "pre_action_button": "px-2 py-1 border rounded text-sm bg-green-200 hover:bg-green-300",
        "cancel_button": "px-3 py-1.5 rounded-full text-sm border border-stone-300 text-stone-700 hover:bg-stone-100",
        "list_container": "border border-stone-200 rounded-lg p-3 mb-3 bg-white space-y-3 bg-yellow-200",
        "nav_button": f"{nav_button} p-3",
        "nav_button_less_pad": f"{nav_button} p-2",
    }
    app.jinja_env.globals["styles"] = styles

    # Absolute static-asset URL with a content-hash cache-buster.
    def _asset_url(path: str) -> str:
        def squash_double_slashes(url: str) -> str:
            # Collapse duplicate slashes without touching the scheme's "//".
            m = re.match(r"(?:[A-Za-z][\w+.-]*:)?//", url)
            prefix = m.group(0) if m else ""
            rest = re.sub(r"/+", "/", url[len(prefix):])
            return prefix + rest

        file_path = Path("static") / path
        try:
            digest = hashlib.md5(file_path.read_bytes()).hexdigest()[:8]
        except Exception:
            # Missing/unreadable file — stable "dev" marker instead of a hash.
            digest = "dev"
        return squash_double_slashes(
            f"{g.scheme}://{g.host}{g.root}/{url_for('static', filename=path, v=digest)}"
        )

    app.jinja_env.globals["asset_url"] = _asset_url

    def site():
        return {
            "url": host_url(),
            "logo": _asset_url("img/logo.jpg"),
            "default_image": _asset_url("img/logo.jpg"),
            "title": config()["title"],
        }

    app.jinja_env.globals["site"] = site

    # cross-app URL helpers available in all templates
    app.jinja_env.globals["blog_url"] = blog_url
    app.jinja_env.globals["market_url"] = market_url
    app.jinja_env.globals["cart_url"] = cart_url
    app.jinja_env.globals["events_url"] = events_url
    app.jinja_env.globals["federation_url"] = federation_url
    app.jinja_env.globals["account_url"] = account_url
    app.jinja_env.globals["login_url"] = login_url
    app.jinja_env.globals["page_cart_url"] = page_cart_url
    app.jinja_env.globals["market_product_url"] = market_product_url

    # widget registry available in all templates
    from shared.services.widget_registry import widgets as _widget_registry
    app.jinja_env.globals["widgets"] = _widget_registry

    # fragment composition helper — fetch HTML from another app's fragment API
    from shared.infrastructure.fragments import fetch_fragment_cached

    async def _fragment(app_name: str, fragment_type: str, ttl: int = 30, **params) -> str:
        p = params if params else None
        return await fetch_fragment_cached(app_name, fragment_type, params=p, ttl=ttl)

    app.jinja_env.globals["fragment"] = _fragment

    # register jinja filters
    register_filters(app)
||||
183
shared/infrastructure/oauth.py
Normal file
183
shared/infrastructure/oauth.py
Normal file
@@ -0,0 +1,183 @@
|
||||
"""OAuth2 client blueprint for non-account apps.
|
||||
|
||||
Each client app gets /auth/login, /auth/callback, /auth/logout.
|
||||
Account is the OAuth authorization server.
|
||||
|
||||
Device cookie ({app}_did) ties the browser to its auth state so
|
||||
client apps can detect login/logout without cross-domain cookies.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import secrets
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from quart import (
|
||||
Blueprint,
|
||||
redirect,
|
||||
request,
|
||||
session as qsession,
|
||||
g,
|
||||
current_app,
|
||||
make_response,
|
||||
)
|
||||
from sqlalchemy import select
|
||||
|
||||
from shared.db.session import get_session
|
||||
from shared.models.oauth_code import OAuthCode
|
||||
from shared.infrastructure.urls import account_url, app_url
|
||||
from shared.infrastructure.cart_identity import current_cart_identity
|
||||
from shared.events import emit_activity
|
||||
|
||||
SESSION_USER_KEY = "uid"
|
||||
GRANT_TOKEN_KEY = "grant_token"
|
||||
|
||||
|
||||
def create_oauth_blueprint(app_name: str) -> Blueprint:
    """Return an OAuth client blueprint for *app_name*.

    Mounts /auth/login, /auth/callback, /auth/clear, and /auth/logout.
    Account is the authorization server; this blueprint is the client half
    of the authorization-code flow: state generation/check, one-time code
    redemption under a row lock, and local session establishment.
    """
    bp = Blueprint("oauth_auth", __name__, url_prefix="/auth")

    @bp.get("/login")
    @bp.get("/login/")
    async def login():
        # Begin the flow: stash CSRF state + post-login target in the
        # session, then redirect the browser to account's authorize URL.
        # NOTE(review): `next` is taken from the query string unvalidated
        # and redirected to after the callback — looks like an open
        # redirect unless absolute cross-app targets are intentional;
        # confirm and consider restricting to relative paths.
        next_url = request.args.get("next", "/")
        prompt = request.args.get("prompt", "")
        state = secrets.token_urlsafe(32)  # CSRF token, round-tripped via session
        qsession["oauth_state"] = state
        qsession["oauth_next"] = next_url

        # g.device_id is set by the app factory's request hooks (not shown here).
        device_id = g.device_id
        redirect_uri = app_url(app_name, "/auth/callback")
        # NOTE(review): query values are interpolated without URL-encoding;
        # redirect_uri contains "://" and "/" literally. Works if account
        # parses leniently, but quoting with urllib.parse would be safer.
        params = (
            f"?client_id={app_name}"
            f"&redirect_uri={redirect_uri}"
            f"&device_id={device_id}"
            f"&state={state}"
        )
        if prompt:
            params += f"&prompt={prompt}"
        authorize_url = account_url(f"/auth/oauth/authorize{params}")
        return redirect(authorize_url)

    @bp.get("/callback")
    @bp.get("/callback/")
    async def callback():
        # Adopt account's device id as our own — one identity across all apps
        account_did = request.args.get("account_did", "")
        if account_did:
            qsession["_account_did"] = account_did
            # Overwrite this app's device cookie with account's device id
            g.device_id = account_did
            g._new_device_id = True  # factory after_request will set the cookie

        # Handle prompt=none error (user not logged in on account)
        error = request.args.get("error")
        if error == "login_required":
            next_url = qsession.pop("oauth_next", "/")
            qsession.pop("oauth_state", None)
            import time as _time
            # Remember when the silent-auth probe failed so the app can
            # throttle further prompt=none attempts for this session.
            qsession["_pnone_at"] = _time.time()
            device_id = g.device_id
            if device_id:
                from shared.browser.app.redis_cacher import get_redis
                _redis = get_redis()
                if _redis:
                    # Best-effort: record the negative result for 5 minutes,
                    # keyed per app + device.
                    await _redis.set(
                        f"prompt:{app_name}:{device_id}", b"none", ex=300
                    )
            return redirect(next_url)

        code = request.args.get("code")
        state = request.args.get("state")
        # Both values are popped so a replayed callback cannot reuse them.
        expected_state = qsession.pop("oauth_state", None)
        next_url = qsession.pop("oauth_next", "/")

        if not code or not state or state != expected_state:
            current_app.logger.warning("OAuth callback: bad state or missing code")
            return redirect("/")

        expected_redirect = app_url(app_name, "/auth/callback")
        now = datetime.now(timezone.utc)

        # Redeem the one-time code. SELECT ... FOR UPDATE serializes
        # concurrent redemptions of the same code; marking used_at inside
        # the same transaction makes redemption atomic.
        async with get_session() as s:
            async with s.begin():
                result = await s.execute(
                    select(OAuthCode)
                    .where(OAuthCode.code == code)
                    .with_for_update()
                )
                oauth_code = result.scalar_one_or_none()

                if not oauth_code:
                    current_app.logger.warning("OAuth callback: code not found")
                    return redirect("/")

                if oauth_code.used_at is not None:
                    current_app.logger.warning("OAuth callback: code already used")
                    return redirect("/")

                if oauth_code.expires_at < now:
                    current_app.logger.warning("OAuth callback: code expired")
                    return redirect("/")

                if oauth_code.client_id != app_name:
                    current_app.logger.warning("OAuth callback: client_id mismatch")
                    return redirect("/")

                if oauth_code.redirect_uri != expected_redirect:
                    current_app.logger.warning("OAuth callback: redirect_uri mismatch")
                    return redirect("/")

                oauth_code.used_at = now
                user_id = oauth_code.user_id
                grant_token = oauth_code.grant_token

        # Set local session with grant token for revocation checking
        qsession[SESSION_USER_KEY] = user_id
        if grant_token:
            qsession[GRANT_TOKEN_KEY] = grant_token
        qsession.pop("_pnone_at", None)

        # Emit login activity for cart adoption
        ident = current_cart_identity()
        anon_session_id = ident.get("session_id")
        if anon_session_id:
            try:
                async with get_session() as s:
                    async with s.begin():
                        await emit_activity(
                            s,
                            activity_type="rose:Login",
                            actor_uri="internal:system",
                            object_type="Person",
                            object_data={
                                "user_id": user_id,
                                "session_id": anon_session_id,
                            },
                        )
            except Exception:
                # Best-effort event: a failed emit must not break login.
                current_app.logger.exception("OAuth: failed to emit login activity")

        # 303 forces a GET on the target even though we arrived via GET+redirects.
        return redirect(next_url, 303)

    @bp.get("/clear")
    @bp.get("/clear/")
    async def clear():
        """One-time migration helper: clear all session cookies."""
        qsession.clear()
        resp = await make_response(redirect("/"))
        resp.delete_cookie("blog_session", domain=".rose-ash.com", path="/")
        resp.delete_cookie(f"{app_name}_did", path="/")
        return resp

    @bp.post("/logout")
    @bp.post("/logout/")
    async def logout():
        # Drop every auth-related session key locally, then hand off to
        # account so grants are revoked and the account session is cleared.
        qsession.pop(SESSION_USER_KEY, None)
        qsession.pop(GRANT_TOKEN_KEY, None)
        qsession.pop("cart_sid", None)
        qsession.pop("_pnone_at", None)
        qsession.pop("_account_did", None)
        # Redirect through account to revoke grants + clear account session
        return redirect(account_url("/auth/sso-logout/"))

    return bp
|
||||
97
shared/infrastructure/urls.py
Normal file
97
shared/infrastructure/urls.py
Normal file
@@ -0,0 +1,97 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from urllib.parse import quote
|
||||
|
||||
from shared.config import config
|
||||
|
||||
|
||||
def _get_app_url(app_name: str) -> str:
|
||||
env_key = f"APP_URL_{app_name.upper()}"
|
||||
env_val = os.getenv(env_key)
|
||||
if env_val:
|
||||
return env_val.rstrip("/")
|
||||
return config()["app_urls"][app_name].rstrip("/")
|
||||
|
||||
|
||||
def app_url(app_name: str, path: str = "/") -> str:
    """Absolute URL for *path* on *app_name*'s configured base URL."""
    suffix = path if path.startswith("/") else "/" + path
    return _get_app_url(app_name) + suffix
||||
|
||||
|
||||
def blog_url(path: str = "/") -> str:
    """Absolute URL for *path* on the blog app."""
    return app_url("blog", path)
|
||||
|
||||
|
||||
def market_url(path: str = "/") -> str:
    """Absolute URL for *path* on the market app."""
    return app_url("market", path)
|
||||
|
||||
|
||||
def cart_url(path: str = "/") -> str:
    """Absolute URL for *path* on the cart app."""
    return app_url("cart", path)
|
||||
|
||||
|
||||
def events_url(path: str = "/") -> str:
    """Absolute URL for *path* on the events app."""
    return app_url("events", path)
|
||||
|
||||
|
||||
def federation_url(path: str = "/") -> str:
    """Absolute URL for *path* on the federation app."""
    return app_url("federation", path)
|
||||
|
||||
|
||||
def account_url(path: str = "/") -> str:
    """Absolute URL for *path* on the account app (the OAuth server)."""
    return app_url("account", path)
|
||||
|
||||
|
||||
def artdag_url(path: str = "/") -> str:
    """Absolute URL for *path* on the artdag app."""
    return app_url("artdag", path)
|
||||
|
||||
|
||||
def page_cart_url(page_slug: str, path: str = "/") -> str:
    """Cart-app URL for *path*, scoped under the page *page_slug*."""
    tail = path if path.startswith("/") else "/" + path
    return cart_url(f"/{page_slug}{tail}")
|
||||
|
||||
|
||||
def market_product_url(product_slug: str, suffix: str = "", market_place=None) -> str:
    """Build a market product URL with the correct page/market prefix.

    Resolves the prefix from:
    - market app context: g.post_slug + g.market_slug
    - cart app context: g.page_slug + market_place.slug

    Without both a page slug and a market slug the URL is unprefixed.
    """
    from quart import g

    # Market app sets post_slug; cart app sets page_slug — first match wins.
    page_slug = getattr(g, "post_slug", None) or getattr(g, "page_slug", None)

    market_slug = getattr(g, "market_slug", None)
    if not market_slug and market_place:
        market_slug = getattr(market_place, "slug", None)

    if page_slug and market_slug:
        prefix = f"/{page_slug}/{market_slug}"
    else:
        prefix = ""
    tail = f"/{suffix}" if suffix else "/"
    return market_url(f"{prefix}/product/{product_slug}{tail}")
|
||||
|
||||
|
||||
def login_url(next_url: str = "") -> str:
    """Login URL appropriate for the current app.

    On account, /auth/login/ is the magic-link login page itself (account
    is the OAuth server) and also carries cart_sid from the session; on
    every other app, /auth/login/ starts the OAuth redirect to account.
    """
    from quart import current_app

    base = "/auth/login/"

    # Account handles login directly (magic link flow — it's the OAuth server)
    if current_app.name == "account":
        from quart import session as qsession

        query_parts: list[str] = []
        if next_url:
            query_parts.append(f"next={quote(next_url, safe='')}")
        cart_sid = qsession.get("cart_sid")
        if cart_sid:
            query_parts.append(f"cart_sid={quote(cart_sid, safe='')}")
        if not query_parts:
            return base
        return f"{base}?{'&'.join(query_parts)}"

    # Client apps: local /auth/login triggers OAuth redirect to account
    if not next_url:
        return base
    return f"{base}?next={quote(next_url, safe='')}"
|
||||
35
shared/infrastructure/user_loader.py
Normal file
35
shared/infrastructure/user_loader.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import session as qsession, g
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from shared.models.user import User
|
||||
from shared.models.ghost_membership_entities import UserNewsletter
|
||||
|
||||
|
||||
async def load_user_by_id(session, user_id: int):
    """Load a user by ID with labels and newsletters eagerly loaded.

    Returns the ``User`` or ``None`` when no row matches. Eager loading
    avoids lazy-load round trips after the session closes.
    """
    query = (
        select(User)
        .where(User.id == user_id)
        .options(
            selectinload(User.labels),
            selectinload(User.user_newsletters).selectinload(
                UserNewsletter.newsletter
            ),
        )
    )
    rows = await session.execute(query)
    return rows.scalar_one_or_none()
|
||||
|
||||
|
||||
async def load_current_user():
    """Populate ``g.user`` and ``g.rights`` from the session's ``uid``.

    Anonymous requests get ``g.user = None`` and ``{"admin": False}``.
    NOTE(review): a stale uid (user row gone) yields ``g.rights == {}``
    with no "admin" key, unlike the anonymous branch — callers should use
    ``rights.get(...)``; confirm this asymmetry is intended.
    """
    uid = qsession.get("uid")
    if not uid:
        g.user = None
        g.rights = {"admin": False}
        return

    user = await load_user_by_id(g.s, uid)
    g.user = user
    g.rights = {label.name: True for label in user.labels} if user else {}
|
||||
Reference in New Issue
Block a user