Split databases and Redis — prepare infrastructure for per-domain isolation
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 3m20s
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 3m20s
Redis: per-app DB index (0-5) with shared auth DB 15 for SSO keys; flushdb replaces flushall so deploys don't wipe cross-app auth state. Postgres: drop 13 cross-domain FK constraints (migration v2t0p8q9r0), remove dead ORM relationships, add explicit joins for 4 live ones. Multi-engine sessions (account + federation) ready for per-domain DBs via DATABASE_URL_ACCOUNT / DATABASE_URL_FEDERATION env vars. All URLs initially point to the same appdb — zero behaviour change until split-databases.sh is run to migrate data to per-domain DBs. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
12
_config/init-databases.sql
Normal file
12
_config/init-databases.sql
Normal file
@@ -0,0 +1,12 @@
|
||||
-- Per-domain databases for the coop stack.
-- Run once on fresh deployments (not needed for existing single-DB setups
-- that use the split-databases.sh migration script instead).
--
-- Usage: psql -U postgres -f init-databases.sql
--
-- One database per application domain; on an existing install,
-- split-databases.sh copies each domain's tables out of the monolithic
-- appdb into these.

CREATE DATABASE db_account;
CREATE DATABASE db_blog;
CREATE DATABASE db_market;
CREATE DATABASE db_cart;
CREATE DATABASE db_events;
CREATE DATABASE db_federation;
|
||||
153
_config/split-databases.sh
Executable file
153
_config/split-databases.sh
Executable file
@@ -0,0 +1,153 @@
|
||||
#!/usr/bin/env bash
#
# split-databases.sh — Migrate from single appdb to per-domain databases.
#
# Copies each domain's tables (schema + data) out of the monolithic source
# database into its dedicated per-domain database, then stamps every target
# database at the current Alembic head.
#
# Prerequisites:
#   - All apps stopped (5-min maintenance window)
#   - init-databases.sql already run (CREATE DATABASE db_*)
#   - Run from a host that can reach the Postgres container
#
# Usage:
#   PGHOST=db PGUSER=postgres PGPASSWORD=change-me bash split-databases.sh
#
set -euo pipefail

# Source database to migrate out of (override with SOURCE_DB=...).
SOURCE_DB="${SOURCE_DB:-appdb}"

# ── Table → database mapping ───────────────────────────────────────────────
# Whitespace-separated table names per target database. A table missing from
# every list is silently left behind in the source DB — keep these in sync
# with the schema.

declare -A DB_TABLES

# Identity / auth domain.
DB_TABLES[db_account]="
users
magic_links
oauth_codes
oauth_grants
ghost_labels
user_labels
ghost_newsletters
user_newsletters
ghost_tiers
ghost_subscriptions
kv
"

# Blog / content domain.
DB_TABLES[db_blog]="
authors
tags
posts
post_authors
post_tags
post_likes
menu_items
menu_nodes
container_relations
"

# Marketplace / catalogue domain.
DB_TABLES[db_market]="
products
product_images
product_sections
product_labels
product_stickers
product_attributes
product_nutrition
product_allergens
product_likes
product_logs
market_places
nav_tops
nav_subs
listings
listing_items
link_errors
link_externals
subcategory_redirects
"

# Cart / checkout domain.
DB_TABLES[db_cart]="
cart_items
orders
order_items
"

# Events / ticketing domain.
DB_TABLES[db_events]="
calendars
calendar_slots
calendar_entries
calendar_entry_posts
ticket_types
tickets
page_configs
"

# ActivityPub federation domain.
DB_TABLES[db_federation]="
ap_anchors
ap_actor_profiles
ap_activities
ap_followers
ap_inbox_items
ap_remote_actors
ap_following
ap_remote_posts
ap_local_posts
ap_interactions
ap_notifications
ap_delivery_log
ipfs_pins
"
|
||||
|
||||
# ── Migrate each domain ────────────────────────────────────────────────────
# pg_dump only the tables mapped to each target DB and restore them straight
# into it. --no-owner/--no-privileges keep the dump portable across roles.

# Single source of truth for the target list (was previously duplicated in
# both loops below).
DOMAIN_DBS="db_account db_blog db_market db_cart db_events db_federation"

for target_db in $DOMAIN_DBS; do
    tables="${DB_TABLES[$target_db]}"

    # Build pg_dump's --table arguments as an array so table names are never
    # re-split or glob-expanded by the shell (the previous string-concatenation
    # approach relied on unquoted expansion).
    table_args=()
    for t in $tables; do
        table_args+=( "--table=$t" )
    done

    echo "=== Migrating $target_db ==="
    echo " Tables: $(echo $tables | tr '\n' ' ')"

    # Dump schema + data for these tables from the source DB
    pg_dump "$SOURCE_DB" "${table_args[@]}" --no-owner --no-privileges \
        | psql -q "$target_db"

    echo " Done."
done

# ── Stamp Alembic head in each domain DB ──────────────────────────────────
# The dumped schema already reflects the current head, so record that fact
# instead of re-running migrations against the new databases.

echo ""
echo "=== Stamping Alembic head in each DB ==="
for target_db in $DOMAIN_DBS; do
    # Create alembic_version table and stamp current head
    psql -q "$target_db" <<'SQL'
CREATE TABLE IF NOT EXISTS alembic_version (
    version_num VARCHAR(32) NOT NULL,
    CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num)
);
DELETE FROM alembic_version;
INSERT INTO alembic_version (version_num) VALUES ('v2t0p8q9r0');
SQL
    echo " $target_db stamped at v2t0p8q9r0"
done

echo ""
echo "=== Migration complete ==="
echo ""
echo "Next steps:"
echo " 1. Update docker-compose.yml — set per-app DATABASE_URL to the new DBs"
echo " 2. Remove schema_sql config (no longer needed)"
echo " 3. Redeploy all services"
echo ""
echo "Per-app DATABASE_URL values:"
echo " blog: postgresql+asyncpg://postgres:change-me@db:5432/db_blog"
echo " market: postgresql+asyncpg://postgres:change-me@db:5432/db_market"
echo " cart: postgresql+asyncpg://postgres:change-me@db:5432/db_cart"
echo " events: postgresql+asyncpg://postgres:change-me@db:5432/db_events"
echo " federation: postgresql+asyncpg://postgres:change-me@db:5432/db_federation"
echo " account: postgresql+asyncpg://postgres:change-me@db:5432/db_account"
echo ""
echo " DATABASE_URL_ACCOUNT: postgresql+asyncpg://postgres:change-me@db:5432/db_account"
echo " DATABASE_URL_FEDERATION: postgresql+asyncpg://postgres:change-me@db:5432/db_federation"
|
||||
@@ -364,15 +364,14 @@ def register(url_prefix="/auth"):
|
||||
|
||||
# Signal login for this device so client apps can detect it
|
||||
try:
|
||||
from shared.browser.app.redis_cacher import get_redis
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
import time as _time
|
||||
_redis = get_redis()
|
||||
if _redis:
|
||||
await _redis.set(
|
||||
f"did_auth:{g.device_id}",
|
||||
str(_time.time()).encode(),
|
||||
ex=30 * 24 * 3600,
|
||||
)
|
||||
_auth_r = await get_auth_redis()
|
||||
await _auth_r.set(
|
||||
f"did_auth:{g.device_id}",
|
||||
str(_time.time()).encode(),
|
||||
ex=30 * 24 * 3600,
|
||||
)
|
||||
except Exception:
|
||||
current_app.logger.exception("[auth] failed to set did_auth in Redis")
|
||||
|
||||
@@ -398,10 +397,9 @@ def register(url_prefix="/auth"):
|
||||
|
||||
# Clear login signal for this device
|
||||
try:
|
||||
from shared.browser.app.redis_cacher import get_redis
|
||||
_redis = get_redis()
|
||||
if _redis:
|
||||
await _redis.delete(f"did_auth:{g.device_id}")
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
_auth_r = await get_auth_redis()
|
||||
await _auth_r.delete(f"did_auth:{g.device_id}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -446,10 +444,9 @@ def register(url_prefix="/auth"):
|
||||
|
||||
# Clear login signal for this device
|
||||
try:
|
||||
from shared.browser.app.redis_cacher import get_redis
|
||||
_redis = get_redis()
|
||||
if _redis:
|
||||
await _redis.delete(f"did_auth:{g.device_id}")
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
_auth_r = await get_auth_redis()
|
||||
await _auth_r.delete(f"did_auth:{g.device_id}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
|
||||
python3 -c "
|
||||
import redis, os
|
||||
r = redis.from_url(os.environ['REDIS_URL'])
|
||||
r.flushall()
|
||||
r.flushdb()
|
||||
print('Redis cache cleared.')
|
||||
" || echo "Redis flush failed (non-fatal), continuing..."
|
||||
fi
|
||||
|
||||
@@ -22,7 +22,7 @@ if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
|
||||
python3 -c "
|
||||
import redis, os
|
||||
r = redis.from_url(os.environ['REDIS_URL'])
|
||||
r.flushall()
|
||||
r.flushdb()
|
||||
print('Redis cache cleared.')
|
||||
" || echo "Redis flush failed (non-fatal), continuing..."
|
||||
fi
|
||||
|
||||
@@ -19,7 +19,7 @@ if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
|
||||
python3 -c "
|
||||
import redis, os
|
||||
r = redis.from_url(os.environ['REDIS_URL'])
|
||||
r.flushall()
|
||||
r.flushdb()
|
||||
print('Redis cache cleared.')
|
||||
" || echo "Redis flush failed (non-fatal), continuing..."
|
||||
fi
|
||||
|
||||
@@ -11,6 +11,8 @@ x-app-common: &app-common
|
||||
|
||||
x-app-env: &app-env
|
||||
DATABASE_URL: postgresql+asyncpg://postgres:change-me@db:5432/appdb
|
||||
DATABASE_URL_ACCOUNT: postgresql+asyncpg://postgres:change-me@db:5432/appdb
|
||||
DATABASE_URL_FEDERATION: postgresql+asyncpg://postgres:change-me@db:5432/appdb
|
||||
ALEMBIC_DATABASE_URL: postgresql+psycopg://postgres:change-me@db:5432/appdb
|
||||
SMTP_HOST: ${SMTP_HOST}
|
||||
SMTP_PORT: ${SMTP_PORT}
|
||||
@@ -23,7 +25,7 @@ x-app-env: &app-env
|
||||
GHOST_CONTENT_API_KEY: ${GHOST_CONTENT_API_KEY}
|
||||
GHOST_WEBHOOK_SECRET: ${GHOST_WEBHOOK_SECRET}
|
||||
GHOST_ADMIN_API_KEY: ${GHOST_ADMIN_API_KEY}
|
||||
REDIS_URL: redis://redis:6379
|
||||
REDIS_AUTH_URL: redis://redis:6379/15
|
||||
SECRET_KEY: ${SECRET_KEY}
|
||||
SUMUP_API_KEY: ${SUMUP_API_KEY}
|
||||
APP_URL_BLOG: https://blog.rose-ash.com
|
||||
@@ -56,6 +58,7 @@ services:
|
||||
dockerfile: blog/Dockerfile
|
||||
environment:
|
||||
<<: *app-env
|
||||
REDIS_URL: redis://redis:6379/0
|
||||
DATABASE_HOST: db
|
||||
DATABASE_PORT: "5432"
|
||||
RUN_MIGRATIONS: "true"
|
||||
@@ -71,6 +74,7 @@ services:
|
||||
- /root/rose-ash/_snapshot:/app/_snapshot
|
||||
environment:
|
||||
<<: *app-env
|
||||
REDIS_URL: redis://redis:6379/1
|
||||
DATABASE_HOST: db
|
||||
DATABASE_PORT: "5432"
|
||||
|
||||
@@ -82,6 +86,7 @@ services:
|
||||
dockerfile: cart/Dockerfile
|
||||
environment:
|
||||
<<: *app-env
|
||||
REDIS_URL: redis://redis:6379/2
|
||||
DATABASE_HOST: db
|
||||
DATABASE_PORT: "5432"
|
||||
|
||||
@@ -93,6 +98,7 @@ services:
|
||||
dockerfile: events/Dockerfile
|
||||
environment:
|
||||
<<: *app-env
|
||||
REDIS_URL: redis://redis:6379/3
|
||||
DATABASE_HOST: db
|
||||
DATABASE_PORT: "5432"
|
||||
|
||||
@@ -104,6 +110,7 @@ services:
|
||||
dockerfile: federation/Dockerfile
|
||||
environment:
|
||||
<<: *app-env
|
||||
REDIS_URL: redis://redis:6379/4
|
||||
DATABASE_HOST: db
|
||||
DATABASE_PORT: "5432"
|
||||
|
||||
@@ -115,6 +122,7 @@ services:
|
||||
dockerfile: account/Dockerfile
|
||||
environment:
|
||||
<<: *app-env
|
||||
REDIS_URL: redis://redis:6379/5
|
||||
DATABASE_HOST: db
|
||||
DATABASE_PORT: "5432"
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
|
||||
python3 -c "
|
||||
import redis, os
|
||||
r = redis.from_url(os.environ['REDIS_URL'])
|
||||
r.flushall()
|
||||
r.flushdb()
|
||||
print('Redis cache cleared.')
|
||||
" || echo "Redis flush failed (non-fatal), continuing..."
|
||||
fi
|
||||
|
||||
@@ -22,7 +22,7 @@ if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
|
||||
python3 -c "
|
||||
import redis, os
|
||||
r = redis.from_url(os.environ['REDIS_URL'])
|
||||
r.flushall()
|
||||
r.flushdb()
|
||||
print('Redis cache cleared.')
|
||||
" || echo "Redis flush failed (non-fatal), continuing..."
|
||||
fi
|
||||
|
||||
@@ -19,7 +19,7 @@ if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
|
||||
python3 -c "
|
||||
import redis, os
|
||||
r = redis.from_url(os.environ['REDIS_URL'])
|
||||
r.flushall()
|
||||
r.flushdb()
|
||||
print('Redis cache cleared.')
|
||||
" || echo "Redis flush failed (non-fatal), continuing..."
|
||||
fi
|
||||
|
||||
59
shared/alembic/versions/v2t0p8q9r0_drop_cross_domain_fks.py
Normal file
59
shared/alembic/versions/v2t0p8q9r0_drop_cross_domain_fks.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""Drop cross-domain foreign key constraints.
|
||||
|
||||
Columns and indexes remain — only the FK constraints are removed.
|
||||
This prepares for per-domain databases where cross-DB FKs can't exist.
|
||||
|
||||
Revision ID: v2t0p8q9r0
|
||||
Revises: u1s9o5p7q8
|
||||
"""
|
||||
from alembic import op
|
||||
|
||||
revision = "v2t0p8q9r0"
|
||||
down_revision = "u1s9o5p7q8"
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Drop every FK constraint that crosses a future database boundary.

    Columns and their indexes are left intact — only the referential
    constraints are removed, since constraints cannot span separate
    databases. Names follow the ``<table>_<column>_fkey`` convention of
    the original schema — TODO confirm against the live DB before running.
    """
    # blog → account
    op.drop_constraint("posts_user_id_fkey", "posts", type_="foreignkey")
    op.drop_constraint("post_likes_user_id_fkey", "post_likes", type_="foreignkey")

    # market → account
    op.drop_constraint("product_likes_user_id_fkey", "product_likes", type_="foreignkey")

    # cart → account
    op.drop_constraint("cart_items_user_id_fkey", "cart_items", type_="foreignkey")
    op.drop_constraint("orders_user_id_fkey", "orders", type_="foreignkey")

    # cart → market
    op.drop_constraint("cart_items_product_id_fkey", "cart_items", type_="foreignkey")
    op.drop_constraint("cart_items_market_place_id_fkey", "cart_items", type_="foreignkey")
    op.drop_constraint("order_items_product_id_fkey", "order_items", type_="foreignkey")

    # cart → events
    op.drop_constraint("orders_page_config_id_fkey", "orders", type_="foreignkey")

    # events → account
    op.drop_constraint("calendar_entries_user_id_fkey", "calendar_entries", type_="foreignkey")
    op.drop_constraint("tickets_user_id_fkey", "tickets", type_="foreignkey")

    # federation → account
    op.drop_constraint("ap_actor_profiles_user_id_fkey", "ap_actor_profiles", type_="foreignkey")

    # shared (blog-internal but cross-concern)
    op.drop_constraint("menu_items_post_id_fkey", "menu_items", type_="foreignkey")
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Recreate the 13 dropped FK constraints with their ON DELETE rules.

    Only valid while all referenced tables still live in one database —
    running this after the physical split will fail on the missing
    cross-domain tables.
    """
    op.create_foreign_key("posts_user_id_fkey", "posts", "users", ["user_id"], ["id"], ondelete="SET NULL")
    op.create_foreign_key("post_likes_user_id_fkey", "post_likes", "users", ["user_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("product_likes_user_id_fkey", "product_likes", "users", ["user_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("cart_items_user_id_fkey", "cart_items", "users", ["user_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("cart_items_product_id_fkey", "cart_items", "products", ["product_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("cart_items_market_place_id_fkey", "cart_items", "market_places", ["market_place_id"], ["id"], ondelete="SET NULL")
    op.create_foreign_key("orders_user_id_fkey", "orders", "users", ["user_id"], ["id"])
    op.create_foreign_key("orders_page_config_id_fkey", "orders", "page_configs", ["page_config_id"], ["id"], ondelete="SET NULL")
    op.create_foreign_key("order_items_product_id_fkey", "order_items", "products", ["product_id"], ["id"])
    op.create_foreign_key("calendar_entries_user_id_fkey", "calendar_entries", "users", ["user_id"], ["id"])
    op.create_foreign_key("tickets_user_id_fkey", "tickets", "users", ["user_id"], ["id"])
    op.create_foreign_key("ap_actor_profiles_user_id_fkey", "ap_actor_profiles", "users", ["user_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("menu_items_post_id_fkey", "menu_items", "posts", ["post_id"], ["id"], ondelete="CASCADE")
|
||||
@@ -35,6 +35,72 @@ async def get_session():
|
||||
await sess.close()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Cross-domain sessions — account + federation
|
||||
#
|
||||
# Initially DATABASE_URL_ACCOUNT / DATABASE_URL_FEDERATION point to the same
|
||||
# DB as DATABASE_URL (zero behaviour change). When per-domain DBs are ready,
|
||||
# switch the env vars to the new connection strings.
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
DATABASE_URL_ACCOUNT = (
|
||||
os.getenv("DATABASE_URL_ACCOUNT") or DATABASE_URL
|
||||
)
|
||||
|
||||
DATABASE_URL_FEDERATION = (
|
||||
os.getenv("DATABASE_URL_FEDERATION") or DATABASE_URL
|
||||
)
|
||||
|
||||
# Engines are created lazily — only allocate a pool if the URL differs
|
||||
_account_engine = (
|
||||
_engine if DATABASE_URL_ACCOUNT == DATABASE_URL
|
||||
else create_async_engine(
|
||||
DATABASE_URL_ACCOUNT,
|
||||
future=True, echo=False, pool_pre_ping=True,
|
||||
pool_size=3, max_overflow=5,
|
||||
)
|
||||
)
|
||||
_AccountSession = async_sessionmaker(
|
||||
bind=_account_engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
|
||||
_federation_engine = (
|
||||
_engine if DATABASE_URL_FEDERATION == DATABASE_URL
|
||||
else create_async_engine(
|
||||
DATABASE_URL_FEDERATION,
|
||||
future=True, echo=False, pool_pre_ping=True,
|
||||
pool_size=3, max_overflow=5,
|
||||
)
|
||||
)
|
||||
_FederationSession = async_sessionmaker(
|
||||
bind=_federation_engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
|
||||
|
||||
@asynccontextmanager
async def get_account_session():
    """Yield a session bound to the account database (users, grants, oauth codes).

    The session is always closed on exit, even when the body raises.
    """
    session = _AccountSession()
    try:
        yield session
    finally:
        await session.close()
|
||||
|
||||
|
||||
@asynccontextmanager
async def get_federation_session():
    """Yield a session bound to the federation database (ap_activities, etc.).

    The session is always closed on exit, even when the body raises.
    """
    session = _FederationSession()
    try:
        yield session
    finally:
        await session.close()
|
||||
|
||||
|
||||
def register_db(app: Quart):
|
||||
|
||||
@app.before_request
|
||||
|
||||
@@ -4,6 +4,11 @@ Unified activity bus.
|
||||
emit_activity() writes an APActivity row with process_state='pending' within
|
||||
the caller's existing DB transaction — atomic with the domain change.
|
||||
|
||||
When the federation database is separate (DATABASE_URL_FEDERATION differs from
|
||||
DATABASE_URL), emit_activity() opens its own federation session and commits
|
||||
independently. Atomicity is traded for domain isolation; handlers are
|
||||
idempotent, so at-least-once delivery is safe.
|
||||
|
||||
register_activity_handler() registers async handler functions that the
|
||||
EventProcessor dispatches when processing pending activities.
|
||||
"""
|
||||
@@ -73,6 +78,12 @@ def get_activity_handlers(
|
||||
return handlers
|
||||
|
||||
|
||||
def _needs_federation_session() -> bool:
    """Return True when the federation DB is separate from the app's default DB.

    Imported lazily to avoid a circular import with shared.db.session.
    """
    from shared.db.session import DATABASE_URL, DATABASE_URL_FEDERATION
    return DATABASE_URL != DATABASE_URL_FEDERATION
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# emit_activity — the primary way to emit events
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -92,8 +103,10 @@ async def emit_activity(
|
||||
"""
|
||||
Write an AP-shaped activity to ap_activities with process_state='pending'.
|
||||
|
||||
Called inside a service function using the same session that performs the
|
||||
domain change. The activity and the change commit together.
|
||||
When all apps share one database the activity is written in the caller's
|
||||
transaction (atomic with the domain change). When the federation DB is
|
||||
separate, a dedicated federation session is used and committed
|
||||
independently.
|
||||
"""
|
||||
if not origin_app:
|
||||
try:
|
||||
@@ -118,9 +131,17 @@ async def emit_activity(
|
||||
process_state="pending",
|
||||
origin_app=origin_app,
|
||||
)
|
||||
session.add(activity)
|
||||
await session.flush()
|
||||
# Wake any listening EventProcessor as soon as this transaction commits.
|
||||
# NOTIFY is transactional — delivered only after commit.
|
||||
await session.execute(text("NOTIFY ap_activity_pending"))
|
||||
|
||||
if _needs_federation_session():
|
||||
from shared.db.session import get_federation_session
|
||||
async with get_federation_session() as fed_s:
|
||||
async with fed_s.begin():
|
||||
fed_s.add(activity)
|
||||
await fed_s.flush()
|
||||
await fed_s.execute(text("NOTIFY ap_activity_pending"))
|
||||
else:
|
||||
session.add(activity)
|
||||
await session.flush()
|
||||
await session.execute(text("NOTIFY ap_activity_pending"))
|
||||
|
||||
return activity
|
||||
|
||||
@@ -9,6 +9,9 @@ A dedicated asyncpg LISTEN connection wakes the poll loop immediately when
|
||||
emit_activity() fires NOTIFY ap_activity_pending, so latency drops from
|
||||
~2 seconds (poll interval) to sub-100 ms. The fixed-interval poll remains
|
||||
as a safety-net fallback.
|
||||
|
||||
The LISTEN connection and poll queries target the federation database
|
||||
(DATABASE_URL_FEDERATION) since ap_activities lives there.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -21,7 +24,7 @@ import asyncpg
|
||||
from sqlalchemy import select, update
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.db.session import get_session, DATABASE_URL
|
||||
from shared.db.session import get_federation_session, DATABASE_URL_FEDERATION
|
||||
from shared.models.federation import APActivity
|
||||
from .bus import get_activity_handlers
|
||||
|
||||
@@ -89,7 +92,7 @@ class EventProcessor:
|
||||
|
||||
async def _listen_for_notify(self) -> None:
|
||||
"""Maintain a LISTEN connection and wake the poll loop on NOTIFY."""
|
||||
dsn = DATABASE_URL.replace("+asyncpg", "")
|
||||
dsn = DATABASE_URL_FEDERATION.replace("+asyncpg", "")
|
||||
while self._running:
|
||||
try:
|
||||
self._listen_conn = await asyncpg.connect(dsn)
|
||||
@@ -154,7 +157,7 @@ class EventProcessor:
|
||||
"""
|
||||
cutoff = datetime.now(timezone.utc) - timedelta(seconds=self._stuck_timeout)
|
||||
try:
|
||||
async with get_session() as session:
|
||||
async with get_federation_session() as session:
|
||||
filters = [
|
||||
APActivity.process_state == "processing",
|
||||
APActivity.created_at < cutoff,
|
||||
@@ -180,7 +183,7 @@ class EventProcessor:
|
||||
async def _process_batch(self) -> int:
|
||||
"""Fetch and process a batch of pending activities. Returns count processed."""
|
||||
processed = 0
|
||||
async with get_session() as session:
|
||||
async with get_federation_session() as session:
|
||||
filters = [
|
||||
APActivity.process_state == "pending",
|
||||
APActivity.process_attempts < APActivity.process_max_attempts,
|
||||
|
||||
37
shared/infrastructure/auth_redis.py
Normal file
37
shared/infrastructure/auth_redis.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""Shared auth Redis connection (DB 15).
|
||||
|
||||
All cross-app auth keys live here so that per-app FLUSHDB on deploy
|
||||
doesn't wipe SSO state:
|
||||
- did_auth:{device_id} — login signal timestamp
|
||||
- grant:{grant_token} — grant validity cache (ok/revoked)
|
||||
- prompt:{app}:{device_id} — prompt=none cooldown
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from redis import asyncio as aioredis
|
||||
|
||||
_AUTH_REDIS_URL = os.getenv("REDIS_AUTH_URL", "redis://redis:6379/15")
|
||||
|
||||
_auth_redis: aioredis.Redis | None = None
|
||||
|
||||
|
||||
async def get_auth_redis() -> aioredis.Redis:
    """Return the process-wide auth Redis client, creating it on first use."""
    global _auth_redis
    if _auth_redis is not None:
        return _auth_redis
    # NOTE(review): no lock here — two concurrent first calls could each
    # build a client; presumably harmless, but worth confirming.
    _auth_redis = aioredis.Redis.from_url(
        _AUTH_REDIS_URL,
        encoding="utf-8",
        decode_responses=False,
    )
    return _auth_redis
|
||||
|
||||
|
||||
async def close_auth_redis() -> None:
    """Tear down the shared auth Redis client (call once on app shutdown)."""
    global _auth_redis
    if _auth_redis is None:
        return
    await _auth_redis.close()
    _auth_redis = None
|
||||
@@ -162,17 +162,20 @@ def create_base_app(
|
||||
uid = qs.get("uid")
|
||||
grant_token = qs.get("grant_token")
|
||||
|
||||
from shared.browser.app.redis_cacher import get_redis
|
||||
redis = get_redis()
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
try:
|
||||
auth_redis = await get_auth_redis()
|
||||
except Exception:
|
||||
auth_redis = None
|
||||
|
||||
# Case 1: logged in — verify grant still valid (direct DB, cached)
|
||||
if uid and grant_token:
|
||||
cache_key = f"grant:{grant_token}"
|
||||
if redis:
|
||||
if auth_redis:
|
||||
# Quick check: if did_auth was cleared (logout), skip cache
|
||||
device_id = g.device_id
|
||||
did_auth_present = await redis.get(f"did_auth:{device_id}") if device_id else True
|
||||
cached = await redis.get(cache_key)
|
||||
did_auth_present = await auth_redis.get(f"did_auth:{device_id}") if device_id else True
|
||||
cached = await auth_redis.get(cache_key)
|
||||
if cached == b"ok" and did_auth_present:
|
||||
return
|
||||
if cached == b"revoked":
|
||||
@@ -183,10 +186,10 @@ def create_base_app(
|
||||
return
|
||||
|
||||
from sqlalchemy import select
|
||||
from shared.db.session import get_session
|
||||
from shared.db.session import get_account_session
|
||||
from shared.models.oauth_grant import OAuthGrant
|
||||
try:
|
||||
async with get_session() as s:
|
||||
async with get_account_session() as s:
|
||||
grant = await s.scalar(
|
||||
select(OAuthGrant).where(OAuthGrant.token == grant_token)
|
||||
)
|
||||
@@ -194,8 +197,8 @@ def create_base_app(
|
||||
except Exception:
|
||||
return # DB error — don't log user out
|
||||
|
||||
if redis:
|
||||
await redis.set(cache_key, b"ok" if valid else b"revoked", ex=60)
|
||||
if auth_redis:
|
||||
await auth_redis.set(cache_key, b"ok" if valid else b"revoked", ex=60)
|
||||
if not valid:
|
||||
qs.pop("uid", None)
|
||||
qs.pop("grant_token", None)
|
||||
@@ -214,8 +217,8 @@ def create_base_app(
|
||||
|
||||
# Check if account signalled a login after we cached "not logged in"
|
||||
# (blog_did == account_did — same value set during OAuth callback)
|
||||
if device_id and redis and pnone_at:
|
||||
auth_ts = await redis.get(f"did_auth:{device_id}")
|
||||
if device_id and auth_redis and pnone_at:
|
||||
auth_ts = await auth_redis.get(f"did_auth:{device_id}")
|
||||
if auth_ts:
|
||||
try:
|
||||
if float(auth_ts) > pnone_at:
|
||||
@@ -226,8 +229,8 @@ def create_base_app(
|
||||
|
||||
if pnone_at and (now - pnone_at) < 300:
|
||||
return
|
||||
if device_id and redis:
|
||||
cached = await redis.get(f"prompt:{name}:{device_id}")
|
||||
if device_id and auth_redis:
|
||||
cached = await auth_redis.get(f"prompt:{name}:{device_id}")
|
||||
if cached == b"none":
|
||||
return
|
||||
return redirect(f"/auth/login?prompt=none&next={_quote(request.url, safe='')}")
|
||||
@@ -279,6 +282,8 @@ def create_base_app(
|
||||
@app.after_serving
|
||||
async def _stop_event_processor():
|
||||
await _event_processor.stop()
|
||||
from shared.infrastructure.auth_redis import close_auth_redis
|
||||
await close_auth_redis()
|
||||
|
||||
# --- favicon ---
|
||||
@app.get("/favicon.ico")
|
||||
|
||||
@@ -78,12 +78,14 @@ def create_oauth_blueprint(app_name: str) -> Blueprint:
|
||||
qsession["_pnone_at"] = _time.time()
|
||||
device_id = g.device_id
|
||||
if device_id:
|
||||
from shared.browser.app.redis_cacher import get_redis
|
||||
_redis = get_redis()
|
||||
if _redis:
|
||||
await _redis.set(
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
try:
|
||||
_auth_r = await get_auth_redis()
|
||||
await _auth_r.set(
|
||||
f"prompt:{app_name}:{device_id}", b"none", ex=300
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
return redirect(next_url)
|
||||
|
||||
code = request.args.get("code")
|
||||
|
||||
@@ -31,5 +31,17 @@ async def load_current_user():
|
||||
g.rights = {"admin": False}
|
||||
return
|
||||
|
||||
g.user = await load_user_by_id(g.s, uid)
|
||||
# User table lives in the account DB — use account session when
|
||||
# the per-request session (g.s) targets a different database.
|
||||
from shared.db.session import DATABASE_URL, DATABASE_URL_ACCOUNT
|
||||
if DATABASE_URL_ACCOUNT != DATABASE_URL:
|
||||
from shared.db.session import get_account_session
|
||||
async with get_account_session() as s:
|
||||
g.user = await load_user_by_id(s, uid)
|
||||
# Expunge so the object is usable outside this session
|
||||
if g.user:
|
||||
s.expunge(g.user)
|
||||
else:
|
||||
g.user = await load_user_by_id(g.s, uid)
|
||||
|
||||
g.rights = {l.name: True for l in g.user.labels} if g.user else {}
|
||||
|
||||
@@ -77,8 +77,8 @@ class CalendarEntry(Base):
|
||||
index=True,
|
||||
)
|
||||
|
||||
# NEW: ownership + order link
|
||||
user_id = Column(Integer, ForeignKey("users.id"), nullable=True, index=True)
|
||||
# Ownership (cross-domain — no FK constraint to users table)
|
||||
user_id = Column(Integer, nullable=True, index=True)
|
||||
session_id = Column(String(64), nullable=True, index=True)
|
||||
order_id = Column(Integer, nullable=True, index=True)
|
||||
|
||||
@@ -246,7 +246,7 @@ class Ticket(Base):
|
||||
nullable=True,
|
||||
index=True,
|
||||
)
|
||||
user_id = Column(Integer, ForeignKey("users.id"), nullable=True, index=True)
|
||||
user_id = Column(Integer, nullable=True, index=True)
|
||||
session_id = Column(String(64), nullable=True, index=True)
|
||||
order_id = Column(Integer, nullable=True, index=True)
|
||||
|
||||
|
||||
@@ -23,8 +23,7 @@ class ActorProfile(Base):
|
||||
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
|
||||
user_id: Mapped[int] = mapped_column(
|
||||
Integer, ForeignKey("users.id", ondelete="CASCADE"),
|
||||
unique=True, nullable=False,
|
||||
Integer, unique=True, nullable=False,
|
||||
)
|
||||
preferred_username: Mapped[str] = mapped_column(String(64), unique=True, nullable=False)
|
||||
display_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
|
||||
@@ -36,7 +35,6 @@ class ActorProfile(Base):
|
||||
)
|
||||
|
||||
# Relationships
|
||||
user = relationship("User", backref="actor_profile", uselist=False, lazy="selectin")
|
||||
activities = relationship("APActivity", back_populates="actor_profile", lazy="dynamic")
|
||||
followers = relationship("APFollower", back_populates="actor_profile", lazy="dynamic")
|
||||
|
||||
|
||||
@@ -94,7 +94,7 @@ class Post(Base):
|
||||
deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
|
||||
|
||||
user_id: Mapped[Optional[int]] = mapped_column(
|
||||
Integer, ForeignKey("users.id", ondelete="SET NULL"), index=True
|
||||
Integer, index=True
|
||||
)
|
||||
publish_requested: Mapped[bool] = mapped_column(Boolean(), default=False, server_default="false", nullable=False)
|
||||
|
||||
@@ -111,9 +111,6 @@ class Post(Base):
|
||||
primary_tag: Mapped[Optional[Tag]] = relationship(
|
||||
"Tag", foreign_keys=[primary_tag_id]
|
||||
)
|
||||
user: Mapped[Optional["User"]] = relationship(
|
||||
"User", foreign_keys=[user_id]
|
||||
)
|
||||
|
||||
# AUTHORS RELATIONSHIP (many-to-many via post_authors)
|
||||
authors: Mapped[List["Author"]] = relationship(
|
||||
@@ -205,7 +202,7 @@ class PostLike(Base):
|
||||
__tablename__ = "post_likes"
|
||||
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
|
||||
user_id = Column(Integer, nullable=False)
|
||||
post_id: Mapped[int] = mapped_column(ForeignKey("posts.id", ondelete="CASCADE"), nullable=False)
|
||||
|
||||
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
|
||||
@@ -213,4 +210,3 @@ class PostLike(Base):
|
||||
deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
|
||||
|
||||
post: Mapped["Post"] = relationship("Post", back_populates="likes", foreign_keys=[post_id])
|
||||
user = relationship("User", back_populates="liked_posts")
|
||||
|
||||
@@ -112,18 +112,8 @@ class Product(Base):
|
||||
back_populates="product",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
cart_items: Mapped[List["CartItem"]] = relationship(
|
||||
"CartItem",
|
||||
back_populates="product",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
# NEW: all order items that reference this product
|
||||
order_items: Mapped[List["OrderItem"]] = relationship(
|
||||
"OrderItem",
|
||||
back_populates="product",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
# cart_items and order_items live in a separate domain (cart DB)
|
||||
# — cross-domain relationships removed
|
||||
|
||||
from sqlalchemy import Column
|
||||
|
||||
@@ -131,7 +121,7 @@ class ProductLike(Base):
|
||||
__tablename__ = "product_likes"
|
||||
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
|
||||
user_id = Column(Integer, nullable=False)
|
||||
product_slug: Mapped[str] = mapped_column(ForeignKey("products.slug", ondelete="CASCADE"))
|
||||
|
||||
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
|
||||
@@ -139,8 +129,6 @@ class ProductLike(Base):
|
||||
deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
|
||||
product: Mapped["Product"] = relationship("Product", back_populates="likes", foreign_keys=[product_slug])
|
||||
|
||||
user = relationship("User", back_populates="liked_products") # optional, if you want reverse access
|
||||
|
||||
|
||||
class ProductImage(Base):
|
||||
__tablename__ = "product_images"
|
||||
@@ -381,7 +369,7 @@ class CartItem(Base):
|
||||
|
||||
# Either a logged-in user OR an anonymous session
|
||||
user_id: Mapped[int | None] = mapped_column(
|
||||
ForeignKey("users.id", ondelete="CASCADE"),
|
||||
Integer,
|
||||
nullable=True,
|
||||
)
|
||||
session_id: Mapped[str | None] = mapped_column(
|
||||
@@ -389,9 +377,8 @@ class CartItem(Base):
|
||||
nullable=True,
|
||||
)
|
||||
|
||||
# IMPORTANT: link to product *id*, not slug
|
||||
product_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("products.id", ondelete="CASCADE"),
|
||||
Integer,
|
||||
nullable=False,
|
||||
)
|
||||
|
||||
@@ -413,7 +400,7 @@ class CartItem(Base):
|
||||
server_default=func.now(),
|
||||
)
|
||||
market_place_id: Mapped[int | None] = mapped_column(
|
||||
ForeignKey("market_places.id", ondelete="SET NULL"),
|
||||
Integer,
|
||||
nullable=True,
|
||||
index=True,
|
||||
)
|
||||
@@ -423,17 +410,19 @@ class CartItem(Base):
|
||||
nullable=True,
|
||||
)
|
||||
|
||||
# Relationships
|
||||
|
||||
# Cross-domain relationships — explicit join, viewonly (no FK constraint)
|
||||
market_place: Mapped["MarketPlace | None"] = relationship(
|
||||
"MarketPlace",
|
||||
foreign_keys=[market_place_id],
|
||||
primaryjoin="CartItem.market_place_id == MarketPlace.id",
|
||||
foreign_keys="[CartItem.market_place_id]",
|
||||
viewonly=True,
|
||||
)
|
||||
product: Mapped["Product"] = relationship(
|
||||
"Product",
|
||||
back_populates="cart_items",
|
||||
primaryjoin="CartItem.product_id == Product.id",
|
||||
foreign_keys="[CartItem.product_id]",
|
||||
viewonly=True,
|
||||
)
|
||||
user: Mapped["User | None"] = relationship("User", back_populates="cart_items")
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_cart_items_user_product", "user_id", "product_id"),
|
||||
|
||||
@@ -13,7 +13,6 @@ class MenuItem(Base):
|
||||
|
||||
post_id: Mapped[int] = mapped_column(
|
||||
Integer,
|
||||
ForeignKey("posts.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
index=True
|
||||
)
|
||||
|
||||
@@ -14,11 +14,11 @@ class Order(Base):
|
||||
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
|
||||
|
||||
user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), nullable=True)
|
||||
user_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
|
||||
session_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
|
||||
|
||||
page_config_id: Mapped[Optional[int]] = mapped_column(
|
||||
ForeignKey("page_configs.id", ondelete="SET NULL"),
|
||||
Integer,
|
||||
nullable=True,
|
||||
index=True,
|
||||
)
|
||||
@@ -69,9 +69,12 @@ class Order(Base):
|
||||
cascade="all, delete-orphan",
|
||||
lazy="selectin",
|
||||
)
|
||||
# Cross-domain relationship — explicit join, viewonly (no FK constraint)
|
||||
page_config: Mapped[Optional["PageConfig"]] = relationship(
|
||||
"PageConfig",
|
||||
foreign_keys=[page_config_id],
|
||||
primaryjoin="Order.page_config_id == PageConfig.id",
|
||||
foreign_keys="[Order.page_config_id]",
|
||||
viewonly=True,
|
||||
lazy="selectin",
|
||||
)
|
||||
|
||||
@@ -86,7 +89,7 @@ class OrderItem(Base):
|
||||
)
|
||||
|
||||
product_id: Mapped[int] = mapped_column(
|
||||
ForeignKey("products.id"),
|
||||
Integer,
|
||||
nullable=False,
|
||||
)
|
||||
product_title: Mapped[Optional[str]] = mapped_column(String(512), nullable=True)
|
||||
@@ -106,9 +109,11 @@ class OrderItem(Base):
|
||||
back_populates="items",
|
||||
)
|
||||
|
||||
# NEW: link each order item to its product
|
||||
# Cross-domain relationship — explicit join, viewonly (no FK constraint)
|
||||
product: Mapped["Product"] = relationship(
|
||||
"Product",
|
||||
back_populates="order_items",
|
||||
primaryjoin="OrderItem.product_id == Product.id",
|
||||
foreign_keys="[OrderItem.product_id]",
|
||||
viewonly=True,
|
||||
lazy="selectin",
|
||||
)
|
||||
|
||||
@@ -30,13 +30,8 @@ class User(Base):
|
||||
labels = relationship("GhostLabel", secondary="user_labels", back_populates="users", lazy="selectin")
|
||||
subscriptions = relationship("GhostSubscription", back_populates="user", cascade="all, delete-orphan", lazy="selectin")
|
||||
|
||||
liked_products = relationship("ProductLike", back_populates="user", cascade="all, delete-orphan")
|
||||
liked_posts = relationship("PostLike", back_populates="user", cascade="all, delete-orphan")
|
||||
cart_items = relationship(
|
||||
"CartItem",
|
||||
back_populates="user",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
# Cross-domain reverse relationships removed (liked_products, liked_posts,
|
||||
# cart_items) — those tables live in different domain DBs
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_user_email", "email", unique=True),
|
||||
|
||||
Reference in New Issue
Block a user