Decouple per-service Alembic migrations and fix cross-DB queries

Each service (blog, market, cart, events, federation, account) now owns
its own database schema with independent Alembic migrations. Removes the
monolithic shared/alembic/ that ran all migrations against a single DB.

- Add per-service alembic.ini, env.py, and 0001_initial.py migrations
- Add shared/db/alembic_env.py helper with table-name filtering
- Fix cross-DB FK in blog/models/snippet.py (the users table lives in db_account)
- Fix cart_impl.py cross-DB queries: fetch products and market_places
  via internal data endpoints instead of direct SQL joins
- Fix blog ghost_sync to fetch page_configs from cart via data endpoint
- Add products-by-ids and page-config-ensure data endpoints
- Update all entrypoint.sh to create own DB and run own migrations
- Cart now uses db_cart instead of db_market
- Add docker-compose.dev.yml, dev.sh for local development
- CI deploys both rose-ash swarm stack and rose-ash-dev compose stack
- Fix Quart namespace package crash (root_path in factory.py)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-26 12:07:24 +00:00
parent bde2fd73b8
commit e65bd41ebe
77 changed files with 2405 additions and 2335 deletions

View File

@@ -1,35 +0,0 @@
# Alembic configuration for the (legacy) monolithic migration setup.
[alembic]
# Location of the migration scripts, relative to this file.
script_location = alembic
# Deliberately empty: the real URL is injected at runtime from
# ALEMBIC_DATABASE_URL / DATABASE_URL (see env.py's _get_url()).
sqlalchemy.url =

# --- Standard Python logging configuration used by Alembic ---
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console

[logger_sqlalchemy]
# WARN keeps engine echo quiet; raise to INFO to see emitted SQL.
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s

View File

@@ -1,69 +0,0 @@
from __future__ import annotations
import os, sys
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
config = context.config
if config.config_file_name is not None:
try:
fileConfig(config.config_file_name)
except Exception:
pass
# Add project root so all app model packages are importable
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
from shared.db.base import Base
# Import ALL models so Base.metadata sees every table
import shared.models # noqa: F401 User, KV, MagicLink, MenuItem, Ghost*
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models"):
try:
__import__(_mod)
except ImportError:
pass # OK in Docker — only needed for autogenerate
target_metadata = Base.metadata
def _get_url() -> str:
url = os.getenv(
"ALEMBIC_DATABASE_URL",
os.getenv("DATABASE_URL", config.get_main_option("sqlalchemy.url") or "")
)
print(url)
return url
def run_migrations_offline() -> None:
url = _get_url()
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
compare_type=True,
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
url = _get_url()
if url:
config.set_main_option("sqlalchemy.url", url)
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata, compare_type=True)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

View File

@@ -1,24 +0,0 @@
## Mako template Alembic renders into each new migration file.
## Lines starting with "##" are Mako comments and are NOT emitted.
<%text>
# Alembic migration script template
</%text>
"""empty message

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}

View File

@@ -1,33 +0,0 @@
"""Initial database schema from schema.sql"""
from alembic import op
import sqlalchemy as sa
import pathlib
# revision identifiers, used by Alembic
revision = '0001_initial_schema'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
return
schema_path = pathlib.Path(__file__).parent.parent.parent / "schema.sql"
with open(schema_path, encoding="utf-8") as f:
sql = f.read()
conn = op.get_bind()
conn.execute(sa.text(sql))
def downgrade():
return
# Drop all user-defined tables in the 'public' schema
conn = op.get_bind()
conn.execute(sa.text("""
DO $$ DECLARE
r RECORD;
BEGIN
FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = 'public') LOOP
EXECUTE 'DROP TABLE IF EXISTS public.' || quote_ident(r.tablename) || ' CASCADE';
END LOOP;
END $$;
"""))

View File

@@ -1,78 +0,0 @@
"""Add cart_items table for shopping cart"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0002_add_cart_items"
down_revision = "0001_initial_schema"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"cart_items",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
# Either a logged-in user *or* an anonymous session_id
sa.Column(
"user_id",
sa.Integer(),
sa.ForeignKey("users.id", ondelete="CASCADE"),
nullable=True,
),
sa.Column("session_id", sa.String(length=128), nullable=True),
# IMPORTANT: reference products.id (PK), not slug
sa.Column(
"product_id",
sa.Integer(),
sa.ForeignKey("products.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column(
"quantity",
sa.Integer(),
nullable=False,
server_default="1",
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"deleted_at",
sa.DateTime(timezone=True),
nullable=True,
),
)
# Indexes to speed up cart lookups
op.create_index(
"ix_cart_items_user_product",
"cart_items",
["user_id", "product_id"],
unique=False,
)
op.create_index(
"ix_cart_items_session_product",
"cart_items",
["session_id", "product_id"],
unique=False,
)
def downgrade() -> None:
op.drop_index("ix_cart_items_session_product", table_name="cart_items")
op.drop_index("ix_cart_items_user_product", table_name="cart_items")
op.drop_table("cart_items")

View File

@@ -1,118 +0,0 @@
"""Add orders and order_items tables for checkout"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0003_add_orders"
down_revision = "0002_add_cart_items"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"orders",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id"), nullable=True),
sa.Column("session_id", sa.String(length=64), nullable=True),
sa.Column(
"status",
sa.String(length=32),
nullable=False,
server_default="pending",
),
sa.Column(
"currency",
sa.String(length=16),
nullable=False,
server_default="GBP",
),
sa.Column(
"total_amount",
sa.Numeric(12, 2),
nullable=False,
),
# SumUp integration fields
sa.Column("sumup_checkout_id", sa.String(length=128), nullable=True),
sa.Column("sumup_status", sa.String(length=32), nullable=True),
sa.Column("sumup_hosted_url", sa.Text(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
)
# Indexes to match model hints (session_id + sumup_checkout_id index=True)
op.create_index(
"ix_orders_session_id",
"orders",
["session_id"],
unique=False,
)
op.create_index(
"ix_orders_sumup_checkout_id",
"orders",
["sumup_checkout_id"],
unique=False,
)
op.create_table(
"order_items",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column(
"order_id",
sa.Integer(),
sa.ForeignKey("orders.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column(
"product_id",
sa.Integer(),
sa.ForeignKey("products.id"),
nullable=False,
),
sa.Column("product_title", sa.String(length=512), nullable=True),
sa.Column(
"quantity",
sa.Integer(),
nullable=False,
server_default="1",
),
sa.Column(
"unit_price",
sa.Numeric(12, 2),
nullable=False,
),
sa.Column(
"currency",
sa.String(length=16),
nullable=False,
server_default="GBP",
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
)
def downgrade() -> None:
op.drop_table("order_items")
op.drop_index("ix_orders_sumup_checkout_id", table_name="orders")
op.drop_index("ix_orders_session_id", table_name="orders")
op.drop_table("orders")

View File

@@ -1,27 +0,0 @@
"""Add sumup_reference to orders"""
from alembic import op
import sqlalchemy as sa
revision = "0004_add_sumup_reference"
down_revision = "0003_add_orders"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"orders",
sa.Column("sumup_reference", sa.String(length=255), nullable=True),
)
op.create_index(
"ix_orders_sumup_reference",
"orders",
["sumup_reference"],
unique=False,
)
def downgrade() -> None:
op.drop_index("ix_orders_sumup_reference", table_name="orders")
op.drop_column("orders", "sumup_reference")

View File

@@ -1,27 +0,0 @@
"""Add description field to orders"""
from alembic import op
import sqlalchemy as sa
revision = "0005_add_description"
down_revision = "0004_add_sumup_reference"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"orders",
sa.Column("description", sa.Text(), nullable=True),
)
op.create_index(
"ix_orders_description",
"orders",
["description"],
unique=False,
)
def downgrade() -> None:
op.drop_index("ix_orders_description", table_name="orders")
op.drop_column("orders", "description")

View File

@@ -1,28 +0,0 @@
from alembic import op
import sqlalchemy as sa

revision = '0006_update_calendar_entries'
down_revision = '0005_add_description'  # use the appropriate previous revision ID
branch_labels = None
depends_on = None


def upgrade():
    """Attach entries to a user/session and track workflow state + cost."""
    tbl = 'calendar_entries'
    # Ownership: a registered user (FK) or an anonymous session.
    op.add_column(tbl, sa.Column('user_id', sa.Integer(), nullable=True))
    op.create_foreign_key('fk_calendar_entries_user_id', tbl, 'users', ['user_id'], ['id'])
    op.add_column(tbl, sa.Column('session_id', sa.String(length=128), nullable=True))
    # Workflow columns; server defaults keep existing rows valid.
    op.add_column(tbl, sa.Column('state', sa.String(length=20), nullable=False, server_default='pending'))
    op.add_column(tbl, sa.Column('cost', sa.Numeric(10, 2), nullable=False, server_default='10'))
    # Lookup indexes on the new ownership columns.
    op.create_index('ix_calendar_entries_user_id', tbl, ['user_id'])
    op.create_index('ix_calendar_entries_session_id', tbl, ['session_id'])


def downgrade():
    """Remove everything added by upgrade(), in reverse order."""
    tbl = 'calendar_entries'
    op.drop_index('ix_calendar_entries_session_id', table_name=tbl)
    op.drop_index('ix_calendar_entries_user_id', table_name=tbl)
    op.drop_column(tbl, 'cost')
    op.drop_column(tbl, 'state')
    op.drop_column(tbl, 'session_id')
    op.drop_constraint('fk_calendar_entries_user_id', tbl, type_='foreignkey')
    op.drop_column(tbl, 'user_id')

View File

@@ -1,50 +0,0 @@
"""Link calendar_entries to the order that paid for them (order_id FK)."""
from alembic import op
import sqlalchemy as sa

revision = "0007_add_oid_entries"
down_revision = "0006_update_calendar_entries"
branch_labels = None
depends_on = None


def upgrade():
    """Add calendar_entries.order_id (FK to orders, SET NULL) + indexes."""
    # Add order_id column
    op.add_column(
        "calendar_entries",
        sa.Column("order_id", sa.Integer(), nullable=True),
    )
    # SET NULL: deleting an order keeps the entry but detaches the payment.
    op.create_foreign_key(
        "fk_calendar_entries_order_id",
        "calendar_entries",
        "orders",
        ["order_id"],
        ["id"],
        ondelete="SET NULL",
    )
    op.create_index(
        "ix_calendar_entries_order_id",
        "calendar_entries",
        ["order_id"],
        unique=False,
    )
    # Optional: add an index on state if you want faster queries by state
    op.create_index(
        "ix_calendar_entries_state",
        "calendar_entries",
        ["state"],
        unique=False,
    )


def downgrade():
    # Drop indexes and FK in reverse order
    op.drop_index("ix_calendar_entries_state", table_name="calendar_entries")
    op.drop_index("ix_calendar_entries_order_id", table_name="calendar_entries")
    op.drop_constraint(
        "fk_calendar_entries_order_id",
        "calendar_entries",
        type_="foreignkey",
    )
    op.drop_column("calendar_entries", "order_id")

View File

@@ -1,33 +0,0 @@
"""add flexible flag to calendar_slots
Revision ID: 0008_add_flexible_to_calendar_slots
Revises: 0007_add_order_id_to_calendar_entries
Create Date: 2025-12-06 12:34:56.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0008_add_flexible_to_slots"
down_revision = "0007_add_oid_entries"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"calendar_slots",
sa.Column(
"flexible",
sa.Boolean(),
nullable=False,
server_default=sa.false(), # set existing rows to False
),
)
# Optional: drop server_default so future inserts must supply a value
op.alter_column("calendar_slots", "flexible", server_default=None)
def downgrade() -> None:
op.drop_column("calendar_slots", "flexible")

View File

@@ -1,54 +0,0 @@
"""add slot_id to calendar_entries
Revision ID: 0009_add_slot_id_to_entries
Revises: 0008_add_flexible_to_slots
Create Date: 2025-12-06 13:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0009_add_slot_id_to_entries"
down_revision = "0008_add_flexible_to_slots"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add slot_id column as nullable initially
op.add_column(
"calendar_entries",
sa.Column(
"slot_id",
sa.Integer(),
nullable=True,
),
)
# Add foreign key constraint
op.create_foreign_key(
"fk_calendar_entries_slot_id_calendar_slots",
"calendar_entries",
"calendar_slots",
["slot_id"],
["id"],
ondelete="SET NULL",
)
# Add index for better query performance
op.create_index(
"ix_calendar_entries_slot_id",
"calendar_entries",
["slot_id"],
)
def downgrade() -> None:
op.drop_index("ix_calendar_entries_slot_id", table_name="calendar_entries")
op.drop_constraint(
"fk_calendar_entries_slot_id_calendar_slots",
"calendar_entries",
type_="foreignkey",
)
op.drop_column("calendar_entries", "slot_id")

View File

@@ -1,64 +0,0 @@
"""Add post_likes table for liking blog posts
Revision ID: 0010_add_post_likes
Revises: 0009_add_slot_id_to_entries
Create Date: 2025-12-07 13:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0010_add_post_likes"
down_revision = "0009_add_slot_id_to_entries"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"post_likes",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column(
"user_id",
sa.Integer(),
sa.ForeignKey("users.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column(
"post_id",
sa.Integer(),
sa.ForeignKey("posts.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"deleted_at",
sa.DateTime(timezone=True),
nullable=True,
),
)
# Index for fast user+post lookups
op.create_index(
"ix_post_likes_user_post",
"post_likes",
["user_id", "post_id"],
unique=False,
)
def downgrade() -> None:
op.drop_index("ix_post_likes_user_post", table_name="post_likes")
op.drop_table("post_likes")

View File

@@ -1,43 +0,0 @@
"""Add ticket_price and ticket_count to calendar_entries
Revision ID: 0011_add_entry_tickets
Revises: 0010_add_post_likes
Create Date: 2025-12-07 14:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import NUMERIC
# revision identifiers, used by Alembic.
revision = "0011_add_entry_tickets"
down_revision = "0010_add_post_likes"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add ticket_price column (nullable - NULL means no tickets)
op.add_column(
"calendar_entries",
sa.Column(
"ticket_price",
NUMERIC(10, 2),
nullable=True,
),
)
# Add ticket_count column (nullable - NULL means unlimited)
op.add_column(
"calendar_entries",
sa.Column(
"ticket_count",
sa.Integer(),
nullable=True,
),
)
def downgrade() -> None:
op.drop_column("calendar_entries", "ticket_count")
op.drop_column("calendar_entries", "ticket_price")

View File

@@ -1,41 +0,0 @@
"""add ticket_types table

Revision ID: 47fc53fc0d2b
Revises: a9f54e4eaf02
Create Date: 2025-12-08 07:29:11.422435
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '47fc53fc0d2b'
down_revision = 'a9f54e4eaf02'
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Create ticket_types: per-entry named ticket tiers (cost + quantity)."""
    op.create_table(
        'ticket_types',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('entry_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('cost', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('count', sa.Integer(), nullable=False),
        # NOTE(review): created_at/updated_at are NOT NULL with no
        # server_default — inserts must supply them (presumably via ORM
        # defaults); confirm raw-SQL writers do too.
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['entry_id'], ['calendar_entries.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_ticket_types_entry_id', 'ticket_types', ['entry_id'], unique=False)
    op.create_index('ix_ticket_types_name', 'ticket_types', ['name'], unique=False)


def downgrade() -> None:
    """Drop the indexes, then the table."""
    op.drop_index('ix_ticket_types_name', table_name='ticket_types')
    op.drop_index('ix_ticket_types_entry_id', table_name='ticket_types')
    op.drop_table('ticket_types')

View File

@@ -1,36 +0,0 @@
"""Add calendar_entry_posts association table

Revision ID: 6cb124491c9d
Revises: 0011_add_entry_tickets
Create Date: 2025-12-07 03:40:49.194068
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import TIMESTAMP

# revision identifiers, used by Alembic.
revision = '6cb124491c9d'
down_revision = '0011_add_entry_tickets'
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Create the calendar_entries <-> posts association table + indexes."""
    op.create_table(
        'calendar_entry_posts',
        sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
        # CASCADE from both sides: deleting the entry or the post removes the link.
        sa.Column('entry_id', sa.Integer(), sa.ForeignKey('calendar_entries.id', ondelete='CASCADE'), nullable=False),
        sa.Column('post_id', sa.Integer(), sa.ForeignKey('posts.id', ondelete='CASCADE'), nullable=False),
        sa.Column('created_at', TIMESTAMP(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('deleted_at', TIMESTAMP(timezone=True), nullable=True),
    )
    op.create_index('ix_entry_posts_entry_id', 'calendar_entry_posts', ['entry_id'])
    op.create_index('ix_entry_posts_post_id', 'calendar_entry_posts', ['post_id'])


def downgrade() -> None:
    """Drop the indexes, then the association table."""
    # FIX: pass the table via the table_name= keyword — the old code relied on
    # drop_index's second positional parameter, inconsistent with every other
    # migration in this directory.
    op.drop_index('ix_entry_posts_post_id', table_name='calendar_entry_posts')
    op.drop_index('ix_entry_posts_entry_id', table_name='calendar_entry_posts')
    op.drop_table('calendar_entry_posts')

View File

@@ -1,74 +0,0 @@
"""add page_configs table
Revision ID: a1b2c3d4e5f6
Revises: f6d4a1b2c3e7
Create Date: 2026-02-10
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
revision = 'a1b2c3d4e5f6'
down_revision = 'f6d4a1b2c3e7'
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
'page_configs',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('post_id', sa.Integer(), nullable=False),
sa.Column('features', sa.JSON(), server_default='{}', nullable=False),
sa.Column('sumup_merchant_code', sa.String(64), nullable=True),
sa.Column('sumup_api_key', sa.Text(), nullable=True),
sa.Column('sumup_checkout_prefix', sa.String(64), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('post_id'),
)
# Backfill: create PageConfig for every existing page
conn = op.get_bind()
# 1. Pages with calendars -> features={"calendar": true}
conn.execute(text("""
INSERT INTO page_configs (post_id, features, created_at, updated_at)
SELECT p.id, '{"calendar": true}'::jsonb, now(), now()
FROM posts p
WHERE p.is_page = true
AND p.deleted_at IS NULL
AND EXISTS (
SELECT 1 FROM calendars c
WHERE c.post_id = p.id AND c.deleted_at IS NULL
)
"""))
# 2. Market page (slug='market', is_page=true) -> features={"market": true}
# Only if not already inserted above
conn.execute(text("""
INSERT INTO page_configs (post_id, features, created_at, updated_at)
SELECT p.id, '{"market": true}'::jsonb, now(), now()
FROM posts p
WHERE p.slug = 'market'
AND p.is_page = true
AND p.deleted_at IS NULL
AND p.id NOT IN (SELECT post_id FROM page_configs)
"""))
# 3. All other pages -> features={}
conn.execute(text("""
INSERT INTO page_configs (post_id, features, created_at, updated_at)
SELECT p.id, '{}'::jsonb, now(), now()
FROM posts p
WHERE p.is_page = true
AND p.deleted_at IS NULL
AND p.id NOT IN (SELECT post_id FROM page_configs)
"""))
def downgrade() -> None:
op.drop_table('page_configs')

View File

@@ -1,37 +0,0 @@
"""add menu_items table

Revision ID: a9f54e4eaf02
Revises: 6cb124491c9d
Create Date: 2025-12-07 17:38:54.839296
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = 'a9f54e4eaf02'
down_revision = '6cb124491c9d'
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Create menu_items: an ordered list of posts shown in the site menu."""
    op.create_table(
        'menu_items',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('post_id', sa.Integer(), nullable=False),
        sa.Column('sort_order', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # Non-unique lookup indexes on the two query columns.
    for column in ('post_id', 'sort_order'):
        op.create_index(op.f(f'ix_menu_items_{column}'), 'menu_items', [column], unique=False)


def downgrade() -> None:
    """Drop the indexes (reverse creation order), then the table."""
    for column in ('sort_order', 'post_id'):
        op.drop_index(op.f(f'ix_menu_items_{column}'), table_name='menu_items')
    op.drop_table('menu_items')

View File

@@ -1,97 +0,0 @@
"""add market_places table and nav_tops.market_id
Revision ID: b2c3d4e5f6a7
Revises: a1b2c3d4e5f6
Create Date: 2026-02-10
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
revision = 'b2c3d4e5f6a7'
down_revision = 'a1b2c3d4e5f6'
branch_labels = None
depends_on = None
def upgrade() -> None:
# 1. Create market_places table
op.create_table(
'market_places',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('post_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('slug', sa.String(255), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
)
op.create_index('ix_market_places_post_id', 'market_places', ['post_id'])
op.create_index(
'ux_market_places_slug_active',
'market_places',
[sa.text('lower(slug)')],
unique=True,
postgresql_where=sa.text('deleted_at IS NULL'),
)
# 2. Add market_id column to nav_tops
op.add_column(
'nav_tops',
sa.Column('market_id', sa.Integer(), nullable=True),
)
op.create_foreign_key(
'fk_nav_tops_market_id',
'nav_tops',
'market_places',
['market_id'],
['id'],
ondelete='SET NULL',
)
op.create_index('ix_nav_tops_market_id', 'nav_tops', ['market_id'])
# 3. Backfill: create default MarketPlace for the 'market' page
conn = op.get_bind()
# Find the market page
result = conn.execute(text("""
SELECT id FROM posts
WHERE slug = 'market' AND is_page = true AND deleted_at IS NULL
LIMIT 1
"""))
row = result.fetchone()
if row:
post_id = row[0]
# Insert the default market
conn.execute(text("""
INSERT INTO market_places (post_id, name, slug, created_at, updated_at)
VALUES (:post_id, 'Suma Market', 'suma-market', now(), now())
"""), {"post_id": post_id})
# Get the new market_places id
market_row = conn.execute(text("""
SELECT id FROM market_places
WHERE slug = 'suma-market' AND deleted_at IS NULL
LIMIT 1
""")).fetchone()
if market_row:
market_id = market_row[0]
# Assign all active nav_tops to this market
conn.execute(text("""
UPDATE nav_tops SET market_id = :market_id
WHERE deleted_at IS NULL
"""), {"market_id": market_id})
def downgrade() -> None:
op.drop_index('ix_nav_tops_market_id', table_name='nav_tops')
op.drop_constraint('fk_nav_tops_market_id', 'nav_tops', type_='foreignkey')
op.drop_column('nav_tops', 'market_id')
op.drop_index('ux_market_places_slug_active', table_name='market_places')
op.drop_index('ix_market_places_post_id', table_name='market_places')
op.drop_table('market_places')

View File

@@ -1,35 +0,0 @@
"""add snippets table
Revision ID: c3a1f7b9d4e5
Revises: 47fc53fc0d2b
Create Date: 2026-02-07
"""
from alembic import op
import sqlalchemy as sa
revision = 'c3a1f7b9d4e5'
down_revision = '47fc53fc0d2b'
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
'snippets',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('value', sa.Text(), nullable=False),
sa.Column('visibility', sa.String(length=20), server_default='private', nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'name', name='uq_snippets_user_name'),
)
op.create_index('ix_snippets_visibility', 'snippets', ['visibility'])
def downgrade() -> None:
op.drop_index('ix_snippets_visibility', table_name='snippets')
op.drop_table('snippets')

View File

@@ -1,55 +0,0 @@
"""add page_config_id to orders, market_place_id to cart_items
Revision ID: c3d4e5f6a7b8
Revises: b2c3d4e5f6a7
Create Date: 2026-02-10
"""
from alembic import op
import sqlalchemy as sa
revision = 'c3d4e5f6a7b8'
down_revision = 'b2c3d4e5f6a7'
branch_labels = None
depends_on = None
def upgrade() -> None:
# 1. Add market_place_id to cart_items
op.add_column(
'cart_items',
sa.Column('market_place_id', sa.Integer(), nullable=True),
)
op.create_foreign_key(
'fk_cart_items_market_place_id',
'cart_items',
'market_places',
['market_place_id'],
['id'],
ondelete='SET NULL',
)
op.create_index('ix_cart_items_market_place_id', 'cart_items', ['market_place_id'])
# 2. Add page_config_id to orders
op.add_column(
'orders',
sa.Column('page_config_id', sa.Integer(), nullable=True),
)
op.create_foreign_key(
'fk_orders_page_config_id',
'orders',
'page_configs',
['page_config_id'],
['id'],
ondelete='SET NULL',
)
op.create_index('ix_orders_page_config_id', 'orders', ['page_config_id'])
def downgrade() -> None:
op.drop_index('ix_orders_page_config_id', table_name='orders')
op.drop_constraint('fk_orders_page_config_id', 'orders', type_='foreignkey')
op.drop_column('orders', 'page_config_id')
op.drop_index('ix_cart_items_market_place_id', table_name='cart_items')
op.drop_constraint('fk_cart_items_market_place_id', 'cart_items', type_='foreignkey')
op.drop_column('cart_items', 'market_place_id')

View File

@@ -1,45 +0,0 @@
"""add post user_id, author email, publish_requested
Revision ID: d4b2e8f1a3c7
Revises: c3a1f7b9d4e5
Create Date: 2026-02-08
"""
from alembic import op
import sqlalchemy as sa
revision = 'd4b2e8f1a3c7'
down_revision = 'c3a1f7b9d4e5'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add author.email
op.add_column('authors', sa.Column('email', sa.String(255), nullable=True))
# Add post.user_id FK
op.add_column('posts', sa.Column('user_id', sa.Integer(), nullable=True))
op.create_foreign_key('fk_posts_user_id', 'posts', 'users', ['user_id'], ['id'], ondelete='SET NULL')
op.create_index('ix_posts_user_id', 'posts', ['user_id'])
# Add post.publish_requested
op.add_column('posts', sa.Column('publish_requested', sa.Boolean(), server_default='false', nullable=False))
# Backfill: match posts to users via primary_author email
op.execute("""
UPDATE posts
SET user_id = u.id
FROM authors a
JOIN users u ON lower(a.email) = lower(u.email)
WHERE posts.primary_author_id = a.id
AND posts.user_id IS NULL
AND a.email IS NOT NULL
""")
def downgrade() -> None:
op.drop_column('posts', 'publish_requested')
op.drop_index('ix_posts_user_id', table_name='posts')
op.drop_constraint('fk_posts_user_id', 'posts', type_='foreignkey')
op.drop_column('posts', 'user_id')
op.drop_column('authors', 'email')

View File

@@ -1,45 +0,0 @@
"""add tag_groups and tag_group_tags
Revision ID: e5c3f9a2b1d6
Revises: d4b2e8f1a3c7
Create Date: 2026-02-08
"""
from alembic import op
import sqlalchemy as sa
revision = 'e5c3f9a2b1d6'
down_revision = 'd4b2e8f1a3c7'
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
'tag_groups',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('slug', sa.String(length=191), nullable=False),
sa.Column('feature_image', sa.Text(), nullable=True),
sa.Column('colour', sa.String(length=32), nullable=True),
sa.Column('sort_order', sa.Integer(), nullable=False, server_default='0'),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('slug'),
)
op.create_table(
'tag_group_tags',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('tag_group_id', sa.Integer(), nullable=False),
sa.Column('tag_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['tag_group_id'], ['tag_groups.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('tag_group_id', 'tag_id', name='uq_tag_group_tag'),
)
def downgrade() -> None:
op.drop_table('tag_group_tags')
op.drop_table('tag_groups')

View File

@@ -1,40 +0,0 @@
"""add domain_events table
Revision ID: f6d4a0b2c3e7
Revises: e5c3f9a2b1d6
Create Date: 2026-02-11
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = 'f6d4a0b2c3e7'
down_revision = 'e5c3f9a2b1d6'
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
'domain_events',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('event_type', sa.String(128), nullable=False),
sa.Column('aggregate_type', sa.String(64), nullable=False),
sa.Column('aggregate_id', sa.Integer(), nullable=False),
sa.Column('payload', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('state', sa.String(20), server_default='pending', nullable=False),
sa.Column('attempts', sa.Integer(), server_default='0', nullable=False),
sa.Column('max_attempts', sa.Integer(), server_default='5', nullable=False),
sa.Column('last_error', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('processed_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
)
op.create_index('ix_domain_events_event_type', 'domain_events', ['event_type'])
op.create_index('ix_domain_events_state', 'domain_events', ['state'])
def downgrade() -> None:
op.drop_index('ix_domain_events_state', table_name='domain_events')
op.drop_index('ix_domain_events_event_type', table_name='domain_events')
op.drop_table('domain_events')

View File

@@ -1,47 +0,0 @@
"""add tickets table
Revision ID: f6d4a1b2c3e7
Revises: e5c3f9a2b1d6
Create Date: 2026-02-09
"""
from alembic import op
import sqlalchemy as sa
revision = 'f6d4a1b2c3e7'
down_revision = 'e5c3f9a2b1d6'
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
'tickets',
sa.Column('id', sa.Integer(), primary_key=True),
sa.Column('entry_id', sa.Integer(), sa.ForeignKey('calendar_entries.id', ondelete='CASCADE'), nullable=False),
sa.Column('ticket_type_id', sa.Integer(), sa.ForeignKey('ticket_types.id', ondelete='SET NULL'), nullable=True),
sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
sa.Column('session_id', sa.String(64), nullable=True),
sa.Column('order_id', sa.Integer(), sa.ForeignKey('orders.id', ondelete='SET NULL'), nullable=True),
sa.Column('code', sa.String(64), unique=True, nullable=False),
sa.Column('state', sa.String(20), nullable=False, server_default=sa.text("'reserved'")),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.Column('checked_in_at', sa.DateTime(timezone=True), nullable=True),
)
op.create_index('ix_tickets_entry_id', 'tickets', ['entry_id'])
op.create_index('ix_tickets_ticket_type_id', 'tickets', ['ticket_type_id'])
op.create_index('ix_tickets_user_id', 'tickets', ['user_id'])
op.create_index('ix_tickets_session_id', 'tickets', ['session_id'])
op.create_index('ix_tickets_order_id', 'tickets', ['order_id'])
op.create_index('ix_tickets_code', 'tickets', ['code'], unique=True)
op.create_index('ix_tickets_state', 'tickets', ['state'])
def downgrade() -> None:
op.drop_index('ix_tickets_state', 'tickets')
op.drop_index('ix_tickets_code', 'tickets')
op.drop_index('ix_tickets_order_id', 'tickets')
op.drop_index('ix_tickets_session_id', 'tickets')
op.drop_index('ix_tickets_user_id', 'tickets')
op.drop_index('ix_tickets_ticket_type_id', 'tickets')
op.drop_index('ix_tickets_entry_id', 'tickets')
op.drop_table('tickets')

View File

@@ -1,115 +0,0 @@
"""replace post_id FKs with container_type + container_id
Revision ID: g7e5b1c3d4f8
Revises: f6d4a0b2c3e7
Create Date: 2026-02-11
"""
from alembic import op
import sqlalchemy as sa
revision = 'g7e5b1c3d4f8'
down_revision = 'f6d4a0b2c3e7'
branch_labels = None
depends_on = None
def upgrade() -> None:
# --- calendars: post_id → container_type + container_id ---
op.add_column('calendars', sa.Column('container_type', sa.String(32), nullable=True))
op.add_column('calendars', sa.Column('container_id', sa.Integer(), nullable=True))
op.execute("UPDATE calendars SET container_type = 'page', container_id = post_id")
op.alter_column('calendars', 'container_type', nullable=False, server_default=sa.text("'page'"))
op.alter_column('calendars', 'container_id', nullable=False)
op.drop_index('ix_calendars_post_id', table_name='calendars')
op.drop_index('ux_calendars_post_slug_active', table_name='calendars')
op.drop_constraint('calendars_post_id_fkey', 'calendars', type_='foreignkey')
op.drop_column('calendars', 'post_id')
op.create_index('ix_calendars_container', 'calendars', ['container_type', 'container_id'])
op.create_index(
'ux_calendars_container_slug_active',
'calendars',
['container_type', 'container_id', sa.text('lower(slug)')],
unique=True,
postgresql_where=sa.text('deleted_at IS NULL'),
)
# --- market_places: post_id → container_type + container_id ---
op.add_column('market_places', sa.Column('container_type', sa.String(32), nullable=True))
op.add_column('market_places', sa.Column('container_id', sa.Integer(), nullable=True))
op.execute("UPDATE market_places SET container_type = 'page', container_id = post_id")
op.alter_column('market_places', 'container_type', nullable=False, server_default=sa.text("'page'"))
op.alter_column('market_places', 'container_id', nullable=False)
op.drop_index('ix_market_places_post_id', table_name='market_places')
op.drop_constraint('market_places_post_id_fkey', 'market_places', type_='foreignkey')
op.drop_column('market_places', 'post_id')
op.create_index('ix_market_places_container', 'market_places', ['container_type', 'container_id'])
# --- page_configs: post_id → container_type + container_id ---
op.add_column('page_configs', sa.Column('container_type', sa.String(32), nullable=True))
op.add_column('page_configs', sa.Column('container_id', sa.Integer(), nullable=True))
op.execute("UPDATE page_configs SET container_type = 'page', container_id = post_id")
op.alter_column('page_configs', 'container_type', nullable=False, server_default=sa.text("'page'"))
op.alter_column('page_configs', 'container_id', nullable=False)
op.drop_constraint('page_configs_post_id_fkey', 'page_configs', type_='foreignkey')
op.drop_column('page_configs', 'post_id')
op.create_index('ix_page_configs_container', 'page_configs', ['container_type', 'container_id'])
# --- calendar_entry_posts: post_id → content_type + content_id ---
op.add_column('calendar_entry_posts', sa.Column('content_type', sa.String(32), nullable=True))
op.add_column('calendar_entry_posts', sa.Column('content_id', sa.Integer(), nullable=True))
op.execute("UPDATE calendar_entry_posts SET content_type = 'post', content_id = post_id")
op.alter_column('calendar_entry_posts', 'content_type', nullable=False, server_default=sa.text("'post'"))
op.alter_column('calendar_entry_posts', 'content_id', nullable=False)
op.drop_index('ix_entry_posts_post_id', table_name='calendar_entry_posts')
op.drop_constraint('calendar_entry_posts_post_id_fkey', 'calendar_entry_posts', type_='foreignkey')
op.drop_column('calendar_entry_posts', 'post_id')
op.create_index('ix_entry_posts_content', 'calendar_entry_posts', ['content_type', 'content_id'])
def downgrade() -> None:
# --- calendar_entry_posts: restore post_id ---
op.add_column('calendar_entry_posts', sa.Column('post_id', sa.Integer(), nullable=True))
op.execute("UPDATE calendar_entry_posts SET post_id = content_id WHERE content_type = 'post'")
op.alter_column('calendar_entry_posts', 'post_id', nullable=False)
op.create_foreign_key('calendar_entry_posts_post_id_fkey', 'calendar_entry_posts', 'posts', ['post_id'], ['id'], ondelete='CASCADE')
op.create_index('ix_entry_posts_post_id', 'calendar_entry_posts', ['post_id'])
op.drop_index('ix_entry_posts_content', table_name='calendar_entry_posts')
op.drop_column('calendar_entry_posts', 'content_id')
op.drop_column('calendar_entry_posts', 'content_type')
# --- page_configs: restore post_id ---
op.add_column('page_configs', sa.Column('post_id', sa.Integer(), nullable=True))
op.execute("UPDATE page_configs SET post_id = container_id WHERE container_type = 'page'")
op.alter_column('page_configs', 'post_id', nullable=False)
op.create_foreign_key('page_configs_post_id_fkey', 'page_configs', 'posts', ['post_id'], ['id'], ondelete='CASCADE')
op.drop_index('ix_page_configs_container', table_name='page_configs')
op.drop_column('page_configs', 'container_id')
op.drop_column('page_configs', 'container_type')
# --- market_places: restore post_id ---
op.add_column('market_places', sa.Column('post_id', sa.Integer(), nullable=True))
op.execute("UPDATE market_places SET post_id = container_id WHERE container_type = 'page'")
op.alter_column('market_places', 'post_id', nullable=False)
op.create_foreign_key('market_places_post_id_fkey', 'market_places', 'posts', ['post_id'], ['id'], ondelete='CASCADE')
op.create_index('ix_market_places_post_id', 'market_places', ['post_id'])
op.drop_index('ix_market_places_container', table_name='market_places')
op.drop_column('market_places', 'container_id')
op.drop_column('market_places', 'container_type')
# --- calendars: restore post_id ---
op.add_column('calendars', sa.Column('post_id', sa.Integer(), nullable=True))
op.execute("UPDATE calendars SET post_id = container_id WHERE container_type = 'page'")
op.alter_column('calendars', 'post_id', nullable=False)
op.create_foreign_key('calendars_post_id_fkey', 'calendars', 'posts', ['post_id'], ['id'], ondelete='CASCADE')
op.create_index('ix_calendars_post_id', 'calendars', ['post_id'])
op.create_index(
'ux_calendars_post_slug_active',
'calendars',
['post_id', sa.text('lower(slug)')],
unique=True,
postgresql_where=sa.text('deleted_at IS NULL'),
)
op.drop_index('ux_calendars_container_slug_active', table_name='calendars')
op.drop_index('ix_calendars_container', table_name='calendars')
op.drop_column('calendars', 'container_id')
op.drop_column('calendars', 'container_type')

View File

@@ -1,23 +0,0 @@
"""merge heads
Revision ID: h8f6c2d4e5a9
Revises: c3d4e5f6a7b8, g7e5b1c3d4f8
Create Date: 2026-02-11 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'h8f6c2d4e5a9'
down_revision = ('c3d4e5f6a7b8', 'g7e5b1c3d4f8')
branch_labels = None
depends_on = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -1,98 +0,0 @@
"""add glue layer tables (container_relations + menu_nodes)
Revision ID: i9g7d3e5f6
Revises: h8f6c2d4e5a9
Create Date: 2026-02-11
"""
from alembic import op
import sqlalchemy as sa
revision = 'i9g7d3e5f6'
down_revision = 'h8f6c2d4e5a9'
branch_labels = None
depends_on = None
def upgrade() -> None:
# --- container_relations ---
op.create_table(
'container_relations',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('parent_type', sa.String(32), nullable=False),
sa.Column('parent_id', sa.Integer(), nullable=False),
sa.Column('child_type', sa.String(32), nullable=False),
sa.Column('child_id', sa.Integer(), nullable=False),
sa.Column('sort_order', sa.Integer(), nullable=False, server_default='0'),
sa.Column('label', sa.String(255), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint(
'parent_type', 'parent_id', 'child_type', 'child_id',
name='uq_container_relations_parent_child',
),
)
op.create_index('ix_container_relations_parent', 'container_relations', ['parent_type', 'parent_id'])
op.create_index('ix_container_relations_child', 'container_relations', ['child_type', 'child_id'])
# --- menu_nodes ---
op.create_table(
'menu_nodes',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('container_type', sa.String(32), nullable=False),
sa.Column('container_id', sa.Integer(), nullable=False),
sa.Column('parent_id', sa.Integer(), nullable=True),
sa.Column('sort_order', sa.Integer(), nullable=False, server_default='0'),
sa.Column('depth', sa.Integer(), nullable=False, server_default='0'),
sa.Column('label', sa.String(255), nullable=False),
sa.Column('slug', sa.String(255), nullable=True),
sa.Column('href', sa.String(1024), nullable=True),
sa.Column('icon', sa.String(64), nullable=True),
sa.Column('feature_image', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.ForeignKeyConstraint(['parent_id'], ['menu_nodes.id'], ondelete='SET NULL'),
)
op.create_index('ix_menu_nodes_container', 'menu_nodes', ['container_type', 'container_id'])
op.create_index('ix_menu_nodes_parent_id', 'menu_nodes', ['parent_id'])
# --- Backfill container_relations from existing container-pattern tables ---
op.execute("""
INSERT INTO container_relations (parent_type, parent_id, child_type, child_id, sort_order)
SELECT 'page', container_id, 'calendar', id, 0
FROM calendars
WHERE deleted_at IS NULL AND container_type = 'page'
""")
op.execute("""
INSERT INTO container_relations (parent_type, parent_id, child_type, child_id, sort_order)
SELECT 'page', container_id, 'market', id, 0
FROM market_places
WHERE deleted_at IS NULL AND container_type = 'page'
""")
op.execute("""
INSERT INTO container_relations (parent_type, parent_id, child_type, child_id, sort_order)
SELECT 'page', container_id, 'page_config', id, 0
FROM page_configs
WHERE deleted_at IS NULL AND container_type = 'page'
""")
# --- Backfill menu_nodes from existing menu_items + posts ---
op.execute("""
INSERT INTO menu_nodes (container_type, container_id, label, slug, feature_image, sort_order)
SELECT 'page', mi.post_id, p.title, p.slug, p.feature_image, mi.sort_order
FROM menu_items mi
JOIN posts p ON mi.post_id = p.id
WHERE mi.deleted_at IS NULL
""")
def downgrade() -> None:
op.drop_index('ix_menu_nodes_parent_id', table_name='menu_nodes')
op.drop_index('ix_menu_nodes_container', table_name='menu_nodes')
op.drop_table('menu_nodes')
op.drop_index('ix_container_relations_child', table_name='container_relations')
op.drop_index('ix_container_relations_parent', table_name='container_relations')
op.drop_table('container_relations')

View File

@@ -1,51 +0,0 @@
"""drop cross-domain FK constraints (events → cart)
Merge three existing heads and remove:
- calendar_entries.order_id FK → orders.id
- tickets.order_id FK → orders.id
Columns are kept as plain integers.
Revision ID: j0h8e4f6g7
Revises: c3d4e5f6a7b8, i9g7d3e5f6, g7e5b1c3d4f8
Create Date: 2026-02-14
"""
from alembic import op
import sqlalchemy as sa
revision = 'j0h8e4f6g7'
down_revision = ('c3d4e5f6a7b8', 'i9g7d3e5f6', 'g7e5b1c3d4f8')
branch_labels = None
depends_on = None
def upgrade() -> None:
op.drop_constraint(
'fk_calendar_entries_order_id',
'calendar_entries',
type_='foreignkey',
)
op.drop_constraint(
'tickets_order_id_fkey',
'tickets',
type_='foreignkey',
)
def downgrade() -> None:
op.create_foreign_key(
'fk_calendar_entries_order_id',
'calendar_entries',
'orders',
['order_id'],
['id'],
ondelete='SET NULL',
)
op.create_foreign_key(
'tickets_order_id_fkey',
'tickets',
'orders',
['order_id'],
['id'],
ondelete='SET NULL',
)

View File

@@ -1,142 +0,0 @@
"""add federation tables
Revision ID: k1i9f5g7h8
Revises: j0h8e4f6g7
Create Date: 2026-02-21
Creates:
- ap_actor_profiles — AP identity per user
- ap_activities — local + remote AP activities
- ap_followers — remote followers
- ap_inbox_items — raw incoming AP activities
- ap_anchors — OpenTimestamps merkle batches
- ipfs_pins — IPFS content tracking (platform-wide)
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = "k1i9f5g7h8"
down_revision = "j0h8e4f6g7"
branch_labels = None
depends_on = None
def upgrade() -> None:
# -- ap_anchors (referenced by ap_activities) ----------------------------
op.create_table(
"ap_anchors",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("merkle_root", sa.String(128), nullable=False),
sa.Column("tree_ipfs_cid", sa.String(128), nullable=True),
sa.Column("ots_proof_cid", sa.String(128), nullable=True),
sa.Column("activity_count", sa.Integer(), nullable=False, server_default="0"),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("bitcoin_txid", sa.String(128), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
# -- ap_actor_profiles ---------------------------------------------------
op.create_table(
"ap_actor_profiles",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("user_id", sa.Integer(), nullable=False),
sa.Column("preferred_username", sa.String(64), nullable=False),
sa.Column("display_name", sa.String(255), nullable=True),
sa.Column("summary", sa.Text(), nullable=True),
sa.Column("public_key_pem", sa.Text(), nullable=False),
sa.Column("private_key_pem", sa.Text(), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("preferred_username"),
sa.UniqueConstraint("user_id"),
)
op.create_index("ix_ap_actor_user_id", "ap_actor_profiles", ["user_id"], unique=True)
op.create_index("ix_ap_actor_username", "ap_actor_profiles", ["preferred_username"], unique=True)
# -- ap_activities -------------------------------------------------------
op.create_table(
"ap_activities",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("activity_id", sa.String(512), nullable=False),
sa.Column("activity_type", sa.String(64), nullable=False),
sa.Column("actor_profile_id", sa.Integer(), nullable=False),
sa.Column("object_type", sa.String(64), nullable=True),
sa.Column("object_data", postgresql.JSONB(), nullable=True),
sa.Column("published", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.Column("signature", postgresql.JSONB(), nullable=True),
sa.Column("is_local", sa.Boolean(), nullable=False, server_default="true"),
sa.Column("source_type", sa.String(64), nullable=True),
sa.Column("source_id", sa.Integer(), nullable=True),
sa.Column("ipfs_cid", sa.String(128), nullable=True),
sa.Column("anchor_id", sa.Integer(), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(["anchor_id"], ["ap_anchors.id"], ondelete="SET NULL"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("activity_id"),
)
op.create_index("ix_ap_activity_actor", "ap_activities", ["actor_profile_id"])
op.create_index("ix_ap_activity_source", "ap_activities", ["source_type", "source_id"])
op.create_index("ix_ap_activity_published", "ap_activities", ["published"])
# -- ap_followers --------------------------------------------------------
op.create_table(
"ap_followers",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("actor_profile_id", sa.Integer(), nullable=False),
sa.Column("follower_acct", sa.String(512), nullable=False),
sa.Column("follower_inbox", sa.String(512), nullable=False),
sa.Column("follower_actor_url", sa.String(512), nullable=False),
sa.Column("follower_public_key", sa.Text(), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("actor_profile_id", "follower_acct", name="uq_follower_acct"),
)
op.create_index("ix_ap_follower_actor", "ap_followers", ["actor_profile_id"])
# -- ap_inbox_items ------------------------------------------------------
op.create_table(
"ap_inbox_items",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("actor_profile_id", sa.Integer(), nullable=False),
sa.Column("raw_json", postgresql.JSONB(), nullable=False),
sa.Column("activity_type", sa.String(64), nullable=True),
sa.Column("from_actor", sa.String(512), nullable=True),
sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index("ix_ap_inbox_state", "ap_inbox_items", ["state"])
op.create_index("ix_ap_inbox_actor", "ap_inbox_items", ["actor_profile_id"])
# -- ipfs_pins -----------------------------------------------------------
op.create_table(
"ipfs_pins",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("content_hash", sa.String(128), nullable=False),
sa.Column("ipfs_cid", sa.String(128), nullable=False),
sa.Column("pin_type", sa.String(64), nullable=False),
sa.Column("source_type", sa.String(64), nullable=True),
sa.Column("source_id", sa.Integer(), nullable=True),
sa.Column("size_bytes", sa.BigInteger(), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("ipfs_cid"),
)
op.create_index("ix_ipfs_pin_source", "ipfs_pins", ["source_type", "source_id"])
op.create_index("ix_ipfs_pin_cid", "ipfs_pins", ["ipfs_cid"], unique=True)
def downgrade() -> None:
op.drop_table("ipfs_pins")
op.drop_table("ap_inbox_items")
op.drop_table("ap_followers")
op.drop_table("ap_activities")
op.drop_table("ap_actor_profiles")
op.drop_table("ap_anchors")

View File

@@ -1,138 +0,0 @@
"""add fediverse social tables
Revision ID: l2j0g6h8i9
Revises: k1i9f5g7h8
Create Date: 2026-02-22
Creates:
- ap_remote_actors — cached profiles of remote actors
- ap_following — outbound follows (local → remote)
- ap_remote_posts — ingested posts from remote actors
- ap_local_posts — native posts composed in federation UI
- ap_interactions — likes and boosts
- ap_notifications — follow/like/boost/mention/reply notifications
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB
revision = "l2j0g6h8i9"
down_revision = "k1i9f5g7h8"
branch_labels = None
depends_on = None
def upgrade() -> None:
# -- ap_remote_actors --
op.create_table(
"ap_remote_actors",
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("actor_url", sa.String(512), unique=True, nullable=False),
sa.Column("inbox_url", sa.String(512), nullable=False),
sa.Column("shared_inbox_url", sa.String(512), nullable=True),
sa.Column("preferred_username", sa.String(255), nullable=False),
sa.Column("display_name", sa.String(255), nullable=True),
sa.Column("summary", sa.Text, nullable=True),
sa.Column("icon_url", sa.String(512), nullable=True),
sa.Column("public_key_pem", sa.Text, nullable=True),
sa.Column("domain", sa.String(255), nullable=False),
sa.Column("fetched_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
)
op.create_index("ix_ap_remote_actor_url", "ap_remote_actors", ["actor_url"], unique=True)
op.create_index("ix_ap_remote_actor_domain", "ap_remote_actors", ["domain"])
# -- ap_following --
op.create_table(
"ap_following",
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False),
sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column("accepted_at", sa.DateTime(timezone=True), nullable=True),
sa.UniqueConstraint("actor_profile_id", "remote_actor_id", name="uq_following"),
)
op.create_index("ix_ap_following_actor", "ap_following", ["actor_profile_id"])
op.create_index("ix_ap_following_remote", "ap_following", ["remote_actor_id"])
# -- ap_remote_posts --
op.create_table(
"ap_remote_posts",
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False),
sa.Column("activity_id", sa.String(512), unique=True, nullable=False),
sa.Column("object_id", sa.String(512), unique=True, nullable=False),
sa.Column("object_type", sa.String(64), nullable=False, server_default="Note"),
sa.Column("content", sa.Text, nullable=True),
sa.Column("summary", sa.Text, nullable=True),
sa.Column("url", sa.String(512), nullable=True),
sa.Column("attachment_data", JSONB, nullable=True),
sa.Column("tag_data", JSONB, nullable=True),
sa.Column("in_reply_to", sa.String(512), nullable=True),
sa.Column("conversation", sa.String(512), nullable=True),
sa.Column("published", sa.DateTime(timezone=True), nullable=True),
sa.Column("fetched_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
)
op.create_index("ix_ap_remote_post_actor", "ap_remote_posts", ["remote_actor_id"])
op.create_index("ix_ap_remote_post_published", "ap_remote_posts", ["published"])
op.create_index("ix_ap_remote_post_object", "ap_remote_posts", ["object_id"], unique=True)
# -- ap_local_posts --
op.create_table(
"ap_local_posts",
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
sa.Column("content", sa.Text, nullable=False),
sa.Column("visibility", sa.String(20), nullable=False, server_default="public"),
sa.Column("in_reply_to", sa.String(512), nullable=True),
sa.Column("published", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
)
op.create_index("ix_ap_local_post_actor", "ap_local_posts", ["actor_profile_id"])
op.create_index("ix_ap_local_post_published", "ap_local_posts", ["published"])
# -- ap_interactions --
op.create_table(
"ap_interactions",
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True),
sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=True),
sa.Column("post_type", sa.String(20), nullable=False),
sa.Column("post_id", sa.Integer, nullable=False),
sa.Column("interaction_type", sa.String(20), nullable=False),
sa.Column("activity_id", sa.String(512), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
)
op.create_index("ix_ap_interaction_post", "ap_interactions", ["post_type", "post_id"])
op.create_index("ix_ap_interaction_actor", "ap_interactions", ["actor_profile_id"])
op.create_index("ix_ap_interaction_remote", "ap_interactions", ["remote_actor_id"])
# -- ap_notifications --
op.create_table(
"ap_notifications",
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
sa.Column("notification_type", sa.String(20), nullable=False),
sa.Column("from_remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="SET NULL"), nullable=True),
sa.Column("from_actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="SET NULL"), nullable=True),
sa.Column("target_activity_id", sa.Integer, sa.ForeignKey("ap_activities.id", ondelete="SET NULL"), nullable=True),
sa.Column("target_remote_post_id", sa.Integer, sa.ForeignKey("ap_remote_posts.id", ondelete="SET NULL"), nullable=True),
sa.Column("read", sa.Boolean, nullable=False, server_default="false"),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
)
op.create_index("ix_ap_notification_actor", "ap_notifications", ["actor_profile_id"])
op.create_index("ix_ap_notification_read", "ap_notifications", ["actor_profile_id", "read"])
op.create_index("ix_ap_notification_created", "ap_notifications", ["created_at"])
def downgrade() -> None:
op.drop_table("ap_notifications")
op.drop_table("ap_interactions")
op.drop_table("ap_local_posts")
op.drop_table("ap_remote_posts")
op.drop_table("ap_following")
op.drop_table("ap_remote_actors")

View File

@@ -1,113 +0,0 @@
"""add unified event bus columns to ap_activities
Revision ID: m3k1h7i9j0
Revises: l2j0g6h8i9
Create Date: 2026-02-22
Adds processing and visibility columns so ap_activities can serve as the
unified event bus for both internal domain events and federation delivery.
"""
revision = "m3k1h7i9j0"
down_revision = "l2j0g6h8i9"
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade() -> None:
# Add new columns with defaults so existing rows stay valid
op.add_column(
"ap_activities",
sa.Column("actor_uri", sa.String(512), nullable=True),
)
op.add_column(
"ap_activities",
sa.Column(
"visibility", sa.String(20),
nullable=False, server_default="public",
),
)
op.add_column(
"ap_activities",
sa.Column(
"process_state", sa.String(20),
nullable=False, server_default="completed",
),
)
op.add_column(
"ap_activities",
sa.Column(
"process_attempts", sa.Integer(),
nullable=False, server_default="0",
),
)
op.add_column(
"ap_activities",
sa.Column(
"process_max_attempts", sa.Integer(),
nullable=False, server_default="5",
),
)
op.add_column(
"ap_activities",
sa.Column("process_error", sa.Text(), nullable=True),
)
op.add_column(
"ap_activities",
sa.Column(
"processed_at", sa.DateTime(timezone=True), nullable=True,
),
)
# Backfill actor_uri from the related actor_profile
op.execute(
"""
UPDATE ap_activities a
SET actor_uri = CONCAT(
'https://',
COALESCE(current_setting('app.ap_domain', true), 'rose-ash.com'),
'/users/',
p.preferred_username
)
FROM ap_actor_profiles p
WHERE a.actor_profile_id = p.id
AND a.actor_uri IS NULL
"""
)
# Make actor_profile_id nullable (internal events have no actor profile)
op.alter_column(
"ap_activities", "actor_profile_id",
existing_type=sa.Integer(),
nullable=True,
)
# Index for processor polling
op.create_index(
"ix_ap_activity_process", "ap_activities", ["process_state"],
)
def downgrade() -> None:
op.drop_index("ix_ap_activity_process", table_name="ap_activities")
# Restore actor_profile_id NOT NULL (remove any rows without it first)
op.execute(
"DELETE FROM ap_activities WHERE actor_profile_id IS NULL"
)
op.alter_column(
"ap_activities", "actor_profile_id",
existing_type=sa.Integer(),
nullable=False,
)
op.drop_column("ap_activities", "processed_at")
op.drop_column("ap_activities", "process_error")
op.drop_column("ap_activities", "process_max_attempts")
op.drop_column("ap_activities", "process_attempts")
op.drop_column("ap_activities", "process_state")
op.drop_column("ap_activities", "visibility")
op.drop_column("ap_activities", "actor_uri")

View File

@@ -1,46 +0,0 @@
"""drop domain_events table
Revision ID: n4l2i8j0k1
Revises: m3k1h7i9j0
Create Date: 2026-02-22
The domain_events table is no longer used — all events now flow through
ap_activities with the unified activity bus.
"""
revision = "n4l2i8j0k1"
down_revision = "m3k1h7i9j0"
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB
def upgrade() -> None:
    """Drop the unused ``domain_events`` table and its two indexes."""
    op.drop_index("ix_domain_events_state", table_name="domain_events")
    op.drop_index("ix_domain_events_event_type", table_name="domain_events")
    op.drop_table("domain_events")
def downgrade() -> None:
    """Recreate ``domain_events`` with its original schema.

    Structure only — rows removed by ``upgrade`` are not restored.
    """
    op.create_table(
        "domain_events",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("event_type", sa.String(128), nullable=False),
        sa.Column("aggregate_type", sa.String(64), nullable=False),
        sa.Column("aggregate_id", sa.Integer(), nullable=False),
        sa.Column("payload", JSONB(), nullable=True),
        sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
        sa.Column("attempts", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("max_attempts", sa.Integer(), nullable=False, server_default="5"),
        sa.Column("last_error", sa.Text(), nullable=True),
        sa.Column(
            "created_at", sa.DateTime(timezone=True),
            nullable=False, server_default=sa.func.now(),
        ),
        sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_domain_events_event_type", "domain_events", ["event_type"])
    op.create_index("ix_domain_events_state", "domain_events", ["state"])

View File

@@ -1,35 +0,0 @@
"""Add origin_app column to ap_activities
Revision ID: o5m3j9k1l2
Revises: n4l2i8j0k1
Create Date: 2026-02-22
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect as sa_inspect
revision = "o5m3j9k1l2"
down_revision = "n4l2i8j0k1"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add ``origin_app`` to ``ap_activities``, idempotently.

    The column add is guarded by an inspector check and the index uses
    ``if_not_exists`` so the migration can safely re-run against a DB
    where either already exists.
    """
    conn = op.get_bind()
    inspector = sa_inspect(conn)
    columns = [c["name"] for c in inspector.get_columns("ap_activities")]
    if "origin_app" not in columns:
        op.add_column(
            "ap_activities",
            sa.Column("origin_app", sa.String(64), nullable=True),
        )
    # Index is idempotent with if_not_exists
    op.create_index(
        "ix_ap_activity_origin_app", "ap_activities", ["origin_app"],
        if_not_exists=True,
    )
def downgrade() -> None:
    """Remove the ``origin_app`` column and its index."""
    op.drop_index("ix_ap_activity_origin_app", table_name="ap_activities")
    op.drop_column("ap_activities", "origin_app")

View File

@@ -1,37 +0,0 @@
"""Add oauth_codes table
Revision ID: p6n4k0l2m3
Revises: o5m3j9k1l2
Create Date: 2026-02-23
"""
from alembic import op
import sqlalchemy as sa
revision = "p6n4k0l2m3"
down_revision = "o5m3j9k1l2"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the ``oauth_codes`` table (authorization codes).

    ``code`` gets a unique index for lookup; ``user_id`` references
    ``users`` with ON DELETE CASCADE so codes die with their user.
    """
    op.create_table(
        "oauth_codes",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("code", sa.String(128), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("client_id", sa.String(64), nullable=False),
        sa.Column("redirect_uri", sa.String(512), nullable=False),
        sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("used_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
    )
    op.create_index("ix_oauth_code_code", "oauth_codes", ["code"], unique=True)
    op.create_index("ix_oauth_code_user", "oauth_codes", ["user_id"])
def downgrade() -> None:
    """Drop ``oauth_codes`` and its indexes (indexes first)."""
    op.drop_index("ix_oauth_code_user", table_name="oauth_codes")
    op.drop_index("ix_oauth_code_code", table_name="oauth_codes")
    op.drop_table("oauth_codes")

View File

@@ -1,41 +0,0 @@
"""Add oauth_grants table
Revision ID: q7o5l1m3n4
Revises: p6n4k0l2m3
"""
from alembic import op
import sqlalchemy as sa
revision = "q7o5l1m3n4"
down_revision = "p6n4k0l2m3"
branch_labels = None
depends_on = None
def upgrade():
    """Create ``oauth_grants`` and link ``oauth_codes`` to grants.

    NOTE(review): ``device_id`` and ``ix_oauth_grant_device`` are also
    created by revision r8p6m2n4o5; a fresh DB running the whole chain
    may hit a duplicate-column/index error there — verify.
    """
    op.create_table(
        "oauth_grants",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("token", sa.String(128), unique=True, nullable=False),
        sa.Column("user_id", sa.Integer, sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("client_id", sa.String(64), nullable=False),
        sa.Column("issuer_session", sa.String(128), nullable=False),
        sa.Column("device_id", sa.String(128), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("revoked_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_oauth_grant_token", "oauth_grants", ["token"], unique=True)
    op.create_index("ix_oauth_grant_issuer", "oauth_grants", ["issuer_session"])
    op.create_index("ix_oauth_grant_user", "oauth_grants", ["user_id"])
    op.create_index("ix_oauth_grant_device", "oauth_grants", ["device_id", "client_id"])
    # Add grant_token column to oauth_codes to link code → grant
    op.add_column("oauth_codes", sa.Column("grant_token", sa.String(128), nullable=True))
def downgrade():
    """Remove the code→grant link and drop ``oauth_grants``.

    Remaining indexes are dropped implicitly with the table.
    """
    op.drop_column("oauth_codes", "grant_token")
    op.drop_index("ix_oauth_grant_user", table_name="oauth_grants")
    op.drop_index("ix_oauth_grant_issuer", table_name="oauth_grants")
    op.drop_index("ix_oauth_grant_token", table_name="oauth_grants")
    op.drop_table("oauth_grants")

View File

@@ -1,29 +0,0 @@
"""Add device_id column to oauth_grants
Revision ID: r8p6m2n4o5
Revises: q7o5l1m3n4
"""
from alembic import op
import sqlalchemy as sa
revision = "r8p6m2n4o5"
down_revision = "q7o5l1m3n4"
branch_labels = None
depends_on = None
def upgrade():
    """Add the missing ``device_id`` column and its lookup index.

    ``device_id`` was added to the create_table migration (q7o5l1m3n4)
    after it had already run in production, so live DBs lack the column
    while fresh DBs created by q7 already have it.  Guard both the
    column and the index with inspector checks (same idempotent pattern
    as revision o5m3j9k1l2) so this migration succeeds either way.
    """
    conn = op.get_bind()
    inspector = sa.inspect(conn)
    columns = [c["name"] for c in inspector.get_columns("oauth_grants")]
    if "device_id" not in columns:
        op.add_column(
            "oauth_grants",
            sa.Column("device_id", sa.String(128), nullable=True),
        )
    indexes = [i["name"] for i in inspector.get_indexes("oauth_grants")]
    if "ix_oauth_grant_device" not in indexes:
        op.create_index(
            "ix_oauth_grant_device", "oauth_grants", ["device_id", "client_id"]
        )
def downgrade():
    """Drop the device index and ``device_id`` column again."""
    op.drop_index("ix_oauth_grant_device", table_name="oauth_grants")
    op.drop_column("oauth_grants", "device_id")

View File

@@ -1,30 +0,0 @@
"""Add ap_delivery_log table for idempotent federation delivery
Revision ID: s9q7n3o5p6
Revises: r8p6m2n4o5
"""
from alembic import op
import sqlalchemy as sa
revision = "s9q7n3o5p6"
down_revision = "r8p6m2n4o5"
branch_labels = None
depends_on = None
def upgrade():
    """Create ``ap_delivery_log`` for idempotent federation delivery.

    The unique (activity_id, inbox_url) constraint is what makes
    re-delivery attempts detectable; rows cascade-delete with their
    activity.
    """
    op.create_table(
        "ap_delivery_log",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("activity_id", sa.Integer, sa.ForeignKey("ap_activities.id", ondelete="CASCADE"), nullable=False),
        sa.Column("inbox_url", sa.String(512), nullable=False),
        sa.Column("status_code", sa.Integer, nullable=True),
        sa.Column("delivered_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.UniqueConstraint("activity_id", "inbox_url", name="uq_delivery_activity_inbox"),
    )
    op.create_index("ix_ap_delivery_activity", "ap_delivery_log", ["activity_id"])
def downgrade():
    """Drop the delivery log and its index."""
    op.drop_index("ix_ap_delivery_activity", table_name="ap_delivery_log")
    op.drop_table("ap_delivery_log")

View File

@@ -1,51 +0,0 @@
"""Add app_domain to ap_followers for per-app AP actors
Revision ID: t0r8n4o6p7
Revises: s9q7n3o5p6
"""
from alembic import op
import sqlalchemy as sa
revision = "t0r8n4o6p7"
down_revision = "s9q7n3o5p6"
branch_labels = None
depends_on = None
def upgrade():
    """Add ``app_domain`` to ``ap_followers`` for per-app AP actors.

    Three steps: add nullable column, backfill existing rows with
    'federation', then tighten to NOT NULL with that default.  The
    follower uniqueness constraint is widened to include app_domain.
    """
    # Add column as nullable first so we can backfill
    op.add_column(
        "ap_followers",
        sa.Column("app_domain", sa.String(64), nullable=True),
    )
    # Backfill existing rows: all current followers are aggregate
    op.execute("UPDATE ap_followers SET app_domain = 'federation' WHERE app_domain IS NULL")
    # Now make it NOT NULL with a default
    op.alter_column(
        "ap_followers", "app_domain",
        nullable=False, server_default="federation",
    )
    # Replace old unique constraint with one that includes app_domain
    op.drop_constraint("uq_follower_acct", "ap_followers", type_="unique")
    op.create_unique_constraint(
        "uq_follower_acct_app",
        "ap_followers",
        ["actor_profile_id", "follower_acct", "app_domain"],
    )
    op.create_index(
        "ix_ap_follower_app_domain",
        "ap_followers",
        ["actor_profile_id", "app_domain"],
    )
def downgrade():
    """Restore the narrower unique constraint and drop ``app_domain``.

    NOTE(review): recreating ``uq_follower_acct`` will fail if per-app
    rows made (actor_profile_id, follower_acct) non-unique — verify
    before downgrading.
    """
    op.drop_index("ix_ap_follower_app_domain", table_name="ap_followers")
    op.drop_constraint("uq_follower_acct_app", "ap_followers", type_="unique")
    op.create_unique_constraint(
        "uq_follower_acct",
        "ap_followers",
        ["actor_profile_id", "follower_acct"],
    )
    op.alter_column("ap_followers", "app_domain", nullable=True, server_default=None)
    op.drop_column("ap_followers", "app_domain")

View File

@@ -1,33 +0,0 @@
"""Add app_domain to ap_delivery_log for per-domain idempotency
Revision ID: u1s9o5p7q8
Revises: t0r8n4o6p7
"""
from alembic import op
import sqlalchemy as sa
revision = "u1s9o5p7q8"
down_revision = "t0r8n4o6p7"
def upgrade() -> None:
    """Scope delivery idempotency by app domain.

    Adds ``app_domain`` (server default 'federation' backfills existing
    rows) and widens the delivery uniqueness constraint to include it.
    """
    op.add_column(
        "ap_delivery_log",
        sa.Column("app_domain", sa.String(128), nullable=False, server_default="federation"),
    )
    op.drop_constraint("uq_delivery_activity_inbox", "ap_delivery_log", type_="unique")
    op.create_unique_constraint(
        "uq_delivery_activity_inbox_domain",
        "ap_delivery_log",
        ["activity_id", "inbox_url", "app_domain"],
    )
def downgrade() -> None:
    """Drop ``app_domain`` and restore the two-column constraint.

    NOTE(review): restoring ``uq_delivery_activity_inbox`` fails if the
    same (activity_id, inbox_url) was delivered under multiple domains —
    verify before downgrading.
    """
    op.drop_constraint("uq_delivery_activity_inbox_domain", "ap_delivery_log", type_="unique")
    op.drop_column("ap_delivery_log", "app_domain")
    op.create_unique_constraint(
        "uq_delivery_activity_inbox",
        "ap_delivery_log",
        ["activity_id", "inbox_url"],
    )

View File

@@ -1,59 +0,0 @@
"""Drop cross-domain foreign key constraints.
Columns and indexes remain — only the FK constraints are removed.
This prepares for per-domain databases where cross-DB FKs can't exist.
Revision ID: v2t0p8q9r0
Revises: u1s9o5p7q8
"""
from alembic import op
revision = "v2t0p8q9r0"
down_revision = "u1s9o5p7q8"
def upgrade() -> None:
    """Drop every FK constraint that crosses a service-domain boundary.

    Columns and indexes stay in place — only the constraints go, so each
    table can later move into its own per-service database where
    cross-DB FKs cannot exist.
    """
    # blog → account
    op.drop_constraint("fk_posts_user_id", "posts", type_="foreignkey")
    op.drop_constraint("post_likes_user_id_fkey", "post_likes", type_="foreignkey")
    # market → account
    op.drop_constraint("product_likes_user_id_fkey", "product_likes", type_="foreignkey")
    # cart → account
    op.drop_constraint("cart_items_user_id_fkey", "cart_items", type_="foreignkey")
    op.drop_constraint("orders_user_id_fkey", "orders", type_="foreignkey")
    # cart → market
    op.drop_constraint("cart_items_product_id_fkey", "cart_items", type_="foreignkey")
    op.drop_constraint("fk_cart_items_market_place_id", "cart_items", type_="foreignkey")
    op.drop_constraint("order_items_product_id_fkey", "order_items", type_="foreignkey")
    # cart → events
    op.drop_constraint("fk_orders_page_config_id", "orders", type_="foreignkey")
    # events → account
    op.drop_constraint("fk_calendar_entries_user_id", "calendar_entries", type_="foreignkey")
    op.drop_constraint("tickets_user_id_fkey", "tickets", type_="foreignkey")
    # federation → account
    op.drop_constraint("ap_actor_profiles_user_id_fkey", "ap_actor_profiles", type_="foreignkey")
    # shared (blog-internal but cross-concern)
    op.drop_constraint("menu_items_post_id_fkey", "menu_items", type_="foreignkey")
def downgrade() -> None:
    """Recreate all dropped FK constraints with their original actions.

    Only valid while every table still lives in one shared database;
    fails on orphaned rows created while the constraints were absent.
    """
    op.create_foreign_key("fk_posts_user_id", "posts", "users", ["user_id"], ["id"], ondelete="SET NULL")
    op.create_foreign_key("post_likes_user_id_fkey", "post_likes", "users", ["user_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("product_likes_user_id_fkey", "product_likes", "users", ["user_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("cart_items_user_id_fkey", "cart_items", "users", ["user_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("cart_items_product_id_fkey", "cart_items", "products", ["product_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("fk_cart_items_market_place_id", "cart_items", "market_places", ["market_place_id"], ["id"], ondelete="SET NULL")
    op.create_foreign_key("orders_user_id_fkey", "orders", "users", ["user_id"], ["id"])
    op.create_foreign_key("fk_orders_page_config_id", "orders", "page_configs", ["page_config_id"], ["id"], ondelete="SET NULL")
    op.create_foreign_key("order_items_product_id_fkey", "order_items", "products", ["product_id"], ["id"])
    op.create_foreign_key("fk_calendar_entries_user_id", "calendar_entries", "users", ["user_id"], ["id"])
    op.create_foreign_key("tickets_user_id_fkey", "tickets", "users", ["user_id"], ["id"])
    op.create_foreign_key("ap_actor_profiles_user_id_fkey", "ap_actor_profiles", "users", ["user_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key("menu_items_post_id_fkey", "menu_items", "posts", ["post_id"], ["id"], ondelete="CASCADE")

View File

@@ -1,23 +0,0 @@
"""Add app_domain to ap_notifications.
Revision ID: w3u1q9r0s1
Revises: v2t0p8q9r0
"""
from alembic import op
import sqlalchemy as sa
revision = "w3u1q9r0s1"
down_revision = "v2t0p8q9r0"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add a nullable ``app_domain`` column to ``ap_notifications``."""
    op.add_column(
        "ap_notifications",
        sa.Column("app_domain", sa.String(30), nullable=True),
    )
def downgrade() -> None:
    """Drop the ``app_domain`` column again."""
    op.drop_column("ap_notifications", "app_domain")

102
shared/db/alembic_env.py Normal file
View File

@@ -0,0 +1,102 @@
"""Reusable Alembic env.py helper for per-service migrations.
Each service calls ``run_alembic(config, model_modules, table_names)``
from its own ``alembic/env.py``. The helper:
* Imports only the requested model modules (so ``Base.metadata`` sees
only the tables that belong to the service).
* Uses an ``include_name`` callback to filter ``CREATE TABLE`` to only
the service's tables (belt-and-suspenders on top of the import filter).
* Reads ``ALEMBIC_DATABASE_URL`` for the connection string.
"""
from __future__ import annotations
import importlib
import os
import sys
from typing import Sequence
from alembic import context
from sqlalchemy import engine_from_config, pool
def run_alembic(
    config,
    model_modules: Sequence[str],
    table_names: frozenset[str],
) -> None:
    """Run Alembic migrations filtered to *table_names*.

    Parameters
    ----------
    config:
        The ``alembic.config.Config`` instance (``context.config``).
    model_modules:
        Dotted module paths to import so that ``Base.metadata`` is
        populated (e.g. ``["shared.models.user", "blog.models"]``).
    table_names:
        The set of table names this service owns. Only these tables
        will be created / altered / dropped.

    Raises
    ------
    RuntimeError
        If no connection URL is configured via ``ALEMBIC_DATABASE_URL``,
        ``DATABASE_URL``, or ``sqlalchemy.url`` in the ini file.
    """
    # Ensure project root is importable (this helper lives two levels
    # below the root, in shared/db/).
    project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
    if project_root not in sys.path:
        sys.path.insert(0, project_root)
    # Import models so Base.metadata sees the tables
    for mod in model_modules:
        try:
            importlib.import_module(mod)
        except ImportError:
            pass  # OK in Docker images that don't ship sibling apps
    from shared.db.base import Base
    target_metadata = Base.metadata
    # ---- include_name filter ------------------------------------------------
    def _include_name(name, type_, parent_names):
        # Restrict table-level operations to the service's own tables.
        # Indexes/constraints are always included: they are scoped by
        # their parent table, which the table filter already limits.
        if type_ == "table":
            return name in table_names
        return True
    # ---- connection URL -----------------------------------------------------
    url = os.getenv(
        "ALEMBIC_DATABASE_URL",
        os.getenv("DATABASE_URL", config.get_main_option("sqlalchemy.url") or ""),
    )
    if not url:
        # Fail fast with an actionable message instead of letting
        # SQLAlchemy raise an opaque "could not parse URL" error later.
        raise RuntimeError(
            "No database URL configured: set ALEMBIC_DATABASE_URL, "
            "DATABASE_URL, or sqlalchemy.url in alembic.ini"
        )
    # ---- offline / online ---------------------------------------------------
    if context.is_offline_mode():
        context.configure(
            url=url,
            target_metadata=target_metadata,
            literal_binds=True,
            dialect_opts={"paramstyle": "named"},
            compare_type=True,
            include_name=_include_name,
        )
        with context.begin_transaction():
            context.run_migrations()
    else:
        config.set_main_option("sqlalchemy.url", url)
        connectable = engine_from_config(
            config.get_section(config.config_ini_section, {}),
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
        )
        with connectable.connect() as connection:
            context.configure(
                connection=connection,
                target_metadata=target_metadata,
                compare_type=True,
                include_name=_include_name,
            )
            with context.begin_transaction():
                context.run_migrations()

View File

@@ -72,6 +72,7 @@ def create_base_app(
static_folder=STATIC_DIR,
static_url_path="/static",
template_folder=TEMPLATE_DIR,
root_path=str(BASE_DIR),
)
configure_logging(name)

View File

@@ -9,27 +9,34 @@ from decimal import Decimal
from sqlalchemy import select, update, func
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from shared.models.market import CartItem
from shared.models.market_place import MarketPlace
from shared.contracts.dtos import CartItemDTO, CartSummaryDTO
def _item_to_dto(ci: CartItem) -> CartItemDTO:
product = ci.product
def _item_to_dto(ci: CartItem, product: dict | None) -> CartItemDTO:
    """Map a CartItem row plus its product dict (fetched from the market
    data endpoint) onto a CartItemDTO.

    ``product`` is None when the product could not be fetched; every
    product-derived field then falls back to None.  Uses ``.get`` for
    all keys so a partial payload from the endpoint degrades to None
    fields instead of raising KeyError.
    """
    unit_price = None
    if product:
        raw_price = product.get("special_price") or product.get("regular_price") or 0
        unit_price = Decimal(str(raw_price))
    return CartItemDTO(
        id=ci.id,
        product_id=ci.product_id,
        quantity=ci.quantity,
        product_title=product.get("title") if product else None,
        product_slug=product.get("slug") if product else None,
        product_image=product.get("image") if product else None,
        unit_price=unit_price,
        market_place_id=ci.market_place_id,
    )
async def _fetch_products_map(fetch_data, product_ids: list[int]) -> dict[int, dict]:
"""Fetch product details from market service, return {id: product_dict}."""
if not product_ids:
return {}
raw = await fetch_data("market", "products-by-ids",
params={"ids": ",".join(str(i) for i in product_ids)},
required=False) or []
return {p["id"]: p for p in raw}
class SqlCartService:
async def cart_summary(
@@ -59,24 +66,31 @@ class SqlCartService:
return CartSummaryDTO()
if page_post_id is not None:
mp_ids = select(MarketPlace.id).where(
MarketPlace.container_type == "page",
MarketPlace.container_id == page_post_id,
MarketPlace.deleted_at.is_(None),
).scalar_subquery()
cart_q = cart_q.where(CartItem.market_place_id.in_(mp_ids))
mps = await fetch_data("market", "marketplaces-for-container",
params={"type": "page", "id": page_post_id},
required=False) or []
mp_ids = [mp["id"] for mp in mps]
if mp_ids:
cart_q = cart_q.where(CartItem.market_place_id.in_(mp_ids))
else:
return CartSummaryDTO()
cart_q = cart_q.options(selectinload(CartItem.product))
result = await session.execute(cart_q)
cart_items = result.scalars().all()
count = sum(ci.quantity for ci in cart_items)
total = sum(
Decimal(str(ci.product.special_price or ci.product.regular_price or 0)) * ci.quantity
for ci in cart_items
if ci.product and (ci.product.special_price or ci.product.regular_price)
products = await _fetch_products_map(
fetch_data, list({ci.product_id for ci in cart_items}),
)
count = sum(ci.quantity for ci in cart_items)
total = Decimal("0")
for ci in cart_items:
p = products.get(ci.product_id)
if p:
price = p.get("special_price") or p.get("regular_price")
if price:
total += Decimal(str(price)) * ci.quantity
# --- calendar entries via events data endpoint ---
cal_params: dict = {}
if user_id is not None:
@@ -109,7 +123,7 @@ class SqlCartService:
ticket_count = len(tickets)
ticket_total = sum(Decimal(str(t.price or 0)) for t in tickets)
items = [_item_to_dto(ci) for ci in cart_items]
items = [_item_to_dto(ci, products.get(ci.product_id)) for ci in cart_items]
return CartSummaryDTO(
count=count,
@@ -125,6 +139,8 @@ class SqlCartService:
self, session: AsyncSession, *,
user_id: int | None, session_id: str | None,
) -> list[CartItemDTO]:
from shared.infrastructure.data_client import fetch_data
cart_q = select(CartItem).where(CartItem.deleted_at.is_(None))
if user_id is not None:
cart_q = cart_q.where(CartItem.user_id == user_id)
@@ -133,9 +149,14 @@ class SqlCartService:
else:
return []
cart_q = cart_q.options(selectinload(CartItem.product)).order_by(CartItem.created_at.desc())
cart_q = cart_q.order_by(CartItem.created_at.desc())
result = await session.execute(cart_q)
return [_item_to_dto(ci) for ci in result.scalars().all()]
items = result.scalars().all()
products = await _fetch_products_map(
fetch_data, list({ci.product_id for ci in items}),
)
return [_item_to_dto(ci, products.get(ci.product_id)) for ci in items]
async def adopt_cart_for_user(
self, session: AsyncSession, user_id: int, session_id: str,