Decouple per-service Alembic migrations and fix cross-DB queries
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 1m19s
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 1m19s
Each service (blog, market, cart, events, federation, account) now owns its own database schema with independent Alembic migrations. Removes the monolithic shared/alembic/ that ran all migrations against a single DB. - Add per-service alembic.ini, env.py, and 0001_initial.py migrations - Add shared/db/alembic_env.py helper with table-name filtering - Fix cross-DB FK in blog/models/snippet.py (users lives in db_account) - Fix cart_impl.py cross-DB queries: fetch products and market_places via internal data endpoints instead of direct SQL joins - Fix blog ghost_sync to fetch page_configs from cart via data endpoint - Add products-by-ids and page-config-ensure data endpoints - Update all entrypoint.sh to create own DB and run own migrations - Cart now uses db_cart instead of db_market - Add docker-compose.dev.yml, dev.sh for local development - CI deploys both rose-ash swarm stack and rose-ash-dev compose stack - Fix Quart namespace package crash (root_path in factory.py) Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
35
events/alembic.ini
Normal file
35
events/alembic.ini
Normal file
@@ -0,0 +1,35 @@
|
||||
# Alembic configuration for the events service's own migration history.
[alembic]

# Migration scripts live alongside this file, in ./alembic/.
script_location = alembic

# Intentionally blank: the real URL is supplied at runtime by env.py
# (via shared.db.alembic_env), not hard-coded here.
sqlalchemy.url =

# --- Standard Python logging configuration used by Alembic ---

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
13
events/alembic/env.py
Normal file
13
events/alembic/env.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""Alembic environment for the events service.

Delegates all engine/context wiring to the shared helper; this module
only declares which model modules and tables this service owns.
"""

from alembic import context

from shared.db.alembic_env import run_alembic

# Model modules to import so their metadata is registered.
MODELS = ["shared.models.calendars"]

# Tables owned by the events service; the shared helper filters on these
# so migrations never touch another service's tables.
TABLES = frozenset({
    "calendars",
    "calendar_entries",
    "calendar_slots",
    "ticket_types",
    "tickets",
    "calendar_entry_posts",
})

run_alembic(context.config, MODELS, TABLES)
|
||||
161
events/alembic/versions/0001_initial.py
Normal file
161
events/alembic/versions/0001_initial.py
Normal file
@@ -0,0 +1,161 @@
|
||||
"""Initial events schema
|
||||
|
||||
Revision ID: events_0001
|
||||
Revises: -
|
||||
Create Date: 2026-02-26
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "events_0001"
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def _table_exists(conn, name):
    """Return True if table *name* exists in the connected database.

    Uses SQLAlchemy's inspector instead of a hand-written
    information_schema query, so the check is dialect-portable and
    checks the connection's default schema (``public`` on PostgreSQL).
    """
    return sa.inspect(conn).has_table(name)
|
||||
|
||||
|
||||
def upgrade():
    """Create the initial events-service schema.

    Idempotent at migration granularity: if ``calendars`` already exists
    the whole migration is skipped (assumes a previous run created the
    schema all-or-nothing).
    """
    if _table_exists(op.get_bind(), "calendars"):
        return

    # 1. calendars
    op.create_table(
        "calendars",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("container_type", sa.String(32), nullable=False, server_default="'page'"),
        sa.Column("container_id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("slug", sa.String(255), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_calendars_container", "calendars", ["container_type", "container_id"])
    op.create_index("ix_calendars_name", "calendars", ["name"])
    op.create_index("ix_calendars_slug", "calendars", ["slug"])
    # Case-insensitive slug uniqueness per container, enforced only for
    # non-deleted rows (partial index) — raw SQL because Alembic's
    # create_index cannot express LOWER() + WHERE portably here.
    op.execute(
        "CREATE UNIQUE INDEX IF NOT EXISTS ux_calendars_container_slug_active "
        "ON calendars (container_type, container_id, LOWER(slug)) WHERE deleted_at IS NULL"
    )

    # 2. calendar_slots
    op.create_table(
        "calendar_slots",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("calendar_id", sa.Integer(), sa.ForeignKey("calendars.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("mon", sa.Boolean(), nullable=False),
        sa.Column("tue", sa.Boolean(), nullable=False),
        sa.Column("wed", sa.Boolean(), nullable=False),
        sa.Column("thu", sa.Boolean(), nullable=False),
        sa.Column("fri", sa.Boolean(), nullable=False),
        sa.Column("sat", sa.Boolean(), nullable=False),
        sa.Column("sun", sa.Boolean(), nullable=False),
        sa.Column("flexible", sa.Boolean(), nullable=False, server_default="false"),
        sa.Column("time_start", sa.Time(), nullable=False),
        sa.Column("time_end", sa.Time(), nullable=False),
        sa.Column("cost", sa.Numeric(10, 2), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.CheckConstraint("(time_end > time_start)", name="ck_calendar_slots_time_end_after_start"),
    )
    op.create_index("ix_calendar_slots_calendar_id", "calendar_slots", ["calendar_id"])
    op.create_index("ix_calendar_slots_time_start", "calendar_slots", ["time_start"])

    # 3. calendar_entries
    # user_id / session_id / order_id are plain integers/strings, not FKs:
    # those rows live in other services' databases.
    op.create_table(
        "calendar_entries",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("calendar_id", sa.Integer(), sa.ForeignKey("calendars.id", ondelete="CASCADE"), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("session_id", sa.String(64), nullable=True),
        sa.Column("order_id", sa.Integer(), nullable=True),
        sa.Column("slot_id", sa.Integer(), sa.ForeignKey("calendar_slots.id", ondelete="SET NULL"), nullable=True),
        sa.Column("name", sa.String(255), nullable=False),
        sa.Column("start_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("end_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("state", sa.String(20), nullable=False, server_default="'pending'"),
        sa.Column("cost", sa.Numeric(10, 2), nullable=False, server_default="10"),
        sa.Column("ticket_price", sa.Numeric(10, 2), nullable=True),
        sa.Column("ticket_count", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.CheckConstraint("(end_at IS NULL) OR (end_at >= start_at)", name="ck_calendar_entries_end_after_start"),
    )
    op.create_index("ix_calendar_entries_name", "calendar_entries", ["name"])
    op.create_index("ix_calendar_entries_start_at", "calendar_entries", ["start_at"])
    op.create_index("ix_calendar_entries_user_id", "calendar_entries", ["user_id"])
    op.create_index("ix_calendar_entries_session_id", "calendar_entries", ["session_id"])
    op.create_index("ix_calendar_entries_state", "calendar_entries", ["state"])
    op.create_index("ix_calendar_entries_order_id", "calendar_entries", ["order_id"])
    op.create_index("ix_calendar_entries_slot_id", "calendar_entries", ["slot_id"])

    # 4. ticket_types
    op.create_table(
        "ticket_types",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("entry_id", sa.Integer(), sa.ForeignKey("calendar_entries.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(255), nullable=False),
        sa.Column("cost", sa.Numeric(10, 2), nullable=False),
        sa.Column("count", sa.Integer(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_ticket_types_entry_id", "ticket_types", ["entry_id"])
    op.create_index("ix_ticket_types_name", "ticket_types", ["name"])

    # 5. tickets
    op.create_table(
        "tickets",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("entry_id", sa.Integer(), sa.ForeignKey("calendar_entries.id", ondelete="CASCADE"), nullable=False),
        sa.Column("ticket_type_id", sa.Integer(), sa.ForeignKey("ticket_types.id", ondelete="SET NULL"), nullable=True),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("session_id", sa.String(64), nullable=True),
        sa.Column("order_id", sa.Integer(), nullable=True),
        # Uniqueness of codes is enforced by ix_tickets_code below;
        # a column-level unique=True here would create a duplicate index.
        sa.Column("code", sa.String(64), nullable=False),
        sa.Column("state", sa.String(20), nullable=False, server_default="'reserved'"),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("checked_in_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_tickets_entry_id", "tickets", ["entry_id"])
    op.create_index("ix_tickets_ticket_type_id", "tickets", ["ticket_type_id"])
    op.create_index("ix_tickets_user_id", "tickets", ["user_id"])
    op.create_index("ix_tickets_session_id", "tickets", ["session_id"])
    op.create_index("ix_tickets_order_id", "tickets", ["order_id"])
    op.create_index("ix_tickets_code", "tickets", ["code"], unique=True)
    op.create_index("ix_tickets_state", "tickets", ["state"])

    # 6. calendar_entry_posts — links entries to content owned elsewhere
    # (content_type/content_id pair, no cross-DB FK).
    op.create_table(
        "calendar_entry_posts",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("entry_id", sa.Integer(), sa.ForeignKey("calendar_entries.id", ondelete="CASCADE"), nullable=False),
        sa.Column("content_type", sa.String(32), nullable=False, server_default="'post'"),
        sa.Column("content_id", sa.Integer(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_entry_posts_entry_id", "calendar_entry_posts", ["entry_id"])
    op.create_index("ix_entry_posts_content", "calendar_entry_posts", ["content_type", "content_id"])
|
||||
|
||||
|
||||
def downgrade():
    """Drop all events tables, children before parents (reverse of upgrade)."""
    for table_name in (
        "calendar_entry_posts",
        "tickets",
        "ticket_types",
        "calendar_entries",
        "calendar_slots",
        "calendars",
    ):
        op.drop_table(table_name)
|
||||
40
events/entrypoint.sh
Normal file → Executable file
40
events/entrypoint.sh
Normal file → Executable file
@@ -10,8 +10,34 @@ if [[ -n "${DATABASE_HOST:-}" && -n "${DATABASE_PORT:-}" ]]; then
|
||||
done
|
||||
fi
|
||||
|
||||
# Create this service's own database (if missing) and run its own Alembic
# migrations. Gated on RUN_MIGRATIONS so only the designated replica
# performs schema changes.
if [[ "${RUN_MIGRATIONS:-}" == "true" && -n "${ALEMBIC_DATABASE_URL:-}" ]]; then
  python3 -c "
import os, re
url = os.environ['ALEMBIC_DATABASE_URL']
m = re.match(r'postgresql\+\w+://([^:]+):([^@]+)@([^:]+):(\d+)/(.+)', url)
if not m:
    print('Could not parse ALEMBIC_DATABASE_URL, skipping DB creation')
    raise SystemExit(0)
user, password, host, port, dbname = m.groups()

import psycopg
from psycopg import sql

# Connect to the maintenance DB; CREATE DATABASE cannot run in a
# transaction, hence autocommit.
conn = psycopg.connect(
    f'postgresql://{user}:{password}@{host}:{port}/postgres',
    autocommit=True,
)
cur = conn.execute('SELECT 1 FROM pg_database WHERE datname = %s', (dbname,))
if not cur.fetchone():
    # Quote the identifier properly instead of f-string interpolation,
    # so unusual DB names cannot break (or inject into) the statement.
    conn.execute(sql.SQL('CREATE DATABASE {}').format(sql.Identifier(dbname)))
    print(f'Created database {dbname}')
else:
    print(f'Database {dbname} already exists')
conn.close()
" || echo "DB creation failed (non-fatal), continuing..."

  echo "Running events Alembic migrations..."
  (cd events && alembic upgrade head)
fi
|
||||
|
||||
# Clear Redis page cache on deploy
|
||||
if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
|
||||
@@ -25,5 +51,11 @@ print('Redis cache cleared.')
|
||||
fi
|
||||
|
||||
# Start the app. A single exec at the end — an unconditional exec before
# the RELOAD check would make the reload logic unreachable.
RELOAD_FLAG=""
if [[ "${RELOAD:-}" == "true" ]]; then
  RELOAD_FLAG="--reload"
  echo "Starting Hypercorn (${APP_MODULE:-app:app}) with auto-reload..."
else
  echo "Starting Hypercorn (${APP_MODULE:-app:app})..."
fi
PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" --bind 0.0.0.0:${PORT:-8000} --workers ${WORKERS:-2} --keep-alive 75 ${RELOAD_FLAG}
|
||||
|
||||
Reference in New Issue
Block a user