feat: initial shared library extraction

Contains shared infrastructure for all coop services:
- shared/ (factory, urls, user_loader, context, internal_api, jinja_setup)
- models/ (User, Order, Calendar, Ticket, Product, Ghost CMS)
- db/ (SQLAlchemy async session, base)
- suma_browser/app/ (csrf, middleware, errors, authz, redis_cacher, payments, filters, utils)
- suma_browser/templates/ (shared base layouts, macros, error pages)
- static/ (CSS, JS, fonts, images)
- alembic/ (database migrations)
- config/ (app-config.yaml)
- editor/ (Lexical editor Node.js build)
- requirements.txt

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
giles
2026-02-09 23:11:36 +00:00
commit 668d9c7df8
446 changed files with 22741 additions and 0 deletions

61
alembic/env.py Normal file
View File

@@ -0,0 +1,61 @@
from __future__ import annotations
import os, sys
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
# Alembic Config object: provides access to values in alembic.ini.
config = context.config

# Configure Python logging from the ini file when one is present.
# Best-effort on purpose: a missing/malformed logging section must not
# block migrations.
if config.config_file_name is not None:
    try:
        fileConfig(config.config_file_name)
    except Exception:
        pass

# Make the project root importable so `db` and `models` resolve when
# alembic is invoked from inside the alembic/ directory.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))

from db.base import Base
import models  # noqa: F401  # side-effect import: registers all model tables on Base.metadata

# Metadata target used by autogenerate to diff models against the database.
target_metadata = Base.metadata
def _get_url() -> str:
url = os.getenv(
"ALEMBIC_DATABASE_URL",
os.getenv("DATABASE_URL", config.get_main_option("sqlalchemy.url") or "")
)
print(url)
return url
def run_migrations_offline() -> None:
    """Render migration SQL to script output without a live DB ("offline" mode)."""
    configure_opts = dict(
        url=_get_url(),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
    )
    context.configure(**configure_opts)
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations against a live database connection ("online" mode)."""
    resolved = _get_url()
    if resolved:
        # Inject the resolved URL so engine_from_config picks it up below.
        config.set_main_option("sqlalchemy.url", resolved)

    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as conn:
        context.configure(
            connection=conn,
            target_metadata=target_metadata,
            compare_type=True,
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: alembic selects offline mode for `--sql` invocations.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,241 @@
"""snapshot writes to postgres (products/nav/listings/reports)
Revision ID: 20251107_090500_snapshot_to_db
Revises: 20251106_152905_calendar_config
Create Date: 2025-11-07T09:05:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_090500_snapshot_to_db"        # this migration's id
down_revision = "20251106_152905_calendar_config"  # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Snapshot persistence schema: products, navigation, listings, reports.

    The product-related tables are created conditionally so the migration is
    idempotent against databases where an earlier snapshot process already
    created them; a pre-existing product_images table is altered in place to
    gain the `kind` column and the widened unique constraint. The remaining
    tables are created unconditionally.
    """
    bind = op.get_bind()
    # Use the SQLAlchemy inspector for existence checks. The original code
    # called `bind.dialect.has_column(...)`, which does not exist on Dialect
    # and raised AttributeError whenever product_images pre-existed;
    # `dialect.has_table(...)` is likewise legacy API.
    inspector = sa.inspect(bind)

    # --- products ---------------------------------------------------------
    if not inspector.has_table("products"):
        op.create_table(
            "products",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("slug", sa.String(length=255), nullable=False, unique=True),
            sa.Column("title", sa.String(length=512), nullable=True),
            sa.Column("image", sa.Text(), nullable=True),
            sa.Column("description_short", sa.Text(), nullable=True),
            sa.Column("description_html", sa.Text(), nullable=True),
            sa.Column("suma_href", sa.Text(), nullable=True),
            sa.Column("brand", sa.String(length=255), nullable=True),
            sa.Column("rrp", sa.Numeric(12, 2), nullable=True),
            sa.Column("rrp_currency", sa.String(length=16), nullable=True),
            sa.Column("rrp_raw", sa.String(length=128), nullable=True),
            sa.Column("price_per_unit", sa.Numeric(12, 4), nullable=True),
            sa.Column("price_per_unit_currency", sa.String(length=16), nullable=True),
            sa.Column("price_per_unit_raw", sa.String(length=128), nullable=True),
            sa.Column("special_price", sa.Numeric(12, 2), nullable=True),
            sa.Column("special_price_currency", sa.String(length=16), nullable=True),
            sa.Column("special_price_raw", sa.String(length=128), nullable=True),
            sa.Column("case_size_count", sa.Integer(), nullable=True),
            sa.Column("case_size_item_qty", sa.Numeric(12, 3), nullable=True),
            sa.Column("case_size_item_unit", sa.String(length=32), nullable=True),
            sa.Column("case_size_raw", sa.String(length=128), nullable=True),
            sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
            sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
            sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        )
        op.create_index("ix_products_slug", "products", ["slug"], unique=False)

    # --- product_sections -------------------------------------------------
    if not inspector.has_table("product_sections"):
        op.create_table(
            "product_sections",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
            sa.Column("title", sa.String(length=255), nullable=False),
            sa.Column("html", sa.Text(), nullable=False),
        )
        op.create_index("ix_product_sections_product_id", "product_sections", ["product_id"], unique=False)
        op.create_unique_constraint("uq_product_sections_product_title", "product_sections", ["product_id", "title"])

    # --- product_images: create fresh, or add `kind` + widen the unique ---
    if not inspector.has_table("product_images"):
        op.create_table(
            "product_images",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
            sa.Column("url", sa.Text(), nullable=False),
            sa.Column("position", sa.Integer(), nullable=False, server_default="0"),
            sa.Column("kind", sa.String(length=16), nullable=False, server_default="gallery"),
            sa.CheckConstraint("position >= 0", name="ck_product_images_position_nonneg"),
        )
        op.create_index("ix_product_images_product_id", "product_images", ["product_id"], unique=False)
        op.create_index("ix_product_images_position", "product_images", ["position"], unique=False)
        op.create_unique_constraint("uq_product_images_product_url_kind", "product_images", ["product_id", "url", "kind"])
    else:
        existing_columns = {col["name"] for col in inspector.get_columns("product_images")}
        with op.batch_alter_table("product_images") as batch_op:
            if "kind" not in existing_columns:
                batch_op.add_column(sa.Column("kind", sa.String(length=16), nullable=False, server_default="gallery"))
            # The old two-column unique may or may not exist; dropping it is
            # best-effort so re-runs do not fail.
            try:
                batch_op.drop_constraint("uq_product_images_product_url", type_="unique")
            except Exception:
                pass
            batch_op.create_unique_constraint("uq_product_images_product_url_kind", ["product_id", "url", "kind"])

    # --- navigation: nav_tops / nav_subs ----------------------------------
    op.create_table(
        "nav_tops",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("label", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_nav_tops_slug", "nav_tops", ["slug"], unique=False)
    op.create_unique_constraint("uq_nav_tops_label_slug", "nav_tops", ["label", "slug"])
    op.create_table(
        "nav_subs",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("top_id", sa.Integer(), sa.ForeignKey("nav_tops.id", ondelete="CASCADE"), nullable=False),
        sa.Column("label", sa.String(length=255), nullable=True),
        sa.Column("slug", sa.String(length=255), nullable=False),
        sa.Column("href", sa.Text(), nullable=True),
    )
    op.create_index("ix_nav_subs_top_id", "nav_subs", ["top_id"], unique=False)
    op.create_index("ix_nav_subs_slug", "nav_subs", ["slug"], unique=False)
    op.create_unique_constraint("uq_nav_subs_top_slug", "nav_subs", ["top_id", "slug"])

    # --- listings & listing_items ------------------------------------------
    op.create_table(
        "listings",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("top_slug", sa.String(length=255), nullable=False),
        sa.Column("sub_slug", sa.String(length=255), nullable=True),
        sa.Column("total_pages", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_listings_top_slug", "listings", ["top_slug"], unique=False)
    op.create_index("ix_listings_sub_slug", "listings", ["sub_slug"], unique=False)
    op.create_unique_constraint("uq_listings_top_sub", "listings", ["top_slug", "sub_slug"])
    op.create_table(
        "listing_items",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("listing_id", sa.Integer(), sa.ForeignKey("listings.id", ondelete="CASCADE"), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=False),
    )
    op.create_index("ix_listing_items_listing_id", "listing_items", ["listing_id"], unique=False)
    op.create_index("ix_listing_items_slug", "listing_items", ["slug"], unique=False)
    op.create_unique_constraint("uq_listing_items_listing_slug", "listing_items", ["listing_id", "slug"])

    # --- reports: link_errors, link_externals, subcategory_redirects, product_logs
    op.create_table(
        "link_errors",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_slug", sa.String(length=255), nullable=True),
        sa.Column("href", sa.Text(), nullable=True),
        sa.Column("text", sa.Text(), nullable=True),
        sa.Column("top", sa.String(length=255), nullable=True),
        sa.Column("sub", sa.String(length=255), nullable=True),
        sa.Column("target_slug", sa.String(length=255), nullable=True),
        sa.Column("type", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_link_errors_product_slug", "link_errors", ["product_slug"], unique=False)
    op.create_table(
        "link_externals",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_slug", sa.String(length=255), nullable=True),
        sa.Column("href", sa.Text(), nullable=True),
        sa.Column("text", sa.Text(), nullable=True),
        sa.Column("host", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_link_externals_product_slug", "link_externals", ["product_slug"], unique=False)
    op.create_table(
        "subcategory_redirects",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("old_path", sa.String(length=512), nullable=False),
        sa.Column("new_path", sa.String(length=512), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_subcategory_redirects_old_path", "subcategory_redirects", ["old_path"], unique=False)
    op.create_unique_constraint("uq_subcategory_redirects_old_new", "subcategory_redirects", ["old_path", "new_path"])
    op.create_table(
        "product_logs",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("href_tried", sa.Text(), nullable=True),
        sa.Column("ok", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("error_type", sa.String(length=255), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("http_status", sa.Integer(), nullable=True),
        sa.Column("final_url", sa.Text(), nullable=True),
        sa.Column("transport_error", sa.Boolean(), nullable=True),
        sa.Column("title", sa.String(length=512), nullable=True),
        sa.Column("has_description_html", sa.Boolean(), nullable=True),
        sa.Column("has_description_short", sa.Boolean(), nullable=True),
        sa.Column("sections_count", sa.Integer(), nullable=True),
        sa.Column("images_count", sa.Integer(), nullable=True),
        sa.Column("embedded_images_count", sa.Integer(), nullable=True),
        sa.Column("all_images_count", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_product_logs_slug", "product_logs", ["slug"], unique=False)
def downgrade() -> None:
    """Reverse upgrade(): drop report, listing, nav and product tables.

    Tables are dropped child-first so foreign keys never dangle. Note the
    asymmetry with upgrade(): the product tables are dropped unconditionally
    even though upgrade() may not have created them.
    """
    # reports
    op.drop_index("ix_product_logs_slug", table_name="product_logs")
    op.drop_table("product_logs")
    op.drop_constraint("uq_subcategory_redirects_old_new", "subcategory_redirects", type_="unique")
    op.drop_index("ix_subcategory_redirects_old_path", table_name="subcategory_redirects")
    op.drop_table("subcategory_redirects")
    op.drop_index("ix_link_externals_product_slug", table_name="link_externals")
    op.drop_table("link_externals")
    op.drop_index("ix_link_errors_product_slug", table_name="link_errors")
    op.drop_table("link_errors")
    # listings (items before parent listing)
    op.drop_index("ix_listing_items_slug", table_name="listing_items")
    op.drop_index("ix_listing_items_listing_id", table_name="listing_items")
    op.drop_table("listing_items")
    op.drop_constraint("uq_listings_top_sub", "listings", type_="unique")
    op.drop_index("ix_listings_sub_slug", table_name="listings")
    op.drop_index("ix_listings_top_slug", table_name="listings")
    op.drop_table("listings")
    # navigation (subs before tops)
    op.drop_constraint("uq_nav_subs_top_slug", "nav_subs", type_="unique")
    op.drop_index("ix_nav_subs_slug", table_name="nav_subs")
    op.drop_index("ix_nav_subs_top_id", table_name="nav_subs")
    op.drop_table("nav_subs")
    op.drop_constraint("uq_nav_tops_label_slug", "nav_tops", type_="unique")
    op.drop_index("ix_nav_tops_slug", table_name="nav_tops")
    op.drop_table("nav_tops")
    # product_images: undo the in-place alteration before dropping
    with op.batch_alter_table("product_images") as batch_op:
        try:
            batch_op.drop_constraint("uq_product_images_product_url_kind", type_="unique")
        except Exception:
            # best-effort: constraint may be absent on this database
            pass
        # Do not drop 'kind' column automatically since existing code may rely on it.
        # If needed, uncomment:
        # batch_op.drop_column("kind")
    op.drop_index("ix_product_images_position", table_name="product_images")
    op.drop_index("ix_product_images_product_id", table_name="product_images")
    op.drop_table("product_images")
    op.drop_constraint("uq_product_sections_product_title", "product_sections", type_="unique")
    op.drop_index("ix_product_sections_product_id", table_name="product_sections")
    op.drop_table("product_sections")
    op.drop_index("ix_products_slug", table_name="products")
    op.drop_table("products")

View File

@@ -0,0 +1,67 @@
# Alembic migration script template
"""empty message
Revision ID: 0d767ad92dd7
Revises: 20251021_add_user_and_magic_link
Create Date: 2025-10-24 23:36:41.985357
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0d767ad92dd7'                          # this migration's id
down_revision = '20251021_add_user_and_magic_link'  # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the product_likes association table plus its lookup indexes."""
    op.create_table(
        "product_likes",
        # Composite primary key: one like per (user, product).
        sa.Column(
            "user_id",
            sa.Integer(),
            sa.ForeignKey("users.id", ondelete="CASCADE"),
            primary_key=True,
            nullable=False,
        ),
        sa.Column("product_slug", sa.String(length=255), primary_key=True, nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    # Fast fetch of "all likes for this user".
    op.create_index("ix_product_likes_user_id", "product_likes", ["user_id"], unique=False)
    # Fast fetch of "who liked this product".
    op.create_index("ix_product_likes_product_slug", "product_likes", ["product_slug"], unique=False)
def downgrade() -> None:
    """Drop the product_likes indexes and then the table itself."""
    for index_name in ("ix_product_likes_product_slug", "ix_product_likes_user_id"):
        op.drop_index(index_name, table_name="product_likes")
    op.drop_table("product_likes")

View File

@@ -0,0 +1,24 @@
# Alembic migration script template
"""empty message
Revision ID: 1a1f1f1fc71c
Revises: 20251107_180000_link_listings_to_nav_ids, 20251107_add_product_id_to_likes
Create Date: 2025-11-07 19:34:18.228002
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1a1f1f1fc71c'
# Tuple down_revision: this is a merge revision joining two branch heads.
down_revision = ('20251107_180000_link_listings_to_nav_ids', '20251107_add_product_id_to_likes')
branch_labels = None
depends_on = None
def upgrade() -> None:
    # Merge revision only — no schema changes to apply.
    pass
def downgrade() -> None:
    # Merge revision only — nothing to undo.
    pass

View File

@@ -0,0 +1,20 @@
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251021211617"  # this migration's id
down_revision = None         # root of the migration chain
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the `kv` table: a simple string-key -> text-value store."""
    op.create_table(
        "kv",
        sa.Column("key", sa.String(length=120), nullable=False),
        sa.Column("value", sa.Text(), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("key"),
    )
def downgrade() -> None:
    # Reverse of upgrade(): remove the key/value store.
    op.drop_table('kv')

View File

@@ -0,0 +1,47 @@
"""add users and magic_links tables
Revision ID: 20251021_add_user_and_magic_link
Revises: a1b2c3d4e5f6 # <-- REPLACE with your actual head
Create Date: 2025-10-21
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '20251021_add_user_and_magic_link'
down_revision: Union[str, None] = '20251021211617'  # the kv bootstrap migration
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the users table and the magic_links table for email sign-in."""
    # Accounts are keyed by unique email address.
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("email", sa.String(length=255), nullable=False, unique=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("last_login_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_users_email", "users", ["email"], unique=True)

    # One-time login tokens; cascade-deleted with the owning user.
    op.create_table(
        "magic_links",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("token", sa.String(length=128), nullable=False, unique=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("purpose", sa.String(length=32), nullable=False),
        sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("used_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("ip", sa.String(length=64), nullable=True),
        sa.Column("user_agent", sa.String(length=256), nullable=True),
    )
    op.create_index("ix_magic_links_token", "magic_links", ["token"], unique=True)
    op.create_index("ix_magic_links_user", "magic_links", ["user_id"])
def downgrade() -> None:
    """Drop magic_links first (it has an FK on users), then users."""
    op.drop_index("ix_magic_links_user", table_name="magic_links")
    op.drop_index("ix_magic_links_token", table_name="magic_links")
    op.drop_table("magic_links")
    op.drop_index("ix_users_email", table_name="users")
    op.drop_table("users")

View File

@@ -0,0 +1,135 @@
"""ghost content mirror (posts/pages/authors/tags)
Revision ID: 20251028_ghost_content
Revises: 0d767ad92dd7
Create Date: 2025-10-28
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251028_ghost_content"  # this migration's id
down_revision = "0d767ad92dd7"       # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create a local mirror of Ghost CMS content: authors, tags, posts,
    and the post<->author / post<->tag join tables.

    Each mirrored entity keeps its Ghost identifier (`ghost_id`) with a
    unique constraint so repeated syncs can upsert by it.
    """
    # authors: Ghost staff/author profiles
    op.create_table(
        "authors",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("slug", sa.String(length=191), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("profile_image", sa.Text(), nullable=True),
        sa.Column("cover_image", sa.Text(), nullable=True),
        sa.Column("bio", sa.Text(), nullable=True),
        sa.Column("website", sa.Text(), nullable=True),
        sa.Column("location", sa.Text(), nullable=True),
        sa.Column("facebook", sa.Text(), nullable=True),
        sa.Column("twitter", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.UniqueConstraint("ghost_id", name="uq_authors_ghost_id"),
    )
    op.create_index("ix_authors_ghost_id", "authors", ["ghost_id"])
    op.create_index("ix_authors_slug", "authors", ["slug"])

    # tags: Ghost content tags
    op.create_table(
        "tags",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("slug", sa.String(length=191), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("visibility", sa.String(length=32), nullable=False, server_default="public"),
        sa.Column("feature_image", sa.Text(), nullable=True),
        sa.Column("meta_title", sa.String(length=300), nullable=True),
        sa.Column("meta_description", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.UniqueConstraint("ghost_id", name="uq_tags_ghost_id"),
    )
    op.create_index("ix_tags_ghost_id", "tags", ["ghost_id"])
    op.create_index("ix_tags_slug", "tags", ["slug"])

    # posts: both posts and pages (distinguished by `is_page`), with all
    # Ghost content renderings (html/plaintext/mobiledoc/lexical) and
    # SEO/social metadata mirrored verbatim.
    op.create_table(
        "posts",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("uuid", sa.String(length=64), nullable=False),
        sa.Column("slug", sa.String(length=191), nullable=False),
        sa.Column("title", sa.String(length=500), nullable=False),
        sa.Column("html", sa.Text(), nullable=True),
        sa.Column("plaintext", sa.Text(), nullable=True),
        sa.Column("mobiledoc", sa.Text(), nullable=True),
        sa.Column("lexical", sa.Text(), nullable=True),
        sa.Column("feature_image", sa.Text(), nullable=True),
        sa.Column("feature_image_alt", sa.Text(), nullable=True),
        sa.Column("feature_image_caption", sa.Text(), nullable=True),
        sa.Column("excerpt", sa.Text(), nullable=True),
        sa.Column("custom_excerpt", sa.Text(), nullable=True),
        sa.Column("visibility", sa.String(length=32), nullable=False, server_default="public"),
        sa.Column("status", sa.String(length=32), nullable=False, server_default="draft"),
        sa.Column("featured", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        sa.Column("is_page", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        sa.Column("email_only", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        sa.Column("canonical_url", sa.Text(), nullable=True),
        sa.Column("meta_title", sa.String(length=500), nullable=True),
        sa.Column("meta_description", sa.Text(), nullable=True),
        sa.Column("og_image", sa.Text(), nullable=True),
        sa.Column("og_title", sa.String(length=500), nullable=True),
        sa.Column("og_description", sa.Text(), nullable=True),
        sa.Column("twitter_image", sa.Text(), nullable=True),
        sa.Column("twitter_title", sa.String(length=500), nullable=True),
        sa.Column("twitter_description", sa.Text(), nullable=True),
        sa.Column("custom_template", sa.String(length=191), nullable=True),
        sa.Column("reading_time", sa.Integer(), nullable=True),
        sa.Column("comment_id", sa.String(length=191), nullable=True),
        sa.Column("published_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("primary_author_id", sa.Integer(), sa.ForeignKey("authors.id", ondelete="SET NULL"), nullable=True),
        sa.Column("primary_tag_id", sa.Integer(), sa.ForeignKey("tags.id", ondelete="SET NULL"), nullable=True),
        sa.UniqueConstraint("ghost_id", name="uq_posts_ghost_id"),
        sa.UniqueConstraint("uuid", name="uq_posts_uuid"),
    )
    op.create_index("ix_posts_ghost_id", "posts", ["ghost_id"])
    op.create_index("ix_posts_slug", "posts", ["slug"])
    op.create_index("ix_posts_status", "posts", ["status"])
    op.create_index("ix_posts_visibility", "posts", ["visibility"])
    op.create_index("ix_posts_is_page", "posts", ["is_page"])
    op.create_index("ix_posts_published_at", "posts", ["published_at"])

    # many-to-many join tables, ordered via sort_order
    op.create_table(
        "post_authors",
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("author_id", sa.Integer(), sa.ForeignKey("authors.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("sort_order", sa.Integer(), nullable=False, server_default="0"),
    )
    op.create_table(
        "post_tags",
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("tag_id", sa.Integer(), sa.ForeignKey("tags.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("sort_order", sa.Integer(), nullable=False, server_default="0"),
    )
def downgrade() -> None:
    """Drop the Ghost mirror tables in reverse dependency order
    (join tables, then posts, then tags and authors)."""
    op.drop_table("post_tags")
    op.drop_table("post_authors")
    op.drop_index("ix_posts_published_at", table_name="posts")
    op.drop_index("ix_posts_is_page", table_name="posts")
    op.drop_index("ix_posts_visibility", table_name="posts")
    op.drop_index("ix_posts_status", table_name="posts")
    op.drop_index("ix_posts_slug", table_name="posts")
    op.drop_index("ix_posts_ghost_id", table_name="posts")
    op.drop_table("posts")
    op.drop_index("ix_tags_slug", table_name="tags")
    op.drop_index("ix_tags_ghost_id", table_name="tags")
    op.drop_table("tags")
    op.drop_index("ix_authors_slug", table_name="authors")
    op.drop_index("ix_authors_ghost_id", table_name="authors")
    op.drop_table("authors")

View File

@@ -0,0 +1,128 @@
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "20251102_223123"              # this migration's id
down_revision = "20251028_ghost_content"  # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Mirror Ghost membership data: extend users with Ghost/Stripe fields
    and create label, newsletter, tier and subscription tables.

    As elsewhere in this chain, each Ghost entity keeps its `ghost_id`
    with a unique index so repeated syncs can upsert by it.
    """
    # Extend users with Ghost member attributes and the Stripe customer link.
    op.add_column("users", sa.Column("ghost_id", sa.String(length=64), nullable=True))
    op.add_column("users", sa.Column("name", sa.String(length=255), nullable=True))
    op.add_column("users", sa.Column("ghost_status", sa.String(length=50), nullable=True))
    op.add_column("users", sa.Column("ghost_subscribed", sa.Boolean(), nullable=False, server_default=sa.true()))
    op.add_column("users", sa.Column("ghost_note", sa.Text(), nullable=True))
    op.add_column("users", sa.Column("avatar_image", sa.Text(), nullable=True))
    op.add_column("users", sa.Column("stripe_customer_id", sa.String(length=255), nullable=True))
    # Full Ghost member payload kept as JSONB for fields not modelled above.
    op.add_column("users", sa.Column("ghost_raw", postgresql.JSONB(astext_type=sa.Text()), nullable=True))
    op.create_index("ix_users_ghost_id", "users", ["ghost_id"], unique=True)
    op.create_index("ix_users_stripe_customer_id", "users", ["stripe_customer_id"])

    # Labels
    op.create_table(
        "ghost_labels",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_ghost_labels_ghost_id", "ghost_labels", ["ghost_id"], unique=True)
    # user<->label association
    op.create_table(
        "user_labels",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("label_id", sa.Integer(), sa.ForeignKey("ghost_labels.id", ondelete="CASCADE"), nullable=False),
        sa.UniqueConstraint("user_id", "label_id", name="uq_user_label"),
    )

    # Newsletters
    op.create_table(
        "ghost_newsletters",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_ghost_newsletters_ghost_id", "ghost_newsletters", ["ghost_id"], unique=True)
    # user<->newsletter association with per-user subscribed flag
    op.create_table(
        "user_newsletters",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("newsletter_id", sa.Integer(), sa.ForeignKey("ghost_newsletters.id", ondelete="CASCADE"), nullable=False),
        sa.Column("subscribed", sa.Boolean(), nullable=False, server_default=sa.true()),
        sa.UniqueConstraint("user_id", "newsletter_id", name="uq_user_newsletter"),
    )

    # Tiers
    op.create_table(
        "ghost_tiers",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("type", sa.String(length=50), nullable=True),
        sa.Column("visibility", sa.String(length=50), nullable=True),
    )
    op.create_index("ix_ghost_tiers_ghost_id", "ghost_tiers", ["ghost_id"], unique=True)

    # Subscriptions: link users to tiers plus the Stripe identifiers
    op.create_table(
        "ghost_subscriptions",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("status", sa.String(length=50), nullable=True),
        sa.Column("tier_id", sa.Integer(), sa.ForeignKey("ghost_tiers.id", ondelete="SET NULL"), nullable=True),
        sa.Column("cadence", sa.String(length=50), nullable=True),
        sa.Column("price_amount", sa.Integer(), nullable=True),
        sa.Column("price_currency", sa.String(length=10), nullable=True),
        sa.Column("stripe_customer_id", sa.String(length=255), nullable=True),
        sa.Column("stripe_subscription_id", sa.String(length=255), nullable=True),
        sa.Column("raw", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    )
    op.create_index("ix_ghost_subscriptions_ghost_id", "ghost_subscriptions", ["ghost_id"], unique=True)
    op.create_index("ix_ghost_subscriptions_user_id", "ghost_subscriptions", ["user_id"])
    op.create_index("ix_ghost_subscriptions_tier_id", "ghost_subscriptions", ["tier_id"])
    op.create_index("ix_ghost_subscriptions_stripe_customer_id", "ghost_subscriptions", ["stripe_customer_id"])
    op.create_index("ix_ghost_subscriptions_stripe_subscription_id", "ghost_subscriptions", ["stripe_subscription_id"])
def downgrade() -> None:
    """Drop the membership tables in reverse dependency order, then remove
    the columns added to users."""
    op.drop_index("ix_ghost_subscriptions_stripe_subscription_id", table_name="ghost_subscriptions")
    op.drop_index("ix_ghost_subscriptions_stripe_customer_id", table_name="ghost_subscriptions")
    op.drop_index("ix_ghost_subscriptions_tier_id", table_name="ghost_subscriptions")
    op.drop_index("ix_ghost_subscriptions_user_id", table_name="ghost_subscriptions")
    op.drop_index("ix_ghost_subscriptions_ghost_id", table_name="ghost_subscriptions")
    op.drop_table("ghost_subscriptions")
    op.drop_index("ix_ghost_tiers_ghost_id", table_name="ghost_tiers")
    op.drop_table("ghost_tiers")
    op.drop_table("user_newsletters")
    op.drop_index("ix_ghost_newsletters_ghost_id", table_name="ghost_newsletters")
    op.drop_table("ghost_newsletters")
    op.drop_table("user_labels")
    op.drop_index("ix_ghost_labels_ghost_id", table_name="ghost_labels")
    op.drop_table("ghost_labels")
    # users column removals (reverse of the add_column order)
    op.drop_index("ix_users_stripe_customer_id", table_name="users")
    op.drop_index("ix_users_ghost_id", table_name="users")
    op.drop_column("users", "ghost_raw")
    op.drop_column("users", "stripe_customer_id")
    op.drop_column("users", "avatar_image")
    op.drop_column("users", "ghost_note")
    op.drop_column("users", "ghost_subscribed")
    op.drop_column("users", "ghost_status")
    op.drop_column("users", "name")
    op.drop_column("users", "ghost_id")

View File

@@ -0,0 +1,62 @@
"""add calendar description and slots
Revision ID: 20251106_152905_calendar_config
Revises: 215330c5ec15
Create Date: 2025-11-06T15:29:05.243479
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251106_152905_calendar_config"  # this migration's id
down_revision = "215330c5ec15"                # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add calendars.description and create the calendar_slots table.

    A slot is a named weekly-recurring time band on a calendar (one boolean
    per weekday, plus a start/end time and optional cost).
    """
    with op.batch_alter_table("calendars") as batch_op:
        batch_op.add_column(sa.Column("description", sa.Text(), nullable=True))
    op.create_table(
        "calendar_slots",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("calendar_id", sa.Integer(), sa.ForeignKey("calendars.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        # One flag per weekday; all default to "not available".
        sa.Column("mon", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("tue", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("wed", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("thu", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("fri", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("sat", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("sun", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("time_start", sa.Time(timezone=False), nullable=False),
        sa.Column("time_end", sa.Time(timezone=False), nullable=False),
        sa.Column("cost", sa.Numeric(10, 2), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        # NOTE: strict '>' means slots spanning midnight (end <= start) are rejected.
        sa.CheckConstraint("(time_end > time_start)", name="ck_calendar_slots_time_end_after_start"),
    )
    op.create_index("ix_calendar_slots_calendar_id", "calendar_slots", ["calendar_id"], unique=False)
    op.create_index("ix_calendar_slots_time_start", "calendar_slots", ["time_start"], unique=False)
    # Slot names are unique per calendar.
    op.create_unique_constraint(
        "uq_calendar_slots_unique_band",
        "calendar_slots",
        ["calendar_id", "name"]
    )
def downgrade() -> None:
    """Remove calendar_slots and the calendars.description column."""
    op.drop_constraint("uq_calendar_slots_unique_band", "calendar_slots", type_="unique")
    # Drop the lookup indexes in reverse creation order, then the table.
    for index_name in ("ix_calendar_slots_time_start", "ix_calendar_slots_calendar_id"):
        op.drop_index(index_name, table_name="calendar_slots")
    op.drop_table("calendar_slots")
    with op.batch_alter_table("calendars") as batch_op:
        batch_op.drop_column("description")

View File

@@ -0,0 +1,52 @@
"""add product labels and stickers
Revision ID: 20251107_121500_add_labels_stickers
Revises: 20251107_090500_snapshot_to_db
Create Date: 2025-11-07T12:15:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_121500_labels_stickers"
down_revision = "20251107_090500_snapshot_to_db"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create product_labels and product_stickers.

    Both tables share the same shape: a surrogate id, a cascading FK to
    products, and a name that is unique per product.
    """
    table_specs = [
        ("product_labels", "ix_product_labels_product_id",
         "ix_product_labels_name", "uq_product_labels_product_name"),
        ("product_stickers", "ix_product_stickers_product_id",
         "ix_product_stickers_name", "uq_product_stickers_product_name"),
    ]
    for table_name, product_index, name_index, unique_name in table_specs:
        op.create_table(
            table_name,
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
            sa.Column("name", sa.String(length=255), nullable=False),
        )
        op.create_index(product_index, table_name, ["product_id"], unique=False)
        op.create_index(name_index, table_name, ["name"], unique=False)
        op.create_unique_constraint(unique_name, table_name, ["product_id", "name"])
def downgrade() -> None:
    """Drop product_stickers then product_labels (reverse of upgrade)."""
    for table_name, product_index, name_index, unique_name in [
        ("product_stickers", "ix_product_stickers_product_id",
         "ix_product_stickers_name", "uq_product_stickers_product_name"),
        ("product_labels", "ix_product_labels_product_id",
         "ix_product_labels_name", "uq_product_labels_product_name"),
    ]:
        op.drop_constraint(unique_name, table_name, type_="unique")
        op.drop_index(name_index, table_name=table_name)
        op.drop_index(product_index, table_name=table_name)
        op.drop_table(table_name)

View File

@@ -0,0 +1,44 @@
"""widen alembic_version.version_num to 255
Revision ID: 20251107_123000_widen_alembic_version
Revises: 20251107_121500_labels_stickers
Create Date: 2025-11-07T12:30:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_123000_widen_alembic_version"
down_revision = "20251107_121500_labels_stickers"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Increase the size of alembic_version.version_num to 255.

    Needed because this repo uses long, descriptive revision ids (e.g.
    "20251107_153000_product_attributes_nutrition") that exceed Alembic's
    default VARCHAR(32).
    """
    # Raw SQL is Postgres-flavoured (ALTER COLUMN ... TYPE); widening needs no
    # USING clause on Postgres, but we'll be explicit for clarity.
    op.execute(
        "ALTER TABLE alembic_version "
        "ALTER COLUMN version_num TYPE VARCHAR(255)"
    )
    # If you need cross-dialect support later, you could add dialect checks
    # and use batch_alter_table for SQLite. For your Postgres setup, this is sufficient.
def downgrade() -> None:
    """Shrink alembic_version.version_num back to 32.

    On Postgres, shrinking can fail if any row exceeds 32 chars.
    We proactively truncate to 32 to guarantee a clean downgrade.

    NOTE(review): truncation is lossy — if two stored revision ids share the
    same first 32 characters this UPDATE would collide on the primary key;
    in practice alembic_version holds a single row per branch, so this is
    only a theoretical risk.
    """
    # Truncate any too-long values to fit back into VARCHAR(32)
    op.execute(
        "UPDATE alembic_version SET version_num = LEFT(version_num, 32)"
    )
    op.execute(
        "ALTER TABLE alembic_version "
        "ALTER COLUMN version_num TYPE VARCHAR(32)"
    )

View File

@@ -0,0 +1,93 @@
"""add product attributes, nutrition, allergens and extra product columns
Revision ID: 20251107_153000_product_attributes_nutrition
Revises: 20251107_123000_widen_alembic_version
Create Date: 2025-11-07T15:30:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_153000_product_attributes_nutrition"
down_revision = "20251107_123000_widen_alembic_version"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add product metadata: extra columns plus three child tables.

    - products gains ean/sku/unit_size/pack_size (all nullable, indexed where useful)
    - product_attributes: arbitrary key/value facts, one key per product
    - product_nutrition:  key/value[+unit] rows, one key per product
    - product_allergens:  one row per allergen name, with a 'contains' flag
    """
    # --- products extra columns ---
    with op.batch_alter_table("products") as batch_op:
        batch_op.add_column(sa.Column("ean", sa.String(length=64), nullable=True))
        batch_op.add_column(sa.Column("sku", sa.String(length=128), nullable=True))
        batch_op.add_column(sa.Column("unit_size", sa.String(length=128), nullable=True))
        batch_op.add_column(sa.Column("pack_size", sa.String(length=128), nullable=True))
        batch_op.create_index("ix_products_ean", ["ean"], unique=False)
        batch_op.create_index("ix_products_sku", ["sku"], unique=False)
    # --- attributes: arbitrary key/value facts (e.g., Brand, Origin, etc.) ---
    op.create_table(
        "product_attributes",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.Text(), nullable=True),
    )
    op.create_index("ix_product_attributes_product_id", "product_attributes", ["product_id"], unique=False)
    op.create_index("ix_product_attributes_key", "product_attributes", ["key"], unique=False)
    op.create_unique_constraint(
        "uq_product_attributes_product_key", "product_attributes", ["product_id", "key"]
    )
    # --- nutrition: key/value[+unit] rows (e.g., Energy, Fat, Protein) ---
    op.create_table(
        "product_nutrition",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.String(length=255), nullable=True),
        sa.Column("unit", sa.String(length=64), nullable=True),
    )
    op.create_index("ix_product_nutrition_product_id", "product_nutrition", ["product_id"], unique=False)
    op.create_index("ix_product_nutrition_key", "product_nutrition", ["key"], unique=False)
    op.create_unique_constraint(
        "uq_product_nutrition_product_key", "product_nutrition", ["product_id", "key"]
    )
    # --- allergens: one row per allergen mention (name + contains boolean) ---
    op.create_table(
        "product_allergens",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("contains", sa.Boolean(), nullable=False, server_default=sa.false()),
    )
    op.create_index("ix_product_allergens_product_id", "product_allergens", ["product_id"], unique=False)
    op.create_index("ix_product_allergens_name", "product_allergens", ["name"], unique=False)
    op.create_unique_constraint(
        "uq_product_allergens_product_name", "product_allergens", ["product_id", "name"]
    )
def downgrade() -> None:
    """Drop the three child tables and the extra products columns,
    strictly in reverse of the creation order in upgrade()."""
    op.drop_constraint("uq_product_allergens_product_name", "product_allergens", type_="unique")
    op.drop_index("ix_product_allergens_name", table_name="product_allergens")
    op.drop_index("ix_product_allergens_product_id", table_name="product_allergens")
    op.drop_table("product_allergens")
    op.drop_constraint("uq_product_nutrition_product_key", "product_nutrition", type_="unique")
    op.drop_index("ix_product_nutrition_key", table_name="product_nutrition")
    op.drop_index("ix_product_nutrition_product_id", table_name="product_nutrition")
    op.drop_table("product_nutrition")
    op.drop_constraint("uq_product_attributes_product_key", "product_attributes", type_="unique")
    op.drop_index("ix_product_attributes_key", table_name="product_attributes")
    op.drop_index("ix_product_attributes_product_id", table_name="product_attributes")
    op.drop_table("product_attributes")
    # Indexes must go before the columns they cover.
    with op.batch_alter_table("products") as batch_op:
        batch_op.drop_index("ix_products_sku")
        batch_op.drop_index("ix_products_ean")
        batch_op.drop_column("pack_size")
        batch_op.drop_column("unit_size")
        batch_op.drop_column("sku")
        batch_op.drop_column("ean")

View File

@@ -0,0 +1,30 @@
"""Add regular_price and oe_list_price fields to Product
Revision ID: 20251107_163500_add_regular_price_and_oe_list_price
Revises: 20251107_153000_product_attributes_nutrition
Create Date: 2025-11-07 16:35:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_163500_add_regular_price_and_oe_list_price"
down_revision = "20251107_153000_product_attributes_nutrition"
branch_labels = None
depends_on = None
def upgrade():
    """Add snapshot pricing columns (regular price parts + OE list price) to products."""
    new_columns = (
        sa.Column('regular_price', sa.Numeric(12, 2), nullable=True),
        sa.Column('regular_price_currency', sa.String(length=16), nullable=True),
        sa.Column('regular_price_raw', sa.String(length=128), nullable=True),
        sa.Column('oe_list_price', sa.Numeric(12, 2), nullable=True),
    )
    for column in new_columns:
        op.add_column('products', column)
def downgrade():
    """Drop the pricing columns, reverse order of addition."""
    for column_name in ('oe_list_price', 'regular_price_raw',
                        'regular_price_currency', 'regular_price'):
        op.drop_column('products', column_name)

View File

@@ -0,0 +1,72 @@
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column, select, update
from sqlalchemy.orm.session import Session
# revision identifiers, used by Alembic.
revision = '20251107_180000_link_listings_to_nav_ids'
down_revision = '20251107_163500_add_regular_price_and_oe_list_price'
branch_labels = None
depends_on = None
def upgrade():
    """Backfill listings.top_id / sub_id from the legacy slug columns.

    Adds the two FK columns as nullable, maps every listing's (top_slug,
    sub_slug) pair onto nav_tops/nav_subs ids in Python, then adds the FK
    constraints and makes top_id NOT NULL.

    NOTE(review): any listing whose top_slug has no match in nav_tops is left
    with top_id NULL, which will make the final alter_column(nullable=False)
    fail — confirm the data is clean before running.
    """
    # Add new nullable columns first
    op.add_column('listings', sa.Column('top_id', sa.Integer(), nullable=True))
    op.add_column('listings', sa.Column('sub_id', sa.Integer(), nullable=True))
    bind = op.get_bind()
    session = Session(bind=bind)
    # Lightweight table clauses: only the columns this migration touches.
    nav_tops = sa.table(
        'nav_tops',
        sa.column('id', sa.Integer),
        sa.column('slug', sa.String),
    )
    nav_subs = sa.table(
        'nav_subs',
        sa.column('id', sa.Integer),
        sa.column('slug', sa.String),
        sa.column('top_id', sa.Integer),
    )
    listings = sa.table(
        'listings',
        sa.column('id', sa.Integer),
        sa.column('top_slug', sa.String),
        sa.column('sub_slug', sa.String),
        sa.column('top_id', sa.Integer),
        sa.column('sub_id', sa.Integer),
    )
    # Map top_slug -> top_id
    top_slug_to_id = {
        slug: id_ for id_, slug in session.execute(select(nav_tops.c.id, nav_tops.c.slug))
    }
    # Sub slugs are only unique within a top, so key by (top_id, slug).
    sub_slug_to_id = {
        (top_id, slug): id_ for id_, slug, top_id in session.execute(
            select(nav_subs.c.id, nav_subs.c.slug, nav_subs.c.top_id)
        )
    }
    # One UPDATE per listing; acceptable for migration-scale row counts.
    for row in session.execute(select(listings.c.id, listings.c.top_slug, listings.c.sub_slug)):
        listing_id, top_slug, sub_slug = row
        top_id = top_slug_to_id.get(top_slug)
        sub_id = sub_slug_to_id.get((top_id, sub_slug)) if sub_slug else None
        session.execute(
            listings.update()
            .where(listings.c.id == listing_id)
            .values(top_id=top_id, sub_id=sub_id)
        )
    session.commit()
    # Add foreign keys and constraints
    # NOTE(review): name=None relies on a naming convention being configured
    # on the metadata; otherwise the FK names are dialect-generated — confirm.
    op.create_foreign_key(None, 'listings', 'nav_tops', ['top_id'], ['id'])
    op.create_foreign_key(None, 'listings', 'nav_subs', ['sub_id'], ['id'])
    op.alter_column('listings', 'top_id', nullable=False)
    # Optional: remove old slug fields
    # op.drop_column('listings', 'top_slug')
    # op.drop_column('listings', 'sub_slug')
def downgrade():
    # Deliberately irreversible: the slug->id backfill cannot be undone safely.
    raise NotImplementedError("No downgrade")

View File

@@ -0,0 +1,26 @@
from alembic import op
import sqlalchemy as sa
revision = '20251107_add_missing_indexes'
down_revision = '1a1f1f1fc71c' # Adjust if needed to match your current head
depends_on = None
branch_labels = None
def upgrade() -> None:
    """Add query-performance indexes for common sort/filter paths."""
    # Index for sorting by price
    op.create_index("ix_products_regular_price", "products", ["regular_price"])
    # Index for filtering/aggregating by brand
    op.create_index("ix_products_brand", "products", ["brand"])
    # Index for product_likes.product_id (if not already covered by FK)
    op.create_index("ix_product_likes_product_id", "product_likes", ["product_id"])
    # Composite index on listing_items (may be partially redundant with existing constraints)
    op.create_index("ix_listing_items_listing_slug", "listing_items", ["listing_id", "slug"])
def downgrade() -> None:
    """Drop the performance indexes, reverse order of creation."""
    for index_name, table_name in (
        ("ix_listing_items_listing_slug", "listing_items"),
        ("ix_product_likes_product_id", "product_likes"),
        ("ix_products_brand", "products"),
        ("ix_products_regular_price", "products"),
    ):
        op.drop_index(index_name, table_name=table_name)

View File

@@ -0,0 +1,52 @@
"""Add surrogate key and product_id FK to product_likes"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers
revision = '20251107_add_product_id_to_likes'
down_revision = '0d767ad92dd7'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Give product_likes a surrogate id and a real product_id FK,
    backfilled from the legacy product_slug column."""
    # Add surrogate primary key and product_id foreign key column
    # NOTE(review): op.add_column with primary_key=True does NOT emit a
    # PRIMARY KEY constraint (or a sequence/identity) on most dialects — it
    # just adds a NOT NULL integer column. Verify the resulting table really
    # has the intended surrogate key on Postgres.
    op.add_column("product_likes", sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True))
    op.add_column("product_likes", sa.Column("product_id", sa.Integer(), nullable=True))
    # Create temporary FK without constraint for backfill
    # NOTE(review): rows whose product_slug matches no product stay NULL and
    # will break the alter_column(nullable=False) below — confirm data first.
    op.execute("""
        UPDATE product_likes
        SET product_id = (
            SELECT id FROM products WHERE products.slug = product_likes.product_slug
        )
    """)
    # Add real FK constraint
    op.create_foreign_key(
        "fk_product_likes_product_id_products",
        source_table="product_likes",
        referent_table="products",
        local_cols=["product_id"],
        remote_cols=["id"],
        ondelete="CASCADE"
    )
    # Make product_id non-nullable now that it's backfilled
    op.alter_column("product_likes", "product_id", nullable=False)
    # Add index for efficient lookup (one like per user per product)
    op.create_index(
        "ix_product_likes_user_product",
        "product_likes",
        ["user_id", "product_id"],
        unique=True
    )
def downgrade() -> None:
    """Undo the surrogate-key / product_id changes, reverse order."""
    op.drop_index("ix_product_likes_user_product", table_name="product_likes")
    op.drop_constraint("fk_product_likes_product_id_products", "product_likes", type_="foreignkey")
    for column_name in ("product_id", "id"):
        op.drop_column("product_likes", column_name)

View File

@@ -0,0 +1,164 @@
"""Add soft delete and update unique constraints to include deleted_at
Revision ID: soft_delete_all
Revises:
Create Date: 2025-11-08 00:38:03
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '20251108_soft_delete_all'
down_revision = 'remove_product_slug_20251107'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add deleted_at (and missing created_at) soft-delete columns across the
    product/nav/listing tables, and rebuild unique constraints to include
    deleted_at.

    NOTE(review): in standard SQL (incl. Postgres) NULLs compare distinct in
    unique constraints, so including a nullable deleted_at means two *active*
    rows (deleted_at IS NULL) never collide — these constraints do not enforce
    one-active-row uniqueness. A partial unique index WHERE deleted_at IS NULL
    (as used by the calendars migration elsewhere in this repo) would. Confirm
    this is the intended semantics.
    """
    op.add_column('product_likes', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_likes_product_user', 'product_likes', ['product_id', 'user_id', 'deleted_at'])
    # Drop the old unique index
    op.drop_index('ix_product_likes_user_product', table_name='product_likes')
    # Create a new unique index that includes deleted_at
    op.create_index(
        'ix_product_likes_user_product',
        'product_likes',
        ['user_id', 'product_id', 'deleted_at'],
        unique=True
    )
    # Same pattern for each product_* child table: add timestamps, then swap
    # the unique constraint for a soft-delete-aware one.
    op.add_column('product_allergens', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_allergens', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_constraint('uq_product_allergens_product_name', 'product_allergens', type_='unique')
    op.create_unique_constraint('uq_product_allergens_product_name', 'product_allergens', ['product_id', 'name', 'deleted_at'])
    op.add_column('product_attributes', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_attributes', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_constraint('uq_product_attributes_product_key', 'product_attributes', type_='unique')
    op.create_unique_constraint('uq_product_attributes_product_key', 'product_attributes', ['product_id', 'key', 'deleted_at'])
    op.add_column('product_images', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_images', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_images', 'product_images', ['product_id', 'position', 'deleted_at'])
    op.add_column('product_labels', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_labels', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_labels', 'product_labels', ['product_id', 'name', 'deleted_at'])
    op.add_column('product_nutrition', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_nutrition', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_nutrition', 'product_nutrition', ['product_id', 'key', 'deleted_at'])
    op.add_column('product_sections', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_sections', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_sections', 'product_sections', ['product_id', 'title', 'deleted_at'])
    op.add_column('product_stickers', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_stickers', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_stickers', 'product_stickers', ['product_id', 'name', 'deleted_at'])
    # Navigation and listings tables get the same treatment.
    op.add_column('nav_tops', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_nav_tops', 'nav_tops', ['slug', 'deleted_at'])
    op.add_column('nav_subs', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('nav_subs', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_nav_subs', 'nav_subs', ['top_id', 'slug', 'deleted_at'])
    op.add_column('listings', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_constraint('uq_listings_top_sub', 'listings', type_='unique')
    op.create_unique_constraint('uq_listings_top_sub', 'listings', ['top_id', 'sub_id', 'deleted_at'])
    op.add_column('listing_items', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('listing_items', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_listing_items', 'listing_items', ['listing_id', 'slug', 'deleted_at'])
def downgrade() -> None:
    """Remove the soft-delete columns and restore the pre-soft-delete
    unique constraints/indexes.

    NOTE(review): recreating the old two-column unique index/constraints can
    fail if soft-deleted duplicates exist in the data — confirm before
    downgrading a populated database.
    """
    # Drop the modified index
    op.drop_index('ix_product_likes_user_product', table_name='product_likes')
    # Recreate the old unique index
    op.create_index(
        'ix_product_likes_user_product',
        'product_likes',
        ['user_id', 'product_id'],
        unique=True
    )
    op.drop_constraint('uq_product_likes_product_user', 'product_likes', type_='unique')
    op.drop_column('product_likes', 'deleted_at')
    op.drop_constraint('uq_product_allergens_product_name', 'product_allergens', type_='unique')
    op.drop_column('product_allergens', 'deleted_at')
    op.drop_column('product_allergens', 'created_at')
    op.create_unique_constraint('uq_product_allergens_product_name', 'product_allergens', ['product_id', 'name'])
    op.drop_constraint('uq_product_attributes_product_key', 'product_attributes', type_='unique')
    op.drop_column('product_attributes', 'deleted_at')
    op.drop_column('product_attributes', 'created_at')
    op.create_unique_constraint('uq_product_attributes_product_key', 'product_attributes', ['product_id', 'key'])
    op.drop_constraint('uq_product_images', 'product_images', type_='unique')
    op.drop_column('product_images', 'deleted_at')
    op.drop_column('product_images', 'created_at')
    op.drop_constraint('uq_product_labels', 'product_labels', type_='unique')
    op.drop_column('product_labels', 'deleted_at')
    op.drop_column('product_labels', 'created_at')
    op.drop_constraint('uq_product_nutrition', 'product_nutrition', type_='unique')
    op.drop_column('product_nutrition', 'deleted_at')
    op.drop_column('product_nutrition', 'created_at')
    op.drop_constraint('uq_product_sections', 'product_sections', type_='unique')
    op.drop_column('product_sections', 'deleted_at')
    op.drop_column('product_sections', 'created_at')
    op.drop_constraint('uq_product_stickers', 'product_stickers', type_='unique')
    op.drop_column('product_stickers', 'deleted_at')
    op.drop_column('product_stickers', 'created_at')
    op.drop_constraint('uq_nav_tops', 'nav_tops', type_='unique')
    op.drop_column('nav_tops', 'deleted_at')
    op.drop_constraint('uq_nav_subs', 'nav_subs', type_='unique')
    op.drop_column('nav_subs', 'deleted_at')
    op.drop_column('nav_subs', 'created_at')
    op.drop_constraint('uq_listings_top_sub', 'listings', type_='unique')
    op.drop_column('listings', 'deleted_at')
    op.create_unique_constraint('uq_listings_top_sub', 'listings', ['top_id', 'sub_id'])
    op.drop_constraint('uq_listing_items', 'listing_items', type_='unique')
    op.drop_column('listing_items', 'deleted_at')
    op.drop_column('listing_items', 'created_at')

View File

@@ -0,0 +1,60 @@
"""Update nav_tops unique constraint to include deleted_at"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision='20251108_1_remove extra_uqs'
down_revision = '20251108_nav_tops_soft_delete'
branch_labels = None
depends_on = None
def upgrade():
    """For each product_* child table, drop both leftover unique constraints
    (the legacy one and the soft-delete one) and recreate a single
    soft-delete-aware constraint under the legacy name."""
    # Drop existing constraint
    op.drop_constraint('uq_product_images', 'product_images', type_='unique')
    op.drop_constraint('uq_product_images_product_url_kind', 'product_images', type_='unique')
    op.create_unique_constraint("uq_product_images_product_url_kind", "product_images", ["product_id", "url", "kind", "deleted_at"])
    op.drop_constraint('uq_product_labels', 'product_labels', type_='unique')
    op.drop_constraint('uq_product_labels_product_name', 'product_labels', type_='unique')
    op.create_unique_constraint("uq_product_labels_product_name", "product_labels", ["product_id", "name", "deleted_at"])
    op.drop_constraint('uq_product_nutrition', 'product_nutrition', type_='unique')
    op.drop_constraint('uq_product_nutrition_product_key', 'product_nutrition', type_='unique')
    op.create_unique_constraint("uq_product_nutrition_product_key", "product_nutrition", ["product_id", "key", "deleted_at"])
    op.drop_constraint('uq_product_sections', 'product_sections', type_='unique')
    op.drop_constraint('uq_product_sections_product_title', 'product_sections', type_='unique')
    op.create_unique_constraint("uq_product_sections_product_title", "product_sections", ["product_id", "title", "deleted_at"])
    op.drop_constraint('uq_product_stickers', 'product_stickers', type_='unique')
    op.drop_constraint('uq_product_stickers_product_name', 'product_stickers', type_='unique')
    op.create_unique_constraint("uq_product_stickers_product_name", "product_stickers", ["product_id", "name", "deleted_at"])
def downgrade():
    """Recreate both pre-migration unique constraints per table
    (the legacy columns-only one and the deleted_at-aware one)."""
    # Restore old constraint
    op.drop_constraint('uq_product_images_product_url_kind', 'product_images', type_='unique')
    op.create_unique_constraint("uq_product_images_product_url_kind", "product_images", ["product_id", "url", "kind"])
    op.create_unique_constraint("uq_product_images", "product_images", ["product_id", "position", "deleted_at"])
    op.drop_constraint('uq_product_labels_product_name', 'product_labels', type_='unique')
    op.create_unique_constraint("uq_product_labels_product_name", "product_labels", ["product_id", "name"])
    op.create_unique_constraint("uq_product_labels", "product_labels", ["product_id", "name", "deleted_at"])
    op.drop_constraint('uq_product_nutrition_product_key', 'product_nutrition', type_='unique')
    op.create_unique_constraint("uq_product_nutrition_product_key", "product_nutrition", ["product_id", "key"])
    op.create_unique_constraint("uq_product_nutrition", "product_nutrition", ["product_id", "key", "deleted_at"])
    op.drop_constraint('uq_product_sections_product_title', 'product_sections', type_='unique')
    op.create_unique_constraint("uq_product_sections_product_title", "product_sections", ["product_id", "title"])
    op.create_unique_constraint("uq_product_sections", "product_sections", ["product_id", "title", "deleted_at"])
    op.drop_constraint('uq_product_stickers_product_name', 'product_stickers', type_='unique')
    op.create_unique_constraint("uq_product_stickers_product_name", "product_stickers", ["product_id", "name", ])
    op.create_unique_constraint("uq_product_stickers", "product_stickers", ["product_id", "name", "deleted_at" ])

View File

@@ -0,0 +1,36 @@
"""Update nav_tops unique constraint to include deleted_at"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '20251108_nav_tops_soft_delete'
down_revision = '20251108_soft_delete_all'
branch_labels = None
depends_on = None
def upgrade():
    """Replace uq_nav_tops_label_slug with a soft-delete-aware variant."""
    constraint_name = 'uq_nav_tops_label_slug'
    op.drop_constraint(constraint_name, 'nav_tops', type_='unique')
    # Same name, but deleted_at participates in uniqueness now.
    op.create_unique_constraint(
        constraint_name,
        'nav_tops',
        ['label', 'slug', 'deleted_at']
    )
def downgrade():
    """Restore the original (label, slug) unique constraint on nav_tops."""
    constraint_name = 'uq_nav_tops_label_slug'
    op.drop_constraint(constraint_name, 'nav_tops', type_='unique')
    op.create_unique_constraint(
        constraint_name,
        'nav_tops',
        ['label', 'slug']
    )

View File

@@ -0,0 +1,92 @@
"""add calendars & calendar_entries
Revision ID: 215330c5ec15
Revises: 20251102_223123
Create Date: 2025-11-03 13:07:10.387189
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "215330c5ec15"
down_revision = "20251102_223123"
branch_labels = None
depends_on = None
def upgrade():
    """Create calendars and calendar_entries.

    Uniqueness for calendars is soft-delete-aware: one *active* calendar per
    (post_id, lower(slug)), enforced with a Postgres partial unique index
    rather than a hard UniqueConstraint.
    """
    # calendars
    op.create_table(
        "calendars",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        # no hard UniqueConstraint; we enforce soft-delete-aware uniqueness with a partial index below
    )
    # helpful lookup indexes
    op.create_index("ix_calendars_post_id", "calendars", ["post_id"], unique=False)
    op.create_index("ix_calendars_name", "calendars", ["name"], unique=False)
    op.create_index("ix_calendars_slug", "calendars", ["slug"], unique=False)
    # calendar_entries
    op.create_table(
        "calendar_entries",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("calendar_id", sa.Integer(), sa.ForeignKey("calendars.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("start_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("end_at", sa.DateTime(timezone=True), nullable=True),  # <- allow open-ended entries
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.CheckConstraint("(end_at IS NULL) OR (end_at >= start_at)", name="ck_calendar_entries_end_after_start"),
    )
    op.create_index("ix_calendar_entries_calendar_id", "calendar_entries", ["calendar_id"], unique=False)
    op.create_index("ix_calendar_entries_start_at", "calendar_entries", ["start_at"], unique=False)
    op.create_index("ix_calendar_entries_name", "calendar_entries", ["name"], unique=False)
    # ---- Soft-delete-aware uniqueness for calendars (Postgres) ----
    # One active calendar per (post_id, lower(slug)); skipped on other dialects.
    if op.get_bind().dialect.name == "postgresql":
        # cleanup any active duplicates (defensive; table is new on fresh runs)
        # Keeps the most recently updated/created row per key, soft-deletes the rest.
        op.execute("""
        WITH ranked AS (
            SELECT
                id,
                ROW_NUMBER() OVER (
                    PARTITION BY post_id, lower(slug)
                    ORDER BY updated_at DESC, created_at DESC, id DESC
                ) AS rn
            FROM calendars
            WHERE deleted_at IS NULL
        )
        UPDATE calendars c
        SET deleted_at = NOW()
        FROM ranked r
        WHERE c.id = r.id AND r.rn > 1;
        """)
        op.execute("""
        CREATE UNIQUE INDEX IF NOT EXISTS ux_calendars_post_slug_active
        ON calendars (post_id, lower(slug))
        WHERE deleted_at IS NULL;
        """)
def downgrade():
    """Drop calendar_entries then calendars (and the PG partial index)."""
    # drop in reverse dependency order
    op.drop_index("ix_calendar_entries_name", table_name="calendar_entries")
    op.drop_index("ix_calendar_entries_start_at", table_name="calendar_entries")
    op.drop_index("ix_calendar_entries_calendar_id", table_name="calendar_entries")
    op.drop_table("calendar_entries")
    # The partial unique index only exists on Postgres (see upgrade()).
    if op.get_bind().dialect.name == "postgresql":
        op.execute("DROP INDEX IF EXISTS ux_calendars_post_slug_active;")
    op.drop_index("ix_calendars_slug", table_name="calendars")
    op.drop_index("ix_calendars_name", table_name="calendars")
    op.drop_index("ix_calendars_post_id", table_name="calendars")
    op.drop_table("calendars")

View File

@@ -0,0 +1,29 @@
"""Remove product_slug from product_likes
Revision ID: remove_product_slug_20251107
Revises: 0d767ad92dd7
Create Date: 2025-11-07
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'remove_product_slug_20251107'
down_revision = '20251107_add_missing_indexes'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Drop the redundant product_likes.product_slug column
    (superseded by the product_id FK added in a prior revision)."""
    with op.batch_alter_table("product_likes") as batch_op:
        batch_op.drop_column("product_slug")
def downgrade() -> None:
    """Re-add the product_slug column dropped by upgrade().

    The original column was NOT NULL, but the slug data is lost once the
    column is dropped, so re-adding a NOT NULL column with no default would
    fail on any table that still has rows. Adding it with an empty-string
    server default lets the DDL succeed; real slugs must be re-backfilled
    from products if the column is to be used again.
    """
    with op.batch_alter_table("product_likes") as batch_op:
        batch_op.add_column(sa.Column(
            "product_slug",
            sa.String(length=255),
            nullable=False,
            # Allows ADD COLUMN ... NOT NULL to succeed on populated tables.
            server_default="",
        ))

24
alembic/script.py.mako Normal file
View File

@@ -0,0 +1,24 @@
<%text>
# Alembic migration script template
</%text>
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
    ${upgrades if upgrades else "pass"}
def downgrade() -> None:
    ${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,20 @@
"""Initial database schema from schema.sql"""
from alembic import op
import sqlalchemy as sa
import pathlib
# revision identifiers, used by Alembic
revision = '0000_alembic'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create Alembic's own version-tracking table if it is missing.

    This revision creates only alembic_version itself — it does not
    load any application schema.
    """
    # IF NOT EXISTS keeps this idempotent on databases that were
    # previously stamped by Alembic.
    op.execute("""
    CREATE TABLE IF NOT EXISTS alembic_version (
        version_num VARCHAR(32) NOT NULL,
        CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num)
    );
    """)

View File

@@ -0,0 +1,33 @@
"""Initial database schema from schema.sql"""
from alembic import op
import sqlalchemy as sa
import pathlib
# revision identifiers, used by Alembic
revision = '0001_initial_schema'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Intentionally disabled: returns before loading schema.sql.

    The early ``return`` keeps the original implementation below for
    reference without executing it — presumably because the schema is
    provisioned by other means (TODO: confirm).
    """
    return
    # --- unreachable: original implementation kept for reference ---
    schema_path = pathlib.Path(__file__).parent.parent.parent / "schema.sql"
    with open(schema_path, encoding="utf-8") as f:
        sql = f.read()
    conn = op.get_bind()
    conn.execute(sa.text(sql))
def downgrade():
    """Intentionally disabled: returns before dropping anything.

    The unreachable code below would drop *every* table in the
    'public' schema with CASCADE; it is kept only for reference.
    """
    return
    # Drop all user-defined tables in the 'public' schema
    conn = op.get_bind()
    conn.execute(sa.text("""
    DO $$ DECLARE
      r RECORD;
    BEGIN
      FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = 'public') LOOP
        EXECUTE 'DROP TABLE IF EXISTS public.' || quote_ident(r.tablename) || ' CASCADE';
      END LOOP;
    END $$;
    """))

View File

@@ -0,0 +1,78 @@
"""Add cart_items table for shopping cart"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0002_add_cart_items"
down_revision = "0001_initial_schema"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the cart_items table plus its lookup indexes.

    A cart row belongs either to a logged-in user (user_id) or to an
    anonymous visitor (session_id); both columns are nullable and the
    application decides which one to populate.
    """
    op.create_table(
        "cart_items",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        # Owner: a registered user *or* an anonymous session.
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=True),
        sa.Column("session_id", sa.String(length=128), nullable=True),
        # Products are referenced by primary key, never by slug.
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("quantity", sa.Integer(), nullable=False, server_default="1"),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("now()")),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("now()")),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
    )
    # Composite indexes for the two cart-lookup paths.
    for name, cols in (
        ("ix_cart_items_user_product", ["user_id", "product_id"]),
        ("ix_cart_items_session_product", ["session_id", "product_id"]),
    ):
        op.create_index(name, "cart_items", cols, unique=False)
def downgrade() -> None:
    """Remove cart_items and its indexes in reverse creation order."""
    for name in ("ix_cart_items_session_product", "ix_cart_items_user_product"):
        op.drop_index(name, table_name="cart_items")
    op.drop_table("cart_items")

View File

@@ -0,0 +1,118 @@
"""Add orders and order_items tables for checkout"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0003_add_orders"
down_revision = "0002_add_cart_items"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the orders and order_items tables for checkout.

    orders carries the SumUp payment-gateway fields; order_items
    snapshots the product title and unit price at purchase time.
    """
    op.create_table(
        "orders",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        # Buyer: a registered user or an anonymous session.
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id"), nullable=True),
        sa.Column("session_id", sa.String(length=64), nullable=True),
        sa.Column("status", sa.String(length=32), nullable=False, server_default="pending"),
        sa.Column("currency", sa.String(length=16), nullable=False, server_default="GBP"),
        sa.Column("total_amount", sa.Numeric(12, 2), nullable=False),
        # SumUp integration fields.
        sa.Column("sumup_checkout_id", sa.String(length=128), nullable=True),
        sa.Column("sumup_status", sa.String(length=32), nullable=True),
        sa.Column("sumup_hosted_url", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    # Lookup indexes matching the model's index=True hints.
    for name, cols in (
        ("ix_orders_session_id", ["session_id"]),
        ("ix_orders_sumup_checkout_id", ["sumup_checkout_id"]),
    ):
        op.create_index(name, "orders", cols, unique=False)
    op.create_table(
        "order_items",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("order_id", sa.Integer(), sa.ForeignKey("orders.id", ondelete="CASCADE"), nullable=False),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id"), nullable=False),
        # Snapshot of the product title at the moment of purchase.
        sa.Column("product_title", sa.String(length=512), nullable=True),
        sa.Column("quantity", sa.Integer(), nullable=False, server_default="1"),
        sa.Column("unit_price", sa.Numeric(12, 2), nullable=False),
        sa.Column("currency", sa.String(length=16), nullable=False, server_default="GBP"),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
def downgrade() -> None:
    """Drop checkout tables in dependency order (order_items first)."""
    op.drop_table("order_items")
    for name in ("ix_orders_sumup_checkout_id", "ix_orders_session_id"):
        op.drop_index(name, table_name="orders")
    op.drop_table("orders")

View File

@@ -0,0 +1,27 @@
"""Add sumup_reference to orders"""
from alembic import op
import sqlalchemy as sa
revision = "0004_add_sumup_reference"
down_revision = "0003_add_orders"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add orders.sumup_reference (merchant reference) plus an index."""
    op.add_column("orders", sa.Column("sumup_reference", sa.String(length=255), nullable=True))
    op.create_index("ix_orders_sumup_reference", "orders", ["sumup_reference"], unique=False)
def downgrade() -> None:
    """Remove the sumup_reference column and its index."""
    op.drop_index("ix_orders_sumup_reference", table_name="orders")
    op.drop_column("orders", "sumup_reference")

View File

@@ -0,0 +1,27 @@
"""Add description field to orders"""
from alembic import op
import sqlalchemy as sa
revision = "0005_add_description"
down_revision = "0004_add_sumup_reference"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add a free-text description column to orders, plus an index."""
    op.add_column("orders", sa.Column("description", sa.Text(), nullable=True))
    # NOTE(review): a btree index on an unbounded Text column is of
    # limited use and can hit index row-size limits on long values —
    # confirm a query actually needs it.
    op.create_index("ix_orders_description", "orders", ["description"], unique=False)
def downgrade() -> None:
    """Remove orders.description and its index."""
    op.drop_index("ix_orders_description", table_name="orders")
    op.drop_column("orders", "description")

View File

@@ -0,0 +1,28 @@
from alembic import op
import sqlalchemy as sa
revision = '0006_update_calendar_entries'
down_revision = '0005_add_description' # use the appropriate previous revision ID
branch_labels = None
depends_on = None
def upgrade():
    """Attach ownership, state and cost to calendar_entries.

    user_id/session_id mirror the cart convention: an entry belongs
    either to a registered user or to an anonymous session.
    """
    op.add_column('calendar_entries', sa.Column('user_id', sa.Integer(), nullable=True))
    op.create_foreign_key('fk_calendar_entries_user_id', 'calendar_entries', 'users', ['user_id'], ['id'])
    op.add_column('calendar_entries', sa.Column('session_id', sa.String(length=128), nullable=True))
    # Workflow state and booking cost; server defaults backfill existing rows.
    op.add_column('calendar_entries', sa.Column('state', sa.String(length=20), nullable=False, server_default='pending'))
    op.add_column('calendar_entries', sa.Column('cost', sa.Numeric(10, 2), nullable=False, server_default='10'))
    # Indexes for owner lookups.
    for col in ('user_id', 'session_id'):
        op.create_index(f'ix_calendar_entries_{col}', 'calendar_entries', [col])
def downgrade():
    """Undo upgrade(): drop indexes, columns and the FK in reverse order."""
    for col in ('session_id', 'user_id'):
        op.drop_index(f'ix_calendar_entries_{col}', table_name='calendar_entries')
    op.drop_column('calendar_entries', 'cost')
    op.drop_column('calendar_entries', 'state')
    op.drop_column('calendar_entries', 'session_id')
    # The FK constraint must be dropped before its column.
    op.drop_constraint('fk_calendar_entries_user_id', 'calendar_entries', type_='foreignkey')
    op.drop_column('calendar_entries', 'user_id')

View File

@@ -0,0 +1,50 @@
from alembic import op
import sqlalchemy as sa
revision = "0007_add_oid_entries"
down_revision = "0006_update_calendar_entries"
branch_labels = None
depends_on = None
def upgrade():
    """Link calendar_entries to orders and index the state column."""
    op.add_column("calendar_entries", sa.Column("order_id", sa.Integer(), nullable=True))
    # Keep the entry if its order disappears (SET NULL, not CASCADE).
    op.create_foreign_key(
        "fk_calendar_entries_order_id",
        "calendar_entries",
        "orders",
        ["order_id"],
        ["id"],
        ondelete="SET NULL",
    )
    op.create_index("ix_calendar_entries_order_id", "calendar_entries", ["order_id"], unique=False)
    # Speeds up queries filtering by workflow state.
    op.create_index("ix_calendar_entries_state", "calendar_entries", ["state"], unique=False)
def downgrade():
    """Reverse upgrade(): indexes first, then the FK, then the column."""
    for name in ("ix_calendar_entries_state", "ix_calendar_entries_order_id"):
        op.drop_index(name, table_name="calendar_entries")
    op.drop_constraint("fk_calendar_entries_order_id", "calendar_entries", type_="foreignkey")
    op.drop_column("calendar_entries", "order_id")

View File

@@ -0,0 +1,33 @@
"""add flexible flag to calendar_slots
Revision ID: 0008_add_flexible_to_calendar_slots
Revises: 0007_add_oid_entries
Create Date: 2025-12-06 12:34:56.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0008_add_flexible_to_slots"
down_revision = "0007_add_oid_entries"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add a NOT NULL boolean `flexible` flag to calendar_slots.

    Existing rows are backfilled to False via a temporary server
    default, which is then removed so future inserts must supply the
    value explicitly.
    """
    op.add_column(
        "calendar_slots",
        sa.Column("flexible", sa.Boolean(), nullable=False, server_default=sa.false()),
    )
    op.alter_column("calendar_slots", "flexible", server_default=None)
def downgrade() -> None:
    """Drop the flexible flag again."""
    op.drop_column("calendar_slots", "flexible")

View File

@@ -0,0 +1,54 @@
"""add slot_id to calendar_entries
Revision ID: 0009_add_slot_id_to_entries
Revises: 0008_add_flexible_to_slots
Create Date: 2025-12-06 13:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0009_add_slot_id_to_entries"
down_revision = "0008_add_flexible_to_slots"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Point calendar_entries at the calendar_slots row they occupy."""
    # Nullable: pre-existing entries have no slot, and deleting a slot
    # detaches the entry (SET NULL) rather than deleting it.
    op.add_column("calendar_entries", sa.Column("slot_id", sa.Integer(), nullable=True))
    op.create_foreign_key(
        "fk_calendar_entries_slot_id_calendar_slots",
        "calendar_entries",
        "calendar_slots",
        ["slot_id"],
        ["id"],
        ondelete="SET NULL",
    )
    # Index for slot -> entries lookups.
    op.create_index("ix_calendar_entries_slot_id", "calendar_entries", ["slot_id"])
def downgrade() -> None:
    """Remove index, FK and column in reverse creation order."""
    op.drop_index("ix_calendar_entries_slot_id", table_name="calendar_entries")
    op.drop_constraint(
        "fk_calendar_entries_slot_id_calendar_slots",
        "calendar_entries",
        type_="foreignkey",
    )
    op.drop_column("calendar_entries", "slot_id")

View File

@@ -0,0 +1,64 @@
"""Add post_likes table for liking blog posts
Revision ID: 0010_add_post_likes
Revises: 0009_add_slot_id_to_entries
Create Date: 2025-12-07 13:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0010_add_post_likes"
down_revision = "0009_add_slot_id_to_entries"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create post_likes, recording which users liked which posts."""
    op.create_table(
        "post_likes",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("now()")),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("now()")),
        # Presumably a soft-delete marker (NULL = active) — confirm with the model.
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
    )
    # Composite index for the user+post lookup path.
    op.create_index("ix_post_likes_user_post", "post_likes", ["user_id", "post_id"], unique=False)
def downgrade() -> None:
    """Drop post_likes and its lookup index."""
    op.drop_index("ix_post_likes_user_post", table_name="post_likes")
    op.drop_table("post_likes")

View File

@@ -0,0 +1,43 @@
"""Add ticket_price and ticket_count to calendar_entries
Revision ID: 0011_add_entry_tickets
Revises: 0010_add_post_likes
Create Date: 2025-12-07 14:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import NUMERIC
# revision identifiers, used by Alembic.
revision = "0011_add_entry_tickets"
down_revision = "0010_add_post_likes"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add ticketing columns to calendar_entries.

    ticket_price: NULL means the entry sells no tickets.
    ticket_count: NULL means unlimited availability.
    """
    op.add_column("calendar_entries", sa.Column("ticket_price", NUMERIC(10, 2), nullable=True))
    op.add_column("calendar_entries", sa.Column("ticket_count", sa.Integer(), nullable=True))
def downgrade() -> None:
    """Remove the ticketing columns in reverse order of addition."""
    op.drop_column("calendar_entries", "ticket_count")
    op.drop_column("calendar_entries", "ticket_price")

View File

@@ -0,0 +1,41 @@
# Alembic migration script template
"""add ticket_types table
Revision ID: 47fc53fc0d2b
Revises: a9f54e4eaf02
Create Date: 2025-12-08 07:29:11.422435
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '47fc53fc0d2b'
down_revision = 'a9f54e4eaf02'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create ticket_types: per-entry ticket tiers with price and stock."""
    op.create_table(
        'ticket_types',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('entry_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('cost', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('count', sa.Integer(), nullable=False),
        # NOTE(review): created_at/updated_at are NOT NULL with no server
        # default, so inserts must supply them — confirm the model does.
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['entry_id'], ['calendar_entries.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    )
    for name, cols in (
        ('ix_ticket_types_entry_id', ['entry_id']),
        ('ix_ticket_types_name', ['name']),
    ):
        op.create_index(name, 'ticket_types', cols, unique=False)
def downgrade() -> None:
    """Drop ticket_types and its indexes."""
    for name in ('ix_ticket_types_name', 'ix_ticket_types_entry_id'):
        op.drop_index(name, table_name='ticket_types')
    op.drop_table('ticket_types')

View File

@@ -0,0 +1,36 @@
# Alembic migration script template
"""Add calendar_entry_posts association table
Revision ID: 6cb124491c9d
Revises: 0011_add_entry_tickets
Create Date: 2025-12-07 03:40:49.194068
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import TIMESTAMP
# revision identifiers, used by Alembic.
revision = '6cb124491c9d'
down_revision = '0011_add_entry_tickets'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the calendar_entry_posts many-to-many association table."""
    op.create_table(
        'calendar_entry_posts',
        sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column('entry_id', sa.Integer(), sa.ForeignKey('calendar_entries.id', ondelete='CASCADE'), nullable=False),
        sa.Column('post_id', sa.Integer(), sa.ForeignKey('posts.id', ondelete='CASCADE'), nullable=False),
        sa.Column('created_at', TIMESTAMP(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('deleted_at', TIMESTAMP(timezone=True), nullable=True),
    )
    # One lookup index per side of the association.
    for name, col in (
        ('ix_entry_posts_entry_id', 'entry_id'),
        ('ix_entry_posts_post_id', 'post_id'),
    ):
        op.create_index(name, 'calendar_entry_posts', [col])
def downgrade() -> None:
    """Drop the association table and its indexes."""
    for name in ('ix_entry_posts_post_id', 'ix_entry_posts_entry_id'):
        op.drop_index(name, 'calendar_entry_posts')
    op.drop_table('calendar_entry_posts')

View File

@@ -0,0 +1,37 @@
# Alembic migration script template
"""add menu_items table
Revision ID: a9f54e4eaf02
Revises: 6cb124491c9d
Create Date: 2025-12-07 17:38:54.839296
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a9f54e4eaf02'
down_revision = '6cb124491c9d'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create menu_items: an ordered list of posts rendered as a menu."""
    op.create_table(
        'menu_items',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('post_id', sa.Integer(), nullable=False),
        sa.Column('sort_order', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    )
    # op.f() marks the names as already naming-convention-qualified.
    for col in ('post_id', 'sort_order'):
        op.create_index(op.f(f'ix_menu_items_{col}'), 'menu_items', [col], unique=False)
def downgrade() -> None:
    """Drop menu_items and its indexes."""
    for col in ('sort_order', 'post_id'):
        op.drop_index(op.f(f'ix_menu_items_{col}'), table_name='menu_items')
    op.drop_table('menu_items')

View File

@@ -0,0 +1,35 @@
"""add snippets table
Revision ID: c3a1f7b9d4e5
Revises: 47fc53fc0d2b
Create Date: 2026-02-07
"""
from alembic import op
import sqlalchemy as sa
revision = 'c3a1f7b9d4e5'
down_revision = '47fc53fc0d2b'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create snippets: per-user named text fragments with visibility."""
    op.create_table(
        'snippets',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('value', sa.Text(), nullable=False),
        # Defaults to 'private' unless explicitly shared.
        sa.Column('visibility', sa.String(length=20), server_default='private', nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        # A user cannot own two snippets with the same name.
        sa.UniqueConstraint('user_id', 'name', name='uq_snippets_user_name'),
    )
    op.create_index('ix_snippets_visibility', 'snippets', ['visibility'])
def downgrade() -> None:
    """Drop snippets and its visibility index."""
    op.drop_index('ix_snippets_visibility', table_name='snippets')
    op.drop_table('snippets')

View File

@@ -0,0 +1,45 @@
"""add post user_id, author email, publish_requested
Revision ID: d4b2e8f1a3c7
Revises: c3a1f7b9d4e5
Create Date: 2026-02-08
"""
from alembic import op
import sqlalchemy as sa
revision = 'd4b2e8f1a3c7'
down_revision = 'c3a1f7b9d4e5'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add authors.email and posts.user_id / publish_requested, then
    backfill posts.user_id by matching author email to user email."""
    op.add_column('authors', sa.Column('email', sa.String(255), nullable=True))
    # Ownership link from posts to users; survives user deletion as NULL.
    op.add_column('posts', sa.Column('user_id', sa.Integer(), nullable=True))
    op.create_foreign_key('fk_posts_user_id', 'posts', 'users', ['user_id'], ['id'], ondelete='SET NULL')
    op.create_index('ix_posts_user_id', 'posts', ['user_id'])
    # Editorial flag: the author has asked for the post to be published.
    op.add_column('posts', sa.Column('publish_requested', sa.Boolean(), server_default='false', nullable=False))
    # Backfill: match posts to users via primary_author email (case-insensitive).
    op.execute("""
        UPDATE posts
        SET user_id = u.id
        FROM authors a
        JOIN users u ON lower(a.email) = lower(u.email)
        WHERE posts.primary_author_id = a.id
        AND posts.user_id IS NULL
        AND a.email IS NOT NULL
    """)
def downgrade() -> None:
    """Reverse upgrade(): flag, index, FK, then the columns."""
    op.drop_column('posts', 'publish_requested')
    op.drop_index('ix_posts_user_id', table_name='posts')
    op.drop_constraint('fk_posts_user_id', 'posts', type_='foreignkey')
    op.drop_column('posts', 'user_id')
    op.drop_column('authors', 'email')

View File

@@ -0,0 +1,45 @@
"""add tag_groups and tag_group_tags
Revision ID: e5c3f9a2b1d6
Revises: d4b2e8f1a3c7
Create Date: 2026-02-08
"""
from alembic import op
import sqlalchemy as sa
revision = 'e5c3f9a2b1d6'
down_revision = 'd4b2e8f1a3c7'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create tag_groups and the tag_group_tags association table."""
    op.create_table(
        'tag_groups',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('slug', sa.String(length=191), nullable=False),
        sa.Column('feature_image', sa.Text(), nullable=True),
        sa.Column('colour', sa.String(length=32), nullable=True),
        sa.Column('sort_order', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        # Slug is the public identifier, so it must be unique.
        sa.UniqueConstraint('slug'),
    )
    op.create_table(
        'tag_group_tags',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('tag_group_id', sa.Integer(), nullable=False),
        sa.Column('tag_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['tag_group_id'], ['tag_groups.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        # Each tag may appear at most once per group.
        sa.UniqueConstraint('tag_group_id', 'tag_id', name='uq_tag_group_tag'),
    )
def downgrade() -> None:
    """Drop the association table first, then tag_groups."""
    op.drop_table('tag_group_tags')
    op.drop_table('tag_groups')

View File

@@ -0,0 +1,47 @@
"""add tickets table
Revision ID: f6d4a1b2c3e7
Revises: e5c3f9a2b1d6
Create Date: 2026-02-09
"""
from alembic import op
import sqlalchemy as sa
revision = 'f6d4a1b2c3e7'
down_revision = 'e5c3f9a2b1d6'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the tickets table: one row per reserved/issued ticket."""
    op.create_table(
        'tickets',
        sa.Column('id', sa.Integer(), primary_key=True),
        sa.Column('entry_id', sa.Integer(), sa.ForeignKey('calendar_entries.id', ondelete='CASCADE'), nullable=False),
        sa.Column('ticket_type_id', sa.Integer(), sa.ForeignKey('ticket_types.id', ondelete='SET NULL'), nullable=True),
        # Holder: a registered user or an anonymous session.
        sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
        sa.Column('session_id', sa.String(64), nullable=True),
        sa.Column('order_id', sa.Integer(), sa.ForeignKey('orders.id', ondelete='SET NULL'), nullable=True),
        # Redemption code; unique at the column level.
        sa.Column('code', sa.String(64), unique=True, nullable=False),
        sa.Column('state', sa.String(20), nullable=False, server_default=sa.text("'reserved'")),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('checked_in_at', sa.DateTime(timezone=True), nullable=True),
    )
    # Lookup indexes.  NOTE(review): ix_tickets_code duplicates the
    # unique=True on the code column — consider dropping one of them.
    for name, col, uniq in (
        ('ix_tickets_entry_id', 'entry_id', False),
        ('ix_tickets_ticket_type_id', 'ticket_type_id', False),
        ('ix_tickets_user_id', 'user_id', False),
        ('ix_tickets_session_id', 'session_id', False),
        ('ix_tickets_order_id', 'order_id', False),
        ('ix_tickets_code', 'code', True),
        ('ix_tickets_state', 'state', False),
    ):
        op.create_index(name, 'tickets', [col], unique=uniq)
def downgrade() -> None:
    """Drop every ticket index, then the table itself."""
    for name in (
        'ix_tickets_state',
        'ix_tickets_code',
        'ix_tickets_order_id',
        'ix_tickets_session_id',
        'ix_tickets_user_id',
        'ix_tickets_ticket_type_id',
        'ix_tickets_entry_id',
    ):
        op.drop_index(name, 'tickets')
    op.drop_table('tickets')