feat: initial shared library extraction

Contains shared infrastructure for all coop services:
- shared/ (factory, urls, user_loader, context, internal_api, jinja_setup)
- models/ (User, Order, Calendar, Ticket, Product, Ghost CMS)
- db/ (SQLAlchemy async session, base)
- suma_browser/app/ (csrf, middleware, errors, authz, redis_cacher, payments, filters, utils)
- suma_browser/templates/ (shared base layouts, macros, error pages)
- static/ (CSS, JS, fonts, images)
- alembic/ (database migrations)
- config/ (app-config.yaml)
- editor/ (Lexical editor Node.js build)
- requirements.txt

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
giles
2026-02-09 23:11:36 +00:00
commit 668d9c7df8
446 changed files with 22741 additions and 0 deletions

View File

@@ -0,0 +1,241 @@
"""snapshot writes to postgres (products/nav/listings/reports)
Revision ID: 20251107_090500_snapshot_to_db
Revises: 20251106_152905_calendar_config
Create Date: 2025-11-07T09:05:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_090500_snapshot_to_db"
down_revision = "20251106_152905_calendar_config"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create snapshot tables: products, nav, listings and report tables.

    The products / product_sections / product_images groups are guarded with
    inspector existence checks so the migration stays idempotent against
    databases where those tables were already created out-of-band (see the
    original "keep idempotent" note). The nav/listings/report tables are
    always created.

    NOTE(review): the original code called ``bind.dialect.has_table(bind, ...)``
    (internal dialect API) and ``bind.dialect.has_column(...)``, which does not
    exist in SQLAlchemy and raised AttributeError whenever ``product_images``
    already existed. Both checks now go through ``sa.inspect(bind)``.
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)

    # products — guarded: may already be present in the target DB.
    if not inspector.has_table("products"):
        op.create_table(
            "products",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("slug", sa.String(length=255), nullable=False, unique=True),
            sa.Column("title", sa.String(length=512), nullable=True),
            sa.Column("image", sa.Text(), nullable=True),
            sa.Column("description_short", sa.Text(), nullable=True),
            sa.Column("description_html", sa.Text(), nullable=True),
            sa.Column("suma_href", sa.Text(), nullable=True),
            sa.Column("brand", sa.String(length=255), nullable=True),
            sa.Column("rrp", sa.Numeric(12, 2), nullable=True),
            sa.Column("rrp_currency", sa.String(length=16), nullable=True),
            sa.Column("rrp_raw", sa.String(length=128), nullable=True),
            sa.Column("price_per_unit", sa.Numeric(12, 4), nullable=True),
            sa.Column("price_per_unit_currency", sa.String(length=16), nullable=True),
            sa.Column("price_per_unit_raw", sa.String(length=128), nullable=True),
            sa.Column("special_price", sa.Numeric(12, 2), nullable=True),
            sa.Column("special_price_currency", sa.String(length=16), nullable=True),
            sa.Column("special_price_raw", sa.String(length=128), nullable=True),
            sa.Column("case_size_count", sa.Integer(), nullable=True),
            sa.Column("case_size_item_qty", sa.Numeric(12, 3), nullable=True),
            sa.Column("case_size_item_unit", sa.String(length=32), nullable=True),
            sa.Column("case_size_raw", sa.String(length=128), nullable=True),
            sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
            sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
            sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        )
        op.create_index("ix_products_slug", "products", ["slug"], unique=False)

    # product_sections — guarded for the same reason.
    if not inspector.has_table("product_sections"):
        op.create_table(
            "product_sections",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
            sa.Column("title", sa.String(length=255), nullable=False),
            sa.Column("html", sa.Text(), nullable=False),
        )
        op.create_index("ix_product_sections_product_id", "product_sections", ["product_id"], unique=False)
        op.create_unique_constraint("uq_product_sections_product_title", "product_sections", ["product_id", "title"])

    # product_images — create fresh, or retrofit `kind` + widened unique.
    if not inspector.has_table("product_images"):
        op.create_table(
            "product_images",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
            sa.Column("url", sa.Text(), nullable=False),
            sa.Column("position", sa.Integer(), nullable=False, server_default="0"),
            sa.Column("kind", sa.String(length=16), nullable=False, server_default="gallery"),
            sa.CheckConstraint("position >= 0", name="ck_product_images_position_nonneg"),
        )
        op.create_index("ix_product_images_product_id", "product_images", ["product_id"], unique=False)
        op.create_index("ix_product_images_position", "product_images", ["position"], unique=False)
        op.create_unique_constraint("uq_product_images_product_url_kind", "product_images", ["product_id", "url", "kind"])
    else:
        # Existing table: add `kind` if missing and move the unique constraint
        # from (product_id, url) to (product_id, url, kind).
        existing_columns = {col["name"] for col in inspector.get_columns("product_images")}
        with op.batch_alter_table("product_images") as batch_op:
            if "kind" not in existing_columns:
                batch_op.add_column(sa.Column("kind", sa.String(length=16), nullable=False, server_default="gallery"))
            try:
                batch_op.drop_constraint("uq_product_images_product_url", type_="unique")
            except Exception:
                # Best-effort: the old constraint may never have existed.
                pass
            batch_op.create_unique_constraint("uq_product_images_product_url_kind", ["product_id", "url", "kind"])

    # nav_tops / nav_subs
    op.create_table(
        "nav_tops",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("label", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_nav_tops_slug", "nav_tops", ["slug"], unique=False)
    op.create_unique_constraint("uq_nav_tops_label_slug", "nav_tops", ["label", "slug"])
    op.create_table(
        "nav_subs",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("top_id", sa.Integer(), sa.ForeignKey("nav_tops.id", ondelete="CASCADE"), nullable=False),
        sa.Column("label", sa.String(length=255), nullable=True),
        sa.Column("slug", sa.String(length=255), nullable=False),
        sa.Column("href", sa.Text(), nullable=True),
    )
    op.create_index("ix_nav_subs_top_id", "nav_subs", ["top_id"], unique=False)
    op.create_index("ix_nav_subs_slug", "nav_subs", ["slug"], unique=False)
    op.create_unique_constraint("uq_nav_subs_top_slug", "nav_subs", ["top_id", "slug"])

    # listings & listing_items
    op.create_table(
        "listings",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("top_slug", sa.String(length=255), nullable=False),
        sa.Column("sub_slug", sa.String(length=255), nullable=True),
        sa.Column("total_pages", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_listings_top_slug", "listings", ["top_slug"], unique=False)
    op.create_index("ix_listings_sub_slug", "listings", ["sub_slug"], unique=False)
    op.create_unique_constraint("uq_listings_top_sub", "listings", ["top_slug", "sub_slug"])
    op.create_table(
        "listing_items",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("listing_id", sa.Integer(), sa.ForeignKey("listings.id", ondelete="CASCADE"), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=False),
    )
    op.create_index("ix_listing_items_listing_id", "listing_items", ["listing_id"], unique=False)
    op.create_index("ix_listing_items_slug", "listing_items", ["slug"], unique=False)
    op.create_unique_constraint("uq_listing_items_listing_slug", "listing_items", ["listing_id", "slug"])

    # reports: link_errors, link_externals, subcategory_redirects, product_logs
    op.create_table(
        "link_errors",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_slug", sa.String(length=255), nullable=True),
        sa.Column("href", sa.Text(), nullable=True),
        sa.Column("text", sa.Text(), nullable=True),
        sa.Column("top", sa.String(length=255), nullable=True),
        sa.Column("sub", sa.String(length=255), nullable=True),
        sa.Column("target_slug", sa.String(length=255), nullable=True),
        sa.Column("type", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_link_errors_product_slug", "link_errors", ["product_slug"], unique=False)
    op.create_table(
        "link_externals",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_slug", sa.String(length=255), nullable=True),
        sa.Column("href", sa.Text(), nullable=True),
        sa.Column("text", sa.Text(), nullable=True),
        sa.Column("host", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_link_externals_product_slug", "link_externals", ["product_slug"], unique=False)
    op.create_table(
        "subcategory_redirects",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("old_path", sa.String(length=512), nullable=False),
        sa.Column("new_path", sa.String(length=512), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_subcategory_redirects_old_path", "subcategory_redirects", ["old_path"], unique=False)
    op.create_unique_constraint("uq_subcategory_redirects_old_new", "subcategory_redirects", ["old_path", "new_path"])
    op.create_table(
        "product_logs",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("href_tried", sa.Text(), nullable=True),
        sa.Column("ok", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("error_type", sa.String(length=255), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("http_status", sa.Integer(), nullable=True),
        sa.Column("final_url", sa.Text(), nullable=True),
        sa.Column("transport_error", sa.Boolean(), nullable=True),
        sa.Column("title", sa.String(length=512), nullable=True),
        sa.Column("has_description_html", sa.Boolean(), nullable=True),
        sa.Column("has_description_short", sa.Boolean(), nullable=True),
        sa.Column("sections_count", sa.Integer(), nullable=True),
        sa.Column("images_count", sa.Integer(), nullable=True),
        sa.Column("embedded_images_count", sa.Integer(), nullable=True),
        sa.Column("all_images_count", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_product_logs_slug", "product_logs", ["slug"], unique=False)
def downgrade() -> None:
    """Drop everything created by upgrade(), FK children before parents.

    NOTE(review): drops are unconditional even though upgrade() guarded
    product* creation — downgrading a DB where those tables pre-existed
    will drop them anyway; confirm that is acceptable.
    """

    def _drop(table, constraints=(), indexes=()):
        # Unique constraints first, then indexes, then the table itself.
        for constraint_name in constraints:
            op.drop_constraint(constraint_name, table, type_="unique")
        for index_name in indexes:
            op.drop_index(index_name, table_name=table)
        op.drop_table(table)

    _drop("product_logs", indexes=["ix_product_logs_slug"])
    _drop(
        "subcategory_redirects",
        constraints=["uq_subcategory_redirects_old_new"],
        indexes=["ix_subcategory_redirects_old_path"],
    )
    _drop("link_externals", indexes=["ix_link_externals_product_slug"])
    _drop("link_errors", indexes=["ix_link_errors_product_slug"])
    _drop("listing_items", indexes=["ix_listing_items_slug", "ix_listing_items_listing_id"])
    _drop(
        "listings",
        constraints=["uq_listings_top_sub"],
        indexes=["ix_listings_sub_slug", "ix_listings_top_slug"],
    )
    _drop(
        "nav_subs",
        constraints=["uq_nav_subs_top_slug"],
        indexes=["ix_nav_subs_slug", "ix_nav_subs_top_id"],
    )
    _drop(
        "nav_tops",
        constraints=["uq_nav_tops_label_slug"],
        indexes=["ix_nav_tops_slug"],
    )

    # product_images: composite unique may be absent — drop best-effort.
    with op.batch_alter_table("product_images") as batch_op:
        try:
            batch_op.drop_constraint("uq_product_images_product_url_kind", type_="unique")
        except Exception:
            pass
        # Do not drop 'kind' column automatically since existing code may rely on it.
        # If needed, uncomment:
        # batch_op.drop_column("kind")
    _drop("product_images", indexes=["ix_product_images_position", "ix_product_images_product_id"])
    _drop(
        "product_sections",
        constraints=["uq_product_sections_product_title"],
        indexes=["ix_product_sections_product_id"],
    )
    _drop("products", indexes=["ix_products_slug"])

View File

@@ -0,0 +1,67 @@
# Alembic migration script template
"""empty message
Revision ID: 0d767ad92dd7
Revises: 20251021_add_user_and_magic_link
Create Date: 2025-10-24 23:36:41.985357
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0d767ad92dd7'
down_revision = '20251021_add_user_and_magic_link'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create product_likes: a (user_id, product_slug) join table.

    Composite primary key prevents duplicate likes; created_at records
    when the like happened.
    """
    op.create_table(
        "product_likes",
        sa.Column(
            "user_id",
            sa.Integer(),
            sa.ForeignKey("users.id", ondelete="CASCADE"),
            primary_key=True,
            nullable=False,
        ),
        sa.Column("product_slug", sa.String(length=255), primary_key=True, nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    # One index per lookup direction: "all likes for this user" and
    # "who liked this product".
    for index_name, column_name in (
        ("ix_product_likes_user_id", "user_id"),
        ("ix_product_likes_product_slug", "product_slug"),
    ):
        op.create_index(index_name, "product_likes", [column_name], unique=False)
def downgrade() -> None:
    """Drop the product_likes table and both of its indexes."""
    for index_name in ("ix_product_likes_product_slug", "ix_product_likes_user_id"):
        op.drop_index(index_name, table_name="product_likes")
    op.drop_table("product_likes")

View File

@@ -0,0 +1,24 @@
# Alembic migration script template
"""empty message
Revision ID: 1a1f1f1fc71c
Revises: 20251107_180000_link_listings_to_nav_ids, 20251107_add_product_id_to_likes
Create Date: 2025-11-07 19:34:18.228002
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1a1f1f1fc71c'
down_revision = ('20251107_180000_link_listings_to_nav_ids', '20251107_add_product_id_to_likes')
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Merge-point revision joining two branch heads; no schema changes."""
    pass
def downgrade() -> None:
    """Nothing to reverse — the merge revision applied no schema changes."""
    pass

View File

@@ -0,0 +1,20 @@
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251021211617"
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the `kv` table: a simple string-keyed key/value store."""
    op.create_table(
        "kv",
        sa.Column("key", sa.String(length=120), nullable=False),
        sa.Column("value", sa.Text(), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("key"),
    )
def downgrade() -> None:
    """Remove the `kv` key/value table."""
    op.drop_table("kv")

View File

@@ -0,0 +1,47 @@
"""add users and magic_links tables
Revision ID: 20251021_add_user_and_magic_link
Revises: 20251021211617
Create Date: 2025-10-21
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '20251021_add_user_and_magic_link'
down_revision: Union[str, None] = '20251021211617'  # the initial `kv` revision
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the users table and the magic_links login-token table."""
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("email", sa.String(length=255), nullable=False, unique=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("last_login_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_users_email", "users", ["email"], unique=True)

    # Single-use login tokens; cascade away with the owning user.
    op.create_table(
        "magic_links",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("token", sa.String(length=128), nullable=False, unique=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("purpose", sa.String(length=32), nullable=False),
        sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("used_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("ip", sa.String(length=64), nullable=True),
        sa.Column("user_agent", sa.String(length=256), nullable=True),
    )
    op.create_index("ix_magic_links_token", "magic_links", ["token"], unique=True)
    op.create_index("ix_magic_links_user", "magic_links", ["user_id"])
def downgrade() -> None:
    """Drop magic_links before users (FK child first)."""
    for index_name in ("ix_magic_links_user", "ix_magic_links_token"):
        op.drop_index(index_name, table_name="magic_links")
    op.drop_table("magic_links")
    op.drop_index("ix_users_email", table_name="users")
    op.drop_table("users")

View File

@@ -0,0 +1,135 @@
"""ghost content mirror (posts/pages/authors/tags)
Revision ID: 20251028_ghost_content
Revises: 0d767ad92dd7
Create Date: 2025-10-28
"""
from alembic import op
import sqlalchemy as sa
revision = "20251028_ghost_content"
down_revision = "0d767ad92dd7"
branch_labels = None
depends_on = None
def upgrade():
    """Create the Ghost CMS content mirror.

    Tables: authors, tags, posts, plus the post_authors / post_tags join
    tables. Each mirrored row carries the upstream Ghost identifier in
    `ghost_id`; timestamps are nullable because they come from the remote
    Ghost instance rather than this database.
    """
    # Authors mirrored from Ghost; ghost_id is the upstream identity key.
    op.create_table(
        "authors",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("slug", sa.String(length=191), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("profile_image", sa.Text(), nullable=True),
        sa.Column("cover_image", sa.Text(), nullable=True),
        sa.Column("bio", sa.Text(), nullable=True),
        sa.Column("website", sa.Text(), nullable=True),
        sa.Column("location", sa.Text(), nullable=True),
        sa.Column("facebook", sa.Text(), nullable=True),
        sa.Column("twitter", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.UniqueConstraint("ghost_id", name="uq_authors_ghost_id"),
    )
    op.create_index("ix_authors_ghost_id", "authors", ["ghost_id"])
    op.create_index("ix_authors_slug", "authors", ["slug"])

    # Tags mirrored from Ghost.
    op.create_table(
        "tags",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("slug", sa.String(length=191), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("visibility", sa.String(length=32), nullable=False, server_default="public"),
        sa.Column("feature_image", sa.Text(), nullable=True),
        sa.Column("meta_title", sa.String(length=300), nullable=True),
        sa.Column("meta_description", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.UniqueConstraint("ghost_id", name="uq_tags_ghost_id"),
    )
    op.create_index("ix_tags_ghost_id", "tags", ["ghost_id"])
    op.create_index("ix_tags_slug", "tags", ["slug"])

    # Posts (and pages — distinguished by is_page) with content, SEO/social
    # metadata, and denormalized primary author/tag links.
    op.create_table(
        "posts",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("uuid", sa.String(length=64), nullable=False),
        sa.Column("slug", sa.String(length=191), nullable=False),
        sa.Column("title", sa.String(length=500), nullable=False),
        sa.Column("html", sa.Text(), nullable=True),
        sa.Column("plaintext", sa.Text(), nullable=True),
        sa.Column("mobiledoc", sa.Text(), nullable=True),
        sa.Column("lexical", sa.Text(), nullable=True),
        sa.Column("feature_image", sa.Text(), nullable=True),
        sa.Column("feature_image_alt", sa.Text(), nullable=True),
        sa.Column("feature_image_caption", sa.Text(), nullable=True),
        sa.Column("excerpt", sa.Text(), nullable=True),
        sa.Column("custom_excerpt", sa.Text(), nullable=True),
        sa.Column("visibility", sa.String(length=32), nullable=False, server_default="public"),
        sa.Column("status", sa.String(length=32), nullable=False, server_default="draft"),
        sa.Column("featured", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        sa.Column("is_page", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        sa.Column("email_only", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        sa.Column("canonical_url", sa.Text(), nullable=True),
        sa.Column("meta_title", sa.String(length=500), nullable=True),
        sa.Column("meta_description", sa.Text(), nullable=True),
        sa.Column("og_image", sa.Text(), nullable=True),
        sa.Column("og_title", sa.String(length=500), nullable=True),
        sa.Column("og_description", sa.Text(), nullable=True),
        sa.Column("twitter_image", sa.Text(), nullable=True),
        sa.Column("twitter_title", sa.String(length=500), nullable=True),
        sa.Column("twitter_description", sa.Text(), nullable=True),
        sa.Column("custom_template", sa.String(length=191), nullable=True),
        sa.Column("reading_time", sa.Integer(), nullable=True),
        sa.Column("comment_id", sa.String(length=191), nullable=True),
        sa.Column("published_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("primary_author_id", sa.Integer(), sa.ForeignKey("authors.id", ondelete="SET NULL"), nullable=True),
        sa.Column("primary_tag_id", sa.Integer(), sa.ForeignKey("tags.id", ondelete="SET NULL"), nullable=True),
        sa.UniqueConstraint("ghost_id", name="uq_posts_ghost_id"),
        sa.UniqueConstraint("uuid", name="uq_posts_uuid"),
    )
    op.create_index("ix_posts_ghost_id", "posts", ["ghost_id"])
    op.create_index("ix_posts_slug", "posts", ["slug"])
    op.create_index("ix_posts_status", "posts", ["status"])
    op.create_index("ix_posts_visibility", "posts", ["visibility"])
    op.create_index("ix_posts_is_page", "posts", ["is_page"])
    op.create_index("ix_posts_published_at", "posts", ["published_at"])

    # Many-to-many join tables with explicit ordering (sort_order).
    op.create_table(
        "post_authors",
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("author_id", sa.Integer(), sa.ForeignKey("authors.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("sort_order", sa.Integer(), nullable=False, server_default="0"),
    )
    op.create_table(
        "post_tags",
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("tag_id", sa.Integer(), sa.ForeignKey("tags.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("sort_order", sa.Integer(), nullable=False, server_default="0"),
    )
def downgrade():
    """Drop the Ghost mirror: join tables first, then posts, tags, authors."""
    op.drop_table("post_tags")
    op.drop_table("post_authors")
    for index_name in (
        "ix_posts_published_at",
        "ix_posts_is_page",
        "ix_posts_visibility",
        "ix_posts_status",
        "ix_posts_slug",
        "ix_posts_ghost_id",
    ):
        op.drop_index(index_name, table_name="posts")
    op.drop_table("posts")
    # tags and authors have an identical index layout.
    for table in ("tags", "authors"):
        op.drop_index(f"ix_{table}_slug", table_name=table)
        op.drop_index(f"ix_{table}_ghost_id", table_name=table)
        op.drop_table(table)

View File

@@ -0,0 +1,128 @@
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "20251102_223123"
down_revision = "20251028_ghost_content"
branch_labels = None
depends_on = None
def upgrade():
    """Extend users with Ghost membership fields and create the membership
    mirror tables: labels, newsletters, tiers, subscriptions (plus the
    user<->label and user<->newsletter join tables).
    """
    # Extend users with Ghost/Stripe identity and membership state.
    # ghost_raw keeps the full upstream member payload as JSONB.
    op.add_column("users", sa.Column("ghost_id", sa.String(length=64), nullable=True))
    op.add_column("users", sa.Column("name", sa.String(length=255), nullable=True))
    op.add_column("users", sa.Column("ghost_status", sa.String(length=50), nullable=True))
    op.add_column("users", sa.Column("ghost_subscribed", sa.Boolean(), nullable=False, server_default=sa.true()))
    op.add_column("users", sa.Column("ghost_note", sa.Text(), nullable=True))
    op.add_column("users", sa.Column("avatar_image", sa.Text(), nullable=True))
    op.add_column("users", sa.Column("stripe_customer_id", sa.String(length=255), nullable=True))
    op.add_column("users", sa.Column("ghost_raw", postgresql.JSONB(astext_type=sa.Text()), nullable=True))
    op.create_index("ix_users_ghost_id", "users", ["ghost_id"], unique=True)
    op.create_index("ix_users_stripe_customer_id", "users", ["stripe_customer_id"])

    # Labels
    op.create_table(
        "ghost_labels",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_ghost_labels_ghost_id", "ghost_labels", ["ghost_id"], unique=True)
    # user<->label membership; one row per pairing.
    op.create_table(
        "user_labels",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("label_id", sa.Integer(), sa.ForeignKey("ghost_labels.id", ondelete="CASCADE"), nullable=False),
        sa.UniqueConstraint("user_id", "label_id", name="uq_user_label"),
    )

    # Newsletters
    op.create_table(
        "ghost_newsletters",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_ghost_newsletters_ghost_id", "ghost_newsletters", ["ghost_id"], unique=True)
    # user<->newsletter with a per-pair subscribed flag (default on).
    op.create_table(
        "user_newsletters",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("newsletter_id", sa.Integer(), sa.ForeignKey("ghost_newsletters.id", ondelete="CASCADE"), nullable=False),
        sa.Column("subscribed", sa.Boolean(), nullable=False, server_default=sa.true()),
        sa.UniqueConstraint("user_id", "newsletter_id", name="uq_user_newsletter"),
    )

    # Tiers
    op.create_table(
        "ghost_tiers",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("type", sa.String(length=50), nullable=True),
        sa.Column("visibility", sa.String(length=50), nullable=True),
    )
    op.create_index("ix_ghost_tiers_ghost_id", "ghost_tiers", ["ghost_id"], unique=True)

    # Subscriptions — links users to tiers with Stripe billing identifiers;
    # `raw` holds the upstream subscription payload as JSONB.
    op.create_table(
        "ghost_subscriptions",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("status", sa.String(length=50), nullable=True),
        sa.Column("tier_id", sa.Integer(), sa.ForeignKey("ghost_tiers.id", ondelete="SET NULL"), nullable=True),
        sa.Column("cadence", sa.String(length=50), nullable=True),
        sa.Column("price_amount", sa.Integer(), nullable=True),
        sa.Column("price_currency", sa.String(length=10), nullable=True),
        sa.Column("stripe_customer_id", sa.String(length=255), nullable=True),
        sa.Column("stripe_subscription_id", sa.String(length=255), nullable=True),
        sa.Column("raw", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    )
    op.create_index("ix_ghost_subscriptions_ghost_id", "ghost_subscriptions", ["ghost_id"], unique=True)
    op.create_index("ix_ghost_subscriptions_user_id", "ghost_subscriptions", ["user_id"])
    op.create_index("ix_ghost_subscriptions_tier_id", "ghost_subscriptions", ["tier_id"])
    op.create_index("ix_ghost_subscriptions_stripe_customer_id", "ghost_subscriptions", ["stripe_customer_id"])
    op.create_index("ix_ghost_subscriptions_stripe_subscription_id", "ghost_subscriptions", ["stripe_subscription_id"])
def downgrade():
    """Reverse the membership sync schema: drop mirror tables, then strip
    the Ghost columns back off users."""
    for index_name in (
        "ix_ghost_subscriptions_stripe_subscription_id",
        "ix_ghost_subscriptions_stripe_customer_id",
        "ix_ghost_subscriptions_tier_id",
        "ix_ghost_subscriptions_user_id",
        "ix_ghost_subscriptions_ghost_id",
    ):
        op.drop_index(index_name, table_name="ghost_subscriptions")
    op.drop_table("ghost_subscriptions")
    op.drop_index("ix_ghost_tiers_ghost_id", table_name="ghost_tiers")
    op.drop_table("ghost_tiers")
    op.drop_table("user_newsletters")
    op.drop_index("ix_ghost_newsletters_ghost_id", table_name="ghost_newsletters")
    op.drop_table("ghost_newsletters")
    op.drop_table("user_labels")
    op.drop_index("ix_ghost_labels_ghost_id", table_name="ghost_labels")
    op.drop_table("ghost_labels")
    op.drop_index("ix_users_stripe_customer_id", table_name="users")
    op.drop_index("ix_users_ghost_id", table_name="users")
    # Remove added user columns in reverse order of addition.
    for column_name in (
        "ghost_raw",
        "stripe_customer_id",
        "avatar_image",
        "ghost_note",
        "ghost_subscribed",
        "ghost_status",
        "name",
        "ghost_id",
    ):
        op.drop_column("users", column_name)

View File

@@ -0,0 +1,62 @@
"""add calendar description and slots
Revision ID: 20251106_152905_calendar_config
Revises: 215330c5ec15
Create Date: 2025-11-06T15:29:05.243479
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251106_152905_calendar_config"
down_revision = "215330c5ec15"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add calendars.description and create the calendar_slots table.

    A slot is a named weekly time band on a calendar: seven day flags,
    a start/end time (end must be after start), and an optional cost.
    """
    with op.batch_alter_table("calendars") as batch_op:
        batch_op.add_column(sa.Column("description", sa.Text(), nullable=True))

    # Seven identical day-of-week flags, all defaulting to false.
    day_flag_columns = [
        sa.Column(day, sa.Boolean(), nullable=False, server_default=sa.false())
        for day in ("mon", "tue", "wed", "thu", "fri", "sat", "sun")
    ]
    op.create_table(
        "calendar_slots",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("calendar_id", sa.Integer(), sa.ForeignKey("calendars.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        *day_flag_columns,
        sa.Column("time_start", sa.Time(timezone=False), nullable=False),
        sa.Column("time_end", sa.Time(timezone=False), nullable=False),
        sa.Column("cost", sa.Numeric(10, 2), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.CheckConstraint("(time_end > time_start)", name="ck_calendar_slots_time_end_after_start"),
    )
    op.create_index("ix_calendar_slots_calendar_id", "calendar_slots", ["calendar_id"], unique=False)
    op.create_index("ix_calendar_slots_time_start", "calendar_slots", ["time_start"], unique=False)
    # A slot name may appear only once per calendar.
    op.create_unique_constraint(
        "uq_calendar_slots_unique_band",
        "calendar_slots",
        ["calendar_id", "name"],
    )
def downgrade() -> None:
op.drop_constraint("uq_calendar_slots_unique_band", "calendar_slots", type_="unique")
op.drop_index("ix_calendar_slots_time_start", table_name="calendar_slots")
op.drop_index("ix_calendar_slots_calendar_id", table_name="calendar_slots")
op.drop_table("calendar_slots")
with op.batch_alter_table("calendars") as batch_op:
batch_op.drop_column("description")

View File

@@ -0,0 +1,52 @@
"""add product labels and stickers
Revision ID: 20251107_121500_labels_stickers
Revises: 20251107_090500_snapshot_to_db
Create Date: 2025-11-07T12:15:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_121500_labels_stickers"
down_revision = "20251107_090500_snapshot_to_db"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create product_labels and product_stickers: simple per-product name tags."""
    op.create_table(
        "product_labels",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
    )
    op.create_index("ix_product_labels_product_id", "product_labels", ["product_id"], unique=False)
    op.create_index("ix_product_labels_name", "product_labels", ["name"], unique=False)
    # A label name may appear at most once per product.
    op.create_unique_constraint(
        "uq_product_labels_product_name", "product_labels", ["product_id", "name"]
    )
    op.create_table(
        "product_stickers",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
    )
    op.create_index("ix_product_stickers_product_id", "product_stickers", ["product_id"], unique=False)
    op.create_index("ix_product_stickers_name", "product_stickers", ["name"], unique=False)
    # A sticker name may appear at most once per product.
    op.create_unique_constraint(
        "uq_product_stickers_product_name", "product_stickers", ["product_id", "name"]
    )
def downgrade() -> None:
    """Drop both tables; constraints and indexes first, stickers before labels."""
    op.drop_constraint("uq_product_stickers_product_name", "product_stickers", type_="unique")
    op.drop_index("ix_product_stickers_name", table_name="product_stickers")
    op.drop_index("ix_product_stickers_product_id", table_name="product_stickers")
    op.drop_table("product_stickers")
    op.drop_constraint("uq_product_labels_product_name", "product_labels", type_="unique")
    op.drop_index("ix_product_labels_name", table_name="product_labels")
    op.drop_index("ix_product_labels_product_id", table_name="product_labels")
    op.drop_table("product_labels")

View File

@@ -0,0 +1,44 @@
"""widen alembic_version.version_num to 255
Revision ID: 20251107_123000_widen_alembic_version
Revises: 20251107_121500_labels_stickers
Create Date: 2025-11-07T12:30:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_123000_widen_alembic_version"
down_revision = "20251107_121500_labels_stickers"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Increase the size of alembic_version.version_num to 255.

    Needed because this project uses long, human-readable revision ids
    that exceed Alembic's default VARCHAR(32) column.
    """
    # Most projects use Postgres; this raw SQL is explicit and works reliably.
    # Widening requires no USING clause on Postgres, but we'll be explicit for clarity.
    op.execute(
        "ALTER TABLE alembic_version "
        "ALTER COLUMN version_num TYPE VARCHAR(255)"
    )
    # If you need cross-dialect support later, you could add dialect checks
    # and use batch_alter_table for SQLite. For your Postgres setup, this is sufficient.
def downgrade() -> None:
    """Shrink alembic_version.version_num back to 32.
    On Postgres, shrinking can fail if any row exceeds 32 chars.
    We proactively truncate to 32 to guarantee a clean downgrade.
    """
    # Truncate any too-long values to fit back into VARCHAR(32)
    op.execute(
        "UPDATE alembic_version SET version_num = LEFT(version_num, 32)"
    )
    op.execute(
        "ALTER TABLE alembic_version "
        "ALTER COLUMN version_num TYPE VARCHAR(32)"
    )

View File

@@ -0,0 +1,93 @@
"""add product attributes, nutrition, allergens and extra product columns
Revision ID: 20251107_153000_product_attributes_nutrition
Revises: 20251107_123000_widen_alembic_version
Create Date: 2025-11-07T15:30:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_153000_product_attributes_nutrition"
down_revision = "20251107_123000_widen_alembic_version"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add ean/sku/size columns to products plus attribute, nutrition and allergen child tables."""
    # --- products extra columns ---
    with op.batch_alter_table("products") as batch_op:
        batch_op.add_column(sa.Column("ean", sa.String(length=64), nullable=True))
        batch_op.add_column(sa.Column("sku", sa.String(length=128), nullable=True))
        batch_op.add_column(sa.Column("unit_size", sa.String(length=128), nullable=True))
        batch_op.add_column(sa.Column("pack_size", sa.String(length=128), nullable=True))
        batch_op.create_index("ix_products_ean", ["ean"], unique=False)
        batch_op.create_index("ix_products_sku", ["sku"], unique=False)
    # --- attributes: arbitrary key/value facts (e.g., Brand, Origin, etc.) ---
    op.create_table(
        "product_attributes",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.Text(), nullable=True),
    )
    op.create_index("ix_product_attributes_product_id", "product_attributes", ["product_id"], unique=False)
    op.create_index("ix_product_attributes_key", "product_attributes", ["key"], unique=False)
    # One value per (product, key).
    op.create_unique_constraint(
        "uq_product_attributes_product_key", "product_attributes", ["product_id", "key"]
    )
    # --- nutrition: key/value[+unit] rows (e.g., Energy, Fat, Protein) ---
    op.create_table(
        "product_nutrition",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.String(length=255), nullable=True),
        sa.Column("unit", sa.String(length=64), nullable=True),
    )
    op.create_index("ix_product_nutrition_product_id", "product_nutrition", ["product_id"], unique=False)
    op.create_index("ix_product_nutrition_key", "product_nutrition", ["key"], unique=False)
    # One row per (product, nutrition key).
    op.create_unique_constraint(
        "uq_product_nutrition_product_key", "product_nutrition", ["product_id", "key"]
    )
    # --- allergens: one row per allergen mention (name + contains boolean) ---
    op.create_table(
        "product_allergens",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("contains", sa.Boolean(), nullable=False, server_default=sa.false()),
    )
    op.create_index("ix_product_allergens_product_id", "product_allergens", ["product_id"], unique=False)
    op.create_index("ix_product_allergens_name", "product_allergens", ["name"], unique=False)
    # One row per (product, allergen name).
    op.create_unique_constraint(
        "uq_product_allergens_product_name", "product_allergens", ["product_id", "name"]
    )
def downgrade() -> None:
    """Reverse upgrade: drop the three child tables, then the products columns/indexes."""
    op.drop_constraint("uq_product_allergens_product_name", "product_allergens", type_="unique")
    op.drop_index("ix_product_allergens_name", table_name="product_allergens")
    op.drop_index("ix_product_allergens_product_id", table_name="product_allergens")
    op.drop_table("product_allergens")
    op.drop_constraint("uq_product_nutrition_product_key", "product_nutrition", type_="unique")
    op.drop_index("ix_product_nutrition_key", table_name="product_nutrition")
    op.drop_index("ix_product_nutrition_product_id", table_name="product_nutrition")
    op.drop_table("product_nutrition")
    op.drop_constraint("uq_product_attributes_product_key", "product_attributes", type_="unique")
    op.drop_index("ix_product_attributes_key", table_name="product_attributes")
    op.drop_index("ix_product_attributes_product_id", table_name="product_attributes")
    op.drop_table("product_attributes")
    with op.batch_alter_table("products") as batch_op:
        batch_op.drop_index("ix_products_sku")
        batch_op.drop_index("ix_products_ean")
        batch_op.drop_column("pack_size")
        batch_op.drop_column("unit_size")
        batch_op.drop_column("sku")
        batch_op.drop_column("ean")

View File

@@ -0,0 +1,30 @@
"""Add regular_price and oe_list_price fields to Product
Revision ID: 20251107_163500_add_regular_price_and_oe_list_price
Revises: 20251107_153000_product_attributes_nutrition
Create Date: 2025-11-07 16:35:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_163500_add_regular_price_and_oe_list_price"
down_revision = "20251107_153000_product_attributes_nutrition"
branch_labels = None
depends_on = None

# (column name, column type) pairs managed by this revision, in creation order.
_PRICE_COLUMNS = (
    ('regular_price', sa.Numeric(12, 2)),
    ('regular_price_currency', sa.String(length=16)),
    ('regular_price_raw', sa.String(length=128)),
    ('oe_list_price', sa.Numeric(12, 2)),
)


def upgrade():
    """Add the nullable regular_price / oe_list_price columns to products."""
    for col_name, col_type in _PRICE_COLUMNS:
        op.add_column('products', sa.Column(col_name, col_type, nullable=True))


def downgrade():
    """Drop the price columns in reverse creation order."""
    for col_name, _ in reversed(_PRICE_COLUMNS):
        op.drop_column('products', col_name)

View File

@@ -0,0 +1,72 @@
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column, select, update
from sqlalchemy.orm.session import Session
# revision identifiers, used by Alembic.
revision = '20251107_180000_link_listings_to_nav_ids'
down_revision = '20251107_163500_add_regular_price_and_oe_list_price'
branch_labels = None
depends_on = None
def upgrade():
    """Link listings to nav_tops/nav_subs by id instead of slug.

    Adds nullable top_id/sub_id columns, backfills them from the existing
    top_slug/sub_slug values, then adds FKs and makes top_id NOT NULL.
    """
    # Add new nullable columns first
    op.add_column('listings', sa.Column('top_id', sa.Integer(), nullable=True))
    op.add_column('listings', sa.Column('sub_id', sa.Integer(), nullable=True))
    bind = op.get_bind()
    session = Session(bind=bind)
    # Lightweight Core table stubs so the backfill needs no ORM models.
    nav_tops = sa.table(
        'nav_tops',
        sa.column('id', sa.Integer),
        sa.column('slug', sa.String),
    )
    nav_subs = sa.table(
        'nav_subs',
        sa.column('id', sa.Integer),
        sa.column('slug', sa.String),
        sa.column('top_id', sa.Integer),
    )
    listings = sa.table(
        'listings',
        sa.column('id', sa.Integer),
        sa.column('top_slug', sa.String),
        sa.column('sub_slug', sa.String),
        sa.column('top_id', sa.Integer),
        sa.column('sub_id', sa.Integer),
    )
    # Map top_slug -> top_id
    top_slug_to_id = {
        slug: id_ for id_, slug in session.execute(select(nav_tops.c.id, nav_tops.c.slug))
    }
    # Sub slugs are only unique within a top, so key this map by (top_id, slug).
    sub_slug_to_id = {
        (top_id, slug): id_ for id_, slug, top_id in session.execute(
            select(nav_subs.c.id, nav_subs.c.slug, nav_subs.c.top_id)
        )
    }
    # NOTE(review): issues one UPDATE per listing row — fine for small tables;
    # consider a single batched UPDATE ... FROM if listings grows large.
    for row in session.execute(select(listings.c.id, listings.c.top_slug, listings.c.sub_slug)):
        listing_id, top_slug, sub_slug = row
        top_id = top_slug_to_id.get(top_slug)  # None when the slug has no nav_tops row
        sub_id = sub_slug_to_id.get((top_id, sub_slug)) if sub_slug else None
        session.execute(
            listings.update()
            .where(listings.c.id == listing_id)
            .values(top_id=top_id, sub_id=sub_id)
        )
    session.commit()
    # Add foreign keys and constraints
    op.create_foreign_key(None, 'listings', 'nav_tops', ['top_id'], ['id'])
    op.create_foreign_key(None, 'listings', 'nav_subs', ['sub_id'], ['id'])
    # Fails if any listing's top_slug was left unmatched above — intentional guard.
    op.alter_column('listings', 'top_id', nullable=False)
    # Optional: remove old slug fields
    # op.drop_column('listings', 'top_slug')
    # op.drop_column('listings', 'sub_slug')
def downgrade():
    """Data migration is one-way; downgrade is deliberately unsupported."""
    raise NotImplementedError("No downgrade")

View File

@@ -0,0 +1,26 @@
from alembic import op
import sqlalchemy as sa
revision = '20251107_add_missing_indexes'
down_revision = '1a1f1f1fc71c'  # Adjust if needed to match your current head
depends_on = None
branch_labels = None

# (index name, table, columns) in creation order; dropped in reverse on downgrade.
_INDEXES = (
    # Index for sorting by price
    ("ix_products_regular_price", "products", ["regular_price"]),
    # Index for filtering/aggregating by brand
    ("ix_products_brand", "products", ["brand"]),
    # Index for product_likes.product_id (if not already covered by FK)
    ("ix_product_likes_product_id", "product_likes", ["product_id"]),
    # Composite index on listing_items (may be partially redundant with existing constraints)
    ("ix_listing_items_listing_slug", "listing_items", ["listing_id", "slug"]),
)


def upgrade() -> None:
    """Create the supporting indexes listed in _INDEXES."""
    for idx_name, tbl, cols in _INDEXES:
        op.create_index(idx_name, tbl, cols)


def downgrade() -> None:
    """Drop the indexes created by upgrade, newest first."""
    for idx_name, tbl, _ in reversed(_INDEXES):
        op.drop_index(idx_name, table_name=tbl)

View File

@@ -0,0 +1,52 @@
"""Add surrogate key and product_id FK to product_likes"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers
revision = '20251107_add_product_id_to_likes'
down_revision = '0d767ad92dd7'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add an id surrogate key and a backfilled product_id FK to product_likes."""
    # Add surrogate primary key and product_id foreign key column
    # NOTE(review): Alembic's add_column generally ignores primary_key/autoincrement,
    # so this likely adds a plain INTEGER column with no PK or sequence. Verify the
    # live schema; an explicit sa.Identity() plus op.create_primary_key may be needed.
    op.add_column("product_likes", sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True))
    op.add_column("product_likes", sa.Column("product_id", sa.Integer(), nullable=True))
    # Create temporary FK without constraint for backfill
    op.execute("""
        UPDATE product_likes
        SET product_id = (
            SELECT id FROM products WHERE products.slug = product_likes.product_slug
        )
    """)
    # Add real FK constraint
    op.create_foreign_key(
        "fk_product_likes_product_id_products",
        source_table="product_likes",
        referent_table="products",
        local_cols=["product_id"],
        remote_cols=["id"],
        ondelete="CASCADE"
    )
    # Make product_id non-nullable now that it's backfilled
    # (fails if any product_slug had no matching products row — intentional guard)
    op.alter_column("product_likes", "product_id", nullable=False)
    # Add index for efficient lookup; also enforces one like per (user, product)
    op.create_index(
        "ix_product_likes_user_product",
        "product_likes",
        ["user_id", "product_id"],
        unique=True
    )
def downgrade() -> None:
    """Reverse upgrade: drop the index, FK, and both added columns."""
    op.drop_index("ix_product_likes_user_product", table_name="product_likes")
    op.drop_constraint("fk_product_likes_product_id_products", "product_likes", type_="foreignkey")
    op.drop_column("product_likes", "product_id")
    op.drop_column("product_likes", "id")

View File

@@ -0,0 +1,164 @@
"""Add soft delete and update unique constraints to include deleted_at
Revision ID: 20251108_soft_delete_all
Revises: remove_product_slug_20251107
Create Date: 2025-11-08 00:38:03
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '20251108_soft_delete_all'
down_revision = 'remove_product_slug_20251107'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add created_at/deleted_at soft-delete columns and rebuild unique
    constraints/indexes to include deleted_at across product, nav and
    listing tables.

    NOTE(review): on Postgres (pre NULLS NOT DISTINCT) NULL deleted_at values
    are treated as distinct in unique constraints, so these constraints do NOT
    enforce uniqueness among active (deleted_at IS NULL) rows — confirm that
    is the intended semantics.
    """
    op.add_column('product_likes', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_likes_product_user', 'product_likes', ['product_id', 'user_id', 'deleted_at'])
    # Drop the old unique index
    op.drop_index('ix_product_likes_user_product', table_name='product_likes')
    # Create a new unique index that includes deleted_at
    op.create_index(
        'ix_product_likes_user_product',
        'product_likes',
        ['user_id', 'product_id', 'deleted_at'],
        unique=True
    )
    op.add_column('product_allergens', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_allergens', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_constraint('uq_product_allergens_product_name', 'product_allergens', type_='unique')
    op.create_unique_constraint('uq_product_allergens_product_name', 'product_allergens', ['product_id', 'name', 'deleted_at'])
    op.add_column('product_attributes', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_attributes', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_constraint('uq_product_attributes_product_key', 'product_attributes', type_='unique')
    op.create_unique_constraint('uq_product_attributes_product_key', 'product_attributes', ['product_id', 'key', 'deleted_at'])
    op.add_column('product_images', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_images', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_images', 'product_images', ['product_id', 'position', 'deleted_at'])
    op.add_column('product_labels', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_labels', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_labels', 'product_labels', ['product_id', 'name', 'deleted_at'])
    op.add_column('product_nutrition', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_nutrition', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_nutrition', 'product_nutrition', ['product_id', 'key', 'deleted_at'])
    op.add_column('product_sections', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_sections', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_sections', 'product_sections', ['product_id', 'title', 'deleted_at'])
    op.add_column('product_stickers', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_stickers', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_stickers', 'product_stickers', ['product_id', 'name', 'deleted_at'])
    op.add_column('nav_tops', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_nav_tops', 'nav_tops', ['slug', 'deleted_at'])
    op.add_column('nav_subs', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('nav_subs', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_nav_subs', 'nav_subs', ['top_id', 'slug', 'deleted_at'])
    op.add_column('listings', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_constraint('uq_listings_top_sub', 'listings', type_='unique')
    op.create_unique_constraint('uq_listings_top_sub', 'listings', ['top_id', 'sub_id', 'deleted_at'])
    op.add_column('listing_items', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('listing_items', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_listing_items', 'listing_items', ['listing_id', 'slug', 'deleted_at'])
def downgrade() -> None:
    """Reverse upgrade: restore pre-soft-delete constraints and drop the added columns."""
    # Drop the modified index
    op.drop_index('ix_product_likes_user_product', table_name='product_likes')
    # Recreate the old unique index
    op.create_index(
        'ix_product_likes_user_product',
        'product_likes',
        ['user_id', 'product_id'],
        unique=True
    )
    op.drop_constraint('uq_product_likes_product_user', 'product_likes', type_='unique')
    op.drop_column('product_likes', 'deleted_at')
    op.drop_constraint('uq_product_allergens_product_name', 'product_allergens', type_='unique')
    op.drop_column('product_allergens', 'deleted_at')
    op.drop_column('product_allergens', 'created_at')
    op.create_unique_constraint('uq_product_allergens_product_name', 'product_allergens', ['product_id', 'name'])
    op.drop_constraint('uq_product_attributes_product_key', 'product_attributes', type_='unique')
    op.drop_column('product_attributes', 'deleted_at')
    op.drop_column('product_attributes', 'created_at')
    op.create_unique_constraint('uq_product_attributes_product_key', 'product_attributes', ['product_id', 'key'])
    op.drop_constraint('uq_product_images', 'product_images', type_='unique')
    op.drop_column('product_images', 'deleted_at')
    op.drop_column('product_images', 'created_at')
    op.drop_constraint('uq_product_labels', 'product_labels', type_='unique')
    op.drop_column('product_labels', 'deleted_at')
    op.drop_column('product_labels', 'created_at')
    op.drop_constraint('uq_product_nutrition', 'product_nutrition', type_='unique')
    op.drop_column('product_nutrition', 'deleted_at')
    op.drop_column('product_nutrition', 'created_at')
    op.drop_constraint('uq_product_sections', 'product_sections', type_='unique')
    op.drop_column('product_sections', 'deleted_at')
    op.drop_column('product_sections', 'created_at')
    op.drop_constraint('uq_product_stickers', 'product_stickers', type_='unique')
    op.drop_column('product_stickers', 'deleted_at')
    op.drop_column('product_stickers', 'created_at')
    op.drop_constraint('uq_nav_tops', 'nav_tops', type_='unique')
    op.drop_column('nav_tops', 'deleted_at')
    op.drop_constraint('uq_nav_subs', 'nav_subs', type_='unique')
    op.drop_column('nav_subs', 'deleted_at')
    op.drop_column('nav_subs', 'created_at')
    op.drop_constraint('uq_listings_top_sub', 'listings', type_='unique')
    op.drop_column('listings', 'deleted_at')
    op.create_unique_constraint('uq_listings_top_sub', 'listings', ['top_id', 'sub_id'])
    op.drop_constraint('uq_listing_items', 'listing_items', type_='unique')
    op.drop_column('listing_items', 'deleted_at')
    op.drop_column('listing_items', 'created_at')

View File

@@ -0,0 +1,60 @@
"""Deduplicate product image/label/nutrition/section/sticker unique constraints, keeping one deleted_at-aware constraint per table"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# NOTE(review): this revision id contains a space, which is awkward for the
# alembic CLI; it is referenced by the deployed version table, so renaming it
# now would require coordinating an UPDATE of alembic_version.
revision='20251108_1_remove extra_uqs'
down_revision = '20251108_nav_tops_soft_delete'
branch_labels = None
depends_on = None
def upgrade():
    """Collapse each product child table to a single deleted_at-aware unique
    constraint, removing the duplicate constraints left by earlier revisions.
    """
    # Drop existing constraint
    op.drop_constraint('uq_product_images', 'product_images', type_='unique')
    op.drop_constraint('uq_product_images_product_url_kind', 'product_images', type_='unique')
    op.create_unique_constraint("uq_product_images_product_url_kind", "product_images", ["product_id", "url", "kind", "deleted_at"])
    op.drop_constraint('uq_product_labels', 'product_labels', type_='unique')
    op.drop_constraint('uq_product_labels_product_name', 'product_labels', type_='unique')
    op.create_unique_constraint("uq_product_labels_product_name", "product_labels", ["product_id", "name", "deleted_at"])
    op.drop_constraint('uq_product_nutrition', 'product_nutrition', type_='unique')
    op.drop_constraint('uq_product_nutrition_product_key', 'product_nutrition', type_='unique')
    op.create_unique_constraint("uq_product_nutrition_product_key", "product_nutrition", ["product_id", "key", "deleted_at"])
    op.drop_constraint('uq_product_sections', 'product_sections', type_='unique')
    op.drop_constraint('uq_product_sections_product_title', 'product_sections', type_='unique')
    op.create_unique_constraint("uq_product_sections_product_title", "product_sections", ["product_id", "title", "deleted_at"])
    op.drop_constraint('uq_product_stickers', 'product_stickers', type_='unique')
    op.drop_constraint('uq_product_stickers_product_name', 'product_stickers', type_='unique')
    op.create_unique_constraint("uq_product_stickers_product_name", "product_stickers", ["product_id", "name", "deleted_at"])
def downgrade():
    """Restore the previous pair of constraints on each table."""
    # Restore old constraint
    op.drop_constraint('uq_product_images_product_url_kind', 'product_images', type_='unique')
    op.create_unique_constraint("uq_product_images_product_url_kind", "product_images", ["product_id", "url", "kind"])
    op.create_unique_constraint("uq_product_images", "product_images", ["product_id", "position", "deleted_at"])
    op.drop_constraint('uq_product_labels_product_name', 'product_labels', type_='unique')
    op.create_unique_constraint("uq_product_labels_product_name", "product_labels", ["product_id", "name"])
    op.create_unique_constraint("uq_product_labels", "product_labels", ["product_id", "name", "deleted_at"])
    op.drop_constraint('uq_product_nutrition_product_key', 'product_nutrition', type_='unique')
    op.create_unique_constraint("uq_product_nutrition_product_key", "product_nutrition", ["product_id", "key"])
    op.create_unique_constraint("uq_product_nutrition", "product_nutrition", ["product_id", "key", "deleted_at"])
    op.drop_constraint('uq_product_sections_product_title', 'product_sections', type_='unique')
    op.create_unique_constraint("uq_product_sections_product_title", "product_sections", ["product_id", "title"])
    op.create_unique_constraint("uq_product_sections", "product_sections", ["product_id", "title", "deleted_at"])
    op.drop_constraint('uq_product_stickers_product_name', 'product_stickers', type_='unique')
    op.create_unique_constraint("uq_product_stickers_product_name", "product_stickers", ["product_id", "name", ])
    op.create_unique_constraint("uq_product_stickers", "product_stickers", ["product_id", "name", "deleted_at" ])

View File

@@ -0,0 +1,36 @@
"""Update nav_tops unique constraint to include deleted_at"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '20251108_nav_tops_soft_delete'
down_revision = '20251108_soft_delete_all'
branch_labels = None
depends_on = None

_TABLE = 'nav_tops'
_CONSTRAINT = 'uq_nav_tops_label_slug'


def _rebuild_unique(columns):
    """Drop and recreate the nav_tops label/slug unique constraint over *columns*."""
    op.drop_constraint(_CONSTRAINT, _TABLE, type_='unique')
    op.create_unique_constraint(_CONSTRAINT, _TABLE, columns)


def upgrade():
    """Include deleted_at in the constraint so soft-deleted rows do not block reuse."""
    _rebuild_unique(['label', 'slug', 'deleted_at'])


def downgrade():
    """Restore the original label/slug constraint without deleted_at."""
    _rebuild_unique(['label', 'slug'])

View File

@@ -0,0 +1,92 @@
"""add calendars & calendar_entries
Revision ID: 215330c5ec15
Revises: 20251102_223123
Create Date: 2025-11-03 13:07:10.387189
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "215330c5ec15"
down_revision = "20251102_223123"
branch_labels = None
depends_on = None
def upgrade():
    """Create the calendars and calendar_entries tables.

    Uniqueness of (post_id, lower(slug)) among non-deleted calendars is
    enforced with a Postgres partial unique index rather than a hard
    UniqueConstraint, so soft-deleted rows never block slug reuse.
    """
    # calendars
    op.create_table(
        "calendars",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        # no hard UniqueConstraint; we enforce soft-delete-aware uniqueness with a partial index below
    )
    # helpful lookup indexes
    op.create_index("ix_calendars_post_id", "calendars", ["post_id"], unique=False)
    op.create_index("ix_calendars_name", "calendars", ["name"], unique=False)
    op.create_index("ix_calendars_slug", "calendars", ["slug"], unique=False)
    # calendar_entries
    op.create_table(
        "calendar_entries",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("calendar_id", sa.Integer(), sa.ForeignKey("calendars.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("start_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("end_at", sa.DateTime(timezone=True), nullable=True),  # <- allow open-ended
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.CheckConstraint("(end_at IS NULL) OR (end_at >= start_at)", name="ck_calendar_entries_end_after_start"),
    )
    op.create_index("ix_calendar_entries_calendar_id", "calendar_entries", ["calendar_id"], unique=False)
    op.create_index("ix_calendar_entries_start_at", "calendar_entries", ["start_at"], unique=False)
    op.create_index("ix_calendar_entries_name", "calendar_entries", ["name"], unique=False)
    # ---- Soft-delete-aware uniqueness for calendars (Postgres) ----
    # One active calendar per (post_id, lower(slug))
    if op.get_bind().dialect.name == "postgresql":
        # cleanup any active duplicates (defensive; table is new on fresh runs)
        # keeps the most recently updated/created row and soft-deletes the rest
        op.execute("""
            WITH ranked AS (
                SELECT
                    id,
                    ROW_NUMBER() OVER (
                        PARTITION BY post_id, lower(slug)
                        ORDER BY updated_at DESC, created_at DESC, id DESC
                    ) AS rn
                FROM calendars
                WHERE deleted_at IS NULL
            )
            UPDATE calendars c
            SET deleted_at = NOW()
            FROM ranked r
            WHERE c.id = r.id AND r.rn > 1;
        """)
        op.execute("""
            CREATE UNIQUE INDEX IF NOT EXISTS ux_calendars_post_slug_active
            ON calendars (post_id, lower(slug))
            WHERE deleted_at IS NULL;
        """)
def downgrade():
    """Drop both tables and all their indexes."""
    # drop in reverse dependency order
    op.drop_index("ix_calendar_entries_name", table_name="calendar_entries")
    op.drop_index("ix_calendar_entries_start_at", table_name="calendar_entries")
    op.drop_index("ix_calendar_entries_calendar_id", table_name="calendar_entries")
    op.drop_table("calendar_entries")
    if op.get_bind().dialect.name == "postgresql":
        op.execute("DROP INDEX IF EXISTS ux_calendars_post_slug_active;")
    op.drop_index("ix_calendars_slug", table_name="calendars")
    op.drop_index("ix_calendars_name", table_name="calendars")
    op.drop_index("ix_calendars_post_id", table_name="calendars")
    op.drop_table("calendars")

View File

@@ -0,0 +1,29 @@
"""Remove product_slug from product_likes
Revision ID: remove_product_slug_20251107
Revises: 20251107_add_missing_indexes
Create Date: 2025-11-07
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'remove_product_slug_20251107'
down_revision = '20251107_add_missing_indexes'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Drop the redundant product_likes.product_slug column (replaced by product_id)."""
    with op.batch_alter_table("product_likes") as batch_op:
        batch_op.drop_column("product_slug")
def downgrade() -> None:
    """Re-add product_likes.product_slug and backfill it from products.slug.

    The column is re-created with an empty-string server default: a bare
    NOT NULL column with no default would fail on any non-empty table.
    Values are then restored via the product_id foreign key; COALESCE keeps
    the update NOT-NULL-safe if a products row is somehow missing.
    """
    with op.batch_alter_table("product_likes") as batch_op:
        batch_op.add_column(sa.Column(
            "product_slug",
            sa.String(length=255),
            nullable=False,
            server_default="",
        ))
    # Best-effort restore of the original slug values.
    op.execute(
        "UPDATE product_likes SET product_slug = COALESCE("
        "(SELECT slug FROM products WHERE products.id = product_likes.product_id), ''"
        ")"
    )