feat: initial shared library extraction

Contains shared infrastructure for all coop services:
- shared/ (factory, urls, user_loader, context, internal_api, jinja_setup)
- models/ (User, Order, Calendar, Ticket, Product, Ghost CMS)
- db/ (SQLAlchemy async session, base)
- suma_browser/app/ (csrf, middleware, errors, authz, redis_cacher, payments, filters, utils)
- suma_browser/templates/ (shared base layouts, macros, error pages)
- static/ (CSS, JS, fonts, images)
- alembic/ (database migrations)
- config/ (app-config.yaml)
- editor/ (Lexical editor Node.js build)
- requirements.txt

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
giles
2026-02-09 23:11:36 +00:00
commit 668d9c7df8
446 changed files with 22741 additions and 0 deletions

16
.gitignore vendored Normal file
View File

@@ -0,0 +1,16 @@
# Python bytecode and virtual environments
__pycache__
venv
*.pyc
# Local scratch / snapshot working directories and archives
_config
_snapshot
project.zip
# Environment secrets — never commit
.env
_debug
.venv
.claude
# Lexical editor: Node dependencies and generated build outputs
editor/node_modules
static/scripts/editor.js
static/scripts/editor.css
suma_browser/static/scripts/editor.js
suma_browser/static/scripts/editor.css

5
README.md Normal file
View File

@@ -0,0 +1,5 @@
# coop/shared
Shared library for the Rose Ash Cooperative platform.
Used as a git submodule by each app repo (blog, market, cart, events).

35
alembic.ini Normal file
View File

@@ -0,0 +1,35 @@
[alembic]
# Location of the migration scripts, relative to this file.
script_location = alembic
# Intentionally blank: env.py resolves the URL at runtime from
# ALEMBIC_DATABASE_URL / DATABASE_URL (see alembic/env.py _get_url).
sqlalchemy.url =
# --- Logging configuration consumed by logging.config.fileConfig() ---
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_sqlalchemy]
# Raise to INFO to echo SQL statements during migrations.
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s

61
alembic/env.py Normal file
View File

@@ -0,0 +1,61 @@
"""Alembic environment: wires project metadata and the database URL."""
from __future__ import annotations
import os
import sys
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool

config = context.config

# Logging setup is best-effort: a missing or malformed [loggers] section in
# alembic.ini must not block migrations from running.
if config.config_file_name is not None:
    try:
        fileConfig(config.config_file_name)
    except Exception:
        pass

# Make the repository root importable so `db` and `models` resolve when
# alembic is invoked with this directory as the script location.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from db.base import Base
import models  # noqa: F401  # side effect: registers all tables on Base.metadata

target_metadata = Base.metadata


def _get_url() -> str:
    """Resolve the database URL.

    Precedence: ALEMBIC_DATABASE_URL, then DATABASE_URL, then the
    ``sqlalchemy.url`` option from alembic.ini (which may be empty).
    """
    # Fixed: the previous version printed the URL, leaking embedded
    # credentials to stdout on every alembic invocation.
    return os.getenv(
        "ALEMBIC_DATABASE_URL",
        os.getenv("DATABASE_URL", config.get_main_option("sqlalchemy.url") or "")
    )


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL without a live connection."""
    url = _get_url()
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
    )
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations against a live database connection."""
    url = _get_url()
    if url:
        # NOTE(review): configparser interpolation means a literal '%' in the
        # URL (e.g. in a password) must be escaped as '%%' — confirm if
        # credentials may contain percent signs.
        config.set_main_option("sqlalchemy.url", url)
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata, compare_type=True)
        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,241 @@
"""snapshot writes to postgres (products/nav/listings/reports)
Revision ID: 20251107_090500_snapshot_to_db
Revises: 20251106_152905_calendar_config
Create Date: 2025-11-07T09:05:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251107_090500_snapshot_to_db"
down_revision = "20251106_152905_calendar_config"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the snapshot tables: products, nav, listings and report tables.

    Creation of products / product_sections / product_images is guarded by
    inspector checks so the migration stays idempotent against databases
    where those tables already exist.
    """
    bind = op.get_bind()
    # Fixed: the previous code used the non-public Dialect.has_table(bind, name)
    # and a nonexistent Dialect.has_column(); the documented API is the
    # SQLAlchemy Inspector obtained via sa.inspect().
    inspector = sa.inspect(bind)

    # products (skip creation when already present in this DB)
    if not inspector.has_table("products"):
        op.create_table(
            "products",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("slug", sa.String(length=255), nullable=False, unique=True),
            sa.Column("title", sa.String(length=512), nullable=True),
            sa.Column("image", sa.Text(), nullable=True),
            sa.Column("description_short", sa.Text(), nullable=True),
            sa.Column("description_html", sa.Text(), nullable=True),
            sa.Column("suma_href", sa.Text(), nullable=True),
            sa.Column("brand", sa.String(length=255), nullable=True),
            sa.Column("rrp", sa.Numeric(12, 2), nullable=True),
            sa.Column("rrp_currency", sa.String(length=16), nullable=True),
            sa.Column("rrp_raw", sa.String(length=128), nullable=True),
            sa.Column("price_per_unit", sa.Numeric(12, 4), nullable=True),
            sa.Column("price_per_unit_currency", sa.String(length=16), nullable=True),
            sa.Column("price_per_unit_raw", sa.String(length=128), nullable=True),
            sa.Column("special_price", sa.Numeric(12, 2), nullable=True),
            sa.Column("special_price_currency", sa.String(length=16), nullable=True),
            sa.Column("special_price_raw", sa.String(length=128), nullable=True),
            sa.Column("case_size_count", sa.Integer(), nullable=True),
            sa.Column("case_size_item_qty", sa.Numeric(12, 3), nullable=True),
            sa.Column("case_size_item_unit", sa.String(length=32), nullable=True),
            sa.Column("case_size_raw", sa.String(length=128), nullable=True),
            sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
            sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
            sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        )
        op.create_index("ix_products_slug", "products", ["slug"], unique=False)

    # product_sections — (product_id, title) pairs are unique
    if not inspector.has_table("product_sections"):
        op.create_table(
            "product_sections",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
            sa.Column("title", sa.String(length=255), nullable=False),
            sa.Column("html", sa.Text(), nullable=False),
        )
        op.create_index("ix_product_sections_product_id", "product_sections", ["product_id"], unique=False)
        op.create_unique_constraint("uq_product_sections_product_title", "product_sections", ["product_id", "title"])

    # product_images — create fresh, or add `kind` + adjust unique on an existing table
    if not inspector.has_table("product_images"):
        op.create_table(
            "product_images",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
            sa.Column("url", sa.Text(), nullable=False),
            sa.Column("position", sa.Integer(), nullable=False, server_default="0"),
            sa.Column("kind", sa.String(length=16), nullable=False, server_default="gallery"),
            sa.CheckConstraint("position >= 0", name="ck_product_images_position_nonneg"),
        )
        op.create_index("ix_product_images_product_id", "product_images", ["product_id"], unique=False)
        op.create_index("ix_product_images_position", "product_images", ["position"], unique=False)
        op.create_unique_constraint("uq_product_images_product_url_kind", "product_images", ["product_id", "url", "kind"])
    else:
        # Alter the existing table: add `kind` if absent and widen the unique key.
        existing_columns = {col["name"] for col in inspector.get_columns("product_images")}
        with op.batch_alter_table("product_images") as batch_op:
            if "kind" not in existing_columns:
                batch_op.add_column(sa.Column("kind", sa.String(length=16), nullable=False, server_default="gallery"))
            # Best-effort: the old unique constraint may not exist on this DB.
            try:
                batch_op.drop_constraint("uq_product_images_product_url", type_="unique")
            except Exception:
                pass
            batch_op.create_unique_constraint("uq_product_images_product_url_kind", ["product_id", "url", "kind"])

    # nav_tops / nav_subs — navigation hierarchy
    op.create_table(
        "nav_tops",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("label", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_nav_tops_slug", "nav_tops", ["slug"], unique=False)
    op.create_unique_constraint("uq_nav_tops_label_slug", "nav_tops", ["label", "slug"])
    op.create_table(
        "nav_subs",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("top_id", sa.Integer(), sa.ForeignKey("nav_tops.id", ondelete="CASCADE"), nullable=False),
        sa.Column("label", sa.String(length=255), nullable=True),
        sa.Column("slug", sa.String(length=255), nullable=False),
        sa.Column("href", sa.Text(), nullable=True),
    )
    op.create_index("ix_nav_subs_top_id", "nav_subs", ["top_id"], unique=False)
    op.create_index("ix_nav_subs_slug", "nav_subs", ["slug"], unique=False)
    op.create_unique_constraint("uq_nav_subs_top_slug", "nav_subs", ["top_id", "slug"])

    # listings & listing_items — one listing per (top_slug, sub_slug) pair
    op.create_table(
        "listings",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("top_slug", sa.String(length=255), nullable=False),
        sa.Column("sub_slug", sa.String(length=255), nullable=True),
        sa.Column("total_pages", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_listings_top_slug", "listings", ["top_slug"], unique=False)
    op.create_index("ix_listings_sub_slug", "listings", ["sub_slug"], unique=False)
    op.create_unique_constraint("uq_listings_top_sub", "listings", ["top_slug", "sub_slug"])
    op.create_table(
        "listing_items",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("listing_id", sa.Integer(), sa.ForeignKey("listings.id", ondelete="CASCADE"), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=False),
    )
    op.create_index("ix_listing_items_listing_id", "listing_items", ["listing_id"], unique=False)
    op.create_index("ix_listing_items_slug", "listing_items", ["slug"], unique=False)
    op.create_unique_constraint("uq_listing_items_listing_slug", "listing_items", ["listing_id", "slug"])

    # report tables: link_errors, link_externals, subcategory_redirects, product_logs
    op.create_table(
        "link_errors",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_slug", sa.String(length=255), nullable=True),
        sa.Column("href", sa.Text(), nullable=True),
        sa.Column("text", sa.Text(), nullable=True),
        sa.Column("top", sa.String(length=255), nullable=True),
        sa.Column("sub", sa.String(length=255), nullable=True),
        sa.Column("target_slug", sa.String(length=255), nullable=True),
        sa.Column("type", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_link_errors_product_slug", "link_errors", ["product_slug"], unique=False)
    op.create_table(
        "link_externals",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_slug", sa.String(length=255), nullable=True),
        sa.Column("href", sa.Text(), nullable=True),
        sa.Column("text", sa.Text(), nullable=True),
        sa.Column("host", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_link_externals_product_slug", "link_externals", ["product_slug"], unique=False)
    op.create_table(
        "subcategory_redirects",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("old_path", sa.String(length=512), nullable=False),
        sa.Column("new_path", sa.String(length=512), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_subcategory_redirects_old_path", "subcategory_redirects", ["old_path"], unique=False)
    op.create_unique_constraint("uq_subcategory_redirects_old_new", "subcategory_redirects", ["old_path", "new_path"])
    op.create_table(
        "product_logs",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("href_tried", sa.Text(), nullable=True),
        sa.Column("ok", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("error_type", sa.String(length=255), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("http_status", sa.Integer(), nullable=True),
        sa.Column("final_url", sa.Text(), nullable=True),
        sa.Column("transport_error", sa.Boolean(), nullable=True),
        sa.Column("title", sa.String(length=512), nullable=True),
        sa.Column("has_description_html", sa.Boolean(), nullable=True),
        sa.Column("has_description_short", sa.Boolean(), nullable=True),
        sa.Column("sections_count", sa.Integer(), nullable=True),
        sa.Column("images_count", sa.Integer(), nullable=True),
        sa.Column("embedded_images_count", sa.Integer(), nullable=True),
        sa.Column("all_images_count", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_product_logs_slug", "product_logs", ["slug"], unique=False)
def downgrade() -> None:
    """Drop all snapshot tables in reverse dependency order (children first)."""
    op.drop_index("ix_product_logs_slug", table_name="product_logs")
    op.drop_table("product_logs")
    op.drop_constraint("uq_subcategory_redirects_old_new", "subcategory_redirects", type_="unique")
    op.drop_index("ix_subcategory_redirects_old_path", table_name="subcategory_redirects")
    op.drop_table("subcategory_redirects")
    op.drop_index("ix_link_externals_product_slug", table_name="link_externals")
    op.drop_table("link_externals")
    op.drop_index("ix_link_errors_product_slug", table_name="link_errors")
    op.drop_table("link_errors")
    op.drop_index("ix_listing_items_slug", table_name="listing_items")
    op.drop_index("ix_listing_items_listing_id", table_name="listing_items")
    op.drop_table("listing_items")
    op.drop_constraint("uq_listings_top_sub", "listings", type_="unique")
    op.drop_index("ix_listings_sub_slug", table_name="listings")
    op.drop_index("ix_listings_top_slug", table_name="listings")
    op.drop_table("listings")
    op.drop_constraint("uq_nav_subs_top_slug", "nav_subs", type_="unique")
    op.drop_index("ix_nav_subs_slug", table_name="nav_subs")
    op.drop_index("ix_nav_subs_top_id", table_name="nav_subs")
    op.drop_table("nav_subs")
    op.drop_constraint("uq_nav_tops_label_slug", "nav_tops", type_="unique")
    op.drop_index("ix_nav_tops_slug", table_name="nav_tops")
    op.drop_table("nav_tops")
    # product_images may pre-date this migration, so the constraint drop is
    # best-effort; the table itself is dropped unconditionally below.
    with op.batch_alter_table("product_images") as batch_op:
        try:
            batch_op.drop_constraint("uq_product_images_product_url_kind", type_="unique")
        except Exception:
            pass
        # Do not drop 'kind' column automatically since existing code may rely on it.
        # If needed, uncomment:
        # batch_op.drop_column("kind")
    op.drop_index("ix_product_images_position", table_name="product_images")
    op.drop_index("ix_product_images_product_id", table_name="product_images")
    op.drop_table("product_images")
    op.drop_constraint("uq_product_sections_product_title", "product_sections", type_="unique")
    op.drop_index("ix_product_sections_product_id", table_name="product_sections")
    op.drop_table("product_sections")
    op.drop_index("ix_products_slug", table_name="products")
    op.drop_table("products")

View File

@@ -0,0 +1,67 @@
# Alembic migration script template
"""empty message
Revision ID: 0d767ad92dd7
Revises: 20251021_add_user_and_magic_link
Create Date: 2025-10-24 23:36:41.985357
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0d767ad92dd7'
down_revision = '20251021_add_user_and_magic_link'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the ``product_likes`` association table plus lookup indexes."""
    op.create_table(
        "product_likes",
        # Composite primary key: one like per (user, product) pair.
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), primary_key=True, nullable=False),
        sa.Column("product_slug", sa.String(length=255), primary_key=True, nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    # Fast lookup of all likes belonging to a user.
    op.create_index("ix_product_likes_user_id", "product_likes", ["user_id"], unique=False)
    # Fast lookup of every user who liked a given product.
    op.create_index("ix_product_likes_product_slug", "product_likes", ["product_slug"], unique=False)
def downgrade() -> None:
    """Drop the ``product_likes`` table and its indexes."""
    for index_name in ("ix_product_likes_product_slug", "ix_product_likes_user_id"):
        op.drop_index(index_name, table_name="product_likes")
    op.drop_table("product_likes")

View File

@@ -0,0 +1,24 @@
# Alembic migration script template
"""empty message
Revision ID: 1a1f1f1fc71c
Revises: 20251107_180000_link_listings_to_nav_ids, 20251107_add_product_id_to_likes
Create Date: 2025-11-07 19:34:18.228002
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1a1f1f1fc71c'
down_revision = ('20251107_180000_link_listings_to_nav_ids', '20251107_add_product_id_to_likes')
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Merge point for two migration branches; no schema changes."""
    pass
def downgrade() -> None:
    """No-op: the merge revision made no schema changes to undo."""
    pass

View File

@@ -0,0 +1,20 @@
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20251021211617"
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the ``kv`` key/value store table (text values, keyed by string)."""
    columns = [
        sa.Column('key', sa.String(length=120), nullable=False),
        sa.Column('value', sa.Text(), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    ]
    op.create_table('kv', *columns, sa.PrimaryKeyConstraint('key'))
def downgrade() -> None:
    """Drop the ``kv`` key/value store table."""
    op.drop_table('kv')

View File

@@ -0,0 +1,47 @@
"""add users and magic_links tables
Revision ID: 20251021_add_user_and_magic_link
Revises: a1b2c3d4e5f6 # <-- REPLACE with your actual head
Create Date: 2025-10-21
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '20251021_add_user_and_magic_link'
down_revision: Union[str, None] = '20251021211617' # <-- REPLACE THIS
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create ``users`` and ``magic_links`` plus their lookup indexes."""

    def tz_timestamp(name, **kwargs):
        # Shorthand for a timezone-aware timestamp column.
        return sa.Column(name, sa.DateTime(timezone=True), **kwargs)

    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column('email', sa.String(length=255), nullable=False, unique=True),
        tz_timestamp('created_at', nullable=False, server_default=sa.func.now()),
        tz_timestamp('last_login_at', nullable=True),
    )
    op.create_index('ix_users_email', 'users', ['email'], unique=True)

    op.create_table(
        'magic_links',
        sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column('token', sa.String(length=128), nullable=False, unique=True),
        sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
        sa.Column('purpose', sa.String(length=32), nullable=False),
        tz_timestamp('expires_at', nullable=False),
        tz_timestamp('used_at', nullable=True),
        tz_timestamp('created_at', nullable=False, server_default=sa.func.now()),
        sa.Column('ip', sa.String(length=64), nullable=True),
        sa.Column('user_agent', sa.String(length=256), nullable=True),
    )
    op.create_index('ix_magic_links_token', 'magic_links', ['token'], unique=True)
    op.create_index('ix_magic_links_user', 'magic_links', ['user_id'])
def downgrade() -> None:
    """Tear down the auth tables, dependents (magic_links) first."""
    for index_name in ('ix_magic_links_user', 'ix_magic_links_token'):
        op.drop_index(index_name, table_name='magic_links')
    op.drop_table('magic_links')
    op.drop_index('ix_users_email', table_name='users')
    op.drop_table('users')

View File

@@ -0,0 +1,135 @@
"""ghost content mirror (posts/pages/authors/tags)
Revision ID: 20251028_ghost_content
Revises: 0d767ad92dd7
Create Date: 2025-10-28
"""
from alembic import op
import sqlalchemy as sa
revision = "20251028_ghost_content"
down_revision = "0d767ad92dd7"
branch_labels = None
depends_on = None
def upgrade():
    """Create local mirror tables for Ghost CMS content.

    Tables: ``authors``, ``tags``, ``posts`` (pages share this table via the
    ``is_page`` flag) plus the ``post_authors``/``post_tags`` association
    tables. Each ``ghost_id`` column carries the upstream Ghost identifier
    and is unique per table.
    """
    # Ghost authors mirror.
    op.create_table(
        "authors",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("slug", sa.String(length=191), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("profile_image", sa.Text(), nullable=True),
        sa.Column("cover_image", sa.Text(), nullable=True),
        sa.Column("bio", sa.Text(), nullable=True),
        sa.Column("website", sa.Text(), nullable=True),
        sa.Column("location", sa.Text(), nullable=True),
        sa.Column("facebook", sa.Text(), nullable=True),
        sa.Column("twitter", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.UniqueConstraint("ghost_id", name="uq_authors_ghost_id"),
    )
    op.create_index("ix_authors_ghost_id", "authors", ["ghost_id"])
    op.create_index("ix_authors_slug", "authors", ["slug"])
    # Ghost tags mirror.
    op.create_table(
        "tags",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("slug", sa.String(length=191), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("visibility", sa.String(length=32), nullable=False, server_default="public"),
        sa.Column("feature_image", sa.Text(), nullable=True),
        sa.Column("meta_title", sa.String(length=300), nullable=True),
        sa.Column("meta_description", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.UniqueConstraint("ghost_id", name="uq_tags_ghost_id"),
    )
    op.create_index("ix_tags_ghost_id", "tags", ["ghost_id"])
    op.create_index("ix_tags_slug", "tags", ["slug"])
    # Posts (and pages) mirror: multiple content formats (html / plaintext /
    # mobiledoc / lexical) plus SEO and social metadata columns.
    op.create_table(
        "posts",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("uuid", sa.String(length=64), nullable=False),
        sa.Column("slug", sa.String(length=191), nullable=False),
        sa.Column("title", sa.String(length=500), nullable=False),
        sa.Column("html", sa.Text(), nullable=True),
        sa.Column("plaintext", sa.Text(), nullable=True),
        sa.Column("mobiledoc", sa.Text(), nullable=True),
        sa.Column("lexical", sa.Text(), nullable=True),
        sa.Column("feature_image", sa.Text(), nullable=True),
        sa.Column("feature_image_alt", sa.Text(), nullable=True),
        sa.Column("feature_image_caption", sa.Text(), nullable=True),
        sa.Column("excerpt", sa.Text(), nullable=True),
        sa.Column("custom_excerpt", sa.Text(), nullable=True),
        sa.Column("visibility", sa.String(length=32), nullable=False, server_default="public"),
        sa.Column("status", sa.String(length=32), nullable=False, server_default="draft"),
        sa.Column("featured", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        sa.Column("is_page", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        sa.Column("email_only", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        sa.Column("canonical_url", sa.Text(), nullable=True),
        sa.Column("meta_title", sa.String(length=500), nullable=True),
        sa.Column("meta_description", sa.Text(), nullable=True),
        sa.Column("og_image", sa.Text(), nullable=True),
        sa.Column("og_title", sa.String(length=500), nullable=True),
        sa.Column("og_description", sa.Text(), nullable=True),
        sa.Column("twitter_image", sa.Text(), nullable=True),
        sa.Column("twitter_title", sa.String(length=500), nullable=True),
        sa.Column("twitter_description", sa.Text(), nullable=True),
        sa.Column("custom_template", sa.String(length=191), nullable=True),
        sa.Column("reading_time", sa.Integer(), nullable=True),
        sa.Column("comment_id", sa.String(length=191), nullable=True),
        sa.Column("published_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("primary_author_id", sa.Integer(), sa.ForeignKey("authors.id", ondelete="SET NULL"), nullable=True),
        sa.Column("primary_tag_id", sa.Integer(), sa.ForeignKey("tags.id", ondelete="SET NULL"), nullable=True),
        sa.UniqueConstraint("ghost_id", name="uq_posts_ghost_id"),
        sa.UniqueConstraint("uuid", name="uq_posts_uuid"),
    )
    op.create_index("ix_posts_ghost_id", "posts", ["ghost_id"])
    op.create_index("ix_posts_slug", "posts", ["slug"])
    op.create_index("ix_posts_status", "posts", ["status"])
    op.create_index("ix_posts_visibility", "posts", ["visibility"])
    op.create_index("ix_posts_is_page", "posts", ["is_page"])
    op.create_index("ix_posts_published_at", "posts", ["published_at"])
    # Many-to-many link tables with explicit sort order.
    op.create_table(
        "post_authors",
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("author_id", sa.Integer(), sa.ForeignKey("authors.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("sort_order", sa.Integer(), nullable=False, server_default="0"),
    )
    op.create_table(
        "post_tags",
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("tag_id", sa.Integer(), sa.ForeignKey("tags.id", ondelete="CASCADE"), primary_key=True),
        sa.Column("sort_order", sa.Integer(), nullable=False, server_default="0"),
    )
def downgrade():
    """Remove the Ghost content mirror tables in reverse creation order
    (association tables first, then posts, tags, authors)."""
    op.drop_table("post_tags")
    op.drop_table("post_authors")
    op.drop_index("ix_posts_published_at", table_name="posts")
    op.drop_index("ix_posts_is_page", table_name="posts")
    op.drop_index("ix_posts_visibility", table_name="posts")
    op.drop_index("ix_posts_status", table_name="posts")
    op.drop_index("ix_posts_slug", table_name="posts")
    op.drop_index("ix_posts_ghost_id", table_name="posts")
    op.drop_table("posts")
    op.drop_index("ix_tags_slug", table_name="tags")
    op.drop_index("ix_tags_ghost_id", table_name="tags")
    op.drop_table("tags")
    op.drop_index("ix_authors_slug", table_name="authors")
    op.drop_index("ix_authors_ghost_id", table_name="authors")
    op.drop_table("authors")

View File

@@ -0,0 +1,128 @@
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "20251102_223123"
down_revision = "20251028_ghost_content"
branch_labels = None
depends_on = None
def upgrade():
    """Extend ``users`` with Ghost membership fields and create the
    labels / newsletters / tiers / subscriptions mirror tables.

    ``ghost_raw`` and ``raw`` are JSONB columns — this migration is
    PostgreSQL-specific.
    """
    # Extend users
    op.add_column("users", sa.Column("ghost_id", sa.String(length=64), nullable=True))
    op.add_column("users", sa.Column("name", sa.String(length=255), nullable=True))
    op.add_column("users", sa.Column("ghost_status", sa.String(length=50), nullable=True))
    op.add_column("users", sa.Column("ghost_subscribed", sa.Boolean(), nullable=False, server_default=sa.true()))
    op.add_column("users", sa.Column("ghost_note", sa.Text(), nullable=True))
    op.add_column("users", sa.Column("avatar_image", sa.Text(), nullable=True))
    op.add_column("users", sa.Column("stripe_customer_id", sa.String(length=255), nullable=True))
    op.add_column("users", sa.Column("ghost_raw", postgresql.JSONB(astext_type=sa.Text()), nullable=True))
    op.create_index("ix_users_ghost_id", "users", ["ghost_id"], unique=True)
    op.create_index("ix_users_stripe_customer_id", "users", ["stripe_customer_id"])
    # Labels
    op.create_table(
        "ghost_labels",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_ghost_labels_ghost_id", "ghost_labels", ["ghost_id"], unique=True)
    # user <-> label association; one row per (user, label) pair.
    op.create_table(
        "user_labels",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("label_id", sa.Integer(), sa.ForeignKey("ghost_labels.id", ondelete="CASCADE"), nullable=False),
        sa.UniqueConstraint("user_id", "label_id", name="uq_user_label"),
    )
    # Newsletters
    op.create_table(
        "ghost_newsletters",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index("ix_ghost_newsletters_ghost_id", "ghost_newsletters", ["ghost_id"], unique=True)
    # user <-> newsletter association with a per-user subscribed flag.
    op.create_table(
        "user_newsletters",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("newsletter_id", sa.Integer(), sa.ForeignKey("ghost_newsletters.id", ondelete="CASCADE"), nullable=False),
        sa.Column("subscribed", sa.Boolean(), nullable=False, server_default=sa.true()),
        sa.UniqueConstraint("user_id", "newsletter_id", name="uq_user_newsletter"),
    )
    # Tiers
    op.create_table(
        "ghost_tiers",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=True),
        sa.Column("type", sa.String(length=50), nullable=True),
        sa.Column("visibility", sa.String(length=50), nullable=True),
    )
    op.create_index("ix_ghost_tiers_ghost_id", "ghost_tiers", ["ghost_id"], unique=True)
    # Subscriptions — links users to tiers and records Stripe identifiers.
    op.create_table(
        "ghost_subscriptions",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("ghost_id", sa.String(length=64), nullable=False),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("status", sa.String(length=50), nullable=True),
        sa.Column("tier_id", sa.Integer(), sa.ForeignKey("ghost_tiers.id", ondelete="SET NULL"), nullable=True),
        sa.Column("cadence", sa.String(length=50), nullable=True),
        sa.Column("price_amount", sa.Integer(), nullable=True),
        sa.Column("price_currency", sa.String(length=10), nullable=True),
        sa.Column("stripe_customer_id", sa.String(length=255), nullable=True),
        sa.Column("stripe_subscription_id", sa.String(length=255), nullable=True),
        sa.Column("raw", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    )
    op.create_index("ix_ghost_subscriptions_ghost_id", "ghost_subscriptions", ["ghost_id"], unique=True)
    op.create_index("ix_ghost_subscriptions_user_id", "ghost_subscriptions", ["user_id"])
    op.create_index("ix_ghost_subscriptions_tier_id", "ghost_subscriptions", ["tier_id"])
    op.create_index("ix_ghost_subscriptions_stripe_customer_id", "ghost_subscriptions", ["stripe_customer_id"])
    op.create_index("ix_ghost_subscriptions_stripe_subscription_id", "ghost_subscriptions", ["stripe_subscription_id"])
def downgrade():
    """Drop the membership mirror tables and remove the Ghost columns from
    ``users``, in reverse creation order."""
    op.drop_index("ix_ghost_subscriptions_stripe_subscription_id", table_name="ghost_subscriptions")
    op.drop_index("ix_ghost_subscriptions_stripe_customer_id", table_name="ghost_subscriptions")
    op.drop_index("ix_ghost_subscriptions_tier_id", table_name="ghost_subscriptions")
    op.drop_index("ix_ghost_subscriptions_user_id", table_name="ghost_subscriptions")
    op.drop_index("ix_ghost_subscriptions_ghost_id", table_name="ghost_subscriptions")
    op.drop_table("ghost_subscriptions")
    op.drop_index("ix_ghost_tiers_ghost_id", table_name="ghost_tiers")
    op.drop_table("ghost_tiers")
    op.drop_table("user_newsletters")
    op.drop_index("ix_ghost_newsletters_ghost_id", table_name="ghost_newsletters")
    op.drop_table("ghost_newsletters")
    op.drop_table("user_labels")
    op.drop_index("ix_ghost_labels_ghost_id", table_name="ghost_labels")
    op.drop_table("ghost_labels")
    op.drop_index("ix_users_stripe_customer_id", table_name="users")
    op.drop_index("ix_users_ghost_id", table_name="users")
    # Remove added user columns (reverse of the add_column sequence above).
    op.drop_column("users", "ghost_raw")
    op.drop_column("users", "stripe_customer_id")
    op.drop_column("users", "avatar_image")
    op.drop_column("users", "ghost_note")
    op.drop_column("users", "ghost_subscribed")
    op.drop_column("users", "ghost_status")
    op.drop_column("users", "name")
    op.drop_column("users", "ghost_id")

View File

@@ -0,0 +1,62 @@
"""add calendar description and slots

Adds a free-text ``description`` column to ``calendars`` and introduces the
``calendar_slots`` table: a slot is a named weekly recurrence band (per-day
boolean flags plus a start/end time) with an optional cost.

Revision ID: 20251106_152905_calendar_config
Revises: 215330c5ec15
Create Date: 2025-11-06T15:29:05.243479
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "20251106_152905_calendar_config"
down_revision = "215330c5ec15"
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Add calendars.description and create calendar_slots with its indexes."""
    # batch_alter_table keeps the ADD COLUMN portable (e.g. to SQLite).
    with op.batch_alter_table("calendars") as batch_op:
        batch_op.add_column(sa.Column("description", sa.Text(), nullable=True))
    op.create_table(
        "calendar_slots",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("calendar_id", sa.Integer(), sa.ForeignKey("calendars.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        # One boolean per weekday: which days of the week the slot recurs on.
        sa.Column("mon", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("tue", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("wed", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("thu", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("fri", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("sat", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("sun", sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column("time_start", sa.Time(timezone=False), nullable=False),
        sa.Column("time_end", sa.Time(timezone=False), nullable=False),
        sa.Column("cost", sa.Numeric(10, 2), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        # Reject inverted time ranges at the database level.
        sa.CheckConstraint("(time_end > time_start)", name="ck_calendar_slots_time_end_after_start"),
    )
    op.create_index("ix_calendar_slots_calendar_id", "calendar_slots", ["calendar_id"], unique=False)
    op.create_index("ix_calendar_slots_time_start", "calendar_slots", ["time_start"], unique=False)
    # A slot name must be unique within its calendar.
    op.create_unique_constraint(
        "uq_calendar_slots_unique_band",
        "calendar_slots",
        ["calendar_id", "name"]
    )


def downgrade() -> None:
    """Drop calendar_slots and remove calendars.description (reverse order)."""
    op.drop_constraint("uq_calendar_slots_unique_band", "calendar_slots", type_="unique")
    op.drop_index("ix_calendar_slots_time_start", table_name="calendar_slots")
    op.drop_index("ix_calendar_slots_calendar_id", table_name="calendar_slots")
    op.drop_table("calendar_slots")
    with op.batch_alter_table("calendars") as batch_op:
        batch_op.drop_column("description")

View File

@@ -0,0 +1,52 @@
"""add product labels and stickers

Creates two parallel child tables of ``products``: ``product_labels`` and
``product_stickers``, each a simple (product_id, name) pair, unique per
product.

Revision ID: 20251107_121500_labels_stickers
Revises: 20251107_090500_snapshot_to_db
Create Date: 2025-11-07T12:15:00.000000
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "20251107_121500_labels_stickers"
down_revision = "20251107_090500_snapshot_to_db"
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Create product_labels and product_stickers with indexes and uniques."""
    op.create_table(
        "product_labels",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
    )
    op.create_index("ix_product_labels_product_id", "product_labels", ["product_id"], unique=False)
    op.create_index("ix_product_labels_name", "product_labels", ["name"], unique=False)
    # Each label name may appear at most once per product.
    op.create_unique_constraint(
        "uq_product_labels_product_name", "product_labels", ["product_id", "name"]
    )
    op.create_table(
        "product_stickers",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
    )
    op.create_index("ix_product_stickers_product_id", "product_stickers", ["product_id"], unique=False)
    op.create_index("ix_product_stickers_name", "product_stickers", ["name"], unique=False)
    op.create_unique_constraint(
        "uq_product_stickers_product_name", "product_stickers", ["product_id", "name"]
    )


def downgrade() -> None:
    """Drop both tables, constraints and indexes first."""
    op.drop_constraint("uq_product_stickers_product_name", "product_stickers", type_="unique")
    op.drop_index("ix_product_stickers_name", table_name="product_stickers")
    op.drop_index("ix_product_stickers_product_id", table_name="product_stickers")
    op.drop_table("product_stickers")
    op.drop_constraint("uq_product_labels_product_name", "product_labels", type_="unique")
    op.drop_index("ix_product_labels_name", table_name="product_labels")
    op.drop_index("ix_product_labels_product_id", table_name="product_labels")
    op.drop_table("product_labels")

View File

@@ -0,0 +1,44 @@
"""widen alembic_version.version_num to 255

This project uses long, descriptive revision ids (e.g. date-prefixed slugs)
that exceed Alembic's default VARCHAR(32) column, so the bookkeeping column
is widened.

Revision ID: 20251107_123000_widen_alembic_version
Revises: 20251107_121500_labels_stickers
Create Date: 2025-11-07T12:30:00.000000
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "20251107_123000_widen_alembic_version"
down_revision = "20251107_121500_labels_stickers"
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Increase the size of alembic_version.version_num to 255."""
    # Raw Postgres SQL: widening a varchar needs no USING clause and no
    # table rewrite on Postgres.
    op.execute(
        "ALTER TABLE alembic_version "
        "ALTER COLUMN version_num TYPE VARCHAR(255)"
    )
    # NOTE: for cross-dialect support (e.g. SQLite) this would need a
    # dialect check and batch_alter_table; Postgres-only is assumed here.


def downgrade() -> None:
    """Shrink alembic_version.version_num back to 32.

    On Postgres, shrinking fails if any row exceeds 32 chars, so values are
    proactively truncated first to guarantee a clean downgrade.
    """
    # Truncate any too-long values to fit back into VARCHAR(32).
    op.execute(
        "UPDATE alembic_version SET version_num = LEFT(version_num, 32)"
    )
    op.execute(
        "ALTER TABLE alembic_version "
        "ALTER COLUMN version_num TYPE VARCHAR(32)"
    )

View File

@@ -0,0 +1,93 @@
"""add product attributes, nutrition, allergens and extra product columns

Adds identifier/size columns to ``products`` and three key/value-style child
tables: attributes (arbitrary facts), nutrition (value + unit), and allergens
(name + contains flag).

Revision ID: 20251107_153000_product_attributes_nutrition
Revises: 20251107_123000_widen_alembic_version
Create Date: 2025-11-07T15:30:00.000000
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "20251107_153000_product_attributes_nutrition"
down_revision = "20251107_123000_widen_alembic_version"
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Add product columns and create the three child tables."""
    # --- products extra columns ---
    with op.batch_alter_table("products") as batch_op:
        batch_op.add_column(sa.Column("ean", sa.String(length=64), nullable=True))
        batch_op.add_column(sa.Column("sku", sa.String(length=128), nullable=True))
        batch_op.add_column(sa.Column("unit_size", sa.String(length=128), nullable=True))
        batch_op.add_column(sa.Column("pack_size", sa.String(length=128), nullable=True))
        # Non-unique on purpose: EAN/SKU may repeat across listings.
        batch_op.create_index("ix_products_ean", ["ean"], unique=False)
        batch_op.create_index("ix_products_sku", ["sku"], unique=False)
    # --- attributes: arbitrary key/value facts (e.g., Brand, Origin, etc.) ---
    op.create_table(
        "product_attributes",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.Text(), nullable=True),
    )
    op.create_index("ix_product_attributes_product_id", "product_attributes", ["product_id"], unique=False)
    op.create_index("ix_product_attributes_key", "product_attributes", ["key"], unique=False)
    op.create_unique_constraint(
        "uq_product_attributes_product_key", "product_attributes", ["product_id", "key"]
    )
    # --- nutrition: key/value[+unit] rows (e.g., Energy, Fat, Protein) ---
    op.create_table(
        "product_nutrition",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.String(length=255), nullable=True),
        sa.Column("unit", sa.String(length=64), nullable=True),
    )
    op.create_index("ix_product_nutrition_product_id", "product_nutrition", ["product_id"], unique=False)
    op.create_index("ix_product_nutrition_key", "product_nutrition", ["key"], unique=False)
    op.create_unique_constraint(
        "uq_product_nutrition_product_key", "product_nutrition", ["product_id", "key"]
    )
    # --- allergens: one row per allergen mention (name + contains boolean) ---
    op.create_table(
        "product_allergens",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("product_id", sa.Integer(), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("contains", sa.Boolean(), nullable=False, server_default=sa.false()),
    )
    op.create_index("ix_product_allergens_product_id", "product_allergens", ["product_id"], unique=False)
    op.create_index("ix_product_allergens_name", "product_allergens", ["name"], unique=False)
    op.create_unique_constraint(
        "uq_product_allergens_product_name", "product_allergens", ["product_id", "name"]
    )


def downgrade() -> None:
    """Drop the child tables then the extra product columns (reverse order)."""
    op.drop_constraint("uq_product_allergens_product_name", "product_allergens", type_="unique")
    op.drop_index("ix_product_allergens_name", table_name="product_allergens")
    op.drop_index("ix_product_allergens_product_id", table_name="product_allergens")
    op.drop_table("product_allergens")
    op.drop_constraint("uq_product_nutrition_product_key", "product_nutrition", type_="unique")
    op.drop_index("ix_product_nutrition_key", table_name="product_nutrition")
    op.drop_index("ix_product_nutrition_product_id", table_name="product_nutrition")
    op.drop_table("product_nutrition")
    op.drop_constraint("uq_product_attributes_product_key", "product_attributes", type_="unique")
    op.drop_index("ix_product_attributes_key", table_name="product_attributes")
    op.drop_index("ix_product_attributes_product_id", table_name="product_attributes")
    op.drop_table("product_attributes")
    with op.batch_alter_table("products") as batch_op:
        batch_op.drop_index("ix_products_sku")
        batch_op.drop_index("ix_products_ean")
        batch_op.drop_column("pack_size")
        batch_op.drop_column("unit_size")
        batch_op.drop_column("sku")
        batch_op.drop_column("ean")

View File

@@ -0,0 +1,30 @@
"""Add regular_price and oe_list_price fields to Product

Adds the normalized regular price (value, currency, raw source string) and
an ``oe_list_price`` comparison price to ``products``.

Revision ID: 20251107_163500_add_regular_price_and_oe_list_price
Revises: 20251107_153000_product_attributes_nutrition
Create Date: 2025-11-07 16:35:00.000000
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "20251107_163500_add_regular_price_and_oe_list_price"
down_revision = "20251107_153000_product_attributes_nutrition"
branch_labels = None
depends_on = None


def upgrade():
    """Add the four nullable price columns to products."""
    op.add_column('products', sa.Column('regular_price', sa.Numeric(12, 2), nullable=True))
    op.add_column('products', sa.Column('regular_price_currency', sa.String(length=16), nullable=True))
    # Raw, unparsed price string as scraped/imported, kept for auditing.
    op.add_column('products', sa.Column('regular_price_raw', sa.String(length=128), nullable=True))
    op.add_column('products', sa.Column('oe_list_price', sa.Numeric(12, 2), nullable=True))


def downgrade():
    """Remove the price columns in reverse order."""
    op.drop_column('products', 'oe_list_price')
    op.drop_column('products', 'regular_price_raw')
    op.drop_column('products', 'regular_price_currency')
    op.drop_column('products', 'regular_price')

View File

@@ -0,0 +1,72 @@
"""Link listings to nav_tops/nav_subs by id instead of slug.

Adds ``listings.top_id`` / ``listings.sub_id``, backfills them from the
existing ``top_slug`` / ``sub_slug`` columns, then adds foreign keys and
makes ``top_id`` mandatory.
"""
from alembic import op
import sqlalchemy as sa
# NOTE: ``table``, ``column`` and ``update`` below are currently unused
# (the code uses sa.table/sa.column and listings.update()); kept as-is.
from sqlalchemy.sql import table, column, select, update
from sqlalchemy.orm.session import Session

# revision identifiers, used by Alembic.
revision = '20251107_180000_link_listings_to_nav_ids'
down_revision = '20251107_163500_add_regular_price_and_oe_list_price'
branch_labels = None
depends_on = None


def upgrade():
    """Add id columns, backfill from slugs, then constrain."""
    # Add new nullable columns first so the backfill can run.
    op.add_column('listings', sa.Column('top_id', sa.Integer(), nullable=True))
    op.add_column('listings', sa.Column('sub_id', sa.Integer(), nullable=True))
    bind = op.get_bind()
    session = Session(bind=bind)
    # Lightweight table clauses: just enough schema for the data migration.
    nav_tops = sa.table(
        'nav_tops',
        sa.column('id', sa.Integer),
        sa.column('slug', sa.String),
    )
    nav_subs = sa.table(
        'nav_subs',
        sa.column('id', sa.Integer),
        sa.column('slug', sa.String),
        sa.column('top_id', sa.Integer),
    )
    listings = sa.table(
        'listings',
        sa.column('id', sa.Integer),
        sa.column('top_slug', sa.String),
        sa.column('sub_slug', sa.String),
        sa.column('top_id', sa.Integer),
        sa.column('sub_id', sa.Integer),
    )
    # Map top_slug -> top_id
    top_slug_to_id = {
        slug: id_ for id_, slug in session.execute(select(nav_tops.c.id, nav_tops.c.slug))
    }
    # Sub slugs are only unique within a top, hence the (top_id, slug) key.
    sub_slug_to_id = {
        (top_id, slug): id_ for id_, slug, top_id in session.execute(
            select(nav_subs.c.id, nav_subs.c.slug, nav_subs.c.top_id)
        )
    }
    # NOTE(review): a listing whose top_slug has no nav_tops match gets
    # top_id=None here, which would make the alter_column(nullable=False)
    # below fail -- assumes all slugs resolve; confirm before running.
    for row in session.execute(select(listings.c.id, listings.c.top_slug, listings.c.sub_slug)):
        listing_id, top_slug, sub_slug = row
        top_id = top_slug_to_id.get(top_slug)
        sub_id = sub_slug_to_id.get((top_id, sub_slug)) if sub_slug else None
        session.execute(
            listings.update()
            .where(listings.c.id == listing_id)
            .values(top_id=top_id, sub_id=sub_id)
        )
    session.commit()
    # Add foreign keys and constraints (names auto-generated: None).
    op.create_foreign_key(None, 'listings', 'nav_tops', ['top_id'], ['id'])
    op.create_foreign_key(None, 'listings', 'nav_subs', ['sub_id'], ['id'])
    op.alter_column('listings', 'top_id', nullable=False)
    # Optional: remove old slug fields
    # op.drop_column('listings', 'top_slug')
    # op.drop_column('listings', 'sub_slug')


def downgrade():
    # Irreversible by design: the backfill cannot be cleanly undone.
    raise NotImplementedError("No downgrade")

View File

@@ -0,0 +1,26 @@
"""Add indexes that common queries were missing (price sort, brand filter,
like lookups, listing-item lookups by slug)."""
from alembic import op
import sqlalchemy as sa

revision = '20251107_add_missing_indexes'
down_revision = '1a1f1f1fc71c'  # Adjust if needed to match your current head
depends_on = None
branch_labels = None


def upgrade() -> None:
    # Index for sorting by price
    op.create_index("ix_products_regular_price", "products", ["regular_price"])
    # Index for filtering/aggregating by brand
    op.create_index("ix_products_brand", "products", ["brand"])
    # Index for product_likes.product_id (if not already covered by FK)
    op.create_index("ix_product_likes_product_id", "product_likes", ["product_id"])
    # Composite index on listing_items (may be partially redundant with existing constraints)
    op.create_index("ix_listing_items_listing_slug", "listing_items", ["listing_id", "slug"])


def downgrade() -> None:
    """Drop the four indexes in reverse creation order."""
    op.drop_index("ix_listing_items_listing_slug", table_name="listing_items")
    op.drop_index("ix_product_likes_product_id", table_name="product_likes")
    op.drop_index("ix_products_brand", table_name="products")
    op.drop_index("ix_products_regular_price", table_name="products")

View File

@@ -0,0 +1,52 @@
"""Add surrogate key and product_id FK to product_likes

Backfills ``product_likes.product_id`` from the legacy ``product_slug``
column, then constrains it (FK, NOT NULL, unique per user).
"""
from alembic import op
import sqlalchemy as sa

# Revision identifiers
revision = '20251107_add_product_id_to_likes'
down_revision = '0d767ad92dd7'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Add surrogate primary key and a nullable product_id for the backfill.
    op.add_column("product_likes", sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True))
    op.add_column("product_likes", sa.Column("product_id", sa.Integer(), nullable=True))
    # Backfill product_id by resolving the legacy slug against products.
    # NOTE(review): likes whose slug matches no product stay NULL and will
    # make the alter_column(nullable=False) below fail -- confirm data first.
    op.execute("""
        UPDATE product_likes
        SET product_id = (
            SELECT id FROM products WHERE products.slug = product_likes.product_slug
        )
    """)
    # Add the real FK constraint now that values exist.
    op.create_foreign_key(
        "fk_product_likes_product_id_products",
        source_table="product_likes",
        referent_table="products",
        local_cols=["product_id"],
        remote_cols=["id"],
        ondelete="CASCADE"
    )
    # Make product_id non-nullable now that it's backfilled.
    op.alter_column("product_likes", "product_id", nullable=False)
    # Unique index: a user may like a given product at most once.
    op.create_index(
        "ix_product_likes_user_product",
        "product_likes",
        ["user_id", "product_id"],
        unique=True
    )


def downgrade() -> None:
    """Drop the index, FK and new columns in reverse order."""
    op.drop_index("ix_product_likes_user_product", table_name="product_likes")
    op.drop_constraint("fk_product_likes_product_id_products", "product_likes", type_="foreignkey")
    op.drop_column("product_likes", "product_id")
    op.drop_column("product_likes", "id")

View File

@@ -0,0 +1,164 @@
"""Add soft delete and update unique constraints to include deleted_at

Adds ``deleted_at`` (and, where missing, ``created_at``) to the product
child tables, nav tables and listings, and rebuilds each unique constraint
to include ``deleted_at`` so soft-deleted rows do not block re-creation.

NOTE(review): on Postgres (default NULLS DISTINCT), unique constraints that
include a nullable ``deleted_at`` do NOT enforce uniqueness among active
rows (deleted_at IS NULL) -- each NULL is distinct.  A partial unique index
``WHERE deleted_at IS NULL`` would be required for that; confirm intent.

Revision ID: 20251108_soft_delete_all
Revises: remove_product_slug_20251107
Create Date: 2025-11-08 00:38:03
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '20251108_soft_delete_all'
down_revision = 'remove_product_slug_20251107'
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Add soft-delete columns and rebuild uniques to include deleted_at."""
    op.add_column('product_likes', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_likes_product_user', 'product_likes', ['product_id', 'user_id', 'deleted_at'])
    # Drop the old unique index
    op.drop_index('ix_product_likes_user_product', table_name='product_likes')
    # Create a new unique index that includes deleted_at
    op.create_index(
        'ix_product_likes_user_product',
        'product_likes',
        ['user_id', 'product_id', 'deleted_at'],
        unique=True
    )
    op.add_column('product_allergens', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_allergens', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_constraint('uq_product_allergens_product_name', 'product_allergens', type_='unique')
    op.create_unique_constraint('uq_product_allergens_product_name', 'product_allergens', ['product_id', 'name', 'deleted_at'])
    op.add_column('product_attributes', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_attributes', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_constraint('uq_product_attributes_product_key', 'product_attributes', type_='unique')
    op.create_unique_constraint('uq_product_attributes_product_key', 'product_attributes', ['product_id', 'key', 'deleted_at'])
    op.add_column('product_images', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_images', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_images', 'product_images', ['product_id', 'position', 'deleted_at'])
    op.add_column('product_labels', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_labels', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_labels', 'product_labels', ['product_id', 'name', 'deleted_at'])
    op.add_column('product_nutrition', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_nutrition', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_nutrition', 'product_nutrition', ['product_id', 'key', 'deleted_at'])
    op.add_column('product_sections', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_sections', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_sections', 'product_sections', ['product_id', 'title', 'deleted_at'])
    op.add_column('product_stickers', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('product_stickers', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_product_stickers', 'product_stickers', ['product_id', 'name', 'deleted_at'])
    op.add_column('nav_tops', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_nav_tops', 'nav_tops', ['slug', 'deleted_at'])
    op.add_column('nav_subs', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('nav_subs', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_nav_subs', 'nav_subs', ['top_id', 'slug', 'deleted_at'])
    op.add_column('listings', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_constraint('uq_listings_top_sub', 'listings', type_='unique')
    op.create_unique_constraint('uq_listings_top_sub', 'listings', ['top_id', 'sub_id', 'deleted_at'])
    op.add_column('listing_items', sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False))
    op.add_column('listing_items', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.create_unique_constraint('uq_listing_items', 'listing_items', ['listing_id', 'slug', 'deleted_at'])


def downgrade() -> None:
    """Drop soft-delete columns and restore the original uniques."""
    # Drop the modified index
    op.drop_index('ix_product_likes_user_product', table_name='product_likes')
    # Recreate the old unique index
    op.create_index(
        'ix_product_likes_user_product',
        'product_likes',
        ['user_id', 'product_id'],
        unique=True
    )
    op.drop_constraint('uq_product_likes_product_user', 'product_likes', type_='unique')
    op.drop_column('product_likes', 'deleted_at')
    op.drop_constraint('uq_product_allergens_product_name', 'product_allergens', type_='unique')
    op.drop_column('product_allergens', 'deleted_at')
    op.drop_column('product_allergens', 'created_at')
    op.create_unique_constraint('uq_product_allergens_product_name', 'product_allergens', ['product_id', 'name'])
    op.drop_constraint('uq_product_attributes_product_key', 'product_attributes', type_='unique')
    op.drop_column('product_attributes', 'deleted_at')
    op.drop_column('product_attributes', 'created_at')
    op.create_unique_constraint('uq_product_attributes_product_key', 'product_attributes', ['product_id', 'key'])
    op.drop_constraint('uq_product_images', 'product_images', type_='unique')
    op.drop_column('product_images', 'deleted_at')
    op.drop_column('product_images', 'created_at')
    op.drop_constraint('uq_product_labels', 'product_labels', type_='unique')
    op.drop_column('product_labels', 'deleted_at')
    op.drop_column('product_labels', 'created_at')
    op.drop_constraint('uq_product_nutrition', 'product_nutrition', type_='unique')
    op.drop_column('product_nutrition', 'deleted_at')
    op.drop_column('product_nutrition', 'created_at')
    op.drop_constraint('uq_product_sections', 'product_sections', type_='unique')
    op.drop_column('product_sections', 'deleted_at')
    op.drop_column('product_sections', 'created_at')
    op.drop_constraint('uq_product_stickers', 'product_stickers', type_='unique')
    op.drop_column('product_stickers', 'deleted_at')
    op.drop_column('product_stickers', 'created_at')
    op.drop_constraint('uq_nav_tops', 'nav_tops', type_='unique')
    op.drop_column('nav_tops', 'deleted_at')
    op.drop_constraint('uq_nav_subs', 'nav_subs', type_='unique')
    op.drop_column('nav_subs', 'deleted_at')
    op.drop_column('nav_subs', 'created_at')
    op.drop_constraint('uq_listings_top_sub', 'listings', type_='unique')
    op.drop_column('listings', 'deleted_at')
    op.create_unique_constraint('uq_listings_top_sub', 'listings', ['top_id', 'sub_id'])
    op.drop_constraint('uq_listing_items', 'listing_items', type_='unique')
    op.drop_column('listing_items', 'deleted_at')
    op.drop_column('listing_items', 'created_at')

View File

@@ -0,0 +1,60 @@
"""Consolidate duplicate unique constraints on the product_* child tables.

The soft-delete migration created a second, parallel unique constraint on
several product child tables; this revision drops both variants and
recreates a single soft-delete-aware constraint per table.

NOTE(review): the revision id below contains a space, which is awkward for
``alembic upgrade/downgrade <rev>`` on the command line; it cannot be
renamed safely once stamped in deployed databases.
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision='20251108_1_remove extra_uqs'
down_revision = '20251108_nav_tops_soft_delete'
branch_labels = None
depends_on = None


def upgrade():
    """Collapse each table's two uniques into one including deleted_at."""
    # NOTE(review): with deleted_at nullable, Postgres's NULLS DISTINCT
    # semantics mean these constraints do not restrict active rows.
    op.drop_constraint('uq_product_images', 'product_images', type_='unique')
    op.drop_constraint('uq_product_images_product_url_kind', 'product_images', type_='unique')
    op.create_unique_constraint("uq_product_images_product_url_kind", "product_images", ["product_id", "url", "kind", "deleted_at"])
    op.drop_constraint('uq_product_labels', 'product_labels', type_='unique')
    op.drop_constraint('uq_product_labels_product_name', 'product_labels', type_='unique')
    op.create_unique_constraint("uq_product_labels_product_name", "product_labels", ["product_id", "name", "deleted_at"])
    op.drop_constraint('uq_product_nutrition', 'product_nutrition', type_='unique')
    op.drop_constraint('uq_product_nutrition_product_key', 'product_nutrition', type_='unique')
    op.create_unique_constraint("uq_product_nutrition_product_key", "product_nutrition", ["product_id", "key", "deleted_at"])
    op.drop_constraint('uq_product_sections', 'product_sections', type_='unique')
    op.drop_constraint('uq_product_sections_product_title', 'product_sections', type_='unique')
    op.create_unique_constraint("uq_product_sections_product_title", "product_sections", ["product_id", "title", "deleted_at"])
    op.drop_constraint('uq_product_stickers', 'product_stickers', type_='unique')
    op.drop_constraint('uq_product_stickers_product_name', 'product_stickers', type_='unique')
    op.create_unique_constraint("uq_product_stickers_product_name", "product_stickers", ["product_id", "name", "deleted_at"])


def downgrade():
    """Restore both constraint variants on each table."""
    op.drop_constraint('uq_product_images_product_url_kind', 'product_images', type_='unique')
    op.create_unique_constraint("uq_product_images_product_url_kind", "product_images", ["product_id", "url", "kind"])
    op.create_unique_constraint("uq_product_images", "product_images", ["product_id", "position", "deleted_at"])
    op.drop_constraint('uq_product_labels_product_name', 'product_labels', type_='unique')
    op.create_unique_constraint("uq_product_labels_product_name", "product_labels", ["product_id", "name"])
    op.create_unique_constraint("uq_product_labels", "product_labels", ["product_id", "name", "deleted_at"])
    op.drop_constraint('uq_product_nutrition_product_key', 'product_nutrition', type_='unique')
    op.create_unique_constraint("uq_product_nutrition_product_key", "product_nutrition", ["product_id", "key"])
    op.create_unique_constraint("uq_product_nutrition", "product_nutrition", ["product_id", "key", "deleted_at"])
    op.drop_constraint('uq_product_sections_product_title', 'product_sections', type_='unique')
    op.create_unique_constraint("uq_product_sections_product_title", "product_sections", ["product_id", "title"])
    op.create_unique_constraint("uq_product_sections", "product_sections", ["product_id", "title", "deleted_at"])
    op.drop_constraint('uq_product_stickers_product_name', 'product_stickers', type_='unique')
    op.create_unique_constraint("uq_product_stickers_product_name", "product_stickers", ["product_id", "name", ])
    op.create_unique_constraint("uq_product_stickers", "product_stickers", ["product_id", "name", "deleted_at" ])

View File

@@ -0,0 +1,36 @@
"""Update nav_tops unique constraint to include deleted_at

NOTE(review): with deleted_at nullable, Postgres's default NULLS DISTINCT
behavior means the new constraint does not enforce uniqueness among active
(deleted_at IS NULL) rows; a partial unique index would -- confirm intent.
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '20251108_nav_tops_soft_delete'
down_revision = '20251108_soft_delete_all'
branch_labels = None
depends_on = None


def upgrade():
    # Drop existing constraint
    op.drop_constraint('uq_nav_tops_label_slug', 'nav_tops', type_='unique')
    # Add new constraint including deleted_at
    op.create_unique_constraint(
        'uq_nav_tops_label_slug',
        'nav_tops',
        ['label', 'slug', 'deleted_at']
    )


def downgrade():
    # Drop new constraint
    op.drop_constraint('uq_nav_tops_label_slug', 'nav_tops', type_='unique')
    # Restore old constraint
    op.create_unique_constraint(
        'uq_nav_tops_label_slug',
        'nav_tops',
        ['label', 'slug']
    )

View File

@@ -0,0 +1,92 @@
"""add calendars & calendar_entries

Creates ``calendars`` (one or more per post) and ``calendar_entries``
(timed events within a calendar), with soft-delete-aware uniqueness on
(post_id, lower(slug)) enforced via a Postgres partial unique index.

Revision ID: 215330c5ec15
Revises: 20251102_223123
Create Date: 2025-11-03 13:07:10.387189
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "215330c5ec15"
down_revision = "20251102_223123"
branch_labels = None
depends_on = None


def upgrade():
    """Create both tables, lookup indexes, and the partial unique index."""
    # calendars
    op.create_table(
        "calendars",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("post_id", sa.Integer(), sa.ForeignKey("posts.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("slug", sa.String(length=255), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        # no hard UniqueConstraint; we enforce soft-delete-aware uniqueness with a partial index below
    )
    # helpful lookup indexes
    op.create_index("ix_calendars_post_id", "calendars", ["post_id"], unique=False)
    op.create_index("ix_calendars_name", "calendars", ["name"], unique=False)
    op.create_index("ix_calendars_slug", "calendars", ["slug"], unique=False)
    # calendar_entries
    op.create_table(
        "calendar_entries",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("calendar_id", sa.Integer(), sa.ForeignKey("calendars.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("start_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("end_at", sa.DateTime(timezone=True), nullable=True),  # <- allow open-ended
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
        sa.CheckConstraint("(end_at IS NULL) OR (end_at >= start_at)", name="ck_calendar_entries_end_after_start"),
    )
    op.create_index("ix_calendar_entries_calendar_id", "calendar_entries", ["calendar_id"], unique=False)
    op.create_index("ix_calendar_entries_start_at", "calendar_entries", ["start_at"], unique=False)
    op.create_index("ix_calendar_entries_name", "calendar_entries", ["name"], unique=False)
    # ---- Soft-delete-aware uniqueness for calendars (Postgres) ----
    # One active calendar per (post_id, lower(slug))
    if op.get_bind().dialect.name == "postgresql":
        # cleanup any active duplicates (defensive; table is new on fresh runs):
        # keep the most recently updated row per (post_id, lower(slug)) and
        # soft-delete the rest so the unique index below can be created.
        op.execute("""
            WITH ranked AS (
                SELECT
                    id,
                    ROW_NUMBER() OVER (
                        PARTITION BY post_id, lower(slug)
                        ORDER BY updated_at DESC, created_at DESC, id DESC
                    ) AS rn
                FROM calendars
                WHERE deleted_at IS NULL
            )
            UPDATE calendars c
            SET deleted_at = NOW()
            FROM ranked r
            WHERE c.id = r.id AND r.rn > 1;
        """)
        # Partial unique index: only active rows participate, so soft-deleted
        # calendars never block re-creating a slug.
        op.execute("""
            CREATE UNIQUE INDEX IF NOT EXISTS ux_calendars_post_slug_active
            ON calendars (post_id, lower(slug))
            WHERE deleted_at IS NULL;
        """)


def downgrade():
    # drop in reverse dependency order
    op.drop_index("ix_calendar_entries_name", table_name="calendar_entries")
    op.drop_index("ix_calendar_entries_start_at", table_name="calendar_entries")
    op.drop_index("ix_calendar_entries_calendar_id", table_name="calendar_entries")
    op.drop_table("calendar_entries")
    if op.get_bind().dialect.name == "postgresql":
        op.execute("DROP INDEX IF EXISTS ux_calendars_post_slug_active;")
    op.drop_index("ix_calendars_slug", table_name="calendars")
    op.drop_index("ix_calendars_name", table_name="calendars")
    op.drop_index("ix_calendars_post_id", table_name="calendars")
    op.drop_table("calendars")

View File

@@ -0,0 +1,29 @@
"""Remove product_slug from product_likes

The slug column is obsolete now that product_likes carries a product_id FK.

Revision ID: remove_product_slug_20251107
Revises: 20251107_add_missing_indexes
Create Date: 2025-11-07
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = 'remove_product_slug_20251107'
down_revision = '20251107_add_missing_indexes'
branch_labels = None
depends_on = None


def upgrade() -> None:
    with op.batch_alter_table("product_likes") as batch_op:
        batch_op.drop_column("product_slug")


def downgrade() -> None:
    # NOTE(review): re-adding the column as NOT NULL with no server_default
    # will fail on a non-empty table; the slug data is also lost on upgrade,
    # so this downgrade is effectively only valid on an empty table.
    with op.batch_alter_table("product_likes") as batch_op:
        batch_op.add_column(sa.Column(
            "product_slug",
            sa.String(length=255),
            nullable=False,
        ))

24
alembic/script.py.mako Normal file
View File

@@ -0,0 +1,24 @@
## Mako template rendered by "alembic revision" when generating a new
## migration file ("##" lines are Mako comments and are not emitted).
<%text>
# Alembic migration script template
</%text>
"""empty message

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,20 @@
"""Initial database schema from schema.sql"""
from alembic import op
import sqlalchemy as sa
import pathlib
# revision identifiers, used by Alembic
revision = '0000_alembic'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
op.execute("""
CREATE TABLE IF NOT EXISTS alembic_version (
version_num VARCHAR(32) NOT NULL,
CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num)
);
""")

View File

@@ -0,0 +1,33 @@
"""Initial database schema from schema.sql"""
from alembic import op
import sqlalchemy as sa
import pathlib
# revision identifiers, used by Alembic
revision = '0001_initial_schema'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
return
schema_path = pathlib.Path(__file__).parent.parent.parent / "schema.sql"
with open(schema_path, encoding="utf-8") as f:
sql = f.read()
conn = op.get_bind()
conn.execute(sa.text(sql))
def downgrade():
return
# Drop all user-defined tables in the 'public' schema
conn = op.get_bind()
conn.execute(sa.text("""
DO $$ DECLARE
r RECORD;
BEGIN
FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = 'public') LOOP
EXECUTE 'DROP TABLE IF EXISTS public.' || quote_ident(r.tablename) || ' CASCADE';
END LOOP;
END $$;
"""))

View File

@@ -0,0 +1,78 @@
"""Add cart_items table for shopping cart"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0002_add_cart_items"
down_revision = "0001_initial_schema"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"cart_items",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
# Either a logged-in user *or* an anonymous session_id
sa.Column(
"user_id",
sa.Integer(),
sa.ForeignKey("users.id", ondelete="CASCADE"),
nullable=True,
),
sa.Column("session_id", sa.String(length=128), nullable=True),
# IMPORTANT: reference products.id (PK), not slug
sa.Column(
"product_id",
sa.Integer(),
sa.ForeignKey("products.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column(
"quantity",
sa.Integer(),
nullable=False,
server_default="1",
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"deleted_at",
sa.DateTime(timezone=True),
nullable=True,
),
)
# Indexes to speed up cart lookups
op.create_index(
"ix_cart_items_user_product",
"cart_items",
["user_id", "product_id"],
unique=False,
)
op.create_index(
"ix_cart_items_session_product",
"cart_items",
["session_id", "product_id"],
unique=False,
)
def downgrade() -> None:
op.drop_index("ix_cart_items_session_product", table_name="cart_items")
op.drop_index("ix_cart_items_user_product", table_name="cart_items")
op.drop_table("cart_items")

View File

@@ -0,0 +1,118 @@
"""Add orders and order_items tables for checkout"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0003_add_orders"
down_revision = "0002_add_cart_items"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"orders",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id"), nullable=True),
sa.Column("session_id", sa.String(length=64), nullable=True),
sa.Column(
"status",
sa.String(length=32),
nullable=False,
server_default="pending",
),
sa.Column(
"currency",
sa.String(length=16),
nullable=False,
server_default="GBP",
),
sa.Column(
"total_amount",
sa.Numeric(12, 2),
nullable=False,
),
# SumUp integration fields
sa.Column("sumup_checkout_id", sa.String(length=128), nullable=True),
sa.Column("sumup_status", sa.String(length=32), nullable=True),
sa.Column("sumup_hosted_url", sa.Text(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
)
# Indexes to match model hints (session_id + sumup_checkout_id index=True)
op.create_index(
"ix_orders_session_id",
"orders",
["session_id"],
unique=False,
)
op.create_index(
"ix_orders_sumup_checkout_id",
"orders",
["sumup_checkout_id"],
unique=False,
)
op.create_table(
"order_items",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column(
"order_id",
sa.Integer(),
sa.ForeignKey("orders.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column(
"product_id",
sa.Integer(),
sa.ForeignKey("products.id"),
nullable=False,
),
sa.Column("product_title", sa.String(length=512), nullable=True),
sa.Column(
"quantity",
sa.Integer(),
nullable=False,
server_default="1",
),
sa.Column(
"unit_price",
sa.Numeric(12, 2),
nullable=False,
),
sa.Column(
"currency",
sa.String(length=16),
nullable=False,
server_default="GBP",
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.func.now(),
),
)
def downgrade() -> None:
op.drop_table("order_items")
op.drop_index("ix_orders_sumup_checkout_id", table_name="orders")
op.drop_index("ix_orders_session_id", table_name="orders")
op.drop_table("orders")

View File

@@ -0,0 +1,27 @@
"""Add sumup_reference to orders"""
from alembic import op
import sqlalchemy as sa
revision = "0004_add_sumup_reference"
down_revision = "0003_add_orders"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"orders",
sa.Column("sumup_reference", sa.String(length=255), nullable=True),
)
op.create_index(
"ix_orders_sumup_reference",
"orders",
["sumup_reference"],
unique=False,
)
def downgrade() -> None:
op.drop_index("ix_orders_sumup_reference", table_name="orders")
op.drop_column("orders", "sumup_reference")

View File

@@ -0,0 +1,27 @@
"""Add description field to orders"""
from alembic import op
import sqlalchemy as sa
revision = "0005_add_description"
down_revision = "0004_add_sumup_reference"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"orders",
sa.Column("description", sa.Text(), nullable=True),
)
op.create_index(
"ix_orders_description",
"orders",
["description"],
unique=False,
)
def downgrade() -> None:
op.drop_index("ix_orders_description", table_name="orders")
op.drop_column("orders", "description")

View File

@@ -0,0 +1,28 @@
"""Add booking ownership (user_id / session_id) plus state and cost columns
to calendar_entries."""
from alembic import op
import sqlalchemy as sa

revision = '0006_update_calendar_entries'
down_revision = '0005_add_description'  # use the appropriate previous revision ID
branch_labels = None
depends_on = None


def upgrade():
    # Add user_id and session_id columns: a logged-in user (FK) or an
    # anonymous session may own an entry, mirroring cart_items/orders.
    op.add_column('calendar_entries', sa.Column('user_id', sa.Integer(), nullable=True))
    op.create_foreign_key('fk_calendar_entries_user_id', 'calendar_entries', 'users', ['user_id'], ['id'])
    op.add_column('calendar_entries', sa.Column('session_id', sa.String(length=128), nullable=True))
    # Add state and cost columns; server defaults backfill existing rows so
    # the NOT NULL constraints succeed on a populated table.
    op.add_column('calendar_entries', sa.Column('state', sa.String(length=20), nullable=False, server_default='pending'))
    op.add_column('calendar_entries', sa.Column('cost', sa.Numeric(10, 2), nullable=False, server_default='10'))
    # (Optional) Create indexes on the new columns
    op.create_index('ix_calendar_entries_user_id', 'calendar_entries', ['user_id'])
    op.create_index('ix_calendar_entries_session_id', 'calendar_entries', ['session_id'])


def downgrade():
    op.drop_index('ix_calendar_entries_session_id', table_name='calendar_entries')
    op.drop_index('ix_calendar_entries_user_id', table_name='calendar_entries')
    op.drop_column('calendar_entries', 'cost')
    op.drop_column('calendar_entries', 'state')
    op.drop_column('calendar_entries', 'session_id')
    # FK constraint must go before its column.
    op.drop_constraint('fk_calendar_entries_user_id', 'calendar_entries', type_='foreignkey')
    op.drop_column('calendar_entries', 'user_id')

View File

@@ -0,0 +1,50 @@
"""Link calendar_entries to the order that paid for them (order_id FK)."""
from alembic import op
import sqlalchemy as sa

revision = "0007_add_oid_entries"
down_revision = "0006_update_calendar_entries"
branch_labels = None
depends_on = None


def upgrade():
    # Add order_id column; SET NULL on order deletion keeps the entry alive.
    op.add_column(
        "calendar_entries",
        sa.Column("order_id", sa.Integer(), nullable=True),
    )
    op.create_foreign_key(
        "fk_calendar_entries_order_id",
        "calendar_entries",
        "orders",
        ["order_id"],
        ["id"],
        ondelete="SET NULL",
    )
    op.create_index(
        "ix_calendar_entries_order_id",
        "calendar_entries",
        ["order_id"],
        unique=False,
    )
    # Optional: add an index on state if you want faster queries by state
    op.create_index(
        "ix_calendar_entries_state",
        "calendar_entries",
        ["state"],
        unique=False,
    )


def downgrade():
    # Drop indexes and FK in reverse order
    op.drop_index("ix_calendar_entries_state", table_name="calendar_entries")
    op.drop_index("ix_calendar_entries_order_id", table_name="calendar_entries")
    op.drop_constraint(
        "fk_calendar_entries_order_id",
        "calendar_entries",
        type_="foreignkey",
    )
    op.drop_column("calendar_entries", "order_id")

View File

@@ -0,0 +1,33 @@
"""add flexible flag to calendar_slots
Revision ID: 0008_add_flexible_to_calendar_slots
Revises: 0007_add_order_id_to_calendar_entries
Create Date: 2025-12-06 12:34:56.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0008_add_flexible_to_slots"
down_revision = "0007_add_oid_entries"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"calendar_slots",
sa.Column(
"flexible",
sa.Boolean(),
nullable=False,
server_default=sa.false(), # set existing rows to False
),
)
# Optional: drop server_default so future inserts must supply a value
op.alter_column("calendar_slots", "flexible", server_default=None)
def downgrade() -> None:
op.drop_column("calendar_slots", "flexible")

View File

@@ -0,0 +1,54 @@
"""add slot_id to calendar_entries
Revision ID: 0009_add_slot_id_to_entries
Revises: 0008_add_flexible_to_slots
Create Date: 2025-12-06 13:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0009_add_slot_id_to_entries"
down_revision = "0008_add_flexible_to_slots"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add slot_id column as nullable initially
op.add_column(
"calendar_entries",
sa.Column(
"slot_id",
sa.Integer(),
nullable=True,
),
)
# Add foreign key constraint
op.create_foreign_key(
"fk_calendar_entries_slot_id_calendar_slots",
"calendar_entries",
"calendar_slots",
["slot_id"],
["id"],
ondelete="SET NULL",
)
# Add index for better query performance
op.create_index(
"ix_calendar_entries_slot_id",
"calendar_entries",
["slot_id"],
)
def downgrade() -> None:
op.drop_index("ix_calendar_entries_slot_id", table_name="calendar_entries")
op.drop_constraint(
"fk_calendar_entries_slot_id_calendar_slots",
"calendar_entries",
type_="foreignkey",
)
op.drop_column("calendar_entries", "slot_id")

View File

@@ -0,0 +1,64 @@
"""Add post_likes table for liking blog posts
Revision ID: 0010_add_post_likes
Revises: 0009_add_slot_id_to_entries
Create Date: 2025-12-07 13:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0010_add_post_likes"
down_revision = "0009_add_slot_id_to_entries"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"post_likes",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column(
"user_id",
sa.Integer(),
sa.ForeignKey("users.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column(
"post_id",
sa.Integer(),
sa.ForeignKey("posts.id", ondelete="CASCADE"),
nullable=False,
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"deleted_at",
sa.DateTime(timezone=True),
nullable=True,
),
)
# Index for fast user+post lookups
op.create_index(
"ix_post_likes_user_post",
"post_likes",
["user_id", "post_id"],
unique=False,
)
def downgrade() -> None:
op.drop_index("ix_post_likes_user_post", table_name="post_likes")
op.drop_table("post_likes")

View File

@@ -0,0 +1,43 @@
"""Add ticket_price and ticket_count to calendar_entries
Revision ID: 0011_add_entry_tickets
Revises: 0010_add_post_likes
Create Date: 2025-12-07 14:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import NUMERIC
# revision identifiers, used by Alembic.
revision = "0011_add_entry_tickets"
down_revision = "0010_add_post_likes"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add ticket_price column (nullable - NULL means no tickets)
op.add_column(
"calendar_entries",
sa.Column(
"ticket_price",
NUMERIC(10, 2),
nullable=True,
),
)
# Add ticket_count column (nullable - NULL means unlimited)
op.add_column(
"calendar_entries",
sa.Column(
"ticket_count",
sa.Integer(),
nullable=True,
),
)
def downgrade() -> None:
op.drop_column("calendar_entries", "ticket_count")
op.drop_column("calendar_entries", "ticket_price")

View File

@@ -0,0 +1,41 @@
# Alembic migration script template
"""add ticket_types table

Revision ID: 47fc53fc0d2b
Revises: a9f54e4eaf02
Create Date: 2025-12-08 07:29:11.422435
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '47fc53fc0d2b'
down_revision = 'a9f54e4eaf02'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Ticket tiers (name / cost / quantity) attached to a calendar entry.
    op.create_table(
        'ticket_types',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('entry_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('cost', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('count', sa.Integer(), nullable=False),
        # NOTE(review): created_at/updated_at are NOT NULL with no
        # server_default -- presumably the ORM model supplies values on
        # insert; confirm, otherwise raw inserts will fail.
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['entry_id'], ['calendar_entries.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_ticket_types_entry_id', 'ticket_types', ['entry_id'], unique=False)
    op.create_index('ix_ticket_types_name', 'ticket_types', ['name'], unique=False)


def downgrade() -> None:
    op.drop_index('ix_ticket_types_name', table_name='ticket_types')
    op.drop_index('ix_ticket_types_entry_id', table_name='ticket_types')
    op.drop_table('ticket_types')

View File

@@ -0,0 +1,36 @@
# Alembic migration script template
"""Add calendar_entry_posts association table

Revision ID: 6cb124491c9d
Revises: 0011_add_entry_tickets
Create Date: 2025-12-07 03:40:49.194068
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import TIMESTAMP

# revision identifiers, used by Alembic.
revision = '6cb124491c9d'
down_revision = '0011_add_entry_tickets'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Many-to-many link between calendar entries and posts; rows cascade
    # away with either side.
    op.create_table(
        'calendar_entry_posts',
        sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column('entry_id', sa.Integer(), sa.ForeignKey('calendar_entries.id', ondelete='CASCADE'), nullable=False),
        sa.Column('post_id', sa.Integer(), sa.ForeignKey('posts.id', ondelete='CASCADE'), nullable=False),
        sa.Column('created_at', TIMESTAMP(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('deleted_at', TIMESTAMP(timezone=True), nullable=True),
    )
    op.create_index('ix_entry_posts_entry_id', 'calendar_entry_posts', ['entry_id'])
    op.create_index('ix_entry_posts_post_id', 'calendar_entry_posts', ['post_id'])


def downgrade() -> None:
    # Second positional argument to drop_index is the table name.
    op.drop_index('ix_entry_posts_post_id', 'calendar_entry_posts')
    op.drop_index('ix_entry_posts_entry_id', 'calendar_entry_posts')
    op.drop_table('calendar_entry_posts')

View File

@@ -0,0 +1,37 @@
# Alembic migration script template
"""add menu_items table

Revision ID: a9f54e4eaf02
Revises: 6cb124491c9d
Create Date: 2025-12-07 17:38:54.839296
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = 'a9f54e4eaf02'
down_revision = '6cb124491c9d'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Ordered list of posts surfaced as menu entries (sort_order drives the
    # display order).
    op.create_table('menu_items',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('post_id', sa.Integer(), nullable=False),
        sa.Column('sort_order', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_menu_items_post_id'), 'menu_items', ['post_id'], unique=False)
    op.create_index(op.f('ix_menu_items_sort_order'), 'menu_items', ['sort_order'], unique=False)


def downgrade() -> None:
    op.drop_index(op.f('ix_menu_items_sort_order'), table_name='menu_items')
    op.drop_index(op.f('ix_menu_items_post_id'), table_name='menu_items')
    op.drop_table('menu_items')

View File

@@ -0,0 +1,35 @@
"""add snippets table
Revision ID: c3a1f7b9d4e5
Revises: 47fc53fc0d2b
Create Date: 2026-02-07
"""
from alembic import op
import sqlalchemy as sa
revision = 'c3a1f7b9d4e5'
down_revision = '47fc53fc0d2b'
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
'snippets',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('value', sa.Text(), nullable=False),
sa.Column('visibility', sa.String(length=20), server_default='private', nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'name', name='uq_snippets_user_name'),
)
op.create_index('ix_snippets_visibility', 'snippets', ['visibility'])
def downgrade() -> None:
op.drop_index('ix_snippets_visibility', table_name='snippets')
op.drop_table('snippets')

View File

@@ -0,0 +1,45 @@
"""add post user_id, author email, publish_requested
Revision ID: d4b2e8f1a3c7
Revises: c3a1f7b9d4e5
Create Date: 2026-02-08
"""
from alembic import op
import sqlalchemy as sa
revision = 'd4b2e8f1a3c7'
down_revision = 'c3a1f7b9d4e5'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add author.email
op.add_column('authors', sa.Column('email', sa.String(255), nullable=True))
# Add post.user_id FK
op.add_column('posts', sa.Column('user_id', sa.Integer(), nullable=True))
op.create_foreign_key('fk_posts_user_id', 'posts', 'users', ['user_id'], ['id'], ondelete='SET NULL')
op.create_index('ix_posts_user_id', 'posts', ['user_id'])
# Add post.publish_requested
op.add_column('posts', sa.Column('publish_requested', sa.Boolean(), server_default='false', nullable=False))
# Backfill: match posts to users via primary_author email
op.execute("""
UPDATE posts
SET user_id = u.id
FROM authors a
JOIN users u ON lower(a.email) = lower(u.email)
WHERE posts.primary_author_id = a.id
AND posts.user_id IS NULL
AND a.email IS NOT NULL
""")
def downgrade() -> None:
op.drop_column('posts', 'publish_requested')
op.drop_index('ix_posts_user_id', table_name='posts')
op.drop_constraint('fk_posts_user_id', 'posts', type_='foreignkey')
op.drop_column('posts', 'user_id')
op.drop_column('authors', 'email')

View File

@@ -0,0 +1,45 @@
"""add tag_groups and tag_group_tags
Revision ID: e5c3f9a2b1d6
Revises: d4b2e8f1a3c7
Create Date: 2026-02-08
"""
from alembic import op
import sqlalchemy as sa
revision = 'e5c3f9a2b1d6'
down_revision = 'd4b2e8f1a3c7'
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
'tag_groups',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('slug', sa.String(length=191), nullable=False),
sa.Column('feature_image', sa.Text(), nullable=True),
sa.Column('colour', sa.String(length=32), nullable=True),
sa.Column('sort_order', sa.Integer(), nullable=False, server_default='0'),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('slug'),
)
op.create_table(
'tag_group_tags',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('tag_group_id', sa.Integer(), nullable=False),
sa.Column('tag_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['tag_group_id'], ['tag_groups.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('tag_group_id', 'tag_id', name='uq_tag_group_tag'),
)
def downgrade() -> None:
op.drop_table('tag_group_tags')
op.drop_table('tag_groups')

View File

@@ -0,0 +1,47 @@
"""add tickets table
Revision ID: f6d4a1b2c3e7
Revises: e5c3f9a2b1d6
Create Date: 2026-02-09
"""
from alembic import op
import sqlalchemy as sa
revision = 'f6d4a1b2c3e7'
down_revision = 'e5c3f9a2b1d6'
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
'tickets',
sa.Column('id', sa.Integer(), primary_key=True),
sa.Column('entry_id', sa.Integer(), sa.ForeignKey('calendar_entries.id', ondelete='CASCADE'), nullable=False),
sa.Column('ticket_type_id', sa.Integer(), sa.ForeignKey('ticket_types.id', ondelete='SET NULL'), nullable=True),
sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
sa.Column('session_id', sa.String(64), nullable=True),
sa.Column('order_id', sa.Integer(), sa.ForeignKey('orders.id', ondelete='SET NULL'), nullable=True),
sa.Column('code', sa.String(64), unique=True, nullable=False),
sa.Column('state', sa.String(20), nullable=False, server_default=sa.text("'reserved'")),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.Column('checked_in_at', sa.DateTime(timezone=True), nullable=True),
)
op.create_index('ix_tickets_entry_id', 'tickets', ['entry_id'])
op.create_index('ix_tickets_ticket_type_id', 'tickets', ['ticket_type_id'])
op.create_index('ix_tickets_user_id', 'tickets', ['user_id'])
op.create_index('ix_tickets_session_id', 'tickets', ['session_id'])
op.create_index('ix_tickets_order_id', 'tickets', ['order_id'])
op.create_index('ix_tickets_code', 'tickets', ['code'], unique=True)
op.create_index('ix_tickets_state', 'tickets', ['state'])
def downgrade() -> None:
op.drop_index('ix_tickets_state', 'tickets')
op.drop_index('ix_tickets_code', 'tickets')
op.drop_index('ix_tickets_order_id', 'tickets')
op.drop_index('ix_tickets_session_id', 'tickets')
op.drop_index('ix_tickets_user_id', 'tickets')
op.drop_index('ix_tickets_ticket_type_id', 'tickets')
op.drop_index('ix_tickets_entry_id', 'tickets')
op.drop_table('tickets')

84
config.py Normal file
View File

@@ -0,0 +1,84 @@
# suma_browser/config.py
from __future__ import annotations
import asyncio
import os
from types import MappingProxyType
from typing import Any, Optional
import copy
import yaml
# Default config path (override with APP_CONFIG_FILE)
# NOTE(review): resolved against os.getcwd() at import time, so the working
# directory at process start determines which file is found -- confirm this
# matches how the services are launched.
_DEFAULT_CONFIG_PATH = os.environ.get(
    "APP_CONFIG_FILE",
    os.path.join(os.getcwd(), "config/app-config.yaml"),
)
# Module state, populated once by init_config() and then treated as immutable.
_init_lock = asyncio.Lock()
_data_frozen: Any = None  # read-only view (mappingproxy / tuples / frozensets)
_data_plain: Any = None  # plain builtins for pretty-print / logging
# ---------------- utils ----------------
def _freeze(obj: Any) -> Any:
"""Deep-freeze containers to read-only equivalents."""
if isinstance(obj, dict):
# freeze children first, then wrap dict in mappingproxy
return MappingProxyType({k: _freeze(v) for k, v in obj.items()})
if isinstance(obj, list):
return tuple(_freeze(v) for v in obj)
if isinstance(obj, set):
return frozenset(_freeze(v) for v in obj)
if isinstance(obj, tuple):
return tuple(_freeze(v) for v in obj)
return obj
# ---------------- API ----------------
async def init_config(path: Optional[str] = None, *, force: bool = False) -> None:
    """Load the YAML config and cache a frozen plus a plain copy.

    Idempotent; pass force=True to reload.

    Args:
        path: YAML file to load; defaults to _DEFAULT_CONFIG_PATH.
        force: reload even if a config is already cached.

    Raises:
        FileNotFoundError: if the config file does not exist.
    """
    global _data_frozen, _data_plain
    # Fast path without the lock; safe under asyncio's single-threaded
    # scheduling since the read cannot interleave with a partial write.
    if _data_frozen is not None and not force:
        return
    async with _init_lock:
        # Re-check inside the lock: another task may have finished loading
        # while this one was waiting.
        if _data_frozen is not None and not force:
            return
        cfg_path = path or _DEFAULT_CONFIG_PATH
        if not os.path.exists(cfg_path):
            raise FileNotFoundError(f"Config file not found: {cfg_path}")
        with open(cfg_path, "r", encoding="utf-8") as f:
            raw = yaml.safe_load(f)  # whatever the YAML root is
        # store plain as loaded; store frozen for normal use
        _data_plain = raw
        _data_frozen = _freeze(raw)
def config() -> Any:
    """Return the frozen, read-only config; init_config() must have run."""
    frozen = _data_frozen
    if frozen is None:
        raise RuntimeError("init_config() has not been awaited yet.")
    return frozen
def as_plain() -> Any:
    """Return a deep copy of the plain config, safe for callers to mutate."""
    plain = _data_plain
    if plain is None:
        raise RuntimeError("init_config() has not been awaited yet.")
    return copy.deepcopy(plain)
def pretty() -> str:
    """Render the plain config as a YAML string (no mappingproxy noise)."""
    plain = _data_plain
    if plain is None:
        raise RuntimeError("init_config() has not been awaited yet.")
    return yaml.safe_dump(plain, sort_keys=False, allow_unicode=True)

83
config/app-config.yaml Normal file
View File

@@ -0,0 +1,83 @@
# App-wide settings
base_host: "wholesale.suma.coop"
base_login: https://wholesale.suma.coop/customer/account/login/
base_url: https://wholesale.suma.coop/
title: Rose Ash
coop_root: /market
coop_title: Market
blog_root: /
blog_title: all the news
cart_root: /cart
app_urls:
coop: "http://localhost:8000"
market: "http://localhost:8001"
cart: "http://localhost:8002"
events: "http://localhost:8003"
cache:
  fs_root: _snapshot # snapshot cache dir (relative to the working directory unless given as an absolute path)
categories:
allow:
Basics: basics
Branded Goods: branded-goods
Chilled: chilled
Frozen: frozen
Non-foods: non-foods
Supplements: supplements
Christmas: christmas
slugs:
skip:
- ""
- customer
- account
- checkout
- wishlist
- sales
- contact
- privacy-policy
- terms-and-conditions
- delivery
- catalogsearch
- quickorder
- apply
- search
- static
- media
section-titles:
- ingredients
- allergy information
- allergens
- nutritional information
- nutrition
- storage
- directions
- preparation
- serving suggestions
- origin
- country of origin
- recycling
- general information
- additional information
- a note about prices
blacklist:
category:
- branded-goods/alcoholic-drinks
- branded-goods/beers
- branded-goods/wines
- branded-goods/ciders
product:
- list-price-suma-current-suma-price-list-each-bk012-2-html
- ---just-lem-just-wholefoods-jelly-crystals-lemon-12-x-85g-vf067-2-html
product-details:
- General Information
- A Note About Prices
# SumUp payment settings (fill these in for live usage)
sumup:
merchant_code: "ME4J6100"
currency: "GBP"
# Name of the environment variable that holds your SumUp API key
api_key_env: "SUMUP_API_KEY"
webhook_secret: "CHANGE_ME_TO_A_LONG_RANDOM_STRING"
checkout_reference_prefix: 'dev-'

0
db/__init__.py Normal file
View File

4
db/base.py Normal file
View File

@@ -0,0 +1,4 @@
from __future__ import annotations

from sqlalchemy.orm import declarative_base

# Single shared declarative base: every model module must use this Base so
# all tables register on one MetaData (needed for Alembic autogenerate).
Base = declarative_base()

99
db/session.py Normal file
View File

@@ -0,0 +1,99 @@
from __future__ import annotations
import os
from contextlib import asynccontextmanager
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from quart import Quart, g
# Async DB URL: prefer the explicit async URL, fall back to DATABASE_URL,
# then a local-dev default.
DATABASE_URL = (
    os.getenv("DATABASE_URL_ASYNC")
    or os.getenv("DATABASE_URL")
    or "postgresql+asyncpg://localhost/coop"
)

# Pool sizing is env-tunable. The previous pool_size=-1 made the pool
# effectively unbounded, which can exhaust Postgres connection slots under
# load; bounded defaults (5 persistent + 10 overflow) are the safe baseline.
_engine = create_async_engine(
    DATABASE_URL,
    future=True,
    echo=False,
    pool_pre_ping=True,  # recycle dead connections transparently
    pool_size=int(os.getenv("DB_POOL_SIZE", "5")),
    max_overflow=int(os.getenv("DB_MAX_OVERFLOW", "10")),
)

# Session factory; expire_on_commit=False keeps loaded attributes usable
# after commit (important for async request handlers).
_Session = async_sessionmaker(
    bind=_engine,
    class_=AsyncSession,
    expire_on_commit=False,
)
@asynccontextmanager
async def get_session():
    """Yield a brand-new AsyncSession and guarantee it is closed afterwards."""
    session = _Session()
    try:
        yield session
    finally:
        # Close unconditionally, even if the body raised.
        await session.close()
def register_db(app: Quart):
    """Attach per-request DB session/transaction lifecycle handlers to *app*.

    Lifecycle per request:
      * before_request: open a session (g.s) and begin a transaction (g.tx).
      * after_request: commit on 2xx/3xx responses when no error was flagged;
        a failed commit is rolled back and surfaced as a 500.
      * teardown_request: roll back anything still open, then close.
      * errorhandler(Exception): flag the error so after_request skips commit.
    """
    import logging
    logger = logging.getLogger(__name__)

    @app.before_request
    async def open_session():
        g.s = _Session()
        g.tx = await g.s.begin()
        g.had_error = False

    @app.after_request
    async def maybe_commit(response):
        # Runs before any bytes are sent, so a failed commit can still be
        # reported to the client as a 500 instead of a half-sent response.
        if not g.had_error and 200 <= response.status_code < 400:
            try:
                if hasattr(g, "tx"):
                    await g.tx.commit()
            except Exception:
                logger.exception("commit failed")
                if hasattr(g, "tx"):
                    await g.tx.rollback()
                from quart import make_response
                return await make_response("Commit failed", 500)
        return response

    @app.teardown_request
    async def finish(exc):
        try:
            # If an exception occurred OR we never committed (still in a
            # transaction), roll back before closing.
            if hasattr(g, "s"):
                if exc is not None or g.s.in_transaction():
                    if hasattr(g, "tx"):
                        await g.tx.rollback()
        finally:
            if hasattr(g, "s"):
                await g.s.close()

    @app.errorhandler(Exception)
    async def mark_error(e):
        # Flag the error for maybe_commit, then re-raise so Quart's normal
        # error handling (500 page, logging) still runs.
        g.had_error = True
        raise

45
editor/build.mjs Normal file
View File

@@ -0,0 +1,45 @@
import * as esbuild from "esbuild";
import path from "path";
import { fileURLToPath } from "url";
// esbuild driver for the Lexical editor bundle. Emits editor.js/editor.css
// into ../static/scripts; pass --watch for incremental rebuilds and set
// NODE_ENV=production for a minified build.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const isProduction = process.env.NODE_ENV === "production";
const isWatch = process.argv.includes("--watch");

/** @type {import('esbuild').BuildOptions} */
const opts = {
  alias: {
    // Lets source import the Koenig CSS by a stable name.
    "koenig-styles": path.resolve(
      __dirname,
      "node_modules/@tryghost/koenig-lexical/dist/index.css"
    ),
  },
  entryPoints: ["src/index.jsx"],
  bundle: true,
  outdir: "../static/scripts",
  entryNames: "editor",
  format: "iife",
  target: "es2020",
  jsx: "automatic",
  minify: isProduction,
  define: {
    // React reads this at bundle time to strip dev-only code paths.
    "process.env.NODE_ENV": JSON.stringify(
      isProduction ? "production" : "development"
    ),
  },
  loader: {
    ".svg": "dataurl",
    ".woff": "file",
    ".woff2": "file",
    ".ttf": "file",
  },
  logLevel: "info",
};

if (isWatch) {
  const ctx = await esbuild.context(opts);
  await ctx.watch();
  console.log("Watching for changes...");
} else {
  await esbuild.build(opts);
}

512
editor/package-lock.json generated Normal file
View File

@@ -0,0 +1,512 @@
{
"name": "coop-lexical-editor",
"version": "2.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "coop-lexical-editor",
"version": "2.0.0",
"dependencies": {
"@tryghost/koenig-lexical": "^1.7.10",
"react": "^18.3.1",
"react-dom": "^18.3.1"
},
"devDependencies": {
"esbuild": "^0.24.0"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.2.tgz",
"integrity": "sha512-thpVCb/rhxE/BnMLQ7GReQLLN8q9qbHmI55F4489/ByVg2aQaQ6kbcLb6FHkocZzQhxc4gx0sCk0tJkKBFzDhA==",
"cpu": [
"ppc64"
],
"dev": true,
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.2.tgz",
"integrity": "sha512-tmwl4hJkCfNHwFB3nBa8z1Uy3ypZpxqxfTQOcHX+xRByyYgunVbZ9MzUUfb0RxaHIMnbHagwAxuTL+tnNM+1/Q==",
"cpu": [
"arm"
],
"dev": true,
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.2.tgz",
"integrity": "sha512-cNLgeqCqV8WxfcTIOeL4OAtSmL8JjcN6m09XIgro1Wi7cF4t/THaWEa7eL5CMoMBdjoHOTh/vwTO/o2TRXIyzg==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.2.tgz",
"integrity": "sha512-B6Q0YQDqMx9D7rvIcsXfmJfvUYLoP722bgfBlO5cGvNVb5V/+Y7nhBE3mHV9OpxBf4eAS2S68KZztiPaWq4XYw==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.2.tgz",
"integrity": "sha512-kj3AnYWc+CekmZnS5IPu9D+HWtUI49hbnyqk0FLEJDbzCIQt7hg7ucF1SQAilhtYpIujfaHr6O0UHlzzSPdOeA==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.2.tgz",
"integrity": "sha512-WeSrmwwHaPkNR5H3yYfowhZcbriGqooyu3zI/3GGpF8AyUdsrrP0X6KumITGA9WOyiJavnGZUwPGvxvwfWPHIA==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.2.tgz",
"integrity": "sha512-UN8HXjtJ0k/Mj6a9+5u6+2eZ2ERD7Edt1Q9IZiB5UZAIdPnVKDoG7mdTVGhHJIeEml60JteamR3qhsr1r8gXvg==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.2.tgz",
"integrity": "sha512-TvW7wE/89PYW+IevEJXZ5sF6gJRDY/14hyIGFXdIucxCsbRmLUcjseQu1SyTko+2idmCw94TgyaEZi9HUSOe3Q==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.2.tgz",
"integrity": "sha512-n0WRM/gWIdU29J57hJyUdIsk0WarGd6To0s+Y+LwvlC55wt+GT/OgkwoXCXvIue1i1sSNWblHEig00GBWiJgfA==",
"cpu": [
"arm"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.2.tgz",
"integrity": "sha512-7HnAD6074BW43YvvUmE/35Id9/NB7BeX5EoNkK9obndmZBUk8xmJJeU7DwmUeN7tkysslb2eSl6CTrYz6oEMQg==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.2.tgz",
"integrity": "sha512-sfv0tGPQhcZOgTKO3oBE9xpHuUqguHvSo4jl+wjnKwFpapx+vUDcawbwPNuBIAYdRAvIDBfZVvXprIj3HA+Ugw==",
"cpu": [
"ia32"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.2.tgz",
"integrity": "sha512-CN9AZr8kEndGooS35ntToZLTQLHEjtVB5n7dl8ZcTZMonJ7CCfStrYhrzF97eAecqVbVJ7APOEe18RPI4KLhwQ==",
"cpu": [
"loong64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.2.tgz",
"integrity": "sha512-iMkk7qr/wl3exJATwkISxI7kTcmHKE+BlymIAbHO8xanq/TjHaaVThFF6ipWzPHryoFsesNQJPE/3wFJw4+huw==",
"cpu": [
"mips64el"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.2.tgz",
"integrity": "sha512-shsVrgCZ57Vr2L8mm39kO5PPIb+843FStGt7sGGoqiiWYconSxwTiuswC1VJZLCjNiMLAMh34jg4VSEQb+iEbw==",
"cpu": [
"ppc64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.2.tgz",
"integrity": "sha512-4eSFWnU9Hhd68fW16GD0TINewo1L6dRrB+oLNNbYyMUAeOD2yCK5KXGK1GH4qD/kT+bTEXjsyTCiJGHPZ3eM9Q==",
"cpu": [
"riscv64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.2.tgz",
"integrity": "sha512-S0Bh0A53b0YHL2XEXC20bHLuGMOhFDO6GN4b3YjRLK//Ep3ql3erpNcPlEFed93hsQAjAQDNsvcK+hV90FubSw==",
"cpu": [
"s390x"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.2.tgz",
"integrity": "sha512-8Qi4nQcCTbLnK9WoMjdC9NiTG6/E38RNICU6sUNqK0QFxCYgoARqVqxdFmWkdonVsvGqWhmm7MO0jyTqLqwj0Q==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.24.2.tgz",
"integrity": "sha512-wuLK/VztRRpMt9zyHSazyCVdCXlpHkKm34WUyinD2lzK07FAHTq0KQvZZlXikNWkDGoT6x3TD51jKQ7gMVpopw==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.2.tgz",
"integrity": "sha512-VefFaQUc4FMmJuAxmIHgUmfNiLXY438XrL4GDNV1Y1H/RW3qow68xTwjZKfj/+Plp9NANmzbH5R40Meudu8mmw==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.2.tgz",
"integrity": "sha512-YQbi46SBct6iKnszhSvdluqDmxCJA+Pu280Av9WICNwQmMxV7nLRHZfjQzwbPs3jeWnuAhE9Jy0NrnJ12Oz+0A==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.2.tgz",
"integrity": "sha512-+iDS6zpNM6EnJyWv0bMGLWSWeXGN/HTaF/LXHXHwejGsVi+ooqDfMCCTerNFxEkM3wYVcExkeGXNqshc9iMaOA==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.2.tgz",
"integrity": "sha512-hTdsW27jcktEvpwNHJU4ZwWFGkz2zRJUz8pvddmXPtXDzVKTTINmlmga3ZzwcuMpUvLw7JkLy9QLKyGpD2Yxig==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.2.tgz",
"integrity": "sha512-LihEQ2BBKVFLOC9ZItT9iFprsE9tqjDjnbulhHoFxYQtQfai7qfluVODIYxt1PgdoyQkz23+01rzwNwYfutxUQ==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.2.tgz",
"integrity": "sha512-q+iGUwfs8tncmFC9pcnD5IvRHAzmbwQ3GPS5/ceCyHdjXubwQWI12MKWSNSMYLJMq23/IUCvJMS76PDqXe1fxA==",
"cpu": [
"ia32"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.2.tgz",
"integrity": "sha512-7VTgWzgMGvup6aSqDPLiW5zHaxYJGTO4OokMjIlrCtf+VpEL+cXKtCvg723iguPYI5oaUNdS+/V7OU2gvXVWEg==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@tryghost/koenig-lexical": {
"version": "1.7.10",
"resolved": "https://registry.npmjs.org/@tryghost/koenig-lexical/-/koenig-lexical-1.7.10.tgz",
"integrity": "sha512-6tI2kbSzZ669hQ5GxpENB8n2aDLugZDmpR/nO0GriduOZJLLN8AdDDa/S3Y8dpF5/cOGKsOxFRj3oLGRDOi6tw=="
},
"node_modules/esbuild": {
"version": "0.24.2",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.2.tgz",
"integrity": "sha512-+9egpBW8I3CD5XPe0n6BfT5fxLzxrlDzqydF3aviG+9ni1lDC/OvMHcxqEFV0+LANZG5R1bFMWfUrjVsdwxJvA==",
"dev": true,
"hasInstallScript": true,
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
"node": ">=18"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.24.2",
"@esbuild/android-arm": "0.24.2",
"@esbuild/android-arm64": "0.24.2",
"@esbuild/android-x64": "0.24.2",
"@esbuild/darwin-arm64": "0.24.2",
"@esbuild/darwin-x64": "0.24.2",
"@esbuild/freebsd-arm64": "0.24.2",
"@esbuild/freebsd-x64": "0.24.2",
"@esbuild/linux-arm": "0.24.2",
"@esbuild/linux-arm64": "0.24.2",
"@esbuild/linux-ia32": "0.24.2",
"@esbuild/linux-loong64": "0.24.2",
"@esbuild/linux-mips64el": "0.24.2",
"@esbuild/linux-ppc64": "0.24.2",
"@esbuild/linux-riscv64": "0.24.2",
"@esbuild/linux-s390x": "0.24.2",
"@esbuild/linux-x64": "0.24.2",
"@esbuild/netbsd-arm64": "0.24.2",
"@esbuild/netbsd-x64": "0.24.2",
"@esbuild/openbsd-arm64": "0.24.2",
"@esbuild/openbsd-x64": "0.24.2",
"@esbuild/sunos-x64": "0.24.2",
"@esbuild/win32-arm64": "0.24.2",
"@esbuild/win32-ia32": "0.24.2",
"@esbuild/win32-x64": "0.24.2"
}
},
"node_modules/js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
},
"node_modules/loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"dependencies": {
"js-tokens": "^3.0.0 || ^4.0.0"
},
"bin": {
"loose-envify": "cli.js"
}
},
"node_modules/react": {
"version": "18.3.1",
"resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz",
"integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==",
"dependencies": {
"loose-envify": "^1.1.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/react-dom": {
"version": "18.3.1",
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz",
"integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==",
"dependencies": {
"loose-envify": "^1.1.0",
"scheduler": "^0.23.2"
},
"peerDependencies": {
"react": "^18.3.1"
}
},
"node_modules/scheduler": {
"version": "0.23.2",
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz",
"integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==",
"dependencies": {
"loose-envify": "^1.1.0"
}
}
}
}

18
editor/package.json Normal file
View File

@@ -0,0 +1,18 @@
{
"name": "coop-lexical-editor",
"version": "2.0.0",
"private": true,
"scripts": {
"build": "node build.mjs",
"build:prod": "NODE_ENV=production node build.mjs",
"dev": "node build.mjs --watch"
},
"dependencies": {
"@tryghost/koenig-lexical": "^1.7.10",
"react": "^18.3.1",
"react-dom": "^18.3.1"
},
"devDependencies": {
"esbuild": "^0.24.0"
}
}

81
editor/src/Editor.jsx Normal file
View File

@@ -0,0 +1,81 @@
import { useMemo, useState, useEffect, useCallback } from "react";
import { KoenigComposer, KoenigEditor, CardMenuPlugin } from "@tryghost/koenig-lexical";
import "koenig-styles";
import makeFileUploader from "./useFileUpload";
// React wrapper around the Koenig (Ghost Lexical) editor.
//
// Props:
//   initialState   - serialised Lexical JSON to seed the editor, or null
//   onChange       - called with the serialised JSON string on every edit
//   csrfToken      - sent as X-CSRFToken on all editor API calls
//   uploadUrls     - { image, media, file } upload endpoints (see makeFileUploader)
//   oembedUrl      - oEmbed proxy endpoint for the embed card
//   unsplashApiKey - enables the Unsplash picker when set
//   snippetsUrl    - snippets list/create endpoint; snippets disabled when empty
export default function Editor({ initialState, onChange, csrfToken, uploadUrls, oembedUrl, unsplashApiKey, snippetsUrl }) {
    // Rebuilt only when the token or upload endpoints change.
    const fileUploader = useMemo(() => makeFileUploader(csrfToken, uploadUrls), [csrfToken, uploadUrls]);
    const [snippets, setSnippets] = useState([]);
    // Load existing snippets once per snippetsUrl; failures leave the list empty.
    useEffect(() => {
        if (!snippetsUrl) return;
        fetch(snippetsUrl, { headers: { "X-CSRFToken": csrfToken || "" } })
            .then((r) => r.ok ? r.json() : [])
            .then(setSnippets)
            .catch(() => {});
    }, [snippetsUrl, csrfToken]);
    // POST a new snippet; on success, replace an existing snippet with the
    // same name or insert the new one keeping the list name-sorted.
    const createSnippet = useCallback(async ({ name, value }) => {
        if (!snippetsUrl) return;
        const resp = await fetch(snippetsUrl, {
            method: "POST",
            headers: {
                "Content-Type": "application/json",
                "X-CSRFToken": csrfToken || "",
            },
            // value arrives as a Lexical state object; the API stores it as a string.
            body: JSON.stringify({ name, value: JSON.stringify(value) }),
        });
        if (!resp.ok) return;
        const created = await resp.json();
        setSnippets((prev) => {
            const idx = prev.findIndex((s) => s.name === created.name);
            if (idx >= 0) {
                const next = [...prev];
                next[idx] = created;
                return next;
            }
            return [...prev, created].sort((a, b) => a.name.localeCompare(b.name));
        });
    }, [snippetsUrl, csrfToken]);
    // Koenig card configuration: embeds, Unsplash, snippets.
    const cardConfig = useMemo(() => ({
        // Proxy oEmbed lookups through our backend endpoint.
        fetchEmbed: async (url, { type } = {}) => {
            const params = new URLSearchParams({ url });
            if (type) params.set("type", type);
            const resp = await fetch(`${oembedUrl}?${params}`, {
                headers: { "X-CSRFToken": csrfToken || "" },
            });
            if (!resp.ok) return {};
            return resp.json();
        },
        unsplash: unsplashApiKey
            ? { defaultHeaders: { Authorization: `Client-ID ${unsplashApiKey}` } }
            : false,
        membersEnabled: true,
        // Snippet values may be stored as strings; Koenig wants parsed objects.
        snippets: snippets.map((s) => ({
            id: s.id,
            name: s.name,
            value: typeof s.value === "string" ? JSON.parse(s.value) : s.value,
        })),
        createSnippet,
    }), [oembedUrl, csrfToken, unsplashApiKey, snippets, createSnippet]);
    return (
        <KoenigComposer
            initialEditorState={initialState || undefined}
            fileUploader={fileUploader}
            cardConfig={cardConfig}
        >
            <KoenigEditor
                onChange={(serializedState) => {
                    if (onChange) {
                        onChange(JSON.stringify(serializedState));
                    }
                }}
            >
                <CardMenuPlugin />
            </KoenigEditor>
        </KoenigComposer>
    );
}

49
editor/src/index.jsx Normal file
View File

@@ -0,0 +1,49 @@
import React from "react";
import { createRoot } from "react-dom/client";
import Editor from "./Editor";
/**
* Mount the Koenig editor into the given DOM element.
*
* @param {string} elementId - ID of the container element
* @param {object} opts
* @param {string} [opts.initialJson] - Serialised Lexical JSON (from Ghost)
* @param {string} [opts.csrfToken] - CSRF token for API calls
* @param {object} [opts.uploadUrls] - { image, media, file } upload endpoint URLs
* @param {string} [opts.oembedUrl] - oEmbed proxy endpoint URL
* @param {string} [opts.unsplashApiKey] - Unsplash API key for image search
*/
window.mountEditor = function mountEditor(elementId, opts = {}) {
const container = document.getElementById(elementId);
if (!container) {
console.error(`[editor] Element #${elementId} not found`);
return;
}
let currentJson = opts.initialJson || null;
function handleChange(json) {
currentJson = json;
// Stash the latest JSON in a hidden input for form submission
const hidden = document.getElementById("lexical-json-input");
if (hidden) hidden.value = json;
}
const root = createRoot(container);
root.render(
<Editor
initialState={opts.initialJson || null}
onChange={handleChange}
csrfToken={opts.csrfToken || ""}
uploadUrls={opts.uploadUrls || ""}
oembedUrl={opts.oembedUrl || "/editor-api/oembed/"}
unsplashApiKey={opts.unsplashApiKey || ""}
snippetsUrl={opts.snippetsUrl || ""}
/>
);
// Return handle for programmatic access
return {
getJson: () => currentJson,
};
};

View File

@@ -0,0 +1,99 @@
import { useState, useCallback, useRef } from "react";
/**
* Koenig expects `fileUploader.useFileUpload(type)` — a React hook it
* calls internally for each card type ("image", "audio", "file", etc.).
*
* `makeFileUploader(csrfToken, uploadUrls)` returns the object Koenig wants:
* { useFileUpload: (type) => { upload, progress, isLoading, errors, filesNumber } }
*
* `uploadUrls` is an object: { image, media, file }
* For backwards compat, a plain string is treated as the image URL.
*/
// Per-card-type routing: which upload URL to POST to (urlKey into the
// `uploadUrls` object) and which key of the JSON response holds the
// uploaded-file records (Ghost-style { images|media|files: [{url}] }).
const URL_KEY_MAP = {
  image: { urlKey: "image", responseKey: "images" },
  audio: { urlKey: "media", responseKey: "media" },
  video: { urlKey: "media", responseKey: "media" },
  mediaThumbnail: { urlKey: "image", responseKey: "images" },
  file: { urlKey: "file", responseKey: "files" },
};

// Build the `fileUploader` object Koenig expects (see module comment above).
// Note: `useFileUpload` is a real React hook that Koenig invokes per card,
// so its useState/useRef calls must keep a stable order.
export default function makeFileUploader(csrfToken, uploadUrls) {
  // Normalise: string → object with all keys pointing to same URL
  const urls =
    typeof uploadUrls === "string"
      ? { image: uploadUrls, media: uploadUrls, file: uploadUrls }
      : uploadUrls || {};
  return {
    // Accepted MIME types per card type; empty list = no restriction.
    fileTypes: {
      image: { mimeTypes: ['image/jpeg', 'image/png', 'image/gif', 'image/webp', 'image/svg+xml'] },
      audio: { mimeTypes: ['audio/mpeg', 'audio/ogg', 'audio/wav', 'audio/mp4', 'audio/aac'] },
      video: { mimeTypes: ['video/mp4', 'video/webm', 'video/ogg'] },
      mediaThumbnail: { mimeTypes: ['image/jpeg', 'image/png', 'image/gif', 'image/webp'] },
      file: { mimeTypes: [] },
    },
    useFileUpload(type) {
      const mapping = URL_KEY_MAP[type] || URL_KEY_MAP.image;
      const [progress, setProgress] = useState(0);
      const [isLoading, setIsLoading] = useState(false);
      const [errors, setErrors] = useState([]);
      const [filesNumber, setFilesNumber] = useState(0);
      // Refs freeze the token/URL for this hook instance so the upload
      // callback below can use an empty dependency array.
      const csrfRef = useRef(csrfToken);
      const urlRef = useRef(urls[mapping.urlKey] || urls.image || "/editor-api/images/upload/");
      const responseKeyRef = useRef(mapping.responseKey);
      // Upload each file sequentially; failures are collected into `errors`
      // rather than aborting the batch. Returns [{url, fileName}] successes.
      const upload = useCallback(async (files) => {
        const fileList = Array.from(files);
        setFilesNumber(fileList.length);
        setIsLoading(true);
        setErrors([]);
        setProgress(0);
        const results = [];
        for (let i = 0; i < fileList.length; i++) {
          const file = fileList[i];
          const formData = new FormData();
          formData.append("file", file);
          try {
            const resp = await fetch(urlRef.current, {
              method: "POST",
              body: formData,
              headers: {
                "X-CSRFToken": csrfRef.current || "",
              },
            });
            if (!resp.ok) {
              // Prefer the server's error message when the body is JSON.
              const err = await resp.json().catch(() => ({}));
              const msg =
                err.errors?.[0]?.message || `Upload failed (${resp.status})`;
              setErrors((prev) => [
                ...prev,
                { message: msg, fileName: file.name },
              ]);
              continue;
            }
            const data = await resp.json();
            const fileUrl = data[responseKeyRef.current]?.[0]?.url;
            if (fileUrl) {
              results.push({ url: fileUrl, fileName: file.name });
            }
          } catch (e) {
            setErrors((prev) => [
              ...prev,
              { message: e.message, fileName: file.name },
            ]);
          }
          // Progress is per-file (whole-number percent), not per-byte.
          setProgress(Math.round(((i + 1) / fileList.length) * 100));
        }
        setIsLoading(false);
        return results;
      }, []);
      return { upload, progress, isLoading, errors, filesNumber };
    },
  };
}

21
models/__init__.py Normal file
View File

@@ -0,0 +1,21 @@
from .kv import KV
from .user import User
from .magic_link import MagicLink
from .market import ProductLike
from .ghost_content import Author, Tag, Post, PostAuthor, PostTag, PostLike
from .menu_item import MenuItem
from .ghost_membership_entities import (
GhostLabel, UserLabel,
GhostNewsletter, UserNewsletter,
GhostTier, GhostSubscription,
)
from .calendars import Calendar, CalendarEntry, Ticket
from .order import Order, OrderItem
from .snippet import Snippet
from .tag_group import TagGroup, TagGroupTag

304
models/calendars.py Normal file
View File

@@ -0,0 +1,304 @@
from __future__ import annotations
from sqlalchemy import (
Column, Integer, String, DateTime, ForeignKey, CheckConstraint,
Index, text, Text, Boolean, Time, Numeric
)
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
# Adjust this import to match where your Base lives
from db.base import Base
from datetime import datetime, timezone
def utcnow() -> datetime:
    """Return the current moment as a timezone-aware UTC datetime."""
    return datetime.now(tz=timezone.utc)
class Calendar(Base):
    """A bookable calendar attached to a blog post.

    Soft-deleted via ``deleted_at``; uniqueness of (post, slug) is only
    enforced among non-deleted rows (see the partial index below).
    """
    __tablename__ = "calendars"
    id = Column(Integer, primary_key=True)
    # Owning post; calendar rows are removed with their post.
    post_id = Column(Integer, ForeignKey("posts.id", ondelete="CASCADE"), nullable=False)
    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)
    slug = Column(String(255), nullable=False)
    # NOTE(review): updated_at has no onupdate hook — callers appear
    # responsible for bumping it; confirm before relying on it.
    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)
    # relationships
    post = relationship("Post", back_populates="calendars")
    # Child entries/slots are deleted with the calendar (DB-level CASCADE
    # via passive_deletes plus ORM delete-orphan).
    entries = relationship(
        "CalendarEntry",
        back_populates="calendar",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="CalendarEntry.start_at",
    )
    slots = relationship(
        "CalendarSlot",
        back_populates="calendar",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="CalendarSlot.time_start",
    )
    # Indexes / constraints (match Alembic migration)
    __table_args__ = (
        # Helpful lookups
        Index("ix_calendars_post_id", "post_id"),
        Index("ix_calendars_name", "name"),
        Index("ix_calendars_slug", "slug"),
        # Soft-delete-aware uniqueness (PostgreSQL):
        # one active calendar per post/slug (case-insensitive)
        Index(
            "ux_calendars_post_slug_active",
            "post_id",
            func.lower(slug),
            unique=True,
            postgresql_where=text("deleted_at IS NULL"),
        ),
    )
class CalendarEntry(Base):
    """A single booking/event on a calendar.

    An entry may be owned by a logged-in user (``user_id``) or an anonymous
    session (``session_id``), may be linked to an order and to the slot it
    was booked within, and can optionally sell tickets.
    """
    __tablename__ = "calendar_entries"
    id = Column(Integer, primary_key=True)
    calendar_id = Column(
        Integer,
        ForeignKey("calendars.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Ownership: either a user or an anonymous browser session.
    user_id = Column(Integer, ForeignKey("users.id"), nullable=True, index=True)
    session_id = Column(String(64), nullable=True, index=True)
    # Order link survives order deletion (SET NULL) so the entry remains.
    order_id = Column(Integer, ForeignKey("orders.id", ondelete="SET NULL"), nullable=True, index=True)
    # Slot the entry was booked against, if any.
    slot_id = Column(Integer, ForeignKey("calendar_slots.id", ondelete="SET NULL"), nullable=True, index=True)
    # details
    name = Column(String(255), nullable=False)
    start_at = Column(DateTime(timezone=True), nullable=False, index=True)
    end_at = Column(DateTime(timezone=True), nullable=True)
    # Booking lifecycle state; defaults to 'pending' at the DB level.
    state = Column(
        String(20),
        nullable=False,
        server_default=text("'pending'"),
    )
    # Booking cost; DB-level default of 10 currency units.
    cost = Column(Numeric(10, 2), nullable=False, server_default=text("10"))
    # Ticket configuration
    ticket_price = Column(Numeric(10, 2), nullable=True)  # Price per ticket (NULL = no tickets)
    ticket_count = Column(Integer, nullable=True)  # Total available tickets (NULL = unlimited)
    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)
    __table_args__ = (
        # Open-ended entries (NULL end) are allowed; otherwise end >= start.
        CheckConstraint(
            "(end_at IS NULL) OR (end_at >= start_at)",
            name="ck_calendar_entries_end_after_start",
        ),
        Index("ix_calendar_entries_name", "name"),
        Index("ix_calendar_entries_start_at", "start_at"),
        Index("ix_calendar_entries_user_id", "user_id"),
        Index("ix_calendar_entries_session_id", "session_id"),
        Index("ix_calendar_entries_state", "state"),
        Index("ix_calendar_entries_order_id", "order_id"),
        Index("ix_calendar_entries_slot_id", "slot_id"),
    )
    calendar = relationship("Calendar", back_populates="entries")
    slot = relationship("CalendarSlot", back_populates="entries", lazy="selectin")
    # Optional, but handy:
    order = relationship("Order", back_populates="calendar_entries", lazy="selectin")
    posts = relationship("CalendarEntryPost", back_populates="entry", cascade="all, delete-orphan")
    ticket_types = relationship(
        "TicketType",
        back_populates="entry",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="TicketType.name",
    )
# Ordered (attribute-name, display-label) pairs for the boolean day-of-week
# columns on CalendarSlot; consumed by CalendarSlot.days_display.
DAY_LABELS = [
    ("mon", "Mon"),
    ("tue", "Tue"),
    ("wed", "Wed"),
    ("thu", "Thu"),
    ("fri", "Fri"),
    ("sat", "Sat"),
    ("sun", "Sun"),
]
class CalendarSlot(Base):
    """A recurring weekly availability band on a calendar.

    Days of the week are modelled as individual boolean columns; the slot
    applies between ``time_start`` and ``time_end`` on each enabled day.
    """
    __tablename__ = "calendar_slots"
    id = Column(Integer, primary_key=True)
    calendar_id = Column(
        Integer,
        ForeignKey("calendars.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)
    # One boolean per weekday; see DAY_LABELS for display ordering.
    mon = Column(Boolean, nullable=False, default=False)
    tue = Column(Boolean, nullable=False, default=False)
    wed = Column(Boolean, nullable=False, default=False)
    thu = Column(Boolean, nullable=False, default=False)
    fri = Column(Boolean, nullable=False, default=False)
    sat = Column(Boolean, nullable=False, default=False)
    sun = Column(Boolean, nullable=False, default=False)
    # Whether bookings can be made at flexible times within this band
    # (defaulted both ORM-side and DB-side to False).
    flexible = Column(
        Boolean,
        nullable=False,
        server_default=text("false"),
        default=False,
    )
    @property
    def days_display(self) -> str:
        """Human-readable list of enabled days, e.g. "Mon, Wed" or "All"."""
        days = [label for attr, label in DAY_LABELS if getattr(self, attr)]
        if len(days) == len(DAY_LABELS):
            # all days selected
            return "All"  # or "All days" if you prefer
        return ", ".join(days) if days else ""
    # Naive (wall-clock) times: the band is local to the calendar's day.
    time_start = Column(Time(timezone=False), nullable=False)
    time_end = Column(Time(timezone=False), nullable=False)
    # Optional per-slot cost override; NULL means no slot-specific price.
    cost = Column(Numeric(10, 2), nullable=True)
    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)
    __table_args__ = (
        CheckConstraint(
            "(time_end > time_start)",
            name="ck_calendar_slots_time_end_after_start",
        ),
        Index("ix_calendar_slots_calendar_id", "calendar_id"),
        Index("ix_calendar_slots_time_start", "time_start"),
    )
    calendar = relationship("Calendar", back_populates="slots")
    entries = relationship("CalendarEntry", back_populates="slot")
class TicketType(Base):
    """A named ticket tier for a calendar entry (e.g. Adult / Child).

    ``count`` is the number of tickets available in this tier; ``cost``
    is the price per ticket.
    """
    __tablename__ = "ticket_types"
    id = Column(Integer, primary_key=True)
    entry_id = Column(
        Integer,
        ForeignKey("calendar_entries.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    name = Column(String(255), nullable=False)
    cost = Column(Numeric(10, 2), nullable=False)
    count = Column(Integer, nullable=False)
    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)
    __table_args__ = (
        Index("ix_ticket_types_entry_id", "entry_id"),
        Index("ix_ticket_types_name", "name"),
    )
    entry = relationship("CalendarEntry", back_populates="ticket_types")
class Ticket(Base):
    """An individual issued ticket for a calendar entry.

    Holds a unique ``code`` (QR/barcode value) and a lifecycle ``state``;
    ownership mirrors CalendarEntry (user or anonymous session), with an
    optional link back to the order that purchased it.
    """
    __tablename__ = "tickets"
    id = Column(Integer, primary_key=True)
    entry_id = Column(
        Integer,
        ForeignKey("calendar_entries.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Tier survives tier deletion (SET NULL) so issued tickets remain valid.
    ticket_type_id = Column(
        Integer,
        ForeignKey("ticket_types.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    user_id = Column(Integer, ForeignKey("users.id"), nullable=True, index=True)
    session_id = Column(String(64), nullable=True, index=True)
    order_id = Column(
        Integer,
        ForeignKey("orders.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    code = Column(String(64), unique=True, nullable=False)  # QR/barcode value
    state = Column(
        String(20),
        nullable=False,
        server_default=text("'reserved'"),
    )  # reserved, confirmed, checked_in, cancelled
    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    # Set when the ticket is scanned/redeemed; NULL until then.
    checked_in_at = Column(DateTime(timezone=True), nullable=True)
    __table_args__ = (
        Index("ix_tickets_entry_id", "entry_id"),
        Index("ix_tickets_ticket_type_id", "ticket_type_id"),
        Index("ix_tickets_user_id", "user_id"),
        Index("ix_tickets_session_id", "session_id"),
        Index("ix_tickets_order_id", "order_id"),
        Index("ix_tickets_code", "code", unique=True),
        Index("ix_tickets_state", "state"),
    )
    # backref (not back_populates): adds .tickets to the related classes.
    entry = relationship("CalendarEntry", backref="tickets")
    ticket_type = relationship("TicketType", backref="tickets")
    order = relationship("Order", backref="tickets")
class CalendarEntryPost(Base):
    """Association row linking a calendar entry to a blog post.

    Soft-deletable (``deleted_at``); rows are removed when either side
    is deleted (DB-level CASCADE on both foreign keys).
    """
    __tablename__ = "calendar_entry_posts"
    id = Column(Integer, primary_key=True, autoincrement=True)
    entry_id = Column(Integer, ForeignKey("calendar_entries.id", ondelete="CASCADE"), nullable=False)
    post_id = Column(Integer, ForeignKey("posts.id", ondelete="CASCADE"), nullable=False)
    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)
    __table_args__ = (
        Index("ix_entry_posts_entry_id", "entry_id"),
        Index("ix_entry_posts_post_id", "post_id"),
    )
    entry = relationship("CalendarEntry", back_populates="posts")
    post = relationship("Post", back_populates="calendar_entries")
# Public API of this module.
__all__ = ["Calendar", "CalendarEntry", "CalendarSlot", "TicketType", "Ticket", "CalendarEntryPost"]

70
models/cart_item.py Normal file
View File

@@ -0,0 +1,70 @@
from __future__ import annotations
from datetime import datetime
from sqlalchemy import Integer, String, DateTime, ForeignKey, func, Index
from sqlalchemy.orm import Mapped, mapped_column, relationship
from db.base import Base # you already import Base in app.py
# from .user import User # only if you normally import it here
# from .coop import Product # if not already in this module
from .market import Product
from .user import User
class CartItem(Base):
    """A product line in a shopping cart.

    A cart belongs to either a logged-in user (``user_id``) or an anonymous
    browser session (``session_id``); rows reference the product by id and
    carry a quantity. Soft-deleted via ``deleted_at``.
    """
    __tablename__ = "cart_items"
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Either a logged-in user OR an anonymous session
    user_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=True,
    )
    session_id: Mapped[str | None] = mapped_column(
        String(128),
        nullable=True,
    )
    # IMPORTANT: link to product *id*, not slug
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"),
        nullable=False,
    )
    # Defaults to 1 both ORM-side and DB-side.
    quantity: Mapped[int] = mapped_column(
        Integer,
        nullable=False,
        default=1,
        server_default="1",
    )
    # Timestamps are set by the database (func.now()), not the ORM.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    deleted_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
    # Relationships
    product: Mapped["Product"] = relationship(
        "Product",
        back_populates="cart_items",
    )
    user: Mapped["User | None"] = relationship("User", back_populates="cart_items")
    # Composite lookup indexes for the two ownership modes.
    __table_args__ = (
        Index("ix_cart_items_user_product", "user_id", "product_id"),
        Index("ix_cart_items_session_product", "session_id", "product_id"),
    )

239
models/ghost_content.py Normal file
View File

@@ -0,0 +1,239 @@
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy import (
Integer,
String,
Text,
Boolean,
DateTime,
ForeignKey,
Column,
func,
)
from db.base import Base # whatever your Base is
# from .author import Author # make sure imports resolve
# from ..app.blog.calendars.model import Calendar
class Tag(Base):
    """A Ghost CMS tag mirrored locally (rows keyed by the upstream ghost_id).

    NOTE(review): created_at/updated_at are nullable with no defaults —
    presumably copied verbatim from the Ghost payload; confirm at the sync
    site.
    """
    __tablename__ = "tags"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Upstream Ghost identifier; unique sync key.
    ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False)
    slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text())
    visibility: Mapped[str] = mapped_column(String(32), default="public", nullable=False)
    feature_image: Mapped[Optional[str]] = mapped_column(Text())
    meta_title: Mapped[Optional[str]] = mapped_column(String(300))
    meta_description: Mapped[Optional[str]] = mapped_column(Text())
    created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Many-to-many to Post through post_tags, ordered by PostTag.sort_order.
    posts: Mapped[List["Post"]] = relationship(
        "Post",
        secondary="post_tags",
        primaryjoin="Tag.id==post_tags.c.tag_id",
        secondaryjoin="Post.id==post_tags.c.post_id",
        back_populates="tags",
        order_by="PostTag.sort_order",
    )
class Post(Base):
    """A Ghost CMS post (or page — see is_page) mirrored locally, plus
    site-specific extras: likes, calendars, calendar entries, menu items
    and a local publish-request flag.

    NOTE(review): published_at/created_at/updated_at are nullable with no
    defaults — presumably copied verbatim from the Ghost payload; confirm
    at the sync site.
    """
    __tablename__ = "posts"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Upstream Ghost identifiers.
    ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False)
    uuid: Mapped[str] = mapped_column(String(64), unique=True, nullable=False)
    slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False)
    title: Mapped[str] = mapped_column(String(500), nullable=False)
    # Content renditions — Ghost exports several formats of the same body.
    html: Mapped[Optional[str]] = mapped_column(Text())
    plaintext: Mapped[Optional[str]] = mapped_column(Text())
    mobiledoc: Mapped[Optional[str]] = mapped_column(Text())
    lexical: Mapped[Optional[str]] = mapped_column(Text())
    feature_image: Mapped[Optional[str]] = mapped_column(Text())
    feature_image_alt: Mapped[Optional[str]] = mapped_column(Text())
    feature_image_caption: Mapped[Optional[str]] = mapped_column(Text())
    excerpt: Mapped[Optional[str]] = mapped_column(Text())
    custom_excerpt: Mapped[Optional[str]] = mapped_column(Text())
    visibility: Mapped[str] = mapped_column(String(32), default="public", nullable=False)
    status: Mapped[str] = mapped_column(String(32), default="draft", nullable=False)
    featured: Mapped[bool] = mapped_column(Boolean(), default=False, nullable=False)
    # Presumably distinguishes Ghost "pages" from posts — confirm at sync site.
    is_page: Mapped[bool] = mapped_column(Boolean(), default=False, nullable=False)
    email_only: Mapped[bool] = mapped_column(Boolean(), default=False, nullable=False)
    canonical_url: Mapped[Optional[str]] = mapped_column(Text())
    # SEO / social-card metadata.
    meta_title: Mapped[Optional[str]] = mapped_column(String(500))
    meta_description: Mapped[Optional[str]] = mapped_column(Text())
    og_image: Mapped[Optional[str]] = mapped_column(Text())
    og_title: Mapped[Optional[str]] = mapped_column(String(500))
    og_description: Mapped[Optional[str]] = mapped_column(Text())
    twitter_image: Mapped[Optional[str]] = mapped_column(Text())
    twitter_title: Mapped[Optional[str]] = mapped_column(String(500))
    twitter_description: Mapped[Optional[str]] = mapped_column(Text())
    custom_template: Mapped[Optional[str]] = mapped_column(String(191))
    reading_time: Mapped[Optional[int]] = mapped_column(Integer())
    comment_id: Mapped[Optional[str]] = mapped_column(String(191))
    published_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    # Local site user who owns the row; kept (NULLed) when the user is deleted.
    user_id: Mapped[Optional[int]] = mapped_column(
        Integer, ForeignKey("users.id", ondelete="SET NULL"), index=True
    )
    # Local publish-workflow flag (not part of the Ghost schema).
    publish_requested: Mapped[bool] = mapped_column(Boolean(), default=False, server_default="false", nullable=False)
    primary_author_id: Mapped[Optional[int]] = mapped_column(
        Integer, ForeignKey("authors.id", ondelete="SET NULL")
    )
    primary_tag_id: Mapped[Optional[int]] = mapped_column(
        Integer, ForeignKey("tags.id", ondelete="SET NULL")
    )
    # Single-valued shortcuts; explicit foreign_keys because authors/tags
    # are also reachable through the M2M relationships below.
    primary_author: Mapped[Optional["Author"]] = relationship(
        "Author", foreign_keys=[primary_author_id]
    )
    primary_tag: Mapped[Optional[Tag]] = relationship(
        "Tag", foreign_keys=[primary_tag_id]
    )
    user: Mapped[Optional["User"]] = relationship(
        "User", foreign_keys=[user_id]
    )
    # AUTHORS RELATIONSHIP (many-to-many via post_authors)
    authors: Mapped[List["Author"]] = relationship(
        "Author",
        secondary="post_authors",
        primaryjoin="Post.id==post_authors.c.post_id",
        secondaryjoin="Author.id==post_authors.c.author_id",
        back_populates="posts",
        order_by="PostAuthor.sort_order",
    )
    # TAGS RELATIONSHIP (many-to-many via post_tags)
    tags: Mapped[List[Tag]] = relationship(
        "Tag",
        secondary="post_tags",
        primaryjoin="Post.id==post_tags.c.post_id",
        secondaryjoin="Tag.id==post_tags.c.tag_id",
        back_populates="posts",
        order_by="PostTag.sort_order",
    )
    # Site extras below all cascade-delete with the post; passive_deletes
    # defers to the DB's ON DELETE CASCADE.
    calendars: Mapped[List["Calendar"]] = relationship(
        "Calendar",
        back_populates="post",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="Calendar.name",
    )
    likes: Mapped[List["PostLike"]] = relationship(
        "PostLike",
        back_populates="post",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    calendar_entries: Mapped[List["CalendarEntryPost"]] = relationship(
        "CalendarEntryPost",
        back_populates="post",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    menu_items: Mapped[List["MenuItem"]] = relationship(
        "MenuItem",
        back_populates="post",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="MenuItem.sort_order",
    )
class Author(Base):
    """A Ghost CMS author mirrored locally (keyed by the upstream ghost_id).

    NOTE(review): created_at/updated_at are nullable with no defaults —
    presumably copied verbatim from the Ghost payload; confirm at the sync
    site.
    """
    __tablename__ = "authors"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Upstream Ghost identifier; unique sync key.
    ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False)
    slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    email: Mapped[Optional[str]] = mapped_column(String(255))
    profile_image: Mapped[Optional[str]] = mapped_column(Text())
    cover_image: Mapped[Optional[str]] = mapped_column(Text())
    bio: Mapped[Optional[str]] = mapped_column(Text())
    website: Mapped[Optional[str]] = mapped_column(Text())
    location: Mapped[Optional[str]] = mapped_column(Text())
    facebook: Mapped[Optional[str]] = mapped_column(Text())
    twitter: Mapped[Optional[str]] = mapped_column(Text())
    created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Many-to-many back to Post via post_authors, ordered by sort_order.
    posts: Mapped[List[Post]] = relationship(
        "Post",
        secondary="post_authors",
        primaryjoin="Author.id==post_authors.c.author_id",
        secondaryjoin="Post.id==post_authors.c.post_id",
        back_populates="authors",
        order_by="PostAuthor.sort_order",
    )
class PostAuthor(Base):
    """Join row of the posts<->authors many-to-many; sort_order keeps the
    byline order stable."""
    __tablename__ = "post_authors"

    # Composite primary key over the two foreign keys.
    post_id: Mapped[int] = mapped_column(
        ForeignKey("posts.id", ondelete="CASCADE"), primary_key=True
    )
    author_id: Mapped[int] = mapped_column(
        ForeignKey("authors.id", ondelete="CASCADE"), primary_key=True
    )
    # Position of this author within the post's author list.
    sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
class PostTag(Base):
    """Join row of the posts<->tags many-to-many; sort_order keeps tag
    ordering stable."""
    __tablename__ = "post_tags"

    # Composite primary key over the two foreign keys.
    post_id: Mapped[int] = mapped_column(
        ForeignKey("posts.id", ondelete="CASCADE"), primary_key=True
    )
    tag_id: Mapped[int] = mapped_column(
        ForeignKey("tags.id", ondelete="CASCADE"), primary_key=True
    )
    # Position of this tag within the post's tag list.
    sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
class PostLike(Base):
    """A user's like on a post; soft-deleted via deleted_at.

    Consistency fix: id and user_id previously used legacy ``Column`` while
    the rest of this module uses SQLAlchemy 2.0 ``Mapped``/``mapped_column``;
    unified here with identical column definitions.
    """
    __tablename__ = "post_likes"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[int] = mapped_column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
    post_id: Mapped[int] = mapped_column(ForeignKey("posts.id", ondelete="CASCADE"), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # NOTE(review): no onupdate — updated_at only changes if application code
    # sets it; confirm intended.
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    post: Mapped["Post"] = relationship("Post", back_populates="likes", foreign_keys=[post_id])
    user = relationship("User", back_populates="liked_posts")

# ════ models/ghost_membership_entities.py (122 lines) ════
# suma_browser/models/ghost_membership_entities.py
from datetime import datetime, timezone
from typing import Optional
from sqlalchemy import (
Integer, String, Text, Boolean, DateTime, ForeignKey, UniqueConstraint
)
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.ext.associationproxy import association_proxy
from db.base import Base
# -----------------------
# Labels (simple M2M)
# -----------------------
class GhostLabel(Base):
    """A member label synced from Ghost; attached to users via user_labels.

    Fix: created_at/updated_at defaults are now timezone-aware. The columns
    are DateTime(timezone=True), and the previous naive ``datetime.utcnow``
    (deprecated since Python 3.12) would be reinterpreted in the DB session's
    timezone on insert, silently skewing stored timestamps.
    """
    __tablename__ = "ghost_labels"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Upstream Ghost identifier; unique sync key.
    ghost_id: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[Optional[str]] = mapped_column(String(255))
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False,
        default=lambda: datetime.now(timezone.utc),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False,
        default=lambda: datetime.now(timezone.utc),
    )

    # Back-populated by User.labels
    users = relationship("User", secondary="user_labels", back_populates="labels", lazy="selectin")
class UserLabel(Base):
    """Plain many-to-many association row between users and ghost_labels."""
    __tablename__ = "user_labels"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    user_id: Mapped[int] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"), index=True
    )
    label_id: Mapped[int] = mapped_column(
        ForeignKey("ghost_labels.id", ondelete="CASCADE"), index=True
    )

    # Each (user, label) pair may appear at most once.
    __table_args__ = (
        UniqueConstraint("user_id", "label_id", name="uq_user_label"),
    )
# -----------------------
# Newsletters (association object + proxy)
# -----------------------
class GhostNewsletter(Base):
    """A newsletter synced from Ghost; users attach via UserNewsletter rows.

    Fix: created_at/updated_at defaults are now timezone-aware. The columns
    are DateTime(timezone=True), and the previous naive ``datetime.utcnow``
    (deprecated since Python 3.12) would be reinterpreted in the DB session's
    timezone on insert.
    """
    __tablename__ = "ghost_newsletters"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Upstream Ghost identifier; unique sync key.
    ghost_id: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[Optional[str]] = mapped_column(String(255))
    description: Mapped[Optional[str]] = mapped_column(Text)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False,
        default=lambda: datetime.now(timezone.utc),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False,
        default=lambda: datetime.now(timezone.utc),
    )

    # Association-object side (one-to-many)
    user_newsletters = relationship(
        "UserNewsletter",
        back_populates="newsletter",
        cascade="all, delete-orphan",
        lazy="selectin",
    )
    # Convenience: list-like proxy of Users via association rows (read-only container)
    users = association_proxy("user_newsletters", "user")
class UserNewsletter(Base):
    """Association object: one user's subscription state for one newsletter."""
    __tablename__ = "user_newsletters"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    user_id: Mapped[int] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"), index=True
    )
    newsletter_id: Mapped[int] = mapped_column(
        ForeignKey("ghost_newsletters.id", ondelete="CASCADE"), index=True
    )
    # Subscription state lives on the association row itself.
    subscribed: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)

    # Both ends of the association object.
    user = relationship("User", back_populates="user_newsletters", lazy="selectin")
    newsletter = relationship("GhostNewsletter", back_populates="user_newsletters", lazy="selectin")

    # One association row per (user, newsletter).
    __table_args__ = (
        UniqueConstraint("user_id", "newsletter_id", name="uq_user_newsletter"),
    )
# -----------------------
# Tiers & Subscriptions
# -----------------------
class GhostTier(Base):
    """A membership tier synced from Ghost."""
    __tablename__ = "ghost_tiers"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Upstream Ghost identifier; unique sync key.
    ghost_id: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[Optional[str]] = mapped_column(String(255))
    type: Mapped[Optional[str]] = mapped_column(String(50))  # e.g. free, paid
    visibility: Mapped[Optional[str]] = mapped_column(String(50))
class GhostSubscription(Base):
    """A member subscription synced from Ghost, with its Stripe references."""
    __tablename__ = "ghost_subscriptions"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Upstream Ghost identifier; unique sync key.
    ghost_id: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False)
    user_id: Mapped[int] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"), index=True
    )
    status: Mapped[Optional[str]] = mapped_column(String(50))
    # Tier survives the subscription; subscription survives tier deletion.
    tier_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey("ghost_tiers.id", ondelete="SET NULL"), index=True
    )
    cadence: Mapped[Optional[str]] = mapped_column(String(50))  # month, year
    price_amount: Mapped[Optional[int]] = mapped_column(Integer)
    price_currency: Mapped[Optional[str]] = mapped_column(String(10))
    stripe_customer_id: Mapped[Optional[str]] = mapped_column(String(255), index=True)
    stripe_subscription_id: Mapped[Optional[str]] = mapped_column(String(255), index=True)
    # Raw upstream payload (presumably the Ghost API object — verify at the
    # sync site).
    raw: Mapped[Optional[dict]] = mapped_column(JSONB, nullable=True)

    # Relationships
    user = relationship("User", back_populates="subscriptions", lazy="selectin")
    tier = relationship("GhostTier", lazy="selectin")

# ════ models/kv.py (13 lines) ════
from __future__ import annotations
from datetime import datetime, timezone
from sqlalchemy import String, Text, DateTime
from sqlalchemy.orm import Mapped, mapped_column
from db.base import Base
class KV(Base):
    """Simple key-value table for settings/cache/demo.

    Fixes: (1) this docstring previously sat *after* ``__tablename__`` and so
    was a no-op string expression rather than the class docstring; (2) the
    updated_at default/onupdate are now timezone-aware — the column is
    DateTime(timezone=True), and naive ``datetime.utcnow`` (deprecated since
    Python 3.12) would be reinterpreted in the DB session's timezone.
    """
    __tablename__ = "kv"

    key: Mapped[str] = mapped_column(String(120), primary_key=True)
    value: Mapped[str | None] = mapped_column(Text(), nullable=True)
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        default=lambda: datetime.now(timezone.utc),
        onupdate=lambda: datetime.now(timezone.utc),
    )

# ════ models/magic_link.py (25 lines) ════
from __future__ import annotations
from datetime import datetime
from sqlalchemy import String, Integer, DateTime, ForeignKey, func, Index
from sqlalchemy.orm import Mapped, mapped_column, relationship
from db.base import Base
class MagicLink(Base):
    """A one-time token emailed to a user for passwordless sign-in.

    Fix: the previous ``__table_args__`` declared Index("ix_magic_link_token",
    unique=True) and Index("ix_magic_link_user") on columns that already get
    indexes from ``unique=True``/``index=True`` on their mapped_column — i.e.
    two indexes per column. The redundant explicit ones are dropped.
    NOTE(review): if an alembic migration already created the old index
    names, add a follow-up migration dropping them.
    """
    __tablename__ = "magic_links"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # unique=True + index=True yields the single unique index on token.
    token: Mapped[str] = mapped_column(String(128), unique=True, index=True, nullable=False)
    user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
    # e.g. "signin"; other purposes share the same table.
    purpose: Mapped[str] = mapped_column(String(32), nullable=False, default="signin")
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    # Set when the link is consumed; NULL means still usable (until expiry).
    used_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Request metadata captured at issue time.
    ip: Mapped[str | None] = mapped_column(String(64), nullable=True)
    user_agent: Mapped[str | None] = mapped_column(String(256), nullable=True)

    user = relationship("User", backref="magic_links")

# ════ models/market.py (425 lines) ════
# models/market.py — market catalogue, navigation and cart models
from datetime import datetime
from typing import Optional, List
from sqlalchemy.orm import Mapped, mapped_column, relationship
from typing import List, Optional
from sqlalchemy import (
String, Text, Integer, ForeignKey, DateTime, Boolean, Numeric,
UniqueConstraint, Index, func
)
from db.base import Base  # shared declarative Base
class Product(Base):
    """A catalogue product scraped from Suma, with pricing/pack metadata and
    links to images, sections, labels, stickers, likes, cart and order items.

    Fix: ``labels`` and ``stickers`` now declare back_populates="product" —
    ProductLabel.product / ProductSticker.product already pointed back here,
    and a one-sided back_populates leaves SQLAlchemy's in-Python sync
    one-way and triggers its overlapping-relationship warning.
    """
    __tablename__ = "products"

    id: Mapped[int] = mapped_column(primary_key=True)
    slug: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False)
    title: Mapped[Optional[str]] = mapped_column(String(512))
    image: Mapped[Optional[str]] = mapped_column(Text)
    description_short: Mapped[Optional[str]] = mapped_column(Text)
    description_html: Mapped[Optional[str]] = mapped_column(Text)
    # Link back to the upstream Suma product page.
    suma_href: Mapped[Optional[str]] = mapped_column(Text)
    brand: Mapped[Optional[str]] = mapped_column(String(255))
    # Price fields come as (value, currency, raw-source-string) triples so the
    # original scraped text is never lost.
    rrp: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    rrp_currency: Mapped[Optional[str]] = mapped_column(String(16))
    rrp_raw: Mapped[Optional[str]] = mapped_column(String(128))
    price_per_unit: Mapped[Optional[float]] = mapped_column(Numeric(12, 4))
    price_per_unit_currency: Mapped[Optional[str]] = mapped_column(String(16))
    price_per_unit_raw: Mapped[Optional[str]] = mapped_column(String(128))
    special_price: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    special_price_currency: Mapped[Optional[str]] = mapped_column(String(16))
    special_price_raw: Mapped[Optional[str]] = mapped_column(String(128))
    regular_price: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    regular_price_currency: Mapped[Optional[str]] = mapped_column(String(16))
    regular_price_raw: Mapped[Optional[str]] = mapped_column(String(128))
    oe_list_price: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    # Case/pack breakdown parsed from the raw case-size string.
    case_size_count: Mapped[Optional[int]] = mapped_column(Integer)
    case_size_item_qty: Mapped[Optional[float]] = mapped_column(Numeric(12, 3))
    case_size_item_unit: Mapped[Optional[str]] = mapped_column(String(32))
    case_size_raw: Mapped[Optional[str]] = mapped_column(String(128))
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    # Soft-delete marker.
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    images: Mapped[List["ProductImage"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    sections: Mapped[List["ProductSection"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    labels: Mapped[List["ProductLabel"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    stickers: Mapped[List["ProductSticker"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    # NOTE: declared after the relationships above, so these columns land
    # later in CREATE TABLE column order — kept in place to match existing
    # migrations.
    ean: Mapped[Optional[str]] = mapped_column(String(64))
    sku: Mapped[Optional[str]] = mapped_column(String(128))
    unit_size: Mapped[Optional[str]] = mapped_column(String(128))
    pack_size: Mapped[Optional[str]] = mapped_column(String(128))
    attributes = relationship(
        "ProductAttribute",
        back_populates="product",
        lazy="selectin",
        cascade="all, delete-orphan",
    )
    nutrition = relationship(
        "ProductNutrition",
        back_populates="product",
        lazy="selectin",
        cascade="all, delete-orphan",
    )
    allergens = relationship(
        "ProductAllergen",
        back_populates="product",
        lazy="selectin",
        cascade="all, delete-orphan",
    )
    likes = relationship(
        "ProductLike",
        back_populates="product",
        cascade="all, delete-orphan",
    )
    cart_items: Mapped[List["CartItem"]] = relationship(
        "CartItem",
        back_populates="product",
        cascade="all, delete-orphan",
    )
    # All order items referencing this product (OrderItem defined elsewhere).
    # NOTE(review): delete-orphan here deletes order history with the product
    # — confirm that is intended.
    order_items: Mapped[List["OrderItem"]] = relationship(
        "OrderItem",
        back_populates="product",
        cascade="all, delete-orphan",
    )
from sqlalchemy import Column
class ProductLike(Base):
    """A user's like on a product; soft-deleted via deleted_at.

    Consistency fix: id and user_id previously used legacy ``Column`` while
    the rest of this module uses SQLAlchemy 2.0 ``Mapped``/``mapped_column``;
    unified here with identical column definitions.
    """
    __tablename__ = "product_likes"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[int] = mapped_column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
    # NOTE(review): likes reference products.slug while CartItem/OrderItem
    # use products.id — confirm this asymmetry is intentional.
    product_slug: Mapped[str] = mapped_column(ForeignKey("products.slug", ondelete="CASCADE"))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    product: Mapped["Product"] = relationship("Product", back_populates="likes", foreign_keys=[product_slug])
    user = relationship("User", back_populates="liked_products")
class ProductImage(Base):
    """An image row belonging to a product; ``kind`` + ``position`` order the
    set (kind defaults to "gallery")."""
    __tablename__ = "product_images"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False
    )
    url: Mapped[str] = mapped_column(Text, nullable=False)
    position: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    kind: Mapped[str] = mapped_column(String(16), nullable=False, default="gallery")

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    product: Mapped["Product"] = relationship(back_populates="images")

    # One row per (product, url, kind); position gets its own index.
    __table_args__ = (
        UniqueConstraint("product_id", "url", "kind", name="uq_product_images_product_url_kind"),
        Index("ix_product_images_position", "position"),
    )
class ProductSection(Base):
    """A titled HTML content section belonging to a product."""
    __tablename__ = "product_sections"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False
    )
    title: Mapped[str] = mapped_column(String(255), nullable=False)
    html: Mapped[str] = mapped_column(Text, nullable=False)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    product: Mapped["Product"] = relationship(back_populates="sections")

    # At most one section per title per product.
    __table_args__ = (
        UniqueConstraint("product_id", "title", name="uq_product_sections_product_title"),
    )
# --- Nav & listings ---
class NavTop(Base):
    """A top-level navigation category; parent of NavSub and Listing rows."""
    __tablename__ = "nav_tops"

    id: Mapped[int] = mapped_column(primary_key=True)
    label: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    listings: Mapped[List["Listing"]] = relationship(
        back_populates="top", cascade="all, delete-orphan"
    )

    __table_args__ = (UniqueConstraint("label", "slug", name="uq_nav_tops_label_slug"),)
class NavSub(Base):
    """A sub-category nested under a NavTop."""
    __tablename__ = "nav_subs"

    id: Mapped[int] = mapped_column(primary_key=True)
    top_id: Mapped[int] = mapped_column(
        ForeignKey("nav_tops.id", ondelete="CASCADE"), index=True, nullable=False
    )
    label: Mapped[str | None] = mapped_column(String(255))
    slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    href: Mapped[str | None] = mapped_column(Text)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    listings: Mapped[List["Listing"]] = relationship(
        back_populates="sub", cascade="all, delete-orphan"
    )

    # Slug is unique within its parent top-level category.
    __table_args__ = (UniqueConstraint("top_id", "slug", name="uq_nav_subs_top_slug"),)
class Listing(Base):
    """A paginated product listing attached to a (top, optional sub) pair.

    Legacy top_slug/sub_slug string columns were dropped in favour of the
    FK-based top_id/sub_id below.
    """
    __tablename__ = "listings"

    id: Mapped[int] = mapped_column(primary_key=True)
    top_id: Mapped[int] = mapped_column(
        ForeignKey("nav_tops.id", ondelete="CASCADE"), index=True, nullable=False
    )
    sub_id: Mapped[int | None] = mapped_column(
        ForeignKey("nav_subs.id", ondelete="CASCADE"), index=True
    )
    total_pages: Mapped[int | None] = mapped_column(Integer)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    top: Mapped["NavTop"] = relationship(back_populates="listings")
    sub: Mapped["NavSub | None"] = relationship(back_populates="listings")

    # One listing per (top, sub) combination.
    __table_args__ = (
        UniqueConstraint("top_id", "sub_id", name="uq_listings_top_sub"),
    )
class ListingItem(Base):
    """One product slug's membership in a Listing."""
    __tablename__ = "listing_items"

    id: Mapped[int] = mapped_column(primary_key=True)
    listing_id: Mapped[int] = mapped_column(
        ForeignKey("listings.id", ondelete="CASCADE"), index=True, nullable=False
    )
    slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    # A slug appears at most once per listing.
    __table_args__ = (
        UniqueConstraint("listing_id", "slug", name="uq_listing_items_listing_slug"),
    )
# --- Reports / redirects / logs ---
class LinkError(Base):
    """A problematic link recorded with its navigation context for review."""
    __tablename__ = "link_errors"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_slug: Mapped[str | None] = mapped_column(String(255), index=True)
    href: Mapped[str | None] = mapped_column(Text)
    text: Mapped[str | None] = mapped_column(Text)
    top: Mapped[str | None] = mapped_column(String(255))
    sub: Mapped[str | None] = mapped_column(String(255))
    target_slug: Mapped[str | None] = mapped_column(String(255))
    # Attribute name shadows the builtin "type" but matches the column name;
    # kept for schema compatibility.
    type: Mapped[str | None] = mapped_column(String(255))

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))
class LinkExternal(Base):
    """An off-site link recorded with its host and the product it came from."""
    __tablename__ = "link_externals"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_slug: Mapped[str | None] = mapped_column(String(255), index=True)
    href: Mapped[str | None] = mapped_column(Text)
    text: Mapped[str | None] = mapped_column(Text)
    host: Mapped[str | None] = mapped_column(String(255))

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))
class SubcategoryRedirect(Base):
    """Maps an old subcategory path to its replacement path."""
    __tablename__ = "subcategory_redirects"

    id: Mapped[int] = mapped_column(primary_key=True)
    old_path: Mapped[str] = mapped_column(String(512), nullable=False, index=True)
    new_path: Mapped[str] = mapped_column(String(512), nullable=False)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))
class ProductLog(Base):
    """Outcome record of one product-page fetch/parse attempt (presumably one
    row per scrape — confirm at the writer)."""
    __tablename__ = "product_logs"

    id: Mapped[int] = mapped_column(primary_key=True)
    slug: Mapped[str | None] = mapped_column(String(255), index=True)
    href_tried: Mapped[str | None] = mapped_column(Text)
    ok: Mapped[bool] = mapped_column(Boolean, nullable=False, server_default="false")
    # Failure details, when the attempt did not succeed.
    error_type: Mapped[str | None] = mapped_column(String(255))
    error_message: Mapped[str | None] = mapped_column(Text)
    http_status: Mapped[int | None] = mapped_column(Integer)
    final_url: Mapped[str | None] = mapped_column(Text)
    transport_error: Mapped[bool | None] = mapped_column(Boolean)
    # Summary of what was extracted from the page.
    title: Mapped[str | None] = mapped_column(String(512))
    has_description_html: Mapped[bool | None] = mapped_column(Boolean)
    has_description_short: Mapped[bool | None] = mapped_column(Boolean)
    sections_count: Mapped[int | None] = mapped_column(Integer)
    images_count: Mapped[int | None] = mapped_column(Integer)
    embedded_images_count: Mapped[int | None] = mapped_column(Integer)
    all_images_count: Mapped[int | None] = mapped_column(Integer)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
# ...existing models...
class ProductLabel(Base):
    """A named label attached to a product."""
    __tablename__ = "product_labels"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    product: Mapped["Product"] = relationship(back_populates="labels")

    # One label name per product.
    __table_args__ = (
        UniqueConstraint("product_id", "name", name="uq_product_labels_product_name"),
    )
class ProductSticker(Base):
    """A named sticker attached to a product."""
    __tablename__ = "product_stickers"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    product: Mapped["Product"] = relationship(back_populates="stickers")

    # One sticker name per product.
    __table_args__ = (
        UniqueConstraint("product_id", "name", name="uq_product_stickers_product_name"),
    )
class ProductAttribute(Base):
    """A free-form key/value attribute of a product."""
    __tablename__ = "product_attributes"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False
    )
    key: Mapped[str] = mapped_column(String(255), nullable=False)
    value: Mapped[str | None] = mapped_column(Text)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    product = relationship("Product", back_populates="attributes")

    # One value per key per product.
    __table_args__ = (
        UniqueConstraint("product_id", "key", name="uq_product_attributes_product_key"),
    )
class ProductNutrition(Base):
    """One nutrition figure (key/value/unit) for a product."""
    __tablename__ = "product_nutrition"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False
    )
    key: Mapped[str] = mapped_column(String(255), nullable=False)
    value: Mapped[str | None] = mapped_column(String(255))
    unit: Mapped[str | None] = mapped_column(String(64))

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    product = relationship("Product", back_populates="nutrition")

    # One figure per key per product.
    __table_args__ = (
        UniqueConstraint("product_id", "key", name="uq_product_nutrition_product_key"),
    )
class ProductAllergen(Base):
    """An allergen entry for a product; ``contains`` flags presence."""
    __tablename__ = "product_allergens"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    contains: Mapped[bool] = mapped_column(Boolean, nullable=False, server_default="false")

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    # Soft-delete marker.
    deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))

    product: Mapped["Product"] = relationship(back_populates="allergens")

    # One entry per allergen name per product.
    __table_args__ = (
        UniqueConstraint("product_id", "name", name="uq_product_allergens_product_name"),
    )
class CartItem(Base):
    """A shopping-cart line owned by a user OR an anonymous session."""
    __tablename__ = "cart_items"
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Either a logged-in user OR an anonymous session
    user_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=True,
    )
    # Anonymous cart token (see shared/cart_identity.py: "cart_sid").
    session_id: Mapped[str | None] = mapped_column(
        String(128),
        nullable=True,
    )
    # IMPORTANT: link to product *id*, not slug
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"),
        nullable=False,
    )
    # default=1 (Python side) and server_default="1" (DDL side) kept in sync.
    quantity: Mapped[int] = mapped_column(
        Integer,
        nullable=False,
        default=1,
        server_default="1",
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    # NOTE(review): no onupdate=func.now() — updated_at won't track quantity
    # changes automatically; confirm the service layer sets it.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    deleted_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
    # Relationships
    product: Mapped["Product"] = relationship(
        "Product",
        back_populates="cart_items",
    )
    user: Mapped["User | None"] = relationship("User", back_populates="cart_items")
    # Lookup indexes for the two ownership modes (user vs anonymous session).
    __table_args__ = (
        Index("ix_cart_items_user_product", "user_id", "product_id"),
        Index("ix_cart_items_session_product", "session_id", "product_id"),
    )

42
models/menu_item.py Normal file
View File

@@ -0,0 +1,42 @@
from datetime import datetime
from typing import Optional
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy import Integer, String, DateTime, ForeignKey, func
from db.base import Base
class MenuItem(Base):
    """A post promoted into the site menu, ordered by sort_order."""
    __tablename__ = "menu_items"
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Foreign key to posts table
    post_id: Mapped[int] = mapped_column(
        Integer,
        ForeignKey("posts.id", ondelete="CASCADE"),
        nullable=False,
        index=True
    )
    # Order for sorting menu items
    sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0, index=True)
    # Timestamps
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False
    )
    # Presumably a soft-delete marker — confirm that list queries filter on it.
    deleted_at: Mapped[Optional[datetime]] = mapped_column(
        DateTime(timezone=True),
        nullable=True
    )
    # Relationship to Post
    post: Mapped["Post"] = relationship("Post", back_populates="menu_items")

108
models/order.py Normal file
View File

@@ -0,0 +1,108 @@
from __future__ import annotations
from datetime import datetime
from typing import Optional, List
from sqlalchemy import Integer, String, DateTime, ForeignKey, Numeric, func, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from db.base import Base
class Order(Base):
    """A checkout order (guest or logged-in), paid via SumUp hosted checkout."""
    __tablename__ = "orders"
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Owner: a user id, or an anonymous session id for guest checkout.
    user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), nullable=True)
    # NOTE(review): String(64) here vs String(128) on cart_items.session_id — confirm.
    session_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)
    status: Mapped[str] = mapped_column(
        String(32),
        nullable=False,
        default="pending",
        server_default="pending",
    )
    currency: Mapped[str] = mapped_column(String(16), nullable=False, default="GBP")
    # NOTE(review): Numeric(12, 2) columns return Decimal at runtime; the
    # Mapped[float] annotation is advisory only.
    total_amount: Mapped[float] = mapped_column(Numeric(12, 2), nullable=False)
    # free-form description for the order
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True, index=True)
    # SumUp reference string (what we send as checkout_reference)
    sumup_reference: Mapped[Optional[str]] = mapped_column(
        String(255),
        nullable=True,
        index=True,
    )
    # SumUp integration fields
    sumup_checkout_id: Mapped[Optional[str]] = mapped_column(
        String(128),
        nullable=True,
        index=True,
    )
    sumup_status: Mapped[Optional[str]] = mapped_column(String(32), nullable=True)
    sumup_hosted_url: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
        onupdate=func.now(),
    )
    # Line items are owned by this order and deleted with it.
    items: Mapped[List["OrderItem"]] = relationship(
        "OrderItem",
        back_populates="order",
        cascade="all, delete-orphan",
        lazy="selectin",
    )
    # Calendar entries linked to this order (no delete cascade configured here).
    calendar_entries: Mapped[List["CalendarEntry"]] = relationship(
        "CalendarEntry",
        back_populates="order",
        lazy="selectin",
    )
class OrderItem(Base):
    """A priced line (product + quantity) within an Order."""
    __tablename__ = "order_items"
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    order_id: Mapped[int] = mapped_column(
        ForeignKey("orders.id", ondelete="CASCADE"),
        nullable=False,
    )
    # No ondelete on products — deleting a product with order history would fail.
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id"),
        nullable=False,
    )
    # Title stored alongside the FK — presumably a purchase-time snapshot; confirm.
    product_title: Mapped[Optional[str]] = mapped_column(String(512), nullable=True)
    quantity: Mapped[int] = mapped_column(Integer, nullable=False, default=1)
    # NOTE(review): Numeric(12, 2) returns Decimal at runtime despite Mapped[float].
    unit_price: Mapped[float] = mapped_column(Numeric(12, 2), nullable=False)
    currency: Mapped[str] = mapped_column(String(16), nullable=False, default="GBP")
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    order: Mapped["Order"] = relationship(
        "Order",
        back_populates="items",
    )
    # NEW: link each order item to its product
    product: Mapped["Product"] = relationship(
        "Product",
        back_populates="order_items",
        lazy="selectin",
    )

32
models/snippet.py Normal file
View File

@@ -0,0 +1,32 @@
from __future__ import annotations
from datetime import datetime
from sqlalchemy import Integer, String, Text, DateTime, ForeignKey, UniqueConstraint, Index, func
from sqlalchemy.orm import Mapped, mapped_column
from db.base import Base
class Snippet(Base):
    """A named text snippet owned by a user; name is unique per user."""
    __tablename__ = "snippets"
    __table_args__ = (
        UniqueConstraint("user_id", "name", name="uq_snippets_user_name"),
        # Visibility is queried/filtered, hence the dedicated index.
        Index("ix_snippets_visibility", "visibility"),
    )
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Snippets are removed with their owner (ON DELETE CASCADE).
    user_id: Mapped[int] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"), nullable=False,
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    value: Mapped[str] = mapped_column(Text, nullable=False)
    # Defaults to "private"; other accepted values are not visible here — confirm.
    visibility: Mapped[str] = mapped_column(
        String(20), nullable=False, default="private", server_default="private",
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now(),
    )

52
models/tag_group.py Normal file
View File

@@ -0,0 +1,52 @@
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy import (
Integer,
String,
Text,
DateTime,
ForeignKey,
UniqueConstraint,
func,
)
from db.base import Base
class TagGroup(Base):
    """A named, slugged group of tags with a manual sort order."""
    __tablename__ = "tag_groups"
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(191), unique=True, nullable=False)
    feature_image: Mapped[Optional[str]] = mapped_column(Text())
    colour: Mapped[Optional[str]] = mapped_column(String(32))
    sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now()
    )
    # Association rows; passive_deletes lets the DB-level CASCADE do the work.
    tag_links: Mapped[List["TagGroupTag"]] = relationship(
        "TagGroupTag", back_populates="group", cascade="all, delete-orphan", passive_deletes=True
    )
class TagGroupTag(Base):
    """Association row linking a TagGroup to a tag (each pair unique)."""
    __tablename__ = "tag_group_tags"
    __table_args__ = (
        UniqueConstraint("tag_group_id", "tag_id", name="uq_tag_group_tag"),
    )
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    tag_group_id: Mapped[int] = mapped_column(
        ForeignKey("tag_groups.id", ondelete="CASCADE"), nullable=False
    )
    tag_id: Mapped[int] = mapped_column(
        ForeignKey("tags.id", ondelete="CASCADE"), nullable=False
    )
    # Only the group side has an ORM relationship; the Tag side is not mapped here.
    group: Mapped["TagGroup"] = relationship("TagGroup", back_populates="tag_links")

46
models/user.py Normal file
View File

@@ -0,0 +1,46 @@
from __future__ import annotations
from datetime import datetime
from sqlalchemy import String, Integer, DateTime, func, Index, Text, Boolean
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.ext.associationproxy import association_proxy
from db.base import Base
class User(Base):
    """Application user, optionally linked to a Ghost CMS member."""
    __tablename__ = "users"
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    email: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    last_login_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    # Ghost membership linkage
    ghost_id: Mapped[str | None] = mapped_column(String(64), unique=True, index=True, nullable=True)
    name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    ghost_status: Mapped[str | None] = mapped_column(String(50), nullable=True)  # free, paid, comped
    ghost_subscribed: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default=func.true())
    ghost_note: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar_image: Mapped[str | None] = mapped_column(Text, nullable=True)
    stripe_customer_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    # Raw Ghost member payload (JSONB) — presumably the last synced copy; confirm.
    ghost_raw: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    # Relationships to Ghost-related entities
    user_newsletters = relationship("UserNewsletter", back_populates="user", cascade="all, delete-orphan", lazy="selectin")
    # Proxy: user.newsletters yields the Newsletter objects behind user_newsletters.
    newsletters = association_proxy("user_newsletters", "newsletter")
    labels = relationship("GhostLabel", secondary="user_labels", back_populates="users", lazy="selectin")
    subscriptions = relationship("GhostSubscription", back_populates="user", cascade="all, delete-orphan", lazy="selectin")
    liked_products = relationship("ProductLike", back_populates="user", cascade="all, delete-orphan")
    liked_posts = relationship("PostLike", back_populates="user", cascade="all, delete-orphan")
    cart_items = relationship(
        "CartItem",
        back_populates="user",
        cascade="all, delete-orphan",
    )
    # NOTE(review): ix_user_email duplicates the unique index already created by
    # unique=True / index=True on the email column — likely redundant.
    __table_args__ = (
        Index("ix_user_email", "email", unique=True),
    )
    def __repr__(self) -> str:
        return f"<User {self.id} {self.email}>"

47
requirements.txt Normal file
View File

@@ -0,0 +1,47 @@
starlette>=0.37,<0.39
aiofiles==25.1.0
aiosmtplib==5.0.0
alembic==1.17.0
anyio==4.11.0
async-timeout==5.0.1
asyncpg==0.30.0
beautifulsoup4==4.14.2
blinker==1.9.0
Brotli==1.1.0
certifi==2025.10.5
click==8.3.0
exceptiongroup==1.3.0
Flask==3.1.2
greenlet==3.2.4
h11==0.16.0
h2==4.3.0
hpack==4.1.0
httpcore==1.0.9
httpx==0.28.1
Hypercorn==0.17.3
hyperframe==6.1.0
idna==3.10
itsdangerous==2.2.0
Jinja2==3.1.6
lxml==6.0.2
Mako==1.3.10
MarkupSafe==3.0.3
priority==2.0.0
psycopg==3.2.11
psycopg-binary==3.2.11
PyJWT==2.10.1
PyYAML==6.0.3
Quart==0.20.0
sniffio==1.3.1
soupsieve==2.8
SQLAlchemy==2.0.44
taskgroup==0.2.2
tomli==2.3.0
typing_extensions==4.15.0
Werkzeug==3.1.3
wsproto==1.2.0
zstandard==0.25.0
redis>=5.0
mistune>=3.0
pytest>=8.0
pytest-asyncio>=0.23

0
shared/__init__.py Normal file
View File

34
shared/cart_identity.py Normal file
View File

@@ -0,0 +1,34 @@
"""
Cart identity resolution — shared across all apps that need to know
who the current cart owner is (user_id or anonymous session_id).
"""
from __future__ import annotations
import secrets
from typing import TypedDict, Optional
from quart import g, session as qsession
class CartIdentity(TypedDict):
    """Who owns the cart: exactly one of the two keys is non-None
    (see current_cart_identity)."""
    user_id: Optional[int]
    session_id: Optional[str]
def current_cart_identity() -> CartIdentity:
    """Return the identity that owns the current cart.

    Logged-in users are keyed by ``user_id``; anonymous visitors get a
    random ``session_id`` persisted in the Quart session ("cart_sid").
    """
    viewer = getattr(g, "user", None)
    viewer_id = getattr(viewer, "id", None) if viewer is not None else None
    if viewer_id is not None:
        # Authenticated: the user id alone identifies the cart.
        return {"user_id": viewer_id, "session_id": None}
    # Anonymous: reuse (or mint and persist) a random cart token.
    token = qsession.get("cart_sid")
    if not token:
        token = secrets.token_hex(16)
        qsession["cart_sid"] = token
    return {"user_id": None, "session_id": token}

9
shared/cart_loader.py Normal file
View File

@@ -0,0 +1,9 @@
from __future__ import annotations
from quart import g
from suma_browser.app.bp.cart.services import get_cart
async def load_cart():
    """before_request hook: attach the current cart to ``g.cart``.

    Uses the request-scoped DB session ``g.s`` (set up by the shared
    middleware) and the cart app's local get_cart service.
    """
    g.cart = await get_cart(g.s)

58
shared/context.py Normal file
View File

@@ -0,0 +1,58 @@
"""
Base template context shared by all apps.
This module no longer imports cart or menu_items services directly.
Each app provides its own context_fn that calls this base and adds
app-specific variables (cart data, menu_items, etc.).
"""
from __future__ import annotations
from datetime import datetime
from quart import request, g, current_app
from config import config
from utils import host_url
from suma_browser.app.utils import current_route_relative_path
async def base_context() -> dict:
    """
    Common template variables available in every app.
    Does NOT include cart, calendar_cart_entries, total, calendar_total,
    or menu_items — those are added by each app's context_fn.
    """
    # HTMX partial navigation vs full page load.
    is_htmx = request.headers.get("HX-Request") == "true"
    search = request.headers.get("X-Search", "")
    # True when an HTMX request carries an empty search box — templates use
    # this to reset ("zap") active filters.
    zap_filter = is_htmx and search == ""

    def base_url():
        return host_url()

    # hx-select targets: main panel only, or main panel plus search widgets.
    hx_select = "#main-panel"
    hx_select_search = (
        hx_select
        + ", #search-mobile, #search-count-mobile, #search-desktop, #search-count-desktop, #menu-items-nav-wrapper"
    )
    return {
        "is_htmx": is_htmx,
        "request": request,
        "now": datetime.now(),
        "current_local_href": current_route_relative_path(),
        "config": config(),
        # Fallback no-op lambda covers apps where setup_jinja hasn't run yet.
        "asset_url": current_app.jinja_env.globals.get("asset_url", lambda p: ""),
        # (value, label, icon) triples for the sort dropdown.
        "sort_options": [
            ("az", "A\u2013Z", "order/a-z.svg"),
            ("za", "Z\u2013A", "order/z-a.svg"),
            ("price-asc", "\u00a3 low\u2192high", "order/l-h.svg"),
            ("price-desc", "\u00a3 high\u2192low", "order/h-l.svg"),
        ],
        "zap_filter": zap_filter,
        # NOTE(review): exposing builtin print to templates looks debug-only — confirm.
        "print": print,
        "base_url": base_url,
        "base_title": config()["title"],
        "hx_select": hx_select,
        "hx_select_search": hx_select_search,
    }

141
shared/factory.py Normal file
View File

@@ -0,0 +1,141 @@
from __future__ import annotations
import asyncio
import os
from pathlib import Path
from typing import Callable, Awaitable, Sequence
from quart import Quart, request, g, send_from_directory
from config import init_config, config, pretty
from models import KV # ensure models imported
from db.session import register_db
from suma_browser.app.middleware import register as register_middleware
from suma_browser.app.redis_cacher import register as register_redis
from suma_browser.app.csrf import protect
from suma_browser.app.errors import errors
from .jinja_setup import setup_jinja
from .user_loader import load_current_user
# Async init of config (runs once at import)
# NOTE(review): asyncio.run() at import time raises RuntimeError if an event
# loop is already running (e.g. under some test runners / ASGI servers that
# import lazily) — confirm every entry point imports this before a loop starts.
asyncio.run(init_config())
# Repo root (this file lives in shared/, so parent.parent is the checkout root).
BASE_DIR = Path(__file__).resolve().parent.parent
STATIC_DIR = str(BASE_DIR / "static")
TEMPLATE_DIR = str(BASE_DIR / "suma_browser" / "templates")
def create_base_app(
    name: str,
    *,
    context_fn: Callable[[], Awaitable[dict]] | None = None,
    before_request_fns: Sequence[Callable[[], Awaitable[None]]] | None = None,
) -> Quart:
    """
    Create a Quart app with shared infrastructure.
    Parameters
    ----------
    name:
        Application name (also used as CACHE_APP_PREFIX).
    context_fn:
        Async function returning a dict for template context.
        Each app provides its own — the cart app queries locally,
        while coop/market apps fetch via internal API.
        If not provided, a minimal default context is used.
    before_request_fns:
        Extra before-request hooks (e.g. cart_loader for the cart app).
        Registered AFTER user loading and BEFORE CSRF protection.
    """
    app = Quart(
        name,
        static_folder=STATIC_DIR,
        static_url_path="/static",
        template_folder=TEMPLATE_DIR,
    )
    # NOTE(review): hard-coded fallback secret — ensure SECRET_KEY is always set
    # in production, otherwise sessions are forgeable.
    app.secret_key = os.getenv("SECRET_KEY", "dev-secret-key-change-me-777")
    # Session cookie shared across subdomains
    cookie_domain = os.getenv("SESSION_COOKIE_DOMAIN")  # e.g. ".rose-ash.com"
    if cookie_domain:
        app.config["SESSION_COOKIE_DOMAIN"] = cookie_domain
    # Same cookie name in every app so the session is shared between them.
    app.config["SESSION_COOKIE_NAME"] = "coop_session"
    # Ghost / Redis config
    app.config["GHOST_API_URL"] = os.getenv("GHOST_API_URL")
    app.config["GHOST_PUBLIC_URL"] = os.getenv("GHOST_PUBLIC_URL")
    app.config["GHOST_CONTENT_KEY"] = os.getenv("GHOST_CONTENT_API_KEY")
    app.config["REDIS_URL"] = os.getenv("REDIS_URL")
    # Cache app prefix for key namespacing
    app.config["CACHE_APP_PREFIX"] = name
    # --- infrastructure ---
    register_middleware(app)
    register_db(app)
    register_redis(app)
    setup_jinja(app)
    errors(app)
    # --- before-request hooks (registration order == execution order) ---
    @app.before_request
    async def _route_log():
        # Reverse-proxy context used for URL building throughout the request.
        g.root = request.headers.get("x-forwarded-prefix", "/")
        g.scheme = request.scheme
        g.host = request.host
    @app.before_request
    async def _load_user():
        await load_current_user()
    # Register any app-specific before-request hooks (e.g. cart loader)
    if before_request_fns:
        for fn in before_request_fns:
            app.before_request(fn)
    @app.before_request
    async def _csrf_protect():
        await protect()
    # --- after-request hooks ---
    @app.after_request
    async def _add_hx_preserve_search_header(response):
        # Echo the search box value back so HTMX swaps don't lose it.
        value = request.headers.get("X-Search")
        if value is not None:
            response.headers["HX-Preserve-Search"] = value
        return response
    # --- context processor ---
    if context_fn is not None:
        @app.context_processor
        async def _inject_base():
            return await context_fn()
    else:
        # Minimal fallback (no cart, no menu_items)
        from .context import base_context
        @app.context_processor
        async def _inject_base():
            return await base_context()
    # --- cleanup internal API client on shutdown ---
    @app.after_serving
    async def _close_internal_client():
        from .internal_api import close_client
        await close_client()
    # --- startup ---
    @app.before_serving
    async def _startup():
        # Re-run config init at serve time (already ran once at import).
        await init_config()
        print(pretty())
    # --- favicon ---
    @app.get("/favicon.ico")
    async def favicon():
        return await send_from_directory("static", "favicon.ico")
    return app

49
shared/http_utils.py Normal file
View File

@@ -0,0 +1,49 @@
"""
HTTP utility helpers shared across apps.
Extracted from browse/services/services.py so order/orders blueprints
(which live in the cart app) don't need to import from the browse blueprint.
"""
from __future__ import annotations
from urllib.parse import urlencode
from quart import g, request
from utils import host_url
def vary(resp):
    """
    Ensure HX-Request and X-Origin are part of the Vary header
    so caches distinguish HTMX from full-page requests.

    Returns the same response object, mutated in place.
    """
    existing = resp.headers.get("Vary", "")
    members = [token.strip() for token in existing.split(",") if token.strip()]
    for required in ("HX-Request", "X-Origin"):
        if required not in members:
            members.append(required)
    if members:
        resp.headers["Vary"] = ", ".join(members)
    return resp
def current_url_without_page():
    """
    Return the current URL with the ``page`` query-string parameter removed.
    Used for Hx-Push-Url headers on paginated routes.
    """
    # Fix 1: removed a leftover dead statement whose result was discarded:
    #   (request.script_root or "").rstrip("/")
    # Fix 2: normalise the forwarded prefix to a single leading slash. The old
    # code computed "/" + g.root, which yields "//<prefix>" whenever the
    # x-forwarded-prefix header already carries its leading slash (the factory
    # defaults g.root to "/"), so the prefix was never actually stripped.
    root = "/" + (g.root or "").strip("/")
    path_only = request.path
    if root != "/" and path_only.startswith(root):
        rel = path_only[len(root):]
        rel = rel if rel.startswith("/") else "/" + rel
    else:
        rel = path_only
    base = host_url(rel)
    params = request.args.to_dict(flat=False)
    params.pop("page", None)
    qs = urlencode(params, doseq=True)
    return f"{base}?{qs}" if qs else base

152
shared/internal_api.py Normal file
View File

@@ -0,0 +1,152 @@
"""
Async HTTP client for inter-app communication.
Each app exposes internal JSON API endpoints. Other apps call them
via httpx over the Docker overlay network (or localhost in dev).
URLs resolved from env vars:
INTERNAL_URL_COOP (default http://localhost:8000)
INTERNAL_URL_MARKET (default http://localhost:8001)
INTERNAL_URL_CART (default http://localhost:8002)
Session cookie forwarding: when ``forward_session=True`` the current
request's ``coop_session`` cookie is sent along so the target app can
resolve ``g.user`` / cart identity.
"""
from __future__ import annotations
import logging
import os
from typing import Any
import httpx
from quart import request as quart_request
log = logging.getLogger("internal_api")
class DictObj:
    """Thin wrapper so ``d.key`` works on dicts returned by JSON APIs.

    Jinja templates use attribute access (``item.post.slug``) which
    doesn't work on plain dicts. Wrapping the API response with
    ``dictobj()`` makes both ``item.post.slug`` and ``item["post"]["slug"]``
    work identically. Nested dicts are wrapped lazily on access.
    """

    __slots__ = ("_data",)

    def __init__(self, data: dict):
        self._data = data

    def __getattr__(self, name: str):
        if name not in self._data:
            raise AttributeError(name)
        value = self._data[name]
        return DictObj(value) if isinstance(value, dict) else value

    def get(self, key, default=None):
        value = self._data.get(key, default)
        return DictObj(value) if isinstance(value, dict) else value

    def __repr__(self):
        return f"DictObj({self._data!r})"

    def __bool__(self):
        # Truthiness mirrors the wrapped dict (empty payload -> falsy).
        return bool(self._data)
def dictobj(data):
    """Wrap a dict (or the dict elements of a list) for attribute access.

    Anything that is neither a dict nor a list passes through untouched.
    """
    if isinstance(data, dict):
        return DictObj(data)
    if isinstance(data, list):
        return [DictObj(item) if isinstance(item, dict) else item for item in data]
    return data
# Default per-app internal base URLs for local development; overridden in
# deployment by INTERNAL_URL_<APP> environment variables (see _base_url).
_DEFAULTS = {
    "coop": "http://localhost:8000",
    "market": "http://localhost:8001",
    "cart": "http://localhost:8002",
    "events": "http://localhost:8003",
}
# Lazily created shared connection pool (see _get_client / close_client).
_client: httpx.AsyncClient | None = None
TIMEOUT = 3.0  # seconds
def _base_url(app_name: str) -> str:
env_key = f"INTERNAL_URL_{app_name.upper()}"
return os.getenv(env_key, _DEFAULTS.get(app_name, ""))
def _get_client() -> httpx.AsyncClient:
    """Return the shared AsyncClient, (re)creating it if absent or closed."""
    global _client
    if _client is None or _client.is_closed:
        _client = httpx.AsyncClient(timeout=TIMEOUT)
    return _client
async def close_client() -> None:
    """Call from ``@app.after_serving`` to cleanly close the pool.

    Idempotent: does nothing when no open client exists.
    """
    global _client
    pool = _client
    if pool is None or pool.is_closed:
        return
    await pool.aclose()
    _client = None
def _session_cookies() -> dict[str, str]:
    """Extract the shared session cookie from the incoming request.

    Returns an empty dict when there is no request context or no cookie.
    """
    cookie_name = "coop_session"
    try:
        value = quart_request.cookies.get(cookie_name)
    except RuntimeError:
        # Called outside a request context (e.g. a background task).
        return {}
    return {cookie_name: value} if value else {}
async def get(
    app_name: str,
    path: str,
    *,
    forward_session: bool = False,
    params: dict | None = None,
) -> dict | list | None:
    """GET ``<app_base><path>`` and return parsed JSON, or ``None`` on failure."""
    target = _base_url(app_name).rstrip("/") + path
    jar = _session_cookies() if forward_session else {}
    try:
        response = await _get_client().get(target, params=params, cookies=jar)
        response.raise_for_status()
        return response.json()
    except Exception as exc:
        # Best-effort: callers treat None as "service unavailable".
        log.warning("internal_api GET %s failed: %r", target, exc)
        return None
async def post(
    app_name: str,
    path: str,
    *,
    json: Any = None,
    forward_session: bool = False,
) -> dict | list | None:
    """POST ``<app_base><path>`` and return parsed JSON, or ``None`` on failure."""
    target = _base_url(app_name).rstrip("/") + path
    jar = _session_cookies() if forward_session else {}
    try:
        response = await _get_client().post(target, json=json, cookies=jar)
        response.raise_for_status()
        return response.json()
    except Exception as exc:
        # Best-effort: callers treat None as "service unavailable".
        log.warning("internal_api POST %s failed: %r", target, exc)
        return None

98
shared/jinja_setup.py Normal file
View File

@@ -0,0 +1,98 @@
from __future__ import annotations
import hashlib
import re
from pathlib import Path
from quart import Quart, g, url_for
from config import config
from utils import host_url
from suma_browser.app.csrf import generate_csrf_token
from suma_browser.app.authz import has_access
from suma_browser.app.filters import register as register_filters
from .urls import coop_url, market_url, cart_url, events_url, login_url
def setup_jinja(app: Quart) -> None:
    """Register shared Jinja globals, style constants and filters on *app*."""
    app.jinja_env.add_extension("jinja2.ext.do")
    # --- template globals ---
    app.add_template_global(generate_csrf_token, "csrf_token")
    app.add_template_global(has_access, "has_access")

    # Per-request nesting counter, stored on quart.g so it resets each request.
    def level():
        if not hasattr(g, "_level_counter"):
            g._level_counter = 0
        return g._level_counter

    def level_up():
        # Returns "" so it can be called inline inside a template expression.
        if not hasattr(g, "_level_counter"):
            g._level_counter = 0
        g._level_counter += 1
        return ""

    app.jinja_env.globals["level"] = level
    app.jinja_env.globals["level_up"] = level_up
    app.jinja_env.globals["menu_colour"] = "sky"
    # Shared Tailwind class strings used by templates via the `styles` global.
    nav_button = """justify-center cursor-pointer flex flex-row items-center gap-2 rounded bg-stone-200 text-black
    [.hover-capable_&]:hover:bg-yellow-300
    aria-selected:bg-stone-500 aria-selected:text-white
    [.hover-capable_&[aria-selected=true]:hover]:bg-orange-500"""
    styles = {
        "pill": """
    inline-flex items-center px-3 py-1 rounded-full bg-stone-200 text-stone-700 text-sm
    hover:bg-stone-300 hover:text-stone-900
    focus:outline-none focus-visible:ring-2 focus-visible:ring-stone-400
    """,
        "tr": "odd:bg-slate-50 even:bg-white hover:bg-slate-100",
        "action_button": "px-2 py-1 border rounded text-sm bg-sky-300 hover:bg-sky-400 flex gap-1 items-center",
        "pre_action_button": "px-2 py-1 border rounded text-sm bg-green-200 hover:bg-green-300",
        "cancel_button": "px-3 py-1.5 rounded-full text-sm border border-stone-300 text-stone-700 hover:bg-stone-100",
        "list_container": "border border-stone-200 rounded-lg p-3 mb-3 bg-white space-y-3 bg-yellow-200",
        "nav_button": f"{nav_button} p-3",
        "nav_button_less_pad": f"{nav_button} p-2",
    }
    app.jinja_env.globals["styles"] = styles

    # Cache-busting asset URL: content md5 digest appended as ?v= parameter.
    def _asset_url(path: str) -> str:
        def squash_double_slashes(url: str) -> str:
            # Preserve the scheme's "//" while collapsing duplicate slashes
            # introduced by joining g.root with url_for's output.
            m = re.match(r"(?:[A-Za-z][\w+.-]*:)?//", url)
            prefix = m.group(0) if m else ""
            rest = re.sub(r"/+", "/", url[len(prefix):])
            return prefix + rest
        file_path = Path("static") / path
        try:
            digest = hashlib.md5(file_path.read_bytes()).hexdigest()[:8]
        except Exception:
            # Unreadable/missing file -> stable "dev" marker instead of a hash.
            digest = "dev"
        return squash_double_slashes(
            f"{g.scheme}://{g.host}{g.root}/{url_for('static', filename=path, v=digest)}"
        )
    app.jinja_env.globals["asset_url"] = _asset_url

    # Site-wide metadata used in layouts / OpenGraph tags.
    def site():
        return {
            "url": host_url(),
            "logo": _asset_url("img/logo.jpg"),
            "default_image": _asset_url("img/logo.jpg"),
            "title": config()["title"],
        }
    app.jinja_env.globals["site"] = site
    # cross-app URL helpers available in all templates
    app.jinja_env.globals["coop_url"] = coop_url
    app.jinja_env.globals["market_url"] = market_url
    app.jinja_env.globals["cart_url"] = cart_url
    app.jinja_env.globals["events_url"] = events_url
    app.jinja_env.globals["login_url"] = login_url
    # register jinja filters
    register_filters(app)

43
shared/urls.py Normal file
View File

@@ -0,0 +1,43 @@
from __future__ import annotations
import os
from urllib.parse import quote
from config import config
def _get_app_url(app_name: str) -> str:
env_key = f"APP_URL_{app_name.upper()}"
env_val = os.getenv(env_key)
if env_val:
return env_val.rstrip("/")
return config()["app_urls"][app_name].rstrip("/")
def app_url(app_name: str, path: str = "/") -> str:
    """Join *path* onto *app_name*'s base URL, ensuring one leading slash."""
    prefix = "" if path.startswith("/") else "/"
    return _get_app_url(app_name) + prefix + path
def coop_url(path: str = "/") -> str:
    """URL on the coop app."""
    return app_url("coop", path)


def market_url(path: str = "/") -> str:
    """URL on the market app."""
    return app_url("market", path)


def cart_url(path: str = "/") -> str:
    """URL on the cart app."""
    return app_url("cart", path)


def events_url(path: str = "/") -> str:
    """URL on the events app."""
    return app_url("events", path)


def login_url(next_url: str = "") -> str:
    """Login page on the coop app, optionally with a ``?next=`` redirect."""
    if not next_url:
        return coop_url("/auth/login/")
    return coop_url(f"/auth/login/?next={quote(next_url, safe='')}")

35
shared/user_loader.py Normal file
View File

@@ -0,0 +1,35 @@
from __future__ import annotations
from quart import session as qsession, g
from sqlalchemy import select
from sqlalchemy.orm import selectinload
from models.user import User
from models.ghost_membership_entities import UserNewsletter
async def load_user_by_id(session, user_id: int):
    """Load a user by ID with labels and newsletters eagerly loaded.

    Returns the User or ``None`` when no row matches.
    """
    query = (
        select(User)
        .where(User.id == user_id)
        .options(
            selectinload(User.labels),
            selectinload(User.user_newsletters).selectinload(
                UserNewsletter.newsletter
            ),
        )
    )
    rows = await session.execute(query)
    return rows.scalar_one_or_none()
async def load_current_user():
    """before_request hook: resolve the session uid into g.user / g.rights.

    g.rights maps label names to True for the loaded user. Fix: previously
    a stale session uid (user row deleted) left g.rights as a bare {} while
    the anonymous path set {"admin": False} — templates indexing
    rights["admin"] would KeyError only in that one case. Both no-user
    paths now produce the same shape.
    """
    uid = qsession.get("uid")
    if not uid:
        g.user = None
        g.rights = {"admin": False}
        return
    # g.s is the request-scoped DB session installed by register_db.
    g.user = await load_user_by_id(g.s, uid)
    if g.user is None:
        # Stale uid in the session cookie — treat exactly like anonymous.
        g.rights = {"admin": False}
    else:
        g.rights = {label.name: True for label in g.user.labels}

BIN
static/errors/403.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 101 KiB

BIN
static/errors/404.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

BIN
static/errors/error.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 646 KiB

BIN
static/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

9
static/fontawesome/css/all.min.css vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

2
static/img/filter.svg Normal file
View File

@@ -0,0 +1,2 @@
<?xml version="1.0" ?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M18 7H17M17 7H16M17 7V6M17 7V8M12.5 5H6C5.5286 5 5.29289 5 5.14645 5.14645C5 5.29289 5 5.5286 5 6V7.96482C5 8.2268 5 8.35779 5.05916 8.46834C5.11833 8.57888 5.22732 8.65154 5.4453 8.79687L8.4688 10.8125C9.34073 11.3938 9.7767 11.6845 10.0133 12.1267C10.25 12.5688 10.25 13.0928 10.25 14.1407V19L13.75 17.25V14.1407C13.75 13.0928 13.75 12.5688 13.9867 12.1267C14.1205 11.8765 14.3182 11.6748 14.6226 11.4415M20 7C20 8.65685 18.6569 10 17 10C15.3431 10 14 8.65685 14 7C14 5.34315 15.3431 4 17 4C18.6569 4 20 5.34315 20 7Z" stroke="#464455" stroke-linecap="round" stroke-linejoin="round"/></svg>

After

Width:  |  Height:  |  Size: 806 B

BIN
static/img/logo.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 358 KiB

4
static/img/search.svg Normal file
View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M15.7955 15.8111L21 21M18 10.5C18 14.6421 14.6421 18 10.5 18C6.35786 18 3 14.6421 3 10.5C3 6.35786 6.35786 3 10.5 3C14.6421 3 18 6.35786 18 10.5Z" stroke="#000000" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

After

Width:  |  Height:  |  Size: 469 B

17
static/labels/_blank.svg Normal file
View File

@@ -0,0 +1,17 @@
<svg xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 200 200" width="200" height="200" role="img" aria-labelledby="title">
<title id="title">Offer ribbon (top-right)</title>
<!-- Ribbon group: move origin to top-right, then rotate 45° -->
<g transform="translate(200 0) rotate(45)">
<!-- The stripe -->
<rect x="-160" y="50" width="320" height="25" rx="4" fill="#22c55e"/>
<!-- Label on the stripe (centered) -->
<text x="0" y="65"
font-family="system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial, sans-serif"
font-size="18" font-weight="800" fill="#ffffff"
text-anchor="middle" dominant-baseline="middle" letter-spacing=".15em">
NEW
</text>
</g>
</svg>

After

Width:  |  Height:  |  Size: 734 B

17
static/labels/new.svg Normal file
View File

@@ -0,0 +1,17 @@
<svg xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 200 200" width="200" height="200" role="img" aria-labelledby="title">
<title id="title">Offer ribbon (top-right)</title>
<!-- Ribbon group rotated -45° around the top-left corner (0,0) -->
<g transform=" rotate(-45 0 0)">
<!-- The stripe -->
<rect x="-160" y="25" width="320" height="25" rx="4" fill="#22c55e"/>
<!-- Label on the stripe (centered) -->
<text x="0" y="40"
font-family="system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial, sans-serif"
font-size="18" font-weight="800" fill="#ffffff"
text-anchor="middle" dominant-baseline="middle" letter-spacing=".15em">
NEW
</text>
</g>
</svg>

After

Width:  |  Height:  |  Size: 723 B

19
static/labels/offer.svg Normal file
View File

@@ -0,0 +1,19 @@
<svg xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 200 200" width="200" height="200" role="img" aria-labelledby="title">
<title id="title">Offer ribbon</title>
<!-- Transparent background (nothing drawn) -->
<!-- Ribbon group rotated -45° around the top-left corner (0,0) -->
<g transform="rotate(-45 0 0)">
<!-- The stripe itself -->
<rect x="-80" y="50" width="260" height="25" rx="4" fill="#ef4444"/>
<!-- Centered label on the stripe -->
<text x="0" y="65"
font-family="system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial, sans-serif"
font-size="18" font-weight="800" fill="#ffffff"
text-anchor="middle" dominant-baseline="middle" letter-spacing=".15em">
OFFER
</text>
</g>
</svg>

After

Width:  |  Height:  |  Size: 770 B

14
static/nav-labels/new.svg Normal file
View File

@@ -0,0 +1,14 @@
<svg xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 100 100" width="100" height="100" role="img" aria-labelledby="title">
<title id="title">New</title>
<!-- The stripe -->
<rect x="0" y="30" width="100" height="40" rx="4" fill="#22c55e"/>
<!-- Label on the stripe (centered) -->
<text x="52" y="52"
font-family="system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial, sans-serif"
font-size="25" font-weight="800" fill="#ffffff"
text-anchor="middle" dominant-baseline="middle" letter-spacing=".15em">
NEW
</text>
</svg>

After

Width:  |  Height:  |  Size: 590 B

Some files were not shown because too many files have changed in this diff Show More