commit f42042ccb7953637a3fab218ffcc4ede786edfa4 Author: giles Date: Tue Feb 24 19:44:17 2026 +0000 Monorepo: consolidate 7 repos into one Combines shared, blog, market, cart, events, federation, and account into a single repository. Eliminates submodule sync, sibling model copying at build time, and per-app CI orchestration. Changes: - Remove per-app .git, .gitmodules, .gitea, submodule shared/ dirs - Remove stale sibling model copies from each app - Update all 6 Dockerfiles for monorepo build context (root = .) - Add build directives to docker-compose.yml - Add single .gitea/workflows/ci.yml with change detection - Add .dockerignore for monorepo build context - Create __init__.py for federation and account (cross-app imports) diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..397600b --- /dev/null +++ b/.dockerignore @@ -0,0 +1,13 @@ +.git +.gitea +.env +_snapshot +docs +schema.sql +**/.gitmodules +**/.gitignore +**/README.md +**/__pycache__ +**/.pytest_cache +**/node_modules +**/*.pyc diff --git a/.gitea/workflows/ci.yml b/.gitea/workflows/ci.yml new file mode 100644 index 0000000..852020c --- /dev/null +++ b/.gitea/workflows/ci.yml @@ -0,0 +1,72 @@ +name: Build and Deploy + +on: + push: + branches: [main, decoupling] + +env: + REGISTRY: registry.rose-ash.com:5000 + COOP_DIR: /root/rose-ash + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install tools + run: | + apt-get update && apt-get install -y --no-install-recommends openssh-client + + - name: Set up SSH + env: + SSH_KEY: ${{ secrets.DEPLOY_SSH_KEY }} + DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }} + run: | + mkdir -p ~/.ssh + echo "$SSH_KEY" > ~/.ssh/id_rsa + chmod 600 ~/.ssh/id_rsa + ssh-keyscan -H "$DEPLOY_HOST" >> ~/.ssh/known_hosts 2>/dev/null || true + + - name: Build and deploy changed apps + env: + DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }} + run: | + ssh "root@$DEPLOY_HOST" " + cd ${{ env.COOP_DIR }} + git fetch origin ${{ 
github.ref_name }} + + # Detect what changed since current HEAD + CHANGED=\$(git diff --name-only HEAD origin/${{ github.ref_name }}) + git reset --hard origin/${{ github.ref_name }} + + REBUILD_ALL=false + if echo \"\$CHANGED\" | grep -q '^shared/'; then + REBUILD_ALL=true + fi + if echo \"\$CHANGED\" | grep -q '^docker-compose.yml'; then + REBUILD_ALL=true + fi + + for app in blog market cart events federation account; do + if [ \"\$REBUILD_ALL\" = true ] || echo \"\$CHANGED\" | grep -q \"^\$app/\"; then + echo \"Building \$app...\" + docker build \ + --build-arg CACHEBUST=\$(date +%s) \ + -f \$app/Dockerfile \ + -t ${{ env.REGISTRY }}/\$app:latest \ + -t ${{ env.REGISTRY }}/\$app:${{ github.sha }} \ + . + docker push ${{ env.REGISTRY }}/\$app:latest + docker push ${{ env.REGISTRY }}/\$app:${{ github.sha }} + else + echo \"Skipping \$app (no changes)\" + fi + done + + source .env + docker stack deploy -c docker-compose.yml coop + echo 'Waiting for services to update...' + sleep 10 + docker stack services coop + " diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..1e2e7ed --- /dev/null +++ b/.gitignore @@ -0,0 +1,12 @@ +__pycache__/ +*.pyc +*.pyo +.env +node_modules/ +*.egg-info/ +dist/ +build/ +.venv/ +venv/ +_snapshot/ +_debug/ diff --git a/_config/app-config.yaml b/_config/app-config.yaml new file mode 100644 index 0000000..dabb4c3 --- /dev/null +++ b/_config/app-config.yaml @@ -0,0 +1,83 @@ +root: "/rose-ash-wholefood-coop" # no trailing slash needed (we normalize it) +host: "https://rose-ash.com" +base_host: "wholesale.suma.coop" +base_login: https://wholesale.suma.coop/customer/account/login/ +base_url: https://wholesale.suma.coop/ +title: ROSE-ASH 2.0 +market_root: /market +market_title: Market +blog_root: / +blog_title: all the news +cart_root: /cart +app_urls: + blog: "https://blog.rose-ash.com" + market: "https://market.rose-ash.com" + cart: "https://cart.rose-ash.com" + events: "https://events.rose-ash.com" + federation: 
"https://federation.rose-ash.com" + account: "https://account.rose-ash.com" +cache: + fs_root: /app/_snapshot # <- absolute path to your snapshot dir +categories: + allow: + Basics: basics + Branded Goods: branded-goods + Chilled: chilled + Frozen: frozen + Non-foods: non-foods + Supplements: supplements + Christmas: christmas +slugs: + skip: + - "" + - customer + - account + - checkout + - wishlist + - sales + - contact + - privacy-policy + - terms-and-conditions + - delivery + - catalogsearch + - quickorder + - apply + - search + - static + - media +section-titles: + - ingredients + - allergy information + - allergens + - nutritional information + - nutrition + - storage + - directions + - preparation + - serving suggestions + - origin + - country of origin + - recycling + - general information + - additional information + - a note about prices + +blacklist: + category: + - branded-goods/alcoholic-drinks + - branded-goods/beers + - branded-goods/ciders + - branded-goods/wines + product: + - list-price-suma-current-suma-price-list-each-bk012-2-html + product-details: + - General Information + - A Note About Prices +sumup: + merchant_code: "ME4J6100" + currency: "GBP" + # Name of the environment variable that holds your SumUp API key + api_key_env: "SUMUP_API_KEY" + webhook_secret: "jfwlekjfwef798ewf769ew8f679ew8f7weflwef" + + diff --git a/account/Dockerfile b/account/Dockerfile new file mode 100644 index 0000000..6131e2d --- /dev/null +++ b/account/Dockerfile @@ -0,0 +1,50 @@ +# syntax=docker/dockerfile:1 + +# ---------- Python application ---------- +FROM python:3.11-slim AS base + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONPATH=/app \ + PIP_NO_CACHE_DIR=1 \ + APP_PORT=8000 \ + APP_MODULE=app:app + +WORKDIR /app + +# Install system deps + psql client +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +COPY shared/requirements.txt ./requirements.txt 
+RUN pip install -r requirements.txt + +# Shared code (replaces submodule) +COPY shared/ ./shared/ + +# App code +COPY account/ ./ + +# Sibling models for cross-domain SQLAlchemy imports +COPY blog/__init__.py ./blog/__init__.py +COPY blog/models/ ./blog/models/ +COPY market/__init__.py ./market/__init__.py +COPY market/models/ ./market/models/ +COPY cart/__init__.py ./cart/__init__.py +COPY cart/models/ ./cart/models/ +COPY events/__init__.py ./events/__init__.py +COPY events/models/ ./events/models/ +COPY federation/__init__.py ./federation/__init__.py +COPY federation/models/ ./federation/models/ + +# ---------- Runtime setup ---------- +COPY account/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh + +RUN useradd -m -u 10001 appuser && chown -R appuser:appuser /app +USER appuser + +EXPOSE ${APP_PORT} +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/account/__init__.py b/account/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/account/app.py b/account/app.py new file mode 100644 index 0000000..c23ff3d --- /dev/null +++ b/account/app.py @@ -0,0 +1,65 @@ +from __future__ import annotations +import path_setup # noqa: F401 # adds shared/ to sys.path +from pathlib import Path + +from quart import g, request +from jinja2 import FileSystemLoader, ChoiceLoader + +from shared.infrastructure.factory import create_base_app +from shared.services.registry import services + +from bp import register_account_bp, register_auth_bp, register_fragments + + +async def account_context() -> dict: + """Account app context processor.""" + from shared.infrastructure.context import base_context + from shared.services.navigation import get_navigation_tree + from shared.infrastructure.cart_identity import current_cart_identity + from shared.infrastructure.fragments import fetch_fragment + + ctx = await base_context() + + ctx["nav_tree_html"] = await fetch_fragment( + "blog", "nav-tree", + params={"app_name": "account", "path": 
request.path}, + ) + # Fallback for _nav.html when nav-tree fragment fetch fails + ctx["menu_items"] = await get_navigation_tree(g.s) + + # Cart data (consistent with all other apps) + ident = current_cart_identity() + summary = await services.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + ctx["cart_count"] = summary.count + summary.calendar_count + summary.ticket_count + ctx["cart_total"] = float(summary.total + summary.calendar_total + summary.ticket_total) + + return ctx + + +def create_app() -> "Quart": + from services import register_domain_services + + app = create_base_app( + "account", + context_fn=account_context, + domain_services_fn=register_domain_services, + ) + + # App-specific templates override shared templates + app_templates = str(Path(__file__).resolve().parent / "templates") + app.jinja_loader = ChoiceLoader([ + FileSystemLoader(app_templates), + app.jinja_loader, + ]) + + # --- blueprints --- + app.register_blueprint(register_auth_bp()) + app.register_blueprint(register_account_bp()) + app.register_blueprint(register_fragments()) + + return app + + +app = create_app() diff --git a/account/bp/__init__.py b/account/bp/__init__.py new file mode 100644 index 0000000..fe22f4e --- /dev/null +++ b/account/bp/__init__.py @@ -0,0 +1,3 @@ +from .account.routes import register as register_account_bp +from .auth.routes import register as register_auth_bp +from .fragments import register_fragments diff --git a/account/bp/account/__init__.py b/account/bp/account/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/account/bp/account/routes.py b/account/bp/account/routes.py new file mode 100644 index 0000000..23b3cae --- /dev/null +++ b/account/bp/account/routes.py @@ -0,0 +1,168 @@ +"""Account pages blueprint. + +Moved from federation/bp/auth — newsletters, fragment pages (tickets, bookings). +Mounted at root /. 
+""" +from __future__ import annotations + +from quart import ( + Blueprint, + request, + render_template, + make_response, + redirect, + g, +) +from sqlalchemy import select + +from shared.models import UserNewsletter +from shared.models.ghost_membership_entities import GhostNewsletter +from shared.infrastructure.urls import login_url +from shared.infrastructure.fragments import fetch_fragment, fetch_fragments + +oob = { + "oob_extends": "oob_elements.html", + "extends": "_types/root/_index.html", + "parent_id": "root-header-child", + "child_id": "auth-header-child", + "header": "_types/auth/header/_header.html", + "parent_header": "_types/root/header/_header.html", + "nav": "_types/auth/_nav.html", + "main": "_types/auth/_main_panel.html", +} + + +def register(url_prefix="/"): + account_bp = Blueprint("account", __name__, url_prefix=url_prefix) + + @account_bp.context_processor + async def context(): + events_nav, cart_nav = await fetch_fragments([ + ("events", "account-nav-item", {}), + ("cart", "account-nav-item", {}), + ]) + return {"oob": oob, "account_nav_html": events_nav + cart_nav} + + @account_bp.get("/") + async def account(): + from shared.browser.app.utils.htmx import is_htmx_request + + if not g.get("user"): + return redirect(login_url("/")) + + if not is_htmx_request(): + html = await render_template("_types/auth/index.html") + else: + html = await render_template("_types/auth/_oob_elements.html") + + return await make_response(html) + + @account_bp.get("/newsletters/") + async def newsletters(): + from shared.browser.app.utils.htmx import is_htmx_request + + if not g.get("user"): + return redirect(login_url("/newsletters/")) + + result = await g.s.execute( + select(GhostNewsletter).order_by(GhostNewsletter.name) + ) + all_newsletters = result.scalars().all() + + sub_result = await g.s.execute( + select(UserNewsletter).where( + UserNewsletter.user_id == g.user.id, + ) + ) + user_subs = {un.newsletter_id: un for un in sub_result.scalars().all()} + + 
        newsletter_list = []
        for nl in all_newsletters:
            un = user_subs.get(nl.id)
            newsletter_list.append({
                "newsletter": nl,
                "un": un,
                "subscribed": un.subscribed if un else False,
            })

        nl_oob = {**oob, "main": "_types/auth/_newsletters_panel.html"}

        if not is_htmx_request():
            html = await render_template(
                "_types/auth/index.html",
                oob=nl_oob,
                newsletter_list=newsletter_list,
            )
        else:
            html = await render_template(
                "_types/auth/_oob_elements.html",
                oob=nl_oob,
                newsletter_list=newsletter_list,
            )

        return await make_response(html)

    @account_bp.post("/newsletter/<int:newsletter_id>/toggle/")
    async def toggle_newsletter(newsletter_id: int):
        if not g.get("user"):
            return "", 401

        result = await g.s.execute(
            select(UserNewsletter).where(
                UserNewsletter.user_id == g.user.id,
                UserNewsletter.newsletter_id == newsletter_id,
            )
        )
        un = result.scalar_one_or_none()

        if un:
            un.subscribed = not un.subscribed
        else:
            un = UserNewsletter(
                user_id=g.user.id,
                newsletter_id=newsletter_id,
                subscribed=True,
            )
            g.s.add(un)

        await g.s.flush()

        return await render_template(
            "_types/auth/_newsletter_toggle.html",
            un=un,
        )

    # Catch-all for fragment-provided pages — must be last
    @account_bp.get("/<slug>/")
    async def fragment_page(slug):
        from shared.browser.app.utils.htmx import is_htmx_request
        from quart import abort

        if not g.get("user"):
            return redirect(login_url(f"/{slug}/"))

        fragment_html = await fetch_fragment(
            "events", "account-page",
            params={"slug": slug, "user_id": str(g.user.id)},
        )
        if not fragment_html:
            abort(404)

        w_oob = {**oob, "main": "_types/auth/_fragment_panel.html"}

        if not is_htmx_request():
            html = await render_template(
                "_types/auth/index.html",
                oob=w_oob,
                page_fragment_html=fragment_html,
            )
        else:
            html = await render_template(
                "_types/auth/_oob_elements.html",
                oob=w_oob,
                page_fragment_html=fragment_html,
            )

        return await make_response(html)

    return 
account_bp diff --git a/account/bp/auth/__init__.py b/account/bp/auth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/account/bp/auth/routes.py b/account/bp/auth/routes.py new file mode 100644 index 0000000..5d1f334 --- /dev/null +++ b/account/bp/auth/routes.py @@ -0,0 +1,486 @@ +"""Authentication routes for the account app. + +Account is the OAuth authorization server. Owns magic link login/logout, +OAuth2 authorize endpoint, grant verification, and SSO logout. +""" +from __future__ import annotations + +import secrets +from datetime import datetime, timezone, timedelta + +from quart import ( + Blueprint, + request, + render_template, + redirect, + url_for, + session as qsession, + g, + current_app, + jsonify, +) +from sqlalchemy import select, update +from sqlalchemy.exc import SQLAlchemyError + +from shared.db.session import get_session +from shared.models import User +from shared.models.oauth_code import OAuthCode +from shared.models.oauth_grant import OAuthGrant +from shared.infrastructure.urls import account_url, app_url +from shared.infrastructure.cart_identity import current_cart_identity +from shared.events import emit_activity + +from .services import ( + pop_login_redirect_target, + store_login_redirect_target, + send_magic_email, + find_or_create_user, + create_magic_link, + validate_magic_link, + validate_email, +) + +SESSION_USER_KEY = "uid" +ACCOUNT_SESSION_KEY = "account_sid" + +ALLOWED_CLIENTS = {"blog", "market", "cart", "events", "federation", "artdag"} + + +def register(url_prefix="/auth"): + auth_bp = Blueprint("auth", __name__, url_prefix=url_prefix) + + # --- OAuth2 authorize endpoint ------------------------------------------- + + @auth_bp.get("/oauth/authorize") + @auth_bp.get("/oauth/authorize/") + async def oauth_authorize(): + client_id = request.args.get("client_id", "") + redirect_uri = request.args.get("redirect_uri", "") + state = request.args.get("state", "") + device_id = request.args.get("device_id", "") + 
prompt = request.args.get("prompt", "") + + if client_id not in ALLOWED_CLIENTS: + return "Invalid client_id", 400 + + expected_redirect = app_url(client_id, "/auth/callback") + if redirect_uri != expected_redirect: + return "Invalid redirect_uri", 400 + + # Account's own device id — always available via factory hook + account_did = g.device_id + + # Not logged in + if not g.get("user"): + if prompt == "none": + # Silent check — pass account_did so client can watch for future logins + sep = "&" if "?" in redirect_uri else "?" + return redirect( + f"{redirect_uri}{sep}error=login_required" + f"&state={state}&account_did={account_did}" + ) + authorize_path = request.full_path + store_login_redirect_target() + return redirect(url_for("auth.login_form", next=authorize_path)) + + # Logged in — create grant + authorization code + account_sid = qsession.get(ACCOUNT_SESSION_KEY) + if not account_sid: + account_sid = secrets.token_urlsafe(32) + qsession[ACCOUNT_SESSION_KEY] = account_sid + + grant_token = secrets.token_urlsafe(48) + code = secrets.token_urlsafe(48) + now = datetime.now(timezone.utc) + expires = now + timedelta(minutes=5) + + async with get_session() as s: + async with s.begin(): + grant = OAuthGrant( + token=grant_token, + user_id=g.user.id, + client_id=client_id, + issuer_session=account_sid, + device_id=device_id or None, + ) + s.add(grant) + + oauth_code = OAuthCode( + code=code, + user_id=g.user.id, + client_id=client_id, + redirect_uri=redirect_uri, + expires_at=expires, + grant_token=grant_token, + ) + s.add(oauth_code) + + sep = "&" if "?" in redirect_uri else "?" 
+ return redirect( + f"{redirect_uri}{sep}code={code}&state={state}" + f"&account_did={account_did}" + ) + + # --- OAuth2 token exchange (for external clients like artdag) ------------- + + from shared.browser.app.csrf import csrf_exempt + + @csrf_exempt + @auth_bp.post("/oauth/token") + @auth_bp.post("/oauth/token/") + async def oauth_token(): + """Exchange an authorization code for user info + grant token. + + Used by clients that don't share the coop database (e.g. artdag). + Accepts JSON: {code, client_id, redirect_uri} + Returns JSON: {user_id, username, display_name, grant_token} + """ + data = await request.get_json() + if not data: + return jsonify({"error": "invalid_request"}), 400 + + code = data.get("code", "") + client_id = data.get("client_id", "") + redirect_uri = data.get("redirect_uri", "") + + if client_id not in ALLOWED_CLIENTS: + return jsonify({"error": "invalid_client"}), 400 + + now = datetime.now(timezone.utc) + + async with get_session() as s: + async with s.begin(): + result = await s.execute( + select(OAuthCode) + .where(OAuthCode.code == code) + .with_for_update() + ) + oauth_code = result.scalar_one_or_none() + + if not oauth_code: + return jsonify({"error": "invalid_grant"}), 400 + + if oauth_code.used_at is not None: + return jsonify({"error": "invalid_grant"}), 400 + + if oauth_code.expires_at < now: + return jsonify({"error": "invalid_grant"}), 400 + + if oauth_code.client_id != client_id: + return jsonify({"error": "invalid_grant"}), 400 + + if oauth_code.redirect_uri != redirect_uri: + return jsonify({"error": "invalid_grant"}), 400 + + oauth_code.used_at = now + user_id = oauth_code.user_id + grant_token = oauth_code.grant_token + + user = await s.get(User, user_id) + if not user: + return jsonify({"error": "invalid_grant"}), 400 + + return jsonify({ + "user_id": user_id, + "username": user.email or "", + "display_name": user.name or "", + "grant_token": grant_token, + }) + + # --- Grant verification (internal endpoint) 
------------------------------ + + @auth_bp.get("/internal/verify-grant") + async def verify_grant(): + """Called by client apps to check if a grant is still valid.""" + token = request.args.get("token", "") + if not token: + return jsonify({"valid": False}), 200 + + async with get_session() as s: + grant = await s.scalar( + select(OAuthGrant).where(OAuthGrant.token == token) + ) + if not grant or grant.revoked_at is not None: + return jsonify({"valid": False}), 200 + return jsonify({"valid": True}), 200 + + @auth_bp.get("/internal/check-device") + async def check_device(): + """Called by client apps to check if a device has an active auth. + + Looks up the most recent grant for (device_id, client_id). + If the grant is active → {active: true}. + If revoked but user has logged in since → {active: true} (re-auth needed). + Otherwise → {active: false}. + """ + device_id = request.args.get("device_id", "") + app_name = request.args.get("app", "") + if not device_id or not app_name: + return jsonify({"active": False}), 200 + + async with get_session() as s: + # Find the most recent grant for this device + app + result = await s.execute( + select(OAuthGrant) + .where(OAuthGrant.device_id == device_id) + .where(OAuthGrant.client_id == app_name) + .order_by(OAuthGrant.created_at.desc()) + .limit(1) + ) + grant = result.scalar_one_or_none() + + if not grant: + return jsonify({"active": False}), 200 + + # Grant still active + if grant.revoked_at is None: + return jsonify({"active": True}), 200 + + # Grant revoked — check if user logged in since + user = await s.get(User, grant.user_id) + if user and user.last_login_at and user.last_login_at > grant.revoked_at: + return jsonify({"active": True}), 200 + + return jsonify({"active": False}), 200 + + # --- Magic link login flow ----------------------------------------------- + + @auth_bp.get("/login/") + async def login_form(): + store_login_redirect_target() + cross_cart_sid = request.args.get("cart_sid") + if cross_cart_sid: + 
            qsession["cart_sid"] = cross_cart_sid
        if g.get("user"):
            redirect_url = pop_login_redirect_target()
            return redirect(redirect_url)
        return await render_template("auth/login.html")

    @auth_bp.post("/start/")
    async def start_login():
        form = await request.form
        email_input = form.get("email") or ""

        is_valid, email = validate_email(email_input)
        if not is_valid:
            return (
                await render_template(
                    "auth/login.html",
                    error="Please enter a valid email address.",
                    email=email_input,
                ),
                400,
            )

        user = await find_or_create_user(g.s, email)
        token, expires = await create_magic_link(g.s, user.id)

        from shared.utils import host_url
        magic_url = host_url(url_for("auth.magic", token=token))

        email_error = None
        try:
            await send_magic_email(email, magic_url)
        except Exception as e:
            current_app.logger.error("EMAIL SEND FAILED: %r", e)
            email_error = (
                "We couldn't send the email automatically. "
                "Please try again in a moment."
            )

        return await render_template(
            "auth/check_email.html",
            email=email,
            email_error=email_error,
        )

    @auth_bp.get("/magic/<token>/")
    async def magic(token: str):
        now = datetime.now(timezone.utc)
        user_id: int | None = None

        try:
            async with get_session() as s:
                async with s.begin():
                    user, error = await validate_magic_link(s, token)

                    if error:
                        return (
                            await render_template("auth/login.html", error=error),
                            400,
                        )
                    user_id = user.id

        except Exception:
            return (
                await render_template(
                    "auth/login.html",
                    error="Could not sign you in right now. 
Please try again.", + ), + 502, + ) + + assert user_id is not None + + ident = current_cart_identity() + anon_session_id = ident.get("session_id") + + try: + async with get_session() as s: + async with s.begin(): + u2 = await s.get(User, user_id) + if u2: + u2.last_login_at = now + if anon_session_id: + await emit_activity( + s, + activity_type="rose:Login", + actor_uri="internal:system", + object_type="Person", + object_data={ + "user_id": user_id, + "session_id": anon_session_id, + }, + ) + # Notify external services of device login + await emit_activity( + s, + activity_type="rose:DeviceAuth", + actor_uri="internal:system", + object_type="Device", + object_data={ + "device_id": g.device_id, + "action": "login", + }, + ) + except SQLAlchemyError: + current_app.logger.exception( + "[auth] non-fatal DB update for user_id=%s", user_id + ) + + qsession[SESSION_USER_KEY] = user_id + # Fresh account session ID for grant tracking + qsession[ACCOUNT_SESSION_KEY] = secrets.token_urlsafe(32) + + # Signal login for this device so client apps can detect it + try: + from shared.browser.app.redis_cacher import get_redis + import time as _time + _redis = get_redis() + if _redis: + await _redis.set( + f"did_auth:{g.device_id}", + str(_time.time()).encode(), + ex=30 * 24 * 3600, + ) + except Exception: + current_app.logger.exception("[auth] failed to set did_auth in Redis") + + redirect_url = pop_login_redirect_target() + return redirect(redirect_url, 303) + + @auth_bp.post("/logout/") + async def logout(): + # Revoke all grants issued by this account session + account_sid = qsession.get(ACCOUNT_SESSION_KEY) + if account_sid: + try: + async with get_session() as s: + async with s.begin(): + await s.execute( + update(OAuthGrant) + .where(OAuthGrant.issuer_session == account_sid) + .where(OAuthGrant.revoked_at.is_(None)) + .values(revoked_at=datetime.now(timezone.utc)) + ) + except SQLAlchemyError: + current_app.logger.exception("[auth] failed to revoke grants") + + # Clear login 
signal for this device + try: + from shared.browser.app.redis_cacher import get_redis + _redis = get_redis() + if _redis: + await _redis.delete(f"did_auth:{g.device_id}") + except Exception: + pass + + # Notify external services of device logout + try: + async with get_session() as s: + async with s.begin(): + await emit_activity( + s, + activity_type="rose:DeviceAuth", + actor_uri="internal:system", + object_type="Device", + object_data={ + "device_id": g.device_id, + "action": "logout", + }, + ) + except Exception: + current_app.logger.exception("[auth] failed to emit DeviceAuth logout") + + qsession.pop(SESSION_USER_KEY, None) + qsession.pop(ACCOUNT_SESSION_KEY, None) + from shared.infrastructure.urls import blog_url + return redirect(blog_url("/")) + + @auth_bp.get("/sso-logout/") + async def sso_logout(): + """SSO logout called by client apps: revoke grants, clear session.""" + account_sid = qsession.get(ACCOUNT_SESSION_KEY) + if account_sid: + try: + async with get_session() as s: + async with s.begin(): + await s.execute( + update(OAuthGrant) + .where(OAuthGrant.issuer_session == account_sid) + .where(OAuthGrant.revoked_at.is_(None)) + .values(revoked_at=datetime.now(timezone.utc)) + ) + except SQLAlchemyError: + current_app.logger.exception("[auth] failed to revoke grants") + + # Clear login signal for this device + try: + from shared.browser.app.redis_cacher import get_redis + _redis = get_redis() + if _redis: + await _redis.delete(f"did_auth:{g.device_id}") + except Exception: + pass + + # Notify external services of device logout + try: + async with get_session() as s: + async with s.begin(): + await emit_activity( + s, + activity_type="rose:DeviceAuth", + actor_uri="internal:system", + object_type="Device", + object_data={ + "device_id": g.device_id, + "action": "logout", + }, + ) + except Exception: + current_app.logger.exception("[auth] failed to emit DeviceAuth logout") + + qsession.pop(SESSION_USER_KEY, None) + qsession.pop(ACCOUNT_SESSION_KEY, 
None) + from shared.infrastructure.urls import blog_url + return redirect(blog_url("/")) + + @auth_bp.get("/clear/") + async def clear(): + """One-time migration helper: clear all session cookies.""" + qsession.clear() + resp = redirect(account_url("/")) + resp.delete_cookie("blog_session", domain=".rose-ash.com", path="/") + return resp + + return auth_bp diff --git a/account/bp/auth/services/__init__.py b/account/bp/auth/services/__init__.py new file mode 100644 index 0000000..648f87d --- /dev/null +++ b/account/bp/auth/services/__init__.py @@ -0,0 +1,24 @@ +from .login_redirect import pop_login_redirect_target, store_login_redirect_target +from .auth_operations import ( + get_app_host, + get_app_root, + send_magic_email, + load_user_by_id, + find_or_create_user, + create_magic_link, + validate_magic_link, + validate_email, +) + +__all__ = [ + "pop_login_redirect_target", + "store_login_redirect_target", + "get_app_host", + "get_app_root", + "send_magic_email", + "load_user_by_id", + "find_or_create_user", + "create_magic_link", + "validate_magic_link", + "validate_email", +] diff --git a/account/bp/auth/services/auth_operations.py b/account/bp/auth/services/auth_operations.py new file mode 100644 index 0000000..f727c0d --- /dev/null +++ b/account/bp/auth/services/auth_operations.py @@ -0,0 +1,156 @@ +"""Auth operations for the account app. + +Owns magic-link login. Shared models, shared config. 
+""" +from __future__ import annotations + +import os +import secrets +from datetime import datetime, timedelta, timezone +from typing import Optional, Tuple + +from quart import current_app, render_template, request, g +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from shared.models import User, MagicLink +from shared.config import config + + +def get_app_host() -> str: + host = ( + config().get("host") or os.getenv("APP_HOST") or "http://localhost:8000" + ).rstrip("/") + return host + + +def get_app_root() -> str: + root = (g.root).rstrip("/") + return root + + +async def send_magic_email(to_email: str, link_url: str) -> None: + host = os.getenv("SMTP_HOST") + port = int(os.getenv("SMTP_PORT") or "587") + username = os.getenv("SMTP_USER") + password = os.getenv("SMTP_PASS") + mail_from = os.getenv("MAIL_FROM") or "no-reply@example.com" + + site_name = config().get("title", "Rose Ash") + subject = f"Your sign-in link \u2014 {site_name}" + + tpl_vars = dict(site_name=site_name, link_url=link_url) + text_body = await render_template("_email/magic_link.txt", **tpl_vars) + html_body = await render_template("_email/magic_link.html", **tpl_vars) + + if not host or not username or not password: + current_app.logger.warning( + "SMTP not configured. 
Printing magic link to console for %s: %s", + to_email, + link_url, + ) + print(f"[DEV] Magic link for {to_email}: {link_url}") + return + + import aiosmtplib + from email.message import EmailMessage + + msg = EmailMessage() + msg["From"] = mail_from + msg["To"] = to_email + msg["Subject"] = subject + msg.set_content(text_body) + msg.add_alternative(html_body, subtype="html") + + is_secure = port == 465 + if is_secure: + smtp = aiosmtplib.SMTP( + hostname=host, port=port, use_tls=True, + username=username, password=password, + ) + else: + smtp = aiosmtplib.SMTP( + hostname=host, port=port, start_tls=True, + username=username, password=password, + ) + + async with smtp: + await smtp.send_message(msg) + + +async def load_user_by_id(session: AsyncSession, user_id: int) -> Optional[User]: + stmt = ( + select(User) + .options(selectinload(User.labels)) + .where(User.id == user_id) + ) + result = await session.execute(stmt) + return result.scalar_one_or_none() + + +async def find_or_create_user(session: AsyncSession, email: str) -> User: + result = await session.execute(select(User).where(User.email == email)) + user = result.scalar_one_or_none() + + if user is None: + user = User(email=email) + session.add(user) + await session.flush() + + return user + + +async def create_magic_link( + session: AsyncSession, + user_id: int, + purpose: str = "signin", + expires_minutes: int = 15, +) -> Tuple[str, datetime]: + token = secrets.token_urlsafe(32) + expires = datetime.now(timezone.utc) + timedelta(minutes=expires_minutes) + + ml = MagicLink( + token=token, + user_id=user_id, + purpose=purpose, + expires_at=expires, + ip=request.headers.get("x-forwarded-for", request.remote_addr), + user_agent=request.headers.get("user-agent"), + ) + session.add(ml) + + return token, expires + + +async def validate_magic_link( + session: AsyncSession, + token: str, +) -> Tuple[Optional[User], Optional[str]]: + now = datetime.now(timezone.utc) + + ml = await session.scalar( + select(MagicLink) 
+ .where(MagicLink.token == token) + .with_for_update() + ) + + if not ml or ml.purpose != "signin": + return None, "Invalid or expired link." + + if ml.used_at or ml.expires_at < now: + return None, "This link has expired. Please request a new one." + + user = await session.get(User, ml.user_id) + if not user: + return None, "User not found." + + ml.used_at = now + return user, None + + +def validate_email(email: str) -> Tuple[bool, str]: + email = email.strip().lower() + if not email or "@" not in email: + return False, email + return True, email diff --git a/account/bp/auth/services/login_redirect.py b/account/bp/auth/services/login_redirect.py new file mode 100644 index 0000000..8382516 --- /dev/null +++ b/account/bp/auth/services/login_redirect.py @@ -0,0 +1,45 @@ +from urllib.parse import urlparse +from quart import session + +from shared.infrastructure.urls import account_url + + +LOGIN_REDIRECT_SESSION_KEY = "login_redirect_to" + + +def store_login_redirect_target() -> None: + from quart import request + + target = request.args.get("next") + if not target: + ref = request.referrer or "" + try: + parsed = urlparse(ref) + target = parsed.path or "" + except Exception: + target = "" + + if not target: + return + + # Accept both relative paths and absolute URLs (cross-app redirects) + if target.startswith("http://") or target.startswith("https://"): + session[LOGIN_REDIRECT_SESSION_KEY] = target + elif target.startswith("/") and not target.startswith("//"): + session[LOGIN_REDIRECT_SESSION_KEY] = target + + +def pop_login_redirect_target() -> str: + path = session.pop(LOGIN_REDIRECT_SESSION_KEY, None) + if not path or not isinstance(path, str): + return account_url("/") + + # Absolute URL: return as-is (cross-app redirect) + if path.startswith("http://") or path.startswith("https://"): + return path + + # Relative path: must start with / and not // + if path.startswith("/") and not path.startswith("//"): + return account_url(path) + + return account_url("/") 
diff --git a/account/bp/fragments/__init__.py b/account/bp/fragments/__init__.py new file mode 100644 index 0000000..a4af44b --- /dev/null +++ b/account/bp/fragments/__init__.py @@ -0,0 +1 @@ +from .routes import register as register_fragments diff --git a/account/bp/fragments/routes.py b/account/bp/fragments/routes.py new file mode 100644 index 0000000..b21a601 --- /dev/null +++ b/account/bp/fragments/routes.py @@ -0,0 +1,52 @@ +"""Account app fragment endpoints. + +Exposes HTML fragments at ``/internal/fragments/`` for consumption +by other coop apps via the fragment client. + +Fragments: + auth-menu Desktop + mobile auth menu (sign-in or user link) +""" + +from __future__ import annotations + +from quart import Blueprint, Response, request, render_template + +from shared.infrastructure.fragments import FRAGMENT_HEADER + + +def register(): + bp = Blueprint("fragments", __name__, url_prefix="/internal/fragments") + + # --------------------------------------------------------------- + # Fragment handlers + # --------------------------------------------------------------- + + async def _auth_menu(): + user_email = request.args.get("email", "") + return await render_template( + "fragments/auth_menu.html", + user_email=user_email, + ) + + _handlers = { + "auth-menu": _auth_menu, + } + + # --------------------------------------------------------------- + # Routing + # --------------------------------------------------------------- + + @bp.before_request + async def _require_fragment_header(): + if not request.headers.get(FRAGMENT_HEADER): + return Response("", status=403) + + @bp.get("/") + async def get_fragment(fragment_type: str): + handler = _handlers.get(fragment_type) + if handler is None: + return Response("", status=200, content_type="text/html") + html = await handler() + return Response(html, status=200, content_type="text/html") + + return bp diff --git a/account/entrypoint.sh b/account/entrypoint.sh new file mode 100644 index 0000000..52b4f51 --- /dev/null 
+++ b/account/entrypoint.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Optional: wait for Postgres to be reachable
+if [[ -n "${DATABASE_HOST:-}" && -n "${DATABASE_PORT:-}" ]]; then
+  echo "Waiting for Postgres at ${DATABASE_HOST}:${DATABASE_PORT}..."
+  # Probe for up to 60s via bash's /dev/tcp pseudo-device (no netcat needed);
+  # deliberately non-fatal — the app starts anyway after the timeout.
+  for i in {1..60}; do
+    (echo > /dev/tcp/${DATABASE_HOST}/${DATABASE_PORT}) >/dev/null 2>&1 && break || true
+    sleep 1
+  done
+fi
+
+# Clear Redis page cache on deploy
+# NOTE(review): FLUSHALL wipes every key in the whole Redis instance, not
+# just this app's page cache — if the instance is shared between the apps,
+# each app's deploy clears all of them; confirm this is intended.
+if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
+  echo "Flushing Redis cache..."
+  python3 -c "
+import redis, os
+r = redis.from_url(os.environ['REDIS_URL'])
+r.flushall()
+print('Redis cache cleared.')
+" || echo "Redis flush failed (non-fatal), continuing..."
+fi
+
+# Start the app
+echo "Starting Hypercorn (${APP_MODULE:-app:app})..."
+PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" --bind 0.0.0.0:${PORT:-8000}
diff --git a/account/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/account/path_setup.py
new file mode 100644
index 0000000..c7166f7
--- /dev/null
+++ b/account/path_setup.py
@@ -0,0 +1,9 @@
+# Imported for its side effect: makes the monorepo root (for ``shared`` and
+# sibling apps) and this app directory importable before anything else.
+import sys
+import os
+
+_app_dir = os.path.dirname(os.path.abspath(__file__))
+_project_root = os.path.dirname(_app_dir)
+
+for _p in (_project_root, _app_dir):
+    if _p not in sys.path:
+        sys.path.insert(0, _p)
diff --git a/account/services/__init__.py
new file mode 100644
index 0000000..299f0ad
--- /dev/null
+++ b/account/services/__init__.py
@@ -0,0 +1,27 @@
+"""Account app service registration."""
+from __future__ import annotations
+
+
+def register_domain_services() -> None:
+    """Register services for the account app.
+
+    Account needs all domain services since widgets (tickets, bookings)
+    pull data from blog, calendar, market, cart, and federation.
+ """ + from shared.services.registry import services + from shared.services.federation_impl import SqlFederationService + from shared.services.blog_impl import SqlBlogService + from shared.services.calendar_impl import SqlCalendarService + from shared.services.market_impl import SqlMarketService + from shared.services.cart_impl import SqlCartService + + if not services.has("federation"): + services.federation = SqlFederationService() + if not services.has("blog"): + services.blog = SqlBlogService() + if not services.has("calendar"): + services.calendar = SqlCalendarService() + if not services.has("market"): + services.market = SqlMarketService() + if not services.has("cart"): + services.cart = SqlCartService() diff --git a/account/templates/_email/magic_link.html b/account/templates/_email/magic_link.html new file mode 100644 index 0000000..3c1eac6 --- /dev/null +++ b/account/templates/_email/magic_link.html @@ -0,0 +1,33 @@ + + + + + + +
+ + +
+

{{ site_name }}

+

Sign in to your account

+

+ Click the button below to sign in. This link will expire in 15 minutes. +

+
+ + Sign in + +
+

Or copy and paste this link into your browser:

+

+ {{ link_url }} +

+
+

+ If you did not request this email, you can safely ignore it. +

+
+
+ + diff --git a/account/templates/_email/magic_link.txt b/account/templates/_email/magic_link.txt new file mode 100644 index 0000000..28a2efb --- /dev/null +++ b/account/templates/_email/magic_link.txt @@ -0,0 +1,8 @@ +Hello, + +Click this link to sign in: +{{ link_url }} + +This link will expire in 15 minutes. + +If you did not request this, you can ignore this email. diff --git a/account/templates/_types/auth/_bookings_panel.html b/account/templates/_types/auth/_bookings_panel.html new file mode 100644 index 0000000..28f8280 --- /dev/null +++ b/account/templates/_types/auth/_bookings_panel.html @@ -0,0 +1,44 @@ +
+
+ +

Bookings

+ + {% if bookings %} +
+ {% for booking in bookings %} +
+
+
+

{{ booking.name }}

+
+ {{ booking.start_at.strftime('%d %b %Y, %H:%M') }} + {% if booking.end_at %} + – {{ booking.end_at.strftime('%H:%M') }} + {% endif %} + {% if booking.calendar_name %} + · {{ booking.calendar_name }} + {% endif %} + {% if booking.cost %} + · £{{ booking.cost }} + {% endif %} +
+
+
+ {% if booking.state == 'confirmed' %} + confirmed + {% elif booking.state == 'provisional' %} + provisional + {% else %} + {{ booking.state }} + {% endif %} +
+
+
+ {% endfor %} +
+ {% else %} +

No bookings yet.

+ {% endif %} + +
+
diff --git a/account/templates/_types/auth/_fragment_panel.html b/account/templates/_types/auth/_fragment_panel.html new file mode 100644 index 0000000..f27345c --- /dev/null +++ b/account/templates/_types/auth/_fragment_panel.html @@ -0,0 +1 @@ +{{ page_fragment_html | safe }} diff --git a/account/templates/_types/auth/_main_panel.html b/account/templates/_types/auth/_main_panel.html new file mode 100644 index 0000000..e80fd12 --- /dev/null +++ b/account/templates/_types/auth/_main_panel.html @@ -0,0 +1,49 @@ +
+
+ + {% if error %} +
+ {{ error }} +
+ {% endif %} + + {# Account header #} +
+
+

Account

+ {% if g.user %} +

{{ g.user.email }}

+ {% if g.user.name %} +

{{ g.user.name }}

+ {% endif %} + {% endif %} +
+
+ + +
+
+ + {# Labels #} + {% set labels = g.user.labels if g.user is defined and g.user.labels is defined else [] %} + {% if labels %} +
+

Labels

+
+ {% for label in labels %} + + {{ label.name }} + + {% endfor %} +
+
+ {% endif %} + +
+
diff --git a/account/templates/_types/auth/_nav.html b/account/templates/_types/auth/_nav.html new file mode 100644 index 0000000..ff5de92 --- /dev/null +++ b/account/templates/_types/auth/_nav.html @@ -0,0 +1,7 @@ +{% import 'macros/links.html' as links %} +{% call links.link(account_url('/newsletters/'), hx_select_search, select_colours, True, aclass=styles.nav_button) %} + newsletters +{% endcall %} +{% if account_nav_html %} + {{ account_nav_html | safe }} +{% endif %} diff --git a/account/templates/_types/auth/_newsletter_toggle.html b/account/templates/_types/auth/_newsletter_toggle.html new file mode 100644 index 0000000..8bb3f69 --- /dev/null +++ b/account/templates/_types/auth/_newsletter_toggle.html @@ -0,0 +1,17 @@ +
+ +
diff --git a/account/templates/_types/auth/_newsletters_panel.html b/account/templates/_types/auth/_newsletters_panel.html new file mode 100644 index 0000000..0f3fdbb --- /dev/null +++ b/account/templates/_types/auth/_newsletters_panel.html @@ -0,0 +1,46 @@ +
+
+ +

Newsletters

+ + {% if newsletter_list %} +
+ {% for item in newsletter_list %} +
+
+

{{ item.newsletter.name }}

+ {% if item.newsletter.description %} +

{{ item.newsletter.description }}

+ {% endif %} +
+
+ {% if item.un %} + {% with un=item.un %} + {% include "_types/auth/_newsletter_toggle.html" %} + {% endwith %} + {% else %} + {# No subscription row yet — show an off toggle that will create one #} +
+ +
+ {% endif %} +
+
+ {% endfor %} +
+ {% else %} +

No newsletters available.

+ {% endif %} + +
+
diff --git a/account/templates/_types/auth/_oob_elements.html b/account/templates/_types/auth/_oob_elements.html new file mode 100644 index 0000000..cafb113 --- /dev/null +++ b/account/templates/_types/auth/_oob_elements.html @@ -0,0 +1,29 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{# Header with app title - includes cart-mini, navigation, and market-specific header #} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-header-child', 'auth-header-child', '_types/auth/header/_header.html')}} + + {% from '_types/root/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/auth/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include oob.main %} +{% endblock %} + + diff --git a/account/templates/_types/auth/_tickets_panel.html b/account/templates/_types/auth/_tickets_panel.html new file mode 100644 index 0000000..69f7596 --- /dev/null +++ b/account/templates/_types/auth/_tickets_panel.html @@ -0,0 +1,44 @@ +
+
+ +

Tickets

+ + {% if tickets %} +
+ {% for ticket in tickets %} +
+
+
+ + {{ ticket.entry_name }} + +
+ {{ ticket.entry_start_at.strftime('%d %b %Y, %H:%M') }} + {% if ticket.calendar_name %} + · {{ ticket.calendar_name }} + {% endif %} + {% if ticket.ticket_type_name %} + · {{ ticket.ticket_type_name }} + {% endif %} +
+
+
+ {% if ticket.state == 'checked_in' %} + checked in + {% elif ticket.state == 'confirmed' %} + confirmed + {% else %} + {{ ticket.state }} + {% endif %} +
+
+
+ {% endfor %} +
+ {% else %} +

No tickets yet.

+ {% endif %} + +
+
diff --git a/account/templates/_types/auth/check_email.html b/account/templates/_types/auth/check_email.html new file mode 100644 index 0000000..e4cea28 --- /dev/null +++ b/account/templates/_types/auth/check_email.html @@ -0,0 +1,33 @@ +{% extends "_types/root/index.html" %} +{% block content %} +
+
+

Check your email

+ +

+ If an account exists for + {{ email }}, + you’ll receive a link to sign in. It expires in 15 minutes. +

+ + {% if email_error %} + + {% endif %} + +

+ + ← Back + +

+
+
+{% endblock %} diff --git a/account/templates/_types/auth/header/_header.html b/account/templates/_types/auth/header/_header.html new file mode 100644 index 0000000..c59a712 --- /dev/null +++ b/account/templates/_types/auth/header/_header.html @@ -0,0 +1,12 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='auth-row', oob=oob) %} + {% call links.link(account_url('/'), hx_select_search ) %} + +
account
+ {% endcall %} + {% call links.desktop_nav() %} + {% include "_types/auth/_nav.html" %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/account/templates/_types/auth/index copy.html b/account/templates/_types/auth/index copy.html new file mode 100644 index 0000000..cd4d6d3 --- /dev/null +++ b/account/templates/_types/auth/index copy.html @@ -0,0 +1,18 @@ +{% extends "_types/root/_index.html" %} + + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('auth-header-child', '_types/auth/header/_header.html') %} + {% block auth_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include "_types/auth/_nav.html" %} +{% endblock %} + +{% block content %} + {% include '_types/auth/_main_panel.html' %} +{% endblock %} diff --git a/account/templates/_types/auth/index.html b/account/templates/_types/auth/index.html new file mode 100644 index 0000000..3c66bf1 --- /dev/null +++ b/account/templates/_types/auth/index.html @@ -0,0 +1,18 @@ +{% extends oob.extends %} + + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row(oob.child_id, oob.header) %} + {% block auth_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include oob.nav %} +{% endblock %} + +{% block content %} + {% include oob.main %} +{% endblock %} diff --git a/account/templates/_types/auth/login.html b/account/templates/_types/auth/login.html new file mode 100644 index 0000000..b55ea99 --- /dev/null +++ b/account/templates/_types/auth/login.html @@ -0,0 +1,46 @@ +{% extends "_types/root/index.html" %} +{% block content %} +
+
+

Sign in

+

+ Enter your email and we’ll email you a one-time sign-in link. +

+ + {% if error %} +
+ {{ error }} +
+ {% endif %} + +
+ +
+ + +
+ + +
+
+
+{% endblock %} diff --git a/account/templates/auth/check_email.html b/account/templates/auth/check_email.html new file mode 100644 index 0000000..5eb1b61 --- /dev/null +++ b/account/templates/auth/check_email.html @@ -0,0 +1,19 @@ +{% extends "_types/root/_index.html" %} +{% block meta %}{% endblock %} +{% block title %}Check your email — Rose Ash{% endblock %} +{% block content %} +
+

Check your email

+

+ We sent a sign-in link to {{ email }}. +

+

+ Click the link in the email to sign in. The link expires in 15 minutes. +

+ {% if email_error %} +
+ {{ email_error }} +
+ {% endif %} +
+{% endblock %} diff --git a/account/templates/auth/login.html b/account/templates/auth/login.html new file mode 100644 index 0000000..79031e5 --- /dev/null +++ b/account/templates/auth/login.html @@ -0,0 +1,36 @@ +{% extends "_types/root/_index.html" %} +{% block meta %}{% endblock %} +{% block title %}Login — Rose Ash{% endblock %} +{% block content %} +
+

Sign in

+ + {% if error %} +
+ {{ error }} +
+ {% endif %} + +
+ +
+ + +
+ +
+
+{% endblock %} diff --git a/account/templates/fragments/auth_menu.html b/account/templates/fragments/auth_menu.html new file mode 100644 index 0000000..eb68cdc --- /dev/null +++ b/account/templates/fragments/auth_menu.html @@ -0,0 +1,36 @@ +{# Desktop auth menu #} + +{# Mobile auth menu #} + +{% if user_email %} + + + {{ user_email }} + +{% else %} + + + sign in or register + +{% endif %} + diff --git a/blog/.gitignore b/blog/.gitignore new file mode 100644 index 0000000..87d616e --- /dev/null +++ b/blog/.gitignore @@ -0,0 +1,9 @@ +__pycache__/ +*.pyc +*.pyo +.env +node_modules/ +*.egg-info/ +dist/ +build/ +.venv/ diff --git a/blog/Dockerfile b/blog/Dockerfile new file mode 100644 index 0000000..585991f --- /dev/null +++ b/blog/Dockerfile @@ -0,0 +1,61 @@ +# syntax=docker/dockerfile:1 + +# ---------- Stage 1: Build editor JS/CSS ---------- +FROM node:20-slim AS editor-build +WORKDIR /build +COPY shared/editor/package.json shared/editor/package-lock.json* ./ +RUN npm ci --ignore-scripts 2>/dev/null || npm install +COPY shared/editor/ ./ +RUN NODE_ENV=production node build.mjs + +# ---------- Stage 2: Python runtime ---------- +FROM python:3.11-slim AS base + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONPATH=/app \ + PIP_NO_CACHE_DIR=1 \ + APP_PORT=8000 \ + APP_MODULE=app:app + +WORKDIR /app + +# Install system deps + psql client +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +COPY shared/requirements.txt ./requirements.txt +RUN pip install -r requirements.txt + +# Shared code (replaces submodule) +COPY shared/ ./shared/ + +# App code +COPY blog/ ./ + +# Sibling models for cross-domain SQLAlchemy imports +COPY market/__init__.py ./market/__init__.py +COPY market/models/ ./market/models/ +COPY cart/__init__.py ./cart/__init__.py +COPY cart/models/ ./cart/models/ +COPY events/__init__.py ./events/__init__.py +COPY events/models/ ./events/models/ +COPY 
federation/__init__.py ./federation/__init__.py
+COPY federation/models/ ./federation/models/
+COPY account/__init__.py ./account/__init__.py
+COPY account/models/ ./account/models/
+
+# Copy built editor assets from stage 1
+COPY --from=editor-build /static/scripts/editor.js /static/scripts/editor.css shared/static/scripts/
+
+# ---------- Runtime setup ----------
+COPY blog/entrypoint.sh /usr/local/bin/entrypoint.sh
+RUN chmod +x /usr/local/bin/entrypoint.sh
+
+RUN useradd -m -u 10001 appuser && chown -R appuser:appuser /app
+USER appuser
+
+EXPOSE ${APP_PORT}
+ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
diff --git a/blog/README.md
new file mode 100644
index 0000000..ef45943
--- /dev/null
+++ b/blog/README.md
@@ -0,0 +1,61 @@
+# Blog App (Coop)
+
+Blog, authentication, and content management service for the Rose Ash cooperative platform. Handles Ghost CMS integration, user auth, and admin settings.
+
+## Architecture
+
+One of six Quart microservices sharing a single PostgreSQL database:
+
+| App | Port | Domain |
+|-----|------|--------|
+| **blog (coop)** | 8000 | Auth, blog, admin, menus, snippets |
+| market | 8001 | Product browsing, Suma scraping |
+| cart | 8002 | Shopping cart, checkout, orders |
+| events | 8003 | Calendars, bookings, tickets |
+| federation | 8004 | ActivityPub, fediverse social |
+| account | 8005 | User accounts, magic-link sign-in |
+
+## Structure
+
+```
+app.py                  # Application factory (create_base_app + blueprints)
+path_setup.py           # Adds project root + app dir to sys.path
+config/app-config.yaml  # App URLs, feature flags, SumUp config
+models/                 # Blog-domain models (+ re-export stubs for shared models)
+bp/                     # Blueprints
+  auth/                 # Magic link login, account, newsletters
+  blog/                 # Post listing, Ghost CMS sync
+  post/                 # Single post view and admin
+  admin/                # Settings admin interface
+  menu_items/           # Navigation menu management
+  snippets/             # Reusable content snippets
+templates/              # Jinja2 templates
+services/               # register_domain_services() — wires blog + calendar + market + cart
+shared/ # Submodule -> git.rose-ash.com/coop/shared.git +``` + +## Cross-Domain Communication + +All inter-app communication uses typed service contracts (no HTTP APIs): + +- `services.calendar.*` — calendar/entry queries via CalendarService protocol +- `services.market.*` — marketplace queries via MarketService protocol +- `services.cart.*` — cart summary via CartService protocol +- `services.federation.*` — AP publishing via FederationService protocol +- `shared.services.navigation` — site navigation tree + +## Domain Events + +- `auth/routes.py` emits `user.logged_in` via `shared.events.emit_event` +- Ghost sync emits `post.published` / `post.updated` for federation + +## Running + +```bash +export DATABASE_URL_ASYNC=postgresql+asyncpg://user:pass@localhost/coop +export REDIS_URL=redis://localhost:6379/0 +export SECRET_KEY=your-secret-key + +alembic -c shared/alembic.ini upgrade head +hypercorn app:app --bind 0.0.0.0:8000 +``` diff --git a/blog/__init__.py b/blog/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/blog/app.py b/blog/app.py new file mode 100644 index 0000000..e59895f --- /dev/null +++ b/blog/app.py @@ -0,0 +1,138 @@ +from __future__ import annotations +import path_setup # noqa: F401 # adds shared/ to sys.path +from pathlib import Path + +from quart import g, request +from jinja2 import FileSystemLoader, ChoiceLoader +from sqlalchemy import select + +from shared.infrastructure.factory import create_base_app +from shared.config import config +from shared.models import KV + +from bp import ( + register_blog_bp, + register_admin, + register_menu_items, + register_snippets, + register_fragments, +) + + +async def blog_context() -> dict: + """ + Blog app context processor. 
+ + - cart_count/cart_total: via cart service (shared DB) + - cart_mini_html / auth_menu_html / nav_tree_html: pre-fetched fragments + """ + from shared.infrastructure.context import base_context + from shared.services.navigation import get_navigation_tree + from shared.services.registry import services + from shared.infrastructure.cart_identity import current_cart_identity + from shared.infrastructure.fragments import fetch_fragments + + ctx = await base_context() + + # Fallback for _nav.html when nav-tree fragment fetch fails + ctx["menu_items"] = await get_navigation_tree(g.s) + + # Cart data via service (replaces cross-app HTTP API) + ident = current_cart_identity() + summary = await services.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + ctx["cart_count"] = summary.count + summary.calendar_count + summary.ticket_count + ctx["cart_total"] = float(summary.total + summary.calendar_total + summary.ticket_total) + + # Pre-fetch cross-app HTML fragments concurrently + # (fetch_fragment auto-skips when inside a fragment request to prevent circular deps) + user = getattr(g, "user", None) + cart_params = {} + if ident["user_id"] is not None: + cart_params["user_id"] = ident["user_id"] + if ident["session_id"] is not None: + cart_params["session_id"] = ident["session_id"] + + auth_params = {"email": user.email} if user else {} + nav_params = {"app_name": "blog", "path": request.path} + + cart_mini_html, auth_menu_html, nav_tree_html = await fetch_fragments([ + ("cart", "cart-mini", cart_params or None), + ("account", "auth-menu", auth_params or None), + ("blog", "nav-tree", nav_params), + ]) + ctx["cart_mini_html"] = cart_mini_html + ctx["auth_menu_html"] = auth_menu_html + ctx["nav_tree_html"] = nav_tree_html + + return ctx + + +def create_app() -> "Quart": + from services import register_domain_services + + app = create_base_app( + "blog", + context_fn=blog_context, + domain_services_fn=register_domain_services, + ) + + # 
App-specific templates override shared templates + app_templates = str(Path(__file__).resolve().parent / "templates") + app.jinja_loader = ChoiceLoader([ + FileSystemLoader(app_templates), + app.jinja_loader, + ]) + + # --- blueprints --- + app.register_blueprint( + register_blog_bp( + url_prefix=config()["blog_root"], + title=config()["blog_title"], + ), + url_prefix=config()["blog_root"], + ) + + app.register_blueprint(register_admin("/settings")) + app.register_blueprint(register_menu_items()) + app.register_blueprint(register_snippets()) + app.register_blueprint(register_fragments()) + + # --- KV admin endpoints --- + @app.get("/settings/kv/") + async def kv_get(key: str): + row = ( + await g.s.execute(select(KV).where(KV.key == key)) + ).scalar_one_or_none() + return {"key": key, "value": (row.value if row else None)} + + @app.post("/settings/kv/") + async def kv_set(key: str): + data = await request.get_json() or {} + val = data.get("value", "") + obj = await g.s.get(KV, key) + if obj is None: + obj = KV(key=key, value=val) + g.s.add(obj) + else: + obj.value = val + return {"ok": True, "key": key, "value": val} + + # --- debug: url rules --- + @app.get("/__rules") + async def dump_rules(): + rules = [] + for r in app.url_map.iter_rules(): + rules.append({ + "endpoint": r.endpoint, + "rule": repr(r.rule), + "methods": sorted(r.methods - {"HEAD", "OPTIONS"}), + "strict_slashes": r.strict_slashes, + }) + return {"rules": rules} + + return app + + +app = create_app() diff --git a/blog/bp/__init__.py b/blog/bp/__init__.py new file mode 100644 index 0000000..59bc262 --- /dev/null +++ b/blog/bp/__init__.py @@ -0,0 +1,5 @@ +from .blog.routes import register as register_blog_bp +from .admin.routes import register as register_admin +from .menu_items.routes import register as register_menu_items +from .snippets.routes import register as register_snippets +from .fragments import register_fragments diff --git a/blog/bp/admin/routes.py b/blog/bp/admin/routes.py new file 
mode 100644 index 0000000..e387c17 --- /dev/null +++ b/blog/bp/admin/routes.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +#from quart import Blueprint, g + +from quart import ( + render_template, + make_response, + Blueprint, + redirect, + url_for, + request, + jsonify +) +from shared.browser.app.redis_cacher import clear_all_cache +from shared.browser.app.authz import require_admin +from shared.browser.app.utils.htmx import is_htmx_request +from shared.config import config +from datetime import datetime + +def register(url_prefix): + bp = Blueprint("settings", __name__, url_prefix = url_prefix) + + @bp.context_processor + async def inject_root(): + return { + "base_title": f"{config()['title']} settings", + } + + @bp.get("/") + @require_admin + async def home(): + + # Determine which template to use based on request type and pagination + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template( + "_types/root/settings/index.html", + ) + + else: + html = await render_template("_types/root/settings/_oob_elements.html") + + + return await make_response(html) + + @bp.get("/cache/") + @require_admin + async def cache(): + if not is_htmx_request(): + html = await render_template("_types/root/settings/cache/index.html") + else: + html = await render_template("_types/root/settings/cache/_oob_elements.html") + return await make_response(html) + + @bp.post("/cache_clear/") + @require_admin + async def cache_clear(): + await clear_all_cache() + if is_htmx_request(): + now = datetime.now() + html = f'Cache cleared at {now.strftime("%H:%M:%S")}' + return html + + return redirect(url_for("settings.cache")) + return bp + + diff --git a/blog/bp/blog/__init__.py b/blog/bp/blog/__init__.py new file mode 100644 index 0000000..85fd1a5 --- /dev/null +++ b/blog/bp/blog/__init__.py @@ -0,0 +1,7 @@ +from __future__ import annotations + +# create the blueprint at package import time +from .routes import register # = 
Blueprint("browse_bp", __name__) + +# import routes AFTER browse_bp is defined so routes can attach to it +from . import routes # noqa: F401 diff --git a/blog/bp/blog/admin/__init__.py b/blog/bp/blog/admin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/blog/bp/blog/admin/routes.py b/blog/bp/blog/admin/routes.py new file mode 100644 index 0000000..4bf8139 --- /dev/null +++ b/blog/bp/blog/admin/routes.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import re +from quart import ( + render_template, + make_response, + Blueprint, + redirect, + url_for, + request, + g, +) +from sqlalchemy import select, delete + +from shared.browser.app.authz import require_admin +from shared.browser.app.utils.htmx import is_htmx_request +from shared.browser.app.redis_cacher import invalidate_tag_cache + +from models.tag_group import TagGroup, TagGroupTag +from models.ghost_content import Tag + + +def _slugify(name: str) -> str: + s = name.strip().lower() + s = re.sub(r"[^\w\s-]", "", s) + s = re.sub(r"[\s_]+", "-", s) + return s.strip("-") + + +async def _unassigned_tags(session): + """Return public, non-deleted tags not assigned to any group.""" + assigned_sq = select(TagGroupTag.tag_id).subquery() + q = ( + select(Tag) + .where( + Tag.deleted_at.is_(None), + (Tag.visibility == "public") | (Tag.visibility.is_(None)), + Tag.id.notin_(select(assigned_sq)), + ) + .order_by(Tag.name) + ) + return list((await session.execute(q)).scalars()) + + +def register(): + bp = Blueprint("tag_groups_admin", __name__, url_prefix="/settings/tag-groups") + + @bp.get("/") + @require_admin + async def index(): + groups = list( + (await g.s.execute( + select(TagGroup).order_by(TagGroup.sort_order, TagGroup.name) + )).scalars() + ) + unassigned = await _unassigned_tags(g.s) + + ctx = {"groups": groups, "unassigned_tags": unassigned} + + if not is_htmx_request(): + return await render_template("_types/blog/admin/tag_groups/index.html", **ctx) + else: + return await 
render_template("_types/blog/admin/tag_groups/_oob_elements.html", **ctx) + + @bp.post("/") + @require_admin + async def create(): + form = await request.form + name = (form.get("name") or "").strip() + if not name: + return redirect(url_for("blog.tag_groups_admin.index")) + + slug = _slugify(name) + feature_image = (form.get("feature_image") or "").strip() or None + colour = (form.get("colour") or "").strip() or None + sort_order = int(form.get("sort_order") or 0) + + tg = TagGroup( + name=name, slug=slug, + feature_image=feature_image, colour=colour, + sort_order=sort_order, + ) + g.s.add(tg) + await g.s.flush() + + await invalidate_tag_cache("blog") + return redirect(url_for("blog.tag_groups_admin.index")) + + @bp.get("//") + @require_admin + async def edit(id: int): + tg = await g.s.get(TagGroup, id) + if not tg: + return redirect(url_for("blog.tag_groups_admin.index")) + + # Assigned tag IDs for this group + assigned_rows = list( + (await g.s.execute( + select(TagGroupTag.tag_id).where(TagGroupTag.tag_group_id == id) + )).scalars() + ) + assigned_tag_ids = set(assigned_rows) + + # All public, non-deleted tags + all_tags = list( + (await g.s.execute( + select(Tag).where( + Tag.deleted_at.is_(None), + (Tag.visibility == "public") | (Tag.visibility.is_(None)), + ).order_by(Tag.name) + )).scalars() + ) + + ctx = { + "group": tg, + "all_tags": all_tags, + "assigned_tag_ids": assigned_tag_ids, + } + + if not is_htmx_request(): + return await render_template("_types/blog/admin/tag_groups/edit.html", **ctx) + else: + return await render_template("_types/blog/admin/tag_groups/_edit_oob.html", **ctx) + + @bp.post("//") + @require_admin + async def save(id: int): + tg = await g.s.get(TagGroup, id) + if not tg: + return redirect(url_for("blog.tag_groups_admin.index")) + + form = await request.form + name = (form.get("name") or "").strip() + if name: + tg.name = name + tg.slug = _slugify(name) + tg.feature_image = (form.get("feature_image") or "").strip() or None + 
tg.colour = (form.get("colour") or "").strip() or None + tg.sort_order = int(form.get("sort_order") or 0) + + # Update tag assignments + selected_tag_ids = set() + for val in form.getlist("tag_ids"): + try: + selected_tag_ids.add(int(val)) + except (ValueError, TypeError): + pass + + # Remove old assignments + await g.s.execute( + delete(TagGroupTag).where(TagGroupTag.tag_group_id == id) + ) + await g.s.flush() + + # Add new assignments + for tid in selected_tag_ids: + g.s.add(TagGroupTag(tag_group_id=id, tag_id=tid)) + await g.s.flush() + + await invalidate_tag_cache("blog") + return redirect(url_for("blog.tag_groups_admin.edit", id=id)) + + @bp.post("//delete/") + @require_admin + async def delete_group(id: int): + tg = await g.s.get(TagGroup, id) + if tg: + await g.s.delete(tg) + await g.s.flush() + await invalidate_tag_cache("blog") + return redirect(url_for("blog.tag_groups_admin.index")) + + return bp diff --git a/blog/bp/blog/filters/qs.py b/blog/bp/blog/filters/qs.py new file mode 100644 index 0000000..073dd13 --- /dev/null +++ b/blog/bp/blog/filters/qs.py @@ -0,0 +1,120 @@ +from quart import request + +from typing import Iterable, Optional, Union + +from shared.browser.app.filters.qs_base import ( + KEEP, _norm, make_filter_set, build_qs, +) +from shared.browser.app.filters.query_types import BlogQuery + + +def decode() -> BlogQuery: + page = int(request.args.get("page", 1)) + search = request.args.get("search") + sort = request.args.get("sort") + liked = request.args.get("liked") + drafts = request.args.get("drafts") + + selected_tags = tuple(s.strip() for s in request.args.getlist("tag") if s.strip())[:1] + selected_authors = tuple(s.strip().lower() for s in request.args.getlist("author") if s.strip())[:1] + selected_groups = tuple(s.strip() for s in request.args.getlist("group") if s.strip())[:1] + view = request.args.get("view") or None + + return BlogQuery(page, search, sort, selected_tags, selected_authors, liked, view, drafts, selected_groups) + + 
def makeqs_factory():
    """
    Build a makeqs(...) that starts from the current filters + page.
    Auto-resets page to 1 when filters change unless you pass page explicitly.
    """
    q = decode()

    def _nonblank(values):
        # Drop empty / whitespace-only entries from a filter tuple.
        return [v for v in values if (v or "").strip()]

    base_tags = _nonblank(q.selected_tags)
    base_authors = _nonblank(q.selected_authors)
    base_groups = _nonblank(q.selected_groups)
    base_search = q.search or None
    base_liked = q.liked or None
    base_sort = q.sort or None
    base_page = int(q.page or 1)
    base_view = q.view or None
    base_drafts = q.drafts or None

    def makeqs(
        *,
        clear_filters: bool = False,
        add_tag: Union[str, Iterable[str], None] = None,
        remove_tag: Union[str, Iterable[str], None] = None,
        add_author: Union[str, Iterable[str], None] = None,
        remove_author: Union[str, Iterable[str], None] = None,
        add_group: Union[str, Iterable[str], None] = None,
        remove_group: Union[str, Iterable[str], None] = None,
        search: Union[str, None, object] = KEEP,
        sort: Union[str, None, object] = KEEP,
        page: Union[int, None, object] = None,
        extra: Optional[Iterable[tuple]] = None,
        leading_q: bool = True,
        liked: Union[bool, None, object] = KEEP,
        view: Union[str, None, object] = KEEP,
        drafts: Union[str, None, object] = KEEP,
    ) -> str:
        # NOTE(review): the default for `page` is None (page omitted from the
        # query string), not KEEP — the "auto reset to 1" path only runs when
        # callers pass page=KEEP explicitly. Confirm callers expect this.
        groups = make_filter_set(base_groups, add_group, remove_group, clear_filters, single_select=True)
        tags = make_filter_set(base_tags, add_tag, remove_tag, clear_filters, single_select=True)
        authors = make_filter_set(base_authors, add_author, remove_author, clear_filters, single_select=True)

        # Mutual exclusion: selecting a group clears tags, selecting a tag clears groups
        if add_group is not None:
            tags = []
        if add_tag is not None:
            groups = []

        # Scalar filters: KEEP means "carry over current value".
        if clear_filters:
            final_search = None
        elif search is KEEP:
            final_search = base_search
        else:
            final_search = (search or "").strip() or None

        final_sort = base_sort if sort is KEEP else (sort or None)

        if clear_filters:
            final_liked = None
        elif liked is KEEP:
            final_liked = base_liked
        else:
            final_liked = liked

        final_view = base_view if view is KEEP else (view or None)

        if clear_filters:
            final_drafts = None
        elif drafts is KEEP:
            final_drafts = base_drafts
        else:
            final_drafts = drafts or None

        def _differs(new, old):
            # Order-insensitive comparison of normalised filter values.
            return set(map(_norm, new)) != set(map(_norm, old))

        filters_changed = (
            _differs(tags, base_tags)
            or _differs(authors, base_authors)
            or _differs(groups, base_groups)
            or final_search != base_search
            or final_sort != base_sort
            or final_liked != base_liked
            or final_drafts != base_drafts
        )

        # Page logic: KEEP -> reset to 1 on any filter change, else keep current.
        if page is KEEP:
            final_page = 1 if filters_changed else base_page
        else:
            final_page = page

        # Assemble (key, value) pairs in a stable order.
        params = [("group", s) for s in groups]
        params += [("tag", s) for s in tags]
        params += [("author", s) for s in authors]
        if final_search:
            params.append(("search", final_search))
        if final_liked is not None:
            params.append(("liked", final_liked))
        if final_sort:
            params.append(("sort", final_sort))
        if final_view:
            params.append(("view", final_view))
        if final_drafts:
            params.append(("drafts", final_drafts))
        if final_page is not None:
            params.append(("page", str(final_page)))
        for k, v in (extra or ()):
            if v is not None:
                params.append((k, str(v)))

        return build_qs(params, leading_q=leading_q)

    return makeqs
+""" +from __future__ import annotations + +import logging +import os + +import httpx +from quart import Blueprint, request, jsonify, g +from sqlalchemy import select, or_ + +from shared.browser.app.authz import require_admin, require_login +from models import Snippet +from .ghost_admin_token import make_ghost_admin_jwt + +log = logging.getLogger(__name__) + +GHOST_ADMIN_API_URL = os.environ["GHOST_ADMIN_API_URL"] +MAX_IMAGE_SIZE = 10 * 1024 * 1024 # 10 MB +MAX_MEDIA_SIZE = 100 * 1024 * 1024 # 100 MB +MAX_FILE_SIZE = 50 * 1024 * 1024 # 50 MB + +ALLOWED_IMAGE_MIMETYPES = frozenset({ + "image/jpeg", "image/png", "image/gif", "image/webp", "image/svg+xml", +}) +ALLOWED_MEDIA_MIMETYPES = frozenset({ + "audio/mpeg", "audio/ogg", "audio/wav", "audio/mp4", "audio/aac", + "video/mp4", "video/webm", "video/ogg", +}) + +editor_api_bp = Blueprint("editor_api", __name__, url_prefix="/editor-api") + + +def _auth_header() -> dict[str, str]: + return {"Authorization": f"Ghost {make_ghost_admin_jwt()}"} + + +@editor_api_bp.post("/images/upload/") +@require_admin +async def upload_image(): + """Proxy image upload to Ghost Admin API.""" + files = await request.files + uploaded = files.get("file") + if not uploaded: + return jsonify({"errors": [{"message": "No file provided"}]}), 400 + + content = uploaded.read() + if len(content) > MAX_IMAGE_SIZE: + return jsonify({"errors": [{"message": "File too large (max 10 MB)"}]}), 413 + + if uploaded.content_type not in ALLOWED_IMAGE_MIMETYPES: + return jsonify({"errors": [{"message": f"Unsupported file type: {uploaded.content_type}"}]}), 415 + + url = f"{GHOST_ADMIN_API_URL}/images/upload/" + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.post( + url, + headers=_auth_header(), + files={"file": (uploaded.filename, content, uploaded.content_type)}, + ) + + if not resp.is_success: + log.error("Ghost image upload failed %s: %s", resp.status_code, resp.text[:500]) + + return resp.json(), resp.status_code + + 
+@editor_api_bp.post("/media/upload/") +@require_admin +async def upload_media(): + """Proxy audio/video upload to Ghost Admin API.""" + files = await request.files + uploaded = files.get("file") + if not uploaded: + return jsonify({"errors": [{"message": "No file provided"}]}), 400 + + content = uploaded.read() + if len(content) > MAX_MEDIA_SIZE: + return jsonify({"errors": [{"message": "File too large (max 100 MB)"}]}), 413 + + if uploaded.content_type not in ALLOWED_MEDIA_MIMETYPES: + return jsonify({"errors": [{"message": f"Unsupported media type: {uploaded.content_type}"}]}), 415 + + ghost_files = {"file": (uploaded.filename, content, uploaded.content_type)} + + # Optional video thumbnail + thumbnail = files.get("thumbnail") + if thumbnail: + thumb_content = thumbnail.read() + ghost_files["thumbnail"] = (thumbnail.filename, thumb_content, thumbnail.content_type) + + url = f"{GHOST_ADMIN_API_URL}/media/upload/" + async with httpx.AsyncClient(timeout=60) as client: + resp = await client.post(url, headers=_auth_header(), files=ghost_files) + + if not resp.is_success: + log.error("Ghost media upload failed %s: %s", resp.status_code, resp.text[:500]) + + return resp.json(), resp.status_code + + +@editor_api_bp.post("/files/upload/") +@require_admin +async def upload_file(): + """Proxy file upload to Ghost Admin API.""" + files = await request.files + uploaded = files.get("file") + if not uploaded: + return jsonify({"errors": [{"message": "No file provided"}]}), 400 + + content = uploaded.read() + if len(content) > MAX_FILE_SIZE: + return jsonify({"errors": [{"message": "File too large (max 50 MB)"}]}), 413 + + url = f"{GHOST_ADMIN_API_URL}/files/upload/" + async with httpx.AsyncClient(timeout=60) as client: + resp = await client.post( + url, + headers=_auth_header(), + files={"file": (uploaded.filename, content, uploaded.content_type)}, + ) + + if not resp.is_success: + log.error("Ghost file upload failed %s: %s", resp.status_code, resp.text[:500]) + + return 
resp.json(), resp.status_code + + +@editor_api_bp.get("/oembed/") +@require_admin +async def oembed_proxy(): + """Proxy oembed lookups to Ghost Admin API.""" + params = dict(request.args) + if not params.get("url"): + return jsonify({"errors": [{"message": "url parameter required"}]}), 400 + + url = f"{GHOST_ADMIN_API_URL}/oembed/" + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.get(url, headers=_auth_header(), params=params) + + if not resp.is_success: + log.error("Ghost oembed failed %s: %s", resp.status_code, resp.text[:500]) + + return resp.json(), resp.status_code + + +# ── Snippets ──────────────────────────────────────────────────────── + +VALID_VISIBILITY = frozenset({"private", "shared", "admin"}) + + +@editor_api_bp.get("/snippets/") +@require_login +async def list_snippets(): + """Return snippets visible to the current user.""" + uid = g.user.id + is_admin = g.rights.get("admin") + + filters = [Snippet.user_id == uid, Snippet.visibility == "shared"] + if is_admin: + filters.append(Snippet.visibility == "admin") + + rows = (await g.s.execute( + select(Snippet).where(or_(*filters)).order_by(Snippet.name) + )).scalars().all() + + return jsonify([ + {"id": s.id, "name": s.name, "value": s.value, "visibility": s.visibility} + for s in rows + ]) + + +@editor_api_bp.post("/snippets/") +@require_login +async def create_snippet(): + """Create or upsert a snippet by (user_id, name).""" + data = await request.get_json(force=True) + name = (data.get("name") or "").strip() + value = data.get("value") + visibility = data.get("visibility", "private") + + if not name or value is None: + return jsonify({"error": "name and value are required"}), 400 + if visibility not in VALID_VISIBILITY: + return jsonify({"error": f"visibility must be one of {sorted(VALID_VISIBILITY)}"}), 400 + if visibility != "private" and not g.rights.get("admin"): + visibility = "private" + + uid = g.user.id + + existing = (await g.s.execute( + 
select(Snippet).where(Snippet.user_id == uid, Snippet.name == name) + )).scalar_one_or_none() + + if existing: + existing.value = value + existing.visibility = visibility + snippet = existing + else: + snippet = Snippet(user_id=uid, name=name, value=value, visibility=visibility) + g.s.add(snippet) + + await g.s.flush() + return jsonify({ + "id": snippet.id, "name": snippet.name, + "value": snippet.value, "visibility": snippet.visibility, + }), 200 if existing else 201 + + +@editor_api_bp.patch("/snippets//") +@require_login +async def patch_snippet(snippet_id: int): + """Update snippet visibility. Only admins may set shared/admin.""" + snippet = await g.s.get(Snippet, snippet_id) + if not snippet: + return jsonify({"error": "not found"}), 404 + + is_admin = g.rights.get("admin") + + if snippet.user_id != g.user.id and not is_admin: + return jsonify({"error": "forbidden"}), 403 + + data = await request.get_json(force=True) + visibility = data.get("visibility") + if visibility is not None: + if visibility not in VALID_VISIBILITY: + return jsonify({"error": f"visibility must be one of {sorted(VALID_VISIBILITY)}"}), 400 + if visibility != "private" and not is_admin: + return jsonify({"error": "only admins may set shared/admin visibility"}), 403 + snippet.visibility = visibility + + await g.s.flush() + return jsonify({ + "id": snippet.id, "name": snippet.name, + "value": snippet.value, "visibility": snippet.visibility, + }) + + +@editor_api_bp.delete("/snippets//") +@require_login +async def delete_snippet(snippet_id: int): + """Delete a snippet. 
Owners can delete their own; admins can delete any.""" + snippet = await g.s.get(Snippet, snippet_id) + if not snippet: + return jsonify({"error": "not found"}), 404 + + if snippet.user_id != g.user.id and not g.rights.get("admin"): + return jsonify({"error": "forbidden"}), 403 + + await g.s.delete(snippet) + await g.s.flush() + return jsonify({"ok": True}) diff --git a/blog/bp/blog/ghost/ghost_admin_token.py b/blog/bp/blog/ghost/ghost_admin_token.py new file mode 100644 index 0000000..1974075 --- /dev/null +++ b/blog/bp/blog/ghost/ghost_admin_token.py @@ -0,0 +1,46 @@ +import os +import time +import jwt # PyJWT +from typing import Tuple + + +def _split_key(raw_key: str) -> Tuple[str, bytes]: + """ + raw_key is the 'id:secret' from Ghost. + Returns (id, secret_bytes) + """ + key_id, key_secret_hex = raw_key.split(':', 1) + secret_bytes = bytes.fromhex(key_secret_hex) + return key_id, secret_bytes + + +def make_ghost_admin_jwt() -> str: + """ + Generate a short-lived JWT suitable for Authorization: Ghost + """ + raw_key = os.environ["GHOST_ADMIN_API_KEY"] + key_id, secret_bytes = _split_key(raw_key) + + now = int(time.time()) + + payload = { + "iat": now, + "exp": now + 5 * 60, # now + 5 minutes + "aud": "/admin/", + } + + headers = { + "alg": "HS256", + "kid": key_id, + "typ": "JWT", + } + + token = jwt.encode( + payload, + secret_bytes, + algorithm="HS256", + headers=headers, + ) + + # PyJWT returns str in recent versions; Ghost expects bare token string + return token diff --git a/blog/bp/blog/ghost/ghost_posts.py b/blog/bp/blog/ghost/ghost_posts.py new file mode 100644 index 0000000..7d16fbf --- /dev/null +++ b/blog/bp/blog/ghost/ghost_posts.py @@ -0,0 +1,204 @@ +""" +Ghost Admin API – post CRUD. + +Uses the same JWT auth and httpx patterns as ghost_sync.py. 
+""" +from __future__ import annotations + +import logging +import os + +import httpx + +from .ghost_admin_token import make_ghost_admin_jwt + +log = logging.getLogger(__name__) + +GHOST_ADMIN_API_URL = os.environ["GHOST_ADMIN_API_URL"] + + +def _auth_header() -> dict[str, str]: + return {"Authorization": f"Ghost {make_ghost_admin_jwt()}"} + + +def _check(resp: httpx.Response) -> None: + """Raise with the Ghost error body so callers see what went wrong.""" + if resp.is_success: + return + body = resp.text[:2000] + log.error("Ghost API %s %s → %s: %s", resp.request.method, resp.request.url, resp.status_code, body) + resp.raise_for_status() + + +async def get_post_for_edit(ghost_id: str, *, is_page: bool = False) -> dict | None: + """Fetch a single post/page by Ghost ID, including lexical source.""" + resource = "pages" if is_page else "posts" + url = ( + f"{GHOST_ADMIN_API_URL}/{resource}/{ghost_id}/" + "?formats=lexical,html,mobiledoc&include=newsletters" + ) + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.get(url, headers=_auth_header()) + if resp.status_code == 404: + return None + _check(resp) + return resp.json()[resource][0] + + +async def create_post( + title: str, + lexical_json: str, + status: str = "draft", + feature_image: str | None = None, + custom_excerpt: str | None = None, + feature_image_caption: str | None = None, +) -> dict: + """Create a new post in Ghost. 
Returns the created post dict.""" + post_body: dict = { + "title": title, + "lexical": lexical_json, + "mobiledoc": None, + "status": status, + } + if feature_image: + post_body["feature_image"] = feature_image + if custom_excerpt: + post_body["custom_excerpt"] = custom_excerpt + if feature_image_caption is not None: + post_body["feature_image_caption"] = feature_image_caption + payload = {"posts": [post_body]} + url = f"{GHOST_ADMIN_API_URL}/posts/" + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.post(url, json=payload, headers=_auth_header()) + _check(resp) + return resp.json()["posts"][0] + + +async def create_page( + title: str, + lexical_json: str, + status: str = "draft", + feature_image: str | None = None, + custom_excerpt: str | None = None, + feature_image_caption: str | None = None, +) -> dict: + """Create a new page in Ghost (via /pages/ endpoint). Returns the created page dict.""" + page_body: dict = { + "title": title, + "lexical": lexical_json, + "mobiledoc": None, + "status": status, + } + if feature_image: + page_body["feature_image"] = feature_image + if custom_excerpt: + page_body["custom_excerpt"] = custom_excerpt + if feature_image_caption is not None: + page_body["feature_image_caption"] = feature_image_caption + payload = {"pages": [page_body]} + url = f"{GHOST_ADMIN_API_URL}/pages/" + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.post(url, json=payload, headers=_auth_header()) + _check(resp) + return resp.json()["pages"][0] + + +async def update_post( + ghost_id: str, + lexical_json: str, + title: str | None, + updated_at: str, + feature_image: str | None = None, + custom_excerpt: str | None = None, + feature_image_caption: str | None = None, + status: str | None = None, + newsletter_slug: str | None = None, + email_segment: str | None = None, + email_only: bool | None = None, + is_page: bool = False, +) -> dict: + """Update an existing Ghost post. Returns the updated post dict. 
+ + ``updated_at`` is Ghost's optimistic-locking token – pass the value + you received from ``get_post_for_edit``. + + When ``newsletter_slug`` is set the publish request also triggers an + email send via Ghost's query-parameter API: + ``?newsletter={slug}&email_segment={segment}``. + """ + post_body: dict = { + "lexical": lexical_json, + "mobiledoc": None, + "updated_at": updated_at, + } + if title is not None: + post_body["title"] = title + if feature_image is not None: + post_body["feature_image"] = feature_image or None + if custom_excerpt is not None: + post_body["custom_excerpt"] = custom_excerpt or None + if feature_image_caption is not None: + post_body["feature_image_caption"] = feature_image_caption + if status is not None: + post_body["status"] = status + if email_only: + post_body["email_only"] = True + resource = "pages" if is_page else "posts" + payload = {resource: [post_body]} + + url = f"{GHOST_ADMIN_API_URL}/{resource}/{ghost_id}/" + if newsletter_slug: + url += f"?newsletter={newsletter_slug}" + if email_segment: + url += f"&email_segment={email_segment}" + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.put(url, json=payload, headers=_auth_header()) + _check(resp) + return resp.json()[resource][0] + + +_SETTINGS_FIELDS = ( + "slug", + "published_at", + "featured", + "visibility", + "email_only", + "custom_template", + "meta_title", + "meta_description", + "canonical_url", + "og_image", + "og_title", + "og_description", + "twitter_image", + "twitter_title", + "twitter_description", + "tags", + "feature_image_alt", +) + + +async def update_post_settings( + ghost_id: str, + updated_at: str, + is_page: bool = False, + **kwargs, +) -> dict: + """Update Ghost post/page settings (slug, tags, SEO, social, etc.). + + Only non-None keyword args are included in the PUT payload. + Accepts any key from ``_SETTINGS_FIELDS``. 
+ """ + resource = "pages" if is_page else "posts" + post_body: dict = {"updated_at": updated_at} + for key in _SETTINGS_FIELDS: + val = kwargs.get(key) + if val is not None: + post_body[key] = val + + payload = {resource: [post_body]} + url = f"{GHOST_ADMIN_API_URL}/{resource}/{ghost_id}/" + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.put(url, json=payload, headers=_auth_header()) + _check(resp) + return resp.json()[resource][0] diff --git a/blog/bp/blog/ghost/ghost_sync.py b/blog/bp/blog/ghost/ghost_sync.py new file mode 100644 index 0000000..c3d92ee --- /dev/null +++ b/blog/bp/blog/ghost/ghost_sync.py @@ -0,0 +1,1240 @@ +from __future__ import annotations +import os +import re +import asyncio +from datetime import datetime +from html import escape as html_escape +from typing import Dict, Any, Optional + +import httpx +from sqlalchemy import select, delete, or_, and_ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm.attributes import flag_modified # for non-Mutable JSON columns + +# Content models +from models.ghost_content import ( + Post, Author, Tag, PostAuthor, PostTag +) +from shared.models.page_config import PageConfig + +# User-centric membership models +from shared.models import User +from shared.models.ghost_membership_entities import ( + GhostLabel, UserLabel, + GhostNewsletter, UserNewsletter, + GhostTier, GhostSubscription, +) + +from .ghost_admin_token import make_ghost_admin_jwt + +from urllib.parse import quote + +GHOST_ADMIN_API_URL = os.environ["GHOST_ADMIN_API_URL"] + +from shared.browser.app.utils import ( + utcnow +) + + + +def _auth_header() -> dict[str, str]: + return {"Authorization": f"Ghost {make_ghost_admin_jwt()}"} + + +def _iso(val: str | None) -> datetime | None: + if not val: + return None + return datetime.fromisoformat(val.replace("Z", "+00:00")) + +def _to_str_or_none(v) -> Optional[str]: + """Return a trimmed string if v is safely stringifiable; else None.""" + if v is None: + 
return None + # Disallow complex types that would stringify to JSON-like noise + if isinstance(v, (dict, list, set, tuple, bytes, bytearray)): + return None + s = str(v).strip() + return s or None + + +def _sanitize_member_payload(payload: dict) -> dict: + """Coerce types Ghost expects and drop empties to avoid 422/500 quirks.""" + out: dict = {} + + # email -> lowercase string + email = _to_str_or_none(payload.get("email")) + if email: + out["email"] = email.lower() + + # name / note must be strings if present + name = _to_str_or_none(payload.get("name")) + if name is not None: + out["name"] = name + + note = _to_str_or_none(payload.get("note")) + if note is not None: + out["note"] = note + + # subscribed -> bool + if "subscribed" in payload: + out["subscribed"] = bool(payload.get("subscribed")) + + # labels: keep only rows that have a non-empty id OR name + labels = [] + for item in payload.get("labels") or []: + gid = _to_str_or_none(item.get("id")) + gname = _to_str_or_none(item.get("name")) + if gid: + labels.append({"id": gid}) + elif gname: # only include if non-empty + labels.append({"name": gname}) + if labels: + out["labels"] = labels + + # newsletters: keep only rows with id OR name; coerce subscribed -> bool + newsletters = [] + for item in payload.get("newsletters") or []: + gid = _to_str_or_none(item.get("id")) + gname = _to_str_or_none(item.get("name")) + row = {"subscribed": bool(item.get("subscribed", True))} + if gid: + row["id"] = gid + newsletters.append(row) + elif gname: + row["name"] = gname + newsletters.append(row) + if newsletters: + out["newsletters"] = newsletters + + # id (if we carry a known ghost_id) + gid = _to_str_or_none(payload.get("id")) + if gid: + out["id"] = gid + + return out +# ===================== +# CONTENT UPSERT HELPERS +# ===================== + +async def _upsert_author(sess: AsyncSession, ga: Dict[str, Any]) -> Author: + res = await sess.execute(select(Author).where(Author.ghost_id == ga["id"])) + obj = 
res.scalar_one_or_none() + if obj is None: + obj = Author(ghost_id=ga["id"]) + sess.add(obj) + + # revive if soft-deleted + obj.deleted_at = None + + obj.slug = ga.get("slug") or obj.slug + obj.name = ga.get("name") or obj.name + obj.email = ga.get("email") or obj.email + obj.profile_image = ga.get("profile_image") + obj.cover_image = ga.get("cover_image") + obj.bio = ga.get("bio") + obj.website = ga.get("website") + obj.location = ga.get("location") + obj.facebook = ga.get("facebook") + obj.twitter = ga.get("twitter") + obj.created_at = _iso(ga.get("created_at")) or obj.created_at or utcnow() + obj.updated_at = _iso(ga.get("updated_at")) or utcnow() + + await sess.flush() + return obj + + +async def _upsert_tag(sess: AsyncSession, gt: Dict[str, Any]) -> Tag: + res = await sess.execute(select(Tag).where(Tag.ghost_id == gt["id"])) + obj = res.scalar_one_or_none() + if obj is None: + obj = Tag(ghost_id=gt["id"]) + sess.add(obj) + + obj.deleted_at = None # revive if soft-deleted + + obj.slug = gt.get("slug") or obj.slug + obj.name = gt.get("name") or obj.name + obj.description = gt.get("description") + obj.visibility = gt.get("visibility") or obj.visibility + obj.feature_image = gt.get("feature_image") + obj.meta_title = gt.get("meta_title") + obj.meta_description = gt.get("meta_description") + obj.created_at = _iso(gt.get("created_at")) or obj.created_at or utcnow() + obj.updated_at = _iso(gt.get("updated_at")) or utcnow() + + await sess.flush() + return obj + + +def _apply_ghost_fields(obj: Post, gp: Dict[str, Any], author_map: Dict[str, Author], tag_map: Dict[str, Tag]) -> None: + """Apply Ghost API fields to a Post ORM object.""" + obj.deleted_at = None # revive if soft-deleted + + obj.uuid = gp.get("uuid") or obj.uuid + obj.slug = gp.get("slug") or obj.slug + obj.title = gp.get("title") or obj.title + obj.html = gp.get("html") + obj.plaintext = gp.get("plaintext") + obj.mobiledoc = gp.get("mobiledoc") + obj.lexical = gp.get("lexical") + obj.feature_image = 
gp.get("feature_image") + obj.feature_image_alt = gp.get("feature_image_alt") + obj.feature_image_caption = gp.get("feature_image_caption") + obj.excerpt = gp.get("excerpt") + obj.custom_excerpt = gp.get("custom_excerpt") + obj.visibility = gp.get("visibility") or obj.visibility + obj.status = gp.get("status") or obj.status + obj.featured = bool(gp.get("featured") or False) + obj.is_page = bool(gp.get("page") or False) + obj.email_only = bool(gp.get("email_only") or False) + obj.canonical_url = gp.get("canonical_url") + obj.meta_title = gp.get("meta_title") + obj.meta_description = gp.get("meta_description") + obj.og_image = gp.get("og_image") + obj.og_title = gp.get("og_title") + obj.og_description = gp.get("og_description") + obj.twitter_image = gp.get("twitter_image") + obj.twitter_title = gp.get("twitter_title") + obj.twitter_description = gp.get("twitter_description") + obj.custom_template = gp.get("custom_template") + obj.reading_time = gp.get("reading_time") + obj.comment_id = gp.get("comment_id") + + obj.published_at = _iso(gp.get("published_at")) + obj.updated_at = _iso(gp.get("updated_at")) or obj.updated_at or utcnow() + obj.created_at = _iso(gp.get("created_at")) or obj.created_at or utcnow() + + pa = gp.get("primary_author") + obj.primary_author_id = author_map[pa["id"].strip()].id if pa else None # type: ignore[index] + + pt = gp.get("primary_tag") + obj.primary_tag_id = tag_map[pt["id"].strip()].id if (pt and pt["id"] in tag_map) else None # type: ignore[index] + + +async def _upsert_post(sess: AsyncSession, gp: Dict[str, Any], author_map: Dict[str, Author], tag_map: Dict[str, Tag]) -> tuple[Post, str | None]: + """Upsert a post. 
Returns (post, old_status) where old_status is None for new rows.""" + from sqlalchemy.exc import IntegrityError + + res = await sess.execute(select(Post).where(Post.ghost_id == gp["id"])) + obj = res.scalar_one_or_none() + + old_status = obj.status if obj is not None else None + + if obj is not None: + # Row exists — just update + _apply_ghost_fields(obj, gp, author_map, tag_map) + await sess.flush() + else: + # Row doesn't exist — try to insert within a savepoint + obj = Post(ghost_id=gp["id"]) # type: ignore[call-arg] + try: + async with sess.begin_nested(): + sess.add(obj) + _apply_ghost_fields(obj, gp, author_map, tag_map) + await sess.flush() + except IntegrityError: + # Race condition: another request inserted this ghost_id. + # Savepoint rolled back; re-select and update. + res = await sess.execute(select(Post).where(Post.ghost_id == gp["id"])) + obj = res.scalar_one() + _apply_ghost_fields(obj, gp, author_map, tag_map) + await sess.flush() + + # Backfill user_id from primary author email if not already set + if obj.user_id is None and obj.primary_author_id is not None: + pa_obj = author_map.get(gp.get("primary_author", {}).get("id", "")) + if pa_obj and pa_obj.email: + user_res = await sess.execute( + select(User).where(User.email.ilike(pa_obj.email)) + ) + matched_user = user_res.scalar_one_or_none() + if matched_user: + obj.user_id = matched_user.id + await sess.flush() + + # rebuild post_authors + await sess.execute(delete(PostAuthor).where(PostAuthor.post_id == obj.id)) + for idx, a in enumerate(gp.get("authors") or []): + aa = author_map[a["id"]] + sess.add(PostAuthor(post_id=obj.id, author_id=aa.id, sort_order=idx)) + + # rebuild post_tags + await sess.execute(delete(PostTag).where(PostTag.post_id == obj.id)) + for idx, t in enumerate(gp.get("tags") or []): + tt = tag_map[t["id"]] + sess.add(PostTag(post_id=obj.id, tag_id=tt.id, sort_order=idx)) + + # Auto-create PageConfig for pages + if obj.is_page: + existing_pc = (await sess.execute( + 
select(PageConfig).where(PageConfig.container_type == "page", PageConfig.container_id == obj.id) + )).scalar_one_or_none() + if existing_pc is None: + sess.add(PageConfig(container_type="page", container_id=obj.id, features={})) + await sess.flush() + + return obj, old_status + +async def _ghost_find_member_by_email(email: str) -> Optional[dict]: + """Return first Ghost member with this email, or None.""" + if not email: + return None + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.get( + f"{GHOST_ADMIN_API_URL}/members/?filter=email:{quote(email)}&limit=1", + headers=_auth_header(), + ) + resp.raise_for_status() + members = resp.json().get("members") or [] + return members[0] if members else None + + +# --- add this helper next to fetch_all_posts_from_ghost() --- + +async def _fetch_all_from_ghost(endpoint: str) -> list[dict[str, Any]]: + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.get( + f"{GHOST_ADMIN_API_URL}/{endpoint}/?include=authors,tags&limit=all&formats=html,plaintext,mobiledoc,lexical", + headers=_auth_header(), + ) + resp.raise_for_status() + # admin posts endpoint returns {"posts": [...]}, pages returns {"pages": [...]} + key = "posts" if endpoint == "posts" else "pages" + return resp.json().get(key, []) + +async def fetch_all_posts_and_pages_from_ghost() -> list[dict[str, Any]]: + posts, pages = await asyncio.gather( + _fetch_all_from_ghost("posts"), + _fetch_all_from_ghost("pages"), + ) + # Be explicit: ensure page flag exists for pages (Ghost typically includes "page": true) + for p in pages: + p["page"] = True + return posts + pages + + +async def sync_all_content_from_ghost(sess: AsyncSession) -> None: + #data = await fetch_all_posts_from_ghost() + data = await fetch_all_posts_and_pages_from_ghost() + # Use a transaction so all upserts/soft-deletes commit together + # buckets of authors/tags we saw in Ghost + author_bucket: Dict[str, dict[str, Any]] = {} + tag_bucket: Dict[str, dict[str, 
Any]] = {} + + for p in data: + for a in p.get("authors") or []: + author_bucket[a["id"]] = a + if p.get("primary_author"): + author_bucket[p["primary_author"]["id"]] = p["primary_author"] + + for t in p.get("tags") or []: + tag_bucket[t["id"]] = t + if p.get("primary_tag"): + tag_bucket[p["primary_tag"]["id"]] = p["primary_tag"] + + # sets of ghost_ids we've seen in Ghost RIGHT NOW + seen_post_ids = {p["id"] for p in data} + seen_author_ids = set(author_bucket.keys()) + seen_tag_ids = set(tag_bucket.keys()) + + # upsert authors + author_map: Dict[str, Author] = {} + for ga in author_bucket.values(): + a = await _upsert_author(sess, ga) + author_map[ga["id"]] = a + + # upsert tags + tag_map: Dict[str, Tag] = {} + for gt in tag_bucket.values(): + t = await _upsert_tag(sess, gt) + tag_map[gt["id"]] = t + + # upsert posts (including M2M) + for gp in data: + await _upsert_post(sess, gp, author_map, tag_map) + + # soft-delete anything that no longer exists in Ghost + now = utcnow() + + # Authors not seen -> mark deleted_at if not already + db_authors = await sess.execute(select(Author)) + for local_author in db_authors.scalars(): + if local_author.ghost_id not in seen_author_ids: + if local_author.deleted_at is None: + local_author.deleted_at = now + + # Tags not seen -> mark deleted_at + db_tags = await sess.execute(select(Tag)) + for local_tag in db_tags.scalars(): + if local_tag.ghost_id not in seen_tag_ids: + if local_tag.deleted_at is None: + local_tag.deleted_at = now + + # Posts not seen -> mark deleted_at + db_posts = await sess.execute(select(Post)) + for local_post in db_posts.scalars(): + if local_post.ghost_id not in seen_post_ids: + if local_post.deleted_at is None: + local_post.deleted_at = now + + # transaction auto-commits here + + +#===================================================== +# MEMBERSHIP SYNC (USER-CENTRIC) Ghost -> DB +#===================================================== + +def _member_email(m: dict[str, Any]) -> Optional[str]: + email = 
(m.get("email") or "").strip().lower() or None + return email + + +# ---- small upsert helpers for related entities ---- + +async def _upsert_label(sess: AsyncSession, data: dict) -> GhostLabel: + res = await sess.execute(select(GhostLabel).where(GhostLabel.ghost_id == data["id"])) + obj = res.scalar_one_or_none() + if not obj: + obj = GhostLabel(ghost_id=data["id"]) + sess.add(obj) + obj.name = data.get("name") or obj.name + obj.slug = data.get("slug") or obj.slug + await sess.flush() + return obj + + +async def _upsert_newsletter(sess: AsyncSession, data: dict) -> GhostNewsletter: + res = await sess.execute(select(GhostNewsletter).where(GhostNewsletter.ghost_id == data["id"])) + obj = res.scalar_one_or_none() + if not obj: + obj = GhostNewsletter(ghost_id=data["id"]) + sess.add(obj) + obj.name = data.get("name") or obj.name + obj.slug = data.get("slug") or obj.slug + obj.description = data.get("description") or obj.description + await sess.flush() + return obj + + +async def _upsert_tier(sess: AsyncSession, data: dict) -> GhostTier: + res = await sess.execute(select(GhostTier).where(GhostTier.ghost_id == data["id"])) + obj = res.scalar_one_or_none() + if not obj: + obj = GhostTier(ghost_id=data["id"]) + sess.add(obj) + obj.name = data.get("name") or obj.name + obj.slug = data.get("slug") or obj.slug + obj.type = data.get("type") or obj.type + obj.visibility = data.get("visibility") or obj.visibility + await sess.flush() + return obj + + +def _price_cents(sd: dict) -> Optional[int]: + try: + return int((sd.get("price") or {}).get("amount")) + except Exception: + return None + + +# ---- application of member payload onto User + related tables ---- + +async def _find_or_create_user_by_ghost_or_email(sess: AsyncSession, data: dict) -> User: + ghost_id = data.get("id") + email = _member_email(data) + + if ghost_id: + res = await sess.execute(select(User).where(User.ghost_id == ghost_id)) + u = res.scalar_one_or_none() + if u: + return u + + if email: + res = await 
sess.execute(select(User).where(User.email.ilike(email))) + u = res.scalar_one_or_none() + if u: + if ghost_id and not u.ghost_id: + u.ghost_id = ghost_id + return u + + # create a new user (Ghost is source of truth for member list) + u = User(email=email or f"_ghost_{ghost_id}@invalid.local") + if ghost_id: + u.ghost_id = ghost_id + sess.add(u) + await sess.flush() + return u + + +async def _apply_user_membership(sess: AsyncSession, user: User, m: dict) -> User: + """Apply Ghost member payload to local User WITHOUT touching relationship collections directly. + We mutate join tables explicitly to avoid lazy-loads (which cause MissingGreenlet in async). + """ + sess.add(user) + + # scalar fields + user.name = m.get("name") or user.name + user.ghost_status = m.get("status") or user.ghost_status + user.ghost_subscribed = bool(m.get("subscribed", True)) + user.ghost_note = m.get("note") or user.ghost_note + user.avatar_image = m.get("avatar_image") or user.avatar_image + user.stripe_customer_id = ( + (m.get("stripe") or {}).get("customer_id") + or (m.get("customer") or {}).get("id") + or m.get("stripe_customer_id") + or user.stripe_customer_id + ) + user.ghost_raw = dict(m) + flag_modified(user, "ghost_raw") + + await sess.flush() # ensure user.id exists + + # Labels join + label_ids: list[int] = [] + for ld in m.get("labels") or []: + lbl = await _upsert_label(sess, ld) + label_ids.append(lbl.id) + await sess.execute(delete(UserLabel).where(UserLabel.user_id == user.id)) + for lid in label_ids: + sess.add(UserLabel(user_id=user.id, label_id=lid)) + await sess.flush() + + # Newsletters join with subscribed flag + nl_rows: list[tuple[int, bool]] = [] + for nd in m.get("newsletters") or []: + nl = await _upsert_newsletter(sess, nd) + nl_rows.append((nl.id, bool(nd.get("subscribed", True)))) + await sess.execute(delete(UserNewsletter).where(UserNewsletter.user_id == user.id)) + for nl_id, subbed in nl_rows: + sess.add(UserNewsletter(user_id=user.id, newsletter_id=nl_id, 
subscribed=subbed)) + await sess.flush() + + # Subscriptions + for sd in m.get("subscriptions") or []: + sid = sd.get("id") + if not sid: + continue + + tier_id: Optional[int] = None + if sd.get("tier"): + tier = await _upsert_tier(sess, sd["tier"]) + await sess.flush() + tier_id = tier.id + + res = await sess.execute(select(GhostSubscription).where(GhostSubscription.ghost_id == sid)) + sub = res.scalar_one_or_none() + if not sub: + sub = GhostSubscription(ghost_id=sid, user_id=user.id) + sess.add(sub) + + sub.user_id = user.id + sub.status = sd.get("status") or sub.status + sub.cadence = (sd.get("plan") or {}).get("interval") or sd.get("cadence") or sub.cadence + sub.price_amount = _price_cents(sd) + sub.price_currency = (sd.get("price") or {}).get("currency") or sub.price_currency + sub.stripe_customer_id = ( + (sd.get("customer") or {}).get("id") + or (sd.get("stripe") or {}).get("customer_id") + or sub.stripe_customer_id + ) + sub.stripe_subscription_id = ( + sd.get("stripe_subscription_id") + or (sd.get("stripe") or {}).get("subscription_id") + or sub.stripe_subscription_id + ) + if tier_id is not None: + sub.tier_id = tier_id + sub.raw = dict(sd) + flag_modified(sub, "raw") + + await sess.flush() + return user + + +# ===================================================== +# PUSH MEMBERS FROM LOCAL DB -> GHOST (DB -> Ghost) +# ===================================================== + +def _ghost_member_payload_base(u: User) -> dict: + """Compose writable Ghost member fields from local User, validating types.""" + email = _to_str_or_none(getattr(u, "email", None)) + payload: dict = {} + if email: + payload["email"] = email.lower() + + name = _to_str_or_none(getattr(u, "name", None)) + if name: + payload["name"] = name + + note = _to_str_or_none(getattr(u, "ghost_note", None)) + if note: + payload["note"] = note + + # If ghost_subscribed is None, default True (Ghost expects boolean) + subscribed = getattr(u, "ghost_subscribed", True) + payload["subscribed"] = 
def _ghost_member_payload_base(u: User) -> dict:
    """Compose the writable Ghost member fields from a local User.

    Only non-blank fields are included; ``subscribed`` is always sent because
    Ghost expects a boolean.
    """
    payload: dict = {}

    email = _to_str_or_none(getattr(u, "email", None))
    if email:
        payload["email"] = email.lower()

    name = _to_str_or_none(getattr(u, "name", None))
    if name:
        payload["name"] = name

    note = _to_str_or_none(getattr(u, "ghost_note", None))
    if note:
        payload["note"] = note

    # Fix: the previous code did bool(subscribed), turning a None flag into
    # False -- contradicting its own comment ("If ghost_subscribed is None,
    # default True"). None now defaults to True as documented.
    subscribed = getattr(u, "ghost_subscribed", True)
    payload["subscribed"] = True if subscribed is None else bool(subscribed)

    return payload


async def _newsletters_for_user(sess: AsyncSession, user_id: int) -> list[dict]:
    """Return [{'id': ghost_id, 'subscribed': bool}] (or name-keyed) rows for the
    Ghost API, skipping blank rows and de-duplicating by id/name."""
    q = await sess.execute(
        select(GhostNewsletter.ghost_id, UserNewsletter.subscribed, GhostNewsletter.name)
        .join(UserNewsletter, UserNewsletter.newsletter_id == GhostNewsletter.id)
        .where(UserNewsletter.user_id == user_id)
    )
    seen = set()
    out: list[dict] = []
    for gid, subscribed, name in q.all():
        gid = (gid or "").strip() or None
        name = (name or "").strip() or None
        row: dict = {"subscribed": bool(subscribed)}
        if gid:
            key = ("id", gid)
            if key in seen:
                continue
            row["id"] = gid
            seen.add(key)
            out.append(row)
        elif name:
            key = ("name", name.lower())
            if key in seen:
                continue
            row["name"] = name
            seen.add(key)
            out.append(row)
        # else: skip rows with neither id nor name
    return out


async def _labels_for_user(sess: AsyncSession, user_id: int) -> list[dict]:
    """Return [{'id': ghost_id}] or [{'name': name}] rows for the Ghost API,
    skipping blank rows and de-duplicating by id/name."""
    q = await sess.execute(
        select(GhostLabel.ghost_id, GhostLabel.name)
        .join(UserLabel, UserLabel.label_id == GhostLabel.id)
        .where(UserLabel.user_id == user_id)
    )
    seen = set()
    out: list[dict] = []
    for gid, name in q.all():
        gid = (gid or "").strip() or None
        name = (name or "").strip() or None
        if gid:
            key = ("id", gid)
            if key not in seen:
                out.append({"id": gid})
                seen.add(key)
        elif name:
            key = ("name", name.lower())
            if key not in seen:
                out.append({"name": name})
                seen.add(key)
        # else: skip empty label row
    return out


from urllib.parse import quote


async def _ghost_find_member_by_email(email: str) -> Optional[dict]:
    """Look up a Ghost member by email (used to resolve conflicts / stale ids).

    Fix: this function was defined twice back-to-back and the second definition
    silently shadowed the first. Consolidated into a single definition that
    URL-escapes the email inside the NQL filter (the shadowed version did not)
    and keeps the longer 30s timeout.
    """
    if not email:
        return None
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.get(
            f"{GHOST_ADMIN_API_URL}/members/?filter=email:{quote(email)}&limit=1",
            headers=_auth_header(),
        )
        resp.raise_for_status()
        members = resp.json().get("members") or []
        return members[0] if members else None


async def _ghost_upsert_member(payload: dict, ghost_id: str | None = None) -> dict:
    """Create/update a member, with sanitization + 5xx retry/backoff.

    - Prefer PUT if ghost_id given.
    - On 422: retry without name/note; if 'already exists', find-by-email then PUT.
    - On 404: find-by-email and PUT; if still missing, POST create.
    - On 5xx: small exponential backoff retry.
    """
    # Only fields Ghost accepts on write; everything else is dropped up front.
    safe_keys = ("email", "name", "note", "subscribed", "labels", "newsletters", "id")
    pl_raw = {k: v for k, v in payload.items() if k in safe_keys}
    pl = _sanitize_member_payload(pl_raw)

    async def _request_with_retry(client: httpx.AsyncClient, method: str, url: str, body: dict) -> httpx.Response:
        # ``body`` was previously named ``json``, shadowing the stdlib module.
        delay = 0.5
        for attempt in range(3):
            r = await client.request(method, url, headers=_auth_header(), json=body)
            if r.status_code >= 500:
                if attempt < 2:
                    await asyncio.sleep(delay)
                    delay *= 2
                    continue
            return r
        return r  # last response

    async with httpx.AsyncClient(timeout=30) as client:

        async def _put(mid: str, p: dict) -> dict:
            r = await _request_with_retry(
                client, "PUT",
                f"{GHOST_ADMIN_API_URL}/members/{mid}/",
                {"members": [p]},
            )
            if r.status_code == 404:
                # Stale id: try by email, then create if absent.
                existing = await _ghost_find_member_by_email(p.get("email", ""))
                if existing and existing.get("id"):
                    r2 = await _request_with_retry(
                        client, "PUT",
                        f"{GHOST_ADMIN_API_URL}/members/{existing['id']}/",
                        {"members": [p]},
                    )
                    r2.raise_for_status()
                    return (r2.json().get("members") or [None])[0] or {}
                r3 = await _request_with_retry(
                    client, "POST",
                    f"{GHOST_ADMIN_API_URL}/members/",
                    {"members": [p]},
                )
                r3.raise_for_status()
                return (r3.json().get("members") or [None])[0] or {}

            if r.status_code == 422:
                # Validation error: drop the field(s) Ghost complained about
                # and retry once with the reduced payload.
                body = (r.text or "").lower()
                retry = dict(p)
                dropped = False
                if '"note"' in body or "for note" in body:
                    retry.pop("note", None); dropped = True
                if '"name"' in body or "for name" in body:
                    retry.pop("name", None); dropped = True
                if "labels.name" in body:
                    retry.pop("labels", None); dropped = True
                if dropped:
                    r2 = await _request_with_retry(
                        client, "PUT",
                        f"{GHOST_ADMIN_API_URL}/members/{mid}/",
                        {"members": [retry]},
                    )
                    if r2.status_code == 404:
                        existing = await _ghost_find_member_by_email(retry.get("email", ""))
                        if existing and existing.get("id"):
                            r3 = await _request_with_retry(
                                client, "PUT",
                                f"{GHOST_ADMIN_API_URL}/members/{existing['id']}/",
                                {"members": [retry]},
                            )
                            r3.raise_for_status()
                            return (r3.json().get("members") or [None])[0] or {}
                        r3 = await _request_with_retry(
                            client, "POST",
                            f"{GHOST_ADMIN_API_URL}/members/",
                            {"members": [retry]},
                        )
                        r3.raise_for_status()
                        return (r3.json().get("members") or [None])[0] or {}
                    r2.raise_for_status()
                    return (r2.json().get("members") or [None])[0] or {}
            r.raise_for_status()
            return (r.json().get("members") or [None])[0] or {}

        async def _post_upsert(p: dict) -> dict:
            r = await _request_with_retry(
                client, "POST",
                f"{GHOST_ADMIN_API_URL}/members/?upsert=true",
                {"members": [p]},
            )
            if r.status_code == 422:
                lower = (r.text or "").lower()

                # Sanitize name/note/labels further on schema complaints.
                retry = dict(p)
                changed = False
                if '"note"' in lower or "for note" in lower:
                    retry.pop("note", None); changed = True
                if '"name"' in lower or "for name" in lower:
                    retry.pop("name", None); changed = True
                if "labels.name" in lower:
                    retry.pop("labels", None); changed = True

                if changed:
                    r2 = await _request_with_retry(
                        client, "POST",
                        f"{GHOST_ADMIN_API_URL}/members/?upsert=true",
                        {"members": [retry]},
                    )
                    if r2.status_code != 422:
                        r2.raise_for_status()
                        return (r2.json().get("members") or [None])[0] or {}
                    lower = (r2.text or "").lower()

                # Existing email => find-by-email then PUT.
                if "already exists" in lower and "email address" in lower:
                    existing = await _ghost_find_member_by_email(p.get("email", ""))
                    if existing and existing.get("id"):
                        return await _put(existing["id"], p)

                # unrecoverable
                raise httpx.HTTPStatusError(
                    "Validation error, cannot edit member.",
                    request=r.request,
                    response=r,
                )
            r.raise_for_status()
            return (r.json().get("members") or [None])[0] or {}

        if ghost_id:
            return await _put(ghost_id, pl)
        return await _post_upsert(pl)
async def sync_member_to_ghost(sess: AsyncSession, user_id: int) -> Optional[str]:
    """Push one local User to Ghost; returns the member's ghost_id (or None).

    NOTE(review): labels are pushed but newsletters are not, even though a
    _newsletters_for_user helper exists -- confirm whether that is intentional.
    """
    res = await sess.execute(select(User).where(User.id == user_id))
    user = res.scalar_one_or_none()
    if not user:
        return None

    payload = _ghost_member_payload_base(user)

    labels = await _labels_for_user(sess, user.id)
    if labels:
        payload["labels"] = labels  # Ghost accepts label ids on upsert

    ghost_member = await _ghost_upsert_member(payload, ghost_id=user.ghost_id)

    if ghost_member:
        gm_id = ghost_member.get("id")
        if gm_id and user.ghost_id != gm_id:
            user.ghost_id = gm_id  # back-fill/repair the stored Ghost id
        user.ghost_raw = dict(ghost_member)
        flag_modified(user, "ghost_raw")
        await sess.flush()
        return user.ghost_id or gm_id
    return user.ghost_id


async def sync_members_to_ghost(
    sess: AsyncSession,
    changed_since: Optional[datetime] = None,
    limit: Optional[int] = None,
) -> int:
    """Upsert a batch of users to Ghost. Returns the count processed.

    Per-user failures are logged and skipped so one bad record cannot kill
    startup.
    """
    stmt = select(User.id)
    if changed_since:
        stmt = stmt.where(
            or_(
                User.created_at >= changed_since,
                # .is_not(None) replaces "!= None" (the non-idiomatic form that
                # linters flag as E711); generated SQL is identical.
                and_(User.last_login_at.is_not(None), User.last_login_at >= changed_since),
            )
        )
    if limit:
        stmt = stmt.limit(limit)

    ids = [row[0] for row in (await sess.execute(stmt)).all()]
    processed = 0
    for uid in ids:
        try:
            await sync_member_to_ghost(sess, uid)
            processed += 1
        except httpx.HTTPStatusError as e:
            # Log and continue; don't kill startup.
            print(f"[ghost sync] failed upsert for user {uid}: {e.response.status_code} {e.response.text}")
        except Exception as e:
            print(f"[ghost sync] failed upsert for user {uid}: {e}")
    return processed


# =====================================================
# Membership fetch/sync (Ghost -> DB) bulk + single
# =====================================================

async def fetch_all_members_from_ghost() -> list[dict[str, Any]]:
    """Fetch every Ghost member with its related labels/subscriptions/tiers/newsletters."""
    async with httpx.AsyncClient(timeout=60) as client:
        resp = await client.get(
            f"{GHOST_ADMIN_API_URL}/members/?include=labels,subscriptions,tiers,newsletters&limit=all",
            headers=_auth_header(),
        )
        resp.raise_for_status()
        return resp.json().get("members", [])


async def sync_all_membership_from_ghost(sess: AsyncSession) -> None:
    """Pull every Ghost member and mirror users + membership catalogs locally."""
    members = await fetch_all_members_from_ghost()

    # Collect related lookups and ensure catalogs exist first (avoid FK races).
    label_bucket: Dict[str, dict[str, Any]] = {}
    tier_bucket: Dict[str, dict[str, Any]] = {}
    newsletter_bucket: Dict[str, dict[str, Any]] = {}

    for m in members:
        for ld in m.get("labels") or []:
            label_bucket[ld["id"]] = ld
        for nd in m.get("newsletters") or []:
            newsletter_bucket[nd["id"]] = nd
        for sd in m.get("subscriptions") or []:
            tier = sd.get("tier")
            if isinstance(tier, dict) and tier.get("id"):
                tier_bucket[tier["id"]] = tier

    for ld in label_bucket.values():
        await _upsert_label(sess, ld)
    for td in tier_bucket.values():
        await _upsert_tier(sess, td)
    for nd in newsletter_bucket.values():
        await _upsert_newsletter(sess, nd)

    # Users
    for gm in members:
        user = await _find_or_create_user_by_ghost_or_email(sess, gm)
        await _apply_user_membership(sess, user, gm)

    # transaction auto-commits here


async def fetch_single_member_from_ghost(ghost_id: str) -> Optional[dict[str, Any]]:
    """Fetch one member (with relations) from Ghost; None on 404."""
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.get(
            f"{GHOST_ADMIN_API_URL}/members/{ghost_id}/?include=labels,newsletters,subscriptions,tiers",
            headers=_auth_header(),
        )
        if resp.status_code == 404:
            return None
        resp.raise_for_status()
        data = resp.json()
        # Ghost normally returns {"members": [...]}; tolerate singular shapes.
        items = data.get("members") or data.get("member") or []
        if isinstance(items, dict):
            return items
        return (items[0] if items else None)


async def sync_single_member(sess: AsyncSession, ghost_id: str) -> None:
    """Mirror one Ghost member into the local DB (catalogs first, then the user)."""
    m = await fetch_single_member_from_ghost(ghost_id)
    if m is None:
        # If member deleted in Ghost, we won't delete local user here.
        return

    # Ensure catalogs exist for this payload before the join rows reference them.
    for ld in m.get("labels") or []:
        await _upsert_label(sess, ld)
    for nd in m.get("newsletters") or []:
        await _upsert_newsletter(sess, nd)
    for sd in m.get("subscriptions") or []:
        if isinstance(sd.get("tier"), dict):
            await _upsert_tier(sess, sd["tier"])

    user = await _find_or_create_user_by_ghost_or_email(sess, m)
    await _apply_user_membership(sess, user, m)
    # transaction auto-commits here


# =====================================================
# Single-item content helpers (posts/authors/tags)
# =====================================================

async def fetch_single_post_from_ghost(ghost_id: str) -> Optional[dict[str, Any]]:
    """Fetch one post (all formats, with authors/tags) from Ghost; None on 404."""
    url = (
        f"{GHOST_ADMIN_API_URL}/posts/{ghost_id}/"
        "?include=authors,tags&formats=html,plaintext,mobiledoc,lexical"
    )
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.get(url, headers=_auth_header())
        if resp.status_code == 404:
            return None
        resp.raise_for_status()
        posts = resp.json().get("posts") or []
        return posts[0] if posts else None


async def fetch_single_page_from_ghost(ghost_id: str) -> Optional[dict[str, Any]]:
    """Fetch one page (all formats, with authors/tags) from Ghost; None on 404."""
    url = (
        f"{GHOST_ADMIN_API_URL}/pages/{ghost_id}/"
        "?include=authors,tags&formats=html,plaintext,mobiledoc,lexical"
    )
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.get(url, headers=_auth_header())
        if resp.status_code == 404:
            return None
        resp.raise_for_status()
        pages = resp.json().get("pages") or []
        return pages[0] if pages else None


async def fetch_single_author_from_ghost(ghost_id: str) -> Optional[dict[str, Any]]:
    """Fetch one author (a Ghost staff user) by id; None on 404."""
    url = f"{GHOST_ADMIN_API_URL}/users/{ghost_id}/"
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.get(url, headers=_auth_header())
        if resp.status_code == 404:
            return None
        resp.raise_for_status()
        users = resp.json().get("users") or []
        return users[0] if users else None
+ + +async def fetch_single_tag_from_ghost(ghost_id: str) -> Optional[dict[str, Any]]: + url = f"{GHOST_ADMIN_API_URL}/tags/{ghost_id}/" + async with httpx.AsyncClient(timeout=30) as client: + resp = await client.get(url, headers=_auth_header()) + if resp.status_code == 404: + return None + resp.raise_for_status() + data = resp.json() + tags = data.get("tags") or [] + return tags[0] if tags else None + + +def _build_ap_post_data(post, post_url: str, tag_objs: list) -> dict: + """Build rich AP object_data for a blog post/page. + + Produces a Note with HTML content (excerpt), feature image + inline + images as attachments, and tags as AP Hashtag objects. + """ + # Content HTML: title + excerpt + "Read more" link + parts: list[str] = [] + if post.title: + parts.append(f"

{html_escape(post.title)}

") + + body = post.plaintext or post.custom_excerpt or post.excerpt or "" + + if body: + for para in body.split("\n\n"): + para = para.strip() + if para: + parts.append(f"

{html_escape(para)}

") + + parts.append(f'

Read more \u2192

') + + # Hashtag links in content (Mastodon expects them inline too) + if tag_objs: + ht_links = [] + for t in tag_objs: + clean = t.slug.replace("-", "") + ht_links.append( + f'' + ) + parts.append(f'

{" ".join(ht_links)}

import re
from html import escape as html_escape


def _build_ap_post_data(post, post_url: str, tag_objs: list) -> dict:
    """Build rich AP object_data for a blog post/page.

    Produces a Note with HTML content (excerpt), feature image + inline images
    as attachments, and tags as AP Hashtag objects.

    NOTE(review): the HTML template literals in this function were corrupted in
    the source dump; the markup below is a conservative reconstruction --
    verify against the actual federation output.
    """
    # Content HTML: title + excerpt + "Read more" link
    parts: list[str] = []
    if post.title:
        # NOTE(review): reconstructed wrapper markup for the title.
        parts.append(f"<p><strong>{html_escape(post.title)}</strong></p>")

    body = post.plaintext or post.custom_excerpt or post.excerpt or ""

    if body:
        for para in body.split("\n\n"):
            para = para.strip()
            if para:
                parts.append(f"<p>{html_escape(para)}</p>")

    parts.append(f'<p><a href="{post_url}">Read more \u2192</a></p>')

    # Hashtag links in content (Mastodon expects them inline too)
    if tag_objs:
        ht_links = []
        for t in tag_objs:
            clean = t.slug.replace("-", "")
            # NOTE(review): reconstructed anchor markup; href matches the AP
            # Hashtag objects built below.
            ht_links.append(
                f'<a href="{post_url}tag/{t.slug}/" class="mention hashtag" rel="tag">#{clean}</a>'
            )
        parts.append(f'<p>{" ".join(ht_links)}</p>')

    obj: dict = {
        "name": post.title or "",
        "content": "\n".join(parts),
        "url": post_url,
    }

    # Attachments: feature image + inline images (max 4)
    attachments: list[dict] = []
    seen: set[str] = set()

    if post.feature_image:
        att: dict = {"type": "Image", "url": post.feature_image}
        if post.feature_image_alt:
            att["name"] = post.feature_image_alt
        attachments.append(att)
        seen.add(post.feature_image)

    if post.html:
        for src in re.findall(r'<img[^>]+src="([^"]+)"', post.html):
            if src not in seen and len(attachments) < 4:
                attachments.append({"type": "Image", "url": src})
                seen.add(src)

    if attachments:
        obj["attachment"] = attachments

    # AP Hashtag objects
    if tag_objs:
        obj["tag"] = [
            {
                "type": "Hashtag",
                "href": f"{post_url}tag/{t.slug}/",
                "name": f"#{t.slug.replace('-', '')}",
            }
            for t in tag_objs
        ]

    return obj


async def _collect_author_tag_maps(sess: AsyncSession, gp: dict) -> tuple:
    """Upsert a payload's authors/tags (incl. primaries) and return (author_map, tag_map)."""
    author_map: Dict[str, Author] = {}
    tag_map: Dict[str, Tag] = {}

    for a in gp.get("authors") or []:
        author_map[a["id"]] = await _upsert_author(sess, a)
    if gp.get("primary_author"):
        pa = gp["primary_author"]
        author_map[pa["id"]] = await _upsert_author(sess, pa)

    for t in gp.get("tags") or []:
        tag_map[t["id"]] = await _upsert_tag(sess, t)
    if gp.get("primary_tag"):
        pt = gp["primary_tag"]
        tag_map[pt["id"]] = await _upsert_tag(sess, pt)

    return author_map, tag_map


async def _publish_post_federation(sess: AsyncSession, post, old_status, gp: dict, tag_map: dict) -> None:
    """Publish the appropriate federation activity for a post/page status change.

    published            -> Create (newly published) or Update (re-published edit)
    published->unpublished -> Delete with a Tombstone
    """
    from shared.services.federation_publish import try_publish
    from shared.infrastructure.urls import app_url

    post_url = app_url("blog", f"/{post.slug}/")
    post_tags = [tag_map[t["id"]] for t in (gp.get("tags") or []) if t["id"] in tag_map]

    if post.status == "published":
        activity_type = "Create" if old_status != "published" else "Update"
        await try_publish(
            sess,
            user_id=post.user_id,
            activity_type=activity_type,
            object_type="Note",
            object_data=_build_ap_post_data(post, post_url, post_tags),
            source_type="Post",
            source_id=post.id,
        )
    elif old_status == "published" and post.status != "published":
        await try_publish(
            sess,
            user_id=post.user_id,
            activity_type="Delete",
            object_type="Tombstone",
            object_data={
                "id": post_url,
                "formerType": "Note",
            },
            source_type="Post",
            source_id=post.id,
        )


async def sync_single_post(sess: AsyncSession, ghost_id: str) -> None:
    """Sync one Ghost post into the DB; soft-delete locally if gone in Ghost.

    Previously duplicated ~40 lines with sync_single_page; the shared work now
    lives in _collect_author_tag_maps / _publish_post_federation.
    """
    gp = await fetch_single_post_from_ghost(ghost_id)
    if gp is None:
        res = await sess.execute(select(Post).where(Post.ghost_id == ghost_id))
        obj = res.scalar_one_or_none()
        if obj is not None and obj.deleted_at is None:
            obj.deleted_at = utcnow()
        return

    author_map, tag_map = await _collect_author_tag_maps(sess, gp)
    post, old_status = await _upsert_post(sess, gp, author_map, tag_map)

    # Publish to federation inline (posts, not pages)
    if not post.is_page and post.user_id:
        await _publish_post_federation(sess, post, old_status, gp, tag_map)


async def sync_single_page(sess: AsyncSession, ghost_id: str) -> None:
    """Sync one Ghost page into the DB; soft-delete locally if gone in Ghost."""
    gp = await fetch_single_page_from_ghost(ghost_id)
    if gp is not None:
        gp["page"] = True  # Ghost /pages/ endpoint may omit this flag
    if gp is None:
        res = await sess.execute(select(Post).where(Post.ghost_id == ghost_id))
        obj = res.scalar_one_or_none()
        if obj is not None and obj.deleted_at is None:
            obj.deleted_at = utcnow()
        return

    author_map, tag_map = await _collect_author_tag_maps(sess, gp)
    post, old_status = await _upsert_post(sess, gp, author_map, tag_map)

    # Publish to federation inline (pages publish regardless of is_page)
    if post.user_id:
        await _publish_post_federation(sess, post, old_status, gp, tag_map)


async def sync_single_author(sess: AsyncSession, ghost_id: str) -> None:
    """Sync one Ghost author into the DB; soft-delete locally if gone in Ghost."""
    ga = await fetch_single_author_from_ghost(ghost_id)
    if ga is None:
        result = await sess.execute(select(Author).where(Author.ghost_id == ghost_id))
        author_obj = result.scalar_one_or_none()
        if author_obj and author_obj.deleted_at is None:
            author_obj.deleted_at = utcnow()
        return

    await _upsert_author(sess, ga)


async def sync_single_tag(sess: AsyncSession, ghost_id: str) -> None:
    """Sync one Ghost tag into the DB; soft-delete locally if gone in Ghost."""
    gt = await fetch_single_tag_from_ghost(ghost_id)
    if gt is None:
        result = await sess.execute(select(Tag).where(Tag.ghost_id == ghost_id))
        tag_obj = result.scalar_one_or_none()
        if tag_obj and tag_obj.deleted_at is None:
            tag_obj.deleted_at = utcnow()
        return

    await _upsert_tag(sess, gt)


# ---- explicit public exports (back-compat) ----
__all__ = [
    # bulk content
    "sync_all_content_from_ghost",
    # bulk membership (user-centric)
    "sync_all_membership_from_ghost",
    # DB -> Ghost
    "sync_member_to_ghost",
    "sync_members_to_ghost",
    # single fetch
    "fetch_single_post_from_ghost",
    "fetch_single_author_from_ghost",
    "fetch_single_tag_from_ghost",
    "fetch_single_member_from_ghost",
    # single sync
    "sync_single_post",
    "sync_single_author",
    "sync_single_tag",
    "sync_single_member",
]
"""
Lexical JSON -> HTML renderer.

Produces HTML matching Ghost's ``kg-*`` class conventions so the existing
``cards.css`` stylesheet works unchanged.

Public API
----------
    render_lexical(doc) - Lexical JSON (dict or string) -> HTML string
"""
from __future__ import annotations

import html
import json
from typing import Callable

try:
    import mistune
except ImportError:  # NOTE(review): optional -- only the markdown card uses it
    mistune = None

# ---------------------------------------------------------------------------
# Registry
# ---------------------------------------------------------------------------

_RENDERERS: dict[str, Callable[[dict], str]] = {}


def _renderer(node_type: str):
    """Decorator -- register a function as the renderer for *node_type*."""
    def decorator(fn: Callable[[dict], str]) -> Callable[[dict], str]:
        _RENDERERS[node_type] = fn
        return fn
    return decorator


# ---------------------------------------------------------------------------
# Public entry point
# ---------------------------------------------------------------------------

def render_lexical(doc: dict | str) -> str:
    """Render a Lexical JSON document (dict or JSON string) to an HTML string."""
    if isinstance(doc, str):
        doc = json.loads(doc)
    root = doc.get("root", doc)
    return _render_children(root.get("children", []))


# ---------------------------------------------------------------------------
# Core dispatch
# ---------------------------------------------------------------------------

def _render_node(node: dict) -> str:
    """Dispatch one node to its registered renderer; unknown types render ''."""
    renderer = _RENDERERS.get(node.get("type", ""))
    return renderer(node) if renderer else ""


def _render_children(children: list[dict]) -> str:
    """Render a node list in document order and concatenate the results."""
    return "".join(_render_node(c) for c in children)


# ---------------------------------------------------------------------------
# Text formatting
# ---------------------------------------------------------------------------

# Lexical format bitmask
_FORMAT_BOLD = 1
_FORMAT_ITALIC = 2
_FORMAT_STRIKETHROUGH = 4
_FORMAT_UNDERLINE = 8
_FORMAT_CODE = 16
_FORMAT_SUBSCRIPT = 32
_FORMAT_SUPERSCRIPT = 64
_FORMAT_HIGHLIGHT = 128

# FIX: every open/close tag string here had been lost (all empty), so inline
# formatting was silently dropped. Restored to the standard Lexical/Ghost
# HTML elements -- NOTE(review): verify against cards.css / Ghost output.
_FORMAT_TAGS: list[tuple[int, str, str]] = [
    (_FORMAT_BOLD, "<strong>", "</strong>"),
    (_FORMAT_ITALIC, "<em>", "</em>"),
    (_FORMAT_STRIKETHROUGH, "<s>", "</s>"),
    (_FORMAT_UNDERLINE, "<u>", "</u>"),
    (_FORMAT_CODE, "<code>", "</code>"),
    (_FORMAT_SUBSCRIPT, "<sub>", "</sub>"),
    (_FORMAT_SUPERSCRIPT, "<sup>", "</sup>"),
    (_FORMAT_HIGHLIGHT, "<mark>", "</mark>"),
]

# Element-level alignment from ``format`` field
_ALIGN_MAP = {
    1: "text-align: left",
    2: "text-align: center",
    3: "text-align: right",
    4: "text-align: justify",
}


def _align_style(node: dict) -> str:
    """Return a ``style=...`` attribute fragment for the node's alignment, or ''."""
    fmt = node.get("format")
    if isinstance(fmt, int) and fmt in _ALIGN_MAP:
        return f' style="{_ALIGN_MAP[fmt]}"'
    if isinstance(fmt, str) and fmt:
        return f' style="text-align: {fmt}"'
    return ""


def _wrap_format(text: str, fmt: int) -> str:
    """Wrap *text* in the HTML tags for every format bit set in *fmt*."""
    for mask, open_tag, close_tag in _FORMAT_TAGS:
        if fmt & mask:
            text = f"{open_tag}{text}{close_tag}"
    return text


# ---------------------------------------------------------------------------
# Tier 1 -- text nodes
# ---------------------------------------------------------------------------

@_renderer("text")
def _text(node: dict) -> str:
    """Escaped text content, wrapped per the node's format bitmask."""
    text = html.escape(node.get("text", ""))
    fmt = node.get("format", 0)
    if isinstance(fmt, int) and fmt:
        text = _wrap_format(text, fmt)
    return text


@_renderer("linebreak")
def _linebreak(_node: dict) -> str:
    # FIX: the return literal had been truncated in the dump; a Lexical
    # linebreak node renders as a <br>.
    return "<br>"
" + + +@_renderer("tab") +def _tab(_node: dict) -> str: + return "\t" + + +@_renderer("paragraph") +def _paragraph(node: dict) -> str: + inner = _render_children(node.get("children", [])) + if not inner: + inner = "
" + style = _align_style(node) + return f"{inner}

" + + +@_renderer("extended-text") +def _extended_text(node: dict) -> str: + return _paragraph(node) + + +@_renderer("heading") +def _heading(node: dict) -> str: + tag = node.get("tag", "h2") + inner = _render_children(node.get("children", [])) + style = _align_style(node) + return f"<{tag}{style}>{inner}" + + +@_renderer("extended-heading") +def _extended_heading(node: dict) -> str: + return _heading(node) + + +@_renderer("quote") +def _quote(node: dict) -> str: + inner = _render_children(node.get("children", [])) + return f"
{inner}
" + + +@_renderer("extended-quote") +def _extended_quote(node: dict) -> str: + return _quote(node) + + +@_renderer("aside") +def _aside(node: dict) -> str: + inner = _render_children(node.get("children", [])) + return f"" + + +@_renderer("link") +def _link(node: dict) -> str: + href = html.escape(node.get("url", ""), quote=True) + target = node.get("target", "") + rel = node.get("rel", "") + inner = _render_children(node.get("children", [])) + attrs = f' href="{href}"' + if target: + attrs += f' target="{html.escape(target, quote=True)}"' + if rel: + attrs += f' rel="{html.escape(rel, quote=True)}"' + return f"{inner}" + + +@_renderer("autolink") +def _autolink(node: dict) -> str: + return _link(node) + + +@_renderer("at-link") +def _at_link(node: dict) -> str: + return _link(node) + + +@_renderer("list") +def _list(node: dict) -> str: + tag = "ol" if node.get("listType") == "number" else "ul" + start = node.get("start") + inner = _render_children(node.get("children", [])) + attrs = "" + if tag == "ol" and start and start != 1: + attrs = f' start="{start}"' + return f"<{tag}{attrs}>{inner}" + + +@_renderer("listitem") +def _listitem(node: dict) -> str: + inner = _render_children(node.get("children", [])) + return f"
  • {inner}
  • " + + +@_renderer("horizontalrule") +def _horizontalrule(_node: dict) -> str: + return "
    " + + +@_renderer("code") +def _code(node: dict) -> str: + # Inline code nodes from Lexical — just render inner text + inner = _render_children(node.get("children", [])) + return f"{inner}" + + +@_renderer("codeblock") +def _codeblock(node: dict) -> str: + lang = node.get("language", "") + code = html.escape(node.get("code", "")) + cls = f' class="language-{html.escape(lang)}"' if lang else "" + return f'
    {code}
    ' + + +@_renderer("code-highlight") +def _code_highlight(node: dict) -> str: + text = html.escape(node.get("text", "")) + highlight_type = node.get("highlightType", "") + if highlight_type: + return f'{text}' + return text + + +# --------------------------------------------------------------------------- +# Tier 2 — common cards +# --------------------------------------------------------------------------- + +@_renderer("image") +def _image(node: dict) -> str: + src = node.get("src", "") + alt = node.get("alt", "") + caption = node.get("caption", "") + width = node.get("cardWidth", "") or node.get("width", "") + href = node.get("href", "") + + width_class = "" + if width == "wide": + width_class = " kg-width-wide" + elif width == "full": + width_class = " kg-width-full" + + img_tag = f'{html.escape(alt, quote=True)}' + if href: + img_tag = f'{img_tag}' + + parts = [f'
    '] + parts.append(img_tag) + if caption: + parts.append(f"
    {caption}
    ") + parts.append("
    ") + return "".join(parts) + + +@_renderer("gallery") +def _gallery(node: dict) -> str: + images = node.get("images", []) + if not images: + return "" + + rows = [] + for i in range(0, len(images), 3): + row_imgs = images[i:i + 3] + row_cls = f"kg-gallery-row" if len(row_imgs) <= 3 else "kg-gallery-row" + imgs_html = [] + for img in row_imgs: + src = img.get("src", "") + alt = img.get("alt", "") + caption = img.get("caption", "") + img_tag = f'{html.escape(alt, quote=True)}' + fig = f'" + imgs_html.append(fig) + rows.append(f'
    {"".join(imgs_html)}
    ') + + caption = node.get("caption", "") + caption_html = f"
    {caption}
    " if caption else "" + return ( + f'" + ) + + +@_renderer("html") +def _html_card(node: dict) -> str: + raw = node.get("html", "") + return f"{raw}" + + +@_renderer("markdown") +def _markdown(node: dict) -> str: + md_text = node.get("markdown", "") + rendered = mistune.html(md_text) + return f"{rendered}" + + +@_renderer("embed") +def _embed(node: dict) -> str: + embed_html = node.get("html", "") + caption = node.get("caption", "") + url = node.get("url", "") + caption_html = f"
    {caption}
    " if caption else "" + return ( + f'
    ' + f"{embed_html}{caption_html}
    " + ) + + +@_renderer("bookmark") +def _bookmark(node: dict) -> str: + url = node.get("url", "") + title = html.escape(node.get("metadata", {}).get("title", "") or node.get("title", "")) + description = html.escape(node.get("metadata", {}).get("description", "") or node.get("description", "")) + icon = node.get("metadata", {}).get("icon", "") or node.get("icon", "") + author = html.escape(node.get("metadata", {}).get("author", "") or node.get("author", "")) + publisher = html.escape(node.get("metadata", {}).get("publisher", "") or node.get("publisher", "")) + thumbnail = node.get("metadata", {}).get("thumbnail", "") or node.get("thumbnail", "") + caption = node.get("caption", "") + + icon_html = f'' if icon else "" + thumbnail_html = ( + f'
    ' + f'
    ' + ) if thumbnail else "" + + meta_parts = [] + if icon_html: + meta_parts.append(icon_html) + if author: + meta_parts.append(f'{author}') + if publisher: + meta_parts.append(f'{publisher}') + metadata_html = f'' if meta_parts else "" + + caption_html = f"
    {caption}
    " if caption else "" + + return ( + f'
    ' + f'' + f'
    ' + f'
    {title}
    ' + f'
    {description}
    ' + f'{metadata_html}' + f'
    ' + f'{thumbnail_html}' + f'
    ' + f'{caption_html}' + f'
    ' + ) + + +@_renderer("callout") +def _callout(node: dict) -> str: + color = node.get("backgroundColor", "grey") + emoji = node.get("calloutEmoji", "") + inner = _render_children(node.get("children", [])) + + emoji_html = f'
    {emoji}
    ' if emoji else "" + return ( + f'
    ' + f'{emoji_html}' + f'
    {inner}
    ' + f'
    ' + ) + + +@_renderer("button") +def _button(node: dict) -> str: + text = html.escape(node.get("buttonText", "")) + url = html.escape(node.get("buttonUrl", ""), quote=True) + alignment = node.get("alignment", "center") + return ( + f'
    ' + f'{text}' + f'
    ' + ) + + +@_renderer("toggle") +def _toggle(node: dict) -> str: + heading = node.get("heading", "") + # Toggle content is in children + inner = _render_children(node.get("children", [])) + return ( + f'
    ' + f'
    ' + f'

    {heading}

    ' + f'' + f'
    ' + f'
    {inner}
    ' + f'
    ' + ) + + +# --------------------------------------------------------------------------- +# Tier 3 — media & remaining cards +# --------------------------------------------------------------------------- + +@_renderer("audio") +def _audio(node: dict) -> str: + src = node.get("src", "") + title = html.escape(node.get("title", "")) + duration = node.get("duration", 0) + thumbnail = node.get("thumbnailSrc", "") + + duration_min = int(duration) // 60 + duration_sec = int(duration) % 60 + duration_str = f"{duration_min}:{duration_sec:02d}" + + if thumbnail: + thumb_html = ( + f'audio-thumbnail' + ) + else: + thumb_html = ( + '
    ' + '' + '
    ' + ) + + return ( + f'
    ' + f'{thumb_html}' + f'
    ' + f'
    {title}
    ' + f'
    ' + f'' + f'
    0:00
    ' + f'
    / {duration_str}
    ' + f'' + f'' + f'' + f'' + f'
    ' + f'
    ' + f'' + f'
    ' + ) + + +@_renderer("video") +def _video(node: dict) -> str: + src = node.get("src", "") + caption = node.get("caption", "") + width = node.get("cardWidth", "") + thumbnail = node.get("thumbnailSrc", "") or node.get("customThumbnailSrc", "") + loop = node.get("loop", False) + + width_class = "" + if width == "wide": + width_class = " kg-width-wide" + elif width == "full": + width_class = " kg-width-full" + + loop_attr = " loop" if loop else "" + poster_attr = f' poster="{html.escape(thumbnail, quote=True)}"' if thumbnail else "" + caption_html = f"
    {caption}
    " if caption else "" + + return ( + f'
    ' + f'
    ' + f'' + f'
    ' + f'{caption_html}' + f'
    ' + ) + + +@_renderer("file") +def _file(node: dict) -> str: + src = node.get("src", "") + title = html.escape(node.get("fileName", "") or node.get("title", "")) + caption = node.get("caption", "") + file_size = node.get("fileSize", 0) + file_name = html.escape(node.get("fileName", "")) + + # Format size + if file_size: + kb = file_size / 1024 + if kb < 1024: + size_str = f"{kb:.0f} KB" + else: + size_str = f"{kb / 1024:.1f} MB" + else: + size_str = "" + + caption_html = f'
    {caption}
    ' if caption else "" + size_html = f'
    {size_str}
    ' if size_str else "" + + return ( + f'' + ) + + +@_renderer("paywall") +def _paywall(_node: dict) -> str: + return "" + + +@_renderer("header") +def _header(node: dict) -> str: + heading = node.get("heading", "") + subheading = node.get("subheading", "") + size = node.get("size", "small") + style = node.get("style", "dark") + bg_image = node.get("backgroundImageSrc", "") + button_text = node.get("buttonText", "") + button_url = node.get("buttonUrl", "") + + bg_style = f' style="background-image: url({html.escape(bg_image, quote=True)})"' if bg_image else "" + heading_html = f"

    {heading}

    " if heading else "" + subheading_html = f"

    {subheading}

    " if subheading else "" + button_html = ( + f'{html.escape(button_text)}' + if button_text and button_url else "" + ) + + return ( + f'
    ' + f'{heading_html}{subheading_html}{button_html}' + f'
    ' + ) + + +@_renderer("signup") +def _signup(node: dict) -> str: + heading = node.get("heading", "") + subheading = node.get("subheading", "") + disclaimer = node.get("disclaimer", "") + button_text = html.escape(node.get("buttonText", "Subscribe")) + button_color = node.get("buttonColor", "") + bg_color = node.get("backgroundColor", "") + bg_image = node.get("backgroundImageSrc", "") + style = node.get("style", "dark") + + bg_style_parts = [] + if bg_color: + bg_style_parts.append(f"background-color: {bg_color}") + if bg_image: + bg_style_parts.append(f"background-image: url({html.escape(bg_image, quote=True)})") + style_attr = f' style="{"; ".join(bg_style_parts)}"' if bg_style_parts else "" + + heading_html = f"

    {heading}

    " if heading else "" + subheading_html = f"

    {subheading}

    " if subheading else "" + disclaimer_html = f'' if disclaimer else "" + btn_style = f' style="background-color: {button_color}"' if button_color else "" + + return ( + f'' + ) + + +@_renderer("product") +def _product(node: dict) -> str: + title = html.escape(node.get("productTitle", "") or node.get("title", "")) + description = node.get("productDescription", "") or node.get("description", "") + img_src = node.get("productImageSrc", "") + button_text = html.escape(node.get("buttonText", "")) + button_url = node.get("buttonUrl", "") + rating = node.get("rating", 0) + + img_html = ( + f'' + if img_src else "" + ) + button_html = ( + f'{button_text}' + if button_text and button_url else "" + ) + stars = "" + if rating: + active = int(rating) + stars_html = [] + for i in range(5): + cls = "kg-product-card-rating-active" if i < active else "" + stars_html.append( + f'' + f'' + f'' + ) + stars = f'
    {"".join(stars_html)}
    ' + + return ( + f'
    ' + f'{img_html}' + f'
    ' + f'

    {title}

    ' + f'{stars}' + f'
    {description}
    ' + f'{button_html}' + f'
    ' + f'
    ' + ) + + +@_renderer("email") +def _email(node: dict) -> str: + raw_html = node.get("html", "") + return f"{raw_html}" + + +@_renderer("email-cta") +def _email_cta(node: dict) -> str: + raw_html = node.get("html", "") + return f"{raw_html}" + + +@_renderer("call-to-action") +def _call_to_action(node: dict) -> str: + raw_html = node.get("html", "") + sponsor_label = node.get("sponsorLabel", "") + label_html = ( + f'{html.escape(sponsor_label)}' + if sponsor_label else "" + ) + return ( + f'
    ' + f'{label_html}{raw_html}' + f'
    ' + ) diff --git a/blog/bp/blog/ghost/lexical_validator.py b/blog/bp/blog/ghost/lexical_validator.py new file mode 100644 index 0000000..3cd39a2 --- /dev/null +++ b/blog/bp/blog/ghost/lexical_validator.py @@ -0,0 +1,86 @@ +""" +Server-side validation for Lexical editor JSON. + +Walk the document tree and reject any node whose ``type`` is not in +ALLOWED_NODE_TYPES. This is a belt-and-braces check: the Lexical +client already restricts which nodes can be created, but we validate +server-side too. +""" +from __future__ import annotations + +ALLOWED_NODE_TYPES: frozenset[str] = frozenset( + { + # Standard Lexical nodes + "root", + "paragraph", + "heading", + "quote", + "list", + "listitem", + "link", + "autolink", + "code", + "code-highlight", + "linebreak", + "text", + "horizontalrule", + "image", + "tab", + # Ghost "extended-*" variants + "extended-text", + "extended-heading", + "extended-quote", + # Ghost card types + "html", + "gallery", + "embed", + "bookmark", + "markdown", + "email", + "email-cta", + "button", + "callout", + "toggle", + "video", + "audio", + "file", + "product", + "header", + "signup", + "aside", + "codeblock", + "call-to-action", + "at-link", + "paywall", + } +) + + +def validate_lexical(doc: dict) -> tuple[bool, str | None]: + """Recursively validate a Lexical JSON document. + + Returns ``(True, None)`` when the document is valid, or + ``(False, reason)`` when an unknown node type is found. 
+ """ + if not isinstance(doc, dict): + return False, "Document must be a JSON object" + + root = doc.get("root") + if not isinstance(root, dict): + return False, "Document must contain a 'root' object" + + return _walk(root) + + +def _walk(node: dict) -> tuple[bool, str | None]: + node_type = node.get("type") + if node_type is not None and node_type not in ALLOWED_NODE_TYPES: + return False, f"Disallowed node type: {node_type}" + + for child in node.get("children", []): + if isinstance(child, dict): + ok, reason = _walk(child) + if not ok: + return False, reason + + return True, None diff --git a/blog/bp/blog/ghost_db.py b/blog/bp/blog/ghost_db.py new file mode 100644 index 0000000..1e9eda6 --- /dev/null +++ b/blog/bp/blog/ghost_db.py @@ -0,0 +1,632 @@ +from __future__ import annotations + +from typing import Any, Dict, List, Optional, Sequence, Tuple +from sqlalchemy import select, func, asc, desc, and_, or_ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload, joinedload + +from models.ghost_content import Post, Author, Tag, PostTag +from shared.models.page_config import PageConfig +from models.tag_group import TagGroup, TagGroupTag + + +class DBAPIError(Exception): + """Raised when our local DB returns something unexpected.""" + + +def _author_to_public(a: Optional[Author]) -> Optional[Dict[str, Any]]: + if a is None: + return None + if a.deleted_at is not None: + # treat deleted authors as missing + return None + return { + "id": a.ghost_id, + "slug": a.slug, + "name": a.name, + "profile_image": a.profile_image, + "cover_image": a.cover_image, + # expose more (bio, etc.) 
if needed + } + + +def _tag_to_public(t: Tag) -> Dict[str, Any]: + return { + "id": t.ghost_id, + "slug": t.slug, + "name": t.name, + "description": t.description, + "feature_image": t.feature_image, # fixed key + "visibility": t.visibility, + "deleted_at": t.deleted_at, + } + + +def _post_to_public(p: Post) -> Dict[str, Any]: + """ + Shape a Post to the public JSON used by the app, mirroring GhostClient._normalise_post. + """ + # Primary author: explicit or first available + primary_author = p.primary_author or (p.authors[0] if p.authors else None) + + # Primary tag: prefer explicit relationship, otherwise first public/non-deleted tag + primary_tag = getattr(p, "primary_tag", None) + if primary_tag is None: + public_tags = [ + t for t in (p.tags or []) + if t.deleted_at is None and (t.visibility or "public") == "public" + ] + primary_tag = public_tags[0] if public_tags else None + + return { + "id": p.id, + "ghost_id": p.ghost_id, + "slug": p.slug, + "title": p.title, + "html": p.html, + "is_page": p.is_page, + "excerpt": p.custom_excerpt or p.excerpt, + "custom_excerpt": p.custom_excerpt, + "published_at": p.published_at, + "updated_at": p.updated_at, + "visibility": p.visibility, + "status": p.status, + "deleted_at": p.deleted_at, + "feature_image": p.feature_image, + "user_id": p.user_id, + "publish_requested": p.publish_requested, + "primary_author": _author_to_public(primary_author), + "primary_tag": _tag_to_public(primary_tag) if primary_tag else None, + "tags": [ + _tag_to_public(t) + for t in (p.tags or []) + if t.deleted_at is None and (t.visibility or "public") == "public" + ], + "authors": [ + _author_to_public(a) + for a in (p.authors or []) + if a and a.deleted_at is None + ], + } + + +class DBClient: + """ + Drop-in replacement for GhostClient, but served from our mirrored tables. + Call methods with an AsyncSession. 
+ """ + + def __init__(self, session: AsyncSession): + self.sess = session + + async def list_posts( + self, + limit: int = 10, + page: int = 1, + selected_tags: Optional[Sequence[str]] = None, + selected_authors: Optional[Sequence[str]] = None, + search: Optional[str] = None, + drafts: bool = False, + drafts_user_id: Optional[int] = None, + exclude_covered_tag_ids: Optional[Sequence[int]] = None, + ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]: + """ + List published posts, optionally filtered by tags/authors and a search term. + When drafts=True, lists draft posts instead (filtered by drafts_user_id if given). + Returns (posts, pagination). + """ + + # ---- base visibility filters + if drafts: + base_filters = [ + Post.deleted_at.is_(None), + Post.status == "draft", + Post.is_page.is_(False), + ] + if drafts_user_id is not None: + base_filters.append(Post.user_id == drafts_user_id) + else: + base_filters = [ + Post.deleted_at.is_(None), + Post.status == "published", + Post.is_page.is_(False), + ] + + q = select(Post).where(*base_filters) + + # ---- TAG FILTER (matches any tag on the post) + if selected_tags: + tag_slugs = list(selected_tags) + q = q.where( + Post.tags.any( + and_( + Tag.slug.in_(tag_slugs), + Tag.deleted_at.is_(None), + ) + ) + ) + + # ---- EXCLUDE-COVERED FILTER ("etc" mode: posts NOT covered by any group) + if exclude_covered_tag_ids: + covered_sq = ( + select(PostTag.post_id) + .join(Tag, Tag.id == PostTag.tag_id) + .where( + Tag.id.in_(list(exclude_covered_tag_ids)), + Tag.deleted_at.is_(None), + ) + ) + q = q.where(Post.id.notin_(covered_sq)) + + # ---- AUTHOR FILTER (matches primary or any author) + if selected_authors: + author_slugs = list(selected_authors) + q = q.where( + or_( + Post.primary_author.has( + and_( + Author.slug.in_(author_slugs), + Author.deleted_at.is_(None), + ) + ), + Post.authors.any( + and_( + Author.slug.in_(author_slugs), + Author.deleted_at.is_(None), + ) + ), + ) + ) + + # ---- SEARCH FILTER (title OR excerpt 
OR plaintext contains) + if search: + term = f"%{search.strip().lower()}%" + q = q.where( + or_( + func.lower(func.coalesce(Post.title, "")).like(term), + func.lower(func.coalesce(Post.excerpt, "")).like(term), + func.lower(func.coalesce(Post.plaintext,"")).like(term), + ) + ) + + # ---- ordering + if drafts: + q = q.order_by(desc(Post.updated_at)) + else: + q = q.order_by(desc(Post.published_at)) + + # ---- pagination math + if page < 1: + page = 1 + offset_val = (page - 1) * limit + + # ---- total count with SAME filters (including tag/author/search) + q_no_limit = q.with_only_columns(Post.id).order_by(None) + count_q = select(func.count()).select_from(q_no_limit.subquery()) + total = int((await self.sess.execute(count_q)).scalar() or 0) + + # ---- eager load relationships to avoid N+1 / greenlet issues + q = ( + q.options( + joinedload(Post.primary_author), + joinedload(Post.primary_tag), + selectinload(Post.authors), + selectinload(Post.tags), + ) + .limit(limit) + .offset(offset_val) + ) + + rows: List[Post] = list((await self.sess.execute(q)).scalars()) + posts = [_post_to_public(p) for p in rows] + + # ---- search_count: reflect same filters + search (i.e., equals total once filters applied) + search_count = total + + pages_total = (total + limit - 1) // limit if limit else 1 + pagination = { + "page": page, + "limit": limit, + "pages": pages_total, + "total": total, + "search_count": search_count, + "next": page + 1 if page < pages_total else None, + "prev": page - 1 if page > 1 else None, + } + + return posts, pagination + + async def list_pages( + self, + limit: int = 10, + page: int = 1, + search: Optional[str] = None, + ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]: + """ + List published pages (is_page=True) with their PageConfig eagerly loaded. + Returns (pages, pagination). 
+ """ + base_filters = [ + Post.deleted_at.is_(None), + Post.status == "published", + Post.is_page.is_(True), + ] + + q = select(Post).where(*base_filters) + + if search: + term = f"%{search.strip().lower()}%" + q = q.where( + or_( + func.lower(func.coalesce(Post.title, "")).like(term), + func.lower(func.coalesce(Post.excerpt, "")).like(term), + func.lower(func.coalesce(Post.plaintext, "")).like(term), + ) + ) + + q = q.order_by(desc(Post.published_at)) + + if page < 1: + page = 1 + offset_val = (page - 1) * limit + + q_no_limit = q.with_only_columns(Post.id).order_by(None) + count_q = select(func.count()).select_from(q_no_limit.subquery()) + total = int((await self.sess.execute(count_q)).scalar() or 0) + + q = ( + q.options( + joinedload(Post.primary_author), + joinedload(Post.primary_tag), + selectinload(Post.authors), + selectinload(Post.tags), + joinedload(Post.page_config), + ) + .limit(limit) + .offset(offset_val) + ) + + rows: List[Post] = list((await self.sess.execute(q)).scalars()) + + def _page_to_public(p: Post) -> Dict[str, Any]: + d = _post_to_public(p) + pc = p.page_config + d["features"] = pc.features if pc else {} + return d + + pages_list = [_page_to_public(p) for p in rows] + + pages_total = (total + limit - 1) // limit if limit else 1 + pagination = { + "page": page, + "limit": limit, + "pages": pages_total, + "total": total, + "next": page + 1 if page < pages_total else None, + "prev": page - 1 if page > 1 else None, + } + + return pages_list, pagination + + async def posts_by_slug( + self, + slug: str, + include: Sequence[str] = ("tags", "authors"), + fields: Sequence[str] = ( + "id", + "slug", + "title", + "html", + "excerpt", + "custom_excerpt", + "published_at", + "feature_image", + ), + include_drafts: bool = False, + ) -> List[Dict[str, Any]]: + """ + Return posts (usually 1) matching this slug. + + Only returns published, non-deleted posts by default. + When include_drafts=True, also returns draft posts (for admin access). 
+ + Eager-load related objects via selectinload/joinedload so we don't N+1 when + serializing in _post_to_public(). + """ + + # Build .options(...) dynamically based on `include` + load_options = [] + + # Tags + if "tags" in include: + load_options.append(selectinload(Post.tags)) + if hasattr(Post, "primary_tag"): + # joinedload is fine too; selectin keeps a single extra roundtrip + load_options.append(selectinload(Post.primary_tag)) + + # Authors + if "authors" in include: + if hasattr(Post, "primary_author"): + load_options.append(selectinload(Post.primary_author)) + if hasattr(Post, "authors"): + load_options.append(selectinload(Post.authors)) + + filters = [Post.deleted_at.is_(None), Post.slug == slug] + if not include_drafts: + filters.append(Post.status == "published") + + q = ( + select(Post) + .where(*filters) + .order_by(desc(Post.published_at)) + .options(*load_options) + ) + + result = await self.sess.execute(q) + rows: List[Post] = list(result.scalars()) + + return [(_post_to_public(p), p) for p in rows] + + async def list_tags( + self, + limit: int = 5000, + page: int = 1, + is_page=False, + ) -> List[Dict[str, Any]]: + """ + Return public, not-soft-deleted tags. + Include published_post_count = number of published (not deleted) posts using that tag. 
+ """ + + if page < 1: + page = 1 + offset_val = (page - 1) * limit + + # Subquery: count published posts per tag + tag_post_counts_sq = ( + select( + PostTag.tag_id.label("tag_id"), + func.count().label("published_post_count"), + ) + .select_from(PostTag) + .join(Post, Post.id == PostTag.post_id) + .where( + Post.deleted_at.is_(None), + Post.published_at.is_not(None), + Post.is_page.is_(is_page), + ) + .group_by(PostTag.tag_id) + .subquery() + ) + + q = ( + select( + Tag, + func.coalesce(tag_post_counts_sq.c.published_post_count, 0).label( + "published_post_count" + ), + ) + .outerjoin( + tag_post_counts_sq, + tag_post_counts_sq.c.tag_id == Tag.id, + ) + .where( + Tag.deleted_at.is_(None), + (Tag.visibility == "public") | (Tag.visibility.is_(None)), + func.coalesce(tag_post_counts_sq.c.published_post_count, 0) > 0, + ) + .order_by(desc(func.coalesce(tag_post_counts_sq.c.published_post_count, 0)), asc(Tag.name)) + .limit(limit) + .offset(offset_val) + ) + + result = await self.sess.execute(q) + + # result will return rows like (Tag, published_post_count) + rows = list(result.all()) + + tags = [ + { + "id": tag.ghost_id, + "slug": tag.slug, + "name": tag.name, + "description": tag.description, + "feature_image": tag.feature_image, + "visibility": tag.visibility, + "published_post_count": count, + } + for (tag, count) in rows + ] + + return tags + + async def list_authors( + self, + limit: int = 5000, + page: int = 1, + is_page=False, + ) -> List[Dict[str, Any]]: + """ + Return non-deleted authors. + Include published_post_count = number of published (not deleted) posts by that author + (counted via Post.primary_author_id). 
+ """ + + if page < 1: + page = 1 + offset_val = (page - 1) * limit + + # Subquery: count published posts per primary author + author_post_counts_sq = ( + select( + Post.primary_author_id.label("author_id"), + func.count().label("published_post_count"), + ) + .where( + Post.deleted_at.is_(None), + Post.published_at.is_not(None), + Post.is_page.is_(is_page), + ) + .group_by(Post.primary_author_id) + .subquery() + ) + + q = ( + select( + Author, + func.coalesce(author_post_counts_sq.c.published_post_count, 0).label( + "published_post_count" + ), + ) + .outerjoin( + author_post_counts_sq, + author_post_counts_sq.c.author_id == Author.id, + ) + .where( + Author.deleted_at.is_(None), + ) + .order_by(asc(Author.name)) + .limit(limit) + .offset(offset_val) + ) + + result = await self.sess.execute(q) + rows = list(result.all()) + + authors = [ + { + "id": a.ghost_id, + "slug": a.slug, + "name": a.name, + "bio": a.bio, + "profile_image": a.profile_image, + "cover_image": a.cover_image, + "website": a.website, + "location": a.location, + "facebook": a.facebook, + "twitter": a.twitter, + "published_post_count": count, + } + for (a, count) in rows + ] + + return authors + + async def count_drafts(self, user_id: Optional[int] = None) -> int: + """Count draft (non-page, non-deleted) posts, optionally for a single user.""" + q = select(func.count()).select_from(Post).where( + Post.deleted_at.is_(None), + Post.status == "draft", + Post.is_page.is_(False), + ) + if user_id is not None: + q = q.where(Post.user_id == user_id) + return int((await self.sess.execute(q)).scalar() or 0) + + async def list_tag_groups_with_counts(self) -> List[Dict[str, Any]]: + """ + Return all tag groups with aggregated published post counts. + Each group dict includes a `tag_slugs` list and `tag_ids` list. + Count = distinct published posts having ANY member tag. + Ordered by sort_order, name. 
+ """ + # Subquery: distinct published post IDs per tag group + post_count_sq = ( + select( + TagGroupTag.tag_group_id.label("group_id"), + func.count(func.distinct(PostTag.post_id)).label("post_count"), + ) + .select_from(TagGroupTag) + .join(PostTag, PostTag.tag_id == TagGroupTag.tag_id) + .join(Post, Post.id == PostTag.post_id) + .where( + Post.deleted_at.is_(None), + Post.published_at.is_not(None), + Post.is_page.is_(False), + ) + .group_by(TagGroupTag.tag_group_id) + .subquery() + ) + + q = ( + select( + TagGroup, + func.coalesce(post_count_sq.c.post_count, 0).label("post_count"), + ) + .outerjoin(post_count_sq, post_count_sq.c.group_id == TagGroup.id) + .order_by(asc(TagGroup.sort_order), asc(TagGroup.name)) + ) + + rows = list((await self.sess.execute(q)).all()) + + groups = [] + for tg, count in rows: + # Fetch member tag slugs + ids for this group + tag_rows = list( + (await self.sess.execute( + select(Tag.slug, Tag.id) + .join(TagGroupTag, TagGroupTag.tag_id == Tag.id) + .where( + TagGroupTag.tag_group_id == tg.id, + Tag.deleted_at.is_(None), + (Tag.visibility == "public") | (Tag.visibility.is_(None)), + ) + )).all() + ) + groups.append({ + "id": tg.id, + "name": tg.name, + "slug": tg.slug, + "feature_image": tg.feature_image, + "colour": tg.colour, + "sort_order": tg.sort_order, + "post_count": count, + "tag_slugs": [r[0] for r in tag_rows], + "tag_ids": [r[1] for r in tag_rows], + }) + + return groups + + async def count_etc_posts(self, assigned_tag_ids: List[int]) -> int: + """ + Count published posts not covered by any tag group. + Includes posts with no tags and posts whose tags are all unassigned. 
+ """ + base = [ + Post.deleted_at.is_(None), + Post.published_at.is_not(None), + Post.is_page.is_(False), + ] + if assigned_tag_ids: + covered_sq = ( + select(PostTag.post_id) + .join(Tag, Tag.id == PostTag.tag_id) + .where( + Tag.id.in_(assigned_tag_ids), + Tag.deleted_at.is_(None), + ) + ) + base.append(Post.id.notin_(covered_sq)) + + q = select(func.count()).select_from(Post).where(*base) + return int((await self.sess.execute(q)).scalar() or 0) + + async def list_drafts(self) -> List[Dict[str, Any]]: + """Return all draft (non-page, non-deleted) posts, newest-updated first.""" + q = ( + select(Post) + .where( + Post.deleted_at.is_(None), + Post.status == "draft", + Post.is_page.is_(False), + ) + .order_by(desc(Post.updated_at)) + .options( + joinedload(Post.primary_author), + joinedload(Post.primary_tag), + selectinload(Post.authors), + selectinload(Post.tags), + ) + ) + rows: List[Post] = list((await self.sess.execute(q)).scalars()) + return [_post_to_public(p) for p in rows] diff --git a/blog/bp/blog/routes.py b/blog/bp/blog/routes.py new file mode 100644 index 0000000..e6a6336 --- /dev/null +++ b/blog/bp/blog/routes.py @@ -0,0 +1,369 @@ +from __future__ import annotations + +#from quart import Blueprint, g + +import json +import os + +from quart import ( + request, + render_template, + make_response, + g, + Blueprint, + redirect, + url_for, +) +from .ghost_db import DBClient # adjust import path +from shared.db.session import get_session +from .filters.qs import makeqs_factory, decode +from .services.posts_data import posts_data +from .services.pages_data import pages_data + +from shared.browser.app.redis_cacher import cache_page, invalidate_tag_cache +from shared.browser.app.utils.htmx import is_htmx_request +from shared.browser.app.authz import require_admin +from shared.utils import host_url + +def register(url_prefix, title): + blogs_bp = Blueprint("blog", __name__, url_prefix) + + from .web_hooks.routes import ghost_webhooks + 
blogs_bp.register_blueprint(ghost_webhooks) + + from .ghost.editor_api import editor_api_bp + blogs_bp.register_blueprint(editor_api_bp) + + + + from ..post.routes import register as register_blog + blogs_bp.register_blueprint( + register_blog(), + ) + + from .admin.routes import register as register_tag_groups_admin + blogs_bp.register_blueprint(register_tag_groups_admin()) + + + @blogs_bp.before_app_serving + async def init(): + from .ghost.ghost_sync import ( + sync_all_content_from_ghost, + sync_all_membership_from_ghost, + ) + + async with get_session() as s: + await sync_all_content_from_ghost(s) + await sync_all_membership_from_ghost(s) + await s.commit() + + @blogs_bp.before_request + def route(): + g.makeqs_factory = makeqs_factory + + + @blogs_bp.context_processor + async def inject_root(): + return { + "blog_title": title, + "qs": makeqs_factory()(), + "unsplash_api_key": os.environ.get("UNSPLASH_ACCESS_KEY", ""), + } + + SORT_MAP = { + "newest": "published_at DESC", + "oldest": "published_at ASC", + "az": "title ASC", + "za": "title DESC", + "featured": "featured DESC, published_at DESC", + } + + @blogs_bp.get("/") + async def home(): + """Render the Ghost page with slug 'home' as the site homepage.""" + from ..post.services.post_data import post_data as _post_data + from shared.config import config as get_config + from shared.infrastructure.cart_identity import current_cart_identity + from shared.services.registry import services as svc + from shared.infrastructure.fragments import fetch_fragment, fetch_fragments + + p_data = await _post_data("home", g.s, include_drafts=False) + if not p_data: + # Fall back to blog index if "home" page doesn't exist yet + return redirect(host_url(url_for("blog.index"))) + + g.post_data = p_data + + # Build the same context the post blueprint's context_processor provides + db_post_id = p_data["post"]["id"] + post_slug = p_data["post"]["slug"] + + # Fetch container nav fragments from events + market + paginate_url = 
url_for( + 'blog.post.widget_paginate', + slug=post_slug, widget_domain='calendar', + ) + nav_params = { + "container_type": "page", + "container_id": str(db_post_id), + "post_slug": post_slug, + "paginate_url": paginate_url, + } + events_nav_html, market_nav_html = await fetch_fragments([ + ("events", "container-nav", nav_params), + ("market", "container-nav", nav_params), + ]) + container_nav_html = events_nav_html + market_nav_html + + ctx = { + **p_data, + "base_title": f"{get_config()['title']} {p_data['post']['title']}", + "container_nav_html": container_nav_html, + } + + # Page cart badge + if p_data["post"].get("is_page"): + ident = current_cart_identity() + page_summary = await svc.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + page_slug=post_slug, + ) + ctx["page_cart_count"] = page_summary.count + page_summary.calendar_count + page_summary.ticket_count + ctx["page_cart_total"] = float(page_summary.total + page_summary.calendar_total + page_summary.ticket_total) + + if not is_htmx_request(): + html = await render_template("_types/home/index.html", **ctx) + else: + html = await render_template("_types/home/_oob_elements.html", **ctx) + return await make_response(html) + + @blogs_bp.get("/index") + @blogs_bp.get("/index/") + async def index(): + """Blog listing — moved from / to /index.""" + + q = decode() + content_type = request.args.get("type", "posts") + + if content_type == "pages": + data = await pages_data(g.s, q.page, q.search) + context = { + **data, + "content_type": "pages", + "search": q.search, + "selected_tags": (), + "selected_authors": (), + "selected_groups": (), + "sort": None, + "view": None, + "drafts": None, + "draft_count": 0, + "tags": [], + "authors": [], + "tag_groups": [], + "posts": data.get("pages", []), + } + if not is_htmx_request(): + html = await render_template("_types/blog/index.html", **context) + elif q.page > 1: + html = await render_template("_types/blog/_page_cards.html", 
**context) + else: + html = await render_template("_types/blog/_oob_elements.html", **context) + return await make_response(html) + + # Default: posts listing + # Drafts filter requires login; ignore if not logged in + show_drafts = bool(q.drafts and g.user) + is_admin = bool((g.get("rights") or {}).get("admin")) + drafts_user_id = None if (not show_drafts or is_admin) else g.user.id + + # For the draft count badge: admin sees all drafts, non-admin sees own + count_drafts_uid = None if (g.user and is_admin) else (g.user.id if g.user else False) + + data = await posts_data( + g.s, q.page, q.search, q.sort, q.selected_tags, q.selected_authors, q.liked, + drafts=show_drafts, drafts_user_id=drafts_user_id, + count_drafts_for_user_id=count_drafts_uid, + selected_groups=q.selected_groups, + ) + + context = { + **data, + "content_type": "posts", + "selected_tags": q.selected_tags, + "selected_authors": q.selected_authors, + "selected_groups": q.selected_groups, + "sort": q.sort, + "search": q.search, + "view": q.view, + "drafts": q.drafts if show_drafts else None, + } + + # Determine which template to use based on request type and pagination + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/blog/index.html", **context) + elif q.page > 1: + # HTMX pagination: just blog cards + sentinel + html = await render_template("_types/blog/_cards.html", **context) + else: + # HTMX navigation (page 1): main panel + OOB elements + #main_panel = await render_template("_types/blog/_main_panel.html", **context) + html = await render_template("_types/blog/_oob_elements.html", **context) + #html = oob_elements + main_panel + + return await make_response(html) + + @blogs_bp.get("/new/") + @require_admin + async def new_post(): + if not is_htmx_request(): + html = await render_template("_types/blog_new/index.html") + else: + html = await render_template("_types/blog_new/_oob_elements.html") + return await make_response(html) + 
+ @blogs_bp.post("/new/") + @require_admin + async def new_post_save(): + from .ghost.ghost_posts import create_post + from .ghost.lexical_validator import validate_lexical + from .ghost.ghost_sync import sync_single_post + + form = await request.form + title = form.get("title", "").strip() or "Untitled" + lexical_raw = form.get("lexical", "") + status = form.get("status", "draft") + feature_image = form.get("feature_image", "").strip() + custom_excerpt = form.get("custom_excerpt", "").strip() + feature_image_caption = form.get("feature_image_caption", "").strip() + + # Validate + try: + lexical_doc = json.loads(lexical_raw) + except (json.JSONDecodeError, TypeError): + html = await render_template( + "_types/blog_new/index.html", + save_error="Invalid JSON in editor content.", + ) + return await make_response(html, 400) + + ok, reason = validate_lexical(lexical_doc) + if not ok: + html = await render_template( + "_types/blog_new/index.html", + save_error=reason, + ) + return await make_response(html, 400) + + # Create in Ghost + ghost_post = await create_post( + title=title, + lexical_json=lexical_raw, + status=status, + feature_image=feature_image or None, + custom_excerpt=custom_excerpt or None, + feature_image_caption=feature_image_caption or None, + ) + + # Sync to local DB + await sync_single_post(g.s, ghost_post["id"]) + await g.s.flush() + + # Set user_id on the newly created post + from models.ghost_content import Post + from sqlalchemy import select + local_post = (await g.s.execute( + select(Post).where(Post.ghost_id == ghost_post["id"]) + )).scalar_one_or_none() + if local_post and local_post.user_id is None: + local_post.user_id = g.user.id + await g.s.flush() + + # Clear blog listing cache + await invalidate_tag_cache("blog") + + # Redirect to the edit page (post is likely a draft, so public detail would 404) + return redirect(host_url(url_for("blog.post.admin.edit", slug=ghost_post["slug"]))) + + + @blogs_bp.get("/new-page/") + @require_admin + async 
def new_page(): + if not is_htmx_request(): + html = await render_template("_types/blog_new/index.html", is_page=True) + else: + html = await render_template("_types/blog_new/_oob_elements.html", is_page=True) + return await make_response(html) + + @blogs_bp.post("/new-page/") + @require_admin + async def new_page_save(): + from .ghost.ghost_posts import create_page + from .ghost.lexical_validator import validate_lexical + from .ghost.ghost_sync import sync_single_page + + form = await request.form + title = form.get("title", "").strip() or "Untitled" + lexical_raw = form.get("lexical", "") + status = form.get("status", "draft") + feature_image = form.get("feature_image", "").strip() + custom_excerpt = form.get("custom_excerpt", "").strip() + feature_image_caption = form.get("feature_image_caption", "").strip() + + # Validate + try: + lexical_doc = json.loads(lexical_raw) + except (json.JSONDecodeError, TypeError): + html = await render_template( + "_types/blog_new/index.html", + save_error="Invalid JSON in editor content.", + is_page=True, + ) + return await make_response(html, 400) + + ok, reason = validate_lexical(lexical_doc) + if not ok: + html = await render_template( + "_types/blog_new/index.html", + save_error=reason, + is_page=True, + ) + return await make_response(html, 400) + + # Create in Ghost (as page) + ghost_page = await create_page( + title=title, + lexical_json=lexical_raw, + status=status, + feature_image=feature_image or None, + custom_excerpt=custom_excerpt or None, + feature_image_caption=feature_image_caption or None, + ) + + # Sync to local DB (uses pages endpoint) + await sync_single_page(g.s, ghost_page["id"]) + await g.s.flush() + + # Set user_id on the newly created page + from models.ghost_content import Post + from sqlalchemy import select + local_post = (await g.s.execute( + select(Post).where(Post.ghost_id == ghost_page["id"]) + )).scalar_one_or_none() + if local_post and local_post.user_id is None: + local_post.user_id = g.user.id + 
await g.s.flush() + + # Clear blog listing cache + await invalidate_tag_cache("blog") + + # Redirect to the page admin + return redirect(host_url(url_for("blog.post.admin.edit", slug=ghost_page["slug"]))) + + + @blogs_bp.get("/drafts/") + async def drafts(): + return redirect(host_url(url_for("blog.index")) + "?drafts=1") + + return blogs_bp \ No newline at end of file diff --git a/blog/bp/blog/services/pages_data.py b/blog/bp/blog/services/pages_data.py new file mode 100644 index 0000000..cc88fa1 --- /dev/null +++ b/blog/bp/blog/services/pages_data.py @@ -0,0 +1,18 @@ +from ..ghost_db import DBClient + + +async def pages_data(session, page, search): + client = DBClient(session) + + pages, pagination = await client.list_pages( + limit=10, + page=page, + search=search, + ) + + return { + "pages": pages, + "page": pagination.get("page", page), + "total_pages": pagination.get("pages", 1), + "search": search, + } diff --git a/blog/bp/blog/services/posts_data.py b/blog/bp/blog/services/posts_data.py new file mode 100644 index 0000000..3203aae --- /dev/null +++ b/blog/bp/blog/services/posts_data.py @@ -0,0 +1,142 @@ +import re + +from ..ghost_db import DBClient # adjust import path +from sqlalchemy import select +from models.ghost_content import PostLike +from shared.infrastructure.fragments import fetch_fragment +from quart import g + +async def posts_data( + session, + page, search, sort, selected_tags, selected_authors, liked, + drafts=False, drafts_user_id=None, count_drafts_for_user_id=None, + selected_groups=(), + ): + client = DBClient(session) + + # --- Tag-group resolution --- + tag_groups = await client.list_tag_groups_with_counts() + + # Collect all assigned tag IDs across groups + all_assigned_tag_ids = [] + for grp in tag_groups: + all_assigned_tag_ids.extend(grp["tag_ids"]) + + # Build slug-lookup for groups + group_by_slug = {grp["slug"]: grp for grp in tag_groups} + + # Resolve selected group → post filtering + # Groups and tags are mutually exclusive — 
groups override tags when set + effective_tags = selected_tags + etc_mode_tag_ids = None # set when "etc" is selected + if selected_groups: + group_slug = selected_groups[0] + if group_slug == "etc": + # etc = posts NOT covered by any group (includes untagged) + etc_mode_tag_ids = all_assigned_tag_ids + effective_tags = () + elif group_slug in group_by_slug: + effective_tags = tuple(group_by_slug[group_slug]["tag_slugs"]) + + # Compute "etc" virtual group + etc_count = await client.count_etc_posts(all_assigned_tag_ids) + if etc_count > 0 or (selected_groups and selected_groups[0] == "etc"): + tag_groups.append({ + "id": None, + "name": "etc", + "slug": "etc", + "feature_image": None, + "colour": None, + "sort_order": 999999, + "post_count": etc_count, + "tag_slugs": [], + "tag_ids": [], + }) + + posts, pagination = await client.list_posts( + limit=10, + page=page, + selected_tags=effective_tags, + selected_authors=selected_authors, + search=search, + drafts=drafts, + drafts_user_id=drafts_user_id, + exclude_covered_tag_ids=etc_mode_tag_ids, + ) + + # Get all post IDs in this batch + post_ids = [p["id"] for p in posts] + + # Add is_liked field to each post for current user + if g.user: + # Fetch all likes for this user and these posts in one query + liked_posts = await session.execute( + select(PostLike.post_id).where( + PostLike.user_id == g.user.id, + PostLike.post_id.in_(post_ids), + PostLike.deleted_at.is_(None), + ) + ) + liked_post_ids = {row[0] for row in liked_posts} + + # Add is_liked to each post + for post in posts: + post["is_liked"] = post["id"] in liked_post_ids + else: + # Not logged in - no posts are liked + for post in posts: + post["is_liked"] = False + + # Fetch card decoration fragments from events + card_widgets_html = {} + if post_ids: + post_slugs = [p.get("slug", "") for p in posts] + cards_html = await fetch_fragment("events", "container-cards", params={ + "post_ids": ",".join(str(pid) for pid in post_ids), + "post_slugs": 
",".join(post_slugs), + }) + if cards_html: + card_widgets_html = _parse_card_fragments(cards_html) + + tags=await client.list_tags( + limit=50000 + ) + authors=await client.list_authors( + limit=50000 + ) + + # Draft count for the logged-in user (None → admin sees all) + draft_count = 0 + if count_drafts_for_user_id is not False: + draft_count = await client.count_drafts(user_id=count_drafts_for_user_id) + + return { + "posts": posts, + "page": pagination.get("page", page), + "total_pages": pagination.get("pages", 1), + "search_count": pagination.get("search_count"), + "tags": tags, + "authors": authors, + "draft_count": draft_count, + "tag_groups": tag_groups, + "selected_groups": selected_groups, + "card_widgets_html": card_widgets_html, + } + + +# Regex to extract per-post blocks delimited by comment markers +_CARD_MARKER_RE = re.compile( + r'(.*?)', + re.DOTALL, +) + + +def _parse_card_fragments(html: str) -> dict[str, str]: + """Parse the container-cards fragment into {post_id_str: html} dict.""" + result = {} + for m in _CARD_MARKER_RE.finditer(html): + post_id_str = m.group(1) + inner = m.group(2).strip() + if inner: + result[post_id_str] = inner + return result diff --git a/blog/bp/blog/web_hooks/routes.py b/blog/bp/blog/web_hooks/routes.py new file mode 100644 index 0000000..b02138b --- /dev/null +++ b/blog/bp/blog/web_hooks/routes.py @@ -0,0 +1,120 @@ +# suma_browser/webhooks.py +from __future__ import annotations +import os +from quart import Blueprint, request, abort, Response, g + +from ..ghost.ghost_sync import ( + sync_single_member, + sync_single_page, + sync_single_post, + sync_single_author, + sync_single_tag, +) +from shared.browser.app.redis_cacher import clear_cache +from shared.browser.app.csrf import csrf_exempt + +ghost_webhooks = Blueprint("ghost_webhooks", __name__, url_prefix="/__ghost-webhook") + +def _check_secret(req) -> None: + expected = os.getenv("GHOST_WEBHOOK_SECRET") + if not expected: + # if you don't set a secret, we allow 
anything (dev mode) + return + got = req.args.get("secret") or req.headers.get("X-Webhook-Secret") + if got != expected: + abort(401) + +def _extract_id(data: dict, key: str) -> str | None: + """ + key is "post", "tag", or "user"/"author". + Ghost usually sends { key: { current: { id: ... }, previous: { id: ... } } } + We'll try current first, then previous. + """ + block = data.get(key) or {} + cur = block.get("current") or {} + prev = block.get("previous") or {} + return cur.get("id") or prev.get("id") + + +@csrf_exempt +@ghost_webhooks.route("/member/", methods=["POST"]) +#@ghost_webhooks.post("/member/") +async def webhook_member() -> Response: + _check_secret(request) + + data = await request.get_json(force=True, silent=True) or {} + ghost_id = _extract_id(data, "member") + if not ghost_id: + abort(400, "no member id") + + # sync one post + #async_session_factory = request.app.config["ASYNC_SESSION_FACTORY"] # we'll set this in app.py + await sync_single_member(g.s, ghost_id) + return Response(status=204) + +@csrf_exempt +@ghost_webhooks.post("/post/") +@clear_cache(tag='blog') +async def webhook_post() -> Response: + _check_secret(request) + + data = await request.get_json(force=True, silent=True) or {} + ghost_id = _extract_id(data, "post") + if not ghost_id: + abort(400, "no post id") + + # sync one post + #async_session_factory = request.app.config["ASYNC_SESSION_FACTORY"] # we'll set this in app.py + await sync_single_post(g.s, ghost_id) + + return Response(status=204) + +@csrf_exempt +@ghost_webhooks.post("/page/") +@clear_cache(tag='blog') +async def webhook_page() -> Response: + _check_secret(request) + + data = await request.get_json(force=True, silent=True) or {} + ghost_id = _extract_id(data, "page") + if not ghost_id: + abort(400, "no page id") + + # sync one post + #async_session_factory = request.app.config["ASYNC_SESSION_FACTORY"] # we'll set this in app.py + await sync_single_page(g.s, ghost_id) + + return Response(status=204) + +@csrf_exempt 
+@ghost_webhooks.post("/author/") +@clear_cache(tag='blog') +async def webhook_author() -> Response: + _check_secret(request) + + data = await request.get_json(force=True, silent=True) or {} + # Ghost calls them "user" in webhook payload in many versions, + # and you want authors in your mirror. We'll try both keys. + ghost_id = _extract_id(data, "user") or _extract_id(data, "author") + if not ghost_id: + abort(400, "no author id") + + #async_session_factory = request.app.config["ASYNC_SESSION_FACTORY"] + await sync_single_author(g.s, ghost_id) + + return Response(status=204) + +@csrf_exempt +@ghost_webhooks.post("/tag/") +@clear_cache(tag='blog') +async def webhook_tag() -> Response: + _check_secret(request) + + data = await request.get_json(force=True, silent=True) or {} + ghost_id = _extract_id(data, "tag") + if not ghost_id: + abort(400, "no tag id") + + #async_session_factory = request.app.config["ASYNC_SESSION_FACTORY"] + await sync_single_tag(g.s, ghost_id) + return Response(status=204) diff --git a/blog/bp/fragments/__init__.py b/blog/bp/fragments/__init__.py new file mode 100644 index 0000000..a4af44b --- /dev/null +++ b/blog/bp/fragments/__init__.py @@ -0,0 +1 @@ +from .routes import register as register_fragments diff --git a/blog/bp/fragments/routes.py b/blog/bp/fragments/routes.py new file mode 100644 index 0000000..07d6e67 --- /dev/null +++ b/blog/bp/fragments/routes.py @@ -0,0 +1,52 @@ +"""Blog app fragment endpoints. + +Exposes HTML fragments at ``/internal/fragments/`` for consumption +by other coop apps via the fragment client. 
+""" + +from __future__ import annotations + +from quart import Blueprint, Response, g, render_template, request + +from shared.infrastructure.fragments import FRAGMENT_HEADER +from shared.services.navigation import get_navigation_tree + + +def register(): + bp = Blueprint("fragments", __name__, url_prefix="/internal/fragments") + + # Registry of fragment handlers: type -> async callable returning HTML str + _handlers: dict[str, object] = {} + + @bp.before_request + async def _require_fragment_header(): + if not request.headers.get(FRAGMENT_HEADER): + return Response("", status=403) + + @bp.get("/") + async def get_fragment(fragment_type: str): + handler = _handlers.get(fragment_type) + if handler is None: + return Response("", status=200, content_type="text/html") + html = await handler() + return Response(html, status=200, content_type="text/html") + + # --- nav-tree fragment --- + async def _nav_tree_handler(): + app_name = request.args.get("app_name", "") + path = request.args.get("path", "/") + first_seg = path.strip("/").split("/")[0] + menu_items = await get_navigation_tree(g.s) + return await render_template( + "fragments/nav_tree.html", + menu_items=menu_items, + frag_app_name=app_name, + frag_first_seg=first_seg, + ) + + _handlers["nav-tree"] = _nav_tree_handler + + # Store handlers dict on blueprint so app code can register handlers + bp._fragment_handlers = _handlers + + return bp diff --git a/blog/bp/menu_items/__init__.py b/blog/bp/menu_items/__init__.py new file mode 100644 index 0000000..be248ab --- /dev/null +++ b/blog/bp/menu_items/__init__.py @@ -0,0 +1,3 @@ +from .routes import register + +__all__ = ["register"] diff --git a/blog/bp/menu_items/routes.py b/blog/bp/menu_items/routes.py new file mode 100644 index 0000000..26ac745 --- /dev/null +++ b/blog/bp/menu_items/routes.py @@ -0,0 +1,213 @@ +from __future__ import annotations + +from quart import Blueprint, render_template, make_response, request, jsonify, g + +from shared.browser.app.authz 
import require_admin +from .services.menu_items import ( + get_all_menu_items, + get_menu_item_by_id, + create_menu_item, + update_menu_item, + delete_menu_item, + search_pages, + MenuItemError, +) +from shared.browser.app.utils.htmx import is_htmx_request + +def register(): + bp = Blueprint("menu_items", __name__, url_prefix='/settings/menu_items') + + async def get_menu_items_nav_oob(): + """Helper to generate OOB update for root nav menu items""" + menu_items = await get_all_menu_items(g.s) + + nav_oob = await render_template( + "_types/menu_items/_nav_oob.html", + menu_items=menu_items, + ) + return nav_oob + + @bp.get("/") + @require_admin + async def list_menu_items(): + """List all menu items""" + menu_items = await get_all_menu_items(g.s) + + + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template( + "_types/menu_items/index.html", + menu_items=menu_items, + ) + else: + + html = await render_template( + "_types/menu_items/_oob_elements.html", + menu_items=menu_items, + ) + #html = await render_template("_types/root/settings/_oob_elements.html") + + + return await make_response(html) + + @bp.get("/new/") + @require_admin + async def new_menu_item(): + """Show form to create new menu item""" + html = await render_template( + "_types/menu_items/_form.html", + menu_item=None, + ) + return await make_response(html) + + @bp.post("/") + @require_admin + async def create_menu_item_route(): + """Create a new menu item""" + form = await request.form + post_id = form.get("post_id") + + if not post_id: + return jsonify({"message": "Page is required", "errors": {"post_id": ["Please select a page"]}}), 422 + + try: + post_id = int(post_id) + except ValueError: + return jsonify({"message": "Invalid page ID", "errors": {"post_id": ["Invalid page"]}}), 422 + + try: + menu_item = await create_menu_item(g.s, post_id) + await g.s.flush() + + # Get updated list and nav OOB + menu_items = await get_all_menu_items(g.s) + 
nav_oob = await get_menu_items_nav_oob() + + html = await render_template( + "_types/menu_items/_list.html", + menu_items=menu_items, + ) + return await make_response(html + nav_oob, 200) + + except MenuItemError as e: + return jsonify({"message": str(e), "errors": {}}), 400 + + @bp.get("//edit/") + @require_admin + async def edit_menu_item(item_id: int): + """Show form to edit menu item""" + menu_item = await get_menu_item_by_id(g.s, item_id) + if not menu_item: + return await make_response("Menu item not found", 404) + + html = await render_template( + "_types/menu_items/_form.html", + menu_item=menu_item, + ) + return await make_response(html) + + @bp.put("//") + @require_admin + async def update_menu_item_route(item_id: int): + """Update a menu item""" + form = await request.form + post_id = form.get("post_id") + + if not post_id: + return jsonify({"message": "Page is required", "errors": {"post_id": ["Please select a page"]}}), 422 + + try: + post_id = int(post_id) + except ValueError: + return jsonify({"message": "Invalid page ID", "errors": {"post_id": ["Invalid page"]}}), 422 + + try: + menu_item = await update_menu_item(g.s, item_id, post_id=post_id) + await g.s.flush() + + # Get updated list and nav OOB + menu_items = await get_all_menu_items(g.s) + nav_oob = await get_menu_items_nav_oob() + + html = await render_template( + "_types/menu_items/_list.html", + menu_items=menu_items, + ) + return await make_response(html + nav_oob, 200) + + except MenuItemError as e: + return jsonify({"message": str(e), "errors": {}}), 400 + + @bp.delete("//") + @require_admin + async def delete_menu_item_route(item_id: int): + """Delete a menu item""" + success = await delete_menu_item(g.s, item_id) + + if not success: + return await make_response("Menu item not found", 404) + + await g.s.flush() + + # Get updated list and nav OOB + menu_items = await get_all_menu_items(g.s) + nav_oob = await get_menu_items_nav_oob() + + html = await render_template( + 
"_types/menu_items/_list.html", + menu_items=menu_items, + ) + return await make_response(html + nav_oob, 200) + + @bp.get("/pages/search/") + @require_admin + async def search_pages_route(): + """Search for pages to add as menu items""" + query = request.args.get("q", "").strip() + page = int(request.args.get("page", 1)) + per_page = 10 + + pages, total = await search_pages(g.s, query, page, per_page) + has_more = (page * per_page) < total + + html = await render_template( + "_types/menu_items/_page_search_results.html", + pages=pages, + query=query, + page=page, + has_more=has_more, + ) + return await make_response(html) + + @bp.post("/reorder/") + @require_admin + async def reorder_menu_items_route(): + """Reorder menu items""" + from .services.menu_items import reorder_menu_items + + form = await request.form + item_ids_str = form.get("item_ids", "") + + if not item_ids_str: + return jsonify({"message": "No items to reorder", "errors": {}}), 400 + + try: + item_ids = [int(id.strip()) for id in item_ids_str.split(",") if id.strip()] + except ValueError: + return jsonify({"message": "Invalid item IDs", "errors": {}}), 400 + + await reorder_menu_items(g.s, item_ids) + await g.s.flush() + + # Get updated list and nav OOB + menu_items = await get_all_menu_items(g.s) + nav_oob = await get_menu_items_nav_oob() + + html = await render_template( + "_types/menu_items/_list.html", + menu_items=menu_items, + ) + return await make_response(html + nav_oob, 200) + + return bp diff --git a/blog/bp/menu_items/services/menu_items.py b/blog/bp/menu_items/services/menu_items.py new file mode 100644 index 0000000..d79c89f --- /dev/null +++ b/blog/bp/menu_items/services/menu_items.py @@ -0,0 +1,209 @@ +from __future__ import annotations + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func +from shared.models.menu_node import MenuNode +from models.ghost_content import Post +from shared.services.relationships import attach_child, detach_child + + 
+class MenuItemError(ValueError): + """Base error for menu item service operations.""" + + +async def get_all_menu_items(session: AsyncSession) -> list[MenuNode]: + """ + Get all menu nodes (excluding deleted), ordered by sort_order. + """ + result = await session.execute( + select(MenuNode) + .where(MenuNode.deleted_at.is_(None), MenuNode.depth == 0) + .order_by(MenuNode.sort_order.asc(), MenuNode.id.asc()) + ) + return list(result.scalars().all()) + + +async def get_menu_item_by_id(session: AsyncSession, item_id: int) -> MenuNode | None: + """Get a menu node by ID (excluding deleted).""" + result = await session.execute( + select(MenuNode) + .where(MenuNode.id == item_id, MenuNode.deleted_at.is_(None)) + ) + return result.scalar_one_or_none() + + +async def create_menu_item( + session: AsyncSession, + post_id: int, + sort_order: int | None = None +) -> MenuNode: + """ + Create a MenuNode + ContainerRelation for a page. + If sort_order is not provided, adds to end of list. + """ + # Verify post exists and is a page + post = await session.scalar( + select(Post).where(Post.id == post_id) + ) + if not post: + raise MenuItemError(f"Post {post_id} does not exist.") + + if not post.is_page: + raise MenuItemError("Only pages can be added as menu items, not posts.") + + # If no sort_order provided, add to end + if sort_order is None: + max_order = await session.scalar( + select(func.max(MenuNode.sort_order)) + .where(MenuNode.deleted_at.is_(None), MenuNode.depth == 0) + ) + sort_order = (max_order or 0) + 1 + + # Check for duplicate (same page, not deleted) + existing = await session.scalar( + select(MenuNode).where( + MenuNode.container_type == "page", + MenuNode.container_id == post_id, + MenuNode.deleted_at.is_(None), + ) + ) + if existing: + raise MenuItemError("Menu item for this page already exists.") + + menu_node = MenuNode( + container_type="page", + container_id=post_id, + label=post.title, + slug=post.slug, + feature_image=post.feature_image, + 
sort_order=sort_order, + ) + session.add(menu_node) + await session.flush() + await attach_child(session, "page", post_id, "menu_node", menu_node.id) + + return menu_node + + +async def update_menu_item( + session: AsyncSession, + item_id: int, + post_id: int | None = None, + sort_order: int | None = None +) -> MenuNode: + """Update an existing menu node.""" + menu_node = await get_menu_item_by_id(session, item_id) + if not menu_node: + raise MenuItemError(f"Menu item {item_id} not found.") + + if post_id is not None: + # Verify post exists and is a page + post = await session.scalar( + select(Post).where(Post.id == post_id) + ) + if not post: + raise MenuItemError(f"Post {post_id} does not exist.") + + if not post.is_page: + raise MenuItemError("Only pages can be added as menu items, not posts.") + + # Check for duplicate (same page, different menu node) + existing = await session.scalar( + select(MenuNode).where( + MenuNode.container_type == "page", + MenuNode.container_id == post_id, + MenuNode.id != item_id, + MenuNode.deleted_at.is_(None), + ) + ) + if existing: + raise MenuItemError("Menu item for this page already exists.") + + old_post_id = menu_node.container_id + menu_node.container_id = post_id + menu_node.label = post.title + menu_node.slug = post.slug + menu_node.feature_image = post.feature_image + + if sort_order is not None: + menu_node.sort_order = sort_order + + await session.flush() + + if post_id is not None and post_id != old_post_id: + await detach_child(session, "page", old_post_id, "menu_node", menu_node.id) + await attach_child(session, "page", post_id, "menu_node", menu_node.id) + + return menu_node + + +async def delete_menu_item(session: AsyncSession, item_id: int) -> bool: + """Soft delete a menu node.""" + menu_node = await get_menu_item_by_id(session, item_id) + if not menu_node: + return False + + menu_node.deleted_at = func.now() + await session.flush() + await detach_child(session, "page", menu_node.container_id, "menu_node", 
menu_node.id) + + return True + + +async def reorder_menu_items( + session: AsyncSession, + item_ids: list[int] +) -> list[MenuNode]: + """ + Reorder menu nodes by providing a list of IDs in desired order. + Updates sort_order for each node. + """ + items = [] + for index, item_id in enumerate(item_ids): + menu_node = await get_menu_item_by_id(session, item_id) + if menu_node: + menu_node.sort_order = index + items.append(menu_node) + + await session.flush() + + return items + + +async def search_pages( + session: AsyncSession, + query: str, + page: int = 1, + per_page: int = 10 +) -> tuple[list[Post], int]: + """ + Search for pages (not posts) by title. + Returns (pages, total_count). + """ + filters = [ + Post.is_page == True, # noqa: E712 + Post.status == "published", + Post.deleted_at.is_(None) + ] + + if query: + filters.append(Post.title.ilike(f"%{query}%")) + + # Get total count + count_result = await session.execute( + select(func.count(Post.id)).where(*filters) + ) + total = count_result.scalar() or 0 + + # Get paginated results + offset = (page - 1) * per_page + result = await session.execute( + select(Post) + .where(*filters) + .order_by(Post.title.asc()) + .limit(per_page) + .offset(offset) + ) + pages = list(result.scalars().all()) + + return pages, total diff --git a/blog/bp/post/admin/routes.py b/blog/bp/post/admin/routes.py new file mode 100644 index 0000000..c468a43 --- /dev/null +++ b/blog/bp/post/admin/routes.py @@ -0,0 +1,688 @@ +from __future__ import annotations + + +from quart import ( + render_template, + make_response, + Blueprint, + g, + request, + redirect, + url_for, +) +from shared.browser.app.authz import require_admin, require_post_author +from shared.browser.app.utils.htmx import is_htmx_request +from shared.utils import host_url + +def register(): + bp = Blueprint("admin", __name__, url_prefix='/admin') + + + @bp.get("/") + @require_admin + async def admin(slug: str): + from shared.browser.app.utils.htmx import is_htmx_request + 
from shared.models.page_config import PageConfig + from sqlalchemy import select as sa_select + + # Load features for page admin + post = (g.post_data or {}).get("post", {}) + features = {} + sumup_configured = False + sumup_merchant_code = "" + sumup_checkout_prefix = "" + if post.get("is_page"): + pc = (await g.s.execute( + sa_select(PageConfig).where(PageConfig.container_type == "page", PageConfig.container_id == post["id"]) + )).scalar_one_or_none() + if pc: + features = pc.features or {} + sumup_configured = bool(pc.sumup_api_key) + sumup_merchant_code = pc.sumup_merchant_code or "" + sumup_checkout_prefix = pc.sumup_checkout_prefix or "" + + ctx = { + "features": features, + "sumup_configured": sumup_configured, + "sumup_merchant_code": sumup_merchant_code, + "sumup_checkout_prefix": sumup_checkout_prefix, + } + + # Determine which template to use based on request type + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/post/admin/index.html", **ctx) + else: + # HTMX request: main panel + OOB elements + html = await render_template("_types/post/admin/_oob_elements.html", **ctx) + + return await make_response(html) + + @bp.put("/features/") + @require_admin + async def update_features(slug: str): + """Update PageConfig.features for a page.""" + from shared.models.page_config import PageConfig + from models.ghost_content import Post + from sqlalchemy import select as sa_select + from quart import jsonify + import json + + post = g.post_data.get("post") + if not post or not post.get("is_page"): + return jsonify({"error": "This is not a page."}), 400 + + post_id = post["id"] + + # Load or create PageConfig + pc = (await g.s.execute( + sa_select(PageConfig).where(PageConfig.container_type == "page", PageConfig.container_id == post_id) + )).scalar_one_or_none() + if pc is None: + pc = PageConfig(container_type="page", container_id=post_id, features={}) + g.s.add(pc) + await g.s.flush() + from 
shared.services.relationships import attach_child + await attach_child(g.s, "page", post_id, "page_config", pc.id) + + # Parse request body + body = await request.get_json() + if body is None: + # Fall back to form data + form = await request.form + body = {} + for key in ("calendar", "market"): + val = form.get(key) + if val is not None: + body[key] = val in ("true", "1", "on") + + if not isinstance(body, dict): + return jsonify({"error": "Expected JSON object with feature flags."}), 400 + + # Merge features + features = dict(pc.features or {}) + for key, val in body.items(): + if isinstance(val, bool): + features[key] = val + elif val in ("true", "1", "on"): + features[key] = True + elif val in ("false", "0", "off", None): + features[key] = False + + pc.features = features + from sqlalchemy.orm.attributes import flag_modified + flag_modified(pc, "features") + await g.s.flush() + + # Return updated features panel + html = await render_template( + "_types/post/admin/_features_panel.html", + features=features, + post=post, + sumup_configured=bool(pc.sumup_api_key), + sumup_merchant_code=pc.sumup_merchant_code or "", + sumup_checkout_prefix=pc.sumup_checkout_prefix or "", + ) + return await make_response(html) + + @bp.put("/admin/sumup/") + @require_admin + async def update_sumup(slug: str): + """Update PageConfig SumUp credentials for a page.""" + from shared.models.page_config import PageConfig + from sqlalchemy import select as sa_select + from quart import jsonify + + post = g.post_data.get("post") + if not post or not post.get("is_page"): + return jsonify({"error": "This is not a page."}), 400 + + post_id = post["id"] + + pc = (await g.s.execute( + sa_select(PageConfig).where(PageConfig.container_type == "page", PageConfig.container_id == post_id) + )).scalar_one_or_none() + if pc is None: + pc = PageConfig(container_type="page", container_id=post_id, features={}) + g.s.add(pc) + await g.s.flush() + from shared.services.relationships import attach_child + await 
attach_child(g.s, "page", post_id, "page_config", pc.id) + + form = await request.form + merchant_code = (form.get("merchant_code") or "").strip() + api_key = (form.get("api_key") or "").strip() + checkout_prefix = (form.get("checkout_prefix") or "").strip() + + pc.sumup_merchant_code = merchant_code or None + pc.sumup_checkout_prefix = checkout_prefix or None + # Only update API key if non-empty (allows updating other fields without re-entering key) + if api_key: + pc.sumup_api_key = api_key + + await g.s.flush() + + features = pc.features or {} + html = await render_template( + "_types/post/admin/_features_panel.html", + features=features, + post=post, + sumup_configured=bool(pc.sumup_api_key), + sumup_merchant_code=pc.sumup_merchant_code or "", + sumup_checkout_prefix=pc.sumup_checkout_prefix or "", + ) + return await make_response(html) + + @bp.get("/data/") + @require_admin + async def data(slug: str): + if not is_htmx_request(): + html = await render_template( + "_types/post_data/index.html", + ) + else: + html = await render_template( + "_types/post_data/_oob_elements.html", + ) + + return await make_response(html) + + @bp.get("/entries/calendar//") + @require_admin + async def calendar_view(slug: str, calendar_id: int): + """Show calendar month view for browsing entries""" + from shared.models.calendars import Calendar + from shared.utils.calendar_helpers import parse_int_arg, add_months, build_calendar_weeks + from shared.services.registry import services + from sqlalchemy import select + from datetime import datetime, timezone + import calendar as pycalendar + from quart import session as qsession + from ..services.entry_associations import get_post_entry_ids + + # Get month/year from query params + today = datetime.now(timezone.utc).date() + month = parse_int_arg("month") + year = parse_int_arg("year") + + if year is None: + year = today.year + if month is None or not (1 <= month <= 12): + month = today.month + + # Load calendar + result = await 
g.s.execute( + select(Calendar).where(Calendar.id == calendar_id, Calendar.deleted_at.is_(None)) + ) + calendar_obj = result.scalar_one_or_none() + if not calendar_obj: + return await make_response("Calendar not found", 404) + + # Build calendar data + prev_month_year, prev_month = add_months(year, month, -1) + next_month_year, next_month = add_months(year, month, +1) + prev_year = year - 1 + next_year = year + 1 + + weeks = build_calendar_weeks(year, month) + month_name = pycalendar.month_name[month] + weekday_names = [pycalendar.day_abbr[i] for i in range(7)] + + # Get entries for this month + period_start = datetime(year, month, 1, tzinfo=timezone.utc) + next_y, next_m = add_months(year, month, +1) + period_end = datetime(next_y, next_m, 1, tzinfo=timezone.utc) + + user = getattr(g, "user", None) + user_id = user.id if user else None + is_admin = bool(user and getattr(user, "is_admin", False)) + session_id = qsession.get("calendar_sid") + + month_entries = await services.calendar.visible_entries_for_period( + g.s, calendar_obj.id, period_start, period_end, + user_id=user_id, is_admin=is_admin, session_id=session_id, + ) + + # Get associated entry IDs for this post + post_id = g.post_data["post"]["id"] + associated_entry_ids = await get_post_entry_ids(g.s, post_id) + + html = await render_template( + "_types/post/admin/_calendar_view.html", + calendar=calendar_obj, + year=year, + month=month, + month_name=month_name, + weekday_names=weekday_names, + weeks=weeks, + prev_month=prev_month, + prev_month_year=prev_month_year, + next_month=next_month, + next_month_year=next_month_year, + prev_year=prev_year, + next_year=next_year, + month_entries=month_entries, + associated_entry_ids=associated_entry_ids, + ) + return await make_response(html) + + @bp.get("/entries/") + @require_admin + async def entries(slug: str): + from ..services.entry_associations import get_post_entry_ids + from shared.models.calendars import Calendar + from sqlalchemy import select + + post_id = 
g.post_data["post"]["id"] + associated_entry_ids = await get_post_entry_ids(g.s, post_id) + + # Load ALL calendars (not just this post's calendars) + result = await g.s.execute( + select(Calendar) + .where(Calendar.deleted_at.is_(None)) + .order_by(Calendar.name.asc()) + ) + all_calendars = result.scalars().all() + + # Load entries and post for each calendar + for calendar in all_calendars: + await g.s.refresh(calendar, ["entries", "post"]) + if not is_htmx_request(): + html = await render_template( + "_types/post_entries/index.html", + all_calendars=all_calendars, + associated_entry_ids=associated_entry_ids, + ) + else: + html = await render_template( + "_types/post_entries/_oob_elements.html", + all_calendars=all_calendars, + associated_entry_ids=associated_entry_ids, + ) + + return await make_response(html) + + @bp.post("/entries//toggle/") + @require_admin + async def toggle_entry(slug: str, entry_id: int): + from ..services.entry_associations import toggle_entry_association, get_post_entry_ids, get_associated_entries + from shared.models.calendars import Calendar + from sqlalchemy import select + from quart import jsonify + + post_id = g.post_data["post"]["id"] + is_associated, error = await toggle_entry_association(g.s, post_id, entry_id) + + if error: + return jsonify({"message": error, "errors": {}}), 400 + + await g.s.flush() + + # Return updated association status + associated_entry_ids = await get_post_entry_ids(g.s, post_id) + + # Load ALL calendars + result = await g.s.execute( + select(Calendar) + .where(Calendar.deleted_at.is_(None)) + .order_by(Calendar.name.asc()) + ) + all_calendars = result.scalars().all() + + # Load entries and post for each calendar + for calendar in all_calendars: + await g.s.refresh(calendar, ["entries", "post"]) + + # Fetch associated entries for nav display + associated_entries = await get_associated_entries(g.s, post_id) + + # Load calendars for this post (for nav display) + calendars = ( + await g.s.execute( + 
select(Calendar) + .where(Calendar.container_type == "page", Calendar.container_id == post_id, Calendar.deleted_at.is_(None)) + .order_by(Calendar.name.asc()) + ) + ).scalars().all() + + # Return the associated entries admin list + OOB update for nav entries + admin_list = await render_template( + "_types/post/admin/_associated_entries.html", + all_calendars=all_calendars, + associated_entry_ids=associated_entry_ids, + ) + + nav_entries_oob = await render_template( + "_types/post/admin/_nav_entries_oob.html", + associated_entries=associated_entries, + calendars=calendars, + post=g.post_data["post"], + ) + + return await make_response(admin_list + nav_entries_oob) + + @bp.get("/settings/") + @require_post_author + async def settings(slug: str): + from ...blog.ghost.ghost_posts import get_post_for_edit + + ghost_id = g.post_data["post"]["ghost_id"] + is_page = bool(g.post_data["post"].get("is_page")) + ghost_post = await get_post_for_edit(ghost_id, is_page=is_page) + save_success = request.args.get("saved") == "1" + + if not is_htmx_request(): + html = await render_template( + "_types/post_settings/index.html", + ghost_post=ghost_post, + save_success=save_success, + ) + else: + html = await render_template( + "_types/post_settings/_oob_elements.html", + ghost_post=ghost_post, + save_success=save_success, + ) + + return await make_response(html) + + @bp.post("/settings/") + @require_post_author + async def settings_save(slug: str): + from ...blog.ghost.ghost_posts import update_post_settings + from ...blog.ghost.ghost_sync import sync_single_post, sync_single_page + from shared.browser.app.redis_cacher import invalidate_tag_cache + + ghost_id = g.post_data["post"]["ghost_id"] + is_page = bool(g.post_data["post"].get("is_page")) + form = await request.form + + updated_at = form.get("updated_at", "") + + # Build kwargs — only include fields that were submitted + kwargs: dict = {} + + # Text fields + for field in ( + "slug", "custom_template", "meta_title", 
"meta_description", + "canonical_url", "og_image", "og_title", "og_description", + "twitter_image", "twitter_title", "twitter_description", + "feature_image_alt", + ): + val = form.get(field) + if val is not None: + kwargs[field] = val.strip() + + # Select fields + visibility = form.get("visibility") + if visibility is not None: + kwargs["visibility"] = visibility + + # Datetime + published_at = form.get("published_at", "").strip() + if published_at: + kwargs["published_at"] = published_at + + # Checkbox fields: present = True, absent = False + kwargs["featured"] = form.get("featured") == "on" + kwargs["email_only"] = form.get("email_only") == "on" + + # Tags — comma-separated string → list of {"name": "..."} dicts + tags_str = form.get("tags", "").strip() + if tags_str: + kwargs["tags"] = [{"name": t.strip()} for t in tags_str.split(",") if t.strip()] + else: + kwargs["tags"] = [] + + # Update in Ghost + await update_post_settings( + ghost_id=ghost_id, + updated_at=updated_at, + is_page=is_page, + **kwargs, + ) + + # Sync to local DB + if is_page: + await sync_single_page(g.s, ghost_id) + else: + await sync_single_post(g.s, ghost_id) + await g.s.flush() + + # Clear caches + await invalidate_tag_cache("blog") + await invalidate_tag_cache("post.post_detail") + + return redirect(host_url(url_for("blog.post.admin.settings", slug=slug)) + "?saved=1") + + @bp.get("/edit/") + @require_post_author + async def edit(slug: str): + from ...blog.ghost.ghost_posts import get_post_for_edit + from shared.models.ghost_membership_entities import GhostNewsletter + from sqlalchemy import select as sa_select + + ghost_id = g.post_data["post"]["ghost_id"] + is_page = bool(g.post_data["post"].get("is_page")) + ghost_post = await get_post_for_edit(ghost_id, is_page=is_page) + save_success = request.args.get("saved") == "1" + + newsletters = (await g.s.execute( + sa_select(GhostNewsletter).order_by(GhostNewsletter.name) + )).scalars().all() + + if not is_htmx_request(): + html = await 
render_template( + "_types/post_edit/index.html", + ghost_post=ghost_post, + save_success=save_success, + newsletters=newsletters, + ) + else: + html = await render_template( + "_types/post_edit/_oob_elements.html", + ghost_post=ghost_post, + save_success=save_success, + newsletters=newsletters, + ) + + return await make_response(html) + + @bp.post("/edit/") + @require_post_author + async def edit_save(slug: str): + import json + from ...blog.ghost.ghost_posts import update_post + from ...blog.ghost.lexical_validator import validate_lexical + from ...blog.ghost.ghost_sync import sync_single_post, sync_single_page + from shared.browser.app.redis_cacher import invalidate_tag_cache + + ghost_id = g.post_data["post"]["ghost_id"] + is_page = bool(g.post_data["post"].get("is_page")) + form = await request.form + title = form.get("title", "").strip() + lexical_raw = form.get("lexical", "") + updated_at = form.get("updated_at", "") + status = form.get("status", "draft") + publish_mode = form.get("publish_mode", "web") + newsletter_slug = form.get("newsletter_slug", "").strip() or None + feature_image = form.get("feature_image", "").strip() + custom_excerpt = form.get("custom_excerpt", "").strip() + feature_image_caption = form.get("feature_image_caption", "").strip() + + # Validate the lexical JSON + try: + lexical_doc = json.loads(lexical_raw) + except (json.JSONDecodeError, TypeError): + from ...blog.ghost.ghost_posts import get_post_for_edit + ghost_post = await get_post_for_edit(ghost_id, is_page=is_page) + html = await render_template( + "_types/post_edit/index.html", + ghost_post=ghost_post, + save_error="Invalid JSON in editor content.", + ) + return await make_response(html, 400) + + ok, reason = validate_lexical(lexical_doc) + if not ok: + from ...blog.ghost.ghost_posts import get_post_for_edit + ghost_post = await get_post_for_edit(ghost_id, is_page=is_page) + html = await render_template( + "_types/post_edit/index.html", + ghost_post=ghost_post, + 
save_error=reason, + ) + return await make_response(html, 400) + + # Update in Ghost (content save — no status change yet) + ghost_post = await update_post( + ghost_id=ghost_id, + lexical_json=lexical_raw, + title=title or None, + updated_at=updated_at, + feature_image=feature_image, + custom_excerpt=custom_excerpt, + feature_image_caption=feature_image_caption, + is_page=is_page, + ) + + # Publish workflow + is_admin = bool((g.get("rights") or {}).get("admin")) + publish_requested_msg = None + + # Guard: if already emailed, force publish_mode to "web" to prevent re-send + already_emailed = bool(ghost_post.get("email") and ghost_post["email"].get("status")) + if already_emailed and publish_mode in ("email", "both"): + publish_mode = "web" + + if status == "published" and ghost_post.get("status") != "published" and not is_admin: + # Non-admin requesting publish: don't send status to Ghost, set local flag + publish_requested_msg = "Publish requested — an admin will review." + elif status and status != ghost_post.get("status"): + # Status is changing — determine email params based on publish_mode + email_kwargs: dict = {} + if status == "published" and publish_mode in ("email", "both") and newsletter_slug: + email_kwargs["newsletter_slug"] = newsletter_slug + email_kwargs["email_segment"] = "all" + if publish_mode == "email": + email_kwargs["email_only"] = True + + from ...blog.ghost.ghost_posts import update_post as _up + ghost_post = await _up( + ghost_id=ghost_id, + lexical_json=lexical_raw, + title=None, + updated_at=ghost_post["updated_at"], + status=status, + is_page=is_page, + **email_kwargs, + ) + + # Sync to local DB + if is_page: + await sync_single_page(g.s, ghost_id) + else: + await sync_single_post(g.s, ghost_id) + await g.s.flush() + + # Handle publish_requested flag on the local post + from models.ghost_content import Post + from sqlalchemy import select as sa_select + local_post = (await g.s.execute( + sa_select(Post).where(Post.ghost_id == ghost_id) + 
)).scalar_one_or_none() + if local_post: + if publish_requested_msg: + local_post.publish_requested = True + elif status == "published" and is_admin: + local_post.publish_requested = False + await g.s.flush() + + # Clear caches + await invalidate_tag_cache("blog") + await invalidate_tag_cache("post.post_detail") + + # Redirect to GET to avoid resubmit warning on refresh (PRG pattern) + redirect_url = host_url(url_for("blog.post.admin.edit", slug=slug)) + "?saved=1" + if publish_requested_msg: + redirect_url += "&publish_requested=1" + return redirect(redirect_url) + + + @bp.get("/markets/") + @require_admin + async def markets(slug: str): + """List markets for this page.""" + from shared.services.registry import services + + post = (g.post_data or {}).get("post", {}) + post_id = post.get("id") + if not post_id: + return await make_response("Post not found", 404) + + page_markets = await services.market.marketplaces_for_container(g.s, "page", post_id) + + html = await render_template( + "_types/post/admin/_markets_panel.html", + markets=page_markets, + post=post, + ) + return await make_response(html) + + @bp.post("/markets/new/") + @require_admin + async def create_market(slug: str): + """Create a new market for this page.""" + from ..services.markets import create_market as _create_market, MarketError + from shared.services.registry import services + from quart import jsonify + + post = (g.post_data or {}).get("post", {}) + post_id = post.get("id") + if not post_id: + return jsonify({"error": "Post not found"}), 404 + + form = await request.form + name = (form.get("name") or "").strip() + + try: + await _create_market(g.s, post_id, name) + except MarketError as e: + return jsonify({"error": str(e)}), 400 + + # Return updated markets list + page_markets = await services.market.marketplaces_for_container(g.s, "page", post_id) + + html = await render_template( + "_types/post/admin/_markets_panel.html", + markets=page_markets, + post=post, + ) + return await 
make_response(html) + + @bp.delete("/markets//") + @require_admin + async def delete_market(slug: str, market_slug: str): + """Soft-delete a market.""" + from ..services.markets import soft_delete_market + from shared.services.registry import services + from quart import jsonify + + post = (g.post_data or {}).get("post", {}) + post_id = post.get("id") + + deleted = await soft_delete_market(g.s, slug, market_slug) + if not deleted: + return jsonify({"error": "Market not found"}), 404 + + # Return updated markets list + page_markets = await services.market.marketplaces_for_container(g.s, "page", post_id) + + html = await render_template( + "_types/post/admin/_markets_panel.html", + markets=page_markets, + post=post, + ) + return await make_response(html) + + return bp diff --git a/blog/bp/post/routes.py b/blog/bp/post/routes.py new file mode 100644 index 0000000..7aa3fb4 --- /dev/null +++ b/blog/bp/post/routes.py @@ -0,0 +1,180 @@ +from __future__ import annotations + + +from quart import ( + render_template, + make_response, + g, + Blueprint, + abort, + url_for, + request, +) +from .services.post_data import post_data +from .services.post_operations import toggle_post_like +from shared.services.registry import services +from shared.infrastructure.fragments import fetch_fragment, fetch_fragments + +from shared.browser.app.redis_cacher import cache_page, clear_cache + + +from .admin.routes import register as register_admin +from shared.config import config +from shared.browser.app.utils.htmx import is_htmx_request + +def register(): + bp = Blueprint("post", __name__, url_prefix='/') + bp.register_blueprint( + register_admin() + ) + + # Calendar blueprints now live in the events service. + # Post pages link to events_url() instead of embedding calendars. 
+ + @bp.url_value_preprocessor + def pull_blog(endpoint, values): + g.post_slug = values.get("slug") + + @bp.before_request + async def hydrate_post_data(): + slug = getattr(g, "post_slug", None) + if not slug: + return # not a blog route or no slug in this URL + + is_admin = bool((g.get("rights") or {}).get("admin")) + # Always include drafts so we can check ownership below + p_data = await post_data(slug, g.s, include_drafts=True) + if not p_data: + abort(404) + return + + # Access control for draft posts + if p_data["post"].get("status") != "published": + if is_admin: + pass # admin can see all drafts + elif g.user and p_data["post"].get("user_id") == g.user.id: + pass # author can see their own drafts + else: + abort(404) + return + + g.post_data = p_data + + @bp.context_processor + async def context(): + p_data = getattr(g, "post_data", None) + if p_data: + from shared.infrastructure.cart_identity import current_cart_identity + + db_post_id = (g.post_data.get("post") or {}).get("id") + post_slug = (g.post_data.get("post") or {}).get("slug", "") + + # Fetch container nav fragments from events + market + paginate_url = url_for( + 'blog.post.widget_paginate', + slug=post_slug, widget_domain='calendar', + ) + nav_params = { + "container_type": "page", + "container_id": str(db_post_id), + "post_slug": post_slug, + "paginate_url": paginate_url, + } + events_nav_html, market_nav_html = await fetch_fragments([ + ("events", "container-nav", nav_params), + ("market", "container-nav", nav_params), + ]) + container_nav_html = events_nav_html + market_nav_html + + ctx = { + **p_data, + "base_title": f"{config()['title']} {p_data['post']['title']}", + "container_nav_html": container_nav_html, + } + + # Page cart badge via service + post_dict = p_data.get("post") or {} + if post_dict.get("is_page"): + ident = current_cart_identity() + page_summary = await services.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + 
page_slug=post_dict["slug"], + ) + ctx["page_cart_count"] = page_summary.count + page_summary.calendar_count + page_summary.ticket_count + ctx["page_cart_total"] = float(page_summary.total + page_summary.calendar_total + page_summary.ticket_total) + + return ctx + else: + return {} + + @bp.get("/") + @cache_page(tag="post.post_detail") + async def post_detail(slug: str): + # Determine which template to use based on request type + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/post/index.html") + else: + # HTMX request: main panel + OOB elements + html = await render_template("_types/post/_oob_elements.html") + + return await make_response(html) + + @bp.post("/like/toggle/") + @clear_cache(tag="post.post_detail", tag_scope="user") + async def like_toggle(slug: str): + from shared.utils import host_url + + # Get post_id from g.post_data + if not g.user: + html = await render_template( + "_types/browse/like/button.html", + slug=slug, + liked=False, + like_url=host_url(url_for('blog.post.like_toggle', slug=slug)), + item_type='post', + ) + resp = make_response(html, 403) + return resp + + post_id = g.post_data["post"]["id"] + user_id = g.user.id + + liked, error = await toggle_post_like(g.s, user_id, post_id) + + if error: + resp = make_response(error, 404) + return resp + + html = await render_template( + "_types/browse/like/button.html", + slug=slug, + liked=liked, + like_url=host_url(url_for('blog.post.like_toggle', slug=slug)), + item_type='post', + ) + return html + + @bp.get("/w//") + async def widget_paginate(slug: str, widget_domain: str): + """Proxies paginated widget requests to the appropriate fragment provider.""" + page = int(request.args.get("page", 1)) + post_id = g.post_data["post"]["id"] + + if widget_domain == "calendar": + html = await fetch_fragment("events", "container-nav", params={ + "container_type": "page", + "container_id": str(post_id), + "post_slug": slug, + "page": 
async def toggle_entry_association(
    session: AsyncSession,
    post_id: int,
    entry_id: int
) -> tuple[bool, str | None]:
    """Flip the association between a post and a calendar entry.

    Returns ``(is_now_associated, error_message)``; the error is non-None
    only when the post cannot be found.
    """
    if not await services.blog.get_post_by_id(session, post_id):
        return False, "Post not found"

    now_associated = await services.calendar.toggle_entry_post(
        session, entry_id, "post", post_id,
    )
    return now_associated, None


async def get_post_entry_ids(
    session: AsyncSession,
    post_id: int
) -> set[int]:
    """IDs of every calendar entry currently associated with this post."""
    return await services.calendar.entry_ids_for_content(session, "post", post_id)
def slugify(value: str, max_len: int = 255) -> str:
    """Normalize ``value`` into a URL-safe slug.

    Unicode is folded to ASCII, everything non-alphanumeric collapses to
    single hyphens, and the result is trimmed to ``max_len``.  Falls back
    to ``"market"`` when nothing survives (empty input, None, symbols only).
    """
    text = unicodedata.normalize("NFKD", value or "")
    text = text.encode("ascii", "ignore").decode("ascii").lower()
    text = text.replace("/", "-")
    text = re.sub(r"[^a-z0-9]+", "-", text)
    text = re.sub(r"-{2,}", "-", text)
    # Trim hyphens, enforce the length cap, then trim again in case the
    # cut landed on a hyphen.
    text = text.strip("-")[:max_len].strip("-")
    return text or "market"
async def post_data(slug, session, include_drafts=False):
    """Assemble the template context for a single post page.

    Returns None when no post matches ``slug``; otherwise a dict holding the
    post (annotated with ``is_liked`` for the current user), the original
    Ghost record, and the full tag/author lists.
    """
    client = DBClient(session)
    matches = await client.posts_by_slug(slug, include_drafts=include_drafts)

    if not matches:
        return None  # caller is responsible for rendering a 404

    post, original_post = matches[0]

    # Annotate with whether the signed-in user has a live (non-deleted) like.
    post["is_liked"] = False
    if g.user:
        like_row = await session.scalar(
            select(PostLike).where(
                PostLike.user_id == g.user.id,
                PostLike.post_id == post["id"],
                PostLike.deleted_at.is_(None),
            )
        )
        post["is_liked"] = like_row is not None

    return {
        "post": post,
        "original_post": original_post,
        "tags": await client.list_tags(limit=50000),
        "authors": await client.list_authors(limit=50000),
    }
async def toggle_post_like(
    session: AsyncSession,
    user_id: int,
    post_id: int,
) -> tuple[bool, Optional[str]]:
    """Toggle a post like for a given user using soft deletes.

    Returns ``(liked_state, error_message)``:
    - a non-None ``error_message`` means the operation failed;
    - ``liked_state`` tells whether the post is now liked (True) or
      unliked (False).
    """
    # The post must exist and not itself be soft-deleted.
    if not await session.scalar(
        select(Post.id).where(Post.id == post_id, Post.deleted_at.is_(None))
    ):
        return False, "Post not found"

    # Criteria identifying this user's live like row(s) on the post.
    live_like = (
        PostLike.user_id == user_id,
        PostLike.post_id == post_id,
        PostLike.deleted_at.is_(None),
    )

    current = await session.scalar(select(PostLike).where(*live_like))

    if current:
        # Unlike: soft-delete the live like by stamping deleted_at.
        await session.execute(
            update(PostLike).where(*live_like).values(deleted_at=func.now())
        )
        return False, None

    # Like: insert a fresh row.
    session.add(PostLike(user_id=user_id, post_id=post_id))
    return True, None
shared.browser.app.authz import require_login +from shared.browser.app.utils.htmx import is_htmx_request +from models import Snippet + + +VALID_VISIBILITY = frozenset({"private", "shared", "admin"}) + + +async def _visible_snippets(session): + """Return snippets visible to the current user (own + shared + admin-if-admin).""" + uid = g.user.id + is_admin = g.rights.get("admin") + + filters = [Snippet.user_id == uid, Snippet.visibility == "shared"] + if is_admin: + filters.append(Snippet.visibility == "admin") + + rows = (await session.execute( + select(Snippet).where(or_(*filters)).order_by(Snippet.name) + )).scalars().all() + + return rows + + +def register(): + bp = Blueprint("snippets", __name__, url_prefix="/settings/snippets") + + @bp.get("/") + @require_login + async def list_snippets(): + """List snippets visible to the current user.""" + snippets = await _visible_snippets(g.s) + is_admin = g.rights.get("admin") + + if not is_htmx_request(): + html = await render_template( + "_types/snippets/index.html", + snippets=snippets, + is_admin=is_admin, + ) + else: + html = await render_template( + "_types/snippets/_oob_elements.html", + snippets=snippets, + is_admin=is_admin, + ) + + return await make_response(html) + + @bp.delete("//") + @require_login + async def delete_snippet(snippet_id: int): + """Delete a snippet. Owners delete their own; admins can delete any.""" + snippet = await g.s.get(Snippet, snippet_id) + if not snippet: + abort(404) + + is_admin = g.rights.get("admin") + if snippet.user_id != g.user.id and not is_admin: + abort(403) + + await g.s.delete(snippet) + await g.s.flush() + + snippets = await _visible_snippets(g.s) + html = await render_template( + "_types/snippets/_list.html", + snippets=snippets, + is_admin=is_admin, + ) + return await make_response(html) + + @bp.patch("//visibility/") + @require_login + async def patch_visibility(snippet_id: int): + """Change snippet visibility. 
Admin only.""" + if not g.rights.get("admin"): + abort(403) + + snippet = await g.s.get(Snippet, snippet_id) + if not snippet: + abort(404) + + form = await request.form + visibility = form.get("visibility", "").strip() + + if visibility not in VALID_VISIBILITY: + abort(400) + + snippet.visibility = visibility + await g.s.flush() + + snippets = await _visible_snippets(g.s) + html = await render_template( + "_types/snippets/_list.html", + snippets=snippets, + is_admin=True, + ) + return await make_response(html) + + return bp diff --git a/blog/config/app-config.yaml b/blog/config/app-config.yaml new file mode 100644 index 0000000..3aa6a76 --- /dev/null +++ b/blog/config/app-config.yaml @@ -0,0 +1,84 @@ +# App-wide settings +base_host: "wholesale.suma.coop" +base_login: https://wholesale.suma.coop/customer/account/login/ +base_url: https://wholesale.suma.coop/ +title: Rose Ash +market_root: /market +market_title: Market +blog_root: / +blog_title: all the news +cart_root: /cart +app_urls: + blog: "http://localhost:8000" + market: "http://localhost:8001" + cart: "http://localhost:8002" + events: "http://localhost:8003" + federation: "http://localhost:8004" +cache: + fs_root: _snapshot # <- absolute path to your snapshot dir +categories: + allow: + Basics: basics + Branded Goods: branded-goods + Chilled: chilled + Frozen: frozen + Non-foods: non-foods + Supplements: supplements + Christmas: christmas +slugs: + skip: + - "" + - customer + - account + - checkout + - wishlist + - sales + - contact + - privacy-policy + - terms-and-conditions + - delivery + - catalogsearch + - quickorder + - apply + - search + - static + - media +section-titles: + - ingredients + - allergy information + - allergens + - nutritional information + - nutrition + - storage + - directions + - preparation + - serving suggestions + - origin + - country of origin + - recycling + - general information + - additional information + - a note about prices + +blacklist: + category: + - 
branded-goods/alcoholic-drinks + - branded-goods/beers + - branded-goods/wines + - branded-goods/ciders + product: + - list-price-suma-current-suma-price-list-each-bk012-2-html + - ---just-lem-just-wholefoods-jelly-crystals-lemon-12-x-85g-vf067-2-html + product-details: + - General Information + - A Note About Prices + +# SumUp payment settings (fill these in for live usage) +sumup: + merchant_code: "ME4J6100" + currency: "GBP" + # Name of the environment variable that holds your SumUp API key + api_key_env: "SUMUP_API_KEY" + webhook_secret: "CHANGE_ME_TO_A_LONG_RANDOM_STRING" + checkout_reference_prefix: 'dev-' + diff --git a/blog/entrypoint.sh b/blog/entrypoint.sh new file mode 100644 index 0000000..685a882 --- /dev/null +++ b/blog/entrypoint.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Optional: wait for Postgres to be reachable +if [[ -n "${DATABASE_HOST:-}" && -n "${DATABASE_PORT:-}" ]]; then + echo "Waiting for Postgres at ${DATABASE_HOST}:${DATABASE_PORT}..." + for i in {1..60}; do + (echo > /dev/tcp/${DATABASE_HOST}/${DATABASE_PORT}) >/dev/null 2>&1 && break || true + sleep 1 + done +fi + +# Run DB migrations only if RUN_MIGRATIONS=true (blog service only) +if [[ "${RUN_MIGRATIONS:-}" == "true" ]]; then + echo "Running Alembic migrations..." + (cd shared && alembic upgrade head) +fi + +# Clear Redis page cache on deploy +if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then + echo "Flushing Redis cache..." + python3 -c " +import redis, os +r = redis.from_url(os.environ['REDIS_URL']) +r.flushall() +print('Redis cache cleared.') +" || echo "Redis flush failed (non-fatal), continuing..." +fi + +# Start the app +echo "Starting Hypercorn (${APP_MODULE:-app:app})..." 
+PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" --bind 0.0.0.0:${PORT:-8000} diff --git a/blog/models/__init__.py b/blog/models/__init__.py new file mode 100644 index 0000000..e434f4a --- /dev/null +++ b/blog/models/__init__.py @@ -0,0 +1,14 @@ +from .ghost_content import Post, Author, Tag, PostAuthor, PostTag, PostLike +from .snippet import Snippet +from .tag_group import TagGroup, TagGroupTag + +# Shared models — canonical definitions live in shared/models/ +from shared.models.ghost_membership_entities import ( + GhostLabel, UserLabel, + GhostNewsletter, UserNewsletter, + GhostTier, GhostSubscription, +) +from shared.models.menu_item import MenuItem +from shared.models.kv import KV +from shared.models.magic_link import MagicLink +from shared.models.user import User diff --git a/blog/models/ghost_content.py b/blog/models/ghost_content.py new file mode 100644 index 0000000..cd18161 --- /dev/null +++ b/blog/models/ghost_content.py @@ -0,0 +1,3 @@ +from shared.models.ghost_content import ( # noqa: F401 + Tag, Post, Author, PostAuthor, PostTag, PostLike, +) diff --git a/blog/models/ghost_membership_entities.py b/blog/models/ghost_membership_entities.py new file mode 100644 index 0000000..d07520f --- /dev/null +++ b/blog/models/ghost_membership_entities.py @@ -0,0 +1,12 @@ +# Re-export from canonical shared location +from shared.models.ghost_membership_entities import ( + GhostLabel, UserLabel, + GhostNewsletter, UserNewsletter, + GhostTier, GhostSubscription, +) + +__all__ = [ + "GhostLabel", "UserLabel", + "GhostNewsletter", "UserNewsletter", + "GhostTier", "GhostSubscription", +] diff --git a/blog/models/kv.py b/blog/models/kv.py new file mode 100644 index 0000000..d54f0a3 --- /dev/null +++ b/blog/models/kv.py @@ -0,0 +1,4 @@ +# Re-export from canonical shared location +from shared.models.kv import KV + +__all__ = ["KV"] diff --git a/blog/models/magic_link.py b/blog/models/magic_link.py new file mode 100644 index 0000000..9031ca4 --- /dev/null +++ 
b/blog/models/magic_link.py @@ -0,0 +1,4 @@ +# Re-export from canonical shared location +from shared.models.magic_link import MagicLink + +__all__ = ["MagicLink"] diff --git a/blog/models/menu_item.py b/blog/models/menu_item.py new file mode 100644 index 0000000..f36a146 --- /dev/null +++ b/blog/models/menu_item.py @@ -0,0 +1,4 @@ +# Re-export from canonical shared location +from shared.models.menu_item import MenuItem + +__all__ = ["MenuItem"] diff --git a/blog/models/snippet.py b/blog/models/snippet.py new file mode 100644 index 0000000..47cad35 --- /dev/null +++ b/blog/models/snippet.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from datetime import datetime + +from sqlalchemy import Integer, String, Text, DateTime, ForeignKey, UniqueConstraint, Index, func +from sqlalchemy.orm import Mapped, mapped_column + +from shared.db.base import Base + + +class Snippet(Base): + __tablename__ = "snippets" + __table_args__ = ( + UniqueConstraint("user_id", "name", name="uq_snippets_user_name"), + Index("ix_snippets_visibility", "visibility"), + ) + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + user_id: Mapped[int] = mapped_column( + ForeignKey("users.id", ondelete="CASCADE"), nullable=False, + ) + name: Mapped[str] = mapped_column(String(255), nullable=False) + value: Mapped[str] = mapped_column(Text, nullable=False) + visibility: Mapped[str] = mapped_column( + String(20), nullable=False, default="private", server_default="private", + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now(), + ) diff --git a/blog/models/tag_group.py b/blog/models/tag_group.py new file mode 100644 index 0000000..77ddc41 --- /dev/null +++ b/blog/models/tag_group.py @@ -0,0 +1,52 @@ +from datetime import datetime +from typing import List, 
Optional +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy import ( + Integer, + String, + Text, + DateTime, + ForeignKey, + UniqueConstraint, + func, +) +from shared.db.base import Base + + +class TagGroup(Base): + __tablename__ = "tag_groups" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + name: Mapped[str] = mapped_column(String(255), nullable=False) + slug: Mapped[str] = mapped_column(String(191), unique=True, nullable=False) + feature_image: Mapped[Optional[str]] = mapped_column(Text()) + colour: Mapped[Optional[str]] = mapped_column(String(32)) + sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False) + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now() + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now() + ) + + tag_links: Mapped[List["TagGroupTag"]] = relationship( + "TagGroupTag", back_populates="group", cascade="all, delete-orphan", passive_deletes=True + ) + + +class TagGroupTag(Base): + __tablename__ = "tag_group_tags" + __table_args__ = ( + UniqueConstraint("tag_group_id", "tag_id", name="uq_tag_group_tag"), + ) + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + tag_group_id: Mapped[int] = mapped_column( + ForeignKey("tag_groups.id", ondelete="CASCADE"), nullable=False + ) + tag_id: Mapped[int] = mapped_column( + ForeignKey("tags.id", ondelete="CASCADE"), nullable=False + ) + + group: Mapped["TagGroup"] = relationship("TagGroup", back_populates="tag_links") diff --git a/blog/models/user.py b/blog/models/user.py new file mode 100644 index 0000000..3feae81 --- /dev/null +++ b/blog/models/user.py @@ -0,0 +1,4 @@ +# Re-export from canonical shared location +from shared.models.user import User + +__all__ = ["User"] diff --git a/blog/path_setup.py b/blog/path_setup.py new file mode 
100644 index 0000000..c7166f7 --- /dev/null +++ b/blog/path_setup.py @@ -0,0 +1,9 @@ +import sys +import os + +_app_dir = os.path.dirname(os.path.abspath(__file__)) +_project_root = os.path.dirname(_app_dir) + +for _p in (_project_root, _app_dir): + if _p not in sys.path: + sys.path.insert(0, _p) diff --git a/blog/services/__init__.py b/blog/services/__init__.py new file mode 100644 index 0000000..11d9769 --- /dev/null +++ b/blog/services/__init__.py @@ -0,0 +1,28 @@ +"""Blog app service registration.""" +from __future__ import annotations + + +def register_domain_services() -> None: + """Register services for the blog app. + + Blog owns: Post, Tag, Author, PostAuthor, PostTag, PostLike. + Standard deployment registers all 4 services as real DB impls + (shared DB). For composable deployments, swap non-owned services + with stubs from shared.services.stubs. + """ + from shared.services.registry import services + from shared.services.blog_impl import SqlBlogService + from shared.services.calendar_impl import SqlCalendarService + from shared.services.market_impl import SqlMarketService + from shared.services.cart_impl import SqlCartService + + services.blog = SqlBlogService() + if not services.has("calendar"): + services.calendar = SqlCalendarService() + if not services.has("market"): + services.market = SqlMarketService() + if not services.has("cart"): + services.cart = SqlCartService() + if not services.has("federation"): + from shared.services.federation_impl import SqlFederationService + services.federation = SqlFederationService() diff --git a/blog/templates/_email/magic_link.html b/blog/templates/_email/magic_link.html new file mode 100644 index 0000000..3c1eac6 --- /dev/null +++ b/blog/templates/_email/magic_link.html @@ -0,0 +1,33 @@ + + + + + + +
    + + +
    +

    {{ site_name }}

    +

    Sign in to your account

    +

    + Click the button below to sign in. This link will expire in 15 minutes. +

    +
    + + Sign in + +
    +

    Or copy and paste this link into your browser:

    +

    + {{ link_url }} +

    +
    +

    + If you did not request this email, you can safely ignore it. +

    +
    +
    + + diff --git a/blog/templates/_email/magic_link.txt b/blog/templates/_email/magic_link.txt new file mode 100644 index 0000000..28a2efb --- /dev/null +++ b/blog/templates/_email/magic_link.txt @@ -0,0 +1,8 @@ +Hello, + +Click this link to sign in: +{{ link_url }} + +This link will expire in 15 minutes. + +If you did not request this, you can ignore this email. diff --git a/blog/templates/_types/blog/_action_buttons.html b/blog/templates/_types/blog/_action_buttons.html new file mode 100644 index 0000000..7184ab0 --- /dev/null +++ b/blog/templates/_types/blog/_action_buttons.html @@ -0,0 +1,64 @@ +{# New Post/Page + Drafts toggle — shown in aside (desktop + mobile) #} +
    + {% if has_access('blog.new_post') %} + {% set new_href = url_for('blog.new_post')|host %} + + New Post + + {% set new_page_href = url_for('blog.new_page')|host %} + + New Page + + {% endif %} + {% if g.user and (draft_count or drafts) %} + {% if drafts %} + {% set drafts_off_href = (current_local_href ~ {'drafts': None}|qs)|host %} + + Drafts + {{ draft_count }} + + {% else %} + {% set drafts_on_href = (current_local_href ~ {'drafts': '1'}|qs)|host %} + + Drafts + {{ draft_count }} + + {% endif %} + {% endif %} +
    diff --git a/blog/templates/_types/blog/_card.html b/blog/templates/_types/blog/_card.html new file mode 100644 index 0000000..89ce8e7 --- /dev/null +++ b/blog/templates/_types/blog/_card.html @@ -0,0 +1,80 @@ +{% import 'macros/stickers.html' as stick %} +
    + {# ❤️ like button - OUTSIDE the link, aligned with image top #} + {% if g.user %} +
    + {% set slug = post.slug %} + {% set liked = post.is_liked or False %} + {% set like_url = url_for('blog.post.like_toggle', slug=slug)|host %} + {% set item_type = 'post' %} + {% include "_types/browse/like/button.html" %} +
    + {% endif %} + + {% set _href=url_for('blog.post.post_detail', slug=post.slug)|host %} + +
    +

    + {{ post.title }} +

    + + {% if post.status == "draft" %} +
    + Draft + {% if post.publish_requested %} + Publish requested + {% endif %} +
    + {% if post.updated_at %} +

    + Updated: {{ post.updated_at.strftime("%-d %b %Y at %H:%M") }} +

    + {% endif %} + {% elif post.published_at %} +

    + Published: {{ post.published_at.strftime("%-d %b %Y at %H:%M") }} +

    + {% endif %} + +
    + + {% if post.feature_image %} +
    + +
    + {% endif %} + {% if post.custom_excerpt %} +

    + {{ post.custom_excerpt }} +

    + {% else %} + {% if post.excerpt %} +

    + {{ post.excerpt }} +

    + {% endif %} + {% endif %} +
    + + {# Card decorations — via fragments #} + {% if card_widgets_html %} + {% set _card_html = card_widgets_html.get(post.id|string, "") %} + {% if _card_html %}{{ _card_html | safe }}{% endif %} + {% endif %} + + {% include '_types/blog/_card/at_bar.html' %} + +
    diff --git a/blog/templates/_types/blog/_card/at_bar.html b/blog/templates/_types/blog/_card/at_bar.html new file mode 100644 index 0000000..f226d92 --- /dev/null +++ b/blog/templates/_types/blog/_card/at_bar.html @@ -0,0 +1,19 @@ +
    + {% if post.tags %} +
    +
    in
    +
      + {% include '_types/blog/_card/tags.html' %} +
    +
    + {% endif %} +
    + {% if post.authors %} +
    +
    by
    +
      + {% include '_types/blog/_card/authors.html' %} +
    +
    + {% endif %} +
    diff --git a/blog/templates/_types/blog/_card/author.html b/blog/templates/_types/blog/_card/author.html new file mode 100644 index 0000000..7ddddf7 --- /dev/null +++ b/blog/templates/_types/blog/_card/author.html @@ -0,0 +1,21 @@ +{% macro author(author) %} + {% if author %} + {% if author.profile_image %} + {{ author.name }} + {% else %} +
    + {# optional fallback circle with first letter +
    + {{ author.name[:1] }} +
    #} + {% endif %} + + + {{ author.name }} + + {% endif %} +{% endmacro %} \ No newline at end of file diff --git a/blog/templates/_types/blog/_card/authors.html b/blog/templates/_types/blog/_card/authors.html new file mode 100644 index 0000000..5b8911d --- /dev/null +++ b/blog/templates/_types/blog/_card/authors.html @@ -0,0 +1,32 @@ +{# --- AUTHORS LIST STARTS HERE --- #} + {% if post.authors and post.authors|length %} + {% for a in post.authors %} + {% for author in authors if author.slug==a.slug %} +
  • + + {% if author.profile_image %} + {{ author.name }} + {% else %} + {# optional fallback circle with first letter #} +
    + {{ author.name[:1] }} +
    + {% endif %} + + + {{ author.name }} + +
    +
  • + {% endfor %} + {% endfor %} + {% endif %} + + {# --- AUTHOR LIST ENDS HERE --- #} \ No newline at end of file diff --git a/blog/templates/_types/blog/_card/tag.html b/blog/templates/_types/blog/_card/tag.html new file mode 100644 index 0000000..137cb0c --- /dev/null +++ b/blog/templates/_types/blog/_card/tag.html @@ -0,0 +1,19 @@ +{% macro tag(tag) %} + {% if tag %} + {% if tag.feature_image %} + {{ tag.name }} + {% else %} +
    + {{ tag.name[:1] }} +
    + {% endif %} + + + {{ tag.name }} + + {% endif %} +{% endmacro %} \ No newline at end of file diff --git a/blog/templates/_types/blog/_card/tag_group.html b/blog/templates/_types/blog/_card/tag_group.html new file mode 100644 index 0000000..21c9974 --- /dev/null +++ b/blog/templates/_types/blog/_card/tag_group.html @@ -0,0 +1,22 @@ +{% macro tag_group(group) %} + {% if group %} + {% if group.feature_image %} + {{ group.name }} + {% else %} +
    + {{ group.name[:1] }} +
    + {% endif %} + + + {{ group.name }} + + {% endif %} +{% endmacro %} diff --git a/blog/templates/_types/blog/_card/tags.html b/blog/templates/_types/blog/_card/tags.html new file mode 100644 index 0000000..2ea7ad1 --- /dev/null +++ b/blog/templates/_types/blog/_card/tags.html @@ -0,0 +1,17 @@ +{% import '_types/blog/_card/tag.html' as dotag %} +{# --- TAG LIST STARTS HERE --- #} + {% if post.tags and post.tags|length %} + {% for t in post.tags %} + {% for tag in tags if tag.slug==t.slug %} +
  • + + {{dotag.tag(tag)}} + +
  • + {% endfor %} + {% endfor %} + {% endif %} + {# --- TAG LIST ENDS HERE --- #} \ No newline at end of file diff --git a/blog/templates/_types/blog/_card_tile.html b/blog/templates/_types/blog/_card_tile.html new file mode 100644 index 0000000..f03ca16 --- /dev/null +++ b/blog/templates/_types/blog/_card_tile.html @@ -0,0 +1,59 @@ +
    + {% set _href=url_for('blog.post.post_detail', slug=post.slug)|host %} + + {% if post.feature_image %} +
    + +
    + {% endif %} + +
    +

    + {{ post.title }} +

    + + {% if post.status == "draft" %} +
    + Draft + {% if post.publish_requested %} + Publish requested + {% endif %} +
    + {% if post.updated_at %} +

    + Updated: {{ post.updated_at.strftime("%-d %b %Y at %H:%M") }} +

    + {% endif %} + {% elif post.published_at %} +

    + Published: {{ post.published_at.strftime("%-d %b %Y at %H:%M") }} +

    + {% endif %} + + {% if post.custom_excerpt %} +

    + {{ post.custom_excerpt }} +

    + {% elif post.excerpt %} +

    + {{ post.excerpt }} +

    + {% endif %} +
    +
    + + {% include '_types/blog/_card/at_bar.html' %} +
    diff --git a/blog/templates/_types/blog/_cards.html b/blog/templates/_types/blog/_cards.html new file mode 100644 index 0000000..82eee98 --- /dev/null +++ b/blog/templates/_types/blog/_cards.html @@ -0,0 +1,111 @@ +{% for post in posts %} + {% if view == 'tile' %} + {% include "_types/blog/_card_tile.html" %} + {% else %} + {% include "_types/blog/_card.html" %} + {% endif %} +{% endfor %} +{% if page < total_pages|int %} + + + + + +{% else %} +
    End of results
    +{% endif %} + diff --git a/blog/templates/_types/blog/_main_panel.html b/blog/templates/_types/blog/_main_panel.html new file mode 100644 index 0000000..055e164 --- /dev/null +++ b/blog/templates/_types/blog/_main_panel.html @@ -0,0 +1,84 @@ + + {# Content type tabs: Posts | Pages #} +
    + {% set posts_href = (url_for('blog.index'))|host %} + {% set pages_href = (url_for('blog.index') ~ '?type=pages')|host %} + Posts + Pages +
    + + {% if content_type == 'pages' %} + {# Pages listing #} +
    + {% set page_num = page %} + {% include "_types/blog/_page_cards.html" %} +
    +
    + {% else %} + + {# View toggle bar - desktop only #} + + + {# Cards container - list or grid based on view #} + {% if view == 'tile' %} +
    + {% include "_types/blog/_cards.html" %} +
    + {% else %} +
    + {% include "_types/blog/_cards.html" %} +
    + {% endif %} +
    + {% endif %}{# end content_type check #} diff --git a/blog/templates/_types/blog/_oob_elements.html b/blog/templates/_types/blog/_oob_elements.html new file mode 100644 index 0000000..2aa02cb --- /dev/null +++ b/blog/templates/_types/blog/_oob_elements.html @@ -0,0 +1,40 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob_.html' import root_header with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-header-child', 'blog-header-child', '_types/blog/header/_header.html')}} + + {% from '_types/root/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + + +{# Filter container - blog doesn't have child_summary but still needs this element #} +{% block filter %} + {% include "_types/blog/mobile/_filter/summary.html" %} +{% endblock %} + +{# Aside with filters #} +{% block aside %} + {% include "_types/blog/desktop/menu.html" %} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/root/_nav.html' %} + {% include '_types/root/_nav_panel.html' %} +{% endblock %} + + +{% block content %} + {% include '_types/blog/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/blog/_page_card.html b/blog/templates/_types/blog/_page_card.html new file mode 100644 index 0000000..b4a75b9 --- /dev/null +++ b/blog/templates/_types/blog/_page_card.html @@ -0,0 +1,56 @@ +{# Single page card for pages listing #} +
    + {% set _href = url_for('blog.post.post_detail', slug=page.slug)|host %} + +
    +

    + {{ page.title }} +

    + + {# Feature badges #} + {% if page.features %} +
    + {% if page.features.get('calendar') %} + + Calendar + + {% endif %} + {% if page.features.get('market') %} + + Market + + {% endif %} +
    + {% endif %} + + {% if page.published_at %} +

    + Published: {{ page.published_at.strftime("%-d %b %Y at %H:%M") }} +

    + {% endif %} +
    + + {% if page.feature_image %} +
    + +
    + {% endif %} + {% if page.custom_excerpt or page.excerpt %} +

    + {{ page.custom_excerpt or page.excerpt }} +

    + {% endif %} +
    +
    diff --git a/blog/templates/_types/blog/_page_cards.html b/blog/templates/_types/blog/_page_cards.html new file mode 100644 index 0000000..6d1f008 --- /dev/null +++ b/blog/templates/_types/blog/_page_cards.html @@ -0,0 +1,19 @@ +{# Page cards loop with pagination sentinel #} +{% for page in pages %} + {% include "_types/blog/_page_card.html" %} +{% endfor %} +{% if page_num < total_pages|int %} +
    +{% else %} + {% if pages %} +
    End of results
    + {% else %} +
    No pages found.
    + {% endif %} +{% endif %} diff --git a/blog/templates/_types/blog/admin/tag_groups/_edit_header.html b/blog/templates/_types/blog/admin/tag_groups/_edit_header.html new file mode 100644 index 0000000..ade4ee9 --- /dev/null +++ b/blog/templates/_types/blog/admin/tag_groups/_edit_header.html @@ -0,0 +1,9 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='tag-groups-edit-row', oob=oob) %} + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{ admin_nav_item(url_for('blog.tag_groups_admin.edit', id=group.id), 'pencil', group.name, select_colours, aclass='') }} + {% call links.desktop_nav() %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/blog/templates/_types/blog/admin/tag_groups/_edit_main_panel.html b/blog/templates/_types/blog/admin/tag_groups/_edit_main_panel.html new file mode 100644 index 0000000..7d1fa96 --- /dev/null +++ b/blog/templates/_types/blog/admin/tag_groups/_edit_main_panel.html @@ -0,0 +1,79 @@ +
    + + {# --- Edit group form --- #} +
    + + +
    +
    + + +
    +
    +
    + + +
    +
    + + +
    +
    +
    + + +
    +
    + + {# --- Tag checkboxes --- #} +
    + +
    + {% for tag in all_tags %} + + {% endfor %} +
    +
    + +
    + +
    +
    + + {# --- Delete form --- #} +
    + + +
    + +
    diff --git a/blog/templates/_types/blog/admin/tag_groups/_edit_oob.html b/blog/templates/_types/blog/admin/tag_groups/_edit_oob.html new file mode 100644 index 0000000..116bc7b --- /dev/null +++ b/blog/templates/_types/blog/admin/tag_groups/_edit_oob.html @@ -0,0 +1,17 @@ +{% extends 'oob_elements.html' %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('tag-groups-header-child', 'tag-groups-edit-child', '_types/blog/admin/tag_groups/_edit_header.html')}} + {{oob_header('root-settings-header-child', 'tag-groups-header-child', '_types/blog/admin/tag_groups/_header.html')}} + + {% from '_types/root/settings/header/_header.html' import header_row with context %} + {{header_row(oob=True)}} +{% endblock %} + +{% block mobile_menu %} +{% endblock %} + +{% block content %} + {% include '_types/blog/admin/tag_groups/_edit_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/blog/admin/tag_groups/_header.html b/blog/templates/_types/blog/admin/tag_groups/_header.html new file mode 100644 index 0000000..d9c3095 --- /dev/null +++ b/blog/templates/_types/blog/admin/tag_groups/_header.html @@ -0,0 +1,9 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='tag-groups-row', oob=oob) %} + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{ admin_nav_item(url_for('blog.tag_groups_admin.index'), 'tags', 'Tag Groups', select_colours, aclass='') }} + {% call links.desktop_nav() %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/blog/templates/_types/blog/admin/tag_groups/_main_panel.html b/blog/templates/_types/blog/admin/tag_groups/_main_panel.html new file mode 100644 index 0000000..1c8b8f4 --- /dev/null +++ b/blog/templates/_types/blog/admin/tag_groups/_main_panel.html @@ -0,0 +1,73 @@ +
    + + {# --- Create new group form --- #} +
    + +

    New Group

    +
    + + + +
    + + +
    + + {# --- Existing groups list --- #} + {% if groups %} +
      + {% for group in groups %} +
    • + {% if group.feature_image %} + {{ group.name }} + {% else %} +
      + {{ group.name[:1] }} +
      + {% endif %} +
      + + {{ group.name }} + + {{ group.slug }} +
      + order: {{ group.sort_order }} +
    • + {% endfor %} +
    + {% else %} +

    No tag groups yet.

    + {% endif %} + + {# --- Unassigned tags --- #} + {% if unassigned_tags %} +
    +

    Unassigned Tags ({{ unassigned_tags|length }})

    +
    + {% for tag in unassigned_tags %} + + {{ tag.name }} + + {% endfor %} +
    +
    + {% endif %} + +
    diff --git a/blog/templates/_types/blog/admin/tag_groups/_oob_elements.html b/blog/templates/_types/blog/admin/tag_groups/_oob_elements.html new file mode 100644 index 0000000..cb00363 --- /dev/null +++ b/blog/templates/_types/blog/admin/tag_groups/_oob_elements.html @@ -0,0 +1,16 @@ +{% extends 'oob_elements.html' %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-settings-header-child', 'tag-groups-header-child', '_types/blog/admin/tag_groups/_header.html')}} + + {% from '_types/root/settings/header/_header.html' import header_row with context %} + {{header_row(oob=True)}} +{% endblock %} + +{% block mobile_menu %} +{% endblock %} + +{% block content %} + {% include '_types/blog/admin/tag_groups/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/blog/admin/tag_groups/edit.html b/blog/templates/_types/blog/admin/tag_groups/edit.html new file mode 100644 index 0000000..5fefbc6 --- /dev/null +++ b/blog/templates/_types/blog/admin/tag_groups/edit.html @@ -0,0 +1,13 @@ +{% extends '_types/blog/admin/tag_groups/index.html' %} + +{% block tag_groups_header_child %} + {% from '_types/root/_n/macros.html' import header with context %} + {% call header() %} + {% from '_types/blog/admin/tag_groups/_edit_header.html' import header_row with context %} + {{ header_row() }} + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/blog/admin/tag_groups/_edit_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/blog/admin/tag_groups/index.html b/blog/templates/_types/blog/admin/tag_groups/index.html new file mode 100644 index 0000000..680b051 --- /dev/null +++ b/blog/templates/_types/blog/admin/tag_groups/index.html @@ -0,0 +1,20 @@ +{% extends '_types/root/settings/index.html' %} + +{% block root_settings_header_child %} + {% from '_types/root/_n/macros.html' import header with context %} + {% call header() %} + {% from 
'_types/blog/admin/tag_groups/_header.html' import header_row with context %} + {{ header_row() }} +
    + {% block tag_groups_header_child %} + {% endblock %} +
    + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/blog/admin/tag_groups/_main_panel.html' %} +{% endblock %} + +{% block _main_mobile_menu %} +{% endblock %} diff --git a/blog/templates/_types/blog/desktop/menu.html b/blog/templates/_types/blog/desktop/menu.html new file mode 100644 index 0000000..2c1afc4 --- /dev/null +++ b/blog/templates/_types/blog/desktop/menu.html @@ -0,0 +1,19 @@ +{% from 'macros/search.html' import search_desktop %} +{{ search_desktop(current_local_href, search, search_count, hx_select) }} +{% include '_types/blog/_action_buttons.html' %} +
    + {% include '_types/blog/desktop/menu/tag_groups.html' %} + {% include '_types/blog/desktop/menu/authors.html' %} +
    + +
    + +
    + + \ No newline at end of file diff --git a/blog/templates/_types/blog/desktop/menu/authors.html b/blog/templates/_types/blog/desktop/menu/authors.html new file mode 100644 index 0000000..de939e0 --- /dev/null +++ b/blog/templates/_types/blog/desktop/menu/authors.html @@ -0,0 +1,62 @@ + {% import '_types/blog/_card/author.html' as doauthor %} + + {# Author filter bar #} + + diff --git a/blog/templates/_types/blog/desktop/menu/tag_groups.html b/blog/templates/_types/blog/desktop/menu/tag_groups.html new file mode 100644 index 0000000..e23a879 --- /dev/null +++ b/blog/templates/_types/blog/desktop/menu/tag_groups.html @@ -0,0 +1,70 @@ + {# Tag group filter bar #} + diff --git a/blog/templates/_types/blog/desktop/menu/tags.html b/blog/templates/_types/blog/desktop/menu/tags.html new file mode 100644 index 0000000..c20b5bc --- /dev/null +++ b/blog/templates/_types/blog/desktop/menu/tags.html @@ -0,0 +1,59 @@ + {% import '_types/blog/_card/tag.html' as dotag %} + + {# Tag filter bar #} + + diff --git a/blog/templates/_types/blog/header/_header.html b/blog/templates/_types/blog/header/_header.html new file mode 100644 index 0000000..67325b9 --- /dev/null +++ b/blog/templates/_types/blog/header/_header.html @@ -0,0 +1,7 @@ + +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='blog-row', oob=oob) %} +
    + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/blog/templates/_types/blog/index.html b/blog/templates/_types/blog/index.html new file mode 100644 index 0000000..5978020 --- /dev/null +++ b/blog/templates/_types/blog/index.html @@ -0,0 +1,37 @@ +{% extends '_types/root/_index.html' %} + +{% block meta %} + {{ super() }} + +{% endblock %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('root-blog-header', '_types/blog/header/_header.html') %} + {% block root_blog_header %} + {% endblock %} + {% endcall %} +{% endblock %} + + +{% block aside %} + {% include "_types/blog/desktop/menu.html" %} +{% endblock %} + +{% block filter %} + {% include "_types/blog/mobile/_filter/summary.html" %} +{% endblock %} + +{% block content %} + {% include '_types/blog/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/blog/mobile/_filter/_hamburger.html b/blog/templates/_types/blog/mobile/_filter/_hamburger.html new file mode 100644 index 0000000..10e0b9c --- /dev/null +++ b/blog/templates/_types/blog/mobile/_filter/_hamburger.html @@ -0,0 +1,13 @@ +
    + + + + + + + + +
    diff --git a/blog/templates/_types/blog/mobile/_filter/summary.html b/blog/templates/_types/blog/mobile/_filter/summary.html new file mode 100644 index 0000000..4ed013b --- /dev/null +++ b/blog/templates/_types/blog/mobile/_filter/summary.html @@ -0,0 +1,14 @@ +{% import 'macros/layout.html' as layout %} + +{% call layout.details('/filter', 'md:hidden') %} + {% call layout.filter_summary("filter-summary-mobile", current_local_href, search, search_count, hx_select) %} + {% include '_types/blog/mobile/_filter/summary/tag_groups.html' %} + {% include '_types/blog/mobile/_filter/summary/authors.html' %} + {% endcall %} + {% include '_types/blog/_action_buttons.html' %} +
    + {% include '_types/blog/desktop/menu/tag_groups.html' %} + {% include '_types/blog/desktop/menu/authors.html' %} +
    +{% endcall %} + \ No newline at end of file diff --git a/blog/templates/_types/blog/mobile/_filter/summary/authors.html b/blog/templates/_types/blog/mobile/_filter/summary/authors.html new file mode 100644 index 0000000..32796d9 --- /dev/null +++ b/blog/templates/_types/blog/mobile/_filter/summary/authors.html @@ -0,0 +1,31 @@ +{% if selected_authors and selected_authors|length %} +
      + {% for st in selected_authors %} + {% for author in authors %} + {% if st == author.slug %} +
    • + {% if author.profile_image %} + {{ author.name }} + {% else %} + {# optional fallback circle with first letter #} +
      + {{ author.name[:1] }} +
      + {% endif %} + + + {{ author.name }} + + + {{author.published_post_count}} + +
    • + {% endif %} + {% endfor %} + {% endfor %} +
    +{% endif %} \ No newline at end of file diff --git a/blog/templates/_types/blog/mobile/_filter/summary/tag_groups.html b/blog/templates/_types/blog/mobile/_filter/summary/tag_groups.html new file mode 100644 index 0000000..7bf142e --- /dev/null +++ b/blog/templates/_types/blog/mobile/_filter/summary/tag_groups.html @@ -0,0 +1,33 @@ +{% if selected_groups and selected_groups|length %} +
      + {% for sg in selected_groups %} + {% for group in tag_groups %} + {% if sg == group.slug %} +
    • + {% if group.feature_image %} + {{ group.name }} + {% else %} +
      + {{ group.name[:1] }} +
      + {% endif %} + + + {{ group.name }} + + + {{group.post_count}} + +
    • + {% endif %} + {% endfor %} + {% endfor %} +
    +{% endif %} diff --git a/blog/templates/_types/blog/mobile/_filter/summary/tags.html b/blog/templates/_types/blog/mobile/_filter/summary/tags.html new file mode 100644 index 0000000..df6169d --- /dev/null +++ b/blog/templates/_types/blog/mobile/_filter/summary/tags.html @@ -0,0 +1,31 @@ +{% if selected_tags and selected_tags|length %} +
      + {% for st in selected_tags %} + {% for tag in tags %} + {% if st == tag.slug %} +
    • + {% if tag.feature_image %} + {{ tag.name }} + {% else %} + {# optional fallback circle with first letter #} +
      + {{ tag.name[:1] }} +
      + {% endif %} + + + {{ tag.name }} + + + {{tag.published_post_count}} + +
    • + {% endif %} + {% endfor %} + {% endfor %} +
    +{% endif %} \ No newline at end of file diff --git a/blog/templates/_types/blog/not_found.html b/blog/templates/_types/blog/not_found.html new file mode 100644 index 0000000..f539822 --- /dev/null +++ b/blog/templates/_types/blog/not_found.html @@ -0,0 +1,22 @@ +{% extends '_types/root/_index.html' %} + +{% block content %} +
    +
    📝
    +

    Post Not Found

    +

    + The post "{{ slug }}" could not be found. +

    + + ← Back to Blog + +
    +{% endblock %} diff --git a/blog/templates/_types/blog_drafts/_main_panel.html b/blog/templates/_types/blog_drafts/_main_panel.html new file mode 100644 index 0000000..8cb0b7a --- /dev/null +++ b/blog/templates/_types/blog_drafts/_main_panel.html @@ -0,0 +1,55 @@ +
    + +
    +

    Drafts

    + {% set new_href = url_for('blog.new_post')|host %} + + New Post + +
    + + {% if drafts %} + + {% else %} +

    No drafts yet.

    + {% endif %} + +
    diff --git a/blog/templates/_types/blog_drafts/_oob_elements.html b/blog/templates/_types/blog_drafts/_oob_elements.html new file mode 100644 index 0000000..8d9790b --- /dev/null +++ b/blog/templates/_types/blog_drafts/_oob_elements.html @@ -0,0 +1,12 @@ +{% extends 'oob_elements.html' %} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + {% from '_types/blog/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% block content %} + {% include '_types/blog_drafts/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/blog_drafts/index.html b/blog/templates/_types/blog_drafts/index.html new file mode 100644 index 0000000..6ce38f1 --- /dev/null +++ b/blog/templates/_types/blog_drafts/index.html @@ -0,0 +1,11 @@ +{% extends '_types/root/_index.html' %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('root-blog-header', '_types/blog/header/_header.html') %} + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/blog_drafts/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/blog_new/_main_panel.html b/blog/templates/_types/blog_new/_main_panel.html new file mode 100644 index 0000000..6c3d264 --- /dev/null +++ b/blog/templates/_types/blog_new/_main_panel.html @@ -0,0 +1,259 @@ +{# ── Error banner ── #} +{% if save_error %} +
    + Save failed: {{ save_error }} +
    +{% endif %} + +
    + + + + + + {# ── Feature image ── #} +
    + {# Empty state: add link #} +
    + +
    + + {# Filled state: image preview + controls #} + + + {# Upload spinner overlay #} + + + {# Hidden file input #} + +
    + + {# ── Title ── #} + + + {# ── Excerpt ── #} + + + {# ── Editor mount point ── #} +
    + + {# ── Status + Save footer ── #} +
    + + + +
    +
    + +{# ── Koenig editor assets ── #} + + + + diff --git a/blog/templates/_types/blog_new/_oob_elements.html b/blog/templates/_types/blog_new/_oob_elements.html new file mode 100644 index 0000000..61e78f5 --- /dev/null +++ b/blog/templates/_types/blog_new/_oob_elements.html @@ -0,0 +1,12 @@ +{% extends 'oob_elements.html' %} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + {% from '_types/blog/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% block content %} + {% include '_types/blog_new/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/blog_new/index.html b/blog/templates/_types/blog_new/index.html new file mode 100644 index 0000000..3c802d4 --- /dev/null +++ b/blog/templates/_types/blog_new/index.html @@ -0,0 +1,11 @@ +{% extends '_types/root/_index.html' %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('root-blog-header', '_types/blog/header/_header.html') %} + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/blog_new/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/home/_oob_elements.html b/blog/templates/_types/home/_oob_elements.html new file mode 100644 index 0000000..03a4f17 --- /dev/null +++ b/blog/templates/_types/home/_oob_elements.html @@ -0,0 +1,19 @@ +{% extends 'oob_elements.html' %} + +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + {% from '_types/root/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% block content %} +
    +
    + {% if post.html %} + {{post.html|safe}} + {% endif %} +
    +
    +{% endblock %} diff --git a/blog/templates/_types/home/index.html b/blog/templates/_types/home/index.html new file mode 100644 index 0000000..e5df191 --- /dev/null +++ b/blog/templates/_types/home/index.html @@ -0,0 +1,14 @@ +{% extends '_types/root/_index.html' %} +{% block meta %} + {% include '_types/post/_meta.html' %} +{% endblock %} + +{% block content %} +
    +
    + {% if post.html %} + {{post.html|safe}} + {% endif %} +
    +
    +{% endblock %} diff --git a/blog/templates/_types/menu_items/_form.html b/blog/templates/_types/menu_items/_form.html new file mode 100644 index 0000000..8eed1c0 --- /dev/null +++ b/blog/templates/_types/menu_items/_form.html @@ -0,0 +1,125 @@ + + + diff --git a/blog/templates/_types/menu_items/_list.html b/blog/templates/_types/menu_items/_list.html new file mode 100644 index 0000000..3892f07 --- /dev/null +++ b/blog/templates/_types/menu_items/_list.html @@ -0,0 +1,68 @@ +
    + {% if menu_items %} +
    + {% for item in menu_items %} +
    + {# Drag handle #} +
    + +
    + + {# Page image #} + {% if item.feature_image %} + {{ item.label }} + {% else %} +
    + {% endif %} + + {# Page title #} +
    +
    {{ item.label }}
    +
    {{ item.slug }}
    +
    + + {# Sort order #} +
    + Order: {{ item.sort_order }} +
    + + {# Actions #} +
    + + +
    +
    + {% endfor %} +
    + {% else %} +
    + +

    No menu items yet. Add one to get started!

    +
    + {% endif %} +
    diff --git a/blog/templates/_types/menu_items/_main_panel.html b/blog/templates/_types/menu_items/_main_panel.html new file mode 100644 index 0000000..bc502dd --- /dev/null +++ b/blog/templates/_types/menu_items/_main_panel.html @@ -0,0 +1,20 @@ +
    +
    + +
    + + {# Form container #} + + + {# Menu items list #} + +
    diff --git a/blog/templates/_types/menu_items/_nav_oob.html b/blog/templates/_types/menu_items/_nav_oob.html new file mode 100644 index 0000000..e25189a --- /dev/null +++ b/blog/templates/_types/menu_items/_nav_oob.html @@ -0,0 +1,31 @@ +{% set _app_slugs = {'cart': cart_url('/')} %} +{% set _first_seg = request.path.strip('/').split('/')[0] %} + diff --git a/blog/templates/_types/menu_items/_oob_elements.html b/blog/templates/_types/menu_items/_oob_elements.html new file mode 100644 index 0000000..c242593 --- /dev/null +++ b/blog/templates/_types/menu_items/_oob_elements.html @@ -0,0 +1,23 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-settings-header-child', 'menu_items-header-child', '_types/menu_items/header/_header.html')}} + + {% from '_types/root/settings/header/_header.html' import header_row with context %} + {{header_row(oob=True)}} + +{% endblock %} + +{% block mobile_menu %} +{#% include '_types/root/settings/_nav.html' %#} +{% endblock %} + +{% block content %} + {% include '_types/menu_items/_main_panel.html' %} +{% endblock %} + diff --git a/blog/templates/_types/menu_items/_page_search_results.html b/blog/templates/_types/menu_items/_page_search_results.html new file mode 100644 index 0000000..df36d0d --- /dev/null +++ b/blog/templates/_types/menu_items/_page_search_results.html @@ -0,0 +1,44 @@ +{% if pages %} +
    + {% for post in pages %} +
    + + {# Page image #} + {% if post.feature_image %} + {{ post.title }} + {% else %} +
    + {% endif %} + + {# Page info #} +
    +
    {{ post.title }}
    +
    {{ post.slug }}
    +
    +
    + {% endfor %} + + {# Infinite scroll sentinel #} + {% if has_more %} +
    + Loading more... +
    + {% endif %} +
    +{% elif query %} +
    + No pages found matching "{{ query }}" +
    +{% endif %} diff --git a/blog/templates/_types/menu_items/header/_header.html b/blog/templates/_types/menu_items/header/_header.html new file mode 100644 index 0000000..55a18d6 --- /dev/null +++ b/blog/templates/_types/menu_items/header/_header.html @@ -0,0 +1,9 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='menu_items-row', oob=oob) %} + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{ admin_nav_item(url_for('menu_items.list_menu_items'), 'bars', 'Menu Items', select_colours, aclass='') }} + {% call links.desktop_nav() %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/blog/templates/_types/menu_items/index.html b/blog/templates/_types/menu_items/index.html new file mode 100644 index 0000000..5bcf7da --- /dev/null +++ b/blog/templates/_types/menu_items/index.html @@ -0,0 +1,20 @@ +{% extends '_types/root/settings/index.html' %} + +{% block root_settings_header_child %} + {% from '_types/root/_n/macros.html' import header with context %} + {% call header() %} + {% from '_types/menu_items/header/_header.html' import header_row with context %} + {{ header_row() }} + + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/menu_items/_main_panel.html' %} +{% endblock %} + +{% block _main_mobile_menu %} +{% endblock %} diff --git a/blog/templates/_types/post/_entry_container.html b/blog/templates/_types/post/_entry_container.html new file mode 100644 index 0000000..3c3965a --- /dev/null +++ b/blog/templates/_types/post/_entry_container.html @@ -0,0 +1,24 @@ +
    +
    + {% include '_types/post/_entry_items.html' with context %} +
    +
    + + diff --git a/blog/templates/_types/post/_entry_items.html b/blog/templates/_types/post/_entry_items.html new file mode 100644 index 0000000..d221e85 --- /dev/null +++ b/blog/templates/_types/post/_entry_items.html @@ -0,0 +1,38 @@ +{# Get entries from either direct variable or associated_entries dict #} +{% set entry_list = entries if entries is defined else (associated_entries.entries if associated_entries is defined else []) %} +{% set current_page = page if page is defined else (associated_entries.page if associated_entries is defined else 1) %} +{% set has_more_entries = has_more if has_more is defined else (associated_entries.has_more if associated_entries is defined else False) %} + +{% for entry in entry_list %} + {% set _entry_path = '/' + post.slug + '/calendars/' + entry.calendar_slug + '/' + entry.start_at.year|string + '/' + entry.start_at.month|string + '/' + entry.start_at.day|string + '/entries/' + entry.id|string + '/' %} + + {% if post.feature_image %} + {{ post.title }} + {% else %} +
    + {% endif %} +
    +
    {{ entry.name }}
    +
    + {{ entry.start_at.strftime('%b %d, %Y at %H:%M') }} + {% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    +
    +
    +{% endfor %} + +{# Load more entries one at a time until container is full #} +{% if has_more_entries %} +
    +
    +{% endif %} diff --git a/blog/templates/_types/post/_main_panel.html b/blog/templates/_types/post/_main_panel.html new file mode 100644 index 0000000..52a2c3a --- /dev/null +++ b/blog/templates/_types/post/_main_panel.html @@ -0,0 +1,65 @@ +{# Main panel fragment for HTMX navigation - post/page article content #} +
    + {# Draft indicator + edit link (shown for both posts and pages) #} + {% if post.status == "draft" %} +
    + Draft + {% if post.publish_requested %} + Publish requested + {% endif %} + {% set is_admin = (g.get("rights") or {}).get("admin") %} + {% if is_admin or (g.user and post.user_id == g.user.id) %} + {% set edit_href = url_for('blog.post.admin.edit', slug=post.slug)|host %} + + Edit + + {% endif %} +
    + {% endif %} + + {% if not post.is_page %} + {# ── Blog post chrome: like button, excerpt, tags/authors ── #} + {% if g.user %} +
    + {% set slug = post.slug %} + {% set liked = post.is_liked or False %} + {% set like_url = url_for('blog.post.like_toggle', slug=slug)|host %} + {% set item_type = 'post' %} + {% include "_types/browse/like/button.html" %} +
    + {% endif %} + + {% if post.custom_excerpt %} +
    + {{post.custom_excerpt|safe}} +
    + {% endif %} + + {% endif %} + + {% if post.feature_image %} +
    + +
    + {% endif %} +
    + {% if post.html %} + {{post.html|safe}} + {% endif %} +
    +
    +
    diff --git a/blog/templates/_types/post/_meta.html b/blog/templates/_types/post/_meta.html new file mode 100644 index 0000000..c4ef2ad --- /dev/null +++ b/blog/templates/_types/post/_meta.html @@ -0,0 +1,124 @@ +{# --- social/meta_post.html --- #} +{# Context expected: + site, post, request +#} + +{# Visibility → robots #} +{% set is_public = (post.visibility == 'public') %} +{% set is_published = (post.status == 'published') %} +{% set robots_here = 'index,follow' if (is_public and is_published and not post.email_only) else 'noindex,nofollow' %} + +{# Compute canonical early so both this file and base can use it #} +{% set _site_url = site().url.rstrip('/') if site and site().url else '' %} +{% set _post_path = request.path if request else ('/posts/' ~ (post.slug or post.uuid)) %} +{% set canonical = post.canonical_url or (_site_url ~ _post_path if _site_url else (request.url if request else None)) %} + +{# Include common base (charset, viewport, robots default, RSS, Org/WebSite JSON-LD) #} +{% set robots_override = robots_here %} +{% include 'social/meta_base.html' %} + +{# ---- Titles / descriptions ---- #} +{% set og_title = post.og_title or base_title %} +{% set tw_title = post.twitter_title or base_title %} + +{# Description best-effort, trimmed #} +{% set desc_source = post.meta_description + or post.og_description + or post.twitter_description + or post.custom_excerpt + or post.excerpt + or (post.plaintext if post.plaintext else (post.html|striptags if post.html else '')) %} +{% set description = (desc_source|trim|replace('\n',' ')|replace('\r',' ')|striptags)|truncate(160, True, '…') %} + +{# Image priority #} +{% set image_url = post.og_image + or post.twitter_image + or post.feature_image + or (site().default_image if site and site().default_image else None) %} + +{# Dates #} +{% set published_iso = post.published_at.isoformat() if post.published_at else None %} +{% set updated_iso = post.updated_at.isoformat() if post.updated_at + else 
(post.created_at.isoformat() if post.created_at else None) %} + +{# Authors / tags #} +{% set primary_author = post.primary_author %} +{% set authors = post.authors or ([primary_author] if primary_author else []) %} +{% set tag_names = (post.tags or []) | map(attribute='name') | list %} +{% set is_article = not post.is_page %} + +{{ base_title }} + +{% if canonical %}{% endif %} + +{# ---- Open Graph ---- #} + + + + +{% if canonical %}{% endif %} +{% if image_url %}{% endif %} +{% if is_article and published_iso %}{% endif %} +{% if is_article and updated_iso %} + + +{% endif %} +{% if is_article and post.primary_tag and post.primary_tag.name %} + +{% endif %} +{% if is_article %} + {% for t in tag_names %} + + {% endfor %} +{% endif %} + +{# ---- Twitter ---- #} + +{% if site and site().twitter_site %}{% endif %} +{% if primary_author and primary_author.twitter %} + +{% endif %} + + +{% if image_url %}{% endif %} + +{# ---- JSON-LD author value (no list comprehensions) ---- #} +{% if authors and authors|length == 1 %} + {% set author_value = {"@type": "Person", "name": authors[0].name} %} +{% elif authors %} + {% set ns = namespace(arr=[]) %} + {% for a in authors %} + {% set _ = ns.arr.append({"@type": "Person", "name": a.name}) %} + {% endfor %} + {% set author_value = ns.arr %} +{% else %} + {% set author_value = none %} +{% endif %} + +{# ---- JSON-LD using combine for optionals ---- #} +{% set jsonld = { + "@context": "https://schema.org", + "@type": "BlogPosting" if is_article else "WebPage", + "mainEntityOfPage": canonical, + "headline": base_title, + "description": description, + "image": image_url, + "datePublished": published_iso, + "author": author_value, + "publisher": { + "@type": "Organization", + "name": site().title if site and site().title else "", + "logo": {"@type": "ImageObject", "url": site().logo if site and site().logo else image_url} + } +} %} + +{% if updated_iso %} + {% set jsonld = jsonld | combine({"dateModified": updated_iso}) %} +{% 
endif %} +{% if tag_names %} + {% set jsonld = jsonld | combine({"keywords": tag_names | join(", ")}) %} +{% endif %} + + diff --git a/blog/templates/_types/post/_nav.html b/blog/templates/_types/post/_nav.html new file mode 100644 index 0000000..037bdcd --- /dev/null +++ b/blog/templates/_types/post/_nav.html @@ -0,0 +1,15 @@ +{% import 'macros/links.html' as links %} + {# Widget-driven container nav — entries, calendars, markets #} + {% if container_nav_widgets %} +
    + {% include '_types/post/admin/_nav_entries.html' %} +
    + {% endif %} + + {# Admin link #} + {% if post and has_access('blog.post.admin.admin') %} + {% call links.link(url_for('blog.post.admin.admin', slug=post.slug), hx_select_search, select_colours, True, aclass=styles.nav_button) %} + + {% endcall %} + {% endif %} diff --git a/blog/templates/_types/post/_oob_elements.html b/blog/templates/_types/post/_oob_elements.html new file mode 100644 index 0000000..d8bda2c --- /dev/null +++ b/blog/templates/_types/post/_oob_elements.html @@ -0,0 +1,36 @@ +{% extends 'oob_elements.html' %} + + +{# OOB elements for HTMX navigation - all elements that need updating #} +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + + +{% block oobs %} + {% from '_types/root/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% from '_types/root/_n/macros.html' import header with context %} +{% call header(id='root-header-child', oob=True) %} + {% call header() %} + {% from '_types/post/header/_header.html' import header_row with context %} + {{header_row()}} +
    + +
    + {% endcall %} +{% endcall %} + + +{# Mobile menu #} + +{% block mobile_menu %} + {% include '_types/post/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/post/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/post/admin/_associated_entries.html b/blog/templates/_types/post/admin/_associated_entries.html new file mode 100644 index 0000000..d9fe853 --- /dev/null +++ b/blog/templates/_types/post/admin/_associated_entries.html @@ -0,0 +1,50 @@ +
    +

    Associated Entries

    + {% if associated_entry_ids %} +
    + {% for calendar in all_calendars %} + {% for entry in calendar.entries %} + {% if entry.id in associated_entry_ids and entry.deleted_at is none %} + + {% endif %} + {% endfor %} + {% endfor %} +
    + {% else %} +
    No entries associated yet. Browse calendars below to add entries.
    + {% endif %} +
    diff --git a/blog/templates/_types/post/admin/_calendar_view.html b/blog/templates/_types/post/admin/_calendar_view.html new file mode 100644 index 0000000..80ae33f --- /dev/null +++ b/blog/templates/_types/post/admin/_calendar_view.html @@ -0,0 +1,88 @@ +
    + {# Month/year navigation #} +
    + +
    + + {# Calendar grid #} +
    + + +
    + {% for week in weeks %} + {% for day in week %} +
    +
    {{ day.date.day }}
    + + {# Entries for this day #} +
    + {% for e in month_entries %} + {% if e.start_at.date() == day.date %} + {% if e.id in associated_entry_ids %} + {# Associated entry - show with delete button #} +
    + {{ e.name }} + +
    + {% else %} + {# Non-associated entry - clickable to add #} + + {% endif %} + {% endif %} + {% endfor %} +
    +
    + {% endfor %} + {% endfor %} +
    +
    +
    diff --git a/blog/templates/_types/post/admin/_features_panel.html b/blog/templates/_types/post/admin/_features_panel.html new file mode 100644 index 0000000..19f9296 --- /dev/null +++ b/blog/templates/_types/post/admin/_features_panel.html @@ -0,0 +1,112 @@ +{# Feature toggles for PageConfig #} +
    +

    Page Features

    + +
    + + + +
    + + {# SumUp credentials — shown when calendar or market is enabled #} + {% if features.get('calendar') or features.get('market') %} +
    +

    + + SumUp Payment +

    +

    + Configure per-page SumUp credentials. Leave blank to use the global merchant account. +

    + +
    +
    + + +
    + +
    + + + {% if sumup_configured %} +

    Key is set. Leave blank to keep current key.

    + {% endif %} +
    + +
    + + +
    + + + + {% if sumup_configured %} + + Connected + + {% endif %} +
    +
    + {% endif %} +
    diff --git a/blog/templates/_types/post/admin/_main_panel.html b/blog/templates/_types/post/admin/_main_panel.html new file mode 100644 index 0000000..58d5238 --- /dev/null +++ b/blog/templates/_types/post/admin/_main_panel.html @@ -0,0 +1,7 @@ +{# Main panel fragment for HTMX navigation - post admin #} +
    +
    +
    diff --git a/blog/templates/_types/post/admin/_markets_panel.html b/blog/templates/_types/post/admin/_markets_panel.html new file mode 100644 index 0000000..d40076a --- /dev/null +++ b/blog/templates/_types/post/admin/_markets_panel.html @@ -0,0 +1,44 @@ +
    +

    Markets

    + + {% if markets %} +
      + {% for m in markets %} +
    • +
      + {{ m.name }} + /{{ m.slug }}/ +
      + +
    • + {% endfor %} +
    + {% else %} +

    No markets yet.

    + {% endif %} + +
    + + +
    +
    diff --git a/blog/templates/_types/post/admin/_nav.html b/blog/templates/_types/post/admin/_nav.html new file mode 100644 index 0000000..c0bfab6 --- /dev/null +++ b/blog/templates/_types/post/admin/_nav.html @@ -0,0 +1,28 @@ +{% import 'macros/links.html' as links %} + + + +{% call links.link(url_for('blog.post.admin.entries', slug=post.slug), hx_select_search, select_colours, True, aclass=styles.nav_button) %} + entries +{% endcall %} +{% call links.link(url_for('blog.post.admin.data', slug=post.slug), hx_select_search, select_colours, True, aclass=styles.nav_button) %} + data +{% endcall %} +{% call links.link(url_for('blog.post.admin.edit', slug=post.slug), hx_select_search, select_colours, True, aclass=styles.nav_button) %} + edit +{% endcall %} +{% call links.link(url_for('blog.post.admin.settings', slug=post.slug), hx_select_search, select_colours, True, aclass=styles.nav_button) %} + settings +{% endcall %} \ No newline at end of file diff --git a/blog/templates/_types/post/admin/_nav_entries.html b/blog/templates/_types/post/admin/_nav_entries.html new file mode 100644 index 0000000..47290d4 --- /dev/null +++ b/blog/templates/_types/post/admin/_nav_entries.html @@ -0,0 +1,50 @@ + + {# Left scroll arrow - desktop only #} + + + {# Widget-driven nav items container #} +
    +
    + {% for wdata in container_nav_widgets %} + {% with ctx=wdata.ctx %} + {% include wdata.widget.template with context %} + {% endwith %} + {% endfor %} +
    +
    + + + + {# Right scroll arrow - desktop only #} + diff --git a/blog/templates/_types/post/admin/_nav_entries_oob.html b/blog/templates/_types/post/admin/_nav_entries_oob.html new file mode 100644 index 0000000..eecc3d5 --- /dev/null +++ b/blog/templates/_types/post/admin/_nav_entries_oob.html @@ -0,0 +1,80 @@ +{# OOB swap for nav entries and calendars when toggling associations or editing calendars #} +{% import 'macros/links.html' as links %} + +{# Associated Entries and Calendars - vertical on mobile, horizontal with arrows on desktop #} +{% if (associated_entries and associated_entries.entries) or calendars %} +
    + {# Left scroll arrow - desktop only #} + + +
    +
    + {# Calendar entries #} + {% if associated_entries and associated_entries.entries %} + {% for entry in associated_entries.entries %} + {% set _entry_path = '/' + post.slug + '/calendars/' + entry.calendar_slug + '/' + entry.start_at.year|string + '/' + entry.start_at.month|string + '/' + entry.start_at.day|string + '/entries/' + entry.id|string + '/' %} + +
    +
    +
    {{ entry.name }}
    +
    + {{ entry.start_at.strftime('%b %d, %Y at %H:%M') }} + {% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    +
    +
    + {% endfor %} + {% endif %} + {# Calendar links #} + {% if calendars %} + {% for calendar in calendars %} + {% set local_href=events_url('/' + post.slug + '/calendars/' + calendar.slug + '/') %} + + +
    {{calendar.name}}
    +
    + {% endfor %} + {% endif %} +
    +
    + + + + {# Right scroll arrow - desktop only #} + +
    +{% else %} + {# Empty placeholder to remove nav items when all are disassociated/deleted #} +
    +{% endif %} diff --git a/blog/templates/_types/post/admin/_oob_elements.html b/blog/templates/_types/post/admin/_oob_elements.html new file mode 100644 index 0000000..d397c68 --- /dev/null +++ b/blog/templates/_types/post/admin/_oob_elements.html @@ -0,0 +1,22 @@ +{% extends "oob_elements.html" %} +{# OOB elements for post admin page #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-header-child', 'post-admin-header-child', '_types/post/admin/header/_header.html')}} + + {% from '_types/post/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% block mobile_menu %} + {% include '_types/post/admin/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/post/admin/_main_panel.html' %} +{% endblock %} \ No newline at end of file diff --git a/blog/templates/_types/post/admin/header/_header.html b/blog/templates/_types/post/admin/header/_header.html new file mode 100644 index 0000000..2708e4f --- /dev/null +++ b/blog/templates/_types/post/admin/header/_header.html @@ -0,0 +1,13 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post-admin-row', oob=oob) %} + {% call links.link( + url_for('blog.post.admin.admin', slug=post.slug), + hx_select_search) %} + {{ links.admin() }} + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/post/admin/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/blog/templates/_types/post/admin/index.html b/blog/templates/_types/post/admin/index.html new file mode 100644 index 0000000..1a7cc45 --- /dev/null +++ b/blog/templates/_types/post/admin/index.html @@ -0,0 +1,18 @@ +{% 
extends '_types/post/index.html' %} +{% import 'macros/layout.html' as layout %} + +{% block post_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-admin-header-child', '_types/post/admin/header/_header.html') %} + {% block post_admin_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/post/admin/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/post/admin/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/post/header/_header.html b/blog/templates/_types/post/header/_header.html new file mode 100644 index 0000000..143e79d --- /dev/null +++ b/blog/templates/_types/post/header/_header.html @@ -0,0 +1,28 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post-row', oob=oob) %} + {% call links.link(url_for('blog.post.post_detail', slug=post.slug), hx_select_search ) %} + {% if post.feature_image %} + + {% endif %} + + {{ post.title | truncate(160, True, '…') }} + + {% endcall %} + {% call links.desktop_nav() %} + {% if page_cart_count is defined and page_cart_count > 0 %} + + + {{ page_cart_count }} + + {% endif %} + {% include '_types/post/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/blog/templates/_types/post/index.html b/blog/templates/_types/post/index.html new file mode 100644 index 0000000..56ed99c --- /dev/null +++ b/blog/templates/_types/post/index.html @@ -0,0 +1,25 @@ +{% extends '_types/root/_index.html' %} +{% import 'macros/layout.html' as layout %} +{% block meta %} + {% include '_types/post/_meta.html' %} +{% endblock %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-header-child', '_types/post/header/_header.html') %} + {% block post_header_child %} + {% endblock %} + {% endcall %} 
+{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/post/_nav.html' %} +{% endblock %} + + +{% block aside %} +{% endblock %} + +{% block content %} + {% include '_types/post/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/post_data/_main_panel.html b/blog/templates/_types/post_data/_main_panel.html new file mode 100644 index 0000000..83dcc32 --- /dev/null +++ b/blog/templates/_types/post_data/_main_panel.html @@ -0,0 +1,137 @@ +{% macro render_scalar_table(obj) -%} +
    + + + + + + + + + {% for col in obj.__mapper__.columns %} + {% set key = col.key %} + {% set val = obj|attr(key) %} + {% if key != "_sa_instance_state" %} + + + + + {% endif %} + {% endfor %} + +
    FieldValue
    {{ key }} + {% if val is none %} + + {% elif val.__class__.__name__ in ["datetime", "date"] and val.isoformat is defined %} +
    {{ val.isoformat() }}
    + {% elif val is string %} +
    {{ val }}
    + {% else %} +
    {{ val }}
    + {% endif %} +
    +
    +{%- endmacro %} + +{% macro render_model(obj, depth=0, max_depth=2) -%} + {% if obj is none %} + + {% else %} +
    + {{ render_scalar_table(obj) }} + +
    + {% for rel in obj.__mapper__.relationships %} + {% set rel_name = rel.key %} + {% set loaded = rel.key in obj.__dict__ %} + {% if loaded %} + {% set value = obj|attr(rel_name) %} + {% else %} + {% set value = none %} + {% endif %} + +
    +
    + Relationship: {{ rel_name }} + + {{ 'many' if rel.uselist else 'one' }} → {{ rel.mapper.class_.__name__ }} + {% if not loaded %} • not loaded{% endif %} + +
    + +
    + {% if value is none %} + + + {% elif rel.uselist %} + {% set items = value or [] %} +
    {{ items|length }} item{{ '' if items|length == 1 else 's' }}
    + + {% if items %} +
    + + + + + + + + + {% for it in items %} + + + + + {% endfor %} + +
    #Summary
    {{ loop.index }} + {% set ident = [] %} + {% for k in ['id','ghost_id','uuid','slug','name','title'] if k in it.__mapper__.c %} + {% set v = (it|attr(k))|default('', true) %} + {% do ident.append(k ~ '=' ~ v) %} + {% endfor %} +
    {{ (ident|join(' • ')) or it|string }}
    + + {% if depth < max_depth %} +
    + {{ render_model(it, depth+1, max_depth) }} +
    + {% else %} +
    …max depth reached…
    + {% endif %} +
    +
    + {% endif %} + + {% else %} + {% set child = value %} + {% set ident = [] %} + {% for k in ['id','ghost_id','uuid','slug','name','title'] if k in child.__mapper__.c %} + {% set v = (child|attr(k))|default('', true) %} + {% do ident.append(k ~ '=' ~ v) %} + {% endfor %} +
    {{ (ident|join(' • ')) or child|string }}
    + + {% if depth < max_depth %} +
    + {{ render_model(child, depth+1, max_depth) }} +
    + {% else %} +
    …max depth reached…
    + {% endif %} + {% endif %} +
    +
    + {% endfor %} +
    +
    + {% endif %} +{%- endmacro %} + +
    +
    + Model: Post • Table: {{ original_post.__tablename__ }} +
    + {{ render_model(original_post, 0, 2) }} +
    + diff --git a/blog/templates/_types/post_data/_nav.html b/blog/templates/_types/post_data/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/blog/templates/_types/post_data/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/blog/templates/_types/post_data/_oob_elements.html b/blog/templates/_types/post_data/_oob_elements.html new file mode 100644 index 0000000..32fd0c7 --- /dev/null +++ b/blog/templates/_types/post_data/_oob_elements.html @@ -0,0 +1,28 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-admin-header-child', 'post_data-header-child', '_types/post_data/header/_header.html')}} + + {% from '_types/post/admin/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/post_data/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/post_data/_main_panel.html" %} +{% endblock %} + + diff --git a/blog/templates/_types/post_data/header/_header.html b/blog/templates/_types/post_data/header/_header.html new file mode 100644 index 0000000..27eaf6f --- /dev/null +++ b/blog/templates/_types/post_data/header/_header.html @@ -0,0 +1,15 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post_data-row', oob=oob) %} + + +
    data
    +
    + {% call links.desktop_nav() %} + {#% include '_types/post_data/_nav.html' %#} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/blog/templates/_types/post_data/index.html b/blog/templates/_types/post_data/index.html new file mode 100644 index 0000000..1df67b8 --- /dev/null +++ b/blog/templates/_types/post_data/index.html @@ -0,0 +1,24 @@ +{% extends '_types/post/admin/index.html' %} + +{% block ___app_title %} + {% import 'macros/links.html' as links %} + {% call links.menu_row() %} + {% call links.link(url_for('blog.post.admin.data', slug=post.slug), hx_select_search) %} + +
    + data +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/post_data/_nav.html' %} + {% endcall %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/post_data/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/post_data/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/post_edit/_main_panel.html b/blog/templates/_types/post_edit/_main_panel.html new file mode 100644 index 0000000..05d9251 --- /dev/null +++ b/blog/templates/_types/post_edit/_main_panel.html @@ -0,0 +1,352 @@ +{# ── Error banner ── #} +{% if save_error %} +
    + Save failed: {{ save_error }} +
    +{% endif %} + +
    + + + + + + + {# ── Feature image ── #} +
    + {# Empty state: add link #} +
    + +
    + + {# Filled state: image preview + controls #} +
    + + {# Delete button (top-right, visible on hover) #} + + + {# Caption input #} + +
    + + {# Upload spinner overlay #} + + + {# Hidden file input #} + +
    + + {# ── Title ── #} + + + {# ── Excerpt ── #} + + + {# ── Editor mount point ── #} +
    + + {# ── Initial Lexical JSON from Ghost ── #} + + + {# ── Status + Publish mode + Save footer ── #} + {% set already_emailed = ghost_post and ghost_post.email and ghost_post.email.status %} +
    + + + {# Publish mode — only relevant when publishing #} + + + {# Newsletter picker — only when email is involved #} + + + + + {% if save_success %} + Saved. + {% endif %} + {% if request.args.get('publish_requested') %} + Publish requested — an admin will review. + {% endif %} + {% if post and post.publish_requested %} + Publish requested + {% endif %} + {% if already_emailed %} + + Emailed{% if ghost_post.newsletter %} to {{ ghost_post.newsletter.name }}{% endif %} + + {% endif %} +
    + + {# ── Publish-mode show/hide logic ── #} + +
    + +{# ── Koenig editor assets ── #} + + + + diff --git a/blog/templates/_types/post_edit/_nav.html b/blog/templates/_types/post_edit/_nav.html new file mode 100644 index 0000000..0b1d08a --- /dev/null +++ b/blog/templates/_types/post_edit/_nav.html @@ -0,0 +1,5 @@ +{% import 'macros/links.html' as links %} +{% call links.link(url_for('blog.post.admin.settings', slug=post.slug), hx_select_search, select_colours, True, aclass=styles.nav_button) %} + + settings +{% endcall %} diff --git a/blog/templates/_types/post_edit/_oob_elements.html b/blog/templates/_types/post_edit/_oob_elements.html new file mode 100644 index 0000000..694096c --- /dev/null +++ b/blog/templates/_types/post_edit/_oob_elements.html @@ -0,0 +1,19 @@ +{% extends 'oob_elements.html' %} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-admin-header-child', 'post_edit-header-child', '_types/post_edit/header/_header.html')}} + + {% from '_types/post/admin/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% block mobile_menu %} + {% include '_types/post_edit/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/post_edit/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/post_edit/header/_header.html b/blog/templates/_types/post_edit/header/_header.html new file mode 100644 index 0000000..60e07e7 --- /dev/null +++ b/blog/templates/_types/post_edit/header/_header.html @@ -0,0 +1,14 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post_edit-row', oob=oob) %} + {% call links.link(url_for('blog.post.admin.edit', slug=post.slug), hx_select_search) %} + +
    + edit +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/post_edit/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/blog/templates/_types/post_edit/index.html b/blog/templates/_types/post_edit/index.html new file mode 100644 index 0000000..b5c7212 --- /dev/null +++ b/blog/templates/_types/post_edit/index.html @@ -0,0 +1,17 @@ +{% extends '_types/post/admin/index.html' %} + +{% block post_admin_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-admin-header-child', '_types/post_edit/header/_header.html') %} + {% block post_edit_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/post_edit/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/post_edit/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/post_entries/_main_panel.html b/blog/templates/_types/post_entries/_main_panel.html new file mode 100644 index 0000000..342041e --- /dev/null +++ b/blog/templates/_types/post_entries/_main_panel.html @@ -0,0 +1,48 @@ +
    + + {# Associated Entries List #} + {% include '_types/post/admin/_associated_entries.html' %} + + {# Calendars Browser #} +
    +

    Browse Calendars

    + {% for calendar in all_calendars %} +
    + + {% if calendar.post.feature_image %} + {{ calendar.post.title }} + {% else %} +
    + {% endif %} +
    +
    + + {{ calendar.name }} +
    +
    + {{ calendar.post.title }} +
    +
    +
    +
    +
    Loading calendar...
    +
    +
    + {% else %} +
    No calendars found.
    + {% endfor %} +
    +
    \ No newline at end of file diff --git a/blog/templates/_types/post_entries/_nav.html b/blog/templates/_types/post_entries/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/blog/templates/_types/post_entries/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/blog/templates/_types/post_entries/_oob_elements.html b/blog/templates/_types/post_entries/_oob_elements.html new file mode 100644 index 0000000..3ef5559 --- /dev/null +++ b/blog/templates/_types/post_entries/_oob_elements.html @@ -0,0 +1,28 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-admin-header-child', 'post_entries-header-child', '_types/post_entries/header/_header.html')}} + + {% from '_types/post/admin/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/post_entries/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/post_entries/_main_panel.html" %} +{% endblock %} + + diff --git a/blog/templates/_types/post_entries/header/_header.html b/blog/templates/_types/post_entries/header/_header.html new file mode 100644 index 0000000..019c000 --- /dev/null +++ b/blog/templates/_types/post_entries/header/_header.html @@ -0,0 +1,17 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post_entries-row', oob=oob) %} + {% call links.link(url_for('blog.post.admin.entries', slug=post.slug), hx_select_search) %} + +
    + entries +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/post_entries/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/blog/templates/_types/post_entries/index.html b/blog/templates/_types/post_entries/index.html new file mode 100644 index 0000000..382d297 --- /dev/null +++ b/blog/templates/_types/post_entries/index.html @@ -0,0 +1,19 @@ +{% extends '_types/post/admin/index.html' %} + + + +{% block post_admin_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-admin-header-child', '_types/post_entries/header/_header.html') %} + {% block post_entries_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/post_entries/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/post_entries/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/post_settings/_main_panel.html b/blog/templates/_types/post_settings/_main_panel.html new file mode 100644 index 0000000..038fab1 --- /dev/null +++ b/blog/templates/_types/post_settings/_main_panel.html @@ -0,0 +1,198 @@ +{# ── Post/Page Settings Form ── #} +{% set gp = ghost_post or {} %} +{% set _is_page = post.is_page if post else False %} + +{% macro field_label(text, field_for=None) %} + +{% endmacro %} + +{% macro text_input(name, value='', placeholder='', type='text', maxlength=None) %} + +{% endmacro %} + +{% macro textarea_input(name, value='', placeholder='', rows=3, maxlength=None) %} + +{% endmacro %} + +{% macro checkbox_input(name, checked=False, label='') %} + +{% endmacro %} + +{% macro section(title, open=False) %} +
    + + {{ title }} + +
    + {{ caller() }} +
    +
    +{% endmacro %} + +
    + + + +
    + + {# ── General ── #} + {% call section('General', open=True) %} +
    + {{ field_label('Slug', 'settings-slug') }} + {{ text_input('slug', gp.slug or '', 'page-slug' if _is_page else 'post-slug') }} +
    +
    + {{ field_label('Published at', 'settings-published_at') }} + +
    +
    + {{ checkbox_input('featured', gp.featured, 'Featured page' if _is_page else 'Featured post') }} +
    +
    + {{ field_label('Visibility', 'settings-visibility') }} + +
    +
    + {{ checkbox_input('email_only', gp.email_only, 'Email only') }} +
    + {% endcall %} + + {# ── Tags ── #} + {% call section('Tags') %} +
    + {{ field_label('Tags (comma-separated)', 'settings-tags') }} + {% set tag_names = gp.tags|map(attribute='name')|list|join(', ') if gp.tags else '' %} + {{ text_input('tags', tag_names, 'news, updates, featured') }} +

    Unknown tags will be created automatically.

    +
    + {% endcall %} + + {# ── Feature Image ── #} + {% call section('Feature Image') %} +
    + {{ field_label('Alt text', 'settings-feature_image_alt') }} + {{ text_input('feature_image_alt', gp.feature_image_alt or '', 'Describe the feature image') }} +
    + {% endcall %} + + {# ── SEO / Meta ── #} + {% call section('SEO / Meta') %} +
    + {{ field_label('Meta title', 'settings-meta_title') }} + {{ text_input('meta_title', gp.meta_title or '', 'SEO title', maxlength=300) }} +

    Recommended: 70 characters. Max: 300.

    +
    +
    + {{ field_label('Meta description', 'settings-meta_description') }} + {{ textarea_input('meta_description', gp.meta_description or '', 'SEO description', rows=2, maxlength=500) }} +

    Recommended: 156 characters.

    +
    +
    + {{ field_label('Canonical URL', 'settings-canonical_url') }} + {{ text_input('canonical_url', gp.canonical_url or '', 'https://example.com/original-post', type='url') }} +
    + {% endcall %} + + {# ── Facebook / OpenGraph ── #} + {% call section('Facebook / OpenGraph') %} +
    + {{ field_label('OG title', 'settings-og_title') }} + {{ text_input('og_title', gp.og_title or '') }} +
    +
    + {{ field_label('OG description', 'settings-og_description') }} + {{ textarea_input('og_description', gp.og_description or '', rows=2) }} +
    +
    + {{ field_label('OG image URL', 'settings-og_image') }} + {{ text_input('og_image', gp.og_image or '', 'https://...', type='url') }} +
    + {% endcall %} + + {# ── X / Twitter ── #} + {% call section('X / Twitter') %} +
    + {{ field_label('Twitter title', 'settings-twitter_title') }} + {{ text_input('twitter_title', gp.twitter_title or '') }} +
    +
    + {{ field_label('Twitter description', 'settings-twitter_description') }} + {{ textarea_input('twitter_description', gp.twitter_description or '', rows=2) }} +
    +
    + {{ field_label('Twitter image URL', 'settings-twitter_image') }} + {{ text_input('twitter_image', gp.twitter_image or '', 'https://...', type='url') }} +
    + {% endcall %} + + {# ── Advanced ── #} + {% call section('Advanced') %} +
    + {{ field_label('Custom template', 'settings-custom_template') }} + {{ text_input('custom_template', gp.custom_template or '', 'custom-page.hbs' if _is_page else 'custom-post.hbs') }} +
    + {% endcall %} + +
    + + {# ── Save footer ── #} +
    + + + {% if save_success %} + Saved. + {% endif %} +
    +
    diff --git a/blog/templates/_types/post_settings/_nav.html b/blog/templates/_types/post_settings/_nav.html new file mode 100644 index 0000000..a08d80a --- /dev/null +++ b/blog/templates/_types/post_settings/_nav.html @@ -0,0 +1,5 @@ +{% import 'macros/links.html' as links %} +{% call links.link(url_for('blog.post.admin.edit', slug=post.slug), hx_select_search, select_colours, True, aclass=styles.nav_button) %} + + edit +{% endcall %} diff --git a/blog/templates/_types/post_settings/_oob_elements.html b/blog/templates/_types/post_settings/_oob_elements.html new file mode 100644 index 0000000..d2d6beb --- /dev/null +++ b/blog/templates/_types/post_settings/_oob_elements.html @@ -0,0 +1,19 @@ +{% extends 'oob_elements.html' %} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-admin-header-child', 'post_settings-header-child', '_types/post_settings/header/_header.html')}} + + {% from '_types/post/admin/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% block mobile_menu %} + {% include '_types/post_settings/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/post_settings/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/post_settings/header/_header.html b/blog/templates/_types/post_settings/header/_header.html new file mode 100644 index 0000000..ba187fe --- /dev/null +++ b/blog/templates/_types/post_settings/header/_header.html @@ -0,0 +1,14 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post_settings-row', oob=oob) %} + {% call links.link(url_for('blog.post.admin.settings', slug=post.slug), hx_select_search) %} + +
    + settings +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/post_settings/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/blog/templates/_types/post_settings/index.html b/blog/templates/_types/post_settings/index.html new file mode 100644 index 0000000..59835f4 --- /dev/null +++ b/blog/templates/_types/post_settings/index.html @@ -0,0 +1,17 @@ +{% extends '_types/post/admin/index.html' %} + +{% block post_admin_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-admin-header-child', '_types/post_settings/header/_header.html') %} + {% block post_settings_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/post_settings/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/post_settings/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/root/header/_header.html b/blog/templates/_types/root/header/_header.html new file mode 100644 index 0000000..7792cd5 --- /dev/null +++ b/blog/templates/_types/root/header/_header.html @@ -0,0 +1,42 @@ +{% set select_colours = " + [.hover-capable_&]:hover:bg-yellow-300 + aria-selected:bg-stone-500 aria-selected:text-white + [.hover-capable_&[aria-selected=true]:hover]:bg-orange-500 +"%} +{% import 'macros/links.html' as links %} + +{% macro header_row(oob=False) %} + {% call links.menu_row(id='root-row', oob=oob) %} +
    + {# Cart mini — fetched from cart app as fragment #} + {% if cart_mini_html %} + {{ cart_mini_html | safe }} + {% endif %} + + {# Site title #} +
    + {% from 'macros/title.html' import title with context %} + {{ title('flex justify-center md:justify-start')}} +
    + + {# Desktop nav #} + + {% include '_types/root/_hamburger.html' %} +
    + {% endcall %} + {# Mobile user info #} +
    + {% if auth_menu_html %} + {{ auth_menu_html | safe }} + {% endif %} +
    +{% endmacro %} diff --git a/blog/templates/_types/root/settings/_main_panel.html b/blog/templates/_types/root/settings/_main_panel.html new file mode 100644 index 0000000..9f4c9a8 --- /dev/null +++ b/blog/templates/_types/root/settings/_main_panel.html @@ -0,0 +1,2 @@ +
    +
    diff --git a/blog/templates/_types/root/settings/_nav.html b/blog/templates/_types/root/settings/_nav.html new file mode 100644 index 0000000..f9d4420 --- /dev/null +++ b/blog/templates/_types/root/settings/_nav.html @@ -0,0 +1,5 @@ +{% from 'macros/admin_nav.html' import admin_nav_item %} +{{ admin_nav_item(url_for('menu_items.list_menu_items'), 'bars', 'Menu Items', select_colours) }} +{{ admin_nav_item(url_for('snippets.list_snippets'), 'puzzle-piece', 'Snippets', select_colours) }} +{{ admin_nav_item(url_for('blog.tag_groups_admin.index'), 'tags', 'Tag Groups', select_colours) }} +{{ admin_nav_item(url_for('settings.cache'), 'refresh', 'Cache', select_colours) }} diff --git a/blog/templates/_types/root/settings/_oob_elements.html b/blog/templates/_types/root/settings/_oob_elements.html new file mode 100644 index 0000000..fbe1bf3 --- /dev/null +++ b/blog/templates/_types/root/settings/_oob_elements.html @@ -0,0 +1,26 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob_.html' import root_header with context %} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-header-child', 'root-settings-header-child', '_types/root/settings/header/_header.html')}} + + {% from '_types/root/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} +{% include '_types/root/settings/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include '_types/root/settings/_main_panel.html' %} +{% endblock %} + diff --git a/blog/templates/_types/root/settings/cache/_header.html b/blog/templates/_types/root/settings/cache/_header.html new file mode 100644 index 0000000..64f8535 --- /dev/null +++ b/blog/templates/_types/root/settings/cache/_header.html @@ -0,0 +1,9 @@ +{% import 'macros/links.html' as links %} +{% macro 
header_row(oob=False) %} + {% call links.menu_row(id='cache-row', oob=oob) %} + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{ admin_nav_item(url_for('settings.cache'), 'refresh', 'Cache', select_colours, aclass='') }} + {% call links.desktop_nav() %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/blog/templates/_types/root/settings/cache/_main_panel.html b/blog/templates/_types/root/settings/cache/_main_panel.html new file mode 100644 index 0000000..854012d --- /dev/null +++ b/blog/templates/_types/root/settings/cache/_main_panel.html @@ -0,0 +1,14 @@ +
    +
    +
    + + +
    +
    +
    +
    diff --git a/blog/templates/_types/root/settings/cache/_oob_elements.html b/blog/templates/_types/root/settings/cache/_oob_elements.html new file mode 100644 index 0000000..5989bf7 --- /dev/null +++ b/blog/templates/_types/root/settings/cache/_oob_elements.html @@ -0,0 +1,16 @@ +{% extends 'oob_elements.html' %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-settings-header-child', 'cache-header-child', '_types/root/settings/cache/_header.html')}} + + {% from '_types/root/settings/header/_header.html' import header_row with context %} + {{header_row(oob=True)}} +{% endblock %} + +{% block mobile_menu %} +{% endblock %} + +{% block content %} + {% include '_types/root/settings/cache/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/root/settings/cache/index.html b/blog/templates/_types/root/settings/cache/index.html new file mode 100644 index 0000000..05706f8 --- /dev/null +++ b/blog/templates/_types/root/settings/cache/index.html @@ -0,0 +1,20 @@ +{% extends '_types/root/settings/index.html' %} + +{% block root_settings_header_child %} + {% from '_types/root/_n/macros.html' import header with context %} + {% call header() %} + {% from '_types/root/settings/cache/_header.html' import header_row with context %} + {{ header_row() }} +
    + {% block cache_header_child %} + {% endblock %} +
    + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/root/settings/cache/_main_panel.html' %} +{% endblock %} + +{% block _main_mobile_menu %} +{% endblock %} diff --git a/blog/templates/_types/root/settings/header/_header.html b/blog/templates/_types/root/settings/header/_header.html new file mode 100644 index 0000000..69e7c72 --- /dev/null +++ b/blog/templates/_types/root/settings/header/_header.html @@ -0,0 +1,11 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='root-settings-row', oob=oob) %} + {% call links.link(url_for('settings.home'), hx_select_search) %} + {{ links.admin() }} + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/root/settings/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/blog/templates/_types/root/settings/index.html b/blog/templates/_types/root/settings/index.html new file mode 100644 index 0000000..1773f3d --- /dev/null +++ b/blog/templates/_types/root/settings/index.html @@ -0,0 +1,18 @@ +{% extends '_types/root/_index.html' %} +{% import 'macros/layout.html' as layout %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('root-settings-header-child', '_types/root/settings/header/_header.html') %} + {% block root_settings_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/root/settings/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/root/settings/_main_panel.html' %} +{% endblock %} \ No newline at end of file diff --git a/blog/templates/_types/snippets/_list.html b/blog/templates/_types/snippets/_list.html new file mode 100644 index 0000000..2b982ca --- /dev/null +++ b/blog/templates/_types/snippets/_list.html @@ -0,0 +1,73 @@ +
    + {% if snippets %} +
    + {% for s in snippets %} +
    + {# Name #} +
    +
    {{ s.name }}
    +
    + {% if s.user_id == g.user.id %} + You + {% else %} + User #{{ s.user_id }} + {% endif %} +
    +
    + + {# Visibility badge #} + {% set badge_colours = { + 'private': 'bg-stone-200 text-stone-700', + 'shared': 'bg-blue-100 text-blue-700', + 'admin': 'bg-amber-100 text-amber-700', + } %} + + {{ s.visibility }} + + + {# Admin: inline visibility select #} + {% if is_admin %} + + {% endif %} + + {# Delete button #} + {% if s.user_id == g.user.id or is_admin %} + + {% endif %} +
    + {% endfor %} +
    + {% else %} +
    + +

    No snippets yet. Create one from the blog editor.

    +
    + {% endif %} +
    diff --git a/blog/templates/_types/snippets/_main_panel.html b/blog/templates/_types/snippets/_main_panel.html new file mode 100644 index 0000000..73b50b7 --- /dev/null +++ b/blog/templates/_types/snippets/_main_panel.html @@ -0,0 +1,9 @@ +
    +
    +

    Snippets

    +
    + +
    + {% include '_types/snippets/_list.html' %} +
    +
    diff --git a/blog/templates/_types/snippets/_oob_elements.html b/blog/templates/_types/snippets/_oob_elements.html new file mode 100644 index 0000000..a1377cf --- /dev/null +++ b/blog/templates/_types/snippets/_oob_elements.html @@ -0,0 +1,18 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-settings-header-child', 'snippets-header-child', '_types/snippets/header/_header.html')}} + + {% from '_types/root/settings/header/_header.html' import header_row with context %} + {{header_row(oob=True)}} +{% endblock %} + +{% block mobile_menu %} +{% endblock %} + +{% block content %} + {% include '_types/snippets/_main_panel.html' %} +{% endblock %} diff --git a/blog/templates/_types/snippets/header/_header.html b/blog/templates/_types/snippets/header/_header.html new file mode 100644 index 0000000..0882518 --- /dev/null +++ b/blog/templates/_types/snippets/header/_header.html @@ -0,0 +1,9 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='snippets-row', oob=oob) %} + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{ admin_nav_item(url_for('snippets.list_snippets'), 'puzzle-piece', 'Snippets', select_colours, aclass='') }} + {% call links.desktop_nav() %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/blog/templates/_types/snippets/index.html b/blog/templates/_types/snippets/index.html new file mode 100644 index 0000000..90f0106 --- /dev/null +++ b/blog/templates/_types/snippets/index.html @@ -0,0 +1,20 @@ +{% extends '_types/root/settings/index.html' %} + +{% block root_settings_header_child %} + {% from '_types/root/_n/macros.html' import header with context %} + {% call header() %} + {% from '_types/snippets/header/_header.html' import header_row with context %} + {{ header_row() }} +
    + {% block snippets_header_child %} + {% endblock %} +
    + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/snippets/_main_panel.html' %} +{% endblock %} + +{% block _main_mobile_menu %} +{% endblock %} diff --git a/blog/templates/fragments/nav_tree.html b/blog/templates/fragments/nav_tree.html new file mode 100644 index 0000000..df41dc8 --- /dev/null +++ b/blog/templates/fragments/nav_tree.html @@ -0,0 +1,32 @@ +{# Nav-tree fragment — rendered by blog, consumed by all apps. + Uses frag_app_name / frag_first_seg instead of request.path / app_name + so the consuming app's context is reflected correctly. + No hx-boost — cross-app nav links are full page navigations. #} +{% set _app_slugs = { + 'cart': cart_url('/'), + 'market': market_url('/'), + 'events': events_url('/'), + 'federation': federation_url('/'), + 'account': account_url('/'), +} %} + diff --git a/blog/templates/macros/admin_nav.html b/blog/templates/macros/admin_nav.html new file mode 100644 index 0000000..738a319 --- /dev/null +++ b/blog/templates/macros/admin_nav.html @@ -0,0 +1,21 @@ +{# + Shared admin navigation macro + Use this instead of duplicate _nav.html files +#} + +{% macro admin_nav_item(href, icon='cog', label='', select_colours='', aclass=styles.nav_button) %} + {% import 'macros/links.html' as links %} + {% call links.link(href, hx_select_search, select_colours, True, aclass=aclass) %} + + {{ label }} + {% endcall %} +{% endmacro %} + +{% macro placeholder_nav() %} +{# Placeholder for admin sections without specific nav items #} + +{% endmacro %} diff --git a/blog/templates/macros/scrolling_menu.html b/blog/templates/macros/scrolling_menu.html new file mode 100644 index 0000000..d1a823a --- /dev/null +++ b/blog/templates/macros/scrolling_menu.html @@ -0,0 +1,68 @@ +{# + Scrolling menu macro with arrow navigation + + Creates a horizontally scrollable menu (desktop) or vertically scrollable (mobile) + with arrow buttons that appear/hide based on content overflow. 
+ + Parameters: + - container_id: Unique ID for the scroll container + - items: List of items to iterate over + - item_content: Caller block that renders each item (receives 'item' variable) + - wrapper_class: Optional additional classes for outer wrapper + - container_class: Optional additional classes for scroll container + - item_class: Optional additional classes for each item wrapper +#} + +{% macro scrolling_menu(container_id, items, wrapper_class='', container_class='', item_class='') %} + {% if items %} + {# Left scroll arrow - desktop only #} + + + {# Scrollable container #} +
    +
    + {% for item in items %} +
    + {{ caller(item) }} +
    + {% endfor %} +
    +
    + + + + {# Right scroll arrow - desktop only #} + + {% endif %} +{% endmacro %} diff --git a/blog/templates/macros/stickers.html b/blog/templates/macros/stickers.html new file mode 100644 index 0000000..2be5b9f --- /dev/null +++ b/blog/templates/macros/stickers.html @@ -0,0 +1,24 @@ +{% macro sticker(src, title, enabled, size=40, found=false) -%} + + + + {{ title|capitalize }} + + + + + +{%- endmacro -%} + diff --git a/cart/.gitignore b/cart/.gitignore new file mode 100644 index 0000000..be20105 --- /dev/null +++ b/cart/.gitignore @@ -0,0 +1,8 @@ +__pycache__/ +*.pyc +.env +node_modules/ +*.egg-info/ +dist/ +build/ +.venv/ diff --git a/cart/Dockerfile b/cart/Dockerfile new file mode 100644 index 0000000..7fb990e --- /dev/null +++ b/cart/Dockerfile @@ -0,0 +1,50 @@ +# syntax=docker/dockerfile:1 + +# ---------- Python application ---------- +FROM python:3.11-slim AS base + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONPATH=/app \ + PIP_NO_CACHE_DIR=1 \ + APP_PORT=8000 \ + APP_MODULE=app:app + +WORKDIR /app + +# Install system deps + psql client +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +COPY shared/requirements.txt ./requirements.txt +RUN pip install -r requirements.txt + +# Shared code (replaces submodule) +COPY shared/ ./shared/ + +# App code +COPY cart/ ./ + +# Sibling models for cross-domain SQLAlchemy imports +COPY blog/__init__.py ./blog/__init__.py +COPY blog/models/ ./blog/models/ +COPY market/__init__.py ./market/__init__.py +COPY market/models/ ./market/models/ +COPY events/__init__.py ./events/__init__.py +COPY events/models/ ./events/models/ +COPY federation/__init__.py ./federation/__init__.py +COPY federation/models/ ./federation/models/ +COPY account/__init__.py ./account/__init__.py +COPY account/models/ ./account/models/ + +# ---------- Runtime setup ---------- +COPY cart/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x 
/usr/local/bin/entrypoint.sh + +RUN useradd -m -u 10001 appuser && chown -R appuser:appuser /app +USER appuser + +EXPOSE ${APP_PORT} +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/cart/README.md b/cart/README.md new file mode 100644 index 0000000..22374d1 --- /dev/null +++ b/cart/README.md @@ -0,0 +1,76 @@ +# Cart App + +Shopping cart, checkout, and order management service for the Rose Ash cooperative. + +## Architecture + +One of five Quart microservices sharing a single PostgreSQL database: + +| App | Port | Domain | +|-----|------|--------| +| blog (coop) | 8000 | Auth, blog, admin, menus, snippets | +| market | 8001 | Product browsing, Suma scraping | +| **cart** | 8002 | Shopping cart, checkout, orders | +| events | 8003 | Calendars, bookings, tickets | +| federation | 8004 | ActivityPub, fediverse social | + +## Structure + +``` +app.py # Application factory (create_base_app + blueprints) +path_setup.py # Adds project root + app dir to sys.path +config/app-config.yaml # App URLs, SumUp config +models/ # Cart-domain models (Order, OrderItem, PageConfig) +bp/ + cart/ # Cart blueprint + global_routes.py # Add to cart, checkout, webhooks, return page + page_routes.py # Page-scoped cart and checkout + overview_routes.py # Cart overview / summary page + services/ # Business logic + checkout.py # Order creation, SumUp integration + check_sumup_status.py # Payment status polling + calendar_cart.py # Calendar entry cart queries + page_cart.py # Page-scoped cart queries + get_cart.py # Cart item queries + identity.py # Cart identity (user_id / session_id) + total.py # Price calculations + clear_cart_for_order.py # Soft-delete cart after checkout + order/ # Single order detail view + orders/ # Order listing view +services/ # register_domain_services() — wires cart + calendar + market +shared/ # Submodule -> git.rose-ash.com/coop/shared.git +``` + +## Cross-Domain Communication + +- `services.calendar.*` — claim/confirm entries for orders, adopt on login +- 
`services.market.*` — marketplace queries for page-scoped carts +- `services.blog.*` — post lookup for page context +- `shared.services.navigation` — site navigation tree + +## Domain Events + +- `checkout.py` emits `order.created` via `shared.events.emit_event` +- `check_sumup_status.py` emits `order.paid` via `shared.events.emit_event` + +## Checkout Flow + +``` +1. User clicks "Checkout" +2. create_order_from_cart() creates Order + OrderItems +3. services.calendar.claim_entries_for_order() marks entries as "ordered" +4. emit: order.created event +5. SumUp hosted checkout created, user redirected +6. SumUp webhook / return page triggers check_sumup_status() +7. If PAID: services.calendar.confirm_entries_for_order(), emit: order.paid +``` + +## Running + +```bash +export DATABASE_URL_ASYNC=postgresql+asyncpg://user:pass@localhost/coop +export REDIS_URL=redis://localhost:6379/0 +export SECRET_KEY=your-secret-key + +hypercorn app:app --bind 0.0.0.0:8002 +``` diff --git a/cart/__init__.py b/cart/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cart/app.py b/cart/app.py new file mode 100644 index 0000000..dad13cd --- /dev/null +++ b/cart/app.py @@ -0,0 +1,235 @@ +from __future__ import annotations +import path_setup # noqa: F401 # adds shared/ to sys.path + +from decimal import Decimal +from pathlib import Path + +from quart import g, abort, request +from jinja2 import FileSystemLoader, ChoiceLoader +from sqlalchemy import select + +from shared.infrastructure.factory import create_base_app + +from bp import ( + register_cart_overview, + register_page_cart, + register_cart_global, + register_orders, + register_fragments, +) +from bp.cart.services import ( + get_cart, + total, + get_calendar_cart_entries, + calendar_total, + get_ticket_cart_entries, + ticket_total, +) +from bp.cart.services.page_cart import ( + get_cart_for_page, + get_calendar_entries_for_page, + get_tickets_for_page, +) +from bp.cart.services.ticket_groups import group_tickets + + 
+async def _load_cart(): + """Load the full cart for the cart app (before each request).""" + g.cart = await get_cart(g.s) + + +async def cart_context() -> dict: + """ + Cart app context processor. + + - cart / calendar_cart_entries / total / calendar_total: direct DB + (cart app owns this data) + - cart_count: derived from cart + calendar entries (for _mini.html) + - nav_tree_html: fetched from blog as fragment + + When g.page_post exists, cart and calendar_cart_entries are page-scoped. + Global cart_count / cart_total stay global for cart-mini. + """ + from shared.infrastructure.context import base_context + from shared.services.navigation import get_navigation_tree + from shared.infrastructure.fragments import fetch_fragment + + ctx = await base_context() + + ctx["nav_tree_html"] = await fetch_fragment( + "blog", "nav-tree", + params={"app_name": "cart", "path": request.path}, + ) + # Fallback for _nav.html when nav-tree fragment fetch fails + ctx["menu_items"] = await get_navigation_tree(g.s) + + # Cart app owns cart data — use g.cart from _load_cart + all_cart = getattr(g, "cart", None) or [] + all_cal = await get_calendar_cart_entries(g.s) + all_tickets = await get_ticket_cart_entries(g.s) + + # Global counts for cart-mini (always global) + cart_qty = sum(ci.quantity for ci in all_cart) if all_cart else 0 + ctx["cart_count"] = cart_qty + len(all_cal) + len(all_tickets) + ctx["cart_total"] = (total(all_cart) or Decimal(0)) + (calendar_total(all_cal) or Decimal(0)) + (ticket_total(all_tickets) or Decimal(0)) + + # Page-scoped data when viewing a page cart + page_post = getattr(g, "page_post", None) + if page_post: + page_cart = await get_cart_for_page(g.s, page_post.id) + page_cal = await get_calendar_entries_for_page(g.s, page_post.id) + page_tickets = await get_tickets_for_page(g.s, page_post.id) + ctx["cart"] = page_cart + ctx["calendar_cart_entries"] = page_cal + ctx["ticket_cart_entries"] = page_tickets + ctx["page_post"] = page_post + ctx["page_config"] = 
getattr(g, "page_config", None) + else: + ctx["cart"] = all_cart + ctx["calendar_cart_entries"] = all_cal + ctx["ticket_cart_entries"] = all_tickets + + ctx["ticket_groups"] = group_tickets(ctx.get("ticket_cart_entries", [])) + ctx["total"] = total + ctx["calendar_total"] = calendar_total + ctx["ticket_total"] = ticket_total + + return ctx + + +def create_app() -> "Quart": + from shared.models.page_config import PageConfig + from shared.services.registry import services + from services import register_domain_services + + app = create_base_app( + "cart", + context_fn=cart_context, + before_request_fns=[_load_cart], + domain_services_fn=register_domain_services, + ) + + # App-specific templates override shared templates + app_templates = str(Path(__file__).resolve().parent / "templates") + app.jinja_loader = ChoiceLoader([ + FileSystemLoader(app_templates), + app.jinja_loader, + ]) + + app.jinja_env.globals["cart_quantity_url"] = lambda product_id: f"/quantity/{product_id}/" + app.jinja_env.globals["cart_delete_url"] = lambda product_id: f"/delete/{product_id}/" + + app.register_blueprint(register_fragments()) + + # --- Page slug hydration (follows events/market app pattern) --- + + @app.url_value_preprocessor + def pull_page_slug(endpoint, values): + if values and "page_slug" in values: + g.page_slug = values.pop("page_slug") + + @app.url_defaults + def inject_page_slug(endpoint, values): + slug = g.get("page_slug") + if slug and "page_slug" not in values: + if app.url_map.is_endpoint_expecting(endpoint, "page_slug"): + values["page_slug"] = slug + + @app.before_request + async def hydrate_page(): + slug = getattr(g, "page_slug", None) + if not slug: + return + post = await services.blog.get_post_by_slug(g.s, slug) + if not post or not post.is_page: + abort(404) + g.page_post = post + g.page_config = ( + await g.s.execute( + select(PageConfig).where( + PageConfig.container_type == "page", + PageConfig.container_id == post.id, + ) + ) + ).scalar_one_or_none() + + # 
--- Blueprint registration --- + # Static prefixes first, dynamic (page_slug) last + + # Orders blueprint + app.register_blueprint(register_orders(url_prefix="/orders")) + + # Global routes (webhook, return, add — specific paths under /) + app.register_blueprint( + register_cart_global(url_prefix="/"), + url_prefix="/", + ) + + # Cart overview at GET / + app.register_blueprint( + register_cart_overview(url_prefix="/"), + url_prefix="/", + ) + + # Page cart at // (dynamic, matched last) + app.register_blueprint( + register_page_cart(url_prefix="/"), + url_prefix="/", + ) + + # --- Reconcile stale pending orders on startup --- + @app.before_serving + async def _reconcile_pending_orders(): + """Check SumUp status for orders stuck in 'pending' with a checkout ID. + + Handles the case where SumUp webhooks fired while the service was down + or were rejected (e.g. CSRF). Runs once on boot. + """ + import logging + from datetime import datetime, timezone, timedelta + from sqlalchemy import select + from sqlalchemy.orm import selectinload + from shared.db.session import get_session + from shared.models.order import Order + from bp.cart.services.check_sumup_status import check_sumup_status + + log = logging.getLogger("cart.reconcile") + + try: + async with get_session() as sess: + async with sess.begin(): + # Orders that are pending, have a SumUp checkout, and are + # older than 2 minutes (avoid racing with in-flight checkouts) + cutoff = datetime.now(timezone.utc) - timedelta(minutes=2) + result = await sess.execute( + select(Order) + .where( + Order.status == "pending", + Order.sumup_checkout_id.isnot(None), + Order.created_at < cutoff, + ) + .options(selectinload(Order.page_config)) + .limit(50) + ) + stale_orders = result.scalars().all() + + if not stale_orders: + return + + log.info("Reconciling %d stale pending orders", len(stale_orders)) + for order in stale_orders: + try: + await check_sumup_status(sess, order) + log.info( + "Order %d reconciled: %s", + order.id, 
order.status, + ) + except Exception: + log.exception("Failed to reconcile order %d", order.id) + except Exception: + log.exception("Order reconciliation failed") + + return app + + +app = create_app() diff --git a/cart/bp/__init__.py b/cart/bp/__init__.py new file mode 100644 index 0000000..e75b584 --- /dev/null +++ b/cart/bp/__init__.py @@ -0,0 +1,6 @@ +from .cart.overview_routes import register as register_cart_overview +from .cart.page_routes import register as register_page_cart +from .cart.global_routes import register as register_cart_global +from .order.routes import register as register_order +from .orders.routes import register as register_orders +from .fragments import register_fragments diff --git a/cart/bp/cart/global_routes.py b/cart/bp/cart/global_routes.py new file mode 100644 index 0000000..ba2459f --- /dev/null +++ b/cart/bp/cart/global_routes.py @@ -0,0 +1,294 @@ +# bp/cart/global_routes.py — Global cart routes (webhook, return, add) + +from __future__ import annotations + +from quart import Blueprint, g, request, render_template, redirect, url_for, make_response +from sqlalchemy import select + +from shared.models.market import CartItem +from shared.models.order import Order +from shared.models.market_place import MarketPlace +from shared.services.registry import services +from .services import ( + current_cart_identity, + get_cart, + total, + get_calendar_cart_entries, + calendar_total, + get_ticket_cart_entries, + ticket_total, + check_sumup_status, +) +from .services.checkout import ( + find_or_create_cart_item, + create_order_from_cart, + resolve_page_config, + build_sumup_description, + build_sumup_reference, + build_webhook_url, + validate_webhook_secret, + get_order_with_details, +) +from shared.browser.app.payments.sumup import create_checkout as sumup_create_checkout +from shared.browser.app.csrf import csrf_exempt + + +def register(url_prefix: str) -> Blueprint: + bp = Blueprint("cart_global", __name__, url_prefix=url_prefix) + + 
@bp.post("/add//") + async def add_to_cart(product_id: int): + ident = current_cart_identity() + + cart_item = await find_or_create_cart_item( + g.s, + product_id, + ident["user_id"], + ident["session_id"], + ) + + if not cart_item: + return await make_response("Product not found", 404) + + if request.headers.get("HX-Request") == "true": + # Redirect to overview for HTMX + return redirect(url_for("cart_overview.overview")) + + return redirect(url_for("cart_overview.overview")) + + @bp.post("/quantity//") + async def update_quantity(product_id: int): + ident = current_cart_identity() + form = await request.form + count = int(form.get("count", 0)) + + filters = [ + CartItem.deleted_at.is_(None), + CartItem.product_id == product_id, + ] + if ident["user_id"] is not None: + filters.append(CartItem.user_id == ident["user_id"]) + else: + filters.append(CartItem.session_id == ident["session_id"]) + + existing = await g.s.scalar(select(CartItem).where(*filters)) + + if existing: + existing.quantity = max(count, 0) + await g.s.flush() + + resp = await make_response("", 200) + resp.headers["HX-Refresh"] = "true" + return resp + + @bp.post("/ticket-quantity/") + async def update_ticket_quantity(): + """Adjust reserved ticket count (+/- pattern, like products).""" + ident = current_cart_identity() + form = await request.form + entry_id = int(form.get("entry_id", 0)) + count = max(int(form.get("count", 0)), 0) + tt_raw = (form.get("ticket_type_id") or "").strip() + ticket_type_id = int(tt_raw) if tt_raw else None + + await services.calendar.adjust_ticket_quantity( + g.s, entry_id, count, + user_id=ident["user_id"], + session_id=ident["session_id"], + ticket_type_id=ticket_type_id, + ) + await g.s.flush() + + resp = await make_response("", 200) + resp.headers["HX-Refresh"] = "true" + return resp + + @bp.post("/delete//") + async def delete_item(product_id: int): + ident = current_cart_identity() + + filters = [ + CartItem.deleted_at.is_(None), + CartItem.product_id == 
product_id, + ] + if ident["user_id"] is not None: + filters.append(CartItem.user_id == ident["user_id"]) + else: + filters.append(CartItem.session_id == ident["session_id"]) + + existing = await g.s.scalar(select(CartItem).where(*filters)) + + if existing: + await g.s.delete(existing) + await g.s.flush() + + resp = await make_response("", 200) + resp.headers["HX-Refresh"] = "true" + return resp + + @bp.post("/checkout/") + async def checkout(): + """Legacy global checkout (for orphan items without page scope).""" + cart = await get_cart(g.s) + calendar_entries = await get_calendar_cart_entries(g.s) + tickets = await get_ticket_cart_entries(g.s) + + if not cart and not calendar_entries and not tickets: + return redirect(url_for("cart_overview.overview")) + + product_total = total(cart) or 0 + calendar_amount = calendar_total(calendar_entries) or 0 + ticket_amount = ticket_total(tickets) or 0 + cart_total = product_total + calendar_amount + ticket_amount + + if cart_total <= 0: + return redirect(url_for("cart_overview.overview")) + + try: + page_config = await resolve_page_config(g.s, cart, calendar_entries, tickets) + except ValueError as e: + html = await render_template( + "_types/cart/checkout_error.html", + order=None, + error=str(e), + ) + return await make_response(html, 400) + + ident = current_cart_identity() + order = await create_order_from_cart( + g.s, + cart, + calendar_entries, + ident.get("user_id"), + ident.get("session_id"), + product_total, + calendar_amount, + ticket_total=ticket_amount, + ) + + if page_config: + order.page_config_id = page_config.id + + redirect_url = url_for("cart_global.checkout_return", order_id=order.id, _external=True) + order.sumup_reference = build_sumup_reference(order.id, page_config=page_config) + description = build_sumup_description(cart, order.id, ticket_count=len(tickets)) + + webhook_base_url = url_for("cart_global.checkout_webhook", order_id=order.id, _external=True) + webhook_url = 
build_webhook_url(webhook_base_url) + + checkout_data = await sumup_create_checkout( + order, + redirect_url=redirect_url, + webhook_url=webhook_url, + description=description, + page_config=page_config, + ) + order.sumup_checkout_id = checkout_data.get("id") + order.sumup_status = checkout_data.get("status") + order.description = checkout_data.get("description") + + hosted_cfg = checkout_data.get("hosted_checkout") or {} + hosted_url = hosted_cfg.get("hosted_checkout_url") or checkout_data.get("hosted_checkout_url") + order.sumup_hosted_url = hosted_url + + await g.s.flush() + + if not hosted_url: + html = await render_template( + "_types/cart/checkout_error.html", + order=order, + error="No hosted checkout URL returned from SumUp.", + ) + return await make_response(html, 500) + + return redirect(hosted_url) + + @csrf_exempt + @bp.post("/checkout/webhook//") + async def checkout_webhook(order_id: int): + """Webhook endpoint for SumUp CHECKOUT_STATUS_CHANGED events.""" + if not validate_webhook_secret(request.args.get("token")): + return "", 204 + + try: + payload = await request.get_json() + except Exception: + payload = None + + if not isinstance(payload, dict): + return "", 204 + + if payload.get("event_type") != "CHECKOUT_STATUS_CHANGED": + return "", 204 + + checkout_id = payload.get("id") + if not checkout_id: + return "", 204 + + result = await g.s.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + return "", 204 + + if order.sumup_checkout_id and order.sumup_checkout_id != checkout_id: + return "", 204 + + try: + await check_sumup_status(g.s, order) + except Exception: + pass + + return "", 204 + + @bp.get("/checkout/return//") + async def checkout_return(order_id: int): + """Handle the browser returning from SumUp after payment.""" + order = await get_order_with_details(g.s, order_id) + + if not order: + html = await render_template( + "_types/cart/checkout_return.html", + order=None, + 
status="missing", + calendar_entries=[], + ) + return await make_response(html) + + # Resolve page/market slugs so product links render correctly + if order.page_config: + post = await services.blog.get_post_by_id(g.s, order.page_config.container_id) + if post: + g.page_slug = post.slug + result = await g.s.execute( + select(MarketPlace).where( + MarketPlace.container_type == "page", + MarketPlace.container_id == post.id, + MarketPlace.deleted_at.is_(None), + ).limit(1) + ) + mp = result.scalar_one_or_none() + if mp: + g.market_slug = mp.slug + + if order.sumup_checkout_id: + try: + await check_sumup_status(g.s, order) + except Exception: + pass + + status = (order.status or "pending").lower() + + calendar_entries = await services.calendar.get_entries_for_order(g.s, order.id) + order_tickets = await services.calendar.get_tickets_for_order(g.s, order.id) + await g.s.flush() + + html = await render_template( + "_types/cart/checkout_return.html", + order=order, + status=status, + calendar_entries=calendar_entries, + order_tickets=order_tickets, + ) + return await make_response(html) + + return bp diff --git a/cart/bp/cart/overview_routes.py b/cart/bp/cart/overview_routes.py new file mode 100644 index 0000000..15f9eb6 --- /dev/null +++ b/cart/bp/cart/overview_routes.py @@ -0,0 +1,31 @@ +# bp/cart/overview_routes.py — Cart overview (list of page carts) + +from __future__ import annotations + +from quart import Blueprint, render_template, make_response + +from shared.browser.app.utils.htmx import is_htmx_request +from .services import get_cart_grouped_by_page + + +def register(url_prefix: str) -> Blueprint: + bp = Blueprint("cart_overview", __name__, url_prefix=url_prefix) + + @bp.get("/") + async def overview(): + from quart import g + page_groups = await get_cart_grouped_by_page(g.s) + + if not is_htmx_request(): + html = await render_template( + "_types/cart/overview/index.html", + page_groups=page_groups, + ) + else: + html = await render_template( + 
"_types/cart/overview/_oob_elements.html", + page_groups=page_groups, + ) + return await make_response(html) + + return bp diff --git a/cart/bp/cart/page_routes.py b/cart/bp/cart/page_routes.py new file mode 100644 index 0000000..6526093 --- /dev/null +++ b/cart/bp/cart/page_routes.py @@ -0,0 +1,129 @@ +# bp/cart/page_routes.py — Per-page cart (view + checkout) + +from __future__ import annotations + +from quart import Blueprint, g, render_template, redirect, make_response, url_for + +from shared.browser.app.utils.htmx import is_htmx_request +from shared.browser.app.payments.sumup import create_checkout as sumup_create_checkout +from shared.config import config +from .services import ( + total, + calendar_total, + ticket_total, +) +from .services.page_cart import get_cart_for_page, get_calendar_entries_for_page, get_tickets_for_page +from .services.ticket_groups import group_tickets +from .services.checkout import ( + create_order_from_cart, + build_sumup_description, + build_sumup_reference, + build_webhook_url, +) +from .services import current_cart_identity + + +def register(url_prefix: str) -> Blueprint: + bp = Blueprint("page_cart", __name__, url_prefix=url_prefix) + + @bp.get("/") + async def page_view(): + post = g.page_post + cart = await get_cart_for_page(g.s, post.id) + cal_entries = await get_calendar_entries_for_page(g.s, post.id) + page_tickets = await get_tickets_for_page(g.s, post.id) + + ticket_groups = group_tickets(page_tickets) + + tpl_ctx = dict( + page_post=post, + page_config=getattr(g, "page_config", None), + cart=cart, + calendar_cart_entries=cal_entries, + ticket_cart_entries=page_tickets, + ticket_groups=ticket_groups, + total=total, + calendar_total=calendar_total, + ticket_total=ticket_total, + ) + + if not is_htmx_request(): + html = await render_template("_types/cart/page/index.html", **tpl_ctx) + else: + html = await render_template("_types/cart/page/_oob_elements.html", **tpl_ctx) + return await make_response(html) + + 
@bp.post("/checkout/") + async def page_checkout(): + post = g.page_post + page_config = getattr(g, "page_config", None) + + cart = await get_cart_for_page(g.s, post.id) + cal_entries = await get_calendar_entries_for_page(g.s, post.id) + page_tickets = await get_tickets_for_page(g.s, post.id) + + if not cart and not cal_entries and not page_tickets: + return redirect(url_for("page_cart.page_view")) + + product_total = total(cart) or 0 + calendar_amount = calendar_total(cal_entries) or 0 + ticket_amount = ticket_total(page_tickets) or 0 + cart_total = product_total + calendar_amount + ticket_amount + + if cart_total <= 0: + return redirect(url_for("page_cart.page_view")) + + # Create order scoped to this page + ident = current_cart_identity() + order = await create_order_from_cart( + g.s, + cart, + cal_entries, + ident.get("user_id"), + ident.get("session_id"), + product_total, + calendar_amount, + ticket_total=ticket_amount, + page_post_id=post.id, + ) + + # Set page_config on order + if page_config: + order.page_config_id = page_config.id + + # Build SumUp checkout details — webhook/return use global routes + redirect_url = url_for("cart_global.checkout_return", order_id=order.id, _external=True) + order.sumup_reference = build_sumup_reference(order.id, page_config=page_config) + description = build_sumup_description(cart, order.id, ticket_count=len(page_tickets)) + + webhook_base_url = url_for("cart_global.checkout_webhook", order_id=order.id, _external=True) + webhook_url = build_webhook_url(webhook_base_url) + + checkout_data = await sumup_create_checkout( + order, + redirect_url=redirect_url, + webhook_url=webhook_url, + description=description, + page_config=page_config, + ) + order.sumup_checkout_id = checkout_data.get("id") + order.sumup_status = checkout_data.get("status") + order.description = checkout_data.get("description") + + hosted_cfg = checkout_data.get("hosted_checkout") or {} + hosted_url = hosted_cfg.get("hosted_checkout_url") or 
checkout_data.get("hosted_checkout_url") + order.sumup_hosted_url = hosted_url + + await g.s.flush() + + if not hosted_url: + html = await render_template( + "_types/cart/checkout_error.html", + order=order, + error="No hosted checkout URL returned from SumUp.", + ) + return await make_response(html, 500) + + return redirect(hosted_url) + + return bp diff --git a/cart/bp/cart/services/__init__.py b/cart/bp/cart/services/__init__.py new file mode 100644 index 0000000..8ba68b4 --- /dev/null +++ b/cart/bp/cart/services/__init__.py @@ -0,0 +1,13 @@ +from .get_cart import get_cart +from .identity import current_cart_identity +from .total import total +from .clear_cart_for_order import clear_cart_for_order +from .calendar_cart import get_calendar_cart_entries, calendar_total, get_ticket_cart_entries, ticket_total +from .check_sumup_status import check_sumup_status +from .page_cart import ( + get_cart_for_page, + get_calendar_entries_for_page, + get_tickets_for_page, + get_cart_grouped_by_page, +) + diff --git a/cart/bp/cart/services/calendar_cart.py b/cart/bp/cart/services/calendar_cart.py new file mode 100644 index 0000000..febd778 --- /dev/null +++ b/cart/bp/cart/services/calendar_cart.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +from decimal import Decimal + +from shared.services.registry import services +from .identity import current_cart_identity + + +async def get_calendar_cart_entries(session): + """ + Return all *pending* calendar entries (as CalendarEntryDTOs) for the + current cart identity (user or anonymous session). + """ + ident = current_cart_identity() + return await services.calendar.pending_entries( + session, + user_id=ident["user_id"], + session_id=ident["session_id"], + ) + + +def calendar_total(entries) -> Decimal: + """ + Total cost of pending calendar entries. 
+ """ + return sum( + (Decimal(str(e.cost)) if e.cost else Decimal(0)) + for e in entries + if e.cost is not None + ) + + +async def get_ticket_cart_entries(session): + """Return all reserved tickets (as TicketDTOs) for the current identity.""" + ident = current_cart_identity() + return await services.calendar.pending_tickets( + session, + user_id=ident["user_id"], + session_id=ident["session_id"], + ) + + +def ticket_total(tickets) -> Decimal: + """Total cost of reserved tickets.""" + return sum((Decimal(str(t.price)) if t.price else Decimal(0) for t in tickets), Decimal(0)) diff --git a/cart/bp/cart/services/check_sumup_status.py b/cart/bp/cart/services/check_sumup_status.py new file mode 100644 index 0000000..269a03d --- /dev/null +++ b/cart/bp/cart/services/check_sumup_status.py @@ -0,0 +1,43 @@ +from shared.browser.app.payments.sumup import get_checkout as sumup_get_checkout +from shared.events import emit_activity +from shared.services.registry import services +from .clear_cart_for_order import clear_cart_for_order + + +async def check_sumup_status(session, order): + # Use order's page_config for per-page SumUp credentials + page_config = getattr(order, "page_config", None) + checkout_data = await sumup_get_checkout(order.sumup_checkout_id, page_config=page_config) + order.sumup_status = checkout_data.get("status") or order.sumup_status + sumup_status = (order.sumup_status or "").upper() + + if sumup_status == "PAID": + if order.status != "paid": + order.status = "paid" + await services.calendar.confirm_entries_for_order( + session, order.id, order.user_id, order.session_id + ) + await services.calendar.confirm_tickets_for_order(session, order.id) + + # Clear cart only after payment is confirmed + page_post_id = page_config.container_id if page_config else None + await clear_cart_for_order(session, order, page_post_id=page_post_id) + + await emit_activity( + session, + activity_type="rose:OrderPaid", + actor_uri="internal:cart", + object_type="rose:Order", + 
object_data={ + "order_id": order.id, + "user_id": order.user_id, + }, + source_type="order", + source_id=order.id, + ) + elif sumup_status == "FAILED": + order.status = "failed" + else: + order.status = sumup_status.lower() or order.status + + await session.flush() diff --git a/cart/bp/cart/services/checkout.py b/cart/bp/cart/services/checkout.py new file mode 100644 index 0000000..0db306b --- /dev/null +++ b/cart/bp/cart/services/checkout.py @@ -0,0 +1,248 @@ +from __future__ import annotations + +from typing import Optional +from urllib.parse import urlencode + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from shared.models.market import Product, CartItem +from shared.models.order import Order, OrderItem +from shared.models.page_config import PageConfig +from shared.models.market_place import MarketPlace +from shared.config import config +from shared.contracts.dtos import CalendarEntryDTO +from shared.events import emit_activity +from shared.services.registry import services + + +async def find_or_create_cart_item( + session: AsyncSession, + product_id: int, + user_id: Optional[int], + session_id: Optional[str], +) -> Optional[CartItem]: + """ + Find an existing cart item for this product/identity, or create a new one. + Returns None if the product doesn't exist. + Increments quantity if item already exists. 
+ """ + # Make sure product exists + product = await session.scalar( + select(Product).where(Product.id == product_id) + ) + if not product: + return None + + # Look for existing cart item + filters = [ + CartItem.deleted_at.is_(None), + CartItem.product_id == product_id, + ] + if user_id is not None: + filters.append(CartItem.user_id == user_id) + else: + filters.append(CartItem.session_id == session_id) + + existing = await session.scalar(select(CartItem).where(*filters)) + + if existing: + existing.quantity += 1 + return existing + else: + cart_item = CartItem( + user_id=user_id, + session_id=session_id, + product_id=product.id, + quantity=1, + ) + session.add(cart_item) + return cart_item + + +async def resolve_page_config( + session: AsyncSession, + cart: list[CartItem], + calendar_entries: list[CalendarEntryDTO], + tickets=None, +) -> Optional["PageConfig"]: + """Determine the PageConfig for this order. + + Returns PageConfig or None (use global credentials). + Raises ValueError if items span multiple pages. 
+ """ + post_ids: set[int] = set() + + # From cart items via market_place + for ci in cart: + if ci.market_place_id: + mp = await session.get(MarketPlace, ci.market_place_id) + if mp: + post_ids.add(mp.container_id) + + # From calendar entries via calendar + for entry in calendar_entries: + if entry.calendar_container_id: + post_ids.add(entry.calendar_container_id) + + # From tickets via calendar_container_id + for tk in (tickets or []): + if tk.calendar_container_id: + post_ids.add(tk.calendar_container_id) + + if len(post_ids) > 1: + raise ValueError("Cannot checkout items from multiple pages") + + if not post_ids: + return None # global credentials + + post_id = post_ids.pop() + pc = (await session.execute( + select(PageConfig).where( + PageConfig.container_type == "page", + PageConfig.container_id == post_id, + ) + )).scalar_one_or_none() + return pc + + +async def create_order_from_cart( + session: AsyncSession, + cart: list[CartItem], + calendar_entries: list[CalendarEntryDTO], + user_id: Optional[int], + session_id: Optional[str], + product_total: float, + calendar_total: float, + *, + ticket_total: float = 0, + page_post_id: int | None = None, +) -> Order: + """ + Create an Order and OrderItems from the current cart + calendar entries + tickets. + + When *page_post_id* is given, only calendar entries/tickets whose calendar + belongs to that page are marked as "ordered". Otherwise all pending + entries are updated (legacy behaviour). 
+ """ + cart_total = product_total + calendar_total + ticket_total + + # Determine currency from first product + first_product = cart[0].product if cart else None + currency = (first_product.regular_price_currency if first_product else None) or "GBP" + + # Create order + order = Order( + user_id=user_id, + session_id=session_id, + status="pending", + currency=currency, + total_amount=cart_total, + ) + session.add(order) + await session.flush() + + # Create order items from cart + for ci in cart: + price = ci.product.special_price or ci.product.regular_price or 0 + oi = OrderItem( + order=order, + product_id=ci.product.id, + product_title=ci.product.title, + quantity=ci.quantity, + unit_price=price, + currency=currency, + ) + session.add(oi) + + # Mark pending calendar entries as "ordered" via calendar service + await services.calendar.claim_entries_for_order( + session, order.id, user_id, session_id, page_post_id + ) + + # Claim reserved tickets for this order + await services.calendar.claim_tickets_for_order( + session, order.id, user_id, session_id, page_post_id + ) + + await emit_activity( + session, + activity_type="Create", + actor_uri="internal:cart", + object_type="rose:Order", + object_data={ + "order_id": order.id, + "user_id": user_id, + "session_id": session_id, + }, + source_type="order", + source_id=order.id, + ) + + return order + + +def build_sumup_description(cart: list[CartItem], order_id: int, *, ticket_count: int = 0) -> str: + """Build a human-readable description for SumUp checkout.""" + titles = [ci.product.title for ci in cart if ci.product and ci.product.title] + item_count = sum(ci.quantity for ci in cart) + + parts = [] + if titles: + if len(titles) <= 3: + parts.append(", ".join(titles)) + else: + parts.append(", ".join(titles[:3]) + f" + {len(titles) - 3} more") + if ticket_count: + parts.append(f"{ticket_count} ticket{'s' if ticket_count != 1 else ''}") + + summary = ", ".join(parts) if parts else "order items" + total_count = 
item_count + ticket_count + + return f"Order {order_id} ({total_count} item{'s' if total_count != 1 else ''}): {summary}" + + +def build_sumup_reference(order_id: int, page_config=None) -> str: + """Build a SumUp reference with configured prefix.""" + if page_config and page_config.sumup_checkout_prefix: + prefix = page_config.sumup_checkout_prefix + else: + sumup_cfg = config().get("sumup", {}) or {} + prefix = sumup_cfg.get("checkout_reference_prefix", "") + return f"{prefix}{order_id}" + + +def build_webhook_url(base_url: str) -> str: + """Add webhook secret token to URL if configured.""" + sumup_cfg = config().get("sumup", {}) or {} + webhook_secret = sumup_cfg.get("webhook_secret") + + if webhook_secret: + sep = "&" if "?" in base_url else "?" + return f"{base_url}{sep}{urlencode({'token': webhook_secret})}" + + return base_url + + +def validate_webhook_secret(token: Optional[str]) -> bool: + """Validate webhook token against configured secret.""" + sumup_cfg = config().get("sumup", {}) or {} + webhook_secret = sumup_cfg.get("webhook_secret") + + if not webhook_secret: + return True # No secret configured, allow all + + return token is not None and token == webhook_secret + + +async def get_order_with_details(session: AsyncSession, order_id: int) -> Optional[Order]: + """Fetch an order with items and calendar entries eagerly loaded.""" + result = await session.execute( + select(Order) + .options( + selectinload(Order.items).selectinload(OrderItem.product), + ) + .where(Order.id == order_id) + ) + return result.scalar_one_or_none() diff --git a/cart/bp/cart/services/clear_cart_for_order.py b/cart/bp/cart/services/clear_cart_for_order.py new file mode 100644 index 0000000..3643839 --- /dev/null +++ b/cart/bp/cart/services/clear_cart_for_order.py @@ -0,0 +1,37 @@ +from sqlalchemy import update, func, select + +from shared.models.market import CartItem +from shared.models.market_place import MarketPlace +from shared.models.order import Order + + +async def 
clear_cart_for_order(session, order: Order, *, page_post_id: int | None = None) -> None: + """ + Soft-delete CartItem rows belonging to this order's user_id/session_id. + + When *page_post_id* is given, only items whose market_place belongs to + that page are cleared. Otherwise all items are cleared (legacy behaviour). + """ + filters = [CartItem.deleted_at.is_(None)] + if order.user_id is not None: + filters.append(CartItem.user_id == order.user_id) + if order.session_id is not None: + filters.append(CartItem.session_id == order.session_id) + + if len(filters) == 1: + # no user_id/session_id on order – nothing to clear + return + + if page_post_id is not None: + mp_ids = select(MarketPlace.id).where( + MarketPlace.container_type == "page", + MarketPlace.container_id == page_post_id, + MarketPlace.deleted_at.is_(None), + ).scalar_subquery() + filters.append(CartItem.market_place_id.in_(mp_ids)) + + await session.execute( + update(CartItem) + .where(*filters) + .values(deleted_at=func.now()) + ) diff --git a/cart/bp/cart/services/get_cart.py b/cart/bp/cart/services/get_cart.py new file mode 100644 index 0000000..ad1c0ce --- /dev/null +++ b/cart/bp/cart/services/get_cart.py @@ -0,0 +1,25 @@ +from sqlalchemy import select +from sqlalchemy.orm import selectinload + +from shared.models.market import CartItem +from .identity import current_cart_identity + +async def get_cart(session): + ident = current_cart_identity() + + filters = [CartItem.deleted_at.is_(None)] + if ident["user_id"] is not None: + filters.append(CartItem.user_id == ident["user_id"]) + else: + filters.append(CartItem.session_id == ident["session_id"]) + + result = await session.execute( + select(CartItem) + .where(*filters) + .order_by(CartItem.created_at.desc()) + .options( + selectinload(CartItem.product), + selectinload(CartItem.market_place), + ) + ) + return result.scalars().all() diff --git a/cart/bp/cart/services/identity.py b/cart/bp/cart/services/identity.py new file mode 100644 index 
0000000..50ecb70 --- /dev/null +++ b/cart/bp/cart/services/identity.py @@ -0,0 +1,4 @@ +# Re-export from canonical shared location +from shared.infrastructure.cart_identity import CartIdentity, current_cart_identity + +__all__ = ["CartIdentity", "current_cart_identity"] diff --git a/cart/bp/cart/services/page_cart.py b/cart/bp/cart/services/page_cart.py new file mode 100644 index 0000000..ce59113 --- /dev/null +++ b/cart/bp/cart/services/page_cart.py @@ -0,0 +1,212 @@ +""" +Page-scoped cart queries. + +Groups cart items and calendar entries by their owning page (Post), +determined via CartItem.market_place.container_id and CalendarEntry.calendar.container_id +(where container_type == "page"). +""" +from __future__ import annotations + +from collections import defaultdict + +from sqlalchemy import select +from sqlalchemy.orm import selectinload + +from shared.models.market import CartItem +from shared.models.market_place import MarketPlace +from shared.models.page_config import PageConfig +from shared.services.registry import services +from .identity import current_cart_identity + + +async def get_cart_for_page(session, post_id: int) -> list[CartItem]: + """Return cart items scoped to a specific page (via MarketPlace.container_id).""" + ident = current_cart_identity() + + filters = [ + CartItem.deleted_at.is_(None), + MarketPlace.container_type == "page", + MarketPlace.container_id == post_id, + MarketPlace.deleted_at.is_(None), + ] + if ident["user_id"] is not None: + filters.append(CartItem.user_id == ident["user_id"]) + else: + filters.append(CartItem.session_id == ident["session_id"]) + + result = await session.execute( + select(CartItem) + .join(MarketPlace, CartItem.market_place_id == MarketPlace.id) + .where(*filters) + .order_by(CartItem.created_at.desc()) + .options( + selectinload(CartItem.product), + selectinload(CartItem.market_place), + ) + ) + return result.scalars().all() + + +async def get_calendar_entries_for_page(session, post_id: int): + """Return 
pending calendar entries (DTOs) scoped to a specific page.""" + ident = current_cart_identity() + return await services.calendar.entries_for_page( + session, post_id, + user_id=ident["user_id"], + session_id=ident["session_id"], + ) + + +async def get_tickets_for_page(session, post_id: int): + """Return reserved tickets (DTOs) scoped to a specific page.""" + ident = current_cart_identity() + return await services.calendar.tickets_for_page( + session, post_id, + user_id=ident["user_id"], + session_id=ident["session_id"], + ) + + +async def get_cart_grouped_by_page(session) -> list[dict]: + """ + Load all cart items + calendar entries for the current identity, + grouped by market_place (one card per market). + + Returns a list of dicts: + { + "post": Post | None, + "page_config": PageConfig | None, + "market_place": MarketPlace | None, + "cart_items": [...], + "calendar_entries": [...], + "product_count": int, + "product_total": float, + "calendar_count": int, + "calendar_total": float, + "total": float, + } + + Calendar entries (no market concept) attach to a page-level group. + Items without a market_place go in an orphan bucket (post=None). 
+ """ + from .get_cart import get_cart + from .calendar_cart import get_calendar_cart_entries, get_ticket_cart_entries + from .total import total as calc_product_total + from .calendar_cart import calendar_total as calc_calendar_total, ticket_total as calc_ticket_total + + cart_items = await get_cart(session) + cal_entries = await get_calendar_cart_entries(session) + all_tickets = await get_ticket_cart_entries(session) + + # Group cart items by market_place_id + market_groups: dict[int | None, dict] = {} + for ci in cart_items: + mp_id = ci.market_place_id if ci.market_place else None + if mp_id not in market_groups: + market_groups[mp_id] = { + "market_place": ci.market_place, + "post_id": ci.market_place.container_id if ci.market_place else None, + "cart_items": [], + "calendar_entries": [], + "tickets": [], + } + market_groups[mp_id]["cart_items"].append(ci) + + # Attach calendar entries to an existing market group for the same page, + # or create a page-level group if no market group exists for that page. 
+ page_to_market: dict[int | None, int | None] = {} + for mp_id, grp in market_groups.items(): + pid = grp["post_id"] + if pid is not None and pid not in page_to_market: + page_to_market[pid] = mp_id + + for ce in cal_entries: + pid = ce.calendar_container_id or None + if pid in page_to_market: + market_groups[page_to_market[pid]]["calendar_entries"].append(ce) + else: + # Create a page-level group for calendar-only entries + key = ("cal", pid) + if key not in market_groups: + market_groups[key] = { + "market_place": None, + "post_id": pid, + "cart_items": [], + "calendar_entries": [], + "tickets": [], + } + if pid is not None: + page_to_market[pid] = key + market_groups[key]["calendar_entries"].append(ce) + + # Attach tickets to page groups (via calendar_container_id) + for tk in all_tickets: + pid = tk.calendar_container_id or None + if pid in page_to_market: + market_groups[page_to_market[pid]]["tickets"].append(tk) + else: + key = ("tk", pid) + if key not in market_groups: + market_groups[key] = { + "market_place": None, + "post_id": pid, + "cart_items": [], + "calendar_entries": [], + "tickets": [], + } + if pid is not None: + page_to_market[pid] = key + market_groups[key]["tickets"].append(tk) + + # Batch-load Post DTOs and PageConfig objects + post_ids = list({ + grp["post_id"] for grp in market_groups.values() + if grp["post_id"] is not None + }) + posts_by_id: dict[int, object] = {} + configs_by_post: dict[int, PageConfig] = {} + + if post_ids: + for p in await services.blog.get_posts_by_ids(session, post_ids): + posts_by_id[p.id] = p + + pc_result = await session.execute( + select(PageConfig).where( + PageConfig.container_type == "page", + PageConfig.container_id.in_(post_ids), + ) + ) + for pc in pc_result.scalars().all(): + configs_by_post[pc.container_id] = pc + + # Build result list (markets with pages first, orphan last) + result = [] + for _key, grp in sorted( + market_groups.items(), + key=lambda kv: (kv[1]["post_id"] is None, kv[1]["post_id"] or 
0), + ): + items = grp["cart_items"] + entries = grp["calendar_entries"] + tks = grp["tickets"] + prod_total = calc_product_total(items) or 0 + cal_total = calc_calendar_total(entries) or 0 + tk_total = calc_ticket_total(tks) or 0 + pid = grp["post_id"] + + result.append({ + "post": posts_by_id.get(pid) if pid else None, + "page_config": configs_by_post.get(pid) if pid else None, + "market_place": grp["market_place"], + "cart_items": items, + "calendar_entries": entries, + "tickets": tks, + "product_count": sum(ci.quantity for ci in items), + "product_total": prod_total, + "calendar_count": len(entries), + "calendar_total": cal_total, + "ticket_count": len(tks), + "ticket_total": tk_total, + "total": prod_total + cal_total + tk_total, + }) + + return result diff --git a/cart/bp/cart/services/ticket_groups.py b/cart/bp/cart/services/ticket_groups.py new file mode 100644 index 0000000..cd5d910 --- /dev/null +++ b/cart/bp/cart/services/ticket_groups.py @@ -0,0 +1,43 @@ +"""Group individual TicketDTOs by (entry_id, ticket_type_id) for cart display.""" +from __future__ import annotations + +from collections import OrderedDict + + +def group_tickets(tickets) -> list[dict]: + """ + Group a flat list of TicketDTOs into aggregate rows. 
+ + Returns list of dicts: + { + "entry_id": int, + "entry_name": str, + "entry_start_at": datetime, + "entry_end_at": datetime | None, + "ticket_type_id": int | None, + "ticket_type_name": str | None, + "price": Decimal | None, + "quantity": int, + "line_total": float, + } + """ + groups: OrderedDict[tuple, dict] = OrderedDict() + + for tk in tickets: + key = (tk.entry_id, getattr(tk, "ticket_type_id", None)) + if key not in groups: + groups[key] = { + "entry_id": tk.entry_id, + "entry_name": tk.entry_name, + "entry_start_at": tk.entry_start_at, + "entry_end_at": tk.entry_end_at, + "ticket_type_id": getattr(tk, "ticket_type_id", None), + "ticket_type_name": tk.ticket_type_name, + "price": tk.price, + "quantity": 0, + "line_total": 0, + } + groups[key]["quantity"] += 1 + groups[key]["line_total"] += float(tk.price or 0) + + return list(groups.values()) diff --git a/cart/bp/cart/services/total.py b/cart/bp/cart/services/total.py new file mode 100644 index 0000000..8dcdaf9 --- /dev/null +++ b/cart/bp/cart/services/total.py @@ -0,0 +1,13 @@ +from decimal import Decimal + + +def total(cart): + return sum( + ( + Decimal(str(item.product.special_price or item.product.regular_price)) + * item.quantity + ) + for item in cart + if (item.product.special_price or item.product.regular_price) is not None + ) + \ No newline at end of file diff --git a/cart/bp/fragments/__init__.py b/cart/bp/fragments/__init__.py new file mode 100644 index 0000000..a4af44b --- /dev/null +++ b/cart/bp/fragments/__init__.py @@ -0,0 +1 @@ +from .routes import register as register_fragments diff --git a/cart/bp/fragments/routes.py b/cart/bp/fragments/routes.py new file mode 100644 index 0000000..6724837 --- /dev/null +++ b/cart/bp/fragments/routes.py @@ -0,0 +1,70 @@ +"""Cart app fragment endpoints. + +Exposes HTML fragments at ``/internal/fragments/`` for consumption +by other coop apps via the fragment client. 
+ +Fragments: + cart-mini Cart icon with badge (or logo when empty) + account-nav-item "orders" link for account dashboard +""" + +from __future__ import annotations + +from quart import Blueprint, Response, request, render_template, g + +from shared.infrastructure.fragments import FRAGMENT_HEADER + + +def register(): + bp = Blueprint("fragments", __name__, url_prefix="/internal/fragments") + + # --------------------------------------------------------------- + # Fragment handlers + # --------------------------------------------------------------- + + async def _cart_mini(): + from shared.services.registry import services + + user_id = request.args.get("user_id", type=int) + session_id = request.args.get("session_id") + + summary = await services.cart.cart_summary( + g.s, user_id=user_id, session_id=session_id, + ) + count = summary.count + summary.calendar_count + summary.ticket_count + return await render_template("fragments/cart_mini.html", cart_count=count) + + async def _account_nav_item(): + from shared.infrastructure.urls import cart_url + + href = cart_url("/orders/") + return ( + '' + ) + + _handlers = { + "cart-mini": _cart_mini, + "account-nav-item": _account_nav_item, + } + + # --------------------------------------------------------------- + # Routing + # --------------------------------------------------------------- + + @bp.before_request + async def _require_fragment_header(): + if not request.headers.get(FRAGMENT_HEADER): + return Response("", status=403) + + @bp.get("/") + async def get_fragment(fragment_type: str): + handler = _handlers.get(fragment_type) + if handler is None: + return Response("", status=200, content_type="text/html") + html = await handler() + return Response(html, status=200, content_type="text/html") + + return bp diff --git a/cart/bp/order/filters/qs.py b/cart/bp/order/filters/qs.py new file mode 100644 index 0000000..03707e8 --- /dev/null +++ b/cart/bp/order/filters/qs.py @@ -0,0 +1,74 @@ +# 
suma_browser/app/bp/order/filters/qs.py +from quart import request + +from typing import Iterable, Optional, Union + +from shared.browser.app.filters.qs_base import KEEP, build_qs +from shared.browser.app.filters.query_types import OrderQuery + + +def decode() -> OrderQuery: + """ + Decode current query string into an OrderQuery(page, search). + """ + try: + page = int(request.args.get("page", 1) or 1) + except ValueError: + page = 1 + + search = request.args.get("search") or None + return OrderQuery(page, search) + + +def makeqs_factory(): + """ + Build a makeqs(...) that starts from the current filters + page. + + Behaviour: + - If filters change and you don't explicitly pass page, + the page is reset to 1 (same pattern as browse/blog). + - You can clear search with search=None. + """ + q = decode() + base_search = q.search or None + base_page = int(q.page or 1) + + def makeqs( + *, + clear_filters: bool = False, + search: Union[str, None, object] = KEEP, + page: Union[int, None, object] = None, + extra: Optional[Iterable[tuple]] = None, + leading_q: bool = True, + ) -> str: + filters_changed = False + + # --- search logic --- + if search is KEEP and not clear_filters: + final_search = base_search + else: + filters_changed = True + final_search = (search or None) + + # --- page logic --- + if page is None: + final_page = 1 if filters_changed else base_page + else: + final_page = page + + # --- build params --- + params: list[tuple[str, str]] = [] + + if final_search: + params.append(("search", final_search)) + if final_page is not None: + params.append(("page", str(final_page))) + + if extra: + for k, v in extra: + if v is not None: + params.append((k, str(v))) + + return build_qs(params, leading_q=leading_q) + + return makeqs diff --git a/cart/bp/order/routes.py b/cart/bp/order/routes.py new file mode 100644 index 0000000..b85087f --- /dev/null +++ b/cart/bp/order/routes.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +from quart import Blueprint, g, 
render_template, redirect, url_for, make_response +from sqlalchemy import select, func, or_, cast, String, exists +from sqlalchemy.orm import selectinload + + +from shared.models.market import Product +from shared.models.order import Order, OrderItem +from shared.browser.app.payments.sumup import create_checkout as sumup_create_checkout +from shared.config import config + +from shared.infrastructure.http_utils import vary as _vary, current_url_without_page as _current_url_without_page +from bp.cart.services import check_sumup_status +from shared.browser.app.utils.htmx import is_htmx_request + +from .filters.qs import makeqs_factory, decode + + +def register() -> Blueprint: + bp = Blueprint("order", __name__, url_prefix='/') + + ORDERS_PER_PAGE = 10 # keep in sync with browse page size / your preference + + @bp.before_request + def route(): + # this is the crucial bit for the |qs filter + g.makeqs_factory = makeqs_factory + + @bp.get("/") + async def order_detail(order_id: int): + """ + Show a single order + items. + """ + result = await g.s.execute( + select(Order) + .options( + selectinload(Order.items).selectinload(OrderItem.product) + ) + .where(Order.id == order_id) + ) + order = result.scalar_one_or_none() + if not order: + return await make_response("Order not found", 404) + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/order/index.html", order=order,) + else: + # HTMX navigation (page 1): main panel + OOB elements + html = await render_template("_types/order/_oob_elements.html", order=order,) + + return await make_response(html) + + @bp.get("/pay/") + async def order_pay(order_id: int): + """ + Re-open the SumUp payment page for this order. + If already paid, just go back to the order detail. + If not, (re)create a SumUp checkout and redirect. 
+ """ + result = await g.s.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + return await make_response("Order not found", 404) + + if order.status == "paid": + # Already paid; nothing to pay + return redirect(url_for("orders.order.order_detail", order_id=order.id)) + + # Prefer to reuse existing hosted URL if we have one + if order.sumup_hosted_url: + return redirect(order.sumup_hosted_url) + + # Otherwise, create a fresh checkout for this order + redirect_url = url_for("cart_global.checkout_return", order_id=order.id, _external=True) + + sumup_cfg = config().get("sumup", {}) or {} + webhook_secret = sumup_cfg.get("webhook_secret") + + webhook_url = url_for("cart_global.checkout_webhook", order_id=order.id, _external=True) + if webhook_secret: + from urllib.parse import urlencode + + sep = "&" if "?" in webhook_url else "?" + webhook_url = f"{webhook_url}{sep}{urlencode({'token': webhook_secret})}" + + checkout_data = await sumup_create_checkout( + order, + redirect_url=redirect_url, + webhook_url=webhook_url, + ) + + order.sumup_checkout_id = checkout_data.get("id") + order.sumup_status = checkout_data.get("status") + + hosted_cfg = checkout_data.get("hosted_checkout") or {} + hosted_url = hosted_cfg.get("hosted_checkout_url") or checkout_data.get("hosted_checkout_url") + order.sumup_hosted_url = hosted_url + + await g.s.flush() + + if not hosted_url: + html = await render_template( + "_types/cart/checkout_error.html", + order=order, + error="No hosted checkout URL returned from SumUp when trying to reopen payment.", + ) + return await make_response(html, 500) + + return redirect(hosted_url) + + @bp.post("/recheck/") + async def order_recheck(order_id: int): + """ + Manually re-check this order's status with SumUp. + Useful if the webhook hasn't fired or the user didn't return correctly. 
+ """ + result = await g.s.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + return await make_response("Order not found", 404) + + # If we don't have a checkout ID yet, nothing to query + if not order.sumup_checkout_id: + return redirect(url_for("orders.order.order_detail", order_id=order.id)) + + try: + await check_sumup_status(g.s, order) + except Exception: + # In a real app, log the error; here we just fall back to previous status + pass + + return redirect(url_for("orders.order.order_detail", order_id=order.id)) + + + return bp + diff --git a/cart/bp/orders/filters/qs.py b/cart/bp/orders/filters/qs.py new file mode 100644 index 0000000..984e2c3 --- /dev/null +++ b/cart/bp/orders/filters/qs.py @@ -0,0 +1,77 @@ +# suma_browser/app/bp/orders/filters/qs.py +from quart import request + +from typing import Iterable, Optional, Union + +from shared.browser.app.filters.qs_base import KEEP, build_qs +from shared.browser.app.filters.query_types import OrderQuery + + +def decode() -> OrderQuery: + """ + Decode current query string into an OrderQuery(page, search). + """ + try: + page = int(request.args.get("page", 1) or 1) + except ValueError: + page = 1 + + search = request.args.get("search") or None + return OrderQuery(page, search) + + +def makeqs_factory(): + """ + Build a makeqs(...) that starts from the current filters + page. + + Behaviour: + - If filters change and you don't explicitly pass page, + the page is reset to 1 (same pattern as browse/blog). + - You can clear search with search=None. 
+ """ + q = decode() + base_search = q.search or None + base_page = int(q.page or 1) + + def makeqs( + *, + clear_filters: bool = False, + search: Union[str, None, object] = KEEP, + page: Union[int, None, object] = None, + extra: Optional[Iterable[tuple]] = None, + leading_q: bool = True, + ) -> str: + filters_changed = False + + # --- search logic --- + if search is KEEP and not clear_filters: + final_search = base_search + else: + filters_changed = True + if search is KEEP: + final_search = None + else: + final_search = (search or None) + + # --- page logic --- + if page is None: + final_page = 1 if filters_changed else base_page + else: + final_page = page + + # --- build params --- + params: list[tuple[str, str]] = [] + + if final_search: + params.append(("search", final_search)) + if final_page is not None: + params.append(("page", str(final_page))) + + if extra: + for k, v in extra: + if v is not None: + params.append((k, str(v))) + + return build_qs(params, leading_q=leading_q) + + return makeqs diff --git a/cart/bp/orders/routes.py b/cart/bp/orders/routes.py new file mode 100644 index 0000000..e7363c2 --- /dev/null +++ b/cart/bp/orders/routes.py @@ -0,0 +1,151 @@ +from __future__ import annotations + +from quart import Blueprint, g, render_template, redirect, url_for, make_response +from sqlalchemy import select, func, or_, cast, String, exists +from sqlalchemy.orm import selectinload + + +from shared.models.market import Product +from shared.models.order import Order, OrderItem +from shared.browser.app.payments.sumup import create_checkout as sumup_create_checkout +from shared.config import config + +from shared.infrastructure.http_utils import vary as _vary, current_url_without_page as _current_url_without_page +from bp.cart.services import check_sumup_status +from shared.browser.app.utils.htmx import is_htmx_request +from bp import register_order + +from .filters.qs import makeqs_factory, decode + + +def register(url_prefix: str) -> Blueprint: + bp = 
Blueprint("orders", __name__, url_prefix=url_prefix) + bp.register_blueprint( + register_order(), + ) + ORDERS_PER_PAGE = 10 # keep in sync with browse page size / your preference + + oob = { + "extends": "_types/root/_index.html", + "child_id": "auth-header-child", + "header": "_types/auth/header/_header.html", + "nav": "_types/auth/_nav.html", + "main": "_types/auth/_main_panel.html", + } + + @bp.context_processor + def inject_oob(): + return {"oob": oob} + + @bp.before_request + def route(): + # this is the crucial bit for the |qs filter + g.makeqs_factory = makeqs_factory + + @bp.get("/") + async def list_orders(): + + # --- decode filters from query string (page + search) --- + q = decode() + page, search = q.page, q.search + + # sanity clamp page + if page < 1: + page = 1 + + # --- build where clause for search --- + where_clause = None + if search: + term = f"%{search.strip()}%" + conditions = [ + Order.status.ilike(term), + Order.currency.ilike(term), + Order.sumup_checkout_id.ilike(term), + Order.sumup_status.ilike(term), + Order.description.ilike(term), + ] + + conditions.append( + exists( + select(1) + .select_from(OrderItem) + .join(Product, Product.id == OrderItem.product_id) + .where( + OrderItem.order_id == Order.id, + or_( + OrderItem.product_title.ilike(term), + Product.title.ilike(term), + Product.description_short.ilike(term), + Product.description_html.ilike(term), + Product.slug.ilike(term), + Product.brand.ilike(term), + ), + ) + ) + ) + + # allow exact ID match or partial (string) match + try: + search_id = int(search) + except (TypeError, ValueError): + search_id = None + + if search_id is not None: + conditions.append(Order.id == search_id) + else: + conditions.append(cast(Order.id, String).ilike(term)) + + where_clause = or_(*conditions) + + # --- total count & total pages (respecting search) --- + count_stmt = select(func.count()).select_from(Order) + if where_clause is not None: + count_stmt = count_stmt.where(where_clause) + + 
total_count_result = await g.s.execute(count_stmt) + total_count = total_count_result.scalar_one() or 0 + total_pages = max(1, (total_count + ORDERS_PER_PAGE - 1) // ORDERS_PER_PAGE) + + # clamp page if beyond range (just in case) + if page > total_pages: + page = total_pages + + # --- paginated orders (respecting search) --- + offset = (page - 1) * ORDERS_PER_PAGE + stmt = ( + select(Order) + .order_by(Order.created_at.desc()) + .offset(offset) + .limit(ORDERS_PER_PAGE) + ) + if where_clause is not None: + stmt = stmt.where(where_clause) + + result = await g.s.execute(stmt) + orders = result.scalars().all() + + context = { + "orders": orders, + "page": page, + "total_pages": total_pages, + "search": search, + "search_count": total_count, # For search display + } + + # Determine which template to use based on request type and pagination + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/orders/index.html", **context) + elif page > 1: + # HTMX pagination: just table rows + sentinel + html = await render_template("_types/orders/_rows.html", **context) + else: + # HTMX navigation (page 1): main panel + OOB elements + html = await render_template("_types/orders/_oob_elements.html", **context) + + resp = await make_response(html) + resp.headers["Hx-Push-Url"] = _current_url_without_page() + return _vary(resp) + + return bp + diff --git a/cart/config/app-config.yaml b/cart/config/app-config.yaml new file mode 100644 index 0000000..3aa6a76 --- /dev/null +++ b/cart/config/app-config.yaml @@ -0,0 +1,84 @@ +# App-wide settings +base_host: "wholesale.suma.coop" +base_login: https://wholesale.suma.coop/customer/account/login/ +base_url: https://wholesale.suma.coop/ +title: Rose Ash +market_root: /market +market_title: Market +blog_root: / +blog_title: all the news +cart_root: /cart +app_urls: + blog: "http://localhost:8000" + market: "http://localhost:8001" + cart: "http://localhost:8002" + events: 
"http://localhost:8003" + federation: "http://localhost:8004" +cache: + fs_root: _snapshot # <- absolute path to your snapshot dir +categories: + allow: + Basics: basics + Branded Goods: branded-goods + Chilled: chilled + Frozen: frozen + Non-foods: non-foods + Supplements: supplements + Christmas: christmas +slugs: + skip: + - "" + - customer + - account + - checkout + - wishlist + - sales + - contact + - privacy-policy + - terms-and-conditions + - delivery + - catalogsearch + - quickorder + - apply + - search + - static + - media +section-titles: + - ingredients + - allergy information + - allergens + - nutritional information + - nutrition + - storage + - directions + - preparation + - serving suggestions + - origin + - country of origin + - recycling + - general information + - additional information + - a note about prices + +blacklist: + category: + - branded-goods/alcoholic-drinks + - branded-goods/beers + - branded-goods/wines + - branded-goods/ciders + product: + - list-price-suma-current-suma-price-list-each-bk012-2-html + - ---just-lem-just-wholefoods-jelly-crystals-lemon-12-x-85g-vf067-2-html + product-details: + - General Information + - A Note About Prices + +# SumUp payment settings (fill these in for live usage) +sumup: + merchant_code: "ME4J6100" + currency: "GBP" + # Name of the environment variable that holds your SumUp API key + api_key_env: "SUMUP_API_KEY" + webhook_secret: "CHANGE_ME_TO_A_LONG_RANDOM_STRING" + checkout_reference_prefix: 'dev-' + diff --git a/cart/entrypoint.sh b/cart/entrypoint.sh new file mode 100644 index 0000000..dc7838b --- /dev/null +++ b/cart/entrypoint.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Optional: wait for Postgres to be reachable +if [[ -n "${DATABASE_HOST:-}" && -n "${DATABASE_PORT:-}" ]]; then + echo "Waiting for Postgres at ${DATABASE_HOST}:${DATABASE_PORT}..." 
+ for i in {1..60}; do + (echo > /dev/tcp/${DATABASE_HOST}/${DATABASE_PORT}) >/dev/null 2>&1 && break || true + sleep 1 + done +fi + +# NOTE: Cart app does NOT run Alembic migrations. +# Migrations are managed by the blog app which owns the shared database schema. + +# Clear Redis page cache on deploy +if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then + echo "Flushing Redis cache..." + python3 -c " +import redis, os +r = redis.from_url(os.environ['REDIS_URL']) +r.flushall() +print('Redis cache cleared.') +" || echo "Redis flush failed (non-fatal), continuing..." +fi + +# Start the app +echo "Starting Hypercorn (${APP_MODULE:-app:app})..." +PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" --bind 0.0.0.0:${PORT:-8000} diff --git a/cart/models/__init__.py b/cart/models/__init__.py new file mode 100644 index 0000000..508c4b0 --- /dev/null +++ b/cart/models/__init__.py @@ -0,0 +1,2 @@ +from .order import Order, OrderItem +from .page_config import PageConfig diff --git a/cart/models/order.py b/cart/models/order.py new file mode 100644 index 0000000..93953fe --- /dev/null +++ b/cart/models/order.py @@ -0,0 +1 @@ +from shared.models.order import Order, OrderItem # noqa: F401 diff --git a/cart/models/page_config.py b/cart/models/page_config.py new file mode 100644 index 0000000..ec23c6d --- /dev/null +++ b/cart/models/page_config.py @@ -0,0 +1 @@ +from shared.models.page_config import PageConfig # noqa: F401 diff --git a/cart/path_setup.py b/cart/path_setup.py new file mode 100644 index 0000000..c7166f7 --- /dev/null +++ b/cart/path_setup.py @@ -0,0 +1,9 @@ +import sys +import os + +_app_dir = os.path.dirname(os.path.abspath(__file__)) +_project_root = os.path.dirname(_app_dir) + +for _p in (_project_root, _app_dir): + if _p not in sys.path: + sys.path.insert(0, _p) diff --git a/cart/services/__init__.py b/cart/services/__init__.py new file mode 100644 index 0000000..390cd88 --- /dev/null +++ b/cart/services/__init__.py @@ -0,0 +1,28 @@ +"""Cart app 
service registration.""" +from __future__ import annotations + + +def register_domain_services() -> None: + """Register services for the cart app. + + Cart owns: Order, OrderItem. + Standard deployment registers all 4 services as real DB impls + (shared DB). For composable deployments, swap non-owned services + with stubs from shared.services.stubs. + """ + from shared.services.registry import services + from shared.services.blog_impl import SqlBlogService + from shared.services.calendar_impl import SqlCalendarService + from shared.services.market_impl import SqlMarketService + from shared.services.cart_impl import SqlCartService + + services.cart = SqlCartService() + if not services.has("blog"): + services.blog = SqlBlogService() + if not services.has("calendar"): + services.calendar = SqlCalendarService() + if not services.has("market"): + services.market = SqlMarketService() + if not services.has("federation"): + from shared.services.federation_impl import SqlFederationService + services.federation = SqlFederationService() diff --git a/cart/templates/_types/auth/header/_header.html b/cart/templates/_types/auth/header/_header.html new file mode 100644 index 0000000..c59a712 --- /dev/null +++ b/cart/templates/_types/auth/header/_header.html @@ -0,0 +1,12 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='auth-row', oob=oob) %} + {% call links.link(account_url('/'), hx_select_search ) %} + +
    account
    + {% endcall %} + {% call links.desktop_nav() %} + {% include "_types/auth/_nav.html" %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/cart/templates/_types/auth/index.html b/cart/templates/_types/auth/index.html new file mode 100644 index 0000000..3c66bf1 --- /dev/null +++ b/cart/templates/_types/auth/index.html @@ -0,0 +1,18 @@ +{% extends oob.extends %} + + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row(oob.child_id, oob.header) %} + {% block auth_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include oob.nav %} +{% endblock %} + +{% block content %} + {% include oob.main %} +{% endblock %} diff --git a/cart/templates/_types/cart/_cart.html b/cart/templates/_types/cart/_cart.html new file mode 100644 index 0000000..30e3d22 --- /dev/null +++ b/cart/templates/_types/cart/_cart.html @@ -0,0 +1,260 @@ +{% macro show_cart(oob=False) %} +
    + {# Empty cart #} + {% if not cart and not calendar_cart_entries and not ticket_cart_entries %} +
    +
    + +
    +

    + Your cart is empty +

    + {# +

    + Add some items from the shop to see them here. +

    + #} +
    + + {% else %} + +
    + {# Items list #} +
    + {% for item in cart %} + {% from '_types/product/_cart.html' import cart_item with context %} + {{ cart_item()}} + {% endfor %} + {% if calendar_cart_entries %} +
    +

    + Calendar bookings +

    + +
      + {% for entry in calendar_cart_entries %} +
    • +
      +
      + {{ entry.name or entry.calendar_name }} +
      +
      + {{ entry.start_at }} + {% if entry.end_at %} + – {{ entry.end_at }} + {% endif %} +
      +
      +
      + £{{ "%.2f"|format(entry.cost or 0) }} +
      +
    • + {% endfor %} +
    +
    + {% endif %} + {% if ticket_groups is defined and ticket_groups %} +
    +

    + + Event tickets +

    + +
    + {% for tg in ticket_groups %} +
    +
    +
    +
    +

    + {{ tg.entry_name }} +

    + {% if tg.ticket_type_name %} +

    + {{ tg.ticket_type_name }} +

    + {% endif %} +

    + {{ tg.entry_start_at.strftime('%-d %b %Y, %H:%M') }} + {% if tg.entry_end_at %} + – {{ tg.entry_end_at.strftime('%-d %b %Y, %H:%M') }} + {% endif %} +

    +
    +
    +

    + £{{ "%.2f"|format(tg.price or 0) }} +

    +
    +
    + +
    +
    + Quantity + {% set qty_url = url_for('cart_global.update_ticket_quantity') %} + +
    + + + {% if tg.ticket_type_id %} + + {% endif %} + + +
    + + + {{ tg.quantity }} + + +
    + + + {% if tg.ticket_type_id %} + + {% endif %} + + +
    +
    + +
    +

    + Line total: + £{{ "%.2f"|format(tg.line_total) }} +

    +
    +
    +
    +
    + {% endfor %} +
    +
    + {% endif %} +
    + {{summary(cart, total, calendar_total, calendar_cart_entries, ticket_total, ticket_cart_entries)}} + +
    + + {% endif %} +
    +{% endmacro %} + + +{% macro summary(cart, total, calendar_total, calendar_cart_entries, ticket_total, ticket_cart_entries, oob=False) %} + +{% endmacro %} + +{% macro cart_total(cart, total) %} + {% set cart_total = total(cart) %} + {% if cart_total %} + {% set symbol = "£" if cart[0].product.regular_price_currency == "GBP" else cart[0].product.regular_price_currency %} + {{ symbol }}{{ "%.2f"|format(cart_total) }} + {% else %} + – + {% endif %} +{% endmacro %} + + +{% macro cart_grand_total(cart, total, calendar_total, calendar_cart_entries, ticket_total, ticket_cart_entries) %} + {% set product_total = total(cart) or 0 %} + {% set cal_total = calendar_total(calendar_cart_entries) or 0 %} + {% set tk_total = ticket_total(ticket_cart_entries) or 0 %} + {% set grand = product_total + cal_total + tk_total %} + + {% if cart and cart[0].product.regular_price_currency %} + {% set symbol = "£" if cart[0].product.regular_price_currency == "GBP" else cart[0].product.regular_price_currency %} + {% else %} + {% set symbol = "£" %} + {% endif %} + + {{ symbol }}{{ "%.2f"|format(grand) }} +{% endmacro %} \ No newline at end of file diff --git a/cart/templates/_types/cart/_main_panel.html b/cart/templates/_types/cart/_main_panel.html new file mode 100644 index 0000000..3872387 --- /dev/null +++ b/cart/templates/_types/cart/_main_panel.html @@ -0,0 +1,4 @@ +
    + {% from '_types/cart/_cart.html' import show_cart with context %} + {{ show_cart() }} +
    \ No newline at end of file diff --git a/cart/templates/_types/cart/_mini.html b/cart/templates/_types/cart/_mini.html new file mode 100644 index 0000000..a8255e4 --- /dev/null +++ b/cart/templates/_types/cart/_mini.html @@ -0,0 +1,45 @@ +{% macro mini(oob=False, count=None) %} +
    + {# cart_count is set by the context processor in all apps. + Cart app computes it from g.cart + calendar_cart_entries; + other apps get it from the cart internal API. + count param allows explicit override when macro is imported without context. #} + {% if count is not none %} + {% set _count = count %} + {% elif cart_count is defined and cart_count is not none %} + {% set _count = cart_count %} + {% elif cart is defined and cart is not none %} + {% set _count = (cart | sum(attribute="quantity")) + ((calendar_cart_entries | length) if calendar_cart_entries else 0) %} + {% else %} + {% set _count = 0 %} + {% endif %} + + {% if _count == 0 %} +
    + + + +
    + {% else %} + + + + + + {{ _count }} + + + {% endif %} +
    +{% endmacro %} diff --git a/cart/templates/_types/cart/_nav.html b/cart/templates/_types/cart/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/cart/templates/_types/cart/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/cart/templates/_types/cart/_oob_elements.html b/cart/templates/_types/cart/_oob_elements.html new file mode 100644 index 0000000..6e54a8b --- /dev/null +++ b/cart/templates/_types/cart/_oob_elements.html @@ -0,0 +1,28 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{# Header with app title - includes cart-mini, navigation, and market-specific header #} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-header-child', 'cart-header-child', '_types/cart/header/_header.html')}} + + {% from '_types/root/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/cart/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/cart/_main_panel.html" %} +{% endblock %} + + diff --git a/cart/templates/_types/cart/checkout_error.html b/cart/templates/_types/cart/checkout_error.html new file mode 100644 index 0000000..a15b1e9 --- /dev/null +++ b/cart/templates/_types/cart/checkout_error.html @@ -0,0 +1,38 @@ +{% extends '_types/root/index.html' %} + +{% block filter %} +
    +

    + Checkout error +

    +

    + We tried to start your payment with SumUp but hit a problem. +

    +
    +{% endblock %} + +{% block content %} +
    +
    +

    Something went wrong.

    +

    + {{ error or "Unexpected error while creating the hosted checkout session." }} +

    + {% if order %} +

    + Order ID: #{{ order.id }} +

    + {% endif %} +
    + + +
    +{% endblock %} diff --git a/cart/templates/_types/cart/checkout_return.html b/cart/templates/_types/cart/checkout_return.html new file mode 100644 index 0000000..b08a09d --- /dev/null +++ b/cart/templates/_types/cart/checkout_return.html @@ -0,0 +1,68 @@ +{% extends '_types/root/index.html' %} + +{% block filter %} +
    +
    +

    + {% if order.status == 'paid' %} + Payment received + {% elif order.status == 'failed' %} + Payment failed + {% elif order.status == 'missing' %} + Order not found + {% else %} + Payment status: {{ order.status|default('pending')|capitalize }} + {% endif %} +

    +

    + {% if order.status == 'paid' %} + Thanks for your order. + {% elif order.status == 'failed' %} + Something went wrong while processing your payment. You can try again below. + {% elif order.status == 'missing' %} + We couldn't find that order – it may have expired or never been created. + {% else %} + We’re still waiting for a final confirmation from SumUp. + {% endif %} +

    +
    + +
    +{% endblock %} + +{% block aside %} + {# no aside content for now #} +{% endblock %} + +{% block content %} +
    + {% if order %} +
    + {% include '_types/order/_summary.html' %} +
    + {% else %} +
    + We couldn’t find that order. If you reached this page from an old link, please start a new order. +
    + {% endif %} + {% include '_types/order/_items.html' %} + {% include '_types/order/_calendar_items.html' %} + {% include '_types/order/_ticket_items.html' %} + + {% if order.status == 'failed' and order %} +
    +

    Your payment was not completed.

    +

    + You can go back to your cart and try checkout again. If the problem persists, + please contact us and mention order #{{ order.id }}. +

    +
    + {% elif order.status == 'paid' %} +
    +

    All done!

    +

    We’ll start processing your order shortly.

    +
    + {% endif %} + +
    +{% endblock %} diff --git a/cart/templates/_types/cart/header/_header.html b/cart/templates/_types/cart/header/_header.html new file mode 100644 index 0000000..b5d913d --- /dev/null +++ b/cart/templates/_types/cart/header/_header.html @@ -0,0 +1,12 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='cart-row', oob=oob) %} + {% call links.link(cart_url('/'), hx_select_search ) %} + +

    cart

    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/cart/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/cart/templates/_types/cart/index.html b/cart/templates/_types/cart/index.html new file mode 100644 index 0000000..78570d9 --- /dev/null +++ b/cart/templates/_types/cart/index.html @@ -0,0 +1,22 @@ +{% extends '_types/root/_index.html' %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('cart-header-child', '_types/cart/header/_header.html') %} + {% block cart_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} +{% include '_types/cart/_nav.html' %} +{% endblock %} + + +{% block aside %} +{% endblock %} + +{% block content %} + {% include '_types/cart/_main_panel.html' %} +{% endblock %} diff --git a/cart/templates/_types/cart/overview/_main_panel.html b/cart/templates/_types/cart/overview/_main_panel.html new file mode 100644 index 0000000..0ac484e --- /dev/null +++ b/cart/templates/_types/cart/overview/_main_panel.html @@ -0,0 +1,147 @@ +
    + {% if not page_groups or (page_groups | length == 0) %} +
    +
    + +
    +

    + Your cart is empty +

    +
    + + {% else %} + {# Check if there are any items at all across all groups #} + {% set ns = namespace(has_items=false) %} + {% for grp in page_groups %} + {% if grp.cart_items or grp.calendar_entries or grp.get('tickets') %} + {% set ns.has_items = true %} + {% endif %} + {% endfor %} + + {% if not ns.has_items %} +
    +
    + +
    +

    + Your cart is empty +

    +
    + {% else %} +
    + {% for grp in page_groups %} + {% if grp.cart_items or grp.calendar_entries or grp.get('tickets') %} + + {% if grp.post %} + {# Market / page cart card #} + +
    + {% if grp.post.feature_image %} + {{ grp.post.title }} + {% else %} +
    + +
    + {% endif %} + +
    +

    + {% if grp.market_place %} + {{ grp.market_place.name }} + {% else %} + {{ grp.post.title }} + {% endif %} +

    + {% if grp.market_place %} +

    {{ grp.post.title }}

    + {% endif %} + +
    + {% if grp.product_count > 0 %} + + + {{ grp.product_count }} item{{ 's' if grp.product_count != 1 }} + + {% endif %} + {% if grp.calendar_count > 0 %} + + + {{ grp.calendar_count }} booking{{ 's' if grp.calendar_count != 1 }} + + {% endif %} + {% if grp.ticket_count is defined and grp.ticket_count > 0 %} + + + {{ grp.ticket_count }} ticket{{ 's' if grp.ticket_count != 1 }} + + {% endif %} +
    +
    + +
    +
    + £{{ "%.2f"|format(grp.total) }} +
    +
    + View cart → +
    +
    +
    +
    + + {% else %} + {# Orphan bucket (items without a page) #} +
    +
    +
    + +
    + +
    +

    + Other items +

    +
    + {% if grp.product_count > 0 %} + + + {{ grp.product_count }} item{{ 's' if grp.product_count != 1 }} + + {% endif %} + {% if grp.calendar_count > 0 %} + + + {{ grp.calendar_count }} booking{{ 's' if grp.calendar_count != 1 }} + + {% endif %} + {% if grp.ticket_count is defined and grp.ticket_count > 0 %} + + + {{ grp.ticket_count }} ticket{{ 's' if grp.ticket_count != 1 }} + + {% endif %} +
    +
    + +
    +
    + £{{ "%.2f"|format(grp.total) }} +
    +
    +
    +
    + {% endif %} + + {% endif %} + {% endfor %} +
    + {% endif %} + {% endif %} +
    diff --git a/cart/templates/_types/cart/overview/_oob_elements.html b/cart/templates/_types/cart/overview/_oob_elements.html new file mode 100644 index 0000000..af27fdc --- /dev/null +++ b/cart/templates/_types/cart/overview/_oob_elements.html @@ -0,0 +1,24 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for cart overview HTMX navigation #} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-header-child', 'cart-header-child', '_types/cart/header/_header.html')}} + + {% from '_types/root/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/cart/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/cart/overview/_main_panel.html" %} +{% endblock %} diff --git a/cart/templates/_types/cart/overview/index.html b/cart/templates/_types/cart/overview/index.html new file mode 100644 index 0000000..bf1faf0 --- /dev/null +++ b/cart/templates/_types/cart/overview/index.html @@ -0,0 +1,22 @@ +{% extends '_types/root/_index.html' %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('cart-header-child', '_types/cart/header/_header.html') %} + {% block cart_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} +{% include '_types/cart/_nav.html' %} +{% endblock %} + + +{% block aside %} +{% endblock %} + +{% block content %} + {% include '_types/cart/overview/_main_panel.html' %} +{% endblock %} diff --git a/cart/templates/_types/cart/page/_main_panel.html b/cart/templates/_types/cart/page/_main_panel.html new file mode 100644 index 0000000..7b62eb9 --- /dev/null +++ b/cart/templates/_types/cart/page/_main_panel.html @@ -0,0 +1,4 @@ +
    + {% from '_types/cart/_cart.html' import show_cart with context %} + {{ show_cart() }} +
    diff --git a/cart/templates/_types/cart/page/_oob_elements.html b/cart/templates/_types/cart/page/_oob_elements.html new file mode 100644 index 0000000..b5416fc --- /dev/null +++ b/cart/templates/_types/cart/page/_oob_elements.html @@ -0,0 +1,27 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for page cart HTMX navigation #} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('root-header-child', 'cart-header-child', '_types/cart/header/_header.html')}} + + {% from '_types/cart/page/header/_header.html' import page_header_row with context %} + {{ page_header_row(oob=True) }} + + {% from '_types/root/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/cart/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/cart/page/_main_panel.html" %} +{% endblock %} diff --git a/cart/templates/_types/cart/page/header/_header.html b/cart/templates/_types/cart/page/header/_header.html new file mode 100644 index 0000000..6afb1fb --- /dev/null +++ b/cart/templates/_types/cart/page/header/_header.html @@ -0,0 +1,25 @@ +{% import 'macros/links.html' as links %} +{% macro page_header_row(oob=False) %} + {% call links.menu_row(id='page-cart-row', oob=oob) %} + {% call links.link(cart_url('/' + page_post.slug + '/'), hx_select_search) %} + {% if page_post.feature_image %} + + {% endif %} + + {{ page_post.title | truncate(160, True, '...') }} + + {% endcall %} + {% call links.desktop_nav() %} + + + All carts + + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/cart/templates/_types/cart/page/index.html b/cart/templates/_types/cart/page/index.html new file mode 100644 index 0000000..4fa9814 --- /dev/null +++ b/cart/templates/_types/cart/page/index.html @@ -0,0 +1,24 @@ +{% extends '_types/root/_index.html' %} + +{% 
block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('cart-header-child', '_types/cart/header/_header.html') %} + {% block cart_header_child %} + {% from '_types/cart/page/header/_header.html' import page_header_row with context %} + {{ page_header_row() }} + {% endblock %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} +{% include '_types/cart/_nav.html' %} +{% endblock %} + + +{% block aside %} +{% endblock %} + +{% block content %} + {% include '_types/cart/page/_main_panel.html' %} +{% endblock %} diff --git a/cart/templates/_types/order/_calendar_items.html b/cart/templates/_types/order/_calendar_items.html new file mode 100644 index 0000000..019f048 --- /dev/null +++ b/cart/templates/_types/order/_calendar_items.html @@ -0,0 +1,43 @@ +{# --- NEW: calendar bookings in this order --- #} + {% if order and calendar_entries %} +
    +

    + Calendar bookings in this order +

    + +
      + {% for entry in calendar_entries %} +
    • +
      +
      + {{ entry.name }} + {# Small status pill #} + + {{ entry.state|capitalize }} + +
      +
      + {{ entry.start_at.strftime('%-d %b %Y, %H:%M') }} + {% if entry.end_at %} + – {{ entry.end_at.strftime('%-d %b %Y, %H:%M') }} + {% endif %} +
      +
      +
      + £{{ "%.2f"|format(entry.cost or 0) }} +
      +
    • + {% endfor %} +
    +
    + {% endif %} \ No newline at end of file diff --git a/cart/templates/_types/order/_items.html b/cart/templates/_types/order/_items.html new file mode 100644 index 0000000..27b2a9f --- /dev/null +++ b/cart/templates/_types/order/_items.html @@ -0,0 +1,51 @@ +{# Items list #} +{% if order and order.items %} + +{% endif %} \ No newline at end of file diff --git a/cart/templates/_types/order/_main_panel.html b/cart/templates/_types/order/_main_panel.html new file mode 100644 index 0000000..679b846 --- /dev/null +++ b/cart/templates/_types/order/_main_panel.html @@ -0,0 +1,7 @@ +
    + {# Order summary card #} + {% include '_types/order/_summary.html' %} + {% include '_types/order/_items.html' %} + {% include '_types/order/_calendar_items.html' %} + +
    \ No newline at end of file diff --git a/cart/templates/_types/order/_nav.html b/cart/templates/_types/order/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/cart/templates/_types/order/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/cart/templates/_types/order/_oob_elements.html b/cart/templates/_types/order/_oob_elements.html new file mode 100644 index 0000000..31d1e17 --- /dev/null +++ b/cart/templates/_types/order/_oob_elements.html @@ -0,0 +1,30 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{# Header with app title - includes cart-mini, navigation, and market-specific header #} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('orders-header-child', 'order-header-child', '_types/order/header/_header.html')}} + + {% from '_types/order/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/order/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/order/_main_panel.html" %} +{% endblock %} + + diff --git a/cart/templates/_types/order/_summary.html b/cart/templates/_types/order/_summary.html new file mode 100644 index 0000000..ffe560b --- /dev/null +++ b/cart/templates/_types/order/_summary.html @@ -0,0 +1,52 @@ +
    +

    + Order ID: + #{{ order.id }} +

    + +

    + Created: + {% if order.created_at %} + {{ order.created_at.strftime('%-d %b %Y, %H:%M') }} + {% else %} + — + {% endif %} +

    + +

    + Description: + {{ order.description or '–' }} +

    + +

    + Status: + + {{ order.status or 'pending' }} + +

    + +

    + Currency: + {{ order.currency or 'GBP' }} +

    + +

    + Total: + {% if order.total_amount %} + {{ order.currency or 'GBP' }} {{ '%.2f'|format(order.total_amount) }} + {% else %} + – + {% endif %} +

    + +
    + + \ No newline at end of file diff --git a/cart/templates/_types/order/_ticket_items.html b/cart/templates/_types/order/_ticket_items.html new file mode 100644 index 0000000..ef06c0b --- /dev/null +++ b/cart/templates/_types/order/_ticket_items.html @@ -0,0 +1,49 @@ +{# --- Tickets in this order --- #} + {% if order and order_tickets %} +
    +

    + Event tickets in this order +

    + +
      + {% for tk in order_tickets %} +
    • +
      +
      + {{ tk.entry_name }} + {# Small status pill #} + + {{ tk.state|replace('_', ' ')|capitalize }} + +
      + {% if tk.ticket_type_name %} +
      {{ tk.ticket_type_name }}
      + {% endif %} +
      + {{ tk.entry_start_at.strftime('%-d %b %Y, %H:%M') }} + {% if tk.entry_end_at %} + – {{ tk.entry_end_at.strftime('%-d %b %Y, %H:%M') }} + {% endif %} +
      +
      + {{ tk.code }} +
      +
      +
      + £{{ "%.2f"|format(tk.price or 0) }} +
      +
    • + {% endfor %} +
    +
    + {% endif %} \ No newline at end of file diff --git a/cart/templates/_types/order/header/_header.html b/cart/templates/_types/order/header/_header.html new file mode 100644 index 0000000..4d7f74b --- /dev/null +++ b/cart/templates/_types/order/header/_header.html @@ -0,0 +1,17 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='order-row', oob=oob) %} + {% call links.link(url_for('orders.order.order_detail', order_id=order.id), hx_select_search ) %} + +
    + Order +
    +
    + {{ order.id }} +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/order/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/cart/templates/_types/order/index.html b/cart/templates/_types/order/index.html new file mode 100644 index 0000000..c3d301e --- /dev/null +++ b/cart/templates/_types/order/index.html @@ -0,0 +1,68 @@ +{% extends '_types/orders/index.html' %} + + +{% block orders_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('order-header-child', '_types/order/header/_header.html') %} + {% block order_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/order/_nav.html' %} +{% endblock %} + + + +{% block filter %} +
    +
    +

    + Placed {% if order.created_at %}{{ order.created_at.strftime('%-d %b %Y, %H:%M') }}{% else %}—{% endif %} · Status: {{ order.status or 'pending' }} +

    +
    +
    + + + All orders + + + {# Re-check status button #} +
    + + +
    + + {% if order.status != 'paid' %} + + + Open payment page + + {% endif %} +
    +
    +{% endblock %} + +{% block content %} + {% include '_types/order/_main_panel.html' %} +{% endblock %} + +{% block aside %} +{% endblock %} diff --git a/cart/templates/_types/orders/_main_panel.html b/cart/templates/_types/orders/_main_panel.html new file mode 100644 index 0000000..01ad410 --- /dev/null +++ b/cart/templates/_types/orders/_main_panel.html @@ -0,0 +1,26 @@ +
    + {% if not orders %} +
    + No orders yet. +
    + {% else %} +
    + + + + + + + + + + + + + {# rows + infinite-scroll sentinel #} + {% include "_types/orders/_rows.html" %} + +
    OrderCreatedDescriptionTotalStatus
    +
    + {% endif %} +
    diff --git a/cart/templates/_types/orders/_nav.html b/cart/templates/_types/orders/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/cart/templates/_types/orders/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/cart/templates/_types/orders/_oob_elements.html b/cart/templates/_types/orders/_oob_elements.html new file mode 100644 index 0000000..741e8fa --- /dev/null +++ b/cart/templates/_types/orders/_oob_elements.html @@ -0,0 +1,38 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{# Header with app title - includes cart-mini, navigation, and market-specific header #} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('auth-header-child', 'orders-header-child', '_types/orders/header/_header.html')}} + + {% from '_types/auth/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% block aside %} + {% from 'macros/search.html' import search_desktop %} + {{ search_desktop(current_local_href, search, search_count, hx_select) }} +{% endblock %} + +{% block filter %} +{% include '_types/orders/_summary.html' %} +{% endblock %} + +{% block mobile_menu %} + {% include '_types/orders/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/orders/_main_panel.html" %} +{% endblock %} + + diff --git a/cart/templates/_types/orders/_rows.html b/cart/templates/_types/orders/_rows.html new file mode 100644 index 0000000..33a459c --- /dev/null +++ b/cart/templates/_types/orders/_rows.html @@ -0,0 +1,164 @@ +{# suma_browser/templates/_types/order/_orders_rows.html #} + +{# --- existing rows, 
but split into desktop/tablet vs mobile --- #} +{% for order in orders %} + {# Desktop / tablet table row #} + + + #{{ order.id }} + + + {% if order.created_at %} + {{ order.created_at.strftime('%-d %b %Y, %H:%M') }} + {% else %} + — + {% endif %} + + + {{ order.description or '' }} + + + + {{ order.currency or 'GBP' }} + {{ '%.2f'|format(order.total_amount or 0) }} + + + {# status pill, roughly matching existing styling #} + + {{ order.status or 'pending' }} + + + + + View + + + + + {# Mobile card row #} + + +
    +
    + + #{{ order.id }} + + + + {{ order.status or 'pending' }} + +
    + +
    + {{ order.created_at or '' }} +
    + +
    +
    + {{ order.currency or 'GBP' }} + {{ '%.2f'|format(order.total_amount or 0) }} +
    + + + View + +
    +
    + + +{% endfor %} + +{# --- sentinel / end-of-results --- #} +{% if page < total_pages|int %} + + + {# Mobile sentinel content #} +
    + {% include "sentinel/mobile_content.html" %} +
    + + {# Desktop sentinel content #} + + + +{% else %} + + + End of results + + +{% endif %} diff --git a/cart/templates/_types/orders/_summary.html b/cart/templates/_types/orders/_summary.html new file mode 100644 index 0000000..f812413 --- /dev/null +++ b/cart/templates/_types/orders/_summary.html @@ -0,0 +1,11 @@ +
    +
    +

    + Recent orders placed via the checkout. +

    +
    +
    + {% from 'macros/search.html' import search_mobile %} + {{ search_mobile(current_local_href, search, search_count, hx_select) }} +
    +
    \ No newline at end of file diff --git a/cart/templates/_types/orders/header/_header.html b/cart/templates/_types/orders/header/_header.html new file mode 100644 index 0000000..32c1659 --- /dev/null +++ b/cart/templates/_types/orders/header/_header.html @@ -0,0 +1,14 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='orders-row', oob=oob) %} + {% call links.link(url_for('orders.list_orders'), hx_select_search, ) %} + +
    + Orders +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/orders/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/cart/templates/_types/orders/index.html b/cart/templates/_types/orders/index.html new file mode 100644 index 0000000..7ee80a0 --- /dev/null +++ b/cart/templates/_types/orders/index.html @@ -0,0 +1,29 @@ +{% extends '_types/auth/index.html' %} + + +{% block auth_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('orders-header-child', '_types/orders/header/_header.html') %} + {% block orders_header_child %} + {% endblock %} + {% endcall %} + +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/orders/_nav.html' %} +{% endblock %} + +{% block aside %} + {% from 'macros/search.html' import search_desktop %} + {{ search_desktop(current_local_href, search, search_count, hx_select) }} +{% endblock %} + + +{% block filter %} + {% include '_types/orders/_summary.html' %} +{% endblock %} + +{% block content %} +{% include '_types/orders/_main_panel.html' %} +{% endblock %} diff --git a/cart/templates/_types/product/_cart.html b/cart/templates/_types/product/_cart.html new file mode 100644 index 0000000..2c68284 --- /dev/null +++ b/cart/templates/_types/product/_cart.html @@ -0,0 +1,250 @@ +{% macro add(slug, cart, oob='false') %} +{% set quantity = cart + | selectattr('product.slug', 'equalto', slug) + | sum(attribute='quantity') %} + +
    + + {% if not quantity %} +
    + + + + +
    + + {% else %} +
    + +
    + + + +
    + + + + + + + + + {{ quantity }} + + + + + + +
    + + + +
    +
    + {% endif %} +
    +{% endmacro %} + + + +{% macro cart_item(oob=False) %} + +{% set p = item.product %} +{% set unit_price = p.special_price or p.regular_price %} +
    +
    + {% if p.image %} + {{ p.title }} + {% else %} +
    + No image +
    'market', 'product', p.slug + {% endif %} +
    + + {# Details #} +
    +
    +
    +

    + {% set href=url_for('market.browse.product.product_detail', product_slug=p.slug) %} + + {{ p.title }} + +

    + + {% if p.brand %} +

    + {{ p.brand }} +

    + {% endif %} + + {% if item.is_deleted %} +

    + + This item is no longer available or price has changed +

    + {% endif %} +
    + + {# Unit price #} +
    + {% if unit_price %} + {% set symbol = "£" if p.regular_price_currency == "GBP" else p.regular_price_currency %} +

    + {{ symbol }}{{ "%.2f"|format(unit_price) }} +

    + {% if p.special_price and p.special_price != p.regular_price %} +

    + {{ symbol }}{{ "%.2f"|format(p.regular_price) }} +

    + {% endif %} + {% else %} +

    No price

    + {% endif %} +
    +
    + +
    +
    + Quantity +
    + + + +
    + + {{ item.quantity }} + +
    + + + +
    +
    + +
    + {% if unit_price %} + {% set line_total = unit_price * item.quantity %} + {% set symbol = "£" if p.regular_price_currency == "GBP" else p.regular_price_currency %} +

    + Line total: + {{ symbol }}{{ "%.2f"|format(line_total) }} +

    + {% endif %} +
    +
    +
    +
    + +{% endmacro %} diff --git a/cart/templates/fragments/cart_mini.html b/cart/templates/fragments/cart_mini.html new file mode 100644 index 0000000..4725a02 --- /dev/null +++ b/cart/templates/fragments/cart_mini.html @@ -0,0 +1,27 @@ +
    + {% if cart_count == 0 %} +
    + + + +
    + {% else %} + + + + {{ cart_count }} + + + {% endif %} +
    diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..cb4b741 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,175 @@ +x-app-common: &app-common + networks: + appnet: + externalnet: + deploy: + placement: + constraints: + - node.labels.gpu != true + volumes: + - /root/rose-ash/_config/app-config.yaml:/app/config/app-config.yaml:ro + +x-app-env: &app-env + DATABASE_URL: postgresql+asyncpg://postgres:change-me@db:5432/appdb + ALEMBIC_DATABASE_URL: postgresql+psycopg://postgres:change-me@db:5432/appdb + SMTP_HOST: ${SMTP_HOST} + SMTP_PORT: ${SMTP_PORT} + MAIL_FROM: ${MAIL_FROM} + SMTP_USER: ${SMTP_USER} + SMTP_PASS: ${SMTP_PASS} + GHOST_API_URL: ${GHOST_API_URL} + GHOST_ADMIN_API_URL: ${GHOST_ADMIN_API_URL} + GHOST_PUBLIC_URL: ${GHOST_PUBLIC_URL} + GHOST_CONTENT_API_KEY: ${GHOST_CONTENT_API_KEY} + GHOST_WEBHOOK_SECRET: ${GHOST_WEBHOOK_SECRET} + GHOST_ADMIN_API_KEY: ${GHOST_ADMIN_API_KEY} + REDIS_URL: redis://redis:6379 + SECRET_KEY: ${SECRET_KEY} + SUMUP_API_KEY: ${SUMUP_API_KEY} + APP_URL_BLOG: https://blog.rose-ash.com + APP_URL_MARKET: https://market.rose-ash.com + APP_URL_CART: https://cart.rose-ash.com + APP_URL_EVENTS: https://events.rose-ash.com + APP_URL_FEDERATION: https://federation.rose-ash.com + APP_URL_ACCOUNT: https://account.rose-ash.com + APP_URL_ARTDAG: https://celery-artdag.rose-ash.com + INTERNAL_URL_BLOG: http://blog:8000 + INTERNAL_URL_MARKET: http://market:8000 + INTERNAL_URL_CART: http://cart:8000 + INTERNAL_URL_EVENTS: http://events:8000 + INTERNAL_URL_FEDERATION: http://federation:8000 + INTERNAL_URL_ACCOUNT: http://account:8000 + AP_DOMAIN: federation.rose-ash.com + AP_DOMAIN_BLOG: blog.rose-ash.com + AP_DOMAIN_MARKET: market.rose-ash.com + AP_DOMAIN_EVENTS: events.rose-ash.com + EXTERNAL_INBOXES: "artdag|https://celery-artdag.rose-ash.com/inbox" + +services: + blog: + <<: *app-common + image: registry.rose-ash.com:5000/blog:latest + build: + context: . 
+ dockerfile: blog/Dockerfile + environment: + <<: *app-env + DATABASE_HOST: db + DATABASE_PORT: "5432" + RUN_MIGRATIONS: "true" + + market: + <<: *app-common + image: registry.rose-ash.com:5000/market:latest + build: + context: . + dockerfile: market/Dockerfile + volumes: + - /root/rose-ash/_config/app-config.yaml:/app/config/app-config.yaml:ro + - /root/rose-ash/_snapshot:/app/_snapshot + environment: + <<: *app-env + DATABASE_HOST: db + DATABASE_PORT: "5432" + + cart: + <<: *app-common + image: registry.rose-ash.com:5000/cart:latest + build: + context: . + dockerfile: cart/Dockerfile + environment: + <<: *app-env + DATABASE_HOST: db + DATABASE_PORT: "5432" + + events: + <<: *app-common + image: registry.rose-ash.com:5000/events:latest + build: + context: . + dockerfile: events/Dockerfile + environment: + <<: *app-env + DATABASE_HOST: db + DATABASE_PORT: "5432" + + federation: + <<: *app-common + image: registry.rose-ash.com:5000/federation:latest + build: + context: . + dockerfile: federation/Dockerfile + environment: + <<: *app-env + DATABASE_HOST: db + DATABASE_PORT: "5432" + + account: + <<: *app-common + image: registry.rose-ash.com:5000/account:latest + build: + context: . 
+ dockerfile: account/Dockerfile + environment: + <<: *app-env + DATABASE_HOST: db + DATABASE_PORT: "5432" + + db: + image: postgres:16 + environment: + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-change-me} + POSTGRES_DB: ${POSTGRES_DB:-appdb} + volumes: + - db_data_1:/var/lib/postgresql/data + networks: + appnet: + configs: + - source: schema_sql + target: /run/configs/schema_sql + mode: 0444 + deploy: + placement: + constraints: + - node.labels.gpu != true + + adminer: + image: adminer + networks: + appnet: + externalnet: + deploy: + placement: + constraints: + - node.labels.gpu != true + + redis: + image: redis:7-alpine + container_name: redis + volumes: + - redis_data:/data + networks: + appnet: + command: + redis-server + --maxmemory 256mb + --maxmemory-policy allkeys-lru + deploy: + placement: + constraints: + - node.labels.gpu != true + +volumes: + db_data_1: + redis_data: +networks: + appnet: + driver: overlay + externalnet: + driver: overlay + external: true +configs: + schema_sql: + file: ./schema.sql diff --git a/events/.gitignore b/events/.gitignore new file mode 100644 index 0000000..27275ba --- /dev/null +++ b/events/.gitignore @@ -0,0 +1,4 @@ +__pycache__/ +*.pyc +.env +node_modules/ diff --git a/events/Dockerfile b/events/Dockerfile new file mode 100644 index 0000000..90c5ad9 --- /dev/null +++ b/events/Dockerfile @@ -0,0 +1,49 @@ +# syntax=docker/dockerfile:1 + +FROM python:3.11-slim AS base + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONPATH=/app \ + PIP_NO_CACHE_DIR=1 \ + APP_PORT=8000 \ + APP_MODULE=app:app + +WORKDIR /app + +# Install system deps + psql client +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +COPY shared/requirements.txt ./requirements.txt +RUN pip install -r requirements.txt + +# Shared code (replaces submodule) +COPY shared/ ./shared/ + +# App code +COPY events/ ./ + +# 
Sibling models for cross-domain SQLAlchemy imports +COPY blog/__init__.py ./blog/__init__.py +COPY blog/models/ ./blog/models/ +COPY market/__init__.py ./market/__init__.py +COPY market/models/ ./market/models/ +COPY cart/__init__.py ./cart/__init__.py +COPY cart/models/ ./cart/models/ +COPY federation/__init__.py ./federation/__init__.py +COPY federation/models/ ./federation/models/ +COPY account/__init__.py ./account/__init__.py +COPY account/models/ ./account/models/ + +# ---------- Runtime setup ---------- +COPY events/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh + +RUN useradd -m -u 10001 appuser && chown -R appuser:appuser /app +USER appuser + +EXPOSE ${APP_PORT} +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/events/README.md b/events/README.md new file mode 100644 index 0000000..5327685 --- /dev/null +++ b/events/README.md @@ -0,0 +1,78 @@ +# Events App + +Calendar and event booking service for the Rose Ash cooperative platform. Manages calendars, time slots, calendar entries (bookings), tickets, and ticket types. 
+ +## Architecture + +One of five Quart microservices sharing a single PostgreSQL database: + +| App | Port | Domain | +|-----|------|--------| +| blog (coop) | 8000 | Auth, blog, admin, menus, snippets | +| market | 8001 | Product browsing, Suma scraping | +| cart | 8002 | Shopping cart, checkout, orders | +| **events** | 8003 | Calendars, bookings, tickets | +| federation | 8004 | ActivityPub, fediverse social | + +## Structure + +``` +app.py # Application factory (create_base_app + blueprints) +path_setup.py # Adds project root + app dir to sys.path +config/app-config.yaml # App URLs, feature flags +models/ # Events-domain models + calendars.py # Calendar, CalendarEntry, CalendarSlot, + # TicketType, Ticket, CalendarEntryPost +bp/ # Blueprints + calendars/ # Calendar listing + calendar/ # Single calendar view and admin + calendar_entries/ # Calendar entries listing + calendar_entry/ # Single entry view and admin + day/ # Day view and admin + slots/ # Slot listing + slot/ # Single slot management + ticket_types/ # Ticket type listing + ticket_type/ # Single ticket type management + tickets/ # Ticket listing + ticket_admin/ # Ticket administration + markets/ # Page-scoped marketplace views + payments/ # Payment-related views +services/ # register_domain_services() — wires calendar + market + cart +shared/ # Submodule -> git.rose-ash.com/coop/shared.git +``` + +## Models + +All events-domain models live in `models/calendars.py`: + +| Model | Description | +|-------|-------------| +| **Calendar** | Container for entries, scoped to a page via `container_type + container_id` | +| **CalendarEntry** | A bookable event/time slot. 
Has `state` (pending/ordered/provisional), `cost`, ownership (`user_id`/`session_id`), and `order_id` (plain integer, no FK) | +| **CalendarSlot** | Recurring time bands (day-of-week + time range) within a calendar | +| **TicketType** | Named ticket categories with price and count | +| **Ticket** | Individual ticket with unique code, state, and `order_id` (plain integer, no FK) | +| **CalendarEntryPost** | Junction linking entries to content via `content_type + content_id` | + +`order_id` on CalendarEntry and Ticket is a plain integer column — no FK constraint to the orders table. The cart app writes these values via service calls, not directly. + +## Cross-Domain Communication + +- `services.market.*` — marketplace queries for page views +- `services.cart.*` — cart summary for context processor +- `services.federation.*` — AP publishing for new entries +- `shared.services.navigation` — site navigation tree + +## Migrations + +This app does **not** run Alembic migrations on startup. Migrations are managed in the `shared/` submodule and run from the blog app's entrypoint. 
+ +## Running + +```bash +export DATABASE_URL_ASYNC=postgresql+asyncpg://user:pass@localhost/coop +export REDIS_URL=redis://localhost:6379/0 +export SECRET_KEY=your-secret-key + +hypercorn app:app --bind 0.0.0.0:8003 +``` diff --git a/events/__init__.py b/events/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/app.py b/events/app.py new file mode 100644 index 0000000..48bb697 --- /dev/null +++ b/events/app.py @@ -0,0 +1,154 @@ +from __future__ import annotations + +import path_setup # noqa: F401 # adds shared/ to sys.path +from pathlib import Path + +from quart import g, abort, request +from jinja2 import FileSystemLoader, ChoiceLoader + +from shared.infrastructure.factory import create_base_app + +from bp import register_all_events, register_calendars, register_markets, register_payments, register_page, register_fragments + + +async def events_context() -> dict: + """ + Events app context processor. + + - nav_tree_html: fetched from blog as fragment + - cart_count/cart_total: via cart service (shared DB) + """ + from shared.infrastructure.context import base_context + from shared.services.navigation import get_navigation_tree + from shared.services.registry import services + from shared.infrastructure.cart_identity import current_cart_identity + from shared.infrastructure.fragments import fetch_fragment + + ctx = await base_context() + + ctx["nav_tree_html"] = await fetch_fragment( + "blog", "nav-tree", + params={"app_name": "events", "path": request.path}, + ) + # Fallback for _nav.html when nav-tree fragment fetch fails + ctx["menu_items"] = await get_navigation_tree(g.s) + + # Cart data via service (replaces cross-app HTTP API) + ident = current_cart_identity() + summary = await services.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + ctx["cart_count"] = summary.count + summary.calendar_count + summary.ticket_count + ctx["cart_total"] = float(summary.total + summary.calendar_total + 
summary.ticket_total) + + return ctx + + +def create_app() -> "Quart": + from shared.services.registry import services + from services import register_domain_services + + app = create_base_app( + "events", + context_fn=events_context, + domain_services_fn=register_domain_services, + ) + + # App-specific templates override shared templates + app_templates = str(Path(__file__).resolve().parent / "templates") + app.jinja_loader = ChoiceLoader([ + FileSystemLoader(app_templates), + app.jinja_loader, + ]) + + # All events: / — global view across all pages + app.register_blueprint( + register_all_events(), + url_prefix="/", + ) + + # Page summary: // — upcoming events across all calendars + app.register_blueprint( + register_page(), + url_prefix="/", + ) + + # Calendars nested under post slug: //calendars/... + app.register_blueprint( + register_calendars(), + url_prefix="//calendars", + ) + + # Markets nested under post slug: //markets/... + app.register_blueprint( + register_markets(), + url_prefix="//markets", + ) + + # Payments nested under post slug: //payments/... 
+ app.register_blueprint( + register_payments(), + url_prefix="//payments", + ) + + app.register_blueprint(register_fragments()) + + # --- Auto-inject slug into url_for() calls --- + @app.url_value_preprocessor + def pull_slug(endpoint, values): + if values and "slug" in values: + g.post_slug = values.pop("slug") + + @app.url_defaults + def inject_slug(endpoint, values): + slug = g.get("post_slug") + if slug and "slug" not in values: + if app.url_map.is_endpoint_expecting(endpoint, "slug"): + values["slug"] = slug + + # --- Load post data for slug --- + @app.before_request + async def hydrate_post(): + slug = getattr(g, "post_slug", None) + if not slug: + return + post = await services.blog.get_post_by_slug(g.s, slug) + if not post: + abort(404) + g.post_data = { + "post": { + "id": post.id, + "title": post.title, + "slug": post.slug, + "feature_image": post.feature_image, + "status": post.status, + "visibility": post.visibility, + }, + } + + @app.context_processor + async def inject_post(): + post_data = getattr(g, "post_data", None) + if not post_data: + return {} + post_id = post_data["post"]["id"] + calendars = await services.calendar.calendars_for_container(g.s, "page", post_id) + markets = await services.market.marketplaces_for_container(g.s, "page", post_id) + return { + **post_data, + "calendars": calendars, + "markets": markets, + } + + # Tickets blueprint — user-facing ticket views and QR codes + from bp.tickets.routes import register as register_tickets + app.register_blueprint(register_tickets()) + + # Ticket admin — check-in interface (admin only) + from bp.ticket_admin.routes import register as register_ticket_admin + app.register_blueprint(register_ticket_admin()) + + return app + + +app = create_app() diff --git a/events/bp/__init__.py b/events/bp/__init__.py new file mode 100644 index 0000000..68e3b31 --- /dev/null +++ b/events/bp/__init__.py @@ -0,0 +1,6 @@ +from .all_events.routes import register as register_all_events +from .calendars.routes 
# --- events/bp/all_events/routes.py (reconstructed from diff text) ---
def register() -> Blueprint:
    """All-events blueprint: upcoming events across ALL pages' calendars.

    Mounted at the events-app root; independent of the post/slug machinery.

    Routes:
        GET  /                    full page with the first page of entries
        GET  /all-entries         HTMX fragment for infinite scroll
        POST /all-tickets/adjust  adjust a ticket quantity inline
    """
    bp = Blueprint("all_events", __name__)

    async def _load_entries(page, per_page=20):
        # One page of upcoming entries, plus per-entry pending-ticket counts
        # and {post_id: {title, slug}} info for the containing blog pages.
        entries, has_more = await services.calendar.upcoming_entries_for_container(
            g.s, page=page, per_page=per_page,
        )

        ident = current_cart_identity()
        pending_tickets = {}
        if entries:
            tickets = await services.calendar.pending_tickets(
                g.s, user_id=ident["user_id"], session_id=ident["session_id"],
            )
            for ticket in tickets:
                if ticket.entry_id is not None:
                    pending_tickets[ticket.entry_id] = pending_tickets.get(ticket.entry_id, 0) + 1

        page_info = {}
        if entries:
            post_ids = list({
                entry.calendar_container_id
                for entry in entries
                if entry.calendar_container_type == "page" and entry.calendar_container_id
            })
            if post_ids:
                for post in await services.blog.get_posts_by_ids(g.s, post_ids):
                    page_info[post.id] = {"title": post.title, "slug": post.slug}

        return entries, has_more, pending_tickets, page_info

    async def _view_context():
        # Shared query-arg parsing + data loading for the two GET routes.
        view = request.args.get("view", "list")
        page = int(request.args.get("page", 1))
        entries, has_more, pending_tickets, page_info = await _load_entries(page)
        return dict(
            entries=entries,
            has_more=has_more,
            pending_tickets=pending_tickets,
            page_info=page_info,
            page=page,
            view=view,
        )

    @bp.get("/")
    async def index():
        ctx = await _view_context()
        template = (
            "_types/all_events/_main_panel.html"
            if is_htmx_request()
            else "_types/all_events/index.html"
        )
        html = await render_template(template, **ctx)
        return await make_response(html, 200)

    @bp.get("/all-entries")
    async def entries_fragment():
        ctx = await _view_context()
        html = await render_template("_types/all_events/_cards.html", **ctx)
        return await make_response(html, 200)

    @bp.post("/all-tickets/adjust")
    async def adjust_ticket():
        """Adjust ticket quantity; return the widget plus an OOB cart-mini."""
        ident = current_cart_identity()
        form = await request.form
        entry_id = int(form.get("entry_id", 0))
        count = max(int(form.get("count", 0)), 0)
        raw_type = (form.get("ticket_type_id") or "").strip()
        ticket_type_id = int(raw_type) if raw_type else None

        await services.calendar.adjust_ticket_quantity(
            g.s, entry_id, count,
            user_id=ident["user_id"], session_id=ident["session_id"],
            ticket_type_id=ticket_type_id,
        )

        # Re-read pending tickets to get the updated count for this entry.
        tickets = await services.calendar.pending_tickets(
            g.s, user_id=ident["user_id"], session_id=ident["session_id"],
        )
        qty = sum(1 for t in tickets if t.entry_id == entry_id)

        # Entry DTO for the widget template.
        entry = await services.calendar.entry_by_id(g.s, entry_id)

        # Aggregate cart count for the out-of-band mini-cart swap.
        summary = await services.cart.cart_summary(
            g.s, user_id=ident["user_id"], session_id=ident["session_id"],
        )
        cart_count = summary.count + summary.calendar_count + summary.ticket_count

        widget_html = await render_template(
            "_types/page_summary/_ticket_widget.html",
            entry=entry,
            qty=qty,
            ticket_url="/all-tickets/adjust",
        )
        mini_html = await render_template_string(
            '{% from "_types/cart/_mini.html" import mini with context %}'
            '{{ mini(oob="true") }}',
            cart_count=cart_count,
        )
        return await make_response(widget_html + mini_html, 200)

    return bp


# --- events/bp/calendar/admin/routes.py (reconstructed from diff text) ---
def register():
    """Calendar admin blueprint, mounted at /admin under the calendar bp.

    All routes require admin and rely on g.calendar / g.post_data hydrated
    by the parent calendar blueprint.
    """
    bp = Blueprint("admin", __name__, url_prefix='/admin')

    # ---------- Pages ----------
    @bp.get("/")
    @require_admin
    async def admin(calendar_slug: str, **kwargs):
        from shared.browser.app.utils.htmx import is_htmx_request

        # HTMX gets the main panel + OOB elements; browsers get the full page.
        if is_htmx_request():
            html = await render_template("_types/calendar/admin/_oob_elements.html")
        else:
            html = await render_template("_types/calendar/admin/index.html")
        return await make_response(html)

    @bp.get("/description/")
    @require_admin
    async def calendar_description_edit(calendar_slug: str, **kwargs):
        # g.post_data and g.calendar are set by the parent calendar bp.
        html = await render_template(
            "_types/calendar/admin/_description_edit.html",
            post=g.post_data['post'],
            calendar=g.calendar,
        )
        return await make_response(html)

    @bp.post("/description/")
    @require_admin
    @clear_cache(tag="calendars", tag_scope="all")
    async def calendar_description_save(calendar_slug: str, **kwargs):
        form = await request.form
        description = (form.get("description") or "").strip() or None

        # Inline update on the hydrated ORM object; flush only, caller commits.
        g.calendar.description = description
        await g.s.flush()

        html = await render_template(
            "_types/calendar/admin/_description.html",
            post=g.post_data['post'],
            calendar=g.calendar,
            oob=True
        )
        return await make_response(html)

    @bp.get("/description/view/")
    @require_admin
    async def calendar_description_view(calendar_slug: str, **kwargs):
        # Display version without touching the DB (used by Cancel).
        html = await render_template(
            "_types/calendar/admin/_description.html",
            post=g.post_data['post'],
            calendar=g.calendar,
        )
        return await make_response(html)

    return bp
# --- events/bp/calendar/routes.py (reconstructed from diff text) ---
def register():
    """Calendar blueprint: month view plus admin/slots/day sub-blueprints.

    NOTE(review): the extracted diff shows url_prefix='/' with the
    angle-bracket placeholder stripped; every view receives
    ``calendar_slug`` and the preprocessor reads it from ``values``, so
    the prefix is restored as '/<calendar_slug>' — confirm upstream.
    """
    bp = Blueprint("calendar", __name__, url_prefix='/<calendar_slug>')

    bp.register_blueprint(register_admin())
    bp.register_blueprint(register_slots())
    bp.register_blueprint(register_day())

    @bp.url_value_preprocessor
    def pull(endpoint, values):
        g.calendar_slug = values.get("calendar_slug")

    @bp.before_request
    async def hydrate_calendar_data():
        # Resolve the calendar either within a post context (blog-embedded)
        # or standalone (events app); 404 when it does not exist.
        calendar_slug = getattr(g, "calendar_slug", None)
        post_data = getattr(g, "post_data", None)
        if post_data:
            post_id = (post_data.get("post") or {}).get("id")
            cal = await get_calendar_by_post_and_slug(g.s, post_id, calendar_slug)
        else:
            cal = await get_calendar_by_slug(g.s, calendar_slug)

        if not cal:
            abort(404)
            return

        g.calendar = cal

    @bp.context_processor
    async def inject_root():
        return {"calendar": getattr(g, "calendar", None)}

    # ---------- Pages ----------

    @bp.get("/")
    @cache_page(tag="calendars")
    async def get(calendar_slug: str, **kwargs):
        """Month-view calendar for this calendar.

        - One month at a time
        - Outer arrows: +/- 1 year
        - Inner arrows: +/- 1 month
        """
        today = datetime.now(timezone.utc).date()

        month = parse_int_arg("month")
        year = parse_int_arg("year")
        if year is None:
            year = today.year
        if month is None or not (1 <= month <= 12):
            month = today.month

        # Neighbouring months for navigation.
        prev_month_year, prev_month = add_months(year, month, -1)
        next_month_year, next_month = add_months(year, month, +1)

        weeks = build_calendar_weeks(year, month)

        # Period boundaries: [first of this month, first of next month).
        # Fix: reuse the add_months() result computed above instead of
        # recomputing it a second time.
        period_start = datetime(year, month, 1, tzinfo=timezone.utc)
        period_end = datetime(next_month_year, next_month, 1, tzinfo=timezone.utc)

        user = getattr(g, "user", None)
        session_id = qsession.get("calendar_sid")

        visible = await get_visible_entries_for_period(
            sess=g.s,
            calendar_id=g.calendar.id,
            period_start=period_start,
            period_end=period_end,
            user=user,
            session_id=session_id,
        )

        # Fix: the original duplicated this entire kwargs list for the
        # full-page and HTMX branches; build the context once.
        ctx = dict(
            qsession=qsession,
            year=year,
            month=month,
            month_name=pycalendar.month_name[month],
            weekday_names=[pycalendar.day_abbr[i] for i in range(7)],
            weeks=weeks,
            prev_month=prev_month,
            prev_month_year=prev_month_year,
            next_month=next_month,
            next_month_year=next_month_year,
            prev_year=year - 1,
            next_year=year + 1,
            user_entries=visible.user_entries,
            confirmed_entries=visible.confirmed_entries,
            month_entries=visible.merged_entries,
        )
        template = (
            "_types/calendar/_oob_elements.html"
            if is_htmx_request()
            else "_types/calendar/index.html"
        )
        html = await render_template(template, **ctx)
        return await make_response(html)

    @bp.put("/")
    @require_admin
    @clear_cache(tag="calendars", tag_scope="all")
    async def put(calendar_slug: str, **kwargs):
        """Idempotent update for calendar configuration.

        Accepts HTMX form (POST/PUT) and optional JSON.
        """
        data = await request.get_json(silent=True)
        if data and isinstance(data, dict):
            description = (data.get("description") or "").strip()
        else:
            form = await request.form
            description = (form.get("description") or "").strip()

        await update_calendar_description(g.calendar, description)
        html = await render_template("_types/calendar/admin/index.html")
        return await make_response(html, 200)

    @bp.delete("/")
    @require_admin
    @clear_cache(tag="calendars", tag_scope="all")
    async def delete(calendar_slug: str, **kwargs):
        """Soft-delete the calendar; in blog-embedded mode also return an
        OOB nav update for the owning post.

        (Removed an unused local import of is_htmx_request present in the
        original.)
        """
        g.calendar.deleted_at = datetime.now(timezone.utc)
        await g.s.flush()

        html = await render_template("_types/calendars/index.html")

        post_data = getattr(g, "post_data", None)
        if post_data:
            from ..post.services.entry_associations import get_associated_entries

            post_id = (post_data.get("post") or {}).get("id")
            cals = (
                await g.s.execute(
                    select(Calendar)
                    .where(
                        Calendar.container_type == "page",
                        Calendar.container_id == post_id,
                        Calendar.deleted_at.is_(None),
                    )
                    .order_by(Calendar.name.asc())
                )
            ).scalars().all()
            associated_entries = await get_associated_entries(g.s, post_id)

            nav_oob = await render_template(
                "_types/post/admin/_nav_entries_oob.html",
                associated_entries=associated_entries,
                calendars=cals,
                post=post_data["post"],
            )
            html = html + nav_oob

        return await make_response(html, 200)

    return bp


# --- events/bp/calendar/services/adopt_session_entries_for_user.py ---
async def adopt_session_entries_for_user(session, user_id: int, session_id: str | None) -> None:
    """Transfer anonymous (session-scoped) calendar entries to a user.

    NOTE(review): this first soft-deletes *all* of the user's existing
    live entries before adopting the session's entries — the original
    comment marks this "(Optional)" de-duplication; confirm it is the
    intended behaviour. No commit here; the caller commits.
    """
    if not session_id:
        return
    # Soft-delete the user's existing live entries to avoid duplicates.
    await session.execute(
        update(CalendarEntry)
        .where(CalendarEntry.deleted_at.is_(None), CalendarEntry.user_id == user_id)
        .values(deleted_at=func.now())
    )
    # Reassign anonymous (session-owned) entries to the user.
    result = await session.execute(
        select(CalendarEntry).where(
            CalendarEntry.deleted_at.is_(None),
            CalendarEntry.session_id == session_id
        )
    )
    for entry in result.scalars().all():
        entry.user_id = user_id
...calendars.services.calendars import CalendarError + +async def update_calendar_config(sess, calendar_id: int, *, description: str | None, slots: list | None): + """Update description and slots for a calendar.""" + cal = await sess.get(Calendar, calendar_id) + if not cal: + raise CalendarError(f"Calendar {calendar_id} not found.") + cal.description = (description or '').strip() or None + # Validate slots shape a bit + norm_slots: list[dict] = [] + if slots: + for s in slots: + if not isinstance(s, dict): + continue + norm_slots.append({ + "days": str(s.get("days", ""))[:7].lower(), + "time_from": str(s.get("time_from", ""))[:5], + "time_to": str(s.get("time_to", ""))[:5], + "cost_name": (s.get("cost_name") or "")[:64], + "description": (s.get("description") or "")[:255], + }) + cal.slots = norm_slots or None + await sess.flush() + return cal diff --git a/events/bp/calendar/services/calendar_view.py b/events/bp/calendar/services/calendar_view.py new file mode 100644 index 0000000..71fe331 --- /dev/null +++ b/events/bp/calendar/services/calendar_view.py @@ -0,0 +1,109 @@ +from __future__ import annotations + +from datetime import datetime, timezone +from typing import Optional +import calendar as pycalendar + +from quart import request +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload, with_loader_criteria + +from models.calendars import Calendar, CalendarSlot + +def parse_int_arg(name: str, default: Optional[int] = None) -> Optional[int]: + """Parse an integer query parameter from the request.""" + val = request.args.get(name, "").strip() + if not val: + return default + try: + return int(val) + except ValueError: + return default + + +def add_months(year: int, month: int, delta: int) -> tuple[int, int]: + """Add (or subtract) months to a given year/month, handling year overflow.""" + new_month = month + delta + new_year = year + (new_month - 1) // 12 + new_month = ((new_month - 1) % 12) + 1 + 
return new_year, new_month + + +def build_calendar_weeks(year: int, month: int) -> list[list[dict]]: + """ + Build a calendar grid for the given year and month. + Returns a list of weeks, where each week is a list of 7 day dictionaries. + """ + today = datetime.now(timezone.utc).date() + cal = pycalendar.Calendar(firstweekday=0) # 0 = Monday + weeks: list[list[dict]] = [] + + for week in cal.monthdatescalendar(year, month): + week_days = [] + for d in week: + week_days.append( + { + "date": d, + "in_month": (d.month == month), + "is_today": (d == today), + } + ) + weeks.append(week_days) + + return weeks + + +async def get_calendar_by_post_and_slug( + session: AsyncSession, + post_id: int, + calendar_slug: str, +) -> Optional[Calendar]: + """ + Fetch a calendar by post_id and slug, with slots eagerly loaded. + Returns None if not found. + """ + result = await session.execute( + select(Calendar) + .options( + selectinload(Calendar.slots), + with_loader_criteria(CalendarSlot, CalendarSlot.deleted_at.is_(None)), + ) + .where( + Calendar.container_type == "page", + Calendar.container_id == post_id, + Calendar.slug == calendar_slug, + Calendar.deleted_at.is_(None), + ) + ) + return result.scalar_one_or_none() + + +async def get_calendar_by_slug( + session: AsyncSession, + calendar_slug: str, +) -> Optional[Calendar]: + """ + Fetch a calendar by slug only (for standalone events service). + With slots eagerly loaded. Returns None if not found. 
+ """ + result = await session.execute( + select(Calendar) + .options( + selectinload(Calendar.slots), + with_loader_criteria(CalendarSlot, CalendarSlot.deleted_at.is_(None)), + ) + .where( + Calendar.slug == calendar_slug, + Calendar.deleted_at.is_(None), + ) + ) + return result.scalar_one_or_none() + + +async def update_calendar_description( + calendar: Calendar, + description: Optional[str], +) -> None: + """Update calendar description (in-place on the calendar object).""" + calendar.description = description or None diff --git a/events/bp/calendar/services/slots.py b/events/bp/calendar/services/slots.py new file mode 100644 index 0000000..4c40445 --- /dev/null +++ b/events/bp/calendar/services/slots.py @@ -0,0 +1,118 @@ + +from __future__ import annotations +from datetime import time +from typing import Sequence + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from models.calendars import CalendarSlot + + +class SlotError(ValueError): + pass + +def _b(v): + if isinstance(v, bool): + return v + s = str(v).lower() + return s in {"1","true","t","yes","y","on"} + +async def list_slots(sess: AsyncSession, calendar_id: int) -> Sequence[CalendarSlot]: + res = await sess.execute( + select(CalendarSlot) + .where(CalendarSlot.calendar_id == calendar_id, CalendarSlot.deleted_at.is_(None)) + .order_by(CalendarSlot.time_start.asc(), CalendarSlot.id.asc()) + ) + return res.scalars().all() + +async def create_slot(sess: AsyncSession, calendar_id: int, *, name: str, description: str | None, + days: dict, time_start: time, time_end: time, cost: float | None): + if not name: + raise SlotError("name is required") + if not time_start or not time_end or time_end <= time_start: + raise SlotError("time range invalid") + slot = CalendarSlot( + calendar_id=calendar_id, + name=name, + description=(description or None), + mon=_b(days.get("mon")), tue=_b(days.get("tue")), wed=_b(days.get("wed")), + thu=_b(days.get("thu")), fri=_b(days.get("fri")), 
sat=_b(days.get("sat")), sun=_b(days.get("sun")), + time_start=time_start, time_end=time_end, cost=cost, + ) + sess.add(slot) + await sess.flush() + return slot + +async def update_slot( + sess: AsyncSession, + slot_id: int, + *, + name: str | None = None, + description: str | None = None, + days: dict | None = None, + time_start: time | None = None, + time_end: time | None = None, + cost: float | None = None, + flexible: bool | None = None, # NEW +): + slot = await sess.get(CalendarSlot, slot_id) + if not slot or slot.deleted_at is not None: + raise SlotError("slot not found") + + if name is not None: + slot.name = name + + if description is not None: + slot.description = description or None + + if days is not None: + slot.mon = _b(days.get("mon", slot.mon)) + slot.tue = _b(days.get("tue", slot.tue)) + slot.wed = _b(days.get("wed", slot.wed)) + slot.thu = _b(days.get("thu", slot.thu)) + slot.fri = _b(days.get("fri", slot.fri)) + slot.sat = _b(days.get("sat", slot.sat)) + slot.sun = _b(days.get("sun", slot.sun)) + + if time_start is not None: + slot.time_start = time_start + if time_end is not None: + slot.time_end = time_end + + if (time_start or time_end) and slot.time_end <= slot.time_start: + raise SlotError("time range invalid") + + if cost is not None: + slot.cost = cost + + # NEW: update flexible flag only if explicitly provided + if flexible is not None: + slot.flexible = flexible + + await sess.flush() + return slot + +async def soft_delete_slot(sess: AsyncSession, slot_id: int): + slot = await sess.get(CalendarSlot, slot_id) + if not slot or slot.deleted_at is not None: + return + from datetime import datetime, timezone + slot.deleted_at = datetime.now(timezone.utc) + await sess.flush() + + +async def get_slot(sess: AsyncSession, slot_id: int) -> CalendarSlot | None: + return await sess.get(CalendarSlot, slot_id) + +async def update_slot_description( + sess: AsyncSession, + slot_id: int, + description: str | None, +) -> CalendarSlot: + slot = await 
# --- events/bp/calendar/services/visiblity.py (reconstructed; note the
#     filename typo "visiblity" is load-bearing via the package __init__) ---
@dataclass
class VisibleEntries:
    """Entry sets produced by the period-visibility rules.

    merged_entries is the deduplicated union, sorted chronologically.
    """
    user_entries: list["CalendarEntry"]
    confirmed_entries: list["CalendarEntry"]
    admin_other_entries: list["CalendarEntry"]
    merged_entries: list["CalendarEntry"]


async def get_visible_entries_for_period(
    sess: "AsyncSession",
    calendar_id: int,
    period_start: datetime,
    period_end: datetime,
    user: Optional[object],
    session_id: Optional[str],
) -> VisibleEntries:
    """Apply calendar visibility rules for one [period_start, period_end).

    - Non-admins see every *confirmed* entry (any user) plus all of their
      own user-/session-owned entries in any state.
    - Admins additionally see everyone's ordered/provisional entries;
      pending entries stay visible only to their owner.
    """
    user_id = user.id if user else None
    is_admin = bool(user and getattr(user, "is_admin", False))

    def _period_filters():
        # Fresh list each call so callers can append without aliasing.
        return [
            CalendarEntry.calendar_id == calendar_id,
            CalendarEntry.deleted_at.is_(None),
            CalendarEntry.start_at >= period_start,
            CalendarEntry.start_at < period_end,
        ]

    # --- Current user/session entries (any state, in period) ---
    user_entries: list = []
    if user_id or session_id:
        conds = _period_filters()
        if user_id:
            conds.append(CalendarEntry.user_id == user_id)
        elif session_id:
            conds.append(CalendarEntry.session_id == session_id)
        user_entries = (
            await sess.execute(select(CalendarEntry).where(*conds))
        ).scalars().all()

    # --- Confirmed entries for everyone in period ---
    confirmed_entries = (
        await sess.execute(
            select(CalendarEntry).where(
                CalendarEntry.state == "confirmed", *_period_filters()
            )
        )
    ).scalars().all()

    # --- Admins only: everyone's ordered + provisional in period ---
    admin_other_entries: list = []
    if is_admin:
        admin_other_entries = (
            await sess.execute(
                select(CalendarEntry).where(
                    CalendarEntry.state.in_(("ordered", "provisional")),
                    *_period_filters(),
                )
            )
        ).scalars().all()

    # --- Merge with de-duplication; later groups win on id collisions,
    #     and the current user's entries are always included. ---
    by_id: dict = {}
    for entry in confirmed_entries:
        by_id[entry.id] = entry
    for entry in admin_other_entries:
        by_id[entry.id] = entry
    for entry in user_entries:
        by_id[entry.id] = entry

    merged = sorted(by_id.values(), key=lambda e: e.start_at or period_start)

    return VisibleEntries(
        user_entries=user_entries,
        confirmed_entries=confirmed_entries,
        admin_other_entries=admin_other_entries,
        merged_entries=merged,
    )
def calculate_entry_cost(slot: CalendarSlot, start_at: datetime, end_at: datetime) -> Decimal:
    """Price a calendar entry against its slot.

    Fixed (non-flexible) slots charge the full slot cost regardless of the
    requested times.  Flexible slots charge pro-rata: booked minutes divided
    by the slot's own time band.  Missing cost, missing times on a flexible
    slot, or a non-positive duration all yield Decimal('0').
    """
    def minutes_of(moment) -> int:
        # Minutes since midnight; works for both time and datetime values.
        return moment.hour * 60 + moment.minute

    if not slot.cost:
        return Decimal('0')

    full_cost = Decimal(str(slot.cost))

    if not slot.flexible:
        # Fixed slot: the times play no part in the price.
        return full_cost

    if not slot.time_end or not start_at or not end_at:
        return Decimal('0')

    band_minutes = minutes_of(slot.time_end) - minutes_of(slot.time_start)
    booked_minutes = minutes_of(end_at) - minutes_of(start_at)

    if band_minutes <= 0 or booked_minutes <= 0:
        return Decimal('0')

    # NOTE(review): minute arithmetic assumes start/end fall on the same
    # calendar day as the slot band — confirm overnight slots are not a case.
    return full_cost * (Decimal(booked_minutes) / Decimal(band_minutes))
tag_scope="all") + async def add_entry(year: int, month: int, day: int, **kwargs): + form = await request.form + + def parse_time_to_dt(value: str | None, year: int, month: int, day: int): + if not value: + return None + try: + hour_str, minute_str = value.split(":", 1) + hour = int(hour_str) + minute = int(minute_str) + return datetime(year, month, day, hour, minute, tzinfo=timezone.utc) + except Exception: + return None + + name = (form.get("name") or "").strip() + start_at = parse_time_to_dt(form.get("start_time"), year, month, day) + end_at = parse_time_to_dt(form.get("end_time"), year, month, day) + + # NEW: slot_id + slot_id_raw = (form.get("slot_id") or "").strip() + slot_id = int(slot_id_raw) if slot_id_raw else None + + # Ticket configuration + ticket_price_str = (form.get("ticket_price") or "").strip() + ticket_price = None + if ticket_price_str: + try: + ticket_price = Decimal(ticket_price_str) + except Exception: + pass + + ticket_count_str = (form.get("ticket_count") or "").strip() + ticket_count = None + if ticket_count_str: + try: + ticket_count = int(ticket_count_str) + except Exception: + pass + + field_errors: dict[str, list[str]] = {} + + # Basic checks + if not name: + field_errors.setdefault("name", []).append("Please enter a name for the entry.") + + # Check slot first before validating times + slot = None + cost = Decimal('10') # default cost + + if slot_id is not None: + result = await g.s.execute( + select(CalendarSlot).where( + CalendarSlot.id == slot_id, + CalendarSlot.calendar_id == g.calendar.id, + CalendarSlot.deleted_at.is_(None), + ) + ) + slot = result.scalar_one_or_none() + if slot is None: + field_errors.setdefault("slot_id", []).append( + "Selected slot is no longer available." 
+ ) + else: + # For inflexible slots, override the times with slot times + if not slot.flexible: + # Replace start/end with slot times + start_at = datetime(year, month, day, + slot.time_start.hour, + slot.time_start.minute, + tzinfo=timezone.utc) + if slot.time_end: + end_at = datetime(year, month, day, + slot.time_end.hour, + slot.time_end.minute, + tzinfo=timezone.utc) + else: + # Flexible: validate times are within slot band + # Only validate if times were provided + if not start_at: + field_errors.setdefault("start_time", []).append("Please select a start time.") + if end_at is None: + field_errors.setdefault("end_time", []).append("Please select an end time.") + + if start_at and end_at: + s_time = start_at.timetz() + e_time = end_at.timetz() + slot_start = slot.time_start + slot_end = slot.time_end + + if s_time.replace(tzinfo=None) < slot_start: + field_errors.setdefault("start_time", []).append( + f"Start time must be at or after {slot_start.strftime('%H:%M')}." + ) + if slot_end is not None and e_time.replace(tzinfo=None) > slot_end: + field_errors.setdefault("end_time", []).append( + f"End time must be at or before {slot_end.strftime('%H:%M')}." + ) + + # Calculate cost based on slot and times + if start_at and end_at: + cost = calculate_entry_cost(slot, start_at, end_at) + else: + field_errors.setdefault("slot_id", []).append( + "Please select a slot." 
+ ) + + # Time ordering check (only if we have times) + if start_at and end_at and end_at < start_at: + field_errors.setdefault("end_time", []).append("End time must be after the start time.") + + if field_errors: + return jsonify( + { + "message": "Please fix the highlighted fields.", + "errors": field_errors, + } + ), 422 + + # Pass slot_id and calculated cost to the service + entry = await svc_add_entry( + g.s, + calendar_id=g.calendar.id, + name=name, + start_at=start_at, + end_at=end_at, + user_id=getattr(g, "user", None).id if getattr(g, "user", None) else None, + session_id=None, + slot_id=slot_id, + cost=cost, # Pass calculated cost + ) + + # Set ticket configuration + entry.ticket_price = ticket_price + entry.ticket_count = ticket_count + + # Count pending calendar entries from local session (sees the just-added entry) + user_id = getattr(g, "user", None) and g.user.id + cal_filters = [ + CalendarEntry.deleted_at.is_(None), + CalendarEntry.state == "pending", + ] + if user_id: + cal_filters.append(CalendarEntry.user_id == user_id) + + cal_count = await g.s.scalar( + select(sa_func.count()).select_from(CalendarEntry).where(*cal_filters) + ) or 0 + + # Get product cart count via service (same DB, no HTTP needed) + from shared.infrastructure.cart_identity import current_cart_identity + from shared.services.registry import services + ident = current_cart_identity() + cart_summary = await services.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + product_count = cart_summary.count + total_count = product_count + cal_count + + html = await render_template("_types/day/_main_panel.html") + mini_html = await render_template_string( + '{% from "_types/cart/_mini.html" import mini with context %}' + '{{ mini(oob="true") }}', + cart_count=total_count, + ) + return await make_response(html + mini_html, 200) + + @bp.get("/add/") + async def add_form(day: int, month: int, year: int, **kwargs): + html = await render_template( + 
from __future__ import annotations
from datetime import datetime, timezone
from typing import Optional, Sequence
from decimal import Decimal

from sqlalchemy import select, and_, or_
from sqlalchemy.orm import selectinload
from sqlalchemy.ext.asyncio import AsyncSession

from models.calendars import Calendar, CalendarEntry

from shared.browser.app.errors import AppError


class CalendarError(AppError):
    """Base error for calendar service operations.

    Can carry either a single message or a list of messages; the HTMX
    error handler shows lists as bullet points.
    """
    status_code = 422


async def add_entry(
    sess: AsyncSession,
    calendar_id: int,
    name: str,
    start_at: Optional[datetime],
    end_at: Optional[datetime],
    user_id: int | None = None,
    session_id: str | None = None,
    slot_id: int | None = None,
    cost: Optional[Decimal] = None,
) -> CalendarEntry:
    """
    Add an entry to a calendar.

    Collects *all* validation errors and raises CalendarError([...])
    so the HTMX handler can show them as a list.

    Raises:
        CalendarError with status 422 on validation failure, or 404 when
        the target calendar does not exist / is soft-deleted.
    """
    errors: list[str] = []

    # Normalise
    name = (name or "").strip()

    if not name:
        errors.append("Entry name must not be empty.")

    if start_at is None:
        errors.append("Start time is required.")
    elif not isinstance(start_at, datetime):
        errors.append("Start time is invalid.")

    if end_at is not None and not isinstance(end_at, datetime):
        errors.append("End time is invalid.")

    # Time ordering (zero-length entries, end == start, are allowed).
    if isinstance(start_at, datetime) and isinstance(end_at, datetime):
        if end_at < start_at:
            errors.append("End time must be greater than or equal to the start time.")

    if errors:
        raise CalendarError(errors, status_code=422)

    # Calendar existence (more of a 404 than a validation issue).
    cal = (
        await sess.execute(
            select(Calendar).where(
                Calendar.id == calendar_id,
                Calendar.deleted_at.is_(None),
            )
        )
    ).scalar_one_or_none()

    if not cal:
        raise CalendarError(
            f"Calendar {calendar_id} does not exist or has been deleted.",
            status_code=404,
        )

    # All good, create the entry in the "pending" state.
    entry = CalendarEntry(
        calendar_id=calendar_id,
        name=name,
        start_at=start_at,
        end_at=end_at,
        user_id=user_id,
        session_id=session_id,
        slot_id=slot_id,
        state="pending",
        cost=cost if cost is not None else Decimal('10'),  # default cost
    )
    sess.add(entry)
    await sess.flush()

    # Publish to federation inline (only for authenticated users; local
    # import avoids a circular dependency at module load).
    if entry.user_id:
        from shared.services.federation_publish import try_publish
        await try_publish(
            sess,
            user_id=entry.user_id,
            activity_type="Create",
            object_type="Event",
            object_data={
                "name": entry.name or "",
                "startTime": entry.start_at.isoformat() if entry.start_at else "",
                "endTime": entry.end_at.isoformat() if entry.end_at else "",
            },
            source_type="CalendarEntry",
            source_id=entry.id,
        )

    return entry


async def list_entries(
    sess: AsyncSession,
    post_id: int,
    calendar_slug: str,
    from_: Optional[datetime] = None,
    to: Optional[datetime] = None,
) -> Sequence[CalendarEntry]:
    """
    List entries for a given post's calendar by name.

    - Respects soft-deletes (only non-deleted calendar / entries).
    - If a time window is provided, returns entries that overlap the window:
        - If only from_ is given: entries where end_at is NULL or end_at >= from_
        - If only to is given: entries where start_at <= to
        - If both given: entries where [start_at, end_at or +inf] overlaps [from_, to]
    - Sorted by start_at ascending.
    - Entries are returned with their `slot` relationship eagerly loaded.
    """
    calendar_slug = (calendar_slug or "").strip()
    if not calendar_slug:
        raise CalendarError("calendar_slug must not be empty.")

    cal = (
        await sess.execute(
            select(Calendar.id)
            .where(
                Calendar.container_type == "page",
                Calendar.container_id == post_id,
                Calendar.slug == calendar_slug,
                Calendar.deleted_at.is_(None),
            )
        )
    ).scalar_one_or_none()

    if not cal:
        # Return empty list instead of raising, so callers can treat
        # absence as "no entries".
        return []

    # Base filter: not soft-deleted entries of this calendar
    filters = [CalendarEntry.calendar_id == cal, CalendarEntry.deleted_at.is_(None)]

    # Time window logic
    if from_ and to:
        # Overlap condition: start <= to AND (end is NULL OR end >= from_)
        filters.append(CalendarEntry.start_at <= to)
        filters.append(or_(CalendarEntry.end_at.is_(None), CalendarEntry.end_at >= from_))
    elif from_:
        # Anything that hasn't ended before from_
        filters.append(or_(CalendarEntry.end_at.is_(None), CalendarEntry.end_at >= from_))
    elif to:
        # Anything that has started by 'to'
        filters.append(CalendarEntry.start_at <= to)

    # PERF: eager-load the slot relationship in one extra query instead of
    # the previous per-entry sess.refresh() N+1 pattern.
    stmt = (
        select(CalendarEntry)
        .options(selectinload(CalendarEntry.slot))
        .where(and_(*filters))
        .order_by(CalendarEntry.start_at.asc(), CalendarEntry.id.asc())
    )

    result = await sess.execute(stmt)
    return list(result.scalars())


async def svc_update_entry(
    sess: AsyncSession,
    entry_id: int,
    *,
    name: str | None = None,
    start_at: datetime | None = None,
    end_at: datetime | None = None,
    user_id: int | None = None,
    session_id: str | None = None,
    slot_id: int | None = None,
    cost: Decimal | None = None,
) -> CalendarEntry:
    """
    Update an existing CalendarEntry.

    - Performs the same validations as add_entry()
    - Returns the updated CalendarEntry
    - Raises CalendarError([...]) on validation issues
    - Raises CalendarError(...) if entry does not exist
    """

    # Fetch entry
    entry = (
        await sess.execute(
            select(CalendarEntry).where(
                CalendarEntry.id == entry_id,
                CalendarEntry.deleted_at.is_(None),
            )
        )
    ).scalar_one_or_none()

    if not entry:
        raise CalendarError(
            f"Entry {entry_id} does not exist or has been deleted.",
            status_code=404,
        )

    errors: list[str] = []

    # ----- Validation ----- #

    # Name validation only if updating it
    if name is not None:
        name = name.strip()
        if not name:
            errors.append("Entry name must not be empty.")

    # start_at type validation only if provided
    if start_at is not None and not isinstance(start_at, datetime):
        errors.append("Start time is invalid.")

    # end_at type validation
    if end_at is not None and not isinstance(end_at, datetime):
        errors.append("End time is invalid.")

    # Time ordering against the values that will be in effect after update
    effective_start = start_at if start_at is not None else entry.start_at
    effective_end = end_at if end_at is not None else entry.end_at

    if isinstance(effective_start, datetime) and isinstance(effective_end, datetime):
        if effective_end < effective_start:
            errors.append("End time must be greater than or equal to the start time.")

    if errors:
        raise CalendarError(errors, status_code=422)

    # ----- Apply Updates ----- #

    if name is not None:
        entry.name = name

    if start_at is not None:
        entry.start_at = start_at

    if end_at is not None:
        entry.end_at = end_at

    if user_id is not None:
        entry.user_id = user_id

    if session_id is not None:
        entry.session_id = session_id

    if slot_id is not None:
        entry.slot_id = slot_id

    if cost is not None:
        entry.cost = cost

    # FIX: use an aware UTC timestamp, matching the tz-aware datetimes used
    # for start_at/end_at elsewhere (was naive datetime.utcnow()).
    # NOTE(review): assumes the updated_at column is timezone-aware — verify
    # against the CalendarEntry model.
    entry.updated_at = datetime.now(timezone.utc)

    await sess.flush()
    return entry
shared.browser.app.redis_cacher import clear_cache + + +from sqlalchemy import select +from quart import ( + request, render_template, make_response, Blueprint, g, jsonify +) +from ..calendar_entries.services.entries import ( + svc_update_entry, + CalendarError, # <-- add this if you want to catch it explicitly +) +from .services.post_associations import ( + add_post_to_entry, + remove_post_from_entry, + get_entry_posts, + search_posts as svc_search_posts, +) +from datetime import datetime, timezone +import math +import logging + +from shared.infrastructure.fragments import fetch_fragment + +from ..ticket_types.routes import register as register_ticket_types + +from .admin.routes import register as register_admin + + +logger = logging.getLogger(__name__) + +def register(): + bp = Blueprint("calendar_entry", __name__, url_prefix='/') + + # Register tickets blueprint + bp.register_blueprint( + register_ticket_types() + ) + bp.register_blueprint( + register_admin() + ) + + @bp.before_request + async def load_entry(): + """Load the calendar entry from the URL parameter.""" + entry_id = request.view_args.get("entry_id") + if entry_id: + result = await g.s.execute( + select(CalendarEntry) + .where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None) + ) + ) + g.entry = result.scalar_one_or_none() + + @bp.context_processor + async def inject_entry(): + """Make entry and date parameters available to all templates in this blueprint.""" + return { + "entry": getattr(g, "entry", None), + "year": request.view_args.get("year"), + "month": request.view_args.get("month"), + "day": request.view_args.get("day"), + } + + async def get_day_nav_oob(year: int, month: int, day: int): + """Helper to generate OOB update for day entries nav""" + from datetime import datetime, timezone, date, timedelta + from ..calendar.services import get_visible_entries_for_period + from quart import session as qsession + + # Get the calendar from g + calendar = getattr(g, "calendar", 
None) + if not calendar: + return "" + + # Build day date + try: + day_date = date(year, month, day) + except (ValueError, TypeError): + return "" + + # Period: this day only + period_start = datetime(year, month, day, tzinfo=timezone.utc) + period_end = period_start + timedelta(days=1) + + # Identity + user = getattr(g, "user", None) + session_id = qsession.get("calendar_sid") + + # Get confirmed entries for this day + visible = await get_visible_entries_for_period( + sess=g.s, + calendar_id=calendar.id, + period_start=period_start, + period_end=period_end, + user=user, + session_id=session_id, + ) + + # Render OOB template + nav_oob = await render_template( + "_types/day/admin/_nav_entries_oob.html", + confirmed_entries=visible.confirmed_entries, + post=g.post_data["post"], + calendar=calendar, + day_date=day_date, + ) + return nav_oob + + async def get_post_nav_oob(entry_id: int): + """Helper to generate OOB update for post entries nav when entry state changes""" + # Get the entry to find associated posts + entry = await g.s.scalar( + select(CalendarEntry).where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None) + ) + ) + if not entry: + return "" + + # Get all posts associated with this entry + from .services.post_associations import get_entry_posts + entry_posts = await get_entry_posts(g.s, entry_id) + + # Generate OOB updates for each post's nav + nav_oobs = [] + for post in entry_posts: + # Get associated entries for this post + from ..post.services.entry_associations import get_associated_entries + associated_entries = await get_associated_entries(g.s, post.id) + + # Load calendars for this post + from models.calendars import Calendar + calendars = ( + await g.s.execute( + select(Calendar) + .where(Calendar.container_type == "page", Calendar.container_id == post.id, Calendar.deleted_at.is_(None)) + .order_by(Calendar.name.asc()) + ) + ).scalars().all() + + # Render OOB template for this post's nav + nav_oob = await render_template( + 
"_types/post/admin/_nav_entries_oob.html", + associated_entries=associated_entries, + calendars=calendars, + post=post, + ) + nav_oobs.append(nav_oob) + + return "".join(nav_oobs) + + @bp.context_processor + async def inject_root(): + from ..tickets.services.tickets import ( + get_available_ticket_count, + get_sold_ticket_count, + get_user_reserved_count, + ) + from shared.infrastructure.cart_identity import current_cart_identity + from sqlalchemy.orm import selectinload + + view_args = getattr(request, "view_args", {}) or {} + entry_id = view_args.get("entry_id") + calendar_entry = None + entry_posts = [] + ticket_remaining = None + ticket_sold_count = 0 + user_ticket_count = 0 + user_ticket_counts_by_type = {} + + stmt = ( + select(CalendarEntry) + .where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None), + ) + .options(selectinload(CalendarEntry.ticket_types)) + ) + result = await g.s.execute(stmt) + calendar_entry = result.scalar_one_or_none() + + # Optional: also ensure it belongs to the current calendar, if g.calendar is set + if calendar_entry is not None and getattr(g, "calendar", None): + if calendar_entry.calendar_id != g.calendar.id: + calendar_entry = None + + # Refresh slot relationship if we have a valid entry + if calendar_entry is not None: + await g.s.refresh(calendar_entry, ['slot']) + # Fetch associated posts + entry_posts = await get_entry_posts(g.s, calendar_entry.id) + # Get ticket availability + ticket_remaining = await get_available_ticket_count(g.s, calendar_entry.id) + # Get sold count + ticket_sold_count = await get_sold_ticket_count(g.s, calendar_entry.id) + # Get current user's reserved count + ident = current_cart_identity() + user_ticket_count = await get_user_reserved_count( + g.s, calendar_entry.id, + user_id=ident["user_id"], + session_id=ident["session_id"], + ) + # Per-type counts for multi-type entries + if calendar_entry.ticket_types: + for tt in calendar_entry.ticket_types: + if tt.deleted_at is None: + 
user_ticket_counts_by_type[tt.id] = await get_user_reserved_count( + g.s, calendar_entry.id, + user_id=ident["user_id"], + session_id=ident["session_id"], + ticket_type_id=tt.id, + ) + + # Fetch container nav from market (skip calendar — we're on a calendar page) + container_nav_html = "" + post_data = getattr(g, "post_data", None) + if post_data: + post_id = post_data["post"]["id"] + post_slug = post_data["post"]["slug"] + container_nav_html = await fetch_fragment("market", "container-nav", params={ + "container_type": "page", + "container_id": str(post_id), + "post_slug": post_slug, + }) + + return { + "entry": calendar_entry, + "entry_posts": entry_posts, + "ticket_remaining": ticket_remaining, + "ticket_sold_count": ticket_sold_count, + "user_ticket_count": user_ticket_count, + "user_ticket_counts_by_type": user_ticket_counts_by_type, + "container_nav_html": container_nav_html, + } + @bp.get("/") + @require_admin + async def get(entry_id: int, **rest): + from shared.browser.app.utils.htmx import is_htmx_request + + # Full template for both HTMX and normal requests + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template( + "_types/entry/index.html", + ) + else: + + html = await render_template( + "_types/entry/_oob_elements.html", + ) + + return await make_response(html, 200) + + @bp.get("/edit/") + @require_admin + async def get_edit(entry_id: int, **rest): + html = await render_template("_types/entry/_edit.html") + return await make_response(html, 200) + + @bp.put("/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def put(year: int, month: int, day: int, entry_id: int, **rest): + form = await request.form + + def parse_time_to_dt(value: str | None, year: int, month: int, day: int): + """ + 'HH:MM' + (year, month, day) -> aware datetime in UTC. + Returns None if empty/invalid. 
+ """ + if not value: + return None + try: + hour_str, minute_str = value.split(":", 1) + hour = int(hour_str) + minute = int(minute_str) + return datetime(year, month, day, hour, minute, tzinfo=timezone.utc) + except Exception: + return None + + name = (form.get("name") or "").strip() + start_at = parse_time_to_dt(form.get("start_at"), year, month, day) + end_at = parse_time_to_dt(form.get("end_at"), year, month, day) + + # NEW: slot_id + slot_id_raw = (form.get("slot_id") or "").strip() + slot_id = int(slot_id_raw) if slot_id_raw else None + + # Ticket configuration + ticket_price_str = (form.get("ticket_price") or "").strip() + ticket_price = None + if ticket_price_str: + try: + from decimal import Decimal + ticket_price = Decimal(ticket_price_str) + except Exception: + pass # Will be validated below if needed + + ticket_count_str = (form.get("ticket_count") or "").strip() + ticket_count = None + if ticket_count_str: + try: + ticket_count = int(ticket_count_str) + except Exception: + pass # Will be validated below if needed + + field_errors: dict[str, list[str]] = {} + + # --- Basic validation (slot-style) ------------------------- + + if not name: + field_errors.setdefault("name", []).append( + "Please enter a name for the entry." + ) + + # Check slot first before validating times + slot = None + if slot_id is not None: + result = await g.s.execute( + select(CalendarSlot).where( + CalendarSlot.id == slot_id, + CalendarSlot.calendar_id == g.calendar.id, + CalendarSlot.deleted_at.is_(None), + ) + ) + slot = result.scalar_one_or_none() + if slot is None: + field_errors.setdefault("slot_id", []).append( + "Selected slot is no longer available." 
+ ) + else: + # For inflexible slots, override the times with slot times + if not slot.flexible: + # Replace start/end with slot times + start_at = datetime(year, month, day, + slot.time_start.hour, + slot.time_start.minute, + tzinfo=timezone.utc) + if slot.time_end: + end_at = datetime(year, month, day, + slot.time_end.hour, + slot.time_end.minute, + tzinfo=timezone.utc) + else: + # Flexible: validate times are within slot band + # Only validate if times were provided + if not start_at: + field_errors.setdefault("start_at", []).append( + "Please select a start time." + ) + if not end_at: + field_errors.setdefault("end_at", []).append( + "Please select an end time." + ) + + if start_at and end_at: + s_time = start_at.timetz() + e_time = end_at.timetz() + slot_start = slot.time_start + slot_end = slot.time_end + + if s_time.replace(tzinfo=None) < slot_start: + field_errors.setdefault("start_at", []).append( + f"Start time must be at or after {slot_start.strftime('%H:%M')}." + ) + if slot_end is not None and e_time.replace(tzinfo=None) > slot_end: + field_errors.setdefault("end_at", []).append( + f"End time must be at or before {slot_end.strftime('%H:%M')}." + ) + else: + field_errors.setdefault("slot_id", []).append( + "Please select a slot." + ) + + # Time ordering check (only if we have times and no slot override) + if start_at and end_at and end_at < start_at: + field_errors.setdefault("end_at", []).append( + "End time must be after the start time." 
+ ) + + if field_errors: + return jsonify( + { + "message": "Please fix the highlighted fields.", + "errors": field_errors, + } + ), 422 + + # --- Service call & safety net for extra validation ------- + + try: + entry = await svc_update_entry( + g.s, + entry_id, + name=name, + start_at=start_at, + end_at=end_at, + slot_id=slot_id, # Pass slot_id to service + ) + + # Update ticket configuration + entry.ticket_price = ticket_price + entry.ticket_count = ticket_count + + except CalendarError as e: + # If the service still finds something wrong, surface it nicely. + msg = str(e) + return jsonify( + { + "message": "There was a problem updating the entry.", + "errors": {"__all__": [msg]}, + } + ), 422 + + # --- Success: re-render the entry block ------------------- + + # Get nav OOB update + nav_oob = await get_day_nav_oob(year, month, day) + + html = await render_template( + "_types/entry/index.html", + #entry=entry, + ) + return await make_response(html + nav_oob, 200) + + + @bp.post("/confirm/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def confirm_entry(entry_id: int, year: int, month: int, day: int, **rest): + await g.s.execute( + update(CalendarEntry) + .where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None), + CalendarEntry.state == "provisional", + ) + .values(state="confirmed") + ) + await g.s.flush() + + # Get nav OOB updates (both day and post navs) + day_nav_oob = await get_day_nav_oob(year, month, day) + post_nav_oob = await get_post_nav_oob(entry_id) + + # redirect back to calendar admin or order page as you prefer + html = await render_template("_types/entry/_optioned.html") + return await make_response(html + day_nav_oob + post_nav_oob, 200) + + @bp.post("/decline/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def decline_entry(entry_id: int, year: int, month: int, day: int, **rest): + await g.s.execute( + update(CalendarEntry) + .where( + CalendarEntry.id == entry_id, + 
CalendarEntry.deleted_at.is_(None), + CalendarEntry.state == "provisional", + ) + .values(state="declined") + ) + await g.s.flush() + + # Get nav OOB updates (both day and post navs) + day_nav_oob = await get_day_nav_oob(year, month, day) + post_nav_oob = await get_post_nav_oob(entry_id) + + # redirect back to calendar admin or order page as you prefer + html = await render_template("_types/entry/_optioned.html") + return await make_response(html + day_nav_oob + post_nav_oob, 200) + + @bp.post("/provisional/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def provisional_entry(entry_id: int, year: int, month: int, day: int, **rest): + await g.s.execute( + update(CalendarEntry) + .where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None), + CalendarEntry.state == "confirmed", + ) + .values(state="provisional") + ) + await g.s.flush() + + # Get nav OOB updates (both day and post navs) + day_nav_oob = await get_day_nav_oob(year, month, day) + post_nav_oob = await get_post_nav_oob(entry_id) + + # redirect back to calendar admin or order page as you prefer + html = await render_template("_types/entry/_optioned.html") + return await make_response(html + day_nav_oob + post_nav_oob, 200) + + @bp.post("/tickets/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def update_tickets(entry_id: int, **rest): + """Update ticket configuration for a calendar entry""" + from .services.ticket_operations import update_ticket_config + from decimal import Decimal + + form = await request.form + + # Parse ticket price + ticket_price_str = (form.get("ticket_price") or "").strip() + ticket_price = None + if ticket_price_str: + try: + ticket_price = Decimal(ticket_price_str) + except Exception: + return await make_response("Invalid ticket price", 400) + + # Parse ticket count + ticket_count_str = (form.get("ticket_count") or "").strip() + ticket_count = None + if ticket_count_str: + try: + ticket_count = 
int(ticket_count_str) + except Exception: + return await make_response("Invalid ticket count", 400) + + # Update ticket configuration + success, error = await update_ticket_config( + g.s, entry_id, ticket_price, ticket_count + ) + + if not success: + return await make_response(error, 400) + + await g.s.flush() + + # Return just the tickets fragment (targeted by hx-target="#entry-tickets-...") + html = await render_template("_types/entry/_tickets.html") + return await make_response(html, 200) + + @bp.get("/posts/search/") + @require_admin + async def search_posts(entry_id: int, **rest): + """Search for posts to associate with this entry""" + query = request.args.get("q", "").strip() + page = int(request.args.get("page", 1)) + per_page = 10 + + search_posts, total = await svc_search_posts(g.s, query, page, per_page) + total_pages = math.ceil(total / per_page) if total > 0 else 0 + + html = await render_template( + "_types/entry/_post_search_results.html", + search_posts=search_posts, + search_query=query, + page=page, + total_pages=total_pages, + ) + return await make_response(html, 200) + + @bp.post("/posts/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def add_post(entry_id: int, **rest): + """Add a post association to this entry""" + form = await request.form + post_id = form.get("post_id") + + if not post_id: + return await make_response("Post ID is required", 400) + + try: + post_id = int(post_id) + except ValueError: + return await make_response("Invalid post ID", 400) + + success, error = await add_post_to_entry(g.s, entry_id, post_id) + + if not success: + return await make_response(error, 400) + + await g.s.flush() + + # Reload entry_posts for nav update + entry_posts = await get_entry_posts(g.s, entry_id) + + # Return updated posts list + OOB nav update + html = await render_template("_types/entry/_posts.html") + nav_oob = await render_template( + "_types/entry/admin/_nav_posts_oob.html", + entry_posts=entry_posts, + ) + return 
await make_response(html + nav_oob, 200) + + @bp.delete("/posts//") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def remove_post(entry_id: int, post_id: int, **rest): + """Remove a post association from this entry""" + success, error = await remove_post_from_entry(g.s, entry_id, post_id) + + if not success: + return await make_response(error or "Association not found", 404) + + await g.s.flush() + + # Reload entry_posts for nav update + entry_posts = await get_entry_posts(g.s, entry_id) + + # Return updated posts list + OOB nav update + html = await render_template("_types/entry/_posts.html") + nav_oob = await render_template( + "_types/entry/admin/_nav_posts_oob.html", + entry_posts=entry_posts, + ) + return await make_response(html + nav_oob, 200) + + return bp diff --git a/events/bp/calendar_entry/services/post_associations.py b/events/bp/calendar_entry/services/post_associations.py new file mode 100644 index 0000000..d96cf7d --- /dev/null +++ b/events/bp/calendar_entry/services/post_associations.py @@ -0,0 +1,121 @@ +from __future__ import annotations + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from sqlalchemy.sql import func + +from models.calendars import CalendarEntry, CalendarEntryPost +from shared.services.registry import services + + +async def add_post_to_entry( + session: AsyncSession, + entry_id: int, + post_id: int +) -> tuple[bool, str | None]: + """ + Associate a post with a calendar entry. + Returns (success, error_message). 
+ """ + # Check if entry exists + entry = await session.scalar( + select(CalendarEntry).where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None) + ) + ) + if not entry: + return False, "Calendar entry not found" + + # Check if post exists + post = await services.blog.get_post_by_id(session, post_id) + if not post: + return False, "Post not found" + + # Check if association already exists + existing = await session.scalar( + select(CalendarEntryPost).where( + CalendarEntryPost.entry_id == entry_id, + CalendarEntryPost.content_type == "post", + CalendarEntryPost.content_id == post_id, + CalendarEntryPost.deleted_at.is_(None) + ) + ) + + if existing: + return False, "Post is already associated with this entry" + + # Create association + association = CalendarEntryPost( + entry_id=entry_id, + content_type="post", + content_id=post_id + ) + session.add(association) + await session.flush() + + return True, None + + +async def remove_post_from_entry( + session: AsyncSession, + entry_id: int, + post_id: int +) -> tuple[bool, str | None]: + """ + Remove a post association from a calendar entry (soft delete). + Returns (success, error_message). + """ + # Find the association + association = await session.scalar( + select(CalendarEntryPost).where( + CalendarEntryPost.entry_id == entry_id, + CalendarEntryPost.content_type == "post", + CalendarEntryPost.content_id == post_id, + CalendarEntryPost.deleted_at.is_(None) + ) + ) + + if not association: + return False, "Association not found" + + # Soft delete + association.deleted_at = func.now() + await session.flush() + + return True, None + + +async def get_entry_posts( + session: AsyncSession, + entry_id: int +) -> list: + """ + Get all posts (as PostDTOs) associated with a calendar entry. 
+ """ + result = await session.execute( + select(CalendarEntryPost.content_id).where( + CalendarEntryPost.entry_id == entry_id, + CalendarEntryPost.content_type == "post", + CalendarEntryPost.deleted_at.is_(None), + ) + ) + post_ids = list(result.scalars().all()) + if not post_ids: + return [] + posts = await services.blog.get_posts_by_ids(session, post_ids) + return sorted(posts, key=lambda p: (p.title or "")) + + +async def search_posts( + session: AsyncSession, + query: str, + page: int = 1, + per_page: int = 10 +) -> tuple[list, int]: + """ + Search for posts by title with pagination. + If query is empty, returns all posts in published order. + Returns (post_dtos, total_count). + """ + return await services.blog.search_posts(session, query, page, per_page) diff --git a/events/bp/calendar_entry/services/ticket_operations.py b/events/bp/calendar_entry/services/ticket_operations.py new file mode 100644 index 0000000..46fbdfb --- /dev/null +++ b/events/bp/calendar_entry/services/ticket_operations.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +from typing import Optional +from decimal import Decimal + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from models.calendars import CalendarEntry + + + +async def update_ticket_config( + session: AsyncSession, + entry_id: int, + ticket_price: Optional[Decimal], + ticket_count: Optional[int], +) -> tuple[bool, Optional[str]]: + """ + Update ticket configuration for a calendar entry. 
+ + Args: + session: Database session + entry_id: Calendar entry ID + ticket_price: Price per ticket (None = no tickets) + ticket_count: Total available tickets (None = unlimited) + + Returns: + (success, error_message) + """ + # Get the entry + entry = await session.scalar( + select(CalendarEntry).where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None) + ) + ) + + if not entry: + return False, "Calendar entry not found" + + # Validate inputs + if ticket_price is not None and ticket_price < 0: + return False, "Ticket price cannot be negative" + + if ticket_count is not None and ticket_count < 0: + return False, "Ticket count cannot be negative" + + # Update ticket configuration + entry.ticket_price = ticket_price + entry.ticket_count = ticket_count + + return True, None + + +async def get_available_tickets( + session: AsyncSession, + entry_id: int, +) -> tuple[Optional[int], Optional[str]]: + """ + Get the number of available tickets for a calendar entry. + + Returns: + (available_count, error_message) + - available_count is None if unlimited tickets + - available_count is the remaining count if limited + """ + entry = await session.scalar( + select(CalendarEntry).where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None) + ) + ) + + if not entry: + return None, "Calendar entry not found" + + # If no ticket configuration, return None (unlimited) + if entry.ticket_price is None: + return None, None + + # If ticket_count is None, unlimited tickets + if entry.ticket_count is None: + return None, None + + # Returns total count (booked tickets not yet subtracted) + return entry.ticket_count, None diff --git a/events/bp/calendars/routes.py b/events/bp/calendars/routes.py new file mode 100644 index 0000000..ebae1f7 --- /dev/null +++ b/events/bp/calendars/routes.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +from quart import ( + request, render_template, make_response, Blueprint, g +) +from sqlalchemy import select + +from 
models.calendars import Calendar + + +from .services.calendars import ( + create_calendar as svc_create_calendar, +) + +from ..calendar.routes import register as register_calendar + +from shared.browser.app.redis_cacher import cache_page, clear_cache + +from shared.browser.app.authz import require_admin +from shared.browser.app.utils.htmx import is_htmx_request + + +def register(): + bp = Blueprint("calendars", __name__, url_prefix='/calendars') + bp.register_blueprint( + register_calendar(), + ) + @bp.context_processor + async def inject_root(): + # Must always return a dict + return {} + + # ---------- Pages ---------- + + @bp.get("/") + @cache_page(tag="calendars") + async def home(**kwargs): + if not is_htmx_request(): + html = await render_template( + "_types/calendars/index.html", + ) + else: + html = await render_template( + "_types/calendars/_oob_elements.html", + ) + return await make_response(html) + + + @bp.post("/new/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def create_calendar(**kwargs): + form = await request.form + name = (form.get("name") or "").strip() + + # Get post_id from context if available (blog-embedded mode) + post_data = getattr(g, "post_data", None) + post_id = (post_data.get("post") or {}).get("id") if post_data else None + + if not post_id: + # Standalone mode: post_id from form (or None — calendar without post) + post_id = form.get("post_id") + if post_id: + post_id = int(post_id) + + try: + await svc_create_calendar(g.s, post_id, name) + except Exception as e: + return await make_response(f'
    {e}
    ', 422) + + html = await render_template( + "_types/calendars/index.html", + ) + + # Blog-embedded mode: also update post nav + if post_data: + from ..post.services.entry_associations import get_associated_entries + + cals = ( + await g.s.execute( + select(Calendar) + .where(Calendar.container_type == "page", Calendar.container_id == post_id, Calendar.deleted_at.is_(None)) + .order_by(Calendar.name.asc()) + ) + ).scalars().all() + + associated_entries = await get_associated_entries(g.s, post_id) + + nav_oob = await render_template( + "_types/post/admin/_nav_entries_oob.html", + associated_entries=associated_entries, + calendars=cals, + post=post_data["post"], + ) + + html = html + nav_oob + + return await make_response(html) + return bp diff --git a/events/bp/calendars/services/calendars.py b/events/bp/calendars/services/calendars.py new file mode 100644 index 0000000..2e8a94b --- /dev/null +++ b/events/bp/calendars/services/calendars.py @@ -0,0 +1,115 @@ +from __future__ import annotations + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from models.calendars import Calendar +from shared.services.registry import services +from shared.services.relationships import attach_child, detach_child +import unicodedata +import re + + +class CalendarError(ValueError): + """Base error for calendar service operations.""" + +from shared.browser.app.utils import ( + utcnow +) + +def slugify(value: str, max_len: int = 255) -> str: + """ + Make a URL-friendly slug: + - lowercase + - remove accents + - replace any non [a-z0-9]+ with '-' + - no forward slashes + - collapse multiple dashes + - trim leading/trailing dashes + """ + if value is None: + value = "" + # normalize accents -> ASCII + value = unicodedata.normalize("NFKD", value) + value = value.encode("ascii", "ignore").decode("ascii") + value = value.lower() + + # explicitly block forward slashes + value = value.replace("/", "-") + + # replace non-alnum with hyphen + value = 
re.sub(r"[^a-z0-9]+", "-", value) + # collapse multiple hyphens + value = re.sub(r"-{2,}", "-", value) + # trim hyphens and enforce length + value = value.strip("-")[:max_len].strip("-") + + # fallback if empty + return value or "calendar" + + +async def soft_delete(sess: AsyncSession, post_slug: str, calendar_slug: str) -> bool: + post = await services.blog.get_post_by_slug(sess, post_slug) + if not post: + return False + + cal = ( + await sess.execute( + select(Calendar).where( + Calendar.container_type == "page", + Calendar.container_id == post.id, + Calendar.slug == calendar_slug, + Calendar.deleted_at.is_(None), + ) + ) + ).scalar_one_or_none() + + if not cal: + return False + + cal.deleted_at = utcnow() + await sess.flush() + await detach_child(sess, "page", cal.container_id, "calendar", cal.id) + return True + +async def create_calendar(sess: AsyncSession, post_id: int, name: str) -> Calendar: + """ + Create a calendar for a post. Name must be unique per post. + If a calendar with the same (post_id, name) exists but is soft-deleted, + it will be revived (deleted_at=None). 
+ """ + name = (name or "").strip() + if not name: + raise CalendarError("Calendar name must not be empty.") + slug=slugify(name) + + # Ensure post exists (avoid silent FK errors in some DBs) + post = await services.blog.get_post_by_id(sess, post_id) + if not post: + raise CalendarError(f"Post {post_id} does not exist.") + + # Enforce: calendars can only be created on pages with the calendar feature + if not post.is_page: + raise CalendarError("Calendars can only be created on pages, not posts.") + + # Look for existing (including soft-deleted) + q = await sess.execute( + select(Calendar).where(Calendar.container_type == "page", Calendar.container_id == post_id, Calendar.name == name) + ) + existing = q.scalar_one_or_none() + + if existing: + if existing.deleted_at is not None: + existing.deleted_at = None # revive + await sess.flush() + await attach_child(sess, "page", post_id, "calendar", existing.id) + return existing + raise CalendarError(f'Calendar with slug "{slug}" already exists for post {post_id}.') + + cal = Calendar(container_type="page", container_id=post_id, name=name, slug=slug) + sess.add(cal) + await sess.flush() + await attach_child(sess, "page", post_id, "calendar", cal.id) + return cal + + diff --git a/events/bp/day/admin/routes.py b/events/bp/day/admin/routes.py new file mode 100644 index 0000000..b14cfe7 --- /dev/null +++ b/events/bp/day/admin/routes.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from quart import ( + render_template, make_response, Blueprint +) + + +from shared.browser.app.authz import require_admin + + +def register(): + bp = Blueprint("admin", __name__, url_prefix='/admin') + + # ---------- Pages ---------- + @bp.get("/") + @require_admin + async def admin(year: int, month: int, day: int, **kwargs): + from shared.browser.app.utils.htmx import is_htmx_request + + # Determine which template to use based on request type + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await 
render_template("_types/day/admin/index.html") + else: + html = await render_template("_types/day/admin/_oob_elements.html") + + return await make_response(html) + return bp diff --git a/events/bp/day/routes.py b/events/bp/day/routes.py new file mode 100644 index 0000000..7fbe550 --- /dev/null +++ b/events/bp/day/routes.py @@ -0,0 +1,154 @@ +from __future__ import annotations +from datetime import datetime, timezone, date, timedelta + +from quart import ( + request, render_template, make_response, Blueprint, g, abort, session as qsession +) + +from bp.calendar.services import get_visible_entries_for_period + +from bp.calendar_entries.routes import register as register_calendar_entries +from .admin.routes import register as register_admin + +from shared.browser.app.redis_cacher import cache_page +from shared.infrastructure.fragments import fetch_fragment + +from models.calendars import CalendarSlot # add this import + +from sqlalchemy import select + +from shared.browser.app.utils.htmx import is_htmx_request + + +def register(): + bp = Blueprint("day", __name__, url_prefix='/day///') + + bp.register_blueprint( + register_calendar_entries() + ) + bp.register_blueprint( + register_admin() + ) + + @bp.context_processor + async def inject_root(): + view_args = getattr(request, "view_args", {}) or {} + day = view_args.get("day") + month = view_args.get("month") + year = view_args.get("year") + + calendar = getattr(g, "calendar", None) + if not calendar: + return {} + + try: + day_date = date(year, month, day) + except (ValueError, TypeError): + return {} + + # Period: this day only + period_start = datetime(year, month, day, tzinfo=timezone.utc) + period_end = period_start + timedelta(days=1) + + # Identity & admin flag + user = getattr(g, "user", None) + session_id = qsession.get("calendar_sid") + + visible = await get_visible_entries_for_period( + sess=g.s, + calendar_id=calendar.id, + period_start=period_start, + period_end=period_end, + user=user, + 
session_id=session_id, + ) + + # --- NEW: slots for this weekday --- + weekday_attr = ["mon","tue","wed","thu","fri","sat","sun"][day_date.weekday()] + + stmt = ( + select(CalendarSlot) + .where( + CalendarSlot.calendar_id == calendar.id, + getattr(CalendarSlot, weekday_attr) == True, # noqa: E712 + CalendarSlot.deleted_at.is_(None), + ) + .order_by(CalendarSlot.time_start.asc(), CalendarSlot.id.asc()) + ) + result = await g.s.execute(stmt) + day_slots = list(result.scalars()) + + # Fetch container nav from market (skip calendar — we're on a calendar page) + container_nav_html = "" + post_data = getattr(g, "post_data", None) + if post_data: + post_id = post_data["post"]["id"] + post_slug = post_data["post"]["slug"] + container_nav_html = await fetch_fragment("market", "container-nav", params={ + "container_type": "page", + "container_id": str(post_id), + "post_slug": post_slug, + }) + + return { + "qsession": qsession, + "day_date": day_date, + "day": day, + "year": year, + "month": month, + "day_entries": visible.merged_entries, + "user_entries": visible.user_entries, + "confirmed_entries": visible.confirmed_entries, + "day_slots": day_slots, + "container_nav_html": container_nav_html, + } + + + + @bp.get("/") + @cache_page(tag="calendars") + async def show_day(year: int, month: int, day: int, **kwargs): + """ + Show a detail view for a single calendar day. 
+ + Visibility rules: + - Non-admin: + - all *confirmed* entries for that day (any user) + - all entries for current user/session (any state) for that day + (pending/ordered/provisional/confirmed) + - Admin: + - all confirmed + provisional + ordered entries for that day (all users) + - pending only for current user/session + """ + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template( + "_types/day/index.html", + ) + else: + + html = await render_template( + "_types/day/_oob_elements.html", + ) + return await make_response(html) + + @bp.get("/w//") + async def widget_paginate(widget_domain: str, **kwargs): + """Proxies paginated widget requests to the appropriate fragment provider.""" + page = int(request.args.get("page", 1)) + post_data = getattr(g, "post_data", None) + if not post_data: + abort(404) + post_id = post_data["post"]["id"] + post_slug = post_data["post"]["slug"] + + if widget_domain == "market": + html = await fetch_fragment("market", "container-nav", params={ + "container_type": "page", + "container_id": str(post_id), + "post_slug": post_slug, + }) + return await make_response(html or "") + abort(404) + + return bp diff --git a/events/bp/fragments/__init__.py b/events/bp/fragments/__init__.py new file mode 100644 index 0000000..a4af44b --- /dev/null +++ b/events/bp/fragments/__init__.py @@ -0,0 +1 @@ +from .routes import register as register_fragments diff --git a/events/bp/fragments/routes.py b/events/bp/fragments/routes.py new file mode 100644 index 0000000..293398a --- /dev/null +++ b/events/bp/fragments/routes.py @@ -0,0 +1,130 @@ +"""Events app fragment endpoints. + +Exposes HTML fragments at ``/internal/fragments/`` for consumption +by other coop apps via the fragment client. 
+""" + +from __future__ import annotations + +from quart import Blueprint, Response, g, render_template, request + +from shared.infrastructure.fragments import FRAGMENT_HEADER +from shared.services.registry import services + + +def register(): + bp = Blueprint("fragments", __name__, url_prefix="/internal/fragments") + + _handlers: dict[str, object] = {} + + @bp.before_request + async def _require_fragment_header(): + if not request.headers.get(FRAGMENT_HEADER): + return Response("", status=403) + + @bp.get("/") + async def get_fragment(fragment_type: str): + handler = _handlers.get(fragment_type) + if handler is None: + return Response("", status=200, content_type="text/html") + html = await handler() + return Response(html, status=200, content_type="text/html") + + # --- container-nav fragment: calendar entries + calendar links ----------- + + async def _container_nav_handler(): + container_type = request.args.get("container_type", "page") + container_id = int(request.args.get("container_id", 0)) + post_slug = request.args.get("post_slug", "") + paginate_url_base = request.args.get("paginate_url", "") + page = int(request.args.get("page", 1)) + exclude = request.args.get("exclude", "") + excludes = [e.strip() for e in exclude.split(",") if e.strip()] + + html_parts = [] + + # Calendar entries nav + if not any(e.startswith("calendar") for e in excludes): + entries, has_more = await services.calendar.associated_entries( + g.s, container_type, container_id, page, + ) + if entries: + html_parts.append(await render_template( + "fragments/container_nav_entries.html", + entries=entries, has_more=has_more, + page=page, post_slug=post_slug, + paginate_url_base=paginate_url_base, + )) + + # Calendar links nav + if not any(e.startswith("calendar") for e in excludes): + calendars = await services.calendar.calendars_for_container( + g.s, container_type, container_id, + ) + if calendars: + html_parts.append(await render_template( + "fragments/container_nav_calendars.html", + 
calendars=calendars, post_slug=post_slug, + )) + + return "\n".join(html_parts) + + _handlers["container-nav"] = _container_nav_handler + + # --- container-cards fragment: entries for blog listing cards ------------ + + async def _container_cards_handler(): + post_ids_raw = request.args.get("post_ids", "") + post_slugs_raw = request.args.get("post_slugs", "") + post_ids = [int(x) for x in post_ids_raw.split(",") if x.strip()] + post_slugs = [x.strip() for x in post_slugs_raw.split(",") if x.strip()] + if not post_ids: + return "" + + # Build post_id -> slug mapping + slug_map = {} + for i, pid in enumerate(post_ids): + slug_map[pid] = post_slugs[i] if i < len(post_slugs) else "" + + batch = await services.calendar.confirmed_entries_for_posts(g.s, post_ids) + return await render_template( + "fragments/container_cards_entries.html", + batch=batch, post_ids=post_ids, slug_map=slug_map, + ) + + _handlers["container-cards"] = _container_cards_handler + + # --- account-nav-item fragment: tickets + bookings links for account nav - + + async def _account_nav_item_handler(): + return await render_template("fragments/account_nav_items.html") + + _handlers["account-nav-item"] = _account_nav_item_handler + + # --- account-page fragment: tickets or bookings panel -------------------- + + async def _account_page_handler(): + slug = request.args.get("slug", "") + user_id = request.args.get("user_id", type=int) + if not user_id: + return "" + + if slug == "tickets": + tickets = await services.calendar.user_tickets(g.s, user_id=user_id) + return await render_template( + "fragments/account_page_tickets.html", + tickets=tickets, + ) + elif slug == "bookings": + bookings = await services.calendar.user_bookings(g.s, user_id=user_id) + return await render_template( + "fragments/account_page_bookings.html", + bookings=bookings, + ) + return "" + + _handlers["account-page"] = _account_page_handler + + bp._fragment_handlers = _handlers + + return bp diff --git 
a/events/bp/markets/__init__.py b/events/bp/markets/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/bp/markets/routes.py b/events/bp/markets/routes.py new file mode 100644 index 0000000..bac523f --- /dev/null +++ b/events/bp/markets/routes.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from quart import ( + request, render_template, make_response, Blueprint, g +) + +from .services.markets import ( + create_market as svc_create_market, + soft_delete as svc_soft_delete, +) + +from shared.browser.app.redis_cacher import cache_page, clear_cache +from shared.browser.app.authz import require_admin +from shared.browser.app.utils.htmx import is_htmx_request + + +def register(): + bp = Blueprint("markets", __name__, url_prefix='/markets') + + @bp.context_processor + async def inject_root(): + return {} + + @bp.get("/") + async def home(**kwargs): + if not is_htmx_request(): + html = await render_template("_types/markets/index.html") + else: + html = await render_template("_types/markets/_oob_elements.html") + return await make_response(html) + + @bp.post("/new/") + @require_admin + async def create_market(**kwargs): + form = await request.form + name = (form.get("name") or "").strip() + + post_data = getattr(g, "post_data", None) + post_id = (post_data.get("post") or {}).get("id") if post_data else None + + if not post_id: + post_id = form.get("post_id") + if post_id: + post_id = int(post_id) + + try: + await svc_create_market(g.s, post_id, name) + except Exception as e: + return await make_response(f'
    {e}
    ', 422) + + html = await render_template("_types/markets/index.html") + return await make_response(html) + + @bp.delete("//") + @require_admin + async def delete_market(market_slug: str, **kwargs): + post_slug = getattr(g, "post_slug", None) + deleted = await svc_soft_delete(g.s, post_slug, market_slug) + if not deleted: + return await make_response("Market not found", 404) + + html = await render_template("_types/markets/index.html") + return await make_response(html) + + return bp diff --git a/events/bp/markets/services/__init__.py b/events/bp/markets/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/bp/markets/services/markets.py b/events/bp/markets/services/markets.py new file mode 100644 index 0000000..7b0890a --- /dev/null +++ b/events/bp/markets/services/markets.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +import re +import unicodedata + +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.contracts.dtos import MarketPlaceDTO +from shared.services.registry import services + + +class MarketError(ValueError): + """Base error for market service operations.""" + + +def slugify(value: str, max_len: int = 255) -> str: + if value is None: + value = "" + value = unicodedata.normalize("NFKD", value) + value = value.encode("ascii", "ignore").decode("ascii") + value = value.lower() + value = value.replace("/", "-") + value = re.sub(r"[^a-z0-9]+", "-", value) + value = re.sub(r"-{2,}", "-", value) + value = value.strip("-")[:max_len].strip("-") + return value or "market" + + +async def create_market(sess: AsyncSession, post_id: int, name: str) -> MarketPlaceDTO: + """ + Create a market for a page. Name must be unique per page. + If a market with the same (post_id, slug) exists but is soft-deleted, + it will be revived. 
+ """ + name = (name or "").strip() + if not name: + raise MarketError("Market name must not be empty.") + slug = slugify(name) + + post = await services.blog.get_post_by_id(sess, post_id) + if not post: + raise MarketError(f"Post {post_id} does not exist.") + if not post.is_page: + raise MarketError("Markets can only be created on pages, not posts.") + + try: + return await services.market.create_marketplace(sess, "page", post_id, name, slug) + except ValueError as e: + raise MarketError(str(e)) from e + + +async def soft_delete(sess: AsyncSession, post_slug: str, market_slug: str) -> bool: + post = await services.blog.get_post_by_slug(sess, post_slug) + if not post: + return False + + return await services.market.soft_delete_marketplace(sess, "page", post.id, market_slug) diff --git a/events/bp/page/__init__.py b/events/bp/page/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/bp/page/routes.py b/events/bp/page/routes.py new file mode 100644 index 0000000..da4fb74 --- /dev/null +++ b/events/bp/page/routes.py @@ -0,0 +1,129 @@ +""" +Page summary blueprint — shows upcoming events for a single page's calendars. 
+ +Routes: + GET // — full page scoped to this page + GET //entries — HTMX fragment for infinite scroll + POST //tickets/adjust — adjust ticket quantity inline +""" +from __future__ import annotations + +from quart import Blueprint, g, request, render_template, render_template_string, make_response + +from shared.browser.app.utils.htmx import is_htmx_request +from shared.infrastructure.cart_identity import current_cart_identity +from shared.services.registry import services + + +def register() -> Blueprint: + bp = Blueprint("page_summary", __name__) + + async def _load_entries(post_id, page, per_page=20): + """Load upcoming entries for this page + pending ticket counts.""" + entries, has_more = await services.calendar.upcoming_entries_for_container( + g.s, "page", post_id, page=page, per_page=per_page, + ) + + # Pending ticket counts keyed by entry_id + ident = current_cart_identity() + pending_tickets = {} + if entries: + tickets = await services.calendar.pending_tickets( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + for t in tickets: + if t.entry_id is not None: + pending_tickets[t.entry_id] = pending_tickets.get(t.entry_id, 0) + 1 + + return entries, has_more, pending_tickets + + @bp.get("/") + async def index(): + post = g.post_data["post"] + view = request.args.get("view", "list") + page = int(request.args.get("page", 1)) + + entries, has_more, pending_tickets = await _load_entries(post["id"], page) + + ctx = dict( + entries=entries, + has_more=has_more, + pending_tickets=pending_tickets, + page_info={}, + page=page, + view=view, + ) + + if is_htmx_request(): + html = await render_template("_types/page_summary/_main_panel.html", **ctx) + else: + html = await render_template("_types/page_summary/index.html", **ctx) + + return await make_response(html, 200) + + @bp.get("/entries") + async def entries_fragment(): + post = g.post_data["post"] + view = request.args.get("view", "list") + page = int(request.args.get("page", 1)) + + entries, 
has_more, pending_tickets = await _load_entries(post["id"], page) + + html = await render_template( + "_types/page_summary/_cards.html", + entries=entries, + has_more=has_more, + pending_tickets=pending_tickets, + page_info={}, + page=page, + view=view, + ) + return await make_response(html, 200) + + @bp.post("/tickets/adjust") + async def adjust_ticket(): + """Adjust ticket quantity, return updated widget + OOB cart-mini.""" + ident = current_cart_identity() + form = await request.form + entry_id = int(form.get("entry_id", 0)) + count = max(int(form.get("count", 0)), 0) + tt_raw = (form.get("ticket_type_id") or "").strip() + ticket_type_id = int(tt_raw) if tt_raw else None + + await services.calendar.adjust_ticket_quantity( + g.s, entry_id, count, + user_id=ident["user_id"], + session_id=ident["session_id"], + ticket_type_id=ticket_type_id, + ) + + # Get updated ticket count for this entry + tickets = await services.calendar.pending_tickets( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + qty = sum(1 for t in tickets if t.entry_id == entry_id) + + # Load entry DTO for the widget template + entry = await services.calendar.entry_by_id(g.s, entry_id) + + # Updated cart count for OOB mini-cart + summary = await services.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + cart_count = summary.count + summary.calendar_count + summary.ticket_count + + # Render widget + OOB cart-mini + widget_html = await render_template( + "_types/page_summary/_ticket_widget.html", + entry=entry, + qty=qty, + ticket_url=f"/{g.post_slug}/tickets/adjust", + ) + mini_html = await render_template_string( + '{% from "_types/cart/_mini.html" import mini with context %}' + '{{ mini(oob="true") }}', + cart_count=cart_count, + ) + return await make_response(widget_html + mini_html, 200) + + return bp diff --git a/events/bp/payments/__init__.py b/events/bp/payments/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/events/bp/payments/routes.py b/events/bp/payments/routes.py new file mode 100644 index 0000000..677bbc8 --- /dev/null +++ b/events/bp/payments/routes.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from quart import ( + render_template, make_response, Blueprint, g, request +) +from sqlalchemy import select + +from shared.models.page_config import PageConfig + +from shared.browser.app.authz import require_admin +from shared.browser.app.utils.htmx import is_htmx_request + + +def register(): + bp = Blueprint("payments", __name__, url_prefix='/payments') + + @bp.context_processor + async def inject_root(): + return {} + + async def _load_payment_ctx(): + """Load PageConfig SumUp data for the current page.""" + post = (getattr(g, "post_data", None) or {}).get("post", {}) + post_id = post.get("id") + if not post_id: + return {} + + pc = (await g.s.execute( + select(PageConfig).where(PageConfig.container_type == "page", PageConfig.container_id == post_id) + )).scalar_one_or_none() + + return { + "sumup_configured": bool(pc and pc.sumup_api_key), + "sumup_merchant_code": (pc.sumup_merchant_code or "") if pc else "", + "sumup_checkout_prefix": (pc.sumup_checkout_prefix or "") if pc else "", + } + + @bp.get("/") + @require_admin + async def home(**kwargs): + ctx = await _load_payment_ctx() + if not is_htmx_request(): + html = await render_template("_types/payments/index.html", **ctx) + else: + html = await render_template("_types/payments/_oob_elements.html", **ctx) + return await make_response(html) + + @bp.put("/") + @require_admin + async def update_sumup(**kwargs): + """Update SumUp credentials for this page.""" + post = (getattr(g, "post_data", None) or {}).get("post", {}) + post_id = post.get("id") + if not post_id: + return await make_response("Post not found", 404) + + pc = (await g.s.execute( + select(PageConfig).where(PageConfig.container_type == "page", PageConfig.container_id == post_id) + )).scalar_one_or_none() + if pc is None: + pc = 
PageConfig(container_type="page", container_id=post_id, features={}) + g.s.add(pc) + await g.s.flush() + + form = await request.form + merchant_code = (form.get("merchant_code") or "").strip() + api_key = (form.get("api_key") or "").strip() + checkout_prefix = (form.get("checkout_prefix") or "").strip() + + pc.sumup_merchant_code = merchant_code or None + pc.sumup_checkout_prefix = checkout_prefix or None + if api_key: + pc.sumup_api_key = api_key + + await g.s.flush() + + ctx = await _load_payment_ctx() + html = await render_template("_types/payments/_main_panel.html", **ctx) + return await make_response(html) + + return bp diff --git a/events/bp/slot/routes.py b/events/bp/slot/routes.py new file mode 100644 index 0000000..d3011fd --- /dev/null +++ b/events/bp/slot/routes.py @@ -0,0 +1,182 @@ +from __future__ import annotations + +from quart import ( + request, render_template, make_response, Blueprint, g, jsonify +) +from sqlalchemy.exc import IntegrityError + + +from shared.browser.app.authz import require_admin +from shared.browser.app.redis_cacher import clear_cache + +from .services.slot import ( + update_slot as svc_update_slot, + soft_delete_slot as svc_delete_slot, + get_slot as svc_get_slot, +) + +from ..slots.services.slots import ( + list_slots as svc_list_slots, +) + +from shared.browser.app.utils import ( + parse_time, + parse_cost +) +from shared.browser.app.utils.htmx import is_htmx_request + + +def register(): + bp = Blueprint("slot", __name__, url_prefix='/') + + # ---------- Pages ---------- + + @bp.get("/") + @require_admin + async def get(slot_id: int, **kwargs): + slot = await svc_get_slot(g.s, slot_id) + if not slot: + return await make_response("Not found", 404) + + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template( + "_types/slot/index.html", + slot=slot, + ) + else: + + html = await render_template( + "_types/slot/_oob_elements.html", + slot=slot, + ) + + return await 
make_response(html) + + + @bp.get("/edit/") + @require_admin + async def get_edit(slot_id: int, **kwargs): + slot = await svc_get_slot(g.s, slot_id) + if not slot: + return await make_response("Not found", 404) + html = await render_template( + "_types/slot/_edit.html", + slot=slot, + #post=g.post_data['post'], + #calendar=g.calendar, + ) + return await make_response(html) + + @bp.get("/view/") + @require_admin + async def get_view(slot_id: int, **kwargs): + slot = await svc_get_slot(g.s, slot_id) + if not slot: + return await make_response("Not found", 404) + html = await render_template( + "_types/slot/_main_panel.html", + slot=slot, + #post=g.post_data['post'], + #calendar=g.calendar, + ) + return await make_response(html) + + @bp.delete("/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def slot_delete(slot_id: int, **kwargs): + await svc_delete_slot(g.s, slot_id) + slots = await svc_list_slots(g.s, g.calendar.id) + html = await render_template("_types/slots/_man_panel.html", calendar=g.calendar, slots=slots) + return await make_response(html) + + @bp.put("/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def put(slot_id: int, **kwargs): + form = await request.form + + name = (form.get("name") or "").strip() + description = (form.get("description") or "").strip() or None + days = {k: form.get(k) for k in ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]} + time_start = parse_time(form.get("time_start")) + time_end = parse_time(form.get("time_end")) + cost = parse_cost(form.get("cost")) + + # NEW + flexible = bool(form.get("flexible")) + + field_errors: dict[str, list[str]] = {} + + # Basic validation... 
+ if not name: + field_errors.setdefault("name", []).append("Please enter a name for the slot.") + + if not time_start: + field_errors.setdefault("time_start", []).append("Please select a start time.") + + if not time_end: + field_errors.setdefault("time_end", []).append("Please select an end time.") + + if time_start and time_end and time_end <= time_start: + field_errors.setdefault("time_end", []).append( + "End time must be after the start time." + ) + + if not any(form.get(d) for d in ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]): + field_errors.setdefault("days", []).append( + "Please select at least one day." + ) + + if field_errors: + return jsonify( + { + "message": "Please fix the highlighted fields.", + "errors": field_errors, + } + ), 422 + + # DB update + friendly duplicate handling + try: + slot = await svc_update_slot( + g.s, + slot_id, + name=name, + description=description, + days=days, + time_start=time_start, + time_end=time_end, + cost=cost, + flexible=flexible, # <--- NEW + ) + except IntegrityError as e: + msg = str(e.orig) if getattr(e, "orig", None) else str(e) + if "uq_calendar_slots_unique_band" in msg or "duplicate key value" in msg: + field_errors = { + "name": [f'A slot called “{name}” already exists on this calendar.'] + } + return jsonify( + { + "message": "That slot name is already in use.", + "errors": field_errors, + } + ), 422 + + return jsonify( + { + "message": "An unexpected error occurred while updating the slot.", + "errors": {"__all__": [msg]}, + } + ), 422 + + html = await render_template( + "_types/slot/_main_panel.html", + slot=slot, + oob=True, + ) + return await make_response(html) + + + + return bp diff --git a/events/bp/slot/services/slot.py b/events/bp/slot/services/slot.py new file mode 100644 index 0000000..169facd --- /dev/null +++ b/events/bp/slot/services/slot.py @@ -0,0 +1,91 @@ + +from __future__ import annotations +from datetime import time + +from sqlalchemy.ext.asyncio import AsyncSession + +from 
# NOTE(review): reconstructed from a flattened diff. The leading import was
# split across the previous span ("from" is there); it is reproduced in full
# here. The trailing update_slot_description() signature, cut at this span's
# edge, is not reproduced.
from models.calendars import CalendarSlot


class SlotError(ValueError):
    """Domain error for invalid slot operations (missing slot, bad time range)."""


def _b(v):
    """Coerce a checkbox/form value to bool ("1", "true", "on", ... -> True)."""
    if isinstance(v, bool):
        return v
    return str(v).lower() in {"1", "true", "t", "yes", "y", "on"}


async def update_slot(
    sess: AsyncSession,
    slot_id: int,
    *,
    name: str | None = None,
    description: str | None = None,
    days: dict | None = None,
    time_start: time | None = None,
    time_end: time | None = None,
    cost: float | None = None,
    flexible: bool | None = None,
):
    """Partially update a CalendarSlot; only keyword args that are not None
    are applied.

    Raises:
        SlotError: if the slot is missing/soft-deleted, or if the effective
            time range (after applying the new values) would be invalid.
    """
    slot = await sess.get(CalendarSlot, slot_id)
    if not slot or slot.deleted_at is not None:
        raise SlotError("slot not found")

    # BUG FIX: validate the *effective* time range before mutating the row,
    # so a rejected update does not leave a dirty ORM object in the session
    # (the original mutated first and raised afterwards). Also use explicit
    # None checks instead of truthiness on time objects.
    effective_start = time_start if time_start is not None else slot.time_start
    effective_end = time_end if time_end is not None else slot.time_end
    if (time_start is not None or time_end is not None) and effective_end <= effective_start:
        raise SlotError("time range invalid")

    if name is not None:
        slot.name = name

    if description is not None:
        slot.description = description or None

    if days is not None:
        # Missing keys keep the current value; present keys are coerced.
        # NOTE(review): callers build this dict with form.get() for every day,
        # so unchecked boxes arrive as None and coerce to False — presumably
        # intentional (full replace of the weekday mask); confirm with caller.
        slot.mon = _b(days.get("mon", slot.mon))
        slot.tue = _b(days.get("tue", slot.tue))
        slot.wed = _b(days.get("wed", slot.wed))
        slot.thu = _b(days.get("thu", slot.thu))
        slot.fri = _b(days.get("fri", slot.fri))
        slot.sat = _b(days.get("sat", slot.sat))
        slot.sun = _b(days.get("sun", slot.sun))

    if time_start is not None:
        slot.time_start = time_start
    if time_end is not None:
        slot.time_end = time_end

    if cost is not None:
        slot.cost = cost

    # Update the flexible flag only if explicitly provided.
    if flexible is not None:
        slot.flexible = flexible

    await sess.flush()
    return slot


async def soft_delete_slot(sess: AsyncSession, slot_id: int):
    """Soft-delete a slot by stamping deleted_at; no-op if already deleted."""
    slot = await sess.get(CalendarSlot, slot_id)
    if not slot or slot.deleted_at is not None:
        return
    from datetime import datetime, timezone
    slot.deleted_at = datetime.now(timezone.utc)
    await sess.flush()


async def get_slot(sess: AsyncSession, slot_id: int) -> CalendarSlot | None:
    """Fetch a slot by primary key. Includes soft-deleted rows — callers that
    must exclude them check deleted_at themselves."""
    return await sess.get(CalendarSlot, slot_id)
-> CalendarSlot: + slot = await sess.get(CalendarSlot, slot_id) + if not slot: + raise SlotError("slot not found") + slot.description = description or None + await sess.flush() + return slot diff --git a/events/bp/slots/routes.py b/events/bp/slots/routes.py new file mode 100644 index 0000000..cd655cb --- /dev/null +++ b/events/bp/slots/routes.py @@ -0,0 +1,152 @@ +from __future__ import annotations + +from quart import ( + request, render_template, make_response, Blueprint, g, jsonify +) + +from sqlalchemy.exc import IntegrityError +from shared.browser.app.authz import require_admin +from shared.browser.app.redis_cacher import clear_cache + +from .services.slots import ( + list_slots as svc_list_slots, + create_slot as svc_create_slot, +) + +from ..slot.routes import register as register_slot + +from shared.browser.app.utils import ( + parse_time, + parse_cost +) +from shared.browser.app.utils.htmx import is_htmx_request + + +def register(): + bp = Blueprint("slots", __name__, url_prefix='/slots') + + # ---------- Pages ---------- + + bp.register_blueprint( + register_slot() + ) + + + + @bp.context_processor + async def get_slots(): + calendar = getattr(g, "calendar", None) + if calendar: + return { + "slots": await svc_list_slots(g.s, calendar.id) + } + return {"slots": []} + + @bp.get("/") + async def get(**kwargs): + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template( + "_types/slots/index.html", + ) + else: + + html = await render_template( + "_types/slots/_oob_elements.html", + ) + return await make_response(html) + + + @bp.post("/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def post(**kwargs): + form = await request.form + + name = (form.get("name") or "").strip() + description = (form.get("description") or "").strip() or None + days = {k: form.get(k) for k in ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]} + time_start = parse_time(form.get("time_start")) + time_end = 
parse_time(form.get("time_end")) + cost = parse_cost(form.get("cost")) + + # NEW: flexible flag from checkbox + flexible = bool(form.get("flexible")) + + field_errors: dict[str, list[str]] = {} + + if not name: + field_errors.setdefault("name", []).append("Please enter a name for the slot.") + + if not time_start: + field_errors.setdefault("time_start", []).append("Please select a start time.") + + if not time_end: + field_errors.setdefault("time_end", []).append("Please select an end time.") + + if time_start and time_end and time_end <= time_start: + field_errors.setdefault("time_end", []).append("End time must be after the start time.") + + if not any(form.get(d) for d in ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]): + field_errors.setdefault("days", []).append("Please select at least one day.") + + if field_errors: + return jsonify({ + "message": "Please fix the highlighted fields.", + "errors": field_errors, + }), 422 + + # DB insert with friendly duplicate detection + try: + await svc_create_slot( + g.s, + g.calendar.id, + name=name, + description=description, + days=days, + time_start=time_start, + time_end=time_end, + cost=cost, + flexible=flexible, # <<< NEW + ) + except IntegrityError as e: + # Improve duplicate detection: check constraint name or message + msg = str(e.orig) if getattr(e, "orig", None) else str(e) + if "uq_calendar_slots_unique_band" in msg or "duplicate key value" in msg: + field_errors = { + "name": [f"A slot called “{name}” already exists on this calendar."] + } + return jsonify({ + "message": "That slot name is already in use.", + "errors": field_errors, + }), 422 + + # Unknown DB error + return jsonify({ + "message": "An unexpected error occurred while saving the slot.", + "errors": {"__all__": [msg]}, + }), 422 + + # Success → re-render the slots table + html = await render_template("_types/slots/_main_panel.html") + return await make_response(html) + + + @bp.get("/add") + @require_admin + async def add_form(**kwargs): + html 
= await render_template( + "_types/slots/_add.html", + ) + return await make_response(html) + + @bp.get("/add-button") + @require_admin + async def add_button(**kwargs): + + html = await render_template( + "_types/slots/_add_button.html", + ) + return await make_response(html) + + return bp diff --git a/events/bp/slots/services/slots.py b/events/bp/slots/services/slots.py new file mode 100644 index 0000000..bd9827f --- /dev/null +++ b/events/bp/slots/services/slots.py @@ -0,0 +1,65 @@ + +from __future__ import annotations +from datetime import time +from typing import Sequence + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from models.calendars import CalendarSlot + + +class SlotError(ValueError): + pass + +def _b(v): + if isinstance(v, bool): + return v + s = str(v).lower() + return s in {"1","true","t","yes","y","on"} + +async def list_slots(sess: AsyncSession, calendar_id: int) -> Sequence[CalendarSlot]: + res = await sess.execute( + select(CalendarSlot) + .where(CalendarSlot.calendar_id == calendar_id, CalendarSlot.deleted_at.is_(None)) + .order_by(CalendarSlot.time_start.asc(), CalendarSlot.id.asc()) + ) + return res.scalars().all() + +async def create_slot( + sess: AsyncSession, + calendar_id: int, + *, + name: str, + description: str | None, + days: dict, + time_start: time, + time_end: time, + cost: float | None, + flexible: bool = False, # NEW +): + if not name: + raise SlotError("name is required") + + if not time_start or not time_end or time_end <= time_start: + raise SlotError("time range invalid") + + slot = CalendarSlot( + calendar_id=calendar_id, + name=name, + description=(description or None), + mon=_b(days.get("mon")), + tue=_b(days.get("tue")), + wed=_b(days.get("wed")), + thu=_b(days.get("thu")), + fri=_b(days.get("fri")), + sat=_b(days.get("sat")), + sun=_b(days.get("sun")), + time_start=time_start, + time_end=time_end, + cost=cost, + flexible=flexible, # NEW + ) + sess.add(slot) + await sess.flush() + 
return slot diff --git a/events/bp/ticket_admin/__init__.py b/events/bp/ticket_admin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/bp/ticket_admin/routes.py b/events/bp/ticket_admin/routes.py new file mode 100644 index 0000000..3168a29 --- /dev/null +++ b/events/bp/ticket_admin/routes.py @@ -0,0 +1,166 @@ +""" +Ticket admin blueprint — check-in interface and ticket management. + +Routes: + GET /admin/tickets/ — Ticket dashboard (scan + list) + GET /admin/tickets/entry// — Tickets for a specific entry + POST /admin/tickets//checkin — Check in a ticket + GET /admin/tickets// — Ticket admin detail +""" +from __future__ import annotations + +import logging + +from quart import ( + Blueprint, g, request, render_template, make_response, jsonify, +) +from sqlalchemy import select, func +from sqlalchemy.orm import selectinload + +from models.calendars import CalendarEntry, Ticket, TicketType +from shared.browser.app.authz import require_admin +from shared.browser.app.redis_cacher import clear_cache + +from ..tickets.services.tickets import ( + get_ticket_by_code, + get_tickets_for_entry, + checkin_ticket, +) + +logger = logging.getLogger(__name__) + + +def register() -> Blueprint: + bp = Blueprint("ticket_admin", __name__, url_prefix="/admin/tickets") + + @bp.get("/") + @require_admin + async def dashboard(): + """Ticket admin dashboard with QR scanner and recent tickets.""" + from shared.browser.app.utils.htmx import is_htmx_request + + # Get recent tickets + result = await g.s.execute( + select(Ticket) + .options( + selectinload(Ticket.entry).selectinload(CalendarEntry.calendar), + selectinload(Ticket.ticket_type), + ) + .order_by(Ticket.created_at.desc()) + .limit(50) + ) + tickets = result.scalars().all() + + # Stats + total = await g.s.scalar(select(func.count(Ticket.id))) + confirmed = await g.s.scalar( + select(func.count(Ticket.id)).where(Ticket.state == "confirmed") + ) + checked_in = await g.s.scalar( + 
select(func.count(Ticket.id)).where(Ticket.state == "checked_in") + ) + reserved = await g.s.scalar( + select(func.count(Ticket.id)).where(Ticket.state == "reserved") + ) + + stats = { + "total": total or 0, + "confirmed": confirmed or 0, + "checked_in": checked_in or 0, + "reserved": reserved or 0, + } + + if not is_htmx_request(): + html = await render_template( + "_types/ticket_admin/index.html", + tickets=tickets, + stats=stats, + ) + else: + html = await render_template( + "_types/ticket_admin/_main_panel.html", + tickets=tickets, + stats=stats, + ) + + return await make_response(html, 200) + + @bp.get("/entry//") + @require_admin + async def entry_tickets(entry_id: int): + """List all tickets for a specific calendar entry.""" + from shared.browser.app.utils.htmx import is_htmx_request + + entry = await g.s.scalar( + select(CalendarEntry) + .where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None), + ) + .options(selectinload(CalendarEntry.calendar)) + ) + if not entry: + return await make_response("Entry not found", 404) + + tickets = await get_tickets_for_entry(g.s, entry_id) + + html = await render_template( + "_types/ticket_admin/_entry_tickets.html", + entry=entry, + tickets=tickets, + ) + return await make_response(html, 200) + + @bp.get("/lookup/") + @require_admin + async def lookup(): + """Look up a ticket by code (used by scanner).""" + code = request.args.get("code", "").strip() + if not code: + return await make_response( + '
    Enter a ticket code
    ', + 200, + ) + + ticket = await get_ticket_by_code(g.s, code) + if not ticket: + html = await render_template( + "_types/ticket_admin/_lookup_result.html", + ticket=None, + error="Ticket not found", + ) + return await make_response(html, 200) + + html = await render_template( + "_types/ticket_admin/_lookup_result.html", + ticket=ticket, + error=None, + ) + return await make_response(html, 200) + + @bp.post("//checkin/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def do_checkin(code: str): + """Check in a ticket by its code.""" + success, error = await checkin_ticket(g.s, code) + + if not success: + html = await render_template( + "_types/ticket_admin/_checkin_result.html", + success=False, + error=error, + ticket=None, + ) + return await make_response(html, 200) + + ticket = await get_ticket_by_code(g.s, code) + html = await render_template( + "_types/ticket_admin/_checkin_result.html", + success=True, + error=None, + ticket=ticket, + ) + return await make_response(html, 200) + + return bp diff --git a/events/bp/ticket_admin/services/__init__.py b/events/bp/ticket_admin/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/bp/ticket_type/routes.py b/events/bp/ticket_type/routes.py new file mode 100644 index 0000000..8f807b3 --- /dev/null +++ b/events/bp/ticket_type/routes.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +from quart import ( + request, render_template, make_response, Blueprint, g, jsonify +) + +from shared.browser.app.authz import require_admin +from shared.browser.app.redis_cacher import clear_cache + +from .services.ticket import ( + get_ticket_type as svc_get_ticket_type, + update_ticket_type as svc_update_ticket_type, + soft_delete_ticket_type as svc_delete_ticket_type, +) + +from ..ticket_types.services.tickets import ( + list_ticket_types as svc_list_ticket_types, +) +from shared.browser.app.utils.htmx import is_htmx_request + + +def register(): + bp = 
Blueprint("ticket_type", __name__, url_prefix='/') + + @bp.get("/") + @require_admin + async def get(ticket_type_id: int, **kwargs): + """View a single ticket type.""" + ticket_type = await svc_get_ticket_type(g.s, ticket_type_id) + if not ticket_type: + return await make_response("Not found", 404) + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template( + "_types/ticket_type/index.html", + ticket_type=ticket_type, + ) + else: + + html = await render_template( + "_types/ticket_type/_oob_elements.html", + ticket_type=ticket_type, + ) + + return await make_response(html) + + @bp.get("/edit/") + @require_admin + async def get_edit(ticket_type_id: int, **kwargs): + """Show the edit form for a ticket type.""" + ticket_type = await svc_get_ticket_type(g.s, ticket_type_id) + if not ticket_type: + return await make_response("Not found", 404) + + html = await render_template( + "_types/ticket_type/_edit.html", + ticket_type=ticket_type, + ) + return await make_response(html) + + @bp.get("/view/") + @require_admin + async def get_view(ticket_type_id: int, **kwargs): + """Show the view for a ticket type.""" + ticket_type = await svc_get_ticket_type(g.s, ticket_type_id) + if not ticket_type: + return await make_response("Not found", 404) + + html = await render_template( + "_types/ticket_type/_main_panel.html", + ticket_type=ticket_type, + ) + return await make_response(html) + + @bp.put("/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def put(ticket_type_id: int, **kwargs): + """Update a ticket type.""" + form = await request.form + + name = (form.get("name") or "").strip() + cost_str = (form.get("cost") or "").strip() + count_str = (form.get("count") or "").strip() + + field_errors: dict[str, list[str]] = {} + + # Validate name + if not name: + field_errors.setdefault("name", []).append("Please enter a ticket type name.") + + # Validate cost + cost = None + if not cost_str: + 
field_errors.setdefault("cost", []).append("Please enter a cost.") + else: + try: + cost = float(cost_str) + if cost < 0: + field_errors.setdefault("cost", []).append("Cost must be positive.") + except ValueError: + field_errors.setdefault("cost", []).append("Please enter a valid number.") + + # Validate count + count = None + if not count_str: + field_errors.setdefault("count", []).append("Please enter a ticket count.") + else: + try: + count = int(count_str) + if count < 0: + field_errors.setdefault("count", []).append("Count must be positive.") + except ValueError: + field_errors.setdefault("count", []).append("Please enter a valid whole number.") + + if field_errors: + return jsonify({ + "message": "Please fix the highlighted fields.", + "errors": field_errors, + }), 422 + + # Update ticket type + ticket_type = await svc_update_ticket_type( + g.s, + ticket_type_id, + name=name, + cost=cost, + count=count, + ) + + if not ticket_type: + return await make_response("Not found", 404) + + # Return updated view with OOB flag + html = await render_template( + "_types/ticket_type/_main_panel.html", + ticket_type=ticket_type, + oob=True, + ) + return await make_response(html) + + @bp.delete("/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def delete(ticket_type_id: int, **kwargs): + """Soft-delete a ticket type.""" + success = await svc_delete_ticket_type(g.s, ticket_type_id) + if not success: + return await make_response("Not found", 404) + + # Re-render the ticket types list + ticket_types = await svc_list_ticket_types(g.s, g.entry.id) + html = await render_template( + "_types/ticket_types/_main_panel.html", + ticket_types=ticket_types + ) + return await make_response(html) + + return bp diff --git a/events/bp/ticket_type/services/ticket.py b/events/bp/ticket_type/services/ticket.py new file mode 100644 index 0000000..b53a657 --- /dev/null +++ b/events/bp/ticket_type/services/ticket.py @@ -0,0 +1,57 @@ +from __future__ import annotations + 
# NOTE(review): reconstructed from a flattened diff; the trailing import
# fragment of ticket_types/routes.py in this span is not reproduced here.
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from models.calendars import TicketType

from datetime import datetime, timezone


def utcnow() -> datetime:
    """Current time as a timezone-aware UTC datetime."""
    return datetime.now(timezone.utc)


async def get_ticket_type(session: AsyncSession, ticket_type_id: int) -> TicketType | None:
    """Return the ticket type with the given id, or None if absent or soft-deleted."""
    stmt = select(TicketType).where(
        TicketType.id == ticket_type_id,
        TicketType.deleted_at.is_(None),
    )
    return (await session.execute(stmt)).scalar_one_or_none()


async def update_ticket_type(
    session: AsyncSession,
    ticket_type_id: int,
    *,
    name: str,
    cost: float,
    count: int,
) -> TicketType | None:
    """Apply name/cost/count to an existing ticket type.

    Returns the updated row, or None if no live row matches the id.
    """
    tt = await get_ticket_type(session, ticket_type_id)
    if tt is None:
        return None

    tt.name = name
    tt.cost = cost
    tt.count = count
    tt.updated_at = utcnow()

    await session.flush()
    return tt


async def soft_delete_ticket_type(session: AsyncSession, ticket_type_id: int) -> bool:
    """Mark a ticket type as deleted. True on success, False if not found."""
    tt = await get_ticket_type(session, ticket_type_id)
    if tt is None:
        return False

    tt.deleted_at = utcnow()
    await session.flush()
    return True
shared.browser.app.utils.htmx import is_htmx_request + + +def register(): + bp = Blueprint("ticket_types", __name__, url_prefix='/ticket-types') + + # Register individual ticket routes + bp.register_blueprint( + register_ticket_type() + ) + + @bp.context_processor + async def get_ticket_types(): + """Make ticket types available to all templates in this blueprint.""" + entry = getattr(g, "entry", None) + if entry: + return { + "ticket_types": await svc_list_ticket_types(g.s, entry.id) + } + return {"ticket_types": []} + + @bp.get("/") + async def get(**kwargs): + """List all ticket types for the current entry.""" + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template( + "_types/ticket_types/index.html", + ) + else: + + html = await render_template( + "_types/ticket_types/_oob_elements.html", + ) + + return await make_response(html) + + @bp.post("/") + @require_admin + @clear_cache(tag="calendars", tag_scope="all") + async def post(**kwargs): + """Create a new ticket type.""" + form = await request.form + + name = (form.get("name") or "").strip() + cost_str = (form.get("cost") or "").strip() + count_str = (form.get("count") or "").strip() + + field_errors: dict[str, list[str]] = {} + + # Validate name + if not name: + field_errors.setdefault("name", []).append("Please enter a ticket type name.") + + # Validate cost + cost = None + if not cost_str: + field_errors.setdefault("cost", []).append("Please enter a cost.") + else: + try: + cost = float(cost_str) + if cost < 0: + field_errors.setdefault("cost", []).append("Cost must be positive.") + except ValueError: + field_errors.setdefault("cost", []).append("Please enter a valid number.") + + # Validate count + count = None + if not count_str: + field_errors.setdefault("count", []).append("Please enter a ticket count.") + else: + try: + count = int(count_str) + if count < 0: + field_errors.setdefault("count", []).append("Count must be positive.") + except ValueError: 
+ field_errors.setdefault("count", []).append("Please enter a valid whole number.") + + if field_errors: + return jsonify({ + "message": "Please fix the highlighted fields.", + "errors": field_errors, + }), 422 + + # Create ticket type + await svc_create_ticket_type( + g.s, + g.entry.id, + name=name, + cost=cost, + count=count, + ) + + # Success → re-render the ticket types table + html = await render_template("_types/ticket_types/_main_panel.html") + return await make_response(html) + + @bp.get("/add") + @require_admin + async def add_form(**kwargs): + """Show the add ticket type form.""" + html = await render_template( + "_types/ticket_types/_add.html", + ) + return await make_response(html) + + @bp.get("/add-button") + @require_admin + async def add_button(**kwargs): + """Show the add ticket type button.""" + html = await render_template( + "_types/ticket_types/_add_button.html", + ) + return await make_response(html) + + return bp diff --git a/events/bp/ticket_types/services/tickets.py b/events/bp/ticket_types/services/tickets.py new file mode 100644 index 0000000..0be361e --- /dev/null +++ b/events/bp/ticket_types/services/tickets.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from models.calendars import TicketType + +from datetime import datetime, timezone + + +def utcnow() -> datetime: + return datetime.now(timezone.utc) + + +async def list_ticket_types(session: AsyncSession, entry_id: int) -> list[TicketType]: + """Get all active ticket types for a calendar entry.""" + result = await session.execute( + select(TicketType) + .where( + TicketType.entry_id == entry_id, + TicketType.deleted_at.is_(None) + ) + .order_by(TicketType.name) + ) + return list(result.scalars().all()) + + +async def create_ticket_type( + session: AsyncSession, + entry_id: int, + *, + name: str, + cost: float, + count: int, +) -> TicketType: + """Create 
a new ticket type for a calendar entry.""" + ticket_type = TicketType( + entry_id=entry_id, + name=name, + cost=cost, + count=count, + created_at=utcnow(), + updated_at=utcnow(), + ) + session.add(ticket_type) + await session.flush() + return ticket_type diff --git a/events/bp/tickets/__init__.py b/events/bp/tickets/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/bp/tickets/routes.py b/events/bp/tickets/routes.py new file mode 100644 index 0000000..408eb06 --- /dev/null +++ b/events/bp/tickets/routes.py @@ -0,0 +1,308 @@ +""" +Tickets blueprint — user-facing ticket views and QR codes. + +Routes: + GET /tickets/ — My tickets list + GET /tickets// — Ticket detail with QR code + POST /tickets/buy/ — Purchase tickets for an entry + POST /tickets/adjust/ — Adjust ticket quantity (+/-) +""" +from __future__ import annotations + +import logging + +from quart import ( + Blueprint, g, request, render_template, make_response, +) +from sqlalchemy import select +from sqlalchemy.orm import selectinload + +from models.calendars import CalendarEntry +from shared.infrastructure.cart_identity import current_cart_identity +from shared.browser.app.redis_cacher import clear_cache + +from .services.tickets import ( + create_ticket, + get_ticket_by_code, + get_user_tickets, + get_available_ticket_count, + get_tickets_for_entry, + get_sold_ticket_count, + get_user_reserved_count, + cancel_latest_reserved_ticket, +) + +logger = logging.getLogger(__name__) + + +def register() -> Blueprint: + bp = Blueprint("tickets", __name__, url_prefix="/tickets") + + @bp.get("/") + async def my_tickets(): + """List all tickets for the current user/session.""" + from shared.browser.app.utils.htmx import is_htmx_request + + ident = current_cart_identity() + tickets = await get_user_tickets( + g.s, + user_id=ident["user_id"], + session_id=ident["session_id"], + ) + + if not is_htmx_request(): + html = await render_template( + "_types/tickets/index.html", + tickets=tickets, + ) + 
else: + html = await render_template( + "_types/tickets/_main_panel.html", + tickets=tickets, + ) + + return await make_response(html, 200) + + @bp.get("//") + async def ticket_detail(code: str): + """View a single ticket with QR code.""" + from shared.browser.app.utils.htmx import is_htmx_request + + ticket = await get_ticket_by_code(g.s, code) + if not ticket: + return await make_response("Ticket not found", 404) + + # Verify ownership + ident = current_cart_identity() + if ident["user_id"] is not None: + if ticket.user_id != ident["user_id"]: + return await make_response("Ticket not found", 404) + elif ident["session_id"] is not None: + if ticket.session_id != ident["session_id"]: + return await make_response("Ticket not found", 404) + else: + return await make_response("Ticket not found", 404) + + if not is_htmx_request(): + html = await render_template( + "_types/tickets/detail.html", + ticket=ticket, + ) + else: + html = await render_template( + "_types/tickets/_detail_panel.html", + ticket=ticket, + ) + + return await make_response(html, 200) + + @bp.post("/buy/") + @clear_cache(tag="calendars", tag_scope="all") + async def buy_tickets(): + """ + Purchase tickets for a calendar entry. + Creates ticket records with state='reserved' (awaiting payment). 
+ + Form fields: + entry_id — the calendar entry ID + ticket_type_id (optional) — specific ticket type + quantity — number of tickets (default 1) + """ + form = await request.form + + entry_id_raw = form.get("entry_id", "").strip() + if not entry_id_raw: + return await make_response("Entry ID required", 400) + + try: + entry_id = int(entry_id_raw) + except ValueError: + return await make_response("Invalid entry ID", 400) + + # Load entry + entry = await g.s.scalar( + select(CalendarEntry) + .where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None), + ) + .options(selectinload(CalendarEntry.ticket_types)) + ) + if not entry: + return await make_response("Entry not found", 404) + + if entry.ticket_price is None: + return await make_response("Tickets not available for this entry", 400) + + # Check availability + available = await get_available_ticket_count(g.s, entry_id) + quantity = int(form.get("quantity", 1)) + if quantity < 1: + quantity = 1 + + if available is not None and quantity > available: + return await make_response( + f"Only {available} ticket(s) remaining", 400 + ) + + # Ticket type (optional) + ticket_type_id = None + tt_raw = form.get("ticket_type_id", "").strip() + if tt_raw: + try: + ticket_type_id = int(tt_raw) + except ValueError: + pass + + ident = current_cart_identity() + + # Create tickets + created = [] + for _ in range(quantity): + ticket = await create_ticket( + g.s, + entry_id=entry_id, + ticket_type_id=ticket_type_id, + user_id=ident["user_id"], + session_id=ident["session_id"], + state="reserved", + ) + created.append(ticket) + + # Re-check availability for display + remaining = await get_available_ticket_count(g.s, entry_id) + all_tickets = await get_tickets_for_entry(g.s, entry_id) + + html = await render_template( + "_types/tickets/_buy_result.html", + entry=entry, + created_tickets=created, + remaining=remaining, + all_tickets=all_tickets, + ) + return await make_response(html, 200) + + @bp.post("/adjust/") + 
@clear_cache(tag="calendars", tag_scope="all") + async def adjust_quantity(): + """ + Adjust ticket quantity for a calendar entry (+/- pattern). + Creates or cancels tickets to reach the target count. + + Form fields: + entry_id — the calendar entry ID + ticket_type_id — (optional) specific ticket type + count — target quantity of reserved tickets + """ + form = await request.form + + entry_id_raw = form.get("entry_id", "").strip() + if not entry_id_raw: + return await make_response("Entry ID required", 400) + try: + entry_id = int(entry_id_raw) + except ValueError: + return await make_response("Invalid entry ID", 400) + + # Load entry + entry = await g.s.scalar( + select(CalendarEntry) + .where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None), + ) + .options(selectinload(CalendarEntry.ticket_types)) + ) + if not entry: + return await make_response("Entry not found", 404) + if entry.ticket_price is None: + return await make_response("Tickets not available for this entry", 400) + + # Ticket type (optional) + ticket_type_id = None + tt_raw = form.get("ticket_type_id", "").strip() + if tt_raw: + try: + ticket_type_id = int(tt_raw) + except ValueError: + pass + + target = max(int(form.get("count", 0)), 0) + ident = current_cart_identity() + + current = await get_user_reserved_count( + g.s, entry_id, + user_id=ident["user_id"], + session_id=ident["session_id"], + ticket_type_id=ticket_type_id, + ) + + if target > current: + # Need to add tickets + to_add = target - current + available = await get_available_ticket_count(g.s, entry_id) + if available is not None and to_add > available: + return await make_response( + f"Only {available} ticket(s) remaining", 400 + ) + for _ in range(to_add): + await create_ticket( + g.s, + entry_id=entry_id, + ticket_type_id=ticket_type_id, + user_id=ident["user_id"], + session_id=ident["session_id"], + state="reserved", + ) + elif target < current: + # Need to remove tickets + to_remove = current - target + for _ in 
range(to_remove): + await cancel_latest_reserved_ticket( + g.s, entry_id, + user_id=ident["user_id"], + session_id=ident["session_id"], + ticket_type_id=ticket_type_id, + ) + + # Build context for re-rendering the buy form + ticket_remaining = await get_available_ticket_count(g.s, entry_id) + ticket_sold_count = await get_sold_ticket_count(g.s, entry_id) + user_ticket_count = await get_user_reserved_count( + g.s, entry_id, + user_id=ident["user_id"], + session_id=ident["session_id"], + ) + + # Per-type counts for multi-type entries + user_ticket_counts_by_type = {} + if entry.ticket_types: + for tt in entry.ticket_types: + if tt.deleted_at is None: + user_ticket_counts_by_type[tt.id] = await get_user_reserved_count( + g.s, entry_id, + user_id=ident["user_id"], + session_id=ident["session_id"], + ticket_type_id=tt.id, + ) + + # Compute cart count for OOB mini-cart update + from shared.services.registry import services + summary = await services.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + cart_count = summary.count + summary.calendar_count + summary.ticket_count + + html = await render_template( + "_types/tickets/_adjust_response.html", + entry=entry, + ticket_remaining=ticket_remaining, + ticket_sold_count=ticket_sold_count, + user_ticket_count=user_ticket_count, + user_ticket_counts_by_type=user_ticket_counts_by_type, + cart_count=cart_count, + ) + + return await make_response(html, 200) + + return bp diff --git a/events/bp/tickets/services/__init__.py b/events/bp/tickets/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/bp/tickets/services/tickets.py b/events/bp/tickets/services/tickets.py new file mode 100644 index 0000000..dab250c --- /dev/null +++ b/events/bp/tickets/services/tickets.py @@ -0,0 +1,313 @@ +""" +Ticket service layer — create, query, and manage tickets. 
+""" +from __future__ import annotations + +import uuid +from decimal import Decimal +from typing import Optional + +from sqlalchemy import select, update, func +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from models.calendars import Ticket, TicketType, CalendarEntry + + + +async def create_ticket( + session: AsyncSession, + *, + entry_id: int, + ticket_type_id: Optional[int] = None, + user_id: Optional[int] = None, + session_id: Optional[str] = None, + order_id: Optional[int] = None, + state: str = "reserved", +) -> Ticket: + """Create a single ticket with a unique code.""" + ticket = Ticket( + entry_id=entry_id, + ticket_type_id=ticket_type_id, + user_id=user_id, + session_id=session_id, + order_id=order_id, + code=uuid.uuid4().hex, + state=state, + ) + session.add(ticket) + await session.flush() + return ticket + + +async def create_tickets_for_order( + session: AsyncSession, + order_id: int, + user_id: Optional[int], + session_id: Optional[str], +) -> list[Ticket]: + """ + Create ticket records for all calendar entries in an order + that have ticket_price configured. + Called during checkout after calendar entries are transitioned to 'ordered'. 
+ """ + # Find all ordered entries for this order that have ticket pricing + result = await session.execute( + select(CalendarEntry) + .where( + CalendarEntry.order_id == order_id, + CalendarEntry.deleted_at.is_(None), + CalendarEntry.ticket_price.isnot(None), + ) + .options(selectinload(CalendarEntry.ticket_types)) + ) + entries = result.scalars().all() + + tickets = [] + for entry in entries: + if entry.ticket_types: + # Entry has specific ticket types — create one ticket per type + # (quantity handling can be added later) + for tt in entry.ticket_types: + if tt.deleted_at is None: + ticket = await create_ticket( + session, + entry_id=entry.id, + ticket_type_id=tt.id, + user_id=user_id, + session_id=session_id, + order_id=order_id, + state="reserved", + ) + tickets.append(ticket) + else: + # Simple ticket — one per entry + ticket = await create_ticket( + session, + entry_id=entry.id, + user_id=user_id, + session_id=session_id, + order_id=order_id, + state="reserved", + ) + tickets.append(ticket) + + return tickets + + +async def confirm_tickets_for_order( + session: AsyncSession, + order_id: int, +) -> int: + """ + Transition tickets from reserved → confirmed when payment succeeds. + Returns the number of tickets confirmed. 
+ """ + result = await session.execute( + update(Ticket) + .where( + Ticket.order_id == order_id, + Ticket.state == "reserved", + ) + .values(state="confirmed") + ) + return result.rowcount + + +async def get_ticket_by_code( + session: AsyncSession, + code: str, +) -> Optional[Ticket]: + """Look up a ticket by its unique code.""" + result = await session.execute( + select(Ticket) + .where(Ticket.code == code) + .options( + selectinload(Ticket.entry).selectinload(CalendarEntry.calendar), + selectinload(Ticket.ticket_type), + ) + ) + return result.scalar_one_or_none() + + +async def get_user_tickets( + session: AsyncSession, + user_id: Optional[int] = None, + session_id: Optional[str] = None, + state: Optional[str] = None, +) -> list[Ticket]: + """Get all tickets for a user or session.""" + filters = [] + if user_id is not None: + filters.append(Ticket.user_id == user_id) + elif session_id is not None: + filters.append(Ticket.session_id == session_id) + else: + return [] + + if state: + filters.append(Ticket.state == state) + else: + # Exclude cancelled by default + filters.append(Ticket.state != "cancelled") + + result = await session.execute( + select(Ticket) + .where(*filters) + .options( + selectinload(Ticket.entry).selectinload(CalendarEntry.calendar), + selectinload(Ticket.ticket_type), + ) + .order_by(Ticket.created_at.desc()) + ) + return result.scalars().all() + + +async def get_tickets_for_entry( + session: AsyncSession, + entry_id: int, +) -> list[Ticket]: + """Get all non-cancelled tickets for a calendar entry.""" + result = await session.execute( + select(Ticket) + .where( + Ticket.entry_id == entry_id, + Ticket.state != "cancelled", + ) + .options( + selectinload(Ticket.ticket_type), + ) + .order_by(Ticket.created_at.asc()) + ) + return result.scalars().all() + + +async def get_sold_ticket_count( + session: AsyncSession, + entry_id: int, +) -> int: + """Count all non-cancelled tickets for an entry (total sold/reserved).""" + result = await 
session.scalar( + select(func.count(Ticket.id)).where( + Ticket.entry_id == entry_id, + Ticket.state != "cancelled", + ) + ) + return result or 0 + + +async def get_user_reserved_count( + session: AsyncSession, + entry_id: int, + user_id: Optional[int] = None, + session_id: Optional[str] = None, + ticket_type_id: Optional[int] = None, +) -> int: + """Count reserved tickets for a specific user/session + entry + optional type.""" + filters = [ + Ticket.entry_id == entry_id, + Ticket.state == "reserved", + ] + if user_id is not None: + filters.append(Ticket.user_id == user_id) + elif session_id is not None: + filters.append(Ticket.session_id == session_id) + else: + return 0 + if ticket_type_id is not None: + filters.append(Ticket.ticket_type_id == ticket_type_id) + result = await session.scalar( + select(func.count(Ticket.id)).where(*filters) + ) + return result or 0 + + +async def cancel_latest_reserved_ticket( + session: AsyncSession, + entry_id: int, + user_id: Optional[int] = None, + session_id: Optional[str] = None, + ticket_type_id: Optional[int] = None, +) -> bool: + """Cancel the most recently created reserved ticket. Returns True if one was cancelled.""" + filters = [ + Ticket.entry_id == entry_id, + Ticket.state == "reserved", + ] + if user_id is not None: + filters.append(Ticket.user_id == user_id) + elif session_id is not None: + filters.append(Ticket.session_id == session_id) + else: + return False + if ticket_type_id is not None: + filters.append(Ticket.ticket_type_id == ticket_type_id) + + ticket = await session.scalar( + select(Ticket) + .where(*filters) + .order_by(Ticket.created_at.desc()) + .limit(1) + ) + if ticket: + ticket.state = "cancelled" + await session.flush() + return True + return False + + +async def get_available_ticket_count( + session: AsyncSession, + entry_id: int, +) -> Optional[int]: + """ + Get number of remaining tickets for an entry. + Returns None if unlimited. 
+ """ + entry = await session.scalar( + select(CalendarEntry).where( + CalendarEntry.id == entry_id, + CalendarEntry.deleted_at.is_(None), + ) + ) + if not entry or entry.ticket_price is None: + return None + if entry.ticket_count is None: + return None # Unlimited + + # Count non-cancelled tickets + sold = await session.scalar( + select(func.count(Ticket.id)).where( + Ticket.entry_id == entry_id, + Ticket.state != "cancelled", + ) + ) + return max(0, entry.ticket_count - (sold or 0)) + + +async def checkin_ticket( + session: AsyncSession, + code: str, +) -> tuple[bool, Optional[str]]: + """ + Check in a ticket by its code. + Returns (success, error_message). + """ + from datetime import datetime, timezone + + ticket = await get_ticket_by_code(session, code) + if not ticket: + return False, "Ticket not found" + + if ticket.state == "checked_in": + return False, "Ticket already checked in" + + if ticket.state == "cancelled": + return False, "Ticket is cancelled" + + if ticket.state not in ("confirmed", "reserved"): + return False, f"Ticket in unexpected state: {ticket.state}" + + ticket.state = "checked_in" + ticket.checked_in_at = datetime.now(timezone.utc) + return True, None diff --git a/events/config/app-config.yaml b/events/config/app-config.yaml new file mode 100644 index 0000000..3aa6a76 --- /dev/null +++ b/events/config/app-config.yaml @@ -0,0 +1,84 @@ +# App-wide settings +base_host: "wholesale.suma.coop" +base_login: https://wholesale.suma.coop/customer/account/login/ +base_url: https://wholesale.suma.coop/ +title: Rose Ash +market_root: /market +market_title: Market +blog_root: / +blog_title: all the news +cart_root: /cart +app_urls: + blog: "http://localhost:8000" + market: "http://localhost:8001" + cart: "http://localhost:8002" + events: "http://localhost:8003" + federation: "http://localhost:8004" +cache: + fs_root: _snapshot # <- path to your snapshot dir (note: default is relative) +categories: + allow: + Basics: basics + Branded Goods: branded-goods + Chilled: chilled 
+ Frozen: frozen + Non-foods: non-foods + Supplements: supplements + Christmas: christmas +slugs: + skip: + - "" + - customer + - account + - checkout + - wishlist + - sales + - contact + - privacy-policy + - terms-and-conditions + - delivery + - catalogsearch + - quickorder + - apply + - search + - static + - media +section-titles: + - ingredients + - allergy information + - allergens + - nutritional information + - nutrition + - storage + - directions + - preparation + - serving suggestions + - origin + - country of origin + - recycling + - general information + - additional information + - a note about prices + +blacklist: + category: + - branded-goods/alcoholic-drinks + - branded-goods/beers + - branded-goods/wines + - branded-goods/ciders + product: + - list-price-suma-current-suma-price-list-each-bk012-2-html + - ---just-lem-just-wholefoods-jelly-crystals-lemon-12-x-85g-vf067-2-html + product-details: + - General Information + - A Note About Prices + +# SumUp payment settings (fill these in for live usage) +sumup: + merchant_code: "ME4J6100" + currency: "GBP" + # Name of the environment variable that holds your SumUp API key + api_key_env: "SUMUP_API_KEY" + webhook_secret: "CHANGE_ME_TO_A_LONG_RANDOM_STRING" + checkout_reference_prefix: 'dev-' + diff --git a/events/entrypoint.sh b/events/entrypoint.sh new file mode 100644 index 0000000..9d2720e --- /dev/null +++ b/events/entrypoint.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Optional: wait for Postgres to be reachable +if [[ -n "${DATABASE_HOST:-}" && -n "${DATABASE_PORT:-}" ]]; then + echo "Waiting for Postgres at ${DATABASE_HOST}:${DATABASE_PORT}..." + for i in {1..60}; do + (echo > /dev/tcp/${DATABASE_HOST}/${DATABASE_PORT}) >/dev/null 2>&1 && break || true + sleep 1 + done +fi + +# NOTE: Events app does NOT run Alembic migrations. +# Migrations are managed by the blog app which owns the shared database schema. 
+ + # Clear Redis page cache on deploy + if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then + echo "Flushing Redis cache..." + python3 -c " +import redis, os +r = redis.from_url(os.environ['REDIS_URL']) +r.flushall() +print('Redis cache cleared.') +" || echo "Redis flush failed (non-fatal), continuing..." +fi + + # Start the app + echo "Starting Hypercorn (${APP_MODULE:-app:app})..." + PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" --bind 0.0.0.0:${PORT:-8000} diff --git a/events/models/__init__.py b/events/models/__init__.py new file mode 100644 index 0000000..4006b10 --- /dev/null +++ b/events/models/__init__.py @@ -0,0 +1,4 @@ +from .calendars import ( + Calendar, CalendarEntry, CalendarSlot, + TicketType, Ticket, CalendarEntryPost, +) diff --git a/events/models/calendars.py b/events/models/calendars.py new file mode 100644 index 0000000..02025ff --- /dev/null +++ b/events/models/calendars.py @@ -0,0 +1,4 @@ +from shared.models.calendars import ( # noqa: F401 + Calendar, CalendarEntry, CalendarSlot, + TicketType, Ticket, CalendarEntryPost, +) diff --git a/events/path_setup.py b/events/path_setup.py new file mode 100644 index 0000000..c7166f7 --- /dev/null +++ b/events/path_setup.py @@ -0,0 +1,9 @@ +import sys +import os + +_app_dir = os.path.dirname(os.path.abspath(__file__)) +_project_root = os.path.dirname(_app_dir) + +for _p in (_project_root, _app_dir): + if _p not in sys.path: + sys.path.insert(0, _p) diff --git a/events/services/__init__.py b/events/services/__init__.py new file mode 100644 index 0000000..e7ddf54 --- /dev/null +++ b/events/services/__init__.py @@ -0,0 +1,29 @@ +"""Events app service registration.""" +from __future__ import annotations + + +def register_domain_services() -> None: + """Register services for the events app. + + Events owns: Calendar, CalendarEntry, CalendarSlot, TicketType, + Ticket, CalendarEntryPost. + Standard deployment registers all 5 services as real DB impls + (shared DB). 
For composable deployments, swap non-owned services + with stubs from shared.services.stubs. + """ + from shared.services.registry import services + from shared.services.blog_impl import SqlBlogService + from shared.services.calendar_impl import SqlCalendarService + from shared.services.market_impl import SqlMarketService + from shared.services.cart_impl import SqlCartService + + services.calendar = SqlCalendarService() + if not services.has("blog"): + services.blog = SqlBlogService() + if not services.has("market"): + services.market = SqlMarketService() + if not services.has("cart"): + services.cart = SqlCartService() + if not services.has("federation"): + from shared.services.federation_impl import SqlFederationService + services.federation = SqlFederationService() diff --git a/events/templates/_types/all_events/_card.html b/events/templates/_types/all_events/_card.html new file mode 100644 index 0000000..0005563 --- /dev/null +++ b/events/templates/_types/all_events/_card.html @@ -0,0 +1,62 @@ +{# List card for all events — one entry #} +{% set pi = page_info.get(entry.calendar_container_id, {}) %} +{% set page_slug = pi.get('slug', '') %} +{% set page_title = pi.get('title') %} +
    +
    + {# Left: event info #} +
    + {% if page_slug %} + {% set day_href = events_url('/' ~ page_slug ~ '/calendars/' ~ entry.calendar_slug ~ '/day/' ~ entry.start_at.strftime('%Y/%-m/%-d') ~ '/') %} + {% else %} + {% set day_href = '' %} + {% endif %} + {% set entry_href = day_href ~ 'entries/' ~ entry.id ~ '/' if day_href else '' %} + {% if entry_href %} + +

    {{ entry.name }}

    +
    + {% else %} +

    {{ entry.name }}

    + {% endif %} + +
    + {% if page_title %} + + {{ page_title }} + + {% endif %} + {% if entry.calendar_name %} + + {{ entry.calendar_name }} + + {% endif %} +
    + +
    + {% if day_href %} + {{ entry.start_at.strftime('%a %-d %b') }} · + {% else %} + {{ entry.start_at.strftime('%a %-d %b') }} · + {% endif %} + {{ entry.start_at.strftime('%H:%M') }}{% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    + + {% if entry.cost %} +
    + £{{ '%.2f'|format(entry.cost) }} +
    + {% endif %} +
    + + {# Right: ticket widget #} + {% if entry.ticket_price is not none %} +
    + {% set qty = pending_tickets.get(entry.id, 0) %} + {% set ticket_url = url_for('all_events.adjust_ticket') %} + {% include '_types/page_summary/_ticket_widget.html' %} +
    + {% endif %} +
    +
    diff --git a/events/templates/_types/all_events/_card_tile.html b/events/templates/_types/all_events/_card_tile.html new file mode 100644 index 0000000..3f8855f --- /dev/null +++ b/events/templates/_types/all_events/_card_tile.html @@ -0,0 +1,60 @@ +{# Tile card for all events — compact event tile #} +{% set pi = page_info.get(entry.calendar_container_id, {}) %} +{% set page_slug = pi.get('slug', '') %} +{% set page_title = pi.get('title') %} +
    + {% if page_slug %} + {% set day_href = events_url('/' ~ page_slug ~ '/calendars/' ~ entry.calendar_slug ~ '/day/' ~ entry.start_at.strftime('%Y/%-m/%-d') ~ '/') %} + {% else %} + {% set day_href = '' %} + {% endif %} + {% set entry_href = day_href ~ 'entries/' ~ entry.id ~ '/' if day_href else '' %} +
    + {% if entry_href %} + +

    {{ entry.name }}

    +
    + {% else %} +

    {{ entry.name }}

    + {% endif %} + +
    + {% if page_title %} + + {{ page_title }} + + {% endif %} + {% if entry.calendar_name %} + + {{ entry.calendar_name }} + + {% endif %} +
    + +
    + {% if day_href %} + {{ entry.start_at.strftime('%a %-d %b') }} + {% else %} + {{ entry.start_at.strftime('%a %-d %b') }} + {% endif %} + · + {{ entry.start_at.strftime('%H:%M') }}{% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    + + {% if entry.cost %} +
    + £{{ '%.2f'|format(entry.cost) }} +
    + {% endif %} +
    + + {# Ticket widget below card #} + {% if entry.ticket_price is not none %} +
    + {% set qty = pending_tickets.get(entry.id, 0) %} + {% set ticket_url = url_for('all_events.adjust_ticket') %} + {% include '_types/page_summary/_ticket_widget.html' %} +
    + {% endif %} +
    diff --git a/events/templates/_types/all_events/_cards.html b/events/templates/_types/all_events/_cards.html new file mode 100644 index 0000000..0e3c6b8 --- /dev/null +++ b/events/templates/_types/all_events/_cards.html @@ -0,0 +1,31 @@ +{% for entry in entries %} + {% if view == 'tile' %} + {% include "_types/all_events/_card_tile.html" %} + {% else %} + {# Date header when date changes (list view only) #} + {% set entry_date = entry.start_at.strftime('%A %-d %B %Y') %} + {% if loop.first or entry_date != entries[loop.index0 - 1].start_at.strftime('%A %-d %B %Y') %} +
    +

    + {{ entry_date }} +

    +
    + {% endif %} + {% include "_types/all_events/_card.html" %} + {% endif %} +{% endfor %} +{% if has_more %} + {# Infinite scroll sentinel #} + {% set entries_url = url_for('all_events.entries_fragment', page=page + 1, view=view if view != 'list' else '')|host %} + +{% endif %} diff --git a/events/templates/_types/all_events/_main_panel.html b/events/templates/_types/all_events/_main_panel.html new file mode 100644 index 0000000..0130973 --- /dev/null +++ b/events/templates/_types/all_events/_main_panel.html @@ -0,0 +1,54 @@ +{# View toggle bar - desktop only #} + + +{# Cards container - list or grid based on view #} +{% if entries %} + {% if view == 'tile' %} +
    + {% include "_types/all_events/_cards.html" %} +
    + {% else %} +
    + {% include "_types/all_events/_cards.html" %} +
    + {% endif %} +{% else %} +
    + +

    No upcoming events

    +
    +{% endif %} +
    diff --git a/events/templates/_types/all_events/index.html b/events/templates/_types/all_events/index.html new file mode 100644 index 0000000..00a9696 --- /dev/null +++ b/events/templates/_types/all_events/index.html @@ -0,0 +1,7 @@ +{% extends '_types/root/_index.html' %} + +{% block meta %}{% endblock %} + +{% block content %} + {% include '_types/all_events/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/calendar/_description.html b/events/templates/_types/calendar/_description.html new file mode 100644 index 0000000..0f04f3a --- /dev/null +++ b/events/templates/_types/calendar/_description.html @@ -0,0 +1,12 @@ +{% macro description(calendar, oob=False) %} +
    + {{ calendar.description or ''}} +
    + +{% endmacro %} \ No newline at end of file diff --git a/events/templates/_types/calendar/_main_panel.html b/events/templates/_types/calendar/_main_panel.html new file mode 100644 index 0000000..7c0ffde --- /dev/null +++ b/events/templates/_types/calendar/_main_panel.html @@ -0,0 +1,170 @@ +
    +
    + + {# Month / year navigation #} + +
    + + {# Calendar grid #} +
    + {# Weekday header: only show on sm+ (desktop/tablet) #} + + + {# On mobile: 1 column; on sm+: 7 columns #} +
    + {% for week in weeks %} + {% for day in week %} +
    +
    +
    + + {{ day.date.strftime('%a') }} + + + {# Clickable day number: goes to day detail view #} + + {{ day.date.day }} + +
    +
    + {# Entries for this day: merged, chronological #} +
    + {# Build a list of entries for this specific day. + month_entries is already sorted by start_at in Python. #} + {% for e in month_entries %} + {% if e.start_at.date() == day.date %} + {# Decide colour: highlight "mine" differently if you want #} + {% set is_mine = (g.user and e.user_id == g.user.id) + or (not g.user and e.session_id == qsession.get('calendar_sid')) %} +
    + + {{ e.name }} + + + {{ (e.state or 'pending')|replace('_', ' ') }} + +
    + {% endif %} + {% endfor %} + +
    +
    + {% endfor %} + {% endfor %} +
    +
    diff --git a/events/templates/_types/calendar/_nav.html b/events/templates/_types/calendar/_nav.html new file mode 100644 index 0000000..d3ef2cd --- /dev/null +++ b/events/templates/_types/calendar/_nav.html @@ -0,0 +1,18 @@ + +{% import 'macros/links.html' as links %} +{% call links.link( + url_for('calendars.calendar.slots.get', calendar_slug=calendar.slug), + hx_select_search, + select_colours, + True, + aclass=styles.nav_button +) %} + +
    + Slots +
    +{% endcall %} +{% if g.rights.admin %} + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{ admin_nav_item(url_for('calendars.calendar.admin.admin', calendar_slug=calendar.slug)) }} +{% endif %} \ No newline at end of file diff --git a/events/templates/_types/calendar/_oob_elements.html b/events/templates/_types/calendar/_oob_elements.html new file mode 100644 index 0000000..1447e24 --- /dev/null +++ b/events/templates/_types/calendar/_oob_elements.html @@ -0,0 +1,22 @@ +{% extends "oob_elements.html" %} +{# OOB elements for post admin page #} + + + + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-header-child', 'calendar-header-child', '_types/calendar/header/_header.html')}} + + {% from '_types/post/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} +{% include '_types/calendar/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/calendar/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/calendar/admin/_description.html b/events/templates/_types/calendar/admin/_description.html new file mode 100644 index 0000000..46d99cb --- /dev/null +++ b/events/templates/_types/calendar/admin/_description.html @@ -0,0 +1,32 @@ +
    + {% if calendar.description %} +

    + {{ calendar.description }} +

    + {% else %} +

    + No description yet. +

    + {% endif %} + + +
    + +{% if oob %} + + {% from '_types/calendar/_description.html' import description %} + {{description(calendar, oob=True)}} +{% endif %} + + diff --git a/events/templates/_types/calendar/admin/_description_edit.html b/events/templates/_types/calendar/admin/_description_edit.html new file mode 100644 index 0000000..4ab7a7b --- /dev/null +++ b/events/templates/_types/calendar/admin/_description_edit.html @@ -0,0 +1,41 @@ +
    +
    + + + + +
    + + + +
    +
    +
    diff --git a/events/templates/_types/calendar/admin/_main_panel.html b/events/templates/_types/calendar/admin/_main_panel.html new file mode 100644 index 0000000..9c3e1a6 --- /dev/null +++ b/events/templates/_types/calendar/admin/_main_panel.html @@ -0,0 +1,45 @@ + +
    + +
    +

    Calendar configuration

    +
    +
    + + {% include '_types/calendar/admin/_description.html' %} +
    + + +
    + +
    + +
    diff --git a/events/templates/_types/calendar/admin/_nav.html b/events/templates/_types/calendar/admin/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/calendar/admin/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/calendar/admin/_oob_elements.html b/events/templates/_types/calendar/admin/_oob_elements.html new file mode 100644 index 0000000..ec6244c --- /dev/null +++ b/events/templates/_types/calendar/admin/_oob_elements.html @@ -0,0 +1,25 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for calendar admin page #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('calendar-header-child', 'calendar-admin-header-child', '_types/calendar/admin/header/_header.html')}} + + {% from '_types/calendar/header/_header.html' import header_row with context %} + {{header_row(oob=True)}} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/calendar/admin/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/calendar/admin/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/calendar/admin/header/_header.html b/events/templates/_types/calendar/admin/header/_header.html new file mode 100644 index 0000000..a138229 --- /dev/null +++ b/events/templates/_types/calendar/admin/header/_header.html @@ -0,0 +1,13 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='calendar-admin-row', oob=oob) %} + {% call links.link( + hx_select_search + ) %} + {{ links.admin() }} + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/calendar/admin/_nav.html' %} 
+ {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/events/templates/_types/calendar/admin/index.html b/events/templates/_types/calendar/admin/index.html new file mode 100644 index 0000000..c27d6d2 --- /dev/null +++ b/events/templates/_types/calendar/admin/index.html @@ -0,0 +1,24 @@ +{% extends '_types/calendar/index.html' %} +{% import 'macros/layout.html' as layout %} + +{% block calendar_header_child %} + {% from '_types/root/_n/macros.html' import header with context %} + {% call header() %} + {% from '_types/calendar/admin/header/_header.html' import header_row with context %} + {{ header_row() }} +
    + {% block calendar_admin_header_child %} + {% endblock %} +
    + {% endcall %} +{% endblock %} + + + +{% block _main_mobile_menu %} + {% include '_types/calendar/admin/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/calendar/admin/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/calendar/header/_header.html b/events/templates/_types/calendar/header/_header.html new file mode 100644 index 0000000..2f4ecf0 --- /dev/null +++ b/events/templates/_types/calendar/header/_header.html @@ -0,0 +1,23 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='calendar-row', oob=oob) %} + {% call links.link(url_for('calendars.calendar.get', calendar_slug=calendar.slug), hx_select_search) %} +
    +
    + +
    + {{ calendar.name }} +
    +
    + {% from '_types/calendar/_description.html' import description %} + {{description(calendar)}} +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/calendar/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/events/templates/_types/calendar/index.html b/events/templates/_types/calendar/index.html new file mode 100644 index 0000000..bdd0b49 --- /dev/null +++ b/events/templates/_types/calendar/index.html @@ -0,0 +1,26 @@ +{% extends '_types/root/_index.html' %} + +{% block meta %}{% endblock %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-header-child', '_types/post/header/_header.html') %} + {% call index_row('post-admin-header-child', '_types/post/admin/header/_header.html') %} + {% call index_row('calendar-header-child', '_types/calendar/header/_header.html') %} + {% block calendar_header_child %} + {% endblock %} + {% endcall %} + {% endcall %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} + {% include '_types/calendar/_nav.html' %} +{% endblock %} + + + +{% block content %} + {% include '_types/calendar/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/calendars/_calendars_list.html b/events/templates/_types/calendars/_calendars_list.html new file mode 100644 index 0000000..87d482c --- /dev/null +++ b/events/templates/_types/calendars/_calendars_list.html @@ -0,0 +1,44 @@ + {% for row in calendars %} + {% set cal = row %} +
    +
    + + {% set calendar_href = url_for('calendars.calendar.get', calendar_slug=cal.slug)|host %} + +

    {{ cal.name }}

    +

    /{{ cal.slug }}/

    +
    + + + + +
    +
    + {% else %} +

    No calendars yet. Create one above.

    + {% endfor %} diff --git a/events/templates/_types/calendars/_main_panel.html b/events/templates/_types/calendars/_main_panel.html new file mode 100644 index 0000000..7b9aa7c --- /dev/null +++ b/events/templates/_types/calendars/_main_panel.html @@ -0,0 +1,27 @@ +
    + {% if has_access('calendars.create_calendar') %} + +
    + +
    + +
    + + +
    + +
    + {% endif %} + +
    + {% include "_types/calendars/_calendars_list.html" %} +
    +
    \ No newline at end of file diff --git a/events/templates/_types/calendars/_nav.html b/events/templates/_types/calendars/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/calendars/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/calendars/_oob_elements.html b/events/templates/_types/calendars/_oob_elements.html new file mode 100644 index 0000000..6de3bea --- /dev/null +++ b/events/templates/_types/calendars/_oob_elements.html @@ -0,0 +1,28 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{# Header with app title - includes cart-mini, navigation, and market-specific header #} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-admin-header-child', 'calendars-header-child', '_types/calendars/header/_header.html')}} + + {% from '_types/post/admin/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/calendars/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/calendars/_main_panel.html" %} +{% endblock %} + + diff --git a/events/templates/_types/calendars/header/_header.html b/events/templates/_types/calendars/header/_header.html new file mode 100644 index 0000000..047e8a3 --- /dev/null +++ b/events/templates/_types/calendars/header/_header.html @@ -0,0 +1,14 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='calendars-row', oob=oob) %} + {% call links.link(url_for('calendars.home'), hx_select_search) %} + +
    + Calendars +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/calendars/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/events/templates/_types/calendars/index.html b/events/templates/_types/calendars/index.html new file mode 100644 index 0000000..d958a0c --- /dev/null +++ b/events/templates/_types/calendars/index.html @@ -0,0 +1,26 @@ +{% extends '_types/root/_index.html' %} + +{% block meta %}{% endblock %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-header-child', '_types/post/header/_header.html') %} + {% call index_row('post-admin-header-child', '_types/post/admin/header/_header.html') %} + {% call index_row('calendars-header-child', '_types/calendars/header/_header.html') %} + {% block calendars_header_child %} + {% endblock %} + {% endcall %} + {% endcall %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} + {% include '_types/calendars/_nav.html' %} +{% endblock %} + + + +{% block content %} + {% include '_types/calendars/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/day/_add.html b/events/templates/_types/day/_add.html new file mode 100644 index 0000000..ed08280 --- /dev/null +++ b/events/templates/_types/day/_add.html @@ -0,0 +1,299 @@ +
    + +
    + + + {# 1) Entry name #} + + + {# 2) Slot picker for this weekday (required) #} + {% if day_slots %} + + {% else %} +
    + No slots defined for this day. +
    + {% endif %} + + {# 3) Time entry + cost display #} +
    + {# Time inputs — hidden until a flexible slot is selected #} + + + {# Cost display — shown when a slot is selected #} + + + {# Summary of fixed times — shown for non-flexible slots #} + +
    + + {# Ticket Configuration #} +
    +

    Ticket Configuration (Optional)

    +
    +
    + + +
    +
    + + +
    +
    +
    + +
    + + + +
    +
    + +{# --- Behaviour: lock / unlock times based on slot.flexible --- #} + \ No newline at end of file diff --git a/events/templates/_types/day/_add_button.html b/events/templates/_types/day/_add_button.html new file mode 100644 index 0000000..e92a174 --- /dev/null +++ b/events/templates/_types/day/_add_button.html @@ -0,0 +1,16 @@ + + diff --git a/events/templates/_types/day/_main_panel.html b/events/templates/_types/day/_main_panel.html new file mode 100644 index 0000000..0eea6f0 --- /dev/null +++ b/events/templates/_types/day/_main_panel.html @@ -0,0 +1,28 @@ +
    + + + + + + + + + + + + + {% for entry in day_entries %} + {% include '_types/day/_row.html' %} + {% else %} + + {% endfor %} + + + +
    NameSlot/TimeStateCostTicketsActions
    No entries yet.
    + +
    + {% include '_types/day/_add_button.html' %} +
    + +
    diff --git a/events/templates/_types/day/_nav.html b/events/templates/_types/day/_nav.html new file mode 100644 index 0000000..41d541f --- /dev/null +++ b/events/templates/_types/day/_nav.html @@ -0,0 +1,39 @@ +{% import 'macros/links.html' as links %} + +{# Confirmed Entries - vertical on mobile, horizontal with arrows on desktop #} +
    + {% from 'macros/scrolling_menu.html' import scrolling_menu with context %} + {% call(entry) scrolling_menu('day-entries-container', confirmed_entries) %} + +
    +
    {{ entry.name }}
    +
    + {{ entry.start_at.strftime('%H:%M') }} + {% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    +
    +
    + {% endcall %} +
    + +{# Admin link #} +{% if g.rights.admin %} + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{admin_nav_item( + url_for( + 'calendars.calendar.day.admin.admin', + calendar_slug=calendar.slug, + year=day_date.year, + month=day_date.month, + day=day_date.day + ) + )}} +{% endif %} \ No newline at end of file diff --git a/events/templates/_types/day/_oob_elements.html b/events/templates/_types/day/_oob_elements.html new file mode 100644 index 0000000..812e6b0 --- /dev/null +++ b/events/templates/_types/day/_oob_elements.html @@ -0,0 +1,18 @@ +{% extends "oob_elements.html" %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('calendar-header-child', 'day-header-child', '_types/day/header/_header.html')}} + + {% from '_types/calendar/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/day/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/day/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/day/_row.html b/events/templates/_types/day/_row.html new file mode 100644 index 0000000..1c3138d --- /dev/null +++ b/events/templates/_types/day/_row.html @@ -0,0 +1,74 @@ +{% import 'macros/links.html' as links %} + + +
    + {% call links.link( + url_for( + 'calendars.calendar.day.calendar_entries.calendar_entry.get', + calendar_slug=calendar.slug, + day=day, + month=month, + year=year, + entry_id=entry.id + ), + hx_select_search, + aclass=styles.pill + ) %} + {{ entry.name }} + {% endcall %} +
    + + + {% if entry.slot %} +
    + {% call links.link( + url_for( + 'calendars.calendar.slots.slot.get', + calendar_slug=calendar.slug, + slot_id=entry.slot.id + ), + hx_select_search, + aclass=styles.pill + ) %} + {{ entry.slot.name }} + {% endcall %} + + ({{ entry.slot.time_start.strftime('%H:%M') }}{% if entry.slot.time_end %} → {{ entry.slot.time_end.strftime('%H:%M') }}{% endif %}) + +
    + {% else %} +
    + {% include '_types/entry/_times.html' %} +
    + {% endif %} + + +
    + {% include '_types/entry/_state.html' %} +
    + + + + £{{ ('%.2f'|format(entry.cost)) if entry.cost is not none else '0.00' }} + + + + {% if entry.ticket_price is not none %} +
    +
    £{{ ('%.2f'|format(entry.ticket_price)) }}
    +
    + {% if entry.ticket_count is not none %} + {{ entry.ticket_count }} tickets + {% else %} + Unlimited + {% endif %} +
    +
    + {% else %} + No tickets + {% endif %} + + + {% include '_types/entry/_options.html' %} + + \ No newline at end of file diff --git a/events/templates/_types/day/admin/_main_panel.html b/events/templates/_types/day/admin/_main_panel.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/day/admin/_main_panel.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/day/admin/_nav.html b/events/templates/_types/day/admin/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/day/admin/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/day/admin/_nav_entries_oob.html b/events/templates/_types/day/admin/_nav_entries_oob.html new file mode 100644 index 0000000..c8be72c --- /dev/null +++ b/events/templates/_types/day/admin/_nav_entries_oob.html @@ -0,0 +1,33 @@ +{# OOB swap for day confirmed entries nav when entries are edited #} +{% import 'macros/links.html' as links %} + +{# Confirmed Entries - vertical on mobile, horizontal with arrows on desktop #} +{% if confirmed_entries %} +
    + {% from 'macros/scrolling_menu.html' import scrolling_menu with context %} + {% call(entry) scrolling_menu('day-entries-container', confirmed_entries) %} + +
    +
    {{ entry.name }}
    +
    + {{ entry.start_at.strftime('%H:%M') }} + {% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    +
    +
    + {% endcall %} +
    +{% else %} + {# Empty placeholder to remove nav entries when none are confirmed #} +
    +{% endif %} diff --git a/events/templates/_types/day/admin/_oob_elements.html b/events/templates/_types/day/admin/_oob_elements.html new file mode 100644 index 0000000..20986bf --- /dev/null +++ b/events/templates/_types/day/admin/_oob_elements.html @@ -0,0 +1,25 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for calendar admin page #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('day-header-child', 'day-admin-header-child', '_types/day/admin/header/_header.html')}} + + {% from '_types/calendar/header/_header.html' import header_row with context %} + {{header_row(oob=True)}} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/day/admin/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/day/admin/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/day/admin/header/_header.html b/events/templates/_types/day/admin/header/_header.html new file mode 100644 index 0000000..f3af170 --- /dev/null +++ b/events/templates/_types/day/admin/header/_header.html @@ -0,0 +1,20 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='day-admin-row', oob=oob) %} + {% call links.link( + url_for( + 'calendars.calendar.day.admin.admin', + calendar_slug=calendar.slug, + year=day_date.year, + month=day_date.month, + day=day_date.day + ), + hx_select_search + ) %} + {{ links.admin() }} + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/day/admin/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/events/templates/_types/day/admin/index.html b/events/templates/_types/day/admin/index.html new file mode 100644 index 0000000..f4f37b5 
--- /dev/null +++ b/events/templates/_types/day/admin/index.html @@ -0,0 +1,24 @@ +{% extends '_types/day/index.html' %} +{% import 'macros/layout.html' as layout %} +{% import 'macros/links.html' as links %} + + +{% block day_header_child %} + {% from '_types/root/_n/macros.html' import header with context %} + {% call header() %} + {% from '_types/day/admin/header/_header.html' import header_row with context %} + {{ header_row() }} +
    + {% block day_admin_header_child %} + {% endblock %} +
    + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/day/admin/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/day/admin/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/day/header/_header.html b/events/templates/_types/day/header/_header.html new file mode 100644 index 0000000..5774492 --- /dev/null +++ b/events/templates/_types/day/header/_header.html @@ -0,0 +1,26 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='day-row', oob=oob) %} + {% call links.link( + url_for( + 'calendars.calendar.day.show_day', + calendar_slug=calendar.slug, + year=day_date.year, + month=day_date.month, + day=day_date.day + ), + hx_select_search, + ) %} +
    + + {{ day_date.strftime('%A %d %B %Y') }} +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/day/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/events/templates/_types/day/index.html b/events/templates/_types/day/index.html new file mode 100644 index 0000000..655ee55 --- /dev/null +++ b/events/templates/_types/day/index.html @@ -0,0 +1,18 @@ +{% extends '_types/calendar/index.html' %} + +{% block calendar_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('day-header-child', '_types/day/header/_header.html') %} + {% block day_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/day/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include '_types/day/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/entry/_edit.html b/events/templates/_types/entry/_edit.html new file mode 100644 index 0000000..5467179 --- /dev/null +++ b/events/templates/_types/entry/_edit.html @@ -0,0 +1,332 @@ +
    + + +
    + +
    + + + + +
    + + +
    + + +
    + + {% if day_slots %} + + {% else %} +
    + No slots defined for this day. +
    + {% endif %} +
    + + + + + + + + + + + +
    +

    Ticket Configuration

    + +
    +
    + + +

    Leave empty if no tickets needed

    +
    + +
    + + +

    Leave empty for unlimited

    +
    +
    +
    + +
    + + + + + + + +
    + +
    +
    + +{# --- Behaviour: lock / unlock times based on slot.flexible --- #} + \ No newline at end of file diff --git a/events/templates/_types/entry/_main_panel.html b/events/templates/_types/entry/_main_panel.html new file mode 100644 index 0000000..902ffa4 --- /dev/null +++ b/events/templates/_types/entry/_main_panel.html @@ -0,0 +1,128 @@ +
    + + +
    +
    + Name +
    +
    + {{ entry.name }} +
    +
    + + +
    +
    + Slot +
    +
    + {% if entry.slot %} + + {{ entry.slot.name }} + + {% if entry.slot.flexible %} + (flexible) + {% else %} + (fixed) + {% endif %} + {% else %} + No slot assigned + {% endif %} +
    +
    + + +
    +
    + Time Period +
    +
    + {{ entry.start_at.strftime('%H:%M') }} + {% if entry.end_at %} + – {{ entry.end_at.strftime('%H:%M') }} + {% else %} + – open-ended + {% endif %} +
    +
    + + +
    +
    + State +
    +
    +
    + {% include '_types/entry/_state.html' %} +
    +
    +
    + + +
    +
    + Cost +
    +
    + + £{{ ('%.2f'|format(entry.cost)) if entry.cost is not none else '0.00' }} + +
    +
    + + +
    +
    + Tickets +
    +
    + {% include '_types/entry/_tickets.html' %} +
    +
    + + + {% include '_types/tickets/_buy_form.html' %} + + +
    +
    + Date +
    +
    + {{ entry.start_at.strftime('%A, %B %d, %Y') }} +
    +
    + + +
    +
    + Associated Posts +
    +
    + {% include '_types/entry/_posts.html' %} +
    +
    + + +
    + {% include '_types/entry/_options.html' %} + + +
    + +
    \ No newline at end of file diff --git a/events/templates/_types/entry/_nav.html b/events/templates/_types/entry/_nav.html new file mode 100644 index 0000000..bdfe325 --- /dev/null +++ b/events/templates/_types/entry/_nav.html @@ -0,0 +1,39 @@ +{% import 'macros/links.html' as links %} + +{# Associated Posts - vertical on mobile, horizontal with arrows on desktop #} +
    + {% from 'macros/scrolling_menu.html' import scrolling_menu with context %} + {% call(entry_post) scrolling_menu('entry-posts-container', entry_posts) %} + + {% if entry_post.feature_image %} + {{ entry_post.title }} + {% else %} +
    + {% endif %} +
    +
    {{ entry_post.title }}
    +
    +
    + {% endcall %} +
    + +{# Admin link #} +{% if g.rights.admin %} + + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{admin_nav_item( + url_for( + 'calendars.calendar.day.calendar_entries.calendar_entry.admin.admin', + calendar_slug=calendar.slug, + day=day, + month=month, + year=year, + entry_id=entry.id + ) + )}} +{% endif %} diff --git a/events/templates/_types/entry/_oob_elements.html b/events/templates/_types/entry/_oob_elements.html new file mode 100644 index 0000000..8981fa1 --- /dev/null +++ b/events/templates/_types/entry/_oob_elements.html @@ -0,0 +1,18 @@ +{% extends "oob_elements.html" %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('day-header-child', 'entry-header-child', '_types/entry/header/_header.html')}} + + {% from '_types/day/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/entry/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/entry/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/entry/_optioned.html b/events/templates/_types/entry/_optioned.html new file mode 100644 index 0000000..ba23391 --- /dev/null +++ b/events/templates/_types/entry/_optioned.html @@ -0,0 +1,9 @@ + +{% include '_types/entry/_options.html' %} +
    + {% include '_types/entry/_title.html' %} +
    + +
    + {% include '_types/entry/_state.html' %} +
    \ No newline at end of file diff --git a/events/templates/_types/entry/_options.html b/events/templates/_types/entry/_options.html new file mode 100644 index 0000000..d33ae4c --- /dev/null +++ b/events/templates/_types/entry/_options.html @@ -0,0 +1,95 @@ +
    + {% if entry.state == 'provisional' %} +
    + + +
    +
    + + +
    + {% endif %} + {% if entry.state == 'confirmed' %} +
    + + + +
    + {% endif %} +
    \ No newline at end of file diff --git a/events/templates/_types/entry/_post_search_results.html b/events/templates/_types/entry/_post_search_results.html new file mode 100644 index 0000000..297cd70 --- /dev/null +++ b/events/templates/_types/entry/_post_search_results.html @@ -0,0 +1,105 @@ +{% for search_post in search_posts %} +
    + + + +
    +{% endfor %} + +{# Infinite scroll sentinel #} +{% if page < total_pages|int %} + +{% elif search_posts %} +
    + End of results +
    +{% endif %} diff --git a/events/templates/_types/entry/_posts.html b/events/templates/_types/entry/_posts.html new file mode 100644 index 0000000..122442e --- /dev/null +++ b/events/templates/_types/entry/_posts.html @@ -0,0 +1,72 @@ + +
    + {% if entry_posts %} +
    + {% for entry_post in entry_posts %} +
    + {% if entry_post.feature_image %} + {{ entry_post.title }} + {% else %} +
    + {% endif %} + {{ entry_post.title }} + +
    + {% endfor %} +
    + {% else %} +

    No posts associated

    + {% endif %} + + +
    + + +
    +
    +
    diff --git a/events/templates/_types/entry/_state.html b/events/templates/_types/entry/_state.html new file mode 100644 index 0000000..b67254a --- /dev/null +++ b/events/templates/_types/entry/_state.html @@ -0,0 +1,15 @@ +{% if entry.state %} + + {{ entry.state|capitalize }} + + {% endif %} \ No newline at end of file diff --git a/events/templates/_types/entry/_tickets.html b/events/templates/_types/entry/_tickets.html new file mode 100644 index 0000000..3d9613a --- /dev/null +++ b/events/templates/_types/entry/_tickets.html @@ -0,0 +1,104 @@ +{% if entry.ticket_price is not none %} + {# Tickets are configured #} +
    +
    + Price: + + £{{ ('%.2f'|format(entry.ticket_price)) }} + +
    +
    + Available: + + {% if entry.ticket_count is not none %} + {{ entry.ticket_count }} tickets + {% else %} + Unlimited + {% endif %} + +
    + +
    +{% else %} + {# No tickets configured #} +
    + No tickets configured + +
    +{% endif %} + +{# Ticket configuration form (hidden by default) #} +
    + +
    + + +
    + +
    + + +
    + +
    + + +
    +
    diff --git a/events/templates/_types/entry/_times.html b/events/templates/_types/entry/_times.html new file mode 100644 index 0000000..3543fe4 --- /dev/null +++ b/events/templates/_types/entry/_times.html @@ -0,0 +1,5 @@ +{% from 'macros/date.html' import t %} +
    + {{ t(entry.start_at) }} + {% if entry.end_at %} → {{ t(entry.end_at) }}{% endif %} +
    \ No newline at end of file diff --git a/events/templates/_types/entry/_title.html b/events/templates/_types/entry/_title.html new file mode 100644 index 0000000..3c1dc63 --- /dev/null +++ b/events/templates/_types/entry/_title.html @@ -0,0 +1,3 @@ + + {{ entry.name }} + {% include '_types/entry/_state.html' %} diff --git a/events/templates/_types/entry/admin/_main_panel.html b/events/templates/_types/entry/admin/_main_panel.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/entry/admin/_main_panel.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/entry/admin/_nav.html b/events/templates/_types/entry/admin/_nav.html new file mode 100644 index 0000000..9db8ac0 --- /dev/null +++ b/events/templates/_types/entry/admin/_nav.html @@ -0,0 +1,17 @@ +{% import 'macros/links.html' as links %} +{% call links.link( + url_for( + 'calendars.calendar.day.calendar_entries.calendar_entry.ticket_types.get', + calendar_slug=calendar.slug, + entry_id=entry.id, + year=year, + month=month, + day=day + ), + hx_select_search, + select_colours, + True, + aclass=styles.nav_button, +)%} + ticket_types +{% endcall %} diff --git a/events/templates/_types/entry/admin/_nav_posts_oob.html b/events/templates/_types/entry/admin/_nav_posts_oob.html new file mode 100644 index 0000000..25ef1f1 --- /dev/null +++ b/events/templates/_types/entry/admin/_nav_posts_oob.html @@ -0,0 +1,31 @@ +{# OOB swap for entry posts nav when posts are associated/disassociated #} +{% import 'macros/links.html' as links %} + +{# Associated Posts - vertical on mobile, horizontal with arrows on desktop #} +{% if entry_posts %} +
    + {% from 'macros/scrolling_menu.html' import scrolling_menu with context %} + {% call(entry_post) scrolling_menu('entry-posts-container', entry_posts) %} + + {% if entry_post.feature_image %} + {{ entry_post.title }} + {% else %} +
    + {% endif %} +
    +
    {{ entry_post.title }}
    +
    +
    + {% endcall %} +
    +{% else %} + {# Empty placeholder to remove nav posts when all are disassociated #} +
    +{% endif %} diff --git a/events/templates/_types/entry/admin/_oob_elements.html b/events/templates/_types/entry/admin/_oob_elements.html new file mode 100644 index 0000000..bcf2255 --- /dev/null +++ b/events/templates/_types/entry/admin/_oob_elements.html @@ -0,0 +1,25 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for calendar admin page #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('entry-header-child', 'entry-admin-header-child', '_types/entry/admin/header/_header.html')}} + + {% from '_types/entry/header/_header.html' import header_row with context %} + {{header_row(oob=True)}} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/entry/admin/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/entry/admin/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/entry/admin/header/_header.html b/events/templates/_types/entry/admin/header/_header.html new file mode 100644 index 0000000..952e215 --- /dev/null +++ b/events/templates/_types/entry/admin/header/_header.html @@ -0,0 +1,21 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='entry-admin-row', oob=oob) %} + {% call links.link( + url_for( + 'calendars.calendar.day.calendar_entries.calendar_entry.admin.admin', + calendar_slug=calendar.slug, + day=day, + month=month, + year=year, + entry_id=entry.id + ), + hx_select_search + ) %} + {{ links.admin() }} + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/entry/admin/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/events/templates/_types/entry/admin/index.html 
b/events/templates/_types/entry/admin/index.html new file mode 100644 index 0000000..caa100c --- /dev/null +++ b/events/templates/_types/entry/admin/index.html @@ -0,0 +1,24 @@ +{% extends '_types/entry/index.html' %} +{% import 'macros/layout.html' as layout %} +{% import 'macros/links.html' as links %} + + +{% block entry_header_child %} + {% from '_types/root/_n/macros.html' import header with context %} + {% call header() %} + {% from '_types/entry/admin/header/_header.html' import header_row with context %} + {{ header_row() }} +
    + {% block entry_admin_header_child %} + {% endblock %} +
    + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/entry/admin/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/entry/admin/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/entry/header/_header.html b/events/templates/_types/entry/header/_header.html new file mode 100644 index 0000000..5e1a5cc --- /dev/null +++ b/events/templates/_types/entry/header/_header.html @@ -0,0 +1,27 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='entry-row', oob=oob) %} + {% call links.link( + url_for( + 'calendars.calendar.day.calendar_entries.calendar_entry.get', + calendar_slug=calendar.slug, + day=day, + month=month, + year=year, + entry_id=entry.id + ), + hx_select_search, + ) %} +
    + {% include '_types/entry/_title.html' %} + {% include '_types/entry/_times.html' %} +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/entry/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/events/templates/_types/entry/index.html b/events/templates/_types/entry/index.html new file mode 100644 index 0000000..a980f46 --- /dev/null +++ b/events/templates/_types/entry/index.html @@ -0,0 +1,20 @@ +{% extends '_types/day/index.html' %} + +{% block day_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('entry-header-child', '_types/entry/header/_header.html') %} + {% block entry_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} + {% include '_types/entry/_nav.html' %} +{% endblock %} + + + +{% block content %} +{% include '_types/entry/_main_panel.html' %} +{% endblock %} \ No newline at end of file diff --git a/events/templates/_types/markets/_main_panel.html b/events/templates/_types/markets/_main_panel.html new file mode 100644 index 0000000..7168712 --- /dev/null +++ b/events/templates/_types/markets/_main_panel.html @@ -0,0 +1,25 @@ +
    + {% if has_access('markets.create_market') %} +
    + +
    + +
    + + +
    + +
    + {% endif %} +
    + {% include "_types/markets/_markets_list.html" %} +
    +
    diff --git a/events/templates/_types/markets/_markets_list.html b/events/templates/_types/markets/_markets_list.html new file mode 100644 index 0000000..2ac5143 --- /dev/null +++ b/events/templates/_types/markets/_markets_list.html @@ -0,0 +1,37 @@ + {% for m in markets %} +
    +
    + + {% set market_href = market_url('/' + post.slug + '/' + m.slug + '/') %} + +

    {{ m.name }}

    +

    /{{ m.slug }}/

    +
    + + + +
    +
    + {% else %} +

    No markets yet. Create one above.

    + {% endfor %} diff --git a/events/templates/_types/markets/_nav.html b/events/templates/_types/markets/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/markets/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/markets/_oob_elements.html b/events/templates/_types/markets/_oob_elements.html new file mode 100644 index 0000000..93ec6d7 --- /dev/null +++ b/events/templates/_types/markets/_oob_elements.html @@ -0,0 +1,19 @@ +{% extends 'oob_elements.html' %} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-admin-header-child', 'markets-header-child', '_types/markets/header/_header.html')}} + + {% from '_types/post/admin/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% block mobile_menu %} + {% include '_types/markets/_nav.html' %} +{% endblock %} + +{% block content %} + {% include "_types/markets/_main_panel.html" %} +{% endblock %} diff --git a/events/templates/_types/markets/header/_header.html b/events/templates/_types/markets/header/_header.html new file mode 100644 index 0000000..6ae008d --- /dev/null +++ b/events/templates/_types/markets/header/_header.html @@ -0,0 +1,14 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='markets-row', oob=oob) %} + {% call links.link(url_for('markets.home'), hx_select_search) %} + +
    + Markets +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/markets/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/events/templates/_types/markets/index.html b/events/templates/_types/markets/index.html new file mode 100644 index 0000000..cb05b12 --- /dev/null +++ b/events/templates/_types/markets/index.html @@ -0,0 +1,23 @@ +{% extends '_types/root/_index.html' %} + +{% block meta %}{% endblock %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-header-child', '_types/post/header/_header.html') %} + {% call index_row('post-admin-header-child', '_types/post/admin/header/_header.html') %} + {% call index_row('markets-header-child', '_types/markets/header/_header.html') %} + {% block markets_header_child %} + {% endblock %} + {% endcall %} + {% endcall %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/markets/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/markets/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/page_summary/_card.html b/events/templates/_types/page_summary/_card.html new file mode 100644 index 0000000..27f12cb --- /dev/null +++ b/events/templates/_types/page_summary/_card.html @@ -0,0 +1,49 @@ +{# List card for page summary — one entry #} +{% set pi = page_info.get(entry.calendar_container_id, {}) %} +{% set page_slug = pi.get('slug', post.slug) %} +{% set page_title = pi.get('title') %} +
    +
    + {# Left: event info #} +
    + {% set day_href = events_url('/' ~ page_slug ~ '/calendars/' ~ entry.calendar_slug ~ '/day/' ~ entry.start_at.strftime('%Y/%-m/%-d') ~ '/') %} + {% set entry_href = day_href ~ 'entries/' ~ entry.id ~ '/' %} + +

    {{ entry.name }}

    +
    + +
    + {% if page_title and page_title != post.title %} + + {{ page_title }} + + {% endif %} + {% if entry.calendar_name %} + + {{ entry.calendar_name }} + + {% endif %} +
    + +
    + {{ entry.start_at.strftime('%H:%M') }}{% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    + + {% if entry.cost %} +
    + £{{ '%.2f'|format(entry.cost) }} +
    + {% endif %} +
    + + {# Right: ticket widget #} + {% if entry.ticket_price is not none %} +
    + {% set qty = pending_tickets.get(entry.id, 0) %} + {% set ticket_url = url_for('page_summary.adjust_ticket') %} + {% include '_types/page_summary/_ticket_widget.html' %} +
    + {% endif %} +
    +
    diff --git a/events/templates/_types/page_summary/_card_tile.html b/events/templates/_types/page_summary/_card_tile.html new file mode 100644 index 0000000..7d13cca --- /dev/null +++ b/events/templates/_types/page_summary/_card_tile.html @@ -0,0 +1,48 @@ +{# Tile card for page summary — compact event tile #} +{% set pi = page_info.get(entry.calendar_container_id, {}) %} +{% set page_slug = pi.get('slug', post.slug) %} +{% set page_title = pi.get('title') %} +
    + {% set day_href = events_url('/' ~ page_slug ~ '/calendars/' ~ entry.calendar_slug ~ '/day/' ~ entry.start_at.strftime('%Y/%-m/%-d') ~ '/') %} + {% set entry_href = day_href ~ 'entries/' ~ entry.id ~ '/' %} +
    + +

    {{ entry.name }}

    +
    + +
    + {% if page_title and page_title != post.title %} + + {{ page_title }} + + {% endif %} + {% if entry.calendar_name %} + + {{ entry.calendar_name }} + + {% endif %} +
    + +
    + {{ entry.start_at.strftime('%a %-d %b') }} + · + {{ entry.start_at.strftime('%H:%M') }}{% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    + + {% if entry.cost %} +
    + £{{ '%.2f'|format(entry.cost) }} +
    + {% endif %} +
    + + {# Ticket widget below card #} + {% if entry.ticket_price is not none %} +
    + {% set qty = pending_tickets.get(entry.id, 0) %} + {% set ticket_url = url_for('page_summary.adjust_ticket') %} + {% include '_types/page_summary/_ticket_widget.html' %} +
    + {% endif %} +
    diff --git a/events/templates/_types/page_summary/_cards.html b/events/templates/_types/page_summary/_cards.html new file mode 100644 index 0000000..b6958ab --- /dev/null +++ b/events/templates/_types/page_summary/_cards.html @@ -0,0 +1,31 @@ +{% for entry in entries %} + {% if view == 'tile' %} + {% include "_types/page_summary/_card_tile.html" %} + {% else %} + {# Date header when date changes (list view only) #} + {% set entry_date = entry.start_at.strftime('%A %-d %B %Y') %} + {% if loop.first or entry_date != entries[loop.index0 - 1].start_at.strftime('%A %-d %B %Y') %} +
    +

    + {{ entry_date }} +

    +
    + {% endif %} + {% include "_types/page_summary/_card.html" %} + {% endif %} +{% endfor %} +{% if has_more %} + {# Infinite scroll sentinel #} + {% set entries_url = url_for('page_summary.entries_fragment', page=page + 1, view=view if view != 'list' else '')|host %} + +{% endif %} diff --git a/events/templates/_types/page_summary/_main_panel.html b/events/templates/_types/page_summary/_main_panel.html new file mode 100644 index 0000000..ab1a8b4 --- /dev/null +++ b/events/templates/_types/page_summary/_main_panel.html @@ -0,0 +1,54 @@ +{# View toggle bar - desktop only #} + + +{# Cards container - list or grid based on view #} +{% if entries %} + {% if view == 'tile' %} +
    + {% include "_types/page_summary/_cards.html" %} +
    + {% else %} +
    + {% include "_types/page_summary/_cards.html" %} +
    + {% endif %} +{% else %} +
    + +

    No upcoming events

    +
    +{% endif %} +
    diff --git a/events/templates/_types/page_summary/_ticket_widget.html b/events/templates/_types/page_summary/_ticket_widget.html new file mode 100644 index 0000000..6e90871 --- /dev/null +++ b/events/templates/_types/page_summary/_ticket_widget.html @@ -0,0 +1,63 @@ +{# Inline ticket +/- widget for page summary cards. + Variables: entry, qty, ticket_url + Wrapped in a div with stable ID for HTMX targeting. #} +
    + £{{ '%.2f'|format(entry.ticket_price) }} + + {% if qty == 0 %} +
    + + + + +
    + {% else %} +
    + + + + +
    + + + + + + {{ qty }} + + + + +
    + + + + +
    + {% endif %} +
    diff --git a/events/templates/_types/page_summary/index.html b/events/templates/_types/page_summary/index.html new file mode 100644 index 0000000..d084317 --- /dev/null +++ b/events/templates/_types/page_summary/index.html @@ -0,0 +1,15 @@ +{% extends '_types/root/_index.html' %} + +{% block meta %}{% endblock %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-header-child', '_types/post/header/_header.html') %} + {% block post_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/page_summary/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/payments/_main_panel.html b/events/templates/_types/payments/_main_panel.html new file mode 100644 index 0000000..42f4141 --- /dev/null +++ b/events/templates/_types/payments/_main_panel.html @@ -0,0 +1,70 @@ +
    +
    +

    + + SumUp Payment +

    +

    + Configure per-page SumUp credentials. Leave blank to use the global merchant account. +

    + +
    + + +
    + + +
    + +
    + + + {% if sumup_configured %} +

    Key is set. Leave blank to keep current key.

    + {% endif %} +
    + +
    + + +
    + + + + {% if sumup_configured %} + + Connected + + {% endif %} +
    +
    +
    diff --git a/events/templates/_types/payments/_nav.html b/events/templates/_types/payments/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/payments/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/payments/_oob_elements.html b/events/templates/_types/payments/_oob_elements.html new file mode 100644 index 0000000..5232f7e --- /dev/null +++ b/events/templates/_types/payments/_oob_elements.html @@ -0,0 +1,19 @@ +{% extends 'oob_elements.html' %} + +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-admin-header-child', 'payments-header-child', '_types/payments/header/_header.html')}} + + {% from '_types/post/admin/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + +{% block mobile_menu %} + {% include '_types/payments/_nav.html' %} +{% endblock %} + +{% block content %} + {% include "_types/payments/_main_panel.html" %} +{% endblock %} diff --git a/events/templates/_types/payments/header/_header.html b/events/templates/_types/payments/header/_header.html new file mode 100644 index 0000000..282aac6 --- /dev/null +++ b/events/templates/_types/payments/header/_header.html @@ -0,0 +1,14 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='payments-row', oob=oob) %} + {% call links.link(url_for('payments.home'), hx_select_search) %} + +
    + Payments +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/payments/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/events/templates/_types/payments/index.html b/events/templates/_types/payments/index.html new file mode 100644 index 0000000..721145c --- /dev/null +++ b/events/templates/_types/payments/index.html @@ -0,0 +1,23 @@ +{% extends '_types/root/_index.html' %} + +{% block meta %}{% endblock %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-header-child', '_types/post/header/_header.html') %} + {% call index_row('post-admin-header-child', '_types/post/admin/header/_header.html') %} + {% call index_row('payments-header-child', '_types/payments/header/_header.html') %} + {% block payments_header_child %} + {% endblock %} + {% endcall %} + {% endcall %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/payments/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/payments/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/post/_nav.html b/events/templates/_types/post/_nav.html new file mode 100644 index 0000000..8a75650 --- /dev/null +++ b/events/templates/_types/post/_nav.html @@ -0,0 +1,14 @@ +{% import 'macros/links.html' as links %} +{% if calendars %} + {% for calendar in calendars %} + {% call links.link(url_for('calendars.calendar.get', calendar_slug=calendar.slug), hx_select_search, select_colours, True, aclass=styles.nav_button_less_pad) %} + +
    {{ calendar.name }}
    + {% endcall %} + {% endfor %} +{% endif %} +{% if g.rights.admin %} + + + +{% endif %} diff --git a/events/templates/_types/post/admin/_associated_entries.html b/events/templates/_types/post/admin/_associated_entries.html new file mode 100644 index 0000000..d9fe853 --- /dev/null +++ b/events/templates/_types/post/admin/_associated_entries.html @@ -0,0 +1,50 @@ +
    +

    Associated Entries

    + {% if associated_entry_ids %} +
    + {% for calendar in all_calendars %} + {% for entry in calendar.entries %} + {% if entry.id in associated_entry_ids and entry.deleted_at is none %} + + {% endif %} + {% endfor %} + {% endfor %} +
    + {% else %} +
    No entries associated yet. Browse calendars below to add entries.
    + {% endif %} +
    diff --git a/events/templates/_types/post/admin/_nav.html b/events/templates/_types/post/admin/_nav.html new file mode 100644 index 0000000..c0237d6 --- /dev/null +++ b/events/templates/_types/post/admin/_nav.html @@ -0,0 +1,36 @@ +{% import 'macros/links.html' as links %} + + + + + + + diff --git a/events/templates/_types/post/admin/header/_header.html b/events/templates/_types/post/admin/header/_header.html new file mode 100644 index 0000000..9056d09 --- /dev/null +++ b/events/templates/_types/post/admin/header/_header.html @@ -0,0 +1,12 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post-admin-row', oob=oob) %} + + {{ links.admin() }} + + {% call links.desktop_nav() %} + {% include '_types/post/admin/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} diff --git a/events/templates/_types/post/header/_header.html b/events/templates/_types/post/header/_header.html new file mode 100644 index 0000000..6655eb5 --- /dev/null +++ b/events/templates/_types/post/header/_header.html @@ -0,0 +1,28 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post-row', oob=oob) %} + {% call links.link(blog_url('/' + post.slug + '/'), hx_select_search ) %} + {% if post.feature_image %} + + {% endif %} + + {{ post.title | truncate(160, True, '…') }} + + {% endcall %} + {% call links.desktop_nav() %} + {% if page_cart_count is defined and page_cart_count > 0 %} + + + {{ page_cart_count }} + + {% endif %} + {% include '_types/post/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/events/templates/_types/post_entries/_main_panel.html b/events/templates/_types/post_entries/_main_panel.html new file mode 100644 index 0000000..3a938ba --- /dev/null +++ b/events/templates/_types/post_entries/_main_panel.html @@ -0,0 +1,47 @@ +
    + + {# Associated Entries List #} + {% include '_types/post/admin/_associated_entries.html' %} + + {# Calendars Browser #} +
    +

    Browse Calendars

    + {% for calendar in all_calendars %} +
    + + {% if calendar.post.feature_image %} + {{ calendar.post.title }} + {% else %} +
    + {% endif %} +
    +
    + + {{ calendar.name }} +
    +
    + {{ calendar.post.title }} +
    +
    +
    +
    +
    Loading calendar...
    +
    +
    + {% else %} +
    No calendars found.
    + {% endfor %} +
    +
    \ No newline at end of file diff --git a/events/templates/_types/post_entries/_nav.html b/events/templates/_types/post_entries/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/post_entries/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/post_entries/header/_header.html b/events/templates/_types/post_entries/header/_header.html new file mode 100644 index 0000000..18859eb --- /dev/null +++ b/events/templates/_types/post_entries/header/_header.html @@ -0,0 +1,17 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post_entries-row', oob=oob) %} + {% call links.link(blog_url('/' + post.slug + '/admin/entries/'), hx_select_search) %} + +
    + entries +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/post_entries/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/events/templates/_types/slot/__description.html b/events/templates/_types/slot/__description.html new file mode 100644 index 0000000..7897fd2 --- /dev/null +++ b/events/templates/_types/slot/__description.html @@ -0,0 +1,13 @@ +{% macro description(slot, oob=False) %} +
    + {{ slot.description or ''}} +
    + +{% endmacro %} diff --git a/events/templates/_types/slot/_description.html b/events/templates/_types/slot/_description.html new file mode 100644 index 0000000..32e28e6 --- /dev/null +++ b/events/templates/_types/slot/_description.html @@ -0,0 +1,5 @@ +

    + {% if slot.description %} + {{ slot.description }} + {% endif %} +

    diff --git a/events/templates/_types/slot/_edit.html b/events/templates/_types/slot/_edit.html new file mode 100644 index 0000000..e591e74 --- /dev/null +++ b/events/templates/_types/slot/_edit.html @@ -0,0 +1,180 @@ +
    + +
    +
    + + + +
    + + +
    + + +
    + + +
    + + +
    + + +
    + + +
    + + +
    + + +
    + + +
    + + +
    + + Days + + + {# pre-check "All" if every day is true on this slot #} + {% set all_days_checked = + slot|getattr('mon') + and slot|getattr('tue') + and slot|getattr('wed') + and slot|getattr('thu') + and slot|getattr('fri') + and slot|getattr('sat') + and slot|getattr('sun') %} + +
    + {# "All" toggle – no name so it’s not submitted #} + + + {# Individual days, with data-day like the add form #} + {% for key, label in [ + ('mon','Mon'),('tue','Tue'),('wed','Wed'),('thu','Thu'), + ('fri','Fri'),('sat','Sat'),('sun','Sun') + ] %} + {% set is_checked = slot|getattr(key) %} + + {% endfor %} +
    +
    + + +
    + + +
    + +
    + + + +
    +
    +
    diff --git a/events/templates/_types/slot/_main_panel.html b/events/templates/_types/slot/_main_panel.html new file mode 100644 index 0000000..2b4cb17 --- /dev/null +++ b/events/templates/_types/slot/_main_panel.html @@ -0,0 +1,72 @@ +
    + +
    +
    + Days +
    +
    + {% set days = slot.days_display.split(', ') %} + {% if days and days[0] != "—" %} +
    + {% for day in days %} + + {{ day }} + + {% endfor %} +
    + {% else %} + No days + {% endif %} +
    +
    + + +
    +
    + Flexible +
    +
    + {{ 'yes' if slot.flexible else 'no' }} +
    +
    + + +
    +
    +
    + Time +
    +
    + {{ slot.time_start.strftime('%H:%M') }} — {{ slot.time_end.strftime('%H:%M') }} +
    +
    + +
    +
    + Cost +
    +
    + {{ ('%.2f'|format(slot.cost)) if slot.cost is not none else '' }} +
    +
    +
    + +
    + +{% if oob %} + {% from '_types/slot/__description.html' import description %} + {{description(slot, oob=True)}} + +{% endif %} \ No newline at end of file diff --git a/events/templates/_types/slot/_oob_elements.html b/events/templates/_types/slot/_oob_elements.html new file mode 100644 index 0000000..3b82170 --- /dev/null +++ b/events/templates/_types/slot/_oob_elements.html @@ -0,0 +1,15 @@ +{% extends "oob_elements.html" %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('slots-header-child', 'slot-header-child', '_types/slot/header/_header.html')}} + + {% from '_types/slots/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + + +{% block content %} + {% include '_types/slot/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/slot/header/_header.html b/events/templates/_types/slot/header/_header.html new file mode 100644 index 0000000..fc5381d --- /dev/null +++ b/events/templates/_types/slot/header/_header.html @@ -0,0 +1,25 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='slot-row', oob=oob) %} + {% call links.link( + hx_select_search, + ) %} +
    +
    + +
    + {{ slot.name }} +
    +
    + {% from '_types/slot/__description.html' import description %} + {{description(slot)}} +
    + {% endcall %} + {% call links.desktop_nav() %} + {#% include '_types/slot/_nav.html' %#} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/events/templates/_types/slot/index.html b/events/templates/_types/slot/index.html new file mode 100644 index 0000000..265be24 --- /dev/null +++ b/events/templates/_types/slot/index.html @@ -0,0 +1,20 @@ +{% extends '_types/slots/index.html' %} +{% import 'macros/layout.html' as layout %} + + +{% block slots_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('slot-header-child', '_types/slot/header/_header.html') %} + {% block slot_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} + {#% include '_types/slot/_nav.html' %#} +{% endblock %} + +{% block content %} + {% include '_types/slot/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/slots/_add.html b/events/templates/_types/slots/_add.html new file mode 100644 index 0000000..8a0f6df --- /dev/null +++ b/events/templates/_types/slots/_add.html @@ -0,0 +1,123 @@ +
    +
    +
    + + +
    + +
    + + +
    + +
    + +
    + {# "All" toggle – no name so it’s not submitted #} + + + {# Individual days #} + {% for key, label in [ + ('mon','Mon'),('tue','Tue'),('wed','Wed'),('thu','Thu'), + ('fri','Fri'),('sat','Sat'),('sun','Sun') + ] %} + + {% endfor %} +
    +
    + +
    + + +
    + +
    + + +
    + +
    + + +
    + + {# NEW: flexible flag #} +
    + + +
    +
    + +
    + + + +
    +
    diff --git a/events/templates/_types/slots/_add_button.html b/events/templates/_types/slots/_add_button.html new file mode 100644 index 0000000..6bb7e5d --- /dev/null +++ b/events/templates/_types/slots/_add_button.html @@ -0,0 +1,11 @@ + + diff --git a/events/templates/_types/slots/_main_panel.html b/events/templates/_types/slots/_main_panel.html new file mode 100644 index 0000000..a2ac263 --- /dev/null +++ b/events/templates/_types/slots/_main_panel.html @@ -0,0 +1,26 @@ +
    + + + + + + + + + + + + + {% for s in slots %} + {% include '_types/slots/_row.html' %} + {% else %} + + {% endfor %} + +
    NameFlexibleDaysTimeCostActions
    No slots yet.
    + + +
    + {% include '_types/slots/_add_button.html' %} +
    +
    diff --git a/events/templates/_types/slots/_oob_elements.html b/events/templates/_types/slots/_oob_elements.html new file mode 100644 index 0000000..acf0d05 --- /dev/null +++ b/events/templates/_types/slots/_oob_elements.html @@ -0,0 +1,15 @@ +{% extends "oob_elements.html" %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('calendar-header-child', 'slots-header-child', '_types/slots/header/_header.html')}} + + {% from '_types/calendar/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + + +{% block content %} + {% include '_types/slots/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/slots/_row.html b/events/templates/_types/slots/_row.html new file mode 100644 index 0000000..1a9965f --- /dev/null +++ b/events/templates/_types/slots/_row.html @@ -0,0 +1,61 @@ +{% import 'macros/links.html' as links %} + + +
    + {% call links.link( + hx_select_search, + aclass=styles.pill + ) %} + {{ s.name }} + {% endcall %} +
    + {% set slot = s %} + {% include '_types/slot/_description.html' %} + + + {{ 'yes' if s.flexible else 'no' }} + + + {% set days = s.days_display.split(', ') %} + {% if days and days[0] != "—" %} +
    + {% for day in days %} + + {{ day }} + + {% endfor %} +
    + {% else %} + No days + {% endif %} + + + {{ s.time_start.strftime('%H:%M') }} - {{ s.time_end.strftime('%H:%M') }} + + + {{ ('%.2f'|format(s.cost)) if s.cost is not none else '' }} + + + + + diff --git a/events/templates/_types/slots/header/_header.html b/events/templates/_types/slots/header/_header.html new file mode 100644 index 0000000..eb34edb --- /dev/null +++ b/events/templates/_types/slots/header/_header.html @@ -0,0 +1,18 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='slots-row', oob=oob) %} + {% call links.link( + hx_select_search, + ) %} + +
    + slots +
    + {% endcall %} + {% call links.desktop_nav() %} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/events/templates/_types/slots/index.html b/events/templates/_types/slots/index.html new file mode 100644 index 0000000..453ba5f --- /dev/null +++ b/events/templates/_types/slots/index.html @@ -0,0 +1,19 @@ +{% extends '_types/calendar/index.html' %} + +{% block calendar_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('slots-header-child', '_types/slots/header/_header.html') %} + {% block slots_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {#% include '_types/calendar/_nav.html' %#} +{% endblock %} + + + +{% block content %} + {% include '_types/slots/_main_panel.html' %} +{% endblock %} \ No newline at end of file diff --git a/events/templates/_types/ticket_admin/_checkin_result.html b/events/templates/_types/ticket_admin/_checkin_result.html new file mode 100644 index 0000000..4d6447e --- /dev/null +++ b/events/templates/_types/ticket_admin/_checkin_result.html @@ -0,0 +1,39 @@ +{# Check-in result — replaces ticket row or action area #} +{% if success and ticket %} + + + {{ ticket.code[:12] }}... + + +
    {{ ticket.entry.name if ticket.entry else '—' }}
    + {% if ticket.entry and ticket.entry.start_at %} +
    + {{ ticket.entry.start_at.strftime('%d %b %Y, %H:%M') }} +
    + {% endif %} + + + {{ ticket.ticket_type.name if ticket.ticket_type else '—' }} + + + + Checked in + + + + + + {% if ticket.checked_in_at %} + {{ ticket.checked_in_at.strftime('%H:%M') }} + {% else %} + Just now + {% endif %} + + + +{% elif not success %} +
    + + {{ error or 'Check-in failed' }} +
    +{% endif %} diff --git a/events/templates/_types/ticket_admin/_entry_tickets.html b/events/templates/_types/ticket_admin/_entry_tickets.html new file mode 100644 index 0000000..6599b2a --- /dev/null +++ b/events/templates/_types/ticket_admin/_entry_tickets.html @@ -0,0 +1,75 @@ +{# Tickets for a specific calendar entry — admin view #} +
    +
    +

    + Tickets for: {{ entry.name }} +

    + + {{ tickets|length }} ticket{{ 's' if tickets|length != 1 else '' }} + +
    + + {% if tickets %} +
    + + + + + + + + + + + {% for ticket in tickets %} + + + + + + + {% endfor %} + +
    CodeTypeStateActions
    {{ ticket.code[:12] }}...{{ ticket.ticket_type.name if ticket.ticket_type else '—' }} + + {{ ticket.state|replace('_', ' ')|capitalize }} + + + {% if ticket.state in ('confirmed', 'reserved') %} +
    + + +
    + {% elif ticket.state == 'checked_in' %} + + + {% if ticket.checked_in_at %}{{ ticket.checked_in_at.strftime('%H:%M') }}{% endif %} + + {% endif %} +
    +
    + {% else %} +
    + No tickets for this entry +
    + {% endif %} +
    diff --git a/events/templates/_types/ticket_admin/_lookup_result.html b/events/templates/_types/ticket_admin/_lookup_result.html new file mode 100644 index 0000000..5ea17eb --- /dev/null +++ b/events/templates/_types/ticket_admin/_lookup_result.html @@ -0,0 +1,82 @@ +{# Ticket lookup result — rendered into #lookup-result #} +{% if error %} +
    + + {{ error }} +
    +{% elif ticket %} +
    +
    +
    +
    + {{ ticket.entry.name if ticket.entry else 'Unknown event' }} +
    + {% if ticket.ticket_type %} +
    {{ ticket.ticket_type.name }}
    + {% endif %} + {% if ticket.entry and ticket.entry.start_at %} +
    + {{ ticket.entry.start_at.strftime('%A, %B %d, %Y at %H:%M') }} +
    + {% endif %} + {% if ticket.entry and ticket.entry.calendar %} +
    + {{ ticket.entry.calendar.name }} +
    + {% endif %} +
    + + {{ ticket.state|replace('_', ' ')|capitalize }} + + {{ ticket.code }} +
    + {% if ticket.checked_in_at %} +
    + Checked in: {{ ticket.checked_in_at.strftime('%B %d, %Y at %H:%M') }} +
    + {% endif %} +
    + +
    + {% if ticket.state in ('confirmed', 'reserved') %} +
    + + +
    + {% elif ticket.state == 'checked_in' %} +
    + +
    Checked In
    +
    + {% elif ticket.state == 'cancelled' %} +
    + +
    Cancelled
    +
    + {% endif %} +
    +
    +
    +{% endif %} diff --git a/events/templates/_types/ticket_admin/_main_panel.html b/events/templates/_types/ticket_admin/_main_panel.html new file mode 100644 index 0000000..43f367b --- /dev/null +++ b/events/templates/_types/ticket_admin/_main_panel.html @@ -0,0 +1,148 @@ +
    +

    Ticket Admin

    + + {# Stats row #} +
    +
    +
    {{ stats.total }}
    +
    Total
    +
    +
    +
    {{ stats.confirmed }}
    +
    Confirmed
    +
    +
    +
    {{ stats.checked_in }}
    +
    Checked In
    +
    +
    +
    {{ stats.reserved }}
    +
    Reserved
    +
    +
    + + {# Scanner section #} +
    +

    + + Scan / Look Up Ticket +

    + +
    + + +
    + +
    +
    + Enter a ticket code to look it up +
    +
    +
    + + {# Recent tickets table #} +
    +

    + Recent Tickets +

    + + {% if tickets %} +
    + + + + + + + + + + + + {% for ticket in tickets %} + + + + + + + + {% endfor %} + +
    CodeEventTypeStateActions
    + {{ ticket.code[:12] }}... + +
    {{ ticket.entry.name if ticket.entry else '—' }}
    + {% if ticket.entry and ticket.entry.start_at %} +
    + {{ ticket.entry.start_at.strftime('%d %b %Y, %H:%M') }} +
    + {% endif %} +
    + {{ ticket.ticket_type.name if ticket.ticket_type else '—' }} + + + {{ ticket.state|replace('_', ' ')|capitalize }} + + + {% if ticket.state in ('confirmed', 'reserved') %} +
    + + +
    + {% elif ticket.state == 'checked_in' %} + + + {% if ticket.checked_in_at %} + {{ ticket.checked_in_at.strftime('%H:%M') }} + {% endif %} + + {% endif %} +
    +
    + {% else %} +
    + No tickets yet +
    + {% endif %} +
    +
    diff --git a/events/templates/_types/ticket_admin/index.html b/events/templates/_types/ticket_admin/index.html new file mode 100644 index 0000000..47ecb0a --- /dev/null +++ b/events/templates/_types/ticket_admin/index.html @@ -0,0 +1,8 @@ +{% extends '_types/root/index.html' %} + +{% block _main_mobile_menu %} +{% endblock %} + +{% block content %} +{% include '_types/ticket_admin/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/ticket_type/_edit.html b/events/templates/_types/ticket_type/_edit.html new file mode 100644 index 0000000..67cec9e --- /dev/null +++ b/events/templates/_types/ticket_type/_edit.html @@ -0,0 +1,101 @@ +
    + +
    +
    + + + +
    + + +
    + + +
    + + +
    + + +
    + + +
    + +
    + + + +
    +
    +
    diff --git a/events/templates/_types/ticket_type/_main_panel.html b/events/templates/_types/ticket_type/_main_panel.html new file mode 100644 index 0000000..7805dc3 --- /dev/null +++ b/events/templates/_types/ticket_type/_main_panel.html @@ -0,0 +1,49 @@ +
    + +
    +
    +
    + Name +
    +
    + {{ ticket_type.name }} +
    +
    + +
    +
    + Cost +
    +
    + £{{ ('%.2f'|format(ticket_type.cost)) if ticket_type.cost is not none else '0.00' }} +
    +
    + +
    +
    + Count +
    +
    + {{ ticket_type.count }} +
    +
    +
    + + +
    diff --git a/events/templates/_types/ticket_type/_nav.html b/events/templates/_types/ticket_type/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/ticket_type/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/ticket_type/_oob_elements.html b/events/templates/_types/ticket_type/_oob_elements.html new file mode 100644 index 0000000..824e62a --- /dev/null +++ b/events/templates/_types/ticket_type/_oob_elements.html @@ -0,0 +1,18 @@ +{% extends "oob_elements.html" %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('ticket_types-header-child', 'ticket_type-header-child', '_types/ticket_type/header/_header.html')}} + + {% from '_types/ticket_types/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/ticket_type/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/ticket_type/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/ticket_type/header/_header.html b/events/templates/_types/ticket_type/header/_header.html new file mode 100644 index 0000000..9496cbc --- /dev/null +++ b/events/templates/_types/ticket_type/header/_header.html @@ -0,0 +1,32 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='ticket_type-row', oob=oob) %} + {% call links.link( + url_for( + 'calendars.calendar.day.calendar_entries.calendar_entry.ticket_types.ticket_type.get', + calendar_slug=calendar.slug, + year=year, + month=month, + day=day, + entry_id=entry.id, + ticket_type_id=ticket_type.id + ), + hx_select_search, + ) %} +
    +
    + +
    + {{ ticket_type.name }} +
    +
    +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/ticket_type/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/events/templates/_types/ticket_type/index.html b/events/templates/_types/ticket_type/index.html new file mode 100644 index 0000000..245992c --- /dev/null +++ b/events/templates/_types/ticket_type/index.html @@ -0,0 +1,19 @@ +{% extends '_types/ticket_types/index.html' %} +{% import 'macros/layout.html' as layout %} + +{% block ticket_types_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('ticket_types-header-child', '_types/ticket_type/header/_header.html') %} + {% block ticket_type_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} + {#% include '_types/ticket_type/_nav.html' %#} +{% endblock %} + +{% block content %} + {% include '_types/ticket_type/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/ticket_types/_add.html b/events/templates/_types/ticket_types/_add.html new file mode 100644 index 0000000..cbea211 --- /dev/null +++ b/events/templates/_types/ticket_types/_add.html @@ -0,0 +1,85 @@ +
    +
    +
    + + +
    + +
    + + +
    + +
    + + +
    +
    + +
    + + + +
    +
    diff --git a/events/templates/_types/ticket_types/_add_button.html b/events/templates/_types/ticket_types/_add_button.html new file mode 100644 index 0000000..6deeea9 --- /dev/null +++ b/events/templates/_types/ticket_types/_add_button.html @@ -0,0 +1,15 @@ + diff --git a/events/templates/_types/ticket_types/_main_panel.html b/events/templates/_types/ticket_types/_main_panel.html new file mode 100644 index 0000000..2afaa7a --- /dev/null +++ b/events/templates/_types/ticket_types/_main_panel.html @@ -0,0 +1,24 @@ +
    + + + + + + + + + + + {% for tt in ticket_types %} + {% include '_types/ticket_types/_row.html' %} + {% else %} + + {% endfor %} + +
    NameCostCountActions
    No ticket types yet.
    + + +
    + {% include '_types/ticket_types/_add_button.html' %} +
    +
    diff --git a/events/templates/_types/ticket_types/_nav.html b/events/templates/_types/ticket_types/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/events/templates/_types/ticket_types/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/events/templates/_types/ticket_types/_oob_elements.html b/events/templates/_types/ticket_types/_oob_elements.html new file mode 100644 index 0000000..a746f17 --- /dev/null +++ b/events/templates/_types/ticket_types/_oob_elements.html @@ -0,0 +1,18 @@ +{% extends "oob_elements.html" %} + +{% block oobs %} + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('entry-admin-header-child', 'ticket_types-header-child', '_types/ticket_types/header/_header.html')}} + + {% from '_types/entry/admin/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/ticket_types/_nav.html' %} +{% endblock %} + +{% block content %} + {% include '_types/ticket_types/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/ticket_types/_row.html b/events/templates/_types/ticket_types/_row.html new file mode 100644 index 0000000..0864844 --- /dev/null +++ b/events/templates/_types/ticket_types/_row.html @@ -0,0 +1,55 @@ +{% import 'macros/links.html' as links %} + + +
    + {% call links.link( + url_for( + 'calendars.calendar.day.calendar_entries.calendar_entry.ticket_types.ticket_type.get', + calendar_slug=calendar.slug, + year=year, + month=month, + day=day, + entry_id=entry.id, + ticket_type_id=tt.id + ), + hx_select_search, + aclass=styles.pill + ) %} + {{ tt.name }} + {% endcall %} +
    + + + £{{ ('%.2f'|format(tt.cost)) if tt.cost is not none else '0.00' }} + + + {{ tt.count }} + + + + + diff --git a/events/templates/_types/ticket_types/header/_header.html b/events/templates/_types/ticket_types/header/_header.html new file mode 100644 index 0000000..2a95316 --- /dev/null +++ b/events/templates/_types/ticket_types/header/_header.html @@ -0,0 +1,24 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='ticket_types-row', oob=oob) %} + {% call links.link(url_for( + 'calendars.calendar.day.calendar_entries.calendar_entry.ticket_types.get', + calendar_slug=calendar.slug, + entry_id=entry.id, + year=year, + month=month, + day=day + ), hx_select_search) %} + +
    + ticket types +
    + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/ticket_types/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/events/templates/_types/ticket_types/index.html b/events/templates/_types/ticket_types/index.html new file mode 100644 index 0000000..9d0362a --- /dev/null +++ b/events/templates/_types/ticket_types/index.html @@ -0,0 +1,20 @@ +{% extends '_types/entry/admin/index.html' %} + +{% block entry_admin_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('ticket_type-header-child', '_types/ticket_types/header/_header.html') %} + {% block ticket_types_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} + {% include '_types/ticket_types/_nav.html' %} +{% endblock %} + + + +{% block content %} + {% include '_types/ticket_types/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/tickets/_adjust_response.html b/events/templates/_types/tickets/_adjust_response.html new file mode 100644 index 0000000..24ec2fc --- /dev/null +++ b/events/templates/_types/tickets/_adjust_response.html @@ -0,0 +1,4 @@ +{# Response for ticket adjust — buy form + OOB cart-mini update #} +{% from 'macros/cart_icon.html' import cart_icon %} +{{ cart_icon(count=cart_count, oob='true') }} +{% include '_types/tickets/_buy_form.html' %} diff --git a/events/templates/_types/tickets/_buy_form.html b/events/templates/_types/tickets/_buy_form.html new file mode 100644 index 0000000..3cb981a --- /dev/null +++ b/events/templates/_types/tickets/_buy_form.html @@ -0,0 +1,206 @@ +{# Ticket purchase form — shown on entry detail when tickets are available #} +{% if entry.ticket_price is not none and entry.state == 'confirmed' %} +
    +

    + + Tickets +

    + + {# Sold / remaining info #} +
    + {% if ticket_sold_count is defined and ticket_sold_count %} + {{ ticket_sold_count }} sold + {% endif %} + {% if ticket_remaining is not none %} + {{ ticket_remaining }} remaining + {% endif %} + {% if user_ticket_count is defined and user_ticket_count %} + + + {{ user_ticket_count }} in basket + + {% endif %} +
    + + {% if entry.ticket_types %} + {# Multiple ticket types #} +
    + {% for tt in entry.ticket_types %} + {% if tt.deleted_at is none %} + {% set type_count = user_ticket_counts_by_type.get(tt.id, 0) if user_ticket_counts_by_type is defined else 0 %} +
    +
    +
    {{ tt.name }}
    +
    + £{{ '%.2f'|format(tt.cost) }} +
    +
    + + {% if type_count == 0 %} + {# Add to basket button #} +
    + + + + + +
    + {% else %} + {# +/- controls #} +
    +
    + + + + + +
    + + + + + + + {{ type_count }} + + + + + +
    + + + + + +
    +
    + {% endif %} +
    + {% endif %} + {% endfor %} +
    + + {% else %} + {# Simple ticket (single price) #} +
    +
    + + £{{ '%.2f'|format(entry.ticket_price) }} + + per ticket +
    +
    + + {% set qty = user_ticket_count if user_ticket_count is defined else 0 %} + + {% if qty == 0 %} + {# Add to basket button #} +
    + + + + +
    + {% else %} + {# +/- controls #} +
    +
    + + + + +
    + + + + + + + {{ qty }} + + + + + +
    + + + + +
    +
    + {% endif %} + {% endif %} +
    +{% elif entry.ticket_price is not none %} + {# Tickets configured but entry not confirmed yet #} +
    + + Tickets available once this event is confirmed. +
    +{% endif %} diff --git a/events/templates/_types/tickets/_buy_result.html b/events/templates/_types/tickets/_buy_result.html new file mode 100644 index 0000000..900b098 --- /dev/null +++ b/events/templates/_types/tickets/_buy_result.html @@ -0,0 +1,43 @@ +{# Shown after ticket purchase — replaces the buy form #} +{# OOB: refresh cart badge to reflect new ticket count #} +{% from 'macros/cart_icon.html' import cart_icon %} +{{ cart_icon(count=cart_count|default(0), oob='true') }} + +
    +
    + + + {{ created_tickets|length }} ticket{{ 's' if created_tickets|length != 1 else '' }} reserved + +
    + +
    + {% for ticket in created_tickets %} + +
    + + {{ ticket.code[:12] }}... +
    + View ticket +
    + {% endfor %} +
    + + {% if remaining is not none %} +

    + {{ remaining }} ticket{{ 's' if remaining != 1 else '' }} remaining +

    + {% endif %} + + +
    diff --git a/events/templates/_types/tickets/_detail_panel.html b/events/templates/_types/tickets/_detail_panel.html new file mode 100644 index 0000000..75cde1a --- /dev/null +++ b/events/templates/_types/tickets/_detail_panel.html @@ -0,0 +1,124 @@ +
    + + {# Back link #} + + + Back to my tickets + + + {# Ticket card #} +
    + {# Header with state #} +
    +
    +

    + {{ ticket.entry.name if ticket.entry else 'Ticket' }} +

    + + {{ ticket.state|replace('_', ' ')|capitalize }} + +
    + {% if ticket.ticket_type %} +
    + {{ ticket.ticket_type.name }} +
    + {% endif %} +
    + + {# QR Code #} +
    +
    + {# QR code rendered via JavaScript #} +
    +

    + {{ ticket.code }} +

    +
    + + {# Event details #} +
    + {% if ticket.entry %} +
    + +
    +
    + {{ ticket.entry.start_at.strftime('%A, %B %d, %Y') }} +
    +
    + {{ ticket.entry.start_at.strftime('%H:%M') }} + {% if ticket.entry.end_at %} + – {{ ticket.entry.end_at.strftime('%H:%M') }} + {% endif %} +
    +
    +
    + + {% if ticket.entry.calendar %} +
    + +
    + {{ ticket.entry.calendar.name }} +
    +
    + {% endif %} + {% endif %} + + {% if ticket.ticket_type and ticket.ticket_type.cost %} +
    + +
    + {{ ticket.ticket_type.name }} — £{{ '%.2f'|format(ticket.ticket_type.cost) }} +
    +
    + {% endif %} + + {% if ticket.checked_in_at %} +
    + +
    + Checked in: {{ ticket.checked_in_at.strftime('%B %d, %Y at %H:%M') }} +
    +
    + {% endif %} +
    +
    + + {# QR code generation script #} + + +
    diff --git a/events/templates/_types/tickets/_main_panel.html b/events/templates/_types/tickets/_main_panel.html new file mode 100644 index 0000000..15b40d9 --- /dev/null +++ b/events/templates/_types/tickets/_main_panel.html @@ -0,0 +1,65 @@ +
    +

    My Tickets

    + + {% if tickets %} + + {% else %} +
    + +

    No tickets yet

    +

    Tickets will appear here after you purchase them.

    +
    + {% endif %} +
    diff --git a/events/templates/_types/tickets/detail.html b/events/templates/_types/tickets/detail.html new file mode 100644 index 0000000..31c9319 --- /dev/null +++ b/events/templates/_types/tickets/detail.html @@ -0,0 +1,8 @@ +{% extends '_types/root/index.html' %} + +{% block _main_mobile_menu %} +{% endblock %} + +{% block content %} +{% include '_types/tickets/_detail_panel.html' %} +{% endblock %} diff --git a/events/templates/_types/tickets/index.html b/events/templates/_types/tickets/index.html new file mode 100644 index 0000000..908be8b --- /dev/null +++ b/events/templates/_types/tickets/index.html @@ -0,0 +1,8 @@ +{% extends '_types/root/index.html' %} + +{% block _main_mobile_menu %} +{% endblock %} + +{% block content %} +{% include '_types/tickets/_main_panel.html' %} +{% endblock %} diff --git a/events/templates/fragments/account_nav_items.html b/events/templates/fragments/account_nav_items.html new file mode 100644 index 0000000..f1d9ca3 --- /dev/null +++ b/events/templates/fragments/account_nav_items.html @@ -0,0 +1,23 @@ +{# Account nav items: tickets + bookings links for the account dashboard #} + + diff --git a/events/templates/fragments/account_page_bookings.html b/events/templates/fragments/account_page_bookings.html new file mode 100644 index 0000000..28f8280 --- /dev/null +++ b/events/templates/fragments/account_page_bookings.html @@ -0,0 +1,44 @@ +
    +
    + +

    Bookings

    + + {% if bookings %} +
    + {% for booking in bookings %} +
    +
    +
    +

    {{ booking.name }}

    +
    + {{ booking.start_at.strftime('%d %b %Y, %H:%M') }} + {% if booking.end_at %} + – {{ booking.end_at.strftime('%H:%M') }} + {% endif %} + {% if booking.calendar_name %} + · {{ booking.calendar_name }} + {% endif %} + {% if booking.cost %} + · £{{ booking.cost }} + {% endif %} +
    +
    +
    + {% if booking.state == 'confirmed' %} + confirmed + {% elif booking.state == 'provisional' %} + provisional + {% else %} + {{ booking.state }} + {% endif %} +
    +
    +
    + {% endfor %} +
    + {% else %} +

    No bookings yet.

    + {% endif %} + +
    +
    diff --git a/events/templates/fragments/account_page_tickets.html b/events/templates/fragments/account_page_tickets.html new file mode 100644 index 0000000..69f7596 --- /dev/null +++ b/events/templates/fragments/account_page_tickets.html @@ -0,0 +1,44 @@ +
    +
    + +

    Tickets

    + + {% if tickets %} +
    + {% for ticket in tickets %} +
    +
    +
    + + {{ ticket.entry_name }} + +
    + {{ ticket.entry_start_at.strftime('%d %b %Y, %H:%M') }} + {% if ticket.calendar_name %} + · {{ ticket.calendar_name }} + {% endif %} + {% if ticket.ticket_type_name %} + · {{ ticket.ticket_type_name }} + {% endif %} +
    +
    +
    + {% if ticket.state == 'checked_in' %} + checked in + {% elif ticket.state == 'confirmed' %} + confirmed + {% else %} + {{ ticket.state }} + {% endif %} +
    +
    +
    + {% endfor %} +
    + {% else %} +

    No tickets yet.

    + {% endif %} + +
    +
    diff --git a/events/templates/fragments/container_cards_entries.html b/events/templates/fragments/container_cards_entries.html new file mode 100644 index 0000000..53ce49f --- /dev/null +++ b/events/templates/fragments/container_cards_entries.html @@ -0,0 +1,33 @@ +{# Calendar entries for blog listing cards — served as fragment from events app. + Each post's entries are delimited by comment markers so the consumer can + extract per-post HTML via simple string splitting. #} +{% for post_id in post_ids %} + +{% set widget_entries = batch.get(post_id, []) %} +{% if widget_entries %} +
    +

    Events:

    +
    +
    + {% for entry in widget_entries %} + {% set _post_slug = slug_map.get(post_id, '') %} + {% set _entry_path = '/' + _post_slug + '/calendars/' + entry.calendar_slug + '/' + entry.start_at.year|string + '/' + entry.start_at.month|string + '/' + entry.start_at.day|string + '/entries/' + entry.id|string + '/' %} + +
    {{ entry.name }}
    +
    + {{ entry.start_at.strftime('%a, %b %d') }} +
    +
    + {{ entry.start_at.strftime('%H:%M') }} + {% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    +
    + {% endfor %} +
    +
    +
    +{% endif %} + +{% endfor %} diff --git a/events/templates/fragments/container_nav_calendars.html b/events/templates/fragments/container_nav_calendars.html new file mode 100644 index 0000000..cdf50e3 --- /dev/null +++ b/events/templates/fragments/container_nav_calendars.html @@ -0,0 +1,10 @@ +{# Calendar links nav — served as fragment from events app #} +{% for calendar in calendars %} + {% set local_href=events_url('/' + post_slug + '/calendars/' + calendar.slug + '/') %} + + +
    {{calendar.name}}
    +
    +{% endfor %} diff --git a/events/templates/fragments/container_nav_entries.html b/events/templates/fragments/container_nav_entries.html new file mode 100644 index 0000000..d217565 --- /dev/null +++ b/events/templates/fragments/container_nav_entries.html @@ -0,0 +1,28 @@ +{# Calendar entries nav — served as fragment from events app #} +{% for entry in entries %} + {% set _entry_path = '/' + post_slug + '/calendars/' + entry.calendar_slug + '/' + entry.start_at.year|string + '/' + entry.start_at.month|string + '/' + entry.start_at.day|string + '/entries/' + entry.id|string + '/' %} + +
    +
    +
    {{ entry.name }}
    +
    + {{ entry.start_at.strftime('%b %d, %Y at %H:%M') }} + {% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %} +
    +
    +
    +{% endfor %} + +{# Infinite scroll sentinel — URL points back to the consumer app #} +{% if has_more and paginate_url_base %} +
    +
    +{% endif %} diff --git a/events/templates/macros/date.html b/events/templates/macros/date.html new file mode 100644 index 0000000..5954f28 --- /dev/null +++ b/events/templates/macros/date.html @@ -0,0 +1,7 @@ +{% macro dt(d) -%} +{{ d.astimezone().strftime('%-d %b %Y, %H:%M') if d.tzinfo else d.strftime('%-d %b %Y, %H:%M') }} +{%- endmacro %} + +{% macro t(d) -%} +{{ d.astimezone().strftime('%H:%M') if d.tzinfo else d.strftime('%H:%M') }} +{%- endmacro %} diff --git a/federation/.gitignore b/federation/.gitignore new file mode 100644 index 0000000..87d616e --- /dev/null +++ b/federation/.gitignore @@ -0,0 +1,9 @@ +__pycache__/ +*.pyc +*.pyo +.env +node_modules/ +*.egg-info/ +dist/ +build/ +.venv/ diff --git a/federation/Dockerfile b/federation/Dockerfile new file mode 100644 index 0000000..e961f11 --- /dev/null +++ b/federation/Dockerfile @@ -0,0 +1,50 @@ +# syntax=docker/dockerfile:1 + +# ---------- Python application ---------- +FROM python:3.11-slim AS base + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONPATH=/app \ + PIP_NO_CACHE_DIR=1 \ + APP_PORT=8000 \ + APP_MODULE=app:app + +WORKDIR /app + +# Install system deps + psql client +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +COPY shared/requirements.txt ./requirements.txt +RUN pip install -r requirements.txt + +# Shared code (replaces submodule) +COPY shared/ ./shared/ + +# App code +COPY federation/ ./ + +# Sibling models for cross-domain SQLAlchemy imports +COPY blog/__init__.py ./blog/__init__.py +COPY blog/models/ ./blog/models/ +COPY market/__init__.py ./market/__init__.py +COPY market/models/ ./market/models/ +COPY cart/__init__.py ./cart/__init__.py +COPY cart/models/ ./cart/models/ +COPY events/__init__.py ./events/__init__.py +COPY events/models/ ./events/models/ +COPY account/__init__.py ./account/__init__.py +COPY account/models/ ./account/models/ + +# ---------- Runtime setup 
---------- +COPY federation/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh + +RUN useradd -m -u 10001 appuser && chown -R appuser:appuser /app +USER appuser + +EXPOSE ${APP_PORT} +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/federation/__init__.py b/federation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/federation/app.py b/federation/app.py new file mode 100644 index 0000000..ac8c6d0 --- /dev/null +++ b/federation/app.py @@ -0,0 +1,84 @@ +from __future__ import annotations +import path_setup # noqa: F401 # adds shared/ to sys.path +from pathlib import Path + +from quart import g, request +from jinja2 import FileSystemLoader, ChoiceLoader + +from shared.infrastructure.factory import create_base_app +from shared.services.registry import services + +from bp import ( + register_identity_bp, + register_social_bp, + register_fragments, +) + + +async def federation_context() -> dict: + """Federation app context processor.""" + from shared.infrastructure.context import base_context + from shared.services.navigation import get_navigation_tree + from shared.infrastructure.cart_identity import current_cart_identity + from shared.infrastructure.fragments import fetch_fragment + + ctx = await base_context() + + ctx["nav_tree_html"] = await fetch_fragment( + "blog", "nav-tree", + params={"app_name": "federation", "path": request.path}, + ) + # Fallback for _nav.html when nav-tree fragment fetch fails + ctx["menu_items"] = await get_navigation_tree(g.s) + + # Cart data (consistent with all other apps) + ident = current_cart_identity() + summary = await services.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + ctx["cart_count"] = summary.count + summary.calendar_count + summary.ticket_count + ctx["cart_total"] = float(summary.total + summary.calendar_total + summary.ticket_total) + + # Actor profile for logged-in users + if g.get("user"): + actor = await 
services.federation.get_actor_by_user_id(g.s, g.user.id) + ctx["actor"] = actor + else: + ctx["actor"] = None + + return ctx + + +def create_app() -> "Quart": + from services import register_domain_services + + app = create_base_app( + "federation", + context_fn=federation_context, + domain_services_fn=register_domain_services, + ) + + # App-specific templates override shared templates + app_templates = str(Path(__file__).resolve().parent / "templates") + app.jinja_loader = ChoiceLoader([ + FileSystemLoader(app_templates), + app.jinja_loader, + ]) + + # --- blueprints --- + # Well-known + actors (webfinger, inbox, outbox, etc.) are now handled + # by the shared AP blueprint registered in create_base_app(). + app.register_blueprint(register_identity_bp()) + app.register_blueprint(register_social_bp()) + app.register_blueprint(register_fragments()) + + # --- home page --- + @app.get("/") + async def home(): + from quart import render_template + return await render_template("_types/federation/index.html") + + return app + + +app = create_app() diff --git a/federation/bp/__init__.py b/federation/bp/__init__.py new file mode 100644 index 0000000..1be06bb --- /dev/null +++ b/federation/bp/__init__.py @@ -0,0 +1,3 @@ +from .identity.routes import register as register_identity_bp +from .social.routes import register as register_social_bp +from .fragments import register_fragments diff --git a/federation/bp/auth/__init__.py b/federation/bp/auth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/federation/bp/auth/routes.py b/federation/bp/auth/routes.py new file mode 100644 index 0000000..6b33175 --- /dev/null +++ b/federation/bp/auth/routes.py @@ -0,0 +1,232 @@ +"""Authentication routes for the federation app. + +Owns magic link login/logout + OAuth2 authorization server endpoint. +Account pages (newsletters, widget pages) have moved to the account app. 
+""" +from __future__ import annotations + +import secrets +from datetime import datetime, timezone, timedelta + +from quart import ( + Blueprint, + request, + render_template, + redirect, + url_for, + session as qsession, + g, + current_app, +) +from sqlalchemy import select +from sqlalchemy.exc import SQLAlchemyError + +from shared.db.session import get_session +from shared.models import User +from shared.models.oauth_code import OAuthCode +from shared.infrastructure.urls import federation_url, app_url +from shared.infrastructure.cart_identity import current_cart_identity +from shared.events import emit_activity + +from .services import ( + pop_login_redirect_target, + store_login_redirect_target, + send_magic_email, + find_or_create_user, + create_magic_link, + validate_magic_link, + validate_email, +) + +SESSION_USER_KEY = "uid" + +ALLOWED_CLIENTS = {"blog", "market", "cart", "events", "account"} + + +def register(url_prefix="/auth"): + auth_bp = Blueprint("auth", __name__, url_prefix=url_prefix) + + # --- OAuth2 authorize endpoint ------------------------------------------- + + @auth_bp.get("/oauth/authorize") + @auth_bp.get("/oauth/authorize/") + async def oauth_authorize(): + client_id = request.args.get("client_id", "") + redirect_uri = request.args.get("redirect_uri", "") + state = request.args.get("state", "") + + if client_id not in ALLOWED_CLIENTS: + return "Invalid client_id", 400 + + expected_redirect = app_url(client_id, "/auth/callback") + if redirect_uri != expected_redirect: + return "Invalid redirect_uri", 400 + + # Not logged in — bounce to magic link login, then back here + if not g.get("user"): + # Preserve the full authorize URL so we return here after login + authorize_path = request.full_path # includes query string + store_login_redirect_target() + return redirect(url_for("auth.login_form", next=authorize_path)) + + # Logged in — issue authorization code + code = secrets.token_urlsafe(48) + now = datetime.now(timezone.utc) + expires = now 
+ timedelta(minutes=5) + + async with get_session() as s: + async with s.begin(): + oauth_code = OAuthCode( + code=code, + user_id=g.user.id, + client_id=client_id, + redirect_uri=redirect_uri, + expires_at=expires, + ) + s.add(oauth_code) + + sep = "&" if "?" in redirect_uri else "?" + return redirect(f"{redirect_uri}{sep}code={code}&state={state}") + + # --- Magic link login flow ----------------------------------------------- + + @auth_bp.get("/login/") + async def login_form(): + store_login_redirect_target() + cross_cart_sid = request.args.get("cart_sid") + if cross_cart_sid: + qsession["cart_sid"] = cross_cart_sid + if g.get("user"): + # If there's a pending redirect (e.g. OAuth authorize), follow it + redirect_url = pop_login_redirect_target() + return redirect(redirect_url) + return await render_template("auth/login.html") + + @auth_bp.post("/start/") + async def start_login(): + form = await request.form + email_input = form.get("email") or "" + + is_valid, email = validate_email(email_input) + if not is_valid: + return ( + await render_template( + "auth/login.html", + error="Please enter a valid email address.", + email=email_input, + ), + 400, + ) + + user = await find_or_create_user(g.s, email) + token, expires = await create_magic_link(g.s, user.id) + + from shared.utils import host_url + magic_url = host_url(url_for("auth.magic", token=token)) + + email_error = None + try: + await send_magic_email(email, magic_url) + except Exception as e: + current_app.logger.error("EMAIL SEND FAILED: %r", e) + email_error = ( + "We couldn't send the email automatically. " + "Please try again in a moment." 
+ ) + + return await render_template( + "auth/check_email.html", + email=email, + email_error=email_error, + ) + + @auth_bp.get("/magic//") + async def magic(token: str): + now = datetime.now(timezone.utc) + user_id: int | None = None + + try: + async with get_session() as s: + async with s.begin(): + user, error = await validate_magic_link(s, token) + + if error: + return ( + await render_template("auth/login.html", error=error), + 400, + ) + user_id = user.id + + except Exception: + return ( + await render_template( + "auth/login.html", + error="Could not sign you in right now. Please try again.", + ), + 502, + ) + + assert user_id is not None + + ident = current_cart_identity() + anon_session_id = ident.get("session_id") + + try: + async with get_session() as s: + async with s.begin(): + u2 = await s.get(User, user_id) + if u2: + u2.last_login_at = now + if anon_session_id: + await emit_activity( + s, + activity_type="rose:Login", + actor_uri="internal:system", + object_type="Person", + object_data={ + "user_id": user_id, + "session_id": anon_session_id, + }, + ) + except SQLAlchemyError: + current_app.logger.exception( + "[auth] non-fatal DB update for user_id=%s", user_id + ) + + qsession[SESSION_USER_KEY] = user_id + + redirect_url = pop_login_redirect_target() + resp = redirect(redirect_url, 303) + resp.set_cookie( + "sso_hint", "1", + domain=".rose-ash.com", max_age=30 * 24 * 3600, + secure=True, samesite="Lax", httponly=True, + ) + return resp + + @auth_bp.post("/logout/") + async def logout(): + qsession.pop(SESSION_USER_KEY, None) + resp = redirect(federation_url("/")) + resp.delete_cookie("sso_hint", domain=".rose-ash.com", path="/") + return resp + + @auth_bp.get("/clear/") + async def clear(): + """One-time migration helper: clear all session cookies.""" + qsession.clear() + resp = redirect(federation_url("/")) + resp.delete_cookie("blog_session", domain=".rose-ash.com", path="/") + resp.delete_cookie("sso_hint", domain=".rose-ash.com", path="/") + 
return resp + + @auth_bp.get("/sso-logout/") + async def sso_logout(): + """SSO logout: clear federation session + sso_hint, redirect to blog.""" + qsession.pop(SESSION_USER_KEY, None) + from shared.infrastructure.urls import blog_url + resp = redirect(blog_url("/")) + resp.delete_cookie("sso_hint", domain=".rose-ash.com", path="/") + return resp + + return auth_bp diff --git a/federation/bp/auth/services/__init__.py b/federation/bp/auth/services/__init__.py new file mode 100644 index 0000000..648f87d --- /dev/null +++ b/federation/bp/auth/services/__init__.py @@ -0,0 +1,24 @@ +from .login_redirect import pop_login_redirect_target, store_login_redirect_target +from .auth_operations import ( + get_app_host, + get_app_root, + send_magic_email, + load_user_by_id, + find_or_create_user, + create_magic_link, + validate_magic_link, + validate_email, +) + +__all__ = [ + "pop_login_redirect_target", + "store_login_redirect_target", + "get_app_host", + "get_app_root", + "send_magic_email", + "load_user_by_id", + "find_or_create_user", + "create_magic_link", + "validate_magic_link", + "validate_email", +] diff --git a/federation/bp/auth/services/auth_operations.py b/federation/bp/auth/services/auth_operations.py new file mode 100644 index 0000000..d9f4487 --- /dev/null +++ b/federation/bp/auth/services/auth_operations.py @@ -0,0 +1,157 @@ +"""Auth operations for the federation app. + +Copied from blog/bp/auth/services/auth_operations.py to avoid cross-app +import chains. The logic is identical — shared models, shared config. 
+""" +from __future__ import annotations + +import os +import secrets +from datetime import datetime, timedelta, timezone +from typing import Optional, Tuple + +from quart import current_app, render_template, request, g +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from shared.models import User, MagicLink +from shared.config import config + + +def get_app_host() -> str: + host = ( + config().get("host") or os.getenv("APP_HOST") or "http://localhost:8000" + ).rstrip("/") + return host + + +def get_app_root() -> str: + root = (g.root).rstrip("/") + return root + + +async def send_magic_email(to_email: str, link_url: str) -> None: + host = os.getenv("SMTP_HOST") + port = int(os.getenv("SMTP_PORT") or "587") + username = os.getenv("SMTP_USER") + password = os.getenv("SMTP_PASS") + mail_from = os.getenv("MAIL_FROM") or "no-reply@example.com" + + site_name = config().get("title", "Rose Ash") + subject = f"Your sign-in link \u2014 {site_name}" + + tpl_vars = dict(site_name=site_name, link_url=link_url) + text_body = await render_template("_email/magic_link.txt", **tpl_vars) + html_body = await render_template("_email/magic_link.html", **tpl_vars) + + if not host or not username or not password: + current_app.logger.warning( + "SMTP not configured. 
Printing magic link to console for %s: %s", + to_email, + link_url, + ) + print(f"[DEV] Magic link for {to_email}: {link_url}") + return + + import aiosmtplib + from email.message import EmailMessage + + msg = EmailMessage() + msg["From"] = mail_from + msg["To"] = to_email + msg["Subject"] = subject + msg.set_content(text_body) + msg.add_alternative(html_body, subtype="html") + + is_secure = port == 465 + if is_secure: + smtp = aiosmtplib.SMTP( + hostname=host, port=port, use_tls=True, + username=username, password=password, + ) + else: + smtp = aiosmtplib.SMTP( + hostname=host, port=port, start_tls=True, + username=username, password=password, + ) + + async with smtp: + await smtp.send_message(msg) + + +async def load_user_by_id(session: AsyncSession, user_id: int) -> Optional[User]: + stmt = ( + select(User) + .options(selectinload(User.labels)) + .where(User.id == user_id) + ) + result = await session.execute(stmt) + return result.scalar_one_or_none() + + +async def find_or_create_user(session: AsyncSession, email: str) -> User: + result = await session.execute(select(User).where(User.email == email)) + user = result.scalar_one_or_none() + + if user is None: + user = User(email=email) + session.add(user) + await session.flush() + + return user + + +async def create_magic_link( + session: AsyncSession, + user_id: int, + purpose: str = "signin", + expires_minutes: int = 15, +) -> Tuple[str, datetime]: + token = secrets.token_urlsafe(32) + expires = datetime.now(timezone.utc) + timedelta(minutes=expires_minutes) + + ml = MagicLink( + token=token, + user_id=user_id, + purpose=purpose, + expires_at=expires, + ip=request.headers.get("x-forwarded-for", request.remote_addr), + user_agent=request.headers.get("user-agent"), + ) + session.add(ml) + + return token, expires + + +async def validate_magic_link( + session: AsyncSession, + token: str, +) -> Tuple[Optional[User], Optional[str]]: + now = datetime.now(timezone.utc) + + ml = await session.scalar( + select(MagicLink) 
+ .where(MagicLink.token == token) + .with_for_update() + ) + + if not ml or ml.purpose != "signin": + return None, "Invalid or expired link." + + if ml.used_at or ml.expires_at < now: + return None, "This link has expired. Please request a new one." + + user = await session.get(User, ml.user_id) + if not user: + return None, "User not found." + + ml.used_at = now + return user, None + + +def validate_email(email: str) -> Tuple[bool, str]: + email = email.strip().lower() + if not email or "@" not in email: + return False, email + return True, email diff --git a/federation/bp/auth/services/login_redirect.py b/federation/bp/auth/services/login_redirect.py new file mode 100644 index 0000000..aff43d9 --- /dev/null +++ b/federation/bp/auth/services/login_redirect.py @@ -0,0 +1,45 @@ +from urllib.parse import urlparse +from quart import session + +from shared.infrastructure.urls import federation_url + + +LOGIN_REDIRECT_SESSION_KEY = "login_redirect_to" + + +def store_login_redirect_target() -> None: + from quart import request + + target = request.args.get("next") + if not target: + ref = request.referrer or "" + try: + parsed = urlparse(ref) + target = parsed.path or "" + except Exception: + target = "" + + if not target: + return + + # Accept both relative paths and absolute URLs (cross-app redirects) + if target.startswith("http://") or target.startswith("https://"): + session[LOGIN_REDIRECT_SESSION_KEY] = target + elif target.startswith("/") and not target.startswith("//"): + session[LOGIN_REDIRECT_SESSION_KEY] = target + + +def pop_login_redirect_target() -> str: + path = session.pop(LOGIN_REDIRECT_SESSION_KEY, None) + if not path or not isinstance(path, str): + return federation_url("/") + + # Absolute URL: return as-is (cross-app redirect) + if path.startswith("http://") or path.startswith("https://"): + return path + + # Relative path: must start with / and not // + if path.startswith("/") and not path.startswith("//"): + return federation_url(path) + + return 
federation_url("/") diff --git a/federation/bp/fragments/__init__.py b/federation/bp/fragments/__init__.py new file mode 100644 index 0000000..a4af44b --- /dev/null +++ b/federation/bp/fragments/__init__.py @@ -0,0 +1 @@ +from .routes import register as register_fragments diff --git a/federation/bp/fragments/routes.py b/federation/bp/fragments/routes.py new file mode 100644 index 0000000..d4e20d1 --- /dev/null +++ b/federation/bp/fragments/routes.py @@ -0,0 +1,34 @@ +"""Federation app fragment endpoints. + +Exposes HTML fragments at ``/internal/fragments/`` for consumption +by other coop apps via the fragment client. +""" + +from __future__ import annotations + +from quart import Blueprint, Response, request + +from shared.infrastructure.fragments import FRAGMENT_HEADER + + +def register(): + bp = Blueprint("fragments", __name__, url_prefix="/internal/fragments") + + _handlers: dict[str, object] = {} + + @bp.before_request + async def _require_fragment_header(): + if not request.headers.get(FRAGMENT_HEADER): + return Response("", status=403) + + @bp.get("/") + async def get_fragment(fragment_type: str): + handler = _handlers.get(fragment_type) + if handler is None: + return Response("", status=200, content_type="text/html") + html = await handler() + return Response(html, status=200, content_type="text/html") + + bp._fragment_handlers = _handlers + + return bp diff --git a/federation/bp/identity/__init__.py b/federation/bp/identity/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/federation/bp/identity/routes.py b/federation/bp/identity/routes.py new file mode 100644 index 0000000..b445eda --- /dev/null +++ b/federation/bp/identity/routes.py @@ -0,0 +1,108 @@ +"""Username selection flow. + +Users must choose a preferred_username before they can publish. +This creates their ActorProfile with RSA keys. 
+""" +from __future__ import annotations + +import re + +from quart import ( + Blueprint, request, render_template, redirect, url_for, g, abort, +) + +from shared.services.registry import services + + +# Username rules: 3-32 chars, lowercase alphanumeric + underscores +USERNAME_RE = re.compile(r"^[a-z][a-z0-9_]{2,31}$") + +# Reserved usernames +RESERVED = frozenset({ + "admin", "administrator", "root", "system", "moderator", "mod", + "support", "help", "info", "postmaster", "webmaster", "abuse", + "federation", "activitypub", "api", "static", "media", "assets", + "well-known", "nodeinfo", "inbox", "outbox", "followers", "following", +}) + + +def register(url_prefix="/identity"): + bp = Blueprint("identity", __name__, url_prefix=url_prefix) + + @bp.get("/choose-username") + async def choose_username_form(): + if not g.get("user"): + return redirect(url_for("auth.login_form")) + + # Already has a username? + actor = await services.federation.get_actor_by_user_id(g.s, g.user.id) + if actor: + return redirect(url_for("activitypub.actor_profile", username=actor.preferred_username)) + + return await render_template("federation/choose_username.html") + + @bp.post("/choose-username") + async def choose_username(): + if not g.get("user"): + abort(401) + + # Already has a username? + existing = await services.federation.get_actor_by_user_id(g.s, g.user.id) + if existing: + return redirect(url_for("activitypub.actor_profile", username=existing.preferred_username)) + + form = await request.form + username = (form.get("username") or "").strip().lower() + + # Validate format + error = None + if not USERNAME_RE.match(username): + error = ( + "Username must be 3-32 characters, start with a letter, " + "and contain only lowercase letters, numbers, and underscores." + ) + elif username in RESERVED: + error = "This username is reserved." + elif not await services.federation.username_available(g.s, username): + error = "This username is already taken." 
+ + if error: + return await render_template( + "federation/choose_username.html", + error=error, + username=username, + ), 400 + + # Create ActorProfile with RSA keys + display_name = g.user.name or username + actor = await services.federation.create_actor( + g.s, g.user.id, username, + display_name=display_name, + ) + + # Redirect to where they were going, or their new profile + next_url = request.args.get("next") + if next_url: + return redirect(next_url) + return redirect(url_for("activitypub.actor_profile", username=actor.preferred_username)) + + @bp.get("/check-username") + async def check_username(): + """HTMX endpoint to check username availability.""" + username = (request.args.get("username") or "").strip().lower() + + if not username: + return "" + + if not USERNAME_RE.match(username): + return 'Invalid format' + + if username in RESERVED: + return 'Reserved' + + available = await services.federation.username_available(g.s, username) + if available: + return 'Available' + return 'Taken' + + return bp diff --git a/federation/bp/social/__init__.py b/federation/bp/social/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/federation/bp/social/routes.py b/federation/bp/social/routes.py new file mode 100644 index 0000000..7878156 --- /dev/null +++ b/federation/bp/social/routes.py @@ -0,0 +1,499 @@ +"""Social fediverse routes: timeline, compose, search, follow, interactions, notifications.""" +from __future__ import annotations + +import logging +from datetime import datetime + +from quart import Blueprint, request, g, redirect, url_for, abort, render_template, Response + +from shared.services.registry import services + +log = logging.getLogger(__name__) + + +def _require_actor(): + """Return actor context or abort 403.""" + actor = g.get("ctx", {}).get("actor") if hasattr(g, "ctx") else None + if not actor: + actor = getattr(g, "_social_actor", None) + if not actor: + abort(403, "You need to choose a federation username first") + return actor 
def register(url_prefix="/social"):
    """Build the social blueprint.

    Routes: home/public timelines (full page + HTMX pagination),
    compose/delete, actor search + follow/unfollow, like/boost
    interactions, following/followers lists, actor timelines, and
    notifications.
    """
    bp = Blueprint("social", __name__, url_prefix=url_prefix)

    @bp.before_request
    async def load_actor():
        """Load actor profile for authenticated users."""
        if g.get("user"):
            actor = await services.federation.get_actor_by_user_id(g.s, g.user.id)
            g._social_actor = actor

    # -- Timeline -------------------------------------------------------------

    @bp.get("/")
    async def home_timeline():
        if not g.get("user"):
            return redirect(url_for("auth.login_form"))
        actor = _require_actor()
        items = await services.federation.get_home_timeline(g.s, actor.id)
        return await render_template(
            "federation/timeline.html",
            items=items,
            timeline_type="home",
            actor=actor,
        )

    @bp.get("/timeline")
    async def home_timeline_page():
        """HTMX pagination endpoint; ``before`` is an ISO timestamp cursor."""
        actor = _require_actor()
        before_str = request.args.get("before")
        before = None
        if before_str:
            try:
                before = datetime.fromisoformat(before_str)
            except ValueError:
                pass  # bad cursor: fall back to first page
        items = await services.federation.get_home_timeline(
            g.s, actor.id, before=before,
        )
        return await render_template(
            "federation/_timeline_items.html",
            items=items,
            timeline_type="home",
            actor=actor,
        )

    @bp.get("/public")
    async def public_timeline():
        # Public timeline is visible without an actor (anonymous OK).
        items = await services.federation.get_public_timeline(g.s)
        actor = getattr(g, "_social_actor", None)
        return await render_template(
            "federation/timeline.html",
            items=items,
            timeline_type="public",
            actor=actor,
        )

    @bp.get("/public/timeline")
    async def public_timeline_page():
        before_str = request.args.get("before")
        before = None
        if before_str:
            try:
                before = datetime.fromisoformat(before_str)
            except ValueError:
                pass
        items = await services.federation.get_public_timeline(g.s, before=before)
        actor = getattr(g, "_social_actor", None)
        return await render_template(
            "federation/_timeline_items.html",
            items=items,
            timeline_type="public",
            actor=actor,
        )

    # -- Compose --------------------------------------------------------------

    @bp.get("/compose")
    async def compose_form():
        actor = _require_actor()
        reply_to = request.args.get("reply_to")
        return await render_template(
            "federation/compose.html",
            actor=actor,
            reply_to=reply_to,
        )

    @bp.post("/compose")
    async def compose_submit():
        actor = _require_actor()
        form = await request.form
        content = form.get("content", "").strip()
        if not content:
            # Empty post: bounce back to the form rather than create it.
            return redirect(url_for("social.compose_form"))

        visibility = form.get("visibility", "public")
        in_reply_to = form.get("in_reply_to") or None

        await services.federation.create_local_post(
            g.s, actor.id,
            content=content,
            visibility=visibility,
            in_reply_to=in_reply_to,
        )
        return redirect(url_for("social.home_timeline"))

    @bp.post("/delete/<int:post_id>")
    async def delete_post(post_id: int):
        actor = _require_actor()
        await services.federation.delete_local_post(g.s, actor.id, post_id)
        return redirect(url_for("social.home_timeline"))

    # -- Search + Follow ------------------------------------------------------

    @bp.get("/search")
    async def search():
        actor = getattr(g, "_social_actor", None)
        query = request.args.get("q", "").strip()
        actors = []
        total = 0
        followed_urls: set[str] = set()
        if query:
            actors, total = await services.federation.search_actors(g.s, query)
            if actor:
                # Mark results we already follow (Follow vs Unfollow button).
                following, _ = await services.federation.get_following(
                    g.s, actor.preferred_username, page=1, per_page=1000,
                )
                followed_urls = {a.actor_url for a in following}
        return await render_template(
            "federation/search.html",
            query=query,
            actors=actors,
            total=total,
            page=1,
            followed_urls=followed_urls,
            actor=actor,
        )

    @bp.get("/search/page")
    async def search_page():
        actor = getattr(g, "_social_actor", None)
        query = request.args.get("q", "").strip()
        page = request.args.get("page", 1, type=int)
        actors = []
        total = 0
        followed_urls: set[str] = set()
        if query:
            actors, total = await services.federation.search_actors(
                g.s, query, page=page,
            )
            if actor:
                following, _ = await services.federation.get_following(
                    g.s, actor.preferred_username, page=1, per_page=1000,
                )
                followed_urls = {a.actor_url for a in following}
        return await render_template(
            "federation/_search_results.html",
            actors=actors,
            total=total,
            page=page,
            query=query,
            followed_urls=followed_urls,
            actor=actor,
        )

    @bp.post("/follow")
    async def follow():
        actor = _require_actor()
        form = await request.form
        remote_actor_url = form.get("actor_url", "")
        if remote_actor_url:
            await services.federation.send_follow(
                g.s, actor.preferred_username, remote_actor_url,
            )
        if request.headers.get("HX-Request"):
            return await _actor_card_response(actor, remote_actor_url, is_followed=True)
        return redirect(request.referrer or url_for("social.search"))

    @bp.post("/unfollow")
    async def unfollow():
        actor = _require_actor()
        form = await request.form
        remote_actor_url = form.get("actor_url", "")
        if remote_actor_url:
            await services.federation.unfollow(
                g.s, actor.preferred_username, remote_actor_url,
            )
        if request.headers.get("HX-Request"):
            return await _actor_card_response(actor, remote_actor_url, is_followed=False)
        return redirect(request.referrer or url_for("social.search"))

    async def _actor_card_response(actor, remote_actor_url, is_followed):
        """Re-render a single actor card after follow/unfollow via HTMX."""
        remote_dto = await services.federation.get_or_fetch_remote_actor(
            g.s, remote_actor_url,
        )
        if not remote_dto:
            return Response("", status=200)
        followed_urls = {remote_actor_url} if is_followed else set()
        # Detect list context from referer
        referer = request.referrer or ""
        if "/followers" in referer:
            list_type = "followers"
        else:
            list_type = "following"
        return await render_template(
            "federation/_actor_list_items.html",
            actors=[remote_dto],
            total=0,
            page=1,
            list_type=list_type,
            followed_urls=followed_urls,
            actor=actor,
        )

    # -- Interactions ---------------------------------------------------------

    @bp.post("/like")
    async def like():
        actor = _require_actor()
        form = await request.form
        object_id = form.get("object_id", "")
        author_inbox = form.get("author_inbox", "")
        await services.federation.like_post(g.s, actor.id, object_id, author_inbox)
        # Return updated buttons for HTMX
        return await _interaction_buttons_response(actor, object_id, author_inbox)

    @bp.post("/unlike")
    async def unlike():
        actor = _require_actor()
        form = await request.form
        object_id = form.get("object_id", "")
        author_inbox = form.get("author_inbox", "")
        await services.federation.unlike_post(g.s, actor.id, object_id, author_inbox)
        return await _interaction_buttons_response(actor, object_id, author_inbox)

    @bp.post("/boost")
    async def boost():
        actor = _require_actor()
        form = await request.form
        object_id = form.get("object_id", "")
        author_inbox = form.get("author_inbox", "")
        await services.federation.boost_post(g.s, actor.id, object_id, author_inbox)
        return await _interaction_buttons_response(actor, object_id, author_inbox)

    @bp.post("/unboost")
    async def unboost():
        actor = _require_actor()
        form = await request.form
        object_id = form.get("object_id", "")
        author_inbox = form.get("author_inbox", "")
        await services.federation.unboost_post(g.s, actor.id, object_id, author_inbox)
        return await _interaction_buttons_response(actor, object_id, author_inbox)

    async def _interaction_buttons_response(actor, object_id, author_inbox):
        """Re-render interaction buttons after a like/boost action.

        Recomputes like/boost counts and the current actor's own
        like/boost state for the post identified by *object_id*.
        """
        # Unused imports from the original (APRemotePost, APActivity,
        # SqlFederationService) removed.
        from shared.models.federation import APInteraction
        from sqlalchemy import select

        svc = services.federation
        # NOTE(review): reaches into a private helper of the federation
        # service; consider promoting _resolve_post to the public API.
        post_type, post_id = await svc._resolve_post(g.s, object_id)

        like_count = 0
        boost_count = 0
        liked_by_me = False
        boosted_by_me = False

        if post_type:
            from sqlalchemy import func as sa_func
            like_count = (await g.s.execute(
                select(sa_func.count(APInteraction.id)).where(
                    APInteraction.post_type == post_type,
                    APInteraction.post_id == post_id,
                    APInteraction.interaction_type == "like",
                )
            )).scalar() or 0
            boost_count = (await g.s.execute(
                select(sa_func.count(APInteraction.id)).where(
                    APInteraction.post_type == post_type,
                    APInteraction.post_id == post_id,
                    APInteraction.interaction_type == "boost",
                )
            )).scalar() or 0
            liked_by_me = bool((await g.s.execute(
                select(APInteraction.id).where(
                    APInteraction.actor_profile_id == actor.id,
                    APInteraction.post_type == post_type,
                    APInteraction.post_id == post_id,
                    APInteraction.interaction_type == "like",
                ).limit(1)
            )).scalar())
            boosted_by_me = bool((await g.s.execute(
                select(APInteraction.id).where(
                    APInteraction.actor_profile_id == actor.id,
                    APInteraction.post_type == post_type,
                    APInteraction.post_id == post_id,
                    APInteraction.interaction_type == "boost",
                ).limit(1)
            )).scalar())

        return await render_template(
            "federation/_interaction_buttons.html",
            item_object_id=object_id,
            item_author_inbox=author_inbox,
            like_count=like_count,
            boost_count=boost_count,
            liked_by_me=liked_by_me,
            boosted_by_me=boosted_by_me,
            actor=actor,
        )

    # -- Following / Followers ------------------------------------------------

    @bp.get("/following")
    async def following_list():
        actor = _require_actor()
        actors, total = await services.federation.get_following(
            g.s, actor.preferred_username,
        )
        return await render_template(
            "federation/following.html",
            actors=actors,
            total=total,
            page=1,
            actor=actor,
        )

    @bp.get("/following/page")
    async def following_list_page():
        actor = _require_actor()
        page = request.args.get("page", 1, type=int)
        actors, total = await services.federation.get_following(
            g.s, actor.preferred_username, page=page,
        )
        return await render_template(
            "federation/_actor_list_items.html",
            actors=actors,
            total=total,
            page=page,
            list_type="following",
            followed_urls=set(),
            actor=actor,
        )

    @bp.get("/followers")
    async def followers_list():
        actor = _require_actor()
        actors, total = await services.federation.get_followers_paginated(
            g.s, actor.preferred_username,
        )
        # Build set of followed actor URLs to show Follow Back vs Unfollow
        following, _ = await services.federation.get_following(
            g.s, actor.preferred_username, page=1, per_page=1000,
        )
        followed_urls = {a.actor_url for a in following}
        return await render_template(
            "federation/followers.html",
            actors=actors,
            total=total,
            page=1,
            followed_urls=followed_urls,
            actor=actor,
        )

    @bp.get("/followers/page")
    async def followers_list_page():
        actor = _require_actor()
        page = request.args.get("page", 1, type=int)
        actors, total = await services.federation.get_followers_paginated(
            g.s, actor.preferred_username, page=page,
        )
        following, _ = await services.federation.get_following(
            g.s, actor.preferred_username, page=1, per_page=1000,
        )
        followed_urls = {a.actor_url for a in following}
        return await render_template(
            "federation/_actor_list_items.html",
            actors=actors,
            total=total,
            page=page,
            list_type="followers",
            followed_urls=followed_urls,
            actor=actor,
        )

    # Fix: the handlers take ``id: int`` so these rules need the
    # ``<int:id>`` converter (lost in extraction; the bare rules
    # ``/actor/`` and ``/actor//timeline`` cannot bind the argument).
    @bp.get("/actor/<int:id>")
    async def actor_timeline(id: int):
        actor = getattr(g, "_social_actor", None)
        # Get remote actor info
        from shared.models.federation import RemoteActor
        from sqlalchemy import select as sa_select
        remote = (
            await g.s.execute(
                sa_select(RemoteActor).where(RemoteActor.id == id)
            )
        ).scalar_one_or_none()
        if not remote:
            abort(404)
        # NOTE(review): private helper of federation_impl; consider a
        # public conversion entry point.
        from shared.services.federation_impl import _remote_actor_to_dto
        remote_dto = _remote_actor_to_dto(remote)
        items = await services.federation.get_actor_timeline(g.s, id)
        # Check if we follow this actor
        is_following = False
        if actor:
            from shared.models.federation import APFollowing
            existing = (
                await g.s.execute(
                    sa_select(APFollowing).where(
                        APFollowing.actor_profile_id == actor.id,
                        APFollowing.remote_actor_id == id,
                    )
                )
            ).scalar_one_or_none()
            is_following = existing is not None
        return await render_template(
            "federation/actor_timeline.html",
            remote_actor=remote_dto,
            items=items,
            is_following=is_following,
            actor=actor,
        )

    @bp.get("/actor/<int:id>/timeline")
    async def actor_timeline_page(id: int):
        actor = getattr(g, "_social_actor", None)
        before_str = request.args.get("before")
        before = None
        if before_str:
            try:
                before = datetime.fromisoformat(before_str)
            except ValueError:
                pass
        items = await services.federation.get_actor_timeline(
            g.s, id, before=before,
        )
        return await render_template(
            "federation/_timeline_items.html",
            items=items,
            timeline_type="actor",
            actor_id=id,
            actor=actor,
        )

    # -- Notifications --------------------------------------------------------

    @bp.get("/notifications")
    async def notifications():
        actor = _require_actor()
        items = await services.federation.get_notifications(g.s, actor.id)
        # Viewing the page marks everything read.
        await services.federation.mark_notifications_read(g.s, actor.id)
        return await render_template(
            "federation/notifications.html",
            notifications=items,
            actor=actor,
        )

    @bp.get("/notifications/count")
    async def notification_count():
        actor = getattr(g, "_social_actor", None)
        if not actor:
            return Response("0", content_type="text/plain")
        count = await services.federation.unread_notification_count(g.s, actor.id)
        if count > 0:
            # NOTE(review): surrounding badge markup appears to have
            # been lost in extraction; the source shows a bare count.
            return Response(
                f'{count}',
                content_type="text/html",
            )
        return Response("", content_type="text/html")

    @bp.post("/notifications/read")
    async def mark_read():
        actor = _require_actor()
        await services.federation.mark_notifications_read(g.s, actor.id)
        return redirect(url_for("social.notifications"))

    return bp
b/federation/config/app-config.yaml new file mode 100644 index 0000000..3aa6a76 --- /dev/null +++ b/federation/config/app-config.yaml @@ -0,0 +1,84 @@ +# App-wide settings +base_host: "wholesale.suma.coop" +base_login: https://wholesale.suma.coop/customer/account/login/ +base_url: https://wholesale.suma.coop/ +title: Rose Ash +market_root: /market +market_title: Market +blog_root: / +blog_title: all the news +cart_root: /cart +app_urls: + blog: "http://localhost:8000" + market: "http://localhost:8001" + cart: "http://localhost:8002" + events: "http://localhost:8003" + federation: "http://localhost:8004" +cache: + fs_root: _snapshot # <- absolute path to your snapshot dir +categories: + allow: + Basics: basics + Branded Goods: branded-goods + Chilled: chilled + Frozen: frozen + Non-foods: non-foods + Supplements: supplements + Christmas: christmas +slugs: + skip: + - "" + - customer + - account + - checkout + - wishlist + - sales + - contact + - privacy-policy + - terms-and-conditions + - delivery + - catalogsearch + - quickorder + - apply + - search + - static + - media +section-titles: + - ingredients + - allergy information + - allergens + - nutritional information + - nutrition + - storage + - directions + - preparation + - serving suggestions + - origin + - country of origin + - recycling + - general information + - additional information + - a note about prices + +blacklist: + category: + - branded-goods/alcoholic-drinks + - branded-goods/beers + - branded-goods/wines + - branded-goods/ciders + product: + - list-price-suma-current-suma-price-list-each-bk012-2-html + - ---just-lem-just-wholefoods-jelly-crystals-lemon-12-x-85g-vf067-2-html + product-details: + - General Information + - A Note About Prices + +# SumUp payment settings (fill these in for live usage) +sumup: + merchant_code: "ME4J6100" + currency: "GBP" + # Name of the environment variable that holds your SumUp API key + api_key_env: "SUMUP_API_KEY" + webhook_secret: 
"CHANGE_ME_TO_A_LONG_RANDOM_STRING" + checkout_reference_prefix: 'dev-' + diff --git a/federation/entrypoint.sh b/federation/entrypoint.sh new file mode 100755 index 0000000..05d9e3d --- /dev/null +++ b/federation/entrypoint.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Optional: wait for Postgres to be reachable +if [[ -n "${DATABASE_HOST:-}" && -n "${DATABASE_PORT:-}" ]]; then + echo "Waiting for Postgres at ${DATABASE_HOST}:${DATABASE_PORT}..." + for i in {1..60}; do + (echo > /dev/tcp/${DATABASE_HOST}/${DATABASE_PORT}) >/dev/null 2>&1 && break || true + sleep 1 + done +fi + +# Federation can optionally run migrations (set RUN_MIGRATIONS=true) +if [[ "${RUN_MIGRATIONS:-}" == "true" ]]; then + echo "Running Alembic migrations..." + (cd shared && alembic upgrade head) +fi + +# Clear Redis page cache on deploy +if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then + echo "Flushing Redis cache..." + python3 -c " +import redis, os +r = redis.from_url(os.environ['REDIS_URL']) +r.flushall() +print('Redis cache cleared.') +" || echo "Redis flush failed (non-fatal), continuing..." +fi + +# Start the app +echo "Starting Hypercorn (${APP_MODULE:-app:app})..." 
+PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" --bind 0.0.0.0:${PORT:-8000} diff --git a/federation/models/__init__.py b/federation/models/__init__.py new file mode 100644 index 0000000..7d27499 --- /dev/null +++ b/federation/models/__init__.py @@ -0,0 +1,9 @@ +"""Re-export federation models from shared.models.""" +from shared.models.federation import ( # noqa: F401 + ActorProfile, + APActivity, + APFollower, + APInboxItem, + APAnchor, + IPFSPin, +) diff --git a/federation/path_setup.py b/federation/path_setup.py new file mode 100644 index 0000000..c7166f7 --- /dev/null +++ b/federation/path_setup.py @@ -0,0 +1,9 @@ +import sys +import os + +_app_dir = os.path.dirname(os.path.abspath(__file__)) +_project_root = os.path.dirname(_app_dir) + +for _p in (_project_root, _app_dir): + if _p not in sys.path: + sys.path.insert(0, _p) diff --git a/federation/services/__init__.py b/federation/services/__init__.py new file mode 100644 index 0000000..e6794e2 --- /dev/null +++ b/federation/services/__init__.py @@ -0,0 +1,27 @@ +"""Federation app service registration.""" +from __future__ import annotations + + +def register_domain_services() -> None: + """Register services for the federation app. + + Federation owns: ActorProfile, APActivity, APFollower, APInboxItem, + APAnchor, IPFSPin. + Standard deployment registers all services as real DB impls (shared DB). 
+ """ + from shared.services.registry import services + from shared.services.federation_impl import SqlFederationService + from shared.services.blog_impl import SqlBlogService + from shared.services.calendar_impl import SqlCalendarService + from shared.services.market_impl import SqlMarketService + from shared.services.cart_impl import SqlCartService + + services.federation = SqlFederationService() + if not services.has("blog"): + services.blog = SqlBlogService() + if not services.has("calendar"): + services.calendar = SqlCalendarService() + if not services.has("market"): + services.market = SqlMarketService() + if not services.has("cart"): + services.cart = SqlCartService() diff --git a/federation/templates/_email/magic_link.html b/federation/templates/_email/magic_link.html new file mode 100644 index 0000000..3c1eac6 --- /dev/null +++ b/federation/templates/_email/magic_link.html @@ -0,0 +1,33 @@ + + + + + + +
    + + +
    +

    {{ site_name }}

    +

    Sign in to your account

    +

    + Click the button below to sign in. This link will expire in 15 minutes. +

    +
    + + Sign in + +
    +

    Or copy and paste this link into your browser:

    +

    + {{ link_url }} +

    +
    +

    + If you did not request this email, you can safely ignore it. +

    +
    +
    + + diff --git a/federation/templates/_email/magic_link.txt b/federation/templates/_email/magic_link.txt new file mode 100644 index 0000000..28a2efb --- /dev/null +++ b/federation/templates/_email/magic_link.txt @@ -0,0 +1,8 @@ +Hello, + +Click this link to sign in: +{{ link_url }} + +This link will expire in 15 minutes. + +If you did not request this, you can ignore this email. diff --git a/federation/templates/_types/federation/index.html b/federation/templates/_types/federation/index.html new file mode 100644 index 0000000..e2caacb --- /dev/null +++ b/federation/templates/_types/federation/index.html @@ -0,0 +1,3 @@ +{% extends '_types/root/_index.html' %} +{% block meta %}{% endblock %} +{% block content %}{% endblock %} diff --git a/federation/templates/_types/social/header/_header.html b/federation/templates/_types/social/header/_header.html new file mode 100644 index 0000000..3bc55e3 --- /dev/null +++ b/federation/templates/_types/social/header/_header.html @@ -0,0 +1,52 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='social-row', oob=oob) %} + + {% endcall %} +{% endmacro %} diff --git a/federation/templates/_types/social/index.html b/federation/templates/_types/social/index.html new file mode 100644 index 0000000..8eeed33 --- /dev/null +++ b/federation/templates/_types/social/index.html @@ -0,0 +1,10 @@ +{% extends '_types/root/_index.html' %} +{% block meta %}{% endblock %} +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('social-header-child', '_types/social/header/_header.html') %} + {% endcall %} +{% endblock %} +{% block content %} + {% block social_content %}{% endblock %} +{% endblock %} diff --git a/federation/templates/auth/check_email.html b/federation/templates/auth/check_email.html new file mode 100644 index 0000000..5eb1b61 --- /dev/null +++ b/federation/templates/auth/check_email.html @@ -0,0 +1,19 @@ +{% 
extends "_types/root/_index.html" %} +{% block meta %}{% endblock %} +{% block title %}Check your email — Rose Ash{% endblock %} +{% block content %} +
    +

    Check your email

    +

    + We sent a sign-in link to {{ email }}. +

    +

    + Click the link in the email to sign in. The link expires in 15 minutes. +

    + {% if email_error %} +
    + {{ email_error }} +
    + {% endif %} +
    +{% endblock %} diff --git a/federation/templates/auth/login.html b/federation/templates/auth/login.html new file mode 100644 index 0000000..79031e5 --- /dev/null +++ b/federation/templates/auth/login.html @@ -0,0 +1,36 @@ +{% extends "_types/root/_index.html" %} +{% block meta %}{% endblock %} +{% block title %}Login — Rose Ash{% endblock %} +{% block content %} +
    +

    Sign in

    + + {% if error %} +
    + {{ error }} +
    + {% endif %} + +
    + +
    + + +
    + +
    +
    +{% endblock %} diff --git a/federation/templates/federation/_actor_list_items.html b/federation/templates/federation/_actor_list_items.html new file mode 100644 index 0000000..13b18aa --- /dev/null +++ b/federation/templates/federation/_actor_list_items.html @@ -0,0 +1,63 @@ +{% for a in actors %} +
    + {% if a.icon_url %} + + {% else %} +
    + {{ (a.display_name or a.preferred_username)[0] | upper }} +
    + {% endif %} + +
    + {% if list_type == "following" and a.id %} + + {{ a.display_name or a.preferred_username }} + + {% else %} + + {{ a.display_name or a.preferred_username }} + + {% endif %} +
    @{{ a.preferred_username }}@{{ a.domain }}
    + {% if a.summary %} +
    {{ a.summary | striptags }}
    + {% endif %} +
    + + {% if actor %} +
    + {% if list_type == "following" or a.actor_url in (followed_urls or []) %} +
    + + + +
    + {% else %} +
    + + + +
    + {% endif %} +
    + {% endif %} +
    +{% endfor %} + +{% if actors | length >= 20 %} +
    +
    +{% endif %} diff --git a/federation/templates/federation/_interaction_buttons.html b/federation/templates/federation/_interaction_buttons.html new file mode 100644 index 0000000..5551732 --- /dev/null +++ b/federation/templates/federation/_interaction_buttons.html @@ -0,0 +1,61 @@ +{% set oid = item.object_id if item is defined and item.object_id is defined else item_object_id | default('') %} +{% set ainbox = item.author_inbox if item is defined and item.author_inbox is defined else item_author_inbox | default('') %} +{% set lcount = item.like_count if item is defined and item.like_count is defined else like_count | default(0) %} +{% set bcount = item.boost_count if item is defined and item.boost_count is defined else boost_count | default(0) %} +{% set liked = item.liked_by_me if item is defined and item.liked_by_me is defined else liked_by_me | default(false) %} +{% set boosted = item.boosted_by_me if item is defined and item.boosted_by_me is defined else boosted_by_me | default(false) %} + +
    + {% if liked %} +
    + + + + +
    + {% else %} +
    + + + + +
    + {% endif %} + + {% if boosted %} +
    + + + + +
    + {% else %} +
    + + + + +
    + {% endif %} + + {% if oid %} + Reply + {% endif %} +
    diff --git a/federation/templates/federation/_notification.html b/federation/templates/federation/_notification.html new file mode 100644 index 0000000..d18ef4d --- /dev/null +++ b/federation/templates/federation/_notification.html @@ -0,0 +1,42 @@ +
    +
    + {% if notif.from_actor_icon %} + + {% else %} +
    + {{ notif.from_actor_name[0] | upper if notif.from_actor_name else '?' }} +
    + {% endif %} + +
    +
    + {{ notif.from_actor_name }} + + @{{ notif.from_actor_username }}{% if notif.from_actor_domain %}@{{ notif.from_actor_domain }}{% endif %} + + + {% if notif.notification_type == "follow" %} + followed you + {% elif notif.notification_type == "like" %} + liked your post + {% elif notif.notification_type == "boost" %} + boosted your post + {% elif notif.notification_type == "mention" %} + mentioned you + {% elif notif.notification_type == "reply" %} + replied to your post + {% endif %} +
    + + {% if notif.target_content_preview %} +
    + {{ notif.target_content_preview }} +
    + {% endif %} + +
    + {{ notif.created_at.strftime('%b %d, %H:%M') }} +
    +
    +
    +
    diff --git a/federation/templates/federation/_post_card.html b/federation/templates/federation/_post_card.html new file mode 100644 index 0000000..33102ca --- /dev/null +++ b/federation/templates/federation/_post_card.html @@ -0,0 +1,52 @@ +
    + {% if item.boosted_by %} +
    + Boosted by {{ item.boosted_by }} +
    + {% endif %} + +
    + {% if item.actor_icon %} + + {% else %} +
    + {{ item.actor_name[0] | upper if item.actor_name else '?' }} +
    + {% endif %} + +
    +
    + {{ item.actor_name }} + + @{{ item.actor_username }}{% if item.actor_domain %}@{{ item.actor_domain }}{% endif %} + + + {% if item.published %} + {{ item.published.strftime('%b %d, %H:%M') }} + {% endif %} + +
    + + {% if item.summary %} +
    + CW: {{ item.summary }} +
    {{ item.content | safe }}
    +
    + {% else %} +
    {{ item.content | safe }}
    + {% endif %} + + {% if item.url and item.post_type == "remote" %} + + original + + {% endif %} + + {% if actor %} +
    + {% include "federation/_interaction_buttons.html" with context %} +
    + {% endif %} +
    +
    +
    diff --git a/federation/templates/federation/_search_results.html b/federation/templates/federation/_search_results.html new file mode 100644 index 0000000..ca8c248 --- /dev/null +++ b/federation/templates/federation/_search_results.html @@ -0,0 +1,61 @@ +{% for a in actors %} +
    + {% if a.icon_url %} + + {% else %} +
    + {{ (a.display_name or a.preferred_username)[0] | upper }} +
    + {% endif %} + +
    + {% if a.id %} + + {{ a.display_name or a.preferred_username }} + + {% else %} + {{ a.display_name or a.preferred_username }} + {% endif %} +
    @{{ a.preferred_username }}@{{ a.domain }}
    + {% if a.summary %} +
    {{ a.summary | striptags }}
    + {% endif %} +
    + + {% if actor %} +
    + {% if a.actor_url in (followed_urls or []) %} +
    + + + +
    + {% else %} +
    + + + +
    + {% endif %} +
    + {% endif %} +
    +{% endfor %} + +{% if actors | length >= 20 %} +
    +
    +{% endif %} diff --git a/federation/templates/federation/_timeline_items.html b/federation/templates/federation/_timeline_items.html new file mode 100644 index 0000000..c004743 --- /dev/null +++ b/federation/templates/federation/_timeline_items.html @@ -0,0 +1,18 @@ +{% for item in items %} + {% include "federation/_post_card.html" %} +{% endfor %} + +{% if items %} + {% set last = items[-1] %} + {% if timeline_type == "actor" %} +
    +
    + {% else %} +
    +
    + {% endif %} +{% endif %} diff --git a/federation/templates/federation/account.html b/federation/templates/federation/account.html new file mode 100644 index 0000000..ef7f7d6 --- /dev/null +++ b/federation/templates/federation/account.html @@ -0,0 +1,27 @@ +{% extends "_types/social/index.html" %} +{% block title %}Account — Rose Ash{% endblock %} +{% block social_content %} +
    +

    Account

    + +
    +

    Email: {{ g.user.email }}

    + {% if actor %} +

    Username: @{{ actor.preferred_username }}

    +

    + + View profile + +

    + {% else %} +

    + + Choose a username to start publishing + +

    + {% endif %} +
    +
    +{% endblock %} diff --git a/federation/templates/federation/actor_card.html b/federation/templates/federation/actor_card.html new file mode 100644 index 0000000..cd97c70 --- /dev/null +++ b/federation/templates/federation/actor_card.html @@ -0,0 +1,45 @@ +
    +
    + {% if result.icon_url %} + + {% else %} +
    + {{ result.preferred_username[0] | upper }} +
    + {% endif %} + +
    +
    + {{ result.display_name or result.preferred_username }} + @{{ result.preferred_username }}@{{ result.domain }} +
    + + {% if result.summary %} +
    + {{ result.summary | safe }} +
    + {% endif %} + + {% if actor %} +
    +
    + + + +
    +
    + + + +
    +
    + {% endif %} +
    +
    +
    diff --git a/federation/templates/federation/actor_timeline.html b/federation/templates/federation/actor_timeline.html new file mode 100644 index 0000000..0c69f8a --- /dev/null +++ b/federation/templates/federation/actor_timeline.html @@ -0,0 +1,53 @@ +{% extends "_types/social/index.html" %} + +{% block title %}{{ remote_actor.display_name or remote_actor.preferred_username }} — Rose Ash{% endblock %} + +{% block social_content %} +
    +
    + {% if remote_actor.icon_url %} + + {% else %} +
    + {{ (remote_actor.display_name or remote_actor.preferred_username)[0] | upper }} +
    + {% endif %} + +
    +

    {{ remote_actor.display_name or remote_actor.preferred_username }}

    +
    @{{ remote_actor.preferred_username }}@{{ remote_actor.domain }}
    + {% if remote_actor.summary %} +
    {{ remote_actor.summary | safe }}
    + {% endif %} +
    + + {% if actor %} +
    + {% if is_following %} +
    + + + +
    + {% else %} +
    + + + +
    + {% endif %} +
    + {% endif %} +
    +
    + +
    + {% set timeline_type = "actor" %} + {% set actor_id = remote_actor.id %} + {% include "federation/_timeline_items.html" %} +
    +{% endblock %} diff --git a/federation/templates/federation/choose_username.html b/federation/templates/federation/choose_username.html new file mode 100644 index 0000000..259afb2 --- /dev/null +++ b/federation/templates/federation/choose_username.html @@ -0,0 +1,54 @@ +{% extends "_types/social/index.html" %} +{% block title %}Choose Username — Rose Ash{% endblock %} +{% block social_content %} +
    +

    Choose your username

    +

    + This will be your identity on the fediverse: + @username@{{ config.get('ap_domain', 'rose-ash.com') }} +

    + + {% if error %} +
    + {{ error }} +
    + {% endif %} + +
    + +
    + +
    + @ + +
    +
    +

    + 3-32 characters. Lowercase letters, numbers, underscores. Must start with a letter. +

    +
    + + +
    +
    +{% endblock %} diff --git a/federation/templates/federation/compose.html b/federation/templates/federation/compose.html new file mode 100644 index 0000000..d82a031 --- /dev/null +++ b/federation/templates/federation/compose.html @@ -0,0 +1,34 @@ +{% extends "_types/social/index.html" %} + +{% block title %}Compose — Rose Ash{% endblock %} + +{% block social_content %} +

    Compose

    + +
    + + {% if reply_to %} + +
    + Replying to {{ reply_to }} +
    + {% endif %} + + + +
    + + + +
    +
    +{% endblock %} diff --git a/federation/templates/federation/followers.html b/federation/templates/federation/followers.html new file mode 100644 index 0000000..07eb862 --- /dev/null +++ b/federation/templates/federation/followers.html @@ -0,0 +1,12 @@ +{% extends "_types/social/index.html" %} + +{% block title %}Followers — Rose Ash{% endblock %} + +{% block social_content %} +

    Followers ({{ total }})

    + +
    + {% set list_type = "followers" %} + {% include "federation/_actor_list_items.html" %} +
    +{% endblock %} diff --git a/federation/templates/federation/following.html b/federation/templates/federation/following.html new file mode 100644 index 0000000..ca900e4 --- /dev/null +++ b/federation/templates/federation/following.html @@ -0,0 +1,13 @@ +{% extends "_types/social/index.html" %} + +{% block title %}Following — Rose Ash{% endblock %} + +{% block social_content %} +

    Following ({{ total }})

    + +
    + {% set list_type = "following" %} + {% set followed_urls = [] %} + {% include "federation/_actor_list_items.html" %} +
    +{% endblock %} diff --git a/federation/templates/federation/notifications.html b/federation/templates/federation/notifications.html new file mode 100644 index 0000000..11eb3f8 --- /dev/null +++ b/federation/templates/federation/notifications.html @@ -0,0 +1,17 @@ +{% extends "_types/social/index.html" %} + +{% block title %}Notifications — Rose Ash{% endblock %} + +{% block social_content %} +

    Notifications

    + +{% if not notifications %} +

    No notifications yet.

    +{% endif %} + +
    + {% for notif in notifications %} + {% include "federation/_notification.html" %} + {% endfor %} +
    +{% endblock %} diff --git a/federation/templates/federation/profile.html b/federation/templates/federation/profile.html new file mode 100644 index 0000000..2e21a08 --- /dev/null +++ b/federation/templates/federation/profile.html @@ -0,0 +1,32 @@ +{% extends "_types/social/index.html" %} +{% block title %}@{{ actor.preferred_username }} — Rose Ash{% endblock %} +{% block social_content %} +
    +
    +

    {{ actor.display_name or actor.preferred_username }}

    +

    @{{ actor.preferred_username }}@{{ config.get('ap_domain', 'rose-ash.com') }}

    + {% if actor.summary %} +

    {{ actor.summary }}

    + {% endif %} +
    + +

    Activities ({{ total }})

    + {% if activities %} +
    + {% for a in activities %} +
    +
    + {{ a.activity_type }} + {{ a.published.strftime('%Y-%m-%d %H:%M') if a.published }} +
    + {% if a.object_type %} + {{ a.object_type }} + {% endif %} +
    + {% endfor %} +
    + {% else %} +

    No activities yet.

    + {% endif %} +
    +{% endblock %} diff --git a/federation/templates/federation/search.html b/federation/templates/federation/search.html new file mode 100644 index 0000000..62c33dc --- /dev/null +++ b/federation/templates/federation/search.html @@ -0,0 +1,32 @@ +{% extends "_types/social/index.html" %} + +{% block title %}Search — Rose Ash{% endblock %} + +{% block social_content %} +

    Search

    + +
    +
    + + +
    +
    + +{% if query and total %} +

    {{ total }} result{{ 's' if total != 1 }} for {{ query }}

    +{% elif query %} +

    No results found for {{ query }}

    +{% endif %} + +
    + {% include "federation/_search_results.html" %} +
    +{% endblock %} diff --git a/federation/templates/federation/timeline.html b/federation/templates/federation/timeline.html new file mode 100644 index 0000000..74861f3 --- /dev/null +++ b/federation/templates/federation/timeline.html @@ -0,0 +1,19 @@ +{% extends "_types/social/index.html" %} + +{% block title %}{{ "Home" if timeline_type == "home" else "Public" }} Timeline — Rose Ash{% endblock %} + +{% block social_content %} +
    +

    {{ "Home" if timeline_type == "home" else "Public" }} Timeline

    + {% if actor %} + + Compose + + {% endif %} +
    + +
    + {% include "federation/_timeline_items.html" %} +
    +{% endblock %} diff --git a/market/.gitignore b/market/.gitignore new file mode 100644 index 0000000..1e06fbc --- /dev/null +++ b/market/.gitignore @@ -0,0 +1,12 @@ +__pycache__/ +*.pyc +*.pyo +.env +node_modules/ +_snapshot/ +_debug/ +*.egg-info/ +dist/ +build/ +.venv/ +venv/ diff --git a/market/Dockerfile b/market/Dockerfile new file mode 100644 index 0000000..836fa1c --- /dev/null +++ b/market/Dockerfile @@ -0,0 +1,50 @@ +# syntax=docker/dockerfile:1 + +# ---------- Python application ---------- +FROM python:3.11-slim AS base + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONPATH=/app \ + PIP_NO_CACHE_DIR=1 \ + APP_PORT=8000 \ + APP_MODULE=app:app + +WORKDIR /app + +# Install system deps + psql client +RUN apt-get update && apt-get install -y --no-install-recommends \ + ca-certificates \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +COPY shared/requirements.txt ./requirements.txt +RUN pip install -r requirements.txt + +# Shared code (replaces submodule) +COPY shared/ ./shared/ + +# App code +COPY market/ ./ + +# Sibling models for cross-domain SQLAlchemy imports +COPY blog/__init__.py ./blog/__init__.py +COPY blog/models/ ./blog/models/ +COPY cart/__init__.py ./cart/__init__.py +COPY cart/models/ ./cart/models/ +COPY events/__init__.py ./events/__init__.py +COPY events/models/ ./events/models/ +COPY federation/__init__.py ./federation/__init__.py +COPY federation/models/ ./federation/models/ +COPY account/__init__.py ./account/__init__.py +COPY account/models/ ./account/models/ + +# ---------- Runtime setup ---------- +COPY market/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh + +RUN useradd -m -u 10001 appuser && chown -R appuser:appuser /app +USER appuser + +EXPOSE ${APP_PORT} +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/market/README.md b/market/README.md new file mode 100644 index 0000000..6d9a840 --- /dev/null +++ b/market/README.md @@ -0,0 +1,56 @@ +# Market App + +Product 
browsing and marketplace service for the Rose Ash cooperative. Displays products scraped from Suma Wholesale. + +## Architecture + +One of five Quart microservices sharing a single PostgreSQL database: + +| App | Port | Domain | +|-----|------|--------| +| blog (coop) | 8000 | Auth, blog, admin, menus, snippets | +| **market** | 8001 | Product browsing, Suma scraping | +| cart | 8002 | Shopping cart, checkout, orders | +| events | 8003 | Calendars, bookings, tickets | +| federation | 8004 | ActivityPub, fediverse social | + +## Structure + +``` +app.py # Application factory (create_base_app + blueprints) +path_setup.py # Adds project root + app dir to sys.path +config/app-config.yaml # App URLs, feature flags +models/ # Market-domain models (+ re-export stubs) +bp/ # Blueprints + market/ # Market root, navigation, category listing + browse/ # Product browsing with filters and infinite scroll + product/ # Product detail pages + cart/ # Page-scoped cart views + api/ # Product sync API (used by scraper) +scrape/ # Suma Wholesale scraper +services/ # register_domain_services() — wires market + cart +shared/ # Submodule -> git.rose-ash.com/coop/shared.git +``` + +## Cross-Domain Communication + +- `services.cart.*` — cart summary via CartService protocol +- `services.federation.*` — AP publishing via FederationService protocol +- `shared.services.navigation` — site navigation tree + +## Scraping + +```bash +bash scrape.sh # Full Suma Wholesale catalogue +bash scrape-test.sh # Limited test scrape +``` + +## Running + +```bash +export DATABASE_URL_ASYNC=postgresql+asyncpg://user:pass@localhost/coop +export REDIS_URL=redis://localhost:6379/0 +export SECRET_KEY=your-secret-key + +hypercorn app:app --bind 0.0.0.0:8001 +``` diff --git a/market/__init__.py b/market/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/market/app.py b/market/app.py new file mode 100644 index 0000000..bbfcbcf --- /dev/null +++ b/market/app.py @@ -0,0 +1,188 @@ +from __future__ 
import annotations +import path_setup # noqa: F401 # adds shared/ to sys.path + +from pathlib import Path + +from quart import g, abort, request +from jinja2 import FileSystemLoader, ChoiceLoader +from sqlalchemy import select + +from shared.infrastructure.factory import create_base_app +from shared.config import config + +from bp import register_market_bp, register_all_markets, register_page_markets, register_fragments + + +async def market_context() -> dict: + """ + Market app context processor. + + - nav_tree_html: fetched from blog as fragment + - cart_count/cart_total: via cart service (includes calendar entries) + - cart: direct ORM query (templates need .product relationship) + """ + from shared.infrastructure.context import base_context + from shared.services.navigation import get_navigation_tree + from shared.services.registry import services + from shared.infrastructure.cart_identity import current_cart_identity + from shared.infrastructure.fragments import fetch_fragment + from shared.models.market import CartItem + from sqlalchemy.orm import selectinload + + ctx = await base_context() + + ctx["nav_tree_html"] = await fetch_fragment( + "blog", "nav-tree", + params={"app_name": "market", "path": request.path}, + ) + # Fallback for _nav.html when nav-tree fragment fetch fails + ctx["menu_items"] = await get_navigation_tree(g.s) + + ident = current_cart_identity() + + # cart_count/cart_total via service (consistent with blog/events apps) + summary = await services.cart.cart_summary( + g.s, user_id=ident["user_id"], session_id=ident["session_id"], + ) + ctx["cart_count"] = summary.count + summary.calendar_count + ctx["cart_total"] = float(summary.total + summary.calendar_total) + + # ORM cart items for product templates (need .product relationship) + filters = [CartItem.deleted_at.is_(None)] + if ident["user_id"] is not None: + filters.append(CartItem.user_id == ident["user_id"]) + elif ident["session_id"] is not None: + filters.append(CartItem.session_id == 
ident["session_id"]) + else: + ctx["cart"] = [] + return ctx + + result = await g.s.execute( + select(CartItem).where(*filters).options(selectinload(CartItem.product)) + ) + ctx["cart"] = list(result.scalars().all()) + + return ctx + + +def create_app() -> "Quart": + from models.market_place import MarketPlace + from shared.services.registry import services + from services import register_domain_services + + app = create_base_app( + "market", + context_fn=market_context, + domain_services_fn=register_domain_services, + ) + + # App-specific templates override shared templates + app_templates = str(Path(__file__).resolve().parent / "templates") + app.jinja_loader = ChoiceLoader([ + FileSystemLoader(app_templates), + app.jinja_loader, + ]) + + # All markets: / — global view across all pages + app.register_blueprint( + register_all_markets(), + url_prefix="/", + ) + + # Page markets: // — markets for a single page + app.register_blueprint( + register_page_markets(), + url_prefix="/", + ) + + # Market blueprint nested under post slug: /// + app.register_blueprint( + register_market_bp( + url_prefix="/", + title=config()["market_title"], + ), + url_prefix="//", + ) + + app.register_blueprint(register_fragments()) + + # --- Auto-inject slugs into url_for() calls --- + @app.url_value_preprocessor + def pull_slugs(endpoint, values): + if values: + # page_markets blueprint uses "slug" + if "slug" in values: + g.post_slug = values.pop("slug") + # market blueprint uses "page_slug" / "market_slug" + if "page_slug" in values: + g.post_slug = values.pop("page_slug") + if "market_slug" in values: + g.market_slug = values.pop("market_slug") + + @app.url_defaults + def inject_slugs(endpoint, values): + slug = g.get("post_slug") + if slug: + for param in ("slug", "page_slug"): + if param not in values and app.url_map.is_endpoint_expecting(endpoint, param): + values[param] = slug + market_slug = g.get("market_slug") + if market_slug and "market_slug" not in values: + if 
app.url_map.is_endpoint_expecting(endpoint, "market_slug"): + values["market_slug"] = market_slug + + # --- Load post and market data --- + @app.before_request + async def hydrate_market(): + post_slug = getattr(g, "post_slug", None) + market_slug = getattr(g, "market_slug", None) + if not post_slug: + return + + # Load post by slug via blog service + post = await services.blog.get_post_by_slug(g.s, post_slug) + if not post: + abort(404) + + g.post_data = { + "post": { + "id": post.id, + "title": post.title, + "slug": post.slug, + "feature_image": post.feature_image, + "html": post.html, + "status": post.status, + "visibility": post.visibility, + "is_page": post.is_page, + }, + } + + # Only load market when market_slug is present (///) + if not market_slug: + return + + market = ( + await g.s.execute( + select(MarketPlace).where( + MarketPlace.slug == market_slug, + MarketPlace.container_type == "page", + MarketPlace.container_id == post.id, + MarketPlace.deleted_at.is_(None), + ) + ) + ).scalar_one_or_none() + if not market: + abort(404) + g.market = market + + @app.context_processor + async def inject_post(): + post_data = getattr(g, "post_data", None) + if not post_data: + return {} + return {**post_data} + + return app + + +app = create_app() diff --git a/market/bp/__init__.py b/market/bp/__init__.py new file mode 100644 index 0000000..b62b4b6 --- /dev/null +++ b/market/bp/__init__.py @@ -0,0 +1,5 @@ +from .market.routes import register as register_market_bp +from .product.routes import register as register_product +from .all_markets.routes import register as register_all_markets +from .page_markets.routes import register as register_page_markets +from .fragments import register_fragments diff --git a/market/bp/all_markets/__init__.py b/market/bp/all_markets/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/market/bp/all_markets/routes.py b/market/bp/all_markets/routes.py new file mode 100644 index 0000000..0ce086d --- /dev/null +++ 
b/market/bp/all_markets/routes.py @@ -0,0 +1,74 @@ +""" +All-markets blueprint — shows markets across ALL pages. + +Mounted at / (root of market app). No slug context. + +Routes: + GET / — full page with first page of markets + GET /all-markets — HTMX fragment for infinite scroll +""" +from __future__ import annotations + +from quart import Blueprint, g, request, render_template, make_response + +from shared.browser.app.utils.htmx import is_htmx_request +from shared.services.registry import services + + +def register() -> Blueprint: + bp = Blueprint("all_markets", __name__) + + async def _load_markets(page, per_page=20): + """Load all markets + page info for container badges.""" + markets, has_more = await services.market.list_marketplaces( + g.s, page=page, per_page=per_page, + ) + + # Batch-load page info for container_ids + page_info = {} + if markets: + post_ids = list({ + m.container_id for m in markets + if m.container_type == "page" + }) + if post_ids: + posts = await services.blog.get_posts_by_ids(g.s, post_ids) + for p in posts: + page_info[p.id] = {"title": p.title, "slug": p.slug} + + return markets, has_more, page_info + + @bp.get("/") + async def index(): + page = int(request.args.get("page", 1)) + markets, has_more, page_info = await _load_markets(page) + + ctx = dict( + markets=markets, + has_more=has_more, + page_info=page_info, + page=page, + ) + + if is_htmx_request(): + html = await render_template("_types/all_markets/_main_panel.html", **ctx) + else: + html = await render_template("_types/all_markets/index.html", **ctx) + + return await make_response(html, 200) + + @bp.get("/all-markets") + async def markets_fragment(): + page = int(request.args.get("page", 1)) + markets, has_more, page_info = await _load_markets(page) + + html = await render_template( + "_types/all_markets/_cards.html", + markets=markets, + has_more=has_more, + page_info=page_info, + page=page, + ) + return await make_response(html, 200) + + return bp diff --git 
a/market/bp/api/__init__.py b/market/bp/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/market/bp/api/routes.py b/market/bp/api/routes.py new file mode 100644 index 0000000..e83824d --- /dev/null +++ b/market/bp/api/routes.py @@ -0,0 +1,432 @@ +# products_api_async.py +from __future__ import annotations + +from datetime import datetime, timezone +from decimal import Decimal +from typing import Any, Dict, List, Tuple, Iterable, Optional + +from quart import Blueprint, request, jsonify, g +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from scrape.persist_snapshot.log_product_result import _log_product_result +from scrape.persist_snapshot.save_nav import _save_nav +from scrape.persist_snapshot.capture_listing import _capture_listing +from scrape.persist_snapshot.save_subcategory_redirects import _save_subcategory_redirects + +# ⬇️ Import your models (names match your current file) +from models.market import ( + Product, + ProductImage, + ProductSection, + ProductLabel, + ProductSticker, + ProductAttribute, + ProductNutrition, + ProductAllergen, +) + +from shared.browser.app.redis_cacher import clear_cache +from shared.browser.app.csrf import csrf_exempt + + +products_api = Blueprint("products_api", __name__, url_prefix="/api/products") + +# ---- Comparison config (matches your schema) -------------------------------- + +PRODUCT_FIELDS: List[str] = [ + "slug", + "title", + "image", + "description_short", + "description_html", + "suma_href", + "brand", + "rrp", "rrp_currency", "rrp_raw", + "price_per_unit", "price_per_unit_currency", "price_per_unit_raw", + "special_price", "special_price_currency", "special_price_raw", + "regular_price", "regular_price_currency", "regular_price_raw", + "oe_list_price", + "case_size_count", "case_size_item_qty", "case_size_item_unit", "case_size_raw", + "ean", "sku", "unit_size", "pack_size", +] + +# rel_name -> (Model, 
fields_to_compare, key_for_orderless_compare) +CHILD_SPECS: Dict[str, Tuple[Any, List[str], str]] = { + "images": (ProductImage, ["url", "position", "kind"], "url"), + "sections": (ProductSection, ["title", "html"], "title"), + "labels": (ProductLabel, ["name"], "name"), + "stickers": (ProductSticker, ["name"], "name"), + "attributes": (ProductAttribute, ["key", "value"], "key"), + "nutrition": (ProductNutrition, ["key", "value", "unit"], "key"), + "allergens": (ProductAllergen, ["name", "contains"], "name"), +} + +def _now_utc(): + return datetime.now(timezone.utc) + +def _norm_scalar(v: Any) -> Any: + if isinstance(v, Decimal): + s = format(v.normalize(), "f") + return "0" if s in ("-0", "-0.0") else s + if isinstance(v, bool): + return bool(v) + if isinstance(v, (int, float, str)) or v is None: + return v + return str(v) + +def _normalize_row(obj: Dict[str, Any], keep: List[str]) -> Dict[str, Any]: + out: Dict[str, Any] = {} + for f in keep: + val = obj.get(f) + if isinstance(val, str): + val = val.strip() + out[f] = _norm_scalar(val) + return out + +def _list_to_index(items: Iterable[Dict[str, Any]], uniq: str) -> Dict[Any, Dict[str, Any]]: + ix: Dict[Any, Dict[str, Any]] = {} + for it in items or []: + key = it.get(uniq) + if key is None: + continue + ix[key] = it + return ix + +def _serialize_product_for_compare(p: Product) -> Dict[str, Any]: + root: Dict[str, Any] = {f: _norm_scalar(getattr(p, f)) for f in PRODUCT_FIELDS} + for rel_name, (_Model, fields, uniq) in CHILD_SPECS.items(): + rows: List[Dict[str, Any]] = [] + for child in getattr(p, rel_name) or []: + rows.append({f: _norm_scalar(getattr(child, f)) for f in fields}) + root[rel_name] = _list_to_index(rows, uniq) + return root + +def _serialize_payload_for_compare(payload: Dict[str, Any]) -> Dict[str, Any]: + root = _normalize_row(payload, PRODUCT_FIELDS) + for rel_name, (_Model, fields, uniq) in CHILD_SPECS.items(): + rows = payload.get(rel_name) or [] + rows = [r for r in rows if isinstance(r, 
dict)] + root[rel_name] = _list_to_index([_normalize_row(r, fields) for r in rows], uniq) + return root + +from decimal import Decimal, InvalidOperation + +def _is_numeric_like(x) -> bool: + if isinstance(x, bool): + return False + if isinstance(x, (int, float, Decimal)): + return True + if isinstance(x, str): + s = x.strip() + if not s: + return False + try: + Decimal(s) + return True + except InvalidOperation: + return False + return False + +def _to_decimal(x) -> Decimal: + if isinstance(x, Decimal): + return x + if isinstance(x, bool) or x is None: + raise InvalidOperation + if isinstance(x, (int, str)): + return Decimal(str(x).strip()) + if isinstance(x, float): + return Decimal(str(x)) # avoid float fp artifacts + # last resort: string-coerce + return Decimal(str(x).strip()) + +def values_different(av, bv) -> bool: + # match original None semantics first + if bv is None: + return av is not None + if av is None: + return True + + if _is_numeric_like(bv): + try: + return _to_decimal(av) != _to_decimal(bv) + except InvalidOperation: + # av isn't numeric-parsable → different + return True + else: + # non-numeric: compare as strings (like original) + return f"{av}" != f"{bv}" + +import re + +_cf_a_re = re.compile(r']+/cdn-cgi/l/email-protection#[^"]+"[^>]*>(.*?)', re.I | re.S) +_cf_span_re = re.compile(r']*class="__cf_email__"[^>]*>(.*?)', re.I | re.S) +_cf_data_attr_re = re.compile(r'\sdata-cfemail="[^"]+"', re.I) +_ws_re = re.compile(r'\s+') + +def normalize_cf_email(html: str) -> str: + if not isinstance(html, str): + return html + s = html + # Replace CF spans with their inner text + s = _cf_span_re.sub(r'\1', s) + # Replace CF protection anchors with their inner text + s = _cf_a_re.sub(r'\1', s) + # Drop the data-cfemail attribute if any remains + s = _cf_data_attr_re.sub('', s) + # Optional: collapse whitespace + s = _ws_re.sub(' ', s).strip() + return s + + +def _deep_equal(a: Dict[str, Any], b: Dict[str, Any]) -> bool: + # keys must match at this level + 
if a.keys() != b.keys(): + return False + + for k in a.keys(): + av, bv = a[k], b[k] + + # Dicts: recurse, but don't return early unless it's False + if isinstance(av, dict) and isinstance(bv, dict): + if not _deep_equal(av, bv): + # log_diff(k, av, bv) # optional + return False + continue + + # Lists/Tuples: compare length then elements (order-sensitive here) + if isinstance(av, (list, tuple)) and isinstance(bv, (list, tuple)): + if len(av) != len(bv): + # log_diff(k, av, bv) + return False + for i, (ai, bi) in enumerate(zip(av, bv)): + # nested dicts within lists + if isinstance(ai, dict) and isinstance(bi, dict): + if not _deep_equal(ai, bi): + return False + else: + if values_different(normalize_cf_email(ai), normalize_cf_email(bi)): + return False + continue + + # Scalars / everything else + if values_different(normalize_cf_email(av), normalize_cf_email(bv)): + # print('!!deep', k, av, bv) + return False + + return True + +# ---- Mutation helpers ------------------------------------------------------- + +def _apply_product_fields(p: Product, payload: Dict[str, Any]) -> None: + for f in PRODUCT_FIELDS: + setattr(p, f, payload.get(f)) + p.updated_at = _now_utc() + +def _replace_children(p: Product, payload: Dict[str, Any]) -> None: + # replace each relation wholesale (delete-orphan takes care of removal) + #p.images.clear() + for row in payload.get("images") or []: + p.images.append(ProductImage( + url=row.get("url"), + position=row.get("position") or 0, + kind=row.get("kind") or "gallery", + created_at=_now_utc(), updated_at=_now_utc(), + )) + + #p.sections.clear() + for row in payload.get("sections") or []: + p.sections.append(ProductSection( + title=row.get("title") or "", + html=row.get("html") or "", + created_at=_now_utc(), updated_at=_now_utc(), + )) + + #p.labels.clear() + for row in payload.get("labels") or []: + p.labels.append(ProductLabel( + name=row.get("name") or "", + created_at=_now_utc(), updated_at=_now_utc(), + )) + + #p.stickers.clear() + for 
row in payload.get("stickers") or []: + p.stickers.append(ProductSticker( + name=row.get("name") or "", + created_at=_now_utc(), updated_at=_now_utc(), + )) + + #p.attributes.clear() + for row in payload.get("attributes") or []: + p.attributes.append(ProductAttribute( + key=row.get("key") or "", + value=row.get("value"), + created_at=_now_utc(), updated_at=_now_utc(), + )) + + #p.nutrition.clear() + for row in payload.get("nutrition") or []: + p.nutrition.append(ProductNutrition( + key=row.get("key") or "", + value=row.get("value"), + unit=row.get("unit"), + created_at=_now_utc(), updated_at=_now_utc(), + )) + + #p.allergens.clear() + for row in payload.get("allergens") or []: + p.allergens.append(ProductAllergen( + name=row.get("name") or "", + contains=bool(row.get("contains", False)), + created_at=_now_utc(), updated_at=_now_utc(), + )) + +async def _create_product_from_payload(session: AsyncSession, payload: Dict[str, Any]) -> Product: + p = Product() + _apply_product_fields(p, payload) + p.created_at = _now_utc() + p.deleted_at = None + session.add(p) + #await session.flush() # get p.id + _replace_children(p, payload) + await session.flush() + + # Publish to federation inline + from shared.services.federation_publish import try_publish + await try_publish( + session, + user_id=getattr(p, "user_id", None), + activity_type="Create", + object_type="Object", + object_data={ + "name": p.title or "", + "summary": getattr(p, "description", "") or "", + }, + source_type="Product", + source_id=p.id, + ) + + return p + +# ---- API -------------------------------------------------------------------- + + +@csrf_exempt +@products_api.post("/listing/") +@clear_cache(tag='browse') +async def capture_lsting(): + data: Dict[str, Any] = await request.get_json(force=True, silent=False) + url = data['url'] + items = data['items'] + total_pages = data['total_pages'] + await _capture_listing(g.s, url,items, total_pages) + return {"ok": True} + + + +@csrf_exempt 
+@products_api.post("/log/") +@clear_cache(tag='browse') +async def log_product(): + data: Dict[str, Any] = await request.get_json(force=True, silent=False) + ok = bool(data["ok"]) + + payload = data.get("payload") or {} + try: + await _log_product_result(g.s, ok, payload) + return {"ok": True} + except Exception as e: + return {"ok": False} + + +@csrf_exempt +@products_api.post("/redirects/") +@clear_cache(tag='browse') +async def rediects(): + data: Dict[str, str] = await request.get_json(force=True, silent=False) + await _save_subcategory_redirects(g.s, data) + return {"ok": True} + + +@csrf_exempt +@products_api.post("/nav/") +@clear_cache(tag='browse') +async def save_nav(): + data: Dict[str, Any] = await request.get_json(force=True, silent=False) + market = getattr(g, "market", None) + market_id = market.id if market else None + await _save_nav(g.s, data, market_id=market_id) + return {"ok": True} + + +@csrf_exempt +@products_api.post("/sync/") +@clear_cache(tag='browse') +async def sync_product(): + """ + POST /api/products/sync + Body includes top-level fields and child arrays like: + { + "slug": "my-product", + "title": "...", + "images": [{"url":"https://..","position":0,"kind":"gallery"}], + "sections": [{"title":"Details","html":"

    ..

    "}], + "labels": [{"name":"Vegan"}], + "stickers": [{"name":"Sale"}], + "attributes": [{"key":"Country","value":"UK"}], + "nutrition": [{"key":"Energy","value":"100","unit":"kcal"}], + "allergens": [{"name":"Nuts","contains":true}] + } + """ + payload = await request.get_json(force=True, silent=False) + if not isinstance(payload, dict): + return jsonify({"error": "Invalid JSON"}), 400 + + slug = payload.get("slug") + if not isinstance(slug, str) or not slug: + return jsonify({"error": "Missing 'slug'"}), 400 + + + # find undeleted row by slug + #stmt = select(Product).where(Product.slug == slug, Product.deleted_at.is_(None)) + + stmt = ( + select(Product) + .where(Product.slug == slug, Product.deleted_at.is_(None)) + .options( + selectinload(Product.images), + selectinload(Product.sections), + selectinload(Product.labels), + selectinload(Product.stickers), + selectinload(Product.attributes), + selectinload(Product.nutrition), + selectinload(Product.allergens), + ) + ) + existing: Optional[Product] = (await g.s.execute(stmt)).scalars().first() + + incoming_norm = _serialize_payload_for_compare(payload) + + if existing: + db_norm = _serialize_product_for_compare(existing) + + if _deep_equal(db_norm, incoming_norm): + # Exactly equal → just touch updated_at + existing.updated_at = _now_utc() + await g.s.flush() + return jsonify({"id": existing.id, "action": "touched"}), 200 + + # Different → soft delete old + create a new row + existing.deleted_at = _now_utc() + await g.s.flush() # ensure the soft-delete is persisted before inserting the new row + + new_p = await _create_product_from_payload(g.s, payload) + await g.s.flush() + return jsonify({"id": new_p.id, "action": "replaced"}), 201 + + # Not found → create + new_p = await _create_product_from_payload(g.s, payload) + await g.s.flush() + return jsonify({"id": new_p.id, "action": "created"}), 201 + diff --git a/market/bp/browse/__init__.py b/market/bp/browse/__init__.py new file mode 100644 index 0000000..85fd1a5 
--- /dev/null +++ b/market/bp/browse/__init__.py @@ -0,0 +1,7 @@ +from __future__ import annotations + +# create the blueprint at package import time +from .routes import register # = Blueprint("browse_bp", __name__) + +# import routes AFTER browse_bp is defined so routes can attach to it +from . import routes # noqa: F401 diff --git a/market/bp/browse/routes.py b/market/bp/browse/routes.py new file mode 100644 index 0000000..750b816 --- /dev/null +++ b/market/bp/browse/routes.py @@ -0,0 +1,163 @@ +from __future__ import annotations + + +from quart import ( + g, + Blueprint, + abort, + render_template, + render_template_string, + make_response, + current_app, +) +from shared.config import config +from .services.nav import category_context, get_nav +from .services.blacklist.category import is_category_blocked + +from .services import ( + _hx_fragment_request, + _productInfo, + _vary, + _current_url_without_page, +) + +from shared.browser.app.redis_cacher import cache_page +from shared.browser.app.utils.htmx import is_htmx_request + +def register(): + browse_bp = Blueprint("browse", __name__) + + from .. import register_product + browse_bp.register_blueprint( + register_product(), + ) + + @browse_bp.get("/") + @cache_page(tag="browse") + async def home(): + """ + Market landing page. + Uses the post data hydrated by the app-level before_request (g.post_data). + """ + p_data = getattr(g, "post_data", None) or {} + + # Determine which template to use based on request type + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/market/index.html", **p_data) + else: + # HTMX request: main panel + OOB elements + html = await render_template("_types/market/_oob_elements.html", **p_data) + + return await make_response(html) + + @browse_bp.get("/all/") + @cache_page(tag="browse") + async def browse_all(): + """ + Browse all products across all categories. 
+ Renders full page or just product cards (HTMX pagination fragment). + """ + market = getattr(g, "market", None) + market_id = market.id if market else None + nav = await get_nav(g.s, market_id=market_id) + ctx = { + "category_label": "All Products", + "top_slug": "all", + "sub_slug": None, + } + + product_info = await _productInfo() + full_context = {**product_info, **ctx} + + # Determine which template to use based on request type and pagination + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/browse/index.html", **full_context) + elif product_info["page"] > 1: + # HTMX pagination: just product cards + sentinel + html = await render_template("_types/browse/_product_cards.html", **product_info) + else: + # HTMX navigation (page 1): main panel + OOB elements + html = await render_template("_types/browse/_oob_elements.html", **full_context) + + resp = await make_response(html) + resp.headers["Hx-Push-Url"] = _current_url_without_page() + return _vary(resp) + + + @browse_bp.get("//") + @cache_page(tag="browse") + async def browse_top(top_slug: str): + """ + Browse by top-level category (e.g. /fruit). + 404 if category not in allowed list or is blocked. 
+ """ + REVERSE_CATEGORY = {v: k for k, v in config()["categories"]["allow"].items()} + if top_slug not in REVERSE_CATEGORY: + abort(404) + if is_category_blocked(top_slug): + abort(404) + + market = getattr(g, "market", None) + market_id = market.id if market else None + nav = await get_nav(g.s, market_id=market_id) + ctx = category_context(top_slug, None, nav) + + product_info = await _productInfo(top_slug) + full_context = {**product_info, **ctx} + + # Determine which template to use based on request type and pagination + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/browse/index.html", **full_context) + elif product_info["page"] > 1: + # HTMX pagination: just product cards + sentinel + html = await render_template("_types/browse/_product_cards.html", **product_info) + else: + html = await render_template("_types/browse/_oob_elements.html", **full_context) + + resp = await make_response(html) + resp.headers["Hx-Push-Url"] = _current_url_without_page() + return _vary(resp) + + + @browse_bp.get("///") + @cache_page(tag="browse") + async def browse_sub(top_slug: str, sub_slug: str): + """ + Browse by subcategory (e.g. /fruit/citrus). + 404 if blocked or unknown. 
+ """ + REVERSE_CATEGORY = {v: k for k, v in config()["categories"]["allow"].items()} + if top_slug not in REVERSE_CATEGORY: + abort(404) + if is_category_blocked(top_slug, sub_slug): + abort(404) + + market = getattr(g, "market", None) + market_id = market.id if market else None + nav = await get_nav(g.s, market_id=market_id) + ctx = category_context(top_slug, sub_slug, nav) + + product_info = await _productInfo(top_slug, sub_slug) + full_context = {**product_info, **ctx} + + # Determine which template to use based on request type and pagination + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/browse/index.html", **full_context) + elif product_info["page"] > 1: + # HTMX pagination: just product cards + sentinel + html = await render_template("_types/browse/_product_cards.html", **product_info) + else: + # HTMX navigation (page 1): main panel + OOB elements + html = await render_template("_types/browse/_oob_elements.html", **full_context) + + resp = await make_response(html) + resp.headers["Hx-Push-Url"] = _current_url_without_page() + return _vary(resp) + + + + return browse_bp \ No newline at end of file diff --git a/market/bp/browse/services/__init__.py b/market/bp/browse/services/__init__.py new file mode 100644 index 0000000..70d11d0 --- /dev/null +++ b/market/bp/browse/services/__init__.py @@ -0,0 +1,13 @@ +from __future__ import annotations +from quart import Blueprint + + +from .services import ( + _hx_fragment_request, + _productInfo, + _order_brands_selected_first, + _massage_product, + _vary, + _current_url_without_page, + _is_liked +) diff --git a/market/bp/browse/services/blacklist/category.py b/market/bp/browse/services/blacklist/category.py new file mode 100644 index 0000000..87aceda --- /dev/null +++ b/market/bp/browse/services/blacklist/category.py @@ -0,0 +1,12 @@ +# suma_browser/category_blacklist.py +from __future__ import annotations +from typing import Optional +from 
shared.config import config + +def _norm(s: str) -> str: + return (s or "").strip().lower().strip("/") + +def is_category_blocked(top_slug: str, sub_slug: Optional[str] = None) -> bool: + if sub_slug: + return is_category_blocked(top_slug) or _norm(f"{top_slug}/{sub_slug}") in config()["blacklist"]["category"] + return _norm(top_slug) in config()["blacklist"]["category"] diff --git a/market/bp/browse/services/blacklist/product.py b/market/bp/browse/services/blacklist/product.py new file mode 100644 index 0000000..d7d298b --- /dev/null +++ b/market/bp/browse/services/blacklist/product.py @@ -0,0 +1,15 @@ +from typing import Set, Optional +from ..slugs import canonical_html_slug +from shared.config import config + +_blocked: Set[str] = set() +_mtime: Optional[float] = None + +def _norm(slug: str) -> str: + slug = (slug or "").strip().strip("/").lower() + if slug.startswith("product/"): + slug = slug.split("/", 1)[1] + return canonical_html_slug(slug) + +def is_product_blocked(slug: str) -> bool: + return _norm(slug) in config()["blacklist"]["product"] diff --git a/market/bp/browse/services/blacklist/product_details.py b/market/bp/browse/services/blacklist/product_details.py new file mode 100644 index 0000000..7a2244a --- /dev/null +++ b/market/bp/browse/services/blacklist/product_details.py @@ -0,0 +1,11 @@ +import re +from shared.config import config + +def _norm_title_key(t: str) -> str: + t = (t or "").strip().lower() + t = re.sub(r":\s*$", "", t) + t = re.sub(r"\s+", " ", t) + return t + +def is_blacklisted_heading(title: str) -> bool: + return _norm_title_key(title) in [s.lower() for s in config()["blacklist"]["product-details"]] diff --git a/market/bp/browse/services/cache_backend.py b/market/bp/browse/services/cache_backend.py new file mode 100644 index 0000000..00a0f77 --- /dev/null +++ b/market/bp/browse/services/cache_backend.py @@ -0,0 +1,367 @@ +from __future__ import annotations +import os, json +from typing import List, Optional +from shared.config 
import config +from .blacklist.product import is_product_blocked + + +def _json(path: str): + with open(path, "r", encoding="utf-8") as f: + return json.load(f) + + +def fs_nav(): + path = os.path.join(config()["cache"]["fs_root"], "nav.json") + return _json(path) + + +def _brand_of(item: dict) -> str: + b = (item.get("brand") or "").strip() + if b: + return b + try: + return (item.get("info_table", {}).get("Brand") or "").strip() + except Exception: + return "" + + +def _stickers_of(item: dict) -> List[str]: + vals = item.get("stickers") or [] + out = [] + for v in vals: + s = (str(v) or "").strip().lower() + if s: + out.append(s) + return out + + +def fs_product_by_slug(slug: str): + slug = (slug or "").strip() + if slug.endswith(".json"): + path = os.path.join(config()["cache"]["fs_root"], "products", slug) + else: + path = os.path.join(config()["cache"]["fs_root"], "products", f"{slug}.json") + return _json(path) + + +def fs_count_products_in_sub(top_slug: str, sub_slug: Optional[str]) -> int: + """ + Return how many products are in the listing for (top_slug, sub_slug), + after filtering out blocked products. + + If sub_slug is None, that's the top-level category listing. 
+ """ + fs_root = config()["cache"]["fs_root"] + + # Build path to listings/.../items.json just like fs_products does + parts = ["listings", top_slug] + if sub_slug: + parts.append(sub_slug) + parts.append("items.json") + + path = os.path.join(fs_root, *parts) + if not os.path.exists(path): + return 0 + + try: + all_slugs = _json(path) + except Exception: + return 0 + + # Filter out blocked products + allowed = [ + slug for slug in all_slugs + if not is_product_blocked(slug) + ] + return len(allowed) + + +def fs_products( + top_slug: str | None, + sub_slug: str | None, + selected_brands: Optional[List[str]] = None, + selected_stickers: Optional[List[str]] = None, + selected_labels: Optional[List[str]] = None, + page: int = 1, + search: Optional[str] = None, + sort: Optional[str] = None, + page_size: int = 20, + + # NEW: only include products the current user has liked + liked_slugs: Optional[List[str]] = None, + liked: bool = None, +): + """ + Returns: + { + "total_pages": int, + "items": [product dict ...], # filtered + paginated (sorted) + "brands": [{"name": str, "count": int}], + "stickers": [{"name": str, "count": int}], + "labels": [{"name": str, "count": int}], + } + + Filters: + - top_slug / sub_slug scope + - selected_brands + - selected_stickers + - selected_labels + - search + - liked_slugs (if provided) + """ + + import os + from typing import List, Dict + + fs_root = config()["cache"]["fs_root"] + + # ---------- Collect slugs ---------- + slugs: List[str] = [] + if top_slug: # normal listing path + parts = ["listings", top_slug] + if sub_slug: + parts.append(sub_slug) + parts.append("items.json") + path = os.path.join(fs_root, *parts) + if os.path.exists(path): + try: + slugs = [s for s in _json(path) if not is_product_blocked(s)] + except Exception: + slugs = [] + else: + # No top slug: include ALL products from /products/*.json + products_dir = os.path.join(fs_root, "products") + try: + for fname in os.listdir(products_dir): + if not 
fname.endswith(".json"): + continue + slug = fname[:-5] # strip .json + if not is_product_blocked(slug): + slugs.append(slug) + except FileNotFoundError: + slugs = [] + + # ---------- Load product dicts ---------- + all_items: List[dict] = [] + for slug in slugs: + try: + item = fs_product_by_slug(slug) + if isinstance(item, dict): + all_items.append(item) + except Exception: + continue + + # Stable deterministic ordering when aggregating everything (name ASC) + def _title_key(it: dict) -> tuple: + title = (it.get("title") or it.get("name") or it.get("slug") or "").strip().lower() + return (title, it.get("slug") or "") + + all_items.sort(key=_title_key) + + # ---------- Helpers for filters & counts ---------- + def _brand_of_local(item: dict) -> str: + b = item.get("brand") or (item.get("info_table") or {}).get("Brand") + return (b or "").strip() + + def _stickers_of_local(item: dict) -> List[str]: + vals = item.get("stickers") or [] + out = [] + for s in vals: + if isinstance(s, str): + s2 = s.strip().lower() + if s2: + out.append(s2) + return out + + def _labels_of_local(item: dict) -> List[str]: + vals = item.get("labels") or [] + out = [] + for s in vals: + if isinstance(s, str): + s2 = s.strip().lower() + if s2: + out.append(s2) + return out + + sel_brands = [ + (s or "").strip().lower() + for s in (selected_brands or []) + if (s or "").strip() + ] + sel_stickers = [ + (s or "").strip().lower() + for s in (selected_stickers or []) + if (s or "").strip() + ] + sel_labels = [ + (s or "").strip().lower() + for s in (selected_labels or []) + if (s or "").strip() + ] + search_q = (search or "").strip().lower() or None + + liked_set = { + (slug or "").strip().lower() + for slug in (liked_slugs or [] if liked else []) + if (slug or "").strip() + } + + real_liked_set = { + (slug or "").strip().lower() + for slug in (liked_slugs or []) + if (slug or "").strip() + } + + def matches_brand(item: dict) -> bool: + if not sel_brands: + return True + return 
_brand_of_local(item).strip().lower() in sel_brands + + def has_all_selected_stickers(item: dict) -> bool: + if not sel_stickers: + return True + tags = set(_stickers_of_local(item)) + return all(s in tags for s in sel_stickers) + + def has_all_selected_labels(item: dict) -> bool: + if not sel_labels: + return True + tags = set(_labels_of_local(item)) + return all(s in tags for s in sel_labels) + + def matches_search(item: dict) -> bool: + if not search_q: + return True + desc = (item.get("description_short") or "").strip().lower() + return search_q in desc + + def is_liked(item: dict) -> bool: + """ + True if this item should be shown under the liked filter. + If liked_set is empty, treat everything as allowed. + """ + slug_val = (item.get("slug") or "").strip().lower() + return slug_val in real_liked_set + + # ---------- Counts (dependent on other filters + search + liked) ---------- + brand_counts: Dict[str, int] = {} + for b in (selected_brands or []): + brand_counts[b] = 0 + + for it in all_items: + b = _brand_of_local(it) + if not b: + continue + brand_counts[b] = brand_counts.get(b, 0) + 1 + + sticker_counts: Dict[str, int] = {} + for s in (selected_stickers or []): + sticker_counts[s] = 0 + for it in all_items: + for s in _stickers_of_local(it): + sticker_counts[s] = sticker_counts.get(s, 0) + 1 + + label_counts: Dict[str, int] = {} + for s in (selected_labels or []): + label_counts[s] = 0 + for it in all_items: + for s in _labels_of_local(it): + label_counts[s] = label_counts.get(s, 0) + 1 + + liked_count = 0 + for it in all_items: + if is_liked(it): + liked_count += 1 + + search_count=0 + for it in all_items: + if matches_search(it): + search_count += 1 + + + # ---------- Apply filters ---------- + filtered = [ + it + for it in all_items + if matches_brand(it) + and has_all_selected_stickers(it) + and has_all_selected_labels(it) + and matches_search(it) + and (not liked or is_liked(it)) + ] + + # ---------- Sorting ---------- + sort_mode = (sort or 
"az").strip().lower() + + def _price_key(item: dict): + p = item["regular_price"] + title, slug = _title_key(item) + return (0 if p is not None else 1, p if p is not None else 0, title, slug) + + def _price_key_desc(item: dict): + p = item["regular_price"] + title, slug = _title_key(item) + return ( + 0 if p is not None else 1, + -(p if p is not None else 0), + title, + slug, + ) + + if sort_mode in ("az",): + filtered.sort(key=_title_key) + elif sort_mode in ("za",): + filtered.sort(key=_title_key, reverse=True) + elif sort_mode in ( + "price-asc", "price_asc", "price-low", "price-low-high", "low-high", "lo-hi" + ): + filtered.sort(key=_price_key) + elif sort_mode in ( + "price-desc", "price_desc", "price-high", "price-high-low", "high-low", "hi-lo" + ): + filtered.sort(key=_price_key_desc) + else: + filtered.sort(key=_title_key) + + # ---------- Pagination ---------- + total_pages = max(1, (len(filtered) + page_size - 1) // page_size) + page = max(1, page) + start = (page - 1) * page_size + end = start + page_size + page_items = filtered[start:end] + # ---------- Format counts lists ---------- + brands_list = sorted( + [{"name": k, "count": v} for k, v in brand_counts.items()], + key=lambda x: (-x["count"], x["name"].lower()), + ) + stickers_list = sorted( + [{"name": k, "count": v} for k, v in sticker_counts.items()], + key=lambda x: (-x["count"], x["name"]), + ) + labels_list = sorted( + [{"name": k, "count": v} for k, v in label_counts.items()], + key=lambda x: (-x["count"], x["name"]), + ) + return { + "total_pages": total_pages, + "items": page_items, + "brands": brands_list, + "stickers": stickers_list, + "labels": labels_list, + "liked_count": liked_count, + "search_count": search_count + } + +# async wrappers (unchanged) +async def read_nav(): + return fs_nav() + +async def read_listing(top_slug: str, sub_slug: str | None, page: int): + return fs_products(top_slug, sub_slug, None, None, page) + +async def read_product(slug_or_path: str): + slug = 
(slug_or_path or "").strip() + if "/" in slug: + slug = slug.rsplit("/", 1)[-1] + slug = slug.split("?", 1)[0] + return fs_product_by_slug(slug) diff --git a/market/bp/browse/services/db_backend.py b/market/bp/browse/services/db_backend.py new file mode 100644 index 0000000..dab83b2 --- /dev/null +++ b/market/bp/browse/services/db_backend.py @@ -0,0 +1,714 @@ +from __future__ import annotations +from typing import Dict, List, Optional + +from sqlalchemy import select, and_ +from sqlalchemy.orm import selectinload + +from shared.config import config # if unused elsewhere, you can remove this import + +# ORM models +from models.market import ( + Product, ProductImage, ProductSection, + Listing, ListingItem, + NavTop, NavSub, + ProductSticker, ProductLabel, + ProductAttribute, ProductNutrition, ProductAllergen, ProductLike + +) +from sqlalchemy import func, case + + +# ---------- helpers ---------- +def _regular_price_of(p: Product) -> Optional[float]: + try: + return ( + float(p.regular_price) + if p.regular_price is not None + else ( + float(p.special_price) + if p.special_price is not None + else None + ) + ) + except Exception: + return None + +# ---------- NAV ---------- +async def db_nav(session, market_id=None) -> Dict: + top_q = select(NavTop).where(NavTop.deleted_at.is_(None)) + if market_id is not None: + top_q = top_q.where(NavTop.market_id == market_id) + tops = (await session.execute(top_q)).scalars().all() + + top_ids = [t.id for t in tops] + if top_ids: + subs = (await session.execute( + select(NavSub).where(NavSub.top_id.in_(top_ids), NavSub.deleted_at.is_(None)) + )).scalars().all() + else: + subs = [] + + subs_by_top: Dict[int, List[Dict]] = {} + for s in subs: + sub_name = (s.label or s.slug or "").strip() + subs_by_top.setdefault(s.top_id, []).append({ + "label": s.label, + "name": sub_name, # back-compat for callers expecting "name" + "slug": s.slug, + "href": s.href, + }) + + cats: Dict[str, Dict] = {} + for t in tops: + top_label = (t.label or 
t.slug or "").strip() + cats[top_label] = { + "label": t.label, + "name": top_label, # back-compat + "slug": t.slug, + "subs": sorted(subs_by_top.get(t.id, []), key=lambda x: (x["name"] or "").lower()), + } + return {"cats": cats} + + +async def db_product_full(session, slug: str, user_id=0) -> Optional[dict]: + + liked_product_ids_subq = ( + select(ProductLike.product_slug) + .where( + and_( + ProductLike.user_id == user_id, + ProductLike.deleted_at.is_(None) + ) + ) + ) + + is_liked_case = case( + (and_( + (Product.slug.in_(liked_product_ids_subq)), + Product.deleted_at.is_(None) + ), True), + else_=False + ).label("is_liked") + + q = ( + select(Product, is_liked_case) + .where(Product.slug == slug, Product.deleted_at.is_(None)) + .options( + selectinload(Product.images.and_(ProductImage.deleted_at.is_(None))), + selectinload(Product.sections.and_(ProductSection.deleted_at.is_(None))), + selectinload(Product.labels.and_(ProductLabel.deleted_at.is_(None))), + selectinload(Product.stickers.and_(ProductSticker.deleted_at.is_(None))), + selectinload(Product.attributes.and_(ProductAttribute.deleted_at.is_(None))), + selectinload(Product.nutrition.and_(ProductNutrition.deleted_at.is_(None))), + selectinload(Product.allergens.and_(ProductAllergen.deleted_at.is_(None))), + ) + ) + result = await session.execute(q) + + row = result.first() if result is not None else None + p, is_liked = row if row else (None, None) + if not p: + return None + + gallery = [ + img.url + for img in sorted(p.images, key=lambda i: (i.kind or "gallery", i.position or 0)) + if (img.kind or "gallery") == "gallery" + ] + embedded = [ + img.url + for img in sorted(p.images, key=lambda i: i.position or 0) + if (img.kind or "") == "embedded" + ] + all_imgs = [ + img.url + for img in sorted(p.images, key=lambda i: i.position or 0) + if (img.kind or "") == "all" + ] + return { + "id": p.id, + "slug": p.slug, + "title": p.title, + "brand": p.brand, + "image": p.image, + "description_short": 
p.description_short, + "description_html": p.description_html, + "suma_href": p.suma_href, + "rrp": float(p.rrp) if p.rrp is not None else None, + "special_price": float(p.special_price) if p.special_price is not None else None, + "special_price_raw": p.special_price_raw, + "special_price_currency": p.special_price_currency, + "regular_price": _regular_price_of(p), + "regular_price_raw": p.regular_price_raw, + "regular_price_currency": p.regular_price_currency, + "rrp_raw": p.rrp_raw, + "rrp_currency": p.rrp_currency, + "price_per_unit_raw": p.price_per_unit_raw, + "price_per_unit": p.price_per_unit, + "price_per_unit_currency": p.price_per_unit_currency, + "oe_list_price": p.oe_list_price, + "images": gallery, + "embedded_image_urls": embedded, + "all_image_urls": all_imgs, + "sections": [{"title": s.title, "html": s.html} for s in p.sections], + "stickers": [v.name.strip().lower() for v in p.stickers if v.name], + "labels": [v.name for v in p.labels if v.name], + "ean": p.ean, + "sku": p.sku, + "unit_size": p.unit_size, + "pack_size": p.pack_size, + "case_size_raw": p.case_size_raw, + "case_size_count": p.case_size_count, + "case_size_item_qty": p.case_size_item_qty, + "case_size_item_unit": p.case_size_item_unit, + "info_table": {a.key: a.value for a in p.attributes if a.key}, + "nutrition": [{"key": n.key, "value": n.value, "unit": n.unit} for n in p.nutrition if n.key], + "allergens": [{"name": a.name, "contains": a.contains} for a in p.allergens if a.name], + "is_liked": is_liked, + "deleted_at": p.deleted_at + } + + +async def db_product_full_id(session, id:int, user_id=0) -> Optional[dict]: + liked_product_ids_subq = ( + select(ProductLike.product_slug) + .where( + and_( + ProductLike.user_id == user_id, + ProductLike.deleted_at.is_(None) + ) + ) + ) + + is_liked_case = case( + ( + (Product.slug.in_(liked_product_ids_subq)), + True + ), + else_=False + ).label("is_liked") + + q = ( + select(Product, is_liked_case) + .where(Product.id == id) + .options( + 
selectinload(Product.images.and_(ProductImage.deleted_at.is_(None))), + selectinload(Product.sections.and_(ProductSection.deleted_at.is_(None))), + selectinload(Product.labels.and_(ProductLabel.deleted_at.is_(None))), + selectinload(Product.stickers.and_(ProductSticker.deleted_at.is_(None))), + selectinload(Product.attributes.and_(ProductAttribute.deleted_at.is_(None))), + selectinload(Product.nutrition.and_(ProductNutrition.deleted_at.is_(None))), + selectinload(Product.allergens.and_(ProductAllergen.deleted_at.is_(None))), + ) + ) + result = await session.execute(q) + + row = result.first() if result is not None else None + p, is_liked = row if row else (None, None) + if not p: + return None + + gallery = [ + img.url + for img in sorted(p.images, key=lambda i: (i.kind or "gallery", i.position or 0)) + if (img.kind or "gallery") == "gallery" + ] + embedded = [ + img.url + for img in sorted(p.images, key=lambda i: i.position or 0) + if (img.kind or "") == "embedded" + ] + all_imgs = [ + img.url + for img in sorted(p.images, key=lambda i: i.position or 0) + if (img.kind or "") == "all" + ] + return { + "id": p.id, + "slug": p.slug, + "title": p.title, + "brand": p.brand, + "image": p.image, + "description_short": p.description_short, + "description_html": p.description_html, + "suma_href": p.suma_href, + "rrp": float(p.rrp) if p.rrp is not None else None, + "special_price": float(p.special_price) if p.special_price is not None else None, + "special_price_raw": p.special_price_raw, + "special_price_currency": p.special_price_currency, + "regular_price": _regular_price_of(p), + "regular_price_raw": p.regular_price_raw, + "regular_price_currency": p.regular_price_currency, + "rrp_raw": p.rrp_raw, + "rrp_currency": p.rrp_currency, + "price_per_unit_raw": p.price_per_unit_raw, + "price_per_unit": p.price_per_unit, + "price_per_unit_currency": p.price_per_unit_currency, + "oe_list_price": p.oe_list_price, + "images": gallery, + "embedded_image_urls": embedded, + 
"all_image_urls": all_imgs, + "sections": [{"title": s.title, "html": s.html} for s in p.sections], + "stickers": [v.name.strip().lower() for v in p.stickers if v.name], + "labels": [v.name for v in p.labels if v.name], + "ean": p.ean, + "sku": p.sku, + "unit_size": p.unit_size, + "pack_size": p.pack_size, + "case_size_raw": p.case_size_raw, + "case_size_count": p.case_size_count, + "case_size_item_qty": p.case_size_item_qty, + "case_size_item_unit": p.case_size_item_unit, + "info_table": {a.key: a.value for a in p.attributes if a.key}, + "nutrition": [{"key": n.key, "value": n.value, "unit": n.unit} for n in p.nutrition if n.key], + "allergens": [{"name": a.name, "contains": a.contains} for a in p.allergens if a.name], + "is_liked": is_liked, + "deleted_at": p.deleted_at + } + + + + + +# ---------- PRODUCTS LISTING ---------- + +async def db_products_nocounts( + session, + top_slug: str | None, + sub_slug: str | None, + selected_brands: Optional[List[str]] = None, + selected_stickers: Optional[List[str]] = None, + selected_labels: Optional[List[str]] = None, + page: int = 1, + search: Optional[str] = None, + sort: Optional[str] = None, + page_size: int = 20, + liked: bool = None, + user_id: int=0, + market_id: int | None = None, +) -> Dict: + BLOCKED_SLUGS = set((config().get("blacklist", {}).get("product", []) or [])) + base_conditions = [] + if BLOCKED_SLUGS: + base_conditions.append( + ~Product.slug.in_(BLOCKED_SLUGS), + ) + + if top_slug: + q_list_conditions = [ + Listing.deleted_at.is_(None), + NavTop.deleted_at.is_(None), + NavTop.slug == top_slug, + NavSub.deleted_at.is_(None), + NavSub.slug == sub_slug if sub_slug else Listing.sub_id.is_(None), + ] + if market_id is not None: + q_list_conditions.append(NavTop.market_id == market_id) + + q_list = ( + select(Listing.id) + .join(NavTop, Listing.top) + .outerjoin(NavSub, Listing.sub) + .where(*q_list_conditions) + ) + + listing_id = (await session.execute(q_list)).scalars().first() + if not listing_id: + 
return {"total_pages": 1, "items": []} + + base_conditions.append(Product.slug.in_( + select(ListingItem.slug).where(ListingItem.listing_id == listing_id, ListingItem.deleted_at.is_(None)) + )) + elif market_id is not None: + # Browse all within a specific market: filter products through market's nav hierarchy + market_product_slugs = ( + select(ListingItem.slug) + .join(Listing, ListingItem.listing_id == Listing.id) + .join(NavTop, Listing.top_id == NavTop.id) + .where( + ListingItem.deleted_at.is_(None), + Listing.deleted_at.is_(None), + NavTop.deleted_at.is_(None), + NavTop.market_id == market_id, + ) + ) + base_conditions.append(Product.slug.in_(market_product_slugs)) + + base_ids_subq = select(Product.id).where(*base_conditions, Product.deleted_at.is_(None)) + base_ids = (await session.execute(base_ids_subq)).scalars().all() + + if not base_ids: + return {"total_pages": 1, "items": []} + sel_brands = [(b or "").strip().lower() for b in (selected_brands or []) if (b or "").strip()] + sel_stickers = [(s or "").strip().lower() for s in (selected_stickers or []) if (s or "").strip()] + sel_labels = [(l or "").strip().lower() for l in (selected_labels or []) if (l or "").strip()] + search_q = (search or "").strip().lower() + + filter_conditions = [] + if sel_brands: + filter_conditions.append(func.lower(Product.brand).in_(sel_brands)) + for sticker_name in sel_stickers: + filter_conditions.append( + Product.stickers.any( + and_( + func.lower(ProductSticker.name) == sticker_name, + ProductSticker.deleted_at.is_(None) + ) + ) + ) + for label_name in sel_labels: + filter_conditions.append( + Product.labels.any( + and_( + func.lower(ProductLabel.name) == label_name, + ProductLabel.deleted_at.is_(None), + ) + ) + ) + if search_q: + filter_conditions.append(func.lower(Product.description_short).contains(search_q)) + if liked: + liked_subq = liked_subq = ( + select(ProductLike.product_slug) + .where( + and_( + ProductLike.user_id == user_id, + 
ProductLike.deleted_at.is_(None) + ) + ) + .subquery() + ) + filter_conditions.append(Product.slug.in_(liked_subq)) + + filtered_count_query = select(func.count(Product.id)).where(Product.id.in_(base_ids), *filter_conditions) + total_filtered = (await session.execute(filtered_count_query)).scalars().one() + total_pages = max(1, (total_filtered + page_size - 1) // page_size) + page = max(1, page) + + + liked_product_slugs_subq = ( + select(ProductLike.product_slug) + .where( + and_( + ProductLike.user_id == user_id, + ProductLike.deleted_at.is_(None) + ) + ) + ) + is_liked_case = case( + (Product.slug.in_(liked_product_slugs_subq), True), + else_=False + ).label("is_liked") + + q_filtered = select(Product, is_liked_case).where(Product.id.in_(base_ids), *filter_conditions).options( + selectinload(Product.images), + selectinload(Product.sections), + selectinload(Product.labels), + selectinload(Product.stickers), + selectinload(Product.attributes), + selectinload(Product.nutrition), + selectinload(Product.allergens), + ) + + sort_mode = (sort or "az").strip().lower() + if sort_mode == "az": + q_filtered = q_filtered.order_by(func.lower(Product.title), Product.slug) + elif sort_mode == "za": + q_filtered = q_filtered.order_by(func.lower(Product.title).desc(), Product.slug.desc()) + elif sort_mode in ("price-asc", "price_asc", "price-low", "price-low-high", "low-high", "lo-hi"): + q_filtered = q_filtered.order_by( + case((Product.regular_price.is_(None), 1), else_=0), + Product.regular_price.asc(), + func.lower(Product.title), + Product.slug + ) + elif sort_mode in ("price-desc", "price_desc", "price-high", "price-high-low", "high-low", "hi-lo"): + q_filtered = q_filtered.order_by( + case((Product.regular_price.is_(None), 1), else_=0), + Product.regular_price.desc(), + func.lower(Product.title), + Product.slug + ) + else: + q_filtered = q_filtered.order_by(func.lower(Product.title), Product.slug) + + offset_val = (page - 1) * page_size + q_filtered = 
q_filtered.offset(offset_val).limit(page_size) + products_page = (await session.execute(q_filtered)).all() + + items: List[Dict] = [] + for p, is_liked in products_page: + gallery_imgs = sorted((img for img in p.images), key=lambda i: (i.kind or "gallery", i.position or 0)) + gallery = [img.url for img in gallery_imgs if (img.kind or "gallery") == "gallery"] + embedded = [img.url for img in sorted(p.images, key=lambda i: i.position or 0) if (img.kind or "") == "embedded"] + all_imgs = [img.url for img in sorted(p.images, key=lambda i: i.position or 0) if (img.kind or "") == "all"] + + items.append({ + "slug": p.slug, + "title": p.title, + "brand": p.brand, + "description_short": p.description_short, + "description_html": p.description_html, + "image": p.image, + "rrp": float(p.rrp) if p.rrp is not None else None, + "special_price": float(p.special_price) if p.special_price is not None else None, + "special_price_raw": p.special_price_raw, + "special_price_currency": p.special_price_currency, + "regular_price": _regular_price_of(p), + "regular_price_raw": p.regular_price_raw, + "regular_price_currency": p.regular_price_currency, + "rrp_raw": p.rrp_raw, + "rrp_currency": p.rrp_currency, + "price_per_unit_raw": p.price_per_unit_raw, + "price_per_unit": p.price_per_unit, + "price_per_unit_currency": p.price_per_unit_currency, + "images": gallery, + "embedded_image_urls": embedded, + "all_image_urls": all_imgs, + "sections": [{"title": s.title, "html": s.html} for s in p.sections], + "labels": [l.name for l in p.labels if l.name], + "stickers": [s.name.strip().lower() for s in p.stickers if s.name], + "info_table": {a.key: a.value for a in p.attributes if a.key}, + "nutrition": [{"key": n.key, "value": n.value, "unit": n.unit} for n in p.nutrition if n.key], + "allergens": [{"name": a.name, "contains": a.contains} for a in p.allergens if a.name], + "ean": p.ean, + "sku": p.sku, + "unit_size": p.unit_size, + "pack_size": p.pack_size, + "is_liked": is_liked, + }) + + 
return { + "total_pages": total_pages, + "items": items, + } + + +async def db_products_counts( + session, + top_slug: str | None, + sub_slug: str | None, + search: Optional[str] = None, + user_id: int=0, + market_id: int | None = None, +) -> Dict: + BLOCKED_SLUGS = set((config().get("blacklist", {}).get("product", []) or [])) + base_conditions = [] + + if top_slug: + q_list_conditions = [ + Listing.deleted_at.is_(None), + Listing.top.has(slug=top_slug), + Listing.sub.has(slug=sub_slug) if sub_slug else Listing.sub_id.is_(None), + ] + if market_id is not None: + q_list_conditions.append(Listing.top.has(market_id=market_id)) + q_list = select(Listing.id).where(*q_list_conditions) + listing_id = (await session.execute(q_list)).scalars().first() + if not listing_id: + return { + "brands": [], + "stickers": [], + "labels": [], + "liked_count": 0, + "search_count": 0, + } + + listing_slug_subquery = select(ListingItem.slug).where(ListingItem.listing_id == listing_id, ListingItem.deleted_at.is_(None)) + + if BLOCKED_SLUGS: + base_conditions.append( + and_( + Product.slug.in_(listing_slug_subquery), + ~Product.slug.in_(BLOCKED_SLUGS), + ) + ) + else: + base_conditions.append(Product.slug.in_(listing_slug_subquery)) + else: + if market_id is not None: + # Browse all within a specific market + market_product_slugs = ( + select(ListingItem.slug) + .join(Listing, ListingItem.listing_id == Listing.id) + .join(NavTop, Listing.top_id == NavTop.id) + .where( + ListingItem.deleted_at.is_(None), + Listing.deleted_at.is_(None), + NavTop.deleted_at.is_(None), + NavTop.market_id == market_id, + ) + ) + if BLOCKED_SLUGS: + base_conditions.append( + and_( + Product.slug.in_(market_product_slugs), + ~Product.slug.in_(BLOCKED_SLUGS), + ) + ) + else: + base_conditions.append(Product.slug.in_(market_product_slugs)) + elif BLOCKED_SLUGS: + base_conditions.append(~Product.slug.in_(BLOCKED_SLUGS)) + base_ids = (await session.execute(select(Product.id).where(*base_conditions, 
Product.deleted_at.is_(None)))).scalars().all() + if base_ids: + base_products_slugs = (await session.execute( + select(Product.slug).where(Product.id.in_(base_ids), Product.deleted_at.is_(None)) + )).scalars().all() + if not base_products_slugs: + return { + "brands": [], + "stickers": [], + "labels": [], + "liked_count": 0, + "search_count": 0, + } + base_ids = (await session.execute( + select(Product.id).where(Product.slug.in_(base_products_slugs), Product.deleted_at.is_(None)) + )).scalars().all() + else: + return { + "brands": [], + "stickers": [], + "labels": [], + "liked_count": 0, + "search_count": 0, + } + + brands_list: List[Dict] = [] + stickers_list: List[Dict] = [] + labels_list: List[Dict] = [] + liked_count = 0 + search_count = 0 + liked_product_slugs_subq = ( + select(ProductLike.product_slug) + .where(ProductLike.user_id == user_id, ProductLike.deleted_at.is_(None)) + ) + liked_count = await session.scalar( + select(func.count(Product.id)) + .where( + Product.id.in_(base_ids), + Product.slug.in_(liked_product_slugs_subq), + Product.deleted_at.is_(None) + ) + ) + + liked_count = (await session.execute( + select(func.count()) + .select_from(ProductLike) + .where( + ProductLike.user_id == user_id, + ProductLike.product_slug.in_( + select(Product.slug).where(Product.id.in_(base_ids)) + ), + ProductLike.deleted_at.is_(None) + ) + )).scalar_one() if user_id else 0 + + # Brand counts + brand_count_rows = await session.execute( + select(Product.brand, func.count(Product.id)) + .where(Product.id.in_(base_ids), + Product.brand.is_not(None), + func.trim(Product.brand) != "", + Product.deleted_at.is_(None) + ) + .group_by(Product.brand) + ) + for brand_name, count in brand_count_rows: + brands_list.append({"name": brand_name, "count": count}) + brands_list.sort(key=lambda x: (-x["count"], x["name"].lower())) + + # Sticker counts + sticker_count_rows = await session.execute( + select(ProductSticker.name, func.count(ProductSticker.product_id)) + .where( + 
ProductSticker.product_id.in_(base_ids), + ProductSticker.deleted_at.is_(None) + ) + .group_by(ProductSticker.name) + ) + for sticker_name, count in sticker_count_rows: + if sticker_name: + stickers_list.append({"name": sticker_name.strip().lower(), "count": count}) + stickers_list.sort(key=lambda x: (-x["count"], x["name"])) + + # Label counts + label_count_rows = await session.execute( + select(ProductLabel.name, func.count(ProductLabel.product_id)) + .where( + ProductLabel.product_id.in_(base_ids), + ProductLabel.deleted_at.is_(None) + ) + .group_by(ProductLabel.name) + ) + for label_name, count in label_count_rows: + if label_name: + labels_list.append({"name": label_name, "count": count}) + labels_list.sort(key=lambda x: (-x["count"], x["name"])) + + + # Search count + search_q = (search or "").strip().lower() + if search_q: + search_count = (await session.execute( + select(func.count(Product.id)) + .where( + Product.id.in_(base_ids), + func.lower(Product.description_short).contains(search_q), + Product.deleted_at.is_(None) + ) + )).scalars().one() + else: + search_count = len(base_ids) + + return { + "brands": brands_list, + "stickers": stickers_list, + "labels": labels_list, + "liked_count": liked_count, + "search_count": search_count, + } + +async def db_products( + session, + top_slug: str | None, + sub_slug: str | None, + selected_brands: Optional[List[str]] = None, + selected_stickers: Optional[List[str]] = None, + selected_labels: Optional[List[str]] = None, + page: int = 1, + search: Optional[str] = None, + sort: Optional[str] = None, + page_size: int = 20, + liked: bool = None, + user_id: int=0, + market_id: int | None = None, +) -> Dict: + return { + **(await db_products_nocounts( + session, + top_slug=top_slug, + sub_slug=sub_slug, + selected_brands=selected_brands, + selected_stickers=selected_stickers, + selected_labels=selected_labels, + page=page, + search=search, + sort=sort, + page_size=page_size, + liked=liked, + user_id=user_id, + 
market_id=market_id, + )), + **(await db_products_counts( + session, + top_slug=top_slug, + sub_slug=sub_slug, + search=search, + user_id=user_id, + market_id=market_id, + )), + } + + diff --git a/market/bp/browse/services/nav.py b/market/bp/browse/services/nav.py new file mode 100644 index 0000000..bdef674 --- /dev/null +++ b/market/bp/browse/services/nav.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +import time +import re +from typing import Dict, List, Tuple, Optional +from urllib.parse import urlparse, urljoin + +from shared.config import config +from . import db_backend as cb +from .blacklist.category import is_category_blocked # Reverse map: slug -> label + +# ------------------ Caches ------------------ +_nav_cache: Dict = {} +_nav_cache_ts: float = 0.0 +_nav_ttl_seconds = 60 * 60 * 6 # 6 hours + + +def _now() -> float: + try: + return now() # type: ignore[name-defined] + except Exception: + return time.time() + + +def extract_sub_slug(href: str, top_slug: str) -> Optional[str]: + p = urlparse(href) + parts = [x for x in (p.path or "").split("/") if x] + if len(parts) >= 2 and parts[0].lower() == top_slug.lower(): + sub = parts[1] + if sub.lower().endswith((".html", ".htm")): + sub = re.sub(r"\.(html?|HTML?)$", "", sub) + return sub + return None + + +def group_by_category(slug_to_links: Dict[str, List[Tuple[str, str]]]) -> Dict[str, Dict]: + nav = {"cats": {}} + for label, slug in config()["categories"]["allow"].items(): + top_href = urljoin(config()["base_url"], f"/{slug}") + subs = [] + for text, href in slug_to_links.get(slug, []): + sub_slug = extract_sub_slug(href, slug) + if sub_slug: + subs.append({ + "name": text, + "href": href, + "slug": sub_slug, + # no count here yet in this path + }) + subs.sort(key=lambda x: x["name"].lower()) + nav["cats"][label] = {"href": top_href, "slug": slug, "subs": subs} + nav = _apply_category_blacklist(nav) + return nav + + +async def get_nav(session, market_id=None) -> Dict[str, Dict]: + """ + Return 
navigation structure; annotate each sub with product counts. + Uses snapshot for offline behaviour. + """ + global _nav_cache, _nav_cache_ts + now_ts = _now() + + # load from snapshot + nav = await cb.db_nav(session, market_id=market_id) + + # inject counts for each subcategory (and for top-level too if you like) + for label, cat in (nav.get("cats") or {}).items(): + top_slug = cat.get("slug") + if not top_slug: + continue + + + # Counts for subs + new_subs = [] + for s in cat.get("subs", []): + s.get("slug") + #if not sub_slug: + # s_count = 0 + #else: + # s_count = await cb.db_count_products_in_sub(session,top_slug, sub_slug) + #print('sub', s_count) + new_subs.append({ + **s, + #"count": s_count, + }) + cat["subs"] = new_subs + + _nav_cache = nav + _nav_cache_ts = now_ts + + nav = _apply_category_blacklist(nav) + return nav + + +def category_context(top_slug: Optional[str], sub_slug: Optional[str], nav: Dict[str, Dict]): + """Build template context for a category/subcategory page.""" + def _order_subs_selected_first(subs, sub_slug: str | None): + """Return subs with the selected subcategory (by slug) first.""" + if not subs or not sub_slug: + return subs + head = [s for s in subs if sub_slug and sub_slug.lower() == s['slug']] + tail = [s for s in subs if not (sub_slug and sub_slug.lower() == s['slug'])] + return head + tail + + REVERSE_CATEGORY = {v: k for k, v in config()["categories"]["allow"].items()} + label = REVERSE_CATEGORY.get(top_slug) + cat = nav["cats"].get(label) or {} + + top_suma_href = cat.get("href") or urljoin(config()["base_url"], f"/{top_slug}") + top_local_href = f"{top_slug}" + + # total products in this top-level category (all subs combined / top-level listing) + top_count = cat.get("count", 0) + + subs = [] + for s in cat.get("subs", []): + subs.append({ + "name": s["name"], + "slug": s.get("slug"), + "local_href": f"{top_slug}/{s.get('slug')}", + "suma_href": s["href"], + "count": s.get("count", 0), # per-subcategory product count + }) + 
+ current_local_href = ( + f"{top_slug}/{sub_slug}" if sub_slug + else f"{top_slug}" if top_slug + else "" + ) + + return { + "category_label": label, + "top_slug": top_slug, + "sub_slug": sub_slug, + "top_suma_href": top_suma_href, + "top_local_href": top_local_href, + + # 👇 expose total count for the parent category + "top_count": top_count, + + # list of subcategories, each with its own count + "subs_local": _order_subs_selected_first(subs, sub_slug), + + #"current_local_href": current_local_href, + } + +def _apply_category_blacklist(nav: Dict[str, Dict]) -> Dict[str, Dict]: + cats = nav.get("cats", {}) + out = {"cats": {}} + for label, data in cats.items(): + top = (data or {}).get("slug") + if not top or is_category_blocked(top): + continue + # filter subs + subs = [] + for s in (data.get("subs") or []): + sub_slug = s.get("slug") + if sub_slug and not is_category_blocked(top, sub_slug): + subs.append(s) + # keep everything else (including counts) + out["cats"][label] = {**data, "subs": subs} + return out diff --git a/market/bp/browse/services/products.py b/market/bp/browse/services/products.py new file mode 100644 index 0000000..f9a7be3 --- /dev/null +++ b/market/bp/browse/services/products.py @@ -0,0 +1,118 @@ +# products.py +from __future__ import annotations +from typing import List, Optional +from urllib.parse import urlparse + +from .state import KNOWN_PRODUCT_SLUGS +from .blacklist.category import is_category_blocked +from . 
import db_backend as cb + +# NEW IMPORT: +from quart import g + +async def products( + list_url: str, + selected_brands: Optional[List[str]] = None, + selected_stickers: Optional[List[str]] = None, + selected_labels: Optional[List[str]] = None, + page: int = 1, + search: Optional[str] = None, + sort: Optional[str] = None, + liked: Optional[bool] = None, + user_id: Optional[int] = None, + market_id: int | None = None, +): + p = urlparse(list_url) + parts = [x for x in (p.path or "").split("/") if x] + top = parts[0] if parts else None + sub = parts[1] if len(parts) >= 2 else None + + if is_category_blocked(top, sub): + return [], [], [], [], 1 # <- note: 5 values now, keep shape consistent below + data = await cb.db_products( + g.s, + top, + sub, + selected_brands, + selected_stickers, + selected_labels, + page, + search, + sort, + liked=liked, + user_id = g.user.id if g.user else 0, + market_id=market_id, + ) + items = data.get("items", []) or [] + brands = data.get("brands", []) or [] + stickers = data.get("stickers", []) or [] + labels = data.get("labels", []) or [] + total_pages = int(data.get("total_pages", 1) or 1) + + # Track known product slugs + for it in items: + try: + slug = it.get("slug") + if slug: + KNOWN_PRODUCT_SLUGS.add(slug) + except Exception: + pass + + # --- NEW BIT: mark which are liked by this user --- + + + # Return same shape you were already returning: + # items, brands, stickers, labels, total_pages + return items, brands, stickers, labels, total_pages, data.get("liked_count"), data.get("search_count") + + +async def products_nocounts( + session, + list_url: str, + selected_brands: Optional[List[str]] = None, + selected_stickers: Optional[List[str]] = None, + selected_labels: Optional[List[str]] = None, + page: int = 1, + search: Optional[str] = None, + sort: Optional[str] = None, + liked: Optional[bool] = None, + user_id: Optional[int] = None, + market_id: int | None = None, +): + p = urlparse(list_url) + parts = [x for x in (p.path or 
"").split("/") if x] + top = parts[0] if parts else None + sub = parts[1] if len(parts) >= 2 else None + + if is_category_blocked(top, sub): + return [], [], [], [], 1 # <- note: 5 values now, keep shape consistent below + data = await cb.db_products_nocounts( + session, + top, + sub, + selected_brands, + selected_stickers, + selected_labels, + page, + search, + sort, + liked=liked, + user_id = g.user.id if g.user else 0, + market_id=market_id, + ) + items = data.get("items", []) or [] + total_pages = int(data.get("total_pages", 1) or 1) + + # Track known product slugs + for it in items: + try: + slug = it.get("slug") + if slug: + KNOWN_PRODUCT_SLUGS.add(slug) + except Exception: + pass + + + # Return same shape you were already returning: + # items, brands, stickers, labels, total_pages + return items, total_pages diff --git a/market/bp/browse/services/services.py b/market/bp/browse/services/services.py new file mode 100644 index 0000000..dbdcaad --- /dev/null +++ b/market/bp/browse/services/services.py @@ -0,0 +1,185 @@ +from __future__ import annotations + +from urllib.parse import urljoin + +from quart import ( + g, + request, +) +from shared.config import config +from .products import products, products_nocounts +from .blacklist.product_details import is_blacklisted_heading + +from shared.utils import host_url + + +from sqlalchemy import select +from models import ProductLike +from ...market.filters.qs import decode + + +def _hx_fragment_request() -> bool: + return request.headers.get("HX-Request", "").lower() == "true" + +async def _productInfo(top_slug=None, sub_slug=None): + """ + Shared query logic for home / category / subcategory pages. + Pulls filters from qs.decode(), queries products(), and orders brands/stickers/etc. 
+ """ + + q = decode() + page, search, sort = q.page, q.search, q.sort + selected_brands, selected_stickers, selected_labels = q.selected_brands, q.selected_stickers, q.selected_labels + liked = q.liked + + # Get market_id from hydrated market context + market = getattr(g, "market", None) + market_id = market.id if market else None + + if top_slug is not None and sub_slug is not None: + list_url = urljoin(config()["base_url"], f"/{top_slug}/{sub_slug}") + else: + if top_slug is not None: + list_url = top_slug + else: + list_url = "" + if not _hx_fragment_request() or page==1: + items, brands, stickers, labels, total_pages, liked_count, search_count = await products( + list_url, + selected_brands=selected_brands, + selected_stickers=selected_stickers, + selected_labels=selected_labels, + page=page, + search=search, + sort=sort, + user_id=g.user.id if g.user else None, + liked = liked, + market_id=market_id, + ) + + brands_ordered = _order_brands_selected_first(brands, selected_brands) + + return { + "products": items, + "page": page, + "search": search, + "sort": sort, + "total_pages": int(total_pages or 1), + "brands": brands_ordered, + "selected_brands": selected_brands, + "stickers": stickers, + "selected_stickers": selected_stickers, + "labels": labels, + "selected_labels": selected_labels, + "liked": liked, + "liked_count": liked_count, + "search_count": search_count + } + else: + items, total_pages = await products_nocounts( + g.s, + list_url, + selected_brands=selected_brands, + selected_stickers=selected_stickers, + selected_labels=selected_labels, + page=page, + search=search, + sort=sort, + user_id=g.user.id if g.user else None, + liked = liked, + market_id=market_id, + ) + return { + "products": items, + "page": page, + "search": search, + "sort": sort, + "total_pages": int(total_pages or 1), + } + + +def _order_brands_selected_first(brands, selected): + """Return brands with the selected brand(s) first.""" + if not brands or not selected: + return brands 
+ sel = [(s or "").strip() for s in selected] + head = [s for s in brands if (s.get("name") or "").strip() in sel] + tail = [s for s in brands if (s.get("name") or "").strip() not in sel] + return head + tail + + +def _order_stickers_selected_first( + stickers: list[dict], selected_stickers: list[str] | None +): + if not stickers or not selected_stickers: + return stickers + sel = [(s or "").strip().lower() for s in selected_stickers] + head = [s for s in stickers if (s.get("name") or "").strip().lower() in sel] + tail = [ + s + for s in stickers + if (s.get("name") or "").strip().lower() not in sel + ] + return head + tail + + +def _order_labels_selected_first( + labels: list[dict], selected_labels: list[str] | None +): + if not labels or not selected_labels: + return labels + sel = [(s or "").strip().lower() for s in selected_labels] + head = [s for s in labels if (s.get("name") or "").strip().lower() in sel] + tail = [ + s + for s in labels + if (s.get("name") or "").strip().lower() not in sel + ] + return head + tail + +def _massage_product(d): + """ + Normalise the product dict for templates: + - inject APP_ROOT into HTML + - drop blacklisted sections + """ + massaged = { + **d, + "description_html": d["description_html"].replace( + "[**__APP_ROOT__**]", g.root + ), + "sections": [ + { + **section, + "html": section["html"].replace( + "[**__APP_ROOT__**]", g.root + ), + } + for section in d["sections"] + if not is_blacklisted_heading(section["title"]) + ], + } + return massaged + + +# Re-export from canonical shared location +from shared.infrastructure.http_utils import vary as _vary, current_url_without_page as _current_url_without_page + +async def _is_liked(user_id: int | None, slug: str) -> bool: + """ + Check if this user has liked this product. 
+ """ + if not user_id: + return False + # because ProductLike has composite PK (user_id, product_slug), + # we can fetch it by primary key dict: + row = await g.s.execute( + select(ProductLike).where( + ProductLike.user_id == user_id, + ProductLike.product_slug == slug, + ) + ) + row.scalar_one_or_none() + return row is not None + + diff --git a/market/bp/browse/services/slugs.py b/market/bp/browse/services/slugs.py new file mode 100644 index 0000000..f45a258 --- /dev/null +++ b/market/bp/browse/services/slugs.py @@ -0,0 +1,24 @@ +import re +from urllib.parse import urljoin, urlparse +from shared.config import config + +def product_slug_from_href(href: str) -> str: + p = urlparse(href) + parts = [x for x in p.path.split("/") if x] + if not parts: + return "" + last = parts[-1] + if last.endswith(".html"): + last = last[:-5] + elif last.endswith(".htm"): + last = last[:-4] + last = re.sub(r"-(html|htm)+$", "", last, flags=re.I) + return f"{last}-html" + +def canonical_html_slug(slug: str) -> str: + base = re.sub(r"-(html|htm)+$", "", slug, flags=re.I) + return f"{base}-html" + +def suma_href_from_html_slug(slug: str) -> str: + canon = canonical_html_slug(slug) + return urljoin(config()["base_url"], f"/{canon}.html") diff --git a/market/bp/browse/services/state.py b/market/bp/browse/services/state.py new file mode 100644 index 0000000..2ad0495 --- /dev/null +++ b/market/bp/browse/services/state.py @@ -0,0 +1,21 @@ +from typing import Dict, Tuple, List +import time + +_nav_cache: dict = {} +_nav_cache_ts: float = 0.0 +_nav_ttl_seconds = 60 * 60 * 6 + +_detail_cache: Dict[str, Dict] = {} +_detail_cache_ts: Dict[str, float] = {} +_detail_ttl_seconds = 60 * 60 * 6 + +KNOWN_PRODUCT_SLUGS: set[str] = set() + +_listing_variant_cache: Dict[str, Tuple[str, float]] = {} +_listing_variant_ttl = 60 * 60 * 6 + +_listing_page_cache: Dict[str, Tuple[Tuple[List[Dict], int], float]] = {} +_listing_page_ttl = 60 * 30 + +def now() -> float: + return time.time() diff --git 
a/market/bp/cart/__init__.py b/market/bp/cart/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/market/bp/cart/services/__init__.py b/market/bp/cart/services/__init__.py new file mode 100644 index 0000000..2643d81 --- /dev/null +++ b/market/bp/cart/services/__init__.py @@ -0,0 +1,2 @@ +from .total import total +from .identity import CartIdentity, current_cart_identity diff --git a/market/bp/cart/services/identity.py b/market/bp/cart/services/identity.py new file mode 100644 index 0000000..50ecb70 --- /dev/null +++ b/market/bp/cart/services/identity.py @@ -0,0 +1,4 @@ +# Re-export from canonical shared location +from shared.infrastructure.cart_identity import CartIdentity, current_cart_identity + +__all__ = ["CartIdentity", "current_cart_identity"] diff --git a/market/bp/cart/services/total.py b/market/bp/cart/services/total.py new file mode 100644 index 0000000..15e074f --- /dev/null +++ b/market/bp/cart/services/total.py @@ -0,0 +1,6 @@ +def total(cart): + return sum( + (item.product.special_price or item.product.regular_price) * item.quantity + for item in cart + if (item.product.special_price or item.product.regular_price) is not None + ) diff --git a/market/bp/fragments/__init__.py b/market/bp/fragments/__init__.py new file mode 100644 index 0000000..a4af44b --- /dev/null +++ b/market/bp/fragments/__init__.py @@ -0,0 +1 @@ +from .routes import register as register_fragments diff --git a/market/bp/fragments/routes.py b/market/bp/fragments/routes.py new file mode 100644 index 0000000..bd2bdde --- /dev/null +++ b/market/bp/fragments/routes.py @@ -0,0 +1,54 @@ +"""Market app fragment endpoints. + +Exposes HTML fragments at ``/internal/fragments/`` for consumption +by other coop apps via the fragment client. 
+""" + +from __future__ import annotations + +from quart import Blueprint, Response, g, render_template, request + +from shared.infrastructure.fragments import FRAGMENT_HEADER +from shared.services.registry import services + + +def register(): + bp = Blueprint("fragments", __name__, url_prefix="/internal/fragments") + + _handlers: dict[str, object] = {} + + @bp.before_request + async def _require_fragment_header(): + if not request.headers.get(FRAGMENT_HEADER): + return Response("", status=403) + + @bp.get("/") + async def get_fragment(fragment_type: str): + handler = _handlers.get(fragment_type) + if handler is None: + return Response("", status=200, content_type="text/html") + html = await handler() + return Response(html, status=200, content_type="text/html") + + # --- container-nav fragment: market links -------------------------------- + + async def _container_nav_handler(): + container_type = request.args.get("container_type", "page") + container_id = int(request.args.get("container_id", 0)) + post_slug = request.args.get("post_slug", "") + + markets = await services.market.marketplaces_for_container( + g.s, container_type, container_id, + ) + if not markets: + return "" + return await render_template( + "fragments/container_nav_markets.html", + markets=markets, post_slug=post_slug, + ) + + _handlers["container-nav"] = _container_nav_handler + + bp._fragment_handlers = _handlers + + return bp diff --git a/market/bp/market/__init__.py b/market/bp/market/__init__.py new file mode 100644 index 0000000..85fd1a5 --- /dev/null +++ b/market/bp/market/__init__.py @@ -0,0 +1,7 @@ +from __future__ import annotations + +# create the blueprint at package import time +from .routes import register # = Blueprint("browse_bp", __name__) + +# import routes AFTER browse_bp is defined so routes can attach to it +from . 
def register():
    """Create the market admin blueprint (mounted under /admin)."""
    bp = Blueprint("admin", __name__, url_prefix='/admin')

    # ---------- Pages ----------
    @bp.get("/")
    @require_admin
    async def admin():
        from shared.browser.app.utils.htmx import is_htmx_request

        # HTMX requests receive only the out-of-band fragment; a normal
        # browser request gets the full page with layout.
        template = (
            "_types/market/admin/_oob_elements.html"
            if is_htmx_request()
            else "_types/market/admin/index.html"
        )
        return await make_response(await render_template(template))

    return bp
def makeqs_factory():
    """
    Build a makeqs(...) that starts from the current filters + page.
    Auto-resets page to 1 when filters change unless you pass page explicitly.
    """
    # Snapshot the current request's query into "base" values; makeqs()
    # mutates copies of these, never the request itself.
    q = decode()
    base_stickers = [s for s in q.selected_stickers if (s or "").strip()]
    base_labels = [s for s in q.selected_labels if (s or "").strip()]
    base_brands = [s for s in q.selected_brands if (s or "").strip()]
    base_search = q.search or None
    base_liked = q.liked or None
    base_sort = q.sort or None
    base_page = int(q.page or 1)

    def makeqs(
        *,
        clear_filters: bool = False,
        add_sticker: Union[str, Iterable[str], None] = None,
        remove_sticker: Union[str, Iterable[str], None] = None,
        add_label: Union[str, Iterable[str], None] = None,
        remove_label: Union[str, Iterable[str], None] = None,
        add_brand: Union[str, Iterable[str], None] = None,
        remove_brand: Union[str, Iterable[str], None] = None,
        search: Union[str, None, object] = KEEP,
        sort: Union[str, None, object] = KEEP,
        page: Union[int, None, object] = None,
        extra: Optional[Iterable[tuple]] = None,
        leading_q: bool = True,
        liked: Union[bool, None, object] = KEEP,
    ) -> str:
        # KEEP (sentinel) means "inherit the current request's value";
        # passing None (or a value) overrides it.
        stickers = make_filter_set(base_stickers, add_sticker, remove_sticker, clear_filters)
        labels = make_filter_set(base_labels, add_label, remove_label, clear_filters)
        brands = make_filter_set(base_brands, add_brand, remove_brand, clear_filters)

        final_search = None if clear_filters else base_search if search is KEEP else ((search or "").strip() or None)
        final_sort = base_sort if sort is KEEP else (sort or None)
        final_liked = None if clear_filters else base_liked if liked is KEEP else liked

        # Did filters change?  Order-insensitive comparison via normalized sets.
        filters_changed = (
            set(map(_norm, stickers)) != set(map(_norm, base_stickers))
            or set(map(_norm, labels)) != set(map(_norm, base_labels))
            or set(map(_norm, brands)) != set(map(_norm, base_brands))
            or final_search != base_search
            or final_sort != base_sort
            or final_liked != base_liked
        )

        # Page logic
        # NOTE(review): the default for `page` is None (page param omitted from
        # the query string), not KEEP — a caller must pass page=KEEP to get the
        # docstring's "auto-reset to 1 when filters change" behavior.  Confirm
        # this default is intended.
        if page is KEEP:
            final_page = 1 if filters_changed else base_page
        else:
            final_page = page

        # Build params: repeatable filter keys first, then scalar keys.
        params = []
        for s in stickers:
            params.append(("sticker", s))
        for s in labels:
            params.append(("label", s))
        for s in brands:
            params.append(("brand", s))
        if final_search:
            params.append(("search", final_search))
        if final_liked is not None:
            params.append(("liked", final_liked))
        if final_sort:
            params.append(("sort", final_sort))
        if final_page is not None:
            params.append(("page", str(final_page)))
        if extra:
            for k, v in extra:
                if v is not None:
                    params.append((k, str(v)))

        return build_qs(params, leading_q=leading_q)

    return makeqs
def register(url_prefix, title):
    """Build the market blueprint.

    Args:
        url_prefix: URL prefix the blueprint is mounted under.
        title: fallback market title used when no market is resolved on g.
    """
    # BUG FIX: Blueprint's third positional parameter is static_folder, not
    # url_prefix — pass it by keyword so the prefix is actually applied.
    bp = Blueprint("market", __name__, url_prefix=url_prefix)

    @bp.before_request
    def route():
        # Expose the query-string builder factory to request-scoped code.
        g.makeqs_factory = makeqs_factory

    @bp.context_processor
    async def inject_root():
        # NOTE(review): g.market / g.post_data appear to be set by an
        # app-level hook before this runs — confirm against app.py.
        market = getattr(g, "market", None)
        market_id = market.id if market else None
        post_data = getattr(g, "post_data", None) or {}
        return {
            **post_data,
            "market_title": market.name if market else title,
            "categories": (await get_nav(g.s, market_id=market_id))["cats"],
            "qs": makeqs_factory()(),
            "market": market,
        }

    bp.register_blueprint(register_browse_bp())
    bp.register_blueprint(products_api)
    bp.register_blueprint(register_admin())

    return bp
int(request.args.get("page", 1)) + + markets, has_more = await _load_markets(post["id"], page) + + html = await render_template( + "_types/page_markets/_cards.html", + markets=markets, + has_more=has_more, + page_info={}, + page=page, + ) + return await make_response(html, 200) + + return bp diff --git a/market/bp/product/routes.py b/market/bp/product/routes.py new file mode 100644 index 0000000..19e76c4 --- /dev/null +++ b/market/bp/product/routes.py @@ -0,0 +1,269 @@ +from __future__ import annotations + +from quart import ( + g, + Blueprint, + abort, + redirect, + render_template, + make_response, +) +from sqlalchemy import select, func, update + +from models.market import Product, ProductLike +from ..browse.services.slugs import canonical_html_slug +from ..browse.services.blacklist.product import is_product_blocked +from ..browse.services import db_backend as cb +from ..browse.services import _massage_product +from shared.utils import host_url +from shared.browser.app.redis_cacher import cache_page, clear_cache +from ..cart.services import total +from .services.product_operations import toggle_product_like, massage_full_product + + +def register(): + bp = Blueprint("product", __name__, url_prefix="/product/") + @bp.url_value_preprocessor + def pull_product_slug(endpoint, values): + # product_slug is distinct from the app-level "slug"/"page_slug" params, + # so it won't be popped by the app-level preprocessor in app.py. + g.product_slug = values.pop("product_slug", None) + + # ───────────────────────────────────────────────────────────── + # BEFORE REQUEST: Slug or numeric ID resolver + # ───────────────────────────────────────────────────────────── + @bp.before_request + async def resolve_product(): + from quart import request as req + + raw_slug = g.product_slug = getattr(g, "product_slug", None) + if raw_slug is None: + return + + is_post = req.method == "POST" + + # 1. 
If slug is INT → load product by ID + if raw_slug.isdigit(): + product_id = int(raw_slug) + + product = await cb.db_product_full_id( + g.s, product_id, user_id=g.user.id if g.user else 0 + ) + + if not product: + abort(404) + + # If product is deleted → SHOW as-is + if product["deleted_at"]: + d = product + g.item_data = {"d": d, "slug": product["slug"], "liked": False} + return + + # Not deleted → redirect to canonical slug (GET only) + if not is_post: + canon = canonical_html_slug(product["slug"]) + return redirect( + host_url(url_for("market.browse.product.product_detail", product_slug=canon)) + ) + + g.item_data = {"d": product, "slug": product["slug"], "liked": False} + return + + # 2. Normal slug-based behaviour + if is_product_blocked(raw_slug): + abort(404) + + canon = canonical_html_slug(raw_slug) + if canon != raw_slug and not is_post: + return redirect( + host_url(url_for("market.browse.product.product_detail", product_slug=canon)) + ) + + # hydrate full product + d = await cb.db_product_full( + g.s, canon, user_id=g.user.id if g.user else 0 + ) + if not d: + abort(404) + g.item_data = {"d": d, "slug": canon, "liked": d.get("is_liked", False)} + + @bp.context_processor + def context(): + item_data = getattr(g, "item_data", None) + + if item_data: + return { + **item_data, + } + else: + return {} + + # ───────────────────────────────────────────────────────────── + # RENDER PRODUCT + # ───────────────────────────────────────────────────────────── + @bp.get("/") + @cache_page(tag="browse") + async def product_detail(): + from shared.browser.app.utils.htmx import is_htmx_request + + # Determine which template to use based on request type + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/product/index.html") + else: + # HTMX request: main panel + OOB elements + html = await render_template("_types/product/_oob_elements.html") + + return html + + @bp.post("/like/toggle/") + 
@clear_cache(tag="browse", tag_scope="user") + async def like_toggle(): + product_slug = g.product_slug + + if not g.user: + html = await render_template( + "_types/browse/like/button.html", + slug=product_slug, + liked=False, + ) + resp = make_response(html, 403) + return resp + + user_id = g.user.id + + liked, error = await toggle_product_like(g.s, user_id, product_slug) + + if error: + resp = make_response(error, 404) + return resp + + html = await render_template( + "_types/browse/like/button.html", + slug=product_slug, + liked=liked, + ) + return html + + + + @bp.get("/admin/") + async def admin(): + from shared.browser.app.utils.htmx import is_htmx_request + + if not is_htmx_request(): + # Normal browser request: full page with layout + html = await render_template("_types/product/admin/index.html") + else: + # HTMX request: main panel + OOB elements + html = await render_template("_types/product/admin/_oob_elements.html") + + return await make_response(html) + + + from bp.cart.services.identity import current_cart_identity + #from bp.cart.routes import view_cart + from models.market import CartItem + from quart import request, url_for + + @bp.post("/cart/") + @clear_cache(tag="browse", tag_scope="user") + async def cart(): + slug = g.product_slug + # make sure product exists (we *allow* deleted_at != None later if you want) + product_id = await g.s.scalar( + select(Product.id).where( + Product.slug == slug, + Product.deleted_at.is_(None), + ) + ) + + product = await g.s.scalar( + select(Product).where(Product.id == product_id) + ) + if not product: + return await make_response("Product not found", 404) + + # --- NEW: read `count` from body (JSON or form), default to 1 --- + count = 1 + try: + if request.is_json: + data = await request.get_json() + if data is not None and "count" in data: + count = int(data["count"]) + else: + form = await request.form + if "count" in form: + count = int(form["count"]) + except (ValueError, TypeError): + # if parsing fails, 
just fall back to 1 + count = 1 + # --- END NEW --- + + ident = current_cart_identity() + + # Load cart items for current user/session + from sqlalchemy.orm import selectinload + cart_filters = [CartItem.deleted_at.is_(None)] + if ident["user_id"] is not None: + cart_filters.append(CartItem.user_id == ident["user_id"]) + else: + cart_filters.append(CartItem.session_id == ident["session_id"]) + cart_result = await g.s.execute( + select(CartItem) + .where(*cart_filters) + .order_by(CartItem.created_at.desc()) + .options( + selectinload(CartItem.product), + selectinload(CartItem.market_place), + ) + ) + g.cart = list(cart_result.scalars().all()) + + ci = next( + (item for item in g.cart if item.product_id == product_id), + None, + ) + + # --- NEW: set quantity based on `count` --- + if ci: + if count > 0: + ci.quantity = count + else: + # count <= 0 → remove from cart entirely + ci.quantity=0 + g.cart.remove(ci) + await g.s.delete(ci) + + else: + if count > 0: + ci = CartItem( + user_id=ident["user_id"], + session_id=ident["session_id"], + product_id=product.id, + product=product, + quantity=count, + market_place_id=getattr(g, "market", None) and g.market.id, + ) + g.cart.append(ci) + g.s.add(ci) + # if count <= 0 and no existing item, do nothing + # --- END NEW --- + + # no explicit commit; your session middleware should handle it + + # htmx response: OOB-swap mini cart + product buttons + if request.headers.get("HX-Request") == "true": + return await render_template( + "_types/product/_added.html", + cart=g.cart, + item=ci, + ) + + # normal POST: go to cart page + from shared.infrastructure.urls import cart_url + return redirect(cart_url("/")) + + + + return bp diff --git a/market/bp/product/services/__init__.py b/market/bp/product/services/__init__.py new file mode 100644 index 0000000..ce711a7 --- /dev/null +++ b/market/bp/product/services/__init__.py @@ -0,0 +1,3 @@ +from .product_operations import toggle_product_like, massage_full_product + +__all__ = 
["toggle_product_like", "massage_full_product"] diff --git a/market/bp/product/services/product_operations.py b/market/bp/product/services/product_operations.py new file mode 100644 index 0000000..343be8e --- /dev/null +++ b/market/bp/product/services/product_operations.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +from typing import Optional + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from models.market import Product, ProductLike + + +def massage_full_product(product: Product) -> dict: + """ + Convert a Product ORM model to a dictionary with all fields. + Used for rendering product detail pages. + """ + from bp.browse.services import _massage_product + + gallery = [] + if product.image: + gallery.append(product.image) + + d = { + "id": product.id, + "slug": product.slug, + "title": product.title, + "brand": product.brand, + "image": product.image, + "description_short": product.description_short, + "description_html": product.description_html or "", + "suma_href": product.suma_href, + "rrp": float(product.rrp) if product.rrp else None, + "special_price": float(product.special_price) if product.special_price else None, + "regular_price": float(product.regular_price) if product.regular_price else None, + "images": gallery or [img.url for img in product.images], + "all_image_urls": gallery or [img.url for img in product.images], + "sections": [{"title": s.title, "html": s.html} for s in product.sections], + "stickers": [s.name.lower() for s in product.stickers], + "labels": [l.name for l in product.labels], + "nutrition": [{"key": n.key, "value": n.value, "unit": n.unit} for n in product.nutrition], + "allergens": [{"name": a.name, "contains": a.contains} for a in product.allergens], + "is_liked": False, + } + + return _massage_product(d) + + +async def toggle_product_like( + session: AsyncSession, + user_id: int, + product_slug: str, +) -> tuple[bool, Optional[str]]: + """ + Toggle a product like for a given 
async def toggle_product_like(
    session: AsyncSession,
    user_id: int,
    product_slug: str,
) -> tuple[bool, Optional[str]]:
    """
    Flip the like state of a product for one user, using soft deletes.

    Returns (liked_state, error_message):
      * error_message is non-None when something went wrong;
      * liked_state is True when the product is now liked, False otherwise.
    """
    from sqlalchemy import func, update

    # Resolve the (non-deleted) product first — the slug may be stale.
    product_id = await session.scalar(
        select(Product.id).where(
            Product.slug == product_slug,
            Product.deleted_at.is_(None),
        )
    )
    if not product_id:
        return False, "Product not found"

    # Shared predicate for "live" like rows of this (user, product) pair.
    live_like = (
        ProductLike.user_id == user_id,
        ProductLike.product_slug == product_slug,
        ProductLike.deleted_at.is_(None),
    )

    existing = await session.scalar(select(ProductLike).where(*live_like))

    if existing is not None:
        # Unlike: soft-delete every live like row for the pair.
        await session.execute(
            update(ProductLike).where(*live_like).values(deleted_at=func.now())
        )
        return False, None

    # Like: record a fresh row.
    session.add(ProductLike(user_id=user_id, product_slug=product_slug))
    return True, None
#!/usr/bin/env bash
set -euo pipefail

# Optional: wait for Postgres to be reachable before starting the app.
if [[ -n "${DATABASE_HOST:-}" && -n "${DATABASE_PORT:-}" ]]; then
  echo "Waiting for Postgres at ${DATABASE_HOST}:${DATABASE_PORT}..."
  reachable=false
  for _ in {1..60}; do
    if (echo > "/dev/tcp/${DATABASE_HOST}/${DATABASE_PORT}") >/dev/null 2>&1; then
      reachable=true
      break
    fi
    sleep 1
  done
  # IMPROVEMENT: surface the timeout instead of continuing silently.
  if [[ "${reachable}" != true ]]; then
    echo "WARNING: Postgres not reachable after 60s; starting anyway." >&2
  fi
fi

# NOTE: Market app does NOT run Alembic migrations.
# Migrations are managed by the blog app which owns the shared database schema.

# Clear Redis page cache on deploy (set REDIS_URL=no to skip).
if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
  echo "Flushing Redis cache..."
  python3 -c "
import redis, os
r = redis.from_url(os.environ['REDIS_URL'])
r.flushall()
print('Redis cache cleared.')
" || echo "Redis flush failed (non-fatal), continuing..."
fi

# Start the app under Hypercorn; APP_MODULE and PORT are overridable.
echo "Starting Hypercorn (${APP_MODULE:-app:app})..."
PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" --bind "0.0.0.0:${PORT:-8000}"
100644 index 0000000..c6e299f --- /dev/null +++ b/market/scrape-test.sh @@ -0,0 +1,6 @@ +. .env +source venv/bin/activate +rm -rf _debug/* +python test_scrape_detail.py --out ./_debug --slug sum-saag-suma-aloo-saag-12-x-400g-vf270-2-html +#git -C _debug status +#git -C _debug diff diff --git a/market/scrape.sh b/market/scrape.sh new file mode 100644 index 0000000..639cba8 --- /dev/null +++ b/market/scrape.sh @@ -0,0 +1,5 @@ +. .env +echo sumauser: $SUMA_USER +source .venv/bin/activate # was venv/bin/a +python scrape_to_snapshot.py --out ./_snapshot --max-pages 50 --max-products 200000 --concurrency 8 + diff --git a/market/scrape/__init__.py b/market/scrape/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/market/scrape/build_snapshot/__init__.py b/market/scrape/build_snapshot/__init__.py new file mode 100644 index 0000000..1eec55e --- /dev/null +++ b/market/scrape/build_snapshot/__init__.py @@ -0,0 +1 @@ +from .build_snapshot import build_snapshot diff --git a/market/scrape/build_snapshot/build_snapshot.py b/market/scrape/build_snapshot/build_snapshot.py new file mode 100644 index 0000000..3b7f623 --- /dev/null +++ b/market/scrape/build_snapshot/build_snapshot.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import os +from typing import Dict, Set + +from ..http_client import configure_cookies +from ..get_auth import login + +from shared.config import config + +from shared.utils import log + +# DB: persistence helpers + +from .tools import ( + _resolve_sub_redirects, + valid_subs, + candidate_subs, + rewrite_nav, + capture_product_slugs, + fetch_and_upsert_products, +) + +from ..nav import nav_scrape + +# ------------------------ core ------------------------ +async def build_snapshot( + concurrency: int, + user: str, + password: str, + save_nav, + capture_listing, + upsert_product, + log_product_result, + save_subcategory_redirects, + save_link_reports = None, +) -> None: + # NOTE: we keep ensure_dir for 
async def build_snapshot(
    concurrency: int,
    user: str,
    password: str,
    save_nav,
    capture_listing,
    upsert_product,
    log_product_result,
    save_subcategory_redirects,
    save_link_reports = None,
) -> None:
    """Scrape nav, listings and product details from the wholesaler site and
    persist everything to Postgres via the injected persistence callbacks.

    Args:
        concurrency: max concurrent product-detail fetches.
        user / password: upstream site credentials.
        save_nav, capture_listing, upsert_product, log_product_result,
        save_subcategory_redirects, save_link_reports: async persistence
        hooks supplied by the caller (DB layer).
    """
    # NOTE: we keep ensure_dir for listings iteration but no longer write JSON files.

    # Make project importable when run as a script.
    import sys
    sys.path.insert(0, os.path.abspath("."))

    cookies = await login(username=user, password=password)
    await configure_cookies(cookies)
    # SECURITY FIX: log cookie *names* only — the original printed cookie
    # values, leaking session credentials into build logs.
    for k in dict(cookies):
        print("logged in with cookie:", k)

    # 1) NAV
    log("Fetching nav…")
    nav = await nav_scrape()

    # Build valid subs per top from nav.
    valid_subs_by_top: Dict[str, Set[str]] = valid_subs(nav)

    # Resolve redirects for all subs in nav first.
    nav_sub_candidates = candidate_subs(nav)
    nav_redirects = await _resolve_sub_redirects(
        base_url=config()["base_url"],
        candidates=nav_sub_candidates,
        allowed_tops=set(config()["categories"]["allow"].values()),
        valid_subs_by_top=valid_subs_by_top,
    )
    rewrite_nav(nav, nav_redirects)

    # DB: save nav.
    await save_nav(nav)

    # 2) LISTINGS → collect the set of product slugs to fetch.
    product_slugs: Set[str] = await capture_product_slugs(
        nav,
        capture_listing
    )
    unknown_sub_paths: Set[str] = set()

    # 3) PRODUCTS (fetch details).
    await fetch_and_upsert_products(
        upsert_product,
        log_product_result,
        save_link_reports,
        concurrency,
        product_slugs,
        valid_subs_by_top,
        unknown_sub_paths
    )

    # Subcategory redirects discovered from product HTML.
    log("Resolving subcategory redirects…")
    html_redirects = await _resolve_sub_redirects(
        base_url=config()["base_url"],
        candidates=unknown_sub_paths,
        allowed_tops=set(config()["categories"]["allow"].values()),
        valid_subs_by_top=valid_subs_by_top,
    )
    # Nav-derived redirects first; HTML-derived ones override on conflict.
    sub_redirects: Dict[str, str] = dict(nav_redirects)
    sub_redirects.update(html_redirects)

    # DB: persist redirects.
    await save_subcategory_redirects(sub_redirects)

    log("Snapshot build complete (to Postgres).")
+APP_ROOT_PLACEHOLDER = "[**__APP_ROOT__**]" diff --git a/market/scrape/build_snapshot/tools/__init__.py b/market/scrape/build_snapshot/tools/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/market/scrape/build_snapshot/tools/__init__.py @@ -0,0 +1 @@ + diff --git a/market/scrape/build_snapshot/tools/_anchor_text.py b/market/scrape/build_snapshot/tools/_anchor_text.py new file mode 100644 index 0000000..fd3ce6d --- /dev/null +++ b/market/scrape/build_snapshot/tools/_anchor_text.py @@ -0,0 +1,6 @@ +def _anchor_text(a) -> str: + try: + txt = " ".join((a.get_text(" ") or "").split()) + return txt[:200] + except Exception: + return "" diff --git a/market/scrape/build_snapshot/tools/_collect_html_img_srcs.py b/market/scrape/build_snapshot/tools/_collect_html_img_srcs.py new file mode 100644 index 0000000..c5feaef --- /dev/null +++ b/market/scrape/build_snapshot/tools/_collect_html_img_srcs.py @@ -0,0 +1,16 @@ +from bs4 import BeautifulSoup +from typing import List, Optional + +def _collect_html_img_srcs(html: Optional[str]) -> List[str]: + urls: List[str] = [] + if not html: + return urls + try: + soup = BeautifulSoup(html, "lxml") + for img in soup.find_all("img"): + src = img.get("src") + if src: + urls.append(src) + except Exception: + pass + return urls diff --git a/market/scrape/build_snapshot/tools/_dedupe_preserve_order.py b/market/scrape/build_snapshot/tools/_dedupe_preserve_order.py new file mode 100644 index 0000000..492cb5a --- /dev/null +++ b/market/scrape/build_snapshot/tools/_dedupe_preserve_order.py @@ -0,0 +1,14 @@ + +from typing import Iterable, List, Set + +def _dedupe_preserve_order(urls: Iterable[str]) -> List[str]: + seen: Set[str] = set() + out: List[str] = [] + for u in urls: + if not u or not isinstance(u, str): + continue + if u in seen: + continue + seen.add(u) + out.append(u) + return out diff --git a/market/scrape/build_snapshot/tools/_product_dict_is_cf.py b/market/scrape/build_snapshot/tools/_product_dict_is_cf.py 
new file mode 100644 index 0000000..5802af7 --- /dev/null +++ b/market/scrape/build_snapshot/tools/_product_dict_is_cf.py @@ -0,0 +1,32 @@ +from typing import Dict,Optional, Tuple + +_CF_TOKENS = ( + "One moment, please...", + "Please wait while your request is being verified", + "/cdn-cgi/challenge-platform/", + "rocket-loader.min.js", +) + +def _looks_like_cf_html(html: Optional[str]) -> Tuple[bool, Optional[str]]: + if not html: + return False, None + for tok in _CF_TOKENS: + if tok in html: + return True, tok + return False, None + +def _product_dict_is_cf(d: Dict) -> Tuple[bool, Optional[str]]: + title = (d.get("title") or "").strip() + if title.lower() == "one moment, please...": + return True, "One moment, please..." + ok, tok = _looks_like_cf_html(d.get("description_html")) + if ok: + return True, tok + for sec in d.get("sections") or []: + if isinstance(sec, dict) and sec.get("html"): + ok2, tok2 = _looks_like_cf_html(sec["html"]) + if ok2: + return True, tok2 + if not d.get("images") and not d.get("description_html") and not d.get("sections"): + return True, "all_empty_heuristic" + return False, None diff --git a/market/scrape/build_snapshot/tools/_resolve_sub_redirects.py b/market/scrape/build_snapshot/tools/_resolve_sub_redirects.py new file mode 100644 index 0000000..c3e4f43 --- /dev/null +++ b/market/scrape/build_snapshot/tools/_resolve_sub_redirects.py @@ -0,0 +1,34 @@ +from typing import Dict, Set +from urllib.parse import urlparse, urljoin +import httpx + + +async def _resolve_sub_redirects( + base_url: str, + candidates: Set[str], + allowed_tops: Set[str], + valid_subs_by_top: Dict[str, Set[str]], +) -> Dict[str, str]: + mapping: Dict[str, str] = {} + if not candidates: + return mapping + timeout = httpx.Timeout(20.0, connect=10.0) + async with httpx.AsyncClient(follow_redirects=True, timeout=timeout, http2=True) as client: + for path in sorted(candidates): + try: + url = urljoin(base_url, path) + r = await client.get(url) + final = str(r.url) + p 
async def _resolve_sub_redirects(
    base_url: str,
    candidates: Set[str],
    allowed_tops: Set[str],
    valid_subs_by_top: Dict[str, Set[str]],
) -> Dict[str, str]:
    """Follow HTTP redirects for candidate '/top/sub' paths.

    Returns a mapping {old_path: new_path} for paths whose final URL differs.
    Side effect: registers every confirmed sub slug in valid_subs_by_top.
    Unreachable or malformed URLs are skipped (best effort).
    """
    mapping: Dict[str, str] = {}
    if not candidates:
        return mapping

    timeout = httpx.Timeout(20.0, connect=10.0)
    async with httpx.AsyncClient(follow_redirects=True, timeout=timeout, http2=True) as client:
        for path in sorted(candidates):
            try:
                response = await client.get(urljoin(base_url, path))
                final_path = urlparse(str(response.url)).path or ""
                segments = [seg for seg in final_path.split("/") if seg]
                if len(segments) < 2:
                    continue
                top = segments[0].lower()
                if top not in allowed_tops:
                    continue
                sub = segments[1].lower().removesuffix(".html").removesuffix(".htm")
                resolved = f"/{top}/{sub}"
                if resolved != path:
                    mapping[path] = resolved
                # Either way, the sub is confirmed reachable under this top.
                valid_subs_by_top.setdefault(top, set()).add(sub)
            except Exception:
                # Best effort: skip paths that fail to resolve.
                continue
    return mapping
parts[-1].lower() + if last.endswith((".html", ".htm")): + target_slug = product_slug_from_href(abs_href) + if target_slug and target_slug in known_slugs: + a["href"] = f"{APP_ROOT_PLACEHOLDER}/product/{target_slug}" + else: + link_errors.append({ + "product": current_product_slug, + "href": abs_href, + "text": _anchor_text(a), + "top": None, + "sub": None, + "target_slug": target_slug or None, + "type": "suma_product_unknown", + }) + continue + top = parts[0].lower() + if top in category_allow_values: + if len(parts) == 1: + a["href"] = f"{APP_ROOT_PLACEHOLDER}/{top}" + else: + sub = parts[1] + if sub.lower().endswith((".html", ".htm")): + sub = sub.rsplit(".", 1)[0] + if sub in (valid_subs_by_top.get(top) or set()): + a["href"] = f"{APP_ROOT_PLACEHOLDER}/{top}/{sub}" + else: + unknown_path = f"/{top}/{sub}" + unknown_sub_paths.add(unknown_path) + a["href"] = f"{APP_ROOT_PLACEHOLDER}{unknown_path}" + link_errors.append({ + "product": current_product_slug, + "href": abs_href, + "text": _anchor_text(a), + "top": top, + "sub": sub, + "target_slug": None, + "type": "suma_category_invalid_sub_pending", + }) + else: + link_errors.append({ + "product": current_product_slug, + "href": abs_href, + "text": _anchor_text(a), + "top": top, + "sub": parts[1] if len(parts) > 1 else None, + "target_slug": None, + "type": "suma_other", + }) + + for t in soup.find_all(["html", "body"]): + t.unwrap() + return "".join(str(c) for c in soup.contents).strip() + diff --git a/market/scrape/build_snapshot/tools/candidate_subs.py b/market/scrape/build_snapshot/tools/candidate_subs.py new file mode 100644 index 0000000..b7853b8 --- /dev/null +++ b/market/scrape/build_snapshot/tools/candidate_subs.py @@ -0,0 +1,14 @@ +from typing import Dict, Set + +def candidate_subs(nav: Dict[str, Dict])-> Set[str]: + nav_sub_candidates: Set[str] = set() + for label, data in (nav.get("cats") or {}).items(): + top_slug = (data or {}).get("slug") + if not top_slug: + continue + for s in (data.get("subs") or 
[]): + sub_slug = (s.get("slug") or "").strip() + if sub_slug: + nav_sub_candidates.add(f"/{top_slug}/{sub_slug}") + return nav_sub_candidates + diff --git a/market/scrape/build_snapshot/tools/capture_category.py b/market/scrape/build_snapshot/tools/capture_category.py new file mode 100644 index 0000000..84e51e7 --- /dev/null +++ b/market/scrape/build_snapshot/tools/capture_category.py @@ -0,0 +1,18 @@ +from urllib.parse import urljoin +from shared.config import config +from shared.utils import log +from ...listings import scrape_products + +async def capture_category( + slug: str, +): + list_url = urljoin(config()["base_url"], f"/{slug}") + log(f"[{slug}] page 1…") + items, total_pages = await scrape_products(list_url, page=1) + + pmax = int(total_pages or 1) + for p in range(2, pmax + 1): + log(f"[{slug}] page {p}…") + items_p, _tp = await scrape_products(list_url, page=p) + items.extend(items_p) + return (list_url, items, total_pages) diff --git a/market/scrape/build_snapshot/tools/capture_product_slugs.py b/market/scrape/build_snapshot/tools/capture_product_slugs.py new file mode 100644 index 0000000..1592e1e --- /dev/null +++ b/market/scrape/build_snapshot/tools/capture_product_slugs.py @@ -0,0 +1,25 @@ +from typing import Dict, Set +from .capture_category import capture_category +from .capture_sub import capture_sub +from shared.config import config + + +async def capture_product_slugs( + nav: Dict[str, Dict], + capture_listing, +): + product_slugs: Set[str] = set() + for label, slug in config()["categories"]["allow"].items(): + lpars = await capture_category( slug) + await capture_listing(*lpars) + (_, items, __) = lpars + for slug_ in items: + product_slugs.add(slug_) + for sub in (nav["cats"].get(label, {}).get("subs", []) or []): + lpars = await capture_sub(sub, slug) + await capture_listing(*lpars) + (_, items, __) = lpars + for slug_ in items: + product_slugs.add(slug_) + return product_slugs + diff --git 
a/market/scrape/build_snapshot/tools/capture_sub.py b/market/scrape/build_snapshot/tools/capture_sub.py new file mode 100644 index 0000000..5c14ca7 --- /dev/null +++ b/market/scrape/build_snapshot/tools/capture_sub.py @@ -0,0 +1,22 @@ +from urllib.parse import urljoin +from urllib.parse import urljoin +from shared.config import config +from shared.utils import log +from ...listings import scrape_products + +async def capture_sub( + sub, + slug, +): + sub_slug = sub.get("slug") + if not sub_slug: + return + sub_url = urljoin(config()["base_url"], f"/{slug}/{sub_slug}") + log(f"[{slug}/{sub_slug}] page 1…") + items_s, total_pages_s = await scrape_products(sub_url, page=1) + spmax = int(total_pages_s or 1) + for p in range(2, spmax + 1): + log(f"[{slug}/{sub_slug}] page {p}…") + items_ps, _ = await scrape_products(sub_url, page=p) + items_s.extend(items_ps) + return (sub_url, items_s, total_pages_s) diff --git a/market/scrape/build_snapshot/tools/fetch_and_upsert_product.py b/market/scrape/build_snapshot/tools/fetch_and_upsert_product.py new file mode 100644 index 0000000..0fb625c --- /dev/null +++ b/market/scrape/build_snapshot/tools/fetch_and_upsert_product.py @@ -0,0 +1,106 @@ + +import asyncio +from typing import List + +import httpx + + +from ...html_utils import to_fragment +from bp.browse.services.slugs import suma_href_from_html_slug + + +from shared.config import config + +from shared.utils import log + +# DB: persistence helpers +from ...product.product_detail import scrape_product_detail +from ._product_dict_is_cf import _product_dict_is_cf +from ._rewrite_links_fragment import _rewrite_links_fragment +from ._dedupe_preserve_order import _dedupe_preserve_order +from ._collect_html_img_srcs import _collect_html_img_srcs + + +async def fetch_and_upsert_product( + upsert_product, + log_product_result, + sem: asyncio.Semaphore, + slug: str, + product_slugs, + category_values, + valid_subs_by_top, + link_errors, + link_externals, + unknown_sub_paths +) -> bool: 
+ href = suma_href_from_html_slug(slug) + try: + async with sem: + d = await scrape_product_detail(href) + + is_cf, cf_token = _product_dict_is_cf(d) + if is_cf: + payload = { + "slug": slug, + "href_tried": href, + "error_type": "CloudflareChallengeDetected", + "error_message": f"Detected Cloudflare interstitial via token: {cf_token}", + "cf_token": cf_token, + } + await log_product_result(ok=False, payload=payload) + log(f" ! CF challenge detected: {slug} ({cf_token})") + return False + + # Rewrite embedded links; collect reports + if d.get("description_html"): + d["description_html"] = _rewrite_links_fragment( + d["description_html"], config()["base_url"], product_slugs, category_values, + valid_subs_by_top, slug, link_errors, link_externals, unknown_sub_paths + ) + d["description_html"] = to_fragment(d["description_html"]) + if d.get("sections"): + for sec in d["sections"]: + if isinstance(sec, dict) and sec.get("html"): + sec["html"] = _rewrite_links_fragment( + sec["html"], config()["base_url"], product_slugs, category_values, + valid_subs_by_top, slug, link_errors, link_externals, unknown_sub_paths + ) + sec["html"] = to_fragment(sec["html"]) + + # Images + gallery = _dedupe_preserve_order(d.get("images") or []) + embedded: List[str] = [] + if d.get("description_html"): + embedded += _collect_html_img_srcs(d["description_html"]) + for sec in d.get("sections", []) or []: + if isinstance(sec, dict) and sec.get("html"): + embedded += _collect_html_img_srcs(sec["html"]) + embedded = _dedupe_preserve_order(embedded) + all_imgs = _dedupe_preserve_order(list(gallery) + list(embedded)) + + d["images"] = gallery + d["embedded_image_urls"] = embedded + d["all_image_urls"] = all_imgs + await upsert_product(slug, href, d) + # DB: upsert product + success log + return True + except Exception as e: + payload = { + "slug": slug, + "href_tried": href, + "error_type": e.__class__.__name__, + "error_message": str(e), + } + try: + if isinstance(e, httpx.HTTPStatusError): + 
payload["http_status"] = getattr(e.response, "status_code", None) + req = getattr(e, "request", None) + if req is not None and getattr(req, "url", None) is not None: + payload["final_url"] = str(req.url) + elif isinstance(e, httpx.TransportError): + payload["transport_error"] = True + except Exception: + pass + await log_product_result(ok=False, payload=payload) + log(f" ! product failed: {slug} ({e})") + return False diff --git a/market/scrape/build_snapshot/tools/fetch_and_upsert_products.py b/market/scrape/build_snapshot/tools/fetch_and_upsert_products.py new file mode 100644 index 0000000..836dde0 --- /dev/null +++ b/market/scrape/build_snapshot/tools/fetch_and_upsert_products.py @@ -0,0 +1,49 @@ +import asyncio +from typing import Dict, List, Set +from shared.config import config +from shared.utils import log +from .fetch_and_upsert_product import fetch_and_upsert_product + + +async def fetch_and_upsert_products( + upsert_product, + log_product_result, + save_link_reports = None, + concurrency: int=8, + product_slugs: Set[str] = set(), + valid_subs_by_top: Dict[str, Set[str]] = {}, + unknown_sub_paths: Set[str] = set() +): + sem = asyncio.Semaphore(max(1, concurrency)) + link_errors: List[Dict] = [] + link_externals: List[Dict] = [] + + category_values: Set[str] = set(config()["categories"]["allow"].values()) + to_fetch = sorted(list(product_slugs)) + log(f"Fetching {len(to_fetch)} product details (concurrency={concurrency})…") + tasks = [asyncio.create_task( + fetch_and_upsert_product( + upsert_product, + log_product_result, + sem, + s, + product_slugs, + category_values, + valid_subs_by_top, + link_errors, + link_externals, + unknown_sub_paths + ) + ) for s in to_fetch] + done = 0 + ok_count = 0 + for coro in asyncio.as_completed(tasks): + ok = await coro + done += 1 + if ok: + ok_count += 1 + if done % 50 == 0 or done == len(tasks): + log(f" …{done}/{len(tasks)} saved (ok={ok_count})") + if save_link_reports: + await save_link_reports(link_errors, 
link_externals) + \ No newline at end of file diff --git a/market/scrape/build_snapshot/tools/rewrite_nav.py b/market/scrape/build_snapshot/tools/rewrite_nav.py new file mode 100644 index 0000000..aaa03da --- /dev/null +++ b/market/scrape/build_snapshot/tools/rewrite_nav.py @@ -0,0 +1,24 @@ + +from typing import Dict +from urllib.parse import urljoin +from shared.config import config + +def rewrite_nav(nav: Dict[str, Dict], nav_redirects:Dict[str, str]): + if nav_redirects: + for label, data in (nav.get("cats") or {}).items(): + top_slug = (data or {}).get("slug") + if not top_slug: + continue + new_subs = [] + for s in (data.get("subs") or []): + old_sub = (s.get("slug") or "").strip() + if not old_sub: + continue + old_path = f"/{top_slug}/{old_sub}" + canonical_path = nav_redirects.get(old_path, old_path) + parts = [x for x in canonical_path.split("/") if x] + top2, sub2 = parts[0], parts[1] + s["slug"] = sub2 + s["href"] = urljoin(config()["base_url"], f"/{top2}/{sub2}") + new_subs.append(s) + data["subs"] = new_subs diff --git a/market/scrape/build_snapshot/tools/valid_subs.py b/market/scrape/build_snapshot/tools/valid_subs.py new file mode 100644 index 0000000..8939a10 --- /dev/null +++ b/market/scrape/build_snapshot/tools/valid_subs.py @@ -0,0 +1,16 @@ +from typing import Dict, Set + +# make valid subs for ewch top in nav +def valid_subs(nav: Dict[str, Dict])->Dict[str, Set[str]] : + valid_subs_by_top: Dict[str, Set[str]] = {} + for label, data in (nav.get("cats") or {}).items(): + top_slug = (data or {}).get("slug") + if not top_slug: + continue + subs_set = { + (s.get("slug") or "").strip() + for s in (data.get("subs") or []) + if s.get("slug") + } + valid_subs_by_top[top_slug] = subs_set + return valid_subs_by_top diff --git a/market/scrape/get_auth.py b/market/scrape/get_auth.py new file mode 100644 index 0000000..a57b66c --- /dev/null +++ b/market/scrape/get_auth.py @@ -0,0 +1,244 @@ +from typing import Optional, Dict, Any, List +from urllib.parse 
import urljoin +import httpx +from bs4 import BeautifulSoup +from shared.config import config + +class LoginFailed(Exception): + def __init__(self, message: str, *, debug: Dict[str, Any]): + super().__init__(message) + self.debug = debug + +def _ff_headers(referer: Optional[str] = None, origin: Optional[str] = None) -> Dict[str, str]: + h = { + "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:142.0) Gecko/20100101 Firefox/142.0", + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8", + "Accept-Language": "en-GB,en;q=0.5", + "Accept-Encoding": "gzip, deflate, br, zstd", + "Connection": "keep-alive", + "Upgrade-Insecure-Requests": "1", + "DNT": "1", + "Sec-GPC": "1", + "Cache-Control": "no-cache", + "Pragma": "no-cache", + } + if referer: + h["Referer"] = referer + if origin: + h["Origin"] = origin + return h + +def _cookie_header_from_jar(jar: httpx.Cookies, domain: str, path: str = "/") -> str: + pairs: List[str] = [] + for c in jar.jar: + if not c.name or c.value is None: + continue + dom = (c.domain or "").lstrip(".") + if not dom: + continue + if not (domain == dom or domain.endswith("." + dom) or dom.endswith("." 
+ domain)): + continue + if not (path.startswith(c.path or "/")): + continue + pairs.append(f"{c.name}={c.value}") + return "; ".join(pairs) + +def _extract_magento_errors(html_text: str) -> list[str]: + msgs: list[str] = [] + try: + soup = BeautifulSoup(html_text or "", "lxml") + for sel in [ + ".message-error", + ".messages .message-error", + ".page.messages .message-error", + "[data-ui-id='message-error']", + ".message.warning", + ".message.notice", + ]: + for box in soup.select(sel): + t = " ".join((box.get_text(" ") or "").split()) + if t and t not in msgs: + msgs.append(t) + except Exception: + pass + return msgs + +def _looks_like_login_page(html_text: str) -> bool: + try: + s = BeautifulSoup(html_text or "", "lxml") + if s.select_one("form#login-form.form-login"): + return True + title = (s.title.get_text() if s.title else "").strip().lower() + if "customer login" in title: + return True + except Exception: + pass + return False + +def _chrome_headers(referer=None, origin=None): + headers = { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 " + "(KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36", + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8", + "Accept-Language": "en-US,en;q=0.5", + "Accept-Encoding": "gzip, deflate, br", + "Connection": "keep-alive", + "Upgrade-Insecure-Requests": "1", + } + if referer: + headers["Referer"] = referer + if origin: + headers["Origin"] = origin + return headers + +async def login( + username: str, + password: str, + *, + extra_cookies = {}, # ok to pass cf_clearance etc., but NOT form_key + timeout: float = 30.0, +) -> httpx.Cookies: + """ + Attempt login and return an authenticated cookie jar. + + Success criteria (strict): + 1) /customer/section/load?sections=customer reports is_logged_in == True + OR + 2) GET /customer/account/ resolves to an account page (not the login page). + + Otherwise raises LoginFailed with debug info. 
+ """ + limits = httpx.Limits(max_connections=10, max_keepalive_connections=6) + cookies = httpx.Cookies() + for k, v in { + **extra_cookies, + "pr-cookie-consent": '["all"]', + "user_allowed_save_cookie": '{"1":1}', + }.items(): + if k.lower() == "form_key": + continue + cookies.set(k, v, domain="wholesale.suma.coop", path="/") + + base_login = config()["base_login"] + base_url = config()["base_url"] + + async with httpx.AsyncClient( + follow_redirects=True, + timeout=httpx.Timeout(timeout, connect=15.0), + http2=True, + limits=limits, + cookies=cookies, + headers=_chrome_headers(), + trust_env=True, + ) as client: + # 1) GET login page for fresh form_key + import time + login_bust = base_login + ("&" if "?" in base_login else "?") + f"_={int(time.time()*1000)}" + login_bust = base_login + r_get = await client.get(login_bust, headers=_chrome_headers()) + print("Login GET failed. Status:", r_get.status_code) + print("Login GET URL:", r_get.url) + print("Response text:", r_get.text[:1000]) # trim if long + r_get.raise_for_status() + soup = BeautifulSoup(r_get.text, "lxml") + + form = soup.select_one("form.form.form-login#login-form") or soup.select_one("#login-form") + if not form: + raise LoginFailed( + "Login form not found (possible bot challenge or theme change).", + debug={"get_status": r_get.status_code, "final_url": str(r_get.url)}, + ) + action = urljoin(base_login, form.get("action") or base_login) + fk_el = form.find("input", attrs={"name": "form_key"}) + hidden_form_key = (fk_el.get("value") if fk_el else "") or "" + + # mirror Magento behavior: form_key also appears as a cookie + client.cookies.set("form_key", hidden_form_key, domain="wholesale.suma.coop", path="/") + + payload = { + "form_key": hidden_form_key, + "login[username]": username, + "login[password]": password, + "send": "Login", + } + + post_headers = _chrome_headers(referer=base_login, origin=base_url) + post_headers["Content-Type"] = "application/x-www-form-urlencoded" + 
post_headers["Cookie"] = _cookie_header_from_jar( + client.cookies, domain="wholesale.suma.coop", path="/customer/" + ) + + r_post = await client.post(action, data=payload, headers=post_headers) + + # 2) Primary check: sections API must say logged in + is_logged_in = False + sections_url = "https://wholesale.suma.coop/customer/section/load/?sections=customer&force_new_section_timestamp=1" + section_json: Dict[str, Any] = {} + try: + r_sec = await client.get(sections_url, headers=_chrome_headers(referer=base_login)) + if r_sec.status_code == 200: + section_json = r_sec.json() + cust = section_json.get("customer") or {} + is_logged_in = bool(cust.get("is_logged_in")) + except Exception: + pass + + # 3) Secondary check: account page should NOT be the login page + looks_like_login = False + final_account_url = "" + try: + r_acc = await client.get("https://wholesale.suma.coop/customer/account/", headers=_chrome_headers(referer=base_login)) + final_account_url = str(r_acc.url) + looks_like_login = ( + "/customer/account/login" in final_account_url + or _looks_like_login_page(r_acc.text) + ) + except Exception: + # ignore; we'll rely on section status + pass + + # Decide success/failure strictly + if not (is_logged_in or (final_account_url and not looks_like_login)): + errors = _extract_magento_errors(r_post.text) + # Clean up transient form_key cookie + try: + client.cookies.jar.clear("wholesale.suma.coop", "/", "form_key") + except Exception: + pass + raise LoginFailed( + errors[0] if errors else "Invalid username or password.", + debug={ + "get_status": r_get.status_code, + "post_status": r_post.status_code, + "post_final_url": str(r_post.url), + "sections_customer": section_json.get("customer"), + "account_final_url": final_account_url, + "looks_like_login_page": looks_like_login, + }, + ) + def clear_cookie_everywhere(cookies: httpx.Cookies, name: str) -> None: + to_delete = [] + for c in list(cookies.jar): # http.cookiejar.Cookie objects + if c.name == name: + # 
Note: CookieJar.clear requires exact (domain, path, name) + to_delete.append((c.domain, c.path, c.name)) + + for domain, path, nm in to_delete: + try: + cookies.jar.clear(domain, path, nm) + except KeyError: + # Mismatch can happen if domain has a leading dot vs not, etc. + # Try again with a normalized domain variant. + if domain and domain.startswith("."): + + cookies.jar.clear(domain.lstrip("."), path, nm) + else: + # or try with leading dot + cookies.jar.clear("." + domain, path, nm) + if name in cookies: + del cookies[name] + + clear_cookie_everywhere(client.cookies, "form_key") + #client.cookies.jar.clear(config()["base_host"] or "wholesale.suma.coop", "/", "form_key") + print('cookies', client.cookies) + return client.cookies diff --git a/market/scrape/html_utils.py b/market/scrape/html_utils.py new file mode 100644 index 0000000..9d9d3ef --- /dev/null +++ b/market/scrape/html_utils.py @@ -0,0 +1,44 @@ +# suma_browser/html_utils.py +from __future__ import annotations +from typing import Optional +from bs4 import BeautifulSoup +from urllib.parse import urljoin +from shared.config import config + + + +def to_fragment(html: Optional[str]) -> str: + """Return just the fragment contents (no / wrappers).""" + if not html: + return "" + soup = BeautifulSoup(html, "lxml") + + # unwrap document-level containers + for t in soup.find_all(["html", "body"]): + t.unwrap() + + return "".join(str(c) for c in soup.contents).strip() + +def absolutize_fragment(html: Optional[str]) -> str: + """Absolutize href/src against BASE_URL and return a fragment (no wrappers).""" + if not html: + return "" + frag = BeautifulSoup(html, "lxml") + + for tag in frag.find_all(True): + if tag.has_attr("href"): + raw = str(tag["href"]) + abs_href = urljoin(config()["base_url"], raw) if raw.startswith("/") else raw + #if rewrite_suma_href_to_local: + # local = rewrite_suma_href_to_local(abs_href) + # tag["href"] = local if local else abs_href + #else: + tag["href"] = abs_href + if 
tag.has_attr("src"): + raw = str(tag["src"]) + tag["src"] = urljoin(config()["base_url"], raw) if raw.startswith("/") else raw + + # unwrap wrappers and return only the inner HTML + for t in frag.find_all(["html", "body"]): + t.unwrap() + return "".join(str(c) for c in frag.contents).strip() diff --git a/market/scrape/http_client.py b/market/scrape/http_client.py new file mode 100644 index 0000000..3865605 --- /dev/null +++ b/market/scrape/http_client.py @@ -0,0 +1,220 @@ +# suma_browser/http_client.py +from __future__ import annotations + +import asyncio +import os +import secrets +from typing import Optional, Dict + +import httpx +from shared.config import config + +_CLIENT: httpx.AsyncClient | None = None + +# ----- optional decoders -> Accept-Encoding +BROTLI_OK = False +ZSTD_OK = False +try: + import brotli # noqa: F401 + BROTLI_OK = True +except Exception: + pass +try: + import zstandard as zstd # noqa: F401 + ZSTD_OK = True +except Exception: + pass + +def _accept_encoding() -> str: + enc = ["gzip", "deflate"] + if BROTLI_OK: + enc.append("br") + if ZSTD_OK: + enc.append("zstd") + return ", ".join(enc) + +FIREFOX_UA = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:142.0) Gecko/20100101 Firefox/142.0" + +def _ff_headers(referer: Optional[str] = None) -> Dict[str, str]: + h = { + "User-Agent": FIREFOX_UA, + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", + "Accept-Language": "en-GB,en;q=0.5", + "Accept-Encoding": _accept_encoding(), + "Connection": "keep-alive", + "Upgrade-Insecure-Requests": "1", + "Sec-Fetch-Dest": "document", + "Sec-Fetch-Mode": "navigate", + "Sec-Fetch-Site": "none" if not referer else "same-origin", + "Sec-Fetch-User": "?1", + "DNT": "1", + "Sec-GPC": "1", + "Priority": "u=0, i", + "Cache-Control": "no-cache", + "Pragma": "no-cache", + } + if referer: + h["Referer"] = referer + return h +def _chrome_headers(referer=None, origin=None): + headers = { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) 
AppleWebKit/537.36 " + "(KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36", + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8", + "Accept-Language": "en-US,en;q=0.5", + "Accept-Encoding": "gzip, deflate, br", + "Connection": "keep-alive", + "Upgrade-Insecure-Requests": "1", + } + if referer: + headers["Referer"] = referer + if origin: + headers["Origin"] = origin + return headers + +def _parse_cookie_header(cookie_header: str) -> Dict[str, str]: + jar: Dict[str, str] = {} + for part in cookie_header.split(";"): + part = part.strip() + if not part or "=" not in part: + continue + k, v = part.split("=", 1) + jar[k.strip()] = v.strip() + return jar + +def _looks_like_cloudflare(html: bytes) -> bool: + if not html: + return False + s = html[:40000].lower() + return ( + b"please wait while your request is being verified" in s + or b"/cdn-cgi/challenge-platform/scripts/jsd/main.js" in s + or b"rocket-loader.min.js" in s + or b"cf-ray" in s + or b"challenge-platform" in s + or b"cf-chl-" in s + ) + +# -------- runtime cookie configuration (preferred over env) -------------------- +_INITIAL_COOKIES: Dict[str, str] = {} +_INITIAL_COOKIE_HEADER: Optional[str] = None + +async def configure_cookies(cookies: Dict[str, str]) -> None: + """ + Configure initial cookies programmatically (preferred over env). + Call BEFORE the first request (i.e., before get_client()/fetch()). + If a client already exists, its jar is updated immediately. 
+ """ + global _INITIAL_COOKIES, _INITIAL_COOKIE_HEADER + _INITIAL_COOKIE_HEADER = None + _INITIAL_COOKIES = dict(cookies or {}) + # If client already built, update it now + if _CLIENT is not None: + print('configuring cookies') + host = config()["base_host"] or "wholesale.suma.coop" + for k, v in _INITIAL_COOKIES.items(): + _CLIENT.cookies.set(k, v, domain=host, path="/") + +def configure_cookies_from_header(cookie_header: str) -> None: + """ + Configure initial cookies from a raw 'Cookie:' header string. + Preferred over env; call BEFORE the first request. + """ + global _INITIAL_COOKIES, _INITIAL_COOKIE_HEADER + _INITIAL_COOKIE_HEADER = cookie_header or "" + _INITIAL_COOKIES = _parse_cookie_header(_INITIAL_COOKIE_HEADER) + if _CLIENT is not None: + host = config()["base_host"] or "wholesale.suma.coop" + for k, v in _INITIAL_COOKIES.items(): + _CLIENT.cookies.set(k, v, domain=host, path="/") + +# ------------------------------------------------------------------------------ +async def get_client() -> httpx.AsyncClient: + """Public accessor (same as _get_client).""" + return await _get_client() + +async def _get_client() -> httpx.AsyncClient: + global _CLIENT + if _CLIENT is None: + timeout = httpx.Timeout(300.0, connect=150.0) + limits = httpx.Limits(max_keepalive_connections=8, max_connections=16) + _CLIENT = httpx.AsyncClient( + follow_redirects=True, + timeout=timeout, + http2=True, + limits=limits, + headers=_chrome_headers(), + trust_env=True, + ) + + # ---- Seed cookies (priority: runtime config > env var) --------------- + host = config()["base_host"] or "wholesale.suma.coop" + + if _INITIAL_COOKIES or _INITIAL_COOKIE_HEADER: + # From runtime config + if _INITIAL_COOKIE_HEADER: + _CLIENT.cookies.update(_parse_cookie_header(_INITIAL_COOKIE_HEADER)) + for k, v in _INITIAL_COOKIES.items(): + _CLIENT.cookies.set(k, v, domain=host, path="/") + else: + # Fallback to environment + cookie_str = os.environ.get("SUMA_COOKIES", "").strip() + if cookie_str: + 
_CLIENT.cookies.update(_parse_cookie_header(cookie_str)) + + # Ensure private_content_version is present + if "private_content_version" not in _CLIENT.cookies: + pcv = secrets.token_hex(16) + _CLIENT.cookies.set("private_content_version", pcv, domain=host, path="/") + # --------------------------------------------------------------------- + + return _CLIENT + +async def aclose_client() -> None: + global _CLIENT + if _CLIENT is not None: + await _CLIENT.aclose() + _CLIENT = None + +async def fetch(url: str, *, referer: Optional[str] = None, retries: int = 3) -> str: + client = await _get_client() + + # Warm-up visit to look like a real session + if len(client.cookies.jar) == 0: + try: + await client.get(config()["base_url"].rstrip("/") + "/", headers=_chrome_headers()) + await asyncio.sleep(0.25) + except Exception: + pass + + last_exc: Optional[Exception] = None + for attempt in range(1, retries + 1): + try: + h = _chrome_headers(referer=referer or (config()["base_url"].rstrip("/") + "/")) + r = await client.get(url, headers=h) + if _looks_like_cloudflare(r.content): + if attempt < retries: + await asyncio.sleep(0.9 if attempt == 1 else 1.3) + try: + await client.get(config()["base_url"].rstrip("/") + "/", headers=_chrome_headers()) + await asyncio.sleep(0.4) + except Exception: + pass + continue + try: + r.raise_for_status() + except httpx.HTTPStatusError as e: + print(f"Fetch failed for {url}") + print("Status:", r.status_code) + print("Body:", r.text[:1000]) # Trimmed + raise + return r.text + except Exception as e: + last_exc = e + if attempt >= retries: + raise + await asyncio.sleep(0.45 * attempt + 0.25) + + if last_exc: + raise last_exc + raise RuntimeError("fetch failed unexpectedly") diff --git a/market/scrape/listings.py b/market/scrape/listings.py new file mode 100644 index 0000000..0a7e197 --- /dev/null +++ b/market/scrape/listings.py @@ -0,0 +1,289 @@ +from __future__ import annotations + +import math +import re +from typing import Callable, Dict, 
List, Optional, Tuple +from urllib.parse import parse_qsl, urlencode, urljoin, urlparse, urlunparse + + +from .http_client import fetch +from bp.browse.services.slugs import product_slug_from_href +from bp.browse.services.state import ( + KNOWN_PRODUCT_SLUGS, + _listing_page_cache, + _listing_page_ttl, + _listing_variant_cache, + _listing_variant_ttl, + now, +) +from shared.utils import normalize_text, soup_of +from shared.config import config + + +def parse_total_pages_from_text(text: str) -> Optional[int]: + m = re.search(r"Showing\s+(\d+)\s+of\s+(\d+)", text, re.I) + if not m: + return None + shown = int(m.group(1)) + total = int(m.group(2)) + per_page = 36 if shown in (12, 24, 36) else shown + return max(1, math.ceil(total / per_page)) + + +def _first_from_srcset(val: str) -> Optional[str]: + if not val: + return None + first = val.split(",")[0].strip() + parts = first.split() + return parts[0] if parts else first + + +def _abs_url(u: Optional[str]) -> Optional[str]: + if not u: + return None + return urljoin(config()["base_url"], u) if isinstance(u, str) and u.startswith("/") else u + + +def _collect_img_candidates(el) -> List[str]: + urls: List[str] = [] + if not el: + return urls + attrs = ["src", "data-src", "data-original", "data-zoom-image", "data-thumb", "content", "href"] + for a in attrs: + v = el.get(a) + if v: + urls.append(v) + for a in ["srcset", "data-srcset"]: + v = el.get(a) + if v: + first = _first_from_srcset(v) + if first: + urls.append(first) + return urls + + +def _dedupe_preserve_order_by(seq: List[str], key: Callable[[str], str]) -> List[str]: + seen = set() + out: List[str] = [] + for s in seq: + if not s: + continue + k = key(s) + if k in seen: + continue + seen.add(k) + out.append(s) + return out + + +def _filename_key(u: str) -> str: + p = urlparse(u) + path = p.path or "" + if path.endswith("/"): + path = path[:-1] + last = path.split("/")[-1] + return f"{p.netloc}:{last}".lower() + + +def _parse_cards_from_soup(soup) -> List[Dict]: 
+ """Extract product tiles (name, href, image, desc) from listing soup. + De-duplicate by slug to avoid doubles from overlapping selectors.""" + items: List[str] = [] + seen_slugs: set[str] = set() + + # Primary selectors (Magento 2 default) + card_wrappers = soup.select( + "li.product-item, .product-item, ol.products.list.items li, .products.list.items li, .product-item-info" + ) + for card in card_wrappers: + a = ( + card.select_one("a.product-item-link") + or card.select_one(".product-item-name a") + or card.select_one("a[href$='.html'], a[href$='.htm']") + ) + if not a: + continue + #name = normalize_text(a.get_text()) or normalize_text(a.get("title") or "") + href = a.get("href") + #if not name or not href: + # continue + if href.startswith("/"): + href = urljoin(config()["base_url"], href) + + + slug = product_slug_from_href(href) + KNOWN_PRODUCT_SLUGS.add(slug) + + if slug and slug not in seen_slugs: + seen_slugs.add(slug) + items.append(slug) + # Secondary: any product-looking anchors inside products container + if not items: + products_container = soup.select_one(".products") or soup + for a in products_container.select("a[href$='.html'], a[href$='.htm']"): + href = a.get("href") + if href.startswith("/"): + href = urljoin(config()["base_url"], href) + slug = product_slug_from_href(href) + KNOWN_PRODUCT_SLUGS.add(slug) + if slug not in seen_slugs: + seen_slugs.add(slug) + items.append(slug) + + # Tertiary: JSON-LD fallback (ItemList/Product) + if not items: + import json + + def add_product(name: Optional[str], url: Optional[str], image: Optional[str]): + if not url: + return + absu = urljoin(config()["base_url"], url) if url.startswith("/") else url + slug = product_slug_from_href(absu) + if not slug: + return + KNOWN_PRODUCT_SLUGS.add(slug) + if slug not in seen_slugs: + seen_slugs.add(slug) + items.append(slug) + + for script in soup.find_all("script", attrs={"type": "application/ld+json"}): + #try: + data = json.loads(script.get_text()) + #except 
Exception: + # continue + if isinstance(data, dict): + if data.get("@type") == "ItemList" and isinstance(data.get("itemListElement"), list): + for it in data["itemListElement"]: + if isinstance(it, dict): + ent = it.get("item") or it + if isinstance(ent, dict): + add_product( + ent.get("name"), + ent.get("url"), + (ent.get("image") if isinstance(ent.get("image"), str) else None), + ) + if data.get("@type") == "Product": + add_product( + data.get("name"), + data.get("url"), + (data.get("image") if isinstance(data.get("image"), str) else None), + ) + elif isinstance(data, list): + for ent in data: + if not isinstance(ent, dict): + continue + if ent.get("@type") == "Product": + add_product( + ent.get("name"), + ent.get("url"), + (ent.get("image") if isinstance(ent.get("image"), str) else None), + ) + if ent.get("@type") == "ItemList": + for it in ent.get("itemListElement", []): + if isinstance(it, dict): + obj = it.get("item") or it + if isinstance(obj, dict): + add_product( + obj.get("name"), + obj.get("url"), + (obj.get("image") if isinstance(obj.get("image"), str) else None), + ) + + return items + + +def _with_query(url: str, add: Dict[str, str]) -> str: + p = urlparse(url) + q = dict(parse_qsl(p.query, keep_blank_values=True)) + q.update(add) + new_q = urlencode(q) + return urlunparse((p.scheme, p.netloc, p.path, p.params, new_q, p.fragment)) + + +def _with_page(url: str, page: int) -> str: + if page and page > 1: + return _with_query(url, {"p": str(page)}) + return url + + +def _listing_base_key(url: str) -> str: + p = urlparse(url) + path = p.path.rstrip("/") + return f"{p.scheme}://{p.netloc}{path}".lower() + + +def _variant_cache_get(base_key: str) -> Optional[str]: + info = _listing_variant_cache.get(base_key) + if not info: + return None + url, ts = info + if (now() - ts) > _listing_variant_ttl: + _listing_variant_cache.pop(base_key, None) + return None + return url + + +def _variant_cache_set(base_key: str, working_url: str) -> None: + 
_listing_variant_cache[base_key] = (working_url, now()) + + +def _page_cache_get(working_url: str, page: int) -> Optional[Tuple[List[Dict], int]]: + key = f"{working_url}|p={page}" + info = _listing_page_cache.get(key) + if not info: + return None + (items, total_pages), ts = info + if (now() - ts) > _listing_page_ttl: + _listing_page_cache.pop(key, None) + return None + return items, total_pages + + +def _page_cache_set(working_url: str, page: int, items: List[Dict], total_pages: int) -> None: + key = f"{working_url}|p={page}" + _listing_page_cache[key] = ((items, total_pages), now()) + + +async def _fetch_parse(url: str, page: int): + html = await fetch(_with_page(url, page)) + soup = soup_of(html) + items = _parse_cards_from_soup(soup) + return items, soup + + + + +async def scrape_products(list_url: str, page: int = 1): + """Fast listing fetch with variant memoization + page cache.""" + _listing_base_key(list_url) + items, soup = await _fetch_parse(list_url, page) + + total_pages = _derive_total_pages(soup) + return items, total_pages + +def _derive_total_pages(soup) -> int: + total_pages = 1 + textdump = normalize_text(soup.get_text(" ")) + pages_from_text = parse_total_pages_from_text(textdump) + if pages_from_text: + total_pages = pages_from_text + else: + pages = {1} + for a in soup.find_all("a", href=True): + m = re.search(r"[?&]p=(\d+)", a["href"]) + if m: + pages.add(int(m.group(1))) + total_pages = max(pages) if pages else 1 + return total_pages + + +def _slugs_from_list_url(list_url: str) -> Tuple[str, Optional[str]]: + p = urlparse(list_url) + parts = [x for x in (p.path or "").split("/") if x] + top = parts[0].lower() if parts else "" + sub = None + if len(parts) >= 2: + sub = parts[1] + if sub.lower().endswith((".html", ".htm")): + sub = re.sub(r"\.(html?|HTML?)$", "", sub) + return top, sub diff --git a/market/scrape/nav.py b/market/scrape/nav.py new file mode 100644 index 0000000..7e187d6 --- /dev/null +++ b/market/scrape/nav.py @@ -0,0 +1,104 @@ 
+from __future__ import annotations + +import re +from typing import Dict, List, Tuple, Optional +from urllib.parse import urlparse, urljoin + +from bs4 import BeautifulSoup +from shared.config import config +from .http_client import fetch # only fetch; define soup_of locally +#from .. import cache_backend as cb +#from ..blacklist.category import is_category_blocked # Reverse map: slug -> label + + +# ------------------ Caches ------------------ + + + +def soup_of(html: str) -> BeautifulSoup: + return BeautifulSoup(html or "", "lxml") + + +def normalize_text(s: str) -> str: + return re.sub(r"\s+", " ", (s or "").strip()) + + +async def scrape_nav_raw() -> List[Tuple[str, str]]: + html = await fetch(config()["base_url"]) + soup = soup_of(html) + results: List[Tuple[str, str]] = [] + for a in soup.find_all("a", href=True): + text = normalize_text(a.get_text()) + if not text: + continue + href = a["href"].strip() + if href.startswith("/"): + href = urljoin(config()["base_url"], href) + if not href.startswith(config()["base_url"]): + continue + results.append((text, href)) + return results + + +def extract_sub_slug(href: str, top_slug: str) -> Optional[str]: + p = urlparse(href) + parts = [x for x in (p.path or "").split("/") if x] + if len(parts) >= 2 and parts[0].lower() == top_slug.lower(): + sub = parts[1] + if sub.lower().endswith((".html", ".htm")): + sub = re.sub(r"\.(html?|HTML?)$", "", sub) + return sub + return None + + +async def group_by_category(slug_to_links: Dict[str, List[Tuple[str, str]]]) -> Dict[str, Dict]: + nav = {"cats": {}} + for label, slug in config()["categories"]["allow"].items(): + top_href = urljoin(config()["base_url"], f"/{slug}") + subs = [] + for text, href in slug_to_links.get(slug, []): + sub_slug = extract_sub_slug(href, slug) + if sub_slug: + #list_url = _join(config()["base_url"], f"/{slug}/{sub_slug}") + #log(f"naving [{slug}/{sub_slug}] page 1…") + #items, total_pages = await scrape_products(list_url, page=1) + #for p in range(2, 
total_pages + 1): + # log(f"naving [{slug}/{sub_slug}] page {p}…") + # moreitems, _tp = await scrape_products(list_url, page=p) + # items.extend( + # moreitems, + # ) + subs.append({"name": text, "href": href, "slug": sub_slug}) + subs.sort(key=lambda x: x["name"].lower()) + #list_url = _join(config()["base_url"], f"/{slug}") + #log(f"naving [{slug}] page 1…") + #items, total_pages = await scrape_products(list_url, page=1) + #for p in range(2, total_pages + 1): + # log(f"naving [{slug}] page {p}…") + # moreitems, _tp = await scrape_products(list_url, page=p) + # items.extend( + # moreitems, + # ) + nav["cats"][label] = {"href": top_href, "slug": slug, "subs": subs} + return nav + + +async def scrape_nav_filtered() -> Dict[str, Dict]: + anchors = await scrape_nav_raw() + slug_to_links: Dict[str, List[Tuple[str, str]]] = {} + for text, href in anchors: + p = urlparse(href) + parts = [x for x in (p.path or "").split("/") if x] + if not parts: + continue + top = parts[0].lower() + if top in config()["slugs"]["skip"]: + continue + slug_to_links.setdefault(top, []).append((text, href)) + return await group_by_category(slug_to_links) + +async def nav_scrape() -> Dict[str, Dict]: + """Return navigation structure; use snapshot when offline.""" + + nav = await scrape_nav_filtered() + return nav diff --git a/market/scrape/persist_api/__init__.py b/market/scrape/persist_api/__init__.py new file mode 100644 index 0000000..d5273af --- /dev/null +++ b/market/scrape/persist_api/__init__.py @@ -0,0 +1,6 @@ +from .upsert_product import upsert_product +from .log_product_result import log_product_result +from .save_nav import save_nav +from .save_subcategory_redirects import save_subcategory_redirects +from .capture_listing import capture_listing + diff --git a/market/scrape/persist_api/capture_listing.py b/market/scrape/persist_api/capture_listing.py new file mode 100644 index 0000000..3943253 --- /dev/null +++ b/market/scrape/persist_api/capture_listing.py @@ -0,0 +1,27 @@ +# 
replace your existing upsert_product with this version + +import os +import httpx + +from typing import List + +async def capture_listing( + url: str, + items: List[str], + total_pages: int +): + + sync_url = os.getenv("CAPTURE_LISTING_URL", "http://localhost:8001/market/suma-market/api/products/listing/") + + async with httpx.AsyncClient(timeout=httpx.Timeout(20.0, connect=10.0)) as client: + _d = { + "url": url, + "items": items, + "total_pages": total_pages + } + resp = await client.post(sync_url, json=_d) + # Raise for non-2xx + resp.raise_for_status() + data = resp.json() if resp.content else {} + return data + \ No newline at end of file diff --git a/market/scrape/persist_api/log_product_result.py b/market/scrape/persist_api/log_product_result.py new file mode 100644 index 0000000..bf285ed --- /dev/null +++ b/market/scrape/persist_api/log_product_result.py @@ -0,0 +1,24 @@ +# replace your existing upsert_product with this version + +import os +import httpx + + +async def log_product_result( + ok: bool, + payload +): + + sync_url = os.getenv("PRODUCT_LOG_URL", "http://localhost:8000/market/api/products/log/") + + async with httpx.AsyncClient(timeout=httpx.Timeout(20.0, connect=10.0)) as client: + _d = { + "ok": ok, + "payload": payload + } + resp = await client.post(sync_url, json=_d) + # Raise for non-2xx + resp.raise_for_status() + data = resp.json() if resp.content else {} + return data + \ No newline at end of file diff --git a/market/scrape/persist_api/save_nav.py b/market/scrape/persist_api/save_nav.py new file mode 100644 index 0000000..3feeadb --- /dev/null +++ b/market/scrape/persist_api/save_nav.py @@ -0,0 +1,19 @@ +# replace your existing upsert_product with this version + +import os +import httpx + +from typing import Dict + +async def save_nav( + nav: Dict, +): + sync_url = os.getenv("SAVE_NAV_URL", "http://localhost:8001/market/suma-market/api/products/nav/") + + async with httpx.AsyncClient(timeout=httpx.Timeout(20.0, connect=10.0)) as client: + 
resp = await client.post(sync_url, json=nav) + # Raise for non-2xx + resp.raise_for_status() + data = resp.json() if resp.content else {} + return data + \ No newline at end of file diff --git a/market/scrape/persist_api/save_subcategory_redirects.py b/market/scrape/persist_api/save_subcategory_redirects.py new file mode 100644 index 0000000..60eba97 --- /dev/null +++ b/market/scrape/persist_api/save_subcategory_redirects.py @@ -0,0 +1,15 @@ +import os +import httpx + +from typing import Dict + +async def save_subcategory_redirects(mapping: Dict[str, str]) -> None: + sync_url = os.getenv("SAVE_REDIRECTS", "http://localhost:8000/market/api/products/redirects/") + + async with httpx.AsyncClient(timeout=httpx.Timeout(20.0, connect=10.0)) as client: + resp = await client.post(sync_url, json=mapping) + # Raise for non-2xx + resp.raise_for_status() + data = resp.json() if resp.content else {} + return data + \ No newline at end of file diff --git a/market/scrape/persist_api/upsert_product.py b/market/scrape/persist_api/upsert_product.py new file mode 100644 index 0000000..d65149a --- /dev/null +++ b/market/scrape/persist_api/upsert_product.py @@ -0,0 +1,256 @@ +# replace your existing upsert_product with this version + +import os +import httpx + +from typing import Dict, List, Any + +async def upsert_product( + slug, + href, + d, +): + """ + Posts the given product dict `d` to the /api/products/sync endpoint. + Keeps the same signature as before and preserves logging/commit behavior. 
+ """ + + + # Ensure slug in payload matches the function arg if present + if not d.get("slug"): + d["slug"] = slug + + # Where to post; override via env if needed + sync_url = os.getenv("PRODUCT_SYNC_URL", "http://localhost:8001/market/suma-market/api/products/sync/") + + + + + payload = _massage_payload(d) + + async def _do_call() -> Dict[str, Any]: + async with httpx.AsyncClient(timeout=httpx.Timeout(20.0, connect=10.0)) as client: + resp = await client.post(sync_url, json=payload) + resp.raise_for_status() + # tolerate empty body + if not resp.content: + return {} + # prefer JSON if possible, otherwise return text + try: + return resp.json() + except ValueError: + return {"raw": resp.text} + + async def _log_error(exc: BaseException) -> None: + # Optional: add your own logging here + print(f"[upsert_product] POST failed: {type(exc).__name__}: {exc}. Retrying in 5s... slug={slug} url={sync_url}") + + return await retry_until_success(_do_call, delay=5.0, on_error=_log_error) + + + + #async with httpx.AsyncClient(timeout=httpx.Timeout(20.0, connect=10.0)) as client: + # _d=_massage_payload(d) + # resp = await client.post(sync_url, json=_d) + # Raise for non-2xx + #resp.raise_for_status() + #data = resp.json() if resp.content else {} + #return data + +import asyncio +from typing import Any, Awaitable, Callable, Dict, Optional + +async def retry_until_success( + fn: Callable[[], Awaitable[Any]], + *, + delay: float = 5.0, + on_error: Optional[Callable[[BaseException], Awaitable[None]]] = None, +) -> Any: + """ + Repeatedly call the async no-arg function `fn` until it succeeds (returns without raising). + Waits `delay` seconds between attempts. Never gives up. + If provided, `on_error(exc)` is awaited after each failure. 
+ """ + attempt = 0 + while True: + try: + return await fn() + except asyncio.CancelledError: + # bubble up cancellations immediately + raise + except BaseException as exc: + attempt += 1 + if on_error is not None: + try: + await on_error(exc) + except Exception: + # don't let error handler failures prevent retrying + pass + # fallback stderr log if no on_error handler + if on_error is None: + print(f"[retry] attempt {attempt} failed: {type(exc).__name__}: {exc}") + await asyncio.sleep(delay) + + + +def _get(d, key, default=None): + v = d.get(key) + return default if v in (None, "", [], {}) else v + + +def _massage_payload(d: Dict[str, Any]) -> Dict[str, Any]: + """Mirror the DB-upsert massaging so the API sees the same structure/values.""" + slug = d.get("slug") + if not slug: + raise ValueError("product missing slug") + + # --- Top-level fields (use _get where DB upsert uses it) --- + out: Dict[str, Any] = { + "slug": slug, + "title": _get(d, "title"), + "image": _get(d, "image"), + "description_short": _get(d, "description_short"), + "description_html": _get(d, "description_html"), + "suma_href": _get(d, "suma_href"), + "brand": _get(d, "brand"), + "rrp": _get(d, "rrp"), + "rrp_currency": _get(d, "rrp_currency"), + "rrp_raw": _get(d, "rrp_raw"), + "price_per_unit": _get(d, "price_per_unit"), + "price_per_unit_currency": _get(d, "price_per_unit_currency"), + "price_per_unit_raw": _get(d, "price_per_unit_raw"), + "special_price": _get(d, "special_price"), + "special_price_currency": _get(d, "special_price_currency"), + "special_price_raw": _get(d, "special_price_raw"), + "regular_price": _get(d, "regular_price"), + "regular_price_currency": _get(d, "regular_price_currency"), + "regular_price_raw": _get(d, "regular_price_raw"), + "case_size_count": _get(d, "case_size_count"), + "case_size_item_qty": _get(d, "case_size_item_qty"), + "case_size_item_unit": _get(d, "case_size_item_unit"), + "case_size_raw": _get(d, "case_size_raw"), + "ean": d.get("ean") or 
d.get("barcode") or None, + "sku": d.get("sku"), + "unit_size": d.get("unit_size"), + "pack_size": d.get("pack_size"), + } + + # --- Sections: only dicts with title+html (like DB sync) --- + sections_in = d.get("sections") or [] + sections_out: List[Dict[str, Any]] = [] + for sec in sections_in: + if isinstance(sec, dict) and sec.get("title") and sec.get("html"): + sections_out.append({"title": sec["title"], "html": sec["html"]}) + out["sections"] = sections_out + + # --- Images: same 3 buckets used in DB sync --- + def _coerce_str_list(x): + if not x: + return [] + # accept list of strings or list of dicts with {"url": ...} + out_urls = [] + for item in x: + if isinstance(item, str): + if item: + out_urls.append(item) + elif isinstance(item, dict): + u = item.get("url") + if u: + out_urls.append(u) + return out_urls + + out["images"] = _coerce_str_list(d.get("images")) + out["embedded_image_urls"] = _coerce_str_list(d.get("embedded_image_urls")) + out["all_image_urls"] = _coerce_str_list(d.get("all_image_urls")) + + # --- Labels: strip (DB code trims) --- + labels_in = d.get("labels") or [] + out["labels"] = [str(x).strip() for x in labels_in if x] + + # --- Stickers: strip + lower (DB code lower-cases) --- + stickers_in = d.get("stickers") or [] + out["stickers"] = [str(x).strip().lower() for x in stickers_in if x] + + # --- Attributes: pass through the same dict sources the DB code reads --- + out["info_table"] = d.get("info_table") or {} + #out["oe_list_price"] = d.get("oe_list_price") or {} + + # --- Nutrition: allow dict or list of dicts, mirroring DB code --- + nutrition = d.get("nutrition") or [] + if isinstance(nutrition, dict): + out["nutrition"] = {str(k).strip(): (None if v is None else str(v)) for k, v in nutrition.items()} + elif isinstance(nutrition, list): + rows = [] + for row in nutrition: + if not isinstance(row, dict): + continue + key = str(row.get("key") or "").strip() + if not key: + continue + rows.append({ + "key": key, + "value": None if 
row.get("value") is None else str(row.get("value")), + "unit": None if row.get("unit") is None else str(row.get("unit")), + }) + out["nutrition"] = rows + else: + out["nutrition"] = [] + + # --- Allergens: accept str (→ contains=True) or dict --- + alls_in = d.get("allergens") or [] + alls_out = [] + for a in alls_in: + if isinstance(a, str): + nm, contains = a.strip(), True + elif isinstance(a, dict): + nm, contains = (a.get("name") or "").strip(), bool(a.get("contains", True)) + else: + continue + if nm: + alls_out.append({"name": nm, "contains": contains}) + out["allergens"] = alls_out + + out["images"]=[ + {"url": s.strip(), "kind": "gallery", "position": i} + for i, s in enumerate(out.get("images") or []) + if isinstance(s, str) and s.strip() + ] + [ + {"url": s.strip(), "kind": "embedded", "position": i} + for i, s in enumerate(out.get("embedded_image_urls") or []) + if isinstance(s, str) and s.strip() + ] + [ + {"url": s.strip(), "kind": "all", "position": i} + for i, s in enumerate(out.get("all_image_urls") or []) + if isinstance(s, str) and s.strip() + ] + out["labels"]= [{"name": s.strip()} for s in out["labels"] if isinstance(s, str) and s.strip()] + out["stickers"]= [{"name": s.strip()} for s in out["stickers"] if isinstance(s, str) and s.strip()] + out["attributes"] = build_attributes_list(d) + + + return out + + + + + +def build_attributes_list(d: Dict[str, Any]) -> List[Dict[str, Any]]: + attrs = [] + for src, prefix in [ + (d.get("info_table") or {}, "info_table"), + (d.get("oe_list_price") or {}, "oe_list_price"), + ]: + for k, v in src.items(): + key = f"{prefix}/{str(k).strip()}" + val = None if v is None else str(v) + attrs.append({"key": key, "value": val}) + # optional: dedupe by (key, value) + seen = set() + dedup = [] + for item in attrs: + t = (item["key"], item["value"]) + if t in seen: + continue + seen.add(t) + dedup.append(item) + return dedup diff --git a/market/scrape/persist_snapshot/__init__.py 
b/market/scrape/persist_snapshot/__init__.py new file mode 100644 index 0000000..43d7e24 --- /dev/null +++ b/market/scrape/persist_snapshot/__init__.py @@ -0,0 +1,7 @@ +from .log_product_result import log_product_result +from .upsert_product import upsert_product +from .save_nav import save_nav +from .capture_listing import capture_listing +from .save_link_reports import save_link_reports +from .save_subcategory_redirects import save_subcategory_redirects + diff --git a/market/scrape/persist_snapshot/_get.py b/market/scrape/persist_snapshot/_get.py new file mode 100644 index 0000000..dd316b6 --- /dev/null +++ b/market/scrape/persist_snapshot/_get.py @@ -0,0 +1,3 @@ +def _get(d, key, default=None): + v = d.get(key) + return default if v in (None, "", [], {}) else v diff --git a/market/scrape/persist_snapshot/capture_listing.py b/market/scrape/persist_snapshot/capture_listing.py new file mode 100644 index 0000000..c6948dc --- /dev/null +++ b/market/scrape/persist_snapshot/capture_listing.py @@ -0,0 +1,137 @@ +# at top of persist_snapshot.py: +from typing import Optional, List +from sqlalchemy.ext.asyncio import AsyncSession + +from typing import List, Optional, Tuple +from sqlalchemy.dialects.postgresql import insert as pg_insert +from datetime import datetime +from sqlalchemy import ( + select, update +) +from urllib.parse import urlparse +import re + +from models.market import ( + NavTop, + NavSub, + Listing, + ListingItem, +) +from shared.db.session import get_session + +# --- Models are unchanged, see original code --- + +# ---------------------- Helper fns called from scraper ------------------------ + + + +async def capture_listing( + #product_slugs: Set[str], + url: str, + items: List[str], + total_pages: int + ) -> None: + async with get_session() as session: + await _capture_listing( + session, + url, + items, + total_pages + ) + await session.commit() + + +async def _capture_listing( + session, + url: str, + items: List[str], + total_pages: int + ) -> None: 
+ top_id, sub_id = await _nav_ids_from_list_url(session, url) + await _save_listing(session, top_id, sub_id, items, total_pages) + +async def _save_listing(session: AsyncSession, top_id: int, sub_id: Optional[int], + items: List[str], total_pages: Optional[int]) -> None: + res = await session.execute( + select(Listing).where(Listing.top_id == top_id, Listing.sub_id == sub_id, Listing.deleted_at.is_(None)) + ) + listing = res.scalar_one_or_none() + if not listing: + listing = Listing(top_id=top_id, sub_id=sub_id, total_pages=total_pages) + session.add(listing) + await session.flush() + else: + listing.total_pages = total_pages + + # Normalize and deduplicate incoming slugs + seen: set[str] = set() + deduped: list[str] = [] + for s in items or []: + if s and isinstance(s, str) and s not in seen: + seen.add(s) + deduped.append(s) + + if not deduped: + return + + # Fetch existing slugs from the database + res = await session.execute( + select(ListingItem.slug) + .where(ListingItem.listing_id == listing.id, ListingItem.deleted_at.is_(None)) + ) + existing_slugs = set(res.scalars().all()) + + now = datetime.utcnow() + + # Slugs to delete (present in DB but not in the new data) + to_delete = existing_slugs - seen + if to_delete: + await session.execute( + update(ListingItem) + .where( + ListingItem.listing_id == listing.id, + ListingItem.slug.in_(to_delete), + ListingItem.deleted_at.is_(None) + ) + .values(deleted_at=now) + ) + + # Slugs to insert (new ones not in DB) + to_insert = seen - existing_slugs + if to_insert: + stmt = pg_insert(ListingItem).values( + [{"listing_id": listing.id, "slug": s} for s in to_insert] + ) + #.on_conflict_do_nothing( + # constraint="uq_listing_items_listing_slug" + #) + await session.execute(stmt) + +async def _nav_ids_from_list_url(session: AsyncSession, list_url: str) -> Tuple[int, Optional[int]]: + parts = [x for x in (urlparse(list_url).path or "").split("/") if x] + top_slug = parts[0].lower() if parts else "" + sub_slug = None + if 
len(parts) >= 2: + sub_slug = parts[1] + if sub_slug.lower().endswith((".html", ".htm")): + sub_slug = re.sub(r"\\.(html?|HTML?)$", "", sub_slug) + return await _get_nav_ids(session, top_slug, sub_slug) + + + +async def _get_nav_ids(session: AsyncSession, top_slug: str, sub_slug: Optional[str]) -> Tuple[int, Optional[int]]: + res_top = await session.execute(select(NavTop.id).where(NavTop.slug == top_slug, NavTop.deleted_at.is_(None))) + top_id = res_top.scalar_one_or_none() + if not top_id: + raise ValueError(f"NavTop not found for slug: {top_slug}") + + sub_id = None + if sub_slug: + res_sub = await session.execute( + select(NavSub.id).where(NavSub.slug == sub_slug, NavSub.top_id == top_id, NavSub.deleted_at.is_(None)) + ) + sub_id = res_sub.scalar_one_or_none() + if sub_id is None: + raise ValueError(f"NavSub not found for slug: {sub_slug} under top_id={top_id}") + + return top_id, sub_id diff --git a/market/scrape/persist_snapshot/log_product_result.py b/market/scrape/persist_snapshot/log_product_result.py new file mode 100644 index 0000000..88eb27b --- /dev/null +++ b/market/scrape/persist_snapshot/log_product_result.py @@ -0,0 +1,35 @@ +# at top of persist_snapshot.py: +from sqlalchemy.ext.asyncio import AsyncSession + +from typing import Dict +from models.market import ( + ProductLog, +) +from shared.db.session import get_session + + +async def log_product_result(ok: bool, payload: Dict) -> None: + async with get_session() as session: + await _log_product_result(session, ok, payload) + await session.commit() + + +async def _log_product_result(session: AsyncSession, ok: bool, payload: Dict) -> None: + session.add(ProductLog( + ok=ok, + slug=payload.get("slug"), + href_tried=payload.get("href_tried"), + error_type=payload.get("error_type"), + error_message=payload.get("error_message"), + http_status=payload.get("http_status"), + final_url=payload.get("final_url"), + transport_error=payload.get("transport_error"), + title=payload.get("title"), + 
has_description_html=payload.get("has_description_html"), + has_description_short=payload.get("has_description_short"), + sections_count=payload.get("sections_count"), + images_count=payload.get("images_count"), + embedded_images_count=payload.get("embedded_images_count"), + all_images_count=payload.get("all_images_count"), + )) + diff --git a/market/scrape/persist_snapshot/save_link_reports.py b/market/scrape/persist_snapshot/save_link_reports.py new file mode 100644 index 0000000..932b61a --- /dev/null +++ b/market/scrape/persist_snapshot/save_link_reports.py @@ -0,0 +1,29 @@ +# at top of persist_snapshot.py: +from typing import List + +from typing import Dict, List + +from models.market import ( + LinkError, + LinkExternal, +) +from shared.db.session import get_session + +# --- Models are unchanged, see original code --- + +# ---------------------- Helper fns called from scraper ------------------------ + + + +async def save_link_reports(link_errors: List[Dict], link_externals: List[Dict]) -> None: + async with get_session() as session: + for e in link_errors: + session.add(LinkError( + product_slug=e.get("product"), href=e.get("href"), text=e.get("text"), + top=e.get("top"), sub=e.get("sub"), target_slug=e.get("target_slug"), type=e.get("type"), + )) + for e in link_externals: + session.add(LinkExternal( + product_slug=e.get("product"), href=e.get("href"), text=e.get("text"), host=e.get("host"), + )) + await session.commit() diff --git a/market/scrape/persist_snapshot/save_nav.py b/market/scrape/persist_snapshot/save_nav.py new file mode 100644 index 0000000..5f73626 --- /dev/null +++ b/market/scrape/persist_snapshot/save_nav.py @@ -0,0 +1,110 @@ +# at top of persist_snapshot.py: +from datetime import datetime +from sqlalchemy import ( + select, tuple_ +) +from typing import Dict + +from models.market import ( + NavTop, + NavSub, +) +from shared.db.session import get_session + + + + +async def save_nav(nav: Dict) -> None: + async with get_session() as session: 
+ await _save_nav(session, nav) + await session.commit() + +async def _save_nav(session, nav: Dict, market_id=None) -> None: + print('===================SAVE NAV========================') + print(nav) + now = datetime.utcnow() + + incoming_top_slugs = set() + incoming_sub_keys = set() # (top_slug, sub_slug) + + # First pass: collect slugs + for label, data in (nav.get("cats") or {}).items(): + top_slug = (data or {}).get("slug") + if not top_slug: + continue + incoming_top_slugs.add(top_slug) + + for s in (data.get("subs") or []): + sub_slug = s.get("slug") + if sub_slug: + incoming_sub_keys.add((top_slug, sub_slug)) + + # Soft-delete stale NavSub entries + # This requires joining NavTop to access top_slug + subs_to_delete = await session.execute( + select(NavSub) + .join(NavTop, NavSub.top_id == NavTop.id) + .where( + NavSub.deleted_at.is_(None), + ~tuple_(NavTop.slug, NavSub.slug).in_(incoming_sub_keys) + ) + ) + for sub in subs_to_delete.scalars(): + sub.deleted_at = now + + # Soft-delete stale NavTop entries + tops_to_delete = await session.execute( + select(NavTop) + .where( + NavTop.deleted_at.is_(None), + ~NavTop.slug.in_(incoming_top_slugs) + ) + ) + for top in tops_to_delete.scalars(): + top.deleted_at = now + + await session.flush() + + # Upsert NavTop and NavSub + for label, data in (nav.get("cats") or {}).items(): + top_slug = (data or {}).get("slug") + if not top_slug: + continue + + res = await session.execute( + select(NavTop).where(NavTop.slug == top_slug) + ) + top = res.scalar_one_or_none() + + if top: + top.label = label + top.deleted_at = None + if market_id is not None and top.market_id is None: + top.market_id = market_id + else: + top = NavTop(label=label, slug=top_slug, market_id=market_id) + session.add(top) + + await session.flush() + + for s in (data.get("subs") or []): + sub_slug = s.get("slug") + if not sub_slug: + continue + sub_label = s.get("label") + sub_href = s.get("href") + + res_sub = await session.execute( + 
select(NavSub).where( + NavSub.slug == sub_slug, + NavSub.top_id == top.id + ) + ) + sub = res_sub.scalar_one_or_none() + if sub: + sub.label = sub_label + sub.href = sub_href + sub.deleted_at = None + else: + session.add(NavSub(top_id=top.id, label=sub_label, slug=sub_slug, href=sub_href)) + diff --git a/market/scrape/persist_snapshot/save_subcategory_redirects.py b/market/scrape/persist_snapshot/save_subcategory_redirects.py new file mode 100644 index 0000000..6ffdd7b --- /dev/null +++ b/market/scrape/persist_snapshot/save_subcategory_redirects.py @@ -0,0 +1,32 @@ +# at top of persist_snapshot.py: + +from typing import Dict +from datetime import datetime +from sqlalchemy import ( + update +) +from models.market import ( + SubcategoryRedirect, +) +from shared.db.session import get_session + +# --- Models are unchanged, see original code --- + +# ---------------------- Helper fns called from scraper ------------------------ + + +async def save_subcategory_redirects(mapping: Dict[str, str]) -> None: + async with get_session() as session: + await _save_subcategory_redirects(session, mapping) + await session.commit() + + +async def _save_subcategory_redirects(session, mapping: Dict[str, str]) -> None: + await session.execute(update(SubcategoryRedirect).where(SubcategoryRedirect.deleted_at.is_(None)).values(deleted_at=datetime.utcnow())) + for old, new in mapping.items(): + session.add(SubcategoryRedirect(old_path=old, new_path=new)) + + + + #for slug in items: + # product_slugs.add(slug) diff --git a/market/scrape/persist_snapshot/upsert_product.py b/market/scrape/persist_snapshot/upsert_product.py new file mode 100644 index 0000000..4ab1613 --- /dev/null +++ b/market/scrape/persist_snapshot/upsert_product.py @@ -0,0 +1,237 @@ +# at top of persist_snapshot.py: +from sqlalchemy.ext.asyncio import AsyncSession + +from typing import Dict +from datetime import datetime +from sqlalchemy import ( + func, select, update +) + +from models.market import ( + Product, + 
ProductImage, + ProductSection, + ProductLabel, + ProductSticker, + ProductAttribute, + ProductNutrition, + ProductAllergen +) +from shared.db.session import get_session + +from ._get import _get +from .log_product_result import _log_product_result + +# --- Models are unchanged, see original code --- + +# ---------------------- Helper fns called from scraper ------------------------ + + + + +async def _upsert_product(session: AsyncSession, d: Dict) -> Product: + slug = d.get("slug") + if not slug: + raise ValueError("product missing slug") + res = await session.execute(select(Product).where(Product.slug == slug, Product.deleted_at.is_(None))) + p = res.scalar_one_or_none() + if not p: + p = Product(slug=slug) + session.add(p) + + p.title = _get(d, "title") + p.image = _get(d, "image") + p.description_short = _get(d, "description_short") + p.description_html = _get(d, "description_html") + p.suma_href = _get(d, "suma_href") + p.brand = _get(d, "brand") + p.rrp = _get(d, "rrp") + p.rrp_currency = _get(d, "rrp_currency") + p.rrp_raw = _get(d, "rrp_raw") + p.price_per_unit = _get(d, "price_per_unit") + p.price_per_unit_currency = _get(d, "price_per_unit_currency") + p.price_per_unit_raw = _get(d, "price_per_unit_raw") + p.special_price = _get(d, "special_price") + p.special_price_currency = _get(d, "special_price_currency") + p.special_price_raw = _get(d, "special_price_raw") + p.regular_price = _get(d, "regular_price") + p.regular_price_currency = _get(d, "regular_price_currency") + p.regular_price_raw = _get(d, "regular_price_raw") + p.case_size_count = _get(d, "case_size_count") + p.case_size_item_qty = _get(d, "case_size_item_qty") + p.case_size_item_unit = _get(d, "case_size_item_unit") + p.case_size_raw = _get(d, "case_size_raw") + p.ean = d.get("ean") or d.get("barcode") or None + p.sku = d.get("sku") + p.unit_size = d.get("unit_size") + p.pack_size = d.get("pack_size") + p.updated_at = func.now() + + now = datetime.utcnow() + + + + # ProductSection sync + 
existing_sections = await session.execute(select(ProductSection).where(ProductSection.product_id == p.id, ProductSection.deleted_at.is_(None))) + existing_sections_set = {(s.title, s.html) for s in existing_sections.scalars()} + + new_sections_set = set() + for sec in d.get("sections") or []: + if isinstance(sec, dict) and sec.get("title") and sec.get("html"): + new_sections_set.add((sec["title"], sec["html"])) + if (sec["title"], sec["html"]) not in existing_sections_set: + session.add(ProductSection(product_id=p.id, title=sec["title"], html=sec["html"])) + + for s in existing_sections_set - new_sections_set: + await session.execute(update(ProductSection).where(ProductSection.product_id == p.id, ProductSection.title == s[0], ProductSection.html == s[1], ProductSection.deleted_at.is_(None)).values(deleted_at=now)) + + # ProductImage sync + existing_images = await session.execute(select(ProductImage).where(ProductImage.product_id == p.id, ProductImage.deleted_at.is_(None))) + existing_images_set = {(img.url, img.kind) for img in existing_images.scalars()} + + new_images_set = set() + for kind, urls in [ + ("gallery", d.get("images") or []), + ("embedded", d.get("embedded_image_urls") or []), + ("all", d.get("all_image_urls") or []), + ]: + for idx, url in enumerate(urls): + if url: + new_images_set.add((url, kind)) + if (url, kind) not in existing_images_set: + session.add(ProductImage(product_id=p.id, url=url, position=idx, kind=kind)) + + for img in existing_images_set - new_images_set: + await session.execute(update(ProductImage).where(ProductImage.product_id == p.id, ProductImage.url == img[0], ProductImage.kind == img[1], ProductImage.deleted_at.is_(None)).values(deleted_at=now)) + + # ProductLabel sync + existing_labels = await session.execute(select(ProductLabel).where(ProductLabel.product_id == p.id, ProductLabel.deleted_at.is_(None))) + existing_labels_set = {label.name.strip() for label in existing_labels.scalars()} + + new_labels = {str(name).strip() for 
name in (d.get("labels") or []) if name} + + for name in new_labels - existing_labels_set: + session.add(ProductLabel(product_id=p.id, name=name)) + + for name in existing_labels_set - new_labels: + await session.execute(update(ProductLabel).where(ProductLabel.product_id == p.id, ProductLabel.name == name, ProductLabel.deleted_at.is_(None)).values(deleted_at=now)) + + # ProductSticker sync + existing_stickers = await session.execute(select(ProductSticker).where(ProductSticker.product_id == p.id, ProductSticker.deleted_at.is_(None))) + existing_stickers_set = {sticker.name.strip() for sticker in existing_stickers.scalars()} + + new_stickers = {str(name).strip().lower() for name in (d.get("stickers") or []) if name} + + for name in new_stickers - existing_stickers_set: + session.add(ProductSticker(product_id=p.id, name=name)) + + for name in existing_stickers_set - new_stickers: + await session.execute(update(ProductSticker).where(ProductSticker.product_id == p.id, ProductSticker.name == name, ProductSticker.deleted_at.is_(None)).values(deleted_at=now)) + + # ProductAttribute sync + existing_attrs = await session.execute(select(ProductAttribute).where(ProductAttribute.product_id == p.id, ProductAttribute.deleted_at.is_(None))) + existing_attrs_set = {(a.key, a.value) for a in existing_attrs.scalars()} + + new_attrs_set = set() + for src, prefix in [(d.get("info_table") or {}, "info_table"), (d.get("oe_list_price") or {}, "oe_list_price")]: + for k, v in src.items(): + key = f"{prefix}/{str(k).strip()}" + val = None if v is None else str(v) + new_attrs_set.add((key, val)) + if (key, val) not in existing_attrs_set: + session.add(ProductAttribute(product_id=p.id, key=key, value=val)) + + for key, val in existing_attrs_set - new_attrs_set: + await session.execute(update(ProductAttribute).where(ProductAttribute.product_id == p.id, ProductAttribute.key == key, ProductAttribute.value == val, ProductAttribute.deleted_at.is_(None)).values(deleted_at=now)) + + # 
ProductNutrition sync + existing_nuts = await session.execute(select(ProductNutrition).where(ProductNutrition.product_id == p.id, ProductNutrition.deleted_at.is_(None))) + existing_nuts_set = {(n.key, n.value, n.unit) for n in existing_nuts.scalars()} + + new_nuts_set = set() + nutrition = d.get("nutrition") or [] + if isinstance(nutrition, dict): + for k, v in nutrition.items(): + key, val = str(k).strip(), str(v) if v is not None else None + new_nuts_set.add((key, val, None)) + if (key, val, None) not in existing_nuts_set: + session.add(ProductNutrition(product_id=p.id, key=key, value=val, unit=None)) + elif isinstance(nutrition, list): + for row in nutrition: + try: + key = str(row.get("key") or "").strip() + val = None if row.get("value") is None else str(row.get("value")) + unit = None if row.get("unit") is None else str(row.get("unit")) + if key: + new_nuts_set.add((key, val, unit)) + if (key, val, unit) not in existing_nuts_set: + session.add(ProductNutrition(product_id=p.id, key=key, value=val, unit=unit)) + except Exception: + continue + + for key, val, unit in existing_nuts_set - new_nuts_set: + await session.execute(update(ProductNutrition).where(ProductNutrition.product_id == p.id, ProductNutrition.key == key, ProductNutrition.value == val, ProductNutrition.unit == unit, ProductNutrition.deleted_at.is_(None)).values(deleted_at=now)) + + # ProductAllergen sync + existing_allergens = await session.execute(select(ProductAllergen).where(ProductAllergen.product_id == p.id, ProductAllergen.deleted_at.is_(None))) + existing_allergens_set = {(a.name, a.contains) for a in existing_allergens.scalars()} + + new_allergens_set = set() + for a in d.get("allergens") or []: + if isinstance(a, str): + nm, contains = a.strip(), True + elif isinstance(a, dict): + nm, contains = (a.get("name") or "").strip(), bool(a.get("contains", True)) + else: + continue + if nm: + new_allergens_set.add((nm, contains)) + if (nm, contains) not in existing_allergens_set: + 
session.add(ProductAllergen(product_id=p.id, name=nm, contains=contains)) + + for name, contains in existing_allergens_set - new_allergens_set: + await session.execute(update(ProductAllergen).where(ProductAllergen.product_id == p.id, ProductAllergen.name == name, ProductAllergen.contains == contains, ProductAllergen.deleted_at.is_(None)).values(deleted_at=now)) + + + + + await session.flush() + return p + +async def upsert_product( + slug, + href, + d, +): + async with get_session() as session: + try: + await _upsert_product(session, d) + await _log_product_result(session, ok=True, payload={ + "slug": slug, + "href_tried": href, + "title": d.get("title"), + "has_description_html": bool(d.get("description_html")), + "has_description_short": bool(d.get("description_short")), + "sections_count": len(d.get("sections") or []), + "images_count": len(d.get("images")), + "embedded_images_count": len(d.get("embedded_image_urls")), + "all_images_count": len(d.get("all_image_urls")), + }) + + except Exception as e: + print(f"[ERROR] Failed to upsert product '{d.get('slug')}'") + print(f" Title: {d}.get('title')") + print(f" URL: {d.get('suma_href')}") + print(f" Error type: {type(e).__name__}") + print(f" Error message: {str(e)}") + import traceback + traceback.print_exc() + await _log_product_result(session, ok=False, payload={ + "slug": d.get("slug"), + "href_tried": d.get("suma_href"), + "error_type": type(e).__name__, + "error_message": str(e), + "title": d.get("title"), + }) + raise + await session.commit() \ No newline at end of file diff --git a/market/scrape/product/__init__.py b/market/scrape/product/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/market/scrape/product/__init__.py @@ -0,0 +1 @@ + diff --git a/market/scrape/product/extractors/__init__.py b/market/scrape/product/extractors/__init__.py new file mode 100644 index 0000000..9e29637 --- /dev/null +++ b/market/scrape/product/extractors/__init__.py @@ -0,0 +1,13 @@ + +# Auto-import 
all extractor modules so they register themselves. +from .title import ex_title # noqa: F401 +from .images import ex_images # noqa: F401 +from .short_description import ex_short_description # noqa: F401 +from .description_sections import ex_description_sections # noqa: F401 +from .nutrition_ex import ex_nutrition # noqa: F401 +from .stickers import ex_stickers # noqa: F401 +from .labels import ex_labels # noqa: F401 +from .info_table import ex_info_table # noqa: F401 +from .oe_list_price import ex_oe_list_price # noqa: F401 +from .regular_price_fallback import ex_regular_price_fallback # noqa: F401 +from .breadcrumbs import ex_breadcrumbs # noqa: F401 diff --git a/market/scrape/product/extractors/breadcrumbs.py b/market/scrape/product/extractors/breadcrumbs.py new file mode 100644 index 0000000..6aadefa --- /dev/null +++ b/market/scrape/product/extractors/breadcrumbs.py @@ -0,0 +1,68 @@ + +from __future__ import annotations +from typing import Dict, List, Union +from urllib.parse import urlparse +from bs4 import BeautifulSoup +from shared.utils import normalize_text +from ..registry import extractor + +@extractor +def ex_breadcrumbs(soup: BeautifulSoup, url: str) -> Dict: + """ + Parse breadcrumbs to identify top and sub categories. 
+ """ + bc_ul = (soup.select_one(".breadcrumbs ul.items") + or soup.select_one("nav.breadcrumbs ul.items") + or soup.select_one("ul.items")) + if not bc_ul: + return {} + + crumbs = [] + for li in bc_ul.select("li.item"): + a = li.find("a") + if a: + title = normalize_text(a.get("title") or a.get_text()) + href = a.get("href") + else: + title = normalize_text(li.get_text()) + href = None + slug = None + if href: + try: + p = urlparse(href) + path = (p.path or "").strip("/") + slug = path.split("/")[-1] if path else None + except Exception: + slug = None + if slug: + crumbs.append({"title": title or None, "href": href or None, "slug": slug}) + + category_links = [c for c in crumbs if c.get("href")] + top = None + sub = None + for c in category_links: + t = (c.get("title") or "").lower() + s = (c.get("slug") or "").lower() + if t == "home" or s in ("", "home"): + continue + if top is None: + top = c + continue + if sub is None: + sub = c + break + + out: Dict[str, Union[str, List[Dict[str, str]]]] = { + "category_breadcrumbs": crumbs + } + if top: + out["category_top_title"] = top.get("title") + out["category_top_href"] = top.get("href") + out["category_top_slug"] = top.get("slug") + if sub: + out["category_sub_title"] = sub.get("title") + out["category_sub_href"] = sub.get("href") + out["category_sub_slug"] = sub.get("slug") + if top and sub: + out["category_path"] = f"{(top.get('slug') or '').strip()}/{(sub.get('slug') or '').strip()}" + return out diff --git a/market/scrape/product/extractors/description_sections.py b/market/scrape/product/extractors/description_sections.py new file mode 100644 index 0000000..719ed06 --- /dev/null +++ b/market/scrape/product/extractors/description_sections.py @@ -0,0 +1,43 @@ + +from __future__ import annotations +from typing import Dict, List +from bs4 import BeautifulSoup +from shared.utils import normalize_text +from ...html_utils import absolutize_fragment +from ..registry import extractor +from ..helpers.desc import ( + 
split_description_container, find_description_container, + pair_title_content_from_magento_tabs, scan_headings_for_sections, + additional_attributes_table, +) +from ..helpers.text import clean_title, is_blacklisted_heading + +@extractor +def ex_description_sections(soup: BeautifulSoup, url: str) -> Dict: + description_html = None + sections: List[Dict] = [] + desc_el = find_description_container(soup) + if desc_el: + open_html, sections_from_desc = split_description_container(desc_el) + description_html = open_html or None + sections.extend(sections_from_desc) + + existing = {s["title"].lower() for s in sections} + for t, html_fragment in (pair_title_content_from_magento_tabs(soup) or scan_headings_for_sections(soup)): + low = t.lower() + if "product description" in low or low == "description" or "details" in low: + if not description_html and html_fragment: + description_html = absolutize_fragment(html_fragment) + continue + if t.lower() not in existing and normalize_text(BeautifulSoup(html_fragment, "lxml").get_text()): + if not is_blacklisted_heading(t): + sections.append({"title": clean_title(t), "html": absolutize_fragment(html_fragment)}) + existing.add(t.lower()) + addl = additional_attributes_table(soup) + if addl and "additional information" not in existing and not is_blacklisted_heading("additional information"): + sections.append({"title": "Additional Information", "html": addl}) + out = {"sections": sections} + if description_html: + out["description_html"] = description_html + return out + diff --git a/market/scrape/product/extractors/images.py b/market/scrape/product/extractors/images.py new file mode 100644 index 0000000..b3d519d --- /dev/null +++ b/market/scrape/product/extractors/images.py @@ -0,0 +1,89 @@ +from __future__ import annotations +import json, re +from typing import Dict, List +from bs4 import BeautifulSoup +from ..registry import extractor +from ..helpers.html import abs_url, collect_img_candidates, dedup_by_filename + +@extractor +def 
ex_images(soup: BeautifulSoup, url: str) -> Dict: + images: List[str] = [] + debug = False # set True while debugging + + # 1) Magento init script (gallery) + scripts = soup.find_all("script", attrs={"type": "text/x-magento-init"}) + if debug: print(f"[ex_images] x-magento-init scripts: {len(scripts)}") + + for script in scripts: + # Use raw string as-is; no stripping/collapsing + text = script.string or script.get_text() or "" + if "mage/gallery/gallery" not in text: + continue + + # Correct (not over-escaped) patterns: + m = re.search(r'"data"\s*:\s*(\[[\s\S]*?\])', text) + if not m: + if debug: print("[ex_images] 'data' array not found in gallery block") + continue + + arr_txt = m.group(1) + added = False + try: + data = json.loads(arr_txt) + for entry in data: + u = abs_url(entry.get("full")) or abs_url(entry.get("img")) + if u: + images.append(u); added = True + except Exception as e: + if debug: print(f"[ex_images] json.loads failed: {e!r}; trying regex fallback") + # Fallback to simple key extraction + fulls = re.findall(r'"full"\s*:\s*"([^"]+)"', arr_txt) + imgs = re.findall(r'"img"\s*:\s*"([^"]+)"', arr_txt) if not fulls else [] + for u in (fulls or imgs): + u = abs_url(u) + if u: + images.append(u); added = True + + if added: + break # got what we need from the gallery block + + # 2) JSON-LD fallback + if not images: + for script in soup.find_all("script", attrs={"type": "application/ld+json"}): + raw = script.string or script.get_text() or "" + try: + data = json.loads(raw) + except Exception: + continue + + def add_from(val): + if isinstance(val, str): + u = abs_url(val); u and images.append(u) + elif isinstance(val, list): + for v in val: + if isinstance(v, str): + u = abs_url(v); u and images.append(u) + elif isinstance(v, dict) and "url" in v: + u = abs_url(v["url"]); u and images.append(u) + elif isinstance(val, dict) and "url" in val: + u = abs_url(val["url"]); u and images.append(u) + + if isinstance(data, dict) and "image" in data: + 
add_from(data["image"]) + if isinstance(data, list): + for item in data: + if isinstance(item, dict) and "image" in item: + add_from(item["image"]) + + # 3) Generic DOM scan fallback + if not images: + # consider broadening selectors if needed, e.g. '.fotorama__img' + for el in soup.select(".product.media img, .gallery-placeholder img, .fotorama__stage img"): + for cand in collect_img_candidates(el): + u = abs_url(cand) + if u: + images.append(u) + + images = dedup_by_filename(images) + if debug: print(f"[ex_images] found images: {images}") + return {"images": images, "image": images[0] if images else None} diff --git a/market/scrape/product/extractors/info_table.py b/market/scrape/product/extractors/info_table.py new file mode 100644 index 0000000..e1a8ef0 --- /dev/null +++ b/market/scrape/product/extractors/info_table.py @@ -0,0 +1,76 @@ + +from __future__ import annotations +from typing import Dict, Union +from bs4 import BeautifulSoup +from shared.utils import normalize_text +from ..registry import extractor +from ..helpers.price import parse_price, parse_case_size + +@extractor +def ex_info_table(soup: BeautifulSoup, url: str) -> Dict: + """ + Extracts: +
    ... rows of label/content ...
    + Produces: + info_table (raw map), brand, rrp[_raw|_currency], price_per_unit[_raw|_currency], + case_size_* fields + """ + container = soup.select_one(".product-page-info-table") or None + if not container: + return {} + rows_parent = container.select_one(".product-page-info-table-rows") or container + rows = rows_parent.select(".product-page-info-table-row") or [] + if not rows: + return {} + + raw_map: Dict[str, str] = {} + for r in rows: + lab_el = r.select_one(".product-page-info-table__label") + val_el = r.select_one(".product-page-info-table__content") + if not lab_el or not val_el: + continue + label = normalize_text(lab_el.get_text()) + value = normalize_text(val_el.get_text()) + if label: + raw_map[label] = value + + out: Dict[str, Union[str, float, int, Dict]] = {"info_table": raw_map} + + # Brand + brand = raw_map.get("Brand") or raw_map.get("Brand Name") or None + if brand: + out["brand"] = brand + + # RRP + rrp_val, rrp_cur, rrp_raw = parse_price(raw_map.get("RRP", "")) + if rrp_raw and (rrp_val is not None or rrp_cur is not None): + out["rrp_raw"] = rrp_raw + if rrp_val is not None: + out["rrp"] = rrp_val + if rrp_cur: + out["rrp_currency"] = rrp_cur + + # Price Per Unit + ppu_val, ppu_cur, ppu_raw = parse_price( + raw_map.get("Price Per Unit", "") or raw_map.get("Unit Price", "") + ) + if ppu_raw and (ppu_val is not None or ppu_cur is not None): + out["price_per_unit_raw"] = ppu_raw + if ppu_val is not None: + out["price_per_unit"] = ppu_val + if ppu_cur: + out["price_per_unit_currency"] = ppu_cur + + # Case Size + cs_text = raw_map.get("Case Size", "") or raw_map.get("Pack Size", "") + cs_count, cs_item_qty, cs_item_unit, cs_raw = parse_case_size(cs_text) + if cs_raw: + out["case_size_raw"] = cs_raw + if cs_count is not None: + out["case_size_count"] = cs_count + if cs_item_qty is not None: + out["case_size_item_qty"] = cs_item_qty + if cs_item_unit: + out["case_size_item_unit"] = cs_item_unit + + return out diff --git 
a/market/scrape/product/extractors/labels.py b/market/scrape/product/extractors/labels.py new file mode 100644 index 0000000..b4e4bd1 --- /dev/null +++ b/market/scrape/product/extractors/labels.py @@ -0,0 +1,41 @@ + +from __future__ import annotations +from typing import Dict, List +from bs4 import BeautifulSoup +from shared.utils import normalize_text +from ..registry import extractor + +@extractor +def ex_labels(soup: BeautifulSoup, url: str) -> Dict: + """ + From: +
      +
    • NEW
    • +
    + Returns "labels": lower-cased union of class hints and visible text. + """ + root = soup.select_one("ul.cdz-product-labels") + if not root: + return {} + items: List[str] = [] + texts: List[str] = [] + + for li in root.select("li.label-item"): + for c in (li.get("class") or []): + c = (c or "").strip() + if c and c.lower() != "label-item" and c not in items: + items.append(c) + txt = normalize_text(li.get_text()) + if txt and txt not in texts: + texts.append(txt) + + if not items and not texts: + return {} + union = [] + seen = set() + for s in items + [t.lower() for t in texts]: + key = (s or "").strip().lower() + if key and key not in seen: + seen.add(key) + union.append(key) + return {"labels": union} diff --git a/market/scrape/product/extractors/nutrition_ex.py b/market/scrape/product/extractors/nutrition_ex.py new file mode 100644 index 0000000..d39253d --- /dev/null +++ b/market/scrape/product/extractors/nutrition_ex.py @@ -0,0 +1,129 @@ +from __future__ import annotations +from typing import Dict, List, Optional, Tuple +import re +from bs4 import BeautifulSoup +from shared.utils import normalize_text +from ..registry import extractor +from ..helpers.desc import ( + split_description_container, find_description_container, + pair_title_content_from_magento_tabs, scan_headings_for_sections, +) + +# ----- value/unit parser ------------------------------------------------------ + +_NUM_UNIT_RE = re.compile( + r""" + ^\s* + (?P[-+]?\d{1,3}(?:[.,]\d{3})*(?:[.,]\d+)?|\d+(?:[.,]\d+)?) + \s* + (?P[a-zA-Z%µ/]+)? 
+ \s*$ + """, + re.X, +) + +def _parse_value_unit(s: str) -> Tuple[Optional[str], Optional[str]]: + if not s: + return None, None + s = re.sub(r"\s+", " ", s.strip()) + m = _NUM_UNIT_RE.match(s) + if not m: + return None, None + num = (m.group("num") or "").replace(",", "") + unit = m.group("unit") or None + if unit: + u = unit.lower() + if u in {"kcal", "kcal.", "kcalories", "kcalorie"}: + unit = "kcal" + elif u in {"kj", "kj.", "kilojoule", "kilojoules"}: + unit = "kJ" + return (num or None, unit) + +# ----- section finder --------------------------------------------------------- + +def _find_nutrition_section_html(soup: BeautifulSoup) -> Optional[str]: + """ + Return the HTML for the section whose title matches 'Nutritional Information'. + We look in the same places your description extractor does. + """ + # 1) Magento tabs + for t, html in (pair_title_content_from_magento_tabs(soup) or []): + if not t or not html: + continue + title = normalize_text(t).rstrip(":").lower() + if "nutritional information" in title: + return html + + # 2) Description container split into sections + desc_el = find_description_container(soup) + if desc_el: + _open_html, sections = split_description_container(desc_el) + for sec in sections or []: + title = normalize_text((sec.get("title") or "")).rstrip(":").lower() + if "nutritional information" in title: + return sec.get("html") or "" + + # 3) Fallback: generic heading scan + for t, html in (scan_headings_for_sections(soup) or []): + if not t or not html: + continue + title = normalize_text(t).rstrip(":").lower() + if "nutritional information" in title: + return html + + return None + +# ----- table parser ----------------------------------------------------------- + +def _extract_rows_from_table(root: BeautifulSoup) -> List[Dict[str, str]]: + out: List[Dict[str, str]] = [] + table = root.select_one("table") + if not table: + return out + + for tr in table.select("tr"): + th = tr.find("th") + tds = tr.find_all("td") + if th and tds: 
+ key = normalize_text(th.get_text(" ").strip()) + val_raw = normalize_text(tds[0].get_text(" ").strip()) + elif len(tds) >= 2: + key = normalize_text(tds[0].get_text(" ").strip()) + val_raw = normalize_text(tds[1].get_text(" ").strip()) + else: + continue + + if not key or not val_raw: + continue + + value, unit = _parse_value_unit(val_raw) + if value is None: # keep raw if not parseable + value, unit = val_raw, None + + out.append({"key": key, "value": value, "unit": unit}) + + # Deduplicate while preserving order + seen = set() + dedup: List[Dict[str, str]] = [] + for r in out: + t = (r["key"], r.get("value"), r.get("unit")) + if t in seen: + continue + seen.add(t) + dedup.append(r) + return dedup + +# ----- extractor -------------------------------------------------------------- + +@extractor +def ex_nutrition(soup: BeautifulSoup, url: str) -> Dict: + """ + Extract nutrition ONLY from the section titled 'Nutritional Information'. + Returns: {"nutrition": [{"key": "...", "value": "...", "unit": "..."}]} + """ + section_html = _find_nutrition_section_html(soup) + if not section_html: + return {"nutrition": []} + section_soup = BeautifulSoup(section_html, "lxml") + rows = _extract_rows_from_table(section_soup) + return {"nutrition": rows} diff --git a/market/scrape/product/extractors/oe_list_price.py b/market/scrape/product/extractors/oe_list_price.py new file mode 100644 index 0000000..7e790fa --- /dev/null +++ b/market/scrape/product/extractors/oe_list_price.py @@ -0,0 +1,56 @@ + +from __future__ import annotations +from typing import Dict, Union +from bs4 import BeautifulSoup +from ..registry import extractor +from ..helpers.price import parse_price + +@extractor +def ex_oe_list_price(soup: BeautifulSoup, url: str) -> Dict: + """ + Extract Magento "oe-list-price" block: +
    +
    £30.50
    +
    £23.63
    +
    + Produces: + oe_list_price: { rrp_raw, rrp, rrp_currency, special_raw, special, special_currency } + Also promotes special_* to top-level (special_price_*) if available. + """ + box = soup.select_one(".oe-list-price") + if not box: + return {} + out: Dict[str, Union[str, float, dict]] = {} + oe: Dict[str, Union[str, float]] = {} + + # RRP inside oe-list-price (if present) + rrp = box.select_one(".rrp-price") + if rrp: + txt = (rrp.select_one("span.price") or rrp.select_one("span") or rrp).get_text(strip=True) + val, cur, raw = parse_price(txt) + if raw: + oe["rrp_raw"] = raw + if val is not None: + oe["rrp"] = val + if cur: + oe["rrp_currency"] = cur + + # Special Price inside oe-list-price + sp = box.select_one(".oe-final-price, .special-price, .final-price") + if sp: + txt = (sp.select_one("span.price") or sp.select_one("span") or sp).get_text(strip=True) + val, cur, raw = parse_price(txt) + if raw: + oe["special_raw"] = raw + if val is not None: + oe["special"] = val + out["special_price"] = val + if cur: + oe["special_currency"] = cur + out["special_price_currency"] = cur + if raw: + out["special_price_raw"] = raw + + if oe: + out["oe_list_price"] = oe + return out diff --git a/market/scrape/product/extractors/regular_price_fallback.py b/market/scrape/product/extractors/regular_price_fallback.py new file mode 100644 index 0000000..2693a90 --- /dev/null +++ b/market/scrape/product/extractors/regular_price_fallback.py @@ -0,0 +1,33 @@ + +from __future__ import annotations +from typing import Dict, Union +from bs4 import BeautifulSoup +from ..registry import extractor +from ..helpers.price import parse_price + +@extractor +def ex_regular_price_fallback(soup: BeautifulSoup, url: str) -> Dict: + """ + Fallback extractor for legacy 'Regular Price' blocks outside oe-list-price: +
    £16.55
    + """ + rrp = soup.select_one("div.rrp-price") + if not rrp: + return {} + span = rrp.select_one("span.price") + price_text = span.get_text(strip=True) if span else rrp.get_text(" ", strip=True) + value, currency, raw = parse_price(price_text or "") + out: Dict[str, Union[str, float]] = {} + if raw: + out["regular_price_raw"] = raw + if value is not None: + out["regular_price"] = value + if currency: + out["regular_price_currency"] = currency + if value is not None: + out.setdefault("rrp", value) + if currency: + out.setdefault("rrp_currency", currency) + if raw: + out.setdefault("rrp_raw", raw) + return out diff --git a/market/scrape/product/extractors/short_description.py b/market/scrape/product/extractors/short_description.py new file mode 100644 index 0000000..fefa827 --- /dev/null +++ b/market/scrape/product/extractors/short_description.py @@ -0,0 +1,19 @@ + +from __future__ import annotations +from typing import Dict +from bs4 import BeautifulSoup +from shared.utils import normalize_text +from ..registry import extractor + +@extractor +def ex_short_description(soup: BeautifulSoup, url: str) -> Dict: + desc_short = None + for sel in [".product.attribute.description .value", ".product.attribute.overview .value", + "meta[name='description']", "meta[property='og:description']"]: + el = soup.select_one(sel) + if not el: + continue + desc_short = normalize_text(el.get_text() if el.name != "meta" else el.get("content")) + if desc_short: + break + return {"description_short": desc_short} diff --git a/market/scrape/product/extractors/stickers.py b/market/scrape/product/extractors/stickers.py new file mode 100644 index 0000000..6bd7444 --- /dev/null +++ b/market/scrape/product/extractors/stickers.py @@ -0,0 +1,30 @@ + +from __future__ import annotations +from typing import Dict, List +from bs4 import BeautifulSoup +from ..registry import extractor + +@extractor +def ex_stickers(soup: BeautifulSoup, url: str) -> Dict: + """ +
    + + ... +
    + """ + root = soup.select_one("div.stickers") + if not root: + return {"stickers": []} + stickers: List[str] = [] + seen = set() + for sp in root.select("span.sticker"): + classes = sp.get("class") or [] + extras = [c.strip() for c in classes if c and c.lower() != "sticker"] + data_name = (sp.get("data-sticker") or "").strip() + if data_name: + extras.append(data_name) + for x in extras: + if x and x not in seen: + seen.add(x) + stickers.append(x) + return {"stickers": stickers} diff --git a/market/scrape/product/extractors/title.py b/market/scrape/product/extractors/title.py new file mode 100644 index 0000000..f7677ab --- /dev/null +++ b/market/scrape/product/extractors/title.py @@ -0,0 +1,17 @@ + +from __future__ import annotations +from typing import Dict +from bs4 import BeautifulSoup +from shared.utils import normalize_text +from ..registry import extractor + +@extractor +def ex_title(soup: BeautifulSoup, url: str) -> Dict: + title = None + for sel in ["h1.page-title span", "h1.page-title", "h1.product-name", "meta[property='og:title']"]: + el = soup.select_one(sel) + if el: + title = normalize_text(el.get_text()) if el.name != "meta" else el.get("content") + if title: + break + return {"title": title or "Product"} diff --git a/market/scrape/product/helpers/desc.py b/market/scrape/product/helpers/desc.py new file mode 100644 index 0000000..c093362 --- /dev/null +++ b/market/scrape/product/helpers/desc.py @@ -0,0 +1,165 @@ + +from __future__ import annotations +from typing import Dict, List, Optional, Tuple +from bs4 import BeautifulSoup, NavigableString, Tag +from shared.utils import normalize_text +from ...html_utils import absolutize_fragment +from .text import clean_title, is_blacklisted_heading +from shared.config import config + + +def split_description_container(desc_el: Tag) -> Tuple[str, List[Dict]]: + """ + Extract sections from accordion blocks within the description container. 
+ + Looks for headings with class 'accordion-title' and pairs each with its + next element-sibling having class 'accordion-details'. Returns: + - open_html: the remaining description HTML with those accordion blocks removed + - sections: [{"title": ..., "html": ...}, ...] + """ + # Work on an isolated copy to avoid mutating the original DOM + frag = BeautifulSoup(desc_el.decode_contents(), "lxml") + + # Collect candidate (heading, details) pairs without mutating during iteration + pairs: List[Tuple[Tag, Tag]] = [] + for h in frag.select("#accordion .accordion-title, .accordion .accordion-title, h5.accordion-title, .accordion-title"): + if not isinstance(h, Tag): + continue + title = clean_title((h.get_text() or "").strip()) + if not title: + continue + + # Walk forward siblings until we hit an element; accept the first with 'accordion-details' + sib = h.next_sibling + details: Optional[Tag] = None + while sib is not None: + if isinstance(sib, Tag): + classes = sib.get("class") or [] + if "accordion-details" in classes: + details = sib + break + sib = sib.next_sibling + + if details is not None: + pairs.append((h, details)) + + sections: List[Dict] = [] + + # Extract sections, then remove nodes from frag + for h, details in pairs: + # Pull details HTML + html = details.decode_contents() + # Only keep non-empty (textual) content + if normalize_text(BeautifulSoup(html, "lxml").get_text()): + sections.append({ + "title": clean_title(h.get_text() or ""), + "html": absolutize_fragment(html), + }) + # Remove the matched nodes from the fragment copy + details.decompose() + h.decompose() + + # Whatever remains is the open description html + open_html = absolutize_fragment(str(frag)) if frag else "" + + return open_html, sections + +def pair_title_content_from_magento_tabs(soup: BeautifulSoup): + out = [] + container = soup.select_one(".product.info.detailed .product.data.items") or soup.select_one(".product.data.items") + if not container: + return out + titles = 
container.select(".data.item.title") + for t in titles: + title = normalize_text(t.get_text()) + if not title: + continue + content_id = t.get("aria-controls") or t.get("data-target") + content = soup.select_one(f"#{content_id}") if content_id else None + if content is None: + sib = t.find_next_sibling( + lambda x: isinstance(x, Tag) and "data" in x.get("class", []) and "item" in x.get("class", []) and "content" in x.get("class", []) + ) + content = sib + if content: + html = content.decode_contents() + if not is_blacklisted_heading(title): + out.append((title, absolutize_fragment(html))) + return out + +def scan_headings_for_sections(soup: BeautifulSoup): + out = [] + container = ( + soup.select_one(".product.info.detailed") + or soup.select_one(".product-info-main") + or soup.select_one(".page-main") + or soup + ) + heads = container.select("h2, h3, h4, h5, h6") + section_titles = (config().get("section-titles") or []) + for h in heads: + title = clean_title(h.get_text() or "") + if not title: + continue + low = title.lower() + if not any(k in low for k in section_titles + ["product description", "description", "details"]): + continue + parts: List[str] = [] + for sib in h.next_siblings: + if isinstance(sib, NavigableString): + parts.append(str(sib)) + continue + if isinstance(sib, Tag) and sib.name in ("h2", "h3", "h4", "h5", "h6"): + break + if isinstance(sib, Tag): + parts.append(str(sib)) + html = absolutize_fragment("".join(parts).strip()) + if html and not is_blacklisted_heading(title): + out.append((title, html)) + return out + +def additional_attributes_table(soup: BeautifulSoup) -> Optional[str]: + table = soup.select_one(".additional-attributes, table.additional-attributes, .product.attribute.additional table") + if not table: + return None + try: + rows = [] + for tr in table.select("tr"): + th = tr.find("th") or tr.find("td") + tds = tr.find_all("td") + key = normalize_text(th.get_text()) if th else None + val = normalize_text(tds[-1].get_text()) if 
tds else None + if key and val: + rows.append((key, val)) + if not rows: + return None + items = "\n".join( + [ + f"""
    +
    {key}
    +
    {val}
    +
    """ + for key, val in rows + ] + ) + return f"
    {items}
    " + except Exception: + return None + +def find_description_container(soup: BeautifulSoup) -> Optional[Tag]: + for sel in ["#description", "#tab-description", ".product.attribute.description .value", + ".product.attribute.overview .value", ".product.info.detailed .value"]: + el = soup.select_one(sel) + if el and normalize_text(el.get_text()): + return el + for h in soup.select("h2, h3, h4, h5, h6"): + txt = normalize_text(h.get_text()).lower() + if txt.startswith("product description") or txt == "description": + wrapper = soup.new_tag("div") + for sib in h.next_siblings: + if isinstance(sib, Tag) and sib.name in ("h2", "h3", "h4", "h5", "h6"): + break + wrapper.append(sib if isinstance(sib, Tag) else NavigableString(str(sib))) + if normalize_text(wrapper.get_text()): + return wrapper + return None diff --git a/market/scrape/product/helpers/html.py b/market/scrape/product/helpers/html.py new file mode 100644 index 0000000..6f355c5 --- /dev/null +++ b/market/scrape/product/helpers/html.py @@ -0,0 +1,53 @@ + +from __future__ import annotations +from typing import List, Optional +from urllib.parse import urljoin, urlparse +from shared.config import config + +def first_from_srcset(val: str) -> Optional[str]: + if not val: + return None + first = val.split(",")[0].strip() + parts = first.split() + return parts[0] if parts else first + +def abs_url(u: Optional[str]) -> Optional[str]: + if not u: + return None + return urljoin(config()["base_url"], u) if isinstance(u, str) and u.startswith("/") else u + +def collect_img_candidates(el) -> List[str]: + urls: List[str] = [] + if not el: + return urls + attrs = ["src", "data-src", "data-original", "data-zoom-image", "data-thumb", "content", "href"] + for a in attrs: + v = el.get(a) + if v: + urls.append(v) + for a in ["srcset", "data-srcset"]: + v = el.get(a) + if v: + first = first_from_srcset(v) + if first: + urls.append(first) + return urls + +def _filename_key(u: str) -> str: + p = urlparse(u) + path = p.path or "" + 
if path.endswith("/"): + path = path[:-1] + last = path.split("/")[-1] + return f"{p.netloc}:{last}".lower() + +def dedup_by_filename(urls: List[str]) -> List[str]: + seen = set() + out: List[str] = [] + for u in urls: + k = _filename_key(u) + if k in seen: + continue + seen.add(k) + out.append(u) + return out diff --git a/market/scrape/product/helpers/price.py b/market/scrape/product/helpers/price.py new file mode 100644 index 0000000..68aad1b --- /dev/null +++ b/market/scrape/product/helpers/price.py @@ -0,0 +1,42 @@ + +from __future__ import annotations +import re +from typing import Optional, Tuple + +def parse_price(text: str) -> Tuple[Optional[float], Optional[str], str]: + """ + Return (value, currency, raw) from a price-like string. + Supports symbols £, €, $; strips thousands commas. + """ + raw = (text or "").strip() + m = re.search(r'([£€$])?\s*([0-9][0-9.,]*)', raw) + if not m: + return None, None, raw + sym = m.group(1) or "" + num = m.group(2).replace(",", "") + try: + value = float(num) + except ValueError: + return None, None, raw + currency = {"£": "GBP", "€": "EUR", "$": "USD"}.get(sym, None) + return value, currency, raw + +def parse_case_size(text: str) -> Tuple[Optional[int], Optional[float], Optional[str], str]: + """ + Parse strings like "6 x 500g", "12x1L", "24 × 330 ml" + Returns (count, item_qty, item_unit, raw) + """ + raw = (text or "").strip() + if not raw: + return None, None, None, raw + t = re.sub(r"[×Xx]\s*", " x ", raw) + m = re.search(r"(\d+)\s*x\s*([0-9]*\.?[0-9]+)\s*([a-zA-Z]+)", t) + if not m: + return None, None, None, raw + count = int(m.group(1)) + try: + item_qty = float(m.group(2)) + except ValueError: + item_qty = None + unit = m.group(3) + return count, item_qty, unit, raw diff --git a/market/scrape/product/helpers/text.py b/market/scrape/product/helpers/text.py new file mode 100644 index 0000000..c8d6190 --- /dev/null +++ b/market/scrape/product/helpers/text.py @@ -0,0 +1,16 @@ + +from __future__ import annotations 
+import re +from shared.utils import normalize_text +from shared.config import config + +def clean_title(t: str) -> str: + t = normalize_text(t) + t = re.sub(r":\s*$", "", t) + return t + +def is_blacklisted_heading(title: str) -> bool: + """Return True if heading should be skipped based on config blacklist.""" + bl = (config().get("blacklist") or {}).get("product-details") or [] + low = (title or "").strip().lower() + return any(low == (s or "").strip().lower() for s in bl) diff --git a/market/scrape/product/product_core.py b/market/scrape/product/product_core.py new file mode 100644 index 0000000..9fbf5f2 --- /dev/null +++ b/market/scrape/product/product_core.py @@ -0,0 +1,48 @@ + +from __future__ import annotations +from typing import Dict, Tuple, Union +from shared.utils import soup_of +from ..http_client import fetch +from ..html_utils import absolutize_fragment +from bp.browse.services.slugs import product_slug_from_href +from .registry import REGISTRY, merge_missing +from . import extractors as _auto_register # noqa: F401 (import-time side effects) + +async def scrape_product_detail(product_url: str, include_html: bool = False) -> Union[dict, Tuple[dict, str]]: + """ + Returns a dict with fields (subset): + title, images, image, description_short, description_html, sections, + slug, suma_href, stickers, labels, info_table fields, oe_list_price, prices, + breadcrumbs-derived category_* fields. + If include_html=True, returns (data, html). 
+ """ + html = await fetch(product_url) + + + data: Dict[str, Union[str, float, int, list, dict, None]] = { + "suma_href": product_url, + "slug": product_slug_from_href(product_url), + } + + # Run all extractors + for fn in REGISTRY: + try: + soup = soup_of(html) + piece = fn(soup, product_url) or {} + except Exception: + # Tolerate site drift + continue + merge_missing(data, piece) + # If we found short description but not description_html, echo it + if not data.get("description_html") and data.get("description_short"): + data["description_html"] = absolutize_fragment(f"

    {data['description_short']}

    ") + + # Ensure "image" mirrors first of images if not set + if not data.get("image"): + imgs = data.get("images") or [] + if isinstance(imgs, list) and imgs: + data["image"] = imgs[0] + + if include_html: + return data, html + return data diff --git a/market/scrape/product/product_detail.py b/market/scrape/product/product_detail.py new file mode 100644 index 0000000..705d35b --- /dev/null +++ b/market/scrape/product/product_detail.py @@ -0,0 +1,4 @@ + +from __future__ import annotations +# Thin wrapper to keep import path stable +from .product_core import scrape_product_detail # re-export diff --git a/market/scrape/product/registry.py b/market/scrape/product/registry.py new file mode 100644 index 0000000..53cabc4 --- /dev/null +++ b/market/scrape/product/registry.py @@ -0,0 +1,20 @@ + +from __future__ import annotations +from typing import Callable, Dict, List, Union + +Extractor = Callable[[object, str], Dict[str, Union[str, float, int, list, dict, None]]] +REGISTRY: List[Extractor] = [] + +def extractor(fn: Extractor) -> Extractor: + """Decorator to register an extractor.""" + REGISTRY.append(fn) + return fn + +def merge_missing(dst: dict, src: dict) -> None: + """ + Merge src into dst. Only write keys that are missing or empty in dst. + "Empty" means None, "", [], {}. + """ + for k, v in (src or {}).items(): + if k not in dst or dst[k] in (None, "", [], {}): + dst[k] = v diff --git a/market/services/__init__.py b/market/services/__init__.py new file mode 100644 index 0000000..8453359 --- /dev/null +++ b/market/services/__init__.py @@ -0,0 +1,29 @@ +"""Market app service registration.""" +from __future__ import annotations + + +def register_domain_services() -> None: + """Register services for the market app. + + Market owns: Product, CartItem, MarketPlace, NavTop, NavSub, + Listing, ProductImage. + Standard deployment registers all 4 services as real DB impls + (shared DB). 
For composable deployments, swap non-owned services + with stubs from shared.services.stubs. + """ + from shared.services.registry import services + from shared.services.blog_impl import SqlBlogService + from shared.services.calendar_impl import SqlCalendarService + from shared.services.market_impl import SqlMarketService + from shared.services.cart_impl import SqlCartService + + services.market = SqlMarketService() + if not services.has("blog"): + services.blog = SqlBlogService() + if not services.has("calendar"): + services.calendar = SqlCalendarService() + if not services.has("cart"): + services.cart = SqlCartService() + if not services.has("federation"): + from shared.services.federation_impl import SqlFederationService + services.federation = SqlFederationService() diff --git a/market/templates/_types/all_markets/_card.html b/market/templates/_types/all_markets/_card.html new file mode 100644 index 0000000..3680e60 --- /dev/null +++ b/market/templates/_types/all_markets/_card.html @@ -0,0 +1,33 @@ +{# Card for a single market in the global listing #} +{% set pi = page_info.get(market.container_id, {}) %} +{% set page_slug = pi.get('slug', '') %} +{% set page_title = pi.get('title') %} +{% if page_slug %} + {% set market_href = market_url('/' ~ page_slug ~ '/' ~ market.slug ~ '/') %} +{% else %} + {% set market_href = '' %} +{% endif %} +
    +
    + {% if market_href %} + +

    {{ market.name }}

    +
    + {% else %} +

    {{ market.name }}

    + {% endif %} + + {% if market.description %} +

    {{ market.description }}

    + {% endif %} +
    + +
    + {% if page_title %} + + {{ page_title }} + + {% endif %} +
    +
    diff --git a/market/templates/_types/all_markets/_cards.html b/market/templates/_types/all_markets/_cards.html new file mode 100644 index 0000000..f3545c5 --- /dev/null +++ b/market/templates/_types/all_markets/_cards.html @@ -0,0 +1,18 @@ +{% for market in markets %} + {% include "_types/all_markets/_card.html" %} +{% endfor %} +{% if has_more %} + {# Infinite scroll sentinel #} + {% set next_url = url_for('all_markets.markets_fragment', page=page + 1)|host %} + +{% endif %} diff --git a/market/templates/_types/all_markets/_main_panel.html b/market/templates/_types/all_markets/_main_panel.html new file mode 100644 index 0000000..3599065 --- /dev/null +++ b/market/templates/_types/all_markets/_main_panel.html @@ -0,0 +1,12 @@ +{# Markets grid #} +{% if markets %} +
    + {% include "_types/all_markets/_cards.html" %} +
    +{% else %} +
    + +

    No markets available

    +
    +{% endif %} +
    diff --git a/market/templates/_types/all_markets/index.html b/market/templates/_types/all_markets/index.html new file mode 100644 index 0000000..2e7990d --- /dev/null +++ b/market/templates/_types/all_markets/index.html @@ -0,0 +1,7 @@ +{% extends '_types/root/_index.html' %} + +{% block meta %}{% endblock %} + +{% block content %} + {% include '_types/all_markets/_main_panel.html' %} +{% endblock %} diff --git a/market/templates/_types/browse/_admin.html b/market/templates/_types/browse/_admin.html new file mode 100644 index 0000000..e3cf3a2 --- /dev/null +++ b/market/templates/_types/browse/_admin.html @@ -0,0 +1,7 @@ +{% import "macros/links.html" as links %} +{% if g.rights.admin %} + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{admin_nav_item( + url_for('market.browse.product.admin', product_slug=slug) + )}} +{% endif %} \ No newline at end of file diff --git a/market/templates/_types/browse/_main_panel.html b/market/templates/_types/browse/_main_panel.html new file mode 100644 index 0000000..8640ce8 --- /dev/null +++ b/market/templates/_types/browse/_main_panel.html @@ -0,0 +1,5 @@ + +
    + {% include "_types/browse/_product_cards.html" %} +
    +
    diff --git a/market/templates/_types/browse/_oob_elements.html b/market/templates/_types/browse/_oob_elements.html new file mode 100644 index 0000000..dac5626 --- /dev/null +++ b/market/templates/_types/browse/_oob_elements.html @@ -0,0 +1,37 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{# Header with app title - includes cart-mini, navigation, and market-specific header #} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-header-child', 'market-header-child', '_types/market/header/_header.html')}} + + {% from '_types/post/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/market/mobile/_nav_panel.html' %} +{% endblock %} + +{# Filter container with child summary - from browse/index.html child_summary block #} +{% block filter %} + {% include "_types/browse/mobile/_filter/summary.html" %} +{% endblock %} + +{% block aside %} + {% include "_types/browse/desktop/menu.html" %} +{% endblock %} + + +{% block content %} + {% include "_types/browse/_main_panel.html" %} +{% endblock %} diff --git a/market/templates/_types/browse/_product_card.html b/market/templates/_types/browse/_product_card.html new file mode 100644 index 0000000..b923bc5 --- /dev/null +++ b/market/templates/_types/browse/_product_card.html @@ -0,0 +1,104 @@ +{% import 'macros/stickers.html' as stick %} +{% import '_types/product/prices.html' as prices %} +{% set prices_ns = namespace() %} +{{ prices.set_prices(p, prices_ns) }} +{% set item_href = url_for('market.browse.product.product_detail', product_slug=p.slug)|host %} + \ No newline at end of file diff --git 
a/market/templates/_types/browse/_product_cards.html b/market/templates/_types/browse/_product_cards.html new file mode 100644 index 0000000..cc8edb3 --- /dev/null +++ b/market/templates/_types/browse/_product_cards.html @@ -0,0 +1,107 @@ +{% for p in products %} + {% include "_types/browse/_product_card.html" %} +{% endfor %} +{% if page < total_pages|int %} + + + + + +{% else %} +
    End of results
    +{% endif %} + diff --git a/market/templates/_types/browse/desktop/_category_selector.html b/market/templates/_types/browse/desktop/_category_selector.html new file mode 100644 index 0000000..b3c68b6 --- /dev/null +++ b/market/templates/_types/browse/desktop/_category_selector.html @@ -0,0 +1,40 @@ +{# Categories #} + diff --git a/market/templates/_types/browse/desktop/_filter/brand.html b/market/templates/_types/browse/desktop/_filter/brand.html new file mode 100644 index 0000000..616e36e --- /dev/null +++ b/market/templates/_types/browse/desktop/_filter/brand.html @@ -0,0 +1,40 @@ +{# Brand filter (desktop, single-select) #} + +{# Brands #} + diff --git a/market/templates/_types/browse/desktop/_filter/labels.html b/market/templates/_types/browse/desktop/_filter/labels.html new file mode 100644 index 0000000..7a4a41e --- /dev/null +++ b/market/templates/_types/browse/desktop/_filter/labels.html @@ -0,0 +1,44 @@ + + + +{% import 'macros/stickers.html' as stick %} + + diff --git a/market/templates/_types/browse/desktop/_filter/like.html b/market/templates/_types/browse/desktop/_filter/like.html new file mode 100644 index 0000000..c830f98 --- /dev/null +++ b/market/templates/_types/browse/desktop/_filter/like.html @@ -0,0 +1,38 @@ +{% import 'macros/stickers.html' as stick %} + {% set qs = {"liked": None if liked else True, "page": None}|qs %} + {% set href = (current_local_href ~ qs)|host %} + + {% if liked %} + + {% else %} + + {% endif %} + + {{ liked_count }} + + diff --git a/market/templates/_types/browse/desktop/_filter/search.html b/market/templates/_types/browse/desktop/_filter/search.html new file mode 100644 index 0000000..2e0ea8e --- /dev/null +++ b/market/templates/_types/browse/desktop/_filter/search.html @@ -0,0 +1,44 @@ + +{% macro search(current_local_href,search, search_count, hx_select) -%} + + +
    + + +
    + {% if search %} + {{search_count}} + {% endif %} + {{zap_filter}} +
    +
    +{% endmacro %} \ No newline at end of file diff --git a/market/templates/_types/browse/desktop/_filter/sort.html b/market/templates/_types/browse/desktop/_filter/sort.html new file mode 100644 index 0000000..a4b5404 --- /dev/null +++ b/market/templates/_types/browse/desktop/_filter/sort.html @@ -0,0 +1,34 @@ + + + + +{% import 'macros/stickers.html' as stick %} +{% set sort_val = sort|default('az', true) %} + +
      + {% for key,label,icon in sort_options %} + {% set is_on = (sort_val == key) %} + {% set qs = {"sort": None, "page": None}|qs if is_on + else {"sort": key, "page": None}|qs %} + {% set href = (current_local_href ~ qs)|host %} + +
    • + + {{ stick.sticker(asset_url(icon), label, is_on) }} + +
    • + {% endfor %} +
    diff --git a/market/templates/_types/browse/desktop/_filter/stickers.html b/market/templates/_types/browse/desktop/_filter/stickers.html new file mode 100644 index 0000000..46fd22b --- /dev/null +++ b/market/templates/_types/browse/desktop/_filter/stickers.html @@ -0,0 +1,46 @@ + + + + +{% import 'macros/stickers.html' as stick %} + + diff --git a/market/templates/_types/browse/desktop/menu.html b/market/templates/_types/browse/desktop/menu.html new file mode 100644 index 0000000..893cf2d --- /dev/null +++ b/market/templates/_types/browse/desktop/menu.html @@ -0,0 +1,37 @@ + {% import '_types/browse/desktop/_filter/search.html' as s %} + {{ s.search(current_local_href, search, search_count, hx_select) }} + +
    +
    +
    {{ category_label }}
    +
    + {% include "_types/browse/desktop/_filter/sort.html" %} + + + {% if stickers %} + {% include "_types/browse/desktop/_filter/stickers.html" %} + {% endif %} + + + {% if subs_local and top_local_href %} + {% include "_types/browse/desktop/_category_selector.html" %} + {% endif %} + +
    + +
    + + {% include "_types/browse/desktop/_filter/brand.html" %} + +
    diff --git a/market/templates/_types/browse/index.html b/market/templates/_types/browse/index.html new file mode 100644 index 0000000..015e6b3 --- /dev/null +++ b/market/templates/_types/browse/index.html @@ -0,0 +1,13 @@ +{% extends '_types/market/index.html' %} + +{% block filter %} + {% include "_types/browse/mobile/_filter/summary.html" %} +{% endblock %} + +{% block aside %} + {% include "_types/browse/desktop/menu.html" %} +{% endblock %} + +{% block content %} + {% include "_types/browse/_main_panel.html" %} +{% endblock %} diff --git a/market/templates/_types/browse/like/button.html b/market/templates/_types/browse/like/button.html new file mode 100644 index 0000000..426bdc1 --- /dev/null +++ b/market/templates/_types/browse/like/button.html @@ -0,0 +1,20 @@ + diff --git a/market/templates/_types/browse/mobile/_filter/brand_ul.html b/market/templates/_types/browse/mobile/_filter/brand_ul.html new file mode 100644 index 0000000..ac15400 --- /dev/null +++ b/market/templates/_types/browse/mobile/_filter/brand_ul.html @@ -0,0 +1,40 @@ + \ No newline at end of file diff --git a/market/templates/_types/browse/mobile/_filter/index.html b/market/templates/_types/browse/mobile/_filter/index.html new file mode 100644 index 0000000..7c2a615 --- /dev/null +++ b/market/templates/_types/browse/mobile/_filter/index.html @@ -0,0 +1,30 @@ + + {% include "_types/browse/mobile/_filter/sort_ul.html" %} + {% if search or selected_labels|length or selected_stickers|length or selected_brands|length %} + {% set href = (current_local_href ~ {"clear_filters": True}|qs)|host %} + + {% endif %} +
    + {% include "_types/browse/mobile/_filter/like.html" %} + {% include "_types/browse/mobile/_filter/labels.html" %} +
    + {% include "_types/browse/mobile/_filter/stickers.html" %} + {% include "_types/browse/mobile/_filter/brand_ul.html" %} + diff --git a/market/templates/_types/browse/mobile/_filter/labels.html b/market/templates/_types/browse/mobile/_filter/labels.html new file mode 100644 index 0000000..3868d42 --- /dev/null +++ b/market/templates/_types/browse/mobile/_filter/labels.html @@ -0,0 +1,47 @@ +{% import 'macros/stickers.html' as stick %} + + +{# Optional: hide horizontal scrollbar on mobile while keeping scrollable #} + diff --git a/market/templates/_types/browse/mobile/_filter/like.html b/market/templates/_types/browse/mobile/_filter/like.html new file mode 100644 index 0000000..509ea92 --- /dev/null +++ b/market/templates/_types/browse/mobile/_filter/like.html @@ -0,0 +1,40 @@ +{% import 'macros/stickers.html' as stick %} + \ No newline at end of file diff --git a/market/templates/_types/browse/mobile/_filter/search.html b/market/templates/_types/browse/mobile/_filter/search.html new file mode 100644 index 0000000..0f39178 --- /dev/null +++ b/market/templates/_types/browse/mobile/_filter/search.html @@ -0,0 +1,40 @@ +{% macro search(current_local_href, search, search_count, hx_select) -%} + +
    + + +
    + {% if search %} + {{search_count}} + {% endif %} +
    +
    +{% endmacro %} \ No newline at end of file diff --git a/market/templates/_types/browse/mobile/_filter/sort_ul.html b/market/templates/_types/browse/mobile/_filter/sort_ul.html new file mode 100644 index 0000000..c02de19 --- /dev/null +++ b/market/templates/_types/browse/mobile/_filter/sort_ul.html @@ -0,0 +1,33 @@ + + + +{% import 'macros/stickers.html' as stick %} + + + \ No newline at end of file diff --git a/market/templates/_types/browse/mobile/_filter/stickers.html b/market/templates/_types/browse/mobile/_filter/stickers.html new file mode 100644 index 0000000..fed0927 --- /dev/null +++ b/market/templates/_types/browse/mobile/_filter/stickers.html @@ -0,0 +1,50 @@ +{% import 'macros/stickers.html' as stick %} + + + +{# Optional: hide horizontal scrollbar on mobile while keeping scrollable #} + diff --git a/market/templates/_types/browse/mobile/_filter/summary.html b/market/templates/_types/browse/mobile/_filter/summary.html new file mode 100644 index 0000000..07a86a1 --- /dev/null +++ b/market/templates/_types/browse/mobile/_filter/summary.html @@ -0,0 +1,120 @@ +{% import 'macros/stickers.html' as stick %} +{% import 'macros/layout.html' as layout %} + + + + +{% call layout.details('/filter', 'md:hidden') %} + {% call layout.filter_summary("filter-summary-mobile", current_local_href, search, search_count, hx_select) %} +
    + + +
    + {% if sort %} +
      + + {% for k,l,i in sort_options %} + {% if k == sort %} + {% set key = k %} + {% set label = l %} + {% set icon = i %} +
    • + {{ stick.sticker(asset_url(icon), label, True)}} +
    • + {% endif %} + {% endfor %} +
    + {% endif %} + {% if liked %} +
    + + {% if liked_count is not none %} +
    + {{ liked_count }} +
    + {% endif %} +
    + {% endif %} + {% if selected_labels and selected_labels|length %} +
      + {% for st in selected_labels %} + {% for s in labels %} + {% if st == s.name %} +
    • + {{ stick.sticker(asset_url('nav-labels/' + s.name + '.svg'), s.name, True)}} + {% if s.count is not none %} +
      + {{ s.count }} +
      + {% endif %} +
    • + {% endif %} + {% endfor %} + {% endfor %} +
    + {% endif %} + {% if selected_stickers and selected_stickers|length %} +
      + {% for st in selected_stickers %} + {% for s in stickers %} + {% if st == s.name %} +
    • + + {{ stick.sticker(asset_url('stickers/' + s.name + '.svg'), s.name, True)}} + {% if s.count is not none %} + + {{ s.count }} + + {% endif %} +
    • + {% endif %} + {% endfor %} + {% endfor %} +
    + {% endif %} +
    + + {% if selected_brands and selected_brands|length %} +
      + {% for b in selected_brands %} +
    • + {% set ns = namespace(count=0) %} + {% for brand in brands %} + {% if brand.name == b %} + {% set ns.count = brand.count %} + {% endif %} + {% endfor %} + {% if ns.count %} +
      {{ b }}
      +
      {{ ns.count }}
      + {% else %} +
      {{ b }}
      +
      0
      + {% endif %} +
    • + {% endfor %} + + +
    + {% endif %} +
    + {% endcall %} +
    + {% include "_types/browse/mobile/_filter/index.html" %} +
    +{% endcall %} diff --git a/market/templates/_types/market/_admin.html b/market/templates/_types/market/_admin.html new file mode 100644 index 0000000..0b09927 --- /dev/null +++ b/market/templates/_types/market/_admin.html @@ -0,0 +1,7 @@ +{% import "macros/links.html" as links %} +{% if g.rights.admin %} + {% from 'macros/admin_nav.html' import admin_nav_item %} + {{admin_nav_item( + url_for('market.admin.admin') + )}} +{% endif %} \ No newline at end of file diff --git a/market/templates/_types/market/_main_panel.html b/market/templates/_types/market/_main_panel.html new file mode 100644 index 0000000..87bb965 --- /dev/null +++ b/market/templates/_types/market/_main_panel.html @@ -0,0 +1,23 @@ +{# Main panel fragment for HTMX navigation - market landing page #} +
    + {% if post.custom_excerpt %} +
    + {{post.custom_excerpt|safe}} +
    + {% endif %} + {% if post.feature_image %} +
    + +
    + {% endif %} +
    + {% if post.html %} + {{post.html|safe}} + {% endif %} +
    +
    +
    diff --git a/market/templates/_types/market/_oob_elements.html b/market/templates/_types/market/_oob_elements.html new file mode 100644 index 0000000..075c166 --- /dev/null +++ b/market/templates/_types/market/_oob_elements.html @@ -0,0 +1,30 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{# Header with app title - includes cart-mini, navigation, and market-specific header #} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('post-header-child', 'market-header-child', '_types/market/header/_header.html')}} + + {% from '_types/post/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/market/mobile/_nav_panel.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/market/_main_panel.html" %} +{% endblock %} + + diff --git a/market/templates/_types/market/_title.html b/market/templates/_types/market/_title.html new file mode 100644 index 0000000..6e8024b --- /dev/null +++ b/market/templates/_types/market/_title.html @@ -0,0 +1,17 @@ +
    +
    + + {{ market_title }} +
    +
    +
    + {{top_slug or ''}} +
    + {% if sub_slug %} +
    + {{sub_slug}} +
    + {% endif %} +
    +
    \ No newline at end of file diff --git a/market/templates/_types/market/admin/_main_panel.html b/market/templates/_types/market/admin/_main_panel.html new file mode 100644 index 0000000..a354325 --- /dev/null +++ b/market/templates/_types/market/admin/_main_panel.html @@ -0,0 +1 @@ +market admin \ No newline at end of file diff --git a/market/templates/_types/market/admin/_nav.html b/market/templates/_types/market/admin/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/market/templates/_types/market/admin/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/market/templates/_types/market/admin/_oob_elements.html b/market/templates/_types/market/admin/_oob_elements.html new file mode 100644 index 0000000..9b306fd --- /dev/null +++ b/market/templates/_types/market/admin/_oob_elements.html @@ -0,0 +1,29 @@ +{% extends 'oob_elements.html' %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{# Header with app title - includes cart-mini, navigation, and market-specific header #} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('market-header-child', 'market-admin-header-child', '_types/market/admin/header/_header.html')}} + + {% from '_types/market/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% block mobile_menu %} + {% include '_types/market/admin/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include "_types/market/admin/_main_panel.html" %} +{% endblock %} + + diff --git a/market/templates/_types/market/admin/header/_header.html b/market/templates/_types/market/admin/header/_header.html new file mode 100644 index 0000000..950eefc --- /dev/null +++ b/market/templates/_types/market/admin/header/_header.html 
@@ -0,0 +1,11 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='market-admin-row', oob=oob) %} + {% call links.link(url_for('market.admin.admin'), hx_select_search) %} + {{ links.admin() }} + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/market/admin/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/market/templates/_types/market/admin/index.html b/market/templates/_types/market/admin/index.html new file mode 100644 index 0000000..4798c46 --- /dev/null +++ b/market/templates/_types/market/admin/index.html @@ -0,0 +1,19 @@ +{% extends '_types/market/index.html' %} + + +{% block market_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('market-admin-header-child', '_types/market/admin/header/_header.html') %} + {% block market_admin_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block _main_mobile_menu %} + {% include '_types/market/admin/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include '_types/market/admin/_main_panel.html' %} +{% endblock %} diff --git a/market/templates/_types/market/desktop/_nav.html b/market/templates/_types/market/desktop/_nav.html new file mode 100644 index 0000000..d4de6e6 --- /dev/null +++ b/market/templates/_types/market/desktop/_nav.html @@ -0,0 +1,38 @@ + + diff --git a/market/templates/_types/market/header/_header.html b/market/templates/_types/market/header/_header.html new file mode 100644 index 0000000..2d92286 --- /dev/null +++ b/market/templates/_types/market/header/_header.html @@ -0,0 +1,11 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='market-row', oob=oob) %} + {% call links.link(url_for('market.browse.home'), hx_select_search ) %} + {% include '_types/market/_title.html' %} + {% endcall %} + {% call links.desktop_nav() %} + {% include 
'_types/market/desktop/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/market/templates/_types/market/index.html b/market/templates/_types/market/index.html new file mode 100644 index 0000000..4da7f68 --- /dev/null +++ b/market/templates/_types/market/index.html @@ -0,0 +1,27 @@ +{% extends '_types/root/_index.html' %} + + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-header-child', '_types/post/header/_header.html') %} + {% call index_row('market-header-child', '_types/market/header/_header.html') %} + {% block market_header_child %} + {% endblock %} + {% endcall %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} + {% include '_types/market/mobile/_nav_panel.html' %} +{% endblock %} + + + +{% block aside %} +{# No aside on landing page #} +{% endblock %} + +{% block content %} + {% include "_types/market/_main_panel.html" %} +{% endblock %} diff --git a/market/templates/_types/market/markets_listing.html b/market/templates/_types/market/markets_listing.html new file mode 100644 index 0000000..ab8d2b0 --- /dev/null +++ b/market/templates/_types/market/markets_listing.html @@ -0,0 +1,23 @@ +{% extends '_types/root/_index.html' %} + +{% block content %} +
    +

    Markets

    + + {% if markets %} + + {% else %} +

    No markets available.

    + {% endif %} +
    +{% endblock %} diff --git a/market/templates/_types/market/mobile/_nav_panel.html b/market/templates/_types/market/mobile/_nav_panel.html new file mode 100644 index 0000000..65a9685 --- /dev/null +++ b/market/templates/_types/market/mobile/_nav_panel.html @@ -0,0 +1,110 @@ +{% from 'macros/glyphs.html' import opener %} +
    +
    + {% set all_href = (url_for('market.browse.browse_all') ~ qs)|host %} + {% set all_active = (category_label == 'All Products') %} + +
    + All +
    +
    + {% for cat, data in categories.items() %} +
    + + + {% set href = (url_for('market.browse.browse_top', top_slug=data.slug) ~ qs)|host %} + + +
    {{ cat }}
    +
    {{ data.count }}
    +
    + {{ opener('cat')}} + +
    + +
    + {% if data.subs %} + +
    + +
    + {% for sub in data.subs %} + {% set href = (url_for('market.browse.browse_sub', top_slug=data.slug, sub_slug=sub.slug) ~qs)|host%} + {% if top_slug==(data.slug | lower) and sub_slug == sub.slug %} + +
    {{ sub.html_label or sub.name }}
    +
    {{ sub.count }}
    +
    + {% endif %} + {% endfor %} + {% for sub in data.subs %} + {% if not (top_slug==(data.slug | lower) and sub_slug == sub.slug) %} + {% set href = (url_for('market.browse.browse_sub', top_slug=data.slug, sub_slug=sub.slug) ~ qs)|host%} + +
    {{ sub.name }}
    +
    {{ sub.count }}
    +
    + {% endif %} + {% endfor %} +
    +
    + {% else %} + {% set href = (url_for('market.browse.browse_top', top_slug=data.slug) ~ qs)|host%} + View all + {% endif %} +
    +
    + {% endfor %} + {% include '_types/market/_admin.html' %} +
    +
    diff --git a/market/templates/_types/market/mobile/menu.html b/market/templates/_types/market/mobile/menu.html new file mode 100644 index 0000000..145b551 --- /dev/null +++ b/market/templates/_types/market/mobile/menu.html @@ -0,0 +1,6 @@ +{% extends 'mobile/menu.html' %} +{% block menu %} + {% block mobile_menu %} + {% endblock %} + {% include '_types/market/mobile/_nav_panel.html' %} +{% endblock %} diff --git a/market/templates/_types/page_markets/_card.html b/market/templates/_types/page_markets/_card.html new file mode 100644 index 0000000..19e31af --- /dev/null +++ b/market/templates/_types/page_markets/_card.html @@ -0,0 +1,13 @@ +{# Card for a single market in a page-scoped listing #} +{% set market_href = market_url('/' ~ post.slug ~ '/' ~ market.slug ~ '/') %} + diff --git a/market/templates/_types/page_markets/_cards.html b/market/templates/_types/page_markets/_cards.html new file mode 100644 index 0000000..bcce864 --- /dev/null +++ b/market/templates/_types/page_markets/_cards.html @@ -0,0 +1,18 @@ +{% for market in markets %} + {% include "_types/page_markets/_card.html" %} +{% endfor %} +{% if has_more %} + {# Infinite scroll sentinel #} + {% set next_url = url_for('page_markets.markets_fragment', page=page + 1)|host %} + +{% endif %} diff --git a/market/templates/_types/page_markets/_main_panel.html b/market/templates/_types/page_markets/_main_panel.html new file mode 100644 index 0000000..c01cfb2 --- /dev/null +++ b/market/templates/_types/page_markets/_main_panel.html @@ -0,0 +1,12 @@ +{# Markets grid for a single page #} +{% if markets %} +
    + {% include "_types/page_markets/_cards.html" %} +
    +{% else %} +
    + +

    No markets for this page

    +
    +{% endif %} +
    diff --git a/market/templates/_types/page_markets/index.html b/market/templates/_types/page_markets/index.html new file mode 100644 index 0000000..23f99a1 --- /dev/null +++ b/market/templates/_types/page_markets/index.html @@ -0,0 +1,15 @@ +{% extends '_types/root/_index.html' %} + +{% block meta %}{% endblock %} + +{% block root_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('post-header-child', '_types/post/header/_header.html') %} + {% block post_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/page_markets/_main_panel.html' %} +{% endblock %} diff --git a/market/templates/_types/post/_nav.html b/market/templates/_types/post/_nav.html new file mode 100644 index 0000000..037bdcd --- /dev/null +++ b/market/templates/_types/post/_nav.html @@ -0,0 +1,15 @@ +{% import 'macros/links.html' as links %} + {# Widget-driven container nav — entries, calendars, markets #} + {% if container_nav_widgets %} +
    + {% include '_types/post/admin/_nav_entries.html' %} +
    + {% endif %} + + {# Admin link #} + {% if post and has_access('blog.post.admin.admin') %} + {% call links.link(url_for('blog.post.admin.admin', slug=post.slug), hx_select_search, select_colours, True, aclass=styles.nav_button) %} + + {% endcall %} + {% endif %} diff --git a/market/templates/_types/post/admin/_nav_entries.html b/market/templates/_types/post/admin/_nav_entries.html new file mode 100644 index 0000000..47290d4 --- /dev/null +++ b/market/templates/_types/post/admin/_nav_entries.html @@ -0,0 +1,50 @@ + + {# Left scroll arrow - desktop only #} + + + {# Widget-driven nav items container #} +
    +
    + {% for wdata in container_nav_widgets %} + {% with ctx=wdata.ctx %} + {% include wdata.widget.template with context %} + {% endwith %} + {% endfor %} +
    +
    + + + + {# Right scroll arrow - desktop only #} + diff --git a/market/templates/_types/post/header/_header.html b/market/templates/_types/post/header/_header.html new file mode 100644 index 0000000..6655eb5 --- /dev/null +++ b/market/templates/_types/post/header/_header.html @@ -0,0 +1,28 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='post-row', oob=oob) %} + {% call links.link(blog_url('/' + post.slug + '/'), hx_select_search ) %} + {% if post.feature_image %} + + {% endif %} + + {{ post.title | truncate(160, True, '…') }} + + {% endcall %} + {% call links.desktop_nav() %} + {% if page_cart_count is defined and page_cart_count > 0 %} + + + {{ page_cart_count }} + + {% endif %} + {% include '_types/post/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/market/templates/_types/product/_added.html b/market/templates/_types/product/_added.html new file mode 100644 index 0000000..251387a --- /dev/null +++ b/market/templates/_types/product/_added.html @@ -0,0 +1,17 @@ +{# HTMX response after add-to-cart: OOB-swap the mini cart + product buttons #} +{% import '_types/product/_cart.html' as _cart %} + +{# 1. Update mini cart directly — handler already has the cart data #} +{% from 'macros/cart_icon.html' import cart_icon %} +{{ cart_icon(count=cart | sum(attribute="quantity")) }} + +{# 2. Update add/remove buttons on the product #} +{{ _cart.add(d.slug, cart, oob='true') }} + +{# 3. 
Update cart item row if visible #} +{% from '_types/product/_cart.html' import cart_item with context %} +{% if item and item.quantity > 0 %} + {{ cart_item(oob='true') }} +{% elif item %} + {{ cart_item(oob='delete') }} +{% endif %} diff --git a/market/templates/_types/product/_cart.html b/market/templates/_types/product/_cart.html new file mode 100644 index 0000000..2c68284 --- /dev/null +++ b/market/templates/_types/product/_cart.html @@ -0,0 +1,250 @@ +{% macro add(slug, cart, oob='false') %} +{% set quantity = cart + | selectattr('product.slug', 'equalto', slug) + | sum(attribute='quantity') %} + +
    + + {% if not quantity %} +
    + + + + +
    + + {% else %} +
    + +
    + + + +
    + + + + + + + + + {{ quantity }} + + + + + + +
    + + + +
    +
    + {% endif %} +
    +{% endmacro %} + + + +{% macro cart_item(oob=False) %} + +{% set p = item.product %} +{% set unit_price = p.special_price or p.regular_price %} +
    +
    + {% if p.image %} + {{ p.title }} + {% else %} +
    + No image +
    'market', 'product', p.slug + {% endif %} +
    + + {# Details #} +
    +
    +
    +

    + {% set href=url_for('market.browse.product.product_detail', product_slug=p.slug) %} + + {{ p.title }} + +

    + + {% if p.brand %} +

    + {{ p.brand }} +

    + {% endif %} + + {% if item.is_deleted %} +

    + + This item is no longer available or price has changed +

    + {% endif %} +
    + + {# Unit price #} +
    + {% if unit_price %} + {% set symbol = "£" if p.regular_price_currency == "GBP" else p.regular_price_currency %} +

    + {{ symbol }}{{ "%.2f"|format(unit_price) }} +

    + {% if p.special_price and p.special_price != p.regular_price %} +

    + {{ symbol }}{{ "%.2f"|format(p.regular_price) }} +

    + {% endif %} + {% else %} +

    No price

    + {% endif %} +
    +
    + +
    +
    + Quantity +
    + + + +
    + + {{ item.quantity }} + +
    + + + +
    +
    + +
    + {% if unit_price %} + {% set line_total = unit_price * item.quantity %} + {% set symbol = "£" if p.regular_price_currency == "GBP" else p.regular_price_currency %} +

    + Line total: + {{ symbol }}{{ "%.2f"|format(line_total) }} +

    + {% endif %} +
    +
    +
    +
    + +{% endmacro %} diff --git a/market/templates/_types/product/_main_panel.html b/market/templates/_types/product/_main_panel.html new file mode 100644 index 0000000..cf8df31 --- /dev/null +++ b/market/templates/_types/product/_main_panel.html @@ -0,0 +1,131 @@ +{# Main panel fragment for HTMX navigation - product detail content #} +{% import 'macros/stickers.html' as stick %} +{% import '_types/product/prices.html' as prices %} +{% set prices_ns = namespace() %} +{{ prices.set_prices(d, prices_ns)}} + + {# Product detail grid from content block #} +
    +
    + {% if d.images and d.images|length > 0 %} +
    + {# --- like button overlay in top-right --- #} + {% if g.user %} +
    + {% set slug = d.slug %} + {% set liked = liked_by_current_user %} + {% include "_types/browse/like/button.html" %} +
    + {% endif %} + +
    +
    + {{ d.title }} + + {% for l in d.labels %} + + {% endfor %} +
    +
    + {{ d.brand }} +
    +
    + + {% if d.images|length > 1 %} + + + {% endif %} +
    + +
    +
    + {% for u in d.images %} + + + {% endfor %} +
    +
    + {% else %} +
    + {# Even if no image, still render the like button in the corner for consistency #} + {% if g.user %} +
    + {% set slug = d.slug %} + {% set liked = liked_by_current_user %} + {% include "_types/browse/like/button.html" %} +
    + {% endif %} + + No image +
    + {% endif %} + +
    + {% for s in d.stickers %} + {{ stick.sticker(asset_url('stickers/' + s + '.svg'), s, True, size=40) }} + {% endfor %} +
    +
    + +
    + {# Optional extras shown quietly #} +
    + {% if d.price_per_unit or d.price_per_unit_raw %} +
    Unit price: {{ prices.price_str(d.price_per_unit, d.price_per_unit_raw, d.price_per_unit_currency) }}
    + {% endif %} + {% if d.case_size_raw %} +
    Case size: {{ d.case_size_raw }}
    + {% endif %} + +
    + + {% if d.description_short or d.description_html %} +
    + {% if d.description_short %} +

    {{ d.description_short }}

    + {% endif %} + {% if d.description_html %} +
    + {{ d.description_html | safe }} +
    + {% endif %} +
    + {% endif %} + + {% if d.sections and d.sections|length %} +
    + {% for sec in d.sections %} +
    + + {{ sec.title }} + + +
    + {{ sec.html | safe }} +
    +
    + {% endfor %} +
    + {% endif %} +
    + +
    +
    diff --git a/market/templates/_types/product/_meta.html b/market/templates/_types/product/_meta.html new file mode 100644 index 0000000..aebb684 --- /dev/null +++ b/market/templates/_types/product/_meta.html @@ -0,0 +1,106 @@ +{# --- social/meta_product.html --- #} +{# Context expected: + site, d (Product), request +#} + +{# Visibility → robots: index unless soft-deleted #} +{% set robots_here = 'noindex,nofollow' if d.deleted_at else 'index,follow' %} + +{# Compute canonical #} +{% set _site_url = site().url.rstrip('/') if site and site().url else '' %} +{% set _product_path = request.path if request else ('/products/' ~ (d.slug or '')) %} +{% set canonical = _site_url ~ _product_path if _site_url else (request.url if request else None) %} + +{# Include common base (charset, viewport, robots default, RSS, Org/WebSite JSON-LD) #} +{% set robots_override = robots_here %} +{% include 'social/meta_base.html' %} + +{# ---- Titles / descriptions ---- #} +{% set base_product_title = d.title or base_title %} +{% set og_title = base_product_title %} +{% set tw_title = base_product_title %} + +{# Description: prefer short, then HTML stripped #} +{% set desc_source = d.description_short + or (d.description_html|striptags if d.description_html else '') %} +{% set description = (desc_source|trim|replace('\n',' ')|replace('\r',' ')|striptags)|truncate(160, True, '…') %} + +{# ---- Image priority: product image, then first gallery image, then site default ---- #} +{% set image_url = d.image + or ((d.images|first).url if d.images and (d.images|first).url else None) + or (site().default_image if site and site().default_image else None) %} + +{# ---- Price / offer helpers ---- #} +{% set price = d.special_price or d.regular_price or d.rrp %} +{% set price_currency = d.special_price_currency or d.regular_price_currency or d.rrp_currency %} + +{# ---- Basic meta ---- #} +{{ base_product_title }} + +{% if canonical %}{% endif %} + +{# ---- Open Graph ---- #} + + + + +{% if 
canonical %}{% endif %} +{% if image_url %}{% endif %} + +{# Optional product OG price tags #} +{% if price and price_currency %} + + +{% endif %} +{% if d.brand %} + +{% endif %} +{% if d.sku %} + +{% endif %} + +{# ---- Twitter ---- #} + +{% if site and site().twitter_site %}{% endif %} + + +{% if image_url %}{% endif %} + +{# ---- JSON-LD Product ---- #} +{% set jsonld = { + "@context": "https://schema.org", + "@type": "Product", + "name": d.title, + "image": image_url, + "description": description, + "sku": d.sku, + "brand": d.brand, + "url": canonical +} %} + +{# Brand as proper object if present #} +{% if d.brand %} + {% set jsonld = jsonld | combine({ + "brand": { + "@type": "Brand", + "name": d.brand + } + }) %} +{% endif %} + +{# Offers if price available #} +{% if price and price_currency %} + {% set jsonld = jsonld | combine({ + "offers": { + "@type": "Offer", + "price": price, + "priceCurrency": price_currency, + "url": canonical, + "availability": "https://schema.org/InStock" + } + }) %} +{% endif %} + + diff --git a/market/templates/_types/product/_oob_elements.html b/market/templates/_types/product/_oob_elements.html new file mode 100644 index 0000000..589d369 --- /dev/null +++ b/market/templates/_types/product/_oob_elements.html @@ -0,0 +1,49 @@ +{% extends 'oob_elements.html' %} +{# OOB elements for HTMX navigation - product extends browse so use similar structure #} +{% import 'macros/layout.html' as layout %} +{% import 'macros/stickers.html' as stick %} +{% import '_types/product/prices.html' as prices %} +{% set prices_ns = namespace() %} +{{ prices.set_prices(d, prices_ns)}} + +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + + +{% block oobs %} + {% from '_types/market/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} + + {% from 
'_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('market-header-child', 'product-header-child', '_types/product/header/_header.html')}} + +{% endblock %} + + + +{% block mobile_menu %} + {% include '_types/market/mobile/_nav_panel.html' %} + {% include '_types/browse/_admin.html' %} +{% endblock %} + +{% block filter %} + {% call layout.details() %} + {% call layout.summary('blog-child-header') %} + {% endcall %} + {% call layout.menu('blog-child-menu') %} + {% endcall %} + {% endcall %} + + {% call layout.details() %} + {% call layout.summary('product-child-header') %} + {% endcall %} + {% call layout.menu('item-child-menu') %} + {% endcall %} + {% endcall %} +{% endblock %} + +{% block content %} + {% include '_types/product/_main_panel.html' %} +{% endblock %} diff --git a/market/templates/_types/product/_prices.html b/market/templates/_types/product/_prices.html new file mode 100644 index 0000000..e56339f --- /dev/null +++ b/market/templates/_types/product/_prices.html @@ -0,0 +1,33 @@ +{% import '_types/product/_cart.html' as _cart %} + {# ---- Price block ---- #} + {% import '_types/product/prices.html' as prices %} + {% set prices_ns = namespace() %} + {{ prices.set_prices(d, prices_ns)}} + +
    + {{ _cart.add(d.slug, cart)}} + + {% if prices_ns.sp_val %} +
    + Special price +
    +
    + {{ prices.price_str(prices_ns.sp_val, prices_ns.sp_raw, prices_ns.sp_cur) }} +
    + {% if prices_ns.sp_val and prices_ns.rp_val %} +
    + {{ prices.price_str(prices_ns.rp_val, prices_ns.rp_raw, prices_ns.rp_cur) }} +
    + {% endif %} + {% elif prices_ns.rp_val %} + +
    + {{ prices.price_str(prices_ns.rp_val, prices_ns.rp_raw, prices_ns.rp_cur) }} +
    + {% endif %} + {{ prices.rrp(prices_ns) }} + +
    + diff --git a/market/templates/_types/product/_title.html b/market/templates/_types/product/_title.html new file mode 100644 index 0000000..0b3be43 --- /dev/null +++ b/market/templates/_types/product/_title.html @@ -0,0 +1,2 @@ + +
    {{ d.title }}
    diff --git a/market/templates/_types/product/admin/_nav.html b/market/templates/_types/product/admin/_nav.html new file mode 100644 index 0000000..f5c504d --- /dev/null +++ b/market/templates/_types/product/admin/_nav.html @@ -0,0 +1,2 @@ +{% from 'macros/admin_nav.html' import placeholder_nav %} +{{ placeholder_nav() }} diff --git a/market/templates/_types/product/admin/_oob_elements.html b/market/templates/_types/product/admin/_oob_elements.html new file mode 100644 index 0000000..84acac6 --- /dev/null +++ b/market/templates/_types/product/admin/_oob_elements.html @@ -0,0 +1,40 @@ +{% extends 'oob_elements.html' %} + + +{# OOB elements for HTMX navigation - all elements that need updating #} +{# Import shared OOB macros #} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header('product-header-child', 'product-admin-header-child', '_types/product/admin/header/_header.html')}} + + {% from '_types/product/header/_header.html' import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{% from '_types/root/_n/macros.html' import header with context %} +{% call header(id='product-header-child', oob=True) %} + {% call header() %} + {% from '_types/product/admin/header/_header.html' import header_row with context %} + {{header_row()}} +
    + +
    + {% endcall %} +{% endcall %} + + +{% block mobile_menu %} + {% include '_types/product/admin/_nav.html' %} +{% endblock %} + + +{% block content %} + {% include '_types/product/_main_panel.html' %} +{% endblock %} diff --git a/market/templates/_types/product/admin/header/_header.html b/market/templates/_types/product/admin/header/_header.html new file mode 100644 index 0000000..eacdf7d --- /dev/null +++ b/market/templates/_types/product/admin/header/_header.html @@ -0,0 +1,11 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='product-admin-row', oob=oob) %} + {% call links.link(url_for('market.browse.product.admin', product_slug=d.slug), hx_select_search ) %} + admin!! + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/product/admin/_nav.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} \ No newline at end of file diff --git a/market/templates/_types/product/admin/index.html b/market/templates/_types/product/admin/index.html new file mode 100644 index 0000000..d1cb714 --- /dev/null +++ b/market/templates/_types/product/admin/index.html @@ -0,0 +1,39 @@ +{% extends '_types/product/index.html' %} + +{% import 'macros/layout.html' as layout %} + +{% block product_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('market-header-child', '_types/product/admin/header/_header.html') %} + {% block product_admin_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + + + +{% block ___app_title %} + {% import 'macros/links.html' as links %} + {% call links.menu_row() %} + {% call links.link(url_for('market.browse.product.admin', product_slug=slug), hx_select_search) %} + {{ links.admin() }} + {% endcall %} + {% call links.desktop_nav() %} + {% include '_types/product/admin/_nav.html' %} + {% endcall %} + {% endcall %} +{% endblock %} + + + +{% block _main_mobile_menu %} + {% include '_types/product/admin/_nav.html' %} 
+{% endblock %} + +{% block aside %} +{% endblock %} + + +{% block content %} +{% include '_types/product/_main_panel.html' %} +{% endblock %} diff --git a/market/templates/_types/product/header/_header.html b/market/templates/_types/product/header/_header.html new file mode 100644 index 0000000..6608fce --- /dev/null +++ b/market/templates/_types/product/header/_header.html @@ -0,0 +1,15 @@ +{% import 'macros/links.html' as links %} +{% macro header_row(oob=False) %} + {% call links.menu_row(id='product-row', oob=oob) %} + {% call links.link(url_for('market.browse.product.product_detail', product_slug=d.slug), hx_select_search ) %} + {% include '_types/product/_title.html' %} + {% endcall %} + {% include '_types/product/_prices.html' %} + {% call links.desktop_nav() %} + {% include '_types/browse/_admin.html' %} + {% endcall %} + {% endcall %} +{% endmacro %} + + + diff --git a/market/templates/_types/product/index.html b/market/templates/_types/product/index.html new file mode 100644 index 0000000..31ccd88 --- /dev/null +++ b/market/templates/_types/product/index.html @@ -0,0 +1,61 @@ +{% extends '_types/browse/index.html' %} + +{% block meta %} + {% include '_types/product/_meta.html' %} +{% endblock %} + + +{% import 'macros/stickers.html' as stick %} +{% import '_types/product/prices.html' as prices %} +{% set prices_ns = namespace() %} +{{ prices.set_prices(d, prices_ns)}} + + + +{% block market_header_child %} + {% from '_types/root/_n/macros.html' import index_row with context %} + {% call index_row('market-header-child', '_types/product/header/_header.html') %} + {% block product_header_child %} + {% endblock %} + {% endcall %} +{% endblock %} + + +{% block _main_mobile_menu %} + {% include '_types/browse/_admin.html' %} +{% endblock %} + + + +{% block filter %} + +{% call layout.details() %} + {% call layout.summary('blog-child-header') %} + {% block blog_child_summary %} + {% endblock %} + {% endcall %} + {% call layout.menu('blog-child-menu') %} + {% 
block post_child_menu %} + {% endblock %} + {% endcall %} + {% endcall %} + + {% call layout.details() %} + {% call layout.summary('product-child-header') %} + {% block item_child_summary %} + {% endblock %} + {% endcall %} + {% call layout.menu('item-child-menu') %} + {% block item_child_menu %} + {% endblock %} + {% endcall %} + {% endcall %} + +{% endblock %} + +{% block aside %} +{% endblock %} + +{% block content %} + {% include '_types/product/_main_panel.html' %} +{% endblock %} diff --git a/market/templates/_types/product/prices.html b/market/templates/_types/product/prices.html new file mode 100644 index 0000000..be9cc4c --- /dev/null +++ b/market/templates/_types/product/prices.html @@ -0,0 +1,66 @@ +{# ---- Price formatting helpers ---- #} +{% set _sym = {'GBP':'£','EUR':'€','USD':'$'} %} +{% macro price_str(val, raw, cur) -%} + {%- if raw -%} + {{ raw }} + {%- elif val is number -%} + {{ (_sym.get(cur) or '') ~ ('%.2f'|format(val)) }} + {%- else -%} + {{ val or '' }} + {%- endif -%} +{%- endmacro %} + + +{% macro set_prices(item, ns) -%} + +{% set ns.sp_val = item.special_price or (item.oe_list_price and item.oe_list_price.special) %} +{% set ns.sp_raw = item.special_price_raw or (item.oe_list_price and item.oe_list_price.special_raw) %} +{% set ns.sp_cur = item.special_price_currency or (item.oe_list_price and item.oe_list_price.special_currency) %} + +{% set ns.rp_val = item.regular_price or item.rrp or (item.oe_list_price and item.oe_list_price.rrp) %} +{% set ns.rp_raw = item.regular_price_raw or item.rrp_raw or (item.oe_list_price and item.oe_list_price.rrp_raw) %} +{% set ns.rp_cur = item.regular_price_currency or item.rrp_currency or (item.oe_list_price and item.oe_list_price.rrp_currency) %} + +{% set ns.case_size_count = (item.case_size_count or 1) %} +{% set ns.rrp = item.rrp_raw[0] ~ "%.2f"|format(item.rrp * (ns.case_size_count)) %} +{% set ns.rrp_raw = item.rrp_raw %} + +{%- endmacro %} + + +{% macro rrp(ns) -%} + {% if ns.rrp %} +
    + rrp: + + {{ ns.rrp }} + +
    + {% endif %} +{%- endmacro %} + + +{% macro card_price(item) %} + + +{# price block unchanged #} + {% set _sym = {'GBP':'£','EUR':'€','USD':'$'} %} + {% set sp_val = item.special_price or (item.oe_list_price and item.oe_list_price.special) %} + {% set sp_raw = item.special_price_raw or (item.oe_list_price and item.oe_list_price.special_raw) %} + {% set sp_cur = item.special_price_currency or (item.oe_list_price and item.oe_list_price.special_currency) %} + {% set rp_val = item.regular_price or item.rrp or (item.oe_list_price and item.oe_list_price.rrp) %} + {% set rp_raw = item.regular_price_raw or item.rrp_raw or (item.oe_list_price and item.oe_list_price.rrp_raw) %} + {% set rp_cur = item.regular_price_currency or item.rrp_currency or (item.oe_list_price and item.oe_list_price.rrp_currency) %} + {% set sp_str = sp_raw if sp_raw else ( (_sym.get(sp_cur, '') ~ ('%.2f'|format(sp_val))) if sp_val is number else (sp_val or '')) %} + {% set rp_str = rp_raw if rp_raw else ( (_sym.get(rp_cur, '') ~ ('%.2f'|format(rp_val))) if rp_val is number else (rp_val or '')) %} +
    + {% if sp_val %} +
    {{ sp_str }}
    + {% if rp_val %} +
    {{ rp_str }}
    + {% endif %} + {% elif rp_val %} +
    {{ rp_str }}
    + {% endif %} +
    +{% endmacro %} diff --git a/market/templates/aside_clear.html b/market/templates/aside_clear.html new file mode 100644 index 0000000..e091ac2 --- /dev/null +++ b/market/templates/aside_clear.html @@ -0,0 +1,7 @@ + + diff --git a/market/templates/filter_clear.html b/market/templates/filter_clear.html new file mode 100644 index 0000000..fc3901e --- /dev/null +++ b/market/templates/filter_clear.html @@ -0,0 +1,5 @@ +
    +
    diff --git a/market/templates/fragments/container_nav_markets.html b/market/templates/fragments/container_nav_markets.html new file mode 100644 index 0000000..3c8814d --- /dev/null +++ b/market/templates/fragments/container_nav_markets.html @@ -0,0 +1,9 @@ +{# Market links nav — served as fragment from market app #} +{% for m in markets %} + + +
    {{m.name}}
    +
    +{% endfor %} diff --git a/market/templates/macros/filters.html b/market/templates/macros/filters.html new file mode 100644 index 0000000..8d13887 --- /dev/null +++ b/market/templates/macros/filters.html @@ -0,0 +1,117 @@ +{# + Unified filter macros for browse/shop pages + Consolidates duplicate mobile/desktop filter components +#} + +{% macro filter_item(href, is_on, title, icon_html, count=none, variant='desktop') %} + {# + Generic filter item (works for labels, stickers, etc.) + variant: 'desktop' or 'mobile' + #} + {% set base_class = "flex flex-col items-center justify-center" %} + {% if variant == 'mobile' %} + {% set item_class = base_class ~ " p-1 rounded hover:bg-stone-50" %} + {% set count_class = "text-[10px] text-stone-500 mt-1 leading-none tabular-nums" if count != 0 else "text-md text-red-500 font-bold mt-1 leading-none tabular-nums" %} + {% else %} + {% set item_class = base_class ~ " py-2 w-full h-full" %} + {% set count_class = "text-xs text-stone-500 leading-none justify-self-end tabular-nums" if count != 0 else "text-md text-red-500 font-bold leading-none justify-self-end tabular-nums" %} + {% endif %} + + + {{ icon_html | safe }} + {% if count is not none %} + {{ count }} + {% endif %} + +{% endmacro %} + + +{% macro labels_list(labels, selected_labels, current_local_href, variant='desktop') %} + {# + Unified labels filter list + variant: 'desktop' or 'mobile' + #} + {% import 'macros/stickers.html' as stick %} + + {% if variant == 'mobile' %} + + {% endif %} +{% endmacro %} + + +{% macro stickers_list(stickers, selected_stickers, current_local_href, variant='desktop') %} + {# + Unified stickers filter list + variant: 'desktop' or 'mobile' + #} + {% import 'macros/stickers.html' as stick %} + + {% if variant == 'mobile' %} + + + {% endif %} +{% endmacro %} + + diff --git a/schema.sql b/schema.sql new file mode 100644 index 0000000..68992ac --- /dev/null +++ b/schema.sql @@ -0,0 +1,2741 @@ +-- +-- PostgreSQL database dump +-- + + +-- Dumped 
from database version 16.10 (Debian 16.10-1.pgdg13+1) +-- Dumped by pg_dump version 16.10 (Ubuntu 16.10-1.pgdg22.04+1) + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- Name: alembic_version; Type: TABLE; Schema: public; Owner: postgres +-- + +-- +-- Name: authors; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.authors ( + id integer NOT NULL, + ghost_id character varying(64) NOT NULL, + slug character varying(191) NOT NULL, + name character varying(255) NOT NULL, + profile_image text, + cover_image text, + bio text, + website text, + location text, + facebook text, + twitter text, + created_at timestamp with time zone, + updated_at timestamp with time zone, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.authors OWNER TO postgres; + +-- +-- Name: authors_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.authors_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.authors_id_seq OWNER TO postgres; + +-- +-- Name: authors_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.authors_id_seq OWNED BY public.authors.id; + + +-- +-- Name: calendar_entries; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.calendar_entries ( + id integer NOT NULL, + calendar_id integer NOT NULL, + name character varying(255) NOT NULL, + start_at timestamp with time zone NOT NULL, + end_at timestamp with time zone, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time 
zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone, + CONSTRAINT ck_calendar_entries_end_after_start CHECK (((end_at IS NULL) OR (end_at >= start_at))) +); + + +ALTER TABLE public.calendar_entries OWNER TO postgres; + +-- +-- Name: calendar_entries_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.calendar_entries_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.calendar_entries_id_seq OWNER TO postgres; + +-- +-- Name: calendar_entries_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.calendar_entries_id_seq OWNED BY public.calendar_entries.id; + + +-- +-- Name: calendar_slots; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.calendar_slots ( + id integer NOT NULL, + calendar_id integer NOT NULL, + name character varying(255) NOT NULL, + description text, + mon boolean DEFAULT false NOT NULL, + tue boolean DEFAULT false NOT NULL, + wed boolean DEFAULT false NOT NULL, + thu boolean DEFAULT false NOT NULL, + fri boolean DEFAULT false NOT NULL, + sat boolean DEFAULT false NOT NULL, + sun boolean DEFAULT false NOT NULL, + time_start time without time zone NOT NULL, + time_end time without time zone NOT NULL, + cost numeric(10,2), + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone, + CONSTRAINT ck_calendar_slots_time_end_after_start CHECK ((time_end > time_start)) +); + + +ALTER TABLE public.calendar_slots OWNER TO postgres; + +-- +-- Name: calendar_slots_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.calendar_slots_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.calendar_slots_id_seq OWNER TO postgres; + +-- +-- Name: calendar_slots_id_seq; Type: SEQUENCE OWNED BY; 
Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.calendar_slots_id_seq OWNED BY public.calendar_slots.id; + + +-- +-- Name: calendars; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.calendars ( + id integer NOT NULL, + post_id integer NOT NULL, + name character varying(255) NOT NULL, + slug character varying(255) NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone, + description text +); + + +ALTER TABLE public.calendars OWNER TO postgres; + +-- +-- Name: calendars_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.calendars_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.calendars_id_seq OWNER TO postgres; + +-- +-- Name: calendars_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.calendars_id_seq OWNED BY public.calendars.id; + + +-- +-- Name: ghost_labels; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.ghost_labels ( + id integer NOT NULL, + ghost_id character varying(64) NOT NULL, + name character varying(255) NOT NULL, + slug character varying(255), + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL +); + + +ALTER TABLE public.ghost_labels OWNER TO postgres; + +-- +-- Name: ghost_labels_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.ghost_labels_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.ghost_labels_id_seq OWNER TO postgres; + +-- +-- Name: ghost_labels_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.ghost_labels_id_seq OWNED BY public.ghost_labels.id; + + +-- +-- Name: ghost_newsletters; Type: TABLE; 
Schema: public; Owner: postgres +-- + +CREATE TABLE public.ghost_newsletters ( + id integer NOT NULL, + ghost_id character varying(64) NOT NULL, + name character varying(255) NOT NULL, + slug character varying(255), + description text, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL +); + + +ALTER TABLE public.ghost_newsletters OWNER TO postgres; + +-- +-- Name: ghost_newsletters_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.ghost_newsletters_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.ghost_newsletters_id_seq OWNER TO postgres; + +-- +-- Name: ghost_newsletters_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.ghost_newsletters_id_seq OWNED BY public.ghost_newsletters.id; + + +-- +-- Name: ghost_subscriptions; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.ghost_subscriptions ( + id integer NOT NULL, + ghost_id character varying(64) NOT NULL, + user_id integer NOT NULL, + status character varying(50), + tier_id integer, + cadence character varying(50), + price_amount integer, + price_currency character varying(10), + stripe_customer_id character varying(255), + stripe_subscription_id character varying(255), + raw jsonb +); + + +ALTER TABLE public.ghost_subscriptions OWNER TO postgres; + +-- +-- Name: ghost_subscriptions_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.ghost_subscriptions_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.ghost_subscriptions_id_seq OWNER TO postgres; + +-- +-- Name: ghost_subscriptions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.ghost_subscriptions_id_seq OWNED BY public.ghost_subscriptions.id; + + +-- +-- Name: ghost_tiers; 
Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.ghost_tiers ( + id integer NOT NULL, + ghost_id character varying(64) NOT NULL, + name character varying(255) NOT NULL, + slug character varying(255), + type character varying(50), + visibility character varying(50) +); + + +ALTER TABLE public.ghost_tiers OWNER TO postgres; + +-- +-- Name: ghost_tiers_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.ghost_tiers_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.ghost_tiers_id_seq OWNER TO postgres; + +-- +-- Name: ghost_tiers_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.ghost_tiers_id_seq OWNED BY public.ghost_tiers.id; + + +-- +-- Name: kv; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.kv ( + key character varying(120) NOT NULL, + value text, + updated_at timestamp with time zone NOT NULL +); + + +ALTER TABLE public.kv OWNER TO postgres; + +-- +-- Name: link_errors; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.link_errors ( + id integer NOT NULL, + product_slug character varying(255), + href text, + text text, + top character varying(255), + sub character varying(255), + target_slug character varying(255), + type character varying(255), + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.link_errors OWNER TO postgres; + +-- +-- Name: link_errors_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.link_errors_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.link_errors_id_seq OWNER TO postgres; + +-- +-- Name: link_errors_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER 
SEQUENCE public.link_errors_id_seq OWNED BY public.link_errors.id; + + +-- +-- Name: link_externals; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.link_externals ( + id integer NOT NULL, + product_slug character varying(255), + href text, + text text, + host character varying(255), + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone + +); + + +ALTER TABLE public.link_externals OWNER TO postgres; + +-- +-- Name: link_externals_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.link_externals_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.link_externals_id_seq OWNER TO postgres; + +-- +-- Name: link_externals_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.link_externals_id_seq OWNED BY public.link_externals.id; + + +-- +-- Name: listing_items; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.listing_items ( + id integer NOT NULL, + listing_id integer NOT NULL, + slug character varying(255) NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.listing_items OWNER TO postgres; + +-- +-- Name: listing_items_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.listing_items_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.listing_items_id_seq OWNER TO postgres; + +-- +-- Name: listing_items_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.listing_items_id_seq OWNED BY public.listing_items.id; + + +-- +-- Name: listings; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE 
TABLE public.listings ( + id integer NOT NULL, + total_pages integer, + top_id integer NOT NULL, + sub_id integer, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.listings OWNER TO postgres; + +-- +-- Name: listings_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.listings_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.listings_id_seq OWNER TO postgres; + +-- +-- Name: listings_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.listings_id_seq OWNED BY public.listings.id; + + +-- +-- Name: magic_links; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.magic_links ( + id integer NOT NULL, + token character varying(128) NOT NULL, + user_id integer NOT NULL, + purpose character varying(32) NOT NULL, + expires_at timestamp with time zone NOT NULL, + used_at timestamp with time zone, + created_at timestamp with time zone DEFAULT now() NOT NULL, + ip character varying(64), + user_agent character varying(256) +); + + +ALTER TABLE public.magic_links OWNER TO postgres; + +-- +-- Name: magic_links_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.magic_links_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.magic_links_id_seq OWNER TO postgres; + +-- +-- Name: magic_links_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.magic_links_id_seq OWNED BY public.magic_links.id; + + +-- +-- Name: nav_subs; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.nav_subs ( + id integer NOT NULL, + top_id integer NOT NULL, + label character varying(255), + slug character varying(255) NOT NULL, + href text, + 
created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.nav_subs OWNER TO postgres; + +-- +-- Name: nav_subs_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.nav_subs_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.nav_subs_id_seq OWNER TO postgres; + +-- +-- Name: nav_subs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.nav_subs_id_seq OWNED BY public.nav_subs.id; + + +-- +-- Name: nav_tops; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.nav_tops ( + id integer NOT NULL, + label character varying(255) NOT NULL, + slug character varying(255) NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.nav_tops OWNER TO postgres; + +-- +-- Name: nav_tops_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.nav_tops_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.nav_tops_id_seq OWNER TO postgres; + +-- +-- Name: nav_tops_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.nav_tops_id_seq OWNED BY public.nav_tops.id; + + +-- +-- Name: post_authors; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.post_authors ( + post_id integer NOT NULL, + author_id integer NOT NULL, + sort_order integer DEFAULT 0 NOT NULL +); + + +ALTER TABLE public.post_authors OWNER TO postgres; + +-- +-- Name: post_tags; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.post_tags ( + post_id integer NOT NULL, + tag_id integer NOT NULL, + sort_order integer DEFAULT 0 
NOT NULL +); + + +ALTER TABLE public.post_tags OWNER TO postgres; + +-- +-- Name: posts; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.posts ( + id integer NOT NULL, + ghost_id character varying(64) NOT NULL, + uuid character varying(64) NOT NULL, + slug character varying(191) NOT NULL, + title character varying(500) NOT NULL, + html text, + plaintext text, + mobiledoc text, + lexical text, + feature_image text, + feature_image_alt text, + feature_image_caption text, + excerpt text, + custom_excerpt text, + visibility character varying(32) DEFAULT 'public'::character varying NOT NULL, + status character varying(32) DEFAULT 'draft'::character varying NOT NULL, + featured boolean DEFAULT false NOT NULL, + is_page boolean DEFAULT false NOT NULL, + email_only boolean DEFAULT false NOT NULL, + canonical_url text, + meta_title character varying(500), + meta_description text, + og_image text, + og_title character varying(500), + og_description text, + twitter_image text, + twitter_title character varying(500), + twitter_description text, + custom_template character varying(191), + reading_time integer, + comment_id character varying(191), + published_at timestamp with time zone, + updated_at timestamp with time zone, + created_at timestamp with time zone, + deleted_at timestamp with time zone, + primary_author_id integer, + primary_tag_id integer +); + + +ALTER TABLE public.posts OWNER TO postgres; + +-- +-- Name: posts_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.posts_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.posts_id_seq OWNER TO postgres; + +-- +-- Name: posts_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.posts_id_seq OWNED BY public.posts.id; + + +-- +-- Name: product_allergens; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.product_allergens ( + id 
integer NOT NULL, + product_id integer NOT NULL, + name character varying(255) NOT NULL, + contains boolean DEFAULT false NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.product_allergens OWNER TO postgres; + +-- +-- Name: product_allergens_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.product_allergens_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.product_allergens_id_seq OWNER TO postgres; + +-- +-- Name: product_allergens_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.product_allergens_id_seq OWNED BY public.product_allergens.id; + + +-- +-- Name: product_attributes; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.product_attributes ( + id integer NOT NULL, + product_id integer NOT NULL, + key character varying(255) NOT NULL, + value text, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.product_attributes OWNER TO postgres; + +-- +-- Name: product_attributes_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.product_attributes_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.product_attributes_id_seq OWNER TO postgres; + +-- +-- Name: product_attributes_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.product_attributes_id_seq OWNED BY public.product_attributes.id; + + +-- +-- Name: product_images; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.product_images ( + id integer NOT NULL, + product_id integer NOT NULL, + url 
text NOT NULL, + "position" integer DEFAULT 0 NOT NULL, + kind character varying(16) DEFAULT 'gallery'::character varying NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone, + CONSTRAINT ck_product_images_position_nonneg CHECK (("position" >= 0)) +); + + +ALTER TABLE public.product_images OWNER TO postgres; + +-- +-- Name: product_images_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.product_images_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.product_images_id_seq OWNER TO postgres; + +-- +-- Name: product_images_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.product_images_id_seq OWNED BY public.product_images.id; + + +-- +-- Name: product_labels; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.product_labels ( + id integer NOT NULL, + product_id integer NOT NULL, + name character varying(255) NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.product_labels OWNER TO postgres; + +-- +-- Name: product_labels_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.product_labels_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.product_labels_id_seq OWNER TO postgres; + +-- +-- Name: product_labels_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.product_labels_id_seq OWNED BY public.product_labels.id; + + +-- +-- Name: product_likes; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.product_likes ( + user_id integer NOT NULL, + id integer NOT NULL, + 
product_slug character varying(255), + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.product_likes OWNER TO postgres; + +-- +-- Name: product_likes_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.product_likes_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.product_likes_id_seq OWNER TO postgres; + +-- +-- Name: product_likes_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.product_likes_id_seq OWNED BY public.product_likes.id; + + +-- +-- Name: product_logs; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.product_logs ( + id integer NOT NULL, + slug character varying(255), + href_tried text, + ok boolean DEFAULT false NOT NULL, + error_type character varying(255), + error_message text, + http_status integer, + final_url text, + transport_error boolean, + title character varying(512), + has_description_html boolean, + has_description_short boolean, + sections_count integer, + images_count integer, + embedded_images_count integer, + all_images_count integer, + created_at timestamp with time zone DEFAULT now() NOT NULL +); + + +ALTER TABLE public.product_logs OWNER TO postgres; + +-- +-- Name: product_logs_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.product_logs_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.product_logs_id_seq OWNER TO postgres; + +-- +-- Name: product_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.product_logs_id_seq OWNED BY public.product_logs.id; + + +-- +-- Name: product_nutrition; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.product_nutrition 
( + id integer NOT NULL, + product_id integer NOT NULL, + key character varying(255) NOT NULL, + value character varying(255), + unit character varying(64), + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.product_nutrition OWNER TO postgres; + +-- +-- Name: product_nutrition_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.product_nutrition_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.product_nutrition_id_seq OWNER TO postgres; + +-- +-- Name: product_nutrition_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.product_nutrition_id_seq OWNED BY public.product_nutrition.id; + + +-- +-- Name: product_sections; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.product_sections ( + id integer NOT NULL, + product_id integer NOT NULL, + title character varying(255) NOT NULL, + html text NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.product_sections OWNER TO postgres; + +-- +-- Name: product_sections_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.product_sections_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.product_sections_id_seq OWNER TO postgres; + +-- +-- Name: product_sections_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.product_sections_id_seq OWNED BY public.product_sections.id; + + +-- +-- Name: product_stickers; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.product_stickers ( + id integer NOT NULL, + product_id 
integer NOT NULL, + name character varying(255) NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.product_stickers OWNER TO postgres; + +-- +-- Name: product_stickers_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.product_stickers_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.product_stickers_id_seq OWNER TO postgres; + +-- +-- Name: product_stickers_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.product_stickers_id_seq OWNED BY public.product_stickers.id; + + +-- +-- Name: products; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.products ( + id integer NOT NULL, + slug character varying(255) NOT NULL, + title character varying(512), + image text, + description_short text, + description_html text, + suma_href text, + brand character varying(255), + rrp numeric(12,2), + rrp_currency character varying(16), + rrp_raw character varying(128), + price_per_unit numeric(12,4), + price_per_unit_currency character varying(16), + price_per_unit_raw character varying(128), + special_price numeric(12,2), + special_price_currency character varying(16), + special_price_raw character varying(128), + case_size_count integer, + case_size_item_qty numeric(12,3), + case_size_item_unit character varying(32), + case_size_raw character varying(128), + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone, + ean character varying(64), + sku character varying(128), + unit_size character varying(128), + pack_size character varying(128), + regular_price numeric(12,2), + regular_price_currency character varying(16), + regular_price_raw character varying(128), + 
oe_list_price numeric(12,2) +); + + +ALTER TABLE public.products OWNER TO postgres; + +-- +-- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.products_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.products_id_seq OWNER TO postgres; + +-- +-- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id; + + +-- +-- Name: subcategory_redirects; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.subcategory_redirects ( + id integer NOT NULL, + old_path character varying(512) NOT NULL, + new_path character varying(512) NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + updated_at timestamp with time zone DEFAULT now() NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.subcategory_redirects OWNER TO postgres; + +-- +-- Name: subcategory_redirects_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.subcategory_redirects_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.subcategory_redirects_id_seq OWNER TO postgres; + +-- +-- Name: subcategory_redirects_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.subcategory_redirects_id_seq OWNED BY public.subcategory_redirects.id; + + +-- +-- Name: tags; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.tags ( + id integer NOT NULL, + ghost_id character varying(64) NOT NULL, + slug character varying(191) NOT NULL, + name character varying(255) NOT NULL, + description text, + visibility character varying(32) DEFAULT 'public'::character varying NOT NULL, + feature_image text, + meta_title character varying(300), + meta_description text, + created_at timestamp with time zone, + 
updated_at timestamp with time zone, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.tags OWNER TO postgres; + +-- +-- Name: tags_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.tags_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.tags_id_seq OWNER TO postgres; + +-- +-- Name: tags_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.tags_id_seq OWNED BY public.tags.id; + + +-- +-- Name: user_labels; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.user_labels ( + id integer NOT NULL, + user_id integer NOT NULL, + label_id integer NOT NULL +); + + +ALTER TABLE public.user_labels OWNER TO postgres; + +-- +-- Name: user_labels_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.user_labels_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.user_labels_id_seq OWNER TO postgres; + +-- +-- Name: user_labels_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.user_labels_id_seq OWNED BY public.user_labels.id; + + +-- +-- Name: user_newsletters; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.user_newsletters ( + id integer NOT NULL, + user_id integer NOT NULL, + newsletter_id integer NOT NULL, + subscribed boolean DEFAULT true NOT NULL +); + + +ALTER TABLE public.user_newsletters OWNER TO postgres; + +-- +-- Name: user_newsletters_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.user_newsletters_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.user_newsletters_id_seq OWNER TO postgres; + +-- +-- Name: user_newsletters_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE 
public.user_newsletters_id_seq OWNED BY public.user_newsletters.id; + + +-- +-- Name: users; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.users ( + id integer NOT NULL, + email character varying(255) NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + last_login_at timestamp with time zone, + ghost_id character varying(64), + name character varying(255), + ghost_status character varying(50), + ghost_subscribed boolean DEFAULT true NOT NULL, + ghost_note text, + avatar_image text, + stripe_customer_id character varying(255), + ghost_raw jsonb +); + + +ALTER TABLE public.users OWNER TO postgres; + +-- +-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.users_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.users_id_seq OWNER TO postgres; + +-- +-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id; + + +-- +-- Name: authors id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.authors ALTER COLUMN id SET DEFAULT nextval('public.authors_id_seq'::regclass); + + +-- +-- Name: calendar_entries id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.calendar_entries ALTER COLUMN id SET DEFAULT nextval('public.calendar_entries_id_seq'::regclass); + + +-- +-- Name: calendar_slots id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.calendar_slots ALTER COLUMN id SET DEFAULT nextval('public.calendar_slots_id_seq'::regclass); + + +-- +-- Name: calendars id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.calendars ALTER COLUMN id SET DEFAULT nextval('public.calendars_id_seq'::regclass); + + +-- +-- Name: ghost_labels id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY 
public.ghost_labels ALTER COLUMN id SET DEFAULT nextval('public.ghost_labels_id_seq'::regclass); + + +-- +-- Name: ghost_newsletters id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.ghost_newsletters ALTER COLUMN id SET DEFAULT nextval('public.ghost_newsletters_id_seq'::regclass); + + +-- +-- Name: ghost_subscriptions id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.ghost_subscriptions ALTER COLUMN id SET DEFAULT nextval('public.ghost_subscriptions_id_seq'::regclass); + + +-- +-- Name: ghost_tiers id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.ghost_tiers ALTER COLUMN id SET DEFAULT nextval('public.ghost_tiers_id_seq'::regclass); + + +-- +-- Name: link_errors id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.link_errors ALTER COLUMN id SET DEFAULT nextval('public.link_errors_id_seq'::regclass); + + +-- +-- Name: link_externals id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.link_externals ALTER COLUMN id SET DEFAULT nextval('public.link_externals_id_seq'::regclass); + + +-- +-- Name: listing_items id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listing_items ALTER COLUMN id SET DEFAULT nextval('public.listing_items_id_seq'::regclass); + + +-- +-- Name: listings id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listings ALTER COLUMN id SET DEFAULT nextval('public.listings_id_seq'::regclass); + + +-- +-- Name: magic_links id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.magic_links ALTER COLUMN id SET DEFAULT nextval('public.magic_links_id_seq'::regclass); + + +-- +-- Name: nav_subs id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.nav_subs ALTER COLUMN id SET DEFAULT nextval('public.nav_subs_id_seq'::regclass); + + +-- +-- Name: nav_tops id; Type: DEFAULT; Schema: public; 
Owner: postgres +-- + +ALTER TABLE ONLY public.nav_tops ALTER COLUMN id SET DEFAULT nextval('public.nav_tops_id_seq'::regclass); + + +-- +-- Name: posts id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.posts ALTER COLUMN id SET DEFAULT nextval('public.posts_id_seq'::regclass); + + +-- +-- Name: product_allergens id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_allergens ALTER COLUMN id SET DEFAULT nextval('public.product_allergens_id_seq'::regclass); + + +-- +-- Name: product_attributes id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_attributes ALTER COLUMN id SET DEFAULT nextval('public.product_attributes_id_seq'::regclass); + + +-- +-- Name: product_images id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_images ALTER COLUMN id SET DEFAULT nextval('public.product_images_id_seq'::regclass); + + +-- +-- Name: product_labels id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_labels ALTER COLUMN id SET DEFAULT nextval('public.product_labels_id_seq'::regclass); + + +-- +-- Name: product_likes id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_likes ALTER COLUMN id SET DEFAULT nextval('public.product_likes_id_seq'::regclass); + + +-- +-- Name: product_logs id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_logs ALTER COLUMN id SET DEFAULT nextval('public.product_logs_id_seq'::regclass); + + +-- +-- Name: product_nutrition id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_nutrition ALTER COLUMN id SET DEFAULT nextval('public.product_nutrition_id_seq'::regclass); + + +-- +-- Name: product_sections id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_sections ALTER COLUMN id SET DEFAULT 
nextval('public.product_sections_id_seq'::regclass); + + +-- +-- Name: product_stickers id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_stickers ALTER COLUMN id SET DEFAULT nextval('public.product_stickers_id_seq'::regclass); + + +-- +-- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass); + + +-- +-- Name: subcategory_redirects id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.subcategory_redirects ALTER COLUMN id SET DEFAULT nextval('public.subcategory_redirects_id_seq'::regclass); + + +-- +-- Name: tags id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tags ALTER COLUMN id SET DEFAULT nextval('public.tags_id_seq'::regclass); + + +-- +-- Name: user_labels id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_labels ALTER COLUMN id SET DEFAULT nextval('public.user_labels_id_seq'::regclass); + + +-- +-- Name: user_newsletters id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_newsletters ALTER COLUMN id SET DEFAULT nextval('public.user_newsletters_id_seq'::regclass); + + +-- +-- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass); + + +-- + + +-- +-- Name: authors authors_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.authors + ADD CONSTRAINT authors_pkey PRIMARY KEY (id); + + +-- +-- Name: calendar_entries calendar_entries_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.calendar_entries + ADD CONSTRAINT calendar_entries_pkey PRIMARY KEY (id); + + +-- +-- Name: calendar_slots calendar_slots_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY 
public.calendar_slots + ADD CONSTRAINT calendar_slots_pkey PRIMARY KEY (id); + + +-- +-- Name: calendars calendars_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.calendars + ADD CONSTRAINT calendars_pkey PRIMARY KEY (id); + + +-- +-- Name: ghost_labels ghost_labels_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.ghost_labels + ADD CONSTRAINT ghost_labels_pkey PRIMARY KEY (id); + + +-- +-- Name: ghost_newsletters ghost_newsletters_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.ghost_newsletters + ADD CONSTRAINT ghost_newsletters_pkey PRIMARY KEY (id); + + +-- +-- Name: ghost_subscriptions ghost_subscriptions_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.ghost_subscriptions + ADD CONSTRAINT ghost_subscriptions_pkey PRIMARY KEY (id); + + +-- +-- Name: ghost_tiers ghost_tiers_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.ghost_tiers + ADD CONSTRAINT ghost_tiers_pkey PRIMARY KEY (id); + + +-- +-- Name: kv kv_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.kv + ADD CONSTRAINT kv_pkey PRIMARY KEY (key); + + +-- +-- Name: link_errors link_errors_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.link_errors + ADD CONSTRAINT link_errors_pkey PRIMARY KEY (id); + + +-- +-- Name: link_externals link_externals_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.link_externals + ADD CONSTRAINT link_externals_pkey PRIMARY KEY (id); + + +-- +-- Name: listing_items listing_items_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listing_items + ADD CONSTRAINT listing_items_pkey PRIMARY KEY (id); + + +-- +-- Name: listings listings_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listings + ADD CONSTRAINT 
listings_pkey PRIMARY KEY (id); + + +-- +-- Name: magic_links magic_links_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.magic_links + ADD CONSTRAINT magic_links_pkey PRIMARY KEY (id); + + +-- +-- Name: magic_links magic_links_token_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.magic_links + ADD CONSTRAINT magic_links_token_key UNIQUE (token); + + +-- +-- Name: nav_subs nav_subs_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.nav_subs + ADD CONSTRAINT nav_subs_pkey PRIMARY KEY (id); + + +-- +-- Name: nav_tops nav_tops_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.nav_tops + ADD CONSTRAINT nav_tops_pkey PRIMARY KEY (id); + + +-- +-- Name: post_authors post_authors_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.post_authors + ADD CONSTRAINT post_authors_pkey PRIMARY KEY (post_id, author_id); + + +-- +-- Name: post_tags post_tags_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.post_tags + ADD CONSTRAINT post_tags_pkey PRIMARY KEY (post_id, tag_id); + + +-- +-- Name: posts posts_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.posts + ADD CONSTRAINT posts_pkey PRIMARY KEY (id); + + +-- +-- Name: product_allergens product_allergens_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_allergens + ADD CONSTRAINT product_allergens_pkey PRIMARY KEY (id); + + +-- +-- Name: product_attributes product_attributes_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_attributes + ADD CONSTRAINT product_attributes_pkey PRIMARY KEY (id); + + +-- +-- Name: product_images product_images_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_images + ADD CONSTRAINT product_images_pkey 
PRIMARY KEY (id); + + +-- +-- Name: product_labels product_labels_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_labels + ADD CONSTRAINT product_labels_pkey PRIMARY KEY (id); + + +-- +-- Name: product_logs product_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_logs + ADD CONSTRAINT product_logs_pkey PRIMARY KEY (id); + + +-- +-- Name: product_nutrition product_nutrition_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_nutrition + ADD CONSTRAINT product_nutrition_pkey PRIMARY KEY (id); + + +-- +-- Name: product_sections product_sections_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_sections + ADD CONSTRAINT product_sections_pkey PRIMARY KEY (id); + + +-- +-- Name: product_stickers product_stickers_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_stickers + ADD CONSTRAINT product_stickers_pkey PRIMARY KEY (id); + + +-- +-- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.products + ADD CONSTRAINT products_pkey PRIMARY KEY (id); + + +-- +-- Name: products products_slug_deleted_at; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.products + ADD CONSTRAINT products_slug_deleted_at UNIQUE (slug, deleted_at); + + +-- +-- Name: subcategory_redirects subcategory_redirects_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.subcategory_redirects + ADD CONSTRAINT subcategory_redirects_pkey PRIMARY KEY (id); + + +-- +-- Name: tags tags_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tags + ADD CONSTRAINT tags_pkey PRIMARY KEY (id); + + +-- +-- Name: authors uq_authors_ghost_id; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.authors + ADD CONSTRAINT
uq_authors_ghost_id UNIQUE (ghost_id); + + +-- +-- Name: calendar_slots uq_calendar_slots_unique_band; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.calendar_slots + ADD CONSTRAINT uq_calendar_slots_unique_band UNIQUE (calendar_id, name); + + +-- +-- Name: listing_items uq_listing_items_listing_slug; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listing_items + ADD CONSTRAINT uq_listing_items_listing_slug UNIQUE (listing_id, slug, deleted_at); + + +-- +-- Name: listings uq_listings_top_sub; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listings + ADD CONSTRAINT uq_listings_top_sub UNIQUE (top_id, sub_id, deleted_at); + + +-- +-- Name: nav_subs uq_nav_subs_top_slug; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.nav_subs + ADD CONSTRAINT uq_nav_subs_top_slug UNIQUE (top_id, slug, deleted_at); + + +-- +-- Name: nav_tops uq_nav_tops_label_slug; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.nav_tops + ADD CONSTRAINT uq_nav_tops_label_slug UNIQUE (label, slug, deleted_at); + + +-- +-- Name: posts uq_posts_ghost_id; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.posts + ADD CONSTRAINT uq_posts_ghost_id UNIQUE (ghost_id); + + +-- +-- Name: posts uq_posts_uuid; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.posts + ADD CONSTRAINT uq_posts_uuid UNIQUE (uuid); + + +-- +-- Name: product_allergens uq_product_allergens_product_name; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_allergens + ADD CONSTRAINT uq_product_allergens_product_name UNIQUE (product_id, name, deleted_at); +
+ +-- +-- Name: product_attributes uq_product_attributes_product_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_attributes + ADD CONSTRAINT uq_product_attributes_product_key UNIQUE (product_id, key, deleted_at); + + +-- +-- Name: product_images uq_product_images_product_url_kind; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_images + ADD CONSTRAINT uq_product_images_product_url_kind UNIQUE (product_id, url, kind, deleted_at); + + +-- +-- Name: product_labels uq_product_labels_product_name; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_labels + ADD CONSTRAINT uq_product_labels_product_name UNIQUE (product_id, name, deleted_at); + + +-- +-- Name: product_likes uq_product_likes_product_user; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_likes + ADD CONSTRAINT uq_product_likes_product_user UNIQUE (product_slug, user_id, deleted_at); + + +-- +-- Name: product_nutrition uq_product_nutrition_product_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_nutrition + ADD CONSTRAINT uq_product_nutrition_product_key UNIQUE (product_id, key, deleted_at); + + +-- +-- Name: product_sections uq_product_sections_product_title; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_sections + ADD CONSTRAINT uq_product_sections_product_title UNIQUE (product_id, title, deleted_at); + + +-- +-- Name: product_stickers uq_product_stickers_product_name; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_stickers + ADD CONSTRAINT uq_product_stickers_product_name UNIQUE (product_id, name, deleted_at); + + +-- +-- Name: subcategory_redirects uq_subcategory_redirects_old_new; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.subcategory_redirects + ADD CONSTRAINT 
uq_subcategory_redirects_old_new UNIQUE (old_path, new_path, deleted_at); + + +-- +-- Name: tags uq_tags_ghost_id; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tags + ADD CONSTRAINT uq_tags_ghost_id UNIQUE (ghost_id); + + +-- +-- Name: user_labels uq_user_label; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_labels + ADD CONSTRAINT uq_user_label UNIQUE (user_id, label_id); + + +-- +-- Name: user_newsletters uq_user_newsletter; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_newsletters + ADD CONSTRAINT uq_user_newsletter UNIQUE (user_id, newsletter_id); + + +-- +-- Name: user_labels user_labels_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_labels + ADD CONSTRAINT user_labels_pkey PRIMARY KEY (id); + + +-- +-- Name: user_newsletters user_newsletters_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_newsletters + ADD CONSTRAINT user_newsletters_pkey PRIMARY KEY (id); + + +-- +-- Name: users users_email_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_email_key UNIQUE (email); + + +-- +-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_pkey PRIMARY KEY (id); + + +-- +-- Name: ix_authors_ghost_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_authors_ghost_id ON public.authors USING btree (ghost_id); + + +-- +-- Name: ix_authors_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_authors_slug ON public.authors USING btree (slug); + + +-- +-- Name: ix_calendar_entries_calendar_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_calendar_entries_calendar_id ON public.calendar_entries USING btree (calendar_id); + + +-- +-- Name: ix_calendar_entries_name; Type: 
INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_calendar_entries_name ON public.calendar_entries USING btree (name); + + +-- +-- Name: ix_calendar_entries_start_at; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_calendar_entries_start_at ON public.calendar_entries USING btree (start_at); + + +-- +-- Name: ix_calendar_slots_calendar_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_calendar_slots_calendar_id ON public.calendar_slots USING btree (calendar_id); + + +-- +-- Name: ix_calendar_slots_time_start; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_calendar_slots_time_start ON public.calendar_slots USING btree (time_start); + + +-- +-- Name: ix_calendars_name; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_calendars_name ON public.calendars USING btree (name); + + +-- +-- Name: ix_calendars_post_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_calendars_post_id ON public.calendars USING btree (post_id); + + +-- +-- Name: ix_calendars_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_calendars_slug ON public.calendars USING btree (slug); + + +-- +-- Name: ix_ghost_labels_ghost_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX ix_ghost_labels_ghost_id ON public.ghost_labels USING btree (ghost_id); + + +-- +-- Name: ix_ghost_newsletters_ghost_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX ix_ghost_newsletters_ghost_id ON public.ghost_newsletters USING btree (ghost_id); + + +-- +-- Name: ix_ghost_subscriptions_ghost_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX ix_ghost_subscriptions_ghost_id ON public.ghost_subscriptions USING btree (ghost_id); + + +-- +-- Name: ix_ghost_subscriptions_stripe_customer_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_ghost_subscriptions_stripe_customer_id ON public.ghost_subscriptions 
USING btree (stripe_customer_id); + + +-- +-- Name: ix_ghost_subscriptions_stripe_subscription_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_ghost_subscriptions_stripe_subscription_id ON public.ghost_subscriptions USING btree (stripe_subscription_id); + + +-- +-- Name: ix_ghost_subscriptions_tier_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_ghost_subscriptions_tier_id ON public.ghost_subscriptions USING btree (tier_id); + + +-- +-- Name: ix_ghost_subscriptions_user_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_ghost_subscriptions_user_id ON public.ghost_subscriptions USING btree (user_id); + + +-- +-- Name: ix_ghost_tiers_ghost_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX ix_ghost_tiers_ghost_id ON public.ghost_tiers USING btree (ghost_id); + + +-- +-- Name: ix_link_errors_product_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_link_errors_product_slug ON public.link_errors USING btree (product_slug); + + +-- +-- Name: ix_link_externals_product_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_link_externals_product_slug ON public.link_externals USING btree (product_slug); + + +-- +-- Name: ix_listing_items_listing_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_listing_items_listing_id ON public.listing_items USING btree (listing_id); + + +-- +-- Name: ix_listing_items_listing_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_listing_items_listing_slug ON public.listing_items USING btree (listing_id, slug); + + +-- +-- Name: ix_listing_items_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_listing_items_slug ON public.listing_items USING btree (slug); + +
+-- +-- Name: ix_magic_links_token; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX ix_magic_links_token ON public.magic_links USING btree (token); + + +-- +-- Name: ix_magic_links_user; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_magic_links_user ON public.magic_links USING btree (user_id); + + +-- +-- Name: ix_nav_subs_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_nav_subs_slug ON public.nav_subs USING btree (slug); + + +-- +-- Name: ix_nav_subs_top_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_nav_subs_top_id ON public.nav_subs USING btree (top_id); + + +-- +-- Name: ix_nav_tops_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_nav_tops_slug ON public.nav_tops USING btree (slug); + + +-- +-- Name: ix_posts_ghost_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_posts_ghost_id ON public.posts USING btree (ghost_id); + + +-- +-- Name: ix_posts_is_page; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_posts_is_page ON public.posts USING btree (is_page); + + +-- +-- Name: ix_posts_published_at; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_posts_published_at ON public.posts USING btree (published_at); + + +-- +-- Name: ix_posts_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_posts_slug ON public.posts USING btree (slug); + + +-- +-- Name: ix_posts_status; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_posts_status ON public.posts USING btree (status); + + +-- +-- Name: ix_posts_visibility; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_posts_visibility ON public.posts USING btree (visibility); + + +-- +-- Name: ix_product_allergens_name; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_allergens_name ON public.product_allergens USING btree (name); + + +-- +-- Name: 
ix_product_allergens_product_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_allergens_product_id ON public.product_allergens USING btree (product_id); + + +-- +-- Name: ix_product_attributes_key; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_attributes_key ON public.product_attributes USING btree (key); + + +-- +-- Name: ix_product_attributes_product_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_attributes_product_id ON public.product_attributes USING btree (product_id); + + +-- +-- Name: ix_product_images_position; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_images_position ON public.product_images USING btree ("position"); + + +-- +-- Name: ix_product_images_product_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_images_product_id ON public.product_images USING btree (product_id); + + +-- +-- Name: ix_product_labels_name; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_labels_name ON public.product_labels USING btree (name); + + +-- +-- Name: ix_product_labels_product_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_labels_product_id ON public.product_labels USING btree (product_id); + + +-- +-- Name: ix_product_likes_product_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_likes_product_id ON public.product_likes USING btree (product_slug); + + +-- +-- Name: ix_product_likes_user_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_likes_user_id ON public.product_likes USING btree (user_id); + + +-- +-- Name: ix_product_likes_user_product; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX ix_product_likes_user_product ON public.product_likes USING btree (user_id, product_slug, deleted_at); + + +-- +-- Name: ix_product_logs_slug; Type: INDEX; Schema: public; Owner: 
postgres +-- + +CREATE INDEX ix_product_logs_slug ON public.product_logs USING btree (slug); + + +-- +-- Name: ix_product_nutrition_key; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_nutrition_key ON public.product_nutrition USING btree (key); + + +-- +-- Name: ix_product_nutrition_product_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_nutrition_product_id ON public.product_nutrition USING btree (product_id); + + +-- +-- Name: ix_product_sections_product_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_sections_product_id ON public.product_sections USING btree (product_id); + + +-- +-- Name: ix_product_stickers_name; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_stickers_name ON public.product_stickers USING btree (name); + + +-- +-- Name: ix_product_stickers_product_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_product_stickers_product_id ON public.product_stickers USING btree (product_id); + + +-- +-- Name: ix_products_brand; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_products_brand ON public.products USING btree (brand); + + +-- +-- Name: ix_products_ean; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_products_ean ON public.products USING btree (ean); + + +-- +-- Name: ix_products_regular_price; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_products_regular_price ON public.products USING btree (regular_price); + + +-- +-- Name: ix_products_sku; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_products_sku ON public.products USING btree (sku); + + +-- +-- Name: ix_products_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_products_slug ON public.products USING btree (slug); + + +-- +-- Name: ix_subcategory_redirects_old_path; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX 
ix_subcategory_redirects_old_path ON public.subcategory_redirects USING btree (old_path); + + +-- +-- Name: ix_tags_ghost_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_tags_ghost_id ON public.tags USING btree (ghost_id); + + +-- +-- Name: ix_tags_slug; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_tags_slug ON public.tags USING btree (slug); + + +-- +-- Name: ix_users_email; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX ix_users_email ON public.users USING btree (email); + + +-- +-- Name: ix_users_ghost_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX ix_users_ghost_id ON public.users USING btree (ghost_id); + + +-- +-- Name: ix_users_stripe_customer_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX ix_users_stripe_customer_id ON public.users USING btree (stripe_customer_id); + + +-- +-- Name: ux_calendars_post_slug_active; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE UNIQUE INDEX ux_calendars_post_slug_active ON public.calendars USING btree (post_id, lower((slug)::text)) WHERE (deleted_at IS NULL); + + +-- +-- Name: calendar_entries calendar_entries_calendar_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.calendar_entries + ADD CONSTRAINT calendar_entries_calendar_id_fkey FOREIGN KEY (calendar_id) REFERENCES public.calendars(id) ON DELETE CASCADE; + + +-- +-- Name: calendar_slots calendar_slots_calendar_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.calendar_slots + ADD CONSTRAINT calendar_slots_calendar_id_fkey FOREIGN KEY (calendar_id) REFERENCES public.calendars(id) ON DELETE CASCADE; + + +-- +-- Name: calendars calendars_post_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.calendars + ADD CONSTRAINT calendars_post_id_fkey FOREIGN KEY (post_id) REFERENCES public.posts(id) ON DELETE CASCADE; + + 
+-- +-- Name: product_likes fk_product_likes_product_id_products; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +-- NOTE(review): this FK references products(slug), but products only has UNIQUE (slug, deleted_at) and a non-unique ix_products_slug above; PostgreSQL requires a unique constraint/index on exactly (slug) for this to restore -- confirm against a clean restore. +ALTER TABLE ONLY public.product_likes + ADD CONSTRAINT fk_product_likes_product_id_products FOREIGN KEY (product_slug) REFERENCES public.products(slug) ON DELETE CASCADE; + + +-- +-- Name: ghost_subscriptions ghost_subscriptions_tier_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.ghost_subscriptions + ADD CONSTRAINT ghost_subscriptions_tier_id_fkey FOREIGN KEY (tier_id) REFERENCES public.ghost_tiers(id) ON DELETE SET NULL; + + +-- +-- Name: ghost_subscriptions ghost_subscriptions_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.ghost_subscriptions + ADD CONSTRAINT ghost_subscriptions_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + + +-- +-- Name: listing_items listing_items_listing_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listing_items + ADD CONSTRAINT listing_items_listing_id_fkey FOREIGN KEY (listing_id) REFERENCES public.listings(id) ON DELETE CASCADE; + + +-- +-- Name: listings listings_sub_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listings + ADD CONSTRAINT listings_sub_id_fkey FOREIGN KEY (sub_id) REFERENCES public.nav_subs(id); + + +-- +-- Name: listings listings_top_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listings + ADD CONSTRAINT listings_top_id_fkey FOREIGN KEY (top_id) REFERENCES public.nav_tops(id); + + +-- +-- Name: magic_links magic_links_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.magic_links + ADD CONSTRAINT magic_links_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + + +-- +-- Name: nav_subs nav_subs_top_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner:
postgres +-- + +ALTER TABLE ONLY public.nav_subs + ADD CONSTRAINT nav_subs_top_id_fkey FOREIGN KEY (top_id) REFERENCES public.nav_tops(id) ON DELETE CASCADE; + + +-- +-- Name: post_authors post_authors_author_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.post_authors + ADD CONSTRAINT post_authors_author_id_fkey FOREIGN KEY (author_id) REFERENCES public.authors(id) ON DELETE CASCADE; + + +-- +-- Name: post_authors post_authors_post_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.post_authors + ADD CONSTRAINT post_authors_post_id_fkey FOREIGN KEY (post_id) REFERENCES public.posts(id) ON DELETE CASCADE; + + +-- +-- Name: post_tags post_tags_post_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.post_tags + ADD CONSTRAINT post_tags_post_id_fkey FOREIGN KEY (post_id) REFERENCES public.posts(id) ON DELETE CASCADE; + + +-- +-- Name: post_tags post_tags_tag_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.post_tags + ADD CONSTRAINT post_tags_tag_id_fkey FOREIGN KEY (tag_id) REFERENCES public.tags(id) ON DELETE CASCADE; + + +-- +-- Name: posts posts_primary_author_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.posts + ADD CONSTRAINT posts_primary_author_id_fkey FOREIGN KEY (primary_author_id) REFERENCES public.authors(id) ON DELETE SET NULL; + + +-- +-- Name: posts posts_primary_tag_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.posts + ADD CONSTRAINT posts_primary_tag_id_fkey FOREIGN KEY (primary_tag_id) REFERENCES public.tags(id) ON DELETE SET NULL; + + +-- +-- Name: product_allergens product_allergens_product_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_allergens + ADD CONSTRAINT product_allergens_product_id_fkey FOREIGN KEY (product_id) REFERENCES 
public.products(id) ON DELETE CASCADE; + + +-- +-- Name: product_attributes product_attributes_product_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_attributes + ADD CONSTRAINT product_attributes_product_id_fkey FOREIGN KEY (product_id) REFERENCES public.products(id) ON DELETE CASCADE; + + +-- +-- Name: product_images product_images_product_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_images + ADD CONSTRAINT product_images_product_id_fkey FOREIGN KEY (product_id) REFERENCES public.products(id) ON DELETE CASCADE; + + +-- +-- Name: product_labels product_labels_product_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_labels + ADD CONSTRAINT product_labels_product_id_fkey FOREIGN KEY (product_id) REFERENCES public.products(id) ON DELETE CASCADE; + + +-- +-- Name: product_likes product_likes_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_likes + ADD CONSTRAINT product_likes_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + + +-- +-- Name: product_nutrition product_nutrition_product_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_nutrition + ADD CONSTRAINT product_nutrition_product_id_fkey FOREIGN KEY (product_id) REFERENCES public.products(id) ON DELETE CASCADE; + + +-- +-- Name: product_sections product_sections_product_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_sections + ADD CONSTRAINT product_sections_product_id_fkey FOREIGN KEY (product_id) REFERENCES public.products(id) ON DELETE CASCADE; + + +-- +-- Name: product_stickers product_stickers_product_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.product_stickers + ADD CONSTRAINT 
product_stickers_product_id_fkey FOREIGN KEY (product_id) REFERENCES public.products(id) ON DELETE CASCADE; + + +-- +-- Name: user_labels user_labels_label_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_labels + ADD CONSTRAINT user_labels_label_id_fkey FOREIGN KEY (label_id) REFERENCES public.ghost_labels(id) ON DELETE CASCADE; + + +-- +-- Name: user_labels user_labels_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_labels + ADD CONSTRAINT user_labels_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + + +-- +-- Name: user_newsletters user_newsletters_newsletter_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_newsletters + ADD CONSTRAINT user_newsletters_newsletter_id_fkey FOREIGN KEY (newsletter_id) REFERENCES public.ghost_newsletters(id) ON DELETE CASCADE; + + +-- +-- Name: user_newsletters user_newsletters_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_newsletters + ADD CONSTRAINT user_newsletters_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + + +-- +-- PostgreSQL database dump complete +-- + + diff --git a/shared/.gitignore b/shared/.gitignore new file mode 100644 index 0000000..7a60b85 --- /dev/null +++ b/shared/.gitignore @@ -0,0 +1,2 @@ +__pycache__/ +*.pyc diff --git a/shared/README.md b/shared/README.md new file mode 100644 index 0000000..8de7495 --- /dev/null +++ b/shared/README.md @@ -0,0 +1,91 @@ +# Shared + +Shared infrastructure, models, contracts, services, and templates used by all five Rose Ash microservices (blog, market, cart, events, federation). Included as a git submodule in each app. 
+ +## Structure + +``` +shared/ + db/ + base.py # SQLAlchemy declarative Base + session.py # Async session factory (get_session, register_db) + models/ # Canonical domain models + user.py # User + magic_link.py # MagicLink (auth tokens) + (domain_event.py removed — table dropped, see migration n4l2i8j0k1) + kv.py # KeyValue (key-value store) + menu_item.py # MenuItem (deprecated — use MenuNode) + menu_node.py # MenuNode (navigation tree) + container_relation.py # ContainerRelation (parent-child content) + ghost_membership_entities.py # GhostNewsletter, UserNewsletter + federation.py # ActorProfile, APActivity, APFollower, APFollowing, + # RemoteActor, APRemotePost, APLocalPost, + # APInteraction, APNotification, APAnchor, IPFSPin + contracts/ + dtos.py # Frozen dataclasses for cross-domain data transfer + protocols.py # Service protocols (Blog, Calendar, Market, Cart, Federation) + widgets.py # Widget types (NavWidget, CardWidget, AccountPageWidget) + services/ + registry.py # Typed singleton: services.blog, .calendar, .market, .cart, .federation + blog_impl.py # SqlBlogService + calendar_impl.py # SqlCalendarService + market_impl.py # SqlMarketService + cart_impl.py # SqlCartService + federation_impl.py # SqlFederationService + federation_publish.py # try_publish() — inline AP publication helper + stubs.py # No-op stubs for absent domains + navigation.py # get_navigation_tree() + relationships.py # attach_child, get_children, detach_child + widget_registry.py # Widget registry singleton + widgets/ # Per-domain widget registration + infrastructure/ + factory.py # create_base_app() — Quart app factory + cart_identity.py # current_cart_identity() (user_id or session_id) + cart_loader.py # Cart data loader for context processors + context.py # Jinja2 context processors + jinja_setup.py # Jinja2 template environment setup + urls.py # URL helpers (blog_url, market_url, etc.) 
+ user_loader.py # Load current user from session + http_utils.py # HTTP utility functions + events/ + bus.py # emit_activity(), register_activity_handler() + processor.py # EventProcessor (polls ap_activities, runs handlers) + handlers/ # Shared activity handlers + container_handlers.py # Navigation rebuild on attach/detach + login_handlers.py # Cart/entry adoption on login + order_handlers.py # Order lifecycle events + ap_delivery_handler.py # AP activity delivery to follower inboxes (wildcard) + utils/ + __init__.py + calendar_helpers.py # Calendar period/entry utilities + http_signatures.py # RSA keypair generation, HTTP signature signing/verification + ipfs_client.py # Async IPFS client (add_bytes, add_json, pin_cid) + anchoring.py # Merkle trees + OpenTimestamps Bitcoin anchoring + webfinger.py # WebFinger actor resolution + browser/ + app/ # Middleware, CSRF, errors, Redis caching, authz, filters + templates/ # ~300 Jinja2 templates shared across all apps + containers.py # ContainerType, container_filter, content_filter helpers + config.py # YAML config loader + log_config/setup.py # Logging configuration (JSON formatter) + static/ # Shared static assets (CSS, JS, images, FontAwesome) + editor/ # Koenig (Ghost) rich text editor build + alembic/ # Database migrations +``` + +## Key Patterns + +- **App factory:** All apps call `create_base_app()` which sets up DB sessions, CSRF, error handling, event processing, logging, widget registration, and domain service wiring. +- **Service contracts:** Cross-domain communication via typed Protocols + frozen DTO dataclasses. Apps call `services.calendar.method()`, never import models from other domains. +- **Service registry:** Typed singleton (`services.blog`, `.calendar`, `.market`, `.cart`, `.federation`). Apps wire their own domain + stubs for others via `register_domain_services()`. +- **Activity bus:** `emit_activity()` writes to `ap_activities` table in the caller's transaction. 
`EventProcessor` polls pending activities and dispatches to registered handlers. Internal events use `visibility="internal"`; federation activities use `visibility="public"` and are delivered to follower inboxes by the wildcard delivery handler. +- **Widget registry:** Domain services register widgets (nav, card, account); templates consume via `widgets.container_nav`, `widgets.container_cards`. +- **Cart identity:** `current_cart_identity()` returns `{"user_id": int|None, "session_id": str|None}` from the request session. + +## Alembic Migrations + +All apps share one PostgreSQL database. Migrations are managed here and run from the blog app's entrypoint (other apps skip migrations on startup). + +```bash +alembic -c shared/alembic.ini upgrade head +``` diff --git a/shared/__init__.py b/shared/__init__.py new file mode 100644 index 0000000..f01aafe --- /dev/null +++ b/shared/__init__.py @@ -0,0 +1 @@ +# shared package — infrastructure, models, contracts, and services diff --git a/shared/alembic.ini b/shared/alembic.ini new file mode 100644 index 0000000..a04e071 --- /dev/null +++ b/shared/alembic.ini @@ -0,0 +1,35 @@ +[alembic] +script_location = alembic +sqlalchemy.url = + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s diff --git a/shared/alembic/env.py b/shared/alembic/env.py new file mode 100644 index 0000000..f43a95d --- /dev/null +++ b/shared/alembic/env.py @@ -0,0 +1,69 @@ +from __future__ import annotations +import os, sys +from logging.config import fileConfig +from alembic import context +from sqlalchemy import engine_from_config, 
pool + +config = context.config + +if config.config_file_name is not None: + try: + fileConfig(config.config_file_name) + except Exception: + pass + +# Add project root so all app model packages are importable +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))) + +from shared.db.base import Base + +# Import ALL models so Base.metadata sees every table +import shared.models # noqa: F401 User, KV, MagicLink, MenuItem, Ghost* +for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models"): + try: + __import__(_mod) + except ImportError: + pass # OK in Docker — only needed for autogenerate + +target_metadata = Base.metadata + +def _get_url() -> str: + url = os.getenv( + "ALEMBIC_DATABASE_URL", + os.getenv("DATABASE_URL", config.get_main_option("sqlalchemy.url") or "") + ) + print(url) + return url + +def run_migrations_offline() -> None: + url = _get_url() + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + compare_type=True, + ) + with context.begin_transaction(): + context.run_migrations() + +def run_migrations_online() -> None: + url = _get_url() + if url: + config.set_main_option("sqlalchemy.url", url) + + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata, compare_type=True) + with context.begin_transaction(): + context.run_migrations() + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/shared/alembic/script.py.mako b/shared/alembic/script.py.mako new file mode 100644 index 0000000..31bee0b --- /dev/null +++ b/shared/alembic/script.py.mako @@ -0,0 +1,24 @@ +<%text> +# Alembic migration script template + +"""empty message + +Revision ID: ${up_revision} 
+Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/shared/alembic/versions/0001_initial_schem.py b/shared/alembic/versions/0001_initial_schem.py new file mode 100644 index 0000000..b131310 --- /dev/null +++ b/shared/alembic/versions/0001_initial_schem.py @@ -0,0 +1,33 @@ +"""Initial database schema from schema.sql""" + +from alembic import op +import sqlalchemy as sa +import pathlib + +# revision identifiers, used by Alembic +revision = '0001_initial_schema' +down_revision = None +branch_labels = None +depends_on = None + +def upgrade(): + return + schema_path = pathlib.Path(__file__).parent.parent.parent / "schema.sql" + with open(schema_path, encoding="utf-8") as f: + sql = f.read() + conn = op.get_bind() + conn.execute(sa.text(sql)) + +def downgrade(): + return + # Drop all user-defined tables in the 'public' schema + conn = op.get_bind() + conn.execute(sa.text(""" + DO $$ DECLARE + r RECORD; + BEGIN + FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = 'public') LOOP + EXECUTE 'DROP TABLE IF EXISTS public.' || quote_ident(r.tablename) || ' CASCADE'; + END LOOP; + END $$; + """)) \ No newline at end of file diff --git a/shared/alembic/versions/0002_add_cart_items.py b/shared/alembic/versions/0002_add_cart_items.py new file mode 100644 index 0000000..ecae098 --- /dev/null +++ b/shared/alembic/versions/0002_add_cart_items.py @@ -0,0 +1,78 @@ +"""Add cart_items table for shopping cart""" + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = "0002_add_cart_items" +down_revision = "0001_initial_schema" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "cart_items", + sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True), + + # Either a logged-in user *or* an anonymous session_id + sa.Column( + "user_id", + sa.Integer(), + sa.ForeignKey("users.id", ondelete="CASCADE"), + nullable=True, + ), + sa.Column("session_id", sa.String(length=128), nullable=True), + + # IMPORTANT: reference products.id (PK), not slug + sa.Column( + "product_id", + sa.Integer(), + sa.ForeignKey("products.id", ondelete="CASCADE"), + nullable=False, + ), + + sa.Column( + "quantity", + sa.Integer(), + nullable=False, + server_default="1", + ), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.text("now()"), + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.text("now()"), + ), + sa.Column( + "deleted_at", + sa.DateTime(timezone=True), + nullable=True, + ), + ) + + # Indexes to speed up cart lookups + op.create_index( + "ix_cart_items_user_product", + "cart_items", + ["user_id", "product_id"], + unique=False, + ) + op.create_index( + "ix_cart_items_session_product", + "cart_items", + ["session_id", "product_id"], + unique=False, + ) + + +def downgrade() -> None: + op.drop_index("ix_cart_items_session_product", table_name="cart_items") + op.drop_index("ix_cart_items_user_product", table_name="cart_items") + op.drop_table("cart_items") diff --git a/shared/alembic/versions/0003_add_orders.py b/shared/alembic/versions/0003_add_orders.py new file mode 100644 index 0000000..4387219 --- /dev/null +++ b/shared/alembic/versions/0003_add_orders.py @@ -0,0 +1,118 @@ +"""Add orders and order_items tables for checkout""" + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = "0003_add_orders" +down_revision = "0002_add_cart_items" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "orders", + sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True), + sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id"), nullable=True), + sa.Column("session_id", sa.String(length=64), nullable=True), + + sa.Column( + "status", + sa.String(length=32), + nullable=False, + server_default="pending", + ), + sa.Column( + "currency", + sa.String(length=16), + nullable=False, + server_default="GBP", + ), + sa.Column( + "total_amount", + sa.Numeric(12, 2), + nullable=False, + ), + + # SumUp integration fields + sa.Column("sumup_checkout_id", sa.String(length=128), nullable=True), + sa.Column("sumup_status", sa.String(length=32), nullable=True), + sa.Column("sumup_hosted_url", sa.Text(), nullable=True), + + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + ) + + # Indexes to match model hints (session_id + sumup_checkout_id index=True) + op.create_index( + "ix_orders_session_id", + "orders", + ["session_id"], + unique=False, + ) + op.create_index( + "ix_orders_sumup_checkout_id", + "orders", + ["sumup_checkout_id"], + unique=False, + ) + + op.create_table( + "order_items", + sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True), + sa.Column( + "order_id", + sa.Integer(), + sa.ForeignKey("orders.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "product_id", + sa.Integer(), + sa.ForeignKey("products.id"), + nullable=False, + ), + sa.Column("product_title", sa.String(length=512), nullable=True), + + sa.Column( + "quantity", + sa.Integer(), + nullable=False, + server_default="1", + ), + sa.Column( + "unit_price", + sa.Numeric(12, 2), + nullable=False, + ), + sa.Column( + "currency", + 
sa.String(length=16), + nullable=False, + server_default="GBP", + ), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.func.now(), + ), + ) + + +def downgrade() -> None: + op.drop_table("order_items") + op.drop_index("ix_orders_sumup_checkout_id", table_name="orders") + op.drop_index("ix_orders_session_id", table_name="orders") + op.drop_table("orders") diff --git a/shared/alembic/versions/0004_add_sumup_reference.py b/shared/alembic/versions/0004_add_sumup_reference.py new file mode 100644 index 0000000..2738cd2 --- /dev/null +++ b/shared/alembic/versions/0004_add_sumup_reference.py @@ -0,0 +1,27 @@ +"""Add sumup_reference to orders""" + +from alembic import op +import sqlalchemy as sa + +revision = "0004_add_sumup_reference" +down_revision = "0003_add_orders" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "orders", + sa.Column("sumup_reference", sa.String(length=255), nullable=True), + ) + op.create_index( + "ix_orders_sumup_reference", + "orders", + ["sumup_reference"], + unique=False, + ) + + +def downgrade() -> None: + op.drop_index("ix_orders_sumup_reference", table_name="orders") + op.drop_column("orders", "sumup_reference") diff --git a/shared/alembic/versions/0005_add_description.py b/shared/alembic/versions/0005_add_description.py new file mode 100644 index 0000000..37e84ed --- /dev/null +++ b/shared/alembic/versions/0005_add_description.py @@ -0,0 +1,27 @@ +"""Add description field to orders""" + +from alembic import op +import sqlalchemy as sa + +revision = "0005_add_description" +down_revision = "0004_add_sumup_reference" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "orders", + sa.Column("description", sa.Text(), nullable=True), + ) + op.create_index( + "ix_orders_description", + "orders", + ["description"], + unique=False, + ) + + +def downgrade() -> None: + op.drop_index("ix_orders_description", table_name="orders") + 
op.drop_column("orders", "description") diff --git a/shared/alembic/versions/0006_update_calendar_entries.py b/shared/alembic/versions/0006_update_calendar_entries.py new file mode 100644 index 0000000..cd6f9bd --- /dev/null +++ b/shared/alembic/versions/0006_update_calendar_entries.py @@ -0,0 +1,28 @@ +from alembic import op +import sqlalchemy as sa + +revision = '0006_update_calendar_entries' +down_revision = '0005_add_description' # use the appropriate previous revision ID +branch_labels = None +depends_on = None + +def upgrade(): + # Add user_id and session_id columns + op.add_column('calendar_entries', sa.Column('user_id', sa.Integer(), nullable=True)) + op.create_foreign_key('fk_calendar_entries_user_id', 'calendar_entries', 'users', ['user_id'], ['id']) + op.add_column('calendar_entries', sa.Column('session_id', sa.String(length=128), nullable=True)) + # Add state and cost columns + op.add_column('calendar_entries', sa.Column('state', sa.String(length=20), nullable=False, server_default='pending')) + op.add_column('calendar_entries', sa.Column('cost', sa.Numeric(10,2), nullable=False, server_default='10')) + # (Optional) Create indexes on the new columns + op.create_index('ix_calendar_entries_user_id', 'calendar_entries', ['user_id']) + op.create_index('ix_calendar_entries_session_id', 'calendar_entries', ['session_id']) + +def downgrade(): + op.drop_index('ix_calendar_entries_session_id', table_name='calendar_entries') + op.drop_index('ix_calendar_entries_user_id', table_name='calendar_entries') + op.drop_column('calendar_entries', 'cost') + op.drop_column('calendar_entries', 'state') + op.drop_column('calendar_entries', 'session_id') + op.drop_constraint('fk_calendar_entries_user_id', 'calendar_entries', type_='foreignkey') + op.drop_column('calendar_entries', 'user_id') diff --git a/shared/alembic/versions/0007_add_oid_entries.py b/shared/alembic/versions/0007_add_oid_entries.py new file mode 100644 index 0000000..be05343 --- /dev/null +++ 
b/shared/alembic/versions/0007_add_oid_entries.py @@ -0,0 +1,50 @@ +from alembic import op +import sqlalchemy as sa + +revision = "0007_add_oid_entries" +down_revision = "0006_update_calendar_entries" +branch_labels = None +depends_on = None + + +def upgrade(): + # Add order_id column + op.add_column( + "calendar_entries", + sa.Column("order_id", sa.Integer(), nullable=True), + ) + op.create_foreign_key( + "fk_calendar_entries_order_id", + "calendar_entries", + "orders", + ["order_id"], + ["id"], + ondelete="SET NULL", + ) + op.create_index( + "ix_calendar_entries_order_id", + "calendar_entries", + ["order_id"], + unique=False, + ) + + # Optional: add an index on state if you want faster queries by state + op.create_index( + "ix_calendar_entries_state", + "calendar_entries", + ["state"], + unique=False, + ) + + +def downgrade(): + # Drop indexes and FK in reverse order + op.drop_index("ix_calendar_entries_state", table_name="calendar_entries") + + op.drop_index("ix_calendar_entries_order_id", table_name="calendar_entries") + op.drop_constraint( + "fk_calendar_entries_order_id", + "calendar_entries", + type_="foreignkey", + ) + op.drop_column("calendar_entries", "order_id") diff --git a/shared/alembic/versions/0008_add_flexible_to_slots.py b/shared/alembic/versions/0008_add_flexible_to_slots.py new file mode 100644 index 0000000..0af0cfe --- /dev/null +++ b/shared/alembic/versions/0008_add_flexible_to_slots.py @@ -0,0 +1,33 @@ +"""add flexible flag to calendar_slots + +Revision ID: 0008_add_flexible_to_calendar_slots +Revises: 0007_add_order_id_to_calendar_entries +Create Date: 2025-12-06 12:34:56.000000 +""" + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = "0008_add_flexible_to_slots" +down_revision = "0007_add_oid_entries" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "calendar_slots", + sa.Column( + "flexible", + sa.Boolean(), + nullable=False, + server_default=sa.false(), # set existing rows to False + ), + ) + # Optional: drop server_default so future inserts must supply a value + op.alter_column("calendar_slots", "flexible", server_default=None) + + +def downgrade() -> None: + op.drop_column("calendar_slots", "flexible") diff --git a/shared/alembic/versions/0009_add_slot_id_to_entries.py b/shared/alembic/versions/0009_add_slot_id_to_entries.py new file mode 100644 index 0000000..32c0de4 --- /dev/null +++ b/shared/alembic/versions/0009_add_slot_id_to_entries.py @@ -0,0 +1,54 @@ +"""add slot_id to calendar_entries + +Revision ID: 0009_add_slot_id_to_entries +Revises: 0008_add_flexible_to_slots +Create Date: 2025-12-06 13:00:00.000000 +""" + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = "0009_add_slot_id_to_entries" +down_revision = "0008_add_flexible_to_slots" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Add slot_id column as nullable initially + op.add_column( + "calendar_entries", + sa.Column( + "slot_id", + sa.Integer(), + nullable=True, + ), + ) + + # Add foreign key constraint + op.create_foreign_key( + "fk_calendar_entries_slot_id_calendar_slots", + "calendar_entries", + "calendar_slots", + ["slot_id"], + ["id"], + ondelete="SET NULL", + ) + + # Add index for better query performance + op.create_index( + "ix_calendar_entries_slot_id", + "calendar_entries", + ["slot_id"], + ) + + +def downgrade() -> None: + op.drop_index("ix_calendar_entries_slot_id", table_name="calendar_entries") + op.drop_constraint( + "fk_calendar_entries_slot_id_calendar_slots", + "calendar_entries", + type_="foreignkey", + ) + op.drop_column("calendar_entries", "slot_id") \ No newline at end of file diff --git a/shared/alembic/versions/0010_add_post_likes.py b/shared/alembic/versions/0010_add_post_likes.py new file mode 100644 index 0000000..17bc15b --- /dev/null +++ b/shared/alembic/versions/0010_add_post_likes.py @@ -0,0 +1,64 @@ +"""Add post_likes table for liking blog posts + +Revision ID: 0010_add_post_likes +Revises: 0009_add_slot_id_to_entries +Create Date: 2025-12-07 13:00:00.000000 +""" + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = "0010_add_post_likes" +down_revision = "0009_add_slot_id_to_entries" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "post_likes", + sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True), + sa.Column( + "user_id", + sa.Integer(), + sa.ForeignKey("users.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "post_id", + sa.Integer(), + sa.ForeignKey("posts.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.text("now()"), + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + server_default=sa.text("now()"), + ), + sa.Column( + "deleted_at", + sa.DateTime(timezone=True), + nullable=True, + ), + ) + + # Index for fast user+post lookups + op.create_index( + "ix_post_likes_user_post", + "post_likes", + ["user_id", "post_id"], + unique=False, + ) + + +def downgrade() -> None: + op.drop_index("ix_post_likes_user_post", table_name="post_likes") + op.drop_table("post_likes") diff --git a/shared/alembic/versions/0011_add_entry_tickets.py b/shared/alembic/versions/0011_add_entry_tickets.py new file mode 100644 index 0000000..4b5936f --- /dev/null +++ b/shared/alembic/versions/0011_add_entry_tickets.py @@ -0,0 +1,43 @@ +"""Add ticket_price and ticket_count to calendar_entries + +Revision ID: 0011_add_entry_tickets +Revises: 0010_add_post_likes +Create Date: 2025-12-07 14:00:00.000000 +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import NUMERIC + +# revision identifiers, used by Alembic. 
+revision = "0011_add_entry_tickets" +down_revision = "0010_add_post_likes" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Add ticket_price column (nullable - NULL means no tickets) + op.add_column( + "calendar_entries", + sa.Column( + "ticket_price", + NUMERIC(10, 2), + nullable=True, + ), + ) + + # Add ticket_count column (nullable - NULL means unlimited) + op.add_column( + "calendar_entries", + sa.Column( + "ticket_count", + sa.Integer(), + nullable=True, + ), + ) + + +def downgrade() -> None: + op.drop_column("calendar_entries", "ticket_count") + op.drop_column("calendar_entries", "ticket_price") diff --git a/shared/alembic/versions/47fc53fc0d2b_add_ticket_types_table.py b/shared/alembic/versions/47fc53fc0d2b_add_ticket_types_table.py new file mode 100644 index 0000000..4c3cd5a --- /dev/null +++ b/shared/alembic/versions/47fc53fc0d2b_add_ticket_types_table.py @@ -0,0 +1,41 @@ + +# Alembic migration script template + +"""add ticket_types table + +Revision ID: 47fc53fc0d2b +Revises: a9f54e4eaf02 +Create Date: 2025-12-08 07:29:11.422435 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = '47fc53fc0d2b' +down_revision = 'a9f54e4eaf02' +branch_labels = None +depends_on = None + +def upgrade() -> None: + op.create_table( + 'ticket_types', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('entry_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('cost', sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column('count', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint(['entry_id'], ['calendar_entries.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_ticket_types_entry_id', 'ticket_types', ['entry_id'], unique=False) + op.create_index('ix_ticket_types_name', 'ticket_types', ['name'], unique=False) + +def downgrade() -> None: + op.drop_index('ix_ticket_types_name', table_name='ticket_types') + op.drop_index('ix_ticket_types_entry_id', table_name='ticket_types') + op.drop_table('ticket_types') diff --git a/shared/alembic/versions/6cb124491c9d_entry_posts.py b/shared/alembic/versions/6cb124491c9d_entry_posts.py new file mode 100644 index 0000000..6062096 --- /dev/null +++ b/shared/alembic/versions/6cb124491c9d_entry_posts.py @@ -0,0 +1,36 @@ + +# Alembic migration script template + +"""Add calendar_entry_posts association table + +Revision ID: 6cb124491c9d +Revises: 0011_add_entry_tickets +Create Date: 2025-12-07 03:40:49.194068 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import TIMESTAMP + +# revision identifiers, used by Alembic. 
+revision = '6cb124491c9d' +down_revision = '0011_add_entry_tickets' +branch_labels = None +depends_on = None + +def upgrade() -> None: + op.create_table( + 'calendar_entry_posts', + sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True), + sa.Column('entry_id', sa.Integer(), sa.ForeignKey('calendar_entries.id', ondelete='CASCADE'), nullable=False), + sa.Column('post_id', sa.Integer(), sa.ForeignKey('posts.id', ondelete='CASCADE'), nullable=False), + sa.Column('created_at', TIMESTAMP(timezone=True), nullable=False, server_default=sa.func.now()), + sa.Column('deleted_at', TIMESTAMP(timezone=True), nullable=True), + ) + op.create_index('ix_entry_posts_entry_id', 'calendar_entry_posts', ['entry_id']) + op.create_index('ix_entry_posts_post_id', 'calendar_entry_posts', ['post_id']) + +def downgrade() -> None: + op.drop_index('ix_entry_posts_post_id', 'calendar_entry_posts') + op.drop_index('ix_entry_posts_entry_id', 'calendar_entry_posts') + op.drop_table('calendar_entry_posts') diff --git a/shared/alembic/versions/a1b2c3d4e5f6_add_page_configs_table.py b/shared/alembic/versions/a1b2c3d4e5f6_add_page_configs_table.py new file mode 100644 index 0000000..9cb858c --- /dev/null +++ b/shared/alembic/versions/a1b2c3d4e5f6_add_page_configs_table.py @@ -0,0 +1,74 @@ +"""add page_configs table + +Revision ID: a1b2c3d4e5f6 +Revises: f6d4a1b2c3e7 +Create Date: 2026-02-10 +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy import text + +revision = 'a1b2c3d4e5f6' +down_revision = 'f6d4a1b2c3e7' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + 'page_configs', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('post_id', sa.Integer(), nullable=False), + sa.Column('features', sa.JSON(), server_default='{}', nullable=False), + sa.Column('sumup_merchant_code', sa.String(64), nullable=True), + sa.Column('sumup_api_key', sa.Text(), nullable=True), + sa.Column('sumup_checkout_prefix', 
sa.String(64), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('post_id'), + ) + + # Backfill: create PageConfig for every existing page + conn = op.get_bind() + + # 1. Pages with calendars -> features={"calendar": true} + conn.execute(text(""" + INSERT INTO page_configs (post_id, features, created_at, updated_at) + SELECT p.id, '{"calendar": true}'::jsonb, now(), now() + FROM posts p + WHERE p.is_page = true + AND p.deleted_at IS NULL + AND EXISTS ( + SELECT 1 FROM calendars c + WHERE c.post_id = p.id AND c.deleted_at IS NULL + ) + """)) + + # 2. Market page (slug='market', is_page=true) -> features={"market": true} + # Only if not already inserted above + conn.execute(text(""" + INSERT INTO page_configs (post_id, features, created_at, updated_at) + SELECT p.id, '{"market": true}'::jsonb, now(), now() + FROM posts p + WHERE p.slug = 'market' + AND p.is_page = true + AND p.deleted_at IS NULL + AND p.id NOT IN (SELECT post_id FROM page_configs) + """)) + + # 3. 
All other pages -> features={} + conn.execute(text(""" + INSERT INTO page_configs (post_id, features, created_at, updated_at) + SELECT p.id, '{}'::jsonb, now(), now() + FROM posts p + WHERE p.is_page = true + AND p.deleted_at IS NULL + AND p.id NOT IN (SELECT post_id FROM page_configs) + """)) + + +def downgrade() -> None: + op.drop_table('page_configs') diff --git a/shared/alembic/versions/a9f54e4eaf02_add_menu_items_table.py b/shared/alembic/versions/a9f54e4eaf02_add_menu_items_table.py new file mode 100644 index 0000000..960c10c --- /dev/null +++ b/shared/alembic/versions/a9f54e4eaf02_add_menu_items_table.py @@ -0,0 +1,37 @@ + +# Alembic migration script template + +"""add menu_items table + +Revision ID: a9f54e4eaf02 +Revises: 6cb124491c9d +Create Date: 2025-12-07 17:38:54.839296 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = 'a9f54e4eaf02' +down_revision = '6cb124491c9d' +branch_labels = None +depends_on = None + +def upgrade() -> None: + op.create_table('menu_items', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('post_id', sa.Integer(), nullable=False), + sa.Column('sort_order', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_menu_items_post_id'), 'menu_items', ['post_id'], unique=False) + op.create_index(op.f('ix_menu_items_sort_order'), 'menu_items', ['sort_order'], unique=False) + +def downgrade() -> None: + op.drop_index(op.f('ix_menu_items_sort_order'), table_name='menu_items') + op.drop_index(op.f('ix_menu_items_post_id'), table_name='menu_items') + 
op.drop_table('menu_items') diff --git a/shared/alembic/versions/b2c3d4e5f6a7_add_market_places_table.py b/shared/alembic/versions/b2c3d4e5f6a7_add_market_places_table.py new file mode 100644 index 0000000..4dbb124 --- /dev/null +++ b/shared/alembic/versions/b2c3d4e5f6a7_add_market_places_table.py @@ -0,0 +1,97 @@ +"""add market_places table and nav_tops.market_id + +Revision ID: b2c3d4e5f6a7 +Revises: a1b2c3d4e5f6 +Create Date: 2026-02-10 +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy import text + +revision = 'b2c3d4e5f6a7' +down_revision = 'a1b2c3d4e5f6' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # 1. Create market_places table + op.create_table( + 'market_places', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('post_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(255), nullable=False), + sa.Column('slug', sa.String(255), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index('ix_market_places_post_id', 'market_places', ['post_id']) + op.create_index( + 'ux_market_places_slug_active', + 'market_places', + [sa.text('lower(slug)')], + unique=True, + postgresql_where=sa.text('deleted_at IS NULL'), + ) + + # 2. Add market_id column to nav_tops + op.add_column( + 'nav_tops', + sa.Column('market_id', sa.Integer(), nullable=True), + ) + op.create_foreign_key( + 'fk_nav_tops_market_id', + 'nav_tops', + 'market_places', + ['market_id'], + ['id'], + ondelete='SET NULL', + ) + op.create_index('ix_nav_tops_market_id', 'nav_tops', ['market_id']) + + # 3. 
Backfill: create default MarketPlace for the 'market' page + conn = op.get_bind() + + # Find the market page + result = conn.execute(text(""" + SELECT id FROM posts + WHERE slug = 'market' AND is_page = true AND deleted_at IS NULL + LIMIT 1 + """)) + row = result.fetchone() + if row: + post_id = row[0] + + # Insert the default market + conn.execute(text(""" + INSERT INTO market_places (post_id, name, slug, created_at, updated_at) + VALUES (:post_id, 'Suma Market', 'suma-market', now(), now()) + """), {"post_id": post_id}) + + # Get the new market_places id + market_row = conn.execute(text(""" + SELECT id FROM market_places + WHERE slug = 'suma-market' AND deleted_at IS NULL + LIMIT 1 + """)).fetchone() + + if market_row: + market_id = market_row[0] + # Assign all active nav_tops to this market + conn.execute(text(""" + UPDATE nav_tops SET market_id = :market_id + WHERE deleted_at IS NULL + """), {"market_id": market_id}) + + +def downgrade() -> None: + op.drop_index('ix_nav_tops_market_id', table_name='nav_tops') + op.drop_constraint('fk_nav_tops_market_id', 'nav_tops', type_='foreignkey') + op.drop_column('nav_tops', 'market_id') + op.drop_index('ux_market_places_slug_active', table_name='market_places') + op.drop_index('ix_market_places_post_id', table_name='market_places') + op.drop_table('market_places') diff --git a/shared/alembic/versions/c3a1f7b9d4e5_add_snippets_table.py b/shared/alembic/versions/c3a1f7b9d4e5_add_snippets_table.py new file mode 100644 index 0000000..c17c08c --- /dev/null +++ b/shared/alembic/versions/c3a1f7b9d4e5_add_snippets_table.py @@ -0,0 +1,35 @@ +"""add snippets table + +Revision ID: c3a1f7b9d4e5 +Revises: 47fc53fc0d2b +Create Date: 2026-02-07 +""" +from alembic import op +import sqlalchemy as sa + +revision = 'c3a1f7b9d4e5' +down_revision = '47fc53fc0d2b' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + 'snippets', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + 
sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('value', sa.Text(), nullable=False), + sa.Column('visibility', sa.String(length=20), server_default='private', nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('user_id', 'name', name='uq_snippets_user_name'), + ) + op.create_index('ix_snippets_visibility', 'snippets', ['visibility']) + + +def downgrade() -> None: + op.drop_index('ix_snippets_visibility', table_name='snippets') + op.drop_table('snippets') diff --git a/shared/alembic/versions/c3d4e5f6a7b8_add_page_tracking_to_orders.py b/shared/alembic/versions/c3d4e5f6a7b8_add_page_tracking_to_orders.py new file mode 100644 index 0000000..9547d38 --- /dev/null +++ b/shared/alembic/versions/c3d4e5f6a7b8_add_page_tracking_to_orders.py @@ -0,0 +1,55 @@ +"""add page_config_id to orders, market_place_id to cart_items + +Revision ID: c3d4e5f6a7b8 +Revises: b2c3d4e5f6a7 +Create Date: 2026-02-10 +""" +from alembic import op +import sqlalchemy as sa + +revision = 'c3d4e5f6a7b8' +down_revision = 'b2c3d4e5f6a7' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # 1. Add market_place_id to cart_items + op.add_column( + 'cart_items', + sa.Column('market_place_id', sa.Integer(), nullable=True), + ) + op.create_foreign_key( + 'fk_cart_items_market_place_id', + 'cart_items', + 'market_places', + ['market_place_id'], + ['id'], + ondelete='SET NULL', + ) + op.create_index('ix_cart_items_market_place_id', 'cart_items', ['market_place_id']) + + # 2. 
Add page_config_id to orders + op.add_column( + 'orders', + sa.Column('page_config_id', sa.Integer(), nullable=True), + ) + op.create_foreign_key( + 'fk_orders_page_config_id', + 'orders', + 'page_configs', + ['page_config_id'], + ['id'], + ondelete='SET NULL', + ) + op.create_index('ix_orders_page_config_id', 'orders', ['page_config_id']) + + +def downgrade() -> None: + op.drop_index('ix_orders_page_config_id', table_name='orders') + op.drop_constraint('fk_orders_page_config_id', 'orders', type_='foreignkey') + op.drop_column('orders', 'page_config_id') + + op.drop_index('ix_cart_items_market_place_id', table_name='cart_items') + op.drop_constraint('fk_cart_items_market_place_id', 'cart_items', type_='foreignkey') + op.drop_column('cart_items', 'market_place_id') diff --git a/shared/alembic/versions/d4b2e8f1a3c7_add_post_user_id_and_author_email.py b/shared/alembic/versions/d4b2e8f1a3c7_add_post_user_id_and_author_email.py new file mode 100644 index 0000000..8d6f122 --- /dev/null +++ b/shared/alembic/versions/d4b2e8f1a3c7_add_post_user_id_and_author_email.py @@ -0,0 +1,45 @@ +"""add post user_id, author email, publish_requested + +Revision ID: d4b2e8f1a3c7 +Revises: c3a1f7b9d4e5 +Create Date: 2026-02-08 +""" +from alembic import op +import sqlalchemy as sa + +revision = 'd4b2e8f1a3c7' +down_revision = 'c3a1f7b9d4e5' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Add author.email + op.add_column('authors', sa.Column('email', sa.String(255), nullable=True)) + + # Add post.user_id FK + op.add_column('posts', sa.Column('user_id', sa.Integer(), nullable=True)) + op.create_foreign_key('fk_posts_user_id', 'posts', 'users', ['user_id'], ['id'], ondelete='SET NULL') + op.create_index('ix_posts_user_id', 'posts', ['user_id']) + + # Add post.publish_requested + op.add_column('posts', sa.Column('publish_requested', sa.Boolean(), server_default='false', nullable=False)) + + # Backfill: match posts to users via primary_author email + op.execute(""" + 
UPDATE posts + SET user_id = u.id + FROM authors a + JOIN users u ON lower(a.email) = lower(u.email) + WHERE posts.primary_author_id = a.id + AND posts.user_id IS NULL + AND a.email IS NOT NULL + """) + + +def downgrade() -> None: + op.drop_column('posts', 'publish_requested') + op.drop_index('ix_posts_user_id', table_name='posts') + op.drop_constraint('fk_posts_user_id', 'posts', type_='foreignkey') + op.drop_column('posts', 'user_id') + op.drop_column('authors', 'email') diff --git a/shared/alembic/versions/e5c3f9a2b1d6_add_tag_groups_and_tag_group_tags.py b/shared/alembic/versions/e5c3f9a2b1d6_add_tag_groups_and_tag_group_tags.py new file mode 100644 index 0000000..5e21e22 --- /dev/null +++ b/shared/alembic/versions/e5c3f9a2b1d6_add_tag_groups_and_tag_group_tags.py @@ -0,0 +1,45 @@ +"""add tag_groups and tag_group_tags + +Revision ID: e5c3f9a2b1d6 +Revises: d4b2e8f1a3c7 +Create Date: 2026-02-08 +""" +from alembic import op +import sqlalchemy as sa + +revision = 'e5c3f9a2b1d6' +down_revision = 'd4b2e8f1a3c7' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + 'tag_groups', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('slug', sa.String(length=191), nullable=False), + sa.Column('feature_image', sa.Text(), nullable=True), + sa.Column('colour', sa.String(length=32), nullable=True), + sa.Column('sort_order', sa.Integer(), nullable=False, server_default='0'), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('slug'), + ) + + op.create_table( + 'tag_group_tags', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('tag_group_id', sa.Integer(), nullable=False), + sa.Column('tag_id', sa.Integer(), 
nullable=False), + sa.ForeignKeyConstraint(['tag_group_id'], ['tag_groups.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('tag_group_id', 'tag_id', name='uq_tag_group_tag'), + ) + + +def downgrade() -> None: + op.drop_table('tag_group_tags') + op.drop_table('tag_groups') diff --git a/shared/alembic/versions/f6d4a0b2c3e7_add_domain_events_table.py b/shared/alembic/versions/f6d4a0b2c3e7_add_domain_events_table.py new file mode 100644 index 0000000..edd0ffb --- /dev/null +++ b/shared/alembic/versions/f6d4a0b2c3e7_add_domain_events_table.py @@ -0,0 +1,40 @@ +"""add domain_events table + +Revision ID: f6d4a0b2c3e7 +Revises: e5c3f9a2b1d6 +Create Date: 2026-02-11 +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = 'f6d4a0b2c3e7' +down_revision = 'e5c3f9a2b1d6' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + 'domain_events', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('event_type', sa.String(128), nullable=False), + sa.Column('aggregate_type', sa.String(64), nullable=False), + sa.Column('aggregate_id', sa.Integer(), nullable=False), + sa.Column('payload', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('state', sa.String(20), server_default='pending', nullable=False), + sa.Column('attempts', sa.Integer(), server_default='0', nullable=False), + sa.Column('max_attempts', sa.Integer(), server_default='5', nullable=False), + sa.Column('last_error', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('processed_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index('ix_domain_events_event_type', 'domain_events', ['event_type']) + op.create_index('ix_domain_events_state', 
'domain_events', ['state']) + + +def downgrade() -> None: + op.drop_index('ix_domain_events_state', table_name='domain_events') + op.drop_index('ix_domain_events_event_type', table_name='domain_events') + op.drop_table('domain_events') diff --git a/shared/alembic/versions/f6d4a1b2c3e7_add_tickets_table.py b/shared/alembic/versions/f6d4a1b2c3e7_add_tickets_table.py new file mode 100644 index 0000000..06a0f76 --- /dev/null +++ b/shared/alembic/versions/f6d4a1b2c3e7_add_tickets_table.py @@ -0,0 +1,47 @@ +"""add tickets table + +Revision ID: f6d4a1b2c3e7 +Revises: e5c3f9a2b1d6 +Create Date: 2026-02-09 +""" +from alembic import op +import sqlalchemy as sa + +revision = 'f6d4a1b2c3e7' +down_revision = 'e5c3f9a2b1d6' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + 'tickets', + sa.Column('id', sa.Integer(), primary_key=True), + sa.Column('entry_id', sa.Integer(), sa.ForeignKey('calendar_entries.id', ondelete='CASCADE'), nullable=False), + sa.Column('ticket_type_id', sa.Integer(), sa.ForeignKey('ticket_types.id', ondelete='SET NULL'), nullable=True), + sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True), + sa.Column('session_id', sa.String(64), nullable=True), + sa.Column('order_id', sa.Integer(), sa.ForeignKey('orders.id', ondelete='SET NULL'), nullable=True), + sa.Column('code', sa.String(64), unique=True, nullable=False), + sa.Column('state', sa.String(20), nullable=False, server_default=sa.text("'reserved'")), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()), + sa.Column('checked_in_at', sa.DateTime(timezone=True), nullable=True), + ) + op.create_index('ix_tickets_entry_id', 'tickets', ['entry_id']) + op.create_index('ix_tickets_ticket_type_id', 'tickets', ['ticket_type_id']) + op.create_index('ix_tickets_user_id', 'tickets', ['user_id']) + op.create_index('ix_tickets_session_id', 'tickets', ['session_id']) + op.create_index('ix_tickets_order_id', 
'tickets', ['order_id']) + op.create_index('ix_tickets_code', 'tickets', ['code'], unique=True) + op.create_index('ix_tickets_state', 'tickets', ['state']) + + +def downgrade() -> None: + op.drop_index('ix_tickets_state', 'tickets') + op.drop_index('ix_tickets_code', 'tickets') + op.drop_index('ix_tickets_order_id', 'tickets') + op.drop_index('ix_tickets_session_id', 'tickets') + op.drop_index('ix_tickets_user_id', 'tickets') + op.drop_index('ix_tickets_ticket_type_id', 'tickets') + op.drop_index('ix_tickets_entry_id', 'tickets') + op.drop_table('tickets') diff --git a/shared/alembic/versions/g7e5b1c3d4f8_generic_containers.py b/shared/alembic/versions/g7e5b1c3d4f8_generic_containers.py new file mode 100644 index 0000000..7756957 --- /dev/null +++ b/shared/alembic/versions/g7e5b1c3d4f8_generic_containers.py @@ -0,0 +1,115 @@ +"""replace post_id FKs with container_type + container_id + +Revision ID: g7e5b1c3d4f8 +Revises: f6d4a0b2c3e7 +Create Date: 2026-02-11 +""" +from alembic import op +import sqlalchemy as sa + +revision = 'g7e5b1c3d4f8' +down_revision = 'f6d4a0b2c3e7' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # --- calendars: post_id → container_type + container_id --- + op.add_column('calendars', sa.Column('container_type', sa.String(32), nullable=True)) + op.add_column('calendars', sa.Column('container_id', sa.Integer(), nullable=True)) + op.execute("UPDATE calendars SET container_type = 'page', container_id = post_id") + op.alter_column('calendars', 'container_type', nullable=False, server_default=sa.text("'page'")) + op.alter_column('calendars', 'container_id', nullable=False) + op.drop_index('ix_calendars_post_id', table_name='calendars') + op.drop_index('ux_calendars_post_slug_active', table_name='calendars') + op.drop_constraint('calendars_post_id_fkey', 'calendars', type_='foreignkey') + op.drop_column('calendars', 'post_id') + op.create_index('ix_calendars_container', 'calendars', ['container_type', 'container_id']) + 
op.create_index( + 'ux_calendars_container_slug_active', + 'calendars', + ['container_type', 'container_id', sa.text('lower(slug)')], + unique=True, + postgresql_where=sa.text('deleted_at IS NULL'), + ) + + # --- market_places: post_id → container_type + container_id --- + op.add_column('market_places', sa.Column('container_type', sa.String(32), nullable=True)) + op.add_column('market_places', sa.Column('container_id', sa.Integer(), nullable=True)) + op.execute("UPDATE market_places SET container_type = 'page', container_id = post_id") + op.alter_column('market_places', 'container_type', nullable=False, server_default=sa.text("'page'")) + op.alter_column('market_places', 'container_id', nullable=False) + op.drop_index('ix_market_places_post_id', table_name='market_places') + op.drop_constraint('market_places_post_id_fkey', 'market_places', type_='foreignkey') + op.drop_column('market_places', 'post_id') + op.create_index('ix_market_places_container', 'market_places', ['container_type', 'container_id']) + + # --- page_configs: post_id → container_type + container_id --- + op.add_column('page_configs', sa.Column('container_type', sa.String(32), nullable=True)) + op.add_column('page_configs', sa.Column('container_id', sa.Integer(), nullable=True)) + op.execute("UPDATE page_configs SET container_type = 'page', container_id = post_id") + op.alter_column('page_configs', 'container_type', nullable=False, server_default=sa.text("'page'")) + op.alter_column('page_configs', 'container_id', nullable=False) + op.drop_constraint('page_configs_post_id_fkey', 'page_configs', type_='foreignkey') + op.drop_column('page_configs', 'post_id') + op.create_index('ix_page_configs_container', 'page_configs', ['container_type', 'container_id']) + + # --- calendar_entry_posts: post_id → content_type + content_id --- + op.add_column('calendar_entry_posts', sa.Column('content_type', sa.String(32), nullable=True)) + op.add_column('calendar_entry_posts', sa.Column('content_id', sa.Integer(), 
nullable=True)) + op.execute("UPDATE calendar_entry_posts SET content_type = 'post', content_id = post_id") + op.alter_column('calendar_entry_posts', 'content_type', nullable=False, server_default=sa.text("'post'")) + op.alter_column('calendar_entry_posts', 'content_id', nullable=False) + op.drop_index('ix_entry_posts_post_id', table_name='calendar_entry_posts') + op.drop_constraint('calendar_entry_posts_post_id_fkey', 'calendar_entry_posts', type_='foreignkey') + op.drop_column('calendar_entry_posts', 'post_id') + op.create_index('ix_entry_posts_content', 'calendar_entry_posts', ['content_type', 'content_id']) + + +def downgrade() -> None: + # --- calendar_entry_posts: restore post_id --- + op.add_column('calendar_entry_posts', sa.Column('post_id', sa.Integer(), nullable=True)) + op.execute("UPDATE calendar_entry_posts SET post_id = content_id WHERE content_type = 'post'") + op.alter_column('calendar_entry_posts', 'post_id', nullable=False) + op.create_foreign_key('calendar_entry_posts_post_id_fkey', 'calendar_entry_posts', 'posts', ['post_id'], ['id'], ondelete='CASCADE') + op.create_index('ix_entry_posts_post_id', 'calendar_entry_posts', ['post_id']) + op.drop_index('ix_entry_posts_content', table_name='calendar_entry_posts') + op.drop_column('calendar_entry_posts', 'content_id') + op.drop_column('calendar_entry_posts', 'content_type') + + # --- page_configs: restore post_id --- + op.add_column('page_configs', sa.Column('post_id', sa.Integer(), nullable=True)) + op.execute("UPDATE page_configs SET post_id = container_id WHERE container_type = 'page'") + op.alter_column('page_configs', 'post_id', nullable=False) + op.create_foreign_key('page_configs_post_id_fkey', 'page_configs', 'posts', ['post_id'], ['id'], ondelete='CASCADE') + op.drop_index('ix_page_configs_container', table_name='page_configs') + op.drop_column('page_configs', 'container_id') + op.drop_column('page_configs', 'container_type') + + # --- market_places: restore post_id --- + 
op.add_column('market_places', sa.Column('post_id', sa.Integer(), nullable=True)) + op.execute("UPDATE market_places SET post_id = container_id WHERE container_type = 'page'") + op.alter_column('market_places', 'post_id', nullable=False) + op.create_foreign_key('market_places_post_id_fkey', 'market_places', 'posts', ['post_id'], ['id'], ondelete='CASCADE') + op.create_index('ix_market_places_post_id', 'market_places', ['post_id']) + op.drop_index('ix_market_places_container', table_name='market_places') + op.drop_column('market_places', 'container_id') + op.drop_column('market_places', 'container_type') + + # --- calendars: restore post_id --- + op.add_column('calendars', sa.Column('post_id', sa.Integer(), nullable=True)) + op.execute("UPDATE calendars SET post_id = container_id WHERE container_type = 'page'") + op.alter_column('calendars', 'post_id', nullable=False) + op.create_foreign_key('calendars_post_id_fkey', 'calendars', 'posts', ['post_id'], ['id'], ondelete='CASCADE') + op.create_index('ix_calendars_post_id', 'calendars', ['post_id']) + op.create_index( + 'ux_calendars_post_slug_active', + 'calendars', + ['post_id', sa.text('lower(slug)')], + unique=True, + postgresql_where=sa.text('deleted_at IS NULL'), + ) + op.drop_index('ux_calendars_container_slug_active', table_name='calendars') + op.drop_index('ix_calendars_container', table_name='calendars') + op.drop_column('calendars', 'container_id') + op.drop_column('calendars', 'container_type') diff --git a/shared/alembic/versions/h8f6c2d4e5a9_merge_heads.py b/shared/alembic/versions/h8f6c2d4e5a9_merge_heads.py new file mode 100644 index 0000000..769134d --- /dev/null +++ b/shared/alembic/versions/h8f6c2d4e5a9_merge_heads.py @@ -0,0 +1,23 @@ +"""merge heads + +Revision ID: h8f6c2d4e5a9 +Revises: c3d4e5f6a7b8, g7e5b1c3d4f8 +Create Date: 2026-02-11 00:00:00.000000 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = 'h8f6c2d4e5a9' +down_revision = ('c3d4e5f6a7b8', 'g7e5b1c3d4f8') +branch_labels = None +depends_on = None + + +def upgrade() -> None: + pass + + +def downgrade() -> None: + pass diff --git a/shared/alembic/versions/i9g7d3e5f6_add_glue_layer_tables.py b/shared/alembic/versions/i9g7d3e5f6_add_glue_layer_tables.py new file mode 100644 index 0000000..781f613 --- /dev/null +++ b/shared/alembic/versions/i9g7d3e5f6_add_glue_layer_tables.py @@ -0,0 +1,98 @@ +"""add glue layer tables (container_relations + menu_nodes) + +Revision ID: i9g7d3e5f6 +Revises: h8f6c2d4e5a9 +Create Date: 2026-02-11 + +""" +from alembic import op +import sqlalchemy as sa + +revision = 'i9g7d3e5f6' +down_revision = 'h8f6c2d4e5a9' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # --- container_relations --- + op.create_table( + 'container_relations', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('parent_type', sa.String(32), nullable=False), + sa.Column('parent_id', sa.Integer(), nullable=False), + sa.Column('child_type', sa.String(32), nullable=False), + sa.Column('child_id', sa.Integer(), nullable=False), + sa.Column('sort_order', sa.Integer(), nullable=False, server_default='0'), + sa.Column('label', sa.String(255), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint( + 'parent_type', 'parent_id', 'child_type', 'child_id', + name='uq_container_relations_parent_child', + ), + ) + op.create_index('ix_container_relations_parent', 'container_relations', ['parent_type', 'parent_id']) + op.create_index('ix_container_relations_child', 'container_relations', ['child_type', 'child_id']) + + # --- menu_nodes --- + op.create_table( + 'menu_nodes', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('container_type', 
sa.String(32), nullable=False), + sa.Column('container_id', sa.Integer(), nullable=False), + sa.Column('parent_id', sa.Integer(), nullable=True), + sa.Column('sort_order', sa.Integer(), nullable=False, server_default='0'), + sa.Column('depth', sa.Integer(), nullable=False, server_default='0'), + sa.Column('label', sa.String(255), nullable=False), + sa.Column('slug', sa.String(255), nullable=True), + sa.Column('href', sa.String(1024), nullable=True), + sa.Column('icon', sa.String(64), nullable=True), + sa.Column('feature_image', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.ForeignKeyConstraint(['parent_id'], ['menu_nodes.id'], ondelete='SET NULL'), + ) + op.create_index('ix_menu_nodes_container', 'menu_nodes', ['container_type', 'container_id']) + op.create_index('ix_menu_nodes_parent_id', 'menu_nodes', ['parent_id']) + + # --- Backfill container_relations from existing container-pattern tables --- + op.execute(""" + INSERT INTO container_relations (parent_type, parent_id, child_type, child_id, sort_order) + SELECT 'page', container_id, 'calendar', id, 0 + FROM calendars + WHERE deleted_at IS NULL AND container_type = 'page' + """) + op.execute(""" + INSERT INTO container_relations (parent_type, parent_id, child_type, child_id, sort_order) + SELECT 'page', container_id, 'market', id, 0 + FROM market_places + WHERE deleted_at IS NULL AND container_type = 'page' + """) + op.execute(""" + INSERT INTO container_relations (parent_type, parent_id, child_type, child_id, sort_order) + SELECT 'page', container_id, 'page_config', id, 0 + FROM page_configs + WHERE deleted_at IS NULL AND container_type = 'page' + """) + + # --- Backfill menu_nodes from existing menu_items + posts --- + 
op.execute(""" + INSERT INTO menu_nodes (container_type, container_id, label, slug, feature_image, sort_order) + SELECT 'page', mi.post_id, p.title, p.slug, p.feature_image, mi.sort_order + FROM menu_items mi + JOIN posts p ON mi.post_id = p.id + WHERE mi.deleted_at IS NULL + """) + + +def downgrade() -> None: + op.drop_index('ix_menu_nodes_parent_id', table_name='menu_nodes') + op.drop_index('ix_menu_nodes_container', table_name='menu_nodes') + op.drop_table('menu_nodes') + op.drop_index('ix_container_relations_child', table_name='container_relations') + op.drop_index('ix_container_relations_parent', table_name='container_relations') + op.drop_table('container_relations') diff --git a/shared/alembic/versions/j0h8e4f6g7_drop_cross_domain_fks.py b/shared/alembic/versions/j0h8e4f6g7_drop_cross_domain_fks.py new file mode 100644 index 0000000..fcbd499 --- /dev/null +++ b/shared/alembic/versions/j0h8e4f6g7_drop_cross_domain_fks.py @@ -0,0 +1,51 @@ +"""drop cross-domain FK constraints (events → cart) + +Merge three existing heads and remove: +- calendar_entries.order_id FK → orders.id +- tickets.order_id FK → orders.id + +Columns are kept as plain integers. 
+ +Revision ID: j0h8e4f6g7 +Revises: c3d4e5f6a7b8, i9g7d3e5f6, g7e5b1c3d4f8 +Create Date: 2026-02-14 +""" +from alembic import op +import sqlalchemy as sa + +revision = 'j0h8e4f6g7' +down_revision = ('c3d4e5f6a7b8', 'i9g7d3e5f6', 'g7e5b1c3d4f8') +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.drop_constraint( + 'fk_calendar_entries_order_id', + 'calendar_entries', + type_='foreignkey', + ) + op.drop_constraint( + 'tickets_order_id_fkey', + 'tickets', + type_='foreignkey', + ) + + +def downgrade() -> None: + op.create_foreign_key( + 'fk_calendar_entries_order_id', + 'calendar_entries', + 'orders', + ['order_id'], + ['id'], + ondelete='SET NULL', + ) + op.create_foreign_key( + 'tickets_order_id_fkey', + 'tickets', + 'orders', + ['order_id'], + ['id'], + ondelete='SET NULL', + ) diff --git a/shared/alembic/versions/k1i9f5g7h8_add_federation_tables.py b/shared/alembic/versions/k1i9f5g7h8_add_federation_tables.py new file mode 100644 index 0000000..78af7f6 --- /dev/null +++ b/shared/alembic/versions/k1i9f5g7h8_add_federation_tables.py @@ -0,0 +1,142 @@ +"""add federation tables + +Revision ID: k1i9f5g7h8 +Revises: j0h8e4f6g7 +Create Date: 2026-02-21 + +Creates: +- ap_actor_profiles — AP identity per user +- ap_activities — local + remote AP activities +- ap_followers — remote followers +- ap_inbox_items — raw incoming AP activities +- ap_anchors — OpenTimestamps merkle batches +- ipfs_pins — IPFS content tracking (platform-wide) +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = "k1i9f5g7h8" +down_revision = "j0h8e4f6g7" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # -- ap_anchors (referenced by ap_activities) ---------------------------- + op.create_table( + "ap_anchors", + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("merkle_root", sa.String(128), nullable=False), + sa.Column("tree_ipfs_cid", sa.String(128), nullable=True), 
        sa.Column("ots_proof_cid", sa.String(128), nullable=True),
        sa.Column("activity_count", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("bitcoin_txid", sa.String(128), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )

    # -- ap_actor_profiles ---------------------------------------------------
    # One AP identity per local user; user_id and preferred_username are both
    # unique (enforced twice: UniqueConstraint here plus the unique indexes
    # below — redundant but harmless on PostgreSQL).
    op.create_table(
        "ap_actor_profiles",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("preferred_username", sa.String(64), nullable=False),
        sa.Column("display_name", sa.String(255), nullable=True),
        sa.Column("summary", sa.Text(), nullable=True),
        sa.Column("public_key_pem", sa.Text(), nullable=False),
        sa.Column("private_key_pem", sa.Text(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("preferred_username"),
        sa.UniqueConstraint("user_id"),
    )
    op.create_index("ix_ap_actor_user_id", "ap_actor_profiles", ["user_id"], unique=True)
    op.create_index("ix_ap_actor_username", "ap_actor_profiles", ["preferred_username"], unique=True)

    # -- ap_activities -------------------------------------------------------
    # Local and remote activities; anchor_id links to the OpenTimestamps batch
    # the activity was anchored in (SET NULL if the anchor row is removed).
    op.create_table(
        "ap_activities",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("activity_id", sa.String(512), nullable=False),
        sa.Column("activity_type", sa.String(64), nullable=False),
        sa.Column("actor_profile_id", sa.Integer(), nullable=False),
        sa.Column("object_type", sa.String(64), nullable=True),
        sa.Column("object_data", postgresql.JSONB(), nullable=True),
        sa.Column("published", sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.func.now()),
        sa.Column("signature", postgresql.JSONB(), nullable=True),
        sa.Column("is_local", sa.Boolean(), nullable=False, server_default="true"),
        sa.Column("source_type", sa.String(64), nullable=True),
        sa.Column("source_id", sa.Integer(), nullable=True),
        sa.Column("ipfs_cid", sa.String(128), nullable=True),
        sa.Column("anchor_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["anchor_id"], ["ap_anchors.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("activity_id"),
    )
    op.create_index("ix_ap_activity_actor", "ap_activities", ["actor_profile_id"])
    op.create_index("ix_ap_activity_source", "ap_activities", ["source_type", "source_id"])
    op.create_index("ix_ap_activity_published", "ap_activities", ["published"])

    # -- ap_followers --------------------------------------------------------
    # Remote followers of local actors; one row per (actor, follower acct).
    op.create_table(
        "ap_followers",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("actor_profile_id", sa.Integer(), nullable=False),
        sa.Column("follower_acct", sa.String(512), nullable=False),
        sa.Column("follower_inbox", sa.String(512), nullable=False),
        sa.Column("follower_actor_url", sa.String(512), nullable=False),
        sa.Column("follower_public_key", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("actor_profile_id", "follower_acct", name="uq_follower_acct"),
    )
    op.create_index("ix_ap_follower_actor", "ap_followers", ["actor_profile_id"])

    # -- ap_inbox_items ------------------------------------------------------
    # Raw incoming AP activities; state drives async processing ("pending" on
    # arrival, processed_at set once handled).
    op.create_table(
        "ap_inbox_items",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("actor_profile_id", sa.Integer(), nullable=False),
        sa.Column("raw_json", postgresql.JSONB(), nullable=False),
        sa.Column("activity_type", sa.String(64), nullable=True),
        sa.Column("from_actor", sa.String(512), nullable=True),
        sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_ap_inbox_state", "ap_inbox_items", ["state"])
    op.create_index("ix_ap_inbox_actor", "ap_inbox_items", ["actor_profile_id"])

    # -- ipfs_pins -----------------------------------------------------------
    # Platform-wide IPFS content tracking; no FK because source_type/source_id
    # reference rows across multiple apps.
    op.create_table(
        "ipfs_pins",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("content_hash", sa.String(128), nullable=False),
        sa.Column("ipfs_cid", sa.String(128), nullable=False),
        sa.Column("pin_type", sa.String(64), nullable=False),
        sa.Column("source_type", sa.String(64), nullable=True),
        sa.Column("source_id", sa.Integer(), nullable=True),
        sa.Column("size_bytes", sa.BigInteger(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("ipfs_cid"),
    )
    op.create_index("ix_ipfs_pin_source", "ipfs_pins", ["source_type", "source_id"])
    op.create_index("ix_ipfs_pin_cid", "ipfs_pins", ["ipfs_cid"], unique=True)


def downgrade() -> None:
    # Drop in reverse dependency order: children first, then the tables they
    # reference (ap_anchors last because ap_activities FKs onto it).
    op.drop_table("ipfs_pins")
    op.drop_table("ap_inbox_items")
    op.drop_table("ap_followers")
    op.drop_table("ap_activities")
    op.drop_table("ap_actor_profiles")
    op.drop_table("ap_anchors")
diff --git
a/shared/alembic/versions/l2j0g6h8i9_add_fediverse_tables.py b/shared/alembic/versions/l2j0g6h8i9_add_fediverse_tables.py
new file mode 100644 index 0000000..c186bcc --- /dev/null +++ b/shared/alembic/versions/l2j0g6h8i9_add_fediverse_tables.py @@ -0,0 +1,138 @@
"""add fediverse social tables

Revision ID: l2j0g6h8i9
Revises: k1i9f5g7h8
Create Date: 2026-02-22

Creates:
- ap_remote_actors — cached profiles of remote actors
- ap_following — outbound follows (local → remote)
- ap_remote_posts — ingested posts from remote actors
- ap_local_posts — native posts composed in federation UI
- ap_interactions — likes and boosts
- ap_notifications — follow/like/boost/mention/reply notifications
"""

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB

revision = "l2j0g6h8i9"
down_revision = "k1i9f5g7h8"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # -- ap_remote_actors --
    # Cached remote profiles; actor_url is the canonical dedupe key
    # (unique column + unique index below).
    op.create_table(
        "ap_remote_actors",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_url", sa.String(512), unique=True, nullable=False),
        sa.Column("inbox_url", sa.String(512), nullable=False),
        sa.Column("shared_inbox_url", sa.String(512), nullable=True),
        sa.Column("preferred_username", sa.String(255), nullable=False),
        sa.Column("display_name", sa.String(255), nullable=True),
        sa.Column("summary", sa.Text, nullable=True),
        sa.Column("icon_url", sa.String(512), nullable=True),
        sa.Column("public_key_pem", sa.Text, nullable=True),
        sa.Column("domain", sa.String(255), nullable=False),
        sa.Column("fetched_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    op.create_index("ix_ap_remote_actor_url", "ap_remote_actors", ["actor_url"], unique=True)
    op.create_index("ix_ap_remote_actor_domain", "ap_remote_actors", ["domain"])

    # -- ap_following --
    # Outbound follows; state is "pending" until the remote side Accepts.
    op.create_table(
        "ap_following",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
        sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False),
        sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("accepted_at", sa.DateTime(timezone=True), nullable=True),
        sa.UniqueConstraint("actor_profile_id", "remote_actor_id", name="uq_following"),
    )
    op.create_index("ix_ap_following_actor", "ap_following", ["actor_profile_id"])
    op.create_index("ix_ap_following_remote", "ap_following", ["remote_actor_id"])

    # -- ap_remote_posts --
    # Ingested posts; both activity_id and object_id are unique dedupe keys.
    op.create_table(
        "ap_remote_posts",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False),
        sa.Column("activity_id", sa.String(512), unique=True, nullable=False),
        sa.Column("object_id", sa.String(512), unique=True, nullable=False),
        sa.Column("object_type", sa.String(64), nullable=False, server_default="Note"),
        sa.Column("content", sa.Text, nullable=True),
        sa.Column("summary", sa.Text, nullable=True),
        sa.Column("url", sa.String(512), nullable=True),
        sa.Column("attachment_data", JSONB, nullable=True),
        sa.Column("tag_data", JSONB, nullable=True),
        sa.Column("in_reply_to", sa.String(512), nullable=True),
        sa.Column("conversation", sa.String(512), nullable=True),
        sa.Column("published", sa.DateTime(timezone=True), nullable=True),
        sa.Column("fetched_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    op.create_index("ix_ap_remote_post_actor", "ap_remote_posts", ["remote_actor_id"])
    op.create_index("ix_ap_remote_post_published", "ap_remote_posts", ["published"])
    op.create_index("ix_ap_remote_post_object", "ap_remote_posts", ["object_id"], unique=True)

    # -- ap_local_posts --
    # Posts composed in the federation UI by local actors.
    op.create_table(
        "ap_local_posts",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
        sa.Column("content", sa.Text, nullable=False),
        sa.Column("visibility", sa.String(20), nullable=False, server_default="public"),
        sa.Column("in_reply_to", sa.String(512), nullable=True),
        sa.Column("published", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    op.create_index("ix_ap_local_post_actor", "ap_local_posts", ["actor_profile_id"])
    op.create_index("ix_ap_local_post_published", "ap_local_posts", ["published"])

    # -- ap_interactions --
    # Likes/boosts; exactly one of actor_profile_id (local) or remote_actor_id
    # (remote) is expected, hence both nullable. post_type/post_id form a
    # polymorphic reference, so no FK on post_id.
    op.create_table(
        "ap_interactions",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True),
        sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=True),
        sa.Column("post_type", sa.String(20), nullable=False),
        sa.Column("post_id", sa.Integer, nullable=False),
        sa.Column("interaction_type", sa.String(20), nullable=False),
        sa.Column("activity_id", sa.String(512), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    op.create_index("ix_ap_interaction_post", "ap_interactions", ["post_type", "post_id"])
    op.create_index("ix_ap_interaction_actor", "ap_interactions", ["actor_profile_id"])
    op.create_index("ix_ap_interaction_remote", "ap_interactions", ["remote_actor_id"])

    # -- ap_notifications --
    # The from_* and target_* columns use SET NULL so notifications survive
    # deletion of their source objects.
    op.create_table(
        "ap_notifications",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
        sa.Column("notification_type", sa.String(20), nullable=False),
        sa.Column("from_remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="SET NULL"), nullable=True),
        sa.Column("from_actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="SET NULL"), nullable=True),
        sa.Column("target_activity_id", sa.Integer, sa.ForeignKey("ap_activities.id", ondelete="SET NULL"), nullable=True),
        sa.Column("target_remote_post_id", sa.Integer, sa.ForeignKey("ap_remote_posts.id", ondelete="SET NULL"), nullable=True),
        sa.Column("read", sa.Boolean, nullable=False, server_default="false"),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    op.create_index("ix_ap_notification_actor", "ap_notifications", ["actor_profile_id"])
    op.create_index("ix_ap_notification_read", "ap_notifications", ["actor_profile_id", "read"])
    op.create_index("ix_ap_notification_created", "ap_notifications", ["created_at"])


def downgrade() -> None:
    # Reverse dependency order (referencing tables before referenced ones).
    op.drop_table("ap_notifications")
    op.drop_table("ap_interactions")
    op.drop_table("ap_local_posts")
    op.drop_table("ap_remote_posts")
    op.drop_table("ap_following")
    op.drop_table("ap_remote_actors")
diff --git a/shared/alembic/versions/m3k1h7i9j0_add_activity_bus_columns.py b/shared/alembic/versions/m3k1h7i9j0_add_activity_bus_columns.py
new file mode 100644 index 0000000..b61aa5e --- /dev/null +++ b/shared/alembic/versions/m3k1h7i9j0_add_activity_bus_columns.py @@ -0,0 +1,113 @@
"""add unified event bus columns to
ap_activities

Revision ID: m3k1h7i9j0
Revises: l2j0g6h8i9
Create Date: 2026-02-22

Adds processing and visibility columns so ap_activities can serve as the
unified event bus for both internal domain events and federation delivery.
"""

revision = "m3k1h7i9j0"
down_revision = "l2j0g6h8i9"
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa


def upgrade() -> None:
    # Add new columns with defaults so existing rows stay valid
    op.add_column(
        "ap_activities",
        sa.Column("actor_uri", sa.String(512), nullable=True),
    )
    op.add_column(
        "ap_activities",
        sa.Column(
            "visibility", sa.String(20),
            nullable=False, server_default="public",
        ),
    )
    # Pre-existing rows were already delivered, hence default "completed";
    # new rows are expected to be inserted with their own state.
    op.add_column(
        "ap_activities",
        sa.Column(
            "process_state", sa.String(20),
            nullable=False, server_default="completed",
        ),
    )
    op.add_column(
        "ap_activities",
        sa.Column(
            "process_attempts", sa.Integer(),
            nullable=False, server_default="0",
        ),
    )
    op.add_column(
        "ap_activities",
        sa.Column(
            "process_max_attempts", sa.Integer(),
            nullable=False, server_default="5",
        ),
    )
    op.add_column(
        "ap_activities",
        sa.Column("process_error", sa.Text(), nullable=True),
    )
    op.add_column(
        "ap_activities",
        sa.Column(
            "processed_at", sa.DateTime(timezone=True), nullable=True,
        ),
    )

    # Backfill actor_uri from the related actor_profile.
    # Domain comes from the PG setting app.ap_domain when set; falls back to
    # 'rose-ash.com' (current_setting with missing_ok=true returns NULL).
    op.execute(
        """
        UPDATE ap_activities a
        SET actor_uri = CONCAT(
            'https://',
            COALESCE(current_setting('app.ap_domain', true), 'rose-ash.com'),
            '/users/',
            p.preferred_username
        )
        FROM ap_actor_profiles p
        WHERE a.actor_profile_id = p.id
          AND a.actor_uri IS NULL
        """
    )

    # Make actor_profile_id nullable (internal events have no actor profile)
    op.alter_column(
        "ap_activities", "actor_profile_id",
        existing_type=sa.Integer(),
        nullable=True,
    )

    # Index for processor polling
    op.create_index(
        "ix_ap_activity_process", "ap_activities", ["process_state"],
    )


def downgrade() -> None:
    op.drop_index("ix_ap_activity_process", table_name="ap_activities")

    # Restore actor_profile_id NOT NULL (remove any rows without it first)
    # NOTE: this DELETE is destructive — internal bus events created after
    # the upgrade are discarded on downgrade.
    op.execute(
        "DELETE FROM ap_activities WHERE actor_profile_id IS NULL"
    )
    op.alter_column(
        "ap_activities", "actor_profile_id",
        existing_type=sa.Integer(),
        nullable=False,
    )

    op.drop_column("ap_activities", "processed_at")
    op.drop_column("ap_activities", "process_error")
    op.drop_column("ap_activities", "process_max_attempts")
    op.drop_column("ap_activities", "process_attempts")
    op.drop_column("ap_activities", "process_state")
    op.drop_column("ap_activities", "visibility")
    op.drop_column("ap_activities", "actor_uri")
diff --git a/shared/alembic/versions/n4l2i8j0k1_drop_domain_events_table.py b/shared/alembic/versions/n4l2i8j0k1_drop_domain_events_table.py
new file mode 100644 index 0000000..3d11dab --- /dev/null +++ b/shared/alembic/versions/n4l2i8j0k1_drop_domain_events_table.py @@ -0,0 +1,46 @@
"""drop domain_events table

Revision ID: n4l2i8j0k1
Revises: m3k1h7i9j0
Create Date: 2026-02-22

The domain_events table is no longer used — all events now flow through
ap_activities with the unified activity bus.
"""

revision = "n4l2i8j0k1"
down_revision = "m3k1h7i9j0"
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB


def upgrade() -> None:
    # Explicit index drops before the table drop (PostgreSQL would drop them
    # with the table anyway; kept explicit to mirror downgrade()).
    op.drop_index("ix_domain_events_state", table_name="domain_events")
    op.drop_index("ix_domain_events_event_type", table_name="domain_events")
    op.drop_table("domain_events")


def downgrade() -> None:
    # Recreates the table schema only — event rows dropped in upgrade() are
    # not recoverable.
    op.create_table(
        "domain_events",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("event_type", sa.String(128), nullable=False),
        sa.Column("aggregate_type", sa.String(64), nullable=False),
        sa.Column("aggregate_id", sa.Integer(), nullable=False),
        sa.Column("payload", JSONB(), nullable=True),
        sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
        sa.Column("attempts", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("max_attempts", sa.Integer(), nullable=False, server_default="5"),
        sa.Column("last_error", sa.Text(), nullable=True),
        sa.Column(
            "created_at", sa.DateTime(timezone=True),
            nullable=False, server_default=sa.func.now(),
        ),
        sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_domain_events_event_type", "domain_events", ["event_type"])
    op.create_index("ix_domain_events_state", "domain_events", ["state"])
diff --git a/shared/alembic/versions/o5m3j9k1l2_add_origin_app_column.py b/shared/alembic/versions/o5m3j9k1l2_add_origin_app_column.py
new file mode 100644 index 0000000..61e56af --- /dev/null +++ b/shared/alembic/versions/o5m3j9k1l2_add_origin_app_column.py @@ -0,0 +1,35 @@
"""Add origin_app column to ap_activities

Revision ID: o5m3j9k1l2
Revises: n4l2i8j0k1
Create Date: 2026-02-22
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect as sa_inspect

revision = "o5m3j9k1l2"
down_revision = "n4l2i8j0k1"
branch_labels = None
depends_on = None

def upgrade() -> None:
    # Guarded add: the column may already exist on databases where this
    # migration was applied out-of-band, so inspect the live schema first.
    conn = op.get_bind()
    inspector = sa_inspect(conn)
    columns = [c["name"] for c in inspector.get_columns("ap_activities")]
    if "origin_app" not in columns:
        op.add_column(
            "ap_activities",
            sa.Column("origin_app", sa.String(64), nullable=True),
        )
    # Index is idempotent with if_not_exists
    op.create_index(
        "ix_ap_activity_origin_app", "ap_activities", ["origin_app"],
        if_not_exists=True,
    )


def downgrade() -> None:
    op.drop_index("ix_ap_activity_origin_app", table_name="ap_activities")
    op.drop_column("ap_activities", "origin_app")
diff --git a/shared/alembic/versions/p6n4k0l2m3_add_oauth_codes_table.py b/shared/alembic/versions/p6n4k0l2m3_add_oauth_codes_table.py
new file mode 100644 index 0000000..d74a687 --- /dev/null +++ b/shared/alembic/versions/p6n4k0l2m3_add_oauth_codes_table.py @@ -0,0 +1,37 @@
"""Add oauth_codes table

Revision ID: p6n4k0l2m3
Revises: o5m3j9k1l2
Create Date: 2026-02-23
"""
from alembic import op
import sqlalchemy as sa

revision = "p6n4k0l2m3"
down_revision = "o5m3j9k1l2"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Single-use OAuth authorization codes (expires_at bounds validity,
    # used_at marks consumption).
    op.create_table(
        "oauth_codes",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("code", sa.String(128), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("client_id", sa.String(64), nullable=False),
        sa.Column("redirect_uri", sa.String(512), nullable=False),
        sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("used_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
    )
    op.create_index("ix_oauth_code_code", "oauth_codes", ["code"], unique=True)
    op.create_index("ix_oauth_code_user", "oauth_codes", ["user_id"])


def downgrade() -> None:
    op.drop_index("ix_oauth_code_user", table_name="oauth_codes")
    op.drop_index("ix_oauth_code_code", table_name="oauth_codes")
    op.drop_table("oauth_codes")
diff --git a/shared/alembic/versions/q7o5l1m3n4_add_oauth_grants_table.py b/shared/alembic/versions/q7o5l1m3n4_add_oauth_grants_table.py
new file mode 100644 index 0000000..b973872 --- /dev/null +++ b/shared/alembic/versions/q7o5l1m3n4_add_oauth_grants_table.py @@ -0,0 +1,41 @@
"""Add oauth_grants table

Revision ID: q7o5l1m3n4
Revises: p6n4k0l2m3
"""
from alembic import op
import sqlalchemy as sa

revision = "q7o5l1m3n4"
down_revision = "p6n4k0l2m3"
branch_labels = None
depends_on = None


def upgrade():
    # NOTE: device_id / ix_oauth_grant_device were added to this migration
    # AFTER it had already run in production (see r8p6m2n4o5, which patches
    # live databases). Fresh databases therefore get both here.
    op.create_table(
        "oauth_grants",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("token", sa.String(128), unique=True, nullable=False),
        sa.Column("user_id", sa.Integer, sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False),
        sa.Column("client_id", sa.String(64), nullable=False),
        sa.Column("issuer_session", sa.String(128), nullable=False),
        sa.Column("device_id", sa.String(128), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("revoked_at", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index("ix_oauth_grant_token", "oauth_grants", ["token"], unique=True)
    op.create_index("ix_oauth_grant_issuer", "oauth_grants", ["issuer_session"])
    op.create_index("ix_oauth_grant_user", "oauth_grants", ["user_id"])
    op.create_index("ix_oauth_grant_device", "oauth_grants", ["device_id", "client_id"])

    # Add grant_token column to oauth_codes to link code → grant
    op.add_column("oauth_codes", sa.Column("grant_token", sa.String(128), nullable=True))


def downgrade():
    op.drop_column("oauth_codes", "grant_token")
    op.drop_index("ix_oauth_grant_user", table_name="oauth_grants")
    op.drop_index("ix_oauth_grant_issuer", table_name="oauth_grants")
op.drop_index("ix_oauth_grant_token", table_name="oauth_grants") + op.drop_table("oauth_grants") diff --git a/shared/alembic/versions/r8p6m2n4o5_add_device_id_to_oauth_grants.py b/shared/alembic/versions/r8p6m2n4o5_add_device_id_to_oauth_grants.py new file mode 100644 index 0000000..6394f17 --- /dev/null +++ b/shared/alembic/versions/r8p6m2n4o5_add_device_id_to_oauth_grants.py @@ -0,0 +1,29 @@ +"""Add device_id column to oauth_grants + +Revision ID: r8p6m2n4o5 +Revises: q7o5l1m3n4 +""" +from alembic import op +import sqlalchemy as sa + +revision = "r8p6m2n4o5" +down_revision = "q7o5l1m3n4" +branch_labels = None +depends_on = None + + +def upgrade(): + # device_id was added to the create_table migration after it had already + # run, so the column is missing from the live DB. Add it now. + op.add_column( + "oauth_grants", + sa.Column("device_id", sa.String(128), nullable=True), + ) + op.create_index( + "ix_oauth_grant_device", "oauth_grants", ["device_id", "client_id"] + ) + + +def downgrade(): + op.drop_index("ix_oauth_grant_device", table_name="oauth_grants") + op.drop_column("oauth_grants", "device_id") diff --git a/shared/alembic/versions/s9q7n3o5p6_add_ap_delivery_log_table.py b/shared/alembic/versions/s9q7n3o5p6_add_ap_delivery_log_table.py new file mode 100644 index 0000000..0635431 --- /dev/null +++ b/shared/alembic/versions/s9q7n3o5p6_add_ap_delivery_log_table.py @@ -0,0 +1,30 @@ +"""Add ap_delivery_log table for idempotent federation delivery + +Revision ID: s9q7n3o5p6 +Revises: r8p6m2n4o5 +""" +from alembic import op +import sqlalchemy as sa + +revision = "s9q7n3o5p6" +down_revision = "r8p6m2n4o5" +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "ap_delivery_log", + sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), + sa.Column("activity_id", sa.Integer, sa.ForeignKey("ap_activities.id", ondelete="CASCADE"), nullable=False), + sa.Column("inbox_url", sa.String(512), nullable=False), + sa.Column("status_code", 
sa.Integer, nullable=True), + sa.Column("delivered_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()), + sa.UniqueConstraint("activity_id", "inbox_url", name="uq_delivery_activity_inbox"), + ) + op.create_index("ix_ap_delivery_activity", "ap_delivery_log", ["activity_id"]) + + +def downgrade(): + op.drop_index("ix_ap_delivery_activity", table_name="ap_delivery_log") + op.drop_table("ap_delivery_log") diff --git a/shared/alembic/versions/t0r8n4o6p7_add_app_domain_to_ap_followers.py b/shared/alembic/versions/t0r8n4o6p7_add_app_domain_to_ap_followers.py new file mode 100644 index 0000000..3f1c8d0 --- /dev/null +++ b/shared/alembic/versions/t0r8n4o6p7_add_app_domain_to_ap_followers.py @@ -0,0 +1,51 @@ +"""Add app_domain to ap_followers for per-app AP actors + +Revision ID: t0r8n4o6p7 +Revises: s9q7n3o5p6 +""" +from alembic import op +import sqlalchemy as sa + +revision = "t0r8n4o6p7" +down_revision = "s9q7n3o5p6" +branch_labels = None +depends_on = None + + +def upgrade(): + # Add column as nullable first so we can backfill + op.add_column( + "ap_followers", + sa.Column("app_domain", sa.String(64), nullable=True), + ) + # Backfill existing rows: all current followers are aggregate + op.execute("UPDATE ap_followers SET app_domain = 'federation' WHERE app_domain IS NULL") + # Now make it NOT NULL with a default + op.alter_column( + "ap_followers", "app_domain", + nullable=False, server_default="federation", + ) + # Replace old unique constraint with one that includes app_domain + op.drop_constraint("uq_follower_acct", "ap_followers", type_="unique") + op.create_unique_constraint( + "uq_follower_acct_app", + "ap_followers", + ["actor_profile_id", "follower_acct", "app_domain"], + ) + op.create_index( + "ix_ap_follower_app_domain", + "ap_followers", + ["actor_profile_id", "app_domain"], + ) + + +def downgrade(): + op.drop_index("ix_ap_follower_app_domain", table_name="ap_followers") + op.drop_constraint("uq_follower_acct_app", "ap_followers", 
type_="unique") + op.create_unique_constraint( + "uq_follower_acct", + "ap_followers", + ["actor_profile_id", "follower_acct"], + ) + op.alter_column("ap_followers", "app_domain", nullable=True, server_default=None) + op.drop_column("ap_followers", "app_domain") diff --git a/shared/alembic/versions/u1s9o5p7q8_add_app_domain_to_delivery_log.py b/shared/alembic/versions/u1s9o5p7q8_add_app_domain_to_delivery_log.py new file mode 100644 index 0000000..1306c9f --- /dev/null +++ b/shared/alembic/versions/u1s9o5p7q8_add_app_domain_to_delivery_log.py @@ -0,0 +1,33 @@ +"""Add app_domain to ap_delivery_log for per-domain idempotency + +Revision ID: u1s9o5p7q8 +Revises: t0r8n4o6p7 +""" +from alembic import op +import sqlalchemy as sa + +revision = "u1s9o5p7q8" +down_revision = "t0r8n4o6p7" + + +def upgrade() -> None: + op.add_column( + "ap_delivery_log", + sa.Column("app_domain", sa.String(128), nullable=False, server_default="federation"), + ) + op.drop_constraint("uq_delivery_activity_inbox", "ap_delivery_log", type_="unique") + op.create_unique_constraint( + "uq_delivery_activity_inbox_domain", + "ap_delivery_log", + ["activity_id", "inbox_url", "app_domain"], + ) + + +def downgrade() -> None: + op.drop_constraint("uq_delivery_activity_inbox_domain", "ap_delivery_log", type_="unique") + op.drop_column("ap_delivery_log", "app_domain") + op.create_unique_constraint( + "uq_delivery_activity_inbox", + "ap_delivery_log", + ["activity_id", "inbox_url"], + ) diff --git a/shared/browser/__init__.py b/shared/browser/__init__.py new file mode 100644 index 0000000..6ded1e5 --- /dev/null +++ b/shared/browser/__init__.py @@ -0,0 +1 @@ +# suma_browser package diff --git a/shared/browser/app/__init__.py b/shared/browser/app/__init__.py new file mode 100644 index 0000000..7d0bf9b --- /dev/null +++ b/shared/browser/app/__init__.py @@ -0,0 +1,12 @@ +# The monolith has been split into three apps (apps/blog, apps/market, apps/cart). 
# This package remains for shared infrastructure modules (middleware, redis_cacher,
# csrf, errors, authz, filters, utils, bp/*).
#
# To run individual apps:
#   hypercorn apps.blog.app:app --bind 0.0.0.0:8000
#   hypercorn apps.market.app:app --bind 0.0.0.0:8001
#   hypercorn apps.cart.app:app --bind 0.0.0.0:8002
#
# Legacy single-process:
#   hypercorn suma_browser.app.app:app --bind 0.0.0.0:8000
#   (runs the old monolith from app.py, which still works)
diff --git a/shared/browser/app/authz.py b/shared/browser/app/authz.py
new file mode 100644 index 0000000..864e4ff --- /dev/null +++ b/shared/browser/app/authz.py @@ -0,0 +1,152 @@
from __future__ import annotations

from functools import wraps
from typing import Any, Dict, Iterable, Optional
import inspect

from quart import g, abort, redirect, request, current_app
from shared.infrastructure.urls import login_url


def require_rights(*rights: str, any_of: bool = True):
    """
    Decorator for routes that require certain user rights.

    Args:
        *rights: one or more right names to check against g.rights.
        any_of: when True (default), at least one right must be granted;
            when False, all rights are required.

    Behavior: anonymous users are redirected to the login page (with the
    current URL as the return target); authenticated users lacking the
    rights get a 403. Works with both sync and async view functions.
    The wrapper exposes its requirements via __access_requires__ so that
    has_access() can answer "may the current user reach this endpoint?".
    """

    if not rights:
        raise ValueError("require_rights needs at least one right name")

    required_set = frozenset(rights)

    def decorator(view_func):
        @wraps(view_func)
        async def wrapper(*args: Any, **kwargs: Any):
            # Not logged in → go to login, with ?next=
            user = g.get("user")
            if not user:
                return redirect(login_url(request.url))

            rights_dict = g.get("rights") or {}

            if any_of:
                allowed = any(rights_dict.get(name) for name in required_set)
            else:
                allowed = all(rights_dict.get(name) for name in required_set)

            if not allowed:
                abort(403)

            # Support both sync and async views: await only if needed.
            result = view_func(*args, **kwargs)
            if inspect.isawaitable(result):
                return await result
            return result

        # ---- expose access requirements on the wrapper ----
        wrapper.__access_requires__ = {
            "rights": required_set,
            "any_of": any_of,
        }

        return wrapper

    return decorator


def require_login(view_func):
    """
    Decorator for routes that require any logged-in user.
+ """ + @wraps(view_func) + async def wrapper(*args: Any, **kwargs: Any): + user = g.get("user") + if not user: + return redirect(login_url(request.url)) + result = view_func(*args, **kwargs) + if inspect.isawaitable(result): + return await result + return result + return wrapper + + +def require_admin(view_func=None): + """ + Shortcut for routes that require the 'admin' right. + """ + if view_func is None: + return require_rights("admin") + + return require_rights("admin")(view_func) + +def require_post_author(view_func): + """Allow admin or post owner.""" + @wraps(view_func) + async def wrapper(*args, **kwargs): + user = g.get("user") + if not user: + return redirect(login_url(request.url)) + is_admin = bool((g.get("rights") or {}).get("admin")) + if is_admin: + result = view_func(*args, **kwargs) + if inspect.isawaitable(result): + return await result + return result + post = getattr(g, "post_data", {}).get("original_post") + if post and post.user_id == user.id: + result = view_func(*args, **kwargs) + if inspect.isawaitable(result): + return await result + return result + abort(403) + return wrapper + + +def _get_access_meta(view_func) -> Optional[Dict[str, Any]]: + """ + Walk the wrapper chain looking for __access_requires__ metadata. + """ + func = view_func + seen: set[int] = set() + + while func is not None and id(func) not in seen: + seen.add(id(func)) + meta = getattr(func, "__access_requires__", None) + if meta is not None: + return meta + func = getattr(func, "__wrapped__", None) + + return None + + +def has_access(endpoint: str) -> bool: + """ + Return True if the current user has access to the given endpoint. 
+ + Example: + has_access("settings.home") + has_access("settings.clear_cache_view") + """ + view = current_app.view_functions.get(endpoint) + if view is None: + # Unknown endpoint: be conservative + return False + + meta = _get_access_meta(view) + + # If the route has no rights metadata, treat it as public: + if meta is None: + return True + + required: Iterable[str] = meta["rights"] + any_of: bool = meta["any_of"] + + # Must be in a request context; if no user, they don't have access + user = g.get("user") + if not user: + return False + + rights_dict = g.get("rights") or {} + + if any_of: + return any(rights_dict.get(name) for name in required) + else: + return all(rights_dict.get(name) for name in required) diff --git a/shared/browser/app/csrf.py b/shared/browser/app/csrf.py new file mode 100644 index 0000000..bfd898d --- /dev/null +++ b/shared/browser/app/csrf.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +import secrets +from typing import Callable, Awaitable, Optional + +from quart import ( + abort, + current_app, + request, + session as qsession, +) + +SAFE_METHODS = {"GET", "HEAD", "OPTIONS", "TRACE"} + + +def generate_csrf_token() -> str: + """ + Per-session CSRF token. + + In Jinja: + + """ + token = qsession.get("csrf_token") + if not token: + token = secrets.token_urlsafe(32) + qsession["csrf_token"] = token + return token + + +def _is_exempt_endpoint() -> bool: + endpoint = request.endpoint + if not endpoint: + return False + view = current_app.view_functions.get(endpoint) + + # Walk decorator stack (__wrapped__) to find csrf_exempt + while view is not None: + if getattr(view, "_csrf_exempt", False): + return True + view = getattr(view, "__wrapped__", None) + + return False + + +async def protect() -> None: + """ + Enforce CSRF on unsafe methods. 
+ + Supports: + * Forms: hidden input "csrf_token" + * JSON: "csrf_token" or "csrfToken" field + * HTMX/AJAX: "X-CSRFToken" or "X-CSRF-Token" header + """ + if request.method in SAFE_METHODS: + return + + if _is_exempt_endpoint(): + return + + session_token = qsession.get("csrf_token") + if not session_token: + abort(400, "Missing CSRF session token") + + supplied_token: Optional[str] = None + + # JSON body + if request.mimetype == "application/json": + data = await request.get_json(silent=True) or {} + supplied_token = data.get("csrf_token") or data.get("csrfToken") + + # Form body + if not supplied_token and request.mimetype != "application/json": + form = await request.form + supplied_token = form.get("csrf_token") + + # Headers (HTMX / fetch) + if not supplied_token: + supplied_token = ( + request.headers.get("X-CSRFToken") + or request.headers.get("X-CSRF-Token") + ) + + if not supplied_token or supplied_token != session_token: + abort(400, "Invalid CSRF token") + + +def csrf_exempt(view: Callable[..., Awaitable]) -> Callable[..., Awaitable]: + """ + Mark a view as CSRF-exempt. + + from suma_browser.app.csrf import csrf_exempt + + @csrf_exempt + @blueprint.post("/hook") + async def webhook(): + ... + """ + setattr(view, "_csrf_exempt", True) + return view diff --git a/shared/browser/app/errors.py b/shared/browser/app/errors.py new file mode 100644 index 0000000..bb8cdf2 --- /dev/null +++ b/shared/browser/app/errors.py @@ -0,0 +1,126 @@ +from werkzeug.exceptions import HTTPException +from shared.utils import hx_fragment_request + +from quart import ( + request, + render_template, + make_response, + current_app +) + +from markupsafe import escape + +class AppError(ValueError): + """ + Base class for app-level, client-safe errors. + Behaves like ValueError so existing except ValueError: still works. 
+ """ + status_code: int = 400 + + def __init__(self, message, *, status_code: int | None = None): + # Support a single message or a list/tuple of messages + if isinstance(message, (list, tuple, set)): + self.messages = [str(m) for m in message] + msg = self.messages[0] if self.messages else "" + else: + self.messages = [str(message)] + msg = str(message) + + super().__init__(msg) + + if status_code is not None: + self.status_code = status_code + + +def errors(app): + def _info(e): + return { + "exception": e, + "method": request.method, + "url": str(request.url), + "base_url": str(request.base_url), + "root_path": request.root_path, + "path": request.path, + "full_path": request.full_path, + "endpoint": request.endpoint, + "url_rule": str(request.url_rule) if request.url_rule else None, + "headers": {k: v for k, v in request.headers.items() + if k.lower().startswith("x-forwarded") or k in ("Host",)}, + } + + @app.errorhandler(404) + async def not_found(e): + current_app.logger.warning("404 %s", _info(e)) + if hx_fragment_request(): + html = await render_template( + "_types/root/exceptions/hx/_.html", + errnum='404' + ) + else: + html = await render_template( + "_types/root/exceptions/_.html", + errnum='404', + ) + + return await make_response(html, 404) + + @app.errorhandler(403) + async def not_allowed(e): + current_app.logger.warning("403 %s", _info(e)) + if hx_fragment_request(): + html = await render_template( + "_types/root/exceptions/hx/_.html", + errnum='403' + ) + else: + html = await render_template( + "_types/root/exceptions/_.html", + errnum='403', + ) + + return await make_response(html, 403) + + @app.errorhandler(AppError) + async def app_error(e: AppError): + # App-level, client-safe errors + current_app.logger.info("AppError %s", _info(e)) + status = getattr(e, "status_code", 400) + messages = getattr(e, "messages", [str(e)]) + + if request.headers.get("HX-Request") == "true": + # Build a little styled
    • ...
    snippet + lis = "".join( + f"
  • {escape(m)}
  • " + for m in messages if m + ) + html = ( + "
      " + f"{lis}" + "
    " + ) + return await make_response(html, status) + + # Non-HTMX: show a nicer page with error messages + html = await render_template( + "_types/root/exceptions/app_error.html", + messages=messages, + ) + return await make_response(html, status) + + @app.errorhandler(Exception) + async def error(e): + current_app.logger.exception("Exception %s", _info(e)) + + status = 500 + if isinstance(e, HTTPException): + status = e.code or 500 + + if request.headers.get("HX-Request") == "true": + # Generic message for unexpected/untrusted errors + return await make_response( + "Something went wrong. Please try again.", + status, + ) + + html = await render_template("_types/root/exceptions/error.html") + return await make_response(html, status) diff --git a/shared/browser/app/filters/__init__.py b/shared/browser/app/filters/__init__.py new file mode 100644 index 0000000..4e34162 --- /dev/null +++ b/shared/browser/app/filters/__init__.py @@ -0,0 +1,17 @@ +def register(app): + from .highlight import highlight + app.jinja_env.filters["highlight"] = highlight + + from .qs import register as qs + from .url_join import register as url_join + from .combine import register as combine + from .currency import register as currency + from .truncate import register as truncate + from .getattr import register as getattr + + qs(app) + url_join(app) + combine(app) + currency(app) + getattr(app) + # truncate(app) \ No newline at end of file diff --git a/shared/browser/app/filters/combine.py b/shared/browser/app/filters/combine.py new file mode 100644 index 0000000..9edf07b --- /dev/null +++ b/shared/browser/app/filters/combine.py @@ -0,0 +1,25 @@ +from __future__ import annotations +from typing import Any, Mapping + +def _deep_merge(dst: dict, src: Mapping) -> dict: + out = dict(dst) + for k, v in src.items(): + if isinstance(v, Mapping) and isinstance(out.get(k), Mapping): + out[k] = _deep_merge(out[k], v) # type: ignore[arg-type] + else: + out[k] = v + return out +def register(app): + 
@app.template_filter("combine") + def combine_filter(a: Any, b: Any, deep: bool = False, drop_none: bool = False) -> Any: + """ + Jinja filter: merge two dict-like objects. + + - Non-dict inputs: returns `a` unchanged. + - If drop_none=True, keys in `b` with value None are ignored. + - If deep=True, nested dicts are merged recursively. + """ + if not isinstance(a, Mapping) or not isinstance(b, Mapping): + return a + b2 = {k: v for k, v in b.items() if not (drop_none and v is None)} + return _deep_merge(a, b2) if deep else {**a, **b2} \ No newline at end of file diff --git a/shared/browser/app/filters/currency.py b/shared/browser/app/filters/currency.py new file mode 100644 index 0000000..0309b9b --- /dev/null +++ b/shared/browser/app/filters/currency.py @@ -0,0 +1,12 @@ +from decimal import Decimal + +def register(app): + @app.template_filter("currency") + def currency_filter(value, code="GBP"): + if value is None: + return "" + # ensure decimal-ish + if isinstance(value, float): + value = Decimal(str(value)) + symbol = "£" if code == "GBP" else code + return f"{symbol}{value:.2f}" diff --git a/shared/browser/app/filters/getattr.py b/shared/browser/app/filters/getattr.py new file mode 100644 index 0000000..7d98684 --- /dev/null +++ b/shared/browser/app/filters/getattr.py @@ -0,0 +1,6 @@ + +def register(app): + @app.template_filter("getattr") + def jinja_getattr(obj, name, default=None): + # Safe getattr: returns default if the attribute is missing + return getattr(obj, name, default) diff --git a/shared/browser/app/filters/highlight.py b/shared/browser/app/filters/highlight.py new file mode 100644 index 0000000..876a10b --- /dev/null +++ b/shared/browser/app/filters/highlight.py @@ -0,0 +1,21 @@ +# ---------- misc helpers / filters ---------- +from markupsafe import Markup, escape + +def highlight(text: str, needle: str, cls: str = "bg-yellow-200 rounded") -> Markup: + """ + Wraps case-insensitive matches of `needle` inside . + Escapes everything safely. 
+ """ + import re + if not text or not needle: + return Markup(escape(text or "")) + + pattern = re.compile(re.escape(needle), re.IGNORECASE) + + def repl(m: re.Match) -> str: + return f'{escape(m.group(0))}' + + esc = escape(text) + result = pattern.sub(lambda m: Markup(repl(m)), esc) + return Markup(result) + diff --git a/shared/browser/app/filters/qs.py b/shared/browser/app/filters/qs.py new file mode 100644 index 0000000..49d3b5d --- /dev/null +++ b/shared/browser/app/filters/qs.py @@ -0,0 +1,13 @@ +from typing import Dict +from quart import g + +def register(app): + @app.template_filter("qs") + def qs_filter(dict: Dict): + if getattr(g, "makeqs_factory", False): + q= g.makeqs_factory()( + **dict, + ) + return q + else: + return "" diff --git a/shared/browser/app/filters/qs_base.py b/shared/browser/app/filters/qs_base.py new file mode 100644 index 0000000..6a8a8b5 --- /dev/null +++ b/shared/browser/app/filters/qs_base.py @@ -0,0 +1,78 @@ +""" +Shared query-string primitives used by blog, market, and order qs modules. +""" +from __future__ import annotations + +from urllib.parse import urlencode + +# Sentinel meaning "leave value as-is" (used as default arg in makeqs) +KEEP = object() + + +def _iterify(x): + """Normalize *x* to a list: None → [], scalar → [scalar], iterable → as-is.""" + if x is None: + return [] + if isinstance(x, (list, tuple, set)): + return x + return [x] + + +def _norm(s: str) -> str: + """Strip + lowercase — used for case-insensitive filter dedup.""" + return s.strip().lower() + + +def make_filter_set( + base: list[str], + add, + remove, + clear_filters: bool, + *, + single_select: bool = False, +) -> list[str]: + """ + Build a deduplicated, sorted filter list. + + Parameters + ---------- + base : list[str] + Current filter values. + add : str | list | None + Value(s) to add. + remove : str | list | None + Value(s) to remove. + clear_filters : bool + If True, start from empty instead of *base*. 
+ single_select : bool + If True, *add* **replaces** the list (blog tags/authors). + If False, *add* is **appended** (market brands/stickers/labels). + """ + add_list = [s for s in _iterify(add) if s is not None] + + if single_select: + # Blog-style: adding replaces the entire set + if add_list: + table = {_norm(s): s for s in add_list} + else: + table = {_norm(s): s for s in base if not clear_filters} + else: + # Market-style: adding appends to the existing set + table = {_norm(s): s for s in base if not clear_filters} + for s in add_list: + k = _norm(s) + if k not in table: + table[k] = s + + for s in _iterify(remove): + if s is None: + continue + table.pop(_norm(s), None) + + return [table[k] for k in sorted(table)] + + +def build_qs(params: list[tuple[str, str]], *, leading_q: bool = True) -> str: + """URL-encode *params* and optionally prepend ``?``.""" + qs = urlencode(params, doseq=True) + return ("?" + qs) if (qs and leading_q) else qs diff --git a/shared/browser/app/filters/query_types.py b/shared/browser/app/filters/query_types.py new file mode 100644 index 0000000..3a7482c --- /dev/null +++ b/shared/browser/app/filters/query_types.py @@ -0,0 +1,33 @@ +""" +NamedTuple types returned by each blueprint's ``decode()`` function. +""" +from __future__ import annotations + +from typing import NamedTuple + + +class BlogQuery(NamedTuple): + page: int + search: str | None + sort: str | None + selected_tags: tuple[str, ...] + selected_authors: tuple[str, ...] + liked: str | None + view: str | None + drafts: str | None + selected_groups: tuple[str, ...] + + +class MarketQuery(NamedTuple): + page: int + search: str | None + sort: str | None + selected_brands: tuple[str, ...] + selected_stickers: tuple[str, ...] + selected_labels: tuple[str, ...] 
from __future__ import annotations


def register(app):
    # NOTE(review): registering under the name "truncate" replaces any filter
    # already bound to that name (including Jinja's built-in) — confirm intended.
    @app.template_filter("truncate")
    def truncate(text, max_length=100):
        """
        Truncate text to max_length characters and add an ellipsis character (…)
        if it was longer.
        """
        if text is None:
            return ""

        value = str(text)
        if len(value) <= max_length:
            return value

        if max_length <= 1:
            # No room for any content — the ellipsis is the whole result.
            return "…"

        # Reserve one character for the ellipsis itself.
        return value[: max_length - 1] + "…"
scope.get("headers", []): + try: + out[k.decode("latin1")] = v.decode("latin1") + except Exception: + out[repr(k)] = repr(v) + return out + + def _safe(obj: Any): + # make scope json-serialisable; fall back to repr() + try: + json.dumps(obj) + return obj + except Exception: + return repr(obj) + + class ScopeDumpMiddleware: + def __init__(self, app, *, log_bodies: bool = False): + self.app = app + self.log_bodies = log_bodies # keep False; bodies aren't needed for routing + + async def __call__(self, scope, receive, send): + if scope["type"] in ("http", "websocket"): + # Build a compact view of keys relevant to routing + scope_view = { + "type": scope.get("type"), + "asgi": scope.get("asgi"), + "http_version": scope.get("http_version"), + "scheme": scope.get("scheme"), + "method": scope.get("method"), + "server": scope.get("server"), + "client": scope.get("client"), + "root_path": scope.get("root_path"), + "path": scope.get("path"), + "raw_path": scope.get("raw_path").decode("latin1") if scope.get("raw_path") else None, + "query_string": scope.get("query_string", b"").decode("latin1"), + "headers": _decode_headers(scope), + } + + print("\n=== ASGI SCOPE (routing) ===") + print(json.dumps({_safe(k): _safe(v) for k, v in scope_view.items()}, indent=2)) + print("=== END SCOPE ===\n", flush=True) + + return await self.app(scope, receive, send) + + # wrap LAST so you see what hits Quart + #app.asgi_app = ScopeDumpMiddleware(app.asgi_app) + + + from hypercorn.middleware import ProxyFixMiddleware + # trust a single proxy hop; use legacy X-Forwarded-* headers + app.asgi_app = ProxyFixMiddleware(app.asgi_app, mode="legacy", trusted_hops=1) diff --git a/shared/browser/app/payments/__init__.py b/shared/browser/app/payments/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/shared/browser/app/payments/__init__.py @@ -0,0 +1 @@ + diff --git a/shared/browser/app/payments/sumup.py b/shared/browser/app/payments/sumup.py new file mode 100644 index 
0000000..7c15852 --- /dev/null +++ b/shared/browser/app/payments/sumup.py @@ -0,0 +1,133 @@ +from __future__ import annotations + +import os +from typing import Any, Dict, TYPE_CHECKING + +import httpx +from quart import current_app + +from shared.config import config + +if TYPE_CHECKING: + from shared.models.order import Order + +SUMUP_BASE_URL = "https://api.sumup.com/v0.1" + + +def _sumup_settings() -> Dict[str, str]: + cfg = config() + sumup_cfg = cfg.get("sumup", {}) or {} + api_key_env = sumup_cfg.get("api_key_env", "SUMUP_API_KEY") + api_key = os.getenv(api_key_env) + if not api_key: + raise RuntimeError(f"Missing SumUp API key in environment variable {api_key_env}") + + merchant_code = sumup_cfg.get("merchant_code") + prefix = sumup_cfg.get("checkout_prefix", "") + if not merchant_code: + raise RuntimeError("Missing 'sumup.merchant_code' in app-config.yaml") + + currency = sumup_cfg.get("currency", "GBP") + + return { + "api_key": api_key, + "merchant_code": merchant_code, + "currency": currency, + "checkout_reference_prefix": prefix, + } + + +async def create_checkout( + order: Order, + redirect_url: str, + webhook_url: str | None = None, + description: str | None = None, + page_config: Any | None = None, +) -> Dict[str, Any]: + settings = _sumup_settings() + + # Per-page SumUp credentials override globals + if page_config and getattr(page_config, "sumup_api_key", None): + settings["api_key"] = page_config.sumup_api_key + if page_config and getattr(page_config, "sumup_merchant_code", None): + settings["merchant_code"] = page_config.sumup_merchant_code + + # Use stored reference if present, otherwise build it + checkout_reference = order.sumup_reference or f"{settings['checkout_reference_prefix']}{order.id}" + + payload: Dict[str, Any] = { + "checkout_reference": checkout_reference, + "amount": float(order.total_amount), + "currency": settings["currency"], + "merchant_code": settings["merchant_code"], + "description": description or f"Order {order.id} at 
{current_app.config.get('APP_TITLE', 'Rose Ash')}", + "return_url": webhook_url or redirect_url, + "redirect_url": redirect_url, + "hosted_checkout": {"enabled": True}, + } + headers = { + "Authorization": f"Bearer {settings['api_key']}", + "Content-Type": "application/json", + } + + # Optional: log for debugging + current_app.logger.info( + "Creating SumUp checkout %s for Order %s amount %.2f", + checkout_reference, + order.id, + float(order.total_amount), + ) + + async with httpx.AsyncClient(timeout=15.0) as client: + resp = await client.post(f"{SUMUP_BASE_URL}/checkouts", json=payload, headers=headers) + + if resp.status_code == 409: + # Duplicate checkout — retrieve the existing one by reference + current_app.logger.warning( + "SumUp duplicate checkout for ref %s order %s, fetching existing", + checkout_reference, + order.id, + ) + list_resp = await client.get( + f"{SUMUP_BASE_URL}/checkouts", + params={"checkout_reference": checkout_reference}, + headers=headers, + ) + list_resp.raise_for_status() + items = list_resp.json() + if isinstance(items, list) and items: + return items[0] + if isinstance(items, dict) and items.get("items"): + return items["items"][0] + # Fallback: re-raise original error + resp.raise_for_status() + + if resp.status_code >= 400: + current_app.logger.error( + "SumUp checkout error for ref %s order %s: %s", + checkout_reference, + order.id, + resp.text, + ) + resp.raise_for_status() + data = resp.json() + + return data + + +async def get_checkout(checkout_id: str, page_config: Any | None = None) -> Dict[str, Any]: + """Fetch checkout status/details from SumUp.""" + settings = _sumup_settings() + + if page_config and getattr(page_config, "sumup_api_key", None): + settings["api_key"] = page_config.sumup_api_key + + headers = { + "Authorization": f"Bearer {settings['api_key']}", + "Content-Type": "application/json", + } + + async with httpx.AsyncClient(timeout=10.0) as client: + resp = await 
client.get(f"{SUMUP_BASE_URL}/checkouts/{checkout_id}", headers=headers) + resp.raise_for_status() + return resp.json() diff --git a/shared/browser/app/redis_cacher.py b/shared/browser/app/redis_cacher.py new file mode 100644 index 0000000..154d410 --- /dev/null +++ b/shared/browser/app/redis_cacher.py @@ -0,0 +1,346 @@ +from __future__ import annotations + +from functools import wraps +from typing import Optional, Literal + +import asyncio + +from quart import ( + Quart, + request, + Response, + g, + current_app, +) +from redis import asyncio as aioredis + +Scope = Literal["user", "global", "anon"] +TagScope = Literal["all", "user"] # for clear_cache + + +# --------------------------------------------------------------------------- +# Redis setup +# --------------------------------------------------------------------------- + +def register(app: Quart) -> None: + @app.before_serving + async def setup_redis() -> None: + if app.config["REDIS_URL"] and app.config["REDIS_URL"] != 'no': + app.redis = aioredis.Redis.from_url( + app.config["REDIS_URL"], + encoding="utf-8", + decode_responses=False, # store bytes + ) + else: + app.redis = False + + @app.after_serving + async def close_redis() -> None: + if app.redis: + await app.redis.close() + # optional: await app.redis.connection_pool.disconnect() + + +def get_redis(): + return current_app.redis + + +# --------------------------------------------------------------------------- +# Key helpers +# --------------------------------------------------------------------------- + +def get_user_id() -> str: + """ + Returns a string id or 'anon'. + Adjust based on your auth system. + """ + user = getattr(g, "user", None) + if user: + return str(user.id) + return "anon" + + +def make_cache_key(cache_user_id: str) -> str: + """ + Build a cache key for this (user/global/anon) + path + query + HTMX status. + + HTMX requests and normal requests get different cache keys because they + return different content (partials vs full pages). 
+ + Keys are namespaced by app name (from CACHE_APP_PREFIX) to avoid + collisions between apps that may share the same paths. + """ + app_prefix = current_app.config.get("CACHE_APP_PREFIX", "app") + path = request.path + qs = request.query_string.decode() if request.query_string else "" + + # Check if this is an HTMX request + is_htmx = request.headers.get("HX-Request", "").lower() == "true" + htmx_suffix = ":htmx" if is_htmx else "" + + if qs: + return f"cache:{app_prefix}:page:{cache_user_id}:{path}?{qs}{htmx_suffix}" + else: + return f"cache:{app_prefix}:page:{cache_user_id}:{path}{htmx_suffix}" + + +def user_set_key(user_id: str) -> str: + """ + Redis set that tracks all cache keys for a given user id. + Only used when scope='user'. + """ + return f"cache:user:{user_id}" + + +def tag_set_key(tag: str) -> str: + """ + Redis set that tracks all cache keys associated with a tag + (across all scopes/users). + """ + return f"cache:tag:{tag}" + + +# --------------------------------------------------------------------------- +# Invalidation helpers +# --------------------------------------------------------------------------- + +async def invalidate_user_cache(user_id: str) -> None: + """ + Delete all cached pages for a specific user (scope='user' caches). + """ + r = get_redis() + if r: + s_key = user_set_key(user_id) + keys = await r.smembers(s_key) # set of bytes + if keys: + await r.delete(*keys) + await r.delete(s_key) + + +async def invalidate_tag_cache(tag: str) -> None: + """ + Delete all cached pages associated with this tag, for all users/scopes. 
+ """ + r = get_redis() + if r: + t_key = tag_set_key(tag) + keys = await r.smembers(t_key) # set of bytes + if keys: + await r.delete(*keys) + await r.delete(t_key) + + +async def invalidate_tag_cache_for_user(tag: str, cache_uid: str) -> None: + r = get_redis() + if not r: + return + + t_key = tag_set_key(tag) + keys = await r.smembers(t_key) # set of bytes + if not keys: + return + + prefix = f"cache:page:{cache_uid}:".encode("utf-8") + + # Filter keys belonging to this cache_uid only + to_delete = [k for k in keys if k.startswith(prefix)] + if not to_delete: + return + + # Delete those page entries + await r.delete(*to_delete) + # Remove them from the tag set (leave other users' keys intact) + await r.srem(t_key, *to_delete) + +async def invalidate_tag_cache_for_current_user(tag: str) -> None: + """ + Convenience helper: delete tag cache for the current user_id (scope='user'). + """ + uid = get_user_id() + await invalidate_tag_cache_for_user(tag, uid) + + +# --------------------------------------------------------------------------- +# Cache decorator for GET +# --------------------------------------------------------------------------- + +def cache_page( + ttl: int = 0, + tag: Optional[str] = None, + scope: Scope = "user", +): + """ + Cache GET responses in Redis. + + ttl: + Seconds to keep the cache. 0 = no expiry. + tag: + Optional tag name used for bulk invalidation via invalidate_tag_cache(). 
+ scope: + "user" → cache per-user (includes 'anon'), tracked in cache:user:{id} + "global" → single cache shared by everyone (no per-user tracking) + "anon" → cache only for anonymous users; logged-in users bypass cache + """ + + def decorator(view): + @wraps(view) + async def wrapper(*args, **kwargs): + r = get_redis() + + if not r or request.method != "GET": + return await view(*args, **kwargs) + uid = get_user_id() + + # Decide who the cache key is keyed on + if scope == "global": + cache_uid = "global" + elif scope == "anon": + # Only cache for anonymous users + if uid != "anon": + return await view(*args, **kwargs) + cache_uid = "anon" + else: # scope == "user" + cache_uid = uid + + key = make_cache_key(cache_uid) + + cached = await r.hgetall(key) + if cached: + body = cached[b"body"] + status = int(cached[b"status"].decode()) + content_type = cached.get(b"content_type", b"text/html").decode() + return Response(body, status=status, content_type=content_type) + + # Not cached, call the view + resp = await view(*args, **kwargs) + + # Normalise: if the view returned a string/bytes, wrap it + if not isinstance(resp, Response): + resp = Response(resp, content_type="text/html") + + # Only cache successful responses + if resp.status_code == 200: + body = await resp.get_data() # bytes + + pipe = r.pipeline() + pipe.hset( + key, + mapping={ + "body": body, + "status": str(resp.status_code), + "content_type": resp.content_type or "text/html", + }, + ) + if ttl: + pipe.expire(key, ttl) + + # Track per-user keys only when scope='user' + if scope == "user": + pipe.sadd(user_set_key(cache_uid), key) + + # Track per-tag keys (all scopes) + if tag: + pipe.sadd(tag_set_key(tag), key) + + await pipe.execute() + + resp.set_data(body) + + return resp + + return wrapper + + return decorator + + +# --------------------------------------------------------------------------- +# Clear cache decorator for POST (or any method) +# 
# --------------------------------------------------------------------------- #

def clear_cache(
    *,
    tag: Optional[str] = None,
    tag_scope: TagScope = "all",
    clear_user: bool = False,
):
    """
    Decorator for routes that should clear cache after they run.

    Use on POST/PUT/PATCH/DELETE handlers.

    Params:
        tag:
            If set, will clear caches for this tag.
        tag_scope:
            "all"  → invalidate_tag_cache(tag) (all users/scopes)
            "user" → invalidate_tag_cache_for_current_user(tag)
        clear_user:
            If True, also run invalidate_user_cache(current_user_id).

    Typical usage:

        @bp.post("/posts/<slug>/edit")
        @clear_cache(tag="post.post_detail", tag_scope="all")
        async def edit_post(slug):
            ...

        @bp.post("/prefs")
        @clear_cache(tag="dashboard", tag_scope="user", clear_user=True)
        async def update_prefs():
            ...
    """

    def decorator(view):
        @wraps(view)
        async def wrapper(*args, **kwargs):
            # Run the view first; invalidation only happens on success.
            resp = await view(*args, **kwargs)

            if not get_redis():
                return resp

            status = getattr(resp, "status_code", None)
            # A non-Response return (string/dict) is treated as success;
            # otherwise require a 2xx status.
            if status is not None and not (200 <= status < 300):
                return resp

            pending = []
            if clear_user:
                pending.append(invalidate_user_cache(get_user_id()))
            if tag:
                if tag_scope == "all":
                    pending.append(invalidate_tag_cache(tag))
                else:  # tag_scope == "user"
                    pending.append(invalidate_tag_cache_for_current_user(tag))

            if pending:
                # Run all invalidations concurrently.
                await asyncio.gather(*pending)

            return resp

        return wrapper

    return decorator


async def clear_all_cache(prefix: str = "cache:") -> None:
    """Delete every Redis key matching ``{prefix}*`` via cursor-based SCAN."""
    r = get_redis()
    if not r:
        return

    cursor = 0
    while True:
        cursor, batch = await r.scan(cursor=cursor, match=f"{prefix}*", count=500)
        if batch:
            await r.delete(*batch)
        if cursor == 0:
            break
a/shared/browser/app/utils/__init__.py b/shared/browser/app/utils/__init__.py new file mode 100644 index 0000000..75b8279 --- /dev/null +++ b/shared/browser/app/utils/__init__.py @@ -0,0 +1,12 @@ +from .parse import ( + parse_time, + parse_cost, + parse_dt +) +from .utils import ( + current_route_relative_path, + current_url_without_page, + vary, +) + +from .utc import utcnow \ No newline at end of file diff --git a/shared/browser/app/utils/htmx.py b/shared/browser/app/utils/htmx.py new file mode 100644 index 0000000..17f80e6 --- /dev/null +++ b/shared/browser/app/utils/htmx.py @@ -0,0 +1,46 @@ +"""HTMX utilities for detecting and handling HTMX requests.""" + +from quart import request + + +def is_htmx_request() -> bool: + """ + Check if the current request is an HTMX request. + + Returns: + bool: True if HX-Request header is present and true + """ + return request.headers.get("HX-Request", "").lower() == "true" + + +def get_htmx_target() -> str | None: + """ + Get the target element ID from HTMX request headers. + + Returns: + str | None: Target element ID or None + """ + return request.headers.get("HX-Target") + + +def get_htmx_trigger() -> str | None: + """ + Get the trigger element ID from HTMX request headers. + + Returns: + str | None: Trigger element ID or None + """ + return request.headers.get("HX-Trigger") + + +def should_return_fragment() -> bool: + """ + Determine if we should return a fragment vs full page. + + For HTMX requests, return fragment. + For normal requests, return full page. 
+ + Returns: + bool: True if fragment should be returned + """ + return is_htmx_request() diff --git a/shared/browser/app/utils/parse.py b/shared/browser/app/utils/parse.py new file mode 100644 index 0000000..ee6d8de --- /dev/null +++ b/shared/browser/app/utils/parse.py @@ -0,0 +1,36 @@ +from datetime import datetime, timezone + +def parse_time(val: str | None): + if not val: + return None + try: + h,m = val.split(':', 1) + from datetime import time + return time(int(h), int(m)) + except Exception: + return None + +def parse_cost(val: str | None): + if not val: + return None + try: + return float(val) + except Exception: + return None + + if not val: + return None + dt = datetime.fromisoformat(val) + # make TZ-aware (assume local if naive; convert to UTC) + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt + +def parse_dt(val: str | None) -> datetime | None: + if not val: + return None + dt = datetime.fromisoformat(val) + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt + diff --git a/shared/browser/app/utils/utc.py b/shared/browser/app/utils/utc.py new file mode 100644 index 0000000..084886c --- /dev/null +++ b/shared/browser/app/utils/utc.py @@ -0,0 +1,6 @@ +from datetime import datetime, timezone + + +def utcnow() -> datetime: + return datetime.now(timezone.utc) + diff --git a/shared/browser/app/utils/utils.py b/shared/browser/app/utils/utils.py new file mode 100644 index 0000000..71cd993 --- /dev/null +++ b/shared/browser/app/utils/utils.py @@ -0,0 +1,51 @@ +from quart import ( + Response, + request, + g, +) +from shared.utils import host_url +from urllib.parse import urlencode + +def current_route_relative_path() -> str: + """ + Returns the current request path relative to the app's mount point (script_root). 
+ """ + + (request.script_root or "").rstrip("/") + path = request.path # excludes query string + + + + if g.root and path.startswith(f"/{g.root}"): + rel = path[len(g.root+1):] + return rel if rel.startswith("/") else "/" + rel + return path # app at / + + +def current_url_without_page() -> str: + """ + Build current URL (host+path+qs) but with ?page= removed. + Used for Hx-Push-Url. + """ + base = host_url(current_route_relative_path()) + + params = request.args.to_dict(flat=False) # keep multivals + params.pop("page", None) + qs = urlencode(params, doseq=True) + + return f"{base}?{qs}" if qs else base + +def vary(resp: Response) -> Response: + """ + Ensure caches/CDNs vary on HX headers so htmx/non-htmx versions don't get mixed. + """ + v = resp.headers.get("Vary", "") + parts = [p.strip() for p in v.split(",") if p.strip()] + for h in ("HX-Request", "X-Origin"): + if h not in parts: + parts.append(h) + if parts: + resp.headers["Vary"] = ", ".join(parts) + return resp + + diff --git a/shared/browser/templates/_oob_elements.html b/shared/browser/templates/_oob_elements.html new file mode 100644 index 0000000..da748da --- /dev/null +++ b/shared/browser/templates/_oob_elements.html @@ -0,0 +1,33 @@ +{% extends oob.oob_extends %} + +{# OOB elements for HTMX navigation - all elements that need updating #} + +{# Import shared OOB macros #} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + +{% block oobs %} + + {% from '_types/root/_n/macros.html' import oob_header with context %} + {{oob_header( + oob.parent_id, + oob.child_id, + oob.header, + )}} + + {% from oob.parent_header import header_row with context %} + {{ header_row(oob=True) }} +{% endblock %} + + +{# Mobile menu - from market/index.html _main_mobile_menu block #} +{% set mobile_nav %} + {% include oob.nav %} +{% endset %} +{{ mobile_menu(mobile_nav) }} + + +{% block content %} + {% include oob.main %} +{% endblock %} + + diff --git 
a/shared/browser/templates/_types/root/_full_user.html b/shared/browser/templates/_types/root/_full_user.html new file mode 100644 index 0000000..b5f46cc --- /dev/null +++ b/shared/browser/templates/_types/root/_full_user.html @@ -0,0 +1,11 @@ + +{% set href=account_url('/') %} + + + {{g.user.email}} + + \ No newline at end of file diff --git a/shared/browser/templates/_types/root/_hamburger.html b/shared/browser/templates/_types/root/_hamburger.html new file mode 100644 index 0000000..9a30a19 --- /dev/null +++ b/shared/browser/templates/_types/root/_hamburger.html @@ -0,0 +1,13 @@ + +
    + + + + +
    + + diff --git a/shared/browser/templates/_types/root/_head.html b/shared/browser/templates/_types/root/_head.html new file mode 100644 index 0000000..26a487b --- /dev/null +++ b/shared/browser/templates/_types/root/_head.html @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/shared/browser/templates/_types/root/_index.html b/shared/browser/templates/_types/root/_index.html new file mode 100644 index 0000000..5d3e313 --- /dev/null +++ b/shared/browser/templates/_types/root/_index.html @@ -0,0 +1,13 @@ +{% extends '_types/root/index.html' %} +{% from 'macros/glyphs.html' import opener %} + {% from 'macros/title.html' import title with context %} +{% block main_mobile_menu %} +
    + {% block _main_mobile_menu %} + {% include '_types/root/_nav.html' %} + {% include '_types/root/_nav_panel.html' %} + {% endblock %} +
    +{% endblock %} + + diff --git a/shared/browser/templates/_types/root/_n/macros.html b/shared/browser/templates/_types/root/_n/macros.html new file mode 100644 index 0000000..26e6128 --- /dev/null +++ b/shared/browser/templates/_types/root/_n/macros.html @@ -0,0 +1,35 @@ +{% macro header(id=False, oob=False) %} +
    + {{ caller() }} +
    +{% endmacro %} + + +{% macro oob_header(id, child_id, row_macro) %} + {% call header(id=id, oob=True) %} + {% call header() %} + {% from row_macro import header_row with context %} + {{header_row()}} +
    +
    + {% endcall %} + {% endcall %} +{% endmacro %} + + +{% macro index_row(id, row_macro) %} + {% from '_types/root/_n/macros.html' import header with context %} + {% set _caller = caller %} + {% call header() %} + {% from row_macro import header_row with context %} + {{ header_row() }} +
    + {{_caller()}} +
    + {% endcall %} + +{% endmacro %} \ No newline at end of file diff --git a/shared/browser/templates/_types/root/_nav.html b/shared/browser/templates/_types/root/_nav.html new file mode 100644 index 0000000..c220d05 --- /dev/null +++ b/shared/browser/templates/_types/root/_nav.html @@ -0,0 +1,29 @@ +{% set _app_slugs = { + 'cart': cart_url('/'), + 'market': market_url('/'), + 'events': events_url('/'), + 'federation': federation_url('/'), + 'account': account_url('/'), +} %} +{% set _first_seg = request.path.strip('/').split('/')[0] %} + diff --git a/shared/browser/templates/_types/root/_nav_panel.html b/shared/browser/templates/_types/root/_nav_panel.html new file mode 100644 index 0000000..e804082 --- /dev/null +++ b/shared/browser/templates/_types/root/_nav_panel.html @@ -0,0 +1,7 @@ + {% import 'macros/links.html' as links %} + {% if g.rights.admin %} + + + + {% endif %} + \ No newline at end of file diff --git a/shared/browser/templates/_types/root/_oob_menu.html b/shared/browser/templates/_types/root/_oob_menu.html new file mode 100644 index 0000000..b20c124 --- /dev/null +++ b/shared/browser/templates/_types/root/_oob_menu.html @@ -0,0 +1,46 @@ +{# + Shared mobile menu for both base templates and OOB updates + + This macro can be used in two modes: + - oob=true: Outputs full wrapper with hx-swap-oob attribute (for OOB updates) + - oob=false: Outputs just content, assumes wrapper exists (for base templates) + + The caller can pass section-specific nav items via section_nav parameter. +#} + +{% macro mobile_menu(section_nav='', oob=true) %} +{% if oob %} +
    +{% endif %} + +{% if oob %} +
    +{% endif %} +{% endmacro %} + + + + + +{% macro oob_mobile_menu() %} +
    + +
    +{% endmacro %} + + + diff --git a/shared/browser/templates/_types/root/_sign_in.html b/shared/browser/templates/_types/root/_sign_in.html new file mode 100644 index 0000000..d8777ae --- /dev/null +++ b/shared/browser/templates/_types/root/_sign_in.html @@ -0,0 +1,10 @@ + + + + sign in or register + diff --git a/shared/browser/templates/_types/root/exceptions/403/img.html b/shared/browser/templates/_types/root/exceptions/403/img.html new file mode 100644 index 0000000..c171b80 --- /dev/null +++ b/shared/browser/templates/_types/root/exceptions/403/img.html @@ -0,0 +1 @@ +{{asset_url('errors/403.gif')}} \ No newline at end of file diff --git a/shared/browser/templates/_types/root/exceptions/403/message.html b/shared/browser/templates/_types/root/exceptions/403/message.html new file mode 100644 index 0000000..d8d39d5 --- /dev/null +++ b/shared/browser/templates/_types/root/exceptions/403/message.html @@ -0,0 +1 @@ +YOU CAN'T DO THAT \ No newline at end of file diff --git a/shared/browser/templates/_types/root/exceptions/404/img.html b/shared/browser/templates/_types/root/exceptions/404/img.html new file mode 100644 index 0000000..fbfefa5 --- /dev/null +++ b/shared/browser/templates/_types/root/exceptions/404/img.html @@ -0,0 +1 @@ +{{asset_url('errors/404.gif')}} \ No newline at end of file diff --git a/shared/browser/templates/_types/root/exceptions/404/message.html b/shared/browser/templates/_types/root/exceptions/404/message.html new file mode 100644 index 0000000..6647958 --- /dev/null +++ b/shared/browser/templates/_types/root/exceptions/404/message.html @@ -0,0 +1 @@ +NOT FOUND \ No newline at end of file diff --git a/shared/browser/templates/_types/root/exceptions/_.html b/shared/browser/templates/_types/root/exceptions/_.html new file mode 100644 index 0000000..3e54b0d --- /dev/null +++ b/shared/browser/templates/_types/root/exceptions/_.html @@ -0,0 +1,12 @@ +{% extends '_types/root/exceptions/base.html' %} + +{% block error_summary %} +
    + {% include '_types/root/exceptions/' + errnum + '/message.html' %} +
    +{% endblock %} + +{% block error_content %} + +{% endblock %} + diff --git a/shared/browser/templates/_types/root/exceptions/app_error.html b/shared/browser/templates/_types/root/exceptions/app_error.html new file mode 100644 index 0000000..7d062e8 --- /dev/null +++ b/shared/browser/templates/_types/root/exceptions/app_error.html @@ -0,0 +1,42 @@ +{% extends '_types/root/_index.html' %} + +{% block content %} +
    +
    +
    + + + +
    + +

    + Something went wrong +

    + + {% if messages %} +
    + {% for message in messages %} +
    + {{ message }} +
    + {% endfor %} +
    + {% endif %} + +
    + + + Home + +
    +
    +
    +{% endblock %} diff --git a/shared/browser/templates/_types/root/exceptions/base.html b/shared/browser/templates/_types/root/exceptions/base.html new file mode 100644 index 0000000..7d20283 --- /dev/null +++ b/shared/browser/templates/_types/root/exceptions/base.html @@ -0,0 +1,17 @@ +{% extends '_types/root/index.html' %} + +{% block content %} +
    + {% block error_summary %} + {% endblock %} +
    +
    + {% block error_content %} + {% endblock %} +
    +{% endblock %} + diff --git a/shared/browser/templates/_types/root/exceptions/error.html b/shared/browser/templates/_types/root/exceptions/error.html new file mode 100644 index 0000000..70a8164 --- /dev/null +++ b/shared/browser/templates/_types/root/exceptions/error.html @@ -0,0 +1,12 @@ +{% extends '_types/root/exceptions/base.html' %} + +{% block error_summary %} +
    + WELL THIS IS EMBARASSING... +
    +{% endblock %} + +{% block error_content %} + +{% endblock %} + diff --git a/shared/browser/templates/_types/root/exceptions/hx/_.html b/shared/browser/templates/_types/root/exceptions/hx/_.html new file mode 100644 index 0000000..6a916f1 --- /dev/null +++ b/shared/browser/templates/_types/root/exceptions/hx/_.html @@ -0,0 +1,8 @@ +
    +
    + {% include '_types/root/exceptions/' + errnum + '/message.html' %} +
    + + + +
    \ No newline at end of file diff --git a/shared/browser/templates/_types/root/header/_header.html b/shared/browser/templates/_types/root/header/_header.html new file mode 100644 index 0000000..348bba3 --- /dev/null +++ b/shared/browser/templates/_types/root/header/_header.html @@ -0,0 +1,41 @@ +{% set select_colours = " + [.hover-capable_&]:hover:bg-yellow-300 + aria-selected:bg-stone-500 aria-selected:text-white + [.hover-capable_&[aria-selected=true]:hover]:bg-orange-500 +"%} +{% import 'macros/links.html' as links %} + +{% macro header_row(oob=False) %} + {% call links.menu_row(id='root-row', oob=oob) %} +
    + {# Cart mini — fetched from cart app as fragment #} + {% if cart_mini_html %} + {{ cart_mini_html | safe }} + {% endif %} + + {# Site title #} +
    + {% from 'macros/title.html' import title with context %} + {{ title('flex justify-center md:justify-start')}} +
    + + {# Desktop nav #} + + {% include '_types/root/_hamburger.html' %} +
    + {% endcall %} + {# Mobile user info #} +
    + {% if auth_menu_html %} + {{ auth_menu_html | safe }} + {% endif %} +
    +{% endmacro %} \ No newline at end of file diff --git a/shared/browser/templates/_types/root/header/_oob.html b/shared/browser/templates/_types/root/header/_oob.html new file mode 100644 index 0000000..45b7240 --- /dev/null +++ b/shared/browser/templates/_types/root/header/_oob.html @@ -0,0 +1,67 @@ +{# + Shared root header for both base templates and OOB updates + + This macro can be used in two modes: + - oob=true: Outputs full div with hx-swap-oob attribute (for OOB updates) + - oob=false: Outputs just content, assumes wrapper div exists (for base templates) + + Usage: + 1. Call root_header_start(oob=true/false) + 2. Add any section-specific headers + 3. Call root_header_end(oob=true/false) +#} + +{% macro root_header_start(oob=true) %} +{% set select_colours = " + [.hover-capable_&]:hover:bg-yellow-300 + aria-selected:bg-stone-500 aria-selected:text-white + [.hover-capable_&[aria-selected=true]:hover]:bg-orange-500 +"%} + +{% if oob %} + +{% endif %} +{% endmacro %} diff --git a/shared/browser/templates/_types/root/header/_oob_.html b/shared/browser/templates/_types/root/header/_oob_.html new file mode 100644 index 0000000..772f2ab --- /dev/null +++ b/shared/browser/templates/_types/root/header/_oob_.html @@ -0,0 +1,38 @@ +{# + Shared root header for both base templates and OOB updates + + This macro can be used in two modes: + - oob=true: Outputs full div with hx-swap-oob attribute (for OOB updates) + - oob=false: Outputs just content, assumes wrapper div exists (for base templates) + + Usage: + 1. Call root_header_start(oob=true/false) + 2. Add any section-specific headers + 3. 
Call root_header_end(oob=true/false) +#} + +{% macro root_header(oob=true) %} +{% set select_colours = " + [.hover-capable_&]:hover:bg-yellow-300 + aria-selected:bg-stone-500 aria-selected:text-white + [.hover-capable_&[aria-selected=true]:hover]:bg-orange-500 +"%} + +{% if oob %} + +{% endif %} + +{% endmacro %} + diff --git a/shared/browser/templates/_types/root/index.html b/shared/browser/templates/_types/root/index.html new file mode 100644 index 0000000..06094f3 --- /dev/null +++ b/shared/browser/templates/_types/root/index.html @@ -0,0 +1,84 @@ +{% import 'macros/layout.html' as layout %} +{% from '_types/root/header/_oob.html' import root_header_start, root_header_end with context %} +{% from '_types/root/_oob_menu.html' import mobile_menu with context %} + + + + + + {% block meta %} + {% include 'social/meta_site.html' %} + {% endblock %} + + {% include '_types/root/_head.html' %} + + +
    + {% block header %} + {% from '_types/root/_n/macros.html' import header with context %} + {% call header() %} + {% call layout.details('/root-header') %} + {% call layout.summary( + 'root-header-summary', + _class='flex items-start gap-2 p-1 + bg-' + menu_colour + '-' + (500-(level()*100))|string, + ) + %} +
    + + {% from '_types/root/header/_header.html' import header_row with context %} + {{ header_row() }} +
    + {% block root_header_child %} + {% endblock %} +
    +
    + + {% endcall %} + {% call layout.menu('root-menu', 'md:hidden bg-yellow-100') %} + {% block main_mobile_menu %} + {% endblock %} + {% endcall %} + {% endcall %} + + + + {% endcall %} + {% endblock %} + + +
    + {% block filter %} + {% endblock %} +
    +
    +
    +
    + + +
    + {% block content %} + {% endblock %} +
    +
    + +
    +
    +
    + +
    + + + diff --git a/shared/browser/templates/_types/root/mobile/_full_user.html b/shared/browser/templates/_types/root/mobile/_full_user.html new file mode 100644 index 0000000..1282e31 --- /dev/null +++ b/shared/browser/templates/_types/root/mobile/_full_user.html @@ -0,0 +1,10 @@ + +{% set href=account_url('/') %} + + + {{g.user.email}} + + \ No newline at end of file diff --git a/shared/browser/templates/_types/root/mobile/_sign_in.html b/shared/browser/templates/_types/root/mobile/_sign_in.html new file mode 100644 index 0000000..3c92646 --- /dev/null +++ b/shared/browser/templates/_types/root/mobile/_sign_in.html @@ -0,0 +1,8 @@ + + + + sign in or register + diff --git a/shared/browser/templates/macros/admin_nav.html b/shared/browser/templates/macros/admin_nav.html new file mode 100644 index 0000000..738a319 --- /dev/null +++ b/shared/browser/templates/macros/admin_nav.html @@ -0,0 +1,21 @@ +{# + Shared admin navigation macro + Use this instead of duplicate _nav.html files +#} + +{% macro admin_nav_item(href, icon='cog', label='', select_colours='', aclass=styles.nav_button) %} + {% import 'macros/links.html' as links %} + {% call links.link(href, hx_select_search, select_colours, True, aclass=aclass) %} + + {{ label }} + {% endcall %} +{% endmacro %} + +{% macro placeholder_nav() %} +{# Placeholder for admin sections without specific nav items #} + +{% endmacro %} diff --git a/shared/browser/templates/macros/cart_icon.html b/shared/browser/templates/macros/cart_icon.html new file mode 100644 index 0000000..7b8a958 --- /dev/null +++ b/shared/browser/templates/macros/cart_icon.html @@ -0,0 +1,31 @@ +{# Cart icon/badge — shows logo when empty, cart icon with count when items present #} + +{% macro cart_icon(count=0, oob=False) %} +
    + {% if count == 0 %} +
    + + + +
    + {% else %} + + + + {{ count }} + + + {% endif %} +
    +{% endmacro %} diff --git a/shared/browser/templates/macros/glyphs.html b/shared/browser/templates/macros/glyphs.html new file mode 100644 index 0000000..0e7e225 --- /dev/null +++ b/shared/browser/templates/macros/glyphs.html @@ -0,0 +1,17 @@ +{% macro opener(group=False) %} + + + + +{% endmacro %} \ No newline at end of file diff --git a/shared/browser/templates/macros/layout.html b/shared/browser/templates/macros/layout.html new file mode 100644 index 0000000..fc648e8 --- /dev/null +++ b/shared/browser/templates/macros/layout.html @@ -0,0 +1,61 @@ +{# templates/macros/layout.html #} + +{% macro details(group = '', _class='') %} +
    + {{ caller() }} +
    +{%- endmacro %} + +{% macro summary(id, _class=None, oob=False) %} + +
    +
    + {{ caller() }} +
    +
    +
    +{%- endmacro %} + +{% macro filter_summary(id, current_local_href, search, search_count, hx_select, oob=True) %} + +
    +
    +
    + + + + + + +
    +
    +
    +
    + {{ caller() }} + +
    +
    + {% from 'macros/search.html' import search_mobile %} + {{ search_mobile(current_local_href, search, search_count, hx_select) }} +
    +
    +{%- endmacro %} + + +{% macro menu(id, _class="") %} +
    + {{ caller() }} +
    +{%- endmacro %} diff --git a/shared/browser/templates/macros/links.html b/shared/browser/templates/macros/links.html new file mode 100644 index 0000000..d80a51d --- /dev/null +++ b/shared/browser/templates/macros/links.html @@ -0,0 +1,59 @@ + + +{% macro link(url, select, select_colours='', highlight=True, _class='', aclass='') %} + {% set href=url|host%} + +{% endmacro %} + + +{% macro menu_row(id=False, oob=False) %} +
    + {{ caller() }} +
    + {{level_up()}} +{% endmacro %} + +{% macro desktop_nav() %} + +{% endmacro %} + +{% macro admin() %} + +
    + settings +
    + +{% endmacro %} \ No newline at end of file diff --git a/shared/browser/templates/macros/scrolling_menu.html b/shared/browser/templates/macros/scrolling_menu.html new file mode 100644 index 0000000..d1a823a --- /dev/null +++ b/shared/browser/templates/macros/scrolling_menu.html @@ -0,0 +1,68 @@ +{# + Scrolling menu macro with arrow navigation + + Creates a horizontally scrollable menu (desktop) or vertically scrollable (mobile) + with arrow buttons that appear/hide based on content overflow. + + Parameters: + - container_id: Unique ID for the scroll container + - items: List of items to iterate over + - item_content: Caller block that renders each item (receives 'item' variable) + - wrapper_class: Optional additional classes for outer wrapper + - container_class: Optional additional classes for scroll container + - item_class: Optional additional classes for each item wrapper +#} + +{% macro scrolling_menu(container_id, items, wrapper_class='', container_class='', item_class='') %} + {% if items %} + {# Left scroll arrow - desktop only #} + + + {# Scrollable container #} +
    +
    + {% for item in items %} +
    + {{ caller(item) }} +
    + {% endfor %} +
    +
    + + + + {# Right scroll arrow - desktop only #} + + {% endif %} +{% endmacro %} diff --git a/shared/browser/templates/macros/search.html b/shared/browser/templates/macros/search.html new file mode 100644 index 0000000..98c0cde --- /dev/null +++ b/shared/browser/templates/macros/search.html @@ -0,0 +1,83 @@ +{# Shared search input macros for filter UIs #} + +{% macro search_mobile(current_local_href, search, search_count, hx_select) -%} +
    + + +
    + {% if search %} + {{search_count}} + {% endif %} +
    +
    +{%- endmacro %} + +{% macro search_desktop(current_local_href, search, search_count, hx_select) -%} +
    + + +
    + {% if search %} + {{search_count}} + {% endif %} + {{zap_filter}} +
    +
    +{%- endmacro %} diff --git a/shared/browser/templates/macros/stickers.html b/shared/browser/templates/macros/stickers.html new file mode 100644 index 0000000..2be5b9f --- /dev/null +++ b/shared/browser/templates/macros/stickers.html @@ -0,0 +1,24 @@ +{% macro sticker(src, title, enabled, size=40, found=false) -%} + + + + {{ title|capitalize }} + + + + + +{%- endmacro -%} + diff --git a/shared/browser/templates/macros/title.html b/shared/browser/templates/macros/title.html new file mode 100644 index 0000000..4477fc2 --- /dev/null +++ b/shared/browser/templates/macros/title.html @@ -0,0 +1,10 @@ +{% macro title(_class='') %} + +

    + {{ site().title }} +

    +
    +{% endmacro %} diff --git a/shared/browser/templates/mobile/menu.html b/shared/browser/templates/mobile/menu.html new file mode 100644 index 0000000..729c141 --- /dev/null +++ b/shared/browser/templates/mobile/menu.html @@ -0,0 +1,5 @@ + +
    +{% block menu %} +{% endblock %} +
    \ No newline at end of file diff --git a/shared/browser/templates/oob_elements.html b/shared/browser/templates/oob_elements.html new file mode 100644 index 0000000..7a6b88a --- /dev/null +++ b/shared/browser/templates/oob_elements.html @@ -0,0 +1,38 @@ + +{% block oobs %} +{% endblock %} + +
    +{% block filter %} +{% endblock %} +
    + + + + + +
    + {% block mobile_menu %} + {% endblock %} +
    + + +
    + {% block content %} + + {% endblock %} + +
    diff --git a/shared/browser/templates/sentinel/desktop_content.html b/shared/browser/templates/sentinel/desktop_content.html new file mode 100644 index 0000000..1bb6127 --- /dev/null +++ b/shared/browser/templates/sentinel/desktop_content.html @@ -0,0 +1,9 @@ +
    + loading… {{ page }} / {{ total_pages }} +
    + + \ No newline at end of file diff --git a/shared/browser/templates/sentinel/mobile_content.html b/shared/browser/templates/sentinel/mobile_content.html new file mode 100644 index 0000000..f4ca68e --- /dev/null +++ b/shared/browser/templates/sentinel/mobile_content.html @@ -0,0 +1,11 @@ + +
    + loading… {{ page }} / {{ total_pages }} +
    + + + \ No newline at end of file diff --git a/shared/browser/templates/sentinel/wireless_error.svg b/shared/browser/templates/sentinel/wireless_error.svg new file mode 100644 index 0000000..7df8fac --- /dev/null +++ b/shared/browser/templates/sentinel/wireless_error.svg @@ -0,0 +1,20 @@ + + + + + + + + + + + diff --git a/shared/browser/templates/social/meta_base.html b/shared/browser/templates/social/meta_base.html new file mode 100644 index 0000000..215768e --- /dev/null +++ b/shared/browser/templates/social/meta_base.html @@ -0,0 +1,54 @@ +{# social/meta_base.html — common, non-conflicting head tags #} +{# Expected context: + site: { title, url, logo, default_image, twitter_site, fb_app_id, description? } + request: Quart request (for canonical derivation) + robots_override: optional string ("index,follow" / "noindex,nofollow") +#} + + + + +{# Canonical #} +{% set _site_url = site().url.rstrip('/') if site and site().url else '' %} +{% set canonical = ( + request.url if request and request.url + else (_site_url ~ request.path if request and _site_url else _site_url or None) +) %} + +{# Robots: allow override; default to index,follow #} + + +{# Theme & RSS #} + +{% if _site_url %} + +{% endif %} + +{# JSON-LD: Organization & WebSite are safe on all pages (don't conflict with BlogPosting) #} +{% set org_jsonld = { + "@context": "https://schema.org", + "@type": "Organization", + "name": site().title if site and site().title else "", + "url": _site_url if _site_url else None, + "logo": site().logo if site and site().logo else None +} %} + + +{% set website_jsonld = { + "@context": "https://schema.org", + "@type": "WebSite", + "name": site().title if site and site().title else "", + "url": _site_url if _site_url else canonical, + "potentialAction": { + "@type": "SearchAction", + "target": (_site_url ~ "/search?q={query}") if _site_url else None, + "query-input": "required name=query" + } +} %} + diff --git a/shared/browser/templates/social/meta_site.html 
b/shared/browser/templates/social/meta_site.html new file mode 100644 index 0000000..6ccebb7 --- /dev/null +++ b/shared/browser/templates/social/meta_site.html @@ -0,0 +1,25 @@ +{# social/meta_site.html — generic site/page meta #} +{% include 'social/meta_base.html' %} + +{# Title/description (site-level) #} +{% set description = site().description or '' %} + +{{ base_title }} +{% if description %}{% endif %} +{% if canonical %}{% endif %} + +{# Open Graph (website) #} + + + +{% if description %}{% endif %} +{% if canonical %}{% endif %} +{% if site and site().default_image %}{% endif %} +{% if site and site().fb_app_id %}{% endif %} + +{# Twitter (website) #} + +{% if site and site().twitter_site %}{% endif %} + +{% if description %}{% endif %} +{% if site and site().default_image %}{% endif %} diff --git a/shared/config.py b/shared/config.py new file mode 100644 index 0000000..edee631 --- /dev/null +++ b/shared/config.py @@ -0,0 +1,84 @@ +# suma_browser/config.py +from __future__ import annotations + +import asyncio +import os +from types import MappingProxyType +from typing import Any, Optional +import copy +import yaml + +# Default config path (override with APP_CONFIG_FILE) +_DEFAULT_CONFIG_PATH = os.environ.get( + "APP_CONFIG_FILE", + os.path.join(os.getcwd(), "config/app-config.yaml"), +) + +# Module state +_init_lock = asyncio.Lock() +_data_frozen: Any = None # read-only view (mappingproxy / tuples / frozensets) +_data_plain: Any = None # plain builtins for pretty-print / logging + +# ---------------- utils ---------------- +def _freeze(obj: Any) -> Any: + """Deep-freeze containers to read-only equivalents.""" + if isinstance(obj, dict): + # freeze children first, then wrap dict in mappingproxy + return MappingProxyType({k: _freeze(v) for k, v in obj.items()}) + if isinstance(obj, list): + return tuple(_freeze(v) for v in obj) + if isinstance(obj, set): + return frozenset(_freeze(v) for v in obj) + if isinstance(obj, tuple): + return tuple(_freeze(v) for v 
in obj) + return obj + +# ---------------- API ---------------- +async def init_config(path: Optional[str] = None, *, force: bool = False) -> None: + """ + Load YAML exactly as-is and cache both a frozen (read-only) and a plain copy. + Idempotent; pass force=True to reload. + """ + global _data_frozen, _data_plain + + if _data_frozen is not None and not force: + return + + async with _init_lock: + if _data_frozen is not None and not force: + return + + cfg_path = path or _DEFAULT_CONFIG_PATH + if not os.path.exists(cfg_path): + raise FileNotFoundError(f"Config file not found: {cfg_path}") + + with open(cfg_path, "r", encoding="utf-8") as f: + raw = yaml.safe_load(f) # whatever the YAML root is + + # store plain as loaded; store frozen for normal use + _data_plain = raw + _data_frozen = _freeze(raw) + +def config() -> Any: + """ + Return the read-only (frozen) config. Call init_config() first. + """ + if _data_frozen is None: + raise RuntimeError("init_config() has not been awaited yet.") + return _data_frozen + +def as_plain() -> Any: + """ + Return a deep copy of the plain config for safe external use/pretty printing. + """ + if _data_plain is None: + raise RuntimeError("init_config() has not been awaited yet.") + return copy.deepcopy(_data_plain) + +def pretty() -> str: + """ + YAML pretty string without mappingproxy noise. + """ + if _data_plain is None: + raise RuntimeError("init_config() has not been awaited yet.") + return yaml.safe_dump(_data_plain, sort_keys=False, allow_unicode=True) diff --git a/shared/containers.py b/shared/containers.py new file mode 100644 index 0000000..4e2fff7 --- /dev/null +++ b/shared/containers.py @@ -0,0 +1,20 @@ +""" +Generic container concept — replaces hard-wired post_id FKs +with container_type + container_id soft references. +""" +from __future__ import annotations + + +class ContainerType: + PAGE = "page" + # Future: GROUP = "group", MARKET = "market", etc. 
+ + +def container_filter(model, container_type: str, container_id: int): + """Return SQLAlchemy filter clauses for a container reference.""" + return [model.container_type == container_type, model.container_id == container_id] + + +def content_filter(model, content_type: str, content_id: int): + """Return SQLAlchemy filter clauses for a content reference (e.g. CalendarEntryContent).""" + return [model.content_type == content_type, model.content_id == content_id] diff --git a/shared/contracts/__init__.py b/shared/contracts/__init__.py new file mode 100644 index 0000000..d8cf7bc --- /dev/null +++ b/shared/contracts/__init__.py @@ -0,0 +1,31 @@ +"""Typed contracts (DTOs + Protocols) for cross-domain service interfaces.""" + +from .dtos import ( + PostDTO, + CalendarDTO, + CalendarEntryDTO, + MarketPlaceDTO, + ProductDTO, + CartItemDTO, + CartSummaryDTO, +) +from .protocols import ( + BlogService, + CalendarService, + MarketService, + CartService, +) + +__all__ = [ + "PostDTO", + "CalendarDTO", + "CalendarEntryDTO", + "MarketPlaceDTO", + "ProductDTO", + "CartItemDTO", + "CartSummaryDTO", + "BlogService", + "CalendarService", + "MarketService", + "CartService", +] diff --git a/shared/contracts/dtos.py b/shared/contracts/dtos.py new file mode 100644 index 0000000..cd5c50d --- /dev/null +++ b/shared/contracts/dtos.py @@ -0,0 +1,255 @@ +"""Frozen dataclasses for cross-domain data transfer. + +These are the *only* shapes that cross domain boundaries. Consumers never +see ORM model instances from another domain — only these DTOs. 
+""" +from __future__ import annotations + +from dataclasses import dataclass, field +from datetime import datetime +from decimal import Decimal + + +# --------------------------------------------------------------------------- +# Blog domain +# --------------------------------------------------------------------------- + +@dataclass(frozen=True, slots=True) +class PostDTO: + id: int + slug: str + title: str + status: str + visibility: str + is_page: bool = False + feature_image: str | None = None + html: str | None = None + excerpt: str | None = None + custom_excerpt: str | None = None + published_at: datetime | None = None + + +# --------------------------------------------------------------------------- +# Calendar / Events domain +# --------------------------------------------------------------------------- + +@dataclass(frozen=True, slots=True) +class CalendarDTO: + id: int + container_type: str + container_id: int + name: str + slug: str + description: str | None = None + + +@dataclass(frozen=True, slots=True) +class TicketDTO: + id: int + code: str + state: str + entry_name: str + entry_start_at: datetime + entry_end_at: datetime | None = None + ticket_type_name: str | None = None + calendar_name: str | None = None + created_at: datetime | None = None + checked_in_at: datetime | None = None + entry_id: int | None = None + ticket_type_id: int | None = None + price: Decimal | None = None + order_id: int | None = None + calendar_container_id: int | None = None + + +@dataclass(frozen=True, slots=True) +class CalendarEntryDTO: + id: int + calendar_id: int + name: str + start_at: datetime + state: str + cost: Decimal + end_at: datetime | None = None + user_id: int | None = None + session_id: str | None = None + order_id: int | None = None + slot_id: int | None = None + ticket_price: Decimal | None = None + ticket_count: int | None = None + calendar_name: str | None = None + calendar_slug: str | None = None + calendar_container_id: int | None = None + 
calendar_container_type: str | None = None + + +# --------------------------------------------------------------------------- +# Market domain +# --------------------------------------------------------------------------- + +@dataclass(frozen=True, slots=True) +class MarketPlaceDTO: + id: int + container_type: str + container_id: int + name: str + slug: str + description: str | None = None + + +@dataclass(frozen=True, slots=True) +class ProductDTO: + id: int + slug: str + title: str | None = None + image: str | None = None + description_short: str | None = None + rrp: Decimal | None = None + regular_price: Decimal | None = None + special_price: Decimal | None = None + + +# --------------------------------------------------------------------------- +# Cart domain +# --------------------------------------------------------------------------- + +@dataclass(frozen=True, slots=True) +class CartItemDTO: + id: int + product_id: int + quantity: int + product_title: str | None = None + product_slug: str | None = None + product_image: str | None = None + unit_price: Decimal | None = None + market_place_id: int | None = None + + +@dataclass(frozen=True, slots=True) +class CartSummaryDTO: + count: int = 0 + total: Decimal = Decimal("0") + calendar_count: int = 0 + calendar_total: Decimal = Decimal("0") + items: list[CartItemDTO] = field(default_factory=list) + ticket_count: int = 0 + ticket_total: Decimal = Decimal("0") + + +# --------------------------------------------------------------------------- +# Federation / ActivityPub domain +# --------------------------------------------------------------------------- + +@dataclass(frozen=True, slots=True) +class ActorProfileDTO: + id: int + user_id: int + preferred_username: str + public_key_pem: str + display_name: str | None = None + summary: str | None = None + inbox_url: str | None = None + outbox_url: str | None = None + created_at: datetime | None = None + + +@dataclass(frozen=True, slots=True) +class APActivityDTO: + id: 
int + activity_id: str + activity_type: str + actor_profile_id: int + object_type: str | None = None + object_data: dict | None = None + published: datetime | None = None + is_local: bool = True + source_type: str | None = None + source_id: int | None = None + ipfs_cid: str | None = None + + +@dataclass(frozen=True, slots=True) +class APFollowerDTO: + id: int + actor_profile_id: int + follower_acct: str + follower_inbox: str + follower_actor_url: str + created_at: datetime | None = None + app_domain: str = "federation" + + +@dataclass(frozen=True, slots=True) +class APAnchorDTO: + id: int + merkle_root: str + activity_count: int = 0 + tree_ipfs_cid: str | None = None + ots_proof_cid: str | None = None + confirmed_at: datetime | None = None + bitcoin_txid: str | None = None + + +@dataclass(frozen=True, slots=True) +class RemoteActorDTO: + id: int + actor_url: str + inbox_url: str + preferred_username: str + domain: str + display_name: str | None = None + summary: str | None = None + icon_url: str | None = None + shared_inbox_url: str | None = None + public_key_pem: str | None = None + + +@dataclass(frozen=True, slots=True) +class RemotePostDTO: + id: int + remote_actor_id: int + object_id: str + content: str + summary: str | None = None + url: str | None = None + attachments: list[dict] = field(default_factory=list) + tags: list[dict] = field(default_factory=list) + published: datetime | None = None + actor: RemoteActorDTO | None = None + + +@dataclass(frozen=True, slots=True) +class TimelineItemDTO: + id: str # composite key for cursor pagination + post_type: str # "local" | "remote" | "boost" + content: str # HTML + published: datetime + actor_name: str + actor_username: str + object_id: str | None = None + summary: str | None = None + url: str | None = None + attachments: list[dict] = field(default_factory=list) + tags: list[dict] = field(default_factory=list) + actor_domain: str | None = None # None = local + actor_icon: str | None = None + actor_url: str | None 
= None + boosted_by: str | None = None + like_count: int = 0 + boost_count: int = 0 + liked_by_me: bool = False + boosted_by_me: bool = False + author_inbox: str | None = None + + +@dataclass(frozen=True, slots=True) +class NotificationDTO: + id: int + notification_type: str # follow/like/boost/mention/reply + from_actor_name: str + from_actor_username: str + created_at: datetime + read: bool + from_actor_domain: str | None = None + from_actor_icon: str | None = None + target_content_preview: str | None = None diff --git a/shared/contracts/protocols.py b/shared/contracts/protocols.py new file mode 100644 index 0000000..e806b8a --- /dev/null +++ b/shared/contracts/protocols.py @@ -0,0 +1,368 @@ +"""Protocol classes defining each domain's service interface. + +All cross-domain callers program against these Protocols. Concrete +implementations (Sql*Service) and no-op stubs both satisfy them. +""" +from __future__ import annotations + +from datetime import datetime +from typing import Protocol, runtime_checkable + +from sqlalchemy.ext.asyncio import AsyncSession + +from .dtos import ( + PostDTO, + CalendarDTO, + CalendarEntryDTO, + TicketDTO, + MarketPlaceDTO, + ProductDTO, + CartItemDTO, + CartSummaryDTO, + ActorProfileDTO, + APActivityDTO, + APFollowerDTO, + RemoteActorDTO, + RemotePostDTO, + TimelineItemDTO, + NotificationDTO, +) + + +@runtime_checkable +class BlogService(Protocol): + async def get_post_by_slug(self, session: AsyncSession, slug: str) -> PostDTO | None: ... + async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None: ... + async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]: ... + + async def search_posts( + self, session: AsyncSession, query: str, page: int = 1, per_page: int = 10, + ) -> tuple[list[PostDTO], int]: ... 
+ + +@runtime_checkable +class CalendarService(Protocol): + async def calendars_for_container( + self, session: AsyncSession, container_type: str, container_id: int, + ) -> list[CalendarDTO]: ... + + async def pending_entries( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + ) -> list[CalendarEntryDTO]: ... + + async def entries_for_page( + self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None, + ) -> list[CalendarEntryDTO]: ... + + async def entry_by_id(self, session: AsyncSession, entry_id: int) -> CalendarEntryDTO | None: ... + + async def associated_entries( + self, session: AsyncSession, content_type: str, content_id: int, page: int, + ) -> tuple[list[CalendarEntryDTO], bool]: ... + + async def toggle_entry_post( + self, session: AsyncSession, entry_id: int, content_type: str, content_id: int, + ) -> bool: ... + + async def adopt_entries_for_user( + self, session: AsyncSession, user_id: int, session_id: str, + ) -> None: ... + + async def claim_entries_for_order( + self, session: AsyncSession, order_id: int, user_id: int | None, + session_id: str | None, page_post_id: int | None, + ) -> None: ... + + async def confirm_entries_for_order( + self, session: AsyncSession, order_id: int, user_id: int | None, + session_id: str | None, + ) -> None: ... + + async def get_entries_for_order( + self, session: AsyncSession, order_id: int, + ) -> list[CalendarEntryDTO]: ... + + async def user_tickets( + self, session: AsyncSession, *, user_id: int, + ) -> list[TicketDTO]: ... + + async def user_bookings( + self, session: AsyncSession, *, user_id: int, + ) -> list[CalendarEntryDTO]: ... + + async def confirmed_entries_for_posts( + self, session: AsyncSession, post_ids: list[int], + ) -> dict[int, list[CalendarEntryDTO]]: ... + + async def pending_tickets( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + ) -> list[TicketDTO]: ... 
+ + async def tickets_for_page( + self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None, + ) -> list[TicketDTO]: ... + + async def claim_tickets_for_order( + self, session: AsyncSession, order_id: int, user_id: int | None, + session_id: str | None, page_post_id: int | None, + ) -> None: ... + + async def confirm_tickets_for_order( + self, session: AsyncSession, order_id: int, + ) -> None: ... + + async def get_tickets_for_order( + self, session: AsyncSession, order_id: int, + ) -> list[TicketDTO]: ... + + async def adopt_tickets_for_user( + self, session: AsyncSession, user_id: int, session_id: str, + ) -> None: ... + + async def adjust_ticket_quantity( + self, session: AsyncSession, entry_id: int, count: int, *, + user_id: int | None, session_id: str | None, + ticket_type_id: int | None = None, + ) -> int: ... + + async def entry_ids_for_content( + self, session: AsyncSession, content_type: str, content_id: int, + ) -> set[int]: ... + + async def upcoming_entries_for_container( + self, session: AsyncSession, + container_type: str | None = None, container_id: int | None = None, + *, page: int = 1, per_page: int = 20, + ) -> tuple[list[CalendarEntryDTO], bool]: ... + + async def visible_entries_for_period( + self, session: AsyncSession, calendar_id: int, + period_start: datetime, period_end: datetime, + *, user_id: int | None, is_admin: bool, session_id: str | None, + ) -> list[CalendarEntryDTO]: ... + + +@runtime_checkable +class MarketService(Protocol): + async def marketplaces_for_container( + self, session: AsyncSession, container_type: str, container_id: int, + ) -> list[MarketPlaceDTO]: ... + + async def product_by_id(self, session: AsyncSession, product_id: int) -> ProductDTO | None: ... + + async def create_marketplace( + self, session: AsyncSession, container_type: str, container_id: int, + name: str, slug: str, + ) -> MarketPlaceDTO: ... 
+ + async def list_marketplaces( + self, session: AsyncSession, + container_type: str | None = None, container_id: int | None = None, + *, page: int = 1, per_page: int = 20, + ) -> tuple[list[MarketPlaceDTO], bool]: ... + + async def soft_delete_marketplace( + self, session: AsyncSession, container_type: str, container_id: int, + slug: str, + ) -> bool: ... + + +@runtime_checkable +class CartService(Protocol): + async def cart_summary( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + page_slug: str | None = None, + ) -> CartSummaryDTO: ... + + async def cart_items( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + ) -> list[CartItemDTO]: ... + + async def adopt_cart_for_user( + self, session: AsyncSession, user_id: int, session_id: str, + ) -> None: ... + + +@runtime_checkable +class FederationService(Protocol): + # -- Actor management ----------------------------------------------------- + async def get_actor_by_username( + self, session: AsyncSession, username: str, + ) -> ActorProfileDTO | None: ... + + async def get_actor_by_user_id( + self, session: AsyncSession, user_id: int, + ) -> ActorProfileDTO | None: ... + + async def create_actor( + self, session: AsyncSession, user_id: int, preferred_username: str, + display_name: str | None = None, summary: str | None = None, + ) -> ActorProfileDTO: ... + + async def username_available( + self, session: AsyncSession, username: str, + ) -> bool: ... + + # -- Publishing (core cross-domain API) ----------------------------------- + async def publish_activity( + self, session: AsyncSession, *, + actor_user_id: int, + activity_type: str, + object_type: str, + object_data: dict, + source_type: str | None = None, + source_id: int | None = None, + ) -> APActivityDTO: ... + + # -- Queries -------------------------------------------------------------- + async def get_activity( + self, session: AsyncSession, activity_id: str, + ) -> APActivityDTO | None: ... 
+ + async def get_outbox( + self, session: AsyncSession, username: str, + page: int = 1, per_page: int = 20, + origin_app: str | None = None, + ) -> tuple[list[APActivityDTO], int]: ... + + async def get_activity_for_source( + self, session: AsyncSession, source_type: str, source_id: int, + ) -> APActivityDTO | None: ... + + async def count_activities_for_source( + self, session: AsyncSession, source_type: str, source_id: int, + *, activity_type: str, + ) -> int: ... + + # -- Followers ------------------------------------------------------------ + async def get_followers( + self, session: AsyncSession, username: str, + app_domain: str | None = None, + ) -> list[APFollowerDTO]: ... + + async def get_followers_paginated( + self, session: AsyncSession, username: str, + page: int = 1, per_page: int = 20, + ) -> tuple[list[RemoteActorDTO], int]: ... + + async def add_follower( + self, session: AsyncSession, username: str, + follower_acct: str, follower_inbox: str, follower_actor_url: str, + follower_public_key: str | None = None, + app_domain: str = "federation", + ) -> APFollowerDTO: ... + + async def remove_follower( + self, session: AsyncSession, username: str, follower_acct: str, + app_domain: str = "federation", + ) -> bool: ... + + # -- Remote actors -------------------------------------------------------- + async def get_or_fetch_remote_actor( + self, session: AsyncSession, actor_url: str, + ) -> RemoteActorDTO | None: ... + + async def search_remote_actor( + self, session: AsyncSession, acct: str, + ) -> RemoteActorDTO | None: ... + + async def search_actors( + self, session: AsyncSession, query: str, page: int = 1, limit: int = 20, + ) -> tuple[list[RemoteActorDTO], int]: ... + + # -- Following (outbound) ------------------------------------------------- + async def send_follow( + self, session: AsyncSession, local_username: str, remote_actor_url: str, + ) -> None: ... 
+ + async def get_following( + self, session: AsyncSession, username: str, + page: int = 1, per_page: int = 20, + ) -> tuple[list[RemoteActorDTO], int]: ... + + async def accept_follow_response( + self, session: AsyncSession, local_username: str, remote_actor_url: str, + ) -> None: ... + + async def unfollow( + self, session: AsyncSession, local_username: str, remote_actor_url: str, + ) -> None: ... + + # -- Remote posts --------------------------------------------------------- + async def ingest_remote_post( + self, session: AsyncSession, remote_actor_id: int, + activity_json: dict, object_json: dict, + ) -> None: ... + + async def delete_remote_post( + self, session: AsyncSession, object_id: str, + ) -> None: ... + + async def get_remote_post( + self, session: AsyncSession, object_id: str, + ) -> RemotePostDTO | None: ... + + # -- Timelines ------------------------------------------------------------ + async def get_home_timeline( + self, session: AsyncSession, actor_profile_id: int, + before: datetime | None = None, limit: int = 20, + ) -> list[TimelineItemDTO]: ... + + async def get_public_timeline( + self, session: AsyncSession, + before: datetime | None = None, limit: int = 20, + ) -> list[TimelineItemDTO]: ... + + async def get_actor_timeline( + self, session: AsyncSession, remote_actor_id: int, + before: datetime | None = None, limit: int = 20, + ) -> list[TimelineItemDTO]: ... + + # -- Local posts ---------------------------------------------------------- + async def create_local_post( + self, session: AsyncSession, actor_profile_id: int, + content: str, visibility: str = "public", + in_reply_to: str | None = None, + ) -> int: ... + + async def delete_local_post( + self, session: AsyncSession, actor_profile_id: int, post_id: int, + ) -> None: ... + + # -- Interactions --------------------------------------------------------- + async def like_post( + self, session: AsyncSession, actor_profile_id: int, + object_id: str, author_inbox: str, + ) -> None: ... 
+ + async def unlike_post( + self, session: AsyncSession, actor_profile_id: int, + object_id: str, author_inbox: str, + ) -> None: ... + + async def boost_post( + self, session: AsyncSession, actor_profile_id: int, + object_id: str, author_inbox: str, + ) -> None: ... + + async def unboost_post( + self, session: AsyncSession, actor_profile_id: int, + object_id: str, author_inbox: str, + ) -> None: ... + + # -- Notifications -------------------------------------------------------- + async def get_notifications( + self, session: AsyncSession, actor_profile_id: int, + before: datetime | None = None, limit: int = 20, + ) -> list[NotificationDTO]: ... + + async def unread_notification_count( + self, session: AsyncSession, actor_profile_id: int, + ) -> int: ... + + async def mark_notifications_read( + self, session: AsyncSession, actor_profile_id: int, + ) -> None: ... + + # -- Stats ---------------------------------------------------------------- + async def get_stats(self, session: AsyncSession) -> dict: ... diff --git a/shared/contracts/widgets.py b/shared/contracts/widgets.py new file mode 100644 index 0000000..b5aef0f --- /dev/null +++ b/shared/contracts/widgets.py @@ -0,0 +1,49 @@ +"""Widget descriptors for cross-domain UI composition. + +Each widget type describes a UI fragment that one domain contributes to +another domain's page. Host apps iterate widgets generically — they never +name the contributing domain. 
+""" +from __future__ import annotations + +from dataclasses import dataclass +from typing import Callable + + +@dataclass(frozen=True, slots=True) +class NavWidget: + """Renders nav items on a container page (entries, calendars, markets).""" + domain: str + order: int + context_fn: Callable # async (session, *, container_type, container_id, **kw) -> dict + template: str + + +@dataclass(frozen=True, slots=True) +class CardWidget: + """Decorates content cards in listings with domain data.""" + domain: str + order: int + batch_fn: Callable # async (session, post_ids) -> dict[int, list] + context_key: str # key injected into each post dict + template: str + + +@dataclass(frozen=True, slots=True) +class AccountPageWidget: + """Sub-page under /auth//.""" + domain: str + slug: str + label: str + order: int + context_fn: Callable # async (session, *, user_id, **kw) -> dict + template: str + + +@dataclass(frozen=True, slots=True) +class AccountNavLink: + """Nav link on account page (internal or external).""" + label: str + order: int + href_fn: Callable # () -> str + external: bool = False diff --git a/shared/db/__init__.py b/shared/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/shared/db/base.py b/shared/db/base.py new file mode 100644 index 0000000..e070835 --- /dev/null +++ b/shared/db/base.py @@ -0,0 +1,4 @@ +from __future__ import annotations +from sqlalchemy.orm import declarative_base + +Base = declarative_base() diff --git a/shared/db/session.py b/shared/db/session.py new file mode 100644 index 0000000..bff449c --- /dev/null +++ b/shared/db/session.py @@ -0,0 +1,82 @@ +from __future__ import annotations +import os +from contextlib import asynccontextmanager +from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession +from quart import Quart, g + +DATABASE_URL = ( + os.getenv("DATABASE_URL_ASYNC") + or os.getenv("DATABASE_URL") + or "postgresql+asyncpg://localhost/blog" +) + +_engine = create_async_engine( + 
DATABASE_URL, + future=True, + echo=False, + pool_pre_ping=True, + pool_size=5, + max_overflow=10, +) + +_Session = async_sessionmaker( + bind=_engine, + class_=AsyncSession, + expire_on_commit=False, +) + +@asynccontextmanager +async def get_session(): + """Always create a fresh AsyncSession for this block.""" + sess = _Session() + try: + yield sess + finally: + await sess.close() + + +def register_db(app: Quart): + + @app.before_request + async def open_session(): + g.s = _Session() + g.tx = await g.s.begin() + g.had_error = False + + @app.after_request + async def maybe_commit(response): + # Runs BEFORE bytes are sent. + if not g.had_error and 200 <= response.status_code < 400: + try: + if hasattr(g, "tx"): + await g.tx.commit() + except Exception as e: + print(f'commit failed {e}') + if hasattr(g, "tx"): + await g.tx.rollback() + from quart import make_response + return await make_response("Commit failed", 500) + return response + + @app.teardown_request + async def finish(exc): + try: + # If an exception occurred OR we didn't commit (still in txn), roll back. 
+ if hasattr(g, "s"): + if exc is not None or g.s.in_transaction(): + if hasattr(g, "tx") and g.tx.is_active: + try: + await g.tx.rollback() + except Exception: + pass + finally: + if hasattr(g, "s"): + try: + await g.s.close() + except Exception: + pass + + @app.errorhandler(Exception) + async def mark_error(e): + g.had_error = True + raise diff --git a/shared/editor/build.mjs b/shared/editor/build.mjs new file mode 100644 index 0000000..13f4cb3 --- /dev/null +++ b/shared/editor/build.mjs @@ -0,0 +1,45 @@ +import * as esbuild from "esbuild"; +import path from "path"; +import { fileURLToPath } from "url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const isProduction = process.env.NODE_ENV === "production"; +const isWatch = process.argv.includes("--watch"); + +/** @type {import('esbuild').BuildOptions} */ +const opts = { + alias: { + "koenig-styles": path.resolve( + __dirname, + "node_modules/@tryghost/koenig-lexical/dist/index.css" + ), + }, + entryPoints: ["src/index.jsx"], + bundle: true, + outdir: "../static/scripts", + entryNames: "editor", + format: "iife", + target: "es2020", + jsx: "automatic", + minify: isProduction, + define: { + "process.env.NODE_ENV": JSON.stringify( + isProduction ? 
"production" : "development" + ), + }, + loader: { + ".svg": "dataurl", + ".woff": "file", + ".woff2": "file", + ".ttf": "file", + }, + logLevel: "info", +}; + +if (isWatch) { + const ctx = await esbuild.context(opts); + await ctx.watch(); + console.log("Watching for changes..."); +} else { + await esbuild.build(opts); +} diff --git a/shared/editor/package-lock.json b/shared/editor/package-lock.json new file mode 100644 index 0000000..e102c57 --- /dev/null +++ b/shared/editor/package-lock.json @@ -0,0 +1,512 @@ +{ + "name": "coop-lexical-editor", + "version": "2.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "coop-lexical-editor", + "version": "2.0.0", + "dependencies": { + "@tryghost/koenig-lexical": "^1.7.10", + "react": "^18.3.1", + "react-dom": "^18.3.1" + }, + "devDependencies": { + "esbuild": "^0.24.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.2.tgz", + "integrity": "sha512-thpVCb/rhxE/BnMLQ7GReQLLN8q9qbHmI55F4489/ByVg2aQaQ6kbcLb6FHkocZzQhxc4gx0sCk0tJkKBFzDhA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.2.tgz", + "integrity": "sha512-tmwl4hJkCfNHwFB3nBa8z1Uy3ypZpxqxfTQOcHX+xRByyYgunVbZ9MzUUfb0RxaHIMnbHagwAxuTL+tnNM+1/Q==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.2.tgz", + "integrity": "sha512-cNLgeqCqV8WxfcTIOeL4OAtSmL8JjcN6m09XIgro1Wi7cF4t/THaWEa7eL5CMoMBdjoHOTh/vwTO/o2TRXIyzg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + 
"android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.2.tgz", + "integrity": "sha512-B6Q0YQDqMx9D7rvIcsXfmJfvUYLoP722bgfBlO5cGvNVb5V/+Y7nhBE3mHV9OpxBf4eAS2S68KZztiPaWq4XYw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.2.tgz", + "integrity": "sha512-kj3AnYWc+CekmZnS5IPu9D+HWtUI49hbnyqk0FLEJDbzCIQt7hg7ucF1SQAilhtYpIujfaHr6O0UHlzzSPdOeA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.2.tgz", + "integrity": "sha512-WeSrmwwHaPkNR5H3yYfowhZcbriGqooyu3zI/3GGpF8AyUdsrrP0X6KumITGA9WOyiJavnGZUwPGvxvwfWPHIA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.2.tgz", + "integrity": "sha512-UN8HXjtJ0k/Mj6a9+5u6+2eZ2ERD7Edt1Q9IZiB5UZAIdPnVKDoG7mdTVGhHJIeEml60JteamR3qhsr1r8gXvg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.2.tgz", + "integrity": "sha512-TvW7wE/89PYW+IevEJXZ5sF6gJRDY/14hyIGFXdIucxCsbRmLUcjseQu1SyTko+2idmCw94TgyaEZi9HUSOe3Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + 
"engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.2.tgz", + "integrity": "sha512-n0WRM/gWIdU29J57hJyUdIsk0WarGd6To0s+Y+LwvlC55wt+GT/OgkwoXCXvIue1i1sSNWblHEig00GBWiJgfA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.2.tgz", + "integrity": "sha512-7HnAD6074BW43YvvUmE/35Id9/NB7BeX5EoNkK9obndmZBUk8xmJJeU7DwmUeN7tkysslb2eSl6CTrYz6oEMQg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.2.tgz", + "integrity": "sha512-sfv0tGPQhcZOgTKO3oBE9xpHuUqguHvSo4jl+wjnKwFpapx+vUDcawbwPNuBIAYdRAvIDBfZVvXprIj3HA+Ugw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.2.tgz", + "integrity": "sha512-CN9AZr8kEndGooS35ntToZLTQLHEjtVB5n7dl8ZcTZMonJ7CCfStrYhrzF97eAecqVbVJ7APOEe18RPI4KLhwQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.2.tgz", + "integrity": "sha512-iMkk7qr/wl3exJATwkISxI7kTcmHKE+BlymIAbHO8xanq/TjHaaVThFF6ipWzPHryoFsesNQJPE/3wFJw4+huw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + 
"node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.2.tgz", + "integrity": "sha512-shsVrgCZ57Vr2L8mm39kO5PPIb+843FStGt7sGGoqiiWYconSxwTiuswC1VJZLCjNiMLAMh34jg4VSEQb+iEbw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.2.tgz", + "integrity": "sha512-4eSFWnU9Hhd68fW16GD0TINewo1L6dRrB+oLNNbYyMUAeOD2yCK5KXGK1GH4qD/kT+bTEXjsyTCiJGHPZ3eM9Q==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.2.tgz", + "integrity": "sha512-S0Bh0A53b0YHL2XEXC20bHLuGMOhFDO6GN4b3YjRLK//Ep3ql3erpNcPlEFed93hsQAjAQDNsvcK+hV90FubSw==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.2.tgz", + "integrity": "sha512-8Qi4nQcCTbLnK9WoMjdC9NiTG6/E38RNICU6sUNqK0QFxCYgoARqVqxdFmWkdonVsvGqWhmm7MO0jyTqLqwj0Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.24.2.tgz", + "integrity": "sha512-wuLK/VztRRpMt9zyHSazyCVdCXlpHkKm34WUyinD2lzK07FAHTq0KQvZZlXikNWkDGoT6x3TD51jKQ7gMVpopw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + 
}, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.2.tgz", + "integrity": "sha512-VefFaQUc4FMmJuAxmIHgUmfNiLXY438XrL4GDNV1Y1H/RW3qow68xTwjZKfj/+Plp9NANmzbH5R40Meudu8mmw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.2.tgz", + "integrity": "sha512-YQbi46SBct6iKnszhSvdluqDmxCJA+Pu280Av9WICNwQmMxV7nLRHZfjQzwbPs3jeWnuAhE9Jy0NrnJ12Oz+0A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.2.tgz", + "integrity": "sha512-+iDS6zpNM6EnJyWv0bMGLWSWeXGN/HTaF/LXHXHwejGsVi+ooqDfMCCTerNFxEkM3wYVcExkeGXNqshc9iMaOA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.2.tgz", + "integrity": "sha512-hTdsW27jcktEvpwNHJU4ZwWFGkz2zRJUz8pvddmXPtXDzVKTTINmlmga3ZzwcuMpUvLw7JkLy9QLKyGpD2Yxig==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.2.tgz", + "integrity": "sha512-LihEQ2BBKVFLOC9ZItT9iFprsE9tqjDjnbulhHoFxYQtQfai7qfluVODIYxt1PgdoyQkz23+01rzwNwYfutxUQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@esbuild/win32-ia32": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.2.tgz", + "integrity": "sha512-q+iGUwfs8tncmFC9pcnD5IvRHAzmbwQ3GPS5/ceCyHdjXubwQWI12MKWSNSMYLJMq23/IUCvJMS76PDqXe1fxA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.2.tgz", + "integrity": "sha512-7VTgWzgMGvup6aSqDPLiW5zHaxYJGTO4OokMjIlrCtf+VpEL+cXKtCvg723iguPYI5oaUNdS+/V7OU2gvXVWEg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@tryghost/koenig-lexical": { + "version": "1.7.10", + "resolved": "https://registry.npmjs.org/@tryghost/koenig-lexical/-/koenig-lexical-1.7.10.tgz", + "integrity": "sha512-6tI2kbSzZ669hQ5GxpENB8n2aDLugZDmpR/nO0GriduOZJLLN8AdDDa/S3Y8dpF5/cOGKsOxFRj3oLGRDOi6tw==" + }, + "node_modules/esbuild": { + "version": "0.24.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.2.tgz", + "integrity": "sha512-+9egpBW8I3CD5XPe0n6BfT5fxLzxrlDzqydF3aviG+9ni1lDC/OvMHcxqEFV0+LANZG5R1bFMWfUrjVsdwxJvA==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.24.2", + "@esbuild/android-arm": "0.24.2", + "@esbuild/android-arm64": "0.24.2", + "@esbuild/android-x64": "0.24.2", + "@esbuild/darwin-arm64": "0.24.2", + "@esbuild/darwin-x64": "0.24.2", + "@esbuild/freebsd-arm64": "0.24.2", + "@esbuild/freebsd-x64": "0.24.2", + "@esbuild/linux-arm": "0.24.2", + "@esbuild/linux-arm64": "0.24.2", + "@esbuild/linux-ia32": "0.24.2", + "@esbuild/linux-loong64": "0.24.2", + "@esbuild/linux-mips64el": "0.24.2", + "@esbuild/linux-ppc64": "0.24.2", + "@esbuild/linux-riscv64": "0.24.2", + 
"@esbuild/linux-s390x": "0.24.2", + "@esbuild/linux-x64": "0.24.2", + "@esbuild/netbsd-arm64": "0.24.2", + "@esbuild/netbsd-x64": "0.24.2", + "@esbuild/openbsd-arm64": "0.24.2", + "@esbuild/openbsd-x64": "0.24.2", + "@esbuild/sunos-x64": "0.24.2", + "@esbuild/win32-arm64": "0.24.2", + "@esbuild/win32-ia32": "0.24.2", + "@esbuild/win32-x64": "0.24.2" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "dependencies": { + "loose-envify": "^1.1.0" + } + } + } +} diff --git 
a/shared/editor/package.json b/shared/editor/package.json new file mode 100644 index 0000000..4d556f1 --- /dev/null +++ b/shared/editor/package.json @@ -0,0 +1,18 @@ +{ + "name": "coop-lexical-editor", + "version": "2.0.0", + "private": true, + "scripts": { + "build": "node build.mjs", + "build:prod": "NODE_ENV=production node build.mjs", + "dev": "node build.mjs --watch" + }, + "dependencies": { + "@tryghost/koenig-lexical": "^1.7.10", + "react": "^18.3.1", + "react-dom": "^18.3.1" + }, + "devDependencies": { + "esbuild": "^0.24.0" + } +} diff --git a/shared/editor/src/Editor.jsx b/shared/editor/src/Editor.jsx new file mode 100644 index 0000000..9c01093 --- /dev/null +++ b/shared/editor/src/Editor.jsx @@ -0,0 +1,81 @@ +import { useMemo, useState, useEffect, useCallback } from "react"; +import { KoenigComposer, KoenigEditor, CardMenuPlugin } from "@tryghost/koenig-lexical"; +import "koenig-styles"; +import makeFileUploader from "./useFileUpload"; + +export default function Editor({ initialState, onChange, csrfToken, uploadUrls, oembedUrl, unsplashApiKey, snippetsUrl }) { + const fileUploader = useMemo(() => makeFileUploader(csrfToken, uploadUrls), [csrfToken, uploadUrls]); + + const [snippets, setSnippets] = useState([]); + + useEffect(() => { + if (!snippetsUrl) return; + fetch(snippetsUrl, { headers: { "X-CSRFToken": csrfToken || "" } }) + .then((r) => r.ok ? 
r.json() : []) + .then(setSnippets) + .catch(() => {}); + }, [snippetsUrl, csrfToken]); + + const createSnippet = useCallback(async ({ name, value }) => { + if (!snippetsUrl) return; + const resp = await fetch(snippetsUrl, { + method: "POST", + headers: { + "Content-Type": "application/json", + "X-CSRFToken": csrfToken || "", + }, + body: JSON.stringify({ name, value: JSON.stringify(value) }), + }); + if (!resp.ok) return; + const created = await resp.json(); + setSnippets((prev) => { + const idx = prev.findIndex((s) => s.name === created.name); + if (idx >= 0) { + const next = [...prev]; + next[idx] = created; + return next; + } + return [...prev, created].sort((a, b) => a.name.localeCompare(b.name)); + }); + }, [snippetsUrl, csrfToken]); + + const cardConfig = useMemo(() => ({ + fetchEmbed: async (url, { type } = {}) => { + const params = new URLSearchParams({ url }); + if (type) params.set("type", type); + const resp = await fetch(`${oembedUrl}?${params}`, { + headers: { "X-CSRFToken": csrfToken || "" }, + }); + if (!resp.ok) return {}; + return resp.json(); + }, + unsplash: unsplashApiKey + ? { defaultHeaders: { Authorization: `Client-ID ${unsplashApiKey}` } } + : false, + membersEnabled: true, + snippets: snippets.map((s) => ({ + id: s.id, + name: s.name, + value: typeof s.value === "string" ? JSON.parse(s.value) : s.value, + })), + createSnippet, + }), [oembedUrl, csrfToken, unsplashApiKey, snippets, createSnippet]); + + return ( + + { + if (onChange) { + onChange(JSON.stringify(serializedState)); + } + }} + > + + + + ); +} diff --git a/shared/editor/src/index.jsx b/shared/editor/src/index.jsx new file mode 100644 index 0000000..ec1e950 --- /dev/null +++ b/shared/editor/src/index.jsx @@ -0,0 +1,49 @@ +import React from "react"; +import { createRoot } from "react-dom/client"; +import Editor from "./Editor"; + +/** + * Mount the Koenig editor into the given DOM element. 
+ * + * @param {string} elementId - ID of the container element + * @param {object} opts + * @param {string} [opts.initialJson] - Serialised Lexical JSON (from Ghost) + * @param {string} [opts.csrfToken] - CSRF token for API calls + * @param {object} [opts.uploadUrls] - { image, media, file } upload endpoint URLs + * @param {string} [opts.oembedUrl] - oEmbed proxy endpoint URL + * @param {string} [opts.unsplashApiKey] - Unsplash API key for image search + */ +window.mountEditor = function mountEditor(elementId, opts = {}) { + const container = document.getElementById(elementId); + if (!container) { + console.error(`[editor] Element #${elementId} not found`); + return; + } + + let currentJson = opts.initialJson || null; + + function handleChange(json) { + currentJson = json; + // Stash the latest JSON in a hidden input for form submission + const hidden = document.getElementById("lexical-json-input"); + if (hidden) hidden.value = json; + } + + const root = createRoot(container); + root.render( + + ); + + // Return handle for programmatic access + return { + getJson: () => currentJson, + }; +}; diff --git a/shared/editor/src/useFileUpload.js b/shared/editor/src/useFileUpload.js new file mode 100644 index 0000000..014b8b5 --- /dev/null +++ b/shared/editor/src/useFileUpload.js @@ -0,0 +1,99 @@ +import { useState, useCallback, useRef } from "react"; + +/** + * Koenig expects `fileUploader.useFileUpload(type)` — a React hook it + * calls internally for each card type ("image", "audio", "file", etc.). + * + * `makeFileUploader(csrfToken, uploadUrls)` returns the object Koenig wants: + * { useFileUpload: (type) => { upload, progress, isLoading, errors, filesNumber } } + * + * `uploadUrls` is an object: { image, media, file } + * For backwards compat, a plain string is treated as the image URL. 
+ */ + +const URL_KEY_MAP = { + image: { urlKey: "image", responseKey: "images" }, + audio: { urlKey: "media", responseKey: "media" }, + video: { urlKey: "media", responseKey: "media" }, + mediaThumbnail: { urlKey: "image", responseKey: "images" }, + file: { urlKey: "file", responseKey: "files" }, +}; + +export default function makeFileUploader(csrfToken, uploadUrls) { + // Normalise: string → object with all keys pointing to same URL + const urls = + typeof uploadUrls === "string" + ? { image: uploadUrls, media: uploadUrls, file: uploadUrls } + : uploadUrls || {}; + + return { + fileTypes: { + image: { mimeTypes: ['image/jpeg', 'image/png', 'image/gif', 'image/webp', 'image/svg+xml'] }, + audio: { mimeTypes: ['audio/mpeg', 'audio/ogg', 'audio/wav', 'audio/mp4', 'audio/aac'] }, + video: { mimeTypes: ['video/mp4', 'video/webm', 'video/ogg'] }, + mediaThumbnail: { mimeTypes: ['image/jpeg', 'image/png', 'image/gif', 'image/webp'] }, + file: { mimeTypes: [] }, + }, + useFileUpload(type) { + const mapping = URL_KEY_MAP[type] || URL_KEY_MAP.image; + const [progress, setProgress] = useState(0); + const [isLoading, setIsLoading] = useState(false); + const [errors, setErrors] = useState([]); + const [filesNumber, setFilesNumber] = useState(0); + const csrfRef = useRef(csrfToken); + const urlRef = useRef(urls[mapping.urlKey] || urls.image || "/editor-api/images/upload/"); + const responseKeyRef = useRef(mapping.responseKey); + + const upload = useCallback(async (files) => { + const fileList = Array.from(files); + setFilesNumber(fileList.length); + setIsLoading(true); + setErrors([]); + setProgress(0); + + const results = []; + for (let i = 0; i < fileList.length; i++) { + const file = fileList[i]; + const formData = new FormData(); + formData.append("file", file); + + try { + const resp = await fetch(urlRef.current, { + method: "POST", + body: formData, + headers: { + "X-CSRFToken": csrfRef.current || "", + }, + }); + if (!resp.ok) { + const err = await resp.json().catch(() 
=> ({})); + const msg = + err.errors?.[0]?.message || `Upload failed (${resp.status})`; + setErrors((prev) => [ + ...prev, + { message: msg, fileName: file.name }, + ]); + continue; + } + const data = await resp.json(); + const fileUrl = data[responseKeyRef.current]?.[0]?.url; + if (fileUrl) { + results.push({ url: fileUrl, fileName: file.name }); + } + } catch (e) { + setErrors((prev) => [ + ...prev, + { message: e.message, fileName: file.name }, + ]); + } + setProgress(Math.round(((i + 1) / fileList.length) * 100)); + } + + setIsLoading(false); + return results; + }, []); + + return { upload, progress, isLoading, errors, filesNumber }; + }, + }; +} diff --git a/shared/events/__init__.py b/shared/events/__init__.py new file mode 100644 index 0000000..522cb5f --- /dev/null +++ b/shared/events/__init__.py @@ -0,0 +1,9 @@ +from .bus import emit_activity, register_activity_handler, get_activity_handlers +from .processor import EventProcessor + +__all__ = [ + "emit_activity", + "register_activity_handler", + "get_activity_handlers", + "EventProcessor", +] diff --git a/shared/events/bus.py b/shared/events/bus.py new file mode 100644 index 0000000..215194e --- /dev/null +++ b/shared/events/bus.py @@ -0,0 +1,126 @@ +""" +Unified activity bus. + +emit_activity() writes an APActivity row with process_state='pending' within +the caller's existing DB transaction — atomic with the domain change. + +register_activity_handler() registers async handler functions that the +EventProcessor dispatches when processing pending activities. 
+""" +from __future__ import annotations + +import logging +import uuid +from collections import defaultdict +from typing import Awaitable, Callable, Dict, List, Tuple + +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.models.federation import APActivity + +log = logging.getLogger(__name__) + +# --------------------------------------------------------------------------- +# Activity-handler registry +# --------------------------------------------------------------------------- +# Handler signature: async def handler(activity: APActivity, session: AsyncSession) -> None +ActivityHandlerFn = Callable[[APActivity, AsyncSession], Awaitable[None]] + +# Keyed by (activity_type, object_type). object_type="*" is wildcard. +_activity_handlers: Dict[Tuple[str, str], List[ActivityHandlerFn]] = defaultdict(list) + + +def register_activity_handler( + activity_type: str, + fn: ActivityHandlerFn, + *, + object_type: str | None = None, +) -> None: + """Register an async handler for an activity type + optional object type. + + Use ``activity_type="*"`` as a wildcard that fires for every activity + (e.g. federation delivery handler). + """ + key = (activity_type, object_type or "*") + _activity_handlers[key].append(fn) + log.info("Registered activity handler %s.%s for key %s", fn.__module__, fn.__qualname__, key) + + +def get_activity_handlers( + activity_type: str, + object_type: str | None = None, +) -> List[ActivityHandlerFn]: + """Return all matching handlers for an activity. + + Matches in order: + 1. Exact (activity_type, object_type) + 2. (activity_type, "*") — type-level wildcard + 3. ("*", "*") — global wildcard (e.g. 
delivery) + """ + handlers: List[ActivityHandlerFn] = [] + ot = object_type or "*" + + # Exact match + if ot != "*": + handlers.extend(_activity_handlers.get((activity_type, ot), [])) + # Type-level wildcard + handlers.extend(_activity_handlers.get((activity_type, "*"), [])) + # Global wildcard + if activity_type != "*": + handlers.extend(_activity_handlers.get(("*", "*"), [])) + + return handlers + + +# --------------------------------------------------------------------------- +# emit_activity — the primary way to emit events +# --------------------------------------------------------------------------- +async def emit_activity( + session: AsyncSession, + *, + activity_type: str, + actor_uri: str, + object_type: str, + object_data: dict | None = None, + source_type: str | None = None, + source_id: int | None = None, + visibility: str = "internal", + actor_profile_id: int | None = None, + origin_app: str | None = None, +) -> APActivity: + """ + Write an AP-shaped activity to ap_activities with process_state='pending'. + + Called inside a service function using the same session that performs the + domain change. The activity and the change commit together. + """ + if not origin_app: + try: + from quart import current_app + origin_app = current_app.name + except (ImportError, RuntimeError): + pass + + activity_uri = f"internal:{uuid.uuid4()}" if visibility == "internal" else f"urn:uuid:{uuid.uuid4()}" + + activity = APActivity( + activity_id=activity_uri, + activity_type=activity_type, + actor_profile_id=actor_profile_id, + actor_uri=actor_uri, + object_type=object_type, + object_data=object_data or {}, + is_local=True, + source_type=source_type, + source_id=source_id, + visibility=visibility, + process_state="pending", + origin_app=origin_app, + ) + session.add(activity) + await session.flush() + # Wake any listening EventProcessor as soon as this transaction commits. + # NOTIFY is transactional — delivered only after commit. 
+ await session.execute(text("NOTIFY ap_activity_pending")) + return activity diff --git a/shared/events/handlers/__init__.py b/shared/events/handlers/__init__.py new file mode 100644 index 0000000..9f6a845 --- /dev/null +++ b/shared/events/handlers/__init__.py @@ -0,0 +1,10 @@ +"""Shared event handlers.""" + + +def register_shared_handlers(): + """Import handler modules to trigger registration. Call at app startup.""" + import shared.events.handlers.container_handlers # noqa: F401 + import shared.events.handlers.login_handlers # noqa: F401 + import shared.events.handlers.order_handlers # noqa: F401 + import shared.events.handlers.ap_delivery_handler # noqa: F401 + import shared.events.handlers.external_delivery_handler # noqa: F401 diff --git a/shared/events/handlers/ap_delivery_handler.py b/shared/events/handlers/ap_delivery_handler.py new file mode 100644 index 0000000..7a175bf --- /dev/null +++ b/shared/events/handlers/ap_delivery_handler.py @@ -0,0 +1,250 @@ +"""Deliver AP activities to remote followers. + +Registered as a wildcard handler — fires for every activity. Skips +non-public activities and those without an actor profile. + +Per-app delivery: activities are delivered using the domain that matches +the follower's subscription. A follower of ``@alice@blog.rose-ash.com`` +receives activities with ``actor: https://blog.rose-ash.com/users/alice`` +and signatures using that domain's key_id. Aggregate followers +(``app_domain='federation'``) receive the federation domain identity. + +Idempotent: successful deliveries are recorded in ap_delivery_log. +On retry (at-least-once reaper), already-delivered inboxes are skipped. 
+""" +from __future__ import annotations + +import logging +import os +from collections import defaultdict + +import httpx +from sqlalchemy import select, or_ +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.events.bus import register_activity_handler +from shared.models.federation import ActorProfile, APActivity, APFollower, APDeliveryLog +from shared.services.registry import services + +log = logging.getLogger(__name__) + +AP_CONTENT_TYPE = "application/activity+json" +DELIVERY_TIMEOUT = 15 # seconds per request + + +def _domain_for_app(app_name: str) -> str: + """Resolve the public AP domain for an app name.""" + from shared.infrastructure.activitypub import _ap_domain + return _ap_domain(app_name) + + +def _build_activity_json(activity: APActivity, actor: ActorProfile, domain: str) -> dict: + """Build the full AP activity JSON-LD for delivery.""" + username = actor.preferred_username + actor_url = f"https://{domain}/users/{username}" + + obj = dict(activity.object_data or {}) + + # Rewrite all URLs from the federation domain to the delivery domain + # so Mastodon's origin check passes (all IDs must match actor host). 
+ import re + fed_domain = os.getenv("AP_DOMAIN", "federation.rose-ash.com") + + def _rewrite(url: str) -> str: + if isinstance(url, str) and fed_domain in url: + return url.replace(f"https://{fed_domain}", f"https://{domain}") + return url + + activity_id = _rewrite(activity.activity_id) + object_id = activity_id + "/object" + + # Rewrite any federation-domain URLs in object_data + if "id" in obj: + obj["id"] = _rewrite(obj["id"]) + if "attributedTo" in obj: + obj["attributedTo"] = _rewrite(obj["attributedTo"]) + + if activity.activity_type == "Delete": + obj.setdefault("id", object_id) + obj.setdefault("type", "Tombstone") + else: + obj.setdefault("id", object_id) + obj.setdefault("type", activity.object_type) + obj.setdefault("attributedTo", actor_url) + obj.setdefault("published", activity.published.isoformat() if activity.published else None) + obj.setdefault("to", ["https://www.w3.org/ns/activitystreams#Public"]) + obj.setdefault("cc", [f"{actor_url}/followers"]) + if activity.activity_type == "Update": + from datetime import datetime, timezone + obj["updated"] = datetime.now(timezone.utc).isoformat() + + return { + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + ], + "id": activity_id, + "type": activity.activity_type, + "actor": actor_url, + "published": activity.published.isoformat() if activity.published else None, + "to": ["https://www.w3.org/ns/activitystreams#Public"], + "cc": [f"{actor_url}/followers"], + "object": obj, + } + + +async def _deliver_to_inbox( + client: httpx.AsyncClient, + inbox_url: str, + body: dict, + actor: ActorProfile, + domain: str, +) -> int | None: + """POST signed activity to a single inbox. 
Returns status code or None on error.""" + from shared.utils.http_signatures import sign_request + from urllib.parse import urlparse + import json + + body_bytes = json.dumps(body).encode() + key_id = f"https://{domain}/users/{actor.preferred_username}#main-key" + + parsed = urlparse(inbox_url) + headers = sign_request( + private_key_pem=actor.private_key_pem, + key_id=key_id, + method="POST", + path=parsed.path, + host=parsed.netloc, + body=body_bytes, + ) + headers["Content-Type"] = AP_CONTENT_TYPE + + try: + resp = await client.post( + inbox_url, + content=body_bytes, + headers=headers, + timeout=DELIVERY_TIMEOUT, + ) + if resp.status_code < 300: + log.info("Delivered to %s → %d", inbox_url, resp.status_code) + else: + log.warning("Delivery to %s → %d: %s", inbox_url, resp.status_code, resp.text[:200]) + return resp.status_code + except Exception: + log.exception("Delivery failed for %s", inbox_url) + return None + + +async def on_any_activity(activity: APActivity, session: AsyncSession) -> None: + """Deliver a public activity to all matching followers of its actor.""" + + # Only deliver public activities that have an actor profile + if activity.visibility != "public": + return + if activity.actor_profile_id is None: + return + if not services.has("federation"): + return + + # Load actor with private key + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.id == activity.actor_profile_id) + ) + ).scalar_one_or_none() + if not actor or not actor.private_key_pem: + log.warning("Actor not found or missing key for activity %s", activity.activity_id) + return + + # Load matching followers. + # Aggregate followers (app_domain='federation') always get everything. + # Per-app followers only get activities from their app. 
+ origin_app = activity.origin_app + follower_filters = [APFollower.actor_profile_id == actor.id] + + if origin_app and origin_app != "federation": + follower_filters.append( + or_( + APFollower.app_domain == "federation", + APFollower.app_domain == origin_app, + ) + ) + + followers = ( + await session.execute( + select(APFollower).where(*follower_filters) + ) + ).scalars().all() + + if not followers: + log.debug("No followers to deliver to for %s", activity.activity_id) + return + + # Check delivery log — skip (inbox, domain) pairs already delivered (idempotency) + existing = ( + await session.execute( + select(APDeliveryLog.inbox_url, APDeliveryLog.app_domain).where( + APDeliveryLog.activity_id == activity.id, + APDeliveryLog.status_code < 300, + ) + ) + ).all() + already_delivered: set[tuple[str, str]] = {(r[0], r[1]) for r in existing} + + # Collect all (inbox, app_domain) pairs to deliver to. + # Each follower subscription gets its own delivery with the correct + # actor identity, so followers of @user@blog and @user@federation + # both see posts on their respective actor profiles. 
+ delivery_pairs: set[tuple[str, str]] = set() + for f in followers: + if not f.follower_inbox: + continue + app_dom = f.app_domain or "federation" + pair = (f.follower_inbox, app_dom) + if pair not in already_delivered: + delivery_pairs.add(pair) + + if not delivery_pairs: + if already_delivered: + log.info("All deliveries already done for %s", activity.activity_id) + return + + if already_delivered: + log.info( + "Skipping %d already-delivered, delivering to %d remaining", + len(already_delivered), len(delivery_pairs), + ) + + # Group by domain to reuse activity JSON per domain + domain_inboxes: dict[str, list[str]] = defaultdict(list) + for inbox_url, app_dom in delivery_pairs: + domain_inboxes[app_dom].append(inbox_url) + + log.info( + "Delivering %s to %d target(s) for @%s across %d domain(s)", + activity.activity_type, len(delivery_pairs), + actor.preferred_username, len(domain_inboxes), + ) + + async with httpx.AsyncClient() as client: + for app_dom, inboxes in domain_inboxes.items(): + domain = _domain_for_app(app_dom) + activity_json = _build_activity_json(activity, actor, domain) + + for inbox_url in inboxes: + status_code = await _deliver_to_inbox( + client, inbox_url, activity_json, actor, domain + ) + if status_code is not None and status_code < 300: + session.add(APDeliveryLog( + activity_id=activity.id, + inbox_url=inbox_url, + app_domain=app_dom, + status_code=status_code, + )) + await session.flush() + + +# Wildcard: fires for every activity +register_activity_handler("*", on_any_activity) diff --git a/shared/events/handlers/container_handlers.py b/shared/events/handlers/container_handlers.py new file mode 100644 index 0000000..c405002 --- /dev/null +++ b/shared/events/handlers/container_handlers.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.events import register_activity_handler +from shared.models.federation import APActivity +from shared.services.navigation import 
rebuild_navigation + + +async def on_child_attached(activity: APActivity, session: AsyncSession) -> None: + await rebuild_navigation(session) + + +async def on_child_detached(activity: APActivity, session: AsyncSession) -> None: + await rebuild_navigation(session) + + +register_activity_handler("Add", on_child_attached, object_type="rose:ContainerRelation") +register_activity_handler("Remove", on_child_detached, object_type="rose:ContainerRelation") diff --git a/shared/events/handlers/external_delivery_handler.py b/shared/events/handlers/external_delivery_handler.py new file mode 100644 index 0000000..d40852a --- /dev/null +++ b/shared/events/handlers/external_delivery_handler.py @@ -0,0 +1,101 @@ +"""Deliver activities to external service inboxes via signed HTTP POST. + +External services (like artdag) that don't share the coop database receive +activities via HTTP, authenticated with the same HTTP Signatures used for +ActivityPub federation. + +Config via env: EXTERNAL_INBOXES=name|url,name2|url2,... 
+""" +from __future__ import annotations + +import json +import logging +import os +from urllib.parse import urlparse + +import httpx +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.events.bus import register_activity_handler +from shared.models.federation import ActorProfile, APActivity +from shared.utils.http_signatures import sign_request + +log = logging.getLogger(__name__) + +# Activity types to deliver externally +_DELIVERABLE_TYPES = {"rose:DeviceAuth"} + + +def _get_external_inboxes() -> list[tuple[str, str]]: + """Parse EXTERNAL_INBOXES env var into [(name, url), ...].""" + raw = os.environ.get("EXTERNAL_INBOXES", "") + if not raw: + return [] + result = [] + for entry in raw.split(","): + entry = entry.strip() + if "|" in entry: + name, url = entry.split("|", 1) + result.append((name.strip(), url.strip())) + return result + + +def _get_ap_domain() -> str: + return os.environ.get("AP_DOMAIN", "federation.rose-ash.com") + + +async def on_external_activity(activity: APActivity, session: AsyncSession) -> None: + """Deliver matching activities to configured external inboxes.""" + if activity.activity_type not in _DELIVERABLE_TYPES: + return + + inboxes = _get_external_inboxes() + if not inboxes: + return + + # Get the first actor profile for signing + actor = await session.scalar(select(ActorProfile).limit(1)) + if not actor: + log.warning("No ActorProfile available for signing external deliveries") + return + + domain = _get_ap_domain() + key_id = f"https://{domain}/users/{actor.preferred_username}#main-key" + + payload = { + "@context": "https://www.w3.org/ns/activitystreams", + "type": activity.activity_type, + "actor": activity.actor_uri, + "object": activity.object_data, + } + if activity.published: + payload["published"] = activity.published.isoformat() + + body_bytes = json.dumps(payload).encode() + + for name, inbox_url in inboxes: + parsed = urlparse(inbox_url) + headers = sign_request( + 
private_key_pem=actor.private_key_pem, + key_id=key_id, + method="POST", + path=parsed.path, + host=parsed.netloc, + body=body_bytes, + ) + headers["Content-Type"] = "application/activity+json" + try: + async with httpx.AsyncClient(timeout=3) as client: + resp = await client.post(inbox_url, content=body_bytes, headers=headers) + log.info( + "External delivery to %s: %d", + name, resp.status_code, + ) + except Exception: + log.warning("External delivery to %s failed", name, exc_info=True) + + +# Register for all deliverable types +for _t in _DELIVERABLE_TYPES: + register_activity_handler(_t, on_external_activity) diff --git a/shared/events/handlers/login_handlers.py b/shared/events/handlers/login_handlers.py new file mode 100644 index 0000000..d09ce23 --- /dev/null +++ b/shared/events/handlers/login_handlers.py @@ -0,0 +1,23 @@ +from __future__ import annotations + +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.events import register_activity_handler +from shared.models.federation import APActivity +from shared.services.registry import services + + +async def on_user_logged_in(activity: APActivity, session: AsyncSession) -> None: + data = activity.object_data + user_id = data["user_id"] + session_id = data["session_id"] + + if services.has("cart"): + await services.cart.adopt_cart_for_user(session, user_id, session_id) + + if services.has("calendar"): + await services.calendar.adopt_entries_for_user(session, user_id, session_id) + await services.calendar.adopt_tickets_for_user(session, user_id, session_id) + + +register_activity_handler("rose:Login", on_user_logged_in) diff --git a/shared/events/handlers/order_handlers.py b/shared/events/handlers/order_handlers.py new file mode 100644 index 0000000..c608ae7 --- /dev/null +++ b/shared/events/handlers/order_handlers.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +import logging + +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.events import register_activity_handler +from 
shared.models.federation import APActivity + +log = logging.getLogger(__name__) + + +async def on_order_created(activity: APActivity, session: AsyncSession) -> None: + log.info("order.created: order_id=%s", activity.object_data.get("order_id")) + + +async def on_order_paid(activity: APActivity, session: AsyncSession) -> None: + log.info("order.paid: order_id=%s", activity.object_data.get("order_id")) + + +register_activity_handler("Create", on_order_created, object_type="rose:Order") +register_activity_handler("rose:OrderPaid", on_order_paid) diff --git a/shared/events/processor.py b/shared/events/processor.py new file mode 100644 index 0000000..935309b --- /dev/null +++ b/shared/events/processor.py @@ -0,0 +1,243 @@ +""" +Event processor — polls the ap_activities table and dispatches to registered +activity handlers. + +Runs as an asyncio background task within each app process. +Uses SELECT ... FOR UPDATE SKIP LOCKED for safe concurrent processing. + +A dedicated asyncpg LISTEN connection wakes the poll loop immediately when +emit_activity() fires NOTIFY ap_activity_pending, so latency drops from +~2 seconds (poll interval) to sub-100 ms. The fixed-interval poll remains +as a safety-net fallback. 
+""" +from __future__ import annotations + +import asyncio +import logging +import traceback +from datetime import datetime, timedelta, timezone + +import asyncpg +from sqlalchemy import select, update +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.db.session import get_session, DATABASE_URL +from shared.models.federation import APActivity +from .bus import get_activity_handlers + +log = logging.getLogger(__name__) + + +class EventProcessor: + """Background event processor that polls the ap_activities table.""" + + def __init__( + self, + *, + app_name: str | None = None, + poll_interval: float = 2.0, + batch_size: int = 10, + stuck_timeout: float = 300.0, + ): + self._app_name = app_name + self._poll_interval = poll_interval + self._batch_size = batch_size + self._stuck_timeout = stuck_timeout # seconds before "processing" → "pending" + self._task: asyncio.Task | None = None + self._listen_task: asyncio.Task | None = None + self._listen_conn: asyncpg.Connection | None = None + self._wake = asyncio.Event() + self._running = False + self._reap_counter = 0 + + # ------------------------------------------------------------------ + # Lifecycle + # ------------------------------------------------------------------ + + async def start(self) -> None: + """Start the background polling loop.""" + if self._task is not None: + return + self._running = True + self._listen_task = asyncio.create_task(self._listen_for_notify()) + self._task = asyncio.create_task(self._poll_loop()) + + async def stop(self) -> None: + """Stop the background polling loop gracefully.""" + self._running = False + if self._listen_task is not None: + self._listen_task.cancel() + try: + await self._listen_task + except asyncio.CancelledError: + pass + self._listen_task = None + if self._listen_conn is not None and not self._listen_conn.is_closed(): + await self._listen_conn.close() + self._listen_conn = None + if self._task is not None: + self._task.cancel() + try: + await self._task + 
except asyncio.CancelledError: + pass + self._task = None + + # ------------------------------------------------------------------ + # LISTEN — wake poll loop on NOTIFY + # ------------------------------------------------------------------ + + async def _listen_for_notify(self) -> None: + """Maintain a LISTEN connection and wake the poll loop on NOTIFY.""" + dsn = DATABASE_URL.replace("+asyncpg", "") + while self._running: + try: + self._listen_conn = await asyncpg.connect(dsn) + await self._listen_conn.add_listener( + "ap_activity_pending", self._on_notify + ) + log.info("LISTEN ap_activity_pending active") + # Keep alive with periodic health check + while self._running: + await asyncio.sleep(30) + await self._listen_conn.execute("SELECT 1") + except asyncio.CancelledError: + break + except Exception: + log.warning("LISTEN connection lost, reconnecting…", exc_info=True) + await asyncio.sleep(2) + finally: + if self._listen_conn is not None and not self._listen_conn.is_closed(): + await self._listen_conn.close() + self._listen_conn = None + + def _on_notify(self, conn, pid, channel, payload) -> None: + """Called by asyncpg when a NOTIFY arrives.""" + self._wake.set() + + # ------------------------------------------------------------------ + # Poll loop + # ------------------------------------------------------------------ + + async def _poll_loop(self) -> None: + while self._running: + try: + # Periodically recover stuck activities (~every 30 cycles) + self._reap_counter += 1 + if self._reap_counter >= 30: + self._reap_counter = 0 + await self._recover_stuck() + + # Clear before processing so any NOTIFY that arrives during + # _process_batch sets the event and we loop immediately. 
+ self._wake.clear() + processed = await self._process_batch() + if processed == 0: + try: + await asyncio.wait_for( + self._wake.wait(), timeout=self._poll_interval + ) + except asyncio.TimeoutError: + pass + # processed > 0 → loop immediately to drain the queue + except asyncio.CancelledError: + break + except Exception: + traceback.print_exc() + await asyncio.sleep(self._poll_interval) + + async def _recover_stuck(self) -> None: + """Reset activities stuck in 'processing' back to 'pending'. + + This handles the case where a process crashed mid-handler. + Combined with idempotent handlers, this gives at-least-once delivery. + """ + cutoff = datetime.now(timezone.utc) - timedelta(seconds=self._stuck_timeout) + try: + async with get_session() as session: + filters = [ + APActivity.process_state == "processing", + APActivity.created_at < cutoff, + ] + if self._app_name: + filters.append(APActivity.origin_app == self._app_name) + result = await session.execute( + update(APActivity) + .where(*filters) + .values(process_state="pending") + .returning(APActivity.id) + ) + recovered = result.scalars().all() + await session.commit() + if recovered: + log.warning( + "Recovered %d stuck activities: %s", + len(recovered), recovered, + ) + except Exception: + log.exception("Failed to recover stuck activities") + + async def _process_batch(self) -> int: + """Fetch and process a batch of pending activities. 
Returns count processed.""" + processed = 0 + async with get_session() as session: + filters = [ + APActivity.process_state == "pending", + APActivity.process_attempts < APActivity.process_max_attempts, + ] + if self._app_name: + filters.append(APActivity.origin_app == self._app_name) + stmt = ( + select(APActivity) + .where(*filters) + .order_by(APActivity.created_at) + .limit(self._batch_size) + .with_for_update(skip_locked=True) + ) + result = await session.execute(stmt) + activities = result.scalars().all() + + for activity in activities: + await self._process_one(session, activity) + processed += 1 + + await session.commit() + return processed + + async def _process_one(self, session: AsyncSession, activity: APActivity) -> None: + """Run all handlers for a single activity.""" + handlers = get_activity_handlers(activity.activity_type, activity.object_type) + now = datetime.now(timezone.utc) + + log.info( + "Processing activity %s: type=%s object_type=%s visibility=%s actor_profile_id=%s — %d handler(s) found", + activity.id, activity.activity_type, activity.object_type, + activity.visibility, activity.actor_profile_id, len(handlers), + ) + for h in handlers: + log.info(" handler: %s.%s", h.__module__, h.__qualname__) + + activity.process_state = "processing" + activity.process_attempts += 1 + await session.flush() + + if not handlers: + activity.process_state = "completed" + activity.processed_at = now + return + + try: + for handler in handlers: + log.info(" calling %s.%s …", handler.__module__, handler.__qualname__) + await handler(activity, session) + log.info(" done %s.%s", handler.__module__, handler.__qualname__) + activity.process_state = "completed" + activity.processed_at = now + except Exception as exc: + log.exception("Handler failed for activity %s", activity.id) + activity.process_error = f"{exc.__class__.__name__}: {exc}" + if activity.process_attempts >= activity.process_max_attempts: + activity.process_state = "failed" + activity.processed_at = 
now + else: + activity.process_state = "pending" # retry diff --git a/shared/infrastructure/__init__.py b/shared/infrastructure/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/shared/infrastructure/__init__.py @@ -0,0 +1 @@ + diff --git a/shared/infrastructure/activitypub.py b/shared/infrastructure/activitypub.py new file mode 100644 index 0000000..b7d6d7e --- /dev/null +++ b/shared/infrastructure/activitypub.py @@ -0,0 +1,454 @@ +"""Per-app ActivityPub blueprint. + +Factory function ``create_activitypub_blueprint(app_name)`` returns a +Blueprint with WebFinger, host-meta, nodeinfo, actor profile, inbox, +outbox, and followers endpoints. + +Per-app actors are *virtual projections* of the same ``ActorProfile``. +Same keypair, same ``preferred_username`` — the only differences are: +- the domain in URLs (e.g. blog.rose-ash.com vs federation.rose-ash.com) +- which activities are served in the outbox (filtered by ``origin_app``) +- which followers are returned (filtered by ``app_domain``) +- Follow requests create ``APFollower(app_domain=app_name)`` + +Federation app acts as the aggregate: no origin_app filter, app_domain=NULL. 
+""" +from __future__ import annotations + +import json +import logging +import os +from datetime import datetime, timezone + +from quart import Blueprint, request, abort, Response, g +from sqlalchemy import select + +from shared.services.registry import services +from shared.models.federation import ActorProfile, APInboxItem +from shared.browser.app.csrf import csrf_exempt + +log = logging.getLogger(__name__) + +AP_CONTENT_TYPE = "application/activity+json" + +# Apps that serve per-app AP actors +AP_APPS = {"blog", "market", "events", "federation"} + + +def _ap_domain(app_name: str) -> str: + """Return the public domain for this app's AP identity.""" + env_key = f"AP_DOMAIN_{app_name.upper()}" + env_val = os.getenv(env_key) + if env_val: + return env_val + # Default: {app}.rose-ash.com, except federation uses AP_DOMAIN + if app_name == "federation": + return os.getenv("AP_DOMAIN", "federation.rose-ash.com") + return f"{app_name}.rose-ash.com" + + +def _federation_domain() -> str: + """The aggregate federation domain (for alsoKnownAs links).""" + return os.getenv("AP_DOMAIN", "federation.rose-ash.com") + + +def _is_aggregate(app_name: str) -> bool: + """Federation serves the aggregate actor (no per-app filter).""" + return app_name == "federation" + + +def create_activitypub_blueprint(app_name: str) -> Blueprint: + """Return a Blueprint with AP endpoints for *app_name*.""" + bp = Blueprint("activitypub", __name__) + + domain = _ap_domain(app_name) + fed_domain = _federation_domain() + aggregate = _is_aggregate(app_name) + # For per-app follows, store app_domain; for federation, "federation" + follower_app_domain: str = app_name + # For per-app outboxes, filter by origin_app; for federation, show all + outbox_origin_app: str | None = None if aggregate else app_name + + # ------------------------------------------------------------------ + # Well-known endpoints + # ------------------------------------------------------------------ + + 
@bp.get("/.well-known/webfinger") + async def webfinger(): + resource = request.args.get("resource", "") + if not resource.startswith("acct:"): + abort(400, "Invalid resource format") + + parts = resource[5:].split("@") + if len(parts) != 2: + abort(400, "Invalid resource format") + + username, res_domain = parts + if res_domain != domain: + abort(404, "User not on this server") + + actor = await services.federation.get_actor_by_username(g.s, username) + if not actor: + abort(404, "User not found") + + actor_url = f"https://{domain}/users/{username}" + return Response( + response=json.dumps({ + "subject": resource, + "aliases": [actor_url], + "links": [ + { + "rel": "self", + "type": AP_CONTENT_TYPE, + "href": actor_url, + }, + { + "rel": "http://webfinger.net/rel/profile-page", + "type": "text/html", + "href": actor_url, + }, + ], + }), + content_type="application/jrd+json", + ) + + @bp.get("/.well-known/nodeinfo") + async def nodeinfo_index(): + return Response( + response=json.dumps({ + "links": [ + { + "rel": "http://nodeinfo.diaspora.software/ns/schema/2.0", + "href": f"https://{domain}/nodeinfo/2.0", + } + ] + }), + content_type="application/json", + ) + + @bp.get("/nodeinfo/2.0") + async def nodeinfo(): + stats = await services.federation.get_stats(g.s) + return Response( + response=json.dumps({ + "version": "2.0", + "software": { + "name": "rose-ash", + "version": "1.0.0", + }, + "protocols": ["activitypub"], + "usage": { + "users": { + "total": stats.get("actors", 0), + "activeMonth": stats.get("actors", 0), + }, + "localPosts": stats.get("activities", 0), + }, + "openRegistrations": False, + "metadata": { + "nodeName": f"Rose Ash ({app_name})", + "nodeDescription": f"Rose Ash {app_name} — ActivityPub federation", + }, + }), + content_type="application/json", + ) + + @bp.get("/.well-known/host-meta") + async def host_meta(): + xml = ( + '\n' + '\n' + f' \n' + '' + ) + return Response(response=xml, content_type="application/xrd+xml") + + # 
------------------------------------------------------------------ + # Actor profile + # ------------------------------------------------------------------ + + @bp.get("/users/") + async def actor_profile(username: str): + actor = await services.federation.get_actor_by_username(g.s, username) + if not actor: + abort(404) + + accept_header = request.headers.get("accept", "") + + if "application/activity+json" in accept_header or "application/ld+json" in accept_header: + actor_url = f"https://{domain}/users/{username}" + actor_json = { + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + ], + "type": "Person", + "id": actor_url, + "name": actor.display_name or username, + "preferredUsername": username, + "summary": actor.summary or "", + "manuallyApprovesFollowers": False, + "inbox": f"{actor_url}/inbox", + "outbox": f"{actor_url}/outbox", + "followers": f"{actor_url}/followers", + "following": f"{actor_url}/following", + "publicKey": { + "id": f"{actor_url}#main-key", + "owner": actor_url, + "publicKeyPem": actor.public_key_pem, + }, + "url": actor_url, + } + + if aggregate: + # Aggregate actor advertises all per-app actors + also_known = [ + f"https://{_ap_domain(a)}/users/{username}" + for a in AP_APPS if a != "federation" + ] + if also_known: + actor_json["alsoKnownAs"] = also_known + else: + # Per-app actors link back to the aggregate federation actor + actor_json["alsoKnownAs"] = [ + f"https://{fed_domain}/users/{username}", + ] + + return Response( + response=json.dumps(actor_json), + content_type=AP_CONTENT_TYPE, + ) + + # HTML: federation renders its own profile; other apps redirect there + if aggregate: + from quart import render_template + activities, total = await services.federation.get_outbox( + g.s, username, page=1, per_page=20, + ) + return await render_template( + "federation/profile.html", + actor=actor, + activities=activities, + total=total, + ) + from quart import redirect + return 
redirect(f"https://{fed_domain}/users/{username}") + + # ------------------------------------------------------------------ + # Inbox + # ------------------------------------------------------------------ + + @csrf_exempt + @bp.post("/users//inbox") + async def inbox(username: str): + actor = await services.federation.get_actor_by_username(g.s, username) + if not actor: + abort(404) + + body = await request.get_json() + if not body: + abort(400, "Invalid JSON") + + activity_type = body.get("type", "") + from_actor_url = body.get("actor", "") + + # Verify HTTP signature (best-effort) + sig_valid = False + try: + from shared.utils.http_signatures import verify_request_signature + from shared.infrastructure.ap_inbox_handlers import fetch_remote_actor + + req_headers = dict(request.headers) + sig_header = req_headers.get("Signature", "") + + remote_actor = await fetch_remote_actor(from_actor_url) + if remote_actor and sig_header: + pub_key_pem = (remote_actor.get("publicKey") or {}).get("publicKeyPem") + if pub_key_pem: + sig_valid = verify_request_signature( + public_key_pem=pub_key_pem, + signature_header=sig_header, + method="POST", + path=f"/users/{username}/inbox", + headers=req_headers, + ) + except Exception: + log.debug("Signature verification failed for %s", from_actor_url, exc_info=True) + + if not sig_valid: + log.warning( + "Unverified inbox POST from %s (%s) on %s — accepting anyway for now", + from_actor_url, activity_type, domain, + ) + + # Load actor row for DB operations + actor_row = ( + await g.s.execute( + select(ActorProfile).where( + ActorProfile.preferred_username == username + ) + ) + ).scalar_one() + + # Store raw inbox item + item = APInboxItem( + actor_profile_id=actor_row.id, + raw_json=body, + activity_type=activity_type, + from_actor=from_actor_url, + ) + g.s.add(item) + await g.s.flush() + + # Dispatch to shared handlers + from shared.infrastructure.ap_inbox_handlers import dispatch_inbox_activity + await dispatch_inbox_activity( + g.s, 
actor_row, body, from_actor_url, + domain=domain, + app_domain=follower_app_domain, + ) + + # Mark as processed + item.state = "processed" + item.processed_at = datetime.now(timezone.utc) + await g.s.flush() + + return Response(status=202) + + # ------------------------------------------------------------------ + # Outbox + # ------------------------------------------------------------------ + + @bp.get("/users//outbox") + async def outbox(username: str): + actor = await services.federation.get_actor_by_username(g.s, username) + if not actor: + abort(404) + + actor_url = f"https://{domain}/users/{username}" + page_param = request.args.get("page") + + if not page_param: + _, total = await services.federation.get_outbox( + g.s, username, page=1, per_page=1, + origin_app=outbox_origin_app, + ) + return Response( + response=json.dumps({ + "@context": "https://www.w3.org/ns/activitystreams", + "type": "OrderedCollection", + "id": f"{actor_url}/outbox", + "totalItems": total, + "first": f"{actor_url}/outbox?page=1", + }), + content_type=AP_CONTENT_TYPE, + ) + + page_num = int(page_param) + activities, total = await services.federation.get_outbox( + g.s, username, page=page_num, per_page=20, + origin_app=outbox_origin_app, + ) + + items = [] + for a in activities: + items.append({ + "@context": "https://www.w3.org/ns/activitystreams", + "type": a.activity_type, + "id": a.activity_id, + "actor": actor_url, + "published": a.published.isoformat() if a.published else None, + "object": { + "type": a.object_type, + **(a.object_data or {}), + }, + }) + + return Response( + response=json.dumps({ + "@context": "https://www.w3.org/ns/activitystreams", + "type": "OrderedCollectionPage", + "id": f"{actor_url}/outbox?page={page_num}", + "partOf": f"{actor_url}/outbox", + "totalItems": total, + "orderedItems": items, + }), + content_type=AP_CONTENT_TYPE, + ) + + # ------------------------------------------------------------------ + # Followers / following collections + # 
------------------------------------------------------------------ + + @bp.get("/users//followers") + async def followers(username: str): + actor = await services.federation.get_actor_by_username(g.s, username) + if not actor: + abort(404) + + collection_id = f"https://{domain}/users/{username}/followers" + follower_list = await services.federation.get_followers( + g.s, username, app_domain=follower_app_domain, + ) + page_param = request.args.get("page") + + if not page_param: + return Response( + response=json.dumps({ + "@context": "https://www.w3.org/ns/activitystreams", + "type": "OrderedCollection", + "id": collection_id, + "totalItems": len(follower_list), + "first": f"{collection_id}?page=1", + }), + content_type=AP_CONTENT_TYPE, + ) + + return Response( + response=json.dumps({ + "@context": "https://www.w3.org/ns/activitystreams", + "type": "OrderedCollectionPage", + "id": f"{collection_id}?page=1", + "partOf": collection_id, + "totalItems": len(follower_list), + "orderedItems": [f.follower_actor_url for f in follower_list], + }), + content_type=AP_CONTENT_TYPE, + ) + + @bp.get("/users//following") + async def following(username: str): + actor = await services.federation.get_actor_by_username(g.s, username) + if not actor: + abort(404) + + collection_id = f"https://{domain}/users/{username}/following" + following_list, total = await services.federation.get_following(g.s, username) + page_param = request.args.get("page") + + if not page_param: + return Response( + response=json.dumps({ + "@context": "https://www.w3.org/ns/activitystreams", + "type": "OrderedCollection", + "id": collection_id, + "totalItems": total, + "first": f"{collection_id}?page=1", + }), + content_type=AP_CONTENT_TYPE, + ) + + return Response( + response=json.dumps({ + "@context": "https://www.w3.org/ns/activitystreams", + "type": "OrderedCollectionPage", + "id": f"{collection_id}?page=1", + "partOf": collection_id, + "totalItems": total, + "orderedItems": [f.actor_url for f in 
following_list], + }), + content_type=AP_CONTENT_TYPE, + ) + + return bp diff --git a/shared/infrastructure/ap_inbox_handlers.py b/shared/infrastructure/ap_inbox_handlers.py new file mode 100644 index 0000000..d972631 --- /dev/null +++ b/shared/infrastructure/ap_inbox_handlers.py @@ -0,0 +1,564 @@ +"""Reusable AP inbox handlers for all apps. + +Extracted from federation/bp/actors/routes.py so that every app's +shared AP blueprint can process Follow, Undo, Accept, Create, etc. +""" +from __future__ import annotations + +import json +import logging +import uuid +from datetime import datetime, timezone + +import httpx +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.models.federation import ( + ActorProfile, APInboxItem, APInteraction, APNotification, + APRemotePost, APActivity, RemoteActor, +) +from shared.services.registry import services + +log = logging.getLogger(__name__) + +AP_CONTENT_TYPE = "application/activity+json" + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +async def fetch_remote_actor(actor_url: str) -> dict | None: + """Fetch a remote actor's JSON-LD profile.""" + try: + async with httpx.AsyncClient(timeout=10) as client: + resp = await client.get( + actor_url, + headers={"Accept": AP_CONTENT_TYPE}, + ) + if resp.status_code == 200: + return resp.json() + except Exception: + log.exception("Failed to fetch remote actor: %s", actor_url) + return None + + +async def send_accept( + actor: ActorProfile, + follow_activity: dict, + follower_inbox: str, + domain: str, +) -> None: + """Send an Accept activity back to the follower.""" + from shared.utils.http_signatures import sign_request + from urllib.parse import urlparse + + username = actor.preferred_username + actor_url = f"https://{domain}/users/{username}" + + accept_id = f"{actor_url}/activities/{uuid.uuid4()}" + accept = { + 
"@context": "https://www.w3.org/ns/activitystreams", + "id": accept_id, + "type": "Accept", + "actor": actor_url, + "object": follow_activity, + } + + body_bytes = json.dumps(accept).encode() + key_id = f"{actor_url}#main-key" + + parsed = urlparse(follower_inbox) + headers = sign_request( + private_key_pem=actor.private_key_pem, + key_id=key_id, + method="POST", + path=parsed.path, + host=parsed.netloc, + body=body_bytes, + ) + headers["Content-Type"] = AP_CONTENT_TYPE + + log.info("Accept payload → %s: %s", follower_inbox, json.dumps(accept)[:500]) + + try: + async with httpx.AsyncClient(timeout=15) as client: + resp = await client.post( + follower_inbox, + content=body_bytes, + headers=headers, + ) + log.info("Accept → %s: %d %s", follower_inbox, resp.status_code, resp.text[:200]) + except Exception: + log.exception("Failed to send Accept to %s", follower_inbox) + + +async def backfill_follower( + session: AsyncSession, + actor: ActorProfile, + follower_inbox: str, + domain: str, + origin_app: str | None = None, +) -> None: + """Deliver recent *current* Create activities to a new follower's inbox. + + Skips Creates whose source was later Deleted, and uses the latest + Update data when available (so the follower sees the current version). 
+ """ + from shared.events.handlers.ap_delivery_handler import ( + _build_activity_json, _deliver_to_inbox, + ) + + filters = [ + APActivity.actor_profile_id == actor.id, + APActivity.is_local == True, # noqa: E712 + APActivity.activity_type == "Create", + APActivity.source_type.isnot(None), + APActivity.source_id.isnot(None), + ] + if origin_app is not None: + filters.append(APActivity.origin_app == origin_app) + + creates = ( + await session.execute( + select(APActivity).where(*filters) + .order_by(APActivity.published.desc()) + .limit(40) + ) + ).scalars().all() + + if not creates: + return + + # Collect source keys that have been Deleted + source_keys = {(c.source_type, c.source_id) for c in creates} + deleted_keys: set[tuple[str | None, int | None]] = set() + if source_keys: + deletes = ( + await session.execute( + select(APActivity.source_type, APActivity.source_id).where( + APActivity.actor_profile_id == actor.id, + APActivity.activity_type == "Delete", + APActivity.is_local == True, # noqa: E712 + ) + ) + ).all() + deleted_keys = {(d[0], d[1]) for d in deletes} + + # For sources with Updates, grab the latest Update's object_data + updated_data: dict[tuple[str | None, int | None], dict] = {} + if source_keys: + updates = ( + await session.execute( + select(APActivity).where( + APActivity.actor_profile_id == actor.id, + APActivity.activity_type == "Update", + APActivity.is_local == True, # noqa: E712 + ).order_by(APActivity.published.desc()) + ) + ).scalars().all() + for u in updates: + key = (u.source_type, u.source_id) + if key not in updated_data and key in source_keys: + updated_data[key] = u.object_data or {} + + # Filter to current, non-deleted Creates (limit 20) + activities = [] + for c in creates: + key = (c.source_type, c.source_id) + if key in deleted_keys: + continue + # Apply latest Update data if available + if key in updated_data: + c.object_data = updated_data[key] + activities.append(c) + if len(activities) >= 20: + break + + if not 
activities: + return + + log.info( + "Backfilling %d posts to %s for @%s", + len(activities), follower_inbox, actor.preferred_username, + ) + + async with httpx.AsyncClient() as client: + for activity in reversed(activities): # oldest first + activity_json = _build_activity_json(activity, actor, domain) + await _deliver_to_inbox(client, follower_inbox, activity_json, actor, domain) + + +# --------------------------------------------------------------------------- +# Inbox activity handlers +# --------------------------------------------------------------------------- + +async def handle_follow( + session: AsyncSession, + actor_row: ActorProfile, + body: dict, + from_actor_url: str, + domain: str, + app_domain: str = "federation", +) -> None: + """Process a Follow activity: add follower, send Accept, backfill.""" + remote_actor = await fetch_remote_actor(from_actor_url) + if not remote_actor: + log.warning("Could not fetch remote actor for Follow: %s", from_actor_url) + return + + follower_inbox = remote_actor.get("inbox") + if not follower_inbox: + log.warning("Remote actor has no inbox: %s", from_actor_url) + return + + remote_username = remote_actor.get("preferredUsername", "") + from urllib.parse import urlparse + remote_domain = urlparse(from_actor_url).netloc + follower_acct = f"{remote_username}@{remote_domain}" if remote_username else from_actor_url + + pub_key = (remote_actor.get("publicKey") or {}).get("publicKeyPem") + + await services.federation.add_follower( + session, + actor_row.preferred_username, + follower_acct=follower_acct, + follower_inbox=follower_inbox, + follower_actor_url=from_actor_url, + follower_public_key=pub_key, + app_domain=app_domain, + ) + + log.info( + "New follower: %s → @%s (app_domain=%s)", + follower_acct, actor_row.preferred_username, app_domain, + ) + + # Notification + ra = ( + await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == from_actor_url) + ) + ).scalar_one_or_none() + if not ra: + ra_dto = 
await services.federation.get_or_fetch_remote_actor(session, from_actor_url) + if ra_dto: + ra = (await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == from_actor_url) + )).scalar_one_or_none() + + if ra: + notif = APNotification( + actor_profile_id=actor_row.id, + notification_type="follow", + from_remote_actor_id=ra.id, + ) + session.add(notif) + + # Send Accept + await send_accept(actor_row, body, follower_inbox, domain) + + # Backfill: deliver recent posts (filtered by origin_app for per-app follows) + backfill_origin = app_domain if app_domain != "federation" else None + await backfill_follower(session, actor_row, follower_inbox, domain, origin_app=backfill_origin) + + +async def handle_undo( + session: AsyncSession, + actor_row: ActorProfile, + body: dict, + from_actor_url: str, + app_domain: str = "federation", +) -> None: + """Process an Undo activity (typically Undo Follow).""" + inner = body.get("object") + if not inner: + return + + inner_type = inner.get("type") if isinstance(inner, dict) else None + if inner_type == "Follow": + from urllib.parse import urlparse + remote_domain = urlparse(from_actor_url).netloc + remote_actor = await fetch_remote_actor(from_actor_url) + remote_username = "" + if remote_actor: + remote_username = remote_actor.get("preferredUsername", "") + follower_acct = f"{remote_username}@{remote_domain}" if remote_username else from_actor_url + + removed = await services.federation.remove_follower( + session, actor_row.preferred_username, follower_acct, + app_domain=app_domain, + ) + if removed: + log.info("Unfollowed: %s → @%s (app_domain=%s)", follower_acct, actor_row.preferred_username, app_domain) + else: + log.debug("Undo Follow: follower not found: %s", follower_acct) + else: + log.debug("Undo for %s — not handled", inner_type) + + +async def handle_accept( + session: AsyncSession, + actor_row: ActorProfile, + body: dict, + from_actor_url: str, +) -> None: + """Process Accept activity — update outbound 
follow state.""" + inner = body.get("object") + if not inner: + return + + inner_type = inner.get("type") if isinstance(inner, dict) else None + if inner_type == "Follow": + await services.federation.accept_follow_response( + session, actor_row.preferred_username, from_actor_url, + ) + log.info("Follow accepted by %s for @%s", from_actor_url, actor_row.preferred_username) + + +async def handle_create( + session: AsyncSession, + actor_row: ActorProfile, + body: dict, + from_actor_url: str, + federation_domain: str, +) -> None: + """Process Create(Note/Article) — ingest remote post.""" + obj = body.get("object") + if not obj or not isinstance(obj, dict): + return + + obj_type = obj.get("type", "") + if obj_type not in ("Note", "Article"): + log.debug("Create with type %s — skipping", obj_type) + return + + remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url) + if not remote: + log.warning("Could not resolve remote actor for Create: %s", from_actor_url) + return + + await services.federation.ingest_remote_post(session, remote.id, body, obj) + log.info("Ingested %s from %s", obj_type, from_actor_url) + + # Mention notification + tags = obj.get("tag", []) + if isinstance(tags, list): + for tag in tags: + if not isinstance(tag, dict): + continue + if tag.get("type") != "Mention": + continue + href = tag.get("href", "") + if f"https://{federation_domain}/users/" in href: + mentioned_username = href.rsplit("/", 1)[-1] + mentioned = await services.federation.get_actor_by_username( + session, mentioned_username, + ) + if mentioned: + rp = (await session.execute( + select(APRemotePost).where( + APRemotePost.object_id == obj.get("id") + ) + )).scalar_one_or_none() + + ra = (await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == from_actor_url) + )).scalar_one_or_none() + + notif = APNotification( + actor_profile_id=mentioned.id, + notification_type="mention", + from_remote_actor_id=ra.id if ra else None, + 
target_remote_post_id=rp.id if rp else None, + ) + session.add(notif) + + # Reply notification + in_reply_to = obj.get("inReplyTo") + if in_reply_to and f"https://{federation_domain}/users/" in str(in_reply_to): + local_activity = (await session.execute( + select(APActivity).where( + APActivity.activity_id == in_reply_to, + ) + )).scalar_one_or_none() + if local_activity: + ra = (await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == from_actor_url) + )).scalar_one_or_none() + rp = (await session.execute( + select(APRemotePost).where( + APRemotePost.object_id == obj.get("id") + ) + )).scalar_one_or_none() + + notif = APNotification( + actor_profile_id=local_activity.actor_profile_id, + notification_type="reply", + from_remote_actor_id=ra.id if ra else None, + target_remote_post_id=rp.id if rp else None, + ) + session.add(notif) + + +async def handle_update( + session: AsyncSession, + actor_row: ActorProfile, + body: dict, + from_actor_url: str, +) -> None: + """Process Update — re-ingest remote post.""" + obj = body.get("object") + if not obj or not isinstance(obj, dict): + return + obj_type = obj.get("type", "") + if obj_type in ("Note", "Article"): + remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url) + if remote: + await services.federation.ingest_remote_post(session, remote.id, body, obj) + log.info("Updated %s from %s", obj_type, from_actor_url) + + +async def handle_delete( + session: AsyncSession, + actor_row: ActorProfile, + body: dict, + from_actor_url: str, +) -> None: + """Process Delete — remove remote post.""" + obj = body.get("object") + if isinstance(obj, str): + object_id = obj + elif isinstance(obj, dict): + object_id = obj.get("id", "") + else: + return + if object_id: + await services.federation.delete_remote_post(session, object_id) + log.info("Deleted remote post %s from %s", object_id, from_actor_url) + + +async def handle_like( + session: AsyncSession, + actor_row: ActorProfile, + body: 
dict, + from_actor_url: str, +) -> None: + """Process incoming Like — record interaction + notify.""" + object_id = body.get("object", "") + if isinstance(object_id, dict): + object_id = object_id.get("id", "") + if not object_id: + return + + remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url) + if not remote: + return + + ra = (await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == from_actor_url) + )).scalar_one_or_none() + + target = (await session.execute( + select(APActivity).where(APActivity.activity_id == object_id) + )).scalar_one_or_none() + + if not target: + log.info("Like from %s for %s (target not found locally)", from_actor_url, object_id) + return + + interaction = APInteraction( + remote_actor_id=ra.id if ra else None, + post_type="local", + post_id=target.id, + interaction_type="like", + activity_id=body.get("id"), + ) + session.add(interaction) + + notif = APNotification( + actor_profile_id=target.actor_profile_id, + notification_type="like", + from_remote_actor_id=ra.id if ra else None, + target_activity_id=target.id, + ) + session.add(notif) + log.info("Like from %s on activity %s", from_actor_url, object_id) + + +async def handle_announce( + session: AsyncSession, + actor_row: ActorProfile, + body: dict, + from_actor_url: str, +) -> None: + """Process incoming Announce (boost) — record interaction + notify.""" + object_id = body.get("object", "") + if isinstance(object_id, dict): + object_id = object_id.get("id", "") + if not object_id: + return + + remote = await services.federation.get_or_fetch_remote_actor(session, from_actor_url) + if not remote: + return + + ra = (await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == from_actor_url) + )).scalar_one_or_none() + + target = (await session.execute( + select(APActivity).where(APActivity.activity_id == object_id) + )).scalar_one_or_none() + + if not target: + log.info("Announce from %s for %s (target not found 
locally)", from_actor_url, object_id) + return + + interaction = APInteraction( + remote_actor_id=ra.id if ra else None, + post_type="local", + post_id=target.id, + interaction_type="boost", + activity_id=body.get("id"), + ) + session.add(interaction) + + notif = APNotification( + actor_profile_id=target.actor_profile_id, + notification_type="boost", + from_remote_actor_id=ra.id if ra else None, + target_activity_id=target.id, + ) + session.add(notif) + log.info("Announce from %s on activity %s", from_actor_url, object_id) + + +async def dispatch_inbox_activity( + session: AsyncSession, + actor_row: ActorProfile, + body: dict, + from_actor_url: str, + domain: str, + app_domain: str = "federation", +) -> None: + """Route an inbox activity to the correct handler.""" + activity_type = body.get("type", "") + + if activity_type == "Follow": + await handle_follow(session, actor_row, body, from_actor_url, domain, app_domain=app_domain) + elif activity_type == "Undo": + await handle_undo(session, actor_row, body, from_actor_url, app_domain=app_domain) + elif activity_type == "Accept": + await handle_accept(session, actor_row, body, from_actor_url) + elif activity_type == "Create": + await handle_create(session, actor_row, body, from_actor_url, domain) + elif activity_type == "Update": + await handle_update(session, actor_row, body, from_actor_url) + elif activity_type == "Delete": + await handle_delete(session, actor_row, body, from_actor_url) + elif activity_type == "Like": + await handle_like(session, actor_row, body, from_actor_url) + elif activity_type == "Announce": + await handle_announce(session, actor_row, body, from_actor_url) diff --git a/shared/infrastructure/cart_identity.py b/shared/infrastructure/cart_identity.py new file mode 100644 index 0000000..f16c674 --- /dev/null +++ b/shared/infrastructure/cart_identity.py @@ -0,0 +1,34 @@ +""" +Cart identity resolution — shared across all apps that need to know +who the current cart owner is (user_id or anonymous 
session_id). +""" +from __future__ import annotations + +import secrets +from typing import TypedDict, Optional + +from quart import g, session as qsession + + +class CartIdentity(TypedDict): + user_id: Optional[int] + session_id: Optional[str] + + +def current_cart_identity() -> CartIdentity: + """ + Decide how to identify the cart: + + - If user is logged in -> use user_id (and ignore session_id) + - Else -> generate / reuse an anonymous session_id stored in Quart's session + """ + user = getattr(g, "user", None) + if user is not None and getattr(user, "id", None) is not None: + return {"user_id": user.id, "session_id": None} + + sid = qsession.get("cart_sid") + if not sid: + sid = secrets.token_hex(16) + qsession["cart_sid"] = sid + + return {"user_id": None, "session_id": sid} diff --git a/shared/infrastructure/context.py b/shared/infrastructure/context.py new file mode 100644 index 0000000..a98227c --- /dev/null +++ b/shared/infrastructure/context.py @@ -0,0 +1,58 @@ +""" +Base template context shared by all apps. + +This module no longer imports cart or menu_items services directly. +Each app provides its own context_fn that calls this base and adds +app-specific variables (cart data, menu_items, etc.). +""" +from __future__ import annotations + +from datetime import datetime + +from quart import request, g, current_app + +from shared.config import config +from shared.utils import host_url +from shared.browser.app.utils import current_route_relative_path + + +async def base_context() -> dict: + """ + Common template variables available in every app. + + Does NOT include cart, calendar_cart_entries, total, calendar_total, + or menu_items — those are added by each app's context_fn. 
+ """ + is_htmx = request.headers.get("HX-Request") == "true" + search = request.headers.get("X-Search", "") + zap_filter = is_htmx and search == "" + + def base_url(): + return host_url() + + hx_select = "#main-panel" + hx_select_search = ( + hx_select + + ", #search-mobile, #search-count-mobile, #search-desktop, #search-count-desktop, #menu-items-nav-wrapper" + ) + + return { + "is_htmx": is_htmx, + "request": request, + "now": datetime.now(), + "current_local_href": current_route_relative_path(), + "config": config(), + "asset_url": current_app.jinja_env.globals.get("asset_url", lambda p: ""), + "sort_options": [ + ("az", "A\u2013Z", "order/a-z.svg"), + ("za", "Z\u2013A", "order/z-a.svg"), + ("price-asc", "\u00a3 low\u2192high", "order/l-h.svg"), + ("price-desc", "\u00a3 high\u2192low", "order/h-l.svg"), + ], + "zap_filter": zap_filter, + "print": print, + "base_url": base_url, + "base_title": config()["title"], + "hx_select": hx_select, + "hx_select_search": hx_select_search, + } diff --git a/shared/infrastructure/factory.py b/shared/infrastructure/factory.py new file mode 100644 index 0000000..4f3869b --- /dev/null +++ b/shared/infrastructure/factory.py @@ -0,0 +1,289 @@ +from __future__ import annotations + +import asyncio +import os +import secrets +from pathlib import Path +from typing import Callable, Awaitable, Sequence + +from quart import Quart, request, g, redirect, send_from_directory + +from shared.config import init_config, config, pretty +from shared.models import KV # ensure shared models imported +# Register all app model classes with SQLAlchemy so cross-domain +# relationship() string references resolve correctly. 
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models", "account.models"):
    try:
        __import__(_mod)
    except ImportError:
        # Tolerated: an app image may ship without some sibling packages.
        pass
from shared.log_config import configure_logging
from shared.events import EventProcessor

from shared.db.session import register_db
from shared.browser.app.middleware import register as register_middleware
from shared.browser.app.redis_cacher import register as register_redis
from shared.browser.app.csrf import protect
from shared.browser.app.errors import errors

from .jinja_setup import setup_jinja
from .user_loader import load_current_user


# Async init of config (runs once at import)
# NOTE(review): asyncio.run() at import time starts and tears down an
# event loop as a side effect of importing this module — confirm no
# caller imports it from within a running loop.
asyncio.run(init_config())

# shared/ package root; static assets and templates are shared by all apps.
BASE_DIR = Path(__file__).resolve().parent.parent
STATIC_DIR = str(BASE_DIR / "static")
TEMPLATE_DIR = str(BASE_DIR / "browser" / "templates")


def create_base_app(
    name: str,
    *,
    context_fn: Callable[[], Awaitable[dict]] | None = None,
    before_request_fns: Sequence[Callable[[], Awaitable[None]]] | None = None,
    domain_services_fn: Callable[[], None] | None = None,
) -> Quart:
    """
    Create a Quart app with shared infrastructure.

    Parameters
    ----------
    name:
        Application name (also used as CACHE_APP_PREFIX).
    context_fn:
        Async function returning a dict for template context.
        Each app provides its own — the cart app queries locally,
        while blog/market apps fetch via internal API.
        If not provided, a minimal default context is used.
    before_request_fns:
        Extra before-request hooks (e.g. cart_loader for the cart app).
    domain_services_fn:
        Callable that registers domain services on the shared registry.
        Each app provides its own — registering real impls for owned
        domains and stubs (or real impls) for others.
    """
    # Domain services must be registered before widgets, which may
    # look services up at registration time.
    if domain_services_fn is not None:
        domain_services_fn()

    from shared.services.widgets import register_all_widgets
    register_all_widgets()

    app = Quart(
        name,
        static_folder=STATIC_DIR,
        static_url_path="/static",
        template_folder=TEMPLATE_DIR,
    )

    configure_logging(name)

    # NOTE(review): a hard-coded fallback secret means sessions are
    # forgeable if SECRET_KEY is unset in production — consider failing
    # hard instead when the env var is missing.
    app.secret_key = os.getenv("SECRET_KEY", "dev-secret-key-change-me-777")

    # Per-app first-party session cookie (no shared domain — avoids Safari ITP)
    app.config["SESSION_COOKIE_NAME"] = f"{name}_session"
    app.config["SESSION_COOKIE_SAMESITE"] = "Lax"
    app.config["SESSION_COOKIE_SECURE"] = True

    # Ghost / Redis config
    app.config["GHOST_API_URL"] = os.getenv("GHOST_API_URL")
    app.config["GHOST_PUBLIC_URL"] = os.getenv("GHOST_PUBLIC_URL")
    app.config["GHOST_CONTENT_KEY"] = os.getenv("GHOST_CONTENT_API_KEY")
    app.config["REDIS_URL"] = os.getenv("REDIS_URL")

    # Cache app prefix for key namespacing
    app.config["CACHE_APP_PREFIX"] = name

    # --- infrastructure ---
    # Registration order matters: middleware first, then DB/Redis,
    # then Jinja globals (which may use both), error handlers last.
    register_middleware(app)
    register_db(app)
    register_redis(app)
    setup_jinja(app)
    errors(app)

    # Auto-register OAuth client blueprint for non-account apps
    # (account is the OAuth authorization server)
    if name != "account":
        from shared.infrastructure.oauth import create_oauth_blueprint
        app.register_blueprint(create_oauth_blueprint(name))

    # Auto-register ActivityPub blueprint for AP-enabled apps
    from shared.infrastructure.activitypub import AP_APPS
    if name in AP_APPS:
        from shared.infrastructure.activitypub import create_activitypub_blueprint
        app.register_blueprint(create_activitypub_blueprint(name))

    # --- device id (all apps, including account) ---
    # A long-lived random id per browser, used below to correlate
    # login state across apps without a shared cookie domain.
    _did_cookie = f"{name}_did"

    @app.before_request
    async def _init_device_id():
        # Reuse the cookie when present; otherwise mint a new id and
        # flag it so the after_request hook sets the cookie.
        did = request.cookies.get(_did_cookie)
        if did:
            g.device_id = did
            g._new_device_id = False
        else:
            g.device_id = secrets.token_urlsafe(32)
            g._new_device_id = True

    @app.after_request
    async def _set_device_cookie(response):
        # 30-day device cookie, HttpOnly + Secure; only written when a
        # new id was minted this request (or adopted in OAuth callback).
        if getattr(g, "_new_device_id", False):
            response.set_cookie(
                _did_cookie, g.device_id,
                max_age=30 * 24 * 3600,
                secure=True, samesite="Lax", httponly=True,
            )
        return response

    # --- before-request hooks ---
    @app.before_request
    async def _route_log():
        # Reverse proxy strips the app prefix; remember it for URL building.
        g.root = request.headers.get("x-forwarded-prefix", "/")
        g.scheme = request.scheme
        g.host = request.host

    @app.before_request
    async def _load_user():
        await load_current_user()

    # Register any app-specific before-request hooks (e.g. cart loader)
    if before_request_fns:
        for fn in before_request_fns:
            app.before_request(fn)

    # Auth state check via grant verification + silent OAuth handshake
    if name != "account":
        @app.before_request
        async def _check_auth_state():
            from quart import session as qs
            from urllib.parse import quote as _quote
            # Never intercept auth, static, federation or internal routes.
            if request.path.startswith(("/auth/", "/static/", "/.well-known/", "/users/", "/nodeinfo/", "/internal/")):
                return

            uid = qs.get("uid")
            grant_token = qs.get("grant_token")

            from shared.browser.app.redis_cacher import get_redis
            redis = get_redis()

            # Case 1: logged in — verify grant still valid (direct DB, cached)
            if uid and grant_token:
                cache_key = f"grant:{grant_token}"
                if redis:
                    # Quick check: if did_auth was cleared (logout), skip cache
                    device_id = g.device_id
                    did_auth_present = await redis.get(f"did_auth:{device_id}") if device_id else True
                    cached = await redis.get(cache_key)
                    if cached == b"ok" and did_auth_present:
                        return
                    if cached == b"revoked":
                        # Grant revoked elsewhere — drop the local login.
                        qs.pop("uid", None)
                        qs.pop("grant_token", None)
                        qs.pop("cart_sid", None)
                        return

                from sqlalchemy import select
                from shared.db.session import get_session
                from shared.models.oauth_grant import OAuthGrant
                try:
                    async with get_session() as s:
                        grant = await s.scalar(
                            select(OAuthGrant).where(OAuthGrant.token == grant_token)
                        )
                    valid = grant is not None and grant.revoked_at is None
                except Exception:
                    return  # DB error — don't log user out

                if redis:
                    # Cache the verdict for 60s to avoid a DB hit per request.
                    await redis.set(cache_key, b"ok" if valid else b"revoked", ex=60)
                if not valid:
                    qs.pop("uid", None)
                    qs.pop("grant_token", None)
                    qs.pop("cart_sid", None)
                return

            # Case 2: not logged in — prompt=none OAuth (GET, non-HTMX only)
            if not uid and request.method == "GET":
                if request.headers.get("HX-Request"):
                    return
                import time as _time
                now = _time.time()
                pnone_at = qs.get("_pnone_at")
                device_id = g.device_id

                # Check if account signalled a login after we cached "not logged in"
                # (blog_did == account_did — same value set during OAuth callback)
                if device_id and redis and pnone_at:
                    auth_ts = await redis.get(f"did_auth:{device_id}")
                    if auth_ts:
                        try:
                            if float(auth_ts) > pnone_at:
                                qs.pop("_pnone_at", None)
                                return redirect(f"/auth/login?prompt=none&next={_quote(request.url, safe='')}")
                        except (ValueError, TypeError):
                            pass

                # Back off for 5 minutes after a failed silent attempt.
                if pnone_at and (now - pnone_at) < 300:
                    return
                if device_id and redis:
                    cached = await redis.get(f"prompt:{name}:{device_id}")
                    if cached == b"none":
                        return
                return redirect(f"/auth/login?prompt=none&next={_quote(request.url, safe='')}")

    @app.before_request
    async def _csrf_protect():
        await protect()

    # --- after-request hooks ---
    # Clear old shared-domain session cookie (migration from .rose-ash.com)
    @app.after_request
    async def _clear_old_shared_cookie(response):
        if request.cookies.get("blog_session"):
            response.delete_cookie("blog_session", domain=".rose-ash.com", path="/")
        return response

    @app.after_request
    async def _add_hx_preserve_search_header(response):
        # Echo the search term back so the client can keep the input populated.
        value = request.headers.get("X-Search")
        if value is not None:
            response.headers["HX-Preserve-Search"] = value
        return response

    # --- context processor ---
    if context_fn is not None:
        @app.context_processor
        async def _inject_base():
            return await context_fn()
    else:
        # Minimal fallback (no cart, no menu_items)
        from .context import base_context

        @app.context_processor
        async def _inject_base():
            return await base_context()

    # --- event processor ---
    _event_processor = EventProcessor(app_name=name)

    # --- startup ---
    @app.before_serving
    async def _startup():
        from shared.events.handlers import register_shared_handlers
        register_shared_handlers()
        # Re-run config init under the server's event loop.
        await init_config()
        print(pretty())
        await _event_processor.start()

    @app.after_serving
    async def _stop_event_processor():
        await _event_processor.stop()

    # --- favicon ---
    @app.get("/favicon.ico")
    async def favicon():
        return await send_from_directory("static", "favicon.ico")

    return app
diff --git a/shared/infrastructure/fragments.py b/shared/infrastructure/fragments.py
new file mode 100644
index 0000000..699a539
--- /dev/null
+++ b/shared/infrastructure/fragments.py
@@ -0,0 +1,193 @@
"""
Server-side fragment composition client.

Each coop app exposes HTML fragments at ``/internal/fragments/{type}``.
This module provides helpers to fetch and cache those fragments so that
consuming apps can compose cross-app UI without shared templates.

Failures raise ``FragmentError`` by default so broken fragments are
immediately visible rather than silently missing from the page.
"""

from __future__ import annotations

import asyncio
import logging
import os
from typing import Sequence

import httpx

log = logging.getLogger(__name__)

# Re-usable async client (created lazily, one per process)
_client: httpx.AsyncClient | None = None

# Default request timeout (seconds)
_DEFAULT_TIMEOUT = 2.0

# Header sent on every fragment request so providers can distinguish
# fragment fetches from normal browser traffic.
+FRAGMENT_HEADER = "X-Fragment-Request" + + +class FragmentError(Exception): + """Raised when a fragment fetch fails.""" + + +def _get_client() -> httpx.AsyncClient: + global _client + if _client is None or _client.is_closed: + _client = httpx.AsyncClient( + timeout=httpx.Timeout(_DEFAULT_TIMEOUT), + follow_redirects=False, + ) + return _client + + +def _internal_url(app_name: str) -> str: + """Resolve the Docker-internal base URL for *app_name*. + + Looks up ``INTERNAL_URL_{APP}`` first, falls back to + ``http://{app}:8000``. + """ + env_key = f"INTERNAL_URL_{app_name.upper()}" + return os.getenv(env_key, f"http://{app_name}:8000").rstrip("/") + + +# ------------------------------------------------------------------ +# Public API +# ------------------------------------------------------------------ + +def _is_fragment_request() -> bool: + """True when the current request is itself a fragment fetch.""" + try: + from quart import request as _req + return bool(_req.headers.get(FRAGMENT_HEADER)) + except Exception: + return False + + +async def fetch_fragment( + app_name: str, + fragment_type: str, + *, + params: dict | None = None, + timeout: float = _DEFAULT_TIMEOUT, + required: bool = True, +) -> str: + """Fetch an HTML fragment from another app. + + Returns the raw HTML string. When *required* is True (default), + raises ``FragmentError`` on network errors or non-200 responses. + When *required* is False, returns ``""`` on failure. + + Automatically returns ``""`` when called inside a fragment request + to prevent circular dependencies between apps. 
+ """ + if _is_fragment_request(): + return "" + + base = _internal_url(app_name) + url = f"{base}/internal/fragments/{fragment_type}" + try: + resp = await _get_client().get( + url, + params=params, + headers={FRAGMENT_HEADER: "1"}, + timeout=timeout, + ) + if resp.status_code == 200: + return resp.text + msg = f"Fragment {app_name}/{fragment_type} returned {resp.status_code}" + if required: + log.error(msg) + raise FragmentError(msg) + log.warning(msg) + return "" + except FragmentError: + raise + except Exception as exc: + msg = f"Fragment {app_name}/{fragment_type} failed: {exc}" + if required: + log.error(msg) + raise FragmentError(msg) from exc + log.warning(msg) + return "" + + +async def fetch_fragments( + requests: Sequence[tuple[str, str, dict | None]], + *, + timeout: float = _DEFAULT_TIMEOUT, + required: bool = True, +) -> list[str]: + """Fetch multiple fragments concurrently. + + *requests* is a sequence of ``(app_name, fragment_type, params)`` tuples. + Returns a list of HTML strings in the same order. When *required* + is True, any single failure raises ``FragmentError``. + """ + return list(await asyncio.gather(*( + fetch_fragment(app, ftype, params=params, timeout=timeout, required=required) + for app, ftype, params in requests + ))) + + +async def fetch_fragment_cached( + app_name: str, + fragment_type: str, + *, + params: dict | None = None, + ttl: int = 30, + timeout: float = _DEFAULT_TIMEOUT, + required: bool = True, +) -> str: + """Fetch a fragment with a Redis cache layer. + + Cache key: ``frag:{app}:{type}:{sorted_params}``. 
+ """ + # Build a stable cache key + suffix = "" + if params: + sorted_items = sorted(params.items()) + suffix = ":" + "&".join(f"{k}={v}" for k, v in sorted_items) + cache_key = f"frag:{app_name}:{fragment_type}{suffix}" + + # Try Redis cache + redis = _get_redis() + if redis: + try: + cached = await redis.get(cache_key) + if cached is not None: + return cached.decode() if isinstance(cached, bytes) else cached + except Exception: + pass + + # Cache miss — fetch from provider + html = await fetch_fragment( + app_name, fragment_type, params=params, timeout=timeout, required=required, + ) + + # Store in cache (even empty string — avoids hammering a down service) + if redis and ttl > 0: + try: + await redis.set(cache_key, html.encode(), ex=ttl) + except Exception: + pass + + return html + + +# ------------------------------------------------------------------ +# Helpers +# ------------------------------------------------------------------ + +def _get_redis(): + """Return the current app's Redis connection, or None.""" + try: + from quart import current_app + r = current_app.redis + return r if r else None + except Exception: + return None diff --git a/shared/infrastructure/http_utils.py b/shared/infrastructure/http_utils.py new file mode 100644 index 0000000..4b39779 --- /dev/null +++ b/shared/infrastructure/http_utils.py @@ -0,0 +1,49 @@ +""" +HTTP utility helpers shared across apps. + +Extracted from browse/services/services.py so order/orders blueprints +(which live in the cart app) don't need to import from the browse blueprint. +""" +from __future__ import annotations + +from urllib.parse import urlencode + +from quart import g, request +from shared.utils import host_url + + +def vary(resp): + """ + Ensure HX-Request and X-Origin are part of the Vary header + so caches distinguish HTMX from full-page requests. 
+ """ + v = resp.headers.get("Vary", "") + parts = [p.strip() for p in v.split(",") if p.strip()] + for h in ("HX-Request", "X-Origin"): + if h not in parts: + parts.append(h) + if parts: + resp.headers["Vary"] = ", ".join(parts) + return resp + + +def current_url_without_page(): + """ + Return the current URL with the ``page`` query-string parameter removed. + Used for Hx-Push-Url headers on paginated routes. + """ + (request.script_root or "").rstrip("/") + root2 = "/" + g.root + path_only = request.path + + if root2 and path_only.startswith(root2): + rel = path_only[len(root2):] + rel = rel if rel.startswith("/") else "/" + rel + else: + rel = path_only + base = host_url(rel) + + params = request.args.to_dict(flat=False) + params.pop("page", None) + qs = urlencode(params, doseq=True) + return f"{base}?{qs}" if qs else base diff --git a/shared/infrastructure/jinja_setup.py b/shared/infrastructure/jinja_setup.py new file mode 100644 index 0000000..01c80e7 --- /dev/null +++ b/shared/infrastructure/jinja_setup.py @@ -0,0 +1,120 @@ +from __future__ import annotations + +import hashlib +import re +from pathlib import Path + +from quart import Quart, g, url_for + +from shared.config import config +from shared.utils import host_url + +from shared.browser.app.csrf import generate_csrf_token +from shared.browser.app.authz import has_access +from shared.browser.app.filters import register as register_filters + +from .urls import blog_url, market_url, cart_url, events_url, federation_url, account_url, login_url, page_cart_url, market_product_url + + +def setup_jinja(app: Quart) -> None: + app.jinja_env.add_extension("jinja2.ext.do") + + # --- template globals --- + app.add_template_global(generate_csrf_token, "csrf_token") + app.add_template_global(has_access, "has_access") + + def level(): + if not hasattr(g, "_level_counter"): + g._level_counter = 0 + return g._level_counter + + def level_up(): + if not hasattr(g, "_level_counter"): + g._level_counter = 0 + 
g._level_counter += 1 + return "" + + app.jinja_env.globals["level"] = level + app.jinja_env.globals["level_up"] = level_up + app.jinja_env.globals["menu_colour"] = "sky" + app.jinja_env.globals["app_name"] = app.name + + select_colours = """ + [.hover-capable_&]:hover:bg-yellow-300 + aria-selected:bg-stone-500 aria-selected:text-white + [.hover-capable_&[aria-selected=true]:hover]:bg-orange-500""" + app.jinja_env.globals["select_colours"] = select_colours + + nav_button = f"""justify-center cursor-pointer flex flex-row items-center gap-2 rounded bg-stone-200 text-black + {select_colours}""" + + styles = { + "pill": """ + inline-flex items-center px-3 py-1 rounded-full bg-stone-200 text-stone-700 text-sm + hover:bg-stone-300 hover:text-stone-900 + focus:outline-none focus-visible:ring-2 focus-visible:ring-stone-400 + """, + "tr": "odd:bg-slate-50 even:bg-white hover:bg-slate-100", + "action_button": "px-2 py-1 border rounded text-sm bg-sky-300 hover:bg-sky-400 flex gap-1 items-center", + "pre_action_button": "px-2 py-1 border rounded text-sm bg-green-200 hover:bg-green-300", + "cancel_button": "px-3 py-1.5 rounded-full text-sm border border-stone-300 text-stone-700 hover:bg-stone-100", + "list_container": "border border-stone-200 rounded-lg p-3 mb-3 bg-white space-y-3 bg-yellow-200", + "nav_button": f"{nav_button} p-3", + "nav_button_less_pad": f"{nav_button} p-2", + } + app.jinja_env.globals["styles"] = styles + + def _asset_url(path: str) -> str: + def squash_double_slashes(url: str) -> str: + m = re.match(r"(?:[A-Za-z][\w+.-]*:)?//", url) + prefix = m.group(0) if m else "" + rest = re.sub(r"/+", "/", url[len(prefix):]) + return prefix + rest + + file_path = Path("static") / path + try: + digest = hashlib.md5(file_path.read_bytes()).hexdigest()[:8] + except Exception: + digest = "dev" + return squash_double_slashes( + f"{g.scheme}://{g.host}{g.root}/{url_for('static', filename=path, v=digest)}" + ) + + app.jinja_env.globals["asset_url"] = _asset_url + + def 
site(): + return { + "url": host_url(), + "logo": _asset_url("img/logo.jpg"), + "default_image": _asset_url("img/logo.jpg"), + "title": config()["title"], + } + + app.jinja_env.globals["site"] = site + + # cross-app URL helpers available in all templates + app.jinja_env.globals["blog_url"] = blog_url + app.jinja_env.globals["market_url"] = market_url + app.jinja_env.globals["cart_url"] = cart_url + app.jinja_env.globals["events_url"] = events_url + app.jinja_env.globals["federation_url"] = federation_url + app.jinja_env.globals["account_url"] = account_url + app.jinja_env.globals["login_url"] = login_url + app.jinja_env.globals["page_cart_url"] = page_cart_url + app.jinja_env.globals["market_product_url"] = market_product_url + + # widget registry available in all templates + from shared.services.widget_registry import widgets as _widget_registry + app.jinja_env.globals["widgets"] = _widget_registry + + # fragment composition helper — fetch HTML from another app's fragment API + from shared.infrastructure.fragments import fetch_fragment_cached + + async def _fragment(app_name: str, fragment_type: str, ttl: int = 30, **params) -> str: + p = params if params else None + return await fetch_fragment_cached(app_name, fragment_type, params=p, ttl=ttl) + + app.jinja_env.globals["fragment"] = _fragment + + # register jinja filters + register_filters(app) diff --git a/shared/infrastructure/oauth.py b/shared/infrastructure/oauth.py new file mode 100644 index 0000000..85f51f3 --- /dev/null +++ b/shared/infrastructure/oauth.py @@ -0,0 +1,183 @@ +"""OAuth2 client blueprint for non-account apps. + +Each client app gets /auth/login, /auth/callback, /auth/logout. +Account is the OAuth authorization server. + +Device cookie ({app}_did) ties the browser to its auth state so +client apps can detect login/logout without cross-domain cookies. 
+""" +from __future__ import annotations + +import secrets +from datetime import datetime, timezone + +from quart import ( + Blueprint, + redirect, + request, + session as qsession, + g, + current_app, + make_response, +) +from sqlalchemy import select + +from shared.db.session import get_session +from shared.models.oauth_code import OAuthCode +from shared.infrastructure.urls import account_url, app_url +from shared.infrastructure.cart_identity import current_cart_identity +from shared.events import emit_activity + +SESSION_USER_KEY = "uid" +GRANT_TOKEN_KEY = "grant_token" + + +def create_oauth_blueprint(app_name: str) -> Blueprint: + """Return an OAuth client blueprint for *app_name*.""" + bp = Blueprint("oauth_auth", __name__, url_prefix="/auth") + + @bp.get("/login") + @bp.get("/login/") + async def login(): + next_url = request.args.get("next", "/") + prompt = request.args.get("prompt", "") + state = secrets.token_urlsafe(32) + qsession["oauth_state"] = state + qsession["oauth_next"] = next_url + + device_id = g.device_id + redirect_uri = app_url(app_name, "/auth/callback") + params = ( + f"?client_id={app_name}" + f"&redirect_uri={redirect_uri}" + f"&device_id={device_id}" + f"&state={state}" + ) + if prompt: + params += f"&prompt={prompt}" + authorize_url = account_url(f"/auth/oauth/authorize{params}") + return redirect(authorize_url) + + @bp.get("/callback") + @bp.get("/callback/") + async def callback(): + # Adopt account's device id as our own — one identity across all apps + account_did = request.args.get("account_did", "") + if account_did: + qsession["_account_did"] = account_did + # Overwrite this app's device cookie with account's device id + g.device_id = account_did + g._new_device_id = True # factory after_request will set the cookie + + # Handle prompt=none error (user not logged in on account) + error = request.args.get("error") + if error == "login_required": + next_url = qsession.pop("oauth_next", "/") + qsession.pop("oauth_state", None) + 
import time as _time + qsession["_pnone_at"] = _time.time() + device_id = g.device_id + if device_id: + from shared.browser.app.redis_cacher import get_redis + _redis = get_redis() + if _redis: + await _redis.set( + f"prompt:{app_name}:{device_id}", b"none", ex=300 + ) + return redirect(next_url) + + code = request.args.get("code") + state = request.args.get("state") + expected_state = qsession.pop("oauth_state", None) + next_url = qsession.pop("oauth_next", "/") + + if not code or not state or state != expected_state: + current_app.logger.warning("OAuth callback: bad state or missing code") + return redirect("/") + + expected_redirect = app_url(app_name, "/auth/callback") + now = datetime.now(timezone.utc) + + async with get_session() as s: + async with s.begin(): + result = await s.execute( + select(OAuthCode) + .where(OAuthCode.code == code) + .with_for_update() + ) + oauth_code = result.scalar_one_or_none() + + if not oauth_code: + current_app.logger.warning("OAuth callback: code not found") + return redirect("/") + + if oauth_code.used_at is not None: + current_app.logger.warning("OAuth callback: code already used") + return redirect("/") + + if oauth_code.expires_at < now: + current_app.logger.warning("OAuth callback: code expired") + return redirect("/") + + if oauth_code.client_id != app_name: + current_app.logger.warning("OAuth callback: client_id mismatch") + return redirect("/") + + if oauth_code.redirect_uri != expected_redirect: + current_app.logger.warning("OAuth callback: redirect_uri mismatch") + return redirect("/") + + oauth_code.used_at = now + user_id = oauth_code.user_id + grant_token = oauth_code.grant_token + + # Set local session with grant token for revocation checking + qsession[SESSION_USER_KEY] = user_id + if grant_token: + qsession[GRANT_TOKEN_KEY] = grant_token + qsession.pop("_pnone_at", None) + + # Emit login activity for cart adoption + ident = current_cart_identity() + anon_session_id = ident.get("session_id") + if anon_session_id: 
+ try: + async with get_session() as s: + async with s.begin(): + await emit_activity( + s, + activity_type="rose:Login", + actor_uri="internal:system", + object_type="Person", + object_data={ + "user_id": user_id, + "session_id": anon_session_id, + }, + ) + except Exception: + current_app.logger.exception("OAuth: failed to emit login activity") + + return redirect(next_url, 303) + + @bp.get("/clear") + @bp.get("/clear/") + async def clear(): + """One-time migration helper: clear all session cookies.""" + qsession.clear() + resp = await make_response(redirect("/")) + resp.delete_cookie("blog_session", domain=".rose-ash.com", path="/") + resp.delete_cookie(f"{app_name}_did", path="/") + return resp + + @bp.post("/logout") + @bp.post("/logout/") + async def logout(): + qsession.pop(SESSION_USER_KEY, None) + qsession.pop(GRANT_TOKEN_KEY, None) + qsession.pop("cart_sid", None) + qsession.pop("_pnone_at", None) + qsession.pop("_account_did", None) + # Redirect through account to revoke grants + clear account session + return redirect(account_url("/auth/sso-logout/")) + + return bp diff --git a/shared/infrastructure/urls.py b/shared/infrastructure/urls.py new file mode 100644 index 0000000..28bcb45 --- /dev/null +++ b/shared/infrastructure/urls.py @@ -0,0 +1,97 @@ +from __future__ import annotations + +import os +from urllib.parse import quote + +from shared.config import config + + +def _get_app_url(app_name: str) -> str: + env_key = f"APP_URL_{app_name.upper()}" + env_val = os.getenv(env_key) + if env_val: + return env_val.rstrip("/") + return config()["app_urls"][app_name].rstrip("/") + + +def app_url(app_name: str, path: str = "/") -> str: + base = _get_app_url(app_name) + if not path.startswith("/"): + path = "/" + path + return base + path + + +def blog_url(path: str = "/") -> str: + return app_url("blog", path) + + +def market_url(path: str = "/") -> str: + return app_url("market", path) + + +def cart_url(path: str = "/") -> str: + return app_url("cart", path) + + 
+def events_url(path: str = "/") -> str: + return app_url("events", path) + + +def federation_url(path: str = "/") -> str: + return app_url("federation", path) + + +def account_url(path: str = "/") -> str: + return app_url("account", path) + + +def artdag_url(path: str = "/") -> str: + return app_url("artdag", path) + + +def page_cart_url(page_slug: str, path: str = "/") -> str: + if not path.startswith("/"): + path = "/" + path + return cart_url(f"/{page_slug}{path}") + + +def market_product_url(product_slug: str, suffix: str = "", market_place=None) -> str: + """Build a market product URL with the correct page/market prefix. + + Resolves the prefix from: + - market app context: g.post_slug + g.market_slug + - cart app context: g.page_slug + market_place.slug + """ + from quart import g + + page_slug = getattr(g, "post_slug", None) or getattr(g, "page_slug", None) + ms = getattr(g, "market_slug", None) or ( + getattr(market_place, "slug", None) if market_place else None + ) + prefix = f"/{page_slug}/{ms}" if page_slug and ms else "" + tail = f"/{suffix}" if suffix else "/" + return market_url(f"{prefix}/product/{product_slug}{tail}") + + +def login_url(next_url: str = "") -> str: + from quart import current_app + + # Account handles login directly (magic link flow — it's the OAuth server) + if current_app.name == "account": + base = "/auth/login/" + params: list[str] = [] + if next_url: + params.append(f"next={quote(next_url, safe='')}") + from quart import session as qsession + cart_sid = qsession.get("cart_sid") + if cart_sid: + params.append(f"cart_sid={quote(cart_sid, safe='')}") + if params: + return f"{base}?{'&'.join(params)}" + return base + + # Client apps: local /auth/login triggers OAuth redirect to account + base = "/auth/login/" + if next_url: + return f"{base}?next={quote(next_url, safe='')}" + return base diff --git a/shared/infrastructure/user_loader.py b/shared/infrastructure/user_loader.py new file mode 100644 index 0000000..aa488d4 --- /dev/null 
+++ b/shared/infrastructure/user_loader.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from quart import session as qsession, g +from sqlalchemy import select +from sqlalchemy.orm import selectinload + +from shared.models.user import User +from shared.models.ghost_membership_entities import UserNewsletter + + +async def load_user_by_id(session, user_id: int): + """Load a user by ID with labels and newsletters eagerly loaded.""" + stmt = ( + select(User) + .options( + selectinload(User.labels), + selectinload(User.user_newsletters).selectinload( + UserNewsletter.newsletter + ), + ) + .where(User.id == user_id) + ) + result = await session.execute(stmt) + return result.scalar_one_or_none() + + +async def load_current_user(): + uid = qsession.get("uid") + if not uid: + g.user = None + g.rights = {"admin": False} + return + + g.user = await load_user_by_id(g.s, uid) + g.rights = {l.name: True for l in g.user.labels} if g.user else {} diff --git a/shared/log_config/__init__.py b/shared/log_config/__init__.py new file mode 100644 index 0000000..359f540 --- /dev/null +++ b/shared/log_config/__init__.py @@ -0,0 +1,3 @@ +from .setup import configure_logging, get_logger + +__all__ = ["configure_logging", "get_logger"] diff --git a/shared/log_config/setup.py b/shared/log_config/setup.py new file mode 100644 index 0000000..50621c7 --- /dev/null +++ b/shared/log_config/setup.py @@ -0,0 +1,66 @@ +""" +Structured JSON logging for all Rose Ash apps. + +Call configure_logging(app_name) once at app startup. +Use get_logger(name) anywhere to get a logger that outputs JSON to stdout. 
+""" +from __future__ import annotations + +import json +import logging +import sys +from datetime import datetime, timezone + + +class JSONFormatter(logging.Formatter): + """Format log records as single-line JSON objects.""" + + def __init__(self, app_name: str = ""): + super().__init__() + self.app_name = app_name + + def format(self, record: logging.LogRecord) -> str: + entry = { + "timestamp": datetime.now(timezone.utc).isoformat(), + "level": record.levelname, + "app": self.app_name, + "logger": record.name, + "message": record.getMessage(), + } + # Include extra fields if set on the record + for key in ("event_type", "user_id", "request_id", "duration_ms"): + val = getattr(record, key, None) + if val is not None: + entry[key] = val + + if record.exc_info and record.exc_info[0] is not None: + entry["exception"] = self.formatException(record.exc_info) + + return json.dumps(entry, default=str) + + +_configured = False + + +def configure_logging(app_name: str, level: int = logging.INFO) -> None: + """Set up structured JSON logging to stdout. Safe to call multiple times.""" + global _configured + if _configured: + return + _configured = True + + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter(JSONFormatter(app_name=app_name)) + + root = logging.getLogger() + root.setLevel(level) + root.addHandler(handler) + + # Quiet down noisy libraries + for name in ("httpx", "httpcore", "asyncio", "sqlalchemy.engine"): + logging.getLogger(name).setLevel(logging.WARNING) + + +def get_logger(name: str) -> logging.Logger: + """Get a named logger. 
Uses the structured JSON format once configure_logging is called.""" + return logging.getLogger(name) diff --git a/shared/models/__init__.py b/shared/models/__init__.py new file mode 100644 index 0000000..c7303ee --- /dev/null +++ b/shared/models/__init__.py @@ -0,0 +1,33 @@ +from .user import User +from .kv import KV +from .magic_link import MagicLink +from .oauth_code import OAuthCode +from .oauth_grant import OAuthGrant +from .menu_item import MenuItem + +from .ghost_membership_entities import ( + GhostLabel, UserLabel, + GhostNewsletter, UserNewsletter, + GhostTier, GhostSubscription, +) +from .ghost_content import Tag, Post, Author, PostAuthor, PostTag, PostLike +from .page_config import PageConfig +from .order import Order, OrderItem +from .market import ( + Product, ProductLike, ProductImage, ProductSection, + NavTop, NavSub, Listing, ListingItem, + LinkError, LinkExternal, SubcategoryRedirect, ProductLog, + ProductLabel, ProductSticker, ProductAttribute, ProductNutrition, ProductAllergen, + CartItem, +) +from .market_place import MarketPlace +from .calendars import ( + Calendar, CalendarEntry, CalendarSlot, + TicketType, Ticket, CalendarEntryPost, +) +from .container_relation import ContainerRelation +from .menu_node import MenuNode +from .federation import ( + ActorProfile, APActivity, APFollower, APInboxItem, APAnchor, IPFSPin, + RemoteActor, APFollowing, APRemotePost, APLocalPost, APInteraction, APNotification, +) diff --git a/shared/models/calendars.py b/shared/models/calendars.py new file mode 100644 index 0000000..d4e8721 --- /dev/null +++ b/shared/models/calendars.py @@ -0,0 +1,297 @@ +from __future__ import annotations + +from sqlalchemy import ( + Column, Integer, String, DateTime, ForeignKey, CheckConstraint, + Index, text, Text, Boolean, Time, Numeric +) +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func + +# Adjust this import to match where your Base lives +from shared.db.base import Base + +from datetime import datetime, 
timezone + + +def utcnow() -> datetime: + return datetime.now(timezone.utc) + + + +class Calendar(Base): + __tablename__ = "calendars" + + id = Column(Integer, primary_key=True) + container_type = Column(String(32), nullable=False, server_default=text("'page'")) + container_id = Column(Integer, nullable=False) + name = Column(String(255), nullable=False) + description = Column(Text, nullable=True) + slug = Column(String(255), nullable=False) + + created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + deleted_at = Column(DateTime(timezone=True), nullable=True) + + # relationships + entries = relationship( + "CalendarEntry", + back_populates="calendar", + cascade="all, delete-orphan", + passive_deletes=True, + order_by="CalendarEntry.start_at", + ) + + slots = relationship( + "CalendarSlot", + back_populates="calendar", + cascade="all, delete-orphan", + passive_deletes=True, + order_by="CalendarSlot.time_start", + ) + + # Indexes / constraints + __table_args__ = ( + Index("ix_calendars_container", "container_type", "container_id"), + Index("ix_calendars_name", "name"), + Index("ix_calendars_slug", "slug"), + # Soft-delete-aware uniqueness: one active calendar per container/slug + Index( + "ux_calendars_container_slug_active", + "container_type", + "container_id", + func.lower(slug), + unique=True, + postgresql_where=text("deleted_at IS NULL"), + ), + ) + + +class CalendarEntry(Base): + __tablename__ = "calendar_entries" + + id = Column(Integer, primary_key=True) + calendar_id = Column( + Integer, + ForeignKey("calendars.id", ondelete="CASCADE"), + nullable=False, + index=True, + ) + + # NEW: ownership + order link + user_id = Column(Integer, ForeignKey("users.id"), nullable=True, index=True) + session_id = Column(String(64), nullable=True, index=True) + order_id = Column(Integer, nullable=True, index=True) + + # NEW: slot link + slot_id = Column(Integer, 
ForeignKey("calendar_slots.id", ondelete="SET NULL"), nullable=True, index=True) + + # details + name = Column(String(255), nullable=False) + start_at = Column(DateTime(timezone=True), nullable=False, index=True) + end_at = Column(DateTime(timezone=True), nullable=True) + + # NEW: booking state + cost + state = Column( + String(20), + nullable=False, + server_default=text("'pending'"), + ) + cost = Column(Numeric(10, 2), nullable=False, server_default=text("10")) + + # Ticket configuration + ticket_price = Column(Numeric(10, 2), nullable=True) # Price per ticket (NULL = no tickets) + ticket_count = Column(Integer, nullable=True) # Total available tickets (NULL = unlimited) + + created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + deleted_at = Column(DateTime(timezone=True), nullable=True) + + __table_args__ = ( + CheckConstraint( + "(end_at IS NULL) OR (end_at >= start_at)", + name="ck_calendar_entries_end_after_start", + ), + Index("ix_calendar_entries_name", "name"), + Index("ix_calendar_entries_start_at", "start_at"), + Index("ix_calendar_entries_user_id", "user_id"), + Index("ix_calendar_entries_session_id", "session_id"), + Index("ix_calendar_entries_state", "state"), + Index("ix_calendar_entries_order_id", "order_id"), + Index("ix_calendar_entries_slot_id", "slot_id"), + ) + + calendar = relationship("Calendar", back_populates="entries") + slot = relationship("CalendarSlot", back_populates="entries", lazy="selectin") + posts = relationship("CalendarEntryPost", back_populates="entry", cascade="all, delete-orphan") + ticket_types = relationship( + "TicketType", + back_populates="entry", + cascade="all, delete-orphan", + passive_deletes=True, + order_by="TicketType.name", + lazy="selectin", + ) + +DAY_LABELS = [ + ("mon", "Mon"), + ("tue", "Tue"), + ("wed", "Wed"), + ("thu", "Thu"), + ("fri", "Fri"), + ("sat", "Sat"), + ("sun", "Sun"), +] + + +class 
CalendarSlot(Base): + __tablename__ = "calendar_slots" + + id = Column(Integer, primary_key=True) + calendar_id = Column( + Integer, + ForeignKey("calendars.id", ondelete="CASCADE"), + nullable=False, + index=True, + ) + + name = Column(String(255), nullable=False) + description = Column(Text, nullable=True) + + mon = Column(Boolean, nullable=False, default=False) + tue = Column(Boolean, nullable=False, default=False) + wed = Column(Boolean, nullable=False, default=False) + thu = Column(Boolean, nullable=False, default=False) + fri = Column(Boolean, nullable=False, default=False) + sat = Column(Boolean, nullable=False, default=False) + sun = Column(Boolean, nullable=False, default=False) + + # NEW: whether bookings can be made at flexible times within this band + flexible = Column( + Boolean, + nullable=False, + server_default=text("false"), + default=False, + ) + + @property + def days_display(self) -> str: + days = [label for attr, label in DAY_LABELS if getattr(self, attr)] + if len(days) == len(DAY_LABELS): + # all days selected + return "All" # or "All days" if you prefer + return ", ".join(days) if days else "—" + + time_start = Column(Time(timezone=False), nullable=False) + time_end = Column(Time(timezone=False), nullable=False) + + cost = Column(Numeric(10, 2), nullable=True) + + created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + deleted_at = Column(DateTime(timezone=True), nullable=True) + + __table_args__ = ( + CheckConstraint( + "(time_end > time_start)", + name="ck_calendar_slots_time_end_after_start", + ), + Index("ix_calendar_slots_calendar_id", "calendar_id"), + Index("ix_calendar_slots_time_start", "time_start"), + ) + + calendar = relationship("Calendar", back_populates="slots") + entries = relationship("CalendarEntry", back_populates="slot") + + +class TicketType(Base): + __tablename__ = "ticket_types" + + id = Column(Integer, 
primary_key=True) + entry_id = Column( + Integer, + ForeignKey("calendar_entries.id", ondelete="CASCADE"), + nullable=False, + index=True, + ) + + name = Column(String(255), nullable=False) + cost = Column(Numeric(10, 2), nullable=False) + count = Column(Integer, nullable=False) + + created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + deleted_at = Column(DateTime(timezone=True), nullable=True) + + __table_args__ = ( + Index("ix_ticket_types_entry_id", "entry_id"), + Index("ix_ticket_types_name", "name"), + ) + + entry = relationship("CalendarEntry", back_populates="ticket_types") + + +class Ticket(Base): + __tablename__ = "tickets" + + id = Column(Integer, primary_key=True) + entry_id = Column( + Integer, + ForeignKey("calendar_entries.id", ondelete="CASCADE"), + nullable=False, + index=True, + ) + ticket_type_id = Column( + Integer, + ForeignKey("ticket_types.id", ondelete="SET NULL"), + nullable=True, + index=True, + ) + user_id = Column(Integer, ForeignKey("users.id"), nullable=True, index=True) + session_id = Column(String(64), nullable=True, index=True) + order_id = Column(Integer, nullable=True, index=True) + + code = Column(String(64), unique=True, nullable=False) # QR/barcode value + state = Column( + String(20), + nullable=False, + server_default=text("'reserved'"), + ) # reserved, confirmed, checked_in, cancelled + + created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + checked_in_at = Column(DateTime(timezone=True), nullable=True) + + __table_args__ = ( + Index("ix_tickets_entry_id", "entry_id"), + Index("ix_tickets_ticket_type_id", "ticket_type_id"), + Index("ix_tickets_user_id", "user_id"), + Index("ix_tickets_session_id", "session_id"), + Index("ix_tickets_order_id", "order_id"), + Index("ix_tickets_code", "code", unique=True), + Index("ix_tickets_state", "state"), + ) + + entry = relationship("CalendarEntry", 
backref="tickets") + ticket_type = relationship("TicketType", backref="tickets") + + +class CalendarEntryPost(Base): + """Junction between calendar entries and content (posts, etc.).""" + __tablename__ = "calendar_entry_posts" + + id = Column(Integer, primary_key=True, autoincrement=True) + entry_id = Column(Integer, ForeignKey("calendar_entries.id", ondelete="CASCADE"), nullable=False) + content_type = Column(String(32), nullable=False, server_default=text("'post'")) + content_id = Column(Integer, nullable=False) + + created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow) + deleted_at = Column(DateTime(timezone=True), nullable=True) + + __table_args__ = ( + Index("ix_entry_posts_entry_id", "entry_id"), + Index("ix_entry_posts_content", "content_type", "content_id"), + ) + + entry = relationship("CalendarEntry", back_populates="posts") + + +__all__ = ["Calendar", "CalendarEntry", "CalendarSlot", "TicketType", "Ticket", "CalendarEntryPost"] diff --git a/shared/models/container_relation.py b/shared/models/container_relation.py new file mode 100644 index 0000000..ecafaba --- /dev/null +++ b/shared/models/container_relation.py @@ -0,0 +1,38 @@ +from datetime import datetime +from typing import Optional +from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy import Integer, String, DateTime, Index, UniqueConstraint, func +from shared.db.base import Base + + +class ContainerRelation(Base): + __tablename__ = "container_relations" + + __table_args__ = ( + UniqueConstraint( + "parent_type", "parent_id", "child_type", "child_id", + name="uq_container_relations_parent_child", + ), + Index("ix_container_relations_parent", "parent_type", "parent_id"), + Index("ix_container_relations_child", "child_type", "child_id"), + ) + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + + parent_type: Mapped[str] = mapped_column(String(32), nullable=False) + parent_id: Mapped[int] = mapped_column(Integer, nullable=False) + 
child_type: Mapped[str] = mapped_column(String(32), nullable=False) + child_id: Mapped[int] = mapped_column(Integer, nullable=False) + + sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0) + label: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + server_default=func.now(), + nullable=False, + ) + deleted_at: Mapped[Optional[datetime]] = mapped_column( + DateTime(timezone=True), + nullable=True, + ) diff --git a/shared/models/federation.py b/shared/models/federation.py new file mode 100644 index 0000000..daef64a --- /dev/null +++ b/shared/models/federation.py @@ -0,0 +1,466 @@ +"""Federation / ActivityPub ORM models. + +These models support AP identity, activities, followers, inbox processing, +IPFS content addressing, and OpenTimestamps anchoring. +""" +from __future__ import annotations + +from datetime import datetime + +from sqlalchemy import ( + String, Integer, DateTime, Text, Boolean, BigInteger, + ForeignKey, UniqueConstraint, Index, func, +) +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from shared.db.base import Base + + +class ActorProfile(Base): + """AP identity for a user. 
Created when user chooses a username.""" + __tablename__ = "ap_actor_profiles" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + user_id: Mapped[int] = mapped_column( + Integer, ForeignKey("users.id", ondelete="CASCADE"), + unique=True, nullable=False, + ) + preferred_username: Mapped[str] = mapped_column(String(64), unique=True, nullable=False) + display_name: Mapped[str | None] = mapped_column(String(255), nullable=True) + summary: Mapped[str | None] = mapped_column(Text, nullable=True) + public_key_pem: Mapped[str] = mapped_column(Text, nullable=False) + private_key_pem: Mapped[str] = mapped_column(Text, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + + # Relationships + user = relationship("User", backref="actor_profile", uselist=False, lazy="selectin") + activities = relationship("APActivity", back_populates="actor_profile", lazy="dynamic") + followers = relationship("APFollower", back_populates="actor_profile", lazy="dynamic") + + __table_args__ = ( + Index("ix_ap_actor_user_id", "user_id", unique=True), + Index("ix_ap_actor_username", "preferred_username", unique=True), + ) + + def __repr__(self) -> str: + return f"" + + +class APActivity(Base): + """An ActivityPub activity (local or remote). + + Also serves as the unified event bus: internal domain events and public + federation activities both live here, distinguished by ``visibility``. + The ``EventProcessor`` polls rows with ``process_state='pending'``. 
+ """ + __tablename__ = "ap_activities" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + activity_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False) + activity_type: Mapped[str] = mapped_column(String(64), nullable=False) + actor_profile_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True, + ) + object_type: Mapped[str | None] = mapped_column(String(64), nullable=True) + object_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True) + published: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + signature: Mapped[dict | None] = mapped_column(JSONB, nullable=True) + is_local: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true") + + # Link back to originating domain object (e.g. source_type='post', source_id=42) + source_type: Mapped[str | None] = mapped_column(String(64), nullable=True) + source_id: Mapped[int | None] = mapped_column(Integer, nullable=True) + + # IPFS content-addressed copy of the activity + ipfs_cid: Mapped[str | None] = mapped_column(String(128), nullable=True) + + # Anchoring (filled later when batched into a merkle tree) + anchor_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("ap_anchors.id", ondelete="SET NULL"), nullable=True, + ) + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + + # --- Unified event-bus columns --- + actor_uri: Mapped[str | None] = mapped_column( + String(512), nullable=True, + ) + visibility: Mapped[str] = mapped_column( + String(20), nullable=False, default="public", server_default="public", + ) + process_state: Mapped[str] = mapped_column( + String(20), nullable=False, default="completed", server_default="completed", + ) + process_attempts: Mapped[int] = mapped_column( + Integer, nullable=False, 
default=0, server_default="0", + ) + process_max_attempts: Mapped[int] = mapped_column( + Integer, nullable=False, default=5, server_default="5", + ) + process_error: Mapped[str | None] = mapped_column(Text, nullable=True) + processed_at: Mapped[datetime | None] = mapped_column( + DateTime(timezone=True), nullable=True, + ) + origin_app: Mapped[str | None] = mapped_column( + String(64), nullable=True, + ) + + # Relationships + actor_profile = relationship("ActorProfile", back_populates="activities") + + __table_args__ = ( + Index("ix_ap_activity_actor", "actor_profile_id"), + Index("ix_ap_activity_source", "source_type", "source_id"), + Index("ix_ap_activity_published", "published"), + Index("ix_ap_activity_process", "process_state"), + ) + + def __repr__(self) -> str: + return f"" + + +class APFollower(Base): + """A remote follower of a local actor. + + ``app_domain`` scopes the follow to a specific app (e.g. "blog", + "market", "events"). "federation" means the aggregate — the + follower subscribes to all activities. 
+ """ + __tablename__ = "ap_followers" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + actor_profile_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False, + ) + follower_acct: Mapped[str] = mapped_column(String(512), nullable=False) + follower_inbox: Mapped[str] = mapped_column(String(512), nullable=False) + follower_actor_url: Mapped[str] = mapped_column(String(512), nullable=False) + follower_public_key: Mapped[str | None] = mapped_column(Text, nullable=True) + app_domain: Mapped[str] = mapped_column( + String(64), nullable=False, default="federation", server_default="federation", + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + + # Relationships + actor_profile = relationship("ActorProfile", back_populates="followers") + + __table_args__ = ( + UniqueConstraint( + "actor_profile_id", "follower_acct", "app_domain", + name="uq_follower_acct_app", + ), + Index("ix_ap_follower_actor", "actor_profile_id"), + Index("ix_ap_follower_app_domain", "actor_profile_id", "app_domain"), + ) + + def __repr__(self) -> str: + return f"" + + +class APInboxItem(Base): + """Raw incoming AP activity, stored for async processing.""" + __tablename__ = "ap_inbox_items" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + actor_profile_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False, + ) + raw_json: Mapped[dict] = mapped_column(JSONB, nullable=False) + activity_type: Mapped[str | None] = mapped_column(String(64), nullable=True) + from_actor: Mapped[str | None] = mapped_column(String(512), nullable=True) + state: Mapped[str] = mapped_column( + String(20), nullable=False, default="pending", server_default="pending", + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, 
server_default=func.now(), + ) + processed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True) + + __table_args__ = ( + Index("ix_ap_inbox_state", "state"), + Index("ix_ap_inbox_actor", "actor_profile_id"), + ) + + def __repr__(self) -> str: + return f"" + + +class APAnchor(Base): + """OpenTimestamps anchoring batch — merkle tree of activities.""" + __tablename__ = "ap_anchors" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + merkle_root: Mapped[str] = mapped_column(String(128), nullable=False) + tree_ipfs_cid: Mapped[str | None] = mapped_column(String(128), nullable=True) + ots_proof_cid: Mapped[str | None] = mapped_column(String(128), nullable=True) + activity_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + confirmed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True) + bitcoin_txid: Mapped[str | None] = mapped_column(String(128), nullable=True) + + def __repr__(self) -> str: + return f"" + + +class IPFSPin(Base): + """Tracks content stored on IPFS — used by all domains.""" + __tablename__ = "ipfs_pins" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + content_hash: Mapped[str] = mapped_column(String(128), nullable=False) + ipfs_cid: Mapped[str] = mapped_column(String(128), nullable=False, unique=True) + pin_type: Mapped[str] = mapped_column(String(64), nullable=False) + source_type: Mapped[str | None] = mapped_column(String(64), nullable=True) + source_id: Mapped[int | None] = mapped_column(Integer, nullable=True) + size_bytes: Mapped[int | None] = mapped_column(BigInteger, nullable=True) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + + __table_args__ = ( + Index("ix_ipfs_pin_source", "source_type", 
"source_id"), + Index("ix_ipfs_pin_cid", "ipfs_cid", unique=True), + ) + + def __repr__(self) -> str: + return f"" + + +class RemoteActor(Base): + """Cached profile of a remote actor we interact with.""" + __tablename__ = "ap_remote_actors" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + actor_url: Mapped[str] = mapped_column(String(512), unique=True, nullable=False) + inbox_url: Mapped[str] = mapped_column(String(512), nullable=False) + shared_inbox_url: Mapped[str | None] = mapped_column(String(512), nullable=True) + preferred_username: Mapped[str] = mapped_column(String(255), nullable=False) + display_name: Mapped[str | None] = mapped_column(String(255), nullable=True) + summary: Mapped[str | None] = mapped_column(Text, nullable=True) + icon_url: Mapped[str | None] = mapped_column(String(512), nullable=True) + public_key_pem: Mapped[str | None] = mapped_column(Text, nullable=True) + domain: Mapped[str] = mapped_column(String(255), nullable=False) + fetched_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + + __table_args__ = ( + Index("ix_ap_remote_actor_url", "actor_url", unique=True), + Index("ix_ap_remote_actor_domain", "domain"), + ) + + def __repr__(self) -> str: + return f"" + + +class APFollowing(Base): + """Outbound follow: local actor → remote actor.""" + __tablename__ = "ap_following" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + actor_profile_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False, + ) + remote_actor_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False, + ) + state: Mapped[str] = mapped_column( + String(20), nullable=False, default="pending", 
server_default="pending", + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + accepted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True) + + # Relationships + actor_profile = relationship("ActorProfile") + remote_actor = relationship("RemoteActor") + + __table_args__ = ( + UniqueConstraint("actor_profile_id", "remote_actor_id", name="uq_following"), + Index("ix_ap_following_actor", "actor_profile_id"), + Index("ix_ap_following_remote", "remote_actor_id"), + ) + + def __repr__(self) -> str: + return f"" + + +class APRemotePost(Base): + """A federated post ingested from a remote actor.""" + __tablename__ = "ap_remote_posts" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + remote_actor_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False, + ) + activity_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False) + object_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False) + object_type: Mapped[str] = mapped_column(String(64), nullable=False, default="Note") + content: Mapped[str | None] = mapped_column(Text, nullable=True) + summary: Mapped[str | None] = mapped_column(Text, nullable=True) + url: Mapped[str | None] = mapped_column(String(512), nullable=True) + attachment_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True) + tag_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True) + in_reply_to: Mapped[str | None] = mapped_column(String(512), nullable=True) + conversation: Mapped[str | None] = mapped_column(String(512), nullable=True) + published: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True) + fetched_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + created_at: Mapped[datetime] = mapped_column( + 
DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + + # Relationships + remote_actor = relationship("RemoteActor") + + __table_args__ = ( + Index("ix_ap_remote_post_actor", "remote_actor_id"), + Index("ix_ap_remote_post_published", "published"), + Index("ix_ap_remote_post_object", "object_id", unique=True), + ) + + def __repr__(self) -> str: + return f"" + + +class APLocalPost(Base): + """A native post composed in the federation UI.""" + __tablename__ = "ap_local_posts" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + actor_profile_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False, + ) + content: Mapped[str] = mapped_column(Text, nullable=False) + visibility: Mapped[str] = mapped_column( + String(20), nullable=False, default="public", server_default="public", + ) + in_reply_to: Mapped[str | None] = mapped_column(String(512), nullable=True) + published: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now(), + ) + + # Relationships + actor_profile = relationship("ActorProfile") + + __table_args__ = ( + Index("ix_ap_local_post_actor", "actor_profile_id"), + Index("ix_ap_local_post_published", "published"), + ) + + def __repr__(self) -> str: + return f"" + + +class APInteraction(Base): + """Like or boost (local or remote).""" + __tablename__ = "ap_interactions" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + actor_profile_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True, + ) + remote_actor_id: Mapped[int | None] = mapped_column( + 
Integer, ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=True, + ) + post_type: Mapped[str] = mapped_column(String(20), nullable=False) # local/remote + post_id: Mapped[int] = mapped_column(Integer, nullable=False) + interaction_type: Mapped[str] = mapped_column(String(20), nullable=False) # like/boost + activity_id: Mapped[str | None] = mapped_column(String(512), nullable=True) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + + __table_args__ = ( + Index("ix_ap_interaction_post", "post_type", "post_id"), + Index("ix_ap_interaction_actor", "actor_profile_id"), + Index("ix_ap_interaction_remote", "remote_actor_id"), + ) + + def __repr__(self) -> str: + return f"" + + +class APNotification(Base): + """Notification for a local actor.""" + __tablename__ = "ap_notifications" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + actor_profile_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False, + ) + notification_type: Mapped[str] = mapped_column(String(20), nullable=False) + from_remote_actor_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("ap_remote_actors.id", ondelete="SET NULL"), nullable=True, + ) + from_actor_profile_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("ap_actor_profiles.id", ondelete="SET NULL"), nullable=True, + ) + target_activity_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("ap_activities.id", ondelete="SET NULL"), nullable=True, + ) + target_remote_post_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("ap_remote_posts.id", ondelete="SET NULL"), nullable=True, + ) + read: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + + # Relationships + 
actor_profile = relationship("ActorProfile", foreign_keys=[actor_profile_id]) + from_remote_actor = relationship("RemoteActor") + from_actor_profile = relationship("ActorProfile", foreign_keys=[from_actor_profile_id]) + + __table_args__ = ( + Index("ix_ap_notification_actor", "actor_profile_id"), + Index("ix_ap_notification_read", "actor_profile_id", "read"), + Index("ix_ap_notification_created", "created_at"), + ) + + +class APDeliveryLog(Base): + """Tracks successful deliveries of activities to remote inboxes. + + Used for idempotency: the delivery handler skips inboxes that already + have a success row, so retries after a crash never send duplicates. + """ + __tablename__ = "ap_delivery_log" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + activity_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ap_activities.id", ondelete="CASCADE"), nullable=False, + ) + inbox_url: Mapped[str] = mapped_column(String(512), nullable=False) + app_domain: Mapped[str] = mapped_column(String(128), nullable=False, server_default="federation") + status_code: Mapped[int | None] = mapped_column(Integer, nullable=True) + delivered_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + + __table_args__ = ( + UniqueConstraint("activity_id", "inbox_url", "app_domain", name="uq_delivery_activity_inbox_domain"), + Index("ix_ap_delivery_activity", "activity_id"), + ) diff --git a/shared/models/ghost_content.py b/shared/models/ghost_content.py new file mode 100644 index 0000000..197f651 --- /dev/null +++ b/shared/models/ghost_content.py @@ -0,0 +1,216 @@ +from datetime import datetime +from typing import List, Optional +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy import ( + Integer, + String, + Text, + Boolean, + DateTime, + ForeignKey, + Column, + func, +) +from shared.db.base import Base # whatever your Base is +# from .author import Author # make sure 
imports resolve +# from ..app.blog.calendars.model import Calendar + +class Tag(Base): + __tablename__ = "tags" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False) + + slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False) + name: Mapped[str] = mapped_column(String(255), nullable=False) + + description: Mapped[Optional[str]] = mapped_column(Text()) + visibility: Mapped[str] = mapped_column(String(32), default="public", nullable=False) + feature_image: Mapped[Optional[str]] = mapped_column(Text()) + + meta_title: Mapped[Optional[str]] = mapped_column(String(300)) + meta_description: Mapped[Optional[str]] = mapped_column(Text()) + + created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + + # NEW: posts relationship is now direct Post objects via PostTag + posts: Mapped[List["Post"]] = relationship( + "Post", + secondary="post_tags", + primaryjoin="Tag.id==post_tags.c.tag_id", + secondaryjoin="Post.id==post_tags.c.post_id", + back_populates="tags", + order_by="PostTag.sort_order", + ) + + +class Post(Base): + __tablename__ = "posts" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False) + uuid: Mapped[str] = mapped_column(String(64), unique=True, nullable=False) + slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False) + + title: Mapped[str] = mapped_column(String(500), nullable=False) + + html: Mapped[Optional[str]] = mapped_column(Text()) + plaintext: Mapped[Optional[str]] = mapped_column(Text()) + mobiledoc: Mapped[Optional[str]] = mapped_column(Text()) + lexical: Mapped[Optional[str]] = 
mapped_column(Text()) + + feature_image: Mapped[Optional[str]] = mapped_column(Text()) + feature_image_alt: Mapped[Optional[str]] = mapped_column(Text()) + feature_image_caption: Mapped[Optional[str]] = mapped_column(Text()) + + excerpt: Mapped[Optional[str]] = mapped_column(Text()) + custom_excerpt: Mapped[Optional[str]] = mapped_column(Text()) + + visibility: Mapped[str] = mapped_column(String(32), default="public", nullable=False) + status: Mapped[str] = mapped_column(String(32), default="draft", nullable=False) + featured: Mapped[bool] = mapped_column(Boolean(), default=False, nullable=False) + is_page: Mapped[bool] = mapped_column(Boolean(), default=False, nullable=False) + email_only: Mapped[bool] = mapped_column(Boolean(), default=False, nullable=False) + + canonical_url: Mapped[Optional[str]] = mapped_column(Text()) + meta_title: Mapped[Optional[str]] = mapped_column(String(500)) + meta_description: Mapped[Optional[str]] = mapped_column(Text()) + og_image: Mapped[Optional[str]] = mapped_column(Text()) + og_title: Mapped[Optional[str]] = mapped_column(String(500)) + og_description: Mapped[Optional[str]] = mapped_column(Text()) + twitter_image: Mapped[Optional[str]] = mapped_column(Text()) + twitter_title: Mapped[Optional[str]] = mapped_column(String(500)) + twitter_description: Mapped[Optional[str]] = mapped_column(Text()) + custom_template: Mapped[Optional[str]] = mapped_column(String(191)) + + reading_time: Mapped[Optional[int]] = mapped_column(Integer()) + comment_id: Mapped[Optional[str]] = mapped_column(String(191)) + + published_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + + user_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("users.id", ondelete="SET NULL"), 
index=True + ) + publish_requested: Mapped[bool] = mapped_column(Boolean(), default=False, server_default="false", nullable=False) + + primary_author_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("authors.id", ondelete="SET NULL") + ) + primary_tag_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("tags.id", ondelete="SET NULL") + ) + + primary_author: Mapped[Optional["Author"]] = relationship( + "Author", foreign_keys=[primary_author_id] + ) + primary_tag: Mapped[Optional[Tag]] = relationship( + "Tag", foreign_keys=[primary_tag_id] + ) + user: Mapped[Optional["User"]] = relationship( + "User", foreign_keys=[user_id] + ) + + # AUTHORS RELATIONSHIP (many-to-many via post_authors) + authors: Mapped[List["Author"]] = relationship( + "Author", + secondary="post_authors", + primaryjoin="Post.id==post_authors.c.post_id", + secondaryjoin="Author.id==post_authors.c.author_id", + back_populates="posts", + order_by="PostAuthor.sort_order", + ) + + # TAGS RELATIONSHIP (many-to-many via post_tags) + tags: Mapped[List[Tag]] = relationship( + "Tag", + secondary="post_tags", + primaryjoin="Post.id==post_tags.c.post_id", + secondaryjoin="Tag.id==post_tags.c.tag_id", + back_populates="posts", + order_by="PostTag.sort_order", + ) + likes: Mapped[List["PostLike"]] = relationship( + "PostLike", + back_populates="post", + cascade="all, delete-orphan", + passive_deletes=True, + ) + +class Author(Base): + __tablename__ = "authors" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False) + + slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False) + name: Mapped[str] = mapped_column(String(255), nullable=False) + email: Mapped[Optional[str]] = mapped_column(String(255)) + + profile_image: Mapped[Optional[str]] = mapped_column(Text()) + cover_image: Mapped[Optional[str]] = mapped_column(Text()) + bio: 
Mapped[Optional[str]] = mapped_column(Text()) + website: Mapped[Optional[str]] = mapped_column(Text()) + location: Mapped[Optional[str]] = mapped_column(Text()) + facebook: Mapped[Optional[str]] = mapped_column(Text()) + twitter: Mapped[Optional[str]] = mapped_column(Text()) + + created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + + # backref to posts via post_authors + posts: Mapped[List[Post]] = relationship( + "Post", + secondary="post_authors", + primaryjoin="Author.id==post_authors.c.author_id", + secondaryjoin="Post.id==post_authors.c.post_id", + back_populates="authors", + order_by="PostAuthor.sort_order", + ) + +class PostAuthor(Base): + __tablename__ = "post_authors" + + post_id: Mapped[int] = mapped_column( + ForeignKey("posts.id", ondelete="CASCADE"), + primary_key=True, + ) + author_id: Mapped[int] = mapped_column( + ForeignKey("authors.id", ondelete="CASCADE"), + primary_key=True, + ) + sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False) + + +class PostTag(Base): + __tablename__ = "post_tags" + + post_id: Mapped[int] = mapped_column( + ForeignKey("posts.id", ondelete="CASCADE"), + primary_key=True, + ) + tag_id: Mapped[int] = mapped_column( + ForeignKey("tags.id", ondelete="CASCADE"), + primary_key=True, + ) + sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False) + + +class PostLike(Base): + __tablename__ = "post_likes" + + id = Column(Integer, primary_key=True, autoincrement=True) + user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False) + post_id: Mapped[int] = mapped_column(ForeignKey("posts.id", ondelete="CASCADE"), nullable=False) + + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now()) + updated_at: Mapped[datetime] = 
mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now()) + deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True)) + + post: Mapped["Post"] = relationship("Post", back_populates="likes", foreign_keys=[post_id]) + user = relationship("User", back_populates="liked_posts") diff --git a/shared/models/ghost_membership_entities.py b/shared/models/ghost_membership_entities.py new file mode 100644 index 0000000..5e3542a --- /dev/null +++ b/shared/models/ghost_membership_entities.py @@ -0,0 +1,122 @@ +# suma_browser/models/ghost_membership_entities.py + +from datetime import datetime +from typing import Optional + +from sqlalchemy import ( + Integer, String, Text, Boolean, DateTime, ForeignKey, UniqueConstraint +) +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.ext.associationproxy import association_proxy + +from shared.db.base import Base + + +# ----------------------- +# Labels (simple M2M) +# ----------------------- + +class GhostLabel(Base): + __tablename__ = "ghost_labels" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + ghost_id: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False) + name: Mapped[str] = mapped_column(String(255), nullable=False) + slug: Mapped[Optional[str]] = mapped_column(String(255)) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=datetime.utcnow) + updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=datetime.utcnow) + + # Back-populated by User.labels + users = relationship("User", secondary="user_labels", back_populates="labels", lazy="selectin") + + +class UserLabel(Base): + __tablename__ = "user_labels" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), index=True) + label_id: Mapped[int] 
= mapped_column(ForeignKey("ghost_labels.id", ondelete="CASCADE"), index=True) + + __table_args__ = ( + UniqueConstraint("user_id", "label_id", name="uq_user_label"), + ) + + +# ----------------------- +# Newsletters (association object + proxy) +# ----------------------- + +class GhostNewsletter(Base): + __tablename__ = "ghost_newsletters" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + ghost_id: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False) + name: Mapped[str] = mapped_column(String(255), nullable=False) + slug: Mapped[Optional[str]] = mapped_column(String(255)) + description: Mapped[Optional[str]] = mapped_column(Text) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=datetime.utcnow) + updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=datetime.utcnow) + + # Association-object side (one-to-many) + user_newsletters = relationship( + "UserNewsletter", + back_populates="newsletter", + cascade="all, delete-orphan", + lazy="selectin", + ) + + # Convenience: list-like proxy of Users via association rows (read-only container) + users = association_proxy("user_newsletters", "user") + + +class UserNewsletter(Base): + __tablename__ = "user_newsletters" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), index=True) + newsletter_id: Mapped[int] = mapped_column(ForeignKey("ghost_newsletters.id", ondelete="CASCADE"), index=True) + subscribed: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True) + + __table_args__ = ( + UniqueConstraint("user_id", "newsletter_id", name="uq_user_newsletter"), + ) + + # Bidirectional links for the association object + user = relationship("User", back_populates="user_newsletters", lazy="selectin") + newsletter = relationship("GhostNewsletter", back_populates="user_newsletters", lazy="selectin") + + 
+# ----------------------- +# Tiers & Subscriptions +# ----------------------- + +class GhostTier(Base): + __tablename__ = "ghost_tiers" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + ghost_id: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False) + name: Mapped[str] = mapped_column(String(255), nullable=False) + slug: Mapped[Optional[str]] = mapped_column(String(255)) + type: Mapped[Optional[str]] = mapped_column(String(50)) # e.g. free, paid + visibility: Mapped[Optional[str]] = mapped_column(String(50)) + + +class GhostSubscription(Base): + __tablename__ = "ghost_subscriptions" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + ghost_id: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False) + user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), index=True) + status: Mapped[Optional[str]] = mapped_column(String(50)) + tier_id: Mapped[Optional[int]] = mapped_column(ForeignKey("ghost_tiers.id", ondelete="SET NULL"), index=True) + cadence: Mapped[Optional[str]] = mapped_column(String(50)) # month, year + price_amount: Mapped[Optional[int]] = mapped_column(Integer) + price_currency: Mapped[Optional[str]] = mapped_column(String(10)) + stripe_customer_id: Mapped[Optional[str]] = mapped_column(String(255), index=True) + stripe_subscription_id: Mapped[Optional[str]] = mapped_column(String(255), index=True) + raw: Mapped[Optional[dict]] = mapped_column(JSONB, nullable=True) + + # Relationships + user = relationship("User", back_populates="subscriptions", lazy="selectin") + tier = relationship("GhostTier", lazy="selectin") diff --git a/shared/models/kv.py b/shared/models/kv.py new file mode 100644 index 0000000..1a0563c --- /dev/null +++ b/shared/models/kv.py @@ -0,0 +1,12 @@ +from __future__ import annotations +from datetime import datetime +from sqlalchemy import String, Text, DateTime +from sqlalchemy.orm import Mapped, mapped_column +from shared.db.base 
class KV(Base):
    """Simple key-value table for settings/cache/demo."""
    # FIX: the docstring used to sit *after* __tablename__, where it is an
    # inert string expression rather than the class docstring; it now comes
    # first so it actually populates __doc__.

    __tablename__ = "kv"

    key: Mapped[str] = mapped_column(String(120), primary_key=True)
    value: Mapped[str | None] = mapped_column(Text(), nullable=True)
    # NOTE(review): naive datetime.utcnow feeding a timezone-aware column —
    # consider func.now(); confirm against existing rows before changing.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False,
        default=datetime.utcnow, onupdate=datetime.utcnow,
    )


class MagicLink(Base):
    """One-shot sign-in token e-mailed to a user (magic-link auth).

    A link is spent by setting used_at; expires_at bounds its lifetime.
    ip / user_agent record where the link was requested from.
    """

    __tablename__ = "magic_links"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # FIX: the explicit Index("ix_magic_link_token", ...) and
    # Index("ix_magic_link_user", ...) entries in __table_args__ duplicated the
    # column-level unique=True/index=True declarations below, creating two
    # indexes per column. The redundant __table_args__ entries are removed;
    # column-level flags are the convention used by the other models here.
    token: Mapped[str] = mapped_column(String(128), unique=True, index=True, nullable=False)
    user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
    purpose: Mapped[str] = mapped_column(String(32), nullable=False, default="signin")
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    used_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    ip: Mapped[str | None] = mapped_column(String(64), nullable=True)
    user_agent: Mapped[str | None] = mapped_column(String(256), nullable=True)

    user = relationship("User", backref="magic_links")
# shared/models/market.py — catalogue models synced from the wholesaler.
# FIX: removed the misleading "# at top of persist_snapshot.py:" header and
# the duplicated `from typing import List, Optional` line; ProductLike below
# no longer needs the mid-module `from sqlalchemy import Column`.
from datetime import datetime
from typing import List, Optional

from sqlalchemy import (
    String, Text, Integer, ForeignKey, DateTime, Boolean, Numeric,
    UniqueConstraint, Index, func,
)
from sqlalchemy.orm import Mapped, mapped_column, relationship

from shared.db.base import Base


class Product(Base):
    """A catalogue product, keyed publicly by slug.

    Price fields come in triplets (value, currency, raw source string) because
    they are parsed from scraped text; the *_raw column preserves the original.
    """

    __tablename__ = "products"

    id: Mapped[int] = mapped_column(primary_key=True)
    slug: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False)

    title: Mapped[Optional[str]] = mapped_column(String(512))
    image: Mapped[Optional[str]] = mapped_column(Text)

    description_short: Mapped[Optional[str]] = mapped_column(Text)
    description_html: Mapped[Optional[str]] = mapped_column(Text)

    suma_href: Mapped[Optional[str]] = mapped_column(Text)
    brand: Mapped[Optional[str]] = mapped_column(String(255))

    rrp: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    rrp_currency: Mapped[Optional[str]] = mapped_column(String(16))
    rrp_raw: Mapped[Optional[str]] = mapped_column(String(128))

    price_per_unit: Mapped[Optional[float]] = mapped_column(Numeric(12, 4))
    price_per_unit_currency: Mapped[Optional[str]] = mapped_column(String(16))
    price_per_unit_raw: Mapped[Optional[str]] = mapped_column(String(128))

    special_price: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    special_price_currency: Mapped[Optional[str]] = mapped_column(String(16))
    special_price_raw: Mapped[Optional[str]] = mapped_column(String(128))

    regular_price: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    regular_price_currency: Mapped[Optional[str]] = mapped_column(String(16))
    regular_price_raw: Mapped[Optional[str]] = mapped_column(String(128))

    oe_list_price: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))

    case_size_count: Mapped[Optional[int]] = mapped_column(Integer)
    case_size_item_qty: Mapped[Optional[float]] = mapped_column(Numeric(12, 3))
    case_size_item_unit: Mapped[Optional[str]] = mapped_column(String(32))
    case_size_raw: Mapped[Optional[str]] = mapped_column(String(128))

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    # NOTE(review): no onupdate here (same as siblings) — presumably refreshed
    # explicitly by the sync job; confirm before relying on it.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    images: Mapped[List["ProductImage"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    sections: Mapped[List["ProductSection"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    # FIX: ProductLabel.product / ProductSticker.product declare
    # back_populates="labels"/"stickers"; the reciprocal back_populates was
    # missing here, which SQLAlchemy rejects at mapper-configuration time.
    labels: Mapped[List["ProductLabel"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    stickers: Mapped[List["ProductSticker"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )

    ean: Mapped[Optional[str]] = mapped_column(String(64))
    sku: Mapped[Optional[str]] = mapped_column(String(128))
    unit_size: Mapped[Optional[str]] = mapped_column(String(128))
    pack_size: Mapped[Optional[str]] = mapped_column(String(128))

    attributes = relationship(
        "ProductAttribute",
        back_populates="product",
        lazy="selectin",
        cascade="all, delete-orphan",
    )
    nutrition = relationship(
        "ProductNutrition",
        back_populates="product",
        lazy="selectin",
        cascade="all, delete-orphan",
    )
    allergens = relationship(
        "ProductAllergen",
        back_populates="product",
        lazy="selectin",
        cascade="all, delete-orphan",
    )

    likes = relationship(
        "ProductLike",
        back_populates="product",
        cascade="all, delete-orphan",
    )
    cart_items: Mapped[List["CartItem"]] = relationship(
        "CartItem",
        back_populates="product",
        cascade="all, delete-orphan",
    )

    # NOTE(review): delete-orphan here means deleting a Product also deletes
    # its OrderItem rows — i.e. order history. Verify this is intended.
    order_items: Mapped[List["OrderItem"]] = relationship(
        "OrderItem",
        back_populates="product",
        cascade="all, delete-orphan",
    )


class ProductLike(Base):
    """A user's "like" of a product, keyed by the product's slug."""

    __tablename__ = "product_likes"

    # FIX: converted the legacy Column() declarations to the Mapped style used
    # by every other model in this module (behavioural no-op).
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[int] = mapped_column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
    # FK to products.slug (slug is unique), not products.id.
    product_slug: Mapped[str] = mapped_column(ForeignKey("products.slug", ondelete="CASCADE"))

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    product: Mapped["Product"] = relationship("Product", back_populates="likes", foreign_keys=[product_slug])

    user = relationship("User", back_populates="liked_products")  # optional reverse access


class ProductImage(Base):
    """Gallery/hero image URL attached to a product."""

    __tablename__ = "product_images"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    url: Mapped[str] = mapped_column(Text, nullable=False)
    position: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    # "gallery" (default) vs. other kinds; part of the uniqueness key below.
    kind: Mapped[str] = mapped_column(String(16), nullable=False, default="gallery")
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    product: Mapped["Product"] = relationship(back_populates="images")

    __table_args__ = (
        UniqueConstraint("product_id", "url", "kind", name="uq_product_images_product_url_kind"),
        Index("ix_product_images_position", "position"),
    )
# --- Nav & listings ---

class ProductSection(Base):
    """A titled HTML content section on a product page (unique per title)."""

    __tablename__ = "product_sections"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"),
        index=True,
        nullable=False,
    )
    title: Mapped[str] = mapped_column(String(255), nullable=False)
    html: Mapped[str] = mapped_column(Text, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    product: Mapped["Product"] = relationship(back_populates="sections")

    __table_args__ = (
        UniqueConstraint("product_id", "title", name="uq_product_sections_product_title"),
    )


class NavTop(Base):
    """Top-level navigation entry, optionally scoped to a MarketPlace."""

    __tablename__ = "nav_tops"

    id: Mapped[int] = mapped_column(primary_key=True)
    label: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    # SET NULL: the nav entry survives if its marketplace is deleted.
    market_id: Mapped[Optional[int]] = mapped_column(
        Integer,
        ForeignKey("market_places.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    listings: Mapped[List["Listing"]] = relationship(back_populates="top", cascade="all, delete-orphan")
    market = relationship("MarketPlace", back_populates="nav_tops")

    __table_args__ = (UniqueConstraint("label", "slug", name="uq_nav_tops_label_slug"),)


class NavSub(Base):
    """Second-level navigation entry under a NavTop (slug unique per parent)."""

    __tablename__ = "nav_subs"

    id: Mapped[int] = mapped_column(primary_key=True)
    top_id: Mapped[int] = mapped_column(ForeignKey("nav_tops.id", ondelete="CASCADE"), index=True, nullable=False)
    label: Mapped[Optional[str]] = mapped_column(String(255))
    slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    href: Mapped[Optional[str]] = mapped_column(Text)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    listings: Mapped[List["Listing"]] = relationship(back_populates="sub", cascade="all, delete-orphan")

    __table_args__ = (UniqueConstraint("top_id", "slug", name="uq_nav_subs_top_slug"),)


class Listing(Base):
    """A paged product listing addressed by nav ids (replaces old slug pairs)."""

    __tablename__ = "listings"

    id: Mapped[int] = mapped_column(primary_key=True)

    top_id: Mapped[int] = mapped_column(ForeignKey("nav_tops.id", ondelete="CASCADE"), index=True, nullable=False)
    sub_id: Mapped[Optional[int]] = mapped_column(ForeignKey("nav_subs.id", ondelete="CASCADE"), index=True)

    total_pages: Mapped[Optional[int]] = mapped_column(Integer)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    top: Mapped["NavTop"] = relationship(back_populates="listings")
    sub: Mapped[Optional["NavSub"]] = relationship(back_populates="listings")

    # NOTE(review): on PostgreSQL, NULL sub_id values are distinct, so this
    # constraint does not dedupe top-level (sub_id IS NULL) listings — confirm
    # whether a partial unique index is wanted there.
    __table_args__ = (
        UniqueConstraint("top_id", "sub_id", name="uq_listings_top_sub"),
    )
class ListingItem(Base):
    """One product slug appearing in a Listing (unique per listing)."""

    __tablename__ = "listing_items"

    id: Mapped[int] = mapped_column(primary_key=True)
    listing_id: Mapped[int] = mapped_column(ForeignKey("listings.id", ondelete="CASCADE"), index=True, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)

    __table_args__ = (UniqueConstraint("listing_id", "slug", name="uq_listing_items_listing_slug"),)


# --- Reports / redirects / logs ---

class LinkError(Base):
    """A broken/unresolvable link found while crawling product pages."""

    __tablename__ = "link_errors"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_slug: Mapped[Optional[str]] = mapped_column(String(255), index=True)
    href: Mapped[Optional[str]] = mapped_column(Text)
    text: Mapped[Optional[str]] = mapped_column(Text)
    top: Mapped[Optional[str]] = mapped_column(String(255))
    sub: Mapped[Optional[str]] = mapped_column(String(255))
    target_slug: Mapped[Optional[str]] = mapped_column(String(255))
    type: Mapped[Optional[str]] = mapped_column(String(255))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))


class LinkExternal(Base):
    """An outbound link to another host found on a product page."""

    __tablename__ = "link_externals"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_slug: Mapped[Optional[str]] = mapped_column(String(255), index=True)
    href: Mapped[Optional[str]] = mapped_column(Text)
    text: Mapped[Optional[str]] = mapped_column(Text)
    host: Mapped[Optional[str]] = mapped_column(String(255))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))


class SubcategoryRedirect(Base):
    """Maps an old subcategory path to its replacement path."""

    __tablename__ = "subcategory_redirects"

    id: Mapped[int] = mapped_column(primary_key=True)
    old_path: Mapped[str] = mapped_column(String(512), nullable=False, index=True)
    new_path: Mapped[str] = mapped_column(String(512), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))


class ProductLog(Base):
    """Per-fetch audit row for a product scrape attempt (success or failure)."""

    __tablename__ = "product_logs"

    id: Mapped[int] = mapped_column(primary_key=True)
    slug: Mapped[Optional[str]] = mapped_column(String(255), index=True)
    href_tried: Mapped[Optional[str]] = mapped_column(Text)
    ok: Mapped[bool] = mapped_column(Boolean, nullable=False, server_default="false")
    error_type: Mapped[Optional[str]] = mapped_column(String(255))
    error_message: Mapped[Optional[str]] = mapped_column(Text)
    http_status: Mapped[Optional[int]] = mapped_column(Integer)
    final_url: Mapped[Optional[str]] = mapped_column(Text)
    transport_error: Mapped[Optional[bool]] = mapped_column(Boolean)
    title: Mapped[Optional[str]] = mapped_column(String(512))
    has_description_html: Mapped[Optional[bool]] = mapped_column(Boolean)
    has_description_short: Mapped[Optional[bool]] = mapped_column(Boolean)
    sections_count: Mapped[Optional[int]] = mapped_column(Integer)
    images_count: Mapped[Optional[int]] = mapped_column(Integer)
    embedded_images_count: Mapped[Optional[int]] = mapped_column(Integer)
    all_images_count: Mapped[Optional[int]] = mapped_column(Integer)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
class ProductLabel(Base):
    """A named label attached to a product (unique per product)."""

    __tablename__ = "product_labels"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    product: Mapped["Product"] = relationship(back_populates="labels")

    __table_args__ = (UniqueConstraint("product_id", "name", name="uq_product_labels_product_name"),)


class ProductSticker(Base):
    """A named sticker/badge attached to a product (unique per product)."""

    __tablename__ = "product_stickers"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    product: Mapped["Product"] = relationship(back_populates="stickers")

    __table_args__ = (UniqueConstraint("product_id", "name", name="uq_product_stickers_product_name"),)


class ProductAttribute(Base):
    """Free-form key/value attribute of a product (key unique per product)."""

    __tablename__ = "product_attributes"

    # Column order preserved from the original definition (it determines DDL order).
    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    key: Mapped[str] = mapped_column(String(255), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    value: Mapped[Optional[str]] = mapped_column(Text)
    product = relationship("Product", back_populates="attributes")

    __table_args__ = (UniqueConstraint("product_id", "key", name="uq_product_attributes_product_key"),)


class ProductNutrition(Base):
    """One nutrition-table row for a product (key unique per product)."""

    __tablename__ = "product_nutrition"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    key: Mapped[str] = mapped_column(String(255), nullable=False)
    value: Mapped[Optional[str]] = mapped_column(String(255))
    unit: Mapped[Optional[str]] = mapped_column(String(64))
    product = relationship("Product", back_populates="nutrition")
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    __table_args__ = (UniqueConstraint("product_id", "key", name="uq_product_nutrition_product_key"),)


class ProductAllergen(Base):
    """Allergen flag for a product; contains=False means 'may contain'/absent."""

    __tablename__ = "product_allergens"

    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    contains: Mapped[bool] = mapped_column(Boolean, nullable=False, server_default="false")
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    product: Mapped["Product"] = relationship(back_populates="allergens")

    __table_args__ = (UniqueConstraint("product_id", "name", name="uq_product_allergens_product_name"),)
class CartItem(Base):
    """A line in a shopping cart.

    Owned either by a logged-in user (user_id) or an anonymous session
    (session_id). Links to the product by integer id, not slug.

    NOTE(review): nothing at the DB level enforces that at least one of
    user_id / session_id is set — presumably guaranteed by the cart service;
    confirm.
    """

    __tablename__ = "cart_items"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Either a logged-in user OR an anonymous session.
    user_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=True,
    )
    session_id: Mapped[str | None] = mapped_column(
        String(128),
        nullable=True,
    )

    # IMPORTANT: link to product *id*, not slug.
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"),
        nullable=False,
    )

    quantity: Mapped[int] = mapped_column(
        Integer,
        nullable=False,
        default=1,
        server_default="1",
    )

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    # SET NULL keeps the cart line if its marketplace disappears.
    market_place_id: Mapped[int | None] = mapped_column(
        ForeignKey("market_places.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )

    deleted_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )

    # Relationships.
    market_place: Mapped["MarketPlace | None"] = relationship(
        "MarketPlace",
        foreign_keys=[market_place_id],
    )
    product: Mapped["Product"] = relationship(
        "Product",
        back_populates="cart_items",
    )
    user: Mapped["User | None"] = relationship("User", back_populates="cart_items")

    # Composite lookup indexes for the two ownership modes.
    __table_args__ = (
        Index("ix_cart_items_user_product", "user_id", "product_id"),
        Index("ix_cart_items_session_product", "session_id", "product_id"),
    )
a/shared/models/market_place.py b/shared/models/market_place.py new file mode 100644 index 0000000..8792e36 --- /dev/null +++ b/shared/models/market_place.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from datetime import datetime, timezone +from typing import Optional, List + +from sqlalchemy import ( + Integer, String, Text, DateTime, ForeignKey, Index, func, text, +) +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from shared.db.base import Base + + +def utcnow() -> datetime: + return datetime.now(timezone.utc) + + +class MarketPlace(Base): + __tablename__ = "market_places" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + container_type: Mapped[str] = mapped_column( + String(32), nullable=False, server_default=text("'page'"), + ) + container_id: Mapped[int] = mapped_column(Integer, nullable=False) + name: Mapped[str] = mapped_column(String(255), nullable=False) + slug: Mapped[str] = mapped_column(String(255), nullable=False) + description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now(), + ) + deleted_at: Mapped[Optional[datetime]] = mapped_column( + DateTime(timezone=True), nullable=True, + ) + + nav_tops: Mapped[List["NavTop"]] = relationship( + "NavTop", back_populates="market", + ) + + __table_args__ = ( + Index("ix_market_places_container", "container_type", "container_id"), + Index( + "ux_market_places_slug_active", + func.lower(slug), + unique=True, + postgresql_where=text("deleted_at IS NULL"), + ), + ) diff --git a/shared/models/menu_item.py b/shared/models/menu_item.py new file mode 100644 index 0000000..d041869 --- /dev/null +++ b/shared/models/menu_item.py @@ -0,0 +1,37 @@ +from datetime import datetime +from typing import Optional +from sqlalchemy.orm 
import Mapped, mapped_column +from sqlalchemy import Integer, String, DateTime, ForeignKey, func +from shared.db.base import Base + + +class MenuItem(Base): + """Deprecated — kept so the table isn't dropped. Use shared.models.menu_node.MenuNode.""" + __tablename__ = "menu_items" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + + post_id: Mapped[int] = mapped_column( + Integer, + ForeignKey("posts.id", ondelete="CASCADE"), + nullable=False, + index=True + ) + + sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0, index=True) + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + server_default=func.now(), + nullable=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + server_default=func.now(), + onupdate=func.now(), + nullable=False + ) + deleted_at: Mapped[Optional[datetime]] = mapped_column( + DateTime(timezone=True), + nullable=True + ) diff --git a/shared/models/menu_node.py b/shared/models/menu_node.py new file mode 100644 index 0000000..d4b49cc --- /dev/null +++ b/shared/models/menu_node.py @@ -0,0 +1,50 @@ +from datetime import datetime +from typing import Optional +from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy import Integer, String, Text, DateTime, ForeignKey, Index, func +from shared.db.base import Base + + +class MenuNode(Base): + __tablename__ = "menu_nodes" + + __table_args__ = ( + Index("ix_menu_nodes_container", "container_type", "container_id"), + Index("ix_menu_nodes_parent_id", "parent_id"), + ) + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + + container_type: Mapped[str] = mapped_column(String(32), nullable=False) + container_id: Mapped[int] = mapped_column(Integer, nullable=False) + + parent_id: Mapped[Optional[int]] = mapped_column( + Integer, + ForeignKey("menu_nodes.id", ondelete="SET NULL"), + nullable=True, + ) + + sort_order: Mapped[int] = mapped_column(Integer, 
nullable=False, default=0) + depth: Mapped[int] = mapped_column(Integer, nullable=False, default=0) + + label: Mapped[str] = mapped_column(String(255), nullable=False) + slug: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + href: Mapped[Optional[str]] = mapped_column(String(1024), nullable=True) + icon: Mapped[Optional[str]] = mapped_column(String(64), nullable=True) + feature_image: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + server_default=func.now(), + nullable=False, + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + server_default=func.now(), + onupdate=func.now(), + nullable=False, + ) + deleted_at: Mapped[Optional[datetime]] = mapped_column( + DateTime(timezone=True), + nullable=True, + ) diff --git a/shared/models/oauth_code.py b/shared/models/oauth_code.py new file mode 100644 index 0000000..3973dcc --- /dev/null +++ b/shared/models/oauth_code.py @@ -0,0 +1,26 @@ +from __future__ import annotations +from datetime import datetime +from sqlalchemy import String, Integer, DateTime, ForeignKey, func, Index +from sqlalchemy.orm import Mapped, mapped_column, relationship +from shared.db.base import Base + + +class OAuthCode(Base): + __tablename__ = "oauth_codes" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + code: Mapped[str] = mapped_column(String(128), unique=True, index=True, nullable=False) + user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True) + client_id: Mapped[str] = mapped_column(String(64), nullable=False) + redirect_uri: Mapped[str] = mapped_column(String(512), nullable=False) + expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False) + used_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True) + grant_token: Mapped[str | None] = mapped_column(String(128), 
nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now()) + + user = relationship("User", backref="oauth_codes") + + __table_args__ = ( + Index("ix_oauth_code_code", "code", unique=True), + Index("ix_oauth_code_user", "user_id"), + ) diff --git a/shared/models/oauth_grant.py b/shared/models/oauth_grant.py new file mode 100644 index 0000000..01a0718 --- /dev/null +++ b/shared/models/oauth_grant.py @@ -0,0 +1,32 @@ +from __future__ import annotations +from datetime import datetime +from sqlalchemy import String, Integer, DateTime, ForeignKey, func, Index +from sqlalchemy.orm import Mapped, mapped_column, relationship +from shared.db.base import Base + + +class OAuthGrant(Base): + """Long-lived grant tracking each client-app session authorization. + + Created when the OAuth authorize endpoint issues a code. Tied to the + account session that issued it (``issuer_session``) so that logging out + on one device revokes only that device's grants. 
+ """ + __tablename__ = "oauth_grants" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + token: Mapped[str] = mapped_column(String(128), unique=True, nullable=False) + user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True) + client_id: Mapped[str] = mapped_column(String(64), nullable=False) + issuer_session: Mapped[str] = mapped_column(String(128), nullable=False, index=True) + device_id: Mapped[str | None] = mapped_column(String(128), nullable=True, index=True) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now()) + revoked_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True) + + user = relationship("User", backref="oauth_grants") + + __table_args__ = ( + Index("ix_oauth_grant_token", "token", unique=True), + Index("ix_oauth_grant_issuer", "issuer_session"), + Index("ix_oauth_grant_device", "device_id", "client_id"), + ) diff --git a/shared/models/order.py b/shared/models/order.py new file mode 100644 index 0000000..4f2f547 --- /dev/null +++ b/shared/models/order.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Optional, List + +from sqlalchemy import Integer, String, DateTime, ForeignKey, Numeric, func, Text +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from shared.db.base import Base + + +class Order(Base): + __tablename__ = "orders" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), nullable=True) + session_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True) + + page_config_id: Mapped[Optional[int]] = mapped_column( + ForeignKey("page_configs.id", ondelete="SET NULL"), + nullable=True, + index=True, + ) + + status: Mapped[str] = mapped_column( + String(32), + 
nullable=False, + default="pending", + server_default="pending", + ) + currency: Mapped[str] = mapped_column(String(16), nullable=False, default="GBP") + total_amount: Mapped[float] = mapped_column(Numeric(12, 2), nullable=False) + + # free-form description for the order + description: Mapped[Optional[str]] = mapped_column(Text, nullable=True, index=True) + + # SumUp reference string (what we send as checkout_reference) + sumup_reference: Mapped[Optional[str]] = mapped_column( + String(255), + nullable=True, + index=True, + ) + + # SumUp integration fields + sumup_checkout_id: Mapped[Optional[str]] = mapped_column( + String(128), + nullable=True, + index=True, + ) + sumup_status: Mapped[Optional[str]] = mapped_column(String(32), nullable=True) + sumup_hosted_url: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + nullable=False, + server_default=func.now(), + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + nullable=False, + server_default=func.now(), + onupdate=func.now(), + ) + + items: Mapped[List["OrderItem"]] = relationship( + "OrderItem", + back_populates="order", + cascade="all, delete-orphan", + lazy="selectin", + ) + page_config: Mapped[Optional["PageConfig"]] = relationship( + "PageConfig", + foreign_keys=[page_config_id], + lazy="selectin", + ) + + +class OrderItem(Base): + __tablename__ = "order_items" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + order_id: Mapped[int] = mapped_column( + ForeignKey("orders.id", ondelete="CASCADE"), + nullable=False, + ) + + product_id: Mapped[int] = mapped_column( + ForeignKey("products.id"), + nullable=False, + ) + product_title: Mapped[Optional[str]] = mapped_column(String(512), nullable=True) + + quantity: Mapped[int] = mapped_column(Integer, nullable=False, default=1) + unit_price: Mapped[float] = mapped_column(Numeric(12, 2), nullable=False) + currency: 
Mapped[str] = mapped_column(String(16), nullable=False, default="GBP") + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + nullable=False, + server_default=func.now(), + ) + + order: Mapped["Order"] = relationship( + "Order", + back_populates="items", + ) + + # NEW: link each order item to its product + product: Mapped["Product"] = relationship( + "Product", + back_populates="order_items", + lazy="selectin", + ) diff --git a/shared/models/page_config.py b/shared/models/page_config.py new file mode 100644 index 0000000..adb6561 --- /dev/null +++ b/shared/models/page_config.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Optional + +from sqlalchemy import Integer, String, Text, DateTime, func, JSON, text +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from shared.db.base import Base + + +class PageConfig(Base): + __tablename__ = "page_configs" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + + container_type: Mapped[str] = mapped_column( + String(32), nullable=False, server_default=text("'page'"), + ) + container_id: Mapped[int] = mapped_column(Integer, nullable=False) + + features: Mapped[dict] = mapped_column( + JSON, nullable=False, server_default="{}" + ) + + # Per-page SumUp credentials (NULL until configured) + sumup_merchant_code: Mapped[Optional[str]] = mapped_column(String(64), nullable=True) + sumup_api_key: Mapped[Optional[str]] = mapped_column(Text(), nullable=True) + sumup_checkout_prefix: Mapped[Optional[str]] = mapped_column(String(64), nullable=True) + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now() + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False, server_default=func.now() + ) + deleted_at: Mapped[Optional[datetime]] = mapped_column( + DateTime(timezone=True), nullable=True + ) diff --git 
a/shared/models/user.py b/shared/models/user.py new file mode 100644 index 0000000..473675d --- /dev/null +++ b/shared/models/user.py @@ -0,0 +1,46 @@ +from __future__ import annotations +from datetime import datetime +from sqlalchemy import String, Integer, DateTime, func, Index, Text, Boolean +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.ext.associationproxy import association_proxy +from shared.db.base import Base + +class User(Base): + __tablename__ = "users" + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + email: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now()) + last_login_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True) + + # Ghost membership linkage + ghost_id: Mapped[str | None] = mapped_column(String(64), unique=True, index=True, nullable=True) + name: Mapped[str | None] = mapped_column(String(255), nullable=True) + ghost_status: Mapped[str | None] = mapped_column(String(50), nullable=True) # free, paid, comped + ghost_subscribed: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default=func.true()) + ghost_note: Mapped[str | None] = mapped_column(Text, nullable=True) + avatar_image: Mapped[str | None] = mapped_column(Text, nullable=True) + stripe_customer_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True) + ghost_raw: Mapped[dict | None] = mapped_column(JSONB, nullable=True) + + # Relationships to Ghost-related entities + + user_newsletters = relationship("UserNewsletter", back_populates="user", cascade="all, delete-orphan", lazy="selectin") + newsletters = association_proxy("user_newsletters", "newsletter") + labels = relationship("GhostLabel", secondary="user_labels", back_populates="users", 
lazy="selectin") + subscriptions = relationship("GhostSubscription", back_populates="user", cascade="all, delete-orphan", lazy="selectin") + + liked_products = relationship("ProductLike", back_populates="user", cascade="all, delete-orphan") + liked_posts = relationship("PostLike", back_populates="user", cascade="all, delete-orphan") + cart_items = relationship( + "CartItem", + back_populates="user", + cascade="all, delete-orphan", + ) + + __table_args__ = ( + Index("ix_user_email", "email", unique=True), + ) + + def __repr__(self) -> str: + return f"" diff --git a/shared/requirements.txt b/shared/requirements.txt new file mode 100644 index 0000000..900c63e --- /dev/null +++ b/shared/requirements.txt @@ -0,0 +1,49 @@ +starlette>=0.37,<0.39 +aiofiles==25.1.0 +aiohttp>=3.9 +aiosmtplib==5.0.0 +alembic==1.17.0 +anyio==4.11.0 +async-timeout==5.0.1 +asyncpg==0.30.0 +beautifulsoup4==4.14.2 +blinker==1.9.0 +Brotli==1.1.0 +certifi==2025.10.5 +click==8.3.0 +cryptography>=41.0 +exceptiongroup==1.3.0 +Flask==3.1.2 +greenlet==3.2.4 +h11==0.16.0 +h2==4.3.0 +hpack==4.1.0 +httpcore==1.0.9 +httpx==0.28.1 +Hypercorn==0.17.3 +hyperframe==6.1.0 +idna==3.10 +itsdangerous==2.2.0 +Jinja2==3.1.6 +lxml==6.0.2 +Mako==1.3.10 +MarkupSafe==3.0.3 +priority==2.0.0 +psycopg==3.2.11 +psycopg-binary==3.2.11 +PyJWT==2.10.1 +PyYAML==6.0.3 +Quart==0.20.0 +sniffio==1.3.1 +soupsieve==2.8 +SQLAlchemy==2.0.44 +taskgroup==0.2.2 +tomli==2.3.0 +typing_extensions==4.15.0 +Werkzeug==3.1.3 +wsproto==1.2.0 +zstandard==0.25.0 +redis>=5.0 +mistune>=3.0 +pytest>=8.0 +pytest-asyncio>=0.23 diff --git a/shared/services/__init__.py b/shared/services/__init__.py new file mode 100644 index 0000000..bb11cb4 --- /dev/null +++ b/shared/services/__init__.py @@ -0,0 +1,5 @@ +"""Domain service implementations and registry.""" + +from .registry import services + +__all__ = ["services"] diff --git a/shared/services/blog_impl.py b/shared/services/blog_impl.py new file mode 100644 index 0000000..cbdcca8 --- /dev/null +++ 
b/shared/services/blog_impl.py @@ -0,0 +1,65 @@ +"""SQL-backed BlogService implementation. + +Queries ``shared.models.ghost_content.Post`` — only this module may read +blog-domain tables on behalf of other domains. +""" +from __future__ import annotations + +from sqlalchemy import select, func +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.models.ghost_content import Post +from shared.contracts.dtos import PostDTO + + +def _post_to_dto(post: Post) -> PostDTO: + return PostDTO( + id=post.id, + slug=post.slug, + title=post.title, + status=post.status, + visibility=post.visibility, + is_page=post.is_page, + feature_image=post.feature_image, + html=post.html, + excerpt=post.excerpt, + custom_excerpt=post.custom_excerpt, + published_at=post.published_at, + ) + + +class SqlBlogService: + async def get_post_by_slug(self, session: AsyncSession, slug: str) -> PostDTO | None: + post = ( + await session.execute(select(Post).where(Post.slug == slug)) + ).scalar_one_or_none() + return _post_to_dto(post) if post else None + + async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None: + post = ( + await session.execute(select(Post).where(Post.id == id)) + ).scalar_one_or_none() + return _post_to_dto(post) if post else None + + async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]: + if not ids: + return [] + result = await session.execute(select(Post).where(Post.id.in_(ids))) + return [_post_to_dto(p) for p in result.scalars().all()] + + async def search_posts( + self, session: AsyncSession, query: str, page: int = 1, per_page: int = 10, + ) -> tuple[list[PostDTO], int]: + """Search posts by title with pagination. 
Not part of the Protocol + (admin-only use in events), but provided for convenience.""" + if query: + count_stmt = select(func.count(Post.id)).where(Post.title.ilike(f"%{query}%")) + posts_stmt = select(Post).where(Post.title.ilike(f"%{query}%")).order_by(Post.title) + else: + count_stmt = select(func.count(Post.id)) + posts_stmt = select(Post).order_by(Post.published_at.desc().nullslast()) + + total = (await session.execute(count_stmt)).scalar() or 0 + offset = (page - 1) * per_page + result = await session.execute(posts_stmt.limit(per_page).offset(offset)) + return [_post_to_dto(p) for p in result.scalars().all()], total diff --git a/shared/services/calendar_impl.py b/shared/services/calendar_impl.py new file mode 100644 index 0000000..26a3f0e --- /dev/null +++ b/shared/services/calendar_impl.py @@ -0,0 +1,669 @@ +"""SQL-backed CalendarService implementation. + +Queries ``shared.models.calendars.*`` — only this module may write to +calendar-domain tables on behalf of other domains. +""" +from __future__ import annotations + +from datetime import datetime +from decimal import Decimal + +from sqlalchemy import select, update, func +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from shared.models.calendars import Calendar, CalendarEntry, CalendarEntryPost, Ticket +from shared.contracts.dtos import CalendarDTO, CalendarEntryDTO, TicketDTO + + +def _cal_to_dto(cal: Calendar) -> CalendarDTO: + return CalendarDTO( + id=cal.id, + container_type=cal.container_type, + container_id=cal.container_id, + name=cal.name, + slug=cal.slug, + description=cal.description, + ) + + +def _entry_to_dto(entry: CalendarEntry) -> CalendarEntryDTO: + cal = getattr(entry, "calendar", None) + return CalendarEntryDTO( + id=entry.id, + calendar_id=entry.calendar_id, + name=entry.name, + start_at=entry.start_at, + state=entry.state, + cost=entry.cost, + end_at=entry.end_at, + user_id=entry.user_id, + session_id=entry.session_id, + 
order_id=entry.order_id, + slot_id=entry.slot_id, + ticket_price=entry.ticket_price, + ticket_count=entry.ticket_count, + calendar_name=cal.name if cal else None, + calendar_slug=cal.slug if cal else None, + calendar_container_id=cal.container_id if cal else None, + calendar_container_type=cal.container_type if cal else None, + ) + + +def _ticket_to_dto(ticket: Ticket) -> TicketDTO: + entry = getattr(ticket, "entry", None) + tt = getattr(ticket, "ticket_type", None) + cal = getattr(entry, "calendar", None) if entry else None + # Price: ticket type cost if available, else entry ticket_price + price = None + if tt and tt.cost is not None: + price = tt.cost + elif entry and entry.ticket_price is not None: + price = entry.ticket_price + return TicketDTO( + id=ticket.id, + code=ticket.code, + state=ticket.state, + entry_name=entry.name if entry else "", + entry_start_at=entry.start_at if entry else ticket.created_at, + entry_end_at=entry.end_at if entry else None, + ticket_type_name=tt.name if tt else None, + calendar_name=cal.name if cal else None, + created_at=ticket.created_at, + checked_in_at=ticket.checked_in_at, + entry_id=entry.id if entry else None, + ticket_type_id=ticket.ticket_type_id, + price=price, + order_id=ticket.order_id, + calendar_container_id=cal.container_id if cal else None, + ) + + +class SqlCalendarService: + + # -- reads ---------------------------------------------------------------- + + async def calendars_for_container( + self, session: AsyncSession, container_type: str, container_id: int, + ) -> list[CalendarDTO]: + result = await session.execute( + select(Calendar).where( + Calendar.container_type == container_type, + Calendar.container_id == container_id, + Calendar.deleted_at.is_(None), + ).order_by(Calendar.name.asc()) + ) + return [_cal_to_dto(c) for c in result.scalars().all()] + + async def pending_entries( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + ) -> list[CalendarEntryDTO]: + filters = [ + 
CalendarEntry.deleted_at.is_(None), + CalendarEntry.state == "pending", + ] + if user_id is not None: + filters.append(CalendarEntry.user_id == user_id) + elif session_id is not None: + filters.append(CalendarEntry.session_id == session_id) + else: + return [] + + result = await session.execute( + select(CalendarEntry) + .where(*filters) + .order_by(CalendarEntry.start_at.asc()) + .options(selectinload(CalendarEntry.calendar)) + ) + return [_entry_to_dto(e) for e in result.scalars().all()] + + async def entries_for_page( + self, session: AsyncSession, page_id: int, *, + user_id: int | None, session_id: str | None, + ) -> list[CalendarEntryDTO]: + cal_ids = select(Calendar.id).where( + Calendar.container_type == "page", + Calendar.container_id == page_id, + Calendar.deleted_at.is_(None), + ).scalar_subquery() + + filters = [ + CalendarEntry.deleted_at.is_(None), + CalendarEntry.state == "pending", + CalendarEntry.calendar_id.in_(cal_ids), + ] + if user_id is not None: + filters.append(CalendarEntry.user_id == user_id) + elif session_id is not None: + filters.append(CalendarEntry.session_id == session_id) + else: + return [] + + result = await session.execute( + select(CalendarEntry) + .where(*filters) + .order_by(CalendarEntry.start_at.asc()) + .options(selectinload(CalendarEntry.calendar)) + ) + return [_entry_to_dto(e) for e in result.scalars().all()] + + async def entry_by_id(self, session: AsyncSession, entry_id: int) -> CalendarEntryDTO | None: + entry = ( + await session.execute( + select(CalendarEntry) + .where(CalendarEntry.id == entry_id, CalendarEntry.deleted_at.is_(None)) + .options(selectinload(CalendarEntry.calendar)) + ) + ).scalar_one_or_none() + return _entry_to_dto(entry) if entry else None + + async def entry_ids_for_content( + self, session: AsyncSession, content_type: str, content_id: int, + ) -> set[int]: + """Get entry IDs associated with a content item (e.g. 
post).""" + result = await session.execute( + select(CalendarEntryPost.entry_id).where( + CalendarEntryPost.content_type == content_type, + CalendarEntryPost.content_id == content_id, + CalendarEntryPost.deleted_at.is_(None), + ) + ) + return set(result.scalars().all()) + + async def visible_entries_for_period( + self, session: AsyncSession, calendar_id: int, + period_start: datetime, period_end: datetime, + *, user_id: int | None, is_admin: bool, session_id: str | None, + ) -> list[CalendarEntryDTO]: + """Return visible entries for a calendar in a date range. + + Visibility rules: + - Everyone sees confirmed entries. + - Current user/session sees their own entries (any state). + - Admins also see ordered + provisional entries for all users. + """ + # User/session entries (any state) + user_entries: list[CalendarEntry] = [] + if user_id or session_id: + conditions = [ + CalendarEntry.calendar_id == calendar_id, + CalendarEntry.deleted_at.is_(None), + CalendarEntry.start_at >= period_start, + CalendarEntry.start_at < period_end, + ] + if user_id: + conditions.append(CalendarEntry.user_id == user_id) + elif session_id: + conditions.append(CalendarEntry.session_id == session_id) + result = await session.execute( + select(CalendarEntry).where(*conditions) + .options(selectinload(CalendarEntry.calendar)) + ) + user_entries = list(result.scalars().all()) + + # Confirmed entries for everyone + result = await session.execute( + select(CalendarEntry).where( + CalendarEntry.calendar_id == calendar_id, + CalendarEntry.state == "confirmed", + CalendarEntry.deleted_at.is_(None), + CalendarEntry.start_at >= period_start, + CalendarEntry.start_at < period_end, + ).options(selectinload(CalendarEntry.calendar)) + ) + confirmed_entries = list(result.scalars().all()) + + # Admin: ordered + provisional for everyone + admin_entries: list[CalendarEntry] = [] + if is_admin: + result = await session.execute( + select(CalendarEntry).where( + CalendarEntry.calendar_id == calendar_id, + 
CalendarEntry.state.in_(("ordered", "provisional")), + CalendarEntry.deleted_at.is_(None), + CalendarEntry.start_at >= period_start, + CalendarEntry.start_at < period_end, + ).options(selectinload(CalendarEntry.calendar)) + ) + admin_entries = list(result.scalars().all()) + + # Merge, deduplicate, sort + entries_by_id: dict[int, CalendarEntry] = {} + for e in confirmed_entries: + entries_by_id[e.id] = e + for e in admin_entries: + entries_by_id[e.id] = e + for e in user_entries: + entries_by_id[e.id] = e + + merged = sorted(entries_by_id.values(), key=lambda e: e.start_at or period_start) + return [_entry_to_dto(e) for e in merged] + + async def upcoming_entries_for_container( + self, session: AsyncSession, + container_type: str | None = None, container_id: int | None = None, + *, page: int = 1, per_page: int = 20, + ) -> tuple[list[CalendarEntryDTO], bool]: + """Upcoming confirmed entries. Optionally scoped to a container.""" + filters = [ + CalendarEntry.state == "confirmed", + CalendarEntry.deleted_at.is_(None), + CalendarEntry.start_at >= func.now(), + ] + + if container_type is not None and container_id is not None: + cal_ids = select(Calendar.id).where( + Calendar.container_type == container_type, + Calendar.container_id == container_id, + Calendar.deleted_at.is_(None), + ).scalar_subquery() + filters.append(CalendarEntry.calendar_id.in_(cal_ids)) + else: + # Still exclude entries from deleted calendars + cal_ids = select(Calendar.id).where( + Calendar.deleted_at.is_(None), + ).scalar_subquery() + filters.append(CalendarEntry.calendar_id.in_(cal_ids)) + + offset = (page - 1) * per_page + result = await session.execute( + select(CalendarEntry) + .where(*filters) + .order_by(CalendarEntry.start_at.asc()) + .limit(per_page) + .offset(offset) + .options(selectinload(CalendarEntry.calendar)) + ) + entries = result.scalars().all() + has_more = len(entries) == per_page + return [_entry_to_dto(e) for e in entries], has_more + + async def associated_entries( + self, 
session: AsyncSession, content_type: str, content_id: int, page: int, + ) -> tuple[list[CalendarEntryDTO], bool]: + """Get paginated confirmed entries associated with a content item.""" + per_page = 10 + entry_ids_result = await session.execute( + select(CalendarEntryPost.entry_id).where( + CalendarEntryPost.content_type == content_type, + CalendarEntryPost.content_id == content_id, + CalendarEntryPost.deleted_at.is_(None), + ) + ) + entry_ids = set(entry_ids_result.scalars().all()) + if not entry_ids: + return [], False + + offset = (page - 1) * per_page + result = await session.execute( + select(CalendarEntry) + .where( + CalendarEntry.id.in_(entry_ids), + CalendarEntry.deleted_at.is_(None), + CalendarEntry.state == "confirmed", + ) + .order_by(CalendarEntry.start_at.desc()) + .limit(per_page) + .offset(offset) + .options(selectinload(CalendarEntry.calendar)) + ) + entries = result.scalars().all() + has_more = len(entries) == per_page + return [_entry_to_dto(e) for e in entries], has_more + + async def toggle_entry_post( + self, session: AsyncSession, entry_id: int, content_type: str, content_id: int, + ) -> bool: + """Toggle association; returns True if now associated, False if removed.""" + existing = await session.scalar( + select(CalendarEntryPost).where( + CalendarEntryPost.entry_id == entry_id, + CalendarEntryPost.content_type == content_type, + CalendarEntryPost.content_id == content_id, + CalendarEntryPost.deleted_at.is_(None), + ) + ) + if existing: + existing.deleted_at = func.now() + await session.flush() + return False + else: + assoc = CalendarEntryPost( + entry_id=entry_id, + content_type=content_type, + content_id=content_id, + ) + session.add(assoc) + await session.flush() + return True + + async def get_entries_for_order( + self, session: AsyncSession, order_id: int, + ) -> list[CalendarEntryDTO]: + result = await session.execute( + select(CalendarEntry) + .where( + CalendarEntry.order_id == order_id, + CalendarEntry.deleted_at.is_(None), + ) + 
.options(selectinload(CalendarEntry.calendar)) + ) + return [_entry_to_dto(e) for e in result.scalars().all()] + + async def user_tickets( + self, session: AsyncSession, *, user_id: int, + ) -> list[TicketDTO]: + result = await session.execute( + select(Ticket) + .where( + Ticket.user_id == user_id, + Ticket.state != "cancelled", + ) + .order_by(Ticket.created_at.desc()) + .options( + selectinload(Ticket.entry).selectinload(CalendarEntry.calendar), + selectinload(Ticket.ticket_type), + ) + ) + return [_ticket_to_dto(t) for t in result.scalars().all()] + + async def user_bookings( + self, session: AsyncSession, *, user_id: int, + ) -> list[CalendarEntryDTO]: + result = await session.execute( + select(CalendarEntry) + .where( + CalendarEntry.user_id == user_id, + CalendarEntry.deleted_at.is_(None), + CalendarEntry.state.in_(("ordered", "provisional", "confirmed")), + ) + .order_by(CalendarEntry.start_at.desc()) + .options(selectinload(CalendarEntry.calendar)) + ) + return [_entry_to_dto(e) for e in result.scalars().all()] + + # -- batch reads (not in protocol — convenience for blog service) --------- + + async def confirmed_entries_for_posts( + self, session: AsyncSession, post_ids: list[int], + ) -> dict[int, list[CalendarEntryDTO]]: + """Return confirmed entries grouped by post_id for a batch of posts.""" + if not post_ids: + return {} + + result = await session.execute( + select(CalendarEntry, CalendarEntryPost.content_id) + .join(CalendarEntryPost, CalendarEntry.id == CalendarEntryPost.entry_id) + .options(selectinload(CalendarEntry.calendar)) + .where( + CalendarEntryPost.content_type == "post", + CalendarEntryPost.content_id.in_(post_ids), + CalendarEntryPost.deleted_at.is_(None), + CalendarEntry.deleted_at.is_(None), + CalendarEntry.state == "confirmed", + ) + .order_by(CalendarEntry.start_at.asc()) + ) + + entries_by_post: dict[int, list[CalendarEntryDTO]] = {} + for entry, post_id in result: + entries_by_post.setdefault(post_id, 
[]).append(_entry_to_dto(entry)) + return entries_by_post + + # -- writes --------------------------------------------------------------- + + async def adopt_entries_for_user( + self, session: AsyncSession, user_id: int, session_id: str, + ) -> None: + """Adopt anonymous calendar entries for a logged-in user. + + Only deletes stale *pending* entries for the user — confirmed/ordered + entries must be preserved. + """ + await session.execute( + update(CalendarEntry) + .where( + CalendarEntry.deleted_at.is_(None), + CalendarEntry.user_id == user_id, + CalendarEntry.state == "pending", + ) + .values(deleted_at=func.now()) + ) + cal_result = await session.execute( + select(CalendarEntry).where( + CalendarEntry.deleted_at.is_(None), + CalendarEntry.session_id == session_id, + ) + ) + for entry in cal_result.scalars().all(): + entry.user_id = user_id + + async def claim_entries_for_order( + self, session: AsyncSession, order_id: int, user_id: int | None, + session_id: str | None, page_post_id: int | None, + ) -> None: + """Mark pending CalendarEntries as 'ordered' and set order_id.""" + filters = [ + CalendarEntry.deleted_at.is_(None), + CalendarEntry.state == "pending", + ] + if user_id is not None: + filters.append(CalendarEntry.user_id == user_id) + elif session_id is not None: + filters.append(CalendarEntry.session_id == session_id) + + if page_post_id is not None: + cal_ids = select(Calendar.id).where( + Calendar.container_type == "page", + Calendar.container_id == page_post_id, + Calendar.deleted_at.is_(None), + ).scalar_subquery() + filters.append(CalendarEntry.calendar_id.in_(cal_ids)) + + await session.execute( + update(CalendarEntry) + .where(*filters) + .values(state="ordered", order_id=order_id) + ) + + async def confirm_entries_for_order( + self, session: AsyncSession, order_id: int, user_id: int | None, + session_id: str | None, + ) -> None: + """Mark ordered CalendarEntries as 'provisional'.""" + filters = [ + CalendarEntry.deleted_at.is_(None), + 
CalendarEntry.state == "ordered", + CalendarEntry.order_id == order_id, + ] + if user_id is not None: + filters.append(CalendarEntry.user_id == user_id) + elif session_id is not None: + filters.append(CalendarEntry.session_id == session_id) + + await session.execute( + update(CalendarEntry) + .where(*filters) + .values(state="provisional") + ) + + # -- ticket methods ------------------------------------------------------- + + def _ticket_query_options(self): + return [ + selectinload(Ticket.entry).selectinload(CalendarEntry.calendar), + selectinload(Ticket.ticket_type), + ] + + async def pending_tickets( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + ) -> list[TicketDTO]: + """Reserved tickets for the given identity (cart line items).""" + filters = [Ticket.state == "reserved"] + if user_id is not None: + filters.append(Ticket.user_id == user_id) + elif session_id is not None: + filters.append(Ticket.session_id == session_id) + else: + return [] + + result = await session.execute( + select(Ticket) + .where(*filters) + .order_by(Ticket.created_at.asc()) + .options(*self._ticket_query_options()) + ) + return [_ticket_to_dto(t) for t in result.scalars().all()] + + async def tickets_for_page( + self, session: AsyncSession, page_id: int, *, + user_id: int | None, session_id: str | None, + ) -> list[TicketDTO]: + """Reserved tickets scoped to a page (via entry → calendar → container_id).""" + cal_ids = select(Calendar.id).where( + Calendar.container_type == "page", + Calendar.container_id == page_id, + Calendar.deleted_at.is_(None), + ).scalar_subquery() + + entry_ids = select(CalendarEntry.id).where( + CalendarEntry.calendar_id.in_(cal_ids), + CalendarEntry.deleted_at.is_(None), + ).scalar_subquery() + + filters = [ + Ticket.state == "reserved", + Ticket.entry_id.in_(entry_ids), + ] + if user_id is not None: + filters.append(Ticket.user_id == user_id) + elif session_id is not None: + filters.append(Ticket.session_id == session_id) + 
else: + return [] + + result = await session.execute( + select(Ticket) + .where(*filters) + .order_by(Ticket.created_at.asc()) + .options(*self._ticket_query_options()) + ) + return [_ticket_to_dto(t) for t in result.scalars().all()] + + async def claim_tickets_for_order( + self, session: AsyncSession, order_id: int, user_id: int | None, + session_id: str | None, page_post_id: int | None, + ) -> None: + """Set order_id on reserved tickets at checkout.""" + filters = [Ticket.state == "reserved"] + if user_id is not None: + filters.append(Ticket.user_id == user_id) + elif session_id is not None: + filters.append(Ticket.session_id == session_id) + + if page_post_id is not None: + cal_ids = select(Calendar.id).where( + Calendar.container_type == "page", + Calendar.container_id == page_post_id, + Calendar.deleted_at.is_(None), + ).scalar_subquery() + entry_ids = select(CalendarEntry.id).where( + CalendarEntry.calendar_id.in_(cal_ids), + CalendarEntry.deleted_at.is_(None), + ).scalar_subquery() + filters.append(Ticket.entry_id.in_(entry_ids)) + + await session.execute( + update(Ticket).where(*filters).values(order_id=order_id) + ) + + async def confirm_tickets_for_order( + self, session: AsyncSession, order_id: int, + ) -> None: + """Reserved → confirmed on payment.""" + await session.execute( + update(Ticket) + .where(Ticket.order_id == order_id, Ticket.state == "reserved") + .values(state="confirmed") + ) + + async def get_tickets_for_order( + self, session: AsyncSession, order_id: int, + ) -> list[TicketDTO]: + """Tickets for a given order (checkout return display).""" + result = await session.execute( + select(Ticket) + .where(Ticket.order_id == order_id) + .order_by(Ticket.created_at.asc()) + .options(*self._ticket_query_options()) + ) + return [_ticket_to_dto(t) for t in result.scalars().all()] + + async def adopt_tickets_for_user( + self, session: AsyncSession, user_id: int, session_id: str, + ) -> None: + """Migrate anonymous reserved tickets to user on login.""" 
+ result = await session.execute( + select(Ticket).where( + Ticket.session_id == session_id, + Ticket.state == "reserved", + ) + ) + for ticket in result.scalars().all(): + ticket.user_id = user_id + + async def adjust_ticket_quantity( + self, session: AsyncSession, entry_id: int, count: int, *, + user_id: int | None, session_id: str | None, + ticket_type_id: int | None = None, + ) -> int: + """Adjust reserved ticket count to target. Returns new count.""" + import uuid + + count = max(count, 0) + + # Current reserved count + filters = [ + Ticket.entry_id == entry_id, + Ticket.state == "reserved", + ] + if user_id is not None: + filters.append(Ticket.user_id == user_id) + elif session_id is not None: + filters.append(Ticket.session_id == session_id) + else: + return 0 + if ticket_type_id is not None: + filters.append(Ticket.ticket_type_id == ticket_type_id) + + current = await session.scalar( + select(func.count(Ticket.id)).where(*filters) + ) or 0 + + if count > current: + # Create tickets + for _ in range(count - current): + ticket = Ticket( + entry_id=entry_id, + ticket_type_id=ticket_type_id, + user_id=user_id, + session_id=session_id, + code=uuid.uuid4().hex, + state="reserved", + ) + session.add(ticket) + await session.flush() + elif count < current: + # Cancel newest tickets + to_cancel = current - count + result = await session.execute( + select(Ticket) + .where(*filters) + .order_by(Ticket.created_at.desc()) + .limit(to_cancel) + ) + for ticket in result.scalars().all(): + ticket.state = "cancelled" + await session.flush() + + return count diff --git a/shared/services/cart_impl.py b/shared/services/cart_impl.py new file mode 100644 index 0000000..1438bfa --- /dev/null +++ b/shared/services/cart_impl.py @@ -0,0 +1,162 @@ +"""SQL-backed CartService implementation. + +Queries ``shared.models.market.CartItem`` — only this module may write +to cart-domain tables on behalf of other domains. 
+""" +from __future__ import annotations + +from decimal import Decimal + +from sqlalchemy import select, update, func +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from shared.models.market import CartItem +from shared.models.market_place import MarketPlace +from shared.models.calendars import CalendarEntry, Calendar +from shared.contracts.dtos import CartItemDTO, CartSummaryDTO + + +def _item_to_dto(ci: CartItem) -> CartItemDTO: + product = ci.product + return CartItemDTO( + id=ci.id, + product_id=ci.product_id, + quantity=ci.quantity, + product_title=product.title if product else None, + product_slug=product.slug if product else None, + product_image=product.image if product else None, + unit_price=Decimal(str(product.special_price or product.regular_price or 0)) if product else None, + market_place_id=ci.market_place_id, + ) + + +class SqlCartService: + + async def cart_summary( + self, session: AsyncSession, *, + user_id: int | None, session_id: str | None, + page_slug: str | None = None, + ) -> CartSummaryDTO: + """Build a lightweight cart summary for the current identity.""" + # Resolve page filter + page_post_id: int | None = None + if page_slug: + from shared.services.registry import services + post = await services.blog.get_post_by_slug(session, page_slug) + if post and post.is_page: + page_post_id = post.id + + # --- product cart --- + cart_q = select(CartItem).where(CartItem.deleted_at.is_(None)) + if user_id is not None: + cart_q = cart_q.where(CartItem.user_id == user_id) + elif session_id is not None: + cart_q = cart_q.where(CartItem.session_id == session_id) + else: + return CartSummaryDTO() + + if page_post_id is not None: + mp_ids = select(MarketPlace.id).where( + MarketPlace.container_type == "page", + MarketPlace.container_id == page_post_id, + MarketPlace.deleted_at.is_(None), + ).scalar_subquery() + cart_q = cart_q.where(CartItem.market_place_id.in_(mp_ids)) + + cart_q = 
cart_q.options(selectinload(CartItem.product)) + result = await session.execute(cart_q) + cart_items = result.scalars().all() + + count = sum(ci.quantity for ci in cart_items) + total = sum( + Decimal(str(ci.product.special_price or ci.product.regular_price or 0)) * ci.quantity + for ci in cart_items + if ci.product and (ci.product.special_price or ci.product.regular_price) + ) + + # --- calendar entries --- + from shared.services.registry import services + if page_post_id is not None: + cal_entries = await services.calendar.entries_for_page( + session, page_post_id, + user_id=user_id, + session_id=session_id, + ) + else: + cal_entries = await services.calendar.pending_entries( + session, + user_id=user_id, + session_id=session_id, + ) + + calendar_count = len(cal_entries) + calendar_total = sum(Decimal(str(e.cost or 0)) for e in cal_entries if e.cost is not None) + + # --- tickets --- + if page_post_id is not None: + tickets = await services.calendar.tickets_for_page( + session, page_post_id, + user_id=user_id, + session_id=session_id, + ) + else: + tickets = await services.calendar.pending_tickets( + session, + user_id=user_id, + session_id=session_id, + ) + + ticket_count = len(tickets) + ticket_total = sum(Decimal(str(t.price or 0)) for t in tickets) + + items = [_item_to_dto(ci) for ci in cart_items] + + return CartSummaryDTO( + count=count, + total=total, + calendar_count=calendar_count, + calendar_total=calendar_total, + items=items, + ticket_count=ticket_count, + ticket_total=ticket_total, + ) + + async def cart_items( + self, session: AsyncSession, *, + user_id: int | None, session_id: str | None, + ) -> list[CartItemDTO]: + cart_q = select(CartItem).where(CartItem.deleted_at.is_(None)) + if user_id is not None: + cart_q = cart_q.where(CartItem.user_id == user_id) + elif session_id is not None: + cart_q = cart_q.where(CartItem.session_id == session_id) + else: + return [] + + cart_q = 
cart_q.options(selectinload(CartItem.product)).order_by(CartItem.created_at.desc()) + result = await session.execute(cart_q) + return [_item_to_dto(ci) for ci in result.scalars().all()] + + async def adopt_cart_for_user( + self, session: AsyncSession, user_id: int, session_id: str, + ) -> None: + """Adopt anonymous cart items for a logged-in user.""" + anon_result = await session.execute( + select(CartItem).where( + CartItem.deleted_at.is_(None), + CartItem.user_id.is_(None), + CartItem.session_id == session_id, + ) + ) + anon_items = anon_result.scalars().all() + + if anon_items: + # Soft-delete existing user cart + await session.execute( + update(CartItem) + .where(CartItem.deleted_at.is_(None), CartItem.user_id == user_id) + .values(deleted_at=func.now()) + ) + for ci in anon_items: + ci.user_id = user_id diff --git a/shared/services/federation_impl.py b/shared/services/federation_impl.py new file mode 100644 index 0000000..fa33d7d --- /dev/null +++ b/shared/services/federation_impl.py @@ -0,0 +1,1654 @@ +"""SQL-backed FederationService implementation. + +Queries ``shared.models.federation`` — only this module may read/write +federation-domain tables on behalf of other domains. 
+""" +from __future__ import annotations + +import os +import uuid +from datetime import datetime, timezone + +from sqlalchemy import select, func, delete +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.models.federation import ( + ActorProfile, APActivity, APFollower, + RemoteActor, APFollowing, APRemotePost, APLocalPost, + APInteraction, APNotification, +) +from shared.contracts.dtos import ( + ActorProfileDTO, APActivityDTO, APFollowerDTO, + RemoteActorDTO, RemotePostDTO, TimelineItemDTO, NotificationDTO, +) + + +def _domain() -> str: + return os.getenv("AP_DOMAIN", "federation.rose-ash.com") + + +def _get_origin_app() -> str | None: + try: + from quart import current_app + return current_app.name + except (ImportError, RuntimeError): + return None + + +def _actor_to_dto(actor: ActorProfile) -> ActorProfileDTO: + domain = _domain() + username = actor.preferred_username + return ActorProfileDTO( + id=actor.id, + user_id=actor.user_id, + preferred_username=username, + public_key_pem=actor.public_key_pem, + display_name=actor.display_name, + summary=actor.summary, + inbox_url=f"https://{domain}/users/{username}/inbox", + outbox_url=f"https://{domain}/users/{username}/outbox", + created_at=actor.created_at, + ) + + +def _activity_to_dto(a: APActivity) -> APActivityDTO: + return APActivityDTO( + id=a.id, + activity_id=a.activity_id, + activity_type=a.activity_type, + actor_profile_id=a.actor_profile_id, + object_type=a.object_type, + object_data=a.object_data, + published=a.published, + is_local=a.is_local, + source_type=a.source_type, + source_id=a.source_id, + ipfs_cid=a.ipfs_cid, + ) + + +def _follower_to_dto(f: APFollower) -> APFollowerDTO: + return APFollowerDTO( + id=f.id, + actor_profile_id=f.actor_profile_id, + follower_acct=f.follower_acct, + follower_inbox=f.follower_inbox, + follower_actor_url=f.follower_actor_url, + created_at=f.created_at, + app_domain=f.app_domain, + ) + + +def _remote_actor_to_dto(r: RemoteActor) -> RemoteActorDTO: + 
return RemoteActorDTO( + id=r.id, + actor_url=r.actor_url, + inbox_url=r.inbox_url, + preferred_username=r.preferred_username, + domain=r.domain, + display_name=r.display_name, + summary=r.summary, + icon_url=r.icon_url, + shared_inbox_url=r.shared_inbox_url, + public_key_pem=r.public_key_pem, + ) + + +def _remote_post_to_dto( + p: APRemotePost, actor: RemoteActor | None = None, +) -> RemotePostDTO: + return RemotePostDTO( + id=p.id, + remote_actor_id=p.remote_actor_id, + object_id=p.object_id, + content=p.content or "", + summary=p.summary, + url=p.url, + attachments=p.attachment_data or [], + tags=p.tag_data or [], + published=p.published, + actor=_remote_actor_to_dto(actor) if actor else None, + ) + + +class SqlFederationService: + # -- Actor management ----------------------------------------------------- + + async def get_actor_by_username( + self, session: AsyncSession, username: str, + ) -> ActorProfileDTO | None: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == username) + ) + ).scalar_one_or_none() + return _actor_to_dto(actor) if actor else None + + async def get_actor_by_user_id( + self, session: AsyncSession, user_id: int, + ) -> ActorProfileDTO | None: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.user_id == user_id) + ) + ).scalar_one_or_none() + return _actor_to_dto(actor) if actor else None + + async def create_actor( + self, session: AsyncSession, user_id: int, preferred_username: str, + display_name: str | None = None, summary: str | None = None, + ) -> ActorProfileDTO: + from shared.utils.http_signatures import generate_rsa_keypair + + private_pem, public_pem = generate_rsa_keypair() + + actor = ActorProfile( + user_id=user_id, + preferred_username=preferred_username, + display_name=display_name, + summary=summary, + public_key_pem=public_pem, + private_key_pem=private_pem, + ) + session.add(actor) + await session.flush() + return _actor_to_dto(actor) + + async 
def username_available( + self, session: AsyncSession, username: str, + ) -> bool: + count = ( + await session.execute( + select(func.count(ActorProfile.id)).where( + ActorProfile.preferred_username == username + ) + ) + ).scalar() or 0 + return count == 0 + + # -- Publishing ----------------------------------------------------------- + + async def publish_activity( + self, session: AsyncSession, *, + actor_user_id: int, + activity_type: str, + object_type: str, + object_data: dict, + source_type: str | None = None, + source_id: int | None = None, + ) -> APActivityDTO: + # Look up actor + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.user_id == actor_user_id) + ) + ).scalar_one_or_none() + if actor is None: + raise ValueError(f"No ActorProfile for user_id={actor_user_id}") + + domain = _domain() + username = actor.preferred_username + activity_uri = f"https://{domain}/users/{username}/activities/{uuid.uuid4()}" + + now = datetime.now(timezone.utc) + + actor_url = f"https://{domain}/users/{username}" + + activity = APActivity( + activity_id=activity_uri, + activity_type=activity_type, + actor_profile_id=actor.id, + actor_uri=actor_url, + object_type=object_type, + object_data=object_data, + published=now, + is_local=True, + source_type=source_type, + source_id=source_id, + visibility="public", + process_state="pending", + origin_app=_get_origin_app(), + ) + session.add(activity) + await session.flush() + + # Store activity JSON on IPFS (best-effort — don't fail publish if IPFS down) + try: + from shared.utils.ipfs_client import add_json, is_available + if await is_available(): + activity_json = { + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + ], + "id": activity_uri, + "type": activity_type, + "actor": actor_url, + "published": now.isoformat(), + "object": { + "type": object_type, + **object_data, + }, + } + cid = await add_json(activity_json) + activity.ipfs_cid = cid + await 
session.flush() + except Exception: + pass # IPFS failure is non-fatal + + return _activity_to_dto(activity) + + # -- Queries -------------------------------------------------------------- + + async def get_activity( + self, session: AsyncSession, activity_id: str, + ) -> APActivityDTO | None: + a = ( + await session.execute( + select(APActivity).where(APActivity.activity_id == activity_id) + ) + ).scalar_one_or_none() + return _activity_to_dto(a) if a else None + + async def get_outbox( + self, session: AsyncSession, username: str, + page: int = 1, per_page: int = 20, + origin_app: str | None = None, + ) -> tuple[list[APActivityDTO], int]: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == username) + ) + ).scalar_one_or_none() + if actor is None: + return [], 0 + + filters = [ + APActivity.actor_profile_id == actor.id, + APActivity.is_local == True, # noqa: E712 + ] + if origin_app is not None: + filters.append(APActivity.origin_app == origin_app) + + total = ( + await session.execute( + select(func.count(APActivity.id)).where(*filters) + ) + ).scalar() or 0 + + offset = (page - 1) * per_page + result = await session.execute( + select(APActivity) + .where(*filters) + .order_by(APActivity.published.desc()) + .limit(per_page) + .offset(offset) + ) + return [_activity_to_dto(a) for a in result.scalars().all()], total + + async def get_activity_for_source( + self, session: AsyncSession, source_type: str, source_id: int, + ) -> APActivityDTO | None: + a = ( + await session.execute( + select(APActivity).where( + APActivity.source_type == source_type, + APActivity.source_id == source_id, + ).order_by(APActivity.created_at.desc()) + .limit(1) + ) + ).scalars().first() + return _activity_to_dto(a) if a else None + + async def count_activities_for_source( + self, session: AsyncSession, source_type: str, source_id: int, + *, activity_type: str, + ) -> int: + from sqlalchemy import func + result = await session.execute( + 
select(func.count()).select_from(APActivity).where( + APActivity.source_type == source_type, + APActivity.source_id == source_id, + APActivity.activity_type == activity_type, + ) + ) + return result.scalar_one() + + # -- Followers ------------------------------------------------------------ + + async def get_followers( + self, session: AsyncSession, username: str, + app_domain: str | None = None, + ) -> list[APFollowerDTO]: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == username) + ) + ).scalar_one_or_none() + if actor is None: + return [] + + q = select(APFollower).where(APFollower.actor_profile_id == actor.id) + if app_domain is not None: + q = q.where(APFollower.app_domain == app_domain) + + result = await session.execute(q) + return [_follower_to_dto(f) for f in result.scalars().all()] + + async def add_follower( + self, session: AsyncSession, username: str, + follower_acct: str, follower_inbox: str, follower_actor_url: str, + follower_public_key: str | None = None, + app_domain: str = "federation", + ) -> APFollowerDTO: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == username) + ) + ).scalar_one_or_none() + if actor is None: + raise ValueError(f"Actor not found: {username}") + + # Upsert: update if already following this (actor, acct, app_domain) + existing = ( + await session.execute( + select(APFollower).where( + APFollower.actor_profile_id == actor.id, + APFollower.follower_acct == follower_acct, + APFollower.app_domain == app_domain, + ) + ) + ).scalar_one_or_none() + + if existing: + existing.follower_inbox = follower_inbox + existing.follower_actor_url = follower_actor_url + existing.follower_public_key = follower_public_key + await session.flush() + return _follower_to_dto(existing) + + follower = APFollower( + actor_profile_id=actor.id, + follower_acct=follower_acct, + follower_inbox=follower_inbox, + follower_actor_url=follower_actor_url, + 
follower_public_key=follower_public_key, + app_domain=app_domain, + ) + session.add(follower) + await session.flush() + return _follower_to_dto(follower) + + async def remove_follower( + self, session: AsyncSession, username: str, follower_acct: str, + app_domain: str = "federation", + ) -> bool: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == username) + ) + ).scalar_one_or_none() + if actor is None: + return False + + result = await session.execute( + delete(APFollower).where( + APFollower.actor_profile_id == actor.id, + APFollower.follower_acct == follower_acct, + APFollower.app_domain == app_domain, + ) + ) + return result.rowcount > 0 + + async def get_followers_paginated( + self, session: AsyncSession, username: str, + page: int = 1, per_page: int = 20, + ) -> tuple[list[RemoteActorDTO], int]: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == username) + ) + ).scalar_one_or_none() + if actor is None: + return [], 0 + + total = ( + await session.execute( + select(func.count(APFollower.id)).where( + APFollower.actor_profile_id == actor.id, + ) + ) + ).scalar() or 0 + + offset = (page - 1) * per_page + followers = ( + await session.execute( + select(APFollower) + .where(APFollower.actor_profile_id == actor.id) + .order_by(APFollower.created_at.desc()) + .limit(per_page) + .offset(offset) + ) + ).scalars().all() + + results: list[RemoteActorDTO] = [] + for f in followers: + # Try to resolve from cached remote actors first + remote = ( + await session.execute( + select(RemoteActor).where( + RemoteActor.actor_url == f.follower_actor_url, + ) + ) + ).scalar_one_or_none() + if remote: + results.append(_remote_actor_to_dto(remote)) + else: + # Synthesise a minimal DTO from follower data + from urllib.parse import urlparse + domain = urlparse(f.follower_actor_url).netloc + results.append(RemoteActorDTO( + id=0, + actor_url=f.follower_actor_url, + 
inbox_url=f.follower_inbox, + preferred_username=f.follower_acct.split("@")[0] if "@" in f.follower_acct else f.follower_acct, + domain=domain, + display_name=None, + summary=None, + icon_url=None, + )) + return results, total + + # -- Remote actors -------------------------------------------------------- + + async def get_or_fetch_remote_actor( + self, session: AsyncSession, actor_url: str, + ) -> RemoteActorDTO | None: + # Check cache first + row = ( + await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == actor_url) + ) + ).scalar_one_or_none() + if row: + return _remote_actor_to_dto(row) + + # Fetch from remote + import httpx + try: + async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client: + resp = await client.get( + actor_url, + headers={"Accept": "application/activity+json"}, + ) + if resp.status_code != 200: + return None + data = resp.json() + except Exception: + return None + + return await self._upsert_remote_actor(session, actor_url, data) + + async def _upsert_remote_actor( + self, session: AsyncSession, actor_url: str, data: dict, + ) -> RemoteActorDTO | None: + from urllib.parse import urlparse + domain = urlparse(actor_url).netloc + + icon_url = None + icon = data.get("icon") + if isinstance(icon, dict): + icon_url = icon.get("url") + + pub_key = (data.get("publicKey") or {}).get("publicKeyPem") + + # Upsert + existing = ( + await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == actor_url) + ) + ).scalar_one_or_none() + + now = datetime.now(timezone.utc) + if existing: + existing.inbox_url = data.get("inbox", existing.inbox_url) + existing.shared_inbox_url = (data.get("endpoints") or {}).get("sharedInbox") + existing.preferred_username = data.get("preferredUsername", existing.preferred_username) + existing.display_name = data.get("name") + existing.summary = data.get("summary") + existing.icon_url = icon_url + existing.public_key_pem = pub_key + existing.fetched_at = now + await 
session.flush() + return _remote_actor_to_dto(existing) + + row = RemoteActor( + actor_url=actor_url, + inbox_url=data.get("inbox", ""), + shared_inbox_url=(data.get("endpoints") or {}).get("sharedInbox"), + preferred_username=data.get("preferredUsername", ""), + display_name=data.get("name"), + summary=data.get("summary"), + icon_url=icon_url, + public_key_pem=pub_key, + domain=domain, + fetched_at=now, + ) + session.add(row) + await session.flush() + return _remote_actor_to_dto(row) + + async def search_remote_actor( + self, session: AsyncSession, acct: str, + ) -> RemoteActorDTO | None: + from shared.utils.webfinger import resolve_actor + data = await resolve_actor(acct) + if not data: + return None + + actor_url = data.get("id") + if not actor_url: + return None + + return await self._upsert_remote_actor(session, actor_url, data) + + async def search_actors( + self, session: AsyncSession, query: str, page: int = 1, limit: int = 20, + ) -> tuple[list[RemoteActorDTO], int]: + from sqlalchemy import or_ + + pattern = f"%{query}%" + offset = (page - 1) * limit + + # WebFinger resolve for @user@domain queries (first page only) + webfinger_result: RemoteActorDTO | None = None + if page == 1 and "@" in query: + webfinger_result = await self.search_remote_actor(session, query) + + # Search cached remote actors + remote_filter = or_( + RemoteActor.preferred_username.ilike(pattern), + RemoteActor.display_name.ilike(pattern), + RemoteActor.domain.ilike(pattern), + ) + remote_total = ( + await session.execute( + select(func.count(RemoteActor.id)).where(remote_filter) + ) + ).scalar() or 0 + + # Search local actor profiles + local_filter = or_( + ActorProfile.preferred_username.ilike(pattern), + ActorProfile.display_name.ilike(pattern), + ) + local_total = ( + await session.execute( + select(func.count(ActorProfile.id)).where(local_filter) + ) + ).scalar() or 0 + + total = remote_total + local_total + + # Fetch remote actors page + remote_rows = ( + await session.execute( + 
select(RemoteActor) + .where(remote_filter) + .order_by(RemoteActor.preferred_username) + .limit(limit) + .offset(offset) + ) + ).scalars().all() + + results: list[RemoteActorDTO] = [_remote_actor_to_dto(r) for r in remote_rows] + + # Fill remaining slots with local actors + remaining = limit - len(results) + local_offset = max(0, offset - remote_total) + if remaining > 0 and offset + len(results) >= remote_total: + domain = _domain() + local_rows = ( + await session.execute( + select(ActorProfile) + .where(local_filter) + .order_by(ActorProfile.preferred_username) + .limit(remaining) + .offset(local_offset) + ) + ).scalars().all() + for lp in local_rows: + results.append(RemoteActorDTO( + id=0, + actor_url=f"https://{domain}/users/{lp.preferred_username}", + inbox_url=f"https://{domain}/users/{lp.preferred_username}/inbox", + preferred_username=lp.preferred_username, + domain=domain, + display_name=lp.display_name, + summary=lp.summary, + icon_url=None, + )) + + # Prepend WebFinger result (deduped) + if webfinger_result: + existing_urls = {r.actor_url for r in results} + if webfinger_result.actor_url not in existing_urls: + results.insert(0, webfinger_result) + total += 1 + + return results, total + + # -- Following (outbound) ------------------------------------------------- + + async def send_follow( + self, session: AsyncSession, local_username: str, remote_actor_url: str, + ) -> None: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == local_username) + ) + ).scalar_one_or_none() + if not actor: + raise ValueError(f"Actor not found: {local_username}") + + # Get or fetch remote actor + remote_dto = await self.get_or_fetch_remote_actor(session, remote_actor_url) + if not remote_dto: + raise ValueError(f"Could not resolve remote actor: {remote_actor_url}") + + remote = ( + await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == remote_actor_url) + ) + ).scalar_one() + + # Check for existing 
follow + existing = ( + await session.execute( + select(APFollowing).where( + APFollowing.actor_profile_id == actor.id, + APFollowing.remote_actor_id == remote.id, + ) + ) + ).scalar_one_or_none() + + if existing: + return # already following or pending + + follow = APFollowing( + actor_profile_id=actor.id, + remote_actor_id=remote.id, + state="pending", + ) + session.add(follow) + await session.flush() + + # Send Follow activity + domain = _domain() + actor_url = f"https://{domain}/users/{local_username}" + follow_id = f"{actor_url}/activities/{uuid.uuid4()}" + + follow_activity = { + "@context": "https://www.w3.org/ns/activitystreams", + "id": follow_id, + "type": "Follow", + "actor": actor_url, + "object": remote_actor_url, + } + + import json + import httpx + from shared.utils.http_signatures import sign_request + from urllib.parse import urlparse + + body_bytes = json.dumps(follow_activity).encode() + parsed = urlparse(remote.inbox_url) + headers = sign_request( + private_key_pem=actor.private_key_pem, + key_id=f"{actor_url}#main-key", + method="POST", + path=parsed.path, + host=parsed.netloc, + body=body_bytes, + ) + headers["Content-Type"] = "application/activity+json" + + try: + async with httpx.AsyncClient(timeout=15) as client: + await client.post(remote.inbox_url, content=body_bytes, headers=headers) + except Exception: + import logging + logging.getLogger(__name__).exception("Failed to send Follow to %s", remote.inbox_url) + + async def get_following( + self, session: AsyncSession, username: str, + page: int = 1, per_page: int = 20, + ) -> tuple[list[RemoteActorDTO], int]: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == username) + ) + ).scalar_one_or_none() + if not actor: + return [], 0 + + total = ( + await session.execute( + select(func.count(APFollowing.id)).where( + APFollowing.actor_profile_id == actor.id, + APFollowing.state == "accepted", + ) + ) + ).scalar() or 0 + + offset = (page - 1) * 
per_page + result = await session.execute( + select(RemoteActor) + .join(APFollowing, APFollowing.remote_actor_id == RemoteActor.id) + .where( + APFollowing.actor_profile_id == actor.id, + APFollowing.state == "accepted", + ) + .order_by(APFollowing.accepted_at.desc()) + .limit(per_page) + .offset(offset) + ) + return [_remote_actor_to_dto(r) for r in result.scalars().all()], total + + async def accept_follow_response( + self, session: AsyncSession, local_username: str, remote_actor_url: str, + ) -> None: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == local_username) + ) + ).scalar_one_or_none() + if not actor: + return + + remote = ( + await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == remote_actor_url) + ) + ).scalar_one_or_none() + if not remote: + return + + follow = ( + await session.execute( + select(APFollowing).where( + APFollowing.actor_profile_id == actor.id, + APFollowing.remote_actor_id == remote.id, + APFollowing.state == "pending", + ) + ) + ).scalar_one_or_none() + if follow: + follow.state = "accepted" + follow.accepted_at = datetime.now(timezone.utc) + await session.flush() + + async def unfollow( + self, session: AsyncSession, local_username: str, remote_actor_url: str, + ) -> None: + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.preferred_username == local_username) + ) + ).scalar_one_or_none() + if not actor: + return + + remote = ( + await session.execute( + select(RemoteActor).where(RemoteActor.actor_url == remote_actor_url) + ) + ).scalar_one_or_none() + if not remote: + return + + follow = ( + await session.execute( + select(APFollowing).where( + APFollowing.actor_profile_id == actor.id, + APFollowing.remote_actor_id == remote.id, + ) + ) + ).scalar_one_or_none() + if not follow: + return + + await session.delete(follow) + await session.flush() + + # Send Undo(Follow) to remote + domain = _domain() + actor_url = 
f"https://{domain}/users/{local_username}" + undo_id = f"{actor_url}/activities/{uuid.uuid4()}" + + undo_activity = { + "@context": "https://www.w3.org/ns/activitystreams", + "id": undo_id, + "type": "Undo", + "actor": actor_url, + "object": { + "type": "Follow", + "actor": actor_url, + "object": remote_actor_url, + }, + } + + import json + import httpx + from shared.utils.http_signatures import sign_request + from urllib.parse import urlparse + + body_bytes = json.dumps(undo_activity).encode() + parsed = urlparse(remote.inbox_url) + headers = sign_request( + private_key_pem=actor.private_key_pem, + key_id=f"{actor_url}#main-key", + method="POST", + path=parsed.path, + host=parsed.netloc, + body=body_bytes, + ) + headers["Content-Type"] = "application/activity+json" + + try: + async with httpx.AsyncClient(timeout=15) as client: + await client.post(remote.inbox_url, content=body_bytes, headers=headers) + except Exception: + import logging + logging.getLogger(__name__).exception("Failed to send Undo Follow to %s", remote.inbox_url) + + # -- Remote posts --------------------------------------------------------- + + async def ingest_remote_post( + self, session: AsyncSession, remote_actor_id: int, + activity_json: dict, object_json: dict, + ) -> None: + activity_id_str = activity_json.get("id", "") + object_id_str = object_json.get("id", "") + if not object_id_str: + return + + # Upsert + existing = ( + await session.execute( + select(APRemotePost).where(APRemotePost.object_id == object_id_str) + ) + ).scalar_one_or_none() + + published = None + pub_str = object_json.get("published") + if pub_str: + try: + published = datetime.fromisoformat(pub_str.replace("Z", "+00:00")) + except (ValueError, AttributeError): + pass + + # Sanitise HTML content + content = object_json.get("content", "") + + if existing: + existing.content = content + existing.summary = object_json.get("summary") + existing.url = object_json.get("url") + existing.attachment_data = 
object_json.get("attachment") + existing.tag_data = object_json.get("tag") + existing.in_reply_to = object_json.get("inReplyTo") + existing.conversation = object_json.get("conversation") + existing.published = published or existing.published + existing.fetched_at = datetime.now(timezone.utc) + await session.flush() + return + + post = APRemotePost( + remote_actor_id=remote_actor_id, + activity_id=activity_id_str, + object_id=object_id_str, + object_type=object_json.get("type", "Note"), + content=content, + summary=object_json.get("summary"), + url=object_json.get("url"), + attachment_data=object_json.get("attachment"), + tag_data=object_json.get("tag"), + in_reply_to=object_json.get("inReplyTo"), + conversation=object_json.get("conversation"), + published=published, + ) + session.add(post) + await session.flush() + + async def delete_remote_post( + self, session: AsyncSession, object_id: str, + ) -> None: + await session.execute( + delete(APRemotePost).where(APRemotePost.object_id == object_id) + ) + + async def get_remote_post( + self, session: AsyncSession, object_id: str, + ) -> RemotePostDTO | None: + post = ( + await session.execute( + select(APRemotePost).where(APRemotePost.object_id == object_id) + ) + ).scalar_one_or_none() + if not post: + return None + + actor = ( + await session.execute( + select(RemoteActor).where(RemoteActor.id == post.remote_actor_id) + ) + ).scalar_one_or_none() + + return _remote_post_to_dto(post, actor) + + # -- Timelines ------------------------------------------------------------ + + async def get_home_timeline( + self, session: AsyncSession, actor_profile_id: int, + before: datetime | None = None, limit: int = 20, + ) -> list[TimelineItemDTO]: + from sqlalchemy import union_all, literal_column, cast, String as SaString + from sqlalchemy.orm import aliased + + # Query 1: Remote posts from followed actors + following_subq = ( + select(APFollowing.remote_actor_id) + .where( + APFollowing.actor_profile_id == actor_profile_id, + 
APFollowing.state == "accepted", + ) + .subquery() + ) + + remote_q = ( + select( + APRemotePost.id.label("post_id"), + literal_column("'remote'").label("post_type"), + APRemotePost.content.label("content"), + APRemotePost.summary.label("summary"), + APRemotePost.url.label("url"), + APRemotePost.published.label("published"), + APRemotePost.object_id.label("object_id"), + RemoteActor.display_name.label("actor_name"), + RemoteActor.preferred_username.label("actor_username"), + RemoteActor.domain.label("actor_domain"), + RemoteActor.icon_url.label("actor_icon"), + RemoteActor.actor_url.label("actor_url"), + RemoteActor.inbox_url.label("author_inbox"), + ) + .join(RemoteActor, RemoteActor.id == APRemotePost.remote_actor_id) + .where(APRemotePost.remote_actor_id.in_(following_subq)) + ) + if before: + remote_q = remote_q.where(APRemotePost.published < before) + + # Query 2: Local activities (Create) by this actor + local_q = ( + select( + APActivity.id.label("post_id"), + literal_column("'local'").label("post_type"), + func.coalesce( + APActivity.object_data.op("->>")("content"), + literal_column("''"), + ).label("content"), + APActivity.object_data.op("->>")("summary").label("summary"), + APActivity.object_data.op("->>")("url").label("url"), + APActivity.published.label("published"), + APActivity.activity_id.label("object_id"), + func.coalesce( + ActorProfile.display_name, + ActorProfile.preferred_username, + ).label("actor_name"), + ActorProfile.preferred_username.label("actor_username"), + literal_column("NULL").label("actor_domain"), + literal_column("NULL").label("actor_icon"), + literal_column("NULL").label("actor_url"), + literal_column("NULL").label("author_inbox"), + ) + .join(ActorProfile, ActorProfile.id == APActivity.actor_profile_id) + .where( + APActivity.actor_profile_id == actor_profile_id, + APActivity.is_local == True, # noqa: E712 + APActivity.activity_type == "Create", + ) + ) + if before: + local_q = local_q.where(APActivity.published < before) + + 
# Union and sort + combined = union_all(remote_q, local_q).subquery() + result = await session.execute( + select(combined) + .order_by(combined.c.published.desc()) + .limit(limit) + ) + + items = [] + for row in result.mappings().all(): + # Look up interaction counts + user state + object_id = row["object_id"] + like_count = 0 + boost_count = 0 + liked_by_me = False + boosted_by_me = False + + if object_id: + post_type_val = row["post_type"] + post_id_val = row["post_id"] + + like_count = (await session.execute( + select(func.count(APInteraction.id)).where( + APInteraction.post_type == post_type_val, + APInteraction.post_id == post_id_val, + APInteraction.interaction_type == "like", + ) + )).scalar() or 0 + boost_count = (await session.execute( + select(func.count(APInteraction.id)).where( + APInteraction.post_type == post_type_val, + APInteraction.post_id == post_id_val, + APInteraction.interaction_type == "boost", + ) + )).scalar() or 0 + liked_by_me = bool((await session.execute( + select(APInteraction.id).where( + APInteraction.actor_profile_id == actor_profile_id, + APInteraction.post_type == post_type_val, + APInteraction.post_id == post_id_val, + APInteraction.interaction_type == "like", + ).limit(1) + )).scalar()) + boosted_by_me = bool((await session.execute( + select(APInteraction.id).where( + APInteraction.actor_profile_id == actor_profile_id, + APInteraction.post_type == post_type_val, + APInteraction.post_id == post_id_val, + APInteraction.interaction_type == "boost", + ).limit(1) + )).scalar()) + + items.append(TimelineItemDTO( + id=f"{row['post_type']}:{row['post_id']}", + post_type=row["post_type"], + content=row["content"] or "", + published=row["published"], + actor_name=row["actor_name"] or row["actor_username"] or "", + actor_username=row["actor_username"] or "", + object_id=object_id, + summary=row["summary"], + url=row["url"], + actor_domain=row["actor_domain"], + actor_icon=row["actor_icon"], + actor_url=row["actor_url"], + 
like_count=like_count, + boost_count=boost_count, + liked_by_me=liked_by_me, + boosted_by_me=boosted_by_me, + author_inbox=row["author_inbox"], + )) + return items + + async def get_public_timeline( + self, session: AsyncSession, + before: datetime | None = None, limit: int = 20, + ) -> list[TimelineItemDTO]: + # Public timeline: all local Create activities + q = ( + select(APActivity, ActorProfile) + .join(ActorProfile, ActorProfile.id == APActivity.actor_profile_id) + .where( + APActivity.is_local == True, # noqa: E712 + APActivity.activity_type == "Create", + ) + ) + if before: + q = q.where(APActivity.published < before) + q = q.order_by(APActivity.published.desc()).limit(limit) + + result = await session.execute(q) + items = [] + for activity, actor in result.all(): + content = "" + summary = None + url = None + if activity.object_data: + content = activity.object_data.get("content", "") + summary = activity.object_data.get("summary") + url = activity.object_data.get("url") + + items.append(TimelineItemDTO( + id=f"local:{activity.id}", + post_type="local", + content=content, + published=activity.published, + actor_name=actor.display_name or actor.preferred_username, + actor_username=actor.preferred_username, + object_id=activity.activity_id, + summary=summary, + url=url, + )) + return items + + async def get_actor_timeline( + self, session: AsyncSession, remote_actor_id: int, + before: datetime | None = None, limit: int = 20, + ) -> list[TimelineItemDTO]: + remote_actor = ( + await session.execute( + select(RemoteActor).where(RemoteActor.id == remote_actor_id) + ) + ).scalar_one_or_none() + if not remote_actor: + return [] + + q = ( + select(APRemotePost) + .where(APRemotePost.remote_actor_id == remote_actor_id) + ) + if before: + q = q.where(APRemotePost.published < before) + q = q.order_by(APRemotePost.published.desc()).limit(limit) + + posts = (await session.execute(q)).scalars().all() + return [ + TimelineItemDTO( + id=f"remote:{p.id}", + 
post_type="remote", + content=p.content or "", + published=p.published, + actor_name=remote_actor.display_name or remote_actor.preferred_username, + actor_username=remote_actor.preferred_username, + object_id=p.object_id, + summary=p.summary, + url=p.url, + actor_domain=remote_actor.domain, + actor_icon=remote_actor.icon_url, + actor_url=remote_actor.actor_url, + author_inbox=remote_actor.inbox_url, + ) + for p in posts + ] + + # -- Local posts ---------------------------------------------------------- + + async def create_local_post( + self, session: AsyncSession, actor_profile_id: int, + content: str, visibility: str = "public", + in_reply_to: str | None = None, + ) -> int: + now = datetime.now(timezone.utc) + post = APLocalPost( + actor_profile_id=actor_profile_id, + content=content, + visibility=visibility, + in_reply_to=in_reply_to, + published=now, + ) + session.add(post) + await session.flush() + + # Get actor for publishing + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.id == actor_profile_id) + ) + ).scalar_one() + + domain = _domain() + username = actor.preferred_username + + # Convert content to simple HTML + import html as html_mod + html_content = "".join( + f"

    {html_mod.escape(line)}

    " if line.strip() else "" + for line in content.split("\n") + ) + + object_id = f"https://{domain}/users/{username}/posts/{post.id}" + object_data = { + "id": object_id, + "type": "Note", + "content": html_content, + "url": object_id, + "attributedTo": f"https://{domain}/users/{username}", + "to": ["https://www.w3.org/ns/activitystreams#Public"], + "cc": [f"https://{domain}/users/{username}/followers"], + "published": now.isoformat(), + } + if in_reply_to: + object_data["inReplyTo"] = in_reply_to + + # Publish via existing activity system + await self.publish_activity( + session, + actor_user_id=actor.user_id, + activity_type="Create", + object_type="Note", + object_data=object_data, + source_type="local_post", + source_id=post.id, + ) + + return post.id + + async def delete_local_post( + self, session: AsyncSession, actor_profile_id: int, post_id: int, + ) -> None: + post = ( + await session.execute( + select(APLocalPost).where( + APLocalPost.id == post_id, + APLocalPost.actor_profile_id == actor_profile_id, + ) + ) + ).scalar_one_or_none() + if not post: + return + + # Get actor + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.id == actor_profile_id) + ) + ).scalar_one() + + domain = _domain() + object_id = f"https://{domain}/users/{actor.preferred_username}/posts/{post.id}" + + # Publish Delete activity + await self.publish_activity( + session, + actor_user_id=actor.user_id, + activity_type="Delete", + object_type="Note", + object_data={"id": object_id}, + source_type="local_post", + source_id=post.id, + ) + + await session.delete(post) + await session.flush() + + # -- Interactions --------------------------------------------------------- + + async def like_post( + self, session: AsyncSession, actor_profile_id: int, + object_id: str, author_inbox: str, + ) -> None: + # Determine post type and id + post_type, post_id = await self._resolve_post(session, object_id) + if not post_type: + return + + # Check for existing + existing = 
( + await session.execute( + select(APInteraction).where( + APInteraction.actor_profile_id == actor_profile_id, + APInteraction.post_type == post_type, + APInteraction.post_id == post_id, + APInteraction.interaction_type == "like", + ) + ) + ).scalar_one_or_none() + if existing: + return + + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.id == actor_profile_id) + ) + ).scalar_one() + + domain = _domain() + actor_url = f"https://{domain}/users/{actor.preferred_username}" + like_id = f"{actor_url}/activities/{uuid.uuid4()}" + + interaction = APInteraction( + actor_profile_id=actor_profile_id, + post_type=post_type, + post_id=post_id, + interaction_type="like", + activity_id=like_id, + ) + session.add(interaction) + await session.flush() + + # Send Like to author + if author_inbox: + await self._send_activity_to_inbox( + actor, { + "@context": "https://www.w3.org/ns/activitystreams", + "id": like_id, + "type": "Like", + "actor": actor_url, + "object": object_id, + }, author_inbox, + ) + + async def unlike_post( + self, session: AsyncSession, actor_profile_id: int, + object_id: str, author_inbox: str, + ) -> None: + post_type, post_id = await self._resolve_post(session, object_id) + if not post_type: + return + + interaction = ( + await session.execute( + select(APInteraction).where( + APInteraction.actor_profile_id == actor_profile_id, + APInteraction.post_type == post_type, + APInteraction.post_id == post_id, + APInteraction.interaction_type == "like", + ) + ) + ).scalar_one_or_none() + if not interaction: + return + + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.id == actor_profile_id) + ) + ).scalar_one() + + domain = _domain() + actor_url = f"https://{domain}/users/{actor.preferred_username}" + + # Send Undo(Like) + if author_inbox and interaction.activity_id: + await self._send_activity_to_inbox( + actor, { + "@context": "https://www.w3.org/ns/activitystreams", + "id": 
f"{actor_url}/activities/{uuid.uuid4()}", + "type": "Undo", + "actor": actor_url, + "object": { + "id": interaction.activity_id, + "type": "Like", + "actor": actor_url, + "object": object_id, + }, + }, author_inbox, + ) + + await session.delete(interaction) + await session.flush() + + async def boost_post( + self, session: AsyncSession, actor_profile_id: int, + object_id: str, author_inbox: str, + ) -> None: + post_type, post_id = await self._resolve_post(session, object_id) + if not post_type: + return + + existing = ( + await session.execute( + select(APInteraction).where( + APInteraction.actor_profile_id == actor_profile_id, + APInteraction.post_type == post_type, + APInteraction.post_id == post_id, + APInteraction.interaction_type == "boost", + ) + ) + ).scalar_one_or_none() + if existing: + return + + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.id == actor_profile_id) + ) + ).scalar_one() + + domain = _domain() + actor_url = f"https://{domain}/users/{actor.preferred_username}" + announce_id = f"{actor_url}/activities/{uuid.uuid4()}" + + interaction = APInteraction( + actor_profile_id=actor_profile_id, + post_type=post_type, + post_id=post_id, + interaction_type="boost", + activity_id=announce_id, + ) + session.add(interaction) + await session.flush() + + # Send Announce to author and deliver to followers via publish_activity + if author_inbox: + announce_activity = { + "@context": "https://www.w3.org/ns/activitystreams", + "id": announce_id, + "type": "Announce", + "actor": actor_url, + "object": object_id, + "to": ["https://www.w3.org/ns/activitystreams#Public"], + "cc": [f"{actor_url}/followers"], + } + await self._send_activity_to_inbox(actor, announce_activity, author_inbox) + + # Also publish as our own activity for delivery to our followers + await self.publish_activity( + session, + actor_user_id=actor.user_id, + activity_type="Announce", + object_type="Note", + object_data={"id": object_id}, + ) + + async def 
unboost_post( + self, session: AsyncSession, actor_profile_id: int, + object_id: str, author_inbox: str, + ) -> None: + post_type, post_id = await self._resolve_post(session, object_id) + if not post_type: + return + + interaction = ( + await session.execute( + select(APInteraction).where( + APInteraction.actor_profile_id == actor_profile_id, + APInteraction.post_type == post_type, + APInteraction.post_id == post_id, + APInteraction.interaction_type == "boost", + ) + ) + ).scalar_one_or_none() + if not interaction: + return + + actor = ( + await session.execute( + select(ActorProfile).where(ActorProfile.id == actor_profile_id) + ) + ).scalar_one() + + domain = _domain() + actor_url = f"https://{domain}/users/{actor.preferred_username}" + + if author_inbox and interaction.activity_id: + await self._send_activity_to_inbox( + actor, { + "@context": "https://www.w3.org/ns/activitystreams", + "id": f"{actor_url}/activities/{uuid.uuid4()}", + "type": "Undo", + "actor": actor_url, + "object": { + "id": interaction.activity_id, + "type": "Announce", + "actor": actor_url, + "object": object_id, + }, + }, author_inbox, + ) + + await session.delete(interaction) + await session.flush() + + async def _resolve_post( + self, session: AsyncSession, object_id: str, + ) -> tuple[str | None, int | None]: + """Resolve an AP object_id to (post_type, post_id).""" + # Check remote posts + remote = ( + await session.execute( + select(APRemotePost.id).where(APRemotePost.object_id == object_id).limit(1) + ) + ).scalar() + if remote: + return "remote", remote + + # Check local activities + local = ( + await session.execute( + select(APActivity.id).where(APActivity.activity_id == object_id).limit(1) + ) + ).scalar() + if local: + return "local", local + + return None, None + + async def _send_activity_to_inbox( + self, actor: ActorProfile, activity: dict, inbox_url: str, + ) -> None: + import json + import httpx + from shared.utils.http_signatures import sign_request + from urllib.parse 
import urlparse + + domain = _domain() + actor_url = f"https://{domain}/users/{actor.preferred_username}" + + body_bytes = json.dumps(activity).encode() + parsed = urlparse(inbox_url) + headers = sign_request( + private_key_pem=actor.private_key_pem, + key_id=f"{actor_url}#main-key", + method="POST", + path=parsed.path, + host=parsed.netloc, + body=body_bytes, + ) + headers["Content-Type"] = "application/activity+json" + + try: + async with httpx.AsyncClient(timeout=15) as client: + await client.post(inbox_url, content=body_bytes, headers=headers) + except Exception: + import logging + logging.getLogger(__name__).exception( + "Failed to deliver activity to %s", inbox_url, + ) + + # -- Notifications -------------------------------------------------------- + + async def get_notifications( + self, session: AsyncSession, actor_profile_id: int, + before: datetime | None = None, limit: int = 20, + ) -> list[NotificationDTO]: + q = ( + select(APNotification, RemoteActor, ActorProfile) + .outerjoin(RemoteActor, RemoteActor.id == APNotification.from_remote_actor_id) + .outerjoin( + ActorProfile, + ActorProfile.id == APNotification.from_actor_profile_id, + ) + .where(APNotification.actor_profile_id == actor_profile_id) + ) + if before: + q = q.where(APNotification.created_at < before) + q = q.order_by(APNotification.created_at.desc()).limit(limit) + + result = await session.execute(q) + items = [] + for notif, remote_actor, from_actor_profile in result.all(): + if remote_actor: + name = remote_actor.display_name or remote_actor.preferred_username + username = remote_actor.preferred_username + domain = remote_actor.domain + icon = remote_actor.icon_url + elif from_actor_profile: + name = from_actor_profile.display_name or from_actor_profile.preferred_username + username = from_actor_profile.preferred_username + domain = None + icon = None + else: + name = "Unknown" + username = "unknown" + domain = None + icon = None + + # Get preview if target exists + preview = None + if 
notif.target_activity_id: + act = (await session.execute( + select(APActivity).where(APActivity.id == notif.target_activity_id) + )).scalar_one_or_none() + if act and act.object_data: + content = act.object_data.get("content", "") + # Strip HTML tags for preview + import re + preview = re.sub(r"<[^>]+>", "", content)[:100] + elif notif.target_remote_post_id: + rp = (await session.execute( + select(APRemotePost).where(APRemotePost.id == notif.target_remote_post_id) + )).scalar_one_or_none() + if rp and rp.content: + import re + preview = re.sub(r"<[^>]+>", "", rp.content)[:100] + + items.append(NotificationDTO( + id=notif.id, + notification_type=notif.notification_type, + from_actor_name=name, + from_actor_username=username, + from_actor_domain=domain, + from_actor_icon=icon, + target_content_preview=preview, + created_at=notif.created_at, + read=notif.read, + )) + return items + + async def unread_notification_count( + self, session: AsyncSession, actor_profile_id: int, + ) -> int: + return ( + await session.execute( + select(func.count(APNotification.id)).where( + APNotification.actor_profile_id == actor_profile_id, + APNotification.read == False, # noqa: E712 + ) + ) + ).scalar() or 0 + + async def mark_notifications_read( + self, session: AsyncSession, actor_profile_id: int, + ) -> None: + from sqlalchemy import update + await session.execute( + update(APNotification) + .where( + APNotification.actor_profile_id == actor_profile_id, + APNotification.read == False, # noqa: E712 + ) + .values(read=True) + ) + + # -- Stats ---------------------------------------------------------------- + + async def get_stats(self, session: AsyncSession) -> dict: + actors = (await session.execute(select(func.count(ActorProfile.id)))).scalar() or 0 + activities = (await session.execute(select(func.count(APActivity.id)))).scalar() or 0 + followers = (await session.execute(select(func.count(APFollower.id)))).scalar() or 0 + return {"actors": actors, "activities": activities, 
"followers": followers} diff --git a/shared/services/federation_publish.py b/shared/services/federation_publish.py new file mode 100644 index 0000000..fb26ea0 --- /dev/null +++ b/shared/services/federation_publish.py @@ -0,0 +1,92 @@ +"""Inline federation publication — called at write time, not via async handler. + +The originating service calls try_publish() directly, which creates the +APActivity (with process_state='pending') in the same DB transaction. +The EventProcessor picks it up and the delivery wildcard handler POSTs +to follower inboxes. +""" +from __future__ import annotations + +import logging +import os + +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.services.registry import services + +log = logging.getLogger(__name__) + + +async def try_publish( + session: AsyncSession, + *, + user_id: int | None, + activity_type: str, + object_type: str, + object_data: dict, + source_type: str, + source_id: int, +) -> None: + """Publish an AP activity if federation is available and user has a profile. + + Safe to call from any app — returns silently if federation isn't wired + or the user has no actor profile. + """ + if not services.has("federation"): + return + + if not user_id: + return + + actor = await services.federation.get_actor_by_user_id(session, user_id) + if not actor: + return + + # Dedup: don't re-Create if already published, don't re-Delete if already deleted + existing = await services.federation.get_activity_for_source( + session, source_type, source_id, + ) + if existing: + if activity_type == "Create" and existing.activity_type != "Delete": + return # already published (allow re-Create after Delete/unpublish) + if activity_type == "Delete" and existing.activity_type == "Delete": + return # already deleted + elif activity_type in ("Delete", "Update"): + return # never published, nothing to delete/update + + # Stable object ID within a publish cycle. 
After Delete + re-Create + # we append a version suffix so remote servers (Mastodon) treat it as + # a brand-new post rather than ignoring the tombstoned ID. + domain = os.getenv("AP_DOMAIN", "federation.rose-ash.com") + base_object_id = ( + f"https://{domain}/users/{actor.preferred_username}" + f"/objects/{source_type.lower()}/{source_id}" + ) + if activity_type == "Create" and existing and existing.activity_type == "Delete": + # Count prior Creates to derive a version number + create_count = await services.federation.count_activities_for_source( + session, source_type, source_id, activity_type="Create", + ) + object_data["id"] = f"{base_object_id}/v{create_count + 1}" + elif activity_type in ("Update", "Delete") and existing and existing.object_data: + # Use the same object ID as the most recent activity + object_data["id"] = existing.object_data.get("id", base_object_id) + else: + object_data["id"] = base_object_id + + try: + await services.federation.publish_activity( + session, + actor_user_id=user_id, + activity_type=activity_type, + object_type=object_type, + object_data=object_data, + source_type=source_type, + source_id=source_id, + ) + log.info( + "Published %s/%s for %s#%d by user %d", + activity_type, object_type, source_type, source_id, user_id, + ) + except Exception: + log.exception("Failed to publish activity for %s#%d", source_type, source_id) diff --git a/shared/services/market_impl.py b/shared/services/market_impl.py new file mode 100644 index 0000000..71f8771 --- /dev/null +++ b/shared/services/market_impl.py @@ -0,0 +1,128 @@ +"""SQL-backed MarketService implementation. + +Queries ``shared.models.market.*`` and ``shared.models.market_place.*`` — +only this module may read market-domain tables on behalf of other domains. 
+""" +from __future__ import annotations + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.models.market import Product +from shared.models.market_place import MarketPlace +from shared.browser.app.utils import utcnow +from shared.contracts.dtos import MarketPlaceDTO, ProductDTO +from shared.services.relationships import attach_child, detach_child + + +def _mp_to_dto(mp: MarketPlace) -> MarketPlaceDTO: + return MarketPlaceDTO( + id=mp.id, + container_type=mp.container_type, + container_id=mp.container_id, + name=mp.name, + slug=mp.slug, + description=mp.description, + ) + + +def _product_to_dto(p: Product) -> ProductDTO: + return ProductDTO( + id=p.id, + slug=p.slug, + title=p.title, + image=p.image, + description_short=p.description_short, + rrp=p.rrp, + regular_price=p.regular_price, + special_price=p.special_price, + ) + + +class SqlMarketService: + async def marketplaces_for_container( + self, session: AsyncSession, container_type: str, container_id: int, + ) -> list[MarketPlaceDTO]: + result = await session.execute( + select(MarketPlace).where( + MarketPlace.container_type == container_type, + MarketPlace.container_id == container_id, + MarketPlace.deleted_at.is_(None), + ).order_by(MarketPlace.name.asc()) + ) + return [_mp_to_dto(mp) for mp in result.scalars().all()] + + async def list_marketplaces( + self, session: AsyncSession, + container_type: str | None = None, container_id: int | None = None, + *, page: int = 1, per_page: int = 20, + ) -> tuple[list[MarketPlaceDTO], bool]: + stmt = select(MarketPlace).where(MarketPlace.deleted_at.is_(None)) + if container_type is not None and container_id is not None: + stmt = stmt.where( + MarketPlace.container_type == container_type, + MarketPlace.container_id == container_id, + ) + stmt = stmt.order_by(MarketPlace.name.asc()) + stmt = stmt.offset((page - 1) * per_page).limit(per_page + 1) + rows = (await session.execute(stmt)).scalars().all() + has_more = len(rows) > 
per_page + return [_mp_to_dto(mp) for mp in rows[:per_page]], has_more + + async def product_by_id(self, session: AsyncSession, product_id: int) -> ProductDTO | None: + product = ( + await session.execute(select(Product).where(Product.id == product_id)) + ).scalar_one_or_none() + return _product_to_dto(product) if product else None + + async def create_marketplace( + self, session: AsyncSession, container_type: str, container_id: int, + name: str, slug: str, + ) -> MarketPlaceDTO: + # Look for existing (including soft-deleted) + existing = (await session.execute( + select(MarketPlace).where( + MarketPlace.container_type == container_type, + MarketPlace.container_id == container_id, + MarketPlace.slug == slug, + ) + )).scalar_one_or_none() + + if existing: + if existing.deleted_at is not None: + existing.deleted_at = None # revive + existing.name = name + await session.flush() + await attach_child(session, container_type, container_id, "market", existing.id) + return _mp_to_dto(existing) + raise ValueError(f'Market with slug "{slug}" already exists for this container.') + + market = MarketPlace( + container_type=container_type, container_id=container_id, + name=name, slug=slug, + ) + session.add(market) + await session.flush() + await attach_child(session, container_type, container_id, "market", market.id) + return _mp_to_dto(market) + + async def soft_delete_marketplace( + self, session: AsyncSession, container_type: str, container_id: int, + slug: str, + ) -> bool: + market = (await session.execute( + select(MarketPlace).where( + MarketPlace.container_type == container_type, + MarketPlace.container_id == container_id, + MarketPlace.slug == slug, + MarketPlace.deleted_at.is_(None), + ) + )).scalar_one_or_none() + + if not market: + return False + + market.deleted_at = utcnow() + await session.flush() + await detach_child(session, container_type, container_id, "market", market.id) + return True diff --git a/shared/services/navigation.py 
b/shared/services/navigation.py new file mode 100644 index 0000000..d9e15c5 --- /dev/null +++ b/shared/services/navigation.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.models.menu_node import MenuNode + + +async def get_navigation_tree(session: AsyncSession) -> list[MenuNode]: + """ + Return top-level menu nodes ordered by sort_order. + + All apps call this directly (shared DB) — no more HTTP API. + """ + result = await session.execute( + select(MenuNode) + .where(MenuNode.deleted_at.is_(None), MenuNode.depth == 0) + .order_by(MenuNode.sort_order.asc(), MenuNode.id.asc()) + ) + return list(result.scalars().all()) + + +async def rebuild_navigation(session: AsyncSession) -> None: + """ + Rebuild menu_nodes from container_relations. + + Called by event handlers when relationships change. + Currently a no-op placeholder — menu nodes are managed directly + by the admin UI. When the full relationship-driven nav is needed, + this will sync ContainerRelation -> MenuNode. + """ + pass diff --git a/shared/services/registry.py b/shared/services/registry.py new file mode 100644 index 0000000..23d559b --- /dev/null +++ b/shared/services/registry.py @@ -0,0 +1,105 @@ +"""Typed singleton registry for domain services. + +Usage:: + + from shared.services.registry import services + + # Register at app startup + services.blog = SqlBlogService() + + # Query anywhere + if services.has("calendar"): + entries = await services.calendar.pending_entries(session, ...) + + # Or use stubs for absent domains + summary = await services.cart.cart_summary(session, ...) +""" +from __future__ import annotations + +from shared.contracts.protocols import ( + BlogService, + CalendarService, + MarketService, + CartService, + FederationService, +) + + +class _ServiceRegistry: + """Central registry holding one implementation per domain. 
+ + Properties return the registered implementation or raise + ``RuntimeError`` if nothing is registered. Use ``has(name)`` + to check before access when the domain might be absent. + """ + + def __init__(self) -> None: + self._blog: BlogService | None = None + self._calendar: CalendarService | None = None + self._market: MarketService | None = None + self._cart: CartService | None = None + self._federation: FederationService | None = None + + # -- blog ----------------------------------------------------------------- + @property + def blog(self) -> BlogService: + if self._blog is None: + raise RuntimeError("BlogService not registered") + return self._blog + + @blog.setter + def blog(self, impl: BlogService) -> None: + self._blog = impl + + # -- calendar ------------------------------------------------------------- + @property + def calendar(self) -> CalendarService: + if self._calendar is None: + raise RuntimeError("CalendarService not registered") + return self._calendar + + @calendar.setter + def calendar(self, impl: CalendarService) -> None: + self._calendar = impl + + # -- market --------------------------------------------------------------- + @property + def market(self) -> MarketService: + if self._market is None: + raise RuntimeError("MarketService not registered") + return self._market + + @market.setter + def market(self, impl: MarketService) -> None: + self._market = impl + + # -- cart ----------------------------------------------------------------- + @property + def cart(self) -> CartService: + if self._cart is None: + raise RuntimeError("CartService not registered") + return self._cart + + @cart.setter + def cart(self, impl: CartService) -> None: + self._cart = impl + + # -- federation ----------------------------------------------------------- + @property + def federation(self) -> FederationService: + if self._federation is None: + raise RuntimeError("FederationService not registered") + return self._federation + + @federation.setter + def 
federation(self, impl: FederationService) -> None: + self._federation = impl + + # -- introspection -------------------------------------------------------- + def has(self, name: str) -> bool: + """Check whether a domain service is registered.""" + return getattr(self, f"_{name}", None) is not None + + +# Module-level singleton — import this everywhere. +services = _ServiceRegistry() diff --git a/shared/services/relationships.py b/shared/services/relationships.py new file mode 100644 index 0000000..c7ff084 --- /dev/null +++ b/shared/services/relationships.py @@ -0,0 +1,161 @@ +from __future__ import annotations + +from sqlalchemy import select, func +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.events import emit_activity +from shared.models.container_relation import ContainerRelation + + +async def attach_child( + session: AsyncSession, + parent_type: str, + parent_id: int, + child_type: str, + child_id: int, + label: str | None = None, + sort_order: int | None = None, +) -> ContainerRelation: + """ + Create a ContainerRelation and emit container.child_attached event. + + Upsert behaviour: if a relation already exists (including soft-deleted), + revive it instead of inserting a duplicate. 
+ """ + # Check for existing (including soft-deleted) + existing = await session.scalar( + select(ContainerRelation).where( + ContainerRelation.parent_type == parent_type, + ContainerRelation.parent_id == parent_id, + ContainerRelation.child_type == child_type, + ContainerRelation.child_id == child_id, + ) + ) + if existing: + if existing.deleted_at is not None: + # Revive soft-deleted relation + existing.deleted_at = None + if sort_order is not None: + existing.sort_order = sort_order + if label is not None: + existing.label = label + await session.flush() + await emit_activity( + session, + activity_type="Add", + actor_uri="internal:system", + object_type="rose:ContainerRelation", + object_data={ + "parent_type": parent_type, + "parent_id": parent_id, + "child_type": child_type, + "child_id": child_id, + }, + source_type="container_relation", + source_id=existing.id, + ) + return existing + # Already attached and active — no-op + return existing + + if sort_order is None: + max_order = await session.scalar( + select(func.max(ContainerRelation.sort_order)).where( + ContainerRelation.parent_type == parent_type, + ContainerRelation.parent_id == parent_id, + ContainerRelation.deleted_at.is_(None), + ) + ) + sort_order = (max_order or 0) + 1 + + rel = ContainerRelation( + parent_type=parent_type, + parent_id=parent_id, + child_type=child_type, + child_id=child_id, + label=label, + sort_order=sort_order, + ) + session.add(rel) + await session.flush() + + await emit_activity( + session, + activity_type="Add", + actor_uri="internal:system", + object_type="rose:ContainerRelation", + object_data={ + "parent_type": parent_type, + "parent_id": parent_id, + "child_type": child_type, + "child_id": child_id, + }, + source_type="container_relation", + source_id=rel.id, + ) + + return rel + + +async def get_children( + session: AsyncSession, + parent_type: str, + parent_id: int, + child_type: str | None = None, +) -> list[ContainerRelation]: + """Query children of a container, 
optionally filtered by child_type.""" + stmt = select(ContainerRelation).where( + ContainerRelation.parent_type == parent_type, + ContainerRelation.parent_id == parent_id, + ContainerRelation.deleted_at.is_(None), + ) + if child_type is not None: + stmt = stmt.where(ContainerRelation.child_type == child_type) + + stmt = stmt.order_by( + ContainerRelation.sort_order.asc(), ContainerRelation.id.asc() + ) + result = await session.execute(stmt) + return list(result.scalars().all()) + + +async def detach_child( + session: AsyncSession, + parent_type: str, + parent_id: int, + child_type: str, + child_id: int, +) -> bool: + """Soft-delete a ContainerRelation and emit container.child_detached event.""" + result = await session.execute( + select(ContainerRelation).where( + ContainerRelation.parent_type == parent_type, + ContainerRelation.parent_id == parent_id, + ContainerRelation.child_type == child_type, + ContainerRelation.child_id == child_id, + ContainerRelation.deleted_at.is_(None), + ) + ) + rel = result.scalar_one_or_none() + if not rel: + return False + + rel.deleted_at = func.now() + await session.flush() + + await emit_activity( + session, + activity_type="Remove", + actor_uri="internal:system", + object_type="rose:ContainerRelation", + object_data={ + "parent_type": parent_type, + "parent_id": parent_id, + "child_type": child_type, + "child_id": child_id, + }, + source_type="container_relation", + source_id=rel.id, + ) + + return True diff --git a/shared/services/stubs.py b/shared/services/stubs.py new file mode 100644 index 0000000..eb46cca --- /dev/null +++ b/shared/services/stubs.py @@ -0,0 +1,314 @@ +"""No-op stub services for absent domains. + +When an app starts without a particular domain, it registers the stub +so that ``services.X.method()`` returns empty/None rather than crashing. 
+""" +from __future__ import annotations + +from decimal import Decimal + +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.contracts.dtos import ( + PostDTO, + CalendarDTO, + CalendarEntryDTO, + TicketDTO, + MarketPlaceDTO, + ProductDTO, + CartItemDTO, + CartSummaryDTO, + ActorProfileDTO, + APActivityDTO, + APFollowerDTO, +) + + +class StubBlogService: + async def get_post_by_slug(self, session: AsyncSession, slug: str) -> PostDTO | None: + return None + + async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None: + return None + + async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]: + return [] + + async def search_posts(self, session, query, page=1, per_page=10): + return [], 0 + + +class StubCalendarService: + async def calendars_for_container( + self, session: AsyncSession, container_type: str, container_id: int, + ) -> list[CalendarDTO]: + return [] + + async def pending_entries( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + ) -> list[CalendarEntryDTO]: + return [] + + async def entries_for_page( + self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None, + ) -> list[CalendarEntryDTO]: + return [] + + async def entry_by_id(self, session: AsyncSession, entry_id: int) -> CalendarEntryDTO | None: + return None + + async def associated_entries( + self, session: AsyncSession, content_type: str, content_id: int, page: int, + ) -> tuple[list[CalendarEntryDTO], bool]: + return [], False + + async def toggle_entry_post( + self, session: AsyncSession, entry_id: int, content_type: str, content_id: int, + ) -> bool: + return False + + async def adopt_entries_for_user( + self, session: AsyncSession, user_id: int, session_id: str, + ) -> None: + pass + + async def claim_entries_for_order( + self, session: AsyncSession, order_id: int, user_id: int | None, + session_id: str | None, page_post_id: int | None, + ) -> None: + pass + + 
async def confirm_entries_for_order( + self, session: AsyncSession, order_id: int, user_id: int | None, + session_id: str | None, + ) -> None: + pass + + async def get_entries_for_order( + self, session: AsyncSession, order_id: int, + ) -> list[CalendarEntryDTO]: + return [] + + async def user_tickets( + self, session: AsyncSession, *, user_id: int, + ) -> list[TicketDTO]: + return [] + + async def user_bookings( + self, session: AsyncSession, *, user_id: int, + ) -> list[CalendarEntryDTO]: + return [] + + async def confirmed_entries_for_posts( + self, session: AsyncSession, post_ids: list[int], + ) -> dict[int, list[CalendarEntryDTO]]: + return {} + + async def pending_tickets( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + ) -> list[TicketDTO]: + return [] + + async def tickets_for_page( + self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None, + ) -> list[TicketDTO]: + return [] + + async def claim_tickets_for_order( + self, session: AsyncSession, order_id: int, user_id: int | None, + session_id: str | None, page_post_id: int | None, + ) -> None: + pass + + async def confirm_tickets_for_order( + self, session: AsyncSession, order_id: int, + ) -> None: + pass + + async def get_tickets_for_order( + self, session: AsyncSession, order_id: int, + ) -> list[TicketDTO]: + return [] + + async def adopt_tickets_for_user( + self, session: AsyncSession, user_id: int, session_id: str, + ) -> None: + pass + + async def adjust_ticket_quantity( + self, session, entry_id, count, *, user_id, session_id, ticket_type_id=None, + ) -> int: + return 0 + + async def upcoming_entries_for_container(self, session, container_type, container_id, *, page=1, per_page=20): + return [], False + + async def entry_ids_for_content(self, session, content_type, content_id): + return set() + + async def visible_entries_for_period(self, session, calendar_id, period_start, period_end, *, user_id, is_admin, session_id): + return [] + 
+ +class StubMarketService: + async def marketplaces_for_container( + self, session: AsyncSession, container_type: str, container_id: int, + ) -> list[MarketPlaceDTO]: + return [] + + async def list_marketplaces( + self, session: AsyncSession, + container_type: str | None = None, container_id: int | None = None, + *, page: int = 1, per_page: int = 20, + ) -> tuple[list[MarketPlaceDTO], bool]: + return [], False + + async def product_by_id(self, session: AsyncSession, product_id: int) -> ProductDTO | None: + return None + + async def create_marketplace( + self, session: AsyncSession, container_type: str, container_id: int, + name: str, slug: str, + ) -> MarketPlaceDTO: + raise RuntimeError("MarketService not available") + + async def soft_delete_marketplace( + self, session: AsyncSession, container_type: str, container_id: int, + slug: str, + ) -> bool: + return False + + +class StubCartService: + async def cart_summary( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + page_slug: str | None = None, + ) -> CartSummaryDTO: + return CartSummaryDTO() + + async def cart_items( + self, session: AsyncSession, *, user_id: int | None, session_id: str | None, + ) -> list[CartItemDTO]: + return [] + + async def adopt_cart_for_user( + self, session: AsyncSession, user_id: int, session_id: str, + ) -> None: + pass + + +class StubFederationService: + """No-op federation stub for apps that don't own federation.""" + + async def get_actor_by_username(self, session, username): + return None + + async def get_actor_by_user_id(self, session, user_id): + return None + + async def create_actor(self, session, user_id, preferred_username, + display_name=None, summary=None): + raise RuntimeError("FederationService not available") + + async def username_available(self, session, username): + return False + + async def publish_activity(self, session, *, actor_user_id, activity_type, + object_type, object_data, source_type=None, + source_id=None): + return None 
+ + async def get_activity(self, session, activity_id): + return None + + async def get_outbox(self, session, username, page=1, per_page=20, origin_app=None): + return [], 0 + + async def get_activity_for_source(self, session, source_type, source_id): + return None + + async def count_activities_for_source(self, session, source_type, source_id, *, activity_type): + return 0 + + async def get_followers(self, session, username, app_domain=None): + return [] + + async def add_follower(self, session, username, follower_acct, follower_inbox, + follower_actor_url, follower_public_key=None, + app_domain="federation"): + raise RuntimeError("FederationService not available") + + async def remove_follower(self, session, username, follower_acct, app_domain="federation"): + return False + + async def get_or_fetch_remote_actor(self, session, actor_url): + return None + + async def search_remote_actor(self, session, acct): + return None + + async def search_actors(self, session, query, page=1, limit=20): + return [], 0 + + async def send_follow(self, session, local_username, remote_actor_url): + raise RuntimeError("FederationService not available") + + async def get_following(self, session, username, page=1, per_page=20): + return [], 0 + + async def get_followers_paginated(self, session, username, page=1, per_page=20): + return [], 0 + + async def accept_follow_response(self, session, local_username, remote_actor_url): + pass + + async def unfollow(self, session, local_username, remote_actor_url): + pass + + async def ingest_remote_post(self, session, remote_actor_id, activity_json, object_json): + pass + + async def delete_remote_post(self, session, object_id): + pass + + async def get_remote_post(self, session, object_id): + return None + + async def get_home_timeline(self, session, actor_profile_id, before=None, limit=20): + return [] + + async def get_public_timeline(self, session, before=None, limit=20): + return [] + + async def get_actor_timeline(self, session, 
remote_actor_id, before=None, limit=20): + return [] + + async def create_local_post(self, session, actor_profile_id, content, visibility="public", in_reply_to=None): + raise RuntimeError("FederationService not available") + + async def delete_local_post(self, session, actor_profile_id, post_id): + raise RuntimeError("FederationService not available") + + async def like_post(self, session, actor_profile_id, object_id, author_inbox): + pass + + async def unlike_post(self, session, actor_profile_id, object_id, author_inbox): + pass + + async def boost_post(self, session, actor_profile_id, object_id, author_inbox): + pass + + async def unboost_post(self, session, actor_profile_id, object_id, author_inbox): + pass + + async def get_notifications(self, session, actor_profile_id, before=None, limit=20): + return [] + + async def unread_notification_count(self, session, actor_profile_id): + return 0 + + async def mark_notifications_read(self, session, actor_profile_id): + pass + + async def get_stats(self, session): + return {"actors": 0, "activities": 0, "followers": 0} diff --git a/shared/services/widget_registry.py b/shared/services/widget_registry.py new file mode 100644 index 0000000..f43d8e5 --- /dev/null +++ b/shared/services/widget_registry.py @@ -0,0 +1,90 @@ +"""Singleton widget registry for cross-domain UI composition. + +Usage:: + + from shared.services.widget_registry import widgets + + # Register at app startup (after domain services) + widgets.add_container_nav(NavWidget(...)) + + # Query in templates / context processors + for w in widgets.container_nav: + ctx = await w.context_fn(session, container_type="page", ...) +""" +from __future__ import annotations + +from shared.contracts.widgets import ( + NavWidget, + CardWidget, + AccountPageWidget, + AccountNavLink, +) + + +class _WidgetRegistry: + """Central registry holding all widget descriptors. + + Widgets are added at startup and read at request time. + Properties return sorted-by-order copies. 
+ """ + + def __init__(self) -> None: + self._container_nav: list[NavWidget] = [] + self._container_card: list[CardWidget] = [] + self._account_pages: list[AccountPageWidget] = [] + self._account_nav: list[AccountNavLink] = [] + + # -- registration --------------------------------------------------------- + + def add_container_nav(self, w: NavWidget) -> None: + self._container_nav.append(w) + + def add_container_card(self, w: CardWidget) -> None: + self._container_card.append(w) + + def add_account_page(self, w: AccountPageWidget) -> None: + self._account_pages.append(w) + # Auto-create a matching internal nav link + slug = w.slug + + def _href(s=slug): + from shared.infrastructure.urls import account_url + return account_url(f"/{s}/") + + self._account_nav.append(AccountNavLink( + label=w.label, + order=w.order, + href_fn=_href, + external=False, + )) + + def add_account_link(self, link: AccountNavLink) -> None: + self._account_nav.append(link) + + # -- read access (sorted copies) ------------------------------------------ + + @property + def container_nav(self) -> list[NavWidget]: + return sorted(self._container_nav, key=lambda w: w.order) + + @property + def container_cards(self) -> list[CardWidget]: + return sorted(self._container_card, key=lambda w: w.order) + + @property + def account_pages(self) -> list[AccountPageWidget]: + return sorted(self._account_pages, key=lambda w: w.order) + + @property + def account_nav(self) -> list[AccountNavLink]: + return sorted(self._account_nav, key=lambda w: w.order) + + def account_page_by_slug(self, slug: str) -> AccountPageWidget | None: + for w in self._account_pages: + if w.slug == slug: + return w + return None + + +# Module-level singleton — import this everywhere. 
+widgets = _WidgetRegistry() diff --git a/shared/services/widgets/__init__.py b/shared/services/widgets/__init__.py new file mode 100644 index 0000000..d063a76 --- /dev/null +++ b/shared/services/widgets/__init__.py @@ -0,0 +1,22 @@ +"""Per-domain widget registration. + +Called once at startup after domain services are registered. +Only registers widgets for domains that are actually available. +""" +from __future__ import annotations + + +def register_all_widgets() -> None: + from shared.services.registry import services + + if services.has("calendar"): + from .calendar_widgets import register_calendar_widgets + register_calendar_widgets() + + if services.has("market"): + from .market_widgets import register_market_widgets + register_market_widgets() + + if services.has("cart"): + from .cart_widgets import register_cart_widgets + register_cart_widgets() diff --git a/shared/services/widgets/calendar_widgets.py b/shared/services/widgets/calendar_widgets.py new file mode 100644 index 0000000..d9fc2ff --- /dev/null +++ b/shared/services/widgets/calendar_widgets.py @@ -0,0 +1,10 @@ +"""Calendar-domain widgets. + +All calendar widgets have been replaced by fragments +(events app serves them at /internal/fragments/). +""" +from __future__ import annotations + + +def register_calendar_widgets() -> None: + pass diff --git a/shared/services/widgets/cart_widgets.py b/shared/services/widgets/cart_widgets.py new file mode 100644 index 0000000..a45ab90 --- /dev/null +++ b/shared/services/widgets/cart_widgets.py @@ -0,0 +1,10 @@ +"""Cart-domain widgets. + +Account nav link has been replaced by fragments +(cart app serves account-nav-item at /internal/fragments/). 
+""" +from __future__ import annotations + + +def register_cart_widgets() -> None: + pass diff --git a/shared/services/widgets/market_widgets.py b/shared/services/widgets/market_widgets.py new file mode 100644 index 0000000..480d42c --- /dev/null +++ b/shared/services/widgets/market_widgets.py @@ -0,0 +1,10 @@ +"""Market-domain widgets. + +Container nav widgets have been replaced by fragments +(market app serves them at /internal/fragments/). +""" +from __future__ import annotations + + +def register_market_widgets() -> None: + pass diff --git a/shared/static/errors/403.gif b/shared/static/errors/403.gif new file mode 100644 index 0000000..940f6db Binary files /dev/null and b/shared/static/errors/403.gif differ diff --git a/shared/static/errors/404.gif b/shared/static/errors/404.gif new file mode 100644 index 0000000..18d68e3 Binary files /dev/null and b/shared/static/errors/404.gif differ diff --git a/shared/static/errors/error.gif b/shared/static/errors/error.gif new file mode 100644 index 0000000..b8bf54c Binary files /dev/null and b/shared/static/errors/error.gif differ diff --git a/shared/static/favicon.ico b/shared/static/favicon.ico new file mode 100644 index 0000000..e1b7520 Binary files /dev/null and b/shared/static/favicon.ico differ diff --git a/shared/static/fontawesome/css/all.min.css b/shared/static/fontawesome/css/all.min.css new file mode 100644 index 0000000..cd555f1 --- /dev/null +++ b/shared/static/fontawesome/css/all.min.css @@ -0,0 +1,9 @@ +/*! + * Font Awesome Free 6.5.1 by @fontawesome - https://fontawesome.com + * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) + * Copyright 2023 Fonticons, Inc. 
+ */ +.fa{font-family:var(--fa-style-family,"Font Awesome 6 Free");font-weight:var(--fa-style,900)}.fa,.fa-brands,.fa-classic,.fa-regular,.fa-sharp,.fa-solid,.fab,.far,.fas{-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;display:var(--fa-display,inline-block);font-style:normal;font-variant:normal;line-height:1;text-rendering:auto}.fa-classic,.fa-regular,.fa-solid,.far,.fas{font-family:"Font Awesome 6 Free"}.fa-brands,.fab{font-family:"Font Awesome 6 Brands"}.fa-1x{font-size:1em}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-6x{font-size:6em}.fa-7x{font-size:7em}.fa-8x{font-size:8em}.fa-9x{font-size:9em}.fa-10x{font-size:10em}.fa-2xs{font-size:.625em;line-height:.1em;vertical-align:.225em}.fa-xs{font-size:.75em;line-height:.08333em;vertical-align:.125em}.fa-sm{font-size:.875em;line-height:.07143em;vertical-align:.05357em}.fa-lg{font-size:1.25em;line-height:.05em;vertical-align:-.075em}.fa-xl{font-size:1.5em;line-height:.04167em;vertical-align:-.125em}.fa-2xl{font-size:2em;line-height:.03125em;vertical-align:-.1875em}.fa-fw{text-align:center;width:1.25em}.fa-ul{list-style-type:none;margin-left:var(--fa-li-margin,2.5em);padding-left:0}.fa-ul>li{position:relative}.fa-li{left:calc(var(--fa-li-width, 2em)*-1);position:absolute;text-align:center;width:var(--fa-li-width,2em);line-height:inherit}.fa-border{border-radius:var(--fa-border-radius,.1em);border:var(--fa-border-width,.08em) var(--fa-border-style,solid) var(--fa-border-color,#eee);padding:var(--fa-border-padding,.2em .25em 
.15em)}.fa-pull-left{float:left;margin-right:var(--fa-pull-margin,.3em)}.fa-pull-right{float:right;margin-left:var(--fa-pull-margin,.3em)}.fa-beat{-webkit-animation-name:fa-beat;animation-name:fa-beat;-webkit-animation-delay:var(--fa-animation-delay,0s);animation-delay:var(--fa-animation-delay,0s);-webkit-animation-direction:var(--fa-animation-direction,normal);animation-direction:var(--fa-animation-direction,normal);-webkit-animation-duration:var(--fa-animation-duration,1s);animation-duration:var(--fa-animation-duration,1s);-webkit-animation-iteration-count:var(--fa-animation-iteration-count,infinite);animation-iteration-count:var(--fa-animation-iteration-count,infinite);-webkit-animation-timing-function:var(--fa-animation-timing,ease-in-out);animation-timing-function:var(--fa-animation-timing,ease-in-out)}.fa-bounce{-webkit-animation-name:fa-bounce;animation-name:fa-bounce;-webkit-animation-delay:var(--fa-animation-delay,0s);animation-delay:var(--fa-animation-delay,0s);-webkit-animation-direction:var(--fa-animation-direction,normal);animation-direction:var(--fa-animation-direction,normal);-webkit-animation-duration:var(--fa-animation-duration,1s);animation-duration:var(--fa-animation-duration,1s);-webkit-animation-iteration-count:var(--fa-animation-iteration-count,infinite);animation-iteration-count:var(--fa-animation-iteration-count,infinite);-webkit-animation-timing-function:var(--fa-animation-timing,cubic-bezier(.28,.84,.42,1));animation-timing-function:var(--fa-animation-timing,cubic-bezier(.28,.84,.42,1))}.fa-fade{-webkit-animation-name:fa-fade;animation-name:fa-fade;-webkit-animation-iteration-count:var(--fa-animation-iteration-count,infinite);animation-iteration-count:var(--fa-animation-iteration-count,infinite);-webkit-animation-timing-function:var(--fa-animation-timing,cubic-bezier(.4,0,.6,1));animation-timing-function:var(--fa-animation-timing,cubic-bezier(.4,0,.6,1))}.fa-beat-fade,.fa-fade{-webkit-animation-delay:var(--fa-animation-delay,0s);animation-d
elay:var(--fa-animation-delay,0s);-webkit-animation-direction:var(--fa-animation-direction,normal);animation-direction:var(--fa-animation-direction,normal);-webkit-animation-duration:var(--fa-animation-duration,1s);animation-duration:var(--fa-animation-duration,1s)}.fa-beat-fade{-webkit-animation-name:fa-beat-fade;animation-name:fa-beat-fade;-webkit-animation-iteration-count:var(--fa-animation-iteration-count,infinite);animation-iteration-count:var(--fa-animation-iteration-count,infinite);-webkit-animation-timing-function:var(--fa-animation-timing,cubic-bezier(.4,0,.6,1));animation-timing-function:var(--fa-animation-timing,cubic-bezier(.4,0,.6,1))}.fa-flip{-webkit-animation-name:fa-flip;animation-name:fa-flip;-webkit-animation-delay:var(--fa-animation-delay,0s);animation-delay:var(--fa-animation-delay,0s);-webkit-animation-direction:var(--fa-animation-direction,normal);animation-direction:var(--fa-animation-direction,normal);-webkit-animation-duration:var(--fa-animation-duration,1s);animation-duration:var(--fa-animation-duration,1s);-webkit-animation-iteration-count:var(--fa-animation-iteration-count,infinite);animation-iteration-count:var(--fa-animation-iteration-count,infinite);-webkit-animation-timing-function:var(--fa-animation-timing,ease-in-out);animation-timing-function:var(--fa-animation-timing,ease-in-out)}.fa-shake{-webkit-animation-name:fa-shake;animation-name:fa-shake;-webkit-animation-duration:var(--fa-animation-duration,1s);animation-duration:var(--fa-animation-duration,1s);-webkit-animation-iteration-count:var(--fa-animation-iteration-count,infinite);animation-iteration-count:var(--fa-animation-iteration-count,infinite);-webkit-animation-timing-function:var(--fa-animation-timing,linear);animation-timing-function:var(--fa-animation-timing,linear)}.fa-shake,.fa-spin{-webkit-animation-delay:var(--fa-animation-delay,0s);animation-delay:var(--fa-animation-delay,0s);-webkit-animation-direction:var(--fa-animation-direction,normal);animation-direction:var(--f
a-animation-direction,normal)}.fa-spin{-webkit-animation-name:fa-spin;animation-name:fa-spin;-webkit-animation-duration:var(--fa-animation-duration,2s);animation-duration:var(--fa-animation-duration,2s);-webkit-animation-iteration-count:var(--fa-animation-iteration-count,infinite);animation-iteration-count:var(--fa-animation-iteration-count,infinite);-webkit-animation-timing-function:var(--fa-animation-timing,linear);animation-timing-function:var(--fa-animation-timing,linear)}.fa-spin-reverse{--fa-animation-direction:reverse}.fa-pulse,.fa-spin-pulse{-webkit-animation-name:fa-spin;animation-name:fa-spin;-webkit-animation-direction:var(--fa-animation-direction,normal);animation-direction:var(--fa-animation-direction,normal);-webkit-animation-duration:var(--fa-animation-duration,1s);animation-duration:var(--fa-animation-duration,1s);-webkit-animation-iteration-count:var(--fa-animation-iteration-count,infinite);animation-iteration-count:var(--fa-animation-iteration-count,infinite);-webkit-animation-timing-function:var(--fa-animation-timing,steps(8));animation-timing-function:var(--fa-animation-timing,steps(8))}@media (prefers-reduced-motion:reduce){.fa-beat,.fa-beat-fade,.fa-bounce,.fa-fade,.fa-flip,.fa-pulse,.fa-shake,.fa-spin,.fa-spin-pulse{-webkit-animation-delay:-1ms;animation-delay:-1ms;-webkit-animation-duration:1ms;animation-duration:1ms;-webkit-animation-iteration-count:1;animation-iteration-count:1;-webkit-transition-delay:0s;transition-delay:0s;-webkit-transition-duration:0s;transition-duration:0s}}@-webkit-keyframes fa-beat{0%,90%{-webkit-transform:scale(1);transform:scale(1)}45%{-webkit-transform:scale(var(--fa-beat-scale,1.25));transform:scale(var(--fa-beat-scale,1.25))}}@keyframes fa-beat{0%,90%{-webkit-transform:scale(1);transform:scale(1)}45%{-webkit-transform:scale(var(--fa-beat-scale,1.25));transform:scale(var(--fa-beat-scale,1.25))}}@-webkit-keyframes fa-bounce{0%{-webkit-transform:scale(1) translateY(0);transform:scale(1) 
translateY(0)}10%{-webkit-transform:scale(var(--fa-bounce-start-scale-x,1.1),var(--fa-bounce-start-scale-y,.9)) translateY(0);transform:scale(var(--fa-bounce-start-scale-x,1.1),var(--fa-bounce-start-scale-y,.9)) translateY(0)}30%{-webkit-transform:scale(var(--fa-bounce-jump-scale-x,.9),var(--fa-bounce-jump-scale-y,1.1)) translateY(var(--fa-bounce-height,-.5em));transform:scale(var(--fa-bounce-jump-scale-x,.9),var(--fa-bounce-jump-scale-y,1.1)) translateY(var(--fa-bounce-height,-.5em))}50%{-webkit-transform:scale(var(--fa-bounce-land-scale-x,1.05),var(--fa-bounce-land-scale-y,.95)) translateY(0);transform:scale(var(--fa-bounce-land-scale-x,1.05),var(--fa-bounce-land-scale-y,.95)) translateY(0)}57%{-webkit-transform:scale(1) translateY(var(--fa-bounce-rebound,-.125em));transform:scale(1) translateY(var(--fa-bounce-rebound,-.125em))}64%{-webkit-transform:scale(1) translateY(0);transform:scale(1) translateY(0)}to{-webkit-transform:scale(1) translateY(0);transform:scale(1) translateY(0)}}@keyframes fa-bounce{0%{-webkit-transform:scale(1) translateY(0);transform:scale(1) translateY(0)}10%{-webkit-transform:scale(var(--fa-bounce-start-scale-x,1.1),var(--fa-bounce-start-scale-y,.9)) translateY(0);transform:scale(var(--fa-bounce-start-scale-x,1.1),var(--fa-bounce-start-scale-y,.9)) translateY(0)}30%{-webkit-transform:scale(var(--fa-bounce-jump-scale-x,.9),var(--fa-bounce-jump-scale-y,1.1)) translateY(var(--fa-bounce-height,-.5em));transform:scale(var(--fa-bounce-jump-scale-x,.9),var(--fa-bounce-jump-scale-y,1.1)) translateY(var(--fa-bounce-height,-.5em))}50%{-webkit-transform:scale(var(--fa-bounce-land-scale-x,1.05),var(--fa-bounce-land-scale-y,.95)) translateY(0);transform:scale(var(--fa-bounce-land-scale-x,1.05),var(--fa-bounce-land-scale-y,.95)) translateY(0)}57%{-webkit-transform:scale(1) translateY(var(--fa-bounce-rebound,-.125em));transform:scale(1) translateY(var(--fa-bounce-rebound,-.125em))}64%{-webkit-transform:scale(1) translateY(0);transform:scale(1) 
translateY(0)}to{-webkit-transform:scale(1) translateY(0);transform:scale(1) translateY(0)}}@-webkit-keyframes fa-fade{50%{opacity:var(--fa-fade-opacity,.4)}}@keyframes fa-fade{50%{opacity:var(--fa-fade-opacity,.4)}}@-webkit-keyframes fa-beat-fade{0%,to{opacity:var(--fa-beat-fade-opacity,.4);-webkit-transform:scale(1);transform:scale(1)}50%{opacity:1;-webkit-transform:scale(var(--fa-beat-fade-scale,1.125));transform:scale(var(--fa-beat-fade-scale,1.125))}}@keyframes fa-beat-fade{0%,to{opacity:var(--fa-beat-fade-opacity,.4);-webkit-transform:scale(1);transform:scale(1)}50%{opacity:1;-webkit-transform:scale(var(--fa-beat-fade-scale,1.125));transform:scale(var(--fa-beat-fade-scale,1.125))}}@-webkit-keyframes fa-flip{50%{-webkit-transform:rotate3d(var(--fa-flip-x,0),var(--fa-flip-y,1),var(--fa-flip-z,0),var(--fa-flip-angle,-180deg));transform:rotate3d(var(--fa-flip-x,0),var(--fa-flip-y,1),var(--fa-flip-z,0),var(--fa-flip-angle,-180deg))}}@keyframes fa-flip{50%{-webkit-transform:rotate3d(var(--fa-flip-x,0),var(--fa-flip-y,1),var(--fa-flip-z,0),var(--fa-flip-angle,-180deg));transform:rotate3d(var(--fa-flip-x,0),var(--fa-flip-y,1),var(--fa-flip-z,0),var(--fa-flip-angle,-180deg))}}@-webkit-keyframes fa-shake{0%{-webkit-transform:rotate(-15deg);transform:rotate(-15deg)}4%{-webkit-transform:rotate(15deg);transform:rotate(15deg)}8%,24%{-webkit-transform:rotate(-18deg);transform:rotate(-18deg)}12%,28%{-webkit-transform:rotate(18deg);transform:rotate(18deg)}16%{-webkit-transform:rotate(-22deg);transform:rotate(-22deg)}20%{-webkit-transform:rotate(22deg);transform:rotate(22deg)}32%{-webkit-transform:rotate(-12deg);transform:rotate(-12deg)}36%{-webkit-transform:rotate(12deg);transform:rotate(12deg)}40%,to{-webkit-transform:rotate(0deg);transform:rotate(0deg)}}@keyframes 
fa-shake{0%{-webkit-transform:rotate(-15deg);transform:rotate(-15deg)}4%{-webkit-transform:rotate(15deg);transform:rotate(15deg)}8%,24%{-webkit-transform:rotate(-18deg);transform:rotate(-18deg)}12%,28%{-webkit-transform:rotate(18deg);transform:rotate(18deg)}16%{-webkit-transform:rotate(-22deg);transform:rotate(-22deg)}20%{-webkit-transform:rotate(22deg);transform:rotate(22deg)}32%{-webkit-transform:rotate(-12deg);transform:rotate(-12deg)}36%{-webkit-transform:rotate(12deg);transform:rotate(12deg)}40%,to{-webkit-transform:rotate(0deg);transform:rotate(0deg)}}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(1turn);transform:rotate(1turn)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(1turn);transform:rotate(1turn)}}.fa-rotate-90{-webkit-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-webkit-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-webkit-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-webkit-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-webkit-transform:scaleY(-1);transform:scaleY(-1)}.fa-flip-both,.fa-flip-horizontal.fa-flip-vertical{-webkit-transform:scale(-1);transform:scale(-1)}.fa-rotate-by{-webkit-transform:rotate(var(--fa-rotate-angle,none));transform:rotate(var(--fa-rotate-angle,none))}.fa-stack{display:inline-block;height:2em;line-height:2em;position:relative;vertical-align:middle;width:2.5em}.fa-stack-1x,.fa-stack-2x{left:0;position:absolute;text-align:center;width:100%;z-index:var(--fa-stack-z-index,auto)}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:var(--fa-inverse,#fff)} + 
+.fa-0:before{content:"\30"}.fa-1:before{content:"\31"}.fa-2:before{content:"\32"}.fa-3:before{content:"\33"}.fa-4:before{content:"\34"}.fa-5:before{content:"\35"}.fa-6:before{content:"\36"}.fa-7:before{content:"\37"}.fa-8:before{content:"\38"}.fa-9:before{content:"\39"}.fa-fill-drip:before{content:"\f576"}.fa-arrows-to-circle:before{content:"\e4bd"}.fa-chevron-circle-right:before,.fa-circle-chevron-right:before{content:"\f138"}.fa-at:before{content:"\40"}.fa-trash-alt:before,.fa-trash-can:before{content:"\f2ed"}.fa-text-height:before{content:"\f034"}.fa-user-times:before,.fa-user-xmark:before{content:"\f235"}.fa-stethoscope:before{content:"\f0f1"}.fa-comment-alt:before,.fa-message:before{content:"\f27a"}.fa-info:before{content:"\f129"}.fa-compress-alt:before,.fa-down-left-and-up-right-to-center:before{content:"\f422"}.fa-explosion:before{content:"\e4e9"}.fa-file-alt:before,.fa-file-lines:before,.fa-file-text:before{content:"\f15c"}.fa-wave-square:before{content:"\f83e"}.fa-ring:before{content:"\f70b"}.fa-building-un:before{content:"\e4d9"}.fa-dice-three:before{content:"\f527"}.fa-calendar-alt:before,.fa-calendar-days:before{content:"\f073"}.fa-anchor-circle-check:before{content:"\e4aa"}.fa-building-circle-arrow-right:before{content:"\e4d1"}.fa-volleyball-ball:before,.fa-volleyball:before{content:"\f45f"}.fa-arrows-up-to-line:before{content:"\e4c2"}.fa-sort-desc:before,.fa-sort-down:before{content:"\f0dd"}.fa-circle-minus:before,.fa-minus-circle:before{content:"\f056"}.fa-door-open:before{content:"\f52b"}.fa-right-from-bracket:before,.fa-sign-out-alt:before{content:"\f2f5"}.fa-atom:before{content:"\f5d2"}.fa-soap:before{content:"\e06e"}.fa-heart-music-camera-bolt:before,.fa-icons:before{content:"\f86d"}.fa-microphone-alt-slash:before,.fa-microphone-lines-slash:before{content:"\f539"}.fa-bridge-circle-check:before{content:"\e4c9"}.fa-pump-medical:before{content:"\e06a"}.fa-fingerprint:before{content:"\f577"}.fa-hand-point-right:before{content:"\f0a4"}.fa-magnifying-g
lass-location:before,.fa-search-location:before{content:"\f689"}.fa-forward-step:before,.fa-step-forward:before{content:"\f051"}.fa-face-smile-beam:before,.fa-smile-beam:before{content:"\f5b8"}.fa-flag-checkered:before{content:"\f11e"}.fa-football-ball:before,.fa-football:before{content:"\f44e"}.fa-school-circle-exclamation:before{content:"\e56c"}.fa-crop:before{content:"\f125"}.fa-angle-double-down:before,.fa-angles-down:before{content:"\f103"}.fa-users-rectangle:before{content:"\e594"}.fa-people-roof:before{content:"\e537"}.fa-people-line:before{content:"\e534"}.fa-beer-mug-empty:before,.fa-beer:before{content:"\f0fc"}.fa-diagram-predecessor:before{content:"\e477"}.fa-arrow-up-long:before,.fa-long-arrow-up:before{content:"\f176"}.fa-burn:before,.fa-fire-flame-simple:before{content:"\f46a"}.fa-male:before,.fa-person:before{content:"\f183"}.fa-laptop:before{content:"\f109"}.fa-file-csv:before{content:"\f6dd"}.fa-menorah:before{content:"\f676"}.fa-truck-plane:before{content:"\e58f"}.fa-record-vinyl:before{content:"\f8d9"}.fa-face-grin-stars:before,.fa-grin-stars:before{content:"\f587"}.fa-bong:before{content:"\f55c"}.fa-pastafarianism:before,.fa-spaghetti-monster-flying:before{content:"\f67b"}.fa-arrow-down-up-across-line:before{content:"\e4af"}.fa-spoon:before,.fa-utensil-spoon:before{content:"\f2e5"}.fa-jar-wheat:before{content:"\e517"}.fa-envelopes-bulk:before,.fa-mail-bulk:before{content:"\f674"}.fa-file-circle-exclamation:before{content:"\e4eb"}.fa-circle-h:before,.fa-hospital-symbol:before{content:"\f47e"}.fa-pager:before{content:"\f815"}.fa-address-book:before,.fa-contact-book:before{content:"\f2b9"}.fa-strikethrough:before{content:"\f0cc"}.fa-k:before{content:"\4b"}.fa-landmark-flag:before{content:"\e51c"}.fa-pencil-alt:before,.fa-pencil:before{content:"\f303"}.fa-backward:before{content:"\f04a"}.fa-caret-right:before{content:"\f0da"}.fa-comments:before{content:"\f086"}.fa-file-clipboard:before,.fa-paste:before{content:"\f0ea"}.fa-code-pull-request:before{con
tent:"\e13c"}.fa-clipboard-list:before{content:"\f46d"}.fa-truck-loading:before,.fa-truck-ramp-box:before{content:"\f4de"}.fa-user-check:before{content:"\f4fc"}.fa-vial-virus:before{content:"\e597"}.fa-sheet-plastic:before{content:"\e571"}.fa-blog:before{content:"\f781"}.fa-user-ninja:before{content:"\f504"}.fa-person-arrow-up-from-line:before{content:"\e539"}.fa-scroll-torah:before,.fa-torah:before{content:"\f6a0"}.fa-broom-ball:before,.fa-quidditch-broom-ball:before,.fa-quidditch:before{content:"\f458"}.fa-toggle-off:before{content:"\f204"}.fa-archive:before,.fa-box-archive:before{content:"\f187"}.fa-person-drowning:before{content:"\e545"}.fa-arrow-down-9-1:before,.fa-sort-numeric-desc:before,.fa-sort-numeric-down-alt:before{content:"\f886"}.fa-face-grin-tongue-squint:before,.fa-grin-tongue-squint:before{content:"\f58a"}.fa-spray-can:before{content:"\f5bd"}.fa-truck-monster:before{content:"\f63b"}.fa-w:before{content:"\57"}.fa-earth-africa:before,.fa-globe-africa:before{content:"\f57c"}.fa-rainbow:before{content:"\f75b"}.fa-circle-notch:before{content:"\f1ce"}.fa-tablet-alt:before,.fa-tablet-screen-button:before{content:"\f3fa"}.fa-paw:before{content:"\f1b0"}.fa-cloud:before{content:"\f0c2"}.fa-trowel-bricks:before{content:"\e58a"}.fa-face-flushed:before,.fa-flushed:before{content:"\f579"}.fa-hospital-user:before{content:"\f80d"}.fa-tent-arrow-left-right:before{content:"\e57f"}.fa-gavel:before,.fa-legal:before{content:"\f0e3"}.fa-binoculars:before{content:"\f1e5"}.fa-microphone-slash:before{content:"\f131"}.fa-box-tissue:before{content:"\e05b"}.fa-motorcycle:before{content:"\f21c"}.fa-bell-concierge:before,.fa-concierge-bell:before{content:"\f562"}.fa-pen-ruler:before,.fa-pencil-ruler:before{content:"\f5ae"}.fa-people-arrows-left-right:before,.fa-people-arrows:before{content:"\e068"}.fa-mars-and-venus-burst:before{content:"\e523"}.fa-caret-square-right:before,.fa-square-caret-right:before{content:"\f152"}.fa-cut:before,.fa-scissors:before{content:"\f0c4"}.fa-sun-p
lant-wilt:before{content:"\e57a"}.fa-toilets-portable:before{content:"\e584"}.fa-hockey-puck:before{content:"\f453"}.fa-table:before{content:"\f0ce"}.fa-magnifying-glass-arrow-right:before{content:"\e521"}.fa-digital-tachograph:before,.fa-tachograph-digital:before{content:"\f566"}.fa-users-slash:before{content:"\e073"}.fa-clover:before{content:"\e139"}.fa-mail-reply:before,.fa-reply:before{content:"\f3e5"}.fa-star-and-crescent:before{content:"\f699"}.fa-house-fire:before{content:"\e50c"}.fa-minus-square:before,.fa-square-minus:before{content:"\f146"}.fa-helicopter:before{content:"\f533"}.fa-compass:before{content:"\f14e"}.fa-caret-square-down:before,.fa-square-caret-down:before{content:"\f150"}.fa-file-circle-question:before{content:"\e4ef"}.fa-laptop-code:before{content:"\f5fc"}.fa-swatchbook:before{content:"\f5c3"}.fa-prescription-bottle:before{content:"\f485"}.fa-bars:before,.fa-navicon:before{content:"\f0c9"}.fa-people-group:before{content:"\e533"}.fa-hourglass-3:before,.fa-hourglass-end:before{content:"\f253"}.fa-heart-broken:before,.fa-heart-crack:before{content:"\f7a9"}.fa-external-link-square-alt:before,.fa-square-up-right:before{content:"\f360"}.fa-face-kiss-beam:before,.fa-kiss-beam:before{content:"\f597"}.fa-film:before{content:"\f008"}.fa-ruler-horizontal:before{content:"\f547"}.fa-people-robbery:before{content:"\e536"}.fa-lightbulb:before{content:"\f0eb"}.fa-caret-left:before{content:"\f0d9"}.fa-circle-exclamation:before,.fa-exclamation-circle:before{content:"\f06a"}.fa-school-circle-xmark:before{content:"\e56d"}.fa-arrow-right-from-bracket:before,.fa-sign-out:before{content:"\f08b"}.fa-chevron-circle-down:before,.fa-circle-chevron-down:before{content:"\f13a"}.fa-unlock-alt:before,.fa-unlock-keyhole:before{content:"\f13e"}.fa-cloud-showers-heavy:before{content:"\f740"}.fa-headphones-alt:before,.fa-headphones-simple:before{content:"\f58f"}.fa-sitemap:before{content:"\f0e8"}.fa-circle-dollar-to-slot:before,.fa-donate:before{content:"\f4b9"}.fa-memory:befo
re{content:"\f538"}.fa-road-spikes:before{content:"\e568"}.fa-fire-burner:before{content:"\e4f1"}.fa-flag:before{content:"\f024"}.fa-hanukiah:before{content:"\f6e6"}.fa-feather:before{content:"\f52d"}.fa-volume-down:before,.fa-volume-low:before{content:"\f027"}.fa-comment-slash:before{content:"\f4b3"}.fa-cloud-sun-rain:before{content:"\f743"}.fa-compress:before{content:"\f066"}.fa-wheat-alt:before,.fa-wheat-awn:before{content:"\e2cd"}.fa-ankh:before{content:"\f644"}.fa-hands-holding-child:before{content:"\e4fa"}.fa-asterisk:before{content:"\2a"}.fa-check-square:before,.fa-square-check:before{content:"\f14a"}.fa-peseta-sign:before{content:"\e221"}.fa-header:before,.fa-heading:before{content:"\f1dc"}.fa-ghost:before{content:"\f6e2"}.fa-list-squares:before,.fa-list:before{content:"\f03a"}.fa-phone-square-alt:before,.fa-square-phone-flip:before{content:"\f87b"}.fa-cart-plus:before{content:"\f217"}.fa-gamepad:before{content:"\f11b"}.fa-circle-dot:before,.fa-dot-circle:before{content:"\f192"}.fa-dizzy:before,.fa-face-dizzy:before{content:"\f567"}.fa-egg:before{content:"\f7fb"}.fa-house-medical-circle-xmark:before{content:"\e513"}.fa-campground:before{content:"\f6bb"}.fa-folder-plus:before{content:"\f65e"}.fa-futbol-ball:before,.fa-futbol:before,.fa-soccer-ball:before{content:"\f1e3"}.fa-paint-brush:before,.fa-paintbrush:before{content:"\f1fc"}.fa-lock:before{content:"\f023"}.fa-gas-pump:before{content:"\f52f"}.fa-hot-tub-person:before,.fa-hot-tub:before{content:"\f593"}.fa-map-location:before,.fa-map-marked:before{content:"\f59f"}.fa-house-flood-water:before{content:"\e50e"}.fa-tree:before{content:"\f1bb"}.fa-bridge-lock:before{content:"\e4cc"}.fa-sack-dollar:before{content:"\f81d"}.fa-edit:before,.fa-pen-to-square:before{content:"\f044"}.fa-car-side:before{content:"\f5e4"}.fa-share-alt:before,.fa-share-nodes:before{content:"\f1e0"}.fa-heart-circle-minus:before{content:"\e4ff"}.fa-hourglass-2:before,.fa-hourglass-half:before{content:"\f252"}.fa-microscope:before{content:"
\f610"}.fa-sink:before{content:"\e06d"}.fa-bag-shopping:before,.fa-shopping-bag:before{content:"\f290"}.fa-arrow-down-z-a:before,.fa-sort-alpha-desc:before,.fa-sort-alpha-down-alt:before{content:"\f881"}.fa-mitten:before{content:"\f7b5"}.fa-person-rays:before{content:"\e54d"}.fa-users:before{content:"\f0c0"}.fa-eye-slash:before{content:"\f070"}.fa-flask-vial:before{content:"\e4f3"}.fa-hand-paper:before,.fa-hand:before{content:"\f256"}.fa-om:before{content:"\f679"}.fa-worm:before{content:"\e599"}.fa-house-circle-xmark:before{content:"\e50b"}.fa-plug:before{content:"\f1e6"}.fa-chevron-up:before{content:"\f077"}.fa-hand-spock:before{content:"\f259"}.fa-stopwatch:before{content:"\f2f2"}.fa-face-kiss:before,.fa-kiss:before{content:"\f596"}.fa-bridge-circle-xmark:before{content:"\e4cb"}.fa-face-grin-tongue:before,.fa-grin-tongue:before{content:"\f589"}.fa-chess-bishop:before{content:"\f43a"}.fa-face-grin-wink:before,.fa-grin-wink:before{content:"\f58c"}.fa-deaf:before,.fa-deafness:before,.fa-ear-deaf:before,.fa-hard-of-hearing:before{content:"\f2a4"}.fa-road-circle-check:before{content:"\e564"}.fa-dice-five:before{content:"\f523"}.fa-rss-square:before,.fa-square-rss:before{content:"\f143"}.fa-land-mine-on:before{content:"\e51b"}.fa-i-cursor:before{content:"\f246"}.fa-stamp:before{content:"\f5bf"}.fa-stairs:before{content:"\e289"}.fa-i:before{content:"\49"}.fa-hryvnia-sign:before,.fa-hryvnia:before{content:"\f6f2"}.fa-pills:before{content:"\f484"}.fa-face-grin-wide:before,.fa-grin-alt:before{content:"\f581"}.fa-tooth:before{content:"\f5c9"}.fa-v:before{content:"\56"}.fa-bangladeshi-taka-sign:before{content:"\e2e6"}.fa-bicycle:before{content:"\f206"}.fa-rod-asclepius:before,.fa-rod-snake:before,.fa-staff-aesculapius:before,.fa-staff-snake:before{content:"\e579"}.fa-head-side-cough-slash:before{content:"\e062"}.fa-ambulance:before,.fa-truck-medical:before{content:"\f0f9"}.fa-wheat-awn-circle-exclamation:before{content:"\e598"}.fa-snowman:before{content:"\f7d0"}.fa-mortar-pes
tle:before{content:"\f5a7"}.fa-road-barrier:before{content:"\e562"}.fa-school:before{content:"\f549"}.fa-igloo:before{content:"\f7ae"}.fa-joint:before{content:"\f595"}.fa-angle-right:before{content:"\f105"}.fa-horse:before{content:"\f6f0"}.fa-q:before{content:"\51"}.fa-g:before{content:"\47"}.fa-notes-medical:before{content:"\f481"}.fa-temperature-2:before,.fa-temperature-half:before,.fa-thermometer-2:before,.fa-thermometer-half:before{content:"\f2c9"}.fa-dong-sign:before{content:"\e169"}.fa-capsules:before{content:"\f46b"}.fa-poo-bolt:before,.fa-poo-storm:before{content:"\f75a"}.fa-face-frown-open:before,.fa-frown-open:before{content:"\f57a"}.fa-hand-point-up:before{content:"\f0a6"}.fa-money-bill:before{content:"\f0d6"}.fa-bookmark:before{content:"\f02e"}.fa-align-justify:before{content:"\f039"}.fa-umbrella-beach:before{content:"\f5ca"}.fa-helmet-un:before{content:"\e503"}.fa-bullseye:before{content:"\f140"}.fa-bacon:before{content:"\f7e5"}.fa-hand-point-down:before{content:"\f0a7"}.fa-arrow-up-from-bracket:before{content:"\e09a"}.fa-folder-blank:before,.fa-folder:before{content:"\f07b"}.fa-file-medical-alt:before,.fa-file-waveform:before{content:"\f478"}.fa-radiation:before{content:"\f7b9"}.fa-chart-simple:before{content:"\e473"}.fa-mars-stroke:before{content:"\f229"}.fa-vial:before{content:"\f492"}.fa-dashboard:before,.fa-gauge-med:before,.fa-gauge:before,.fa-tachometer-alt-average:before{content:"\f624"}.fa-magic-wand-sparkles:before,.fa-wand-magic-sparkles:before{content:"\e2ca"}.fa-e:before{content:"\45"}.fa-pen-alt:before,.fa-pen-clip:before{content:"\f305"}.fa-bridge-circle-exclamation:before{content:"\e4ca"}.fa-user:before{content:"\f007"}.fa-school-circle-check:before{content:"\e56b"}.fa-dumpster:before{content:"\f793"}.fa-shuttle-van:before,.fa-van-shuttle:before{content:"\f5b6"}.fa-building-user:before{content:"\e4da"}.fa-caret-square-left:before,.fa-square-caret-left:before{content:"\f191"}.fa-highlighter:before{content:"\f591"}.fa-key:before{content:"\
f084"}.fa-bullhorn:before{content:"\f0a1"}.fa-globe:before{content:"\f0ac"}.fa-synagogue:before{content:"\f69b"}.fa-person-half-dress:before{content:"\e548"}.fa-road-bridge:before{content:"\e563"}.fa-location-arrow:before{content:"\f124"}.fa-c:before{content:"\43"}.fa-tablet-button:before{content:"\f10a"}.fa-building-lock:before{content:"\e4d6"}.fa-pizza-slice:before{content:"\f818"}.fa-money-bill-wave:before{content:"\f53a"}.fa-area-chart:before,.fa-chart-area:before{content:"\f1fe"}.fa-house-flag:before{content:"\e50d"}.fa-person-circle-minus:before{content:"\e540"}.fa-ban:before,.fa-cancel:before{content:"\f05e"}.fa-camera-rotate:before{content:"\e0d8"}.fa-air-freshener:before,.fa-spray-can-sparkles:before{content:"\f5d0"}.fa-star:before{content:"\f005"}.fa-repeat:before{content:"\f363"}.fa-cross:before{content:"\f654"}.fa-box:before{content:"\f466"}.fa-venus-mars:before{content:"\f228"}.fa-arrow-pointer:before,.fa-mouse-pointer:before{content:"\f245"}.fa-expand-arrows-alt:before,.fa-maximize:before{content:"\f31e"}.fa-charging-station:before{content:"\f5e7"}.fa-shapes:before,.fa-triangle-circle-square:before{content:"\f61f"}.fa-random:before,.fa-shuffle:before{content:"\f074"}.fa-person-running:before,.fa-running:before{content:"\f70c"}.fa-mobile-retro:before{content:"\e527"}.fa-grip-lines-vertical:before{content:"\f7a5"}.fa-spider:before{content:"\f717"}.fa-hands-bound:before{content:"\e4f9"}.fa-file-invoice-dollar:before{content:"\f571"}.fa-plane-circle-exclamation:before{content:"\e556"}.fa-x-ray:before{content:"\f497"}.fa-spell-check:before{content:"\f891"}.fa-slash:before{content:"\f715"}.fa-computer-mouse:before,.fa-mouse:before{content:"\f8cc"}.fa-arrow-right-to-bracket:before,.fa-sign-in:before{content:"\f090"}.fa-shop-slash:before,.fa-store-alt-slash:before{content:"\e070"}.fa-server:before{content:"\f233"}.fa-virus-covid-slash:before{content:"\e4a9"}.fa-shop-lock:before{content:"\e4a5"}.fa-hourglass-1:before,.fa-hourglass-start:before{content:"\f251"}.
fa-blender-phone:before{content:"\f6b6"}.fa-building-wheat:before{content:"\e4db"}.fa-person-breastfeeding:before{content:"\e53a"}.fa-right-to-bracket:before,.fa-sign-in-alt:before{content:"\f2f6"}.fa-venus:before{content:"\f221"}.fa-passport:before{content:"\f5ab"}.fa-heart-pulse:before,.fa-heartbeat:before{content:"\f21e"}.fa-people-carry-box:before,.fa-people-carry:before{content:"\f4ce"}.fa-temperature-high:before{content:"\f769"}.fa-microchip:before{content:"\f2db"}.fa-crown:before{content:"\f521"}.fa-weight-hanging:before{content:"\f5cd"}.fa-xmarks-lines:before{content:"\e59a"}.fa-file-prescription:before{content:"\f572"}.fa-weight-scale:before,.fa-weight:before{content:"\f496"}.fa-user-friends:before,.fa-user-group:before{content:"\f500"}.fa-arrow-up-a-z:before,.fa-sort-alpha-up:before{content:"\f15e"}.fa-chess-knight:before{content:"\f441"}.fa-face-laugh-squint:before,.fa-laugh-squint:before{content:"\f59b"}.fa-wheelchair:before{content:"\f193"}.fa-arrow-circle-up:before,.fa-circle-arrow-up:before{content:"\f0aa"}.fa-toggle-on:before{content:"\f205"}.fa-person-walking:before,.fa-walking:before{content:"\f554"}.fa-l:before{content:"\4c"}.fa-fire:before{content:"\f06d"}.fa-bed-pulse:before,.fa-procedures:before{content:"\f487"}.fa-shuttle-space:before,.fa-space-shuttle:before{content:"\f197"}.fa-face-laugh:before,.fa-laugh:before{content:"\f599"}.fa-folder-open:before{content:"\f07c"}.fa-heart-circle-plus:before{content:"\e500"}.fa-code-fork:before{content:"\e13b"}.fa-city:before{content:"\f64f"}.fa-microphone-alt:before,.fa-microphone-lines:before{content:"\f3c9"}.fa-pepper-hot:before{content:"\f816"}.fa-unlock:before{content:"\f09c"}.fa-colon-sign:before{content:"\e140"}.fa-headset:before{content:"\f590"}.fa-store-slash:before{content:"\e071"}.fa-road-circle-xmark:before{content:"\e566"}.fa-user-minus:before{content:"\f503"}.fa-mars-stroke-up:before,.fa-mars-stroke-v:before{content:"\f22a"}.fa-champagne-glasses:before,.fa-glass-cheers:before{content:"\f79f"}
.fa-clipboard:before{content:"\f328"}.fa-house-circle-exclamation:before{content:"\e50a"}.fa-file-arrow-up:before,.fa-file-upload:before{content:"\f574"}.fa-wifi-3:before,.fa-wifi-strong:before,.fa-wifi:before{content:"\f1eb"}.fa-bath:before,.fa-bathtub:before{content:"\f2cd"}.fa-underline:before{content:"\f0cd"}.fa-user-edit:before,.fa-user-pen:before{content:"\f4ff"}.fa-signature:before{content:"\f5b7"}.fa-stroopwafel:before{content:"\f551"}.fa-bold:before{content:"\f032"}.fa-anchor-lock:before{content:"\e4ad"}.fa-building-ngo:before{content:"\e4d7"}.fa-manat-sign:before{content:"\e1d5"}.fa-not-equal:before{content:"\f53e"}.fa-border-style:before,.fa-border-top-left:before{content:"\f853"}.fa-map-location-dot:before,.fa-map-marked-alt:before{content:"\f5a0"}.fa-jedi:before{content:"\f669"}.fa-poll:before,.fa-square-poll-vertical:before{content:"\f681"}.fa-mug-hot:before{content:"\f7b6"}.fa-battery-car:before,.fa-car-battery:before{content:"\f5df"}.fa-gift:before{content:"\f06b"}.fa-dice-two:before{content:"\f528"}.fa-chess-queen:before{content:"\f445"}.fa-glasses:before{content:"\f530"}.fa-chess-board:before{content:"\f43c"}.fa-building-circle-check:before{content:"\e4d2"}.fa-person-chalkboard:before{content:"\e53d"}.fa-mars-stroke-h:before,.fa-mars-stroke-right:before{content:"\f22b"}.fa-hand-back-fist:before,.fa-hand-rock:before{content:"\f255"}.fa-caret-square-up:before,.fa-square-caret-up:before{content:"\f151"}.fa-cloud-showers-water:before{content:"\e4e4"}.fa-bar-chart:before,.fa-chart-bar:before{content:"\f080"}.fa-hands-bubbles:before,.fa-hands-wash:before{content:"\e05e"}.fa-less-than-equal:before{content:"\f537"}.fa-train:before{content:"\f238"}.fa-eye-low-vision:before,.fa-low-vision:before{content:"\f2a8"}.fa-crow:before{content:"\f520"}.fa-sailboat:before{content:"\e445"}.fa-window-restore:before{content:"\f2d2"}.fa-plus-square:before,.fa-square-plus:before{content:"\f0fe"}.fa-torii-gate:before{content:"\f6a1"}.fa-frog:before{content:"\f52e"}.fa-bucke
t:before{content:"\e4cf"}.fa-image:before{content:"\f03e"}.fa-microphone:before{content:"\f130"}.fa-cow:before{content:"\f6c8"}.fa-caret-up:before{content:"\f0d8"}.fa-screwdriver:before{content:"\f54a"}.fa-folder-closed:before{content:"\e185"}.fa-house-tsunami:before{content:"\e515"}.fa-square-nfi:before{content:"\e576"}.fa-arrow-up-from-ground-water:before{content:"\e4b5"}.fa-glass-martini-alt:before,.fa-martini-glass:before{content:"\f57b"}.fa-rotate-back:before,.fa-rotate-backward:before,.fa-rotate-left:before,.fa-undo-alt:before{content:"\f2ea"}.fa-columns:before,.fa-table-columns:before{content:"\f0db"}.fa-lemon:before{content:"\f094"}.fa-head-side-mask:before{content:"\e063"}.fa-handshake:before{content:"\f2b5"}.fa-gem:before{content:"\f3a5"}.fa-dolly-box:before,.fa-dolly:before{content:"\f472"}.fa-smoking:before{content:"\f48d"}.fa-compress-arrows-alt:before,.fa-minimize:before{content:"\f78c"}.fa-monument:before{content:"\f5a6"}.fa-snowplow:before{content:"\f7d2"}.fa-angle-double-right:before,.fa-angles-right:before{content:"\f101"}.fa-cannabis:before{content:"\f55f"}.fa-circle-play:before,.fa-play-circle:before{content:"\f144"}.fa-tablets:before{content:"\f490"}.fa-ethernet:before{content:"\f796"}.fa-eur:before,.fa-euro-sign:before,.fa-euro:before{content:"\f153"}.fa-chair:before{content:"\f6c0"}.fa-check-circle:before,.fa-circle-check:before{content:"\f058"}.fa-circle-stop:before,.fa-stop-circle:before{content:"\f28d"}.fa-compass-drafting:before,.fa-drafting-compass:before{content:"\f568"}.fa-plate-wheat:before{content:"\e55a"}.fa-icicles:before{content:"\f7ad"}.fa-person-shelter:before{content:"\e54f"}.fa-neuter:before{content:"\f22c"}.fa-id-badge:before{content:"\f2c1"}.fa-marker:before{content:"\f5a1"}.fa-face-laugh-beam:before,.fa-laugh-beam:before{content:"\f59a"}.fa-helicopter-symbol:before{content:"\e502"}.fa-universal-access:before{content:"\f29a"}.fa-chevron-circle-up:before,.fa-circle-chevron-up:before{content:"\f139"}.fa-lari-sign:before{content
:"\e1c8"}.fa-volcano:before{content:"\f770"}.fa-person-walking-dashed-line-arrow-right:before{content:"\e553"}.fa-gbp:before,.fa-pound-sign:before,.fa-sterling-sign:before{content:"\f154"}.fa-viruses:before{content:"\e076"}.fa-square-person-confined:before{content:"\e577"}.fa-user-tie:before{content:"\f508"}.fa-arrow-down-long:before,.fa-long-arrow-down:before{content:"\f175"}.fa-tent-arrow-down-to-line:before{content:"\e57e"}.fa-certificate:before{content:"\f0a3"}.fa-mail-reply-all:before,.fa-reply-all:before{content:"\f122"}.fa-suitcase:before{content:"\f0f2"}.fa-person-skating:before,.fa-skating:before{content:"\f7c5"}.fa-filter-circle-dollar:before,.fa-funnel-dollar:before{content:"\f662"}.fa-camera-retro:before{content:"\f083"}.fa-arrow-circle-down:before,.fa-circle-arrow-down:before{content:"\f0ab"}.fa-arrow-right-to-file:before,.fa-file-import:before{content:"\f56f"}.fa-external-link-square:before,.fa-square-arrow-up-right:before{content:"\f14c"}.fa-box-open:before{content:"\f49e"}.fa-scroll:before{content:"\f70e"}.fa-spa:before{content:"\f5bb"}.fa-location-pin-lock:before{content:"\e51f"}.fa-pause:before{content:"\f04c"}.fa-hill-avalanche:before{content:"\e507"}.fa-temperature-0:before,.fa-temperature-empty:before,.fa-thermometer-0:before,.fa-thermometer-empty:before{content:"\f2cb"}.fa-bomb:before{content:"\f1e2"}.fa-registered:before{content:"\f25d"}.fa-address-card:before,.fa-contact-card:before,.fa-vcard:before{content:"\f2bb"}.fa-balance-scale-right:before,.fa-scale-unbalanced-flip:before{content:"\f516"}.fa-subscript:before{content:"\f12c"}.fa-diamond-turn-right:before,.fa-directions:before{content:"\f5eb"}.fa-burst:before{content:"\e4dc"}.fa-house-laptop:before,.fa-laptop-house:before{content:"\e066"}.fa-face-tired:before,.fa-tired:before{content:"\f5c8"}.fa-money-bills:before{content:"\e1f3"}.fa-smog:before{content:"\f75f"}.fa-crutch:before{content:"\f7f7"}.fa-cloud-arrow-up:before,.fa-cloud-upload-alt:before,.fa-cloud-upload:before{content:"\f0ee"}.
fa-palette:before{content:"\f53f"}.fa-arrows-turn-right:before{content:"\e4c0"}.fa-vest:before{content:"\e085"}.fa-ferry:before{content:"\e4ea"}.fa-arrows-down-to-people:before{content:"\e4b9"}.fa-seedling:before,.fa-sprout:before{content:"\f4d8"}.fa-arrows-alt-h:before,.fa-left-right:before{content:"\f337"}.fa-boxes-packing:before{content:"\e4c7"}.fa-arrow-circle-left:before,.fa-circle-arrow-left:before{content:"\f0a8"}.fa-group-arrows-rotate:before{content:"\e4f6"}.fa-bowl-food:before{content:"\e4c6"}.fa-candy-cane:before{content:"\f786"}.fa-arrow-down-wide-short:before,.fa-sort-amount-asc:before,.fa-sort-amount-down:before{content:"\f160"}.fa-cloud-bolt:before,.fa-thunderstorm:before{content:"\f76c"}.fa-remove-format:before,.fa-text-slash:before{content:"\f87d"}.fa-face-smile-wink:before,.fa-smile-wink:before{content:"\f4da"}.fa-file-word:before{content:"\f1c2"}.fa-file-powerpoint:before{content:"\f1c4"}.fa-arrows-h:before,.fa-arrows-left-right:before{content:"\f07e"}.fa-house-lock:before{content:"\e510"}.fa-cloud-arrow-down:before,.fa-cloud-download-alt:before,.fa-cloud-download:before{content:"\f0ed"}.fa-children:before{content:"\e4e1"}.fa-blackboard:before,.fa-chalkboard:before{content:"\f51b"}.fa-user-alt-slash:before,.fa-user-large-slash:before{content:"\f4fa"}.fa-envelope-open:before{content:"\f2b6"}.fa-handshake-alt-slash:before,.fa-handshake-simple-slash:before{content:"\e05f"}.fa-mattress-pillow:before{content:"\e525"}.fa-guarani-sign:before{content:"\e19a"}.fa-arrows-rotate:before,.fa-refresh:before,.fa-sync:before{content:"\f021"}.fa-fire-extinguisher:before{content:"\f134"}.fa-cruzeiro-sign:before{content:"\e152"}.fa-greater-than-equal:before{content:"\f532"}.fa-shield-alt:before,.fa-shield-halved:before{content:"\f3ed"}.fa-atlas:before,.fa-book-atlas:before{content:"\f558"}.fa-virus:before{content:"\e074"}.fa-envelope-circle-check:before{content:"\e4e8"}.fa-layer-group:before{content:"\f5fd"}.fa-arrows-to-dot:before{content:"\e4be"}.fa-archway:before
{content:"\f557"}.fa-heart-circle-check:before{content:"\e4fd"}.fa-house-chimney-crack:before,.fa-house-damage:before{content:"\f6f1"}.fa-file-archive:before,.fa-file-zipper:before{content:"\f1c6"}.fa-square:before{content:"\f0c8"}.fa-glass-martini:before,.fa-martini-glass-empty:before{content:"\f000"}.fa-couch:before{content:"\f4b8"}.fa-cedi-sign:before{content:"\e0df"}.fa-italic:before{content:"\f033"}.fa-church:before{content:"\f51d"}.fa-comments-dollar:before{content:"\f653"}.fa-democrat:before{content:"\f747"}.fa-z:before{content:"\5a"}.fa-person-skiing:before,.fa-skiing:before{content:"\f7c9"}.fa-road-lock:before{content:"\e567"}.fa-a:before{content:"\41"}.fa-temperature-arrow-down:before,.fa-temperature-down:before{content:"\e03f"}.fa-feather-alt:before,.fa-feather-pointed:before{content:"\f56b"}.fa-p:before{content:"\50"}.fa-snowflake:before{content:"\f2dc"}.fa-newspaper:before{content:"\f1ea"}.fa-ad:before,.fa-rectangle-ad:before{content:"\f641"}.fa-arrow-circle-right:before,.fa-circle-arrow-right:before{content:"\f0a9"}.fa-filter-circle-xmark:before{content:"\e17b"}.fa-locust:before{content:"\e520"}.fa-sort:before,.fa-unsorted:before{content:"\f0dc"}.fa-list-1-2:before,.fa-list-numeric:before,.fa-list-ol:before{content:"\f0cb"}.fa-person-dress-burst:before{content:"\e544"}.fa-money-check-alt:before,.fa-money-check-dollar:before{content:"\f53d"}.fa-vector-square:before{content:"\f5cb"}.fa-bread-slice:before{content:"\f7ec"}.fa-language:before{content:"\f1ab"}.fa-face-kiss-wink-heart:before,.fa-kiss-wink-heart:before{content:"\f598"}.fa-filter:before{content:"\f0b0"}.fa-question:before{content:"\3f"}.fa-file-signature:before{content:"\f573"}.fa-arrows-alt:before,.fa-up-down-left-right:before{content:"\f0b2"}.fa-house-chimney-user:before{content:"\e065"}.fa-hand-holding-heart:before{content:"\f4be"}.fa-puzzle-piece:before{content:"\f12e"}.fa-money-check:before{content:"\f53c"}.fa-star-half-alt:before,.fa-star-half-stroke:before{content:"\f5c0"}.fa-code:before
{content:"\f121"}.fa-glass-whiskey:before,.fa-whiskey-glass:before{content:"\f7a0"}.fa-building-circle-exclamation:before{content:"\e4d3"}.fa-magnifying-glass-chart:before{content:"\e522"}.fa-arrow-up-right-from-square:before,.fa-external-link:before{content:"\f08e"}.fa-cubes-stacked:before{content:"\e4e6"}.fa-krw:before,.fa-won-sign:before,.fa-won:before{content:"\f159"}.fa-virus-covid:before{content:"\e4a8"}.fa-austral-sign:before{content:"\e0a9"}.fa-f:before{content:"\46"}.fa-leaf:before{content:"\f06c"}.fa-road:before{content:"\f018"}.fa-cab:before,.fa-taxi:before{content:"\f1ba"}.fa-person-circle-plus:before{content:"\e541"}.fa-chart-pie:before,.fa-pie-chart:before{content:"\f200"}.fa-bolt-lightning:before{content:"\e0b7"}.fa-sack-xmark:before{content:"\e56a"}.fa-file-excel:before{content:"\f1c3"}.fa-file-contract:before{content:"\f56c"}.fa-fish-fins:before{content:"\e4f2"}.fa-building-flag:before{content:"\e4d5"}.fa-face-grin-beam:before,.fa-grin-beam:before{content:"\f582"}.fa-object-ungroup:before{content:"\f248"}.fa-poop:before{content:"\f619"}.fa-location-pin:before,.fa-map-marker:before{content:"\f041"}.fa-kaaba:before{content:"\f66b"}.fa-toilet-paper:before{content:"\f71e"}.fa-hard-hat:before,.fa-hat-hard:before,.fa-helmet-safety:before{content:"\f807"}.fa-eject:before{content:"\f052"}.fa-arrow-alt-circle-right:before,.fa-circle-right:before{content:"\f35a"}.fa-plane-circle-check:before{content:"\e555"}.fa-face-rolling-eyes:before,.fa-meh-rolling-eyes:before{content:"\f5a5"}.fa-object-group:before{content:"\f247"}.fa-chart-line:before,.fa-line-chart:before{content:"\f201"}.fa-mask-ventilator:before{content:"\e524"}.fa-arrow-right:before{content:"\f061"}.fa-map-signs:before,.fa-signs-post:before{content:"\f277"}.fa-cash-register:before{content:"\f788"}.fa-person-circle-question:before{content:"\e542"}.fa-h:before{content:"\48"}.fa-tarp:before{content:"\e57b"}.fa-screwdriver-wrench:before,.fa-tools:before{content:"\f7d9"}.fa-arrows-to-eye:before{content:"\
e4bf"}.fa-plug-circle-bolt:before{content:"\e55b"}.fa-heart:before{content:"\f004"}.fa-mars-and-venus:before{content:"\f224"}.fa-home-user:before,.fa-house-user:before{content:"\e1b0"}.fa-dumpster-fire:before{content:"\f794"}.fa-house-crack:before{content:"\e3b1"}.fa-cocktail:before,.fa-martini-glass-citrus:before{content:"\f561"}.fa-face-surprise:before,.fa-surprise:before{content:"\f5c2"}.fa-bottle-water:before{content:"\e4c5"}.fa-circle-pause:before,.fa-pause-circle:before{content:"\f28b"}.fa-toilet-paper-slash:before{content:"\e072"}.fa-apple-alt:before,.fa-apple-whole:before{content:"\f5d1"}.fa-kitchen-set:before{content:"\e51a"}.fa-r:before{content:"\52"}.fa-temperature-1:before,.fa-temperature-quarter:before,.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:"\f2ca"}.fa-cube:before{content:"\f1b2"}.fa-bitcoin-sign:before{content:"\e0b4"}.fa-shield-dog:before{content:"\e573"}.fa-solar-panel:before{content:"\f5ba"}.fa-lock-open:before{content:"\f3c1"}.fa-elevator:before{content:"\e16d"}.fa-money-bill-transfer:before{content:"\e528"}.fa-money-bill-trend-up:before{content:"\e529"}.fa-house-flood-water-circle-arrow-right:before{content:"\e50f"}.fa-poll-h:before,.fa-square-poll-horizontal:before{content:"\f682"}.fa-circle:before{content:"\f111"}.fa-backward-fast:before,.fa-fast-backward:before{content:"\f049"}.fa-recycle:before{content:"\f1b8"}.fa-user-astronaut:before{content:"\f4fb"}.fa-plane-slash:before{content:"\e069"}.fa-trademark:before{content:"\f25c"}.fa-basketball-ball:before,.fa-basketball:before{content:"\f434"}.fa-satellite-dish:before{content:"\f7c0"}.fa-arrow-alt-circle-up:before,.fa-circle-up:before{content:"\f35b"}.fa-mobile-alt:before,.fa-mobile-screen-button:before{content:"\f3cd"}.fa-volume-high:before,.fa-volume-up:before{content:"\f028"}.fa-users-rays:before{content:"\e593"}.fa-wallet:before{content:"\f555"}.fa-clipboard-check:before{content:"\f46c"}.fa-file-audio:before{content:"\f1c7"}.fa-burger:before,.fa-hamburger:before{conte
nt:"\f805"}.fa-wrench:before{content:"\f0ad"}.fa-bugs:before{content:"\e4d0"}.fa-rupee-sign:before,.fa-rupee:before{content:"\f156"}.fa-file-image:before{content:"\f1c5"}.fa-circle-question:before,.fa-question-circle:before{content:"\f059"}.fa-plane-departure:before{content:"\f5b0"}.fa-handshake-slash:before{content:"\e060"}.fa-book-bookmark:before{content:"\e0bb"}.fa-code-branch:before{content:"\f126"}.fa-hat-cowboy:before{content:"\f8c0"}.fa-bridge:before{content:"\e4c8"}.fa-phone-alt:before,.fa-phone-flip:before{content:"\f879"}.fa-truck-front:before{content:"\e2b7"}.fa-cat:before{content:"\f6be"}.fa-anchor-circle-exclamation:before{content:"\e4ab"}.fa-truck-field:before{content:"\e58d"}.fa-route:before{content:"\f4d7"}.fa-clipboard-question:before{content:"\e4e3"}.fa-panorama:before{content:"\e209"}.fa-comment-medical:before{content:"\f7f5"}.fa-teeth-open:before{content:"\f62f"}.fa-file-circle-minus:before{content:"\e4ed"}.fa-tags:before{content:"\f02c"}.fa-wine-glass:before{content:"\f4e3"}.fa-fast-forward:before,.fa-forward-fast:before{content:"\f050"}.fa-face-meh-blank:before,.fa-meh-blank:before{content:"\f5a4"}.fa-parking:before,.fa-square-parking:before{content:"\f540"}.fa-house-signal:before{content:"\e012"}.fa-bars-progress:before,.fa-tasks-alt:before{content:"\f828"}.fa-faucet-drip:before{content:"\e006"}.fa-cart-flatbed:before,.fa-dolly-flatbed:before{content:"\f474"}.fa-ban-smoking:before,.fa-smoking-ban:before{content:"\f54d"}.fa-terminal:before{content:"\f120"}.fa-mobile-button:before{content:"\f10b"}.fa-house-medical-flag:before{content:"\e514"}.fa-basket-shopping:before,.fa-shopping-basket:before{content:"\f291"}.fa-tape:before{content:"\f4db"}.fa-bus-alt:before,.fa-bus-simple:before{content:"\f55e"}.fa-eye:before{content:"\f06e"}.fa-face-sad-cry:before,.fa-sad-cry:before{content:"\f5b3"}.fa-audio-description:before{content:"\f29e"}.fa-person-military-to-person:before{content:"\e54c"}.fa-file-shield:before{content:"\e4f0"}.fa-user-slash:before{con
tent:"\f506"}.fa-pen:before{content:"\f304"}.fa-tower-observation:before{content:"\e586"}.fa-file-code:before{content:"\f1c9"}.fa-signal-5:before,.fa-signal-perfect:before,.fa-signal:before{content:"\f012"}.fa-bus:before{content:"\f207"}.fa-heart-circle-xmark:before{content:"\e501"}.fa-home-lg:before,.fa-house-chimney:before{content:"\e3af"}.fa-window-maximize:before{content:"\f2d0"}.fa-face-frown:before,.fa-frown:before{content:"\f119"}.fa-prescription:before{content:"\f5b1"}.fa-shop:before,.fa-store-alt:before{content:"\f54f"}.fa-floppy-disk:before,.fa-save:before{content:"\f0c7"}.fa-vihara:before{content:"\f6a7"}.fa-balance-scale-left:before,.fa-scale-unbalanced:before{content:"\f515"}.fa-sort-asc:before,.fa-sort-up:before{content:"\f0de"}.fa-comment-dots:before,.fa-commenting:before{content:"\f4ad"}.fa-plant-wilt:before{content:"\e5aa"}.fa-diamond:before{content:"\f219"}.fa-face-grin-squint:before,.fa-grin-squint:before{content:"\f585"}.fa-hand-holding-dollar:before,.fa-hand-holding-usd:before{content:"\f4c0"}.fa-bacterium:before{content:"\e05a"}.fa-hand-pointer:before{content:"\f25a"}.fa-drum-steelpan:before{content:"\f56a"}.fa-hand-scissors:before{content:"\f257"}.fa-hands-praying:before,.fa-praying-hands:before{content:"\f684"}.fa-arrow-right-rotate:before,.fa-arrow-rotate-forward:before,.fa-arrow-rotate-right:before,.fa-redo:before{content:"\f01e"}.fa-biohazard:before{content:"\f780"}.fa-location-crosshairs:before,.fa-location:before{content:"\f601"}.fa-mars-double:before{content:"\f227"}.fa-child-dress:before{content:"\e59c"}.fa-users-between-lines:before{content:"\e591"}.fa-lungs-virus:before{content:"\e067"}.fa-face-grin-tears:before,.fa-grin-tears:before{content:"\f588"}.fa-phone:before{content:"\f095"}.fa-calendar-times:before,.fa-calendar-xmark:before{content:"\f273"}.fa-child-reaching:before{content:"\e59d"}.fa-head-side-virus:before{content:"\e064"}.fa-user-cog:before,.fa-user-gear:before{content:"\f4fe"}.fa-arrow-up-1-9:before,.fa-sort-numeric-up:be
fore{content:"\f163"}.fa-door-closed:before{content:"\f52a"}.fa-shield-virus:before{content:"\e06c"}.fa-dice-six:before{content:"\f526"}.fa-mosquito-net:before{content:"\e52c"}.fa-bridge-water:before{content:"\e4ce"}.fa-person-booth:before{content:"\f756"}.fa-text-width:before{content:"\f035"}.fa-hat-wizard:before{content:"\f6e8"}.fa-pen-fancy:before{content:"\f5ac"}.fa-digging:before,.fa-person-digging:before{content:"\f85e"}.fa-trash:before{content:"\f1f8"}.fa-gauge-simple-med:before,.fa-gauge-simple:before,.fa-tachometer-average:before{content:"\f629"}.fa-book-medical:before{content:"\f7e6"}.fa-poo:before{content:"\f2fe"}.fa-quote-right-alt:before,.fa-quote-right:before{content:"\f10e"}.fa-shirt:before,.fa-t-shirt:before,.fa-tshirt:before{content:"\f553"}.fa-cubes:before{content:"\f1b3"}.fa-divide:before{content:"\f529"}.fa-tenge-sign:before,.fa-tenge:before{content:"\f7d7"}.fa-headphones:before{content:"\f025"}.fa-hands-holding:before{content:"\f4c2"}.fa-hands-clapping:before{content:"\e1a8"}.fa-republican:before{content:"\f75e"}.fa-arrow-left:before{content:"\f060"}.fa-person-circle-xmark:before{content:"\e543"}.fa-ruler:before{content:"\f545"}.fa-align-left:before{content:"\f036"}.fa-dice-d6:before{content:"\f6d1"}.fa-restroom:before{content:"\f7bd"}.fa-j:before{content:"\4a"}.fa-users-viewfinder:before{content:"\e595"}.fa-file-video:before{content:"\f1c8"}.fa-external-link-alt:before,.fa-up-right-from-square:before{content:"\f35d"}.fa-table-cells:before,.fa-th:before{content:"\f00a"}.fa-file-pdf:before{content:"\f1c1"}.fa-bible:before,.fa-book-bible:before{content:"\f647"}.fa-o:before{content:"\4f"}.fa-medkit:before,.fa-suitcase-medical:before{content:"\f0fa"}.fa-user-secret:before{content:"\f21b"}.fa-otter:before{content:"\f700"}.fa-female:before,.fa-person-dress:before{content:"\f182"}.fa-comment-dollar:before{content:"\f651"}.fa-briefcase-clock:before,.fa-business-time:before{content:"\f64a"}.fa-table-cells-large:before,.fa-th-large:before{content:"\f009"}
.fa-book-tanakh:before,.fa-tanakh:before{content:"\f827"}.fa-phone-volume:before,.fa-volume-control-phone:before{content:"\f2a0"}.fa-hat-cowboy-side:before{content:"\f8c1"}.fa-clipboard-user:before{content:"\f7f3"}.fa-child:before{content:"\f1ae"}.fa-lira-sign:before{content:"\f195"}.fa-satellite:before{content:"\f7bf"}.fa-plane-lock:before{content:"\e558"}.fa-tag:before{content:"\f02b"}.fa-comment:before{content:"\f075"}.fa-birthday-cake:before,.fa-cake-candles:before,.fa-cake:before{content:"\f1fd"}.fa-envelope:before{content:"\f0e0"}.fa-angle-double-up:before,.fa-angles-up:before{content:"\f102"}.fa-paperclip:before{content:"\f0c6"}.fa-arrow-right-to-city:before{content:"\e4b3"}.fa-ribbon:before{content:"\f4d6"}.fa-lungs:before{content:"\f604"}.fa-arrow-up-9-1:before,.fa-sort-numeric-up-alt:before{content:"\f887"}.fa-litecoin-sign:before{content:"\e1d3"}.fa-border-none:before{content:"\f850"}.fa-circle-nodes:before{content:"\e4e2"}.fa-parachute-box:before{content:"\f4cd"}.fa-indent:before{content:"\f03c"}.fa-truck-field-un:before{content:"\e58e"}.fa-hourglass-empty:before,.fa-hourglass:before{content:"\f254"}.fa-mountain:before{content:"\f6fc"}.fa-user-doctor:before,.fa-user-md:before{content:"\f0f0"}.fa-circle-info:before,.fa-info-circle:before{content:"\f05a"}.fa-cloud-meatball:before{content:"\f73b"}.fa-camera-alt:before,.fa-camera:before{content:"\f030"}.fa-square-virus:before{content:"\e578"}.fa-meteor:before{content:"\f753"}.fa-car-on:before{content:"\e4dd"}.fa-sleigh:before{content:"\f7cc"}.fa-arrow-down-1-9:before,.fa-sort-numeric-asc:before,.fa-sort-numeric-down:before{content:"\f162"}.fa-hand-holding-droplet:before,.fa-hand-holding-water:before{content:"\f4c1"}.fa-water:before{content:"\f773"}.fa-calendar-check:before{content:"\f274"}.fa-braille:before{content:"\f2a1"}.fa-prescription-bottle-alt:before,.fa-prescription-bottle-medical:before{content:"\f486"}.fa-landmark:before{content:"\f66f"}.fa-truck:before{content:"\f0d1"}.fa-crosshairs:before{content
:"\f05b"}.fa-person-cane:before{content:"\e53c"}.fa-tent:before{content:"\e57d"}.fa-vest-patches:before{content:"\e086"}.fa-check-double:before{content:"\f560"}.fa-arrow-down-a-z:before,.fa-sort-alpha-asc:before,.fa-sort-alpha-down:before{content:"\f15d"}.fa-money-bill-wheat:before{content:"\e52a"}.fa-cookie:before{content:"\f563"}.fa-arrow-left-rotate:before,.fa-arrow-rotate-back:before,.fa-arrow-rotate-backward:before,.fa-arrow-rotate-left:before,.fa-undo:before{content:"\f0e2"}.fa-hard-drive:before,.fa-hdd:before{content:"\f0a0"}.fa-face-grin-squint-tears:before,.fa-grin-squint-tears:before{content:"\f586"}.fa-dumbbell:before{content:"\f44b"}.fa-list-alt:before,.fa-rectangle-list:before{content:"\f022"}.fa-tarp-droplet:before{content:"\e57c"}.fa-house-medical-circle-check:before{content:"\e511"}.fa-person-skiing-nordic:before,.fa-skiing-nordic:before{content:"\f7ca"}.fa-calendar-plus:before{content:"\f271"}.fa-plane-arrival:before{content:"\f5af"}.fa-arrow-alt-circle-left:before,.fa-circle-left:before{content:"\f359"}.fa-subway:before,.fa-train-subway:before{content:"\f239"}.fa-chart-gantt:before{content:"\e0e4"}.fa-indian-rupee-sign:before,.fa-indian-rupee:before,.fa-inr:before{content:"\e1bc"}.fa-crop-alt:before,.fa-crop-simple:before{content:"\f565"}.fa-money-bill-1:before,.fa-money-bill-alt:before{content:"\f3d1"}.fa-left-long:before,.fa-long-arrow-alt-left:before{content:"\f30a"}.fa-dna:before{content:"\f471"}.fa-virus-slash:before{content:"\e075"}.fa-minus:before,.fa-subtract:before{content:"\f068"}.fa-chess:before{content:"\f439"}.fa-arrow-left-long:before,.fa-long-arrow-left:before{content:"\f177"}.fa-plug-circle-check:before{content:"\e55c"}.fa-street-view:before{content:"\f21d"}.fa-franc-sign:before{content:"\e18f"}.fa-volume-off:before{content:"\f026"}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before,.fa-hands-american-sign-language-interpreting:before,.fa-hands-asl-interpreting:before{content:"\f2a3"}.fa-cog:before,.fa-gear:be
fore{content:"\f013"}.fa-droplet-slash:before,.fa-tint-slash:before{content:"\f5c7"}.fa-mosque:before{content:"\f678"}.fa-mosquito:before{content:"\e52b"}.fa-star-of-david:before{content:"\f69a"}.fa-person-military-rifle:before{content:"\e54b"}.fa-cart-shopping:before,.fa-shopping-cart:before{content:"\f07a"}.fa-vials:before{content:"\f493"}.fa-plug-circle-plus:before{content:"\e55f"}.fa-place-of-worship:before{content:"\f67f"}.fa-grip-vertical:before{content:"\f58e"}.fa-arrow-turn-up:before,.fa-level-up:before{content:"\f148"}.fa-u:before{content:"\55"}.fa-square-root-alt:before,.fa-square-root-variable:before{content:"\f698"}.fa-clock-four:before,.fa-clock:before{content:"\f017"}.fa-backward-step:before,.fa-step-backward:before{content:"\f048"}.fa-pallet:before{content:"\f482"}.fa-faucet:before{content:"\e005"}.fa-baseball-bat-ball:before{content:"\f432"}.fa-s:before{content:"\53"}.fa-timeline:before{content:"\e29c"}.fa-keyboard:before{content:"\f11c"}.fa-caret-down:before{content:"\f0d7"}.fa-clinic-medical:before,.fa-house-chimney-medical:before{content:"\f7f2"}.fa-temperature-3:before,.fa-temperature-three-quarters:before,.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:"\f2c8"}.fa-mobile-android-alt:before,.fa-mobile-screen:before{content:"\f3cf"}.fa-plane-up:before{content:"\e22d"}.fa-piggy-bank:before{content:"\f4d3"}.fa-battery-3:before,.fa-battery-half:before{content:"\f242"}.fa-mountain-city:before{content:"\e52e"}.fa-coins:before{content:"\f51e"}.fa-khanda:before{content:"\f66d"}.fa-sliders-h:before,.fa-sliders:before{content:"\f1de"}.fa-folder-tree:before{content:"\f802"}.fa-network-wired:before{content:"\f6ff"}.fa-map-pin:before{content:"\f276"}.fa-hamsa:before{content:"\f665"}.fa-cent-sign:before{content:"\e3f5"}.fa-flask:before{content:"\f0c3"}.fa-person-pregnant:before{content:"\e31e"}.fa-wand-sparkles:before{content:"\f72b"}.fa-ellipsis-v:before,.fa-ellipsis-vertical:before{content:"\f142"}.fa-ticket:before{content:"\f145"}.fa-
power-off:before{content:"\f011"}.fa-long-arrow-alt-right:before,.fa-right-long:before{content:"\f30b"}.fa-flag-usa:before{content:"\f74d"}.fa-laptop-file:before{content:"\e51d"}.fa-teletype:before,.fa-tty:before{content:"\f1e4"}.fa-diagram-next:before{content:"\e476"}.fa-person-rifle:before{content:"\e54e"}.fa-house-medical-circle-exclamation:before{content:"\e512"}.fa-closed-captioning:before{content:"\f20a"}.fa-hiking:before,.fa-person-hiking:before{content:"\f6ec"}.fa-venus-double:before{content:"\f226"}.fa-images:before{content:"\f302"}.fa-calculator:before{content:"\f1ec"}.fa-people-pulling:before{content:"\e535"}.fa-n:before{content:"\4e"}.fa-cable-car:before,.fa-tram:before{content:"\f7da"}.fa-cloud-rain:before{content:"\f73d"}.fa-building-circle-xmark:before{content:"\e4d4"}.fa-ship:before{content:"\f21a"}.fa-arrows-down-to-line:before{content:"\e4b8"}.fa-download:before{content:"\f019"}.fa-face-grin:before,.fa-grin:before{content:"\f580"}.fa-backspace:before,.fa-delete-left:before{content:"\f55a"}.fa-eye-dropper-empty:before,.fa-eye-dropper:before,.fa-eyedropper:before{content:"\f1fb"}.fa-file-circle-check:before{content:"\e5a0"}.fa-forward:before{content:"\f04e"}.fa-mobile-android:before,.fa-mobile-phone:before,.fa-mobile:before{content:"\f3ce"}.fa-face-meh:before,.fa-meh:before{content:"\f11a"}.fa-align-center:before{content:"\f037"}.fa-book-dead:before,.fa-book-skull:before{content:"\f6b7"}.fa-drivers-license:before,.fa-id-card:before{content:"\f2c2"}.fa-dedent:before,.fa-outdent:before{content:"\f03b"}.fa-heart-circle-exclamation:before{content:"\e4fe"}.fa-home-alt:before,.fa-home-lg-alt:before,.fa-home:before,.fa-house:before{content:"\f015"}.fa-calendar-week:before{content:"\f784"}.fa-laptop-medical:before{content:"\f812"}.fa-b:before{content:"\42"}.fa-file-medical:before{content:"\f477"}.fa-dice-one:before{content:"\f525"}.fa-kiwi-bird:before{content:"\f535"}.fa-arrow-right-arrow-left:before,.fa-exchange:before{content:"\f0ec"}.fa-redo-alt:before,.f
a-rotate-forward:before,.fa-rotate-right:before{content:"\f2f9"}.fa-cutlery:before,.fa-utensils:before{content:"\f2e7"}.fa-arrow-up-wide-short:before,.fa-sort-amount-up:before{content:"\f161"}.fa-mill-sign:before{content:"\e1ed"}.fa-bowl-rice:before{content:"\e2eb"}.fa-skull:before{content:"\f54c"}.fa-broadcast-tower:before,.fa-tower-broadcast:before{content:"\f519"}.fa-truck-pickup:before{content:"\f63c"}.fa-long-arrow-alt-up:before,.fa-up-long:before{content:"\f30c"}.fa-stop:before{content:"\f04d"}.fa-code-merge:before{content:"\f387"}.fa-upload:before{content:"\f093"}.fa-hurricane:before{content:"\f751"}.fa-mound:before{content:"\e52d"}.fa-toilet-portable:before{content:"\e583"}.fa-compact-disc:before{content:"\f51f"}.fa-file-arrow-down:before,.fa-file-download:before{content:"\f56d"}.fa-caravan:before{content:"\f8ff"}.fa-shield-cat:before{content:"\e572"}.fa-bolt:before,.fa-zap:before{content:"\f0e7"}.fa-glass-water:before{content:"\e4f4"}.fa-oil-well:before{content:"\e532"}.fa-vault:before{content:"\e2c5"}.fa-mars:before{content:"\f222"}.fa-toilet:before{content:"\f7d8"}.fa-plane-circle-xmark:before{content:"\e557"}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen-sign:before,.fa-yen:before{content:"\f157"}.fa-rouble:before,.fa-rub:before,.fa-ruble-sign:before,.fa-ruble:before{content:"\f158"}.fa-sun:before{content:"\f185"}.fa-guitar:before{content:"\f7a6"}.fa-face-laugh-wink:before,.fa-laugh-wink:before{content:"\f59c"}.fa-horse-head:before{content:"\f7ab"}.fa-bore-hole:before{content:"\e4c3"}.fa-industry:before{content:"\f275"}.fa-arrow-alt-circle-down:before,.fa-circle-down:before{content:"\f358"}.fa-arrows-turn-to-dots:before{content:"\e4c1"}.fa-florin-sign:before{content:"\e184"}.fa-arrow-down-short-wide:before,.fa-sort-amount-desc:before,.fa-sort-amount-down-alt:before{content:"\f884"}.fa-less-than:before{content:"\3c"}.fa-angle-down:before{content:"\f107"}.fa-car-tunnel:before{content:"\e4de"}.fa-head-side-cough:before{content:"\e061"}.fa-grip-lines:b
efore{content:"\f7a4"}.fa-thumbs-down:before{content:"\f165"}.fa-user-lock:before{content:"\f502"}.fa-arrow-right-long:before,.fa-long-arrow-right:before{content:"\f178"}.fa-anchor-circle-xmark:before{content:"\e4ac"}.fa-ellipsis-h:before,.fa-ellipsis:before{content:"\f141"}.fa-chess-pawn:before{content:"\f443"}.fa-first-aid:before,.fa-kit-medical:before{content:"\f479"}.fa-person-through-window:before{content:"\e5a9"}.fa-toolbox:before{content:"\f552"}.fa-hands-holding-circle:before{content:"\e4fb"}.fa-bug:before{content:"\f188"}.fa-credit-card-alt:before,.fa-credit-card:before{content:"\f09d"}.fa-automobile:before,.fa-car:before{content:"\f1b9"}.fa-hand-holding-hand:before{content:"\e4f7"}.fa-book-open-reader:before,.fa-book-reader:before{content:"\f5da"}.fa-mountain-sun:before{content:"\e52f"}.fa-arrows-left-right-to-line:before{content:"\e4ba"}.fa-dice-d20:before{content:"\f6cf"}.fa-truck-droplet:before{content:"\e58c"}.fa-file-circle-xmark:before{content:"\e5a1"}.fa-temperature-arrow-up:before,.fa-temperature-up:before{content:"\e040"}.fa-medal:before{content:"\f5a2"}.fa-bed:before{content:"\f236"}.fa-h-square:before,.fa-square-h:before{content:"\f0fd"}.fa-podcast:before{content:"\f2ce"}.fa-temperature-4:before,.fa-temperature-full:before,.fa-thermometer-4:before,.fa-thermometer-full:before{content:"\f2c7"}.fa-bell:before{content:"\f0f3"}.fa-superscript:before{content:"\f12b"}.fa-plug-circle-xmark:before{content:"\e560"}.fa-star-of-life:before{content:"\f621"}.fa-phone-slash:before{content:"\f3dd"}.fa-paint-roller:before{content:"\f5aa"}.fa-hands-helping:before,.fa-handshake-angle:before{content:"\f4c4"}.fa-location-dot:before,.fa-map-marker-alt:before{content:"\f3c5"}.fa-file:before{content:"\f15b"}.fa-greater-than:before{content:"\3e"}.fa-person-swimming:before,.fa-swimmer:before{content:"\f5c4"}.fa-arrow-down:before{content:"\f063"}.fa-droplet:before,.fa-tint:before{content:"\f043"}.fa-eraser:before{content:"\f12d"}.fa-earth-america:before,.fa-earth-americas
:before,.fa-earth:before,.fa-globe-americas:before{content:"\f57d"}.fa-person-burst:before{content:"\e53b"}.fa-dove:before{content:"\f4ba"}.fa-battery-0:before,.fa-battery-empty:before{content:"\f244"}.fa-socks:before{content:"\f696"}.fa-inbox:before{content:"\f01c"}.fa-section:before{content:"\e447"}.fa-gauge-high:before,.fa-tachometer-alt-fast:before,.fa-tachometer-alt:before{content:"\f625"}.fa-envelope-open-text:before{content:"\f658"}.fa-hospital-alt:before,.fa-hospital-wide:before,.fa-hospital:before{content:"\f0f8"}.fa-wine-bottle:before{content:"\f72f"}.fa-chess-rook:before{content:"\f447"}.fa-bars-staggered:before,.fa-reorder:before,.fa-stream:before{content:"\f550"}.fa-dharmachakra:before{content:"\f655"}.fa-hotdog:before{content:"\f80f"}.fa-blind:before,.fa-person-walking-with-cane:before{content:"\f29d"}.fa-drum:before{content:"\f569"}.fa-ice-cream:before{content:"\f810"}.fa-heart-circle-bolt:before{content:"\e4fc"}.fa-fax:before{content:"\f1ac"}.fa-paragraph:before{content:"\f1dd"}.fa-check-to-slot:before,.fa-vote-yea:before{content:"\f772"}.fa-star-half:before{content:"\f089"}.fa-boxes-alt:before,.fa-boxes-stacked:before,.fa-boxes:before{content:"\f468"}.fa-chain:before,.fa-link:before{content:"\f0c1"}.fa-assistive-listening-systems:before,.fa-ear-listen:before{content:"\f2a2"}.fa-tree-city:before{content:"\e587"}.fa-play:before{content:"\f04b"}.fa-font:before{content:"\f031"}.fa-rupiah-sign:before{content:"\e23d"}.fa-magnifying-glass:before,.fa-search:before{content:"\f002"}.fa-ping-pong-paddle-ball:before,.fa-table-tennis-paddle-ball:before,.fa-table-tennis:before{content:"\f45d"}.fa-diagnoses:before,.fa-person-dots-from-line:before{content:"\f470"}.fa-trash-can-arrow-up:before,.fa-trash-restore-alt:before{content:"\f82a"}.fa-naira-sign:before{content:"\e1f6"}.fa-cart-arrow-down:before{content:"\f218"}.fa-walkie-talkie:before{content:"\f8ef"}.fa-file-edit:before,.fa-file-pen:before{content:"\f31c"}.fa-receipt:before{content:"\f543"}.fa-pen-square:bef
ore,.fa-pencil-square:before,.fa-square-pen:before{content:"\f14b"}.fa-suitcase-rolling:before{content:"\f5c1"}.fa-person-circle-exclamation:before{content:"\e53f"}.fa-chevron-down:before{content:"\f078"}.fa-battery-5:before,.fa-battery-full:before,.fa-battery:before{content:"\f240"}.fa-skull-crossbones:before{content:"\f714"}.fa-code-compare:before{content:"\e13a"}.fa-list-dots:before,.fa-list-ul:before{content:"\f0ca"}.fa-school-lock:before{content:"\e56f"}.fa-tower-cell:before{content:"\e585"}.fa-down-long:before,.fa-long-arrow-alt-down:before{content:"\f309"}.fa-ranking-star:before{content:"\e561"}.fa-chess-king:before{content:"\f43f"}.fa-person-harassing:before{content:"\e549"}.fa-brazilian-real-sign:before{content:"\e46c"}.fa-landmark-alt:before,.fa-landmark-dome:before{content:"\f752"}.fa-arrow-up:before{content:"\f062"}.fa-television:before,.fa-tv-alt:before,.fa-tv:before{content:"\f26c"}.fa-shrimp:before{content:"\e448"}.fa-list-check:before,.fa-tasks:before{content:"\f0ae"}.fa-jug-detergent:before{content:"\e519"}.fa-circle-user:before,.fa-user-circle:before{content:"\f2bd"}.fa-user-shield:before{content:"\f505"}.fa-wind:before{content:"\f72e"}.fa-car-burst:before,.fa-car-crash:before{content:"\f5e1"}.fa-y:before{content:"\59"}.fa-person-snowboarding:before,.fa-snowboarding:before{content:"\f7ce"}.fa-shipping-fast:before,.fa-truck-fast:before{content:"\f48b"}.fa-fish:before{content:"\f578"}.fa-user-graduate:before{content:"\f501"}.fa-adjust:before,.fa-circle-half-stroke:before{content:"\f042"}.fa-clapperboard:before{content:"\e131"}.fa-circle-radiation:before,.fa-radiation-alt:before{content:"\f7ba"}.fa-baseball-ball:before,.fa-baseball:before{content:"\f433"}.fa-jet-fighter-up:before{content:"\e518"}.fa-diagram-project:before,.fa-project-diagram:before{content:"\f542"}.fa-copy:before{content:"\f0c5"}.fa-volume-mute:before,.fa-volume-times:before,.fa-volume-xmark:before{content:"\f6a9"}.fa-hand-sparkles:before{content:"\e05d"}.fa-grip-horizontal:before,.fa
-grip:before{content:"\f58d"}.fa-share-from-square:before,.fa-share-square:before{content:"\f14d"}.fa-child-combatant:before,.fa-child-rifle:before{content:"\e4e0"}.fa-gun:before{content:"\e19b"}.fa-phone-square:before,.fa-square-phone:before{content:"\f098"}.fa-add:before,.fa-plus:before{content:"\2b"}.fa-expand:before{content:"\f065"}.fa-computer:before{content:"\e4e5"}.fa-close:before,.fa-multiply:before,.fa-remove:before,.fa-times:before,.fa-xmark:before{content:"\f00d"}.fa-arrows-up-down-left-right:before,.fa-arrows:before{content:"\f047"}.fa-chalkboard-teacher:before,.fa-chalkboard-user:before{content:"\f51c"}.fa-peso-sign:before{content:"\e222"}.fa-building-shield:before{content:"\e4d8"}.fa-baby:before{content:"\f77c"}.fa-users-line:before{content:"\e592"}.fa-quote-left-alt:before,.fa-quote-left:before{content:"\f10d"}.fa-tractor:before{content:"\f722"}.fa-trash-arrow-up:before,.fa-trash-restore:before{content:"\f829"}.fa-arrow-down-up-lock:before{content:"\e4b0"}.fa-lines-leaning:before{content:"\e51e"}.fa-ruler-combined:before{content:"\f546"}.fa-copyright:before{content:"\f1f9"}.fa-equals:before{content:"\3d"}.fa-blender:before{content:"\f517"}.fa-teeth:before{content:"\f62e"}.fa-ils:before,.fa-shekel-sign:before,.fa-shekel:before,.fa-sheqel-sign:before,.fa-sheqel:before{content:"\f20b"}.fa-map:before{content:"\f279"}.fa-rocket:before{content:"\f135"}.fa-photo-film:before,.fa-photo-video:before{content:"\f87c"}.fa-folder-minus:before{content:"\f65d"}.fa-store:before{content:"\f54e"}.fa-arrow-trend-up:before{content:"\e098"}.fa-plug-circle-minus:before{content:"\e55e"}.fa-sign-hanging:before,.fa-sign:before{content:"\f4d9"}.fa-bezier-curve:before{content:"\f55b"}.fa-bell-slash:before{content:"\f1f6"}.fa-tablet-android:before,.fa-tablet:before{content:"\f3fb"}.fa-school-flag:before{content:"\e56e"}.fa-fill:before{content:"\f575"}.fa-angle-up:before{content:"\f106"}.fa-drumstick-bite:before{content:"\f6d7"}.fa-holly-berry:before{content:"\f7aa"}.fa-chevron-le
ft:before{content:"\f053"}.fa-bacteria:before{content:"\e059"}.fa-hand-lizard:before{content:"\f258"}.fa-notdef:before{content:"\e1fe"}.fa-disease:before{content:"\f7fa"}.fa-briefcase-medical:before{content:"\f469"}.fa-genderless:before{content:"\f22d"}.fa-chevron-right:before{content:"\f054"}.fa-retweet:before{content:"\f079"}.fa-car-alt:before,.fa-car-rear:before{content:"\f5de"}.fa-pump-soap:before{content:"\e06b"}.fa-video-slash:before{content:"\f4e2"}.fa-battery-2:before,.fa-battery-quarter:before{content:"\f243"}.fa-radio:before{content:"\f8d7"}.fa-baby-carriage:before,.fa-carriage-baby:before{content:"\f77d"}.fa-traffic-light:before{content:"\f637"}.fa-thermometer:before{content:"\f491"}.fa-vr-cardboard:before{content:"\f729"}.fa-hand-middle-finger:before{content:"\f806"}.fa-percent:before,.fa-percentage:before{content:"\25"}.fa-truck-moving:before{content:"\f4df"}.fa-glass-water-droplet:before{content:"\e4f5"}.fa-display:before{content:"\e163"}.fa-face-smile:before,.fa-smile:before{content:"\f118"}.fa-thumb-tack:before,.fa-thumbtack:before{content:"\f08d"}.fa-trophy:before{content:"\f091"}.fa-person-praying:before,.fa-pray:before{content:"\f683"}.fa-hammer:before{content:"\f6e3"}.fa-hand-peace:before{content:"\f25b"}.fa-rotate:before,.fa-sync-alt:before{content:"\f2f1"}.fa-spinner:before{content:"\f110"}.fa-robot:before{content:"\f544"}.fa-peace:before{content:"\f67c"}.fa-cogs:before,.fa-gears:before{content:"\f085"}.fa-warehouse:before{content:"\f494"}.fa-arrow-up-right-dots:before{content:"\e4b7"}.fa-splotch:before{content:"\f5bc"}.fa-face-grin-hearts:before,.fa-grin-hearts:before{content:"\f584"}.fa-dice-four:before{content:"\f524"}.fa-sim-card:before{content:"\f7c4"}.fa-transgender-alt:before,.fa-transgender:before{content:"\f225"}.fa-mercury:before{content:"\f223"}.fa-arrow-turn-down:before,.fa-level-down:before{content:"\f149"}.fa-person-falling-burst:before{content:"\e547"}.fa-award:before{content:"\f559"}.fa-ticket-alt:before,.fa-ticket-simple:before
{content:"\f3ff"}.fa-building:before{content:"\f1ad"}.fa-angle-double-left:before,.fa-angles-left:before{content:"\f100"}.fa-qrcode:before{content:"\f029"}.fa-clock-rotate-left:before,.fa-history:before{content:"\f1da"}.fa-face-grin-beam-sweat:before,.fa-grin-beam-sweat:before{content:"\f583"}.fa-arrow-right-from-file:before,.fa-file-export:before{content:"\f56e"}.fa-shield-blank:before,.fa-shield:before{content:"\f132"}.fa-arrow-up-short-wide:before,.fa-sort-amount-up-alt:before{content:"\f885"}.fa-house-medical:before{content:"\e3b2"}.fa-golf-ball-tee:before,.fa-golf-ball:before{content:"\f450"}.fa-chevron-circle-left:before,.fa-circle-chevron-left:before{content:"\f137"}.fa-house-chimney-window:before{content:"\e00d"}.fa-pen-nib:before{content:"\f5ad"}.fa-tent-arrow-turn-left:before{content:"\e580"}.fa-tents:before{content:"\e582"}.fa-magic:before,.fa-wand-magic:before{content:"\f0d0"}.fa-dog:before{content:"\f6d3"}.fa-carrot:before{content:"\f787"}.fa-moon:before{content:"\f186"}.fa-wine-glass-alt:before,.fa-wine-glass-empty:before{content:"\f5ce"}.fa-cheese:before{content:"\f7ef"}.fa-yin-yang:before{content:"\f6ad"}.fa-music:before{content:"\f001"}.fa-code-commit:before{content:"\f386"}.fa-temperature-low:before{content:"\f76b"}.fa-biking:before,.fa-person-biking:before{content:"\f84a"}.fa-broom:before{content:"\f51a"}.fa-shield-heart:before{content:"\e574"}.fa-gopuram:before{content:"\f664"}.fa-earth-oceania:before,.fa-globe-oceania:before{content:"\e47b"}.fa-square-xmark:before,.fa-times-square:before,.fa-xmark-square:before{content:"\f2d3"}.fa-hashtag:before{content:"\23"}.fa-expand-alt:before,.fa-up-right-and-down-left-from-center:before{content:"\f424"}.fa-oil-can:before{content:"\f613"}.fa-t:before{content:"\54"}.fa-hippo:before{content:"\f6ed"}.fa-chart-column:before{content:"\e0e3"}.fa-infinity:before{content:"\f534"}.fa-vial-circle-check:before{content:"\e596"}.fa-person-arrow-down-to-line:before{content:"\e538"}.fa-voicemail:before{content:"\f897"}.fa
-fan:before{content:"\f863"}.fa-person-walking-luggage:before{content:"\e554"}.fa-arrows-alt-v:before,.fa-up-down:before{content:"\f338"}.fa-cloud-moon-rain:before{content:"\f73c"}.fa-calendar:before{content:"\f133"}.fa-trailer:before{content:"\e041"}.fa-bahai:before,.fa-haykal:before{content:"\f666"}.fa-sd-card:before{content:"\f7c2"}.fa-dragon:before{content:"\f6d5"}.fa-shoe-prints:before{content:"\f54b"}.fa-circle-plus:before,.fa-plus-circle:before{content:"\f055"}.fa-face-grin-tongue-wink:before,.fa-grin-tongue-wink:before{content:"\f58b"}.fa-hand-holding:before{content:"\f4bd"}.fa-plug-circle-exclamation:before{content:"\e55d"}.fa-chain-broken:before,.fa-chain-slash:before,.fa-link-slash:before,.fa-unlink:before{content:"\f127"}.fa-clone:before{content:"\f24d"}.fa-person-walking-arrow-loop-left:before{content:"\e551"}.fa-arrow-up-z-a:before,.fa-sort-alpha-up-alt:before{content:"\f882"}.fa-fire-alt:before,.fa-fire-flame-curved:before{content:"\f7e4"}.fa-tornado:before{content:"\f76f"}.fa-file-circle-plus:before{content:"\e494"}.fa-book-quran:before,.fa-quran:before{content:"\f687"}.fa-anchor:before{content:"\f13d"}.fa-border-all:before{content:"\f84c"}.fa-angry:before,.fa-face-angry:before{content:"\f556"}.fa-cookie-bite:before{content:"\f564"}.fa-arrow-trend-down:before{content:"\e097"}.fa-feed:before,.fa-rss:before{content:"\f09e"}.fa-draw-polygon:before{content:"\f5ee"}.fa-balance-scale:before,.fa-scale-balanced:before{content:"\f24e"}.fa-gauge-simple-high:before,.fa-tachometer-fast:before,.fa-tachometer:before{content:"\f62a"}.fa-shower:before{content:"\f2cc"}.fa-desktop-alt:before,.fa-desktop:before{content:"\f390"}.fa-m:before{content:"\4d"}.fa-table-list:before,.fa-th-list:before{content:"\f00b"}.fa-comment-sms:before,.fa-sms:before{content:"\f7cd"}.fa-book:before{content:"\f02d"}.fa-user-plus:before{content:"\f234"}.fa-check:before{content:"\f00c"}.fa-battery-4:before,.fa-battery-three-quarters:before{content:"\f241"}.fa-house-circle-check:before{content
:"\e509"}.fa-angle-left:before{content:"\f104"}.fa-diagram-successor:before{content:"\e47a"}.fa-truck-arrow-right:before{content:"\e58b"}.fa-arrows-split-up-and-left:before{content:"\e4bc"}.fa-fist-raised:before,.fa-hand-fist:before{content:"\f6de"}.fa-cloud-moon:before{content:"\f6c3"}.fa-briefcase:before{content:"\f0b1"}.fa-person-falling:before{content:"\e546"}.fa-image-portrait:before,.fa-portrait:before{content:"\f3e0"}.fa-user-tag:before{content:"\f507"}.fa-rug:before{content:"\e569"}.fa-earth-europe:before,.fa-globe-europe:before{content:"\f7a2"}.fa-cart-flatbed-suitcase:before,.fa-luggage-cart:before{content:"\f59d"}.fa-rectangle-times:before,.fa-rectangle-xmark:before,.fa-times-rectangle:before,.fa-window-close:before{content:"\f410"}.fa-baht-sign:before{content:"\e0ac"}.fa-book-open:before{content:"\f518"}.fa-book-journal-whills:before,.fa-journal-whills:before{content:"\f66a"}.fa-handcuffs:before{content:"\e4f8"}.fa-exclamation-triangle:before,.fa-triangle-exclamation:before,.fa-warning:before{content:"\f071"}.fa-database:before{content:"\f1c0"}.fa-mail-forward:before,.fa-share:before{content:"\f064"}.fa-bottle-droplet:before{content:"\e4c4"}.fa-mask-face:before{content:"\e1d7"}.fa-hill-rockslide:before{content:"\e508"}.fa-exchange-alt:before,.fa-right-left:before{content:"\f362"}.fa-paper-plane:before{content:"\f1d8"}.fa-road-circle-exclamation:before{content:"\e565"}.fa-dungeon:before{content:"\f6d9"}.fa-align-right:before{content:"\f038"}.fa-money-bill-1-wave:before,.fa-money-bill-wave-alt:before{content:"\f53b"}.fa-life-ring:before{content:"\f1cd"}.fa-hands:before,.fa-sign-language:before,.fa-signing:before{content:"\f2a7"}.fa-calendar-day:before{content:"\f783"}.fa-ladder-water:before,.fa-swimming-pool:before,.fa-water-ladder:before{content:"\f5c5"}.fa-arrows-up-down:before,.fa-arrows-v:before{content:"\f07d"}.fa-face-grimace:before,.fa-grimace:before{content:"\f57f"}.fa-wheelchair-alt:before,.fa-wheelchair-move:before{content:"\e2ce"}.fa-level-down-
alt:before,.fa-turn-down:before{content:"\f3be"}.fa-person-walking-arrow-right:before{content:"\e552"}.fa-envelope-square:before,.fa-square-envelope:before{content:"\f199"}.fa-dice:before{content:"\f522"}.fa-bowling-ball:before{content:"\f436"}.fa-brain:before{content:"\f5dc"}.fa-band-aid:before,.fa-bandage:before{content:"\f462"}.fa-calendar-minus:before{content:"\f272"}.fa-circle-xmark:before,.fa-times-circle:before,.fa-xmark-circle:before{content:"\f057"}.fa-gifts:before{content:"\f79c"}.fa-hotel:before{content:"\f594"}.fa-earth-asia:before,.fa-globe-asia:before{content:"\f57e"}.fa-id-card-alt:before,.fa-id-card-clip:before{content:"\f47f"}.fa-magnifying-glass-plus:before,.fa-search-plus:before{content:"\f00e"}.fa-thumbs-up:before{content:"\f164"}.fa-user-clock:before{content:"\f4fd"}.fa-allergies:before,.fa-hand-dots:before{content:"\f461"}.fa-file-invoice:before{content:"\f570"}.fa-window-minimize:before{content:"\f2d1"}.fa-coffee:before,.fa-mug-saucer:before{content:"\f0f4"}.fa-brush:before{content:"\f55d"}.fa-mask:before{content:"\f6fa"}.fa-magnifying-glass-minus:before,.fa-search-minus:before{content:"\f010"}.fa-ruler-vertical:before{content:"\f548"}.fa-user-alt:before,.fa-user-large:before{content:"\f406"}.fa-train-tram:before{content:"\e5b4"}.fa-user-nurse:before{content:"\f82f"}.fa-syringe:before{content:"\f48e"}.fa-cloud-sun:before{content:"\f6c4"}.fa-stopwatch-20:before{content:"\e06f"}.fa-square-full:before{content:"\f45c"}.fa-magnet:before{content:"\f076"}.fa-jar:before{content:"\e516"}.fa-note-sticky:before,.fa-sticky-note:before{content:"\f249"}.fa-bug-slash:before{content:"\e490"}.fa-arrow-up-from-water-pump:before{content:"\e4b6"}.fa-bone:before{content:"\f5d7"}.fa-user-injured:before{content:"\f728"}.fa-face-sad-tear:before,.fa-sad-tear:before{content:"\f5b4"}.fa-plane:before{content:"\f072"}.fa-tent-arrows-down:before{content:"\e581"}.fa-exclamation:before{content:"\21"}.fa-arrows-spin:before{content:"\e4bb"}.fa-print:before{content:"\f02f"}.fa-
try:before,.fa-turkish-lira-sign:before,.fa-turkish-lira:before{content:"\e2bb"}.fa-dollar-sign:before,.fa-dollar:before,.fa-usd:before{content:"\24"}.fa-x:before{content:"\58"}.fa-magnifying-glass-dollar:before,.fa-search-dollar:before{content:"\f688"}.fa-users-cog:before,.fa-users-gear:before{content:"\f509"}.fa-person-military-pointing:before{content:"\e54a"}.fa-bank:before,.fa-building-columns:before,.fa-institution:before,.fa-museum:before,.fa-university:before{content:"\f19c"}.fa-umbrella:before{content:"\f0e9"}.fa-trowel:before{content:"\e589"}.fa-d:before{content:"\44"}.fa-stapler:before{content:"\e5af"}.fa-masks-theater:before,.fa-theater-masks:before{content:"\f630"}.fa-kip-sign:before{content:"\e1c4"}.fa-hand-point-left:before{content:"\f0a5"}.fa-handshake-alt:before,.fa-handshake-simple:before{content:"\f4c6"}.fa-fighter-jet:before,.fa-jet-fighter:before{content:"\f0fb"}.fa-share-alt-square:before,.fa-square-share-nodes:before{content:"\f1e1"}.fa-barcode:before{content:"\f02a"}.fa-plus-minus:before{content:"\e43c"}.fa-video-camera:before,.fa-video:before{content:"\f03d"}.fa-graduation-cap:before,.fa-mortar-board:before{content:"\f19d"}.fa-hand-holding-medical:before{content:"\e05c"}.fa-person-circle-check:before{content:"\e53e"}.fa-level-up-alt:before,.fa-turn-up:before{content:"\f3bf"} +.fa-sr-only,.fa-sr-only-focusable:not(:focus),.sr-only,.sr-only-focusable:not(:focus){position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);white-space:nowrap;border-width:0}:host,:root{--fa-style-family-brands:"Font Awesome 6 Brands";--fa-font-brands:normal 400 1em/1 "Font Awesome 6 Brands"}@font-face{font-family:"Font Awesome 6 Brands";font-style:normal;font-weight:400;font-display:block;src:url(../webfonts/fa-brands-400.woff2) format("woff2"),url(../webfonts/fa-brands-400.ttf) 
format("truetype")}.fa-brands,.fab{font-weight:400}.fa-monero:before{content:"\f3d0"}.fa-hooli:before{content:"\f427"}.fa-yelp:before{content:"\f1e9"}.fa-cc-visa:before{content:"\f1f0"}.fa-lastfm:before{content:"\f202"}.fa-shopware:before{content:"\f5b5"}.fa-creative-commons-nc:before{content:"\f4e8"}.fa-aws:before{content:"\f375"}.fa-redhat:before{content:"\f7bc"}.fa-yoast:before{content:"\f2b1"}.fa-cloudflare:before{content:"\e07d"}.fa-ups:before{content:"\f7e0"}.fa-pixiv:before{content:"\e640"}.fa-wpexplorer:before{content:"\f2de"}.fa-dyalog:before{content:"\f399"}.fa-bity:before{content:"\f37a"}.fa-stackpath:before{content:"\f842"}.fa-buysellads:before{content:"\f20d"}.fa-first-order:before{content:"\f2b0"}.fa-modx:before{content:"\f285"}.fa-guilded:before{content:"\e07e"}.fa-vnv:before{content:"\f40b"}.fa-js-square:before,.fa-square-js:before{content:"\f3b9"}.fa-microsoft:before{content:"\f3ca"}.fa-qq:before{content:"\f1d6"}.fa-orcid:before{content:"\f8d2"}.fa-java:before{content:"\f4e4"}.fa-invision:before{content:"\f7b0"}.fa-creative-commons-pd-alt:before{content:"\f4ed"}.fa-centercode:before{content:"\f380"}.fa-glide-g:before{content:"\f2a6"}.fa-drupal:before{content:"\f1a9"}.fa-hire-a-helper:before{content:"\f3b0"}.fa-creative-commons-by:before{content:"\f4e7"}.fa-unity:before{content:"\e049"}.fa-whmcs:before{content:"\f40d"}.fa-rocketchat:before{content:"\f3e8"}.fa-vk:before{content:"\f189"}.fa-untappd:before{content:"\f405"}.fa-mailchimp:before{content:"\f59e"}.fa-css3-alt:before{content:"\f38b"}.fa-reddit-square:before,.fa-square-reddit:before{content:"\f1a2"}.fa-vimeo-v:before{content:"\f27d"}.fa-contao:before{content:"\f26d"}.fa-square-font-awesome:before{content:"\e5ad"}.fa-deskpro:before{content:"\f38f"}.fa-brave:before{content:"\e63c"}.fa-sistrix:before{content:"\f3ee"}.fa-instagram-square:before,.fa-square-instagram:before{content:"\e055"}.fa-battle-net:before{content:"\f835"}.fa-the-red-yeti:before{content:"\f69d"}.fa-hacker-news-square:before,.fa
-square-hacker-news:before{content:"\f3af"}.fa-edge:before{content:"\f282"}.fa-threads:before{content:"\e618"}.fa-napster:before{content:"\f3d2"}.fa-snapchat-square:before,.fa-square-snapchat:before{content:"\f2ad"}.fa-google-plus-g:before{content:"\f0d5"}.fa-artstation:before{content:"\f77a"}.fa-markdown:before{content:"\f60f"}.fa-sourcetree:before{content:"\f7d3"}.fa-google-plus:before{content:"\f2b3"}.fa-diaspora:before{content:"\f791"}.fa-foursquare:before{content:"\f180"}.fa-stack-overflow:before{content:"\f16c"}.fa-github-alt:before{content:"\f113"}.fa-phoenix-squadron:before{content:"\f511"}.fa-pagelines:before{content:"\f18c"}.fa-algolia:before{content:"\f36c"}.fa-red-river:before{content:"\f3e3"}.fa-creative-commons-sa:before{content:"\f4ef"}.fa-safari:before{content:"\f267"}.fa-google:before{content:"\f1a0"}.fa-font-awesome-alt:before,.fa-square-font-awesome-stroke:before{content:"\f35c"}.fa-atlassian:before{content:"\f77b"}.fa-linkedin-in:before{content:"\f0e1"}.fa-digital-ocean:before{content:"\f391"}.fa-nimblr:before{content:"\f5a8"}.fa-chromecast:before{content:"\f838"}.fa-evernote:before{content:"\f839"}.fa-hacker-news:before{content:"\f1d4"}.fa-creative-commons-sampling:before{content:"\f4f0"}.fa-adversal:before{content:"\f36a"}.fa-creative-commons:before{content:"\f25e"}.fa-watchman-monitoring:before{content:"\e087"}.fa-fonticons:before{content:"\f280"}.fa-weixin:before{content:"\f1d7"}.fa-shirtsinbulk:before{content:"\f214"}.fa-codepen:before{content:"\f1cb"}.fa-git-alt:before{content:"\f841"}.fa-lyft:before{content:"\f3c3"}.fa-rev:before{content:"\f5b2"}.fa-windows:before{content:"\f17a"}.fa-wizards-of-the-coast:before{content:"\f730"}.fa-square-viadeo:before,.fa-viadeo-square:before{content:"\f2aa"}.fa-meetup:before{content:"\f2e0"}.fa-centos:before{content:"\f789"}.fa-adn:before{content:"\f170"}.fa-cloudsmith:before{content:"\f384"}.fa-opensuse:before{content:"\e62b"}.fa-pied-piper-alt:before{content:"\f1a8"}.fa-dribbble-square:before,.fa-square
-dribbble:before{content:"\f397"}.fa-codiepie:before{content:"\f284"}.fa-node:before{content:"\f419"}.fa-mix:before{content:"\f3cb"}.fa-steam:before{content:"\f1b6"}.fa-cc-apple-pay:before{content:"\f416"}.fa-scribd:before{content:"\f28a"}.fa-debian:before{content:"\e60b"}.fa-openid:before{content:"\f19b"}.fa-instalod:before{content:"\e081"}.fa-expeditedssl:before{content:"\f23e"}.fa-sellcast:before{content:"\f2da"}.fa-square-twitter:before,.fa-twitter-square:before{content:"\f081"}.fa-r-project:before{content:"\f4f7"}.fa-delicious:before{content:"\f1a5"}.fa-freebsd:before{content:"\f3a4"}.fa-vuejs:before{content:"\f41f"}.fa-accusoft:before{content:"\f369"}.fa-ioxhost:before{content:"\f208"}.fa-fonticons-fi:before{content:"\f3a2"}.fa-app-store:before{content:"\f36f"}.fa-cc-mastercard:before{content:"\f1f1"}.fa-itunes-note:before{content:"\f3b5"}.fa-golang:before{content:"\e40f"}.fa-kickstarter:before{content:"\f3bb"}.fa-grav:before{content:"\f2d6"}.fa-weibo:before{content:"\f18a"}.fa-uncharted:before{content:"\e084"}.fa-firstdraft:before{content:"\f3a1"}.fa-square-youtube:before,.fa-youtube-square:before{content:"\f431"}.fa-wikipedia-w:before{content:"\f266"}.fa-rendact:before,.fa-wpressr:before{content:"\f3e4"}.fa-angellist:before{content:"\f209"}.fa-galactic-republic:before{content:"\f50c"}.fa-nfc-directional:before{content:"\e530"}.fa-skype:before{content:"\f17e"}.fa-joget:before{content:"\f3b7"}.fa-fedora:before{content:"\f798"}.fa-stripe-s:before{content:"\f42a"}.fa-meta:before{content:"\e49b"}.fa-laravel:before{content:"\f3bd"}.fa-hotjar:before{content:"\f3b1"}.fa-bluetooth-b:before{content:"\f294"}.fa-square-letterboxd:before{content:"\e62e"}.fa-sticker-mule:before{content:"\f3f7"}.fa-creative-commons-zero:before{content:"\f4f3"}.fa-hips:before{content:"\f452"}.fa-behance:before{content:"\f1b4"}.fa-reddit:before{content:"\f1a1"}.fa-discord:before{content:"\f392"}.fa-chrome:before{content:"\f268"}.fa-app-store-ios:before{content:"\f370"}.fa-cc-discover:before{
content:"\f1f2"}.fa-wpbeginner:before{content:"\f297"}.fa-confluence:before{content:"\f78d"}.fa-shoelace:before{content:"\e60c"}.fa-mdb:before{content:"\f8ca"}.fa-dochub:before{content:"\f394"}.fa-accessible-icon:before{content:"\f368"}.fa-ebay:before{content:"\f4f4"}.fa-amazon:before{content:"\f270"}.fa-unsplash:before{content:"\e07c"}.fa-yarn:before{content:"\f7e3"}.fa-square-steam:before,.fa-steam-square:before{content:"\f1b7"}.fa-500px:before{content:"\f26e"}.fa-square-vimeo:before,.fa-vimeo-square:before{content:"\f194"}.fa-asymmetrik:before{content:"\f372"}.fa-font-awesome-flag:before,.fa-font-awesome-logo-full:before,.fa-font-awesome:before{content:"\f2b4"}.fa-gratipay:before{content:"\f184"}.fa-apple:before{content:"\f179"}.fa-hive:before{content:"\e07f"}.fa-gitkraken:before{content:"\f3a6"}.fa-keybase:before{content:"\f4f5"}.fa-apple-pay:before{content:"\f415"}.fa-padlet:before{content:"\e4a0"}.fa-amazon-pay:before{content:"\f42c"}.fa-github-square:before,.fa-square-github:before{content:"\f092"}.fa-stumbleupon:before{content:"\f1a4"}.fa-fedex:before{content:"\f797"}.fa-phoenix-framework:before{content:"\f3dc"}.fa-shopify:before{content:"\e057"}.fa-neos:before{content:"\f612"}.fa-square-threads:before{content:"\e619"}.fa-hackerrank:before{content:"\f5f7"}.fa-researchgate:before{content:"\f4f8"}.fa-swift:before{content:"\f8e1"}.fa-angular:before{content:"\f420"}.fa-speakap:before{content:"\f3f3"}.fa-angrycreative:before{content:"\f36e"}.fa-y-combinator:before{content:"\f23b"}.fa-empire:before{content:"\f1d1"}.fa-envira:before{content:"\f299"}.fa-google-scholar:before{content:"\e63b"}.fa-gitlab-square:before,.fa-square-gitlab:before{content:"\e5ae"}.fa-studiovinari:before{content:"\f3f8"}.fa-pied-piper:before{content:"\f2ae"}.fa-wordpress:before{content:"\f19a"}.fa-product-hunt:before{content:"\f288"}.fa-firefox:before{content:"\f269"}.fa-linode:before{content:"\f2b8"}.fa-goodreads:before{content:"\f3a8"}.fa-odnoklassniki-square:before,.fa-square-odnoklassnik
i:before{content:"\f264"}.fa-jsfiddle:before{content:"\f1cc"}.fa-sith:before{content:"\f512"}.fa-themeisle:before{content:"\f2b2"}.fa-page4:before{content:"\f3d7"}.fa-hashnode:before{content:"\e499"}.fa-react:before{content:"\f41b"}.fa-cc-paypal:before{content:"\f1f4"}.fa-squarespace:before{content:"\f5be"}.fa-cc-stripe:before{content:"\f1f5"}.fa-creative-commons-share:before{content:"\f4f2"}.fa-bitcoin:before{content:"\f379"}.fa-keycdn:before{content:"\f3ba"}.fa-opera:before{content:"\f26a"}.fa-itch-io:before{content:"\f83a"}.fa-umbraco:before{content:"\f8e8"}.fa-galactic-senate:before{content:"\f50d"}.fa-ubuntu:before{content:"\f7df"}.fa-draft2digital:before{content:"\f396"}.fa-stripe:before{content:"\f429"}.fa-houzz:before{content:"\f27c"}.fa-gg:before{content:"\f260"}.fa-dhl:before{content:"\f790"}.fa-pinterest-square:before,.fa-square-pinterest:before{content:"\f0d3"}.fa-xing:before{content:"\f168"}.fa-blackberry:before{content:"\f37b"}.fa-creative-commons-pd:before{content:"\f4ec"}.fa-playstation:before{content:"\f3df"}.fa-quinscape:before{content:"\f459"}.fa-less:before{content:"\f41d"}.fa-blogger-b:before{content:"\f37d"}.fa-opencart:before{content:"\f23d"}.fa-vine:before{content:"\f1ca"}.fa-signal-messenger:before{content:"\e663"}.fa-paypal:before{content:"\f1ed"}.fa-gitlab:before{content:"\f296"}.fa-typo3:before{content:"\f42b"}.fa-reddit-alien:before{content:"\f281"}.fa-yahoo:before{content:"\f19e"}.fa-dailymotion:before{content:"\e052"}.fa-affiliatetheme:before{content:"\f36b"}.fa-pied-piper-pp:before{content:"\f1a7"}.fa-bootstrap:before{content:"\f836"}.fa-odnoklassniki:before{content:"\f263"}.fa-nfc-symbol:before{content:"\e531"}.fa-mintbit:before{content:"\e62f"}.fa-ethereum:before{content:"\f42e"}.fa-speaker-deck:before{content:"\f83c"}.fa-creative-commons-nc-eu:before{content:"\f4e9"}.fa-patreon:before{content:"\f3d9"}.fa-avianex:before{content:"\f374"}.fa-ello:before{content:"\f5f1"}.fa-gofore:before{content:"\f3a7"}.fa-bimobject:before{content:"\f
378"}.fa-brave-reverse:before{content:"\e63d"}.fa-facebook-f:before{content:"\f39e"}.fa-google-plus-square:before,.fa-square-google-plus:before{content:"\f0d4"}.fa-mandalorian:before{content:"\f50f"}.fa-first-order-alt:before{content:"\f50a"}.fa-osi:before{content:"\f41a"}.fa-google-wallet:before{content:"\f1ee"}.fa-d-and-d-beyond:before{content:"\f6ca"}.fa-periscope:before{content:"\f3da"}.fa-fulcrum:before{content:"\f50b"}.fa-cloudscale:before{content:"\f383"}.fa-forumbee:before{content:"\f211"}.fa-mizuni:before{content:"\f3cc"}.fa-schlix:before{content:"\f3ea"}.fa-square-xing:before,.fa-xing-square:before{content:"\f169"}.fa-bandcamp:before{content:"\f2d5"}.fa-wpforms:before{content:"\f298"}.fa-cloudversify:before{content:"\f385"}.fa-usps:before{content:"\f7e1"}.fa-megaport:before{content:"\f5a3"}.fa-magento:before{content:"\f3c4"}.fa-spotify:before{content:"\f1bc"}.fa-optin-monster:before{content:"\f23c"}.fa-fly:before{content:"\f417"}.fa-aviato:before{content:"\f421"}.fa-itunes:before{content:"\f3b4"}.fa-cuttlefish:before{content:"\f38c"}.fa-blogger:before{content:"\f37c"}.fa-flickr:before{content:"\f16e"}.fa-viber:before{content:"\f409"}.fa-soundcloud:before{content:"\f1be"}.fa-digg:before{content:"\f1a6"}.fa-tencent-weibo:before{content:"\f1d5"}.fa-letterboxd:before{content:"\e62d"}.fa-symfony:before{content:"\f83d"}.fa-maxcdn:before{content:"\f136"}.fa-etsy:before{content:"\f2d7"}.fa-facebook-messenger:before{content:"\f39f"}.fa-audible:before{content:"\f373"}.fa-think-peaks:before{content:"\f731"}.fa-bilibili:before{content:"\e3d9"}.fa-erlang:before{content:"\f39d"}.fa-x-twitter:before{content:"\e61b"}.fa-cotton-bureau:before{content:"\f89e"}.fa-dashcube:before{content:"\f210"}.fa-42-group:before,.fa-innosoft:before{content:"\e080"}.fa-stack-exchange:before{content:"\f18d"}.fa-elementor:before{content:"\f430"}.fa-pied-piper-square:before,.fa-square-pied-piper:before{content:"\e01e"}.fa-creative-commons-nd:before{content:"\f4eb"}.fa-palfed:before{content:"\f
3d8"}.fa-superpowers:before{content:"\f2dd"}.fa-resolving:before{content:"\f3e7"}.fa-xbox:before{content:"\f412"}.fa-searchengin:before{content:"\f3eb"}.fa-tiktok:before{content:"\e07b"}.fa-facebook-square:before,.fa-square-facebook:before{content:"\f082"}.fa-renren:before{content:"\f18b"}.fa-linux:before{content:"\f17c"}.fa-glide:before{content:"\f2a5"}.fa-linkedin:before{content:"\f08c"}.fa-hubspot:before{content:"\f3b2"}.fa-deploydog:before{content:"\f38e"}.fa-twitch:before{content:"\f1e8"}.fa-ravelry:before{content:"\f2d9"}.fa-mixer:before{content:"\e056"}.fa-lastfm-square:before,.fa-square-lastfm:before{content:"\f203"}.fa-vimeo:before{content:"\f40a"}.fa-mendeley:before{content:"\f7b3"}.fa-uniregistry:before{content:"\f404"}.fa-figma:before{content:"\f799"}.fa-creative-commons-remix:before{content:"\f4ee"}.fa-cc-amazon-pay:before{content:"\f42d"}.fa-dropbox:before{content:"\f16b"}.fa-instagram:before{content:"\f16d"}.fa-cmplid:before{content:"\e360"}.fa-upwork:before{content:"\e641"}.fa-facebook:before{content:"\f09a"}.fa-gripfire:before{content:"\f3ac"}.fa-jedi-order:before{content:"\f50e"}.fa-uikit:before{content:"\f403"}.fa-fort-awesome-alt:before{content:"\f3a3"}.fa-phabricator:before{content:"\f3db"}.fa-ussunnah:before{content:"\f407"}.fa-earlybirds:before{content:"\f39a"}.fa-trade-federation:before{content:"\f513"}.fa-autoprefixer:before{content:"\f41c"}.fa-whatsapp:before{content:"\f232"}.fa-slideshare:before{content:"\f1e7"}.fa-google-play:before{content:"\f3ab"}.fa-viadeo:before{content:"\f2a9"}.fa-line:before{content:"\f3c0"}.fa-google-drive:before{content:"\f3aa"}.fa-servicestack:before{content:"\f3ec"}.fa-simplybuilt:before{content:"\f215"}.fa-bitbucket:before{content:"\f171"}.fa-imdb:before{content:"\f2d8"}.fa-deezer:before{content:"\e077"}.fa-raspberry-pi:before{content:"\f7bb"}.fa-jira:before{content:"\f7b1"}.fa-docker:before{content:"\f395"}.fa-screenpal:before{content:"\e570"}.fa-bluetooth:before{content:"\f293"}.fa-gitter:before{content:"\f42
6"}.fa-d-and-d:before{content:"\f38d"}.fa-microblog:before{content:"\e01a"}.fa-cc-diners-club:before{content:"\f24c"}.fa-gg-circle:before{content:"\f261"}.fa-pied-piper-hat:before{content:"\f4e5"}.fa-kickstarter-k:before{content:"\f3bc"}.fa-yandex:before{content:"\f413"}.fa-readme:before{content:"\f4d5"}.fa-html5:before{content:"\f13b"}.fa-sellsy:before{content:"\f213"}.fa-sass:before{content:"\f41e"}.fa-wirsindhandwerk:before,.fa-wsh:before{content:"\e2d0"}.fa-buromobelexperte:before{content:"\f37f"}.fa-salesforce:before{content:"\f83b"}.fa-octopus-deploy:before{content:"\e082"}.fa-medapps:before{content:"\f3c6"}.fa-ns8:before{content:"\f3d5"}.fa-pinterest-p:before{content:"\f231"}.fa-apper:before{content:"\f371"}.fa-fort-awesome:before{content:"\f286"}.fa-waze:before{content:"\f83f"}.fa-cc-jcb:before{content:"\f24b"}.fa-snapchat-ghost:before,.fa-snapchat:before{content:"\f2ab"}.fa-fantasy-flight-games:before{content:"\f6dc"}.fa-rust:before{content:"\e07a"}.fa-wix:before{content:"\f5cf"}.fa-behance-square:before,.fa-square-behance:before{content:"\f1b5"}.fa-supple:before{content:"\f3f9"}.fa-webflow:before{content:"\e65c"}.fa-rebel:before{content:"\f1d0"}.fa-css3:before{content:"\f13c"}.fa-staylinked:before{content:"\f3f5"}.fa-kaggle:before{content:"\f5fa"}.fa-space-awesome:before{content:"\e5ac"}.fa-deviantart:before{content:"\f1bd"}.fa-cpanel:before{content:"\f388"}.fa-goodreads-g:before{content:"\f3a9"}.fa-git-square:before,.fa-square-git:before{content:"\f1d2"}.fa-square-tumblr:before,.fa-tumblr-square:before{content:"\f174"}.fa-trello:before{content:"\f181"}.fa-creative-commons-nc-jp:before{content:"\f4ea"}.fa-get-pocket:before{content:"\f265"}.fa-perbyte:before{content:"\e083"}.fa-grunt:before{content:"\f3ad"}.fa-weebly:before{content:"\f5cc"}.fa-connectdevelop:before{content:"\f20e"}.fa-leanpub:before{content:"\f212"}.fa-black-tie:before{content:"\f27e"}.fa-themeco:before{content:"\f5c6"}.fa-python:before{content:"\f3e2"}.fa-android:before{content:"\f17b"}.fa
-bots:before{content:"\e340"}.fa-free-code-camp:before{content:"\f2c5"}.fa-hornbill:before{content:"\f592"}.fa-js:before{content:"\f3b8"}.fa-ideal:before{content:"\e013"}.fa-git:before{content:"\f1d3"}.fa-dev:before{content:"\f6cc"}.fa-sketch:before{content:"\f7c6"}.fa-yandex-international:before{content:"\f414"}.fa-cc-amex:before{content:"\f1f3"}.fa-uber:before{content:"\f402"}.fa-github:before{content:"\f09b"}.fa-php:before{content:"\f457"}.fa-alipay:before{content:"\f642"}.fa-youtube:before{content:"\f167"}.fa-skyatlas:before{content:"\f216"}.fa-firefox-browser:before{content:"\e007"}.fa-replyd:before{content:"\f3e6"}.fa-suse:before{content:"\f7d6"}.fa-jenkins:before{content:"\f3b6"}.fa-twitter:before{content:"\f099"}.fa-rockrms:before{content:"\f3e9"}.fa-pinterest:before{content:"\f0d2"}.fa-buffer:before{content:"\f837"}.fa-npm:before{content:"\f3d4"}.fa-yammer:before{content:"\f840"}.fa-btc:before{content:"\f15a"}.fa-dribbble:before{content:"\f17d"}.fa-stumbleupon-circle:before{content:"\f1a3"}.fa-internet-explorer:before{content:"\f26b"}.fa-stubber:before{content:"\e5c7"}.fa-telegram-plane:before,.fa-telegram:before{content:"\f2c6"}.fa-old-republic:before{content:"\f510"}.fa-odysee:before{content:"\e5c6"}.fa-square-whatsapp:before,.fa-whatsapp-square:before{content:"\f40c"}.fa-node-js:before{content:"\f3d3"}.fa-edge-legacy:before{content:"\e078"}.fa-slack-hash:before,.fa-slack:before{content:"\f198"}.fa-medrt:before{content:"\f3c8"}.fa-usb:before{content:"\f287"}.fa-tumblr:before{content:"\f173"}.fa-vaadin:before{content:"\f408"}.fa-quora:before{content:"\f2c4"}.fa-square-x-twitter:before{content:"\e61a"}.fa-reacteurope:before{content:"\f75d"}.fa-medium-m:before,.fa-medium:before{content:"\f23a"}.fa-amilia:before{content:"\f36d"}.fa-mixcloud:before{content:"\f289"}.fa-flipboard:before{content:"\f44d"}.fa-viacoin:before{content:"\f237"}.fa-critical-role:before{content:"\f6c9"}.fa-sitrox:before{content:"\e44a"}.fa-discourse:before{content:"\f393"}.fa-joomla:befo
re{content:"\f1aa"}.fa-mastodon:before{content:"\f4f6"}.fa-airbnb:before{content:"\f834"}.fa-wolf-pack-battalion:before{content:"\f514"}.fa-buy-n-large:before{content:"\f8a6"}.fa-gulp:before{content:"\f3ae"}.fa-creative-commons-sampling-plus:before{content:"\f4f1"}.fa-strava:before{content:"\f428"}.fa-ember:before{content:"\f423"}.fa-canadian-maple-leaf:before{content:"\f785"}.fa-teamspeak:before{content:"\f4f9"}.fa-pushed:before{content:"\f3e1"}.fa-wordpress-simple:before{content:"\f411"}.fa-nutritionix:before{content:"\f3d6"}.fa-wodu:before{content:"\e088"}.fa-google-pay:before{content:"\e079"}.fa-intercom:before{content:"\f7af"}.fa-zhihu:before{content:"\f63f"}.fa-korvue:before{content:"\f42f"}.fa-pix:before{content:"\e43a"}.fa-steam-symbol:before{content:"\f3f6"}:host,:root{--fa-font-regular:normal 400 1em/1 "Font Awesome 6 Free"}@font-face{font-family:"Font Awesome 6 Free";font-style:normal;font-weight:400;font-display:block;src:url(../webfonts/fa-regular-400.woff2) format("woff2"),url(../webfonts/fa-regular-400.ttf) format("truetype")}.fa-regular,.far{font-weight:400}:host,:root{--fa-style-family-classic:"Font Awesome 6 Free";--fa-font-solid:normal 900 1em/1 "Font Awesome 6 Free"}@font-face{font-family:"Font Awesome 6 Free";font-style:normal;font-weight:900;font-display:block;src:url(../webfonts/fa-solid-900.woff2) format("woff2"),url(../webfonts/fa-solid-900.ttf) format("truetype")}.fa-solid,.fas{font-weight:900}@font-face{font-family:"Font Awesome 5 Brands";font-display:block;font-weight:400;src:url(../webfonts/fa-brands-400.woff2) format("woff2"),url(../webfonts/fa-brands-400.ttf) format("truetype")}@font-face{font-family:"Font Awesome 5 Free";font-display:block;font-weight:900;src:url(../webfonts/fa-solid-900.woff2) format("woff2"),url(../webfonts/fa-solid-900.ttf) format("truetype")}@font-face{font-family:"Font Awesome 5 Free";font-display:block;font-weight:400;src:url(../webfonts/fa-regular-400.woff2) format("woff2"),url(../webfonts/fa-regular-400.ttf) 
format("truetype")}@font-face{font-family:"FontAwesome";font-display:block;src:url(../webfonts/fa-solid-900.woff2) format("woff2"),url(../webfonts/fa-solid-900.ttf) format("truetype")}@font-face{font-family:"FontAwesome";font-display:block;src:url(../webfonts/fa-brands-400.woff2) format("woff2"),url(../webfonts/fa-brands-400.ttf) format("truetype")}@font-face{font-family:"FontAwesome";font-display:block;src:url(../webfonts/fa-regular-400.woff2) format("woff2"),url(../webfonts/fa-regular-400.ttf) format("truetype");unicode-range:u+f003,u+f006,u+f014,u+f016-f017,u+f01a-f01b,u+f01d,u+f022,u+f03e,u+f044,u+f046,u+f05c-f05d,u+f06e,u+f070,u+f087-f088,u+f08a,u+f094,u+f096-f097,u+f09d,u+f0a0,u+f0a2,u+f0a4-f0a7,u+f0c5,u+f0c7,u+f0e5-f0e6,u+f0eb,u+f0f6-f0f8,u+f10c,u+f114-f115,u+f118-f11a,u+f11c-f11d,u+f133,u+f147,u+f14e,u+f150-f152,u+f185-f186,u+f18e,u+f190-f192,u+f196,u+f1c1-f1c9,u+f1d9,u+f1db,u+f1e3,u+f1ea,u+f1f7,u+f1f9,u+f20a,u+f247-f248,u+f24a,u+f24d,u+f255-f25b,u+f25d,u+f271-f274,u+f278,u+f27b,u+f28c,u+f28e,u+f29c,u+f2b5,u+f2b7,u+f2ba,u+f2bc,u+f2be,u+f2c0-f2c1,u+f2c3,u+f2d0,u+f2d2,u+f2d4,u+f2dc}@font-face{font-family:"FontAwesome";font-display:block;src:url(../webfonts/fa-v4compatibility.woff2) format("woff2"),url(../webfonts/fa-v4compatibility.ttf) format("truetype");unicode-range:u+f041,u+f047,u+f065-f066,u+f07d-f07e,u+f080,u+f08b,u+f08e,u+f090,u+f09a,u+f0ac,u+f0ae,u+f0b2,u+f0d0,u+f0d6,u+f0e4,u+f0ec,u+f10a-f10b,u+f123,u+f13e,u+f148-f149,u+f14c,u+f156,u+f15e,u+f160-f161,u+f163,u+f175-f178,u+f195,u+f1f8,u+f219,u+f27a} \ No newline at end of file diff --git a/shared/static/fontawesome/css/v4-shims.min.css b/shared/static/fontawesome/css/v4-shims.min.css new file mode 100644 index 0000000..13fa437 --- /dev/null +++ b/shared/static/fontawesome/css/v4-shims.min.css @@ -0,0 +1,6 @@ +/*! 
+ * Font Awesome Free 6.5.1 by @fontawesome - https://fontawesome.com + * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) + * Copyright 2023 Fonticons, Inc. + */ +.fa.fa-glass:before{content:"\f000"}.fa.fa-envelope-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-envelope-o:before{content:"\f0e0"}.fa.fa-star-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-star-o:before{content:"\f005"}.fa.fa-close:before,.fa.fa-remove:before{content:"\f00d"}.fa.fa-gear:before{content:"\f013"}.fa.fa-trash-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-trash-o:before{content:"\f2ed"}.fa.fa-home:before{content:"\f015"}.fa.fa-file-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-o:before{content:"\f15b"}.fa.fa-clock-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-clock-o:before{content:"\f017"}.fa.fa-arrow-circle-o-down{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-arrow-circle-o-down:before{content:"\f358"}.fa.fa-arrow-circle-o-up{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-arrow-circle-o-up:before{content:"\f35b"}.fa.fa-play-circle-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-play-circle-o:before{content:"\f144"}.fa.fa-repeat:before,.fa.fa-rotate-right:before{content:"\f01e"}.fa.fa-refresh:before{content:"\f021"}.fa.fa-list-alt{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-list-alt:before{content:"\f022"}.fa.fa-dedent:before{content:"\f03b"}.fa.fa-video-camera:before{content:"\f03d"}.fa.fa-picture-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-picture-o:before{content:"\f03e"}.fa.fa-photo{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-photo:before{content:"\f03e"}.fa.fa-image{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-image:before{content:"\f03e"}.fa.fa-map-marker:before{content:"\f3c5"}.fa.fa-pencil-square-o{font-family:"Font Awesome 6 
Free";font-weight:400}.fa.fa-pencil-square-o:before{content:"\f044"}.fa.fa-edit{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-edit:before{content:"\f044"}.fa.fa-share-square-o:before{content:"\f14d"}.fa.fa-check-square-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-check-square-o:before{content:"\f14a"}.fa.fa-arrows:before{content:"\f0b2"}.fa.fa-times-circle-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-times-circle-o:before{content:"\f057"}.fa.fa-check-circle-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-check-circle-o:before{content:"\f058"}.fa.fa-mail-forward:before{content:"\f064"}.fa.fa-expand:before{content:"\f424"}.fa.fa-compress:before{content:"\f422"}.fa.fa-eye,.fa.fa-eye-slash{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-warning:before{content:"\f071"}.fa.fa-calendar:before{content:"\f073"}.fa.fa-arrows-v:before{content:"\f338"}.fa.fa-arrows-h:before{content:"\f337"}.fa.fa-bar-chart-o:before,.fa.fa-bar-chart:before{content:"\e0e3"}.fa.fa-twitter-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-twitter-square:before{content:"\f081"}.fa.fa-facebook-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-facebook-square:before{content:"\f082"}.fa.fa-gears:before{content:"\f085"}.fa.fa-thumbs-o-up{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-thumbs-o-up:before{content:"\f164"}.fa.fa-thumbs-o-down{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-thumbs-o-down:before{content:"\f165"}.fa.fa-heart-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-heart-o:before{content:"\f004"}.fa.fa-sign-out:before{content:"\f2f5"}.fa.fa-linkedin-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-linkedin-square:before{content:"\f08c"}.fa.fa-thumb-tack:before{content:"\f08d"}.fa.fa-external-link:before{content:"\f35d"}.fa.fa-sign-in:before{content:"\f2f6"}.fa.fa-github-square{font-family:"Font Awesome 6 
Brands";font-weight:400}.fa.fa-github-square:before{content:"\f092"}.fa.fa-lemon-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-lemon-o:before{content:"\f094"}.fa.fa-square-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-square-o:before{content:"\f0c8"}.fa.fa-bookmark-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-bookmark-o:before{content:"\f02e"}.fa.fa-facebook,.fa.fa-twitter{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-facebook:before{content:"\f39e"}.fa.fa-facebook-f{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-facebook-f:before{content:"\f39e"}.fa.fa-github{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-credit-card{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-feed:before{content:"\f09e"}.fa.fa-hdd-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hdd-o:before{content:"\f0a0"}.fa.fa-hand-o-right{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-o-right:before{content:"\f0a4"}.fa.fa-hand-o-left{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-o-left:before{content:"\f0a5"}.fa.fa-hand-o-up{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-o-up:before{content:"\f0a6"}.fa.fa-hand-o-down{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-o-down:before{content:"\f0a7"}.fa.fa-globe:before{content:"\f57d"}.fa.fa-tasks:before{content:"\f828"}.fa.fa-arrows-alt:before{content:"\f31e"}.fa.fa-group:before{content:"\f0c0"}.fa.fa-chain:before{content:"\f0c1"}.fa.fa-cut:before{content:"\f0c4"}.fa.fa-files-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-files-o:before{content:"\f0c5"}.fa.fa-floppy-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-floppy-o:before{content:"\f0c7"}.fa.fa-save{font-family:"Font Awesome 6 
Free";font-weight:400}.fa.fa-save:before{content:"\f0c7"}.fa.fa-navicon:before,.fa.fa-reorder:before{content:"\f0c9"}.fa.fa-magic:before{content:"\e2ca"}.fa.fa-pinterest,.fa.fa-pinterest-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-pinterest-square:before{content:"\f0d3"}.fa.fa-google-plus-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-google-plus-square:before{content:"\f0d4"}.fa.fa-google-plus{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-google-plus:before{content:"\f0d5"}.fa.fa-money:before{content:"\f3d1"}.fa.fa-unsorted:before{content:"\f0dc"}.fa.fa-sort-desc:before{content:"\f0dd"}.fa.fa-sort-asc:before{content:"\f0de"}.fa.fa-linkedin{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-linkedin:before{content:"\f0e1"}.fa.fa-rotate-left:before{content:"\f0e2"}.fa.fa-legal:before{content:"\f0e3"}.fa.fa-dashboard:before,.fa.fa-tachometer:before{content:"\f625"}.fa.fa-comment-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-comment-o:before{content:"\f075"}.fa.fa-comments-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-comments-o:before{content:"\f086"}.fa.fa-flash:before{content:"\f0e7"}.fa.fa-clipboard:before{content:"\f0ea"}.fa.fa-lightbulb-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-lightbulb-o:before{content:"\f0eb"}.fa.fa-exchange:before{content:"\f362"}.fa.fa-cloud-download:before{content:"\f0ed"}.fa.fa-cloud-upload:before{content:"\f0ee"}.fa.fa-bell-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-bell-o:before{content:"\f0f3"}.fa.fa-cutlery:before{content:"\f2e7"}.fa.fa-file-text-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-text-o:before{content:"\f15c"}.fa.fa-building-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-building-o:before{content:"\f1ad"}.fa.fa-hospital-o{font-family:"Font Awesome 6 
Free";font-weight:400}.fa.fa-hospital-o:before{content:"\f0f8"}.fa.fa-tablet:before{content:"\f3fa"}.fa.fa-mobile-phone:before,.fa.fa-mobile:before{content:"\f3cd"}.fa.fa-circle-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-circle-o:before{content:"\f111"}.fa.fa-mail-reply:before{content:"\f3e5"}.fa.fa-github-alt{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-folder-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-folder-o:before{content:"\f07b"}.fa.fa-folder-open-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-folder-open-o:before{content:"\f07c"}.fa.fa-smile-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-smile-o:before{content:"\f118"}.fa.fa-frown-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-frown-o:before{content:"\f119"}.fa.fa-meh-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-meh-o:before{content:"\f11a"}.fa.fa-keyboard-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-keyboard-o:before{content:"\f11c"}.fa.fa-flag-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-flag-o:before{content:"\f024"}.fa.fa-mail-reply-all:before{content:"\f122"}.fa.fa-star-half-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-star-half-o:before{content:"\f5c0"}.fa.fa-star-half-empty{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-star-half-empty:before{content:"\f5c0"}.fa.fa-star-half-full{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-star-half-full:before{content:"\f5c0"}.fa.fa-code-fork:before{content:"\f126"}.fa.fa-chain-broken:before,.fa.fa-unlink:before{content:"\f127"}.fa.fa-calendar-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-calendar-o:before{content:"\f133"}.fa.fa-css3,.fa.fa-html5,.fa.fa-maxcdn{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-unlock-alt:before{content:"\f09c"}.fa.fa-minus-square-o{font-family:"Font Awesome 6 
Free";font-weight:400}.fa.fa-minus-square-o:before{content:"\f146"}.fa.fa-level-up:before{content:"\f3bf"}.fa.fa-level-down:before{content:"\f3be"}.fa.fa-pencil-square:before{content:"\f14b"}.fa.fa-external-link-square:before{content:"\f360"}.fa.fa-compass{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-caret-square-o-down{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-caret-square-o-down:before{content:"\f150"}.fa.fa-toggle-down{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-toggle-down:before{content:"\f150"}.fa.fa-caret-square-o-up{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-caret-square-o-up:before{content:"\f151"}.fa.fa-toggle-up{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-toggle-up:before{content:"\f151"}.fa.fa-caret-square-o-right{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-caret-square-o-right:before{content:"\f152"}.fa.fa-toggle-right{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-toggle-right:before{content:"\f152"}.fa.fa-eur:before,.fa.fa-euro:before{content:"\f153"}.fa.fa-gbp:before{content:"\f154"}.fa.fa-dollar:before,.fa.fa-usd:before{content:"\24"}.fa.fa-inr:before,.fa.fa-rupee:before{content:"\e1bc"}.fa.fa-cny:before,.fa.fa-jpy:before,.fa.fa-rmb:before,.fa.fa-yen:before{content:"\f157"}.fa.fa-rouble:before,.fa.fa-rub:before,.fa.fa-ruble:before{content:"\f158"}.fa.fa-krw:before,.fa.fa-won:before{content:"\f159"}.fa.fa-bitcoin,.fa.fa-btc{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-bitcoin:before{content:"\f15a"}.fa.fa-file-text:before{content:"\f15c"}.fa.fa-sort-alpha-asc:before{content:"\f15d"}.fa.fa-sort-alpha-desc:before{content:"\f881"}.fa.fa-sort-amount-asc:before{content:"\f884"}.fa.fa-sort-amount-desc:before{content:"\f160"}.fa.fa-sort-numeric-asc:before{content:"\f162"}.fa.fa-sort-numeric-desc:before{content:"\f886"}.fa.fa-youtube-square{font-family:"Font Awesome 6 
Brands";font-weight:400}.fa.fa-youtube-square:before{content:"\f431"}.fa.fa-xing,.fa.fa-xing-square,.fa.fa-youtube{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-xing-square:before{content:"\f169"}.fa.fa-youtube-play{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-youtube-play:before{content:"\f167"}.fa.fa-adn,.fa.fa-bitbucket,.fa.fa-bitbucket-square,.fa.fa-dropbox,.fa.fa-flickr,.fa.fa-instagram,.fa.fa-stack-overflow{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-bitbucket-square:before{content:"\f171"}.fa.fa-tumblr,.fa.fa-tumblr-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-tumblr-square:before{content:"\f174"}.fa.fa-long-arrow-down:before{content:"\f309"}.fa.fa-long-arrow-up:before{content:"\f30c"}.fa.fa-long-arrow-left:before{content:"\f30a"}.fa.fa-long-arrow-right:before{content:"\f30b"}.fa.fa-android,.fa.fa-apple,.fa.fa-dribbble,.fa.fa-foursquare,.fa.fa-gittip,.fa.fa-gratipay,.fa.fa-linux,.fa.fa-skype,.fa.fa-trello,.fa.fa-windows{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-gittip:before{content:"\f184"}.fa.fa-sun-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-sun-o:before{content:"\f185"}.fa.fa-moon-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-moon-o:before{content:"\f186"}.fa.fa-pagelines,.fa.fa-renren,.fa.fa-stack-exchange,.fa.fa-vk,.fa.fa-weibo{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-arrow-circle-o-right{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-arrow-circle-o-right:before{content:"\f35a"}.fa.fa-arrow-circle-o-left{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-arrow-circle-o-left:before{content:"\f359"}.fa.fa-caret-square-o-left{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-caret-square-o-left:before{content:"\f191"}.fa.fa-toggle-left{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-toggle-left:before{content:"\f191"}.fa.fa-dot-circle-o{font-family:"Font Awesome 6 
Free";font-weight:400}.fa.fa-dot-circle-o:before{content:"\f192"}.fa.fa-vimeo-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-vimeo-square:before{content:"\f194"}.fa.fa-try:before,.fa.fa-turkish-lira:before{content:"\e2bb"}.fa.fa-plus-square-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-plus-square-o:before{content:"\f0fe"}.fa.fa-openid,.fa.fa-slack,.fa.fa-wordpress{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-bank:before,.fa.fa-institution:before{content:"\f19c"}.fa.fa-mortar-board:before{content:"\f19d"}.fa.fa-google,.fa.fa-reddit,.fa.fa-reddit-square,.fa.fa-yahoo{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-reddit-square:before{content:"\f1a2"}.fa.fa-behance,.fa.fa-behance-square,.fa.fa-delicious,.fa.fa-digg,.fa.fa-drupal,.fa.fa-joomla,.fa.fa-pied-piper-alt,.fa.fa-pied-piper-pp,.fa.fa-stumbleupon,.fa.fa-stumbleupon-circle{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-behance-square:before{content:"\f1b5"}.fa.fa-steam,.fa.fa-steam-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-steam-square:before{content:"\f1b7"}.fa.fa-automobile:before{content:"\f1b9"}.fa.fa-cab:before{content:"\f1ba"}.fa.fa-deviantart,.fa.fa-soundcloud,.fa.fa-spotify{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-file-pdf-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-pdf-o:before{content:"\f1c1"}.fa.fa-file-word-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-word-o:before{content:"\f1c2"}.fa.fa-file-excel-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-excel-o:before{content:"\f1c3"}.fa.fa-file-powerpoint-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-powerpoint-o:before{content:"\f1c4"}.fa.fa-file-image-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-image-o:before{content:"\f1c5"}.fa.fa-file-photo-o{font-family:"Font Awesome 6 
Free";font-weight:400}.fa.fa-file-photo-o:before{content:"\f1c5"}.fa.fa-file-picture-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-picture-o:before{content:"\f1c5"}.fa.fa-file-archive-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-archive-o:before{content:"\f1c6"}.fa.fa-file-zip-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-zip-o:before{content:"\f1c6"}.fa.fa-file-audio-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-audio-o:before{content:"\f1c7"}.fa.fa-file-sound-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-sound-o:before{content:"\f1c7"}.fa.fa-file-video-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-video-o:before{content:"\f1c8"}.fa.fa-file-movie-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-movie-o:before{content:"\f1c8"}.fa.fa-file-code-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-file-code-o:before{content:"\f1c9"}.fa.fa-codepen,.fa.fa-jsfiddle,.fa.fa-vine{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-life-bouy:before,.fa.fa-life-buoy:before,.fa.fa-life-saver:before,.fa.fa-support:before{content:"\f1cd"}.fa.fa-circle-o-notch:before{content:"\f1ce"}.fa.fa-ra,.fa.fa-rebel{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-ra:before{content:"\f1d0"}.fa.fa-resistance{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-resistance:before{content:"\f1d0"}.fa.fa-empire,.fa.fa-ge{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-ge:before{content:"\f1d1"}.fa.fa-git-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-git-square:before{content:"\f1d2"}.fa.fa-git,.fa.fa-hacker-news,.fa.fa-y-combinator-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-y-combinator-square:before{content:"\f1d4"}.fa.fa-yc-square{font-family:"Font Awesome 6 
Brands";font-weight:400}.fa.fa-yc-square:before{content:"\f1d4"}.fa.fa-qq,.fa.fa-tencent-weibo,.fa.fa-wechat,.fa.fa-weixin{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-wechat:before{content:"\f1d7"}.fa.fa-send:before{content:"\f1d8"}.fa.fa-paper-plane-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-paper-plane-o:before{content:"\f1d8"}.fa.fa-send-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-send-o:before{content:"\f1d8"}.fa.fa-circle-thin{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-circle-thin:before{content:"\f111"}.fa.fa-header:before{content:"\f1dc"}.fa.fa-futbol-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-futbol-o:before{content:"\f1e3"}.fa.fa-soccer-ball-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-soccer-ball-o:before{content:"\f1e3"}.fa.fa-slideshare,.fa.fa-twitch,.fa.fa-yelp{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-newspaper-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-newspaper-o:before{content:"\f1ea"}.fa.fa-cc-amex,.fa.fa-cc-discover,.fa.fa-cc-mastercard,.fa.fa-cc-paypal,.fa.fa-cc-stripe,.fa.fa-cc-visa,.fa.fa-google-wallet,.fa.fa-paypal{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-bell-slash-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-bell-slash-o:before{content:"\f1f6"}.fa.fa-trash:before{content:"\f2ed"}.fa.fa-copyright{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-eyedropper:before{content:"\f1fb"}.fa.fa-area-chart:before{content:"\f1fe"}.fa.fa-pie-chart:before{content:"\f200"}.fa.fa-line-chart:before{content:"\f201"}.fa.fa-lastfm,.fa.fa-lastfm-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-lastfm-square:before{content:"\f203"}.fa.fa-angellist,.fa.fa-ioxhost{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-cc{font-family:"Font Awesome 6 
Free";font-weight:400}.fa.fa-cc:before{content:"\f20a"}.fa.fa-ils:before,.fa.fa-shekel:before,.fa.fa-sheqel:before{content:"\f20b"}.fa.fa-buysellads,.fa.fa-connectdevelop,.fa.fa-dashcube,.fa.fa-forumbee,.fa.fa-leanpub,.fa.fa-sellsy,.fa.fa-shirtsinbulk,.fa.fa-simplybuilt,.fa.fa-skyatlas{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-diamond{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-diamond:before{content:"\f3a5"}.fa.fa-intersex:before,.fa.fa-transgender:before{content:"\f224"}.fa.fa-transgender-alt:before{content:"\f225"}.fa.fa-facebook-official{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-facebook-official:before{content:"\f09a"}.fa.fa-pinterest-p,.fa.fa-whatsapp{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-hotel:before{content:"\f236"}.fa.fa-medium,.fa.fa-viacoin,.fa.fa-y-combinator,.fa.fa-yc{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-yc:before{content:"\f23b"}.fa.fa-expeditedssl,.fa.fa-opencart,.fa.fa-optin-monster{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-battery-4:before,.fa.fa-battery:before{content:"\f240"}.fa.fa-battery-3:before{content:"\f241"}.fa.fa-battery-2:before{content:"\f242"}.fa.fa-battery-1:before{content:"\f243"}.fa.fa-battery-0:before{content:"\f244"}.fa.fa-object-group,.fa.fa-object-ungroup,.fa.fa-sticky-note-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-sticky-note-o:before{content:"\f249"}.fa.fa-cc-diners-club,.fa.fa-cc-jcb{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-clone{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hourglass-o:before{content:"\f254"}.fa.fa-hourglass-1:before{content:"\f251"}.fa.fa-hourglass-2:before{content:"\f252"}.fa.fa-hourglass-3:before{content:"\f253"}.fa.fa-hand-rock-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-rock-o:before{content:"\f255"}.fa.fa-hand-grab-o{font-family:"Font Awesome 6 
Free";font-weight:400}.fa.fa-hand-grab-o:before{content:"\f255"}.fa.fa-hand-paper-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-paper-o:before{content:"\f256"}.fa.fa-hand-stop-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-stop-o:before{content:"\f256"}.fa.fa-hand-scissors-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-scissors-o:before{content:"\f257"}.fa.fa-hand-lizard-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-lizard-o:before{content:"\f258"}.fa.fa-hand-spock-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-spock-o:before{content:"\f259"}.fa.fa-hand-pointer-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-pointer-o:before{content:"\f25a"}.fa.fa-hand-peace-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-hand-peace-o:before{content:"\f25b"}.fa.fa-registered{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-creative-commons,.fa.fa-gg,.fa.fa-gg-circle,.fa.fa-odnoklassniki,.fa.fa-odnoklassniki-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-odnoklassniki-square:before{content:"\f264"}.fa.fa-chrome,.fa.fa-firefox,.fa.fa-get-pocket,.fa.fa-internet-explorer,.fa.fa-opera,.fa.fa-safari,.fa.fa-wikipedia-w{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-television:before{content:"\f26c"}.fa.fa-500px,.fa.fa-amazon,.fa.fa-contao{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-calendar-plus-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-calendar-plus-o:before{content:"\f271"}.fa.fa-calendar-minus-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-calendar-minus-o:before{content:"\f272"}.fa.fa-calendar-times-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-calendar-times-o:before{content:"\f273"}.fa.fa-calendar-check-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-calendar-check-o:before{content:"\f274"}.fa.fa-map-o{font-family:"Font Awesome 6 
Free";font-weight:400}.fa.fa-map-o:before{content:"\f279"}.fa.fa-commenting:before{content:"\f4ad"}.fa.fa-commenting-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-commenting-o:before{content:"\f4ad"}.fa.fa-houzz,.fa.fa-vimeo{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-vimeo:before{content:"\f27d"}.fa.fa-black-tie,.fa.fa-edge,.fa.fa-fonticons,.fa.fa-reddit-alien{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-credit-card-alt:before{content:"\f09d"}.fa.fa-codiepie,.fa.fa-fort-awesome,.fa.fa-mixcloud,.fa.fa-modx,.fa.fa-product-hunt,.fa.fa-scribd,.fa.fa-usb{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-pause-circle-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-pause-circle-o:before{content:"\f28b"}.fa.fa-stop-circle-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-stop-circle-o:before{content:"\f28d"}.fa.fa-bluetooth,.fa.fa-bluetooth-b,.fa.fa-envira,.fa.fa-gitlab,.fa.fa-wheelchair-alt,.fa.fa-wpbeginner,.fa.fa-wpforms{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-wheelchair-alt:before{content:"\f368"}.fa.fa-question-circle-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-question-circle-o:before{content:"\f059"}.fa.fa-volume-control-phone:before{content:"\f2a0"}.fa.fa-asl-interpreting:before{content:"\f2a3"}.fa.fa-deafness:before,.fa.fa-hard-of-hearing:before{content:"\f2a4"}.fa.fa-glide,.fa.fa-glide-g{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-signing:before{content:"\f2a7"}.fa.fa-viadeo,.fa.fa-viadeo-square{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-viadeo-square:before{content:"\f2aa"}.fa.fa-snapchat,.fa.fa-snapchat-ghost{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-snapchat-ghost:before{content:"\f2ab"}.fa.fa-snapchat-square{font-family:"Font Awesome 6 
Brands";font-weight:400}.fa.fa-snapchat-square:before{content:"\f2ad"}.fa.fa-first-order,.fa.fa-google-plus-official,.fa.fa-pied-piper,.fa.fa-themeisle,.fa.fa-yoast{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-google-plus-official:before{content:"\f2b3"}.fa.fa-google-plus-circle{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-google-plus-circle:before{content:"\f2b3"}.fa.fa-fa,.fa.fa-font-awesome{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-fa:before{content:"\f2b4"}.fa.fa-handshake-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-handshake-o:before{content:"\f2b5"}.fa.fa-envelope-open-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-envelope-open-o:before{content:"\f2b6"}.fa.fa-linode{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-address-book-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-address-book-o:before{content:"\f2b9"}.fa.fa-vcard:before{content:"\f2bb"}.fa.fa-address-card-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-address-card-o:before{content:"\f2bb"}.fa.fa-vcard-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-vcard-o:before{content:"\f2bb"}.fa.fa-user-circle-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-user-circle-o:before{content:"\f2bd"}.fa.fa-user-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-user-o:before{content:"\f007"}.fa.fa-id-badge{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-drivers-license:before{content:"\f2c2"}.fa.fa-id-card-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-id-card-o:before{content:"\f2c2"}.fa.fa-drivers-license-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-drivers-license-o:before{content:"\f2c2"}.fa.fa-free-code-camp,.fa.fa-quora,.fa.fa-telegram{font-family:"Font Awesome 6 
Brands";font-weight:400}.fa.fa-thermometer-4:before,.fa.fa-thermometer:before{content:"\f2c7"}.fa.fa-thermometer-3:before{content:"\f2c8"}.fa.fa-thermometer-2:before{content:"\f2c9"}.fa.fa-thermometer-1:before{content:"\f2ca"}.fa.fa-thermometer-0:before{content:"\f2cb"}.fa.fa-bathtub:before,.fa.fa-s15:before{content:"\f2cd"}.fa.fa-window-maximize,.fa.fa-window-restore{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-times-rectangle:before{content:"\f410"}.fa.fa-window-close-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-window-close-o:before{content:"\f410"}.fa.fa-times-rectangle-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-times-rectangle-o:before{content:"\f410"}.fa.fa-bandcamp,.fa.fa-eercast,.fa.fa-etsy,.fa.fa-grav,.fa.fa-imdb,.fa.fa-ravelry{font-family:"Font Awesome 6 Brands";font-weight:400}.fa.fa-eercast:before{content:"\f2da"}.fa.fa-snowflake-o{font-family:"Font Awesome 6 Free";font-weight:400}.fa.fa-snowflake-o:before{content:"\f2dc"}.fa.fa-meetup,.fa.fa-superpowers,.fa.fa-wpexplorer{font-family:"Font Awesome 6 Brands";font-weight:400} \ No newline at end of file diff --git a/shared/static/fontawesome/webfonts/fa-brands-400.ttf b/shared/static/fontawesome/webfonts/fa-brands-400.ttf new file mode 100644 index 0000000..5efb1d4 Binary files /dev/null and b/shared/static/fontawesome/webfonts/fa-brands-400.ttf differ diff --git a/shared/static/fontawesome/webfonts/fa-brands-400.woff2 b/shared/static/fontawesome/webfonts/fa-brands-400.woff2 new file mode 100644 index 0000000..36fbda7 Binary files /dev/null and b/shared/static/fontawesome/webfonts/fa-brands-400.woff2 differ diff --git a/shared/static/fontawesome/webfonts/fa-regular-400.ttf b/shared/static/fontawesome/webfonts/fa-regular-400.ttf new file mode 100644 index 0000000..838b4e2 Binary files /dev/null and b/shared/static/fontawesome/webfonts/fa-regular-400.ttf differ diff --git a/shared/static/fontawesome/webfonts/fa-regular-400.woff2 
b/shared/static/fontawesome/webfonts/fa-regular-400.woff2 new file mode 100644 index 0000000..b6cabba Binary files /dev/null and b/shared/static/fontawesome/webfonts/fa-regular-400.woff2 differ diff --git a/shared/static/fontawesome/webfonts/fa-solid-900.ttf b/shared/static/fontawesome/webfonts/fa-solid-900.ttf new file mode 100644 index 0000000..ec24749 Binary files /dev/null and b/shared/static/fontawesome/webfonts/fa-solid-900.ttf differ diff --git a/shared/static/fontawesome/webfonts/fa-solid-900.woff2 b/shared/static/fontawesome/webfonts/fa-solid-900.woff2 new file mode 100644 index 0000000..824d518 Binary files /dev/null and b/shared/static/fontawesome/webfonts/fa-solid-900.woff2 differ diff --git a/shared/static/fontawesome/webfonts/fa-v4compatibility.ttf b/shared/static/fontawesome/webfonts/fa-v4compatibility.ttf new file mode 100644 index 0000000..b175aa8 Binary files /dev/null and b/shared/static/fontawesome/webfonts/fa-v4compatibility.ttf differ diff --git a/shared/static/fontawesome/webfonts/fa-v4compatibility.woff2 b/shared/static/fontawesome/webfonts/fa-v4compatibility.woff2 new file mode 100644 index 0000000..e09b5a5 Binary files /dev/null and b/shared/static/fontawesome/webfonts/fa-v4compatibility.woff2 differ diff --git a/shared/static/img/filter.svg b/shared/static/img/filter.svg new file mode 100644 index 0000000..f8b6af6 --- /dev/null +++ b/shared/static/img/filter.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/shared/static/img/logo.jpg b/shared/static/img/logo.jpg new file mode 100644 index 0000000..edac717 Binary files /dev/null and b/shared/static/img/logo.jpg differ diff --git a/shared/static/img/search.svg b/shared/static/img/search.svg new file mode 100644 index 0000000..648171e --- /dev/null +++ b/shared/static/img/search.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/shared/static/labels/_blank.svg b/shared/static/labels/_blank.svg new file mode 100644 index 0000000..9d600b1 --- /dev/null +++ 
b/shared/static/labels/_blank.svg @@ -0,0 +1,17 @@ + + Offer ribbon (top-right) + + + + + + + + NEW + + + diff --git a/shared/static/labels/new.svg b/shared/static/labels/new.svg new file mode 100644 index 0000000..ffa7f27 --- /dev/null +++ b/shared/static/labels/new.svg @@ -0,0 +1,17 @@ + + Offer ribbon (top-right) + + + + + + + + NEW + + + diff --git a/shared/static/labels/offer.svg b/shared/static/labels/offer.svg new file mode 100644 index 0000000..d4752e5 --- /dev/null +++ b/shared/static/labels/offer.svg @@ -0,0 +1,19 @@ + + Offer ribbon + + + + + + + + + + OFFER + + + diff --git a/shared/static/nav-labels/new.svg b/shared/static/nav-labels/new.svg new file mode 100644 index 0000000..04c43c3 --- /dev/null +++ b/shared/static/nav-labels/new.svg @@ -0,0 +1,14 @@ + + New + + + + + + NEW + + diff --git a/shared/static/nav-labels/offer.svg b/shared/static/nav-labels/offer.svg new file mode 100644 index 0000000..41b2a37 --- /dev/null +++ b/shared/static/nav-labels/offer.svg @@ -0,0 +1,16 @@ + + + + Offer + + + + + + OFFER + + diff --git a/shared/static/order/a-z.svg b/shared/static/order/a-z.svg new file mode 100644 index 0000000..c25cfb9 --- /dev/null +++ b/shared/static/order/a-z.svg @@ -0,0 +1,10 @@ + + + + + + A–Z + diff --git a/shared/static/order/h-l.svg b/shared/static/order/h-l.svg new file mode 100644 index 0000000..c487d57 --- /dev/null +++ b/shared/static/order/h-l.svg @@ -0,0 +1,10 @@ + + + + + + £ ↓ + diff --git a/shared/static/order/l-h.svg b/shared/static/order/l-h.svg new file mode 100644 index 0000000..2bcf700 --- /dev/null +++ b/shared/static/order/l-h.svg @@ -0,0 +1,10 @@ + + + + + + £ ↑ + diff --git a/shared/static/order/z-a.svg b/shared/static/order/z-a.svg new file mode 100644 index 0000000..f544a32 --- /dev/null +++ b/shared/static/order/z-a.svg @@ -0,0 +1,10 @@ + + + + + + Z-A + diff --git a/shared/static/scripts/body.js b/shared/static/scripts/body.js new file mode 100644 index 0000000..a7dad81 --- /dev/null +++ 
b/shared/static/scripts/body.js @@ -0,0 +1,822 @@ +// ============================================================================ +// 1. Mobile navigation toggle +// - Handles opening/closing the mobile nav panel +// - Updates ARIA attributes for accessibility +// - Closes panel when a link inside it is clicked +// ============================================================================ + +(function () { + const btn = document.getElementById('nav-toggle'); + const panel = document.getElementById('mobile-nav'); + if (!btn || !panel) return; // No mobile nav in this layout, abort + + btn.addEventListener('click', () => { + // Toggle the "hidden" class on the panel. + // classList.toggle returns true if the class is present AFTER the call. + const isHidden = panel.classList.toggle('hidden'); + const expanded = !isHidden; // aria-expanded = true when the panel is visible + + btn.setAttribute('aria-expanded', String(expanded)); + btn.setAttribute('aria-label', expanded ? 'Close menu' : 'Open menu'); + }); + + // Close panel when clicking any link inside the mobile nav + panel.addEventListener('click', (e) => { + const a = e.target.closest('a'); + if (!a) return; + + panel.classList.add('hidden'); + btn.setAttribute('aria-expanded', 'false'); + btn.setAttribute('aria-label', 'Open menu'); + }); +})(); + + +// ============================================================================ +// 2. Image gallery +// - Supports multiple galleries via [data-gallery-root] +// - Thumbnail navigation, prev/next arrows, keyboard arrows, touch swipe +// - HTMX-aware: runs on initial load and after HTMX swaps +// ============================================================================ + +(() => { + /** + * Initialize any galleries found within a given DOM subtree. + * @param {ParentNode} root - Root element to search in (defaults to document). 
+ */ + function initGallery(root) { + if (!root) return; + + // Find all nested gallery roots + const galleries = root.querySelectorAll('[data-gallery-root]'); + + // If root itself is a gallery and no nested galleries exist, + // initialize just the root. + if (!galleries.length && root.matches?.('[data-gallery-root]')) { + initOneGallery(root); + return; + } + + galleries.forEach(initOneGallery); + } + + /** + * Initialize a single gallery instance. + * This attaches handlers only once, even if HTMX re-inserts the fragment. + * @param {Element} root - Element with [data-gallery-root]. + */ + function initOneGallery(root) { + // Prevent double-initialization (HTMX may re-insert the same fragment) + if (root.dataset.galleryInitialized === 'true') return; + root.dataset.galleryInitialized = 'true'; + + let index = 0; + + // Collect all image URLs from [data-image-src] attributes + const imgs = Array.from(root.querySelectorAll('[data-image-src]')) + .map(el => el.getAttribute('data-image-src') || el.dataset.imageSrc) + .filter(Boolean); + + const main = root.querySelector('[data-main-img]'); + const prevBtn = root.querySelector('[data-prev]'); + const nextBtn = root.querySelector('[data-next]'); + const thumbs = Array.from(root.querySelectorAll('[data-thumb]')); + const titleEl = root.querySelector('[data-title]'); + const total = imgs.length; + + // Without a main image or any sources, the gallery is not usable + if (!main || !total) return; + + /** + * Render the gallery to reflect the current `index`: + * - Update main image src/alt + * - Update active thumbnail highlight + * - Keep prev/next button ARIA labels consistent + */ + function render() { + main.setAttribute('src', imgs[index]); + + // Highlight active thumbnail + thumbs.forEach((t, i) => { + if (i === index) t.classList.add('ring-2', 'ring-stone-900'); + else t.classList.remove('ring-2', 'ring-stone-900'); + }); + + // Basic ARIA labels for navigation buttons + if (prevBtn && nextBtn) { + 
prevBtn.setAttribute('aria-label', 'Previous image'); + nextBtn.setAttribute('aria-label', 'Next image'); + } + + // Alt text uses base title + position (e.g. "Product image (1/4)") + const baseTitle = (titleEl?.textContent || 'Product image').trim(); + main.setAttribute('alt', `${baseTitle} (${index + 1}/${total})`); + } + + /** + * Move to a specific index, wrapping around at bounds. + * @param {number} n - Desired index (can be out-of-bounds; we mod it). + */ + function go(n) { + index = (n + imgs.length) % imgs.length; + render(); + } + + // --- Button handlers ---------------------------------------------------- + + prevBtn?.addEventListener('click', (e) => { + e.preventDefault(); + go(index - 1); + }); + + nextBtn?.addEventListener('click', (e) => { + e.preventDefault(); + go(index + 1); + }); + + // --- Thumbnail handlers ------------------------------------------------- + + thumbs.forEach((t, i) => { + t.addEventListener('click', (e) => { + e.preventDefault(); + go(i); + }); + }); + + // --- Keyboard navigation (left/right arrows) --------------------------- + // Note: we only act if `root` is still attached to the DOM. 
+ const keyHandler = (e) => { + if (!root.isConnected) return; + if (e.key === 'ArrowLeft') go(index - 1); + if (e.key === 'ArrowRight') go(index + 1); + }; + document.addEventListener('keydown', keyHandler); + + // --- Touch swipe on main image (horizontal only) ----------------------- + + let touchStartX = null; + let touchStartY = null; + const SWIPE_MIN = 30; // px + + main.addEventListener('touchstart', (e) => { + const t = e.changedTouches[0]; + touchStartX = t.clientX; + touchStartY = t.clientY; + }, { passive: true }); + + main.addEventListener('touchend', (e) => { + if (touchStartX === null) return; + + const t = e.changedTouches[0]; + const dx = t.clientX - touchStartX; + const dy = t.clientY - touchStartY; + + // Horizontal swipe: dx large, dy relatively small + if (Math.abs(dx) > SWIPE_MIN && Math.abs(dy) < 0.6 * Math.abs(dx)) { + if (dx < 0) go(index + 1); + else go(index - 1); + } + + touchStartX = touchStartY = null; + }, { passive: true }); + + // Initial UI state + render(); + } + + // Initialize all galleries on initial page load + document.addEventListener('DOMContentLoaded', () => { + initGallery(document); + }); + + // Re-initialize galleries inside new fragments from HTMX + if (window.htmx) { + // htmx.onLoad runs on initial load and after each swap + htmx.onLoad((content) => { + initGallery(content); + }); + + // Alternative: + // htmx.on('htmx:afterSwap', (evt) => { + // initGallery(evt.detail.target); + // }); + } +})(); + + +// ============================================================================ +// 3. "Peek" scroll viewport +// - Adds a clipped/peek effect to scrollable containers +// - Uses negative margins and optional CSS mask fade +// - Automatically updates on resize and DOM mutations +// ============================================================================ + +(() => { + /** + * Safely parse a numeric value or fall back to a default. + */ + function px(val, def) { + const n = Number(val); + return Number.isFinite(n) ? 
n : def; + } + + /** + * Apply the peek effect to a viewport and its inner content. + * @param {HTMLElement} vp - The viewport (with data-peek-viewport). + * @param {HTMLElement} inner - Inner content wrapper. + */ + function applyPeek(vp, inner) { + const edge = (vp.dataset.peekEdge || 'bottom').toLowerCase(); + const useMask = vp.dataset.peekMask === 'true'; + + // Compute peek size in pixels: + // - data-peek-size-px: direct px value + // - data-peek-size: "units" that are scaled by root font size * 0.25 + // - default: 24px + const sizePx = + px(vp.dataset.peekSizePx, NaN) || + px(vp.dataset.peekSize, NaN) * + (parseFloat(getComputedStyle(document.documentElement).fontSize) || 16) * + 0.25 || + 24; + + const overflowing = vp.scrollHeight > vp.clientHeight; + + // Reset any previous modifications + inner.style.marginTop = ''; + inner.style.marginBottom = ''; + vp.style.webkitMaskImage = vp.style.maskImage = ''; + + // Reset last child's margin in case we changed it previously + const last = inner.lastElementChild; + if (last) last.style.marginBottom = ''; + + if (!overflowing) return; + + // NOTE: For clipping to look right, we want the viewport's own bottom padding + // to be minimal. Consider also using pb-0 in CSS if needed. + + // Apply negative margins to "cut" off content at top/bottom, creating peek + if (edge === 'bottom' || edge === 'both') inner.style.marginBottom = `-${sizePx}px`; + if (edge === 'top' || edge === 'both') inner.style.marginTop = `-${sizePx}px`; + + // Prevent the very last child from cancelling the visual clip + if (edge === 'bottom' || edge === 'both') { + if (last) last.style.marginBottom = '0px'; + } + + // Optional fade in/out mask on top/bottom + if (useMask) { + const topStop = (edge === 'top' || edge === 'both') ? `${sizePx}px` : '0px'; + const bottomStop = (edge === 'bottom' || edge === 'both') ? 
`${sizePx}px` : '0px'; + const mask = `linear-gradient( + 180deg, + transparent 0, + black ${topStop}, + black calc(100% - ${bottomStop}), + transparent 100% + )`; + vp.style.webkitMaskImage = vp.style.maskImage = mask; + } + } + + /** + * Set up one viewport with peek behavior. + * @param {HTMLElement} vp - Element with [data-peek-viewport]. + */ + function setupViewport(vp) { + const inner = vp.querySelector('[data-peek-inner]') || vp.firstElementChild; + if (!inner) return; + + const update = () => applyPeek(vp, inner); + + // Observe size changes (viewport & inner) + const ro = 'ResizeObserver' in window ? new ResizeObserver(update) : null; + ro?.observe(vp); + ro?.observe(inner); + + // Observe DOM changes inside the inner container + const mo = new MutationObserver(update); + mo.observe(inner, { childList: true, subtree: true }); + + // Run once on window load and once immediately + window.addEventListener('load', update, { once: true }); + update(); + } + + /** + * Initialize peek behavior for all [data-peek-viewport] elements + * inside the given root. + */ + function initPeek(root = document) { + root.querySelectorAll('[data-peek-viewport]').forEach(setupViewport); + } + + // Run on initial DOM readiness + if (document.readyState === 'loading') { + document.addEventListener('DOMContentLoaded', () => initPeek()); + } else { + initPeek(); + } + + // Expose for dynamic inserts (e.g., from HTMX or other JS) + window.initPeekScroll = initPeek; +})(); + + +// ============================================================================ +// 4. Exclusive
    behavior +// - Only one
    with the same [data-toggle-group] is open at a time +// - Respects HTMX swaps by re-attaching afterSwap +// - Scrolls to top when opening a panel +// ============================================================================ + +/** + * Attach behavior so that only one
    in each data-toggle-group is open. + * @param {ParentNode} root - Limit binding to within this node (defaults to document). + */ +function attachExclusiveDetailsBehavior(root = document) { + const detailsList = root.querySelectorAll('details[data-toggle-group]'); + + detailsList.forEach((el) => { + // Prevent double-binding on the same element + if (el.__exclusiveBound) return; + el.__exclusiveBound = true; + + el.addEventListener('toggle', function () { + // Only act when this
    was just opened + if (!el.open) return; + + const group = el.getAttribute('data-toggle-group'); + if (!group) return; + + // Close all other
    with the same data-toggle-group + document + .querySelectorAll('details[data-toggle-group="' + group + '"]') + .forEach((other) => { + if (other === el) return; + if (other.open) { + other.open = false; + } + }); + + // Scroll to top when a panel is opened + window.scrollTo(0, 0); + }); + }); +} + +// Initial binding on page load +attachExclusiveDetailsBehavior(); + +// Re-bind for new content after HTMX swaps +document.body.addEventListener('htmx:afterSwap', function (evt) { + attachExclusiveDetailsBehavior(evt.target); +}); + + +// ============================================================================ +// 5. Close
    panels before HTMX requests +// - When a link/button inside a triggers HTMX, +// we close that panel and scroll to top. +// ============================================================================ + +document.body.addEventListener('htmx:beforeRequest', function (evt) { + const triggerEl = evt.target; + + // Find the closest
    panel (e.g., mobile panel, filters, etc.) + const panel = triggerEl.closest('details[data-toggle-group]'); + if (!panel) return; + + panel.open = false; + window.scrollTo(0, 0); +}); + + +// ============================================================================ +// 6. Ghost / Koenig video card fix +// - Ghost/Koenig editors may output
    +// - This replaces the
    with just the