Compare commits
41 Commits
526d4f6e1b
...
widget-pha
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bccfff0c69 | ||
|
|
9a8b556c13 | ||
|
|
a626dd849d | ||
|
|
d0b1edea7a | ||
|
|
eec750a699 | ||
|
|
fd163b577f | ||
|
|
3bde451ce9 | ||
|
|
798fe56165 | ||
|
|
18410c4b16 | ||
|
|
a28add8640 | ||
|
|
68941b97f6 | ||
|
|
1d83a339b6 | ||
|
|
24432cd52a | ||
|
|
9a1a4996bc | ||
|
|
1832c53980 | ||
|
|
9db739e56d | ||
|
|
dd7a99e8b7 | ||
|
|
8850a0106a | ||
|
|
7abef48cf2 | ||
|
|
1f8fb521b2 | ||
|
|
e83df2f742 | ||
|
|
7ee8638d6e | ||
|
|
71729ffb28 | ||
|
|
8b6be6da96 | ||
|
|
7882644731 | ||
|
|
dfc324b1be | ||
|
|
98c3df860b | ||
|
|
d404349806 | ||
|
|
3febef074b | ||
|
|
6db91cb3c1 | ||
|
|
7b55d78214 | ||
|
|
b3a0e9922a | ||
|
|
9cba422aa9 | ||
|
|
de4bc92fce | ||
|
|
f1716a0fc0 | ||
|
|
5bcf68af2b | ||
|
|
70b1c7de10 | ||
|
|
ea7dc9723a | ||
|
|
e3f8ff6e3c | ||
|
|
0c0f3c8416 | ||
|
|
da10fc4cf9 |
@@ -19,7 +19,7 @@ from shared.db.base import Base
|
||||
|
||||
# Import ALL models so Base.metadata sees every table
|
||||
import shared.models # noqa: F401 User, KV, MagicLink, MenuItem, Ghost*
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "glue.models"):
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models", "glue.models"):
|
||||
try:
|
||||
__import__(_mod)
|
||||
except ImportError:
|
||||
|
||||
142
alembic/versions/k1i9f5g7h8_add_federation_tables.py
Normal file
142
alembic/versions/k1i9f5g7h8_add_federation_tables.py
Normal file
@@ -0,0 +1,142 @@
|
||||
"""add federation tables
|
||||
|
||||
Revision ID: k1i9f5g7h8
|
||||
Revises: j0h8e4f6g7
|
||||
Create Date: 2026-02-21
|
||||
|
||||
Creates:
|
||||
- ap_actor_profiles — AP identity per user
|
||||
- ap_activities — local + remote AP activities
|
||||
- ap_followers — remote followers
|
||||
- ap_inbox_items — raw incoming AP activities
|
||||
- ap_anchors — OpenTimestamps merkle batches
|
||||
- ipfs_pins — IPFS content tracking (platform-wide)
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
revision = "k1i9f5g7h8"
|
||||
down_revision = "j0h8e4f6g7"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# -- ap_anchors (referenced by ap_activities) ----------------------------
|
||||
op.create_table(
|
||||
"ap_anchors",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("merkle_root", sa.String(128), nullable=False),
|
||||
sa.Column("tree_ipfs_cid", sa.String(128), nullable=True),
|
||||
sa.Column("ots_proof_cid", sa.String(128), nullable=True),
|
||||
sa.Column("activity_count", sa.Integer(), nullable=False, server_default="0"),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column("bitcoin_txid", sa.String(128), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
|
||||
# -- ap_actor_profiles ---------------------------------------------------
|
||||
op.create_table(
|
||||
"ap_actor_profiles",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("user_id", sa.Integer(), nullable=False),
|
||||
sa.Column("preferred_username", sa.String(64), nullable=False),
|
||||
sa.Column("display_name", sa.String(255), nullable=True),
|
||||
sa.Column("summary", sa.Text(), nullable=True),
|
||||
sa.Column("public_key_pem", sa.Text(), nullable=False),
|
||||
sa.Column("private_key_pem", sa.Text(), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("preferred_username"),
|
||||
sa.UniqueConstraint("user_id"),
|
||||
)
|
||||
op.create_index("ix_ap_actor_user_id", "ap_actor_profiles", ["user_id"], unique=True)
|
||||
op.create_index("ix_ap_actor_username", "ap_actor_profiles", ["preferred_username"], unique=True)
|
||||
|
||||
# -- ap_activities -------------------------------------------------------
|
||||
op.create_table(
|
||||
"ap_activities",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("activity_id", sa.String(512), nullable=False),
|
||||
sa.Column("activity_type", sa.String(64), nullable=False),
|
||||
sa.Column("actor_profile_id", sa.Integer(), nullable=False),
|
||||
sa.Column("object_type", sa.String(64), nullable=True),
|
||||
sa.Column("object_data", postgresql.JSONB(), nullable=True),
|
||||
sa.Column("published", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.Column("signature", postgresql.JSONB(), nullable=True),
|
||||
sa.Column("is_local", sa.Boolean(), nullable=False, server_default="true"),
|
||||
sa.Column("source_type", sa.String(64), nullable=True),
|
||||
sa.Column("source_id", sa.Integer(), nullable=True),
|
||||
sa.Column("ipfs_cid", sa.String(128), nullable=True),
|
||||
sa.Column("anchor_id", sa.Integer(), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
|
||||
sa.ForeignKeyConstraint(["anchor_id"], ["ap_anchors.id"], ondelete="SET NULL"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("activity_id"),
|
||||
)
|
||||
op.create_index("ix_ap_activity_actor", "ap_activities", ["actor_profile_id"])
|
||||
op.create_index("ix_ap_activity_source", "ap_activities", ["source_type", "source_id"])
|
||||
op.create_index("ix_ap_activity_published", "ap_activities", ["published"])
|
||||
|
||||
# -- ap_followers --------------------------------------------------------
|
||||
op.create_table(
|
||||
"ap_followers",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("actor_profile_id", sa.Integer(), nullable=False),
|
||||
sa.Column("follower_acct", sa.String(512), nullable=False),
|
||||
sa.Column("follower_inbox", sa.String(512), nullable=False),
|
||||
sa.Column("follower_actor_url", sa.String(512), nullable=False),
|
||||
sa.Column("follower_public_key", sa.Text(), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("actor_profile_id", "follower_acct", name="uq_follower_acct"),
|
||||
)
|
||||
op.create_index("ix_ap_follower_actor", "ap_followers", ["actor_profile_id"])
|
||||
|
||||
# -- ap_inbox_items ------------------------------------------------------
|
||||
op.create_table(
|
||||
"ap_inbox_items",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("actor_profile_id", sa.Integer(), nullable=False),
|
||||
sa.Column("raw_json", postgresql.JSONB(), nullable=False),
|
||||
sa.Column("activity_type", sa.String(64), nullable=True),
|
||||
sa.Column("from_actor", sa.String(512), nullable=True),
|
||||
sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index("ix_ap_inbox_state", "ap_inbox_items", ["state"])
|
||||
op.create_index("ix_ap_inbox_actor", "ap_inbox_items", ["actor_profile_id"])
|
||||
|
||||
# -- ipfs_pins -----------------------------------------------------------
|
||||
op.create_table(
|
||||
"ipfs_pins",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("content_hash", sa.String(128), nullable=False),
|
||||
sa.Column("ipfs_cid", sa.String(128), nullable=False),
|
||||
sa.Column("pin_type", sa.String(64), nullable=False),
|
||||
sa.Column("source_type", sa.String(64), nullable=True),
|
||||
sa.Column("source_id", sa.Integer(), nullable=True),
|
||||
sa.Column("size_bytes", sa.BigInteger(), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("ipfs_cid"),
|
||||
)
|
||||
op.create_index("ix_ipfs_pin_source", "ipfs_pins", ["source_type", "source_id"])
|
||||
op.create_index("ix_ipfs_pin_cid", "ipfs_pins", ["ipfs_cid"], unique=True)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table("ipfs_pins")
|
||||
op.drop_table("ap_inbox_items")
|
||||
op.drop_table("ap_followers")
|
||||
op.drop_table("ap_activities")
|
||||
op.drop_table("ap_actor_profiles")
|
||||
op.drop_table("ap_anchors")
|
||||
138
alembic/versions/l2j0g6h8i9_add_fediverse_tables.py
Normal file
138
alembic/versions/l2j0g6h8i9_add_fediverse_tables.py
Normal file
@@ -0,0 +1,138 @@
|
||||
"""add fediverse social tables
|
||||
|
||||
Revision ID: l2j0g6h8i9
|
||||
Revises: k1i9f5g7h8
|
||||
Create Date: 2026-02-22
|
||||
|
||||
Creates:
|
||||
- ap_remote_actors — cached profiles of remote actors
|
||||
- ap_following — outbound follows (local → remote)
|
||||
- ap_remote_posts — ingested posts from remote actors
|
||||
- ap_local_posts — native posts composed in federation UI
|
||||
- ap_interactions — likes and boosts
|
||||
- ap_notifications — follow/like/boost/mention/reply notifications
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
|
||||
revision = "l2j0g6h8i9"
|
||||
down_revision = "k1i9f5g7h8"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# -- ap_remote_actors --
|
||||
op.create_table(
|
||||
"ap_remote_actors",
|
||||
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
|
||||
sa.Column("actor_url", sa.String(512), unique=True, nullable=False),
|
||||
sa.Column("inbox_url", sa.String(512), nullable=False),
|
||||
sa.Column("shared_inbox_url", sa.String(512), nullable=True),
|
||||
sa.Column("preferred_username", sa.String(255), nullable=False),
|
||||
sa.Column("display_name", sa.String(255), nullable=True),
|
||||
sa.Column("summary", sa.Text, nullable=True),
|
||||
sa.Column("icon_url", sa.String(512), nullable=True),
|
||||
sa.Column("public_key_pem", sa.Text, nullable=True),
|
||||
sa.Column("domain", sa.String(255), nullable=False),
|
||||
sa.Column("fetched_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
)
|
||||
op.create_index("ix_ap_remote_actor_url", "ap_remote_actors", ["actor_url"], unique=True)
|
||||
op.create_index("ix_ap_remote_actor_domain", "ap_remote_actors", ["domain"])
|
||||
|
||||
# -- ap_following --
|
||||
op.create_table(
|
||||
"ap_following",
|
||||
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
|
||||
sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
|
||||
sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False),
|
||||
sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
sa.Column("accepted_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.UniqueConstraint("actor_profile_id", "remote_actor_id", name="uq_following"),
|
||||
)
|
||||
op.create_index("ix_ap_following_actor", "ap_following", ["actor_profile_id"])
|
||||
op.create_index("ix_ap_following_remote", "ap_following", ["remote_actor_id"])
|
||||
|
||||
# -- ap_remote_posts --
|
||||
op.create_table(
|
||||
"ap_remote_posts",
|
||||
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
|
||||
sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False),
|
||||
sa.Column("activity_id", sa.String(512), unique=True, nullable=False),
|
||||
sa.Column("object_id", sa.String(512), unique=True, nullable=False),
|
||||
sa.Column("object_type", sa.String(64), nullable=False, server_default="Note"),
|
||||
sa.Column("content", sa.Text, nullable=True),
|
||||
sa.Column("summary", sa.Text, nullable=True),
|
||||
sa.Column("url", sa.String(512), nullable=True),
|
||||
sa.Column("attachment_data", JSONB, nullable=True),
|
||||
sa.Column("tag_data", JSONB, nullable=True),
|
||||
sa.Column("in_reply_to", sa.String(512), nullable=True),
|
||||
sa.Column("conversation", sa.String(512), nullable=True),
|
||||
sa.Column("published", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column("fetched_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
)
|
||||
op.create_index("ix_ap_remote_post_actor", "ap_remote_posts", ["remote_actor_id"])
|
||||
op.create_index("ix_ap_remote_post_published", "ap_remote_posts", ["published"])
|
||||
op.create_index("ix_ap_remote_post_object", "ap_remote_posts", ["object_id"], unique=True)
|
||||
|
||||
# -- ap_local_posts --
|
||||
op.create_table(
|
||||
"ap_local_posts",
|
||||
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
|
||||
sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
|
||||
sa.Column("content", sa.Text, nullable=False),
|
||||
sa.Column("visibility", sa.String(20), nullable=False, server_default="public"),
|
||||
sa.Column("in_reply_to", sa.String(512), nullable=True),
|
||||
sa.Column("published", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
)
|
||||
op.create_index("ix_ap_local_post_actor", "ap_local_posts", ["actor_profile_id"])
|
||||
op.create_index("ix_ap_local_post_published", "ap_local_posts", ["published"])
|
||||
|
||||
# -- ap_interactions --
|
||||
op.create_table(
|
||||
"ap_interactions",
|
||||
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
|
||||
sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True),
|
||||
sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=True),
|
||||
sa.Column("post_type", sa.String(20), nullable=False),
|
||||
sa.Column("post_id", sa.Integer, nullable=False),
|
||||
sa.Column("interaction_type", sa.String(20), nullable=False),
|
||||
sa.Column("activity_id", sa.String(512), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
)
|
||||
op.create_index("ix_ap_interaction_post", "ap_interactions", ["post_type", "post_id"])
|
||||
op.create_index("ix_ap_interaction_actor", "ap_interactions", ["actor_profile_id"])
|
||||
op.create_index("ix_ap_interaction_remote", "ap_interactions", ["remote_actor_id"])
|
||||
|
||||
# -- ap_notifications --
|
||||
op.create_table(
|
||||
"ap_notifications",
|
||||
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
|
||||
sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
|
||||
sa.Column("notification_type", sa.String(20), nullable=False),
|
||||
sa.Column("from_remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="SET NULL"), nullable=True),
|
||||
sa.Column("from_actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="SET NULL"), nullable=True),
|
||||
sa.Column("target_activity_id", sa.Integer, sa.ForeignKey("ap_activities.id", ondelete="SET NULL"), nullable=True),
|
||||
sa.Column("target_remote_post_id", sa.Integer, sa.ForeignKey("ap_remote_posts.id", ondelete="SET NULL"), nullable=True),
|
||||
sa.Column("read", sa.Boolean, nullable=False, server_default="false"),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
|
||||
)
|
||||
op.create_index("ix_ap_notification_actor", "ap_notifications", ["actor_profile_id"])
|
||||
op.create_index("ix_ap_notification_read", "ap_notifications", ["actor_profile_id", "read"])
|
||||
op.create_index("ix_ap_notification_created", "ap_notifications", ["created_at"])
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table("ap_notifications")
|
||||
op.drop_table("ap_interactions")
|
||||
op.drop_table("ap_local_posts")
|
||||
op.drop_table("ap_remote_posts")
|
||||
op.drop_table("ap_following")
|
||||
op.drop_table("ap_remote_actors")
|
||||
@@ -9,7 +9,7 @@ from quart import current_app
|
||||
from shared.config import config
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from cart.models.order import Order
|
||||
from shared.models.order import Order
|
||||
|
||||
SUMUP_BASE_URL = "https://api.sumup.com/v0.1"
|
||||
|
||||
|
||||
44
browser/templates/_types/auth/_bookings_panel.html
Normal file
44
browser/templates/_types/auth/_bookings_panel.html
Normal file
@@ -0,0 +1,44 @@
|
||||
<div class="w-full max-w-3xl mx-auto px-4 py-6">
|
||||
<div class="bg-white/70 backdrop-blur rounded-2xl shadow border border-stone-200 p-6 sm:p-8 space-y-6">
|
||||
|
||||
<h1 class="text-xl font-semibold tracking-tight">Bookings</h1>
|
||||
|
||||
{% if bookings %}
|
||||
<div class="divide-y divide-stone-100">
|
||||
{% for booking in bookings %}
|
||||
<div class="py-4 first:pt-0 last:pb-0">
|
||||
<div class="flex items-start justify-between gap-4">
|
||||
<div class="min-w-0 flex-1">
|
||||
<p class="text-sm font-medium text-stone-800">{{ booking.name }}</p>
|
||||
<div class="mt-1 flex flex-wrap items-center gap-x-3 gap-y-1 text-xs text-stone-500">
|
||||
<span>{{ booking.start_at.strftime('%d %b %Y, %H:%M') }}</span>
|
||||
{% if booking.end_at %}
|
||||
<span>– {{ booking.end_at.strftime('%H:%M') }}</span>
|
||||
{% endif %}
|
||||
{% if booking.calendar_name %}
|
||||
<span>· {{ booking.calendar_name }}</span>
|
||||
{% endif %}
|
||||
{% if booking.cost %}
|
||||
<span>· £{{ booking.cost }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex-shrink-0">
|
||||
{% if booking.state == 'confirmed' %}
|
||||
<span class="inline-flex items-center rounded-full bg-emerald-50 border border-emerald-200 px-2.5 py-0.5 text-xs font-medium text-emerald-700">confirmed</span>
|
||||
{% elif booking.state == 'provisional' %}
|
||||
<span class="inline-flex items-center rounded-full bg-amber-50 border border-amber-200 px-2.5 py-0.5 text-xs font-medium text-amber-700">provisional</span>
|
||||
{% else %}
|
||||
<span class="inline-flex items-center rounded-full bg-stone-50 border border-stone-200 px-2.5 py-0.5 text-xs font-medium text-stone-600">{{ booking.state }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-stone-500">No bookings yet.</p>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
</div>
|
||||
@@ -18,7 +18,7 @@
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
<form action="{{ url_for('auth.logout')|host }}" method="post">
|
||||
<form action="{{ coop_url('/auth/logout/') }}" method="post">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<button
|
||||
type="submit"
|
||||
|
||||
@@ -2,8 +2,16 @@
|
||||
{% call links.link(coop_url('/auth/newsletters/'), hx_select_search, select_colours, True, aclass=styles.nav_button) %}
|
||||
newsletters
|
||||
{% endcall %}
|
||||
<div class="relative nav-group">
|
||||
<a href="{{ cart_url('/orders/') }}" class="{{styles.nav_button}}">
|
||||
orders
|
||||
</a>
|
||||
</div>
|
||||
{% for link in account_nav_links %}
|
||||
{% if link.external %}
|
||||
<div class="relative nav-group">
|
||||
<a href="{{ link.href_fn() }}" class="{{styles.nav_button}}" data-hx-disable>
|
||||
{{ link.label }}
|
||||
</a>
|
||||
</div>
|
||||
{% else %}
|
||||
{% call links.link(link.href_fn(), hx_select_search, select_colours, True, aclass=styles.nav_button) %}
|
||||
{{ link.label }}
|
||||
{% endcall %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
<div id="nl-{{ un.newsletter_id }}" class="flex items-center">
|
||||
<button
|
||||
hx-post="{{ url_for('auth.toggle_newsletter', newsletter_id=un.newsletter_id) }}"
|
||||
hx-post="{{ coop_url('/auth/newsletter/' ~ un.newsletter_id ~ '/toggle/') }}"
|
||||
hx-headers='{"X-CSRFToken": "{{ csrf_token() }}"}'
|
||||
hx-target="#nl-{{ un.newsletter_id }}"
|
||||
hx-swap="outerHTML"
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
{# No subscription row yet — show an off toggle that will create one #}
|
||||
<div id="nl-{{ item.newsletter.id }}" class="flex items-center">
|
||||
<button
|
||||
hx-post="{{ url_for('auth.toggle_newsletter', newsletter_id=item.newsletter.id) }}"
|
||||
hx-post="{{ coop_url('/auth/newsletter/' ~ item.newsletter.id ~ '/toggle/') }}"
|
||||
hx-headers='{"X-CSRFToken": "{{ csrf_token() }}"}'
|
||||
hx-target="#nl-{{ item.newsletter.id }}"
|
||||
hx-swap="outerHTML"
|
||||
|
||||
44
browser/templates/_types/auth/_tickets_panel.html
Normal file
44
browser/templates/_types/auth/_tickets_panel.html
Normal file
@@ -0,0 +1,44 @@
|
||||
<div class="w-full max-w-3xl mx-auto px-4 py-6">
|
||||
<div class="bg-white/70 backdrop-blur rounded-2xl shadow border border-stone-200 p-6 sm:p-8 space-y-6">
|
||||
|
||||
<h1 class="text-xl font-semibold tracking-tight">Tickets</h1>
|
||||
|
||||
{% if tickets %}
|
||||
<div class="divide-y divide-stone-100">
|
||||
{% for ticket in tickets %}
|
||||
<div class="py-4 first:pt-0 last:pb-0">
|
||||
<div class="flex items-start justify-between gap-4">
|
||||
<div class="min-w-0 flex-1">
|
||||
<a href="{{ events_url('/tickets/' ~ ticket.code ~ '/') }}"
|
||||
class="text-sm font-medium text-stone-800 hover:text-emerald-700 transition">
|
||||
{{ ticket.entry_name }}
|
||||
</a>
|
||||
<div class="mt-1 flex flex-wrap items-center gap-x-3 gap-y-1 text-xs text-stone-500">
|
||||
<span>{{ ticket.entry_start_at.strftime('%d %b %Y, %H:%M') }}</span>
|
||||
{% if ticket.calendar_name %}
|
||||
<span>· {{ ticket.calendar_name }}</span>
|
||||
{% endif %}
|
||||
{% if ticket.ticket_type_name %}
|
||||
<span>· {{ ticket.ticket_type_name }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex-shrink-0">
|
||||
{% if ticket.state == 'checked_in' %}
|
||||
<span class="inline-flex items-center rounded-full bg-blue-50 border border-blue-200 px-2.5 py-0.5 text-xs font-medium text-blue-700">checked in</span>
|
||||
{% elif ticket.state == 'confirmed' %}
|
||||
<span class="inline-flex items-center rounded-full bg-emerald-50 border border-emerald-200 px-2.5 py-0.5 text-xs font-medium text-emerald-700">confirmed</span>
|
||||
{% else %}
|
||||
<span class="inline-flex items-center rounded-full bg-amber-50 border border-amber-200 px-2.5 py-0.5 text-xs font-medium text-amber-700">{{ ticket.state }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-stone-500">No tickets yet.</p>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
</div>
|
||||
@@ -22,7 +22,7 @@
|
||||
|
||||
<p class="mt-6 text-sm">
|
||||
<a
|
||||
href="{{ url_for('auth.login_form')|host }}"
|
||||
href="{{ coop_url('/auth/login/') }}"
|
||||
class="text-stone-600 dark:text-stone-300 hover:underline"
|
||||
>
|
||||
← Back
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
{% endif %}
|
||||
|
||||
<form
|
||||
method="post" action="{{ url_for('auth.start_login')|host }}"
|
||||
method="post" action="{{ coop_url('/auth/start/') }}"
|
||||
class="mt-6 space-y-5"
|
||||
>
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
|
||||
@@ -69,41 +69,10 @@
|
||||
{% endif %}
|
||||
</a>
|
||||
|
||||
{# Associated Entries - Scrollable list #}
|
||||
{% if post.associated_entries %}
|
||||
<div class="mt-4 mb-2">
|
||||
<h3 class="text-sm font-semibold text-stone-700 mb-2 px-2">Events:</h3>
|
||||
<div class="overflow-x-auto scrollbar-hide" style="scroll-behavior: smooth;">
|
||||
<div class="flex gap-2 px-2">
|
||||
{% for entry in post.associated_entries %}
|
||||
{% set _entry_path = '/' + post.slug + '/calendars/' + entry.calendar.slug + '/' + entry.start_at.year|string + '/' + entry.start_at.month|string + '/' + entry.start_at.day|string + '/entries/' + entry.id|string + '/' %}
|
||||
<a
|
||||
href="{{ events_url(_entry_path) }}"
|
||||
class="flex flex-col gap-1 px-3 py-2 bg-stone-50 hover:bg-stone-100 rounded border border-stone-200 transition text-sm whitespace-nowrap flex-shrink-0 min-w-[180px]">
|
||||
<div class="font-medium text-stone-900 truncate">{{ entry.name }}</div>
|
||||
<div class="text-xs text-stone-600">
|
||||
{{ entry.start_at.strftime('%a, %b %d') }}
|
||||
</div>
|
||||
<div class="text-xs text-stone-500">
|
||||
{{ entry.start_at.strftime('%H:%M') }}
|
||||
{% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %}
|
||||
</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.scrollbar-hide::-webkit-scrollbar {
|
||||
display: none;
|
||||
}
|
||||
.scrollbar-hide {
|
||||
-ms-overflow-style: none;
|
||||
scrollbar-width: none;
|
||||
}
|
||||
</style>
|
||||
{% endif %}
|
||||
{# Widget-driven card decorations #}
|
||||
{% for w in widgets.container_cards %}
|
||||
{% include w.template with context %}
|
||||
{% endfor %}
|
||||
|
||||
{% include '_types/blog/_card/at_bar.html' %}
|
||||
|
||||
|
||||
@@ -70,7 +70,7 @@
|
||||
type="text"
|
||||
name="title"
|
||||
value=""
|
||||
placeholder="Post title..."
|
||||
placeholder="{{ 'Page title...' if is_page else 'Post title...' }}"
|
||||
class="w-full text-[36px] font-bold bg-transparent border-none outline-none
|
||||
placeholder:text-stone-300 mb-[8px] leading-tight"
|
||||
>
|
||||
@@ -101,7 +101,7 @@
|
||||
type="submit"
|
||||
class="px-[20px] py-[6px] bg-stone-700 text-white text-[14px] rounded-[8px]
|
||||
hover:bg-stone-800 transition-colors cursor-pointer"
|
||||
>Create Post</button>
|
||||
>{{ 'Create Page' if is_page else 'Create Post' }}</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
<nav aria-label="Categories"
|
||||
class="rounded-xl border bg-white shadow-sm min-h-0">
|
||||
<ul class="divide-y">
|
||||
{% set top_active = (current_local_href == top_local_href) %}
|
||||
{% set top_active = (sub_slug is not defined or sub_slug is none or sub_slug == '') %}
|
||||
{% set href = (url_for('market.browse.browse_top', top_slug=top_slug) ~ qs)|host %}
|
||||
<li>
|
||||
<a
|
||||
@@ -19,7 +19,7 @@
|
||||
</li>
|
||||
|
||||
{% for sub in subs_local %}
|
||||
{% set active = (current_local_href == sub.local_href) %}
|
||||
{% set active = (sub.slug == sub_slug) %}
|
||||
{% set href = (url_for('market.browse.browse_sub', top_slug=top_slug, sub_slug=sub.slug) ~ qs)|host %}
|
||||
<li>
|
||||
<a
|
||||
|
||||
@@ -43,7 +43,7 @@
|
||||
<li class="flex items-start justify-between text-sm">
|
||||
<div>
|
||||
<div class="font-medium">
|
||||
{{ entry.name or entry.calendar.name }}
|
||||
{{ entry.name or entry.calendar_name }}
|
||||
</div>
|
||||
<div class="text-xs text-stone-500">
|
||||
{{ entry.start_at }}
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
{% macro mini(oob=False) %}
|
||||
{% macro mini(oob=False, count=None) %}
|
||||
<div id="cart-mini" {% if oob %}hx-swap-oob="{{oob}}"{% endif %} >
|
||||
{# cart_count is set by the context processor in all apps.
|
||||
Cart app computes it from g.cart + calendar_cart_entries;
|
||||
other apps get it from the cart internal API. #}
|
||||
{% if cart_count is defined and cart_count is not none %}
|
||||
other apps get it from the cart internal API.
|
||||
count param allows explicit override when macro is imported without context. #}
|
||||
{% if count is not none %}
|
||||
{% set _count = count %}
|
||||
{% elif cart_count is defined and cart_count is not none %}
|
||||
{% set _count = cart_count %}
|
||||
{% elif cart is defined and cart is not none %}
|
||||
{% set _count = (cart | sum(attribute="quantity")) + ((calendar_cart_entries | length) if calendar_cart_entries else 0) %}
|
||||
@@ -14,7 +17,7 @@
|
||||
{% if _count == 0 %}
|
||||
<div class="h-12 w-12 rounded-full overflow-hidden border border-stone-300 flex-shrink-0">
|
||||
<a
|
||||
href="{{ {'clear_filters': True}|qs|host }}"
|
||||
href="{{ coop_url('/') }}"
|
||||
class="h-full w-full font-bold text-5xl flex-shrink-0 flex flex-row items-center gap-1"
|
||||
>
|
||||
<img
|
||||
|
||||
@@ -47,8 +47,8 @@
|
||||
{% endif %}
|
||||
{% include '_types/order/_items.html' %}
|
||||
{% include '_types/order/_calendar_items.html' %}
|
||||
|
||||
|
||||
{% include '_types/order/_ticket_items.html' %}
|
||||
|
||||
{% if order.status == 'failed' and order %}
|
||||
<div class="rounded-2xl border border-rose-200 bg-rose-50/80 p-4 sm:p-6 text-sm text-rose-900 space-y-2">
|
||||
<p class="font-medium">Your payment was not completed.</p>
|
||||
|
||||
@@ -25,6 +25,15 @@
|
||||
{% endcall %}
|
||||
</div>
|
||||
|
||||
{# Container nav widgets (market links, etc.) #}
|
||||
{% if container_nav_widgets %}
|
||||
{% for wdata in container_nav_widgets %}
|
||||
{% with ctx=wdata.ctx %}
|
||||
{% include wdata.widget.template with context %}
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{# Admin link #}
|
||||
{% if g.rights.admin %}
|
||||
{% from 'macros/admin_nav.html' import admin_nav_item %}
|
||||
|
||||
@@ -22,6 +22,14 @@
|
||||
{% endcall %}
|
||||
</div>
|
||||
|
||||
{% if container_nav_widgets %}
|
||||
{% for wdata in container_nav_widgets %}
|
||||
{% with ctx=wdata.ctx %}
|
||||
{% include wdata.widget.template with context %}
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{# Admin link #}
|
||||
{% if g.rights.admin %}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
{% set _app_slugs = {'cart': cart_url('/')} %}
|
||||
{% set _first_seg = request.path.strip('/').split('/')[0] %}
|
||||
<div class="flex flex-col sm:flex-row sm:items-center gap-2 border-r border-stone-200 mr-2 sm:max-w-2xl"
|
||||
id="menu-items-nav-wrapper"
|
||||
hx-swap-oob="outerHTML">
|
||||
@@ -14,6 +15,7 @@
|
||||
hx-swap="outerHTML"
|
||||
hx-push-url="true"
|
||||
{% endif %}
|
||||
aria-selected="{{ 'true' if (item.slug == _first_seg or item.slug == app_name) else 'false' }}"
|
||||
class="{{styles.nav_button}}"
|
||||
>
|
||||
{% if item.feature_image %}
|
||||
|
||||
49
browser/templates/_types/order/_ticket_items.html
Normal file
49
browser/templates/_types/order/_ticket_items.html
Normal file
@@ -0,0 +1,49 @@
|
||||
{# --- Tickets in this order --- #}
|
||||
{% if order and order_tickets %}
|
||||
<section class="mt-6 space-y-3">
|
||||
<h2 class="text-base sm:text-lg font-semibold">
|
||||
Event tickets in this order
|
||||
</h2>
|
||||
|
||||
<ul class="divide-y divide-stone-200 rounded-2xl border border-stone-200 bg-white/80">
|
||||
{% for tk in order_tickets %}
|
||||
<li class="px-4 py-3 flex items-start justify-between text-sm">
|
||||
<div>
|
||||
<div class="font-medium flex items-center gap-2">
|
||||
{{ tk.entry_name }}
|
||||
{# Small status pill #}
|
||||
<span class="inline-flex items-center rounded-full px-2 py-0.5 text-[11px] font-medium
|
||||
{% if tk.state == 'confirmed' %}
|
||||
bg-emerald-100 text-emerald-800
|
||||
{% elif tk.state == 'reserved' %}
|
||||
bg-amber-100 text-amber-800
|
||||
{% elif tk.state == 'checked_in' %}
|
||||
bg-blue-100 text-blue-800
|
||||
{% else %}
|
||||
bg-stone-100 text-stone-700
|
||||
{% endif %}
|
||||
">
|
||||
{{ tk.state|replace('_', ' ')|capitalize }}
|
||||
</span>
|
||||
</div>
|
||||
{% if tk.ticket_type_name %}
|
||||
<div class="text-xs text-stone-500">{{ tk.ticket_type_name }}</div>
|
||||
{% endif %}
|
||||
<div class="text-xs text-stone-500">
|
||||
{{ tk.entry_start_at.strftime('%-d %b %Y, %H:%M') }}
|
||||
{% if tk.entry_end_at %}
|
||||
– {{ tk.entry_end_at.strftime('%-d %b %Y, %H:%M') }}
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="text-xs text-stone-400 font-mono mt-0.5">
|
||||
{{ tk.code }}
|
||||
</div>
|
||||
</div>
|
||||
<div class="ml-4 font-medium">
|
||||
£{{ "%.2f"|format(tk.price or 0) }}
|
||||
</div>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</section>
|
||||
{% endif %}
|
||||
@@ -4,14 +4,14 @@
|
||||
{% set has_more_entries = has_more if has_more is defined else (associated_entries.has_more if associated_entries is defined else False) %}
|
||||
|
||||
{% for entry in entry_list %}
|
||||
{% set _entry_path = '/' + post.slug + '/calendars/' + entry.calendar.slug + '/' + entry.start_at.year|string + '/' + entry.start_at.month|string + '/' + entry.start_at.day|string + '/entries/' + entry.id|string + '/' %}
|
||||
{% set _entry_path = '/' + post.slug + '/calendars/' + entry.calendar_slug + '/' + entry.start_at.year|string + '/' + entry.start_at.month|string + '/' + entry.start_at.day|string + '/entries/' + entry.id|string + '/' %}
|
||||
<a
|
||||
href="{{ events_url(_entry_path) }}"
|
||||
class="{{styles.nav_button_less_pad}}"
|
||||
>
|
||||
{% if entry.calendar.post.feature_image %}
|
||||
<img src="{{ entry.calendar.post.feature_image }}"
|
||||
alt="{{ entry.calendar.post.title }}"
|
||||
{% if post.feature_image %}
|
||||
<img src="{{ post.feature_image }}"
|
||||
alt="{{ post.title }}"
|
||||
class="w-8 h-8 rounded object-cover flex-shrink-0" />
|
||||
{% else %}
|
||||
<div class="w-8 h-8 rounded bg-stone-200 flex-shrink-0"></div>
|
||||
|
||||
@@ -1,17 +1,6 @@
|
||||
{# Main panel fragment for HTMX navigation - post article content #}
|
||||
{# Main panel fragment for HTMX navigation - post/page article content #}
|
||||
<article class="relative">
|
||||
{# ❤️ like button - always visible in top right of article #}
|
||||
{% if g.user %}
|
||||
<div class="absolute top-2 right-2 z-10 text-8xl md:text-6xl">
|
||||
{% set slug = post.slug %}
|
||||
{% set liked = post.is_liked or False %}
|
||||
{% set like_url = url_for('blog.post.like_toggle', slug=slug)|host %}
|
||||
{% set item_type = 'post' %}
|
||||
{% include "_types/browse/like/button.html" %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{# Draft indicator + edit link #}
|
||||
{# Draft indicator + edit link (shown for both posts and pages) #}
|
||||
{% if post.status == "draft" %}
|
||||
<div class="flex items-center justify-center gap-2 mb-3">
|
||||
<span class="inline-block px-3 py-1 rounded-full text-sm font-semibold bg-amber-100 text-amber-800">Draft</span>
|
||||
@@ -36,6 +25,18 @@
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if not post.is_page %}
|
||||
{# ── Blog post chrome: like button, excerpt, tags/authors ── #}
|
||||
{% if g.user %}
|
||||
<div class="absolute top-2 right-2 z-10 text-8xl md:text-6xl">
|
||||
{% set slug = post.slug %}
|
||||
{% set liked = post.is_liked or False %}
|
||||
{% set like_url = url_for('blog.post.like_toggle', slug=slug)|host %}
|
||||
{% set item_type = 'post' %}
|
||||
{% include "_types/browse/like/button.html" %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if post.custom_excerpt %}
|
||||
<div class="w-full text-center italic text-3xl p-2">
|
||||
{{post.custom_excerpt|safe}}
|
||||
@@ -44,6 +45,8 @@
|
||||
<div class="hidden md:block">
|
||||
{% include '_types/blog/_card/at_bar.html' %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if post.feature_image %}
|
||||
<div class="mb-3 flex justify-center">
|
||||
<img
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{% import 'macros/links.html' as links %}
|
||||
{# Associated Entries and Calendars - vertical on mobile, horizontal with arrows on desktop #}
|
||||
{% if (associated_entries and associated_entries.entries) or calendars %}
|
||||
{# Widget-driven container nav — entries, calendars, markets #}
|
||||
{% if container_nav_widgets %}
|
||||
<div class="flex flex-col sm:flex-row sm:items-center gap-2 border-r border-stone-200 mr-2 sm:max-w-2xl"
|
||||
id="entries-calendars-nav-wrapper">
|
||||
{% include '_types/post/admin/_nav_entries.html' %}
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
{# Entries for this day #}
|
||||
<div class="space-y-0.5">
|
||||
{% for e in month_entries %}
|
||||
{% if e.start_at.date() == day.date and e.deleted_at is none %}
|
||||
{% if e.start_at.date() == day.date %}
|
||||
{% if e.id in associated_entry_ids %}
|
||||
{# Associated entry - show with delete button #}
|
||||
<div class="flex items-center gap-1 text-[10px] rounded px-1 py-0.5 bg-green-200 text-green-900">
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
<i class="fa fa-chevron-left"></i>
|
||||
</button>
|
||||
|
||||
{# Entries and Calendars container #}
|
||||
{# Widget-driven nav items container #}
|
||||
<div id="associated-items-container"
|
||||
class="overflow-y-auto sm:overflow-x-auto sm:overflow-y-visible scrollbar-hide max-h-[50vh] sm:max-h-none"
|
||||
style="scroll-behavior: smooth;"
|
||||
@@ -22,30 +22,10 @@
|
||||
remove .flex from .entries-nav-arrow
|
||||
end">
|
||||
<div class="flex flex-col sm:flex-row gap-1">
|
||||
{# Associated Entries #}
|
||||
{% if associated_entries and associated_entries.entries %}
|
||||
{% include '_types/post/_entry_items.html' with context %}
|
||||
{% endif %}
|
||||
|
||||
{# Calendars #}
|
||||
{% for calendar in calendars %}
|
||||
{% set local_href=events_url('/' + post.slug + '/calendars/' + calendar.slug + '/') %}
|
||||
<a
|
||||
href="{{ local_href }}"
|
||||
class="{{styles.nav_button_less_pad}}">
|
||||
<i class="fa fa-calendar" aria-hidden="true"></i>
|
||||
<div>{{calendar.name}}</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
|
||||
{# Markets #}
|
||||
{% for m in markets %}
|
||||
<a
|
||||
href="{{ market_url('/' + post.slug + '/' + m.slug + '/') }}"
|
||||
class="{{styles.nav_button_less_pad}}">
|
||||
<i class="fa fa-shopping-bag" aria-hidden="true"></i>
|
||||
<div>{{m.name}}</div>
|
||||
</a>
|
||||
{% for wdata in container_nav_widgets %}
|
||||
{% with ctx=wdata.ctx %}
|
||||
{% include wdata.widget.template with context %}
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -77,7 +77,7 @@
|
||||
type="text"
|
||||
name="title"
|
||||
value="{{ ghost_post.title if ghost_post else '' }}"
|
||||
placeholder="Post title..."
|
||||
placeholder="{{ 'Page title...' if post and post.is_page else 'Post title...' }}"
|
||||
class="w-full text-[36px] font-bold bg-transparent border-none outline-none
|
||||
placeholder:text-stone-300 mb-[8px] leading-tight"
|
||||
>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{# ── Post Settings Form ── #}
|
||||
{# ── Post/Page Settings Form ── #}
|
||||
{% set gp = ghost_post or {} %}
|
||||
{% set _is_page = post.is_page if post else False %}
|
||||
|
||||
{% macro field_label(text, field_for=None) %}
|
||||
<label {% if field_for %}for="{{ field_for }}"{% endif %}
|
||||
@@ -68,7 +69,7 @@
|
||||
{% call section('General', open=True) %}
|
||||
<div>
|
||||
{{ field_label('Slug', 'settings-slug') }}
|
||||
{{ text_input('slug', gp.slug or '', 'post-slug') }}
|
||||
{{ text_input('slug', gp.slug or '', 'page-slug' if _is_page else 'post-slug') }}
|
||||
</div>
|
||||
<div>
|
||||
{{ field_label('Published at', 'settings-published_at') }}
|
||||
@@ -83,7 +84,7 @@
|
||||
>
|
||||
</div>
|
||||
<div>
|
||||
{{ checkbox_input('featured', gp.featured, 'Featured post') }}
|
||||
{{ checkbox_input('featured', gp.featured, 'Featured page' if _is_page else 'Featured post') }}
|
||||
</div>
|
||||
<div>
|
||||
{{ field_label('Visibility', 'settings-visibility') }}
|
||||
@@ -176,7 +177,7 @@
|
||||
{% call section('Advanced') %}
|
||||
<div>
|
||||
{{ field_label('Custom template', 'settings-custom_template') }}
|
||||
{{ text_input('custom_template', gp.custom_template or '', 'custom-post.hbs') }}
|
||||
{{ text_input('custom_template', gp.custom_template or '', 'custom-page.hbs' if _is_page else 'custom-post.hbs') }}
|
||||
</div>
|
||||
{% endcall %}
|
||||
|
||||
|
||||
@@ -188,10 +188,11 @@
|
||||
<div class="mt-3 flex flex-col sm:flex-row sm:items-center justify-between gap-2 sm:gap-4">
|
||||
<div class="flex items-center gap-2 text-xs sm:text-sm text-stone-700">
|
||||
<span class="text-[0.65rem] sm:text-xs uppercase tracking-wide text-stone-500">Quantity</span>
|
||||
{% set qty_url = cart_quantity_url(item.product_id) if cart_quantity_url is defined else market_product_url(p.slug, 'cart', item.market_place) %}
|
||||
<form
|
||||
action="{{ market_product_url(p.slug, 'cart', item.market_place) }}"
|
||||
action="{{ qty_url }}"
|
||||
method="post"
|
||||
hx-post="{{ market_product_url(p.slug, 'cart', item.market_place) }}"
|
||||
hx-post="{{ qty_url }}"
|
||||
hx-target="#cart-mini"
|
||||
hx-swap="outerHTML"
|
||||
>
|
||||
@@ -199,7 +200,7 @@
|
||||
<input
|
||||
type="hidden"
|
||||
name="count"
|
||||
value="{{ item.quantity - 1 }}"
|
||||
value="{{ [item.quantity - 1, 0] | max }}"
|
||||
>
|
||||
<button
|
||||
type="submit"
|
||||
@@ -208,13 +209,13 @@
|
||||
-
|
||||
</button>
|
||||
</form>
|
||||
<span class="inline-flex items-center justify-center px-2 py-1 rounded-full bg-stone-100 text-[0.7rem] sm:text-xs font-medium">
|
||||
<span class="inline-flex items-center justify-center px-2 py-1 rounded-full bg-stone-100 text-[0.7rem] sm:text-xs font-medium {{ 'text-stone-400' if item.quantity == 0 }}">
|
||||
{{ item.quantity }}
|
||||
</span>
|
||||
<form
|
||||
action="{{ market_product_url(p.slug, 'cart', item.market_place) }}"
|
||||
action="{{ qty_url }}"
|
||||
method="post"
|
||||
hx-post="{{ market_product_url(p.slug, 'cart', item.market_place) }}"
|
||||
hx-post="{{ qty_url }}"
|
||||
hx-target="#cart-mini"
|
||||
hx-swap="outerHTML"
|
||||
>
|
||||
@@ -231,6 +232,33 @@
|
||||
+
|
||||
</button>
|
||||
</form>
|
||||
|
||||
{% if cart_delete_url is defined %}
|
||||
<form
|
||||
action="{{ cart_delete_url(item.product_id) }}"
|
||||
method="post"
|
||||
hx-post="{{ cart_delete_url(item.product_id) }}"
|
||||
hx-trigger="confirmed"
|
||||
hx-target="#cart-mini"
|
||||
hx-swap="outerHTML"
|
||||
>
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<button
|
||||
type="button"
|
||||
data-confirm
|
||||
data-confirm-title="Remove item?"
|
||||
data-confirm-text="Remove {{ p.title }} from your cart?"
|
||||
data-confirm-icon="warning"
|
||||
data-confirm-confirm-text="Yes, remove"
|
||||
data-confirm-cancel-text="Cancel"
|
||||
data-confirm-event="confirmed"
|
||||
class="inline-flex items-center justify-center w-8 h-8 text-sm font-medium rounded-full border border-red-300 text-red-600 hover:bg-red-50"
|
||||
title="Remove from cart"
|
||||
>
|
||||
<i class="fa-solid fa-trash-can text-xs" aria-hidden="true"></i>
|
||||
</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-between sm:justify-end gap-3">
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
{% set _app_slugs = {'cart': cart_url('/')} %}
|
||||
{% set _first_seg = request.path.strip('/').split('/')[0] %}
|
||||
<div class="flex flex-col sm:flex-row sm:items-center gap-2 border-r border-stone-200 mr-2 sm:max-w-2xl"
|
||||
id="menu-items-nav-wrapper">
|
||||
{% from 'macros/scrolling_menu.html' import scrolling_menu with context %}
|
||||
@@ -6,6 +7,7 @@
|
||||
{% set _href = _app_slugs.get(item.slug, coop_url('/' + item.slug + '/')) %}
|
||||
<a
|
||||
href="{{ _href }}"
|
||||
aria-selected="{{ 'true' if (item.slug == _first_seg or item.slug == app_name) else 'false' }}"
|
||||
class="{{styles.nav_button_less_pad}}"
|
||||
>
|
||||
{% if item.feature_image %}
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
{# Associated entries on blog listing cards — loaded via widget registry #}
|
||||
{% set widget_entries = post[w.context_key] if post[w.context_key] is defined else [] %}
|
||||
{% if widget_entries %}
|
||||
<div class="mt-4 mb-2">
|
||||
<h3 class="text-sm font-semibold text-stone-700 mb-2 px-2">Events:</h3>
|
||||
<div class="overflow-x-auto scrollbar-hide" style="scroll-behavior: smooth;">
|
||||
<div class="flex gap-2 px-2">
|
||||
{% for entry in widget_entries %}
|
||||
{% set _entry_path = '/' + post.slug + '/calendars/' + entry.calendar_slug + '/' + entry.start_at.year|string + '/' + entry.start_at.month|string + '/' + entry.start_at.day|string + '/entries/' + entry.id|string + '/' %}
|
||||
<a
|
||||
href="{{ events_url(_entry_path) }}"
|
||||
class="flex flex-col gap-1 px-3 py-2 bg-stone-50 hover:bg-stone-100 rounded border border-stone-200 transition text-sm whitespace-nowrap flex-shrink-0 min-w-[180px]">
|
||||
<div class="font-medium text-stone-900 truncate">{{ entry.name }}</div>
|
||||
<div class="text-xs text-stone-600">
|
||||
{{ entry.start_at.strftime('%a, %b %d') }}
|
||||
</div>
|
||||
<div class="text-xs text-stone-500">
|
||||
{{ entry.start_at.strftime('%H:%M') }}
|
||||
{% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %}
|
||||
</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.scrollbar-hide::-webkit-scrollbar {
|
||||
display: none;
|
||||
}
|
||||
.scrollbar-hide {
|
||||
-ms-overflow-style: none;
|
||||
scrollbar-width: none;
|
||||
}
|
||||
</style>
|
||||
{% endif %}
|
||||
@@ -0,0 +1,38 @@
|
||||
{# Calendar entries nav items — loaded via widget registry #}
|
||||
{% set entry_list = ctx.entries if ctx.entries is defined else [] %}
|
||||
{% set current_page = ctx.page if ctx.page is defined else 1 %}
|
||||
{% set has_more_entries = ctx.has_more if ctx.has_more is defined else False %}
|
||||
|
||||
{% for entry in entry_list %}
|
||||
{% set _entry_path = '/' + post.slug + '/calendars/' + entry.calendar_slug + '/' + entry.start_at.year|string + '/' + entry.start_at.month|string + '/' + entry.start_at.day|string + '/entries/' + entry.id|string + '/' %}
|
||||
<a
|
||||
href="{{ events_url(_entry_path) }}"
|
||||
class="{{styles.nav_button_less_pad}}"
|
||||
>
|
||||
{% if post.feature_image %}
|
||||
<img src="{{ post.feature_image }}"
|
||||
alt="{{ post.title }}"
|
||||
class="w-8 h-8 rounded object-cover flex-shrink-0" />
|
||||
{% else %}
|
||||
<div class="w-8 h-8 rounded bg-stone-200 flex-shrink-0"></div>
|
||||
{% endif %}
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="font-medium truncate">{{ entry.name }}</div>
|
||||
<div class="text-xs text-stone-600 truncate">
|
||||
{{ entry.start_at.strftime('%b %d, %Y at %H:%M') }}
|
||||
{% if entry.end_at %} – {{ entry.end_at.strftime('%H:%M') }}{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
|
||||
{# Load more entries one at a time until container is full #}
|
||||
{% if has_more_entries %}
|
||||
<div id="entries-load-sentinel-{{ current_page }}"
|
||||
hx-get="{{ url_for('blog.post.widget_paginate', slug=post.slug, widget_domain='calendar', page=current_page + 1) }}"
|
||||
hx-trigger="intersect once"
|
||||
hx-swap="beforebegin"
|
||||
_="on htmx:afterRequest trigger scroll on #associated-entries-container"
|
||||
class="flex-shrink-0 w-1">
|
||||
</div>
|
||||
{% endif %}
|
||||
10
browser/templates/_widgets/container_nav/calendar_links.html
Normal file
10
browser/templates/_widgets/container_nav/calendar_links.html
Normal file
@@ -0,0 +1,10 @@
|
||||
{# Calendar link nav items — loaded via widget registry #}
|
||||
{% for calendar in ctx.calendars %}
|
||||
{% set local_href=events_url('/' + post.slug + '/calendars/' + calendar.slug + '/') %}
|
||||
<a
|
||||
href="{{ local_href }}"
|
||||
class="{{styles.nav_button_less_pad}}">
|
||||
<i class="fa fa-calendar" aria-hidden="true"></i>
|
||||
<div>{{calendar.name}}</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
@@ -0,0 +1,9 @@
|
||||
{# Market link nav items — loaded via widget registry #}
|
||||
{% for m in ctx.markets %}
|
||||
<a
|
||||
href="{{ market_url('/' + post.slug + '/' + m.slug + '/') }}"
|
||||
class="{{styles.nav_button_less_pad}}">
|
||||
<i class="fa fa-shopping-bag" aria-hidden="true"></i>
|
||||
<div>{{m.name}}</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
31
contracts/__init__.py
Normal file
31
contracts/__init__.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""Typed contracts (DTOs + Protocols) for cross-domain service interfaces."""
|
||||
|
||||
from .dtos import (
|
||||
PostDTO,
|
||||
CalendarDTO,
|
||||
CalendarEntryDTO,
|
||||
MarketPlaceDTO,
|
||||
ProductDTO,
|
||||
CartItemDTO,
|
||||
CartSummaryDTO,
|
||||
)
|
||||
from .protocols import (
|
||||
BlogService,
|
||||
CalendarService,
|
||||
MarketService,
|
||||
CartService,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"PostDTO",
|
||||
"CalendarDTO",
|
||||
"CalendarEntryDTO",
|
||||
"MarketPlaceDTO",
|
||||
"ProductDTO",
|
||||
"CartItemDTO",
|
||||
"CartSummaryDTO",
|
||||
"BlogService",
|
||||
"CalendarService",
|
||||
"MarketService",
|
||||
"CartService",
|
||||
]
|
||||
254
contracts/dtos.py
Normal file
254
contracts/dtos.py
Normal file
@@ -0,0 +1,254 @@
|
||||
"""Frozen dataclasses for cross-domain data transfer.
|
||||
|
||||
These are the *only* shapes that cross domain boundaries. Consumers never
|
||||
see ORM model instances from another domain — only these DTOs.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Blog domain
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class PostDTO:
|
||||
id: int
|
||||
slug: str
|
||||
title: str
|
||||
status: str
|
||||
visibility: str
|
||||
is_page: bool = False
|
||||
feature_image: str | None = None
|
||||
html: str | None = None
|
||||
excerpt: str | None = None
|
||||
custom_excerpt: str | None = None
|
||||
published_at: datetime | None = None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Calendar / Events domain
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class CalendarDTO:
|
||||
id: int
|
||||
container_type: str
|
||||
container_id: int
|
||||
name: str
|
||||
slug: str
|
||||
description: str | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class TicketDTO:
|
||||
id: int
|
||||
code: str
|
||||
state: str
|
||||
entry_name: str
|
||||
entry_start_at: datetime
|
||||
entry_end_at: datetime | None = None
|
||||
ticket_type_name: str | None = None
|
||||
calendar_name: str | None = None
|
||||
created_at: datetime | None = None
|
||||
checked_in_at: datetime | None = None
|
||||
entry_id: int | None = None
|
||||
ticket_type_id: int | None = None
|
||||
price: Decimal | None = None
|
||||
order_id: int | None = None
|
||||
calendar_container_id: int | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class CalendarEntryDTO:
|
||||
id: int
|
||||
calendar_id: int
|
||||
name: str
|
||||
start_at: datetime
|
||||
state: str
|
||||
cost: Decimal
|
||||
end_at: datetime | None = None
|
||||
user_id: int | None = None
|
||||
session_id: str | None = None
|
||||
order_id: int | None = None
|
||||
slot_id: int | None = None
|
||||
ticket_price: Decimal | None = None
|
||||
ticket_count: int | None = None
|
||||
calendar_name: str | None = None
|
||||
calendar_slug: str | None = None
|
||||
calendar_container_id: int | None = None
|
||||
calendar_container_type: str | None = None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Market domain
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class MarketPlaceDTO:
|
||||
id: int
|
||||
container_type: str
|
||||
container_id: int
|
||||
name: str
|
||||
slug: str
|
||||
description: str | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class ProductDTO:
|
||||
id: int
|
||||
slug: str
|
||||
title: str | None = None
|
||||
image: str | None = None
|
||||
description_short: str | None = None
|
||||
rrp: Decimal | None = None
|
||||
regular_price: Decimal | None = None
|
||||
special_price: Decimal | None = None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Cart domain
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class CartItemDTO:
|
||||
id: int
|
||||
product_id: int
|
||||
quantity: int
|
||||
product_title: str | None = None
|
||||
product_slug: str | None = None
|
||||
product_image: str | None = None
|
||||
unit_price: Decimal | None = None
|
||||
market_place_id: int | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class CartSummaryDTO:
|
||||
count: int = 0
|
||||
total: Decimal = Decimal("0")
|
||||
calendar_count: int = 0
|
||||
calendar_total: Decimal = Decimal("0")
|
||||
items: list[CartItemDTO] = field(default_factory=list)
|
||||
ticket_count: int = 0
|
||||
ticket_total: Decimal = Decimal("0")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Federation / ActivityPub domain
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class ActorProfileDTO:
|
||||
id: int
|
||||
user_id: int
|
||||
preferred_username: str
|
||||
public_key_pem: str
|
||||
display_name: str | None = None
|
||||
summary: str | None = None
|
||||
inbox_url: str | None = None
|
||||
outbox_url: str | None = None
|
||||
created_at: datetime | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class APActivityDTO:
|
||||
id: int
|
||||
activity_id: str
|
||||
activity_type: str
|
||||
actor_profile_id: int
|
||||
object_type: str | None = None
|
||||
object_data: dict | None = None
|
||||
published: datetime | None = None
|
||||
is_local: bool = True
|
||||
source_type: str | None = None
|
||||
source_id: int | None = None
|
||||
ipfs_cid: str | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class APFollowerDTO:
|
||||
id: int
|
||||
actor_profile_id: int
|
||||
follower_acct: str
|
||||
follower_inbox: str
|
||||
follower_actor_url: str
|
||||
created_at: datetime | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class APAnchorDTO:
|
||||
id: int
|
||||
merkle_root: str
|
||||
activity_count: int = 0
|
||||
tree_ipfs_cid: str | None = None
|
||||
ots_proof_cid: str | None = None
|
||||
confirmed_at: datetime | None = None
|
||||
bitcoin_txid: str | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class RemoteActorDTO:
|
||||
id: int
|
||||
actor_url: str
|
||||
inbox_url: str
|
||||
preferred_username: str
|
||||
domain: str
|
||||
display_name: str | None = None
|
||||
summary: str | None = None
|
||||
icon_url: str | None = None
|
||||
shared_inbox_url: str | None = None
|
||||
public_key_pem: str | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class RemotePostDTO:
|
||||
id: int
|
||||
remote_actor_id: int
|
||||
object_id: str
|
||||
content: str
|
||||
summary: str | None = None
|
||||
url: str | None = None
|
||||
attachments: list[dict] = field(default_factory=list)
|
||||
tags: list[dict] = field(default_factory=list)
|
||||
published: datetime | None = None
|
||||
actor: RemoteActorDTO | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class TimelineItemDTO:
|
||||
id: str # composite key for cursor pagination
|
||||
post_type: str # "local" | "remote" | "boost"
|
||||
content: str # HTML
|
||||
published: datetime
|
||||
actor_name: str
|
||||
actor_username: str
|
||||
object_id: str | None = None
|
||||
summary: str | None = None
|
||||
url: str | None = None
|
||||
attachments: list[dict] = field(default_factory=list)
|
||||
tags: list[dict] = field(default_factory=list)
|
||||
actor_domain: str | None = None # None = local
|
||||
actor_icon: str | None = None
|
||||
actor_url: str | None = None
|
||||
boosted_by: str | None = None
|
||||
like_count: int = 0
|
||||
boost_count: int = 0
|
||||
liked_by_me: bool = False
|
||||
boosted_by_me: bool = False
|
||||
author_inbox: str | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class NotificationDTO:
|
||||
id: int
|
||||
notification_type: str # follow/like/boost/mention/reply
|
||||
from_actor_name: str
|
||||
from_actor_username: str
|
||||
created_at: datetime
|
||||
read: bool
|
||||
from_actor_domain: str | None = None
|
||||
from_actor_icon: str | None = None
|
||||
target_content_preview: str | None = None
|
||||
333
contracts/protocols.py
Normal file
333
contracts/protocols.py
Normal file
@@ -0,0 +1,333 @@
|
||||
"""Protocol classes defining each domain's service interface.
|
||||
|
||||
All cross-domain callers program against these Protocols. Concrete
|
||||
implementations (Sql*Service) and no-op stubs both satisfy them.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Protocol, runtime_checkable
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from .dtos import (
|
||||
PostDTO,
|
||||
CalendarDTO,
|
||||
CalendarEntryDTO,
|
||||
TicketDTO,
|
||||
MarketPlaceDTO,
|
||||
ProductDTO,
|
||||
CartItemDTO,
|
||||
CartSummaryDTO,
|
||||
ActorProfileDTO,
|
||||
APActivityDTO,
|
||||
APFollowerDTO,
|
||||
RemoteActorDTO,
|
||||
RemotePostDTO,
|
||||
TimelineItemDTO,
|
||||
NotificationDTO,
|
||||
)
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class BlogService(Protocol):
|
||||
async def get_post_by_slug(self, session: AsyncSession, slug: str) -> PostDTO | None: ...
|
||||
async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None: ...
|
||||
async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]: ...
|
||||
|
||||
async def search_posts(
|
||||
self, session: AsyncSession, query: str, page: int = 1, per_page: int = 10,
|
||||
) -> tuple[list[PostDTO], int]: ...
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class CalendarService(Protocol):
|
||||
async def calendars_for_container(
|
||||
self, session: AsyncSession, container_type: str, container_id: int,
|
||||
) -> list[CalendarDTO]: ...
|
||||
|
||||
async def pending_entries(
|
||||
self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
|
||||
) -> list[CalendarEntryDTO]: ...
|
||||
|
||||
async def entries_for_page(
|
||||
self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None,
|
||||
) -> list[CalendarEntryDTO]: ...
|
||||
|
||||
async def entry_by_id(self, session: AsyncSession, entry_id: int) -> CalendarEntryDTO | None: ...
|
||||
|
||||
async def associated_entries(
|
||||
self, session: AsyncSession, content_type: str, content_id: int, page: int,
|
||||
) -> tuple[list[CalendarEntryDTO], bool]: ...
|
||||
|
||||
async def toggle_entry_post(
|
||||
self, session: AsyncSession, entry_id: int, content_type: str, content_id: int,
|
||||
) -> bool: ...
|
||||
|
||||
async def adopt_entries_for_user(
|
||||
self, session: AsyncSession, user_id: int, session_id: str,
|
||||
) -> None: ...
|
||||
|
||||
async def claim_entries_for_order(
|
||||
self, session: AsyncSession, order_id: int, user_id: int | None,
|
||||
session_id: str | None, page_post_id: int | None,
|
||||
) -> None: ...
|
||||
|
||||
async def confirm_entries_for_order(
|
||||
self, session: AsyncSession, order_id: int, user_id: int | None,
|
||||
session_id: str | None,
|
||||
) -> None: ...
|
||||
|
||||
async def get_entries_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> list[CalendarEntryDTO]: ...
|
||||
|
||||
async def user_tickets(
|
||||
self, session: AsyncSession, *, user_id: int,
|
||||
) -> list[TicketDTO]: ...
|
||||
|
||||
async def user_bookings(
|
||||
self, session: AsyncSession, *, user_id: int,
|
||||
) -> list[CalendarEntryDTO]: ...
|
||||
|
||||
async def confirmed_entries_for_posts(
|
||||
self, session: AsyncSession, post_ids: list[int],
|
||||
) -> dict[int, list[CalendarEntryDTO]]: ...
|
||||
|
||||
async def pending_tickets(
|
||||
self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
|
||||
) -> list[TicketDTO]: ...
|
||||
|
||||
async def tickets_for_page(
|
||||
self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None,
|
||||
) -> list[TicketDTO]: ...
|
||||
|
||||
async def claim_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int, user_id: int | None,
|
||||
session_id: str | None, page_post_id: int | None,
|
||||
) -> None: ...
|
||||
|
||||
async def confirm_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> None: ...
|
||||
|
||||
async def get_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> list[TicketDTO]: ...
|
||||
|
||||
async def adopt_tickets_for_user(
|
||||
self, session: AsyncSession, user_id: int, session_id: str,
|
||||
) -> None: ...
|
||||
|
||||
async def adjust_ticket_quantity(
|
||||
self, session: AsyncSession, entry_id: int, count: int, *,
|
||||
user_id: int | None, session_id: str | None,
|
||||
ticket_type_id: int | None = None,
|
||||
) -> int: ...
|
||||
|
||||
async def entry_ids_for_content(
|
||||
self, session: AsyncSession, content_type: str, content_id: int,
|
||||
) -> set[int]: ...
|
||||
|
||||
async def visible_entries_for_period(
|
||||
self, session: AsyncSession, calendar_id: int,
|
||||
period_start: datetime, period_end: datetime,
|
||||
*, user_id: int | None, is_admin: bool, session_id: str | None,
|
||||
) -> list[CalendarEntryDTO]: ...
|
||||
|
||||
|
||||
@runtime_checkable
class MarketService(Protocol):
    """Marketplace operations other domains may call through the registry."""

    async def marketplaces_for_container(
        self, session: AsyncSession, container_type: str, container_id: int,
    ) -> list[MarketPlaceDTO]:
        """Return the marketplaces attached to the given container."""
        ...

    async def product_by_id(self, session: AsyncSession, product_id: int) -> ProductDTO | None:
        """Look up a single product; None when it does not exist."""
        ...

    async def create_marketplace(
        self, session: AsyncSession, container_type: str, container_id: int,
        name: str, slug: str,
    ) -> MarketPlaceDTO:
        """Create a marketplace under a container and return its DTO."""
        ...

    async def soft_delete_marketplace(
        self, session: AsyncSession, container_type: str, container_id: int,
        slug: str,
    ) -> bool:
        """Soft-delete the marketplace identified by container + slug.

        Returns True on success — presumably False when no matching
        marketplace exists; confirm with the implementation.
        """
        ...
|
||||
|
||||
|
||||
@runtime_checkable
class CartService(Protocol):
    """Shopping-cart operations other domains may call through the registry."""

    async def cart_summary(
        self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
    page_slug: str | None = None,
    ) -> CartSummaryDTO:
        """Return a summary of the viewer's cart (see CartSummaryDTO).

        ``page_slug`` optionally scopes the summary to one page's cart.
        """
        ...

    async def cart_items(
        self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
    ) -> list[CartItemDTO]:
        """Return the items currently in the viewer's cart."""
        ...

    async def adopt_cart_for_user(
        self, session: AsyncSession, user_id: int, session_id: str,
    ) -> None:
        """Reassign the anonymous *session_id* cart to *user_id*.

        Invoked from the ``user.logged_in`` event handler.
        """
        ...
|
||||
|
||||
|
||||
@runtime_checkable
class FederationService(Protocol):
    """ActivityPub federation contract.

    Covers local actor management, activity publication, followers,
    remote actors/posts, timelines, local posting, interactions, and
    notifications. Other domains call these through the service registry;
    host apps never import the federation implementation directly.
    """

    # -- Actor management -----------------------------------------------------
    async def get_actor_by_username(
        self, session: AsyncSession, username: str,
    ) -> ActorProfileDTO | None: ...

    async def get_actor_by_user_id(
        self, session: AsyncSession, user_id: int,
    ) -> ActorProfileDTO | None: ...

    async def create_actor(
        self, session: AsyncSession, user_id: int, preferred_username: str,
        display_name: str | None = None, summary: str | None = None,
    ) -> ActorProfileDTO:
        """Create the AP identity for *user_id* with the given username."""
        ...

    async def username_available(
        self, session: AsyncSession, username: str,
    ) -> bool: ...

    # -- Publishing (core cross-domain API) -----------------------------------
    async def publish_activity(
        self, session: AsyncSession, *,
        actor_user_id: int,
        activity_type: str,
        object_type: str,
        object_data: dict,
        source_type: str | None = None,
        source_id: int | None = None,
    ) -> APActivityDTO:
        """Record an outgoing activity (Create/Update/Delete/...) for a user.

        ``source_type``/``source_id`` link the activity back to the local
        content it was generated from (see get_activity_for_source).
        Delivery to follower inboxes happens asynchronously via the
        ``federation.activity_created`` event.
        """
        ...

    # -- Queries --------------------------------------------------------------
    async def get_activity(
        self, session: AsyncSession, activity_id: str,
    ) -> APActivityDTO | None: ...

    async def get_outbox(
        self, session: AsyncSession, username: str,
        page: int = 1, per_page: int = 20,
    ) -> tuple[list[APActivityDTO], int]:
        """Return one page of the actor's outbox plus an int — presumably
        the total activity count; confirm with the implementation."""
        ...

    async def get_activity_for_source(
        self, session: AsyncSession, source_type: str, source_id: int,
    ) -> APActivityDTO | None:
        """Find the activity previously published for a local content item."""
        ...

    # -- Followers ------------------------------------------------------------
    async def get_followers(
        self, session: AsyncSession, username: str,
    ) -> list[APFollowerDTO]: ...

    async def add_follower(
        self, session: AsyncSession, username: str,
        follower_acct: str, follower_inbox: str, follower_actor_url: str,
        follower_public_key: str | None = None,
    ) -> APFollowerDTO:
        """Record a remote follower (acct handle, inbox URL, actor URL)."""
        ...

    async def remove_follower(
        self, session: AsyncSession, username: str, follower_acct: str,
    ) -> bool: ...

    # -- Remote actors --------------------------------------------------------
    async def get_or_fetch_remote_actor(
        self, session: AsyncSession, actor_url: str,
    ) -> RemoteActorDTO | None:
        """Return a cached remote actor, fetching it over HTTP if unknown —
        may therefore perform network I/O."""
        ...

    async def search_remote_actor(
        self, session: AsyncSession, acct: str,
    ) -> RemoteActorDTO | None:
        """Resolve an ``user@host`` handle to a remote actor (WebFinger-style
        lookup — confirm with the implementation)."""
        ...

    # -- Following (outbound) -------------------------------------------------
    async def send_follow(
        self, session: AsyncSession, local_username: str, remote_actor_url: str,
    ) -> None: ...

    async def get_following(
        self, session: AsyncSession, username: str,
        page: int = 1, per_page: int = 20,
    ) -> tuple[list[RemoteActorDTO], int]: ...

    async def accept_follow_response(
        self, session: AsyncSession, local_username: str, remote_actor_url: str,
    ) -> None:
        """Handle the remote side's Accept of our earlier Follow."""
        ...

    async def unfollow(
        self, session: AsyncSession, local_username: str, remote_actor_url: str,
    ) -> None: ...

    # -- Remote posts ---------------------------------------------------------
    async def ingest_remote_post(
        self, session: AsyncSession, remote_actor_id: int,
        activity_json: dict, object_json: dict,
    ) -> None:
        """Store an incoming remote post (raw activity + object JSON)."""
        ...

    async def delete_remote_post(
        self, session: AsyncSession, object_id: str,
    ) -> None: ...

    async def get_remote_post(
        self, session: AsyncSession, object_id: str,
    ) -> RemotePostDTO | None: ...

    # -- Timelines ------------------------------------------------------------
    async def get_home_timeline(
        self, session: AsyncSession, actor_profile_id: int,
        before: datetime | None = None, limit: int = 20,
    ) -> list[TimelineItemDTO]:
        """Cursor-paginated timeline for one actor; ``before`` is the cursor."""
        ...

    async def get_public_timeline(
        self, session: AsyncSession,
        before: datetime | None = None, limit: int = 20,
    ) -> list[TimelineItemDTO]: ...

    # -- Local posts ----------------------------------------------------------
    async def create_local_post(
        self, session: AsyncSession, actor_profile_id: int,
        content: str, visibility: str = "public",
        in_reply_to: str | None = None,
    ) -> int:
        """Create a local post; returns an int id — presumably the post id."""
        ...

    async def delete_local_post(
        self, session: AsyncSession, actor_profile_id: int, post_id: int,
    ) -> None: ...

    # -- Interactions ---------------------------------------------------------
    async def like_post(
        self, session: AsyncSession, actor_profile_id: int,
        object_id: str, author_inbox: str,
    ) -> None:
        """Like a post; ``author_inbox`` is where the Like activity is sent."""
        ...

    async def unlike_post(
        self, session: AsyncSession, actor_profile_id: int,
        object_id: str, author_inbox: str,
    ) -> None: ...

    async def boost_post(
        self, session: AsyncSession, actor_profile_id: int,
        object_id: str, author_inbox: str,
    ) -> None: ...

    async def unboost_post(
        self, session: AsyncSession, actor_profile_id: int,
        object_id: str, author_inbox: str,
    ) -> None: ...

    # -- Notifications --------------------------------------------------------
    async def get_notifications(
        self, session: AsyncSession, actor_profile_id: int,
        before: datetime | None = None, limit: int = 20,
    ) -> list[NotificationDTO]: ...

    async def unread_notification_count(
        self, session: AsyncSession, actor_profile_id: int,
    ) -> int: ...

    async def mark_notifications_read(
        self, session: AsyncSession, actor_profile_id: int,
    ) -> None: ...

    # -- Stats ----------------------------------------------------------------
    async def get_stats(self, session: AsyncSession) -> dict:
        """Return federation-wide statistics; dict shape is implementation-defined."""
        ...
|
||||
49
contracts/widgets.py
Normal file
49
contracts/widgets.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""Widget descriptors for cross-domain UI composition.
|
||||
|
||||
Each widget type describes a UI fragment that one domain contributes to
|
||||
another domain's page. Host apps iterate widgets generically — they never
|
||||
name the contributing domain.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Callable
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class NavWidget:
    """Renders nav items on a container page (entries, calendars, markets)."""
    domain: str  # name of the contributing domain
    order: int  # relative ordering key among widgets on the same page
    context_fn: Callable  # async (session, *, container_type, container_id, **kw) -> dict
    template: str  # template rendered with the context_fn result
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class CardWidget:
    """Decorates content cards in listings with domain data."""
    domain: str  # name of the contributing domain
    order: int  # relative ordering key among card widgets
    batch_fn: Callable  # async (session, post_ids) -> dict[int, list] — batched to avoid N+1 queries
    context_key: str  # key injected into each post dict
    template: str  # template rendered per card with the injected data
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class AccountPageWidget:
    """Sub-page under /auth/<slug>/."""
    domain: str  # name of the contributing domain
    slug: str  # URL segment under /auth/
    label: str  # human-readable link text
    order: int  # relative ordering key in the account nav
    context_fn: Callable  # async (session, *, user_id, **kw) -> dict
    template: str  # template rendered with the context_fn result
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class AccountNavLink:
    """Nav link on account page (internal or external)."""
    label: str  # human-readable link text
    order: int  # relative ordering key in the account nav
    href_fn: Callable  # () -> str; deferred so URLs resolve at render time
    external: bool = False  # True opens an off-site link
|
||||
10
events/handlers/__init__.py
Normal file
10
events/handlers/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Shared event handlers (replaces glue.setup.register_glue_handlers)."""
|
||||
|
||||
|
||||
def register_shared_handlers():
    """Import handler modules to trigger registration. Call at app startup.

    Each module registers its handlers at import time via module-level
    ``register_handler(...)`` calls, so importing is the registration.
    """
    # federation_handlers deliberately absent — publication is now inline
    # at the write sites; only async AP delivery remains event-driven.
    handler_modules = (
        "shared.events.handlers.container_handlers",
        "shared.events.handlers.login_handlers",
        "shared.events.handlers.order_handlers",
        "shared.events.handlers.ap_delivery_handler",
    )
    for module_name in handler_modules:
        __import__(module_name)
|
||||
170
events/handlers/ap_delivery_handler.py
Normal file
170
events/handlers/ap_delivery_handler.py
Normal file
@@ -0,0 +1,170 @@
|
||||
"""Deliver AP activities to remote followers.
|
||||
|
||||
On ``federation.activity_created`` → load activity + actor + followers →
|
||||
sign with HTTP Signatures → POST to each follower inbox.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import httpx
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events.bus import register_handler, DomainEvent
|
||||
from shared.models.federation import ActorProfile, APActivity, APFollower
|
||||
from shared.services.registry import services
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
AP_CONTENT_TYPE = "application/activity+json"
|
||||
DELIVERY_TIMEOUT = 15 # seconds per request
|
||||
|
||||
|
||||
def _build_activity_json(activity: APActivity, actor: ActorProfile, domain: str) -> dict:
|
||||
"""Build the full AP activity JSON-LD for delivery."""
|
||||
username = actor.preferred_username
|
||||
actor_url = f"https://{domain}/users/{username}"
|
||||
|
||||
obj = dict(activity.object_data or {})
|
||||
|
||||
# Object id MUST be on the actor's domain (Mastodon origin check).
|
||||
# The post URL (e.g. coop.rose-ash.com/slug/) goes in "url" only.
|
||||
object_id = activity.activity_id + "/object"
|
||||
|
||||
if activity.activity_type == "Delete":
|
||||
# Delete: object is a Tombstone with just id + type
|
||||
obj.setdefault("id", object_id)
|
||||
obj.setdefault("type", "Tombstone")
|
||||
else:
|
||||
# Create/Update: full object with attribution
|
||||
# Prefer stable id from object_data (set by try_publish), fall back to activity-derived
|
||||
obj.setdefault("id", object_id)
|
||||
obj.setdefault("type", activity.object_type)
|
||||
obj.setdefault("attributedTo", actor_url)
|
||||
obj.setdefault("published", activity.published.isoformat() if activity.published else None)
|
||||
obj.setdefault("to", ["https://www.w3.org/ns/activitystreams#Public"])
|
||||
obj.setdefault("cc", [f"{actor_url}/followers"])
|
||||
|
||||
return {
|
||||
"@context": [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"https://w3id.org/security/v1",
|
||||
],
|
||||
"id": activity.activity_id,
|
||||
"type": activity.activity_type,
|
||||
"actor": actor_url,
|
||||
"published": activity.published.isoformat() if activity.published else None,
|
||||
"to": ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc": [f"{actor_url}/followers"],
|
||||
"object": obj,
|
||||
}
|
||||
|
||||
|
||||
async def _deliver_to_inbox(
    client: httpx.AsyncClient,
    inbox_url: str,
    body: dict,
    actor: ActorProfile,
    domain: str,
) -> bool:
    """POST signed activity to a single inbox. Returns True on success.

    Args:
        client: shared AsyncClient for the whole delivery batch.
        inbox_url: the remote follower's inbox endpoint.
        body: activity JSON (already built by _build_activity_json).
        actor: local actor whose private key signs the request.
        domain: our AP domain, used to derive the keyId URL.

    Never raises: all failures are logged and reported as False.
    """
    from shared.utils.http_signatures import sign_request
    from urllib.parse import urlparse
    import json

    # Serialize once and sign those exact bytes — the HTTP Signature digest
    # must match the body we actually send, so we pass `content=body_bytes`
    # below rather than letting httpx re-serialize via `json=`.
    body_bytes = json.dumps(body).encode()
    key_id = f"https://{domain}/users/{actor.preferred_username}#main-key"

    parsed = urlparse(inbox_url)
    headers = sign_request(
        private_key_pem=actor.private_key_pem,
        key_id=key_id,
        method="POST",
        path=parsed.path,
        host=parsed.netloc,
        body=body_bytes,
    )
    headers["Content-Type"] = AP_CONTENT_TYPE

    try:
        resp = await client.post(
            inbox_url,
            content=body_bytes,
            headers=headers,
            timeout=DELIVERY_TIMEOUT,
        )
        # Any 2xx counts as delivered (Mastodon returns 202 Accepted).
        if resp.status_code < 300:
            log.info("Delivered to %s → %d", inbox_url, resp.status_code)
            return True
        else:
            log.warning("Delivery to %s → %d: %s", inbox_url, resp.status_code, resp.text[:200])
            return False
    except Exception:
        # Network errors, TLS failures, timeouts — best-effort delivery,
        # so log and move on to the next inbox.
        log.exception("Delivery failed for %s", inbox_url)
        return False
|
||||
|
||||
|
||||
async def on_activity_created(event: DomainEvent, session: AsyncSession) -> None:
    """Deliver a newly created activity to all followers.

    Handler for ``federation.activity_created``. Loads the activity, its
    actor (with signing key) and the follower list, then POSTs the signed
    activity to each distinct inbox. Bails out silently when the federation
    service is not registered in this app process or the payload/rows are
    incomplete.
    """
    import os

    # Only the app that owns the federation domain should deliver.
    if not services.has("federation"):
        return

    payload = event.payload
    activity_id_uri = payload.get("activity_id")
    if not activity_id_uri:
        return

    domain = os.getenv("AP_DOMAIN", "rose-ash.com")

    # Load the activity
    activity = (
        await session.execute(
            select(APActivity).where(APActivity.activity_id == activity_id_uri)
        )
    ).scalar_one_or_none()
    if not activity:
        log.warning("Activity not found: %s", activity_id_uri)
        return

    # Load actor with private key (required for HTTP Signatures)
    actor = (
        await session.execute(
            select(ActorProfile).where(ActorProfile.id == activity.actor_profile_id)
        )
    ).scalar_one_or_none()
    if not actor or not actor.private_key_pem:
        log.warning("Actor not found or missing key for activity %s", activity_id_uri)
        return

    # Load followers
    followers = (
        await session.execute(
            select(APFollower).where(APFollower.actor_profile_id == actor.id)
        )
    ).scalars().all()

    if not followers:
        log.debug("No followers to deliver to for %s", activity_id_uri)
        return

    # Build activity JSON
    activity_json = _build_activity_json(activity, actor, domain)

    # Deliver to each follower inbox
    # Deduplicate inboxes (multiple followers might share a shared inbox)
    inboxes = {f.follower_inbox for f in followers if f.follower_inbox}

    log.info(
        "Delivering %s to %d inbox(es) for @%s",
        activity.activity_type, len(inboxes), actor.preferred_username,
    )

    # Sequential delivery; failures are logged inside _deliver_to_inbox and
    # do not abort the remaining inboxes. NOTE(review): no retry/queue here —
    # a follower whose server is down misses this activity.
    async with httpx.AsyncClient() as client:
        for inbox_url in inboxes:
            await _deliver_to_inbox(client, inbox_url, activity_json, actor, domain)
|
||||
|
||||
|
||||
register_handler("federation.activity_created", on_activity_created)
|
||||
19
events/handlers/container_handlers.py
Normal file
19
events/handlers/container_handlers.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events import register_handler
|
||||
from shared.models.domain_event import DomainEvent
|
||||
from shared.services.navigation import rebuild_navigation
|
||||
|
||||
|
||||
async def on_child_attached(event: DomainEvent, session: AsyncSession) -> None:
    """Rebuild site navigation when a child container is attached."""
    await rebuild_navigation(session)
|
||||
|
||||
|
||||
async def on_child_detached(event: DomainEvent, session: AsyncSession) -> None:
    """Rebuild site navigation when a child container is detached."""
    await rebuild_navigation(session)
|
||||
|
||||
|
||||
register_handler("container.child_attached", on_child_attached)
|
||||
register_handler("container.child_detached", on_child_detached)
|
||||
8
events/handlers/federation_handlers.py
Normal file
8
events/handlers/federation_handlers.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""Federation event handlers — REMOVED.
|
||||
|
||||
Federation publication is now inline at the write site (ghost_sync, entries,
|
||||
market routes) via shared.services.federation_publish.try_publish().
|
||||
|
||||
AP delivery (federation.activity_created → inbox POST) remains async via
|
||||
ap_delivery_handler.
|
||||
"""
|
||||
25
events/handlers/login_handlers.py
Normal file
25
events/handlers/login_handlers.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events import register_handler
|
||||
from shared.models.domain_event import DomainEvent
|
||||
from shared.services.registry import services
|
||||
|
||||
|
||||
async def on_user_logged_in(event: DomainEvent, session: AsyncSession) -> None:
    """Adopt anonymous-session state into the freshly logged-in user.

    Handler for ``user.logged_in``. The payload must carry ``user_id``
    and ``session_id`` (the pre-login anonymous session).
    """
    user_id = event.payload["user_id"]
    session_id = event.payload["session_id"]

    # Each adoption step only runs when that domain's service is
    # registered in this app process.
    if services.has("cart"):
        await services.cart.adopt_cart_for_user(session, user_id, session_id)

    if services.has("calendar"):
        await services.calendar.adopt_entries_for_user(session, user_id, session_id)
        await services.calendar.adopt_tickets_for_user(session, user_id, session_id)
|
||||
|
||||
|
||||
register_handler("user.logged_in", on_user_logged_in)
|
||||
22
events/handlers/order_handlers.py
Normal file
22
events/handlers/order_handlers.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events import register_handler
|
||||
from shared.models.domain_event import DomainEvent
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def on_order_created(event: DomainEvent, session: AsyncSession) -> None:
    """Handler for ``order.created`` — currently only logs the order id."""
    log.info("order.created: order_id=%s", event.payload.get("order_id"))
|
||||
|
||||
|
||||
async def on_order_paid(event: DomainEvent, session: AsyncSession) -> None:
    """Handler for ``order.paid`` — currently only logs the order id."""
    log.info("order.paid: order_id=%s", event.payload.get("order_id"))
|
||||
|
||||
|
||||
register_handler("order.created", on_order_created)
|
||||
register_handler("order.paid", on_order_paid)
|
||||
@@ -1,11 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import g
|
||||
|
||||
|
||||
async def load_cart():
    """Before-request hook: attach the viewer's cart to ``g.cart``."""
    # Lazy import: cart.bp.cart.services only exists in the cart app process.
    # This avoids cross-app model conflicts at import time.
    from cart.bp.cart.services import get_cart

    # g.s is presumably the request-scoped DB session — confirm against
    # the session middleware that populates it.
    g.cart = await get_cart(g.s)
|
||||
@@ -11,7 +11,7 @@ from shared.config import init_config, config, pretty
|
||||
from shared.models import KV # ensure shared models imported
|
||||
# Register all app model classes with SQLAlchemy so cross-domain
|
||||
# relationship() string references resolve correctly.
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "glue.models"):
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models"):
|
||||
try:
|
||||
__import__(_mod)
|
||||
except ImportError:
|
||||
@@ -42,6 +42,7 @@ def create_base_app(
|
||||
*,
|
||||
context_fn: Callable[[], Awaitable[dict]] | None = None,
|
||||
before_request_fns: Sequence[Callable[[], Awaitable[None]]] | None = None,
|
||||
domain_services_fn: Callable[[], None] | None = None,
|
||||
) -> Quart:
|
||||
"""
|
||||
Create a Quart app with shared infrastructure.
|
||||
@@ -57,7 +58,17 @@ def create_base_app(
|
||||
If not provided, a minimal default context is used.
|
||||
before_request_fns:
|
||||
Extra before-request hooks (e.g. cart_loader for the cart app).
|
||||
domain_services_fn:
|
||||
Callable that registers domain services on the shared registry.
|
||||
Each app provides its own — registering real impls for owned
|
||||
domains and stubs (or real impls) for others.
|
||||
"""
|
||||
if domain_services_fn is not None:
|
||||
domain_services_fn()
|
||||
|
||||
from shared.services.widgets import register_all_widgets
|
||||
register_all_widgets()
|
||||
|
||||
app = Quart(
|
||||
name,
|
||||
static_folder=STATIC_DIR,
|
||||
@@ -132,23 +143,14 @@ def create_base_app(
|
||||
async def _inject_base():
|
||||
return await base_context()
|
||||
|
||||
# --- cleanup internal API client on shutdown ---
|
||||
@app.after_serving
|
||||
async def _close_internal_client():
|
||||
from .internal_api import close_client
|
||||
await close_client()
|
||||
|
||||
# --- event processor ---
|
||||
_event_processor = EventProcessor()
|
||||
|
||||
# --- startup ---
|
||||
@app.before_serving
|
||||
async def _startup():
|
||||
try:
|
||||
from glue.setup import register_glue_handlers
|
||||
register_glue_handlers()
|
||||
except ImportError:
|
||||
pass # glue submodule not present in this build context
|
||||
from shared.events.handlers import register_shared_handlers
|
||||
register_shared_handlers()
|
||||
await init_config()
|
||||
print(pretty())
|
||||
await _event_processor.start()
|
||||
|
||||
@@ -1,152 +0,0 @@
|
||||
"""
|
||||
Async HTTP client for inter-app communication.
|
||||
|
||||
Each app exposes internal JSON API endpoints. Other apps call them
|
||||
via httpx over the Docker overlay network (or localhost in dev).
|
||||
|
||||
URLs resolved from env vars:
|
||||
INTERNAL_URL_COOP (default http://localhost:8000)
|
||||
INTERNAL_URL_MARKET (default http://localhost:8001)
|
||||
INTERNAL_URL_CART (default http://localhost:8002)
|
||||
|
||||
Session cookie forwarding: when ``forward_session=True`` the current
|
||||
request's ``coop_session`` cookie is sent along so the target app can
|
||||
resolve ``g.user`` / cart identity.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
from quart import request as quart_request
|
||||
|
||||
log = logging.getLogger("internal_api")
|
||||
|
||||
class DictObj:
    """Thin wrapper so ``d.key`` works on dicts returned by JSON APIs.

    Jinja templates use attribute access (``item.post.slug``) which
    doesn't work on plain dicts. Wrapping the API response with
    ``dictobj()`` makes both ``item.post.slug`` and ``item["post"]["slug"]``
    work identically.
    """

    __slots__ = ("_data",)

    def __init__(self, data: dict):
        self._data = data

    def __getattr__(self, name: str):
        # Missing keys become AttributeError so Jinja's `is defined` and
        # hasattr() behave naturally.
        try:
            v = self._data[name]
        except KeyError:
            raise AttributeError(name) from None
        if isinstance(v, dict):
            return DictObj(v)
        return v

    def __getitem__(self, key):
        # Bug fix: the class docstring promises ``item["post"]["slug"]``
        # works, but no __getitem__ was defined, so subscripting raised
        # TypeError. Nested dicts are wrapped just like attribute access;
        # missing keys propagate as KeyError.
        v = self._data[key]
        if isinstance(v, dict):
            return DictObj(v)
        return v

    def get(self, key, default=None):
        """dict.get-style lookup; nested dict values come back wrapped."""
        v = self._data.get(key, default)
        if isinstance(v, dict):
            return DictObj(v)
        return v

    def __repr__(self):
        return f"DictObj({self._data!r})"

    def __bool__(self):
        # An empty payload is falsy, mirroring the wrapped dict.
        return bool(self._data)
|
||||
|
||||
|
||||
def dictobj(data):
    """Recursively wrap dicts (or lists of dicts) for attribute access."""
    if isinstance(data, dict):
        return DictObj(data)
    if isinstance(data, list):
        # Wrap only dict elements; scalars pass through untouched.
        return [DictObj(item) if isinstance(item, dict) else item for item in data]
    return data
|
||||
|
||||
|
||||
# Dev-mode base URLs; production overrides each via the
# INTERNAL_URL_<APP> environment variables (see _base_url).
_DEFAULTS = {
    "coop": "http://localhost:8000",
    "market": "http://localhost:8001",
    "cart": "http://localhost:8002",
    "events": "http://localhost:8003",
}

# Lazily created shared connection pool (see _get_client / close_client).
_client: httpx.AsyncClient | None = None

TIMEOUT = 3.0  # seconds
|
||||
|
||||
|
||||
def _base_url(app_name: str) -> str:
    """Resolve *app_name*'s base URL from the environment, falling back to
    the localhost development defaults (empty string for unknown apps)."""
    return os.getenv(f"INTERNAL_URL_{app_name.upper()}", _DEFAULTS.get(app_name, ""))
|
||||
|
||||
|
||||
def _get_client() -> httpx.AsyncClient:
    """Return the module-wide shared AsyncClient, creating it on first use
    or after close_client() has shut the previous one down."""
    global _client
    if _client is None or _client.is_closed:
        _client = httpx.AsyncClient(timeout=TIMEOUT)
    return _client
|
||||
|
||||
|
||||
async def close_client() -> None:
    """Call from ``@app.after_serving`` to cleanly close the pool."""
    global _client
    if _client is not None and not _client.is_closed:
        await _client.aclose()
    # Reset so a later _get_client() call starts a fresh pool.
    _client = None
|
||||
|
||||
|
||||
def _session_cookies() -> dict[str, str]:
    """Extract the shared session cookie from the incoming request.

    Returns an empty dict when there is no active request context or the
    cookie is not present.
    """
    cookie_name = "coop_session"
    try:
        value = quart_request.cookies.get(cookie_name)
    except RuntimeError:
        # Accessing quart's request proxy outside a request context raises
        # RuntimeError — treat it as "no cookie to forward".
        value = None
    return {cookie_name: value} if value else {}
|
||||
|
||||
|
||||
async def get(
    app_name: str,
    path: str,
    *,
    forward_session: bool = False,
    params: dict | None = None,
) -> dict | list | None:
    """GET ``<app_base><path>`` and return parsed JSON, or ``None`` on failure."""
    url = _base_url(app_name).rstrip("/") + path
    cookies = _session_cookies() if forward_session else {}
    try:
        resp = await _get_client().get(url, params=params, cookies=cookies)
        resp.raise_for_status()
        payload = resp.json()
    except Exception as exc:
        # Best-effort: callers treat None as "target app unavailable".
        log.warning("internal_api GET %s failed: %r", url, exc)
        return None
    return payload
|
||||
|
||||
|
||||
async def post(
    app_name: str,
    path: str,
    *,
    json: Any = None,
    forward_session: bool = False,
) -> dict | list | None:
    """POST ``<app_base><path>`` and return parsed JSON, or ``None`` on failure."""
    url = _base_url(app_name).rstrip("/") + path
    cookies = _session_cookies() if forward_session else {}
    try:
        resp = await _get_client().post(url, json=json, cookies=cookies)
        resp.raise_for_status()
        payload = resp.json()
    except Exception as exc:
        # Best-effort: callers treat None as "target app unavailable".
        log.warning("internal_api POST %s failed: %r", url, exc)
        return None
    return payload
|
||||
@@ -37,11 +37,16 @@ def setup_jinja(app: Quart) -> None:
|
||||
app.jinja_env.globals["level"] = level
|
||||
app.jinja_env.globals["level_up"] = level_up
|
||||
app.jinja_env.globals["menu_colour"] = "sky"
|
||||
app.jinja_env.globals["app_name"] = app.name
|
||||
|
||||
nav_button = """justify-center cursor-pointer flex flex-row items-center gap-2 rounded bg-stone-200 text-black
|
||||
select_colours = """
|
||||
[.hover-capable_&]:hover:bg-yellow-300
|
||||
aria-selected:bg-stone-500 aria-selected:text-white
|
||||
[.hover-capable_&[aria-selected=true]:hover]:bg-orange-500"""
|
||||
app.jinja_env.globals["select_colours"] = select_colours
|
||||
|
||||
nav_button = f"""justify-center cursor-pointer flex flex-row items-center gap-2 rounded bg-stone-200 text-black
|
||||
{select_colours}"""
|
||||
|
||||
styles = {
|
||||
"pill": """
|
||||
@@ -96,5 +101,9 @@ def setup_jinja(app: Quart) -> None:
|
||||
app.jinja_env.globals["page_cart_url"] = page_cart_url
|
||||
app.jinja_env.globals["market_product_url"] = market_product_url
|
||||
|
||||
# widget registry available in all templates
|
||||
from shared.services.widget_registry import widgets as _widget_registry
|
||||
app.jinja_env.globals["widgets"] = _widget_registry
|
||||
|
||||
# register jinja filters
|
||||
register_filters(app)
|
||||
|
||||
@@ -37,6 +37,10 @@ def events_url(path: str = "/") -> str:
|
||||
return app_url("events", path)
|
||||
|
||||
|
||||
def federation_url(path: str = "/") -> str:
|
||||
return app_url("federation", path)
|
||||
|
||||
|
||||
def page_cart_url(page_slug: str, path: str = "/") -> str:
|
||||
if not path.startswith("/"):
|
||||
path = "/" + path
|
||||
@@ -62,6 +66,9 @@ def market_product_url(product_slug: str, suffix: str = "", market_place=None) -
|
||||
|
||||
|
||||
def login_url(next_url: str = "") -> str:
    """Build the login URL on the configured auth app.

    Auth lives in blog (coop) for now. Set AUTH_APP=federation to switch.
    ``next_url`` is appended (URL-encoded) as the ``next`` query parameter.
    """
    auth_app = os.getenv("AUTH_APP", "coop")
    base = app_url(auth_app, "/auth/login/")
    # Bug fix: the previous body still contained the old coop_url()-based
    # returns ahead of the AUTH_APP-aware ones, leaving two unreachable
    # statements and ignoring AUTH_APP whenever next_url was set.
    if next_url:
        return f"{base}?next={quote(next_url, safe='')}"
    return base
|
||||
|
||||
@@ -9,3 +9,25 @@ from .ghost_membership_entities import (
|
||||
GhostTier, GhostSubscription,
|
||||
)
|
||||
from .domain_event import DomainEvent
|
||||
|
||||
from .ghost_content import Tag, Post, Author, PostAuthor, PostTag, PostLike
|
||||
from .page_config import PageConfig
|
||||
from .order import Order, OrderItem
|
||||
from .market import (
|
||||
Product, ProductLike, ProductImage, ProductSection,
|
||||
NavTop, NavSub, Listing, ListingItem,
|
||||
LinkError, LinkExternal, SubcategoryRedirect, ProductLog,
|
||||
ProductLabel, ProductSticker, ProductAttribute, ProductNutrition, ProductAllergen,
|
||||
CartItem,
|
||||
)
|
||||
from .market_place import MarketPlace
|
||||
from .calendars import (
|
||||
Calendar, CalendarEntry, CalendarSlot,
|
||||
TicketType, Ticket, CalendarEntryPost,
|
||||
)
|
||||
from .container_relation import ContainerRelation
|
||||
from .menu_node import MenuNode
|
||||
from .federation import (
|
||||
ActorProfile, APActivity, APFollower, APInboxItem, APAnchor, IPFSPin,
|
||||
RemoteActor, APFollowing, APRemotePost, APLocalPost, APInteraction, APNotification,
|
||||
)
|
||||
|
||||
297
models/calendars.py
Normal file
297
models/calendars.py
Normal file
@@ -0,0 +1,297 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import (
|
||||
Column, Integer, String, DateTime, ForeignKey, CheckConstraint,
|
||||
Index, text, Text, Boolean, Time, Numeric
|
||||
)
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
# Adjust this import to match where your Base lives
|
||||
from shared.db.base import Base
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
def utcnow() -> datetime:
|
||||
return datetime.now(timezone.utc)
|
||||
|
||||
|
||||
|
||||
class Calendar(Base):
    """A calendar attached to a container (e.g. a page), holding entries
    and bookable time slots. Soft-deleted via ``deleted_at``."""

    __tablename__ = "calendars"

    id = Column(Integer, primary_key=True)
    # Polymorphic owner reference: (container_type, container_id) pair,
    # no FK — resolution happens in application code.
    container_type = Column(String(32), nullable=False, server_default=text("'page'"))
    container_id = Column(Integer, nullable=False)
    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)
    slug = Column(String(255), nullable=False)

    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    # NOTE(review): no onupdate= configured — updated_at must be refreshed
    # by application code on every change; confirm callers do so.
    updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    # relationships
    entries = relationship(
        "CalendarEntry",
        back_populates="calendar",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="CalendarEntry.start_at",
    )

    slots = relationship(
        "CalendarSlot",
        back_populates="calendar",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="CalendarSlot.time_start",
    )

    # Indexes / constraints
    __table_args__ = (
        Index("ix_calendars_container", "container_type", "container_id"),
        Index("ix_calendars_name", "name"),
        Index("ix_calendars_slug", "slug"),
        # Soft-delete-aware uniqueness: one active calendar per container/slug
        # (case-insensitive via lower(slug); partial index is PostgreSQL-only).
        Index(
            "ux_calendars_container_slug_active",
            "container_type",
            "container_id",
            func.lower(slug),
            unique=True,
            postgresql_where=text("deleted_at IS NULL"),
        ),
    )
|
||||
|
||||
|
||||
class CalendarEntry(Base):
    """A single booking/event on a calendar.

    May be owned by a logged-in user (``user_id``) or an anonymous session
    (``session_id``), linked to an order, and/or derived from a recurring
    ``CalendarSlot``. Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "calendar_entries"

    id = Column(Integer, primary_key=True)
    calendar_id = Column(
        Integer,
        ForeignKey("calendars.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )

    # Ownership + order link
    user_id = Column(Integer, ForeignKey("users.id"), nullable=True, index=True)
    session_id = Column(String(64), nullable=True, index=True)
    order_id = Column(Integer, nullable=True, index=True)

    # Slot link — SET NULL so entries survive deletion of their slot
    slot_id = Column(Integer, ForeignKey("calendar_slots.id", ondelete="SET NULL"), nullable=True, index=True)

    # details
    name = Column(String(255), nullable=False)
    start_at = Column(DateTime(timezone=True), nullable=False, index=True)
    end_at = Column(DateTime(timezone=True), nullable=True)

    # Booking state + cost
    state = Column(
        String(20),
        nullable=False,
        server_default=text("'pending'"),
    )
    cost = Column(Numeric(10, 2), nullable=False, server_default=text("10"))

    # Ticket configuration
    ticket_price = Column(Numeric(10, 2), nullable=True)  # Price per ticket (NULL = no tickets)
    ticket_count = Column(Integer, nullable=True)  # Total available tickets (NULL = unlimited)

    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    # FIX: added onupdate=utcnow — previously updated_at was only set on
    # INSERT and never refreshed on UPDATE.
    updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow, onupdate=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    __table_args__ = (
        CheckConstraint(
            "(end_at IS NULL) OR (end_at >= start_at)",
            name="ck_calendar_entries_end_after_start",
        ),
        # FIX: removed explicit Index() entries for start_at, user_id,
        # session_id, order_id and slot_id — those columns are declared
        # index=True above, so the explicit entries reused the auto-generated
        # ix_calendar_entries_* names and collided at CREATE INDEX time.
        Index("ix_calendar_entries_name", "name"),
        Index("ix_calendar_entries_state", "state"),
    )

    calendar = relationship("Calendar", back_populates="entries")
    slot = relationship("CalendarSlot", back_populates="entries", lazy="selectin")
    posts = relationship("CalendarEntryPost", back_populates="entry", cascade="all, delete-orphan")
    ticket_types = relationship(
        "TicketType",
        back_populates="entry",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="TicketType.name",
        lazy="selectin",
    )
|
||||
|
||||
# (attribute, display-label) pairs for CalendarSlot's per-weekday boolean
# columns; order here defines the rendering order in days_display.
DAY_LABELS = [
    ("mon", "Mon"),
    ("tue", "Tue"),
    ("wed", "Wed"),
    ("thu", "Thu"),
    ("fri", "Fri"),
    ("sat", "Sat"),
    ("sun", "Sun"),
]
|
||||
|
||||
|
||||
class CalendarSlot(Base):
    """A recurring weekly availability band on a calendar.

    Active weekdays are the seven boolean flags; the daily window is
    ``time_start``–``time_end``. Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "calendar_slots"

    id = Column(Integer, primary_key=True)
    calendar_id = Column(
        Integer,
        ForeignKey("calendars.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )

    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)

    # Weekday availability flags
    mon = Column(Boolean, nullable=False, default=False)
    tue = Column(Boolean, nullable=False, default=False)
    wed = Column(Boolean, nullable=False, default=False)
    thu = Column(Boolean, nullable=False, default=False)
    fri = Column(Boolean, nullable=False, default=False)
    sat = Column(Boolean, nullable=False, default=False)
    sun = Column(Boolean, nullable=False, default=False)

    # Whether bookings can be made at flexible times within this band
    flexible = Column(
        Boolean,
        nullable=False,
        server_default=text("false"),
        default=False,
    )

    @property
    def days_display(self) -> str:
        """Human-readable list of enabled weekdays, e.g. ``'Mon, Wed'``."""
        days = [label for attr, label in DAY_LABELS if getattr(self, attr)]
        if len(days) == len(DAY_LABELS):
            # all days selected
            return "All"
        return ", ".join(days) if days else "—"

    # Naive times — presumably interpreted in the calendar's local zone;
    # TODO confirm against booking logic.
    time_start = Column(Time(timezone=False), nullable=False)
    time_end = Column(Time(timezone=False), nullable=False)

    cost = Column(Numeric(10, 2), nullable=True)

    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    # FIX: added onupdate=utcnow — previously updated_at was only set on
    # INSERT and never refreshed on UPDATE.
    updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow, onupdate=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    __table_args__ = (
        CheckConstraint(
            "(time_end > time_start)",
            name="ck_calendar_slots_time_end_after_start",
        ),
        # FIX: removed Index("ix_calendar_slots_calendar_id") — calendar_id is
        # declared index=True above, so that name was already taken by the
        # auto-generated index and the duplicate collided at CREATE time.
        Index("ix_calendar_slots_time_start", "time_start"),
    )

    calendar = relationship("Calendar", back_populates="slots")
    entries = relationship("CalendarEntry", back_populates="slot")
|
||||
|
||||
|
||||
class TicketType(Base):
    """A ticket tier for a calendar entry (name, unit cost, quantity)."""

    __tablename__ = "ticket_types"

    id = Column(Integer, primary_key=True)
    entry_id = Column(
        Integer,
        ForeignKey("calendar_entries.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )

    name = Column(String(255), nullable=False)
    cost = Column(Numeric(10, 2), nullable=False)
    count = Column(Integer, nullable=False)

    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    # FIX: added onupdate=utcnow — previously updated_at was only set on
    # INSERT and never refreshed on UPDATE.
    updated_at = Column(DateTime(timezone=True), nullable=False, default=utcnow, onupdate=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    __table_args__ = (
        # FIX: removed Index("ix_ticket_types_entry_id") — entry_id is
        # declared index=True above, so the explicit entry duplicated the
        # auto-generated index name and collided at CREATE time.
        Index("ix_ticket_types_name", "name"),
    )

    entry = relationship("CalendarEntry", back_populates="ticket_types")
|
||||
|
||||
|
||||
class Ticket(Base):
    """An individual issued ticket for a calendar entry.

    ``state`` lifecycle: reserved → confirmed → checked_in, or cancelled.
    ``code`` holds the scannable QR/barcode value and is globally unique.
    """

    __tablename__ = "tickets"

    id = Column(Integer, primary_key=True)
    entry_id = Column(
        Integer,
        ForeignKey("calendar_entries.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # SET NULL keeps issued tickets around if their tier is later removed
    ticket_type_id = Column(
        Integer,
        ForeignKey("ticket_types.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    user_id = Column(Integer, ForeignKey("users.id"), nullable=True, index=True)
    session_id = Column(String(64), nullable=True, index=True)
    order_id = Column(Integer, nullable=True, index=True)

    code = Column(String(64), unique=True, nullable=False)  # QR/barcode value
    state = Column(
        String(20),
        nullable=False,
        server_default=text("'reserved'"),
    )  # reserved, confirmed, checked_in, cancelled

    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    checked_in_at = Column(DateTime(timezone=True), nullable=True)

    __table_args__ = (
        # FIX: removed explicit Index() entries for entry_id, ticket_type_id,
        # user_id, session_id and order_id — those columns are declared
        # index=True above, so the explicit entries reused the auto-generated
        # ix_tickets_* names and collided at CREATE INDEX time.
        Index("ix_tickets_code", "code", unique=True),
        Index("ix_tickets_state", "state"),
    )

    entry = relationship("CalendarEntry", backref="tickets")
    ticket_type = relationship("TicketType", backref="tickets")
|
||||
|
||||
|
||||
class CalendarEntryPost(Base):
    """Junction between calendar entries and content (posts, etc.).

    ``content_type``/``content_id`` form a polymorphic reference with no FK;
    resolution is left to application code. Soft-deleted via ``deleted_at``.
    """
    __tablename__ = "calendar_entry_posts"

    id = Column(Integer, primary_key=True, autoincrement=True)
    entry_id = Column(Integer, ForeignKey("calendar_entries.id", ondelete="CASCADE"), nullable=False)
    # Defaults to linking a blog post; other content types share the table.
    content_type = Column(String(32), nullable=False, server_default=text("'post'"))
    content_id = Column(Integer, nullable=False)

    created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    __table_args__ = (
        Index("ix_entry_posts_entry_id", "entry_id"),
        Index("ix_entry_posts_content", "content_type", "content_id"),
    )

    entry = relationship("CalendarEntry", back_populates="posts")
|
||||
|
||||
|
||||
# Explicit public API of this module.
__all__ = ["Calendar", "CalendarEntry", "CalendarSlot", "TicketType", "Ticket", "CalendarEntryPost"]
|
||||
# ──────────────────────────────────────────────────────────────────────────
# New file: models/container_relation.py (38 lines)
# ──────────────────────────────────────────────────────────────────────────
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy import Integer, String, DateTime, Index, UniqueConstraint, func
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
class ContainerRelation(Base):
    """Directed parent→child link between two generic containers.

    Both sides are polymorphic (type string + integer id, no FKs); each
    (parent, child) pair may appear at most once. ``sort_order`` orders the
    children under one parent; rows are soft-deleted via ``deleted_at``.
    """

    __tablename__ = "container_relations"

    __table_args__ = (
        UniqueConstraint(
            "parent_type", "parent_id", "child_type", "child_id",
            name="uq_container_relations_parent_child",
        ),
        Index("ix_container_relations_parent", "parent_type", "parent_id"),
        Index("ix_container_relations_child", "child_type", "child_id"),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    parent_type: Mapped[str] = mapped_column(String(32), nullable=False)
    parent_id: Mapped[int] = mapped_column(Integer, nullable=False)
    child_type: Mapped[str] = mapped_column(String(32), nullable=False)
    child_id: Mapped[int] = mapped_column(Integer, nullable=False)

    # Position of the child among its siblings; optional display label.
    sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    label: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    deleted_at: Mapped[Optional[datetime]] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
|
||||
# ──────────────────────────────────────────────────────────────────────────
# New file: models/federation.py (399 lines)
# ──────────────────────────────────────────────────────────────────────────
|
||||
"""Federation / ActivityPub ORM models.
|
||||
|
||||
These models support AP identity, activities, followers, inbox processing,
|
||||
IPFS content addressing, and OpenTimestamps anchoring.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import (
|
||||
String, Integer, DateTime, Text, Boolean, BigInteger,
|
||||
ForeignKey, UniqueConstraint, Index, func,
|
||||
)
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
class ActorProfile(Base):
    """AP identity for a user. Created when user chooses a username.

    Holds the actor's PEM keypair: ``public_key_pem`` is served with the
    actor document; ``private_key_pem`` presumably signs outgoing activities.
    NOTE(review): the private key is stored in plaintext — confirm that the
    database/storage layer provides adequate protection.
    """
    __tablename__ = "ap_actor_profiles"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # One AP profile per local user; removed together with the user.
    user_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("users.id", ondelete="CASCADE"),
        unique=True, nullable=False,
    )
    preferred_username: Mapped[str] = mapped_column(String(64), unique=True, nullable=False)
    display_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    summary: Mapped[str | None] = mapped_column(Text, nullable=True)
    public_key_pem: Mapped[str] = mapped_column(Text, nullable=False)
    private_key_pem: Mapped[str] = mapped_column(Text, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships
    user = relationship("User", backref="actor_profile", uselist=False, lazy="selectin")
    activities = relationship("APActivity", back_populates="actor_profile", lazy="dynamic")
    followers = relationship("APFollower", back_populates="actor_profile", lazy="dynamic")

    __table_args__ = (
        Index("ix_ap_actor_user_id", "user_id", unique=True),
        Index("ix_ap_actor_username", "preferred_username", unique=True),
    )

    def __repr__(self) -> str:
        return f"<ActorProfile {self.id} @{self.preferred_username}>"
|
||||
|
||||
|
||||
class APActivity(Base):
    """An ActivityPub activity (local or remote).

    Stores the activity envelope (type, object, signature) plus optional
    links back to the originating domain object, a content-addressed IPFS
    copy, and a timestamp-anchoring batch.
    """
    __tablename__ = "ap_activities"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Globally unique AP id (IRI) of the activity.
    activity_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
    activity_type: Mapped[str] = mapped_column(String(64), nullable=False)
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    object_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
    # Full JSON of the activity's object, if any.
    object_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    published: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    signature: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    is_local: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")

    # Link back to originating domain object (e.g. source_type='post', source_id=42)
    source_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
    source_id: Mapped[int | None] = mapped_column(Integer, nullable=True)

    # IPFS content-addressed copy of the activity
    ipfs_cid: Mapped[str | None] = mapped_column(String(128), nullable=True)

    # Anchoring (filled later when batched into a merkle tree)
    anchor_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_anchors.id", ondelete="SET NULL"), nullable=True,
    )

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships
    actor_profile = relationship("ActorProfile", back_populates="activities")

    __table_args__ = (
        Index("ix_ap_activity_actor", "actor_profile_id"),
        Index("ix_ap_activity_source", "source_type", "source_id"),
        Index("ix_ap_activity_published", "published"),
    )

    def __repr__(self) -> str:
        return f"<APActivity {self.id} {self.activity_type}>"
|
||||
|
||||
|
||||
class APFollower(Base):
    """A remote follower of a local actor.

    Caches the follower's account, inbox and actor URLs so deliveries can be
    made without re-fetching the remote profile. At most one row per
    (local actor, follower account) pair.
    """
    __tablename__ = "ap_followers"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    follower_acct: Mapped[str] = mapped_column(String(512), nullable=False)
    follower_inbox: Mapped[str] = mapped_column(String(512), nullable=False)
    follower_actor_url: Mapped[str] = mapped_column(String(512), nullable=False)
    follower_public_key: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships
    actor_profile = relationship("ActorProfile", back_populates="followers")

    __table_args__ = (
        UniqueConstraint("actor_profile_id", "follower_acct", name="uq_follower_acct"),
        Index("ix_ap_follower_actor", "actor_profile_id"),
    )

    def __repr__(self) -> str:
        return f"<APFollower {self.id} {self.follower_acct}>"
|
||||
|
||||
|
||||
class APInboxItem(Base):
    """Raw incoming AP activity, stored for async processing.

    The verbatim JSON payload is kept in ``raw_json``; ``state`` tracks the
    processing lifecycle (starts at 'pending'; other values set elsewhere —
    not visible from this model).
    """
    __tablename__ = "ap_inbox_items"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    raw_json: Mapped[dict] = mapped_column(JSONB, nullable=False)
    # Denormalized hints extracted from raw_json for cheap filtering.
    activity_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
    from_actor: Mapped[str | None] = mapped_column(String(512), nullable=True)
    state: Mapped[str] = mapped_column(
        String(20), nullable=False, default="pending", server_default="pending",
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    processed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)

    __table_args__ = (
        Index("ix_ap_inbox_state", "state"),
        Index("ix_ap_inbox_actor", "actor_profile_id"),
    )

    def __repr__(self) -> str:
        return f"<APInboxItem {self.id} {self.activity_type} [{self.state}]>"
|
||||
|
||||
|
||||
class APAnchor(Base):
    """OpenTimestamps anchoring batch — merkle tree of activities.

    ``confirmed_at``/``bitcoin_txid`` are filled once the OTS proof is
    upgraded to a confirmed Bitcoin attestation.
    """
    __tablename__ = "ap_anchors"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    merkle_root: Mapped[str] = mapped_column(String(128), nullable=False)
    # IPFS CIDs for the serialized tree and its OTS proof, once pinned.
    tree_ipfs_cid: Mapped[str | None] = mapped_column(String(128), nullable=True)
    ots_proof_cid: Mapped[str | None] = mapped_column(String(128), nullable=True)
    activity_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    confirmed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    bitcoin_txid: Mapped[str | None] = mapped_column(String(128), nullable=True)

    def __repr__(self) -> str:
        return f"<APAnchor {self.id} activities={self.activity_count}>"
|
||||
|
||||
|
||||
class IPFSPin(Base):
    """Tracks content stored on IPFS — used by all domains.

    ``source_type``/``source_id`` form a polymorphic back-reference to the
    domain object the pinned content came from (no FK).
    """
    __tablename__ = "ipfs_pins"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Hash of the original content (algorithm not fixed here — see pinning code).
    content_hash: Mapped[str] = mapped_column(String(128), nullable=False)
    ipfs_cid: Mapped[str] = mapped_column(String(128), nullable=False, unique=True)
    pin_type: Mapped[str] = mapped_column(String(64), nullable=False)
    source_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
    source_id: Mapped[int | None] = mapped_column(Integer, nullable=True)
    size_bytes: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    __table_args__ = (
        Index("ix_ipfs_pin_source", "source_type", "source_id"),
        Index("ix_ipfs_pin_cid", "ipfs_cid", unique=True),
    )

    def __repr__(self) -> str:
        return f"<IPFSPin {self.id} {self.ipfs_cid[:16]}...>"
|
||||
|
||||
|
||||
class RemoteActor(Base):
    """Cached profile of a remote actor we interact with.

    ``fetched_at`` records when the profile was last refreshed from the
    remote server; ``created_at`` when we first saw the actor.
    """
    __tablename__ = "ap_remote_actors"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    actor_url: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
    inbox_url: Mapped[str] = mapped_column(String(512), nullable=False)
    # Instance-wide shared inbox, when the remote server advertises one.
    shared_inbox_url: Mapped[str | None] = mapped_column(String(512), nullable=True)
    preferred_username: Mapped[str] = mapped_column(String(255), nullable=False)
    display_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    summary: Mapped[str | None] = mapped_column(Text, nullable=True)
    icon_url: Mapped[str | None] = mapped_column(String(512), nullable=True)
    public_key_pem: Mapped[str | None] = mapped_column(Text, nullable=True)
    domain: Mapped[str] = mapped_column(String(255), nullable=False)
    fetched_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    __table_args__ = (
        Index("ix_ap_remote_actor_url", "actor_url", unique=True),
        Index("ix_ap_remote_actor_domain", "domain"),
    )

    def __repr__(self) -> str:
        return f"<RemoteActor {self.id} {self.preferred_username}@{self.domain}>"
|
||||
|
||||
|
||||
class APFollowing(Base):
    """Outbound follow: local actor → remote actor.

    Starts in state 'pending'; ``accepted_at`` is set when the remote side
    accepts (state transition handled elsewhere — not visible here). At most
    one row per (local, remote) pair.
    """
    __tablename__ = "ap_following"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    remote_actor_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False,
    )
    state: Mapped[str] = mapped_column(
        String(20), nullable=False, default="pending", server_default="pending",
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    accepted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)

    # Relationships
    actor_profile = relationship("ActorProfile")
    remote_actor = relationship("RemoteActor")

    __table_args__ = (
        UniqueConstraint("actor_profile_id", "remote_actor_id", name="uq_following"),
        Index("ix_ap_following_actor", "actor_profile_id"),
        Index("ix_ap_following_remote", "remote_actor_id"),
    )

    def __repr__(self) -> str:
        return f"<APFollowing {self.id} [{self.state}]>"
|
||||
|
||||
|
||||
class APRemotePost(Base):
    """A federated post ingested from a remote actor.

    ``activity_id``/``object_id`` are the remote AP IRIs (both unique);
    ``in_reply_to``/``conversation`` carry threading references as raw IRIs.
    """
    __tablename__ = "ap_remote_posts"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    remote_actor_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False,
    )
    activity_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
    object_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
    object_type: Mapped[str] = mapped_column(String(64), nullable=False, default="Note")
    # NOTE(review): content is remote-supplied HTML/text — ensure it is
    # sanitized before rendering.
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    summary: Mapped[str | None] = mapped_column(Text, nullable=True)
    url: Mapped[str | None] = mapped_column(String(512), nullable=True)
    # Raw AP 'attachment' and 'tag' arrays, stored verbatim.
    attachment_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    tag_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    in_reply_to: Mapped[str | None] = mapped_column(String(512), nullable=True)
    conversation: Mapped[str | None] = mapped_column(String(512), nullable=True)
    published: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    fetched_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships
    remote_actor = relationship("RemoteActor")

    __table_args__ = (
        Index("ix_ap_remote_post_actor", "remote_actor_id"),
        Index("ix_ap_remote_post_published", "published"),
        Index("ix_ap_remote_post_object", "object_id", unique=True),
    )

    def __repr__(self) -> str:
        return f"<APRemotePost {self.id} {self.object_type}>"
|
||||
|
||||
|
||||
class APLocalPost(Base):
    """A native post composed in the federation UI.

    ``visibility`` defaults to 'public'; ``in_reply_to`` holds the AP IRI of
    the post being replied to, if any.
    """
    __tablename__ = "ap_local_posts"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    content: Mapped[str] = mapped_column(Text, nullable=False)
    visibility: Mapped[str] = mapped_column(
        String(20), nullable=False, default="public", server_default="public",
    )
    in_reply_to: Mapped[str | None] = mapped_column(String(512), nullable=True)
    published: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    # Refreshed automatically on UPDATE via onupdate=func.now().
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now(),
    )

    # Relationships
    actor_profile = relationship("ActorProfile")

    __table_args__ = (
        Index("ix_ap_local_post_actor", "actor_profile_id"),
        Index("ix_ap_local_post_published", "published"),
    )

    def __repr__(self) -> str:
        return f"<APLocalPost {self.id}>"
|
||||
|
||||
|
||||
class APInteraction(Base):
    """Like or boost (local or remote).

    Exactly one of ``actor_profile_id`` (local) or ``remote_actor_id``
    (remote) is expected to be set — not enforced at the DB level.
    ``post_type``/``post_id`` reference either an APLocalPost or an
    APRemotePost row (polymorphic, no FK).
    """
    __tablename__ = "ap_interactions"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    actor_profile_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True,
    )
    remote_actor_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=True,
    )
    post_type: Mapped[str] = mapped_column(String(20), nullable=False)  # local/remote
    post_id: Mapped[int] = mapped_column(Integer, nullable=False)
    interaction_type: Mapped[str] = mapped_column(String(20), nullable=False)  # like/boost
    # AP IRI of the Like/Announce activity, when known.
    activity_id: Mapped[str | None] = mapped_column(String(512), nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    __table_args__ = (
        Index("ix_ap_interaction_post", "post_type", "post_id"),
        Index("ix_ap_interaction_actor", "actor_profile_id"),
        Index("ix_ap_interaction_remote", "remote_actor_id"),
    )

    def __repr__(self) -> str:
        return f"<APInteraction {self.id} {self.interaction_type}>"
|
||||
|
||||
|
||||
class APNotification(Base):
    """Notification for a local actor.

    The sender is either a remote actor (``from_remote_actor_id``) or a
    local one (``from_actor_profile_id``); the target is an activity or a
    remote post. All sender/target FKs use SET NULL so notifications survive
    deletion of what they point at.
    """
    __tablename__ = "ap_notifications"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Recipient of the notification.
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    notification_type: Mapped[str] = mapped_column(String(20), nullable=False)
    from_remote_actor_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_remote_actors.id", ondelete="SET NULL"), nullable=True,
    )
    from_actor_profile_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="SET NULL"), nullable=True,
    )
    target_activity_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_activities.id", ondelete="SET NULL"), nullable=True,
    )
    target_remote_post_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_remote_posts.id", ondelete="SET NULL"), nullable=True,
    )
    read: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships — foreign_keys disambiguate the two FKs to ap_actor_profiles.
    actor_profile = relationship("ActorProfile", foreign_keys=[actor_profile_id])
    from_remote_actor = relationship("RemoteActor")
    from_actor_profile = relationship("ActorProfile", foreign_keys=[from_actor_profile_id])

    __table_args__ = (
        Index("ix_ap_notification_actor", "actor_profile_id"),
        Index("ix_ap_notification_read", "actor_profile_id", "read"),
        Index("ix_ap_notification_created", "created_at"),
    )
|
||||
# ──────────────────────────────────────────────────────────────────────────
# New file: models/ghost_content.py (216 lines)
# ──────────────────────────────────────────────────────────────────────────
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy import (
|
||||
Integer,
|
||||
String,
|
||||
Text,
|
||||
Boolean,
|
||||
DateTime,
|
||||
ForeignKey,
|
||||
Column,
|
||||
func,
|
||||
)
|
||||
from shared.db.base import Base # whatever your Base is
|
||||
# from .author import Author # make sure imports resolve
|
||||
# from ..app.blog.calendars.model import Calendar
|
||||
|
||||
class Tag(Base):
    """A content tag mirrored from Ghost (``ghost_id`` is the Ghost-side id).

    Linked to posts through the ``post_tags`` association table; the
    relationship preserves each post's tag ordering via PostTag.sort_order.
    """
    __tablename__ = "tags"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False)

    slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)

    description: Mapped[Optional[str]] = mapped_column(Text())
    visibility: Mapped[str] = mapped_column(String(32), default="public", nullable=False)
    feature_image: Mapped[Optional[str]] = mapped_column(Text())

    # SEO metadata
    meta_title: Mapped[Optional[str]] = mapped_column(String(300))
    meta_description: Mapped[Optional[str]] = mapped_column(Text())

    created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Posts carrying this tag, via the post_tags association table.
    posts: Mapped[List["Post"]] = relationship(
        "Post",
        secondary="post_tags",
        primaryjoin="Tag.id==post_tags.c.tag_id",
        secondaryjoin="Post.id==post_tags.c.post_id",
        back_populates="tags",
        order_by="PostTag.sort_order",
    )
|
||||
|
||||
|
||||
class Post(Base):
    """A blog post (or static page, see ``is_page``) mirrored from Ghost.

    Content is stored in several parallel representations (html, plaintext,
    mobiledoc, lexical) exactly as exported. Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "posts"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Upstream Ghost identifier — unique so re-imports update in place.
    ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False)
    uuid: Mapped[str] = mapped_column(String(64), unique=True, nullable=False)
    slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False)

    title: Mapped[str] = mapped_column(String(500), nullable=False)

    # Content payloads in the formats Ghost exports; any may be absent.
    html: Mapped[Optional[str]] = mapped_column(Text())
    plaintext: Mapped[Optional[str]] = mapped_column(Text())
    mobiledoc: Mapped[Optional[str]] = mapped_column(Text())
    lexical: Mapped[Optional[str]] = mapped_column(Text())

    feature_image: Mapped[Optional[str]] = mapped_column(Text())
    feature_image_alt: Mapped[Optional[str]] = mapped_column(Text())
    feature_image_caption: Mapped[Optional[str]] = mapped_column(Text())

    excerpt: Mapped[Optional[str]] = mapped_column(Text())
    custom_excerpt: Mapped[Optional[str]] = mapped_column(Text())

    # Publishing state flags (ORM-side defaults only, except
    # publish_requested below which also has a server default).
    visibility: Mapped[str] = mapped_column(String(32), default="public", nullable=False)
    status: Mapped[str] = mapped_column(String(32), default="draft", nullable=False)
    featured: Mapped[bool] = mapped_column(Boolean(), default=False, nullable=False)
    is_page: Mapped[bool] = mapped_column(Boolean(), default=False, nullable=False)
    email_only: Mapped[bool] = mapped_column(Boolean(), default=False, nullable=False)

    # SEO / social metadata.
    canonical_url: Mapped[Optional[str]] = mapped_column(Text())
    meta_title: Mapped[Optional[str]] = mapped_column(String(500))
    meta_description: Mapped[Optional[str]] = mapped_column(Text())
    og_image: Mapped[Optional[str]] = mapped_column(Text())
    og_title: Mapped[Optional[str]] = mapped_column(String(500))
    og_description: Mapped[Optional[str]] = mapped_column(Text())
    twitter_image: Mapped[Optional[str]] = mapped_column(Text())
    twitter_title: Mapped[Optional[str]] = mapped_column(String(500))
    twitter_description: Mapped[Optional[str]] = mapped_column(Text())
    custom_template: Mapped[Optional[str]] = mapped_column(String(191))

    reading_time: Mapped[Optional[int]] = mapped_column(Integer())
    comment_id: Mapped[Optional[str]] = mapped_column(String(191))

    published_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Local owning user (distinct from the Ghost author records below);
    # kept on user deletion by nulling out.
    user_id: Mapped[Optional[int]] = mapped_column(
        Integer, ForeignKey("users.id", ondelete="SET NULL"), index=True
    )
    publish_requested: Mapped[bool] = mapped_column(Boolean(), default=False, server_default="false", nullable=False)

    primary_author_id: Mapped[Optional[int]] = mapped_column(
        Integer, ForeignKey("authors.id", ondelete="SET NULL")
    )
    primary_tag_id: Mapped[Optional[int]] = mapped_column(
        Integer, ForeignKey("tags.id", ondelete="SET NULL")
    )

    # One-way convenience relationships for the "primary" author/tag; the
    # full many-to-many sets live in `authors` and `tags` below.
    primary_author: Mapped[Optional["Author"]] = relationship(
        "Author", foreign_keys=[primary_author_id]
    )
    primary_tag: Mapped[Optional[Tag]] = relationship(
        "Tag", foreign_keys=[primary_tag_id]
    )
    user: Mapped[Optional["User"]] = relationship(
        "User", foreign_keys=[user_id]
    )

    # Many-to-many authors via post_authors, ordered by association
    # sort_order; mirrored by Author.posts.
    authors: Mapped[List["Author"]] = relationship(
        "Author",
        secondary="post_authors",
        primaryjoin="Post.id==post_authors.c.post_id",
        secondaryjoin="Author.id==post_authors.c.author_id",
        back_populates="posts",
        order_by="PostAuthor.sort_order",
    )

    # Many-to-many tags via post_tags; mirrored by Tag.posts.
    tags: Mapped[List[Tag]] = relationship(
        "Tag",
        secondary="post_tags",
        primaryjoin="Post.id==post_tags.c.post_id",
        secondaryjoin="Tag.id==post_tags.c.tag_id",
        back_populates="posts",
        order_by="PostTag.sort_order",
    )
    # Likes are owned by the post: deleting the post deletes its likes
    # (DB does it via ON DELETE CASCADE thanks to passive_deletes).
    likes: Mapped[List["PostLike"]] = relationship(
        "PostLike",
        back_populates="post",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
|
||||
|
||||
class Author(Base):
    """An author record mirrored from Ghost (see ghost_id).

    Soft-deleted via ``deleted_at``; timestamps come from the upstream
    record (no server defaults).
    """

    __tablename__ = "authors"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Upstream Ghost identifier — unique so re-imports update in place.
    ghost_id: Mapped[str] = mapped_column(String(64), index=True, unique=True, nullable=False)

    slug: Mapped[str] = mapped_column(String(191), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    email: Mapped[Optional[str]] = mapped_column(String(255))

    # Public profile fields, all optional.
    profile_image: Mapped[Optional[str]] = mapped_column(Text())
    cover_image: Mapped[Optional[str]] = mapped_column(Text())
    bio: Mapped[Optional[str]] = mapped_column(Text())
    website: Mapped[Optional[str]] = mapped_column(Text())
    location: Mapped[Optional[str]] = mapped_column(Text())
    facebook: Mapped[Optional[str]] = mapped_column(Text())
    twitter: Mapped[Optional[str]] = mapped_column(Text())

    created_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Many-to-many to Post via post_authors, ordered by the association
    # row's sort_order; mirrored by Post.authors.
    posts: Mapped[List[Post]] = relationship(
        "Post",
        secondary="post_authors",
        primaryjoin="Author.id==post_authors.c.author_id",
        secondaryjoin="Post.id==post_authors.c.post_id",
        back_populates="authors",
        order_by="PostAuthor.sort_order",
    )
|
||||
|
||||
class PostAuthor(Base):
    """Association row linking a Post to an Author, with explicit ordering.

    Composite primary key (post_id, author_id); rows disappear with either
    side via ON DELETE CASCADE. ``sort_order`` drives the ordering of
    Post.authors / Author.posts.
    """

    __tablename__ = "post_authors"

    post_id: Mapped[int] = mapped_column(
        ForeignKey("posts.id", ondelete="CASCADE"),
        primary_key=True,
    )
    author_id: Mapped[int] = mapped_column(
        ForeignKey("authors.id", ondelete="CASCADE"),
        primary_key=True,
    )
    sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
|
||||
|
||||
|
||||
class PostTag(Base):
    """Association row linking a Post to a Tag, with explicit ordering.

    Composite primary key (post_id, tag_id); rows disappear with either
    side via ON DELETE CASCADE. ``sort_order`` drives the ordering of
    Post.tags / Tag.posts.
    """

    __tablename__ = "post_tags"

    post_id: Mapped[int] = mapped_column(
        ForeignKey("posts.id", ondelete="CASCADE"),
        primary_key=True,
    )
    tag_id: Mapped[int] = mapped_column(
        ForeignKey("tags.id", ondelete="CASCADE"),
        primary_key=True,
    )
    sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
|
||||
|
||||
|
||||
class PostLike(Base):
    """A user's "like" on a post.

    Soft-deleted via ``deleted_at``; the row is hard-deleted when either
    the user or the post is deleted (ON DELETE CASCADE).

    NOTE(review): nothing here prevents the same user from liking the same
    post twice — confirm whether a unique (user_id, post_id) constraint is
    enforced elsewhere before adding one (it would change the schema).
    """

    __tablename__ = "post_likes"

    # Unified to SQLAlchemy 2.0 `Mapped`/`mapped_column` style — the
    # original mixed legacy `Column` assignments with `mapped_column` in
    # the same model; column definitions are unchanged.
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False
    )
    post_id: Mapped[int] = mapped_column(ForeignKey("posts.id", ondelete="CASCADE"), nullable=False)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Mirrored by Post.likes (cascade delete-orphan lives on that side).
    post: Mapped["Post"] = relationship("Post", back_populates="likes", foreign_keys=[post_id])
    user: Mapped["User"] = relationship("User", back_populates="liked_posts")
|
||||
441
models/market.py
Normal file
441
models/market.py
Normal file
@@ -0,0 +1,441 @@
|
||||
# at top of persist_snapshot.py:
|
||||
from datetime import datetime
|
||||
from typing import Optional, List
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from typing import List, Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
String, Text, Integer, ForeignKey, DateTime, Boolean, Numeric,
|
||||
UniqueConstraint, Index, func
|
||||
)
|
||||
|
||||
from shared.db.base import Base # you already import Base in app.py
|
||||
|
||||
|
||||
|
||||
class Product(Base):
    """A catalog product scraped/imported from a supplier listing.

    Price fields come in (value, currency, raw-string) triples because the
    source exposes them as display strings; the parsed value may be absent
    while the raw string is kept. Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "products"

    id: Mapped[int] = mapped_column(primary_key=True)
    slug: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False)

    title: Mapped[Optional[str]] = mapped_column(String(512))
    image: Mapped[Optional[str]] = mapped_column(Text)

    description_short: Mapped[Optional[str]] = mapped_column(Text)
    description_html: Mapped[Optional[str]] = mapped_column(Text)

    # Link back to the source listing page.
    suma_href: Mapped[Optional[str]] = mapped_column(Text)
    brand: Mapped[Optional[str]] = mapped_column(String(255))

    # Recommended retail price: parsed value + currency + raw source text.
    rrp: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    rrp_currency: Mapped[Optional[str]] = mapped_column(String(16))
    rrp_raw: Mapped[Optional[str]] = mapped_column(String(128))

    price_per_unit: Mapped[Optional[float]] = mapped_column(Numeric(12, 4))
    price_per_unit_currency: Mapped[Optional[str]] = mapped_column(String(16))
    price_per_unit_raw: Mapped[Optional[str]] = mapped_column(String(128))

    special_price: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    special_price_currency: Mapped[Optional[str]] = mapped_column(String(16))
    special_price_raw: Mapped[Optional[str]] = mapped_column(String(128))

    regular_price: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))
    regular_price_currency: Mapped[Optional[str]] = mapped_column(String(16))
    regular_price_raw: Mapped[Optional[str]] = mapped_column(String(128))

    oe_list_price: Mapped[Optional[float]] = mapped_column(Numeric(12, 2))

    # Case/pack breakdown: count of items, quantity+unit per item, raw text.
    case_size_count: Mapped[Optional[int]] = mapped_column(Integer)
    case_size_item_qty: Mapped[Optional[float]] = mapped_column(Numeric(12, 3))
    case_size_item_unit: Mapped[Optional[str]] = mapped_column(String(32))
    case_size_raw: Mapped[Optional[str]] = mapped_column(String(128))

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Child collections owned by the product. passive_deletes lets the
    # database's ON DELETE CASCADE remove the rows.
    images: Mapped[List["ProductImage"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    sections: Mapped[List["ProductSection"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    # FIX: ProductLabel.product and ProductSticker.product declare
    # back_populates="labels"/"stickers"; a one-sided back_populates makes
    # SQLAlchemy fail at mapper configuration, so the reverse link must be
    # declared here too.
    labels: Mapped[List["ProductLabel"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )
    stickers: Mapped[List["ProductSticker"]] = relationship(
        back_populates="product",
        cascade="all, delete-orphan",
        passive_deletes=True,
    )

    ean: Mapped[Optional[str]] = mapped_column(String(64))
    sku: Mapped[Optional[str]] = mapped_column(String(128))
    unit_size: Mapped[Optional[str]] = mapped_column(String(128))
    pack_size: Mapped[Optional[str]] = mapped_column(String(128))

    # Eagerly loaded (selectin) detail collections.
    attributes = relationship(
        "ProductAttribute",
        back_populates="product",
        lazy="selectin",
        cascade="all, delete-orphan",
    )
    nutrition = relationship(
        "ProductNutrition",
        back_populates="product",
        lazy="selectin",
        cascade="all, delete-orphan",
    )
    allergens = relationship(
        "ProductAllergen",
        back_populates="product",
        lazy="selectin",
        cascade="all, delete-orphan",
    )

    likes = relationship(
        "ProductLike",
        back_populates="product",
        cascade="all, delete-orphan",
    )
    cart_items: Mapped[List["CartItem"]] = relationship(
        "CartItem",
        back_populates="product",
        cascade="all, delete-orphan",
    )

    # All order items that reference this product.
    order_items: Mapped[List["OrderItem"]] = relationship(
        "OrderItem",
        back_populates="product",
        cascade="all, delete-orphan",
    )
|
||||
|
||||
from sqlalchemy import Column
|
||||
|
||||
class ProductLike(Base):
    """A user's "like" on a product.

    Soft-deleted via ``deleted_at``; hard-deleted when the user or product
    goes away (ON DELETE CASCADE). The foreign key targets products.slug —
    a non-PK column, which is valid because Product.slug is unique.

    NOTE(review): nothing here prevents duplicate (user_id, product_slug)
    rows — confirm whether uniqueness is enforced elsewhere before adding
    a constraint (it would change the schema).
    """

    __tablename__ = "product_likes"

    # Unified to SQLAlchemy 2.0 `Mapped`/`mapped_column` style — the
    # original mixed legacy `Column` assignments with `mapped_column` in
    # the same model; column definitions are unchanged.
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False
    )
    product_slug: Mapped[str] = mapped_column(ForeignKey("products.slug", ondelete="CASCADE"))

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Mirrored by Product.likes; joins on the slug FK rather than the PK.
    product: Mapped["Product"] = relationship("Product", back_populates="likes", foreign_keys=[product_slug])

    user: Mapped["User"] = relationship("User", back_populates="liked_products")
|
||||
|
||||
|
||||
class ProductImage(Base):
    """One image attached to a product.

    ``kind`` distinguishes image roles (default "gallery"); ``position``
    orders images within a product. Duplicate (product, url, kind) rows
    are rejected by a unique constraint. Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "product_images"
    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    url: Mapped[str] = mapped_column(Text, nullable=False)
    position: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    kind: Mapped[str] = mapped_column(String(16), nullable=False, default="gallery")
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Mirrored by Product.images.
    product: Mapped["Product"] = relationship(back_populates="images")

    __table_args__ = (
        UniqueConstraint("product_id", "url", "kind", name="uq_product_images_product_url_kind"),
        Index("ix_product_images_position", "position"),
    )
|
||||
|
||||
class ProductSection(Base):
    """A titled HTML section of a product's description page.

    Section titles are unique per product. Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "product_sections"
    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"),
        index=True,
        nullable=False,
    )
    title: Mapped[str] = mapped_column(String(255), nullable=False)
    html: Mapped[str] = mapped_column(Text, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Mirrored by Product.sections.
    product: Mapped["Product"] = relationship(back_populates="sections")
    __table_args__ = (
        UniqueConstraint("product_id", "title", name="uq_product_sections_product_title"),
    )
|
||||
# --- Nav & listings ---
|
||||
|
||||
class NavTop(Base):
    """A top-level navigation category, optionally scoped to a MarketPlace.

    Each NavTop owns its Listing rows; deleting a NavTop cascades to them.
    Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "nav_tops"
    id: Mapped[int] = mapped_column(primary_key=True)
    label: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    # Owning marketplace; nulled out (not deleted) if the marketplace goes.
    market_id: Mapped[Optional[int]] = mapped_column(
        Integer,
        ForeignKey("market_places.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Mirrored by Listing.top / MarketPlace.nav_tops.
    listings: Mapped[List["Listing"]] = relationship(back_populates="top", cascade="all, delete-orphan")
    market = relationship("MarketPlace", back_populates="nav_tops")

    __table_args__ = (UniqueConstraint("label", "slug", name="uq_nav_tops_label_slug"),)
|
||||
|
||||
class NavSub(Base):
    """A sub-navigation entry under a NavTop.

    Slugs are unique within their parent top. Deleted together with the
    parent (ON DELETE CASCADE). Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "nav_subs"
    id: Mapped[int] = mapped_column(primary_key=True)
    top_id: Mapped[int] = mapped_column(ForeignKey("nav_tops.id", ondelete="CASCADE"), index=True, nullable=False)
    label: Mapped[Optional[str]] = mapped_column(String(255))
    slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    href: Mapped[Optional[str]] = mapped_column(Text)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Mirrored by Listing.sub.
    listings: Mapped[List["Listing"]] = relationship(back_populates="sub", cascade="all, delete-orphan")

    __table_args__ = (UniqueConstraint("top_id", "slug", name="uq_nav_subs_top_slug"),)
|
||||
|
||||
class Listing(Base):
    """A paginated product listing for one (top, optional sub) nav position.

    Replaces an earlier slug-keyed scheme (see commented fields) with FK
    references into nav_tops/nav_subs. Soft-deleted via ``deleted_at``.

    NOTE(review): on PostgreSQL the (top_id, sub_id) unique constraint does
    not deduplicate rows where sub_id IS NULL (NULLs compare distinct) —
    confirm whether multiple sub-less listings per top are intended.
    """

    __tablename__ = "listings"

    id: Mapped[int] = mapped_column(primary_key=True)

    # Old slug-based fields (optional: remove)
    # top_slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    # sub_slug: Mapped[Optional[str]] = mapped_column(String(255), index=True)

    top_id: Mapped[int] = mapped_column(ForeignKey("nav_tops.id", ondelete="CASCADE"), index=True, nullable=False)
    sub_id: Mapped[Optional[int]] = mapped_column(ForeignKey("nav_subs.id", ondelete="CASCADE"), index=True)

    # Page count recorded from the source pagination, if known.
    total_pages: Mapped[Optional[int]] = mapped_column(Integer)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Mirrored by NavTop.listings / NavSub.listings.
    top: Mapped["NavTop"] = relationship(back_populates="listings")
    sub: Mapped[Optional["NavSub"]] = relationship(back_populates="listings")

    __table_args__ = (
        UniqueConstraint("top_id", "sub_id", name="uq_listings_top_sub"),
    )
|
||||
|
||||
class ListingItem(Base):
    """One product slug appearing in a Listing.

    A slug may appear at most once per listing (unique constraint); rows
    are deleted with the parent listing. Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "listing_items"
    id: Mapped[int] = mapped_column(primary_key=True)
    listing_id: Mapped[int] = mapped_column(ForeignKey("listings.id", ondelete="CASCADE"), index=True, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    # Product slug as seen in the listing — a plain string, not an FK.
    slug: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    __table_args__ = (UniqueConstraint("listing_id", "slug", name="uq_listing_items_listing_slug"),)
|
||||
|
||||
# --- Reports / redirects / logs ---
|
||||
|
||||
class LinkError(Base):
    """Report row for a broken/unresolvable link found while scraping.

    All descriptive fields are optional because a failure may be detected
    at different stages with different information available.
    """

    __tablename__ = "link_errors"
    id: Mapped[int] = mapped_column(primary_key=True)
    # Product page on which the bad link was found.
    product_slug: Mapped[Optional[str]] = mapped_column(String(255), index=True)
    href: Mapped[Optional[str]] = mapped_column(Text)
    text: Mapped[Optional[str]] = mapped_column(Text)
    # Nav context (top/sub category labels) at the time of discovery.
    top: Mapped[Optional[str]] = mapped_column(String(255))
    sub: Mapped[Optional[str]] = mapped_column(String(255))
    target_slug: Mapped[Optional[str]] = mapped_column(String(255))
    type: Mapped[Optional[str]] = mapped_column(String(255))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
|
||||
|
||||
class LinkExternal(Base):
    """Report row for an external (off-site) link found on a product page."""

    __tablename__ = "link_externals"
    id: Mapped[int] = mapped_column(primary_key=True)
    product_slug: Mapped[Optional[str]] = mapped_column(String(255), index=True)
    href: Mapped[Optional[str]] = mapped_column(Text)
    text: Mapped[Optional[str]] = mapped_column(Text)
    # Hostname extracted from href, kept separately for easy grouping.
    host: Mapped[Optional[str]] = mapped_column(String(255))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
|
||||
|
||||
class SubcategoryRedirect(Base):
    """Maps an old subcategory URL path to its replacement path."""

    __tablename__ = "subcategory_redirects"
    id: Mapped[int] = mapped_column(primary_key=True)
    old_path: Mapped[str] = mapped_column(String(512), nullable=False, index=True)
    new_path: Mapped[str] = mapped_column(String(512), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
|
||||
|
||||
class ProductLog(Base):
    """Append-only log of one product-scrape attempt.

    Records the outcome (``ok``), HTTP/transport failure details, and
    counts of what was extracted. Unlike the other tables this one is
    insert-only: no updated_at/deleted_at.
    """

    __tablename__ = "product_logs"
    id: Mapped[int] = mapped_column(primary_key=True)
    slug: Mapped[Optional[str]] = mapped_column(String(255), index=True)
    href_tried: Mapped[Optional[str]] = mapped_column(Text)
    # Overall success flag; defaults to false at the database level.
    ok: Mapped[bool] = mapped_column(Boolean, nullable=False, server_default="false")
    error_type: Mapped[Optional[str]] = mapped_column(String(255))
    error_message: Mapped[Optional[str]] = mapped_column(Text)
    http_status: Mapped[Optional[int]] = mapped_column(Integer)
    final_url: Mapped[Optional[str]] = mapped_column(Text)
    # True when the failure happened below HTTP (DNS, TLS, connect, ...).
    transport_error: Mapped[Optional[bool]] = mapped_column(Boolean)
    title: Mapped[Optional[str]] = mapped_column(String(512))
    has_description_html: Mapped[Optional[bool]] = mapped_column(Boolean)
    has_description_short: Mapped[Optional[bool]] = mapped_column(Boolean)
    sections_count: Mapped[Optional[int]] = mapped_column(Integer)
    images_count: Mapped[Optional[int]] = mapped_column(Integer)
    embedded_images_count: Mapped[Optional[int]] = mapped_column(Integer)
    all_images_count: Mapped[Optional[int]] = mapped_column(Integer)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
|
||||
|
||||
|
||||
|
||||
# ...existing models...
|
||||
|
||||
class ProductLabel(Base):
    """A named label attached to a product (unique per product).

    Deleted with the product (ON DELETE CASCADE); soft-deleted via
    ``deleted_at``.
    """

    __tablename__ = "product_labels"
    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    # NOTE(review): Product.labels does not declare back_populates="product";
    # this pairing must be declared on both sides — confirm the Product side.
    product: Mapped["Product"] = relationship(back_populates="labels")

    __table_args__ = (UniqueConstraint("product_id", "name", name="uq_product_labels_product_name"),)
|
||||
|
||||
class ProductSticker(Base):
    """A named promotional sticker attached to a product (unique per product).

    Deleted with the product (ON DELETE CASCADE); soft-deleted via
    ``deleted_at``.
    """

    __tablename__ = "product_stickers"
    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    # NOTE(review): Product.stickers does not declare back_populates="product";
    # this pairing must be declared on both sides — confirm the Product side.
    product: Mapped["Product"] = relationship(back_populates="stickers")

    __table_args__ = (UniqueConstraint("product_id", "name", name="uq_product_stickers_product_name"),)
|
||||
|
||||
class ProductAttribute(Base):
    """A key/value attribute of a product (key unique per product).

    Deleted with the product (ON DELETE CASCADE); soft-deleted via
    ``deleted_at``.
    """

    __tablename__ = "product_attributes"
    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    key: Mapped[str] = mapped_column(String(255), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))

    value: Mapped[Optional[str]] = mapped_column(Text)
    # Mirrored by Product.attributes (loaded eagerly on that side).
    product = relationship("Product", back_populates="attributes")
    __table_args__ = (UniqueConstraint("product_id", "key", name="uq_product_attributes_product_key"),)
|
||||
|
||||
class ProductNutrition(Base):
    """One nutrition-table row for a product (key unique per product).

    ``value`` and ``unit`` are stored as source strings (e.g. the amount
    and its unit as scraped). Deleted with the product; soft-deleted via
    ``deleted_at``.
    """

    __tablename__ = "product_nutrition"
    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    key: Mapped[str] = mapped_column(String(255), nullable=False)
    value: Mapped[Optional[str]] = mapped_column(String(255))
    unit: Mapped[Optional[str]] = mapped_column(String(64))
    # Mirrored by Product.nutrition (loaded eagerly on that side).
    product = relationship("Product", back_populates="nutrition")
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    __table_args__ = (UniqueConstraint("product_id", "key", name="uq_product_nutrition_product_key"),)
|
||||
|
||||
class ProductAllergen(Base):
    """An allergen declaration for a product (name unique per product).

    ``contains`` is false at the database level unless explicitly set.
    Deleted with the product; soft-deleted via ``deleted_at``.
    """

    __tablename__ = "product_allergens"
    id: Mapped[int] = mapped_column(primary_key=True)
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id", ondelete="CASCADE"), index=True, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    contains: Mapped[bool] = mapped_column(Boolean, nullable=False, server_default="false")
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
    # Mirrored by Product.allergens (loaded eagerly on that side).
    product: Mapped["Product"] = relationship(back_populates="allergens")
    __table_args__ = (UniqueConstraint("product_id", "name", name="uq_product_allergens_product_name"),)
|
||||
|
||||
|
||||
class CartItem(Base):
    """One product line in a shopping cart.

    A cart item belongs to either a logged-in user (``user_id``) or an
    anonymous session (``session_id``) — both columns are nullable and
    nothing at the schema level forces exactly one to be set; callers are
    expected to populate one of them. Soft-deleted via ``deleted_at``.
    """

    __tablename__ = "cart_items"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Either a logged-in user OR an anonymous session identifies the cart.
    user_id: Mapped[int | None] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=True,
    )
    session_id: Mapped[str | None] = mapped_column(
        String(128),
        nullable=True,
    )

    # Links to the product's surrogate id (not its slug), so slug changes
    # don't orphan cart rows.
    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id", ondelete="CASCADE"),
        nullable=False,
    )

    # Defaults to 1 both ORM-side and at the database level.
    quantity: Mapped[int] = mapped_column(
        Integer,
        nullable=False,
        default=1,
        server_default="1",
    )

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    # Marketplace the item was added from; nulled out if it disappears.
    market_place_id: Mapped[int | None] = mapped_column(
        ForeignKey("market_places.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )

    deleted_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )

    # Relationships

    market_place: Mapped["MarketPlace | None"] = relationship(
        "MarketPlace",
        foreign_keys=[market_place_id],
    )
    # Mirrored by Product.cart_items.
    product: Mapped["Product"] = relationship(
        "Product",
        back_populates="cart_items",
    )
    user: Mapped["User | None"] = relationship("User", back_populates="cart_items")

    # Composite indexes for the two lookup paths: by user and by session.
    __table_args__ = (
        Index("ix_cart_items_user_product", "user_id", "product_id"),
        Index("ix_cart_items_session_product", "session_id", "product_id"),
    )
|
||||
52
models/market_place.py
Normal file
52
models/market_place.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, List
|
||||
|
||||
from sqlalchemy import (
|
||||
Integer, String, Text, DateTime, ForeignKey, Index, func, text,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
def utcnow() -> datetime:
    """Return the current instant as a timezone-aware datetime in UTC."""
    now = datetime.now(timezone.utc)
    return now
|
||||
|
||||
|
||||
class MarketPlace(Base):
    """A marketplace attached to some container object.

    The container is identified polymorphically by (container_type,
    container_id) — container_type defaults to 'page' at the database
    level. Soft-deleted via ``deleted_at``; among non-deleted rows the
    lower-cased slug is unique (partial functional index, PostgreSQL).
    """

    __tablename__ = "market_places"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    container_type: Mapped[str] = mapped_column(
        String(32), nullable=False, server_default=text("'page'"),
    )
    container_id: Mapped[int] = mapped_column(Integer, nullable=False)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    deleted_at: Mapped[Optional[datetime]] = mapped_column(
        DateTime(timezone=True), nullable=True,
    )

    # Mirrored by NavTop.market.
    nav_tops: Mapped[List["NavTop"]] = relationship(
        "NavTop", back_populates="market",
    )

    __table_args__ = (
        Index("ix_market_places_container", "container_type", "container_id"),
        # Case-insensitive slug uniqueness, enforced only for live rows
        # (the partial-index WHERE clause is PostgreSQL-specific).
        Index(
            "ux_market_places_slug_active",
            func.lower(slug),
            unique=True,
            postgresql_where=text("deleted_at IS NULL"),
        ),
    )
|
||||
50
models/menu_node.py
Normal file
50
models/menu_node.py
Normal file
@@ -0,0 +1,50 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy import Integer, String, Text, DateTime, ForeignKey, Index, func
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
class MenuNode(Base):
    """A node in a hierarchical navigation menu, scoped to a container."""

    __tablename__ = "menu_nodes"

    __table_args__ = (
        Index("ix_menu_nodes_container", "container_type", "container_id"),
        Index("ix_menu_nodes_parent_id", "parent_id"),
    )

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Polymorphic owner reference: (container_type, container_id).
    container_type: Mapped[str] = mapped_column(String(32), nullable=False)
    container_id: Mapped[int] = mapped_column(Integer, nullable=False)

    # Self-referential tree; deleting a parent detaches children (SET NULL)
    # rather than cascading the delete.
    parent_id: Mapped[Optional[int]] = mapped_column(
        Integer,
        ForeignKey("menu_nodes.id", ondelete="SET NULL"),
        nullable=True,
    )

    # Ordering among siblings, and stored depth within the tree.
    sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    depth: Mapped[int] = mapped_column(Integer, nullable=False, default=0)

    label: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    href: Mapped[Optional[str]] = mapped_column(String(1024), nullable=True)
    icon: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
    feature_image: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )
    # Soft-delete marker; active rows have deleted_at IS NULL.
    deleted_at: Mapped[Optional[datetime]] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
|
||||
114
models/order.py
Normal file
114
models/order.py
Normal file
@@ -0,0 +1,114 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional, List
|
||||
|
||||
from sqlalchemy import Integer, String, DateTime, ForeignKey, Numeric, func, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
class Order(Base):
    """A customer order, owned by a user or an anonymous session.

    Carries SumUp payment-provider fields and a relationship to its line
    items (``OrderItem``) and the page configuration it was placed under.
    """

    __tablename__ = "orders"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Identity: either a logged-in user or an anonymous session; both nullable.
    user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id"), nullable=True)
    session_id: Mapped[Optional[str]] = mapped_column(String(64), index=True, nullable=True)

    # Page configuration the order was created under; kept (SET NULL) if the
    # config is deleted.
    page_config_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey("page_configs.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )

    status: Mapped[str] = mapped_column(
        String(32),
        nullable=False,
        default="pending",
        server_default="pending",
    )
    currency: Mapped[str] = mapped_column(String(16), nullable=False, default="GBP")
    total_amount: Mapped[float] = mapped_column(Numeric(12, 2), nullable=False)

    # free-form description for the order
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True, index=True)

    # SumUp reference string (what we send as checkout_reference)
    sumup_reference: Mapped[Optional[str]] = mapped_column(
        String(255),
        nullable=True,
        index=True,
    )

    # SumUp integration fields
    sumup_checkout_id: Mapped[Optional[str]] = mapped_column(
        String(128),
        nullable=True,
        index=True,
    )
    sumup_status: Mapped[Optional[str]] = mapped_column(String(32), nullable=True)
    sumup_hosted_url: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
        onupdate=func.now(),
    )

    # Line items are owned by the order: deleting the order deletes them.
    items: Mapped[List["OrderItem"]] = relationship(
        "OrderItem",
        back_populates="order",
        cascade="all, delete-orphan",
        lazy="selectin",
    )
    page_config: Mapped[Optional["PageConfig"]] = relationship(
        "PageConfig",
        foreign_keys=[page_config_id],
        lazy="selectin",
    )
|
||||
|
||||
|
||||
class OrderItem(Base):
    """A single product line on an order.

    Snapshots ``product_title``, ``unit_price`` and ``currency`` at purchase
    time so later product edits do not rewrite order history.
    """

    __tablename__ = "order_items"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Deleting the order removes its items at the database level too.
    order_id: Mapped[int] = mapped_column(
        ForeignKey("orders.id", ondelete="CASCADE"),
        nullable=False,
    )

    product_id: Mapped[int] = mapped_column(
        ForeignKey("products.id"),
        nullable=False,
    )
    # Title snapshot taken when the item was added.
    product_title: Mapped[Optional[str]] = mapped_column(String(512), nullable=True)

    quantity: Mapped[int] = mapped_column(Integer, nullable=False, default=1)
    unit_price: Mapped[float] = mapped_column(Numeric(12, 2), nullable=False)
    currency: Mapped[str] = mapped_column(String(16), nullable=False, default="GBP")

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        server_default=func.now(),
    )

    order: Mapped["Order"] = relationship(
        "Order",
        back_populates="items",
    )

    # NEW: link each order item to its product
    product: Mapped["Product"] = relationship(
        "Product",
        back_populates="order_items",
        lazy="selectin",
    )
|
||||
39
models/page_config.py
Normal file
39
models/page_config.py
Normal file
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Integer, String, Text, DateTime, func, JSON, text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
class PageConfig(Base):
    """Per-container feature flags and payment credentials.

    ``features`` is a JSON blob of toggles; the SumUp columns stay NULL
    until the page owner configures payments.
    """

    __tablename__ = "page_configs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)

    # Polymorphic owner reference: (container_type, container_id).
    container_type: Mapped[str] = mapped_column(
        String(32), nullable=False, server_default=text("'page'"),
    )
    container_id: Mapped[int] = mapped_column(Integer, nullable=False)

    # Arbitrary feature toggles; defaults to an empty JSON object.
    features: Mapped[dict] = mapped_column(
        JSON, nullable=False, server_default="{}"
    )

    # Per-page SumUp credentials (NULL until configured)
    sumup_merchant_code: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
    sumup_api_key: Mapped[Optional[str]] = mapped_column(Text(), nullable=True)
    sumup_checkout_prefix: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    # NOTE(review): no onupdate=func.now() — confirm updated_at is maintained
    # by application code, otherwise it never changes after insert.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )
    # Soft-delete marker; active rows have deleted_at IS NULL.
    deleted_at: Mapped[Optional[datetime]] = mapped_column(
        DateTime(timezone=True), nullable=True
    )
|
||||
@@ -10,6 +10,7 @@ blinker==1.9.0
|
||||
Brotli==1.1.0
|
||||
certifi==2025.10.5
|
||||
click==8.3.0
|
||||
cryptography>=41.0
|
||||
exceptiongroup==1.3.0
|
||||
Flask==3.1.2
|
||||
greenlet==3.2.4
|
||||
|
||||
5
services/__init__.py
Normal file
5
services/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Domain service implementations and registry."""
|
||||
|
||||
from .registry import services
|
||||
|
||||
__all__ = ["services"]
|
||||
65
services/blog_impl.py
Normal file
65
services/blog_impl.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""SQL-backed BlogService implementation.
|
||||
|
||||
Queries ``shared.models.ghost_content.Post`` — only this module may read
|
||||
blog-domain tables on behalf of other domains.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models.ghost_content import Post
|
||||
from shared.contracts.dtos import PostDTO
|
||||
|
||||
|
||||
def _post_to_dto(post: Post) -> PostDTO:
    """Project a blog ``Post`` ORM row onto the cross-domain ``PostDTO``."""
    field_names = (
        "id",
        "slug",
        "title",
        "status",
        "visibility",
        "is_page",
        "feature_image",
        "html",
        "excerpt",
        "custom_excerpt",
        "published_at",
    )
    return PostDTO(**{name: getattr(post, name) for name in field_names})
|
||||
|
||||
|
||||
class SqlBlogService:
    """Blog-domain read service backed directly by the posts table."""

    async def get_post_by_slug(self, session: AsyncSession, slug: str) -> PostDTO | None:
        """Fetch a single post by slug, or None when absent."""
        stmt = select(Post).where(Post.slug == slug)
        row = (await session.execute(stmt)).scalar_one_or_none()
        if row is None:
            return None
        return _post_to_dto(row)

    async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None:
        """Fetch a single post by primary key, or None when absent."""
        stmt = select(Post).where(Post.id == id)
        row = (await session.execute(stmt)).scalar_one_or_none()
        if row is None:
            return None
        return _post_to_dto(row)

    async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]:
        """Batch-fetch posts by id; empty input short-circuits to []."""
        if not ids:
            return []
        stmt = select(Post).where(Post.id.in_(ids))
        rows = (await session.execute(stmt)).scalars().all()
        return [_post_to_dto(row) for row in rows]

    async def search_posts(
        self, session: AsyncSession, query: str, page: int = 1, per_page: int = 10,
    ) -> tuple[list[PostDTO], int]:
        """Search posts by title with pagination. Not part of the Protocol
        (admin-only use in events), but provided for convenience."""
        if query:
            pattern = f"%{query}%"
            count_stmt = select(func.count(Post.id)).where(Post.title.ilike(pattern))
            # Searches are ordered alphabetically by title.
            posts_stmt = select(Post).where(Post.title.ilike(pattern)).order_by(Post.title)
        else:
            count_stmt = select(func.count(Post.id))
            # Browsing (no query) lists newest-published first, unpublished last.
            posts_stmt = select(Post).order_by(Post.published_at.desc().nullslast())

        total = (await session.execute(count_stmt)).scalar() or 0
        page_stmt = posts_stmt.limit(per_page).offset((page - 1) * per_page)
        rows = (await session.execute(page_stmt)).scalars().all()
        return [_post_to_dto(row) for row in rows], total
|
||||
630
services/calendar_impl.py
Normal file
630
services/calendar_impl.py
Normal file
@@ -0,0 +1,630 @@
|
||||
"""SQL-backed CalendarService implementation.
|
||||
|
||||
Queries ``shared.models.calendars.*`` — only this module may write to
|
||||
calendar-domain tables on behalf of other domains.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
|
||||
from sqlalchemy import select, update, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from shared.models.calendars import Calendar, CalendarEntry, CalendarEntryPost, Ticket
|
||||
from shared.contracts.dtos import CalendarDTO, CalendarEntryDTO, TicketDTO
|
||||
|
||||
|
||||
def _cal_to_dto(cal: Calendar) -> CalendarDTO:
    """Project a ``Calendar`` ORM row into its cross-domain DTO."""
    field_names = (
        "id",
        "container_type",
        "container_id",
        "name",
        "slug",
        "description",
    )
    return CalendarDTO(**{name: getattr(cal, name) for name in field_names})
|
||||
|
||||
|
||||
def _entry_to_dto(entry: CalendarEntry) -> CalendarEntryDTO:
    """Project a ``CalendarEntry`` (plus its loaded calendar, if any) into a DTO."""
    parent = getattr(entry, "calendar", None)
    own = {
        name: getattr(entry, name)
        for name in (
            "id",
            "calendar_id",
            "name",
            "start_at",
            "state",
            "cost",
            "end_at",
            "user_id",
            "session_id",
            "order_id",
            "slot_id",
            "ticket_price",
            "ticket_count",
        )
    }
    # Denormalised calendar fields are None when the relationship isn't loaded.
    return CalendarEntryDTO(
        **own,
        calendar_name=parent.name if parent else None,
        calendar_slug=parent.slug if parent else None,
        calendar_container_id=parent.container_id if parent else None,
        calendar_container_type=parent.container_type if parent else None,
    )
|
||||
|
||||
|
||||
def _ticket_to_dto(ticket: Ticket) -> TicketDTO:
    """Flatten a ``Ticket`` plus its loaded entry/ticket-type/calendar into a DTO."""
    entry = getattr(ticket, "entry", None)
    ttype = getattr(ticket, "ticket_type", None)
    cal = getattr(entry, "calendar", None) if entry else None

    # Price preference: explicit ticket-type cost first, then the entry's
    # per-ticket price, otherwise None.
    if ttype and ttype.cost is not None:
        price = ttype.cost
    elif entry and entry.ticket_price is not None:
        price = entry.ticket_price
    else:
        price = None

    return TicketDTO(
        id=ticket.id,
        code=ticket.code,
        state=ticket.state,
        entry_name=entry.name if entry else "",
        # Fall back to the ticket's own creation time when no entry is loaded.
        entry_start_at=entry.start_at if entry else ticket.created_at,
        entry_end_at=entry.end_at if entry else None,
        ticket_type_name=ttype.name if ttype else None,
        calendar_name=cal.name if cal else None,
        created_at=ticket.created_at,
        checked_in_at=ticket.checked_in_at,
        entry_id=entry.id if entry else None,
        ticket_type_id=ticket.ticket_type_id,
        price=price,
        order_id=ticket.order_id,
        calendar_container_id=cal.container_id if cal else None,
    )
|
||||
|
||||
|
||||
class SqlCalendarService:
    """Calendar-domain service: calendars, entries (bookings) and tickets.

    Only this class reads/writes calendar tables on behalf of other
    domains. Per the write methods below, entries move
    pending -> ordered -> provisional, and tickets move
    reserved -> confirmed (or cancelled).
    """

    # -- reads ----------------------------------------------------------------

    async def calendars_for_container(
        self, session: AsyncSession, container_type: str, container_id: int,
    ) -> list[CalendarDTO]:
        """All non-deleted calendars attached to a container, sorted by name."""
        result = await session.execute(
            select(Calendar).where(
                Calendar.container_type == container_type,
                Calendar.container_id == container_id,
                Calendar.deleted_at.is_(None),
            ).order_by(Calendar.name.asc())
        )
        return [_cal_to_dto(c) for c in result.scalars().all()]

    async def pending_entries(
        self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
    ) -> list[CalendarEntryDTO]:
        """Pending entries for the given identity (user preferred over session)."""
        filters = [
            CalendarEntry.deleted_at.is_(None),
            CalendarEntry.state == "pending",
        ]
        if user_id is not None:
            filters.append(CalendarEntry.user_id == user_id)
        elif session_id is not None:
            filters.append(CalendarEntry.session_id == session_id)
        else:
            # No identity at all -> nothing to return.
            return []

        result = await session.execute(
            select(CalendarEntry)
            .where(*filters)
            .order_by(CalendarEntry.start_at.asc())
            .options(selectinload(CalendarEntry.calendar))
        )
        return [_entry_to_dto(e) for e in result.scalars().all()]

    async def entries_for_page(
        self, session: AsyncSession, page_id: int, *,
        user_id: int | None, session_id: str | None,
    ) -> list[CalendarEntryDTO]:
        """Pending entries for the identity, restricted to one page's calendars."""
        # Calendars owned by this page (subquery, resolved in the DB).
        cal_ids = select(Calendar.id).where(
            Calendar.container_type == "page",
            Calendar.container_id == page_id,
            Calendar.deleted_at.is_(None),
        ).scalar_subquery()

        filters = [
            CalendarEntry.deleted_at.is_(None),
            CalendarEntry.state == "pending",
            CalendarEntry.calendar_id.in_(cal_ids),
        ]
        if user_id is not None:
            filters.append(CalendarEntry.user_id == user_id)
        elif session_id is not None:
            filters.append(CalendarEntry.session_id == session_id)
        else:
            return []

        result = await session.execute(
            select(CalendarEntry)
            .where(*filters)
            .order_by(CalendarEntry.start_at.asc())
            .options(selectinload(CalendarEntry.calendar))
        )
        return [_entry_to_dto(e) for e in result.scalars().all()]

    async def entry_by_id(self, session: AsyncSession, entry_id: int) -> CalendarEntryDTO | None:
        """Single non-deleted entry by primary key, or None."""
        entry = (
            await session.execute(
                select(CalendarEntry)
                .where(CalendarEntry.id == entry_id, CalendarEntry.deleted_at.is_(None))
                .options(selectinload(CalendarEntry.calendar))
            )
        ).scalar_one_or_none()
        return _entry_to_dto(entry) if entry else None

    async def entry_ids_for_content(
        self, session: AsyncSession, content_type: str, content_id: int,
    ) -> set[int]:
        """Get entry IDs associated with a content item (e.g. post)."""
        result = await session.execute(
            select(CalendarEntryPost.entry_id).where(
                CalendarEntryPost.content_type == content_type,
                CalendarEntryPost.content_id == content_id,
                CalendarEntryPost.deleted_at.is_(None),
            )
        )
        return set(result.scalars().all())

    async def visible_entries_for_period(
        self, session: AsyncSession, calendar_id: int,
        period_start: datetime, period_end: datetime,
        *, user_id: int | None, is_admin: bool, session_id: str | None,
    ) -> list[CalendarEntryDTO]:
        """Return visible entries for a calendar in a date range.

        Visibility rules:
        - Everyone sees confirmed entries.
        - Current user/session sees their own entries (any state).
        - Admins also see ordered + provisional entries for all users.
        """
        # User/session entries (any state)
        user_entries: list[CalendarEntry] = []
        if user_id or session_id:
            conditions = [
                CalendarEntry.calendar_id == calendar_id,
                CalendarEntry.deleted_at.is_(None),
                CalendarEntry.start_at >= period_start,
                CalendarEntry.start_at < period_end,
            ]
            if user_id:
                conditions.append(CalendarEntry.user_id == user_id)
            elif session_id:
                conditions.append(CalendarEntry.session_id == session_id)
            result = await session.execute(
                select(CalendarEntry).where(*conditions)
                .options(selectinload(CalendarEntry.calendar))
            )
            user_entries = list(result.scalars().all())

        # Confirmed entries for everyone
        result = await session.execute(
            select(CalendarEntry).where(
                CalendarEntry.calendar_id == calendar_id,
                CalendarEntry.state == "confirmed",
                CalendarEntry.deleted_at.is_(None),
                CalendarEntry.start_at >= period_start,
                CalendarEntry.start_at < period_end,
            ).options(selectinload(CalendarEntry.calendar))
        )
        confirmed_entries = list(result.scalars().all())

        # Admin: ordered + provisional for everyone
        admin_entries: list[CalendarEntry] = []
        if is_admin:
            result = await session.execute(
                select(CalendarEntry).where(
                    CalendarEntry.calendar_id == calendar_id,
                    CalendarEntry.state.in_(("ordered", "provisional")),
                    CalendarEntry.deleted_at.is_(None),
                    CalendarEntry.start_at >= period_start,
                    CalendarEntry.start_at < period_end,
                ).options(selectinload(CalendarEntry.calendar))
            )
            admin_entries = list(result.scalars().all())

        # Merge, deduplicate, sort
        # (later insertions win; user-owned rows take priority over the
        # confirmed/admin copies of the same entry id).
        entries_by_id: dict[int, CalendarEntry] = {}
        for e in confirmed_entries:
            entries_by_id[e.id] = e
        for e in admin_entries:
            entries_by_id[e.id] = e
        for e in user_entries:
            entries_by_id[e.id] = e

        merged = sorted(entries_by_id.values(), key=lambda e: e.start_at or period_start)
        return [_entry_to_dto(e) for e in merged]

    async def associated_entries(
        self, session: AsyncSession, content_type: str, content_id: int, page: int,
    ) -> tuple[list[CalendarEntryDTO], bool]:
        """Get paginated confirmed entries associated with a content item."""
        per_page = 10
        entry_ids_result = await session.execute(
            select(CalendarEntryPost.entry_id).where(
                CalendarEntryPost.content_type == content_type,
                CalendarEntryPost.content_id == content_id,
                CalendarEntryPost.deleted_at.is_(None),
            )
        )
        entry_ids = set(entry_ids_result.scalars().all())
        if not entry_ids:
            return [], False

        offset = (page - 1) * per_page
        result = await session.execute(
            select(CalendarEntry)
            .where(
                CalendarEntry.id.in_(entry_ids),
                CalendarEntry.deleted_at.is_(None),
                CalendarEntry.state == "confirmed",
            )
            .order_by(CalendarEntry.start_at.desc())
            .limit(per_page)
            .offset(offset)
            .options(selectinload(CalendarEntry.calendar))
        )
        entries = result.scalars().all()
        # Heuristic: an exactly-full page reports has_more=True, so the
        # final page can appear to have a (possibly empty) successor.
        has_more = len(entries) == per_page
        return [_entry_to_dto(e) for e in entries], has_more

    async def toggle_entry_post(
        self, session: AsyncSession, entry_id: int, content_type: str, content_id: int,
    ) -> bool:
        """Toggle association; returns True if now associated, False if removed."""
        existing = await session.scalar(
            select(CalendarEntryPost).where(
                CalendarEntryPost.entry_id == entry_id,
                CalendarEntryPost.content_type == content_type,
                CalendarEntryPost.content_id == content_id,
                CalendarEntryPost.deleted_at.is_(None),
            )
        )
        if existing:
            # Soft-delete rather than removing the row.
            existing.deleted_at = func.now()
            await session.flush()
            return False
        else:
            assoc = CalendarEntryPost(
                entry_id=entry_id,
                content_type=content_type,
                content_id=content_id,
            )
            session.add(assoc)
            await session.flush()
            return True

    async def get_entries_for_order(
        self, session: AsyncSession, order_id: int,
    ) -> list[CalendarEntryDTO]:
        """All non-deleted entries attached to one order."""
        result = await session.execute(
            select(CalendarEntry)
            .where(
                CalendarEntry.order_id == order_id,
                CalendarEntry.deleted_at.is_(None),
            )
            .options(selectinload(CalendarEntry.calendar))
        )
        return [_entry_to_dto(e) for e in result.scalars().all()]

    async def user_tickets(
        self, session: AsyncSession, *, user_id: int,
    ) -> list[TicketDTO]:
        """All of a user's tickets except cancelled ones, newest first."""
        result = await session.execute(
            select(Ticket)
            .where(
                Ticket.user_id == user_id,
                Ticket.state != "cancelled",
            )
            .order_by(Ticket.created_at.desc())
            .options(
                selectinload(Ticket.entry).selectinload(CalendarEntry.calendar),
                selectinload(Ticket.ticket_type),
            )
        )
        return [_ticket_to_dto(t) for t in result.scalars().all()]

    async def user_bookings(
        self, session: AsyncSession, *, user_id: int,
    ) -> list[CalendarEntryDTO]:
        """A user's post-checkout bookings (ordered/provisional/confirmed)."""
        result = await session.execute(
            select(CalendarEntry)
            .where(
                CalendarEntry.user_id == user_id,
                CalendarEntry.deleted_at.is_(None),
                CalendarEntry.state.in_(("ordered", "provisional", "confirmed")),
            )
            .order_by(CalendarEntry.start_at.desc())
            .options(selectinload(CalendarEntry.calendar))
        )
        return [_entry_to_dto(e) for e in result.scalars().all()]

    # -- batch reads (not in protocol — convenience for blog service) ---------

    async def confirmed_entries_for_posts(
        self, session: AsyncSession, post_ids: list[int],
    ) -> dict[int, list[CalendarEntryDTO]]:
        """Return confirmed entries grouped by post_id for a batch of posts."""
        if not post_ids:
            return {}

        result = await session.execute(
            select(CalendarEntry, CalendarEntryPost.content_id)
            .join(CalendarEntryPost, CalendarEntry.id == CalendarEntryPost.entry_id)
            .options(selectinload(CalendarEntry.calendar))
            .where(
                CalendarEntryPost.content_type == "post",
                CalendarEntryPost.content_id.in_(post_ids),
                CalendarEntryPost.deleted_at.is_(None),
                CalendarEntry.deleted_at.is_(None),
                CalendarEntry.state == "confirmed",
            )
            .order_by(CalendarEntry.start_at.asc())
        )

        entries_by_post: dict[int, list[CalendarEntryDTO]] = {}
        for entry, post_id in result:
            entries_by_post.setdefault(post_id, []).append(_entry_to_dto(entry))
        return entries_by_post

    # -- writes (absorb glue lifecycle) ---------------------------------------

    async def adopt_entries_for_user(
        self, session: AsyncSession, user_id: int, session_id: str,
    ) -> None:
        """Adopt anonymous calendar entries for a logged-in user.

        Only deletes stale *pending* entries for the user — confirmed/ordered
        entries must be preserved.
        """
        # Soft-delete the user's pre-existing pending entries (treated as
        # stale) before adopting the session's entries below.
        await session.execute(
            update(CalendarEntry)
            .where(
                CalendarEntry.deleted_at.is_(None),
                CalendarEntry.user_id == user_id,
                CalendarEntry.state == "pending",
            )
            .values(deleted_at=func.now())
        )
        # Re-home every active entry of the anonymous session onto the user.
        cal_result = await session.execute(
            select(CalendarEntry).where(
                CalendarEntry.deleted_at.is_(None),
                CalendarEntry.session_id == session_id,
            )
        )
        for entry in cal_result.scalars().all():
            entry.user_id = user_id

    async def claim_entries_for_order(
        self, session: AsyncSession, order_id: int, user_id: int | None,
        session_id: str | None, page_post_id: int | None,
    ) -> None:
        """Mark pending CalendarEntries as 'ordered' and set order_id."""
        filters = [
            CalendarEntry.deleted_at.is_(None),
            CalendarEntry.state == "pending",
        ]
        # NOTE(review): if both user_id and session_id are None, no identity
        # filter is applied and ALL pending entries (page-scoped at most)
        # would be claimed — confirm callers always pass an identity.
        if user_id is not None:
            filters.append(CalendarEntry.user_id == user_id)
        elif session_id is not None:
            filters.append(CalendarEntry.session_id == session_id)

        if page_post_id is not None:
            # Restrict to entries whose calendar belongs to this page.
            cal_ids = select(Calendar.id).where(
                Calendar.container_type == "page",
                Calendar.container_id == page_post_id,
                Calendar.deleted_at.is_(None),
            ).scalar_subquery()
            filters.append(CalendarEntry.calendar_id.in_(cal_ids))

        await session.execute(
            update(CalendarEntry)
            .where(*filters)
            .values(state="ordered", order_id=order_id)
        )

    async def confirm_entries_for_order(
        self, session: AsyncSession, order_id: int, user_id: int | None,
        session_id: str | None,
    ) -> None:
        """Mark ordered CalendarEntries as 'provisional'."""
        # Note the target state is 'provisional', not 'confirmed' — final
        # confirmation presumably happens elsewhere (admin flow); verify.
        filters = [
            CalendarEntry.deleted_at.is_(None),
            CalendarEntry.state == "ordered",
            CalendarEntry.order_id == order_id,
        ]
        if user_id is not None:
            filters.append(CalendarEntry.user_id == user_id)
        elif session_id is not None:
            filters.append(CalendarEntry.session_id == session_id)

        await session.execute(
            update(CalendarEntry)
            .where(*filters)
            .values(state="provisional")
        )

    # -- ticket methods -------------------------------------------------------

    def _ticket_query_options(self):
        """Shared eager-loading options for ticket queries (entry, calendar, type)."""
        return [
            selectinload(Ticket.entry).selectinload(CalendarEntry.calendar),
            selectinload(Ticket.ticket_type),
        ]

    async def pending_tickets(
        self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
    ) -> list[TicketDTO]:
        """Reserved tickets for the given identity (cart line items)."""
        filters = [Ticket.state == "reserved"]
        if user_id is not None:
            filters.append(Ticket.user_id == user_id)
        elif session_id is not None:
            filters.append(Ticket.session_id == session_id)
        else:
            return []

        result = await session.execute(
            select(Ticket)
            .where(*filters)
            .order_by(Ticket.created_at.asc())
            .options(*self._ticket_query_options())
        )
        return [_ticket_to_dto(t) for t in result.scalars().all()]

    async def tickets_for_page(
        self, session: AsyncSession, page_id: int, *,
        user_id: int | None, session_id: str | None,
    ) -> list[TicketDTO]:
        """Reserved tickets scoped to a page (via entry → calendar → container_id)."""
        cal_ids = select(Calendar.id).where(
            Calendar.container_type == "page",
            Calendar.container_id == page_id,
            Calendar.deleted_at.is_(None),
        ).scalar_subquery()

        entry_ids = select(CalendarEntry.id).where(
            CalendarEntry.calendar_id.in_(cal_ids),
            CalendarEntry.deleted_at.is_(None),
        ).scalar_subquery()

        filters = [
            Ticket.state == "reserved",
            Ticket.entry_id.in_(entry_ids),
        ]
        if user_id is not None:
            filters.append(Ticket.user_id == user_id)
        elif session_id is not None:
            filters.append(Ticket.session_id == session_id)
        else:
            return []

        result = await session.execute(
            select(Ticket)
            .where(*filters)
            .order_by(Ticket.created_at.asc())
            .options(*self._ticket_query_options())
        )
        return [_ticket_to_dto(t) for t in result.scalars().all()]

    async def claim_tickets_for_order(
        self, session: AsyncSession, order_id: int, user_id: int | None,
        session_id: str | None, page_post_id: int | None,
    ) -> None:
        """Set order_id on reserved tickets at checkout."""
        filters = [Ticket.state == "reserved"]
        # NOTE(review): same identity hazard as claim_entries_for_order —
        # with no user_id/session_id, all reserved tickets would be claimed.
        if user_id is not None:
            filters.append(Ticket.user_id == user_id)
        elif session_id is not None:
            filters.append(Ticket.session_id == session_id)

        if page_post_id is not None:
            cal_ids = select(Calendar.id).where(
                Calendar.container_type == "page",
                Calendar.container_id == page_post_id,
                Calendar.deleted_at.is_(None),
            ).scalar_subquery()
            entry_ids = select(CalendarEntry.id).where(
                CalendarEntry.calendar_id.in_(cal_ids),
                CalendarEntry.deleted_at.is_(None),
            ).scalar_subquery()
            filters.append(Ticket.entry_id.in_(entry_ids))

        await session.execute(
            update(Ticket).where(*filters).values(order_id=order_id)
        )

    async def confirm_tickets_for_order(
        self, session: AsyncSession, order_id: int,
    ) -> None:
        """Reserved → confirmed on payment."""
        await session.execute(
            update(Ticket)
            .where(Ticket.order_id == order_id, Ticket.state == "reserved")
            .values(state="confirmed")
        )

    async def get_tickets_for_order(
        self, session: AsyncSession, order_id: int,
    ) -> list[TicketDTO]:
        """Tickets for a given order (checkout return display)."""
        result = await session.execute(
            select(Ticket)
            .where(Ticket.order_id == order_id)
            .order_by(Ticket.created_at.asc())
            .options(*self._ticket_query_options())
        )
        return [_ticket_to_dto(t) for t in result.scalars().all()]

    async def adopt_tickets_for_user(
        self, session: AsyncSession, user_id: int, session_id: str,
    ) -> None:
        """Migrate anonymous reserved tickets to user on login."""
        result = await session.execute(
            select(Ticket).where(
                Ticket.session_id == session_id,
                Ticket.state == "reserved",
            )
        )
        for ticket in result.scalars().all():
            ticket.user_id = user_id

    async def adjust_ticket_quantity(
        self, session: AsyncSession, entry_id: int, count: int, *,
        user_id: int | None, session_id: str | None,
        ticket_type_id: int | None = None,
    ) -> int:
        """Adjust reserved ticket count to target. Returns new count."""
        import uuid

        # Negative targets are clamped to zero.
        count = max(count, 0)

        # Current reserved count
        filters = [
            Ticket.entry_id == entry_id,
            Ticket.state == "reserved",
        ]
        if user_id is not None:
            filters.append(Ticket.user_id == user_id)
        elif session_id is not None:
            filters.append(Ticket.session_id == session_id)
        else:
            return 0
        if ticket_type_id is not None:
            filters.append(Ticket.ticket_type_id == ticket_type_id)

        current = await session.scalar(
            select(func.count(Ticket.id)).where(*filters)
        ) or 0

        if count > current:
            # Create tickets
            for _ in range(count - current):
                ticket = Ticket(
                    entry_id=entry_id,
                    ticket_type_id=ticket_type_id,
                    user_id=user_id,
                    session_id=session_id,
                    code=uuid.uuid4().hex,
                    state="reserved",
                )
                session.add(ticket)
            await session.flush()
        elif count < current:
            # Cancel newest tickets
            to_cancel = current - count
            result = await session.execute(
                select(Ticket)
                .where(*filters)
                .order_by(Ticket.created_at.desc())
                .limit(to_cancel)
            )
            for ticket in result.scalars().all():
                ticket.state = "cancelled"
            await session.flush()

        return count
|
||||
162
services/cart_impl.py
Normal file
162
services/cart_impl.py
Normal file
@@ -0,0 +1,162 @@
|
||||
"""SQL-backed CartService implementation.
|
||||
|
||||
Queries ``shared.models.market.CartItem`` — only this module may write
|
||||
to cart-domain tables on behalf of other domains.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
from sqlalchemy import select, update, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from shared.models.market import CartItem
|
||||
from shared.models.market_place import MarketPlace
|
||||
from shared.models.calendars import CalendarEntry, Calendar
|
||||
from shared.contracts.dtos import CartItemDTO, CartSummaryDTO
|
||||
|
||||
|
||||
def _item_to_dto(ci: CartItem) -> CartItemDTO:
    """Map a CartItem row (with its eager-loaded product, if any) to a DTO."""
    p = ci.product
    if p:
        title, slug, image = p.title, p.slug, p.image
        price = Decimal(str(p.special_price or p.regular_price or 0))
    else:
        title = slug = image = price = None
    return CartItemDTO(
        id=ci.id,
        product_id=ci.product_id,
        quantity=ci.quantity,
        product_title=title,
        product_slug=slug,
        product_image=image,
        unit_price=price,
        market_place_id=ci.market_place_id,
    )
|
||||
|
||||
|
||||
class SqlCartService:
    """SQL implementation of the CartService protocol.

    Reads/writes ``CartItem`` rows directly and aggregates pending calendar
    entries and tickets (via the service registry) into one cart summary.
    """

    async def cart_summary(
        self, session: AsyncSession, *,
        user_id: int | None, session_id: str | None,
        page_slug: str | None = None,
    ) -> CartSummaryDTO:
        """Build a lightweight cart summary for the current identity.

        Identity is the logged-in ``user_id`` when present, else the anonymous
        ``session_id``; with neither, an empty summary is returned. When
        ``page_slug`` resolves to a page, products/entries/tickets are scoped
        to that page's marketplaces and calendars.
        """
        # Lazy import (avoids a module-load cycle); one import now serves the
        # whole method instead of being repeated mid-function.
        from shared.services.registry import services

        # Resolve page filter
        page_post_id: int | None = None
        if page_slug:
            post = await services.blog.get_post_by_slug(session, page_slug)
            if post and post.is_page:
                page_post_id = post.id

        # --- product cart ---
        cart_q = select(CartItem).where(CartItem.deleted_at.is_(None))
        if user_id is not None:
            cart_q = cart_q.where(CartItem.user_id == user_id)
        elif session_id is not None:
            cart_q = cart_q.where(CartItem.session_id == session_id)
        else:
            return CartSummaryDTO()  # no identity -> empty summary

        if page_post_id is not None:
            # Restrict to marketplaces attached to the requested page.
            mp_ids = select(MarketPlace.id).where(
                MarketPlace.container_type == "page",
                MarketPlace.container_id == page_post_id,
                MarketPlace.deleted_at.is_(None),
            ).scalar_subquery()
            cart_q = cart_q.where(CartItem.market_place_id.in_(mp_ids))

        cart_q = cart_q.options(selectinload(CartItem.product))
        result = await session.execute(cart_q)
        cart_items = result.scalars().all()

        count = sum(ci.quantity for ci in cart_items)
        # Decimal start value keeps the total's type stable even for an empty
        # cart (a bare sum() of no elements would yield int 0).
        total = sum(
            (
                Decimal(str(ci.product.special_price or ci.product.regular_price or 0)) * ci.quantity
                for ci in cart_items
                if ci.product and (ci.product.special_price or ci.product.regular_price)
            ),
            Decimal("0"),
        )

        # --- calendar entries ---
        if page_post_id is not None:
            cal_entries = await services.calendar.entries_for_page(
                session, page_post_id,
                user_id=user_id,
                session_id=session_id,
            )
        else:
            cal_entries = await services.calendar.pending_entries(
                session,
                user_id=user_id,
                session_id=session_id,
            )

        calendar_count = len(cal_entries)
        calendar_total = sum(
            (Decimal(str(e.cost or 0)) for e in cal_entries if e.cost is not None),
            Decimal("0"),
        )

        # --- tickets ---
        if page_post_id is not None:
            tickets = await services.calendar.tickets_for_page(
                session, page_post_id,
                user_id=user_id,
                session_id=session_id,
            )
        else:
            tickets = await services.calendar.pending_tickets(
                session,
                user_id=user_id,
                session_id=session_id,
            )

        ticket_count = len(tickets)
        ticket_total = sum(
            (Decimal(str(t.price or 0)) for t in tickets),
            Decimal("0"),
        )

        items = [_item_to_dto(ci) for ci in cart_items]

        return CartSummaryDTO(
            count=count,
            total=total,
            calendar_count=calendar_count,
            calendar_total=calendar_total,
            items=items,
            ticket_count=ticket_count,
            ticket_total=ticket_total,
        )

    async def cart_items(
        self, session: AsyncSession, *,
        user_id: int | None, session_id: str | None,
    ) -> list[CartItemDTO]:
        """List the identity's live cart items, newest first ([] without identity)."""
        cart_q = select(CartItem).where(CartItem.deleted_at.is_(None))
        if user_id is not None:
            cart_q = cart_q.where(CartItem.user_id == user_id)
        elif session_id is not None:
            cart_q = cart_q.where(CartItem.session_id == session_id)
        else:
            return []

        cart_q = cart_q.options(selectinload(CartItem.product)).order_by(CartItem.created_at.desc())
        result = await session.execute(cart_q)
        return [_item_to_dto(ci) for ci in result.scalars().all()]

    async def adopt_cart_for_user(
        self, session: AsyncSession, user_id: int, session_id: str,
    ) -> None:
        """Adopt anonymous cart items for a logged-in user.

        The anonymous cart wins: any existing user cart is soft-deleted first,
        then the session's items are re-owned to ``user_id``. No-op when the
        session has no anonymous items.
        """
        anon_result = await session.execute(
            select(CartItem).where(
                CartItem.deleted_at.is_(None),
                CartItem.user_id.is_(None),
                CartItem.session_id == session_id,
            )
        )
        anon_items = anon_result.scalars().all()

        if anon_items:
            # Soft-delete existing user cart so the adopted one replaces it.
            await session.execute(
                update(CartItem)
                .where(CartItem.deleted_at.is_(None), CartItem.user_id == user_id)
                .values(deleted_at=func.now())
            )
            for ci in anon_items:
                ci.user_id = user_id
|
||||
1445
services/federation_impl.py
Normal file
1445
services/federation_impl.py
Normal file
File diff suppressed because it is too large
Load Diff
82
services/federation_publish.py
Normal file
82
services/federation_publish.py
Normal file
@@ -0,0 +1,82 @@
|
||||
"""Inline federation publication — called at write time, not via async handler.
|
||||
|
||||
Replaces the old pattern where emit_event("post.published") → async handler →
|
||||
publish_activity(). Now the originating service calls try_publish() directly,
|
||||
which creates the APActivity in the same DB transaction. AP delivery
|
||||
(federation.activity_created → inbox POST) stays async.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.services.registry import services
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def try_publish(
    session: AsyncSession,
    *,
    user_id: int | None,
    activity_type: str,
    object_type: str,
    object_data: dict,
    source_type: str,
    source_id: int,
) -> None:
    """Publish an AP activity if federation is available and user has a profile.

    Safe to call from any app — returns silently if federation isn't wired
    or the user has no actor profile.

    NOTE: mutates the caller's ``object_data`` in place by assigning a
    stable ``"id"`` before handing it to ``publish_activity``.
    Publication failures are logged, never raised to the caller.
    """
    if not services.has("federation"):
        return

    if not user_id:
        return

    actor = await services.federation.get_actor_by_user_id(session, user_id)
    if not actor:
        return

    # Dedup: don't re-Create if already published, don't re-Delete if already deleted
    existing = await services.federation.get_activity_for_source(
        session, source_type, source_id,
    )
    if existing:
        if activity_type == "Create" and existing.activity_type != "Delete":
            return  # already published (allow re-Create after Delete/unpublish)
        if activity_type == "Update" and existing.activity_type == "Update":
            return  # already updated (Ghost fires duplicate webhooks)
        if activity_type == "Delete" and existing.activity_type == "Delete":
            return  # already deleted
    elif activity_type in ("Delete", "Update"):
        return  # never published, nothing to delete/update

    # Stable object ID: same source always gets the same object id so
    # Mastodon treats Create/Update/Delete as the same post.
    domain = os.getenv("AP_DOMAIN", "rose-ash.com")
    object_data["id"] = (
        f"https://{domain}/users/{actor.preferred_username}"
        f"/objects/{source_type.lower()}/{source_id}"
    )

    try:
        await services.federation.publish_activity(
            session,
            actor_user_id=user_id,
            activity_type=activity_type,
            object_type=object_type,
            object_data=object_data,
            source_type=source_type,
            source_id=source_id,
        )
        log.info(
            "Published %s/%s for %s#%d by user %d",
            activity_type, object_type, source_type, source_id, user_id,
        )
    except Exception:
        # Best-effort: federation must never break the originating write.
        log.exception("Failed to publish activity for %s#%d", source_type, source_id)
|
||||
111
services/market_impl.py
Normal file
111
services/market_impl.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""SQL-backed MarketService implementation.
|
||||
|
||||
Queries ``shared.models.market.*`` and ``shared.models.market_place.*`` —
|
||||
only this module may read market-domain tables on behalf of other domains.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models.market import Product
|
||||
from shared.models.market_place import MarketPlace
|
||||
from shared.browser.app.utils import utcnow
|
||||
from shared.contracts.dtos import MarketPlaceDTO, ProductDTO
|
||||
from shared.services.relationships import attach_child, detach_child
|
||||
|
||||
|
||||
def _mp_to_dto(mp: MarketPlace) -> MarketPlaceDTO:
    """Project a MarketPlace row onto its transfer object."""
    fields = {
        "id": mp.id,
        "container_type": mp.container_type,
        "container_id": mp.container_id,
        "name": mp.name,
        "slug": mp.slug,
        "description": mp.description,
    }
    return MarketPlaceDTO(**fields)
|
||||
|
||||
|
||||
def _product_to_dto(p: Product) -> ProductDTO:
    """Project a Product row onto its transfer object."""
    fields = {
        "id": p.id,
        "slug": p.slug,
        "title": p.title,
        "image": p.image,
        "description_short": p.description_short,
        "rrp": p.rrp,
        "regular_price": p.regular_price,
        "special_price": p.special_price,
    }
    return ProductDTO(**fields)
|
||||
|
||||
|
||||
class SqlMarketService:
    """SQL implementation of the MarketService protocol (market-domain reads/writes)."""

    async def marketplaces_for_container(
        self, session: AsyncSession, container_type: str, container_id: int,
    ) -> list[MarketPlaceDTO]:
        """List the container's live marketplaces, alphabetically by name."""
        result = await session.execute(
            select(MarketPlace).where(
                MarketPlace.container_type == container_type,
                MarketPlace.container_id == container_id,
                MarketPlace.deleted_at.is_(None),
            ).order_by(MarketPlace.name.asc())
        )
        return [_mp_to_dto(mp) for mp in result.scalars().all()]

    async def product_by_id(self, session: AsyncSession, product_id: int) -> ProductDTO | None:
        """Fetch a single product by primary key; None when absent."""
        product = (
            await session.execute(select(Product).where(Product.id == product_id))
        ).scalar_one_or_none()
        return _product_to_dto(product) if product else None

    async def create_marketplace(
        self, session: AsyncSession, container_type: str, container_id: int,
        name: str, slug: str,
    ) -> MarketPlaceDTO:
        """Create (or revive a soft-deleted) marketplace under a container.

        Raises ``ValueError`` when an *active* marketplace with the same slug
        already exists for this container. Also (re)attaches the marketplace
        to the container via a ContainerRelation.
        """
        # Look for existing (including soft-deleted)
        existing = (await session.execute(
            select(MarketPlace).where(
                MarketPlace.container_type == container_type,
                MarketPlace.container_id == container_id,
                MarketPlace.slug == slug,
            )
        )).scalar_one_or_none()

        if existing:
            if existing.deleted_at is not None:
                existing.deleted_at = None  # revive
                existing.name = name
                await session.flush()
                # Re-link to the container (attach_child is upsert-safe).
                await attach_child(session, container_type, container_id, "market", existing.id)
                return _mp_to_dto(existing)
            raise ValueError(f'Market with slug "{slug}" already exists for this container.')

        market = MarketPlace(
            container_type=container_type, container_id=container_id,
            name=name, slug=slug,
        )
        session.add(market)
        # Flush so market.id is assigned before creating the relation.
        await session.flush()
        await attach_child(session, container_type, container_id, "market", market.id)
        return _mp_to_dto(market)

    async def soft_delete_marketplace(
        self, session: AsyncSession, container_type: str, container_id: int,
        slug: str,
    ) -> bool:
        """Soft-delete a live marketplace by slug; returns False when not found."""
        market = (await session.execute(
            select(MarketPlace).where(
                MarketPlace.container_type == container_type,
                MarketPlace.container_id == container_id,
                MarketPlace.slug == slug,
                MarketPlace.deleted_at.is_(None),
            )
        )).scalar_one_or_none()

        if not market:
            return False

        market.deleted_at = utcnow()
        await session.flush()
        # Detach from the container so navigation/relations stay consistent.
        await detach_child(session, container_type, container_id, "market", market.id)
        return True
|
||||
32
services/navigation.py
Normal file
32
services/navigation.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models.menu_node import MenuNode
|
||||
|
||||
|
||||
async def get_navigation_tree(session: AsyncSession) -> list[MenuNode]:
    """
    Return top-level menu nodes ordered by sort_order (then id).

    All apps call this directly (shared DB) — no more HTTP API.
    """
    stmt = (
        select(MenuNode)
        .where(MenuNode.deleted_at.is_(None), MenuNode.depth == 0)
        .order_by(MenuNode.sort_order.asc(), MenuNode.id.asc())
    )
    rows = await session.execute(stmt)
    return list(rows.scalars().all())
|
||||
|
||||
|
||||
async def rebuild_navigation(session: AsyncSession) -> None:
    """
    Rebuild menu_nodes from container_relations.

    Invoked by relationship-change event handlers. Deliberately a no-op for
    now: menu nodes are managed directly by the admin UI. When the full
    relationship-driven navigation lands, this will sync
    ContainerRelation -> MenuNode.
    """
    return None
|
||||
105
services/registry.py
Normal file
105
services/registry.py
Normal file
@@ -0,0 +1,105 @@
|
||||
"""Typed singleton registry for domain services.
|
||||
|
||||
Usage::
|
||||
|
||||
from shared.services.registry import services
|
||||
|
||||
# Register at app startup
|
||||
services.blog = SqlBlogService()
|
||||
|
||||
# Query anywhere
|
||||
if services.has("calendar"):
|
||||
entries = await services.calendar.pending_entries(session, ...)
|
||||
|
||||
# Or use stubs for absent domains
|
||||
summary = await services.cart.cart_summary(session, ...)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from shared.contracts.protocols import (
|
||||
BlogService,
|
||||
CalendarService,
|
||||
MarketService,
|
||||
CartService,
|
||||
FederationService,
|
||||
)
|
||||
|
||||
|
||||
class _ServiceRegistry:
    """Central registry holding one implementation per domain.

    Properties return the registered implementation or raise
    ``RuntimeError`` if nothing is registered. Use ``has(name)``
    to check before access when the domain might be absent.
    """

    def __init__(self) -> None:
        # One slot per domain; None until an app registers an implementation
        # (real SQL service or a stub) at startup.
        self._blog: BlogService | None = None
        self._calendar: CalendarService | None = None
        self._market: MarketService | None = None
        self._cart: CartService | None = None
        self._federation: FederationService | None = None

    # -- blog -----------------------------------------------------------------
    @property
    def blog(self) -> BlogService:
        if self._blog is None:
            raise RuntimeError("BlogService not registered")
        return self._blog

    @blog.setter
    def blog(self, impl: BlogService) -> None:
        self._blog = impl

    # -- calendar -------------------------------------------------------------
    @property
    def calendar(self) -> CalendarService:
        if self._calendar is None:
            raise RuntimeError("CalendarService not registered")
        return self._calendar

    @calendar.setter
    def calendar(self, impl: CalendarService) -> None:
        self._calendar = impl

    # -- market ---------------------------------------------------------------
    @property
    def market(self) -> MarketService:
        if self._market is None:
            raise RuntimeError("MarketService not registered")
        return self._market

    @market.setter
    def market(self, impl: MarketService) -> None:
        self._market = impl

    # -- cart -----------------------------------------------------------------
    @property
    def cart(self) -> CartService:
        if self._cart is None:
            raise RuntimeError("CartService not registered")
        return self._cart

    @cart.setter
    def cart(self, impl: CartService) -> None:
        self._cart = impl

    # -- federation -----------------------------------------------------------
    @property
    def federation(self) -> FederationService:
        if self._federation is None:
            raise RuntimeError("FederationService not registered")
        return self._federation

    @federation.setter
    def federation(self, impl: FederationService) -> None:
        self._federation = impl

    # -- introspection --------------------------------------------------------
    def has(self, name: str) -> bool:
        """Check whether a domain service is registered."""
        # Reads the private slot directly (f"_{name}") so the property's
        # RuntimeError is never triggered by a mere availability probe.
        return getattr(self, f"_{name}", None) is not None


# Module-level singleton — import this everywhere.
services = _ServiceRegistry()
|
||||
155
services/relationships.py
Normal file
155
services/relationships.py
Normal file
@@ -0,0 +1,155 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events import emit_event
|
||||
from shared.models.container_relation import ContainerRelation
|
||||
|
||||
|
||||
async def attach_child(
    session: AsyncSession,
    parent_type: str,
    parent_id: int,
    child_type: str,
    child_id: int,
    label: str | None = None,
    sort_order: int | None = None,
) -> ContainerRelation:
    """
    Create a ContainerRelation and emit container.child_attached event.

    Upsert behaviour: if a relation already exists (including soft-deleted),
    revive it instead of inserting a duplicate. When ``sort_order`` is not
    given for a new relation, it is appended after the parent's current
    maximum.
    """
    # Check for existing (including soft-deleted)
    existing = await session.scalar(
        select(ContainerRelation).where(
            ContainerRelation.parent_type == parent_type,
            ContainerRelation.parent_id == parent_id,
            ContainerRelation.child_type == child_type,
            ContainerRelation.child_id == child_id,
        )
    )
    if existing:
        if existing.deleted_at is not None:
            # Revive soft-deleted relation; only overwrite label/sort_order
            # when the caller explicitly supplied them.
            existing.deleted_at = None
            if sort_order is not None:
                existing.sort_order = sort_order
            if label is not None:
                existing.label = label
            await session.flush()
            # Re-announce the attachment so listeners (e.g. nav rebuild) run.
            await emit_event(
                session,
                event_type="container.child_attached",
                aggregate_type="container_relation",
                aggregate_id=existing.id,
                payload={
                    "parent_type": parent_type,
                    "parent_id": parent_id,
                    "child_type": child_type,
                    "child_id": child_id,
                },
            )
            return existing
        # Already attached and active — no-op
        return existing

    if sort_order is None:
        # Append at the end of the parent's live children.
        max_order = await session.scalar(
            select(func.max(ContainerRelation.sort_order)).where(
                ContainerRelation.parent_type == parent_type,
                ContainerRelation.parent_id == parent_id,
                ContainerRelation.deleted_at.is_(None),
            )
        )
        sort_order = (max_order or 0) + 1

    rel = ContainerRelation(
        parent_type=parent_type,
        parent_id=parent_id,
        child_type=child_type,
        child_id=child_id,
        label=label,
        sort_order=sort_order,
    )
    session.add(rel)
    # Flush so rel.id is assigned before it is used in the event payload.
    await session.flush()

    await emit_event(
        session,
        event_type="container.child_attached",
        aggregate_type="container_relation",
        aggregate_id=rel.id,
        payload={
            "parent_type": parent_type,
            "parent_id": parent_id,
            "child_type": child_type,
            "child_id": child_id,
        },
    )

    return rel
|
||||
|
||||
|
||||
async def get_children(
    session: AsyncSession,
    parent_type: str,
    parent_id: int,
    child_type: str | None = None,
) -> list[ContainerRelation]:
    """List live children of (parent_type, parent_id); optionally one child_type only."""
    conditions = [
        ContainerRelation.parent_type == parent_type,
        ContainerRelation.parent_id == parent_id,
        ContainerRelation.deleted_at.is_(None),
    ]
    if child_type is not None:
        conditions.append(ContainerRelation.child_type == child_type)

    stmt = (
        select(ContainerRelation)
        .where(*conditions)
        .order_by(ContainerRelation.sort_order.asc(), ContainerRelation.id.asc())
    )
    rows = await session.execute(stmt)
    return list(rows.scalars().all())
|
||||
|
||||
|
||||
async def detach_child(
    session: AsyncSession,
    parent_type: str,
    parent_id: int,
    child_type: str,
    child_id: int,
) -> bool:
    """Soft-delete a ContainerRelation and emit container.child_detached.

    Returns False when no live relation matches.
    """
    rel = (
        await session.execute(
            select(ContainerRelation).where(
                ContainerRelation.parent_type == parent_type,
                ContainerRelation.parent_id == parent_id,
                ContainerRelation.child_type == child_type,
                ContainerRelation.child_id == child_id,
                ContainerRelation.deleted_at.is_(None),
            )
        )
    ).scalar_one_or_none()
    if rel is None:
        return False

    rel.deleted_at = func.now()
    await session.flush()

    await emit_event(
        session,
        event_type="container.child_detached",
        aggregate_type="container_relation",
        aggregate_id=rel.id,
        payload={
            "parent_type": parent_type,
            "parent_id": parent_id,
            "child_type": child_type,
            "child_id": child_id,
        },
    )

    return True
|
||||
291
services/stubs.py
Normal file
291
services/stubs.py
Normal file
@@ -0,0 +1,291 @@
|
||||
"""No-op stub services for absent domains.
|
||||
|
||||
When an app starts without a particular domain, it registers the stub
|
||||
so that ``services.X.method()`` returns empty/None rather than crashing.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.contracts.dtos import (
|
||||
PostDTO,
|
||||
CalendarDTO,
|
||||
CalendarEntryDTO,
|
||||
TicketDTO,
|
||||
MarketPlaceDTO,
|
||||
ProductDTO,
|
||||
CartItemDTO,
|
||||
CartSummaryDTO,
|
||||
ActorProfileDTO,
|
||||
APActivityDTO,
|
||||
APFollowerDTO,
|
||||
)
|
||||
|
||||
|
||||
class StubBlogService:
    """Blog stub: every lookup comes back empty."""

    async def get_post_by_slug(self, session: AsyncSession, slug: str) -> PostDTO | None:
        """No blog domain — no post."""
        return None

    async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None:
        """No blog domain — no post."""
        return None

    async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]:
        """No blog domain — empty list."""
        return []

    async def search_posts(self, session, query, page=1, per_page=10):
        """No blog domain — zero hits."""
        return [], 0
|
||||
|
||||
|
||||
class StubCalendarService:
    """Calendar stub: no calendars, entries, or tickets exist; mutations are no-ops."""

    async def calendars_for_container(
        self, session: AsyncSession, container_type: str, container_id: int,
    ) -> list[CalendarDTO]:
        return []

    async def pending_entries(
        self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
    ) -> list[CalendarEntryDTO]:
        return []

    async def entries_for_page(
        self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None,
    ) -> list[CalendarEntryDTO]:
        return []

    async def entry_by_id(self, session: AsyncSession, entry_id: int) -> CalendarEntryDTO | None:
        return None

    async def associated_entries(
        self, session: AsyncSession, content_type: str, content_id: int, page: int,
    ) -> tuple[list[CalendarEntryDTO], bool]:
        # (entries, has_more)
        return [], False

    async def toggle_entry_post(
        self, session: AsyncSession, entry_id: int, content_type: str, content_id: int,
    ) -> bool:
        return False

    async def adopt_entries_for_user(
        self, session: AsyncSession, user_id: int, session_id: str,
    ) -> None:
        return None

    async def claim_entries_for_order(
        self, session: AsyncSession, order_id: int, user_id: int | None,
        session_id: str | None, page_post_id: int | None,
    ) -> None:
        return None

    async def confirm_entries_for_order(
        self, session: AsyncSession, order_id: int, user_id: int | None,
        session_id: str | None,
    ) -> None:
        return None

    async def get_entries_for_order(
        self, session: AsyncSession, order_id: int,
    ) -> list[CalendarEntryDTO]:
        return []

    async def user_tickets(self, session: AsyncSession, *, user_id: int) -> list[TicketDTO]:
        return []

    async def user_bookings(self, session: AsyncSession, *, user_id: int) -> list[CalendarEntryDTO]:
        return []

    async def confirmed_entries_for_posts(
        self, session: AsyncSession, post_ids: list[int],
    ) -> dict[int, list[CalendarEntryDTO]]:
        return {}

    async def pending_tickets(
        self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
    ) -> list[TicketDTO]:
        return []

    async def tickets_for_page(
        self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None,
    ) -> list[TicketDTO]:
        return []

    async def claim_tickets_for_order(
        self, session: AsyncSession, order_id: int, user_id: int | None,
        session_id: str | None, page_post_id: int | None,
    ) -> None:
        return None

    async def confirm_tickets_for_order(self, session: AsyncSession, order_id: int) -> None:
        return None

    async def get_tickets_for_order(
        self, session: AsyncSession, order_id: int,
    ) -> list[TicketDTO]:
        return []

    async def adopt_tickets_for_user(
        self, session: AsyncSession, user_id: int, session_id: str,
    ) -> None:
        return None

    async def adjust_ticket_quantity(
        self, session, entry_id, count, *, user_id, session_id, ticket_type_id=None,
    ) -> int:
        # No tickets can ever be reserved.
        return 0

    async def entry_ids_for_content(self, session, content_type, content_id):
        return set()

    async def visible_entries_for_period(
        self, session, calendar_id, period_start, period_end, *, user_id, is_admin, session_id,
    ):
        return []
|
||||
|
||||
|
||||
class StubMarketService:
    """Market stub: reads come back empty; writes are unsupported."""

    async def marketplaces_for_container(
        self, session: AsyncSession, container_type: str, container_id: int,
    ) -> list[MarketPlaceDTO]:
        return []

    async def product_by_id(self, session: AsyncSession, product_id: int) -> ProductDTO | None:
        return None

    async def create_marketplace(
        self, session: AsyncSession, container_type: str, container_id: int,
        name: str, slug: str,
    ) -> MarketPlaceDTO:
        # Creation needs the real market domain — fail loudly.
        raise RuntimeError("MarketService not available")

    async def soft_delete_marketplace(
        self, session: AsyncSession, container_type: str, container_id: int,
        slug: str,
    ) -> bool:
        return False
|
||||
|
||||
|
||||
class StubCartService:
    """Cart stub: the cart is always empty and adoption is a no-op."""

    async def cart_summary(
        self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
        page_slug: str | None = None,
    ) -> CartSummaryDTO:
        return CartSummaryDTO()

    async def cart_items(
        self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
    ) -> list[CartItemDTO]:
        return []

    async def adopt_cart_for_user(
        self, session: AsyncSession, user_id: int, session_id: str,
    ) -> None:
        return None
|
||||
|
||||
|
||||
class StubFederationService:
    """No-op federation stub for apps that don't own federation.

    Read-style operations answer with empty/None/zero values; operations
    that would have to create or mutate federation state raise
    RuntimeError so misuse is loud rather than silent.
    """

    # -- actors ---------------------------------------------------------------

    async def get_actor_by_username(self, session, username):
        """Always None: no local AP actors exist in this app."""
        return None

    async def get_actor_by_user_id(self, session, user_id):
        """Always None: no local AP actors exist in this app."""
        return None

    async def create_actor(self, session, user_id, preferred_username,
                           display_name=None, summary=None):
        """Actor creation needs the real service — always raises."""
        raise RuntimeError("FederationService not available")

    async def username_available(self, session, username):
        """Always False, so callers never attempt actor creation here."""
        return False

    # -- activities / outbox --------------------------------------------------

    async def publish_activity(self, session, *, actor_user_id, activity_type,
                               object_type, object_data, source_type=None,
                               source_id=None):
        """Publishing is a no-op; returns None."""
        return None

    async def get_activity(self, session, activity_id):
        return None

    async def get_outbox(self, session, username, page=1, per_page=20):
        """Empty outbox: (items, total_count)."""
        return [], 0

    async def get_activity_for_source(self, session, source_type, source_id):
        return None

    # -- followers / following ------------------------------------------------

    async def get_followers(self, session, username):
        return []

    async def add_follower(self, session, username, follower_acct, follower_inbox,
                           follower_actor_url, follower_public_key=None):
        """Follower persistence needs the real service — always raises."""
        raise RuntimeError("FederationService not available")

    async def remove_follower(self, session, username, follower_acct):
        """Nothing to remove; reports failure."""
        return False

    async def get_or_fetch_remote_actor(self, session, actor_url):
        return None

    async def search_remote_actor(self, session, acct):
        return None

    async def send_follow(self, session, local_username, remote_actor_url):
        """Outgoing follows need the real service — always raises."""
        raise RuntimeError("FederationService not available")

    async def get_following(self, session, username, page=1, per_page=20):
        """Empty following list: (items, total_count)."""
        return [], 0

    async def accept_follow_response(self, session, local_username, remote_actor_url):
        return None

    async def unfollow(self, session, local_username, remote_actor_url):
        return None

    # -- remote posts ---------------------------------------------------------

    async def ingest_remote_post(self, session, remote_actor_id, activity_json, object_json):
        return None

    async def delete_remote_post(self, session, object_id):
        return None

    async def get_remote_post(self, session, object_id):
        return None

    # -- timelines ------------------------------------------------------------

    async def get_home_timeline(self, session, actor_profile_id, before=None, limit=20):
        return []

    async def get_public_timeline(self, session, before=None, limit=20):
        return []

    # -- local posts ----------------------------------------------------------

    async def create_local_post(self, session, actor_profile_id, content, visibility="public", in_reply_to=None):
        """Posting needs the real service — always raises."""
        raise RuntimeError("FederationService not available")

    async def delete_local_post(self, session, actor_profile_id, post_id):
        """Deleting needs the real service — always raises."""
        raise RuntimeError("FederationService not available")

    # -- reactions (all silent no-ops) ---------------------------------------

    async def like_post(self, session, actor_profile_id, object_id, author_inbox):
        return None

    async def unlike_post(self, session, actor_profile_id, object_id, author_inbox):
        return None

    async def boost_post(self, session, actor_profile_id, object_id, author_inbox):
        return None

    async def unboost_post(self, session, actor_profile_id, object_id, author_inbox):
        return None

    # -- notifications --------------------------------------------------------

    async def get_notifications(self, session, actor_profile_id, before=None, limit=20):
        return []

    async def unread_notification_count(self, session, actor_profile_id):
        return 0

    async def mark_notifications_read(self, session, actor_profile_id):
        return None

    # -- misc -----------------------------------------------------------------

    async def get_stats(self, session):
        """Zeroed stats counters."""
        return {"actors": 0, "activities": 0, "followers": 0}
|
||||
90
services/widget_registry.py
Normal file
90
services/widget_registry.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""Singleton widget registry for cross-domain UI composition.
|
||||
|
||||
Usage::
|
||||
|
||||
from shared.services.widget_registry import widgets
|
||||
|
||||
# Register at app startup (after domain services)
|
||||
widgets.add_container_nav(NavWidget(...))
|
||||
|
||||
# Query in templates / context processors
|
||||
for w in widgets.container_nav:
|
||||
ctx = await w.context_fn(session, container_type="page", ...)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from shared.contracts.widgets import (
|
||||
NavWidget,
|
||||
CardWidget,
|
||||
AccountPageWidget,
|
||||
AccountNavLink,
|
||||
)
|
||||
|
||||
|
||||
class _WidgetRegistry:
|
||||
"""Central registry holding all widget descriptors.
|
||||
|
||||
Widgets are added at startup and read at request time.
|
||||
Properties return sorted-by-order copies.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._container_nav: list[NavWidget] = []
|
||||
self._container_card: list[CardWidget] = []
|
||||
self._account_pages: list[AccountPageWidget] = []
|
||||
self._account_nav: list[AccountNavLink] = []
|
||||
|
||||
# -- registration ---------------------------------------------------------
|
||||
|
||||
def add_container_nav(self, w: NavWidget) -> None:
|
||||
self._container_nav.append(w)
|
||||
|
||||
def add_container_card(self, w: CardWidget) -> None:
|
||||
self._container_card.append(w)
|
||||
|
||||
def add_account_page(self, w: AccountPageWidget) -> None:
|
||||
self._account_pages.append(w)
|
||||
# Auto-create a matching internal nav link
|
||||
slug = w.slug
|
||||
|
||||
def _href(s=slug):
|
||||
from shared.infrastructure.urls import coop_url
|
||||
return coop_url(f"/auth/{s}/")
|
||||
|
||||
self._account_nav.append(AccountNavLink(
|
||||
label=w.label,
|
||||
order=w.order,
|
||||
href_fn=_href,
|
||||
external=False,
|
||||
))
|
||||
|
||||
def add_account_link(self, link: AccountNavLink) -> None:
|
||||
self._account_nav.append(link)
|
||||
|
||||
# -- read access (sorted copies) ------------------------------------------
|
||||
|
||||
@property
|
||||
def container_nav(self) -> list[NavWidget]:
|
||||
return sorted(self._container_nav, key=lambda w: w.order)
|
||||
|
||||
@property
|
||||
def container_cards(self) -> list[CardWidget]:
|
||||
return sorted(self._container_card, key=lambda w: w.order)
|
||||
|
||||
@property
|
||||
def account_pages(self) -> list[AccountPageWidget]:
|
||||
return sorted(self._account_pages, key=lambda w: w.order)
|
||||
|
||||
@property
|
||||
def account_nav(self) -> list[AccountNavLink]:
|
||||
return sorted(self._account_nav, key=lambda w: w.order)
|
||||
|
||||
def account_page_by_slug(self, slug: str) -> AccountPageWidget | None:
|
||||
for w in self._account_pages:
|
||||
if w.slug == slug:
|
||||
return w
|
||||
return None
|
||||
|
||||
|
||||
# Module-level singleton — import this everywhere.
|
||||
widgets = _WidgetRegistry()
|
||||
22
services/widgets/__init__.py
Normal file
22
services/widgets/__init__.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""Per-domain widget registration.
|
||||
|
||||
Called once at startup after domain services are registered.
|
||||
Only registers widgets for domains that are actually available.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
def register_all_widgets() -> None:
    """Register UI widgets for every domain whose service is installed.

    Called once at startup after domain services are registered.  Each
    domain's widget module is imported lazily and only when its service
    is present, so unavailable domains cost nothing.
    """
    from shared.services.registry import services

    if services.has("calendar"):
        from .calendar_widgets import register_calendar_widgets
        register_calendar_widgets()

    if services.has("market"):
        from .market_widgets import register_market_widgets
        register_market_widgets()

    if services.has("cart"):
        from .cart_widgets import register_cart_widgets
        register_cart_widgets()
|
||||
91
services/widgets/calendar_widgets.py
Normal file
91
services/widgets/calendar_widgets.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""Calendar-domain widgets: entries nav, calendar links, card entries, account pages."""
|
||||
from __future__ import annotations
|
||||
|
||||
from shared.contracts.widgets import NavWidget, CardWidget, AccountPageWidget
|
||||
from shared.services.widget_registry import widgets
|
||||
from shared.services.registry import services
|
||||
|
||||
|
||||
# -- container_nav: associated entries ----------------------------------------
|
||||
|
||||
async def _nav_entries_context(
    session, *, container_type, container_id, post_slug, page=1, **kw,
):
    """Template context for the 'associated entries' container-nav widget.

    Delegates to the calendar service; ``has_more`` drives pagination.
    """
    entries, has_more = await services.calendar.associated_entries(
        session, container_type, container_id, page,
    )
    return {
        "entries": entries,
        "has_more": has_more,
        "page": page,
        "post_slug": post_slug,
    }
|
||||
|
||||
|
||||
# -- container_nav: calendar links -------------------------------------------
|
||||
|
||||
async def _nav_calendars_context(
    session, *, container_type, container_id, post_slug, **kw,
):
    """Template context for the 'calendar links' container-nav widget."""
    calendars = await services.calendar.calendars_for_container(
        session, container_type, container_id,
    )
    return {"calendars": calendars, "post_slug": post_slug}
|
||||
|
||||
|
||||
# -- container_card: confirmed entries for post listings ----------------------
|
||||
|
||||
async def _card_entries_batch(session, post_ids):
    """Batch-fetch confirmed calendar entries for the given posts (one service call)."""
    return await services.calendar.confirmed_entries_for_posts(session, post_ids)
|
||||
|
||||
|
||||
# -- account pages: tickets & bookings ---------------------------------------
|
||||
|
||||
async def _tickets_context(session, *, user_id, **kw):
    """Template context for the account 'tickets' page."""
    tickets = await services.calendar.user_tickets(session, user_id=user_id)
    return {"tickets": tickets}
|
||||
|
||||
|
||||
async def _bookings_context(session, *, user_id, **kw):
    """Template context for the account 'bookings' page."""
    bookings = await services.calendar.user_bookings(session, user_id=user_id)
    return {"bookings": bookings}
|
||||
|
||||
|
||||
# -- registration entry point ------------------------------------------------
|
||||
|
||||
def register_calendar_widgets() -> None:
    """Wire all calendar widgets into the global widget registry.

    Registers two container-nav widgets (associated entries, calendar
    links), one container-card widget (confirmed entries for post
    listings), and two account pages (tickets, bookings).
    """
    # Associated-entries panel on container pages.
    widgets.add_container_nav(NavWidget(
        domain="calendar",
        order=10,
        context_fn=_nav_entries_context,
        template="_widgets/container_nav/calendar_entries.html",
    ))
    # Links to calendars attached to the container.
    widgets.add_container_nav(NavWidget(
        domain="calendar_links",
        order=20,
        context_fn=_nav_calendars_context,
        template="_widgets/container_nav/calendar_links.html",
    ))
    # Confirmed entries shown on post cards; batched per listing page.
    widgets.add_container_card(CardWidget(
        domain="calendar",
        order=10,
        batch_fn=_card_entries_batch,
        context_key="associated_entries",
        template="_widgets/container_card/calendar_entries.html",
    ))
    # Account pages for the user's tickets and bookings.
    widgets.add_account_page(AccountPageWidget(
        domain="calendar",
        slug="tickets",
        label="tickets",
        order=20,
        context_fn=_tickets_context,
        template="_types/auth/_tickets_panel.html",
    ))
    widgets.add_account_page(AccountPageWidget(
        domain="calendar",
        slug="bookings",
        label="bookings",
        order=30,
        context_fn=_bookings_context,
        template="_types/auth/_bookings_panel.html",
    ))
|
||||
15
services/widgets/cart_widgets.py
Normal file
15
services/widgets/cart_widgets.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Cart-domain widgets: orders link on account page."""
|
||||
from __future__ import annotations
|
||||
|
||||
from shared.contracts.widgets import AccountNavLink
|
||||
from shared.services.widget_registry import widgets
|
||||
from shared.infrastructure.urls import cart_url
|
||||
|
||||
|
||||
def register_cart_widgets() -> None:
    """Add the cart domain's 'orders' link to the account navigation."""
    widgets.add_account_link(AccountNavLink(
        label="orders",
        order=100,
        # href_fn is lazy so the URL is built per request, not at startup.
        href_fn=lambda: cart_url("/orders/"),
        external=True,
    ))
|
||||
24
services/widgets/market_widgets.py
Normal file
24
services/widgets/market_widgets.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""Market-domain widgets: marketplace links on container pages."""
|
||||
from __future__ import annotations
|
||||
|
||||
from shared.contracts.widgets import NavWidget
|
||||
from shared.services.widget_registry import widgets
|
||||
from shared.services.registry import services
|
||||
|
||||
|
||||
async def _nav_markets_context(
    session, *, container_type, container_id, post_slug, **kw,
):
    """Template context for the 'marketplace links' container-nav widget."""
    markets = await services.market.marketplaces_for_container(
        session, container_type, container_id,
    )
    return {"markets": markets, "post_slug": post_slug}
|
||||
|
||||
|
||||
def register_market_widgets() -> None:
    """Wire the market domain's container-nav widget into the registry."""
    widgets.add_container_nav(NavWidget(
        domain="market",
        order=30,
        context_fn=_nav_markets_context,
        template="_widgets/container_nav/market_links.html",
    ))
|
||||
236
utils/anchoring.py
Normal file
236
utils/anchoring.py
Normal file
@@ -0,0 +1,236 @@
|
||||
"""Merkle tree construction and OpenTimestamps anchoring.
|
||||
|
||||
Ported from ~/art-dag/activity-pub/anchoring.py.
|
||||
Builds a SHA256 merkle tree from activity IDs, submits the root to
|
||||
OpenTimestamps for Bitcoin timestamping, and stores the tree + proof on IPFS.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import httpx
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models.federation import APActivity, APAnchor
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
OTS_SERVERS = [
|
||||
"https://a.pool.opentimestamps.org",
|
||||
"https://b.pool.opentimestamps.org",
|
||||
"https://a.pool.eternitywall.com",
|
||||
]
|
||||
|
||||
|
||||
def _sha256(data: str | bytes) -> str:
|
||||
"""SHA256 hex digest."""
|
||||
if isinstance(data, str):
|
||||
data = data.encode()
|
||||
return hashlib.sha256(data).hexdigest()
|
||||
|
||||
|
||||
def build_merkle_tree(items: list[str]) -> dict:
    """Construct a deterministic SHA256 merkle tree over *items*.

    Items are sorted before hashing so the same set always yields the same
    root; an unpaired rightmost node is hashed with a copy of itself.

    Returns:
        {"root": hex, "leaves": [hex, ...], "levels": [[hex, ...], ...]}
        with levels ordered bottom-up (leaves first, root level last).

    Raises:
        ValueError: if *items* is empty.
    """
    if not items:
        raise ValueError("Cannot build merkle tree from empty list")

    leaf_hashes = [_sha256(entry) for entry in sorted(items)]
    levels: list[list[str]] = [list(leaf_hashes)]

    level = list(leaf_hashes)
    while len(level) > 1:
        parents = []
        for i in range(0, len(level), 2):
            left = level[i]
            # Odd count: duplicate the last node as its own pair partner.
            right = level[i + 1] if i + 1 < len(level) else left
            parents.append(_sha256(left + right))
        levels.append(parents)
        level = parents

    return {"root": level[0], "leaves": leaf_hashes, "levels": levels}
|
||||
|
||||
|
||||
def get_merkle_proof(tree: dict, item: str) -> list[dict] | None:
    """Build a proof-of-membership for *item* against *tree*.

    Each step is ``{"sibling": hex, "position": "left"|"right"}`` where
    *position* is the side the sibling occupies when re-hashing the pair.
    Returns None when *item* is not a leaf of the tree.
    """
    target = _sha256(item)
    leaves = tree["leaves"]
    if target not in leaves:
        return None
    idx = leaves.index(target)

    steps: list[dict] = []
    # Walk every level except the root, recording the pairing partner.
    for level in tree["levels"][:-1]:
        if idx % 2 == 0:
            partner, side = idx + 1, "right"
        else:
            partner, side = idx - 1, "left"
        # An unpaired rightmost node was hashed with itself during build.
        sibling = level[partner] if partner < len(level) else level[idx]
        steps.append({"sibling": sibling, "position": side})
        idx //= 2

    return steps
|
||||
|
||||
|
||||
def verify_merkle_proof(item: str, proof: list[dict], root: str) -> bool:
    """Recompute the root from *item* and *proof*; True when it matches *root*."""
    acc = _sha256(item)
    for step in proof:
        if step["position"] == "right":
            left, right = acc, step["sibling"]
        else:
            left, right = step["sibling"], acc
        acc = _sha256(left + right)
    return acc == root
|
||||
|
||||
|
||||
async def submit_to_opentimestamps(merkle_root: str) -> bytes | None:
    """Submit *merkle_root* to the OpenTimestamps calendar pools.

    Tries each server in OTS_SERVERS until one answers 200; the response
    body is the pending (not yet Bitcoin-confirmed) OTS proof.  Returns
    None when every server fails.
    """
    digest = bytes.fromhex(merkle_root)

    for server in OTS_SERVERS:
        try:
            async with httpx.AsyncClient(timeout=30) as client:
                response = await client.post(
                    f"{server}/digest",
                    content=digest,
                    headers={"Content-Type": "application/x-opentimestamps"},
                )
        except Exception:
            log.debug("OTS server %s failed", server, exc_info=True)
            continue
        if response.status_code == 200:
            log.info("OTS proof obtained from %s", server)
            return response.content

    log.warning("All OTS servers failed for root %s", merkle_root)
    return None
|
||||
|
||||
|
||||
async def upgrade_ots_proof(proof_bytes: bytes) -> tuple[bytes, bool]:
    """Attempt to upgrade a pending OTS proof to a Bitcoin-confirmed one.

    Returns ``(proof_bytes, is_confirmed)``.  Real upgrading goes through
    the ``ots`` CLI or the calendar API; that is not implemented yet, so
    the proof is returned unchanged and reported unconfirmed.

    TODO: Implement calendar-based upgrade polling.
    """
    return proof_bytes, False
|
||||
|
||||
|
||||
async def create_anchor(
    session: AsyncSession,
    batch_size: int = 100,
) -> APAnchor | None:
    """Anchor a batch of un-anchored local activities.

    1. Select up to *batch_size* local activities without an anchor_id
    2. Build a merkle tree from their activity_ids
    3. Store the tree on IPFS (best effort)
    4. Submit the root to OpenTimestamps
    5. Store the OTS proof on IPFS (best effort)
    6. Create an APAnchor record
    7. Link the activities to the anchor

    Returns the new APAnchor, or None when nothing needs anchoring.
    The caller owns the transaction: this flushes but never commits.
    """
    # Oldest-first so anchors cover activities in creation order.
    result = await session.execute(
        select(APActivity)
        .where(
            APActivity.anchor_id.is_(None),
            APActivity.is_local.is_(True),
        )
        .order_by(APActivity.created_at.asc())
        .limit(batch_size)
    )
    activities = result.scalars().all()

    if not activities:
        log.debug("No un-anchored activities to process")
        return None

    activity_ids = [a.activity_id for a in activities]
    log.info("Anchoring %d activities", len(activity_ids))

    # Build merkle tree over the batch.
    tree = build_merkle_tree(activity_ids)
    merkle_root = tree["root"]

    # IPFS storage is best-effort: anchoring proceeds even when the daemon
    # is unreachable, leaving the CID columns NULL.
    tree_cid = None
    ots_proof_cid = None
    try:
        from shared.utils.ipfs_client import add_json, is_available
        if await is_available():
            tree_cid = await add_json({
                "root": merkle_root,
                "leaves": tree["leaves"],
                "activity_ids": activity_ids,
                "created_at": datetime.now(timezone.utc).isoformat(),
            })
            log.info("Merkle tree stored on IPFS: %s", tree_cid)
    except Exception:
        log.exception("IPFS tree storage failed")

    # Submit to OpenTimestamps; the proof is pending until upgraded later.
    ots_proof = await submit_to_opentimestamps(merkle_root)
    if ots_proof:
        try:
            from shared.utils.ipfs_client import add_bytes, is_available
            if await is_available():
                ots_proof_cid = await add_bytes(ots_proof)
                log.info("OTS proof stored on IPFS: %s", ots_proof_cid)
        except Exception:
            log.exception("IPFS OTS proof storage failed")

    # Create anchor record.
    anchor = APAnchor(
        merkle_root=merkle_root,
        tree_ipfs_cid=tree_cid,
        ots_proof_cid=ots_proof_cid,
        activity_count=len(activities),
    )
    session.add(anchor)
    await session.flush()  # populate anchor.id before linking

    # Link activities to the anchor.
    for a in activities:
        a.anchor_id = anchor.id
    await session.flush()

    log.info(
        "Anchor created: root=%s, activities=%d, tree_cid=%s",
        merkle_root, len(activities), tree_cid,
    )

    return anchor
|
||||
54
utils/calendar_helpers.py
Normal file
54
utils/calendar_helpers.py
Normal file
@@ -0,0 +1,54 @@
|
||||
"""Pure calendar utility functions (no ORM dependencies).
|
||||
|
||||
Extracted from events/bp/calendar/services/calendar_view.py so that
|
||||
blog admin (and any other app) can use them without cross-app imports.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import calendar as pycalendar
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from quart import request
|
||||
|
||||
|
||||
def parse_int_arg(name: str, default: int | None = None) -> int | None:
    """Read query parameter *name* as an int, falling back to *default*.

    Missing, blank, and non-numeric values all yield *default*.
    """
    raw = request.args.get(name, "").strip()
    if not raw:
        return default
    try:
        return int(raw)
    except ValueError:
        return default
|
||||
|
||||
|
||||
def add_months(year: int, month: int, delta: int) -> tuple[int, int]:
    """Shift *year*/*month* by *delta* months (delta may be negative).

    Works in a flat month index so year rollover in either direction is
    handled by a single divmod.
    """
    month_index = year * 12 + (month - 1) + delta
    out_year, zero_based_month = divmod(month_index, 12)
    return out_year, zero_based_month + 1
|
||||
|
||||
|
||||
def build_calendar_weeks(year: int, month: int) -> list[list[dict]]:
    """Build a month grid for *year*/*month*: a list of weeks of 7 day dicts.

    Each day dict carries ``date`` (datetime.date), ``in_month`` (whether it
    belongs to the requested month) and ``is_today`` (compared against the
    current UTC date).  Weeks start on Monday.
    """
    today = datetime.now(timezone.utc).date()
    month_grid = pycalendar.Calendar(firstweekday=0).monthdatescalendar(year, month)
    return [
        [
            {
                "date": day,
                "in_month": day.month == month,
                "is_today": day == today,
            }
            for day in week
        ]
        for week in month_grid
    ]
|
||||
181
utils/http_signatures.py
Normal file
181
utils/http_signatures.py
Normal file
@@ -0,0 +1,181 @@
|
||||
"""RSA key generation and HTTP Signature signing/verification.
|
||||
|
||||
Keys are stored in DB (ActorProfile), not the filesystem.
|
||||
Ported from ~/art-dag/activity-pub/keys.py.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa, padding
|
||||
|
||||
|
||||
def generate_rsa_keypair() -> tuple[str, str]:
    """Generate an RSA-2048 keypair.

    Returns:
        (private_pem, public_pem) as UTF-8 strings.

    The private key is serialized unencrypted (PKCS8); callers are expected
    to store it in the database, not on disk.
    """
    private_key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
    )

    private_pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    ).decode()

    public_pem = private_key.public_key().public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    ).decode()

    return private_pem, public_pem
|
||||
|
||||
|
||||
def sign_request(
    private_key_pem: str,
    key_id: str,
    method: str,
    path: str,
    host: str,
    body: bytes | None = None,
    date: str | None = None,
) -> dict[str, str]:
    """Build HTTP Signature headers for an outgoing request.

    Signs ``(request-target)``, ``host`` and ``date`` — plus ``digest`` when
    a *body* is given — with RSA-SHA256 over *private_key_pem*.

    Returns a dict of headers to merge into the request:
    ``{"Signature": ..., "Date": ..., "Digest": ..., "Host": ...}``
    (``Digest`` only when *body* is not None).
    """
    if date is None:
        # HTTP-date format, always GMT.
        date = datetime.now(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT")

    # Order matters: the "headers" parameter in the Signature header must
    # list names in the same order they are concatenated for signing.
    headers_to_sign = [
        f"(request-target): {method.lower()} {path}",
        f"host: {host}",
        f"date: {date}",
    ]

    out_headers: dict[str, str] = {
        "Host": host,
        "Date": date,
    }

    if body is not None:
        # SHA-256 digest of the body, carried in the Digest header and
        # included in the signed string so the body is tamper-evident.
        digest = base64.b64encode(hashlib.sha256(body).digest()).decode()
        digest_header = f"SHA-256={digest}"
        headers_to_sign.append(f"digest: {digest_header}")
        out_headers["Digest"] = digest_header

    signed_string = "\n".join(headers_to_sign)
    # Header names only (strip everything after the first colon).
    header_names = " ".join(
        h.split(":")[0] for h in headers_to_sign
    )

    private_key = serialization.load_pem_private_key(
        private_key_pem.encode(), password=None,
    )
    signature_bytes = private_key.sign(
        signed_string.encode(),
        padding.PKCS1v15(),
        hashes.SHA256(),
    )
    signature_b64 = base64.b64encode(signature_bytes).decode()

    out_headers["Signature"] = (
        f'keyId="{key_id}",'
        f'headers="{header_names}",'
        f'signature="{signature_b64}",'
        f'algorithm="rsa-sha256"'
    )

    return out_headers
|
||||
|
||||
|
||||
def verify_request_signature(
    public_key_pem: str,
    signature_header: str,
    method: str,
    path: str,
    headers: dict[str, str],
) -> bool:
    """Verify an incoming HTTP Signature.

    Args:
        public_key_pem: PEM-encoded public key of the sender.
        signature_header: Value of the ``Signature`` header.
        method: HTTP method (GET, POST, etc.).
        path: Request path (e.g. ``/users/alice/inbox``).
        headers: All request headers (case-insensitive keys).

    Returns:
        True if the signature is valid.

    NOTE(review): this verifies only the RSA signature over the signed
    header string; it does not re-check the Digest header against the body
    or enforce Date freshness — confirm callers handle those separately.
    """
    # Parse Signature header: comma-separated key="value" pairs.  Splitting
    # on commas is safe here because base64 values contain no commas.
    parts: dict[str, str] = {}
    for part in signature_header.split(","):
        part = part.strip()
        eq = part.index("=")
        key = part[:eq]
        val = part[eq + 1:].strip('"')
        parts[key] = val

    # A missing "headers" parameter defaults to just "date".
    signed_headers = parts.get("headers", "date").split()
    signature_b64 = parts.get("signature", "")

    # Reconstruct the signed string exactly as the sender built it.
    lines: list[str] = []
    # Normalize header lookup to lowercase
    lc_headers = {k.lower(): v for k, v in headers.items()}
    for h in signed_headers:
        if h == "(request-target)":
            lines.append(f"(request-target): {method.lower()} {path}")
        else:
            val = lc_headers.get(h, "")
            lines.append(f"{h}: {val}")

    signed_string = "\n".join(lines)

    public_key = serialization.load_pem_public_key(public_key_pem.encode())
    try:
        public_key.verify(
            base64.b64decode(signature_b64),
            signed_string.encode(),
            padding.PKCS1v15(),
            hashes.SHA256(),
        )
        return True
    except Exception:
        # Any decode or verification failure means an invalid signature.
        return False
|
||||
|
||||
|
||||
def create_ld_signature(
    private_key_pem: str,
    key_id: str,
    activity: dict,
) -> dict:
    """Create an RsaSignature2017 Linked Data signature for an activity.

    NOTE(review): canonicalization here is compact sorted-key JSON, not the
    URDNA2015 normalization the RsaSignature2017 suite specifies — verify
    interoperability with implementations that expect full LD normalization.
    """
    canonical = json.dumps(activity, sort_keys=True, separators=(",", ":"))

    private_key = serialization.load_pem_private_key(
        private_key_pem.encode(), password=None,
    )
    signature_bytes = private_key.sign(
        canonical.encode(),
        padding.PKCS1v15(),
        hashes.SHA256(),
    )
    signature_b64 = base64.b64encode(signature_bytes).decode()

    return {
        "type": "RsaSignature2017",
        "creator": key_id,
        "created": datetime.now(timezone.utc).isoformat(),
        "signatureValue": signature_b64,
    }
|
||||
141
utils/ipfs_client.py
Normal file
141
utils/ipfs_client.py
Normal file
@@ -0,0 +1,141 @@
|
||||
"""Async IPFS client for content-addressed storage.
|
||||
|
||||
All content can be stored on IPFS — blog posts, products, activities, etc.
|
||||
Ported from ~/art-dag/activity-pub/ipfs_client.py (converted to async httpx).
|
||||
|
||||
Config via environment:
|
||||
IPFS_API — multiaddr or URL (default: /ip4/127.0.0.1/tcp/5001)
|
||||
IPFS_TIMEOUT — request timeout in seconds (default: 60)
|
||||
IPFS_GATEWAY_URL — public gateway for CID links (default: https://ipfs.io)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import httpx
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IPFSError(Exception):
    """Raised when an IPFS operation fails (daemon unreachable or HTTP error)."""
|
||||
|
||||
|
||||
# -- Config ------------------------------------------------------------------
|
||||
|
||||
IPFS_API = os.getenv("IPFS_API", "/ip4/127.0.0.1/tcp/5001")
|
||||
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "60"))
|
||||
IPFS_GATEWAY_URL = os.getenv("IPFS_GATEWAY_URL", "https://ipfs.io")
|
||||
|
||||
|
||||
def _multiaddr_to_url(multiaddr: str) -> str:
|
||||
"""Convert IPFS multiaddr to HTTP URL."""
|
||||
dns_match = re.match(r"/dns[46]?/([^/]+)/tcp/(\d+)", multiaddr)
|
||||
if dns_match:
|
||||
return f"http://{dns_match.group(1)}:{dns_match.group(2)}"
|
||||
|
||||
ip4_match = re.match(r"/ip4/([^/]+)/tcp/(\d+)", multiaddr)
|
||||
if ip4_match:
|
||||
return f"http://{ip4_match.group(1)}:{ip4_match.group(2)}"
|
||||
|
||||
if multiaddr.startswith("http"):
|
||||
return multiaddr
|
||||
return "http://127.0.0.1:5001"
|
||||
|
||||
|
||||
IPFS_BASE_URL = _multiaddr_to_url(IPFS_API)
|
||||
|
||||
|
||||
# -- Async client functions --------------------------------------------------
|
||||
|
||||
async def add_bytes(data: bytes, *, pin: bool = True) -> str:
    """Upload raw bytes to the IPFS daemon and return the resulting CID.

    Raises:
        IPFSError: when the daemon is unreachable or responds with an error.
    """
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            response = await client.post(
                f"{IPFS_BASE_URL}/api/v0/add",
                params={"pin": str(pin).lower()},
                files={"file": ("data", data)},
            )
            response.raise_for_status()
            cid = response.json()["Hash"]
            logger.info("Added to IPFS: %d bytes -> %s", len(data), cid)
            return cid
    except Exception as exc:
        logger.error("Failed to add bytes to IPFS: %s", exc)
        raise IPFSError(f"Failed to add bytes: {exc}") from exc
|
||||
|
||||
|
||||
async def add_json(data: dict) -> str:
    """Store *data* as pretty-printed, key-sorted JSON on IPFS; return the CID."""
    payload = json.dumps(data, indent=2, sort_keys=True)
    return await add_bytes(payload.encode("utf-8"), pin=True)
|
||||
|
||||
|
||||
async def get_bytes(cid: str) -> bytes | None:
    """Fetch the raw content behind *cid*, or None on any failure."""
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            response = await client.post(
                f"{IPFS_BASE_URL}/api/v0/cat",
                params={"arg": cid},
            )
            response.raise_for_status()
            logger.info("Retrieved from IPFS: %s (%d bytes)", cid, len(response.content))
            return response.content
    except Exception as exc:
        logger.error("Failed to get from IPFS: %s", exc)
        return None
|
||||
|
||||
|
||||
async def pin_cid(cid: str) -> bool:
    """Pin *cid* on this node; True on success, False on any failure."""
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            reply = await client.post(
                f"{IPFS_BASE_URL}/api/v0/pin/add",
                params={"arg": cid},
            )
            reply.raise_for_status()
    except Exception as exc:
        logger.error("Failed to pin on IPFS: %s", exc)
        return False
    logger.info("Pinned on IPFS: %s", cid)
    return True
|
||||
|
||||
|
||||
async def unpin_cid(cid: str) -> bool:
    """Unpin *cid* from this node; True on success, False on any failure."""
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            reply = await client.post(
                f"{IPFS_BASE_URL}/api/v0/pin/rm",
                params={"arg": cid},
            )
            reply.raise_for_status()
    except Exception as exc:
        logger.error("Failed to unpin from IPFS: %s", exc)
        return False
    logger.info("Unpinned from IPFS: %s", cid)
    return True
|
||||
|
||||
|
||||
async def is_available() -> bool:
    """True when the IPFS daemon answers ``/api/v0/id`` within 5 seconds."""
    try:
        async with httpx.AsyncClient(timeout=5) as client:
            reply = await client.post(f"{IPFS_BASE_URL}/api/v0/id")
    except Exception:
        return False
    return reply.status_code == 200
|
||||
|
||||
|
||||
def gateway_url(cid: str) -> str:
    """Public HTTP gateway URL for *cid*."""
    return "/".join((IPFS_GATEWAY_URL, "ipfs", cid))
|
||||
68
utils/webfinger.py
Normal file
68
utils/webfinger.py
Normal file
@@ -0,0 +1,68 @@
|
||||
"""WebFinger client for resolving remote AP actor profiles."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import httpx
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
AP_CONTENT_TYPE = "application/activity+json"
|
||||
|
||||
|
||||
async def resolve_actor(acct: str) -> dict | None:
    """Resolve user@domain to actor JSON via WebFinger + actor fetch.

    Args:
        acct: Handle in the form ``user@domain`` (a leading ``@`` is
            stripped if present).

    Returns:
        Actor JSON-LD dict, or None if resolution fails.
    """
    acct = acct.lstrip("@")
    if "@" not in acct:
        # Not a remote handle; nothing to resolve.
        return None

    # The WebFinger endpoint lives on the handle's domain, over HTTPS.
    _, domain = acct.rsplit("@", 1)
    webfinger_url = f"https://{domain}/.well-known/webfinger"

    try:
        async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client:
            # Step 1: WebFinger lookup
            resp = await client.get(
                webfinger_url,
                params={"resource": f"acct:{acct}"},
                headers={"Accept": "application/jrd+json, application/json"},
            )
            if resp.status_code != 200:
                log.debug("WebFinger %s returned %d", webfinger_url, resp.status_code)
                return None

            data = resp.json()
            # Find self link with AP content type
            actor_url = None
            for link in data.get("links", []):
                if link.get("rel") == "self" and link.get("type") in (
                    AP_CONTENT_TYPE,
                    "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"",
                ):
                    actor_url = link.get("href")
                    break

            if not actor_url:
                log.debug("No AP self link in WebFinger response for %s", acct)
                return None

            # Step 2: Fetch actor JSON
            resp = await client.get(
                actor_url,
                headers={"Accept": AP_CONTENT_TYPE},
            )
            if resp.status_code == 200:
                return resp.json()

            log.debug("Actor fetch %s returned %d", actor_url, resp.status_code)
    except Exception:
        # Network/JSON errors are expected for dead domains; log and fall through.
        log.exception("WebFinger resolution failed for %s", acct)

    return None
|
||||
Reference in New Issue
Block a user