Compare commits
22 Commits
71729ffb28
...
widget-pha
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bccfff0c69 | ||
|
|
9a8b556c13 | ||
|
|
a626dd849d | ||
|
|
d0b1edea7a | ||
|
|
eec750a699 | ||
|
|
fd163b577f | ||
|
|
3bde451ce9 | ||
|
|
798fe56165 | ||
|
|
18410c4b16 | ||
|
|
a28add8640 | ||
|
|
68941b97f6 | ||
|
|
1d83a339b6 | ||
|
|
24432cd52a | ||
|
|
9a1a4996bc | ||
|
|
1832c53980 | ||
|
|
9db739e56d | ||
|
|
dd7a99e8b7 | ||
|
|
8850a0106a | ||
|
|
7abef48cf2 | ||
|
|
1f8fb521b2 | ||
|
|
e83df2f742 | ||
|
|
7ee8638d6e |
@@ -19,7 +19,7 @@ from shared.db.base import Base
|
||||
|
||||
# Import ALL models so Base.metadata sees every table
|
||||
import shared.models # noqa: F401 User, KV, MagicLink, MenuItem, Ghost*
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "glue.models"):
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models", "glue.models"):
|
||||
try:
|
||||
__import__(_mod)
|
||||
except ImportError:
|
||||
|
||||
142
alembic/versions/k1i9f5g7h8_add_federation_tables.py
Normal file
142
alembic/versions/k1i9f5g7h8_add_federation_tables.py
Normal file
@@ -0,0 +1,142 @@
|
||||
"""add federation tables
|
||||
|
||||
Revision ID: k1i9f5g7h8
|
||||
Revises: j0h8e4f6g7
|
||||
Create Date: 2026-02-21
|
||||
|
||||
Creates:
|
||||
- ap_actor_profiles — AP identity per user
|
||||
- ap_activities — local + remote AP activities
|
||||
- ap_followers — remote followers
|
||||
- ap_inbox_items — raw incoming AP activities
|
||||
- ap_anchors — OpenTimestamps merkle batches
|
||||
- ipfs_pins — IPFS content tracking (platform-wide)
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
revision = "k1i9f5g7h8"
|
||||
down_revision = "j0h8e4f6g7"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the federation (ActivityPub) core tables.

    Creation order is significant: ``ap_anchors`` and ``ap_actor_profiles``
    are created first because ``ap_activities`` carries foreign keys to both.
    """
    # -- ap_anchors (referenced by ap_activities) ----------------------------
    # One row per OpenTimestamps merkle batch; activities reference the anchor
    # that timestamped them.
    op.create_table(
        "ap_anchors",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("merkle_root", sa.String(128), nullable=False),
        # CID/proof columns stay NULL until proofs exist — NOTE(review):
        # presumably filled in later by an anchoring job; confirm.
        sa.Column("tree_ipfs_cid", sa.String(128), nullable=True),
        sa.Column("ots_proof_cid", sa.String(128), nullable=True),
        sa.Column("activity_count", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("bitcoin_txid", sa.String(128), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )

    # -- ap_actor_profiles ---------------------------------------------------
    # One AP identity per local user (1:1 enforced by the user_id unique).
    op.create_table(
        "ap_actor_profiles",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("preferred_username", sa.String(64), nullable=False),
        sa.Column("display_name", sa.String(255), nullable=True),
        sa.Column("summary", sa.Text(), nullable=True),
        sa.Column("public_key_pem", sa.Text(), nullable=False),
        # Private signing key stored alongside the profile.
        sa.Column("private_key_pem", sa.Text(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("preferred_username"),
        sa.UniqueConstraint("user_id"),
    )
    # NOTE(review): these unique indexes duplicate the UniqueConstraints above
    # (on PostgreSQL each unique constraint is already backed by a unique
    # index), so each column ends up with two unique indexes — confirm intent.
    op.create_index("ix_ap_actor_user_id", "ap_actor_profiles", ["user_id"], unique=True)
    op.create_index("ix_ap_actor_username", "ap_actor_profiles", ["preferred_username"], unique=True)

    # -- ap_activities -------------------------------------------------------
    # Holds both locally generated and remote activities; is_local tells them
    # apart (defaults to local).
    op.create_table(
        "ap_activities",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("activity_id", sa.String(512), nullable=False),
        sa.Column("activity_type", sa.String(64), nullable=False),
        sa.Column("actor_profile_id", sa.Integer(), nullable=False),
        sa.Column("object_type", sa.String(64), nullable=True),
        sa.Column("object_data", postgresql.JSONB(), nullable=True),
        sa.Column("published", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("signature", postgresql.JSONB(), nullable=True),
        sa.Column("is_local", sa.Boolean(), nullable=False, server_default="true"),
        # (source_type, source_id) point back at the originating local content
        # row; both NULL for purely remote activities.
        sa.Column("source_type", sa.String(64), nullable=True),
        sa.Column("source_id", sa.Integer(), nullable=True),
        sa.Column("ipfs_cid", sa.String(128), nullable=True),
        sa.Column("anchor_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
        # SET NULL: deleting an anchor must not delete the activities it batched.
        sa.ForeignKeyConstraint(["anchor_id"], ["ap_anchors.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("activity_id"),
    )
    op.create_index("ix_ap_activity_actor", "ap_activities", ["actor_profile_id"])
    # Composite index so content rows can look up their federation activity.
    op.create_index("ix_ap_activity_source", "ap_activities", ["source_type", "source_id"])
    op.create_index("ix_ap_activity_published", "ap_activities", ["published"])

    # -- ap_followers --------------------------------------------------------
    op.create_table(
        "ap_followers",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("actor_profile_id", sa.Integer(), nullable=False),
        sa.Column("follower_acct", sa.String(512), nullable=False),
        sa.Column("follower_inbox", sa.String(512), nullable=False),
        sa.Column("follower_actor_url", sa.String(512), nullable=False),
        sa.Column("follower_public_key", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # A given remote account may follow each local actor at most once.
        sa.UniqueConstraint("actor_profile_id", "follower_acct", name="uq_follower_acct"),
    )
    op.create_index("ix_ap_follower_actor", "ap_followers", ["actor_profile_id"])

    # -- ap_inbox_items ------------------------------------------------------
    # Raw incoming AP activities queued for processing; state starts "pending"
    # and processed_at is stamped once handled.
    op.create_table(
        "ap_inbox_items",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("actor_profile_id", sa.Integer(), nullable=False),
        sa.Column("raw_json", postgresql.JSONB(), nullable=False),
        sa.Column("activity_type", sa.String(64), nullable=True),
        sa.Column("from_actor", sa.String(512), nullable=True),
        sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(["actor_profile_id"], ["ap_actor_profiles.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_ap_inbox_state", "ap_inbox_items", ["state"])
    op.create_index("ix_ap_inbox_actor", "ap_inbox_items", ["actor_profile_id"])

    # -- ipfs_pins -----------------------------------------------------------
    # Platform-wide IPFS pin registry, unique by CID.
    op.create_table(
        "ipfs_pins",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("content_hash", sa.String(128), nullable=False),
        sa.Column("ipfs_cid", sa.String(128), nullable=False),
        sa.Column("pin_type", sa.String(64), nullable=False),
        sa.Column("source_type", sa.String(64), nullable=True),
        sa.Column("source_id", sa.Integer(), nullable=True),
        sa.Column("size_bytes", sa.BigInteger(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("ipfs_cid"),
    )
    op.create_index("ix_ipfs_pin_source", "ipfs_pins", ["source_type", "source_id"])
    # NOTE(review): redundant with UniqueConstraint("ipfs_cid") above — confirm.
    op.create_index("ix_ipfs_pin_cid", "ipfs_pins", ["ipfs_cid"], unique=True)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the federation core tables, children before parents.

    The order reverses ``upgrade``: every table holding a foreign key is
    removed before the table it references (ap_activities before
    ap_actor_profiles and ap_anchors, etc.).
    """
    for table_name in (
        "ipfs_pins",
        "ap_inbox_items",
        "ap_followers",
        "ap_activities",
        "ap_actor_profiles",
        "ap_anchors",
    ):
        op.drop_table(table_name)
|
||||
138
alembic/versions/l2j0g6h8i9_add_fediverse_tables.py
Normal file
138
alembic/versions/l2j0g6h8i9_add_fediverse_tables.py
Normal file
@@ -0,0 +1,138 @@
|
||||
"""add fediverse social tables
|
||||
|
||||
Revision ID: l2j0g6h8i9
|
||||
Revises: k1i9f5g7h8
|
||||
Create Date: 2026-02-22
|
||||
|
||||
Creates:
|
||||
- ap_remote_actors — cached profiles of remote actors
|
||||
- ap_following — outbound follows (local → remote)
|
||||
- ap_remote_posts — ingested posts from remote actors
|
||||
- ap_local_posts — native posts composed in federation UI
|
||||
- ap_interactions — likes and boosts
|
||||
- ap_notifications — follow/like/boost/mention/reply notifications
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
|
||||
revision = "l2j0g6h8i9"
|
||||
down_revision = "k1i9f5g7h8"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the fediverse social tables (follows, posts, interactions).

    ``ap_remote_actors`` comes first: most other tables here carry a foreign
    key to it. All tables reference ``ap_actor_profiles`` / ``ap_activities``
    from the previous migration (k1i9f5g7h8).
    """
    # -- ap_remote_actors --
    # Cached profile of a remote AP actor, refreshed via fetched_at.
    op.create_table(
        "ap_remote_actors",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_url", sa.String(512), unique=True, nullable=False),
        sa.Column("inbox_url", sa.String(512), nullable=False),
        sa.Column("shared_inbox_url", sa.String(512), nullable=True),
        sa.Column("preferred_username", sa.String(255), nullable=False),
        sa.Column("display_name", sa.String(255), nullable=True),
        sa.Column("summary", sa.Text, nullable=True),
        sa.Column("icon_url", sa.String(512), nullable=True),
        sa.Column("public_key_pem", sa.Text, nullable=True),
        sa.Column("domain", sa.String(255), nullable=False),
        sa.Column("fetched_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    # NOTE(review): actor_url already has unique=True on the column; this
    # unique index duplicates it — confirm intent.
    op.create_index("ix_ap_remote_actor_url", "ap_remote_actors", ["actor_url"], unique=True)
    op.create_index("ix_ap_remote_actor_domain", "ap_remote_actors", ["domain"])

    # -- ap_following --
    # Outbound follows (local actor → remote actor); state tracks the AP
    # Follow/Accept handshake (starts "pending", accepted_at set on Accept).
    op.create_table(
        "ap_following",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
        sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False),
        sa.Column("state", sa.String(20), nullable=False, server_default="pending"),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("accepted_at", sa.DateTime(timezone=True), nullable=True),
        # Each local actor may follow a given remote actor at most once.
        sa.UniqueConstraint("actor_profile_id", "remote_actor_id", name="uq_following"),
    )
    op.create_index("ix_ap_following_actor", "ap_following", ["actor_profile_id"])
    op.create_index("ix_ap_following_remote", "ap_following", ["remote_actor_id"])

    # -- ap_remote_posts --
    # Posts ingested from remote actors; unique on both the activity id and
    # the object id so re-delivery is idempotent.
    op.create_table(
        "ap_remote_posts",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False),
        sa.Column("activity_id", sa.String(512), unique=True, nullable=False),
        sa.Column("object_id", sa.String(512), unique=True, nullable=False),
        sa.Column("object_type", sa.String(64), nullable=False, server_default="Note"),
        sa.Column("content", sa.Text, nullable=True),
        sa.Column("summary", sa.Text, nullable=True),
        sa.Column("url", sa.String(512), nullable=True),
        sa.Column("attachment_data", JSONB, nullable=True),
        sa.Column("tag_data", JSONB, nullable=True),
        sa.Column("in_reply_to", sa.String(512), nullable=True),
        sa.Column("conversation", sa.String(512), nullable=True),
        sa.Column("published", sa.DateTime(timezone=True), nullable=True),
        sa.Column("fetched_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    op.create_index("ix_ap_remote_post_actor", "ap_remote_posts", ["remote_actor_id"])
    op.create_index("ix_ap_remote_post_published", "ap_remote_posts", ["published"])
    # NOTE(review): duplicates the unique=True already on object_id — confirm.
    op.create_index("ix_ap_remote_post_object", "ap_remote_posts", ["object_id"], unique=True)

    # -- ap_local_posts --
    # Native posts composed in the federation UI by local actors.
    op.create_table(
        "ap_local_posts",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
        sa.Column("content", sa.Text, nullable=False),
        sa.Column("visibility", sa.String(20), nullable=False, server_default="public"),
        sa.Column("in_reply_to", sa.String(512), nullable=True),
        sa.Column("published", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        # NOTE(review): updated_at only has an insert default; no onupdate is
        # configured here, so it will not advance automatically — confirm.
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    op.create_index("ix_ap_local_post_actor", "ap_local_posts", ["actor_profile_id"])
    op.create_index("ix_ap_local_post_published", "ap_local_posts", ["published"])

    # -- ap_interactions --
    # Likes and boosts; actor is either local (actor_profile_id) or remote
    # (remote_actor_id) — both columns nullable, (post_type, post_id) is a
    # polymorphic reference to a local or remote post.
    op.create_table(
        "ap_interactions",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True),
        sa.Column("remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=True),
        sa.Column("post_type", sa.String(20), nullable=False),
        sa.Column("post_id", sa.Integer, nullable=False),
        sa.Column("interaction_type", sa.String(20), nullable=False),
        sa.Column("activity_id", sa.String(512), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    op.create_index("ix_ap_interaction_post", "ap_interactions", ["post_type", "post_id"])
    op.create_index("ix_ap_interaction_actor", "ap_interactions", ["actor_profile_id"])
    op.create_index("ix_ap_interaction_remote", "ap_interactions", ["remote_actor_id"])

    # -- ap_notifications --
    # follow/like/boost/mention/reply notifications for a local actor; the
    # "from" actor and the target are all SET NULL so history survives deletes.
    op.create_table(
        "ap_notifications",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False),
        sa.Column("notification_type", sa.String(20), nullable=False),
        sa.Column("from_remote_actor_id", sa.Integer, sa.ForeignKey("ap_remote_actors.id", ondelete="SET NULL"), nullable=True),
        sa.Column("from_actor_profile_id", sa.Integer, sa.ForeignKey("ap_actor_profiles.id", ondelete="SET NULL"), nullable=True),
        sa.Column("target_activity_id", sa.Integer, sa.ForeignKey("ap_activities.id", ondelete="SET NULL"), nullable=True),
        sa.Column("target_remote_post_id", sa.Integer, sa.ForeignKey("ap_remote_posts.id", ondelete="SET NULL"), nullable=True),
        sa.Column("read", sa.Boolean, nullable=False, server_default="false"),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
    )
    op.create_index("ix_ap_notification_actor", "ap_notifications", ["actor_profile_id"])
    # Composite index serves the common "unread for actor" query.
    op.create_index("ix_ap_notification_read", "ap_notifications", ["actor_profile_id", "read"])
    op.create_index("ix_ap_notification_created", "ap_notifications", ["created_at"])
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the fediverse social tables in reverse creation order.

    FK-bearing tables go first so no drop is blocked by a dependent table;
    ``ap_remote_actors`` (referenced by nearly everything here) goes last.
    """
    for table_name in (
        "ap_notifications",
        "ap_interactions",
        "ap_local_posts",
        "ap_remote_posts",
        "ap_following",
        "ap_remote_actors",
    ):
        op.drop_table(table_name)
|
||||
@@ -70,7 +70,7 @@
|
||||
type="text"
|
||||
name="title"
|
||||
value=""
|
||||
placeholder="Post title..."
|
||||
placeholder="{{ 'Page title...' if is_page else 'Post title...' }}"
|
||||
class="w-full text-[36px] font-bold bg-transparent border-none outline-none
|
||||
placeholder:text-stone-300 mb-[8px] leading-tight"
|
||||
>
|
||||
@@ -101,7 +101,7 @@
|
||||
type="submit"
|
||||
class="px-[20px] py-[6px] bg-stone-700 text-white text-[14px] rounded-[8px]
|
||||
hover:bg-stone-800 transition-colors cursor-pointer"
|
||||
>Create Post</button>
|
||||
>{{ 'Create Page' if is_page else 'Create Post' }}</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
{% macro mini(oob=False) %}
|
||||
{% macro mini(oob=False, count=None) %}
|
||||
<div id="cart-mini" {% if oob %}hx-swap-oob="{{oob}}"{% endif %} >
|
||||
{# cart_count is set by the context processor in all apps.
|
||||
Cart app computes it from g.cart + calendar_cart_entries;
|
||||
other apps get it from the cart internal API. #}
|
||||
{% if cart_count is defined and cart_count is not none %}
|
||||
other apps get it from the cart internal API.
|
||||
count param allows explicit override when macro is imported without context. #}
|
||||
{% if count is not none %}
|
||||
{% set _count = count %}
|
||||
{% elif cart_count is defined and cart_count is not none %}
|
||||
{% set _count = cart_count %}
|
||||
{% elif cart is defined and cart is not none %}
|
||||
{% set _count = (cart | sum(attribute="quantity")) + ((calendar_cart_entries | length) if calendar_cart_entries else 0) %}
|
||||
|
||||
@@ -47,8 +47,8 @@
|
||||
{% endif %}
|
||||
{% include '_types/order/_items.html' %}
|
||||
{% include '_types/order/_calendar_items.html' %}
|
||||
|
||||
|
||||
{% include '_types/order/_ticket_items.html' %}
|
||||
|
||||
{% if order.status == 'failed' and order %}
|
||||
<div class="rounded-2xl border border-rose-200 bg-rose-50/80 p-4 sm:p-6 text-sm text-rose-900 space-y-2">
|
||||
<p class="font-medium">Your payment was not completed.</p>
|
||||
|
||||
@@ -25,6 +25,15 @@
|
||||
{% endcall %}
|
||||
</div>
|
||||
|
||||
{# Container nav widgets (market links, etc.) #}
|
||||
{% if container_nav_widgets %}
|
||||
{% for wdata in container_nav_widgets %}
|
||||
{% with ctx=wdata.ctx %}
|
||||
{% include wdata.widget.template with context %}
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{# Admin link #}
|
||||
{% if g.rights.admin %}
|
||||
{% from 'macros/admin_nav.html' import admin_nav_item %}
|
||||
|
||||
@@ -22,6 +22,14 @@
|
||||
{% endcall %}
|
||||
</div>
|
||||
|
||||
{% if container_nav_widgets %}
|
||||
{% for wdata in container_nav_widgets %}
|
||||
{% with ctx=wdata.ctx %}
|
||||
{% include wdata.widget.template with context %}
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{# Admin link #}
|
||||
{% if g.rights.admin %}
|
||||
|
||||
|
||||
49
browser/templates/_types/order/_ticket_items.html
Normal file
49
browser/templates/_types/order/_ticket_items.html
Normal file
@@ -0,0 +1,49 @@
|
||||
{# --- Tickets in this order --- #}
{# Renders the event-ticket section of an order page. Expects in context:
   - order: the order being displayed (section hidden when falsy)
   - order_tickets: iterable of ticket objects with entry_name, state,
     ticket_type_name, entry_start_at, entry_end_at, code, price #}
{% if order and order_tickets %}
<section class="mt-6 space-y-3">
  <h2 class="text-base sm:text-lg font-semibold">
    Event tickets in this order
  </h2>

  <ul class="divide-y divide-stone-200 rounded-2xl border border-stone-200 bg-white/80">
    {% for tk in order_tickets %}
      <li class="px-4 py-3 flex items-start justify-between text-sm">
        <div>
          <div class="font-medium flex items-center gap-2">
            {{ tk.entry_name }}
            {# Small status pill — colour keyed on ticket state #}
            <span class="inline-flex items-center rounded-full px-2 py-0.5 text-[11px] font-medium
              {% if tk.state == 'confirmed' %}
                bg-emerald-100 text-emerald-800
              {% elif tk.state == 'reserved' %}
                bg-amber-100 text-amber-800
              {% elif tk.state == 'checked_in' %}
                bg-blue-100 text-blue-800
              {% else %}
                bg-stone-100 text-stone-700
              {% endif %}
            ">
              {{ tk.state|replace('_', ' ')|capitalize }}
            </span>
          </div>
          {% if tk.ticket_type_name %}
            <div class="text-xs text-stone-500">{{ tk.ticket_type_name }}</div>
          {% endif %}
          {# NOTE(review): '%-d' (no-pad day) is a glibc extension — fails on
             Windows strftime; confirm deployment targets. #}
          <div class="text-xs text-stone-500">
            {{ tk.entry_start_at.strftime('%-d %b %Y, %H:%M') }}
            {% if tk.entry_end_at %}
              – {{ tk.entry_end_at.strftime('%-d %b %Y, %H:%M') }}
            {% endif %}
          </div>
          {# Ticket redemption/reference code #}
          <div class="text-xs text-stone-400 font-mono mt-0.5">
            {{ tk.code }}
          </div>
        </div>
        {# Price, right-aligned; missing price renders as £0.00 #}
        <div class="ml-4 font-medium">
          £{{ "%.2f"|format(tk.price or 0) }}
        </div>
      </li>
    {% endfor %}
  </ul>
</section>
{% endif %}
|
||||
@@ -1,17 +1,6 @@
|
||||
{# Main panel fragment for HTMX navigation - post article content #}
|
||||
{# Main panel fragment for HTMX navigation - post/page article content #}
|
||||
<article class="relative">
|
||||
{# ❤️ like button - always visible in top right of article #}
|
||||
{% if g.user %}
|
||||
<div class="absolute top-2 right-2 z-10 text-8xl md:text-6xl">
|
||||
{% set slug = post.slug %}
|
||||
{% set liked = post.is_liked or False %}
|
||||
{% set like_url = url_for('blog.post.like_toggle', slug=slug)|host %}
|
||||
{% set item_type = 'post' %}
|
||||
{% include "_types/browse/like/button.html" %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{# Draft indicator + edit link #}
|
||||
{# Draft indicator + edit link (shown for both posts and pages) #}
|
||||
{% if post.status == "draft" %}
|
||||
<div class="flex items-center justify-center gap-2 mb-3">
|
||||
<span class="inline-block px-3 py-1 rounded-full text-sm font-semibold bg-amber-100 text-amber-800">Draft</span>
|
||||
@@ -36,6 +25,18 @@
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if not post.is_page %}
|
||||
{# ── Blog post chrome: like button, excerpt, tags/authors ── #}
|
||||
{% if g.user %}
|
||||
<div class="absolute top-2 right-2 z-10 text-8xl md:text-6xl">
|
||||
{% set slug = post.slug %}
|
||||
{% set liked = post.is_liked or False %}
|
||||
{% set like_url = url_for('blog.post.like_toggle', slug=slug)|host %}
|
||||
{% set item_type = 'post' %}
|
||||
{% include "_types/browse/like/button.html" %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if post.custom_excerpt %}
|
||||
<div class="w-full text-center italic text-3xl p-2">
|
||||
{{post.custom_excerpt|safe}}
|
||||
@@ -44,6 +45,8 @@
|
||||
<div class="hidden md:block">
|
||||
{% include '_types/blog/_card/at_bar.html' %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if post.feature_image %}
|
||||
<div class="mb-3 flex justify-center">
|
||||
<img
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
{# Entries for this day #}
|
||||
<div class="space-y-0.5">
|
||||
{% for e in month_entries %}
|
||||
{% if e.start_at.date() == day.date and e.deleted_at is none %}
|
||||
{% if e.start_at.date() == day.date %}
|
||||
{% if e.id in associated_entry_ids %}
|
||||
{# Associated entry - show with delete button #}
|
||||
<div class="flex items-center gap-1 text-[10px] rounded px-1 py-0.5 bg-green-200 text-green-900">
|
||||
|
||||
@@ -77,7 +77,7 @@
|
||||
type="text"
|
||||
name="title"
|
||||
value="{{ ghost_post.title if ghost_post else '' }}"
|
||||
placeholder="Post title..."
|
||||
placeholder="{{ 'Page title...' if post and post.is_page else 'Post title...' }}"
|
||||
class="w-full text-[36px] font-bold bg-transparent border-none outline-none
|
||||
placeholder:text-stone-300 mb-[8px] leading-tight"
|
||||
>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{# ── Post Settings Form ── #}
|
||||
{# ── Post/Page Settings Form ── #}
|
||||
{% set gp = ghost_post or {} %}
|
||||
{% set _is_page = post.is_page if post else False %}
|
||||
|
||||
{% macro field_label(text, field_for=None) %}
|
||||
<label {% if field_for %}for="{{ field_for }}"{% endif %}
|
||||
@@ -68,7 +69,7 @@
|
||||
{% call section('General', open=True) %}
|
||||
<div>
|
||||
{{ field_label('Slug', 'settings-slug') }}
|
||||
{{ text_input('slug', gp.slug or '', 'post-slug') }}
|
||||
{{ text_input('slug', gp.slug or '', 'page-slug' if _is_page else 'post-slug') }}
|
||||
</div>
|
||||
<div>
|
||||
{{ field_label('Published at', 'settings-published_at') }}
|
||||
@@ -83,7 +84,7 @@
|
||||
>
|
||||
</div>
|
||||
<div>
|
||||
{{ checkbox_input('featured', gp.featured, 'Featured post') }}
|
||||
{{ checkbox_input('featured', gp.featured, 'Featured page' if _is_page else 'Featured post') }}
|
||||
</div>
|
||||
<div>
|
||||
{{ field_label('Visibility', 'settings-visibility') }}
|
||||
@@ -176,7 +177,7 @@
|
||||
{% call section('Advanced') %}
|
||||
<div>
|
||||
{{ field_label('Custom template', 'settings-custom_template') }}
|
||||
{{ text_input('custom_template', gp.custom_template or '', 'custom-post.hbs') }}
|
||||
{{ text_input('custom_template', gp.custom_template or '', 'custom-page.hbs' if _is_page else 'custom-post.hbs') }}
|
||||
</div>
|
||||
{% endcall %}
|
||||
|
||||
|
||||
@@ -55,6 +55,11 @@ class TicketDTO:
|
||||
calendar_name: str | None = None
|
||||
created_at: datetime | None = None
|
||||
checked_in_at: datetime | None = None
|
||||
entry_id: int | None = None
|
||||
ticket_type_id: int | None = None
|
||||
price: Decimal | None = None
|
||||
order_id: int | None = None
|
||||
calendar_container_id: int | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
@@ -127,3 +132,123 @@ class CartSummaryDTO:
|
||||
calendar_count: int = 0
|
||||
calendar_total: Decimal = Decimal("0")
|
||||
items: list[CartItemDTO] = field(default_factory=list)
|
||||
ticket_count: int = 0
|
||||
ticket_total: Decimal = Decimal("0")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Federation / ActivityPub domain
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class ActorProfileDTO:
    """Immutable view of a local user's ActivityPub actor identity.

    Field order is part of the constructor signature — do not reorder.
    """
    id: int
    user_id: int                        # owning local user
    preferred_username: str             # unique handle used in actor URLs
    public_key_pem: str                 # actor's public key (PEM)
    display_name: str | None = None
    summary: str | None = None
    # NOTE(review): inbox/outbox URLs are not columns of ap_actor_profiles —
    # presumably derived from the username when the DTO is built; confirm.
    inbox_url: str | None = None
    outbox_url: str | None = None
    created_at: datetime | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class APActivityDTO:
    """Immutable view of one ActivityPub activity (local or remote)."""
    id: int
    activity_id: str                    # globally unique AP activity IRI
    activity_type: str                  # e.g. "Create", "Follow"
    actor_profile_id: int               # local actor that owns/received it
    object_type: str | None = None
    object_data: dict | None = None     # raw AP object payload
    published: datetime | None = None
    is_local: bool = True               # False for ingested remote activities
    # (source_type, source_id) link back to the originating content row.
    source_type: str | None = None
    source_id: int | None = None
    ipfs_cid: str | None = None         # set once the activity is pinned
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class APFollowerDTO:
    """Immutable view of a remote follower of a local actor."""
    id: int
    actor_profile_id: int               # local actor being followed
    follower_acct: str                  # remote account identifier
    follower_inbox: str                 # delivery target for activities
    follower_actor_url: str
    created_at: datetime | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class APAnchorDTO:
    """Immutable view of an OpenTimestamps merkle anchor batch."""
    id: int
    merkle_root: str                    # root hash of the batched activities
    activity_count: int = 0             # activities included in this batch
    tree_ipfs_cid: str | None = None    # CID of the stored merkle tree
    ots_proof_cid: str | None = None    # CID of the OTS proof
    confirmed_at: datetime | None = None
    bitcoin_txid: str | None = None     # set once the anchor is confirmed
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class RemoteActorDTO:
    """Immutable cached profile of a remote ActivityPub actor."""
    id: int
    actor_url: str                      # canonical actor IRI (unique)
    inbox_url: str
    preferred_username: str
    domain: str                         # remote instance hostname
    display_name: str | None = None
    summary: str | None = None
    icon_url: str | None = None         # avatar image
    shared_inbox_url: str | None = None # preferred for bulk delivery when set
    public_key_pem: str | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class RemotePostDTO:
    """Immutable view of a post ingested from a remote actor."""
    id: int
    remote_actor_id: int
    object_id: str                      # AP object IRI (unique per post)
    content: str                        # post body (HTML per AP convention — confirm)
    summary: str | None = None          # content warning / summary text
    url: str | None = None              # human-facing permalink
    attachments: list[dict] = field(default_factory=list)
    tags: list[dict] = field(default_factory=list)
    published: datetime | None = None
    actor: RemoteActorDTO | None = None # embedded author profile, when loaded
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class TimelineItemDTO:
    """One rendered timeline entry, normalised across local/remote/boost posts."""
    id: str                             # composite key for cursor pagination
    post_type: str                      # "local" | "remote" | "boost"
    content: str                        # HTML
    published: datetime
    actor_name: str
    actor_username: str
    object_id: str | None = None
    summary: str | None = None
    url: str | None = None
    attachments: list[dict] = field(default_factory=list)
    tags: list[dict] = field(default_factory=list)
    actor_domain: str | None = None     # None = local
    actor_icon: str | None = None
    actor_url: str | None = None
    boosted_by: str | None = None       # display name of the booster, for boosts
    # Interaction counters/flags relative to the viewing user.
    like_count: int = 0
    boost_count: int = 0
    liked_by_me: bool = False
    boosted_by_me: bool = False
    author_inbox: str | None = None     # delivery target for replies/likes
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class NotificationDTO:
    """One rendered federation notification for a local actor."""
    id: int
    notification_type: str              # follow/like/boost/mention/reply
    from_actor_name: str
    from_actor_username: str
    created_at: datetime
    read: bool                          # viewer has seen this notification
    from_actor_domain: str | None = None  # None = notification from a local actor
    from_actor_icon: str | None = None
    target_content_preview: str | None = None  # snippet of the referenced post
|
||||
|
||||
@@ -5,6 +5,7 @@ implementations (Sql*Service) and no-op stubs both satisfy them.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Protocol, runtime_checkable
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
@@ -18,6 +19,13 @@ from .dtos import (
|
||||
ProductDTO,
|
||||
CartItemDTO,
|
||||
CartSummaryDTO,
|
||||
ActorProfileDTO,
|
||||
APActivityDTO,
|
||||
APFollowerDTO,
|
||||
RemoteActorDTO,
|
||||
RemotePostDTO,
|
||||
TimelineItemDTO,
|
||||
NotificationDTO,
|
||||
)
|
||||
|
||||
|
||||
@@ -27,6 +35,10 @@ class BlogService(Protocol):
|
||||
async def get_post_by_id(self, session: AsyncSession, id: int) -> PostDTO | None: ...
|
||||
async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]: ...
|
||||
|
||||
async def search_posts(
|
||||
self, session: AsyncSession, query: str, page: int = 1, per_page: int = 10,
|
||||
) -> tuple[list[PostDTO], int]: ...
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class CalendarService(Protocol):
|
||||
@@ -82,6 +94,47 @@ class CalendarService(Protocol):
|
||||
self, session: AsyncSession, post_ids: list[int],
|
||||
) -> dict[int, list[CalendarEntryDTO]]: ...
|
||||
|
||||
async def pending_tickets(
|
||||
self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
|
||||
) -> list[TicketDTO]: ...
|
||||
|
||||
async def tickets_for_page(
|
||||
self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None,
|
||||
) -> list[TicketDTO]: ...
|
||||
|
||||
async def claim_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int, user_id: int | None,
|
||||
session_id: str | None, page_post_id: int | None,
|
||||
) -> None: ...
|
||||
|
||||
async def confirm_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> None: ...
|
||||
|
||||
async def get_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> list[TicketDTO]: ...
|
||||
|
||||
async def adopt_tickets_for_user(
|
||||
self, session: AsyncSession, user_id: int, session_id: str,
|
||||
) -> None: ...
|
||||
|
||||
async def adjust_ticket_quantity(
|
||||
self, session: AsyncSession, entry_id: int, count: int, *,
|
||||
user_id: int | None, session_id: str | None,
|
||||
ticket_type_id: int | None = None,
|
||||
) -> int: ...
|
||||
|
||||
async def entry_ids_for_content(
|
||||
self, session: AsyncSession, content_type: str, content_id: int,
|
||||
) -> set[int]: ...
|
||||
|
||||
async def visible_entries_for_period(
|
||||
self, session: AsyncSession, calendar_id: int,
|
||||
period_start: datetime, period_end: datetime,
|
||||
*, user_id: int | None, is_admin: bool, session_id: str | None,
|
||||
) -> list[CalendarEntryDTO]: ...
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class MarketService(Protocol):
|
||||
@@ -116,3 +169,165 @@ class CartService(Protocol):
|
||||
async def adopt_cart_for_user(
|
||||
self, session: AsyncSession, user_id: int, session_id: str,
|
||||
) -> None: ...
|
||||
|
||||
|
||||
@runtime_checkable
class FederationService(Protocol):
    """Structural interface for the federation (ActivityPub) domain.

    Both the SQL-backed implementation and no-op stubs satisfy this
    protocol; callers obtain it via the service registry. All methods
    take the caller's AsyncSession — the service never owns transactions.
    """

    # -- Actor management -----------------------------------------------------
    async def get_actor_by_username(
        self, session: AsyncSession, username: str,
    ) -> ActorProfileDTO | None: ...

    async def get_actor_by_user_id(
        self, session: AsyncSession, user_id: int,
    ) -> ActorProfileDTO | None: ...

    async def create_actor(
        self, session: AsyncSession, user_id: int, preferred_username: str,
        display_name: str | None = None, summary: str | None = None,
    ) -> ActorProfileDTO: ...

    async def username_available(
        self, session: AsyncSession, username: str,
    ) -> bool: ...

    # -- Publishing (core cross-domain API) -----------------------------------
    # Other domains (blog, market, ...) call this at their write sites;
    # source_type/source_id link the activity back to the originating row.
    async def publish_activity(
        self, session: AsyncSession, *,
        actor_user_id: int,
        activity_type: str,
        object_type: str,
        object_data: dict,
        source_type: str | None = None,
        source_id: int | None = None,
    ) -> APActivityDTO: ...

    # -- Queries --------------------------------------------------------------
    async def get_activity(
        self, session: AsyncSession, activity_id: str,
    ) -> APActivityDTO | None: ...

    # Returns (page of activities, total count).
    async def get_outbox(
        self, session: AsyncSession, username: str,
        page: int = 1, per_page: int = 20,
    ) -> tuple[list[APActivityDTO], int]: ...

    async def get_activity_for_source(
        self, session: AsyncSession, source_type: str, source_id: int,
    ) -> APActivityDTO | None: ...

    # -- Followers ------------------------------------------------------------
    async def get_followers(
        self, session: AsyncSession, username: str,
    ) -> list[APFollowerDTO]: ...

    async def add_follower(
        self, session: AsyncSession, username: str,
        follower_acct: str, follower_inbox: str, follower_actor_url: str,
        follower_public_key: str | None = None,
    ) -> APFollowerDTO: ...

    # Returns True when a follower row was actually removed.
    async def remove_follower(
        self, session: AsyncSession, username: str, follower_acct: str,
    ) -> bool: ...

    # -- Remote actors --------------------------------------------------------
    async def get_or_fetch_remote_actor(
        self, session: AsyncSession, actor_url: str,
    ) -> RemoteActorDTO | None: ...

    async def search_remote_actor(
        self, session: AsyncSession, acct: str,
    ) -> RemoteActorDTO | None: ...

    # -- Following (outbound) -------------------------------------------------
    async def send_follow(
        self, session: AsyncSession, local_username: str, remote_actor_url: str,
    ) -> None: ...

    # Returns (page of remote actors, total count).
    async def get_following(
        self, session: AsyncSession, username: str,
        page: int = 1, per_page: int = 20,
    ) -> tuple[list[RemoteActorDTO], int]: ...

    # Called when the remote side Accepts our Follow.
    async def accept_follow_response(
        self, session: AsyncSession, local_username: str, remote_actor_url: str,
    ) -> None: ...

    async def unfollow(
        self, session: AsyncSession, local_username: str, remote_actor_url: str,
    ) -> None: ...

    # -- Remote posts ---------------------------------------------------------
    async def ingest_remote_post(
        self, session: AsyncSession, remote_actor_id: int,
        activity_json: dict, object_json: dict,
    ) -> None: ...

    async def delete_remote_post(
        self, session: AsyncSession, object_id: str,
    ) -> None: ...

    async def get_remote_post(
        self, session: AsyncSession, object_id: str,
    ) -> RemotePostDTO | None: ...

    # -- Timelines ------------------------------------------------------------
    # ``before`` is a keyset-pagination cursor (items older than it).
    async def get_home_timeline(
        self, session: AsyncSession, actor_profile_id: int,
        before: datetime | None = None, limit: int = 20,
    ) -> list[TimelineItemDTO]: ...

    async def get_public_timeline(
        self, session: AsyncSession,
        before: datetime | None = None, limit: int = 20,
    ) -> list[TimelineItemDTO]: ...

    # -- Local posts ----------------------------------------------------------
    # Returns the new local post's id.
    async def create_local_post(
        self, session: AsyncSession, actor_profile_id: int,
        content: str, visibility: str = "public",
        in_reply_to: str | None = None,
    ) -> int: ...

    async def delete_local_post(
        self, session: AsyncSession, actor_profile_id: int, post_id: int,
    ) -> None: ...

    # -- Interactions ---------------------------------------------------------
    # ``author_inbox`` is the remote inbox to notify of the (un)like/(un)boost.
    async def like_post(
        self, session: AsyncSession, actor_profile_id: int,
        object_id: str, author_inbox: str,
    ) -> None: ...

    async def unlike_post(
        self, session: AsyncSession, actor_profile_id: int,
        object_id: str, author_inbox: str,
    ) -> None: ...

    async def boost_post(
        self, session: AsyncSession, actor_profile_id: int,
        object_id: str, author_inbox: str,
    ) -> None: ...

    async def unboost_post(
        self, session: AsyncSession, actor_profile_id: int,
        object_id: str, author_inbox: str,
    ) -> None: ...

    # -- Notifications --------------------------------------------------------
    async def get_notifications(
        self, session: AsyncSession, actor_profile_id: int,
        before: datetime | None = None, limit: int = 20,
    ) -> list[NotificationDTO]: ...

    async def unread_notification_count(
        self, session: AsyncSession, actor_profile_id: int,
    ) -> int: ...

    async def mark_notifications_read(
        self, session: AsyncSession, actor_profile_id: int,
    ) -> None: ...

    # -- Stats ----------------------------------------------------------------
    async def get_stats(self, session: AsyncSession) -> dict: ...
|
||||
|
||||
@@ -6,3 +6,5 @@ def register_shared_handlers():
|
||||
import shared.events.handlers.container_handlers # noqa: F401
|
||||
import shared.events.handlers.login_handlers # noqa: F401
|
||||
import shared.events.handlers.order_handlers # noqa: F401
|
||||
# federation_handlers removed — publication is now inline at write sites
|
||||
import shared.events.handlers.ap_delivery_handler # noqa: F401
|
||||
|
||||
170
events/handlers/ap_delivery_handler.py
Normal file
170
events/handlers/ap_delivery_handler.py
Normal file
@@ -0,0 +1,170 @@
|
||||
"""Deliver AP activities to remote followers.
|
||||
|
||||
On ``federation.activity_created`` → load activity + actor + followers →
|
||||
sign with HTTP Signatures → POST to each follower inbox.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import httpx
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.events.bus import register_handler, DomainEvent
|
||||
from shared.models.federation import ActorProfile, APActivity, APFollower
|
||||
from shared.services.registry import services
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
AP_CONTENT_TYPE = "application/activity+json"
|
||||
DELIVERY_TIMEOUT = 15 # seconds per request
|
||||
|
||||
|
||||
def _build_activity_json(activity: APActivity, actor: ActorProfile, domain: str) -> dict:
    """Build the full AP activity JSON-LD for delivery.

    Args:
        activity: the stored activity row (type, object payload, timestamps).
        actor: the local sending actor; only ``preferred_username`` is read here.
        domain: AP domain used to mint actor/object URLs.

    Returns:
        A JSON-serialisable dict with ``@context``, the activity envelope,
        and the (possibly Tombstone) object embedded under ``"object"``.
    """
    username = actor.preferred_username
    actor_url = f"https://{domain}/users/{username}"
    published_iso = activity.published.isoformat() if activity.published else None

    obj = dict(activity.object_data or {})

    # Object id MUST be on the actor's domain (Mastodon origin check).
    # The post URL (e.g. coop.rose-ash.com/slug/) goes in "url" only.
    object_id = activity.activity_id + "/object"

    if activity.activity_type == "Delete":
        # Delete: object is a Tombstone with just id + type
        obj.setdefault("id", object_id)
        obj.setdefault("type", "Tombstone")
    else:
        # Create/Update: full object with attribution
        # Prefer stable id from object_data (set by try_publish), fall back to activity-derived
        obj.setdefault("id", object_id)
        obj.setdefault("type", activity.object_type)
        obj.setdefault("attributedTo", actor_url)
        # Bug fix: setdefault with a None value used to insert a literal
        # "published": null into the object when the activity had no
        # timestamp; only set the key when we actually have a value.
        if published_iso is not None:
            obj.setdefault("published", published_iso)
        obj.setdefault("to", ["https://www.w3.org/ns/activitystreams#Public"])
        obj.setdefault("cc", [f"{actor_url}/followers"])

    return {
        "@context": [
            "https://www.w3.org/ns/activitystreams",
            "https://w3id.org/security/v1",
        ],
        "id": activity.activity_id,
        "type": activity.activity_type,
        "actor": actor_url,
        "published": published_iso,
        "to": ["https://www.w3.org/ns/activitystreams#Public"],
        "cc": [f"{actor_url}/followers"],
        "object": obj,
    }
|
||||
|
||||
|
||||
async def _deliver_to_inbox(
    client: httpx.AsyncClient,
    inbox_url: str,
    body: dict,
    actor: ActorProfile,
    domain: str,
) -> bool:
    """POST signed activity to a single inbox. Returns True on success."""
    import json
    from urllib.parse import urlparse

    from shared.utils.http_signatures import sign_request

    payload = json.dumps(body).encode()
    target = urlparse(inbox_url)

    # Sign with the actor's key; the key id must match what the actor
    # document advertises so receivers can verify the signature.
    signed_headers = sign_request(
        private_key_pem=actor.private_key_pem,
        key_id=f"https://{domain}/users/{actor.preferred_username}#main-key",
        method="POST",
        path=target.path,
        host=target.netloc,
        body=payload,
    )
    signed_headers["Content-Type"] = AP_CONTENT_TYPE

    try:
        response = await client.post(
            inbox_url,
            content=payload,
            headers=signed_headers,
            timeout=DELIVERY_TIMEOUT,
        )
        if response.status_code >= 300:
            log.warning("Delivery to %s → %d: %s", inbox_url, response.status_code, response.text[:200])
            return False
        log.info("Delivered to %s → %d", inbox_url, response.status_code)
        return True
    except Exception:
        # Best-effort delivery: never let one dead inbox break the fan-out.
        log.exception("Delivery failed for %s", inbox_url)
        return False
|
||||
|
||||
|
||||
async def on_activity_created(event: DomainEvent, session: AsyncSession) -> None:
    """Deliver a newly created activity to all followers.

    Triggered by ``federation.activity_created``; the event payload must
    carry ``activity_id`` (the AP id URI). Missing activities, actors, or
    keys are logged and skipped — delivery is strictly best-effort.
    """
    import os

    # No federation service registered → federation is disabled; bail out.
    if not services.has("federation"):
        return

    payload = event.payload
    activity_id_uri = payload.get("activity_id")
    if not activity_id_uri:
        return

    domain = os.getenv("AP_DOMAIN", "rose-ash.com")

    # Load the activity
    activity = (
        await session.execute(
            select(APActivity).where(APActivity.activity_id == activity_id_uri)
        )
    ).scalar_one_or_none()
    if not activity:
        log.warning("Activity not found: %s", activity_id_uri)
        return

    # Load actor with private key (required to HTTP-sign deliveries)
    actor = (
        await session.execute(
            select(ActorProfile).where(ActorProfile.id == activity.actor_profile_id)
        )
    ).scalar_one_or_none()
    if not actor or not actor.private_key_pem:
        log.warning("Actor not found or missing key for activity %s", activity_id_uri)
        return

    # Load followers
    followers = (
        await session.execute(
            select(APFollower).where(APFollower.actor_profile_id == actor.id)
        )
    ).scalars().all()

    if not followers:
        log.debug("No followers to deliver to for %s", activity_id_uri)
        return

    # Build activity JSON
    activity_json = _build_activity_json(activity, actor, domain)

    # Deliver to each follower inbox
    # Deduplicate inboxes (multiple followers might share a shared inbox)
    inboxes = {f.follower_inbox for f in followers if f.follower_inbox}

    log.info(
        "Delivering %s to %d inbox(es) for @%s",
        activity.activity_type, len(inboxes), actor.preferred_username,
    )

    # NOTE(review): deliveries are sequential with no retry/backoff;
    # confirm that's acceptable at current follower counts.
    async with httpx.AsyncClient() as client:
        for inbox_url in inboxes:
            await _deliver_to_inbox(client, inbox_url, activity_json, actor, domain)


register_handler("federation.activity_created", on_activity_created)
|
||||
8
events/handlers/federation_handlers.py
Normal file
8
events/handlers/federation_handlers.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""Federation event handlers — REMOVED.
|
||||
|
||||
Federation publication is now inline at the write site (ghost_sync, entries,
|
||||
market routes) via shared.services.federation_publish.try_publish().
|
||||
|
||||
AP delivery (federation.activity_created → inbox POST) remains async via
|
||||
ap_delivery_handler.
|
||||
"""
|
||||
@@ -16,9 +16,10 @@ async def on_user_logged_in(event: DomainEvent, session: AsyncSession) -> None:
|
||||
if services.has("cart"):
|
||||
await services.cart.adopt_cart_for_user(session, user_id, session_id)
|
||||
|
||||
# Adopt calendar entries (if calendar service is registered)
|
||||
# Adopt calendar entries and tickets (if calendar service is registered)
|
||||
if services.has("calendar"):
|
||||
await services.calendar.adopt_entries_for_user(session, user_id, session_id)
|
||||
await services.calendar.adopt_tickets_for_user(session, user_id, session_id)
|
||||
|
||||
|
||||
register_handler("user.logged_in", on_user_logged_in)
|
||||
|
||||
@@ -11,7 +11,7 @@ from shared.config import init_config, config, pretty
|
||||
from shared.models import KV # ensure shared models imported
|
||||
# Register all app model classes with SQLAlchemy so cross-domain
|
||||
# relationship() string references resolve correctly.
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models"):
|
||||
for _mod in ("blog.models", "market.models", "cart.models", "events.models", "federation.models"):
|
||||
try:
|
||||
__import__(_mod)
|
||||
except ImportError:
|
||||
@@ -143,12 +143,6 @@ def create_base_app(
|
||||
async def _inject_base():
|
||||
return await base_context()
|
||||
|
||||
# --- cleanup internal API client on shutdown ---
|
||||
@app.after_serving
|
||||
async def _close_internal_client():
|
||||
from .internal_api import close_client
|
||||
await close_client()
|
||||
|
||||
# --- event processor ---
|
||||
_event_processor = EventProcessor()
|
||||
|
||||
|
||||
@@ -1,152 +0,0 @@
|
||||
"""
|
||||
Async HTTP client for inter-app communication.
|
||||
|
||||
Each app exposes internal JSON API endpoints. Other apps call them
|
||||
via httpx over the Docker overlay network (or localhost in dev).
|
||||
|
||||
URLs resolved from env vars:
|
||||
INTERNAL_URL_COOP (default http://localhost:8000)
|
||||
INTERNAL_URL_MARKET (default http://localhost:8001)
|
||||
INTERNAL_URL_CART (default http://localhost:8002)
|
||||
|
||||
Session cookie forwarding: when ``forward_session=True`` the current
|
||||
request's ``coop_session`` cookie is sent along so the target app can
|
||||
resolve ``g.user`` / cart identity.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
from quart import request as quart_request
|
||||
|
||||
log = logging.getLogger("internal_api")
|
||||
|
||||
class DictObj:
    """Thin wrapper so ``d.key`` works on dicts returned by JSON APIs.

    Jinja templates use attribute access (``item.post.slug``) which
    doesn't work on plain dicts. Wrapping the API response with
    ``dictobj()`` makes both ``item.post.slug`` and ``item["post"]["slug"]``
    work identically. Nested dicts — including dicts inside lists — are
    wrapped lazily on access.
    """

    __slots__ = ("_data",)

    def __init__(self, data: dict):
        self._data = data

    @staticmethod
    def _wrap(v):
        # Wrap nested dicts (and dicts inside lists) so chained attribute
        # access keeps working at any depth — mirrors module-level dictobj().
        if isinstance(v, dict):
            return DictObj(v)
        if isinstance(v, list):
            return [DictObj(x) if isinstance(x, dict) else x for x in v]
        return v

    def __getattr__(self, name: str):
        try:
            v = self._data[name]
        except KeyError:
            raise AttributeError(name) from None
        return self._wrap(v)

    def __getitem__(self, key):
        # Bug fix: the docstring promises ``item["post"]["slug"]`` works,
        # but no __getitem__ existed, so subscripting raised TypeError.
        return self._wrap(self._data[key])

    def __contains__(self, key) -> bool:
        return key in self._data

    def get(self, key, default=None):
        return self._wrap(self._data.get(key, default))

    def __repr__(self):
        return f"DictObj({self._data!r})"

    def __bool__(self):
        return bool(self._data)
|
||||
|
||||
|
||||
def dictobj(data):
    """Recursively wrap dicts (or lists of dicts) for attribute access."""
    if isinstance(data, dict):
        return DictObj(data)
    if isinstance(data, list):
        return [DictObj(item) if isinstance(item, dict) else item for item in data]
    # Scalars (and anything else) pass through untouched.
    return data
|
||||
|
||||
|
||||
# Development fallbacks for per-app base URLs; production overrides them
# via INTERNAL_URL_<APP> environment variables (see _base_url).
_DEFAULTS = {
    "coop": "http://localhost:8000",
    "market": "http://localhost:8001",
    "cart": "http://localhost:8002",
    "events": "http://localhost:8003",
}

# Shared connection pool, created lazily by _get_client and torn down by
# close_client from the app's after_serving hook.
_client: httpx.AsyncClient | None = None

TIMEOUT = 3.0  # seconds — keep internal calls fast-failing
|
||||
|
||||
|
||||
def _base_url(app_name: str) -> str:
    """Resolve *app_name*'s base URL from env, falling back to dev defaults."""
    override = os.getenv(f"INTERNAL_URL_{app_name.upper()}")
    if override is not None:
        return override
    # Unknown apps resolve to "" — callers will then hit a relative path.
    return _DEFAULTS.get(app_name, "")
|
||||
|
||||
|
||||
def _get_client() -> httpx.AsyncClient:
    """Return the shared AsyncClient, (re)creating it if absent or closed."""
    global _client
    needs_fresh = _client is None or _client.is_closed
    if needs_fresh:
        _client = httpx.AsyncClient(timeout=TIMEOUT)
    return _client
|
||||
|
||||
|
||||
async def close_client() -> None:
    """Call from ``@app.after_serving`` to cleanly close the pool."""
    global _client
    still_open = _client is not None and not _client.is_closed
    if still_open:
        await _client.aclose()
    # Drop the reference either way so the next call builds a fresh pool.
    _client = None
|
||||
|
||||
|
||||
def _session_cookies() -> dict[str, str]:
    """Extract the shared session cookie from the incoming request."""
    cookie_name = "coop_session"
    try:
        value = quart_request.cookies.get(cookie_name)
    except RuntimeError:
        # No active request context
        value = None
    return {cookie_name: value} if value else {}
|
||||
|
||||
|
||||
async def get(
    app_name: str,
    path: str,
    *,
    forward_session: bool = False,
    params: dict | None = None,
) -> dict | list | None:
    """GET ``<app_base><path>`` and return parsed JSON, or ``None`` on failure."""
    cookies = _session_cookies() if forward_session else {}
    url = _base_url(app_name).rstrip("/") + path
    try:
        response = await _get_client().get(url, params=params, cookies=cookies)
        response.raise_for_status()
        return response.json()
    except Exception as exc:
        # Internal calls are best-effort: log and degrade to None.
        log.warning("internal_api GET %s failed: %r", url, exc)
        return None
|
||||
|
||||
|
||||
async def post(
    app_name: str,
    path: str,
    *,
    json: Any = None,
    forward_session: bool = False,
) -> dict | list | None:
    """POST ``<app_base><path>`` and return parsed JSON, or ``None`` on failure."""
    cookies = _session_cookies() if forward_session else {}
    url = _base_url(app_name).rstrip("/") + path
    try:
        response = await _get_client().post(url, json=json, cookies=cookies)
        response.raise_for_status()
        return response.json()
    except Exception as exc:
        # Mirror get(): never raise into callers, just log and return None.
        log.warning("internal_api POST %s failed: %r", url, exc)
        return None
|
||||
@@ -37,6 +37,10 @@ def events_url(path: str = "/") -> str:
|
||||
return app_url("events", path)
|
||||
|
||||
|
||||
def federation_url(path: str = "/") -> str:
    """Build a URL on the federation app for *path* (delegates to app_url)."""
    return app_url("federation", path)
|
||||
|
||||
|
||||
def page_cart_url(page_slug: str, path: str = "/") -> str:
|
||||
if not path.startswith("/"):
|
||||
path = "/" + path
|
||||
@@ -62,6 +66,9 @@ def market_product_url(product_slug: str, suffix: str = "", market_place=None) -
|
||||
|
||||
|
||||
def login_url(next_url: str = "") -> str:
    """Build the auth login URL, optionally carrying a ``next`` redirect.

    Auth lives in blog (coop) for now; set AUTH_APP=federation to switch
    which app hosts ``/auth/login/``.

    Bug fix: the previous body kept the legacy coop_url() returns above the
    AUTH_APP-aware ones, leaving ``base`` computed but unreachable — the
    AUTH_APP switch advertised in the comment never took effect.
    """
    auth_app = os.getenv("AUTH_APP", "coop")
    base = app_url(auth_app, "/auth/login/")
    if next_url:
        # Percent-encode the redirect target so query metacharacters survive.
        return f"{base}?next={quote(next_url, safe='')}"
    return base
||||
|
||||
@@ -27,3 +27,7 @@ from .calendars import (
|
||||
)
|
||||
from .container_relation import ContainerRelation
|
||||
from .menu_node import MenuNode
|
||||
from .federation import (
|
||||
ActorProfile, APActivity, APFollower, APInboxItem, APAnchor, IPFSPin,
|
||||
RemoteActor, APFollowing, APRemotePost, APLocalPost, APInteraction, APNotification,
|
||||
)
|
||||
|
||||
399
models/federation.py
Normal file
399
models/federation.py
Normal file
@@ -0,0 +1,399 @@
|
||||
"""Federation / ActivityPub ORM models.
|
||||
|
||||
These models support AP identity, activities, followers, inbox processing,
|
||||
IPFS content addressing, and OpenTimestamps anchoring.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import (
|
||||
String, Integer, DateTime, Text, Boolean, BigInteger,
|
||||
ForeignKey, UniqueConstraint, Index, func,
|
||||
)
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
class ActorProfile(Base):
    """AP identity for a user. Created when user chooses a username."""
    __tablename__ = "ap_actor_profiles"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # One AP identity per user; profile dies with the user row.
    user_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("users.id", ondelete="CASCADE"),
        unique=True, nullable=False,
    )
    preferred_username: Mapped[str] = mapped_column(String(64), unique=True, nullable=False)
    display_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    summary: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Keypair used for HTTP Signatures on outgoing deliveries
    # (private_key_pem is read by the AP delivery handler).
    public_key_pem: Mapped[str] = mapped_column(Text, nullable=False)
    private_key_pem: Mapped[str] = mapped_column(Text, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships
    # NOTE(review): lazy="dynamic" relationships are awkward under asyncio
    # sessions — confirm these aren't iterated directly in async code.
    user = relationship("User", backref="actor_profile", uselist=False, lazy="selectin")
    activities = relationship("APActivity", back_populates="actor_profile", lazy="dynamic")
    followers = relationship("APFollower", back_populates="actor_profile", lazy="dynamic")

    # NOTE(review): user_id/preferred_username already carry unique=True on
    # the column; these unique indexes look redundant — confirm intent.
    __table_args__ = (
        Index("ix_ap_actor_user_id", "user_id", unique=True),
        Index("ix_ap_actor_username", "preferred_username", unique=True),
    )

    def __repr__(self) -> str:
        return f"<ActorProfile {self.id} @{self.preferred_username}>"
|
||||
|
||||
|
||||
class APActivity(Base):
    """An ActivityPub activity (local or remote)."""
    __tablename__ = "ap_activities"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Globally unique AP id URI of the activity.
    activity_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
    # e.g. Create / Update / Delete (see the delivery handler's Tombstone path).
    activity_type: Mapped[str] = mapped_column(String(64), nullable=False)
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    object_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
    # Raw AP object payload; merged into the delivered JSON-LD.
    object_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    published: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    signature: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    is_local: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")

    # Link back to originating domain object (e.g. source_type='post', source_id=42)
    source_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
    source_id: Mapped[int | None] = mapped_column(Integer, nullable=True)

    # IPFS content-addressed copy of the activity
    ipfs_cid: Mapped[str | None] = mapped_column(String(128), nullable=True)

    # Anchoring (filled later when batched into a merkle tree)
    anchor_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_anchors.id", ondelete="SET NULL"), nullable=True,
    )

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships
    actor_profile = relationship("ActorProfile", back_populates="activities")

    __table_args__ = (
        Index("ix_ap_activity_actor", "actor_profile_id"),
        Index("ix_ap_activity_source", "source_type", "source_id"),
        Index("ix_ap_activity_published", "published"),
    )

    def __repr__(self) -> str:
        return f"<APActivity {self.id} {self.activity_type}>"
|
||||
|
||||
|
||||
class APFollower(Base):
    """A remote follower of a local actor."""
    __tablename__ = "ap_followers"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    # presumably user@domain handle of the remote follower — confirm format.
    follower_acct: Mapped[str] = mapped_column(String(512), nullable=False)
    # Inbox URL posted to by the delivery handler (deduped across followers).
    follower_inbox: Mapped[str] = mapped_column(String(512), nullable=False)
    follower_actor_url: Mapped[str] = mapped_column(String(512), nullable=False)
    follower_public_key: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships
    actor_profile = relationship("ActorProfile", back_populates="followers")

    # A remote account may follow a given local actor at most once.
    __table_args__ = (
        UniqueConstraint("actor_profile_id", "follower_acct", name="uq_follower_acct"),
        Index("ix_ap_follower_actor", "actor_profile_id"),
    )

    def __repr__(self) -> str:
        return f"<APFollower {self.id} {self.follower_acct}>"
|
||||
|
||||
|
||||
class APInboxItem(Base):
    """Raw incoming AP activity, stored for async processing."""
    __tablename__ = "ap_inbox_items"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Local recipient actor; items die with the actor profile.
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    # Verbatim incoming JSON — kept so processing can be replayed.
    raw_json: Mapped[dict] = mapped_column(JSONB, nullable=False)
    activity_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
    from_actor: Mapped[str | None] = mapped_column(String(512), nullable=True)
    # Processing state; defaults to "pending" — other values are set by the
    # async processor (not visible here — confirm the full state set).
    state: Mapped[str] = mapped_column(
        String(20), nullable=False, default="pending", server_default="pending",
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    processed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)

    __table_args__ = (
        Index("ix_ap_inbox_state", "state"),
        Index("ix_ap_inbox_actor", "actor_profile_id"),
    )

    def __repr__(self) -> str:
        return f"<APInboxItem {self.id} {self.activity_type} [{self.state}]>"
|
||||
|
||||
|
||||
class APAnchor(Base):
    """OpenTimestamps anchoring batch — merkle tree of activities."""
    __tablename__ = "ap_anchors"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Hex digest at the root of the batch's merkle tree — TODO confirm encoding.
    merkle_root: Mapped[str] = mapped_column(String(128), nullable=False)
    # IPFS CIDs for the serialized tree and the OTS proof file.
    tree_ipfs_cid: Mapped[str | None] = mapped_column(String(128), nullable=True)
    ots_proof_cid: Mapped[str | None] = mapped_column(String(128), nullable=True)
    activity_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    # Filled once the timestamp is confirmed on-chain.
    confirmed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    bitcoin_txid: Mapped[str | None] = mapped_column(String(128), nullable=True)

    def __repr__(self) -> str:
        return f"<APAnchor {self.id} activities={self.activity_count}>"
|
||||
|
||||
|
||||
class IPFSPin(Base):
    """Tracks content stored on IPFS — used by all domains."""
    __tablename__ = "ipfs_pins"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Hash of the original content (pre-IPFS) — TODO confirm algorithm.
    content_hash: Mapped[str] = mapped_column(String(128), nullable=False)
    # One row per pinned CID.
    ipfs_cid: Mapped[str] = mapped_column(String(128), nullable=False, unique=True)
    pin_type: Mapped[str] = mapped_column(String(64), nullable=False)
    # Loose back-reference to the originating domain row (same convention
    # as APActivity.source_type/source_id).
    source_type: Mapped[str | None] = mapped_column(String(64), nullable=True)
    source_id: Mapped[int | None] = mapped_column(Integer, nullable=True)
    size_bytes: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # NOTE(review): ipfs_cid already has unique=True on the column; the
    # unique index below looks redundant — confirm intent.
    __table_args__ = (
        Index("ix_ipfs_pin_source", "source_type", "source_id"),
        Index("ix_ipfs_pin_cid", "ipfs_cid", unique=True),
    )

    def __repr__(self) -> str:
        return f"<IPFSPin {self.id} {self.ipfs_cid[:16]}...>"
|
||||
|
||||
|
||||
class RemoteActor(Base):
    """Cached profile of a remote actor we interact with."""
    __tablename__ = "ap_remote_actors"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Canonical ActivityPub actor IRI; one cache row per remote actor.
    actor_url: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
    inbox_url: Mapped[str] = mapped_column(String(512), nullable=False)
    # Optional sharedInbox endpoint for instance-level delivery.
    shared_inbox_url: Mapped[str | None] = mapped_column(String(512), nullable=True)
    preferred_username: Mapped[str] = mapped_column(String(255), nullable=False)
    display_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    summary: Mapped[str | None] = mapped_column(Text, nullable=True)
    icon_url: Mapped[str | None] = mapped_column(String(512), nullable=True)
    # Actor's PEM-encoded public key (presumably for HTTP-signature
    # verification — confirm against the inbox handler).
    public_key_pem: Mapped[str | None] = mapped_column(Text, nullable=True)
    domain: Mapped[str] = mapped_column(String(255), nullable=False)
    # fetched_at: last time the profile was (re)fetched; created_at: first seen.
    fetched_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    __table_args__ = (
        # NOTE(review): actor_url already has unique=True on the column; this
        # unique index duplicates that constraint. Kept to match the migration.
        Index("ix_ap_remote_actor_url", "actor_url", unique=True),
        Index("ix_ap_remote_actor_domain", "domain"),
    )

    def __repr__(self) -> str:
        return f"<RemoteActor {self.id} {self.preferred_username}@{self.domain}>"
|
||||
|
||||
|
||||
class APFollowing(Base):
    """Outbound follow: local actor → remote actor."""
    __tablename__ = "ap_following"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Local side of the follow edge.
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    # Remote side of the follow edge.
    remote_actor_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False,
    )
    # Follow lifecycle; starts "pending" (accepted_at is set when/if the
    # remote side accepts).
    state: Mapped[str] = mapped_column(
        String(20), nullable=False, default="pending", server_default="pending",
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    accepted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)

    # Relationships
    actor_profile = relationship("ActorProfile")
    remote_actor = relationship("RemoteActor")

    __table_args__ = (
        # At most one follow edge per (local, remote) pair.
        UniqueConstraint("actor_profile_id", "remote_actor_id", name="uq_following"),
        Index("ix_ap_following_actor", "actor_profile_id"),
        Index("ix_ap_following_remote", "remote_actor_id"),
    )

    def __repr__(self) -> str:
        return f"<APFollowing {self.id} [{self.state}]>"
|
||||
|
||||
|
||||
class APRemotePost(Base):
    """A federated post ingested from a remote actor."""
    __tablename__ = "ap_remote_posts"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    remote_actor_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=False,
    )
    # AP identifiers: the wrapping activity IRI and the object IRI; both
    # unique so re-delivery of the same activity/object is rejected.
    activity_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
    object_id: Mapped[str] = mapped_column(String(512), unique=True, nullable=False)
    object_type: Mapped[str] = mapped_column(String(64), nullable=False, default="Note")
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    summary: Mapped[str | None] = mapped_column(Text, nullable=True)
    url: Mapped[str | None] = mapped_column(String(512), nullable=True)
    # Raw AP "attachment" / "tag" JSON, stored as received.
    attachment_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    tag_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    # Threading: IRI of the parent object and the conversation/context IRI.
    in_reply_to: Mapped[str | None] = mapped_column(String(512), nullable=True)
    conversation: Mapped[str | None] = mapped_column(String(512), nullable=True)
    # "published" comes from the remote payload and may be absent.
    published: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    fetched_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships
    remote_actor = relationship("RemoteActor")

    __table_args__ = (
        Index("ix_ap_remote_post_actor", "remote_actor_id"),
        Index("ix_ap_remote_post_published", "published"),
        # NOTE(review): object_id already has unique=True on the column; this
        # unique index is a duplicate. Kept to stay in sync with the migration.
        Index("ix_ap_remote_post_object", "object_id", unique=True),
    )

    def __repr__(self) -> str:
        return f"<APRemotePost {self.id} {self.object_type}>"
|
||||
|
||||
|
||||
class APLocalPost(Base):
    """A native post composed in the federation UI."""
    __tablename__ = "ap_local_posts"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Author: local actor profile; posts are removed with their author.
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    content: Mapped[str] = mapped_column(Text, nullable=False)
    # Audience selector; defaults to "public" both in Python and in the DB.
    visibility: Mapped[str] = mapped_column(
        String(20), nullable=False, default="public", server_default="public",
    )
    # IRI of the object this post replies to, if any.
    in_reply_to: Mapped[str | None] = mapped_column(String(512), nullable=True)
    published: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )
    # Refreshed on every UPDATE via onupdate=func.now().
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now(),
    )

    # Relationships
    actor_profile = relationship("ActorProfile")

    __table_args__ = (
        Index("ix_ap_local_post_actor", "actor_profile_id"),
        Index("ix_ap_local_post_published", "published"),
    )

    def __repr__(self) -> str:
        return f"<APLocalPost {self.id}>"
|
||||
|
||||
|
||||
class APInteraction(Base):
    """Like or boost (local or remote)."""
    __tablename__ = "ap_interactions"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Who interacted: a local actor profile OR a remote actor. Both are
    # nullable — presumably exactly one is set per row, but there is no
    # CHECK constraint enforcing it; confirm at the service layer.
    actor_profile_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=True,
    )
    remote_actor_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_remote_actors.id", ondelete="CASCADE"), nullable=True,
    )
    # Polymorphic target: post_type selects the table ("local"/"remote"),
    # post_id is the row id there — no FK, resolved by convention.
    post_type: Mapped[str] = mapped_column(String(20), nullable=False)  # local/remote
    post_id: Mapped[int] = mapped_column(Integer, nullable=False)
    interaction_type: Mapped[str] = mapped_column(String(20), nullable=False)  # like/boost
    # AP activity IRI, when the interaction was federated.
    activity_id: Mapped[str | None] = mapped_column(String(512), nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    __table_args__ = (
        Index("ix_ap_interaction_post", "post_type", "post_id"),
        Index("ix_ap_interaction_actor", "actor_profile_id"),
        Index("ix_ap_interaction_remote", "remote_actor_id"),
    )

    def __repr__(self) -> str:
        return f"<APInteraction {self.id} {self.interaction_type}>"
|
||||
|
||||
|
||||
class APNotification(Base):
    """Notification for a local actor."""
    __tablename__ = "ap_notifications"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Recipient; notifications die with the profile.
    actor_profile_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="CASCADE"), nullable=False,
    )
    notification_type: Mapped[str] = mapped_column(String(20), nullable=False)
    # Sender is either a remote actor or a local profile; both SET NULL on
    # delete so the notification survives the sender's removal.
    from_remote_actor_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_remote_actors.id", ondelete="SET NULL"), nullable=True,
    )
    from_actor_profile_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_actor_profiles.id", ondelete="SET NULL"), nullable=True,
    )
    # Optional subject of the notification: a local activity or a remote post.
    target_activity_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_activities.id", ondelete="SET NULL"), nullable=True,
    )
    target_remote_post_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("ap_remote_posts.id", ondelete="SET NULL"), nullable=True,
    )
    read: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now(),
    )

    # Relationships — explicit foreign_keys because two FKs point at
    # ap_actor_profiles (recipient vs. sender).
    actor_profile = relationship("ActorProfile", foreign_keys=[actor_profile_id])
    from_remote_actor = relationship("RemoteActor")
    from_actor_profile = relationship("ActorProfile", foreign_keys=[from_actor_profile_id])

    __table_args__ = (
        Index("ix_ap_notification_actor", "actor_profile_id"),
        # Composite index serves the "unread for actor" query.
        Index("ix_ap_notification_read", "actor_profile_id", "read"),
        Index("ix_ap_notification_created", "created_at"),
    )
|
||||
@@ -10,6 +10,7 @@ blinker==1.9.0
|
||||
Brotli==1.1.0
|
||||
certifi==2025.10.5
|
||||
click==8.3.0
|
||||
cryptography>=41.0
|
||||
exceptiongroup==1.3.0
|
||||
Flask==3.1.2
|
||||
greenlet==3.2.4
|
||||
|
||||
@@ -5,6 +5,9 @@ calendar-domain tables on behalf of other domains.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
|
||||
from sqlalchemy import select, update, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
@@ -51,6 +54,12 @@ def _ticket_to_dto(ticket: Ticket) -> TicketDTO:
|
||||
entry = getattr(ticket, "entry", None)
|
||||
tt = getattr(ticket, "ticket_type", None)
|
||||
cal = getattr(entry, "calendar", None) if entry else None
|
||||
# Price: ticket type cost if available, else entry ticket_price
|
||||
price = None
|
||||
if tt and tt.cost is not None:
|
||||
price = tt.cost
|
||||
elif entry and entry.ticket_price is not None:
|
||||
price = entry.ticket_price
|
||||
return TicketDTO(
|
||||
id=ticket.id,
|
||||
code=ticket.code,
|
||||
@@ -62,6 +71,11 @@ def _ticket_to_dto(ticket: Ticket) -> TicketDTO:
|
||||
calendar_name=cal.name if cal else None,
|
||||
created_at=ticket.created_at,
|
||||
checked_in_at=ticket.checked_in_at,
|
||||
entry_id=entry.id if entry else None,
|
||||
ticket_type_id=ticket.ticket_type_id,
|
||||
price=price,
|
||||
order_id=ticket.order_id,
|
||||
calendar_container_id=cal.container_id if cal else None,
|
||||
)
|
||||
|
||||
|
||||
@@ -156,6 +170,75 @@ class SqlCalendarService:
|
||||
)
|
||||
return set(result.scalars().all())
|
||||
|
||||
    async def visible_entries_for_period(
        self, session: AsyncSession, calendar_id: int,
        period_start: datetime, period_end: datetime,
        *, user_id: int | None, is_admin: bool, session_id: str | None,
    ) -> list[CalendarEntryDTO]:
        """Return visible entries for a calendar in a date range.

        Visibility rules:
        - Everyone sees confirmed entries.
        - Current user/session sees their own entries (any state).
        - Admins also see ordered + provisional entries for all users.

        The range is half-open on start_at: >= period_start, < period_end.
        Results are deduplicated by entry id and sorted by start time.
        """
        # User/session entries (any state)
        user_entries: list[CalendarEntry] = []
        if user_id or session_id:
            conditions = [
                CalendarEntry.calendar_id == calendar_id,
                CalendarEntry.deleted_at.is_(None),
                CalendarEntry.start_at >= period_start,
                CalendarEntry.start_at < period_end,
            ]
            # Prefer the user identity; fall back to the anonymous session.
            if user_id:
                conditions.append(CalendarEntry.user_id == user_id)
            elif session_id:
                conditions.append(CalendarEntry.session_id == session_id)
            result = await session.execute(
                select(CalendarEntry).where(*conditions)
                .options(selectinload(CalendarEntry.calendar))
            )
            user_entries = list(result.scalars().all())

        # Confirmed entries for everyone
        result = await session.execute(
            select(CalendarEntry).where(
                CalendarEntry.calendar_id == calendar_id,
                CalendarEntry.state == "confirmed",
                CalendarEntry.deleted_at.is_(None),
                CalendarEntry.start_at >= period_start,
                CalendarEntry.start_at < period_end,
            ).options(selectinload(CalendarEntry.calendar))
        )
        confirmed_entries = list(result.scalars().all())

        # Admin: ordered + provisional for everyone
        admin_entries: list[CalendarEntry] = []
        if is_admin:
            result = await session.execute(
                select(CalendarEntry).where(
                    CalendarEntry.calendar_id == calendar_id,
                    CalendarEntry.state.in_(("ordered", "provisional")),
                    CalendarEntry.deleted_at.is_(None),
                    CalendarEntry.start_at >= period_start,
                    CalendarEntry.start_at < period_end,
                ).options(selectinload(CalendarEntry.calendar))
            )
            admin_entries = list(result.scalars().all())

        # Merge, deduplicate, sort — later writes win per id, so the
        # viewer's own entries take precedence over the general sets.
        entries_by_id: dict[int, CalendarEntry] = {}
        for e in confirmed_entries:
            entries_by_id[e.id] = e
        for e in admin_entries:
            entries_by_id[e.id] = e
        for e in user_entries:
            entries_by_id[e.id] = e

        # Entries lacking a start time sort as if they began at period_start.
        merged = sorted(entries_by_id.values(), key=lambda e: e.start_at or period_start)
        return [_entry_to_dto(e) for e in merged]
|
||||
|
||||
async def associated_entries(
|
||||
self, session: AsyncSession, content_type: str, content_id: int, page: int,
|
||||
) -> tuple[list[CalendarEntryDTO], bool]:
|
||||
@@ -293,10 +376,18 @@ class SqlCalendarService:
|
||||
async def adopt_entries_for_user(
|
||||
self, session: AsyncSession, user_id: int, session_id: str,
|
||||
) -> None:
|
||||
"""Adopt anonymous calendar entries for a logged-in user."""
|
||||
"""Adopt anonymous calendar entries for a logged-in user.
|
||||
|
||||
Only deletes stale *pending* entries for the user — confirmed/ordered
|
||||
entries must be preserved.
|
||||
"""
|
||||
await session.execute(
|
||||
update(CalendarEntry)
|
||||
.where(CalendarEntry.deleted_at.is_(None), CalendarEntry.user_id == user_id)
|
||||
.where(
|
||||
CalendarEntry.deleted_at.is_(None),
|
||||
CalendarEntry.user_id == user_id,
|
||||
CalendarEntry.state == "pending",
|
||||
)
|
||||
.values(deleted_at=func.now())
|
||||
)
|
||||
cal_result = await session.execute(
|
||||
@@ -356,3 +447,184 @@ class SqlCalendarService:
|
||||
.where(*filters)
|
||||
.values(state="provisional")
|
||||
)
|
||||
|
||||
# -- ticket methods -------------------------------------------------------
|
||||
|
||||
def _ticket_query_options(self):
|
||||
return [
|
||||
selectinload(Ticket.entry).selectinload(CalendarEntry.calendar),
|
||||
selectinload(Ticket.ticket_type),
|
||||
]
|
||||
|
||||
async def pending_tickets(
|
||||
self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
|
||||
) -> list[TicketDTO]:
|
||||
"""Reserved tickets for the given identity (cart line items)."""
|
||||
filters = [Ticket.state == "reserved"]
|
||||
if user_id is not None:
|
||||
filters.append(Ticket.user_id == user_id)
|
||||
elif session_id is not None:
|
||||
filters.append(Ticket.session_id == session_id)
|
||||
else:
|
||||
return []
|
||||
|
||||
result = await session.execute(
|
||||
select(Ticket)
|
||||
.where(*filters)
|
||||
.order_by(Ticket.created_at.asc())
|
||||
.options(*self._ticket_query_options())
|
||||
)
|
||||
return [_ticket_to_dto(t) for t in result.scalars().all()]
|
||||
|
||||
    async def tickets_for_page(
        self, session: AsyncSession, page_id: int, *,
        user_id: int | None, session_id: str | None,
    ) -> list[TicketDTO]:
        """Reserved tickets scoped to a page (via entry → calendar → container_id)."""
        # Calendars attached to this page (live only)...
        cal_ids = select(Calendar.id).where(
            Calendar.container_type == "page",
            Calendar.container_id == page_id,
            Calendar.deleted_at.is_(None),
        ).scalar_subquery()

        # ...and the live entries on those calendars.
        entry_ids = select(CalendarEntry.id).where(
            CalendarEntry.calendar_id.in_(cal_ids),
            CalendarEntry.deleted_at.is_(None),
        ).scalar_subquery()

        filters = [
            Ticket.state == "reserved",
            Ticket.entry_id.in_(entry_ids),
        ]
        # Scope to the caller's identity; no identity means no tickets.
        if user_id is not None:
            filters.append(Ticket.user_id == user_id)
        elif session_id is not None:
            filters.append(Ticket.session_id == session_id)
        else:
            return []

        result = await session.execute(
            select(Ticket)
            .where(*filters)
            .order_by(Ticket.created_at.asc())
            .options(*self._ticket_query_options())
        )
        return [_ticket_to_dto(t) for t in result.scalars().all()]
|
||||
|
||||
async def claim_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int, user_id: int | None,
|
||||
session_id: str | None, page_post_id: int | None,
|
||||
) -> None:
|
||||
"""Set order_id on reserved tickets at checkout."""
|
||||
filters = [Ticket.state == "reserved"]
|
||||
if user_id is not None:
|
||||
filters.append(Ticket.user_id == user_id)
|
||||
elif session_id is not None:
|
||||
filters.append(Ticket.session_id == session_id)
|
||||
|
||||
if page_post_id is not None:
|
||||
cal_ids = select(Calendar.id).where(
|
||||
Calendar.container_type == "page",
|
||||
Calendar.container_id == page_post_id,
|
||||
Calendar.deleted_at.is_(None),
|
||||
).scalar_subquery()
|
||||
entry_ids = select(CalendarEntry.id).where(
|
||||
CalendarEntry.calendar_id.in_(cal_ids),
|
||||
CalendarEntry.deleted_at.is_(None),
|
||||
).scalar_subquery()
|
||||
filters.append(Ticket.entry_id.in_(entry_ids))
|
||||
|
||||
await session.execute(
|
||||
update(Ticket).where(*filters).values(order_id=order_id)
|
||||
)
|
||||
|
||||
async def confirm_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> None:
|
||||
"""Reserved → confirmed on payment."""
|
||||
await session.execute(
|
||||
update(Ticket)
|
||||
.where(Ticket.order_id == order_id, Ticket.state == "reserved")
|
||||
.values(state="confirmed")
|
||||
)
|
||||
|
||||
async def get_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> list[TicketDTO]:
|
||||
"""Tickets for a given order (checkout return display)."""
|
||||
result = await session.execute(
|
||||
select(Ticket)
|
||||
.where(Ticket.order_id == order_id)
|
||||
.order_by(Ticket.created_at.asc())
|
||||
.options(*self._ticket_query_options())
|
||||
)
|
||||
return [_ticket_to_dto(t) for t in result.scalars().all()]
|
||||
|
||||
async def adopt_tickets_for_user(
|
||||
self, session: AsyncSession, user_id: int, session_id: str,
|
||||
) -> None:
|
||||
"""Migrate anonymous reserved tickets to user on login."""
|
||||
result = await session.execute(
|
||||
select(Ticket).where(
|
||||
Ticket.session_id == session_id,
|
||||
Ticket.state == "reserved",
|
||||
)
|
||||
)
|
||||
for ticket in result.scalars().all():
|
||||
ticket.user_id = user_id
|
||||
|
||||
async def adjust_ticket_quantity(
|
||||
self, session: AsyncSession, entry_id: int, count: int, *,
|
||||
user_id: int | None, session_id: str | None,
|
||||
ticket_type_id: int | None = None,
|
||||
) -> int:
|
||||
"""Adjust reserved ticket count to target. Returns new count."""
|
||||
import uuid
|
||||
|
||||
count = max(count, 0)
|
||||
|
||||
# Current reserved count
|
||||
filters = [
|
||||
Ticket.entry_id == entry_id,
|
||||
Ticket.state == "reserved",
|
||||
]
|
||||
if user_id is not None:
|
||||
filters.append(Ticket.user_id == user_id)
|
||||
elif session_id is not None:
|
||||
filters.append(Ticket.session_id == session_id)
|
||||
else:
|
||||
return 0
|
||||
if ticket_type_id is not None:
|
||||
filters.append(Ticket.ticket_type_id == ticket_type_id)
|
||||
|
||||
current = await session.scalar(
|
||||
select(func.count(Ticket.id)).where(*filters)
|
||||
) or 0
|
||||
|
||||
if count > current:
|
||||
# Create tickets
|
||||
for _ in range(count - current):
|
||||
ticket = Ticket(
|
||||
entry_id=entry_id,
|
||||
ticket_type_id=ticket_type_id,
|
||||
user_id=user_id,
|
||||
session_id=session_id,
|
||||
code=uuid.uuid4().hex,
|
||||
state="reserved",
|
||||
)
|
||||
session.add(ticket)
|
||||
await session.flush()
|
||||
elif count < current:
|
||||
# Cancel newest tickets
|
||||
to_cancel = current - count
|
||||
result = await session.execute(
|
||||
select(Ticket)
|
||||
.where(*filters)
|
||||
.order_by(Ticket.created_at.desc())
|
||||
.limit(to_cancel)
|
||||
)
|
||||
for ticket in result.scalars().all():
|
||||
ticket.state = "cancelled"
|
||||
await session.flush()
|
||||
|
||||
return count
|
||||
|
||||
@@ -93,6 +93,23 @@ class SqlCartService:
|
||||
calendar_count = len(cal_entries)
|
||||
calendar_total = sum(Decimal(str(e.cost or 0)) for e in cal_entries if e.cost is not None)
|
||||
|
||||
# --- tickets ---
|
||||
if page_post_id is not None:
|
||||
tickets = await services.calendar.tickets_for_page(
|
||||
session, page_post_id,
|
||||
user_id=user_id,
|
||||
session_id=session_id,
|
||||
)
|
||||
else:
|
||||
tickets = await services.calendar.pending_tickets(
|
||||
session,
|
||||
user_id=user_id,
|
||||
session_id=session_id,
|
||||
)
|
||||
|
||||
ticket_count = len(tickets)
|
||||
ticket_total = sum(Decimal(str(t.price or 0)) for t in tickets)
|
||||
|
||||
items = [_item_to_dto(ci) for ci in cart_items]
|
||||
|
||||
return CartSummaryDTO(
|
||||
@@ -101,6 +118,8 @@ class SqlCartService:
|
||||
calendar_count=calendar_count,
|
||||
calendar_total=calendar_total,
|
||||
items=items,
|
||||
ticket_count=ticket_count,
|
||||
ticket_total=ticket_total,
|
||||
)
|
||||
|
||||
async def cart_items(
|
||||
|
||||
1445
services/federation_impl.py
Normal file
1445
services/federation_impl.py
Normal file
File diff suppressed because it is too large
Load Diff
82
services/federation_publish.py
Normal file
82
services/federation_publish.py
Normal file
@@ -0,0 +1,82 @@
|
||||
"""Inline federation publication — called at write time, not via async handler.
|
||||
|
||||
Replaces the old pattern where emit_event("post.published") → async handler →
|
||||
publish_activity(). Now the originating service calls try_publish() directly,
|
||||
which creates the APActivity in the same DB transaction. AP delivery
|
||||
(federation.activity_created → inbox POST) stays async.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.services.registry import services
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def try_publish(
    session: AsyncSession,
    *,
    user_id: int | None,
    activity_type: str,
    object_type: str,
    object_data: dict,
    source_type: str,
    source_id: int,
) -> None:
    """Publish an AP activity if federation is available and user has a profile.

    Safe to call from any app — returns silently if federation isn't wired
    or the user has no actor profile. ``object_data`` is the AP object
    payload; it is NOT mutated — a copy carrying a stable ``id`` is sent.
    """
    if not services.has("federation"):
        return

    if not user_id:
        return

    actor = await services.federation.get_actor_by_user_id(session, user_id)
    if not actor:
        return

    # Dedup: don't re-Create if already published, don't re-Delete if already deleted
    existing = await services.federation.get_activity_for_source(
        session, source_type, source_id,
    )
    if existing:
        if activity_type == "Create" and existing.activity_type != "Delete":
            return  # already published (allow re-Create after Delete/unpublish)
        if activity_type == "Update" and existing.activity_type == "Update":
            return  # already updated (Ghost fires duplicate webhooks)
        if activity_type == "Delete" and existing.activity_type == "Delete":
            return  # already deleted
    elif activity_type in ("Delete", "Update"):
        return  # never published, nothing to delete/update

    # Stable object ID: same source always gets the same object id so
    # Mastodon treats Create/Update/Delete as the same post.
    # Fix: build a copy rather than mutating the caller's dict in place —
    # the old code wrote object_data["id"] into the argument as a side effect.
    domain = os.getenv("AP_DOMAIN", "rose-ash.com")
    object_data = {
        **object_data,
        "id": (
            f"https://{domain}/users/{actor.preferred_username}"
            f"/objects/{source_type.lower()}/{source_id}"
        ),
    }

    try:
        await services.federation.publish_activity(
            session,
            actor_user_id=user_id,
            activity_type=activity_type,
            object_type=object_type,
            object_data=object_data,
            source_type=source_type,
            source_id=source_id,
        )
        log.info(
            "Published %s/%s for %s#%d by user %d",
            activity_type, object_type, source_type, source_id, user_id,
        )
    except Exception:
        log.exception("Failed to publish activity for %s#%d", source_type, source_id)
|
||||
@@ -21,6 +21,7 @@ from shared.contracts.protocols import (
|
||||
CalendarService,
|
||||
MarketService,
|
||||
CartService,
|
||||
FederationService,
|
||||
)
|
||||
|
||||
|
||||
@@ -37,6 +38,7 @@ class _ServiceRegistry:
|
||||
self._calendar: CalendarService | None = None
|
||||
self._market: MarketService | None = None
|
||||
self._cart: CartService | None = None
|
||||
self._federation: FederationService | None = None
|
||||
|
||||
# -- blog -----------------------------------------------------------------
|
||||
@property
|
||||
@@ -82,6 +84,17 @@ class _ServiceRegistry:
|
||||
def cart(self, impl: CartService) -> None:
|
||||
self._cart = impl
|
||||
|
||||
# -- federation -----------------------------------------------------------
|
||||
    @property
    def federation(self) -> FederationService:
        """The registered FederationService; raises if no app has wired one in."""
        if self._federation is None:
            raise RuntimeError("FederationService not registered")
        return self._federation
|
||||
|
||||
    @federation.setter
    def federation(self, impl: FederationService) -> None:
        # Installed at startup by whichever app owns the federation domain.
        self._federation = impl
|
||||
|
||||
# -- introspection --------------------------------------------------------
|
||||
def has(self, name: str) -> bool:
|
||||
"""Check whether a domain service is registered."""
|
||||
|
||||
@@ -18,6 +18,9 @@ from shared.contracts.dtos import (
|
||||
ProductDTO,
|
||||
CartItemDTO,
|
||||
CartSummaryDTO,
|
||||
ActorProfileDTO,
|
||||
APActivityDTO,
|
||||
APFollowerDTO,
|
||||
)
|
||||
|
||||
|
||||
@@ -31,6 +34,9 @@ class StubBlogService:
|
||||
async def get_posts_by_ids(self, session: AsyncSession, ids: list[int]) -> list[PostDTO]:
|
||||
return []
|
||||
|
||||
async def search_posts(self, session, query, page=1, per_page=10):
|
||||
return [], 0
|
||||
|
||||
|
||||
class StubCalendarService:
|
||||
async def calendars_for_container(
|
||||
@@ -98,6 +104,48 @@ class StubCalendarService:
|
||||
) -> dict[int, list[CalendarEntryDTO]]:
|
||||
return {}
|
||||
|
||||
async def pending_tickets(
|
||||
self, session: AsyncSession, *, user_id: int | None, session_id: str | None,
|
||||
) -> list[TicketDTO]:
|
||||
return []
|
||||
|
||||
async def tickets_for_page(
|
||||
self, session: AsyncSession, page_id: int, *, user_id: int | None, session_id: str | None,
|
||||
) -> list[TicketDTO]:
|
||||
return []
|
||||
|
||||
async def claim_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int, user_id: int | None,
|
||||
session_id: str | None, page_post_id: int | None,
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
async def confirm_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
async def get_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> list[TicketDTO]:
|
||||
return []
|
||||
|
||||
async def adopt_tickets_for_user(
|
||||
self, session: AsyncSession, user_id: int, session_id: str,
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
async def adjust_ticket_quantity(
|
||||
self, session, entry_id, count, *, user_id, session_id, ticket_type_id=None,
|
||||
) -> int:
|
||||
return 0
|
||||
|
||||
async def entry_ids_for_content(self, session, content_type, content_id):
|
||||
return set()
|
||||
|
||||
async def visible_entries_for_period(self, session, calendar_id, period_start, period_end, *, user_id, is_admin, session_id):
|
||||
return []
|
||||
|
||||
|
||||
class StubMarketService:
|
||||
async def marketplaces_for_container(
|
||||
@@ -137,3 +185,107 @@ class StubCartService:
|
||||
self, session: AsyncSession, user_id: int, session_id: str,
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class StubFederationService:
    """No-op federation stub for apps that don't own federation.

    Read paths return empty/None defaults; write paths that would need the
    real service raise RuntimeError, while best-effort notifications are
    silently ignored (pass).
    """

    # -- actors ----------------------------------------------------------
    async def get_actor_by_username(self, session, username):
        return None

    async def get_actor_by_user_id(self, session, user_id):
        return None

    async def create_actor(self, session, user_id, preferred_username,
                           display_name=None, summary=None):
        raise RuntimeError("FederationService not available")

    async def username_available(self, session, username):
        # Conservative: never claim a username is free without the real service.
        return False

    # -- activities / outbox ----------------------------------------------
    async def publish_activity(self, session, *, actor_user_id, activity_type,
                               object_type, object_data, source_type=None,
                               source_id=None):
        return None

    async def get_activity(self, session, activity_id):
        return None

    async def get_outbox(self, session, username, page=1, per_page=20):
        return [], 0

    async def get_activity_for_source(self, session, source_type, source_id):
        return None

    # -- followers ---------------------------------------------------------
    async def get_followers(self, session, username):
        return []

    async def add_follower(self, session, username, follower_acct, follower_inbox,
                           follower_actor_url, follower_public_key=None):
        raise RuntimeError("FederationService not available")

    async def remove_follower(self, session, username, follower_acct):
        return False

    # -- remote actors / following -----------------------------------------
    async def get_or_fetch_remote_actor(self, session, actor_url):
        return None

    async def search_remote_actor(self, session, acct):
        return None

    async def send_follow(self, session, local_username, remote_actor_url):
        raise RuntimeError("FederationService not available")

    async def get_following(self, session, username, page=1, per_page=20):
        return [], 0

    async def accept_follow_response(self, session, local_username, remote_actor_url):
        pass

    async def unfollow(self, session, local_username, remote_actor_url):
        pass

    # -- remote posts / timelines -------------------------------------------
    async def ingest_remote_post(self, session, remote_actor_id, activity_json, object_json):
        pass

    async def delete_remote_post(self, session, object_id):
        pass

    async def get_remote_post(self, session, object_id):
        return None

    async def get_home_timeline(self, session, actor_profile_id, before=None, limit=20):
        return []

    async def get_public_timeline(self, session, before=None, limit=20):
        return []

    # -- local posts ---------------------------------------------------------
    async def create_local_post(self, session, actor_profile_id, content, visibility="public", in_reply_to=None):
        raise RuntimeError("FederationService not available")

    async def delete_local_post(self, session, actor_profile_id, post_id):
        raise RuntimeError("FederationService not available")

    # -- interactions ----------------------------------------------------------
    async def like_post(self, session, actor_profile_id, object_id, author_inbox):
        pass

    async def unlike_post(self, session, actor_profile_id, object_id, author_inbox):
        pass

    async def boost_post(self, session, actor_profile_id, object_id, author_inbox):
        pass

    async def unboost_post(self, session, actor_profile_id, object_id, author_inbox):
        pass

    # -- notifications / stats ----------------------------------------------------
    async def get_notifications(self, session, actor_profile_id, before=None, limit=20):
        return []

    async def unread_notification_count(self, session, actor_profile_id):
        return 0

    async def mark_notifications_read(self, session, actor_profile_id):
        pass

    async def get_stats(self, session):
        return {"actors": 0, "activities": 0, "followers": 0}
|
||||
|
||||
236
utils/anchoring.py
Normal file
236
utils/anchoring.py
Normal file
@@ -0,0 +1,236 @@
|
||||
"""Merkle tree construction and OpenTimestamps anchoring.
|
||||
|
||||
Ported from ~/art-dag/activity-pub/anchoring.py.
|
||||
Builds a SHA256 merkle tree from activity IDs, submits the root to
|
||||
OpenTimestamps for Bitcoin timestamping, and stores the tree + proof on IPFS.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import httpx
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models.federation import APActivity, APAnchor
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
OTS_SERVERS = [
|
||||
"https://a.pool.opentimestamps.org",
|
||||
"https://b.pool.opentimestamps.org",
|
||||
"https://a.pool.eternitywall.com",
|
||||
]
|
||||
|
||||
|
||||
def _sha256(data: str | bytes) -> str:
|
||||
"""SHA256 hex digest."""
|
||||
if isinstance(data, str):
|
||||
data = data.encode()
|
||||
return hashlib.sha256(data).hexdigest()
|
||||
|
||||
|
||||
def build_merkle_tree(items: list[str]) -> dict:
|
||||
"""Build a SHA256 merkle tree from a list of strings (activity IDs).
|
||||
|
||||
Returns:
|
||||
{
|
||||
"root": hex_str,
|
||||
"leaves": [hex_str, ...],
|
||||
"levels": [[hex_str, ...], ...], # bottom-up
|
||||
}
|
||||
"""
|
||||
if not items:
|
||||
raise ValueError("Cannot build merkle tree from empty list")
|
||||
|
||||
# Sort for deterministic ordering
|
||||
items = sorted(items)
|
||||
|
||||
# Leaf hashes
|
||||
leaves = [_sha256(item) for item in items]
|
||||
levels = [leaves[:]]
|
||||
|
||||
current = leaves[:]
|
||||
while len(current) > 1:
|
||||
next_level = []
|
||||
for i in range(0, len(current), 2):
|
||||
left = current[i]
|
||||
right = current[i + 1] if i + 1 < len(current) else left
|
||||
combined = _sha256(left + right)
|
||||
next_level.append(combined)
|
||||
levels.append(next_level)
|
||||
current = next_level
|
||||
|
||||
return {
|
||||
"root": current[0],
|
||||
"leaves": leaves,
|
||||
"levels": levels,
|
||||
}
|
||||
|
||||
|
||||
def get_merkle_proof(tree: dict, item: str) -> list[dict] | None:
|
||||
"""Generate a proof-of-membership for an item.
|
||||
|
||||
Returns a list of {sibling: hex, position: "left"|"right"} dicts,
|
||||
or None if the item is not in the tree.
|
||||
"""
|
||||
item_hash = _sha256(item)
|
||||
leaves = tree["leaves"]
|
||||
|
||||
try:
|
||||
idx = leaves.index(item_hash)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
proof = []
|
||||
for level in tree["levels"][:-1]: # skip root level
|
||||
if idx % 2 == 0:
|
||||
sibling_idx = idx + 1
|
||||
position = "right"
|
||||
else:
|
||||
sibling_idx = idx - 1
|
||||
position = "left"
|
||||
|
||||
if sibling_idx < len(level):
|
||||
proof.append({"sibling": level[sibling_idx], "position": position})
|
||||
else:
|
||||
proof.append({"sibling": level[idx], "position": position})
|
||||
|
||||
idx = idx // 2
|
||||
|
||||
return proof
|
||||
|
||||
|
||||
def verify_merkle_proof(item: str, proof: list[dict], root: str) -> bool:
|
||||
"""Verify a merkle proof against a root hash."""
|
||||
current = _sha256(item)
|
||||
for step in proof:
|
||||
sibling = step["sibling"]
|
||||
if step["position"] == "right":
|
||||
current = _sha256(current + sibling)
|
||||
else:
|
||||
current = _sha256(sibling + current)
|
||||
return current == root
|
||||
|
||||
|
||||
async def submit_to_opentimestamps(merkle_root: str) -> bytes | None:
    """Submit a hash to OpenTimestamps. Returns the (incomplete) OTS proof bytes.

    Tries each calendar server in OTS_SERVERS in order; the first HTTP 200
    response wins.  Returns None when every server fails.
    """
    # The calendars take the raw 32-byte digest, not its hex string form.
    root_bytes = bytes.fromhex(merkle_root)

    for server in OTS_SERVERS:
        try:
            async with httpx.AsyncClient(timeout=30) as client:
                resp = await client.post(
                    f"{server}/digest",
                    content=root_bytes,
                    headers={"Content-Type": "application/x-opentimestamps"},
                )
                if resp.status_code == 200:
                    log.info("OTS proof obtained from %s", server)
                    return resp.content
        except Exception:
            # Failures of public pools are routine — log quietly, try the next.
            log.debug("OTS server %s failed", server, exc_info=True)
            continue

    log.warning("All OTS servers failed for root %s", merkle_root)
    return None
|
||||
|
||||
|
||||
async def upgrade_ots_proof(proof_bytes: bytes) -> tuple[bytes, bool]:
    """Try to upgrade an incomplete OTS proof to a Bitcoin-confirmed one.

    Returns:
        (proof_bytes, is_confirmed) — the bytes may be rewritten once
        upgrading is actually implemented.
    """
    # Upgrading requires the `ots` CLI or the calendar HTTP API; neither is
    # wired up yet, so the proof is returned unchanged and unconfirmed.
    # TODO: Implement calendar-based upgrade polling.
    return proof_bytes, False
|
||||
|
||||
|
||||
async def create_anchor(
    session: AsyncSession,
    batch_size: int = 100,
) -> APAnchor | None:
    """Anchor a batch of un-anchored activities.

    1. Select activities without an anchor_id
    2. Build merkle tree from their activity_ids
    3. Store tree on IPFS
    4. Submit root to OpenTimestamps
    5. Store OTS proof on IPFS
    6. Create APAnchor record
    7. Link activities to anchor

    Returns the new APAnchor, or None when there is nothing to anchor.
    IPFS and OTS steps are best effort: their failure leaves the
    corresponding CID/proof columns NULL but the anchor is still created.
    """
    # Find un-anchored activities — oldest first, local-origin only.
    result = await session.execute(
        select(APActivity)
        .where(
            APActivity.anchor_id.is_(None),
            APActivity.is_local == True,  # noqa: E712
        )
        .order_by(APActivity.created_at.asc())
        .limit(batch_size)
    )
    activities = result.scalars().all()

    if not activities:
        log.debug("No un-anchored activities to process")
        return None

    activity_ids = [a.activity_id for a in activities]
    log.info("Anchoring %d activities", len(activity_ids))

    # Build merkle tree
    tree = build_merkle_tree(activity_ids)
    merkle_root = tree["root"]

    # Store tree on IPFS (best effort — anchoring proceeds if IPFS is down)
    tree_cid = None
    ots_proof_cid = None
    try:
        # NOTE(review): add_bytes is unused in this import; only add_json
        # and is_available are needed here.
        from shared.utils.ipfs_client import add_json, add_bytes, is_available
        if await is_available():
            tree_cid = await add_json({
                "root": merkle_root,
                "leaves": tree["leaves"],
                "activity_ids": activity_ids,
                "created_at": datetime.now(timezone.utc).isoformat(),
            })
            log.info("Merkle tree stored on IPFS: %s", tree_cid)
    except Exception:
        log.exception("IPFS tree storage failed")

    # Submit to OpenTimestamps; proof storage on IPFS is likewise best effort.
    ots_proof = await submit_to_opentimestamps(merkle_root)
    if ots_proof:
        try:
            from shared.utils.ipfs_client import add_bytes, is_available
            if await is_available():
                ots_proof_cid = await add_bytes(ots_proof)
                log.info("OTS proof stored on IPFS: %s", ots_proof_cid)
        except Exception:
            log.exception("IPFS OTS proof storage failed")

    # Create anchor record; flush so anchor.id exists before linking rows.
    anchor = APAnchor(
        merkle_root=merkle_root,
        tree_ipfs_cid=tree_cid,
        ots_proof_cid=ots_proof_cid,
        activity_count=len(activities),
    )
    session.add(anchor)
    await session.flush()

    # Link activities to anchor
    for a in activities:
        a.anchor_id = anchor.id
    await session.flush()

    log.info(
        "Anchor created: root=%s, activities=%d, tree_cid=%s",
        merkle_root, len(activities), tree_cid,
    )

    return anchor
|
||||
54
utils/calendar_helpers.py
Normal file
54
utils/calendar_helpers.py
Normal file
@@ -0,0 +1,54 @@
|
||||
"""Pure calendar utility functions (no ORM dependencies).
|
||||
|
||||
Extracted from events/bp/calendar/services/calendar_view.py so that
|
||||
blog admin (and any other app) can use them without cross-app imports.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import calendar as pycalendar
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from quart import request
|
||||
|
||||
|
||||
def parse_int_arg(name: str, default: int | None = None) -> int | None:
    """Read query parameter *name* as an int, falling back to *default*.

    A missing, blank, or non-numeric value all yield *default*.
    """
    raw = request.args.get(name, "").strip()
    if not raw:
        return default
    try:
        return int(raw)
    except ValueError:
        # Garbage input is treated the same as an absent parameter.
        return default
|
||||
|
||||
|
||||
def add_months(year: int, month: int, delta: int) -> tuple[int, int]:
    """Shift *month* of *year* by *delta* months, carrying over year bounds.

    *delta* may be negative.  Returns the resulting (year, month) with
    month in 1..12.
    """
    # Count months from zero so floor-division/modulo handle the carry
    # (including negative deltas) in one step.
    zero_based = (month - 1) + delta
    return year + zero_based // 12, zero_based % 12 + 1
|
||||
|
||||
|
||||
def build_calendar_weeks(year: int, month: int) -> list[list[dict]]:
    """Build the month grid for *year*/*month* as a list of 7-day weeks.

    Each day dict carries ``date`` (a datetime.date), ``in_month`` (whether
    the day belongs to the requested month — leading/trailing days of the
    neighbouring months fill the grid) and ``is_today`` (compared against
    the current UTC date).
    """
    current_utc_date = datetime.now(timezone.utc).date()
    grid = pycalendar.Calendar(firstweekday=0)  # weeks run Monday..Sunday

    return [
        [
            {
                "date": day,
                "in_month": day.month == month,
                "is_today": day == current_utc_date,
            }
            for day in week
        ]
        for week in grid.monthdatescalendar(year, month)
    ]
|
||||
181
utils/http_signatures.py
Normal file
181
utils/http_signatures.py
Normal file
@@ -0,0 +1,181 @@
|
||||
"""RSA key generation and HTTP Signature signing/verification.
|
||||
|
||||
Keys are stored in DB (ActorProfile), not the filesystem.
|
||||
Ported from ~/art-dag/activity-pub/keys.py.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa, padding
|
||||
|
||||
|
||||
def generate_rsa_keypair() -> tuple[str, str]:
    """Generate an RSA-2048 keypair.

    Returns:
        (private_pem, public_pem) — PKCS8 private key and
        SubjectPublicKeyInfo public key, both PEM-encoded UTF-8 strings.
    """
    key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
    )

    priv = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )
    pub = key.public_key().public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    return priv.decode(), pub.decode()
|
||||
|
||||
|
||||
def sign_request(
    private_key_pem: str,
    key_id: str,
    method: str,
    path: str,
    host: str,
    body: bytes | None = None,
    date: str | None = None,
) -> dict[str, str]:
    """Build HTTP Signature headers for an outgoing request.

    Signs ``(request-target)``, ``host`` and ``date`` — plus ``digest``
    when *body* is given — with RSA-SHA256 (PKCS#1 v1.5).

    Returns a dict of headers to merge into the request:
    ``{"Signature": ..., "Date": ..., "Digest": ..., "Host": ...}``
    """
    if date is None:
        date = datetime.now(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT")

    pseudo_headers = [
        f"(request-target): {method.lower()} {path}",
        f"host: {host}",
        f"date: {date}",
    ]
    result: dict[str, str] = {
        "Host": host,
        "Date": date,
    }

    if body is not None:
        # The Digest header binds the body bytes to the signature.
        body_hash = base64.b64encode(hashlib.sha256(body).digest()).decode()
        digest_header = f"SHA-256={body_hash}"
        pseudo_headers.append(f"digest: {digest_header}")
        result["Digest"] = digest_header

    to_sign = "\n".join(pseudo_headers)
    # The "headers" list in the Signature header mirrors what was signed.
    names = " ".join(line.split(":")[0] for line in pseudo_headers)

    key = serialization.load_pem_private_key(
        private_key_pem.encode(), password=None,
    )
    raw_sig = key.sign(
        to_sign.encode(),
        padding.PKCS1v15(),
        hashes.SHA256(),
    )
    encoded_sig = base64.b64encode(raw_sig).decode()

    result["Signature"] = (
        f'keyId="{key_id}",'
        f'headers="{names}",'
        f'signature="{encoded_sig}",'
        f'algorithm="rsa-sha256"'
    )

    return result
|
||||
|
||||
|
||||
def verify_request_signature(
    public_key_pem: str,
    signature_header: str,
    method: str,
    path: str,
    headers: dict[str, str],
) -> bool:
    """Verify an incoming HTTP Signature.

    Args:
        public_key_pem: PEM-encoded public key of the sender.
        signature_header: Value of the ``Signature`` header.
        method: HTTP method (GET, POST, etc.).
        path: Request path (e.g. ``/users/alice/inbox``).
        headers: All request headers (case-insensitive keys).

    Returns:
        True if the signature is valid.
    """
    # Parse Signature header: comma-separated key="value" components.
    # NOTE(review): a component without "=" makes .index raise ValueError
    # out of this function — callers should treat that as invalid input.
    parts: dict[str, str] = {}
    for part in signature_header.split(","):
        part = part.strip()
        eq = part.index("=")
        key = part[:eq]
        val = part[eq + 1:].strip('"')
        parts[key] = val

    # A missing "headers" component defaults to just "date".
    signed_headers = parts.get("headers", "date").split()
    signature_b64 = parts.get("signature", "")

    # Reconstruct the signed string exactly as the sender built it
    # (same "name: value" lines, joined with newlines).
    lines: list[str] = []
    # Normalize header lookup to lowercase
    lc_headers = {k.lower(): v for k, v in headers.items()}
    for h in signed_headers:
        if h == "(request-target)":
            lines.append(f"(request-target): {method.lower()} {path}")
        else:
            # A header the sender signed but we did not receive contributes
            # an empty value (which will then fail verification).
            val = lc_headers.get(h, "")
            lines.append(f"{h}: {val}")

    signed_string = "\n".join(lines)

    # RSA-SHA256 / PKCS#1 v1.5 — the counterpart of sign_request above.
    public_key = serialization.load_pem_public_key(public_key_pem.encode())
    try:
        public_key.verify(
            base64.b64decode(signature_b64),
            signed_string.encode(),
            padding.PKCS1v15(),
            hashes.SHA256(),
        )
        return True
    except Exception:
        # Any decode or verification failure means the signature is invalid.
        return False
|
||||
|
||||
|
||||
def create_ld_signature(
    private_key_pem: str,
    key_id: str,
    activity: dict,
) -> dict:
    """Create an RsaSignature2017 Linked Data signature for an activity."""
    # Canonicalize as compact, key-sorted JSON so signer and verifier
    # serialize the activity identically.
    canonical = json.dumps(activity, sort_keys=True, separators=(",", ":"))

    key = serialization.load_pem_private_key(
        private_key_pem.encode(), password=None,
    )
    raw_sig = key.sign(
        canonical.encode(),
        padding.PKCS1v15(),
        hashes.SHA256(),
    )

    return {
        "type": "RsaSignature2017",
        "creator": key_id,
        "created": datetime.now(timezone.utc).isoformat(),
        "signatureValue": base64.b64encode(raw_sig).decode(),
    }
|
||||
141
utils/ipfs_client.py
Normal file
141
utils/ipfs_client.py
Normal file
@@ -0,0 +1,141 @@
|
||||
"""Async IPFS client for content-addressed storage.
|
||||
|
||||
All content can be stored on IPFS — blog posts, products, activities, etc.
|
||||
Ported from ~/art-dag/activity-pub/ipfs_client.py (converted to async httpx).
|
||||
|
||||
Config via environment:
|
||||
IPFS_API — multiaddr or URL (default: /ip4/127.0.0.1/tcp/5001)
|
||||
IPFS_TIMEOUT — request timeout in seconds (default: 60)
|
||||
IPFS_GATEWAY_URL — public gateway for CID links (default: https://ipfs.io)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import httpx
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IPFSError(Exception):
    """Raised when an IPFS operation fails.

    Raised by add_bytes (and therefore add_json) when the daemon rejects
    or cannot complete an add; the fetch/pin helpers log and return
    None/False instead of raising.
    """
|
||||
|
||||
|
||||
# -- Config ------------------------------------------------------------------
|
||||
|
||||
IPFS_API = os.getenv("IPFS_API", "/ip4/127.0.0.1/tcp/5001")
|
||||
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "60"))
|
||||
IPFS_GATEWAY_URL = os.getenv("IPFS_GATEWAY_URL", "https://ipfs.io")
|
||||
|
||||
|
||||
def _multiaddr_to_url(multiaddr: str) -> str:
|
||||
"""Convert IPFS multiaddr to HTTP URL."""
|
||||
dns_match = re.match(r"/dns[46]?/([^/]+)/tcp/(\d+)", multiaddr)
|
||||
if dns_match:
|
||||
return f"http://{dns_match.group(1)}:{dns_match.group(2)}"
|
||||
|
||||
ip4_match = re.match(r"/ip4/([^/]+)/tcp/(\d+)", multiaddr)
|
||||
if ip4_match:
|
||||
return f"http://{ip4_match.group(1)}:{ip4_match.group(2)}"
|
||||
|
||||
if multiaddr.startswith("http"):
|
||||
return multiaddr
|
||||
return "http://127.0.0.1:5001"
|
||||
|
||||
|
||||
IPFS_BASE_URL = _multiaddr_to_url(IPFS_API)
|
||||
|
||||
|
||||
# -- Async client functions --------------------------------------------------
|
||||
|
||||
async def add_bytes(data: bytes, *, pin: bool = True) -> str:
    """Add raw bytes to IPFS.

    Args:
        data: Raw content to store.
        pin: Whether the node should pin (retain) the content.

    Returns the CID.

    Raises:
        IPFSError: if the daemon is unreachable or the add fails.
    """
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            resp = await client.post(
                f"{IPFS_BASE_URL}/api/v0/add",
                # Query params are strings: send "true"/"false", not "True".
                params={"pin": str(pin).lower()},
                files={"file": ("data", data)},
            )
            resp.raise_for_status()
            cid = resp.json()["Hash"]
            logger.info("Added to IPFS: %d bytes -> %s", len(data), cid)
            return cid
    except Exception as e:
        # Wrap everything (network, HTTP status, JSON shape) in IPFSError.
        logger.error("Failed to add bytes to IPFS: %s", e)
        raise IPFSError(f"Failed to add bytes: {e}") from e
|
||||
|
||||
|
||||
async def add_json(data: dict) -> str:
    """Serialize dict to sorted JSON and add to IPFS."""
    # Key-sorted, fixed-indent output keeps the serialization stable, so
    # identical dicts always map to the same CID.
    payload = json.dumps(data, indent=2, sort_keys=True).encode("utf-8")
    return await add_bytes(payload, pin=True)
|
||||
|
||||
|
||||
async def get_bytes(cid: str) -> bytes | None:
    """Fetch content from IPFS by CID.

    Returns the raw bytes, or None on any failure — errors are logged,
    not raised (best effort).
    """
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            # The HTTP RPC API uses POST even for reads.
            resp = await client.post(
                f"{IPFS_BASE_URL}/api/v0/cat",
                params={"arg": cid},
            )
            resp.raise_for_status()
            logger.info("Retrieved from IPFS: %s (%d bytes)", cid, len(resp.content))
            return resp.content
    except Exception as e:
        logger.error("Failed to get from IPFS: %s", e)
        return None
|
||||
|
||||
|
||||
async def pin_cid(cid: str) -> bool:
    """Pin a CID on this node.

    Returns True on success, False on any failure (logged, not raised).
    """
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            resp = await client.post(
                f"{IPFS_BASE_URL}/api/v0/pin/add",
                params={"arg": cid},
            )
            resp.raise_for_status()
            logger.info("Pinned on IPFS: %s", cid)
            return True
    except Exception as e:
        logger.error("Failed to pin on IPFS: %s", e)
        return False
|
||||
|
||||
|
||||
async def unpin_cid(cid: str) -> bool:
    """Unpin a CID from this node.

    Returns True on success, False on any failure (logged, not raised).
    """
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            resp = await client.post(
                f"{IPFS_BASE_URL}/api/v0/pin/rm",
                params={"arg": cid},
            )
            resp.raise_for_status()
            logger.info("Unpinned from IPFS: %s", cid)
            return True
    except Exception as e:
        logger.error("Failed to unpin from IPFS: %s", e)
        return False
|
||||
|
||||
|
||||
async def is_available() -> bool:
    """Check if IPFS daemon is reachable."""
    try:
        # Short timeout: this is only a liveness probe against /api/v0/id.
        async with httpx.AsyncClient(timeout=5) as client:
            response = await client.post(f"{IPFS_BASE_URL}/api/v0/id")
    except Exception:
        return False
    return response.status_code == 200
|
||||
|
||||
|
||||
def gateway_url(cid: str) -> str:
    """Return a public gateway URL for a CID."""
    # Built from the configured IPFS_GATEWAY_URL module constant.
    return IPFS_GATEWAY_URL + "/ipfs/" + cid
|
||||
68
utils/webfinger.py
Normal file
68
utils/webfinger.py
Normal file
@@ -0,0 +1,68 @@
|
||||
"""WebFinger client for resolving remote AP actor profiles."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import httpx
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
AP_CONTENT_TYPE = "application/activity+json"
|
||||
|
||||
|
||||
async def resolve_actor(acct: str) -> dict | None:
    """Resolve user@domain to actor JSON via WebFinger + actor fetch.

    Args:
        acct: Handle in the form ``user@domain`` (no leading ``@``).

    Returns:
        Actor JSON-LD dict, or None if resolution fails.
    """
    # Tolerate a leading "@" even though callers are asked to omit it.
    acct = acct.lstrip("@")
    if "@" not in acct:
        return None

    _, domain = acct.rsplit("@", 1)
    webfinger_url = f"https://{domain}/.well-known/webfinger"

    try:
        async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client:
            # Step 1: WebFinger lookup
            resp = await client.get(
                webfinger_url,
                params={"resource": f"acct:{acct}"},
                headers={"Accept": "application/jrd+json, application/json"},
            )
            if resp.status_code != 200:
                log.debug("WebFinger %s returned %d", webfinger_url, resp.status_code)
                return None

            data = resp.json()
            # Find self link with AP content type
            actor_url = None
            for link in data.get("links", []):
                if link.get("rel") == "self" and link.get("type") in (
                    AP_CONTENT_TYPE,
                    "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"",
                ):
                    actor_url = link.get("href")
                    break

            if not actor_url:
                log.debug("No AP self link in WebFinger response for %s", acct)
                return None

            # Step 2: Fetch actor JSON
            resp = await client.get(
                actor_url,
                headers={"Accept": AP_CONTENT_TYPE},
            )
            if resp.status_code == 200:
                return resp.json()

            log.debug("Actor fetch %s returned %d", actor_url, resp.status_code)
    except Exception:
        # Network or JSON errors are treated as "not resolvable".
        log.exception("WebFinger resolution failed for %s", acct)

    return None
|
||||
Reference in New Issue
Block a user