Compare commits
246 Commits
e1f9c964f5
...
decoupling
| Author | SHA1 | Date | |
|---|---|---|---|
| 544892edd9 | |||
| c243d17eeb | |||
| 5b4cacaf19 | |||
| a8c0741f54 | |||
| 0af07f9f2e | |||
| 222738546a | |||
| 4098c32878 | |||
| 3bd4f4b661 | |||
| 5dd1161816 | |||
| 002cc49f2c | |||
| e6b0849ce3 | |||
| 8024fa5b13 | |||
| ea18a402d6 | |||
| e4e43177a8 | |||
| 8445c36270 | |||
| 5578923242 | |||
| 9754b892d6 | |||
| ab75e505a8 | |||
| 13bcf755f6 | |||
| 3d55145e5f | |||
| 8b2785ccb0 | |||
| 03196c3ad0 | |||
| 815c5285d5 | |||
| ed30f88f05 | |||
| 8aedbc9e62 | |||
| 8ceb9aee62 | |||
| 4668c30890 | |||
| 39f61eddd6 | |||
| 5436dfe76c | |||
| 4ede0368dc | |||
| a8e06e87fb | |||
| 588d240ddc | |||
| aa5c251a45 | |||
| 7ccb463a8b | |||
| 341fc4cf28 | |||
| 1a5969202e | |||
| 3bc5de126d | |||
| 1447122a0c | |||
| ab45e21c7c | |||
| c0d369eb8e | |||
| 755313bd29 | |||
| 01a67029f0 | |||
| b54f7b4b56 | |||
| 5ede32e21c | |||
| 7aea1f1be9 | |||
| 0ef4a93a92 | |||
| 48696498ef | |||
| b7d95a8b4e | |||
| e7d5c6734b | |||
| e4a6d2dfc8 | |||
| 0a5562243b | |||
| 2b41aaa6ce | |||
| cfe66e5342 | |||
| 382d1b7c7a | |||
| a580a53328 | |||
| 0f9af31ffe | |||
| e8bc228c7f | |||
| 17cebe07e7 | |||
| 82b411f25a | |||
| a643b3532d | |||
| 22802bd36b | |||
| 0d48fd22ee | |||
| b92e7a763e | |||
| fec5ecdfb1 | |||
| 269bcc02be | |||
| 9f2f0dacaf | |||
| 39e013a75e | |||
| 2df1014ee3 | |||
| e8a991834b | |||
| bc7a4a5128 | |||
| 8e4c2c139e | |||
| db3f48ec75 | |||
| b40f3d124c | |||
| 3809affcab | |||
| 81e51ae7bc | |||
| b6119b7f04 | |||
| 75cb5d43b9 | |||
| f628b35fc3 | |||
| 2e4fbd5777 | |||
| b47ad6224b | |||
| 2d08d6f787 | |||
| beebe559cd | |||
| b63aa72efb | |||
| 8cfa12de6b | |||
| 3dd62bd9bf | |||
| c926e5221d | |||
| d62643312a | |||
| 8852ab1108 | |||
| 1559c5c931 | |||
| 00efbc2a35 | |||
| 6c44a5f3d0 | |||
| 6d43404b12 | |||
| 97c4e25ba7 | |||
| f1b7fdd37d | |||
| 597b0d7a2f | |||
| ee41e30d5b | |||
| 5957bd8941 | |||
| a8edc26a1d | |||
| 6a331e4ad8 | |||
| 4a99bc56e9 | |||
| 4fe5afe3e6 | |||
| efae7f5533 | |||
| 105f4c4679 | |||
| a7cca2f720 | |||
| 8269977751 | |||
| 0df932bd94 | |||
| c220fe21d6 | |||
| f9d9697c67 | |||
| f4c2f4b6b8 | |||
| 881ed2cdcc | |||
| 2ce2077d14 | |||
| 8cf834dd55 | |||
| 4daecabf30 | |||
| 19240c6ca3 | |||
| 3e29c2a334 | |||
| a70d3648ec | |||
| 0d1ce92e52 | |||
| 09b5a5b4f6 | |||
| f0a100fd77 | |||
| 16da08ff05 | |||
| 5c6d83f474 | |||
| da8a766e3f | |||
| 9fa3b8800c | |||
| f24292f99d | |||
| de3a6e4dde | |||
| 0bb57136d2 | |||
| 495e6589dc | |||
| 903193d825 | |||
| eda95ec58b | |||
| d2f1da4944 | |||
| 53c4a0a1e0 | |||
| 9c6170ed31 | |||
| a0a0f5ebc2 | |||
| 6f1d5bac3c | |||
| b52ef719bf | |||
| 838ec982eb | |||
| e65232761b | |||
| 1c794b6c0e | |||
| d53b9648a9 | |||
| 8013317b41 | |||
| 04419a1ec6 | |||
| 573aec7dfa | |||
| 36b5f1d19d | |||
| 28c66c3650 | |||
| 5d9f1586af | |||
| fbb7a1422c | |||
| 09010db70e | |||
| 0fb87e3b1c | |||
| 996ddad2ea | |||
| f486e02413 | |||
| 69a0989b7a | |||
| 0c4682e4d7 | |||
| bcac8e5adc | |||
| e1b47e5b62 | |||
| ae134907a4 | |||
| db7342c7d2 | |||
| 94b1fca938 | |||
| 96b02d93df | |||
| fe34ea8e5b | |||
| f2d040c323 | |||
| 22460db450 | |||
| 1a74d811f7 | |||
| 1a179de547 | |||
| fa431ee13e | |||
| 76a9436ea1 | |||
| 8f8bc4fad9 | |||
| e45edbf362 | |||
| 1f3d98ecc1 | |||
| dd52417241 | |||
| 98aee1f656 | |||
| 81112c716b | |||
| cf7fbd8e9b | |||
| 00249dd2a9 | |||
| c015f3f02f | |||
| 404449fcab | |||
| 984ef9c65e | |||
| 6f0965aa9c | |||
| e65bd41ebe | |||
| bde2fd73b8 | |||
| 5cca66574e | |||
|
|
e9848653d7 | ||
|
|
c3ba28ea03 | ||
|
|
4c2e716558 | ||
|
|
b788f1f778 | ||
|
|
4f49985cd5 | ||
|
|
07cae101ad | ||
|
|
c53227d991 | ||
|
|
3bffb97ca1 | ||
|
|
84e3ff3a91 | ||
|
|
b9fe884ab9 | ||
|
|
3797a0c7c9 | ||
|
|
1ea9ae4050 | ||
|
|
507a5a66ff | ||
|
|
094b6c55cd | ||
|
|
97d2021a00 | ||
|
|
9f29073cda | ||
|
|
c53f3025d9 | ||
|
|
3053cb321d | ||
|
|
3be287532d | ||
|
|
95bd32bd71 | ||
|
|
50a9e5d952 | ||
|
|
961067841e | ||
|
|
0ccf897f74 | ||
|
|
c6271931a6 | ||
|
|
99ab363cfd | ||
|
|
8680ec37d6 | ||
|
|
b91a58f30a | ||
|
|
8f4104a4bf | ||
|
|
e454187035 | ||
|
|
f6cdf126e4 | ||
|
|
3b707ec8a0 | ||
|
|
5dafbdbda9 | ||
|
|
580f551700 | ||
|
|
f1d80a1777 | ||
|
|
57d2a6a6e3 | ||
|
|
0e14d2761a | ||
|
|
e4c7432303 | ||
|
|
b45a2b6c10 | ||
|
|
3dde4e79ab | ||
|
|
1dc87d0f64 | ||
|
|
d8206c7b3b | ||
|
|
a5717ec4d4 | ||
|
|
1b4e51c48c | ||
|
|
e7610bed7c | ||
|
|
e58def135d | ||
|
|
a13c361dee | ||
|
|
5173167f3e | ||
|
|
c590f2e039 | ||
|
|
1862fe96fc | ||
|
|
7784e6b2b0 | ||
|
|
97d4d4ce21 | ||
|
|
cc2dcbddd4 | ||
|
|
d77241602f | ||
|
|
ea9015f65b | ||
|
|
44694da76f | ||
|
|
f54b0fb5da | ||
|
|
4dff4cfafb | ||
|
|
80c94ebea7 | ||
|
|
3ca1c14432 | ||
|
|
66c0c23de9 | ||
|
|
660a6db6fb | ||
|
|
2fe27fb34a | ||
|
|
b3d853ad35 | ||
|
|
4d7f8cfea2 | ||
|
|
567888c9e0 | ||
|
|
8b52a11b67 |
236 lines added
.claude/plans/flickering-gathering-wilkes.md
Normal file
@@ -0,0 +1,236 @@
|
||||
# Ticket Purchase Through Cart
|
||||
|
||||
## Context
|
||||
|
||||
Tickets (Ticket model) are currently created with state="reserved" immediately when a user clicks "Buy" (`POST /tickets/buy/`). They bypass the cart and checkout entirely — no cart display, no SumUp payment, no order linkage. The user wants tickets to flow through the cart exactly like products and calendar bookings: appear in the cart, go through checkout, get confirmed on payment. Login required. No reservation — if the event sells out before payment completes, the user gets refunded (admin handles refund; we show a notice).
|
||||
|
||||
## Current Flow vs Desired Flow
|
||||
|
||||
**Now:** Click Buy → Ticket created (state="reserved") → done (no cart, no payment)
|
||||
|
||||
**Desired:** Click Buy → Ticket created (state="pending", in cart) → Checkout → SumUp payment → Ticket confirmed
|
||||
|
||||
## Approach
|
||||
|
||||
Mirror the CalendarEntry pattern: CalendarEntry uses state="pending" to mean "in cart". We add state="pending" for Ticket. Pending tickets don't count toward availability (not allocated). At checkout, pending→reserved + linked to order. On payment, reserved→confirmed.
|
||||
|
||||
---
|
||||
|
||||
## Step 1: Update TicketDTO
|
||||
|
||||
**File:** `shared/contracts/dtos.py`
|
||||
|
||||
Add fields needed for cart display and page-grouping:
|
||||
- `entry_id: int` (for linking back)
|
||||
- `cost: Decimal` (ticket price — from ticket_type.cost or entry.ticket_price)
|
||||
- `calendar_container_id: int | None` (for page-grouping in cart)
|
||||
- `calendar_container_type: str | None`
|
||||
|
||||
Also add `ticket_count` and `ticket_total` to `CartSummaryDTO`.
|
||||
|
||||
## Step 2: Add ticket methods to CalendarService protocol
|
||||
|
||||
**File:** `shared/contracts/protocols.py`
|
||||
|
||||
```python
|
||||
async def pending_tickets(
|
||||
self, session: AsyncSession, *, user_id: int,
|
||||
) -> list[TicketDTO]: ...
|
||||
|
||||
async def claim_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int, user_id: int,
|
||||
page_post_id: int | None = None,
|
||||
) -> None: ...
|
||||
|
||||
async def confirm_tickets_for_order(
|
||||
self, session: AsyncSession, order_id: int,
|
||||
) -> None: ...
|
||||
```
|
||||
|
||||
## Step 3: Implement in SqlCalendarService
|
||||
|
||||
**File:** `shared/services/calendar_impl.py`
|
||||
|
||||
- **`pending_tickets`**: Query `Ticket` where `user_id` matches, `state="pending"`, eager-load entry→calendar + ticket_type. Map to TicketDTO with cost from `ticket_type.cost` or `entry.ticket_price`.
|
||||
- **`claim_tickets_for_order`**: UPDATE Ticket SET state="reserved", order_id=? WHERE user_id=? AND state="pending". If `page_post_id`, filter via entry→calendar→container.
|
||||
- **`confirm_tickets_for_order`**: UPDATE Ticket SET state="confirmed" WHERE order_id=? AND state="reserved".
|
||||
|
||||
Update `_ticket_to_dto` to populate the new fields (entry_id, cost, calendar_container_id/type).
|
||||
|
||||
## Step 4: Add stubs
|
||||
|
||||
**File:** `shared/services/stubs.py`
|
||||
|
||||
Add no-op stubs returning `[]`/`None` for the 3 new methods.
|
||||
|
||||
## Step 5: Update SqlCartService
|
||||
|
||||
**File:** `shared/services/cart_impl.py`
|
||||
|
||||
In `cart_summary()`, also query pending tickets via `services.calendar.pending_tickets()` and include `ticket_count` + `ticket_total` in the returned `CartSummaryDTO`.
|
||||
|
||||
## Step 6: Update cart internal API
|
||||
|
||||
**File:** `cart/bp/cart/api.py`
|
||||
|
||||
Add `ticket_count` and `ticket_total` to the JSON summary response. Query via `services.calendar.pending_tickets()`.
|
||||
|
||||
## Step 7: Add ticket cart service functions
|
||||
|
||||
**File:** `cart/bp/cart/services/calendar_cart.py`
|
||||
|
||||
Add:
|
||||
```python
|
||||
async def get_ticket_cart_entries(session):
|
||||
ident = current_cart_identity()
|
||||
if ident["user_id"] is None:
|
||||
return []
|
||||
return await services.calendar.pending_tickets(session, user_id=ident["user_id"])
|
||||
|
||||
def ticket_total(tickets) -> float:
|
||||
return sum(t.cost for t in tickets if t.cost is not None)
|
||||
```
|
||||
|
||||
**File:** `cart/bp/cart/services/__init__.py` — export the new functions.
|
||||
|
||||
## Step 8: Update cart page grouping
|
||||
|
||||
**File:** `cart/bp/cart/services/page_cart.py`
|
||||
|
||||
In `get_cart_grouped_by_page()`:
|
||||
- Fetch ticket cart entries via `get_ticket_cart_entries()`
|
||||
- Attach tickets to page groups by `calendar_container_id` (same pattern as calendar entries)
|
||||
- Add `ticket_count` and `ticket_total` to each group dict
|
||||
|
||||
## Step 9: Modify ticket buy route
|
||||
|
||||
**File:** `events/bp/tickets/routes.py` — `buy_tickets()`
|
||||
|
||||
- **Require login**: If `ident["user_id"]` is None, return error prompting sign-in
|
||||
- **Create with state="pending"** instead of "reserved"
|
||||
- **Remove availability check** at buy time (pending tickets not allocated)
|
||||
- Update response template to say "added to cart" instead of "reserved"
|
||||
|
||||
## Step 10: Update availability count
|
||||
|
||||
**File:** `events/bp/tickets/services/tickets.py` — `get_available_ticket_count()`
|
||||
|
||||
Change from counting `state != "cancelled"` to counting `state.in_(("reserved", "confirmed", "checked_in"))`. This excludes "pending" (in-cart) tickets from sold count.
|
||||
|
||||
## Step 11: Update buy form template
|
||||
|
||||
**File:** `events/templates/_types/tickets/_buy_form.html`
|
||||
|
||||
- If user not logged in, show "Sign in to buy tickets" link instead of buy form
|
||||
- Keep existing form for logged-in users
|
||||
|
||||
**File:** `events/templates/_types/tickets/_buy_result.html`
|
||||
|
||||
- Change "reserved" messaging to "added to cart"
|
||||
- Add link to cart app
|
||||
- Add sold-out refund notice: "If the event sells out before payment, you will be refunded."
|
||||
|
||||
## Step 12: Update cart display templates
|
||||
|
||||
**File:** `shared/browser/templates/_types/cart/_cart.html`
|
||||
|
||||
In `show_cart()` macro:
|
||||
- Add empty check: `{% if not cart and not calendar_cart_entries and not ticket_cart_entries %}`
|
||||
- Add tickets section after calendar bookings (same style)
|
||||
- Add sold-out notice under tickets section
|
||||
|
||||
In `summary()` and `cart_grand_total()` macros:
|
||||
- Include ticket_total in the grand total calculation
|
||||
|
||||
**File:** `shared/browser/templates/_types/cart/_mini.html`
|
||||
|
||||
- Add ticket count to the badge total
|
||||
|
||||
## Step 13: Update cart overview template
|
||||
|
||||
**File:** `cart/templates/_types/cart/overview/_main_panel.html`
|
||||
|
||||
- Add ticket count badge alongside product and calendar count badges
|
||||
|
||||
## Step 14: Update checkout flow
|
||||
|
||||
**File:** `cart/bp/cart/global_routes.py` — `checkout()`
|
||||
|
||||
- Fetch pending tickets: `get_ticket_cart_entries(g.s)`
|
||||
- Include ticket total in cart_total calculation
|
||||
- Include `not ticket_entries` in empty check
|
||||
- Pass tickets to `create_order_from_cart()` (or claim separately after)
|
||||
|
||||
**File:** `cart/bp/cart/page_routes.py` — `page_checkout()`
|
||||
|
||||
Same changes, scoped to page.
|
||||
|
||||
**File:** `cart/bp/cart/services/checkout.py` — `create_order_from_cart()`
|
||||
|
||||
- Accept new param `ticket_total: float` (add to order total)
|
||||
- After claiming calendar entries, also claim tickets: `services.calendar.claim_tickets_for_order()`
|
||||
- Include tickets in `resolve_page_config` page detection
|
||||
|
||||
## Step 15: Update payment confirmation
|
||||
|
||||
**File:** `cart/bp/cart/services/check_sumup_status.py`
|
||||
|
||||
When status == "PAID", also call `services.calendar.confirm_tickets_for_order(session, order.id)` alongside `confirm_entries_for_order`.
|
||||
|
||||
## Step 16: Update checkout return page
|
||||
|
||||
**File:** `cart/bp/cart/global_routes.py` — `checkout_return()`
|
||||
|
||||
- Also fetch tickets for order: `services.calendar.user_tickets()` filtered by order_id (or add a `get_tickets_for_order` method)
|
||||
|
||||
**File:** `shared/browser/templates/_types/order/_calendar_items.html`
|
||||
|
||||
- Add a tickets section showing ordered/confirmed tickets.
|
||||
|
||||
## Step 17: Sync shared files
|
||||
|
||||
Copy all changed shared files to blog/, cart/, events/, market/ submodules.
|
||||
|
||||
---
|
||||
|
||||
## Files Modified (Summary)
|
||||
|
||||
### Shared contracts/services:
|
||||
- `shared/contracts/dtos.py` — update TicketDTO, CartSummaryDTO
|
||||
- `shared/contracts/protocols.py` — add 3 methods to CalendarService
|
||||
- `shared/services/calendar_impl.py` — implement 3 new methods, update _ticket_to_dto
|
||||
- `shared/services/stubs.py` — add stubs
|
||||
- `shared/services/cart_impl.py` — include tickets in cart_summary
|
||||
|
||||
### Cart app:
|
||||
- `cart/bp/cart/api.py` — add ticket info to summary API
|
||||
- `cart/bp/cart/services/calendar_cart.py` — add ticket functions
|
||||
- `cart/bp/cart/services/__init__.py` — export new functions
|
||||
- `cart/bp/cart/services/page_cart.py` — include tickets in grouped view
|
||||
- `cart/bp/cart/global_routes.py` — include tickets in checkout + return
|
||||
- `cart/bp/cart/page_routes.py` — include tickets in page checkout
|
||||
- `cart/bp/cart/services/checkout.py` — include ticket total in order
|
||||
- `cart/bp/cart/services/check_sumup_status.py` — confirm tickets on payment
|
||||
|
||||
### Events app:
|
||||
- `events/bp/tickets/routes.py` — require login, state="pending"
|
||||
- `events/bp/tickets/services/tickets.py` — update availability count
|
||||
- `events/templates/_types/tickets/_buy_form.html` — login gate
|
||||
- `events/templates/_types/tickets/_buy_result.html` — "added to cart" messaging
|
||||
|
||||
### Templates (shared):
|
||||
- `shared/browser/templates/_types/cart/_cart.html` — ticket section + totals
|
||||
- `shared/browser/templates/_types/cart/_mini.html` — ticket count in badge
|
||||
- `cart/templates/_types/cart/overview/_main_panel.html` — ticket badge
|
||||
- `shared/browser/templates/_types/order/_calendar_items.html` — ticket section
|
||||
|
||||
## Verification
|
||||
|
||||
1. Go to an event entry with tickets configured (state="confirmed", ticket_price set)
|
||||
2. Click "Buy Tickets" while not logged in → should see "sign in" prompt
|
||||
3. Log in, click "Buy Tickets" → ticket created with state="pending"
|
||||
4. Navigate to cart → ticket appears alongside any products/bookings
|
||||
5. Proceed to checkout → SumUp payment page
|
||||
6. Complete payment → ticket state becomes "confirmed"
|
||||
7. Check cart mini badge shows ticket count
|
||||
8. Verify availability count doesn't include pending tickets
|
||||
177 lines added
.claude/plans/glittery-discovering-kahn.md
Normal file
@@ -0,0 +1,177 @@
|
||||
# Sexp Fragment Protocol: Component Defs Between Services
|
||||
|
||||
## Context
|
||||
|
||||
Fragment endpoints return raw sexp source (e.g., `(~blog-nav-wrapper :items ...)`). The consuming service embeds this in its page sexp, which the client evaluates. But blog-specific components like `~blog-nav-wrapper` are only in blog's `_COMPONENT_ENV` — not in market's. So market's `client_components_tag()` never sends them to the client, causing "Unknown component" errors.
|
||||
|
||||
The fix: transfer component definitions alongside fragments. Services tell the provider what they already have; the provider sends only what's missing. The consuming service registers received defs into its `_COMPONENT_ENV` so they're included in `client_components_tag()` output for the client.
|
||||
|
||||
## Approach: Structured Sexp Request/Response
|
||||
|
||||
Replace the current GET + `X-Fragment-Request` header protocol with POST + sexp body. This aligns with the vision in `docs/sexpr-internal-protocol-first.md`.
|
||||
|
||||
### Request format (POST body)
|
||||
```scheme
|
||||
(fragment-request
|
||||
:type "nav-tree"
|
||||
:params (:app-name "market" :path "/")
|
||||
:components (~blog-nav-wrapper ~blog-nav-item-link ~header-row-sx ...))
|
||||
```
|
||||
|
||||
`:components` lists component names already in the consumer's `_COMPONENT_ENV`. Provider skips these.
|
||||
|
||||
### Response format
|
||||
```scheme
|
||||
(fragment-response
|
||||
:defs ((defcomp ~blog-nav-wrapper (&key ...) ...) (defcomp ~blog-nav-item-link ...))
|
||||
:content (~blog-nav-wrapper :items ...))
|
||||
```
|
||||
|
||||
`:defs` contains only components the consumer doesn't have. `:content` is the fragment sexp (same as current response body).
|
||||
|
||||
## Changes
|
||||
|
||||
### 1. `shared/infrastructure/fragments.py` — Client side
|
||||
|
||||
**`fetch_fragment()`**: Switch from GET to POST with sexp body.
|
||||
|
||||
- Build request body using `sexp_call`:
|
||||
```python
|
||||
from shared.sexp.helpers import sexp_call, SexpExpr
|
||||
from shared.sexp.jinja_bridge import _COMPONENT_ENV
|
||||
|
||||
comp_names = [k for k in _COMPONENT_ENV if k.startswith("~")]
|
||||
body = sexp_call("fragment-request",
|
||||
type=fragment_type,
|
||||
params=params or {},
|
||||
components=SexpExpr("(" + " ".join(comp_names) + ")"))
|
||||
```
|
||||
- POST to same URL, body as `text/sexp`, keep `X-Fragment-Request` header for backward compat
|
||||
- Parse response: extract `:defs` and `:content` from the sexp response
|
||||
- Register defs into `_COMPONENT_ENV` via `register_components()`
|
||||
- Return `:content` wrapped as `SexpExpr`
|
||||
|
||||
**New helper `_parse_fragment_response(text)`**:
|
||||
- `parse()` the response sexp
|
||||
- Extract keyword args (reuse the keyword-extraction pattern from `evaluator.py`)
|
||||
- Return `(defs_source, content_source)` tuple
|
||||
|
||||
### 2. `shared/sexp/helpers.py` — Response builder
|
||||
|
||||
**New `fragment_response(content, request_text)`**:
|
||||
|
||||
```python
|
||||
def fragment_response(content: str, request_text: str) -> str:
|
||||
"""Build a structured fragment response with missing component defs."""
|
||||
from .parser import parse, serialize
|
||||
from .types import Keyword, Component
|
||||
from .jinja_bridge import _COMPONENT_ENV
|
||||
|
||||
# Parse request to get :components list
|
||||
req = parse(request_text)
|
||||
loaded = set()
|
||||
# extract :components keyword value
|
||||
...
|
||||
|
||||
# Diff against _COMPONENT_ENV, serialize missing defs
|
||||
defs_parts = []
|
||||
for key, val in _COMPONENT_ENV.items():
|
||||
if not isinstance(val, Component):
|
||||
continue
|
||||
if key in loaded or f"~{val.name}" in loaded:
|
||||
continue
|
||||
defs_parts.append(_serialize_defcomp(val))
|
||||
|
||||
defs_sexp = "(" + " ".join(defs_parts) + ")" if defs_parts else "nil"
|
||||
return sexp_call("fragment-response",
|
||||
defs=SexpExpr(defs_sexp),
|
||||
content=SexpExpr(content))
|
||||
```
|
||||
|
||||
### 3. Fragment endpoints — All services
|
||||
|
||||
**Generic change in each `bp/fragments/routes.py`**: Update the route handler to accept POST, read sexp body, use `fragment_response()` for the response.
|
||||
|
||||
The `get_fragment` handler becomes:
|
||||
```python
|
||||
@bp.route("/<fragment_type>", methods=["GET", "POST"])
|
||||
async def get_fragment(fragment_type: str):
|
||||
handler = _handlers.get(fragment_type)
|
||||
if handler is None:
|
||||
return Response("", status=200, content_type="text/sexp")
|
||||
content = await handler()
|
||||
|
||||
# Structured sexp protocol (POST with sexp body)
|
||||
request_body = await request.get_data(as_text=True)
|
||||
if request_body and request.content_type == "text/sexp":
|
||||
from shared.sexp.helpers import fragment_response
|
||||
body = fragment_response(content, request_body)
|
||||
return Response(body, status=200, content_type="text/sexp")
|
||||
|
||||
# Legacy GET fallback
|
||||
return Response(content, status=200, content_type="text/sexp")
|
||||
```
|
||||
|
||||
Since all fragment endpoints follow the identical `_handlers` + `get_fragment` pattern, we can extract this into a shared helper in `fragments.py` or a new `shared/infrastructure/fragment_endpoint.py`.
|
||||
|
||||
### 4. Extract shared fragment endpoint helper
|
||||
|
||||
To avoid touching every service's fragment routes, create a shared blueprint factory:
|
||||
|
||||
**`shared/infrastructure/fragment_endpoint.py`**:
|
||||
```python
|
||||
def create_fragment_blueprint(handlers: dict) -> Blueprint:
|
||||
"""Create a fragment endpoint blueprint with sexp protocol support."""
|
||||
bp = Blueprint("fragments", __name__, url_prefix="/internal/fragments")
|
||||
|
||||
@bp.before_request
|
||||
async def _require_fragment_header():
|
||||
if not request.headers.get(FRAGMENT_HEADER):
|
||||
return Response("", status=403)
|
||||
|
||||
@bp.route("/<fragment_type>", methods=["GET", "POST"])
|
||||
async def get_fragment(fragment_type: str):
|
||||
handler = handlers.get(fragment_type)
|
||||
if handler is None:
|
||||
return Response("", status=200, content_type="text/sexp")
|
||||
content = await handler()
|
||||
|
||||
# Sexp protocol: POST with structured request/response
|
||||
if request.method == "POST" and request.content_type == "text/sexp":
|
||||
request_body = await request.get_data(as_text=True)
|
||||
from shared.sexp.helpers import fragment_response
|
||||
body = fragment_response(content, request_body)
|
||||
return Response(body, status=200, content_type="text/sexp")
|
||||
|
||||
return Response(content, status=200, content_type="text/sexp")
|
||||
|
||||
return bp
|
||||
```
|
||||
|
||||
Then each service's `register()` just returns `create_fragment_blueprint(_handlers)`. This is a small refactor since they all duplicate the same boilerplate today.
|
||||
|
||||
## Files to modify
|
||||
|
||||
| File | Change |
|
||||
|------|--------|
|
||||
| `shared/infrastructure/fragments.py` | POST sexp body, parse response, register defs |
|
||||
| `shared/sexp/helpers.py` | `fragment_response()` builder, `_serialize_defcomp()` |
|
||||
| `shared/infrastructure/fragment_endpoint.py` | **New** — shared blueprint factory |
|
||||
| `blog/bp/fragments/routes.py` | Use `create_fragment_blueprint` |
|
||||
| `market/bp/fragments/routes.py` | Use `create_fragment_blueprint` |
|
||||
| `events/bp/fragments/routes.py` | Use `create_fragment_blueprint` |
|
||||
| `cart/bp/fragments/routes.py` | Use `create_fragment_blueprint` |
|
||||
| `account/bp/fragments/routes.py` | Use `create_fragment_blueprint` |
|
||||
| `orders/bp/fragments/routes.py` | Use `create_fragment_blueprint` |
|
||||
| `federation/bp/fragments/routes.py` | Use `create_fragment_blueprint` |
|
||||
| `relations/bp/fragments/routes.py` | Use `create_fragment_blueprint` |
|
||||
|
||||
## Verification
|
||||
|
||||
1. Start blog + market services: `./dev.sh blog market`
|
||||
2. Load market page — should fetch nav-tree from blog with sexp protocol
|
||||
3. Check market logs: no "Unknown component" errors
|
||||
4. Inspect page source: `client_components_tag()` output includes `~blog-nav-wrapper` etc.
|
||||
5. Cross-domain sx-get navigation (blog → market) works without reload
|
||||
6. Run sexp tests: `python3 -m pytest shared/sexp/tests/ -x -q`
|
||||
7. Second page load: `:components` list in request includes blog nav components, response `:defs` is `nil` (no missing defs to send)
|
||||
425 lines added
.claude/plans/glittery-zooming-hummingbird.md
Normal file
@@ -0,0 +1,425 @@
|
||||
# Phase 6: Full Cross-App Decoupling via Glue Services
|
||||
|
||||
## Context
|
||||
|
||||
Phases 1-5 are complete. All cross-domain FK constraints have been dropped (except `OrderItem.product_id` and `CartItem.product_id`/`market_place_id`/`user_id`, kept as pragmatic exceptions). Cross-domain **writes** go through glue services.
|
||||
|
||||
However, **25+ cross-app model imports** remain — apps still `from blog.models.ghost_content import Post`, `from market.models.market import CartItem`, etc. This means every app needs every other app's code on disk to start, making separate databases or independent deployment impossible.
|
||||
|
||||
**Goal:** Eliminate all cross-app model imports. Every app only imports from its own `models/`, from `shared/`, and from `glue/`. Cross-domain access goes through glue services. After this phase, each app could theoretically run against its own database.
|
||||
|
||||
---
|
||||
|
||||
## Inventory of Cross-App Imports to Eliminate
|
||||
|
||||
### Cart app imports (9 files, 4 foreign models):
|
||||
| File | Import | Usage |
|
||||
|------|--------|-------|
|
||||
| `cart/bp/cart/api.py` | `market.models.market.CartItem` | Query cart items |
|
||||
| `cart/bp/cart/api.py` | `market.models.market_place.MarketPlace` | Filter by container |
|
||||
| `cart/bp/cart/api.py` | `events.models.calendars.CalendarEntry, Calendar` | Query pending entries |
|
||||
| `cart/bp/cart/api.py` | `blog.models.ghost_content.Post` | Resolve page slug |
|
||||
| `cart/bp/cart/services/checkout.py` | `market.models.market.Product, CartItem` | Find cart items, validate products |
|
||||
| `cart/bp/cart/services/checkout.py` | `events.models.calendars.CalendarEntry, Calendar` | Resolve page containers |
|
||||
| `cart/bp/cart/services/checkout.py` | `market.models.market_place.MarketPlace` | Get container_id |
|
||||
| `cart/bp/cart/services/page_cart.py` | `market.models.market.CartItem` | Query page cart |
|
||||
| `cart/bp/cart/services/page_cart.py` | `market.models.market_place.MarketPlace` | Join for container |
|
||||
| `cart/bp/cart/services/page_cart.py` | `events.models.calendars.CalendarEntry, Calendar` | Query page entries |
|
||||
| `cart/bp/cart/services/page_cart.py` | `blog.models.ghost_content.Post` | Batch-load posts |
|
||||
| `cart/bp/cart/services/get_cart.py` | `market.models.market.CartItem` | Query cart items |
|
||||
| `cart/bp/cart/services/calendar_cart.py` | `events.models.calendars.CalendarEntry` | Query pending entries |
|
||||
| `cart/bp/cart/services/clear_cart_for_order.py` | `market.models.market.CartItem` | Soft-delete items |
|
||||
| `cart/bp/cart/services/clear_cart_for_order.py` | `market.models.market_place.MarketPlace` | Filter by page |
|
||||
| `cart/bp/orders/routes.py` | `market.models.market.Product` | Join for search |
|
||||
| `cart/bp/order/routes.py` | `market.models.market.Product` | Load product details |
|
||||
| `cart/app.py` | `blog.models.ghost_content.Post` | Page slug hydration |
|
||||
|
||||
### Blog app imports (8 files, 3 foreign models):
|
||||
| File | Import | Usage |
|
||||
|------|--------|-------|
|
||||
| `blog/bp/post/admin/routes.py` | `cart.models.page_config.PageConfig` (3 places) | Load/update page config |
|
||||
| `blog/bp/post/admin/routes.py` | `events.models.calendars.Calendar` (3 places) | Query calendars |
|
||||
| `blog/bp/post/admin/routes.py` | `market.models.market_place.MarketPlace` (3 places) | Query/create/delete markets |
|
||||
| `blog/bp/post/services/markets.py` | `market.models.market_place.MarketPlace` | Create/delete markets |
|
||||
| `blog/bp/post/services/markets.py` | `cart.models.page_config.PageConfig` | Check feature flag |
|
||||
| `blog/bp/post/services/entry_associations.py` | `events.models.calendars.CalendarEntry, CalendarEntryPost, Calendar` | Post-entry associations |
|
||||
| `blog/bp/post/routes.py` | `events.models.calendars.Calendar` | Page context |
|
||||
| `blog/bp/post/routes.py` | `market.models.market_place.MarketPlace` | Page context |
|
||||
| `blog/bp/blog/ghost_db.py` | `cart.models.page_config.PageConfig` | Query page configs |
|
||||
| `blog/bp/blog/ghost/ghost_sync.py` | `cart.models.page_config.PageConfig` | Sync page config |
|
||||
| `blog/bp/blog/services/posts_data.py` | `events.models.calendars.CalendarEntry, CalendarEntryPost` | Fetch associated entries |
|
||||
|
||||
### Events app imports (5 files, 3 foreign models):
|
||||
| File | Import | Usage |
|
||||
|------|--------|-------|
|
||||
| `events/app.py` | `blog.models.ghost_content.Post` | Page slug hydration |
|
||||
| `events/app.py` | `market.models.market_place.MarketPlace` | Context processor |
|
||||
| `events/bp/markets/services/markets.py` | `market.models.market_place.MarketPlace` | Create/delete markets |
|
||||
| `events/bp/markets/services/markets.py` | `blog.models.ghost_content.Post` | Validate post exists |
|
||||
| `events/bp/markets/routes.py` | `market.models.market_place.MarketPlace` | Query/delete markets |
|
||||
| `events/bp/calendars/services/calendars.py` | `blog.models.ghost_content.Post` | Validate post exists |
|
||||
| `events/bp/calendar_entry/services/post_associations.py` | `blog.models.ghost_content.Post` | Manage post-entry assocs |
|
||||
| `events/bp/payments/routes.py` | `cart.models.page_config.PageConfig` | Load/update SumUp config |
|
||||
|
||||
### Market app imports (1 file):
|
||||
| File | Import | Usage |
|
||||
|------|--------|-------|
|
||||
| `market/app.py` | `blog.models.ghost_content.Post` | Page slug hydration |
|
||||
|
||||
### Glue layer imports (2 files):
|
||||
| File | Import | Usage |
|
||||
|------|--------|-------|
|
||||
| `glue/services/cart_adoption.py` | `market.models.market.CartItem` | Adopt cart items |
|
||||
| `glue/services/cart_adoption.py` | `events.models.calendars.CalendarEntry` | Adopt entries |
|
||||
| `glue/services/order_lifecycle.py` | `events.models.calendars.CalendarEntry, Calendar` | Claim/confirm entries |
|
||||
|
||||
---
|
||||
|
||||
## Design Decisions
|
||||
|
||||
1. **Glue services return ORM objects** (not dicts) when the model is standalone — PageConfig, MarketPlace, Calendar, CalendarEntry. This avoids template changes and keeps SQLAlchemy lazy-load working.
|
||||
|
||||
2. **Glue services for Post return dicts** — other apps only need `{id, slug, title, is_page, feature_image}`. Returning the full ORM object would couple them to the blog schema.
|
||||
|
||||
3. **CartItem stays in `market/models/market.py`** — it has FKs to `products.id`, `market_places.id`, and `users.id`, plus relationships to `Product`, `MarketPlace`, and `User`. Moving it to cart/ would just reverse the cross-app import direction. Instead, cart reads CartItem through glue.
|
||||
|
||||
4. **OrderItem.product relationship uses string forward-ref** — already works via SQLAlchemy string resolution as long as Product is registered in the mapper. Glue setup handles this.
|
||||
|
||||
5. **Glue services are allowed to import from any app's models** — that's the glue layer's job. Apps call glue; glue touches models.
|
||||
|
||||
6. **blog/bp/post/services/markets.py and entry_associations.py move to glue** — these are pure cross-domain CRUD (blog writes to MarketPlace, blog reads CalendarEntry). They belong in glue.
|
||||
|
||||
---
|
||||
|
||||
## Step 1: Glue service for pages (Post access)
|
||||
|
||||
New file: `glue/services/pages.py`
|
||||
|
||||
Provides dict-based Post access for non-blog apps:
|
||||
|
||||
```python
|
||||
async def get_page_by_slug(session, slug) -> dict | None:
|
||||
"""Return {id, slug, title, is_page, feature_image, ...} or None."""
|
||||
|
||||
async def get_page_by_id(session, post_id) -> dict | None:
|
||||
"""Return page dict by id."""
|
||||
|
||||
async def get_pages_by_ids(session, post_ids) -> dict[int, dict]:
|
||||
"""Batch-load pages. Returns {id: page_dict}."""
|
||||
|
||||
async def page_exists(session, post_id) -> bool:
|
||||
"""Check if post exists (for validation before creating calendars/markets)."""
|
||||
|
||||
async def is_page(session, post_id) -> bool:
|
||||
"""Check if post exists and is_page=True."""
|
||||
|
||||
async def search_posts(session, query, page=1, per_page=10) -> tuple[list[dict], int]:
|
||||
"""Search posts by title (for events post_associations)."""
|
||||
```
|
||||
|
||||
All functions import `from blog.models.ghost_content import Post` internally.
|
||||
|
||||
**Files changed:**
|
||||
- `market/app.py` — replace `from blog.models.ghost_content import Post` with `from glue.services.pages import get_page_by_slug`
|
||||
- `events/app.py` — same
|
||||
- `cart/app.py` — same
|
||||
- `cart/bp/cart/api.py` — replace Post import with `from glue.services.pages import get_page_by_slug`
|
||||
- `cart/bp/cart/services/page_cart.py` — replace Post import with `from glue.services.pages import get_pages_by_ids`
|
||||
- `events/bp/calendars/services/calendars.py` — replace `from blog.models.ghost_content import Post` with `from glue.services.pages import page_exists, is_page`
|
||||
- `events/bp/markets/services/markets.py` — replace `from blog.models.ghost_content import Post` with `from glue.services.pages import page_exists, is_page`
|
||||
|
||||
---
|
||||
|
||||
## Step 2: Glue service for page config
|
||||
|
||||
New file: `glue/services/page_config.py`
|
||||
|
||||
```python
|
||||
async def get_page_config(session, post_id) -> PageConfig | None:
|
||||
"""Load PageConfig for a page."""
|
||||
|
||||
async def get_or_create_page_config(session, post_id) -> PageConfig:
|
||||
"""Load or create PageConfig. Emits container.child_attached if created."""
|
||||
|
||||
async def get_page_configs_by_ids(session, post_ids) -> dict[int, PageConfig]:
|
||||
"""Batch-load PageConfigs by container_id."""
|
||||
```
|
||||
|
||||
Imports `from cart.models.page_config import PageConfig` internally.
|
||||
|
||||
**Files changed:**
|
||||
- `blog/bp/post/admin/routes.py` — replace `from cart.models.page_config import PageConfig` with glue service calls
|
||||
- `blog/bp/post/services/markets.py` — replace PageConfig import
|
||||
- `blog/bp/blog/ghost_db.py` — replace PageConfig import
|
||||
- `blog/bp/blog/ghost/ghost_sync.py` — replace PageConfig import
|
||||
- `events/bp/payments/routes.py` — replace PageConfig import
|
||||
- `cart/bp/cart/services/checkout.py` — no change: `from models.page_config import PageConfig` stays as-is (same app)
|
||||
|
||||
---
|
||||
|
||||
## Step 3: Glue service for calendars (events access from blog)
|
||||
|
||||
New file: `glue/services/calendars.py`
|
||||
|
||||
```python
|
||||
async def get_calendars_for_page(session, post_id) -> list[Calendar]:
|
||||
"""Return active calendars for a page."""
|
||||
|
||||
async def get_calendar_entries_for_posts(session, post_ids) -> dict[int, list]:
|
||||
"""Fetch confirmed CalendarEntries associated with posts (via CalendarEntryPost).
|
||||
Returns {post_id: [entry, ...]}."""
|
||||
```
|
||||
|
||||
Move and adapt from `blog/bp/post/services/entry_associations.py`:
|
||||
|
||||
```python
|
||||
async def toggle_entry_association(session, post_id, entry_id) -> tuple[bool, str | None]:
|
||||
async def get_post_entry_ids(session, post_id) -> set[int]:
|
||||
async def get_associated_entries(session, post_id, page=1, per_page=10) -> dict:
|
||||
```
|
||||
|
||||
These functions import from `events.models.calendars` internally.
|
||||
|
||||
**Files changed:**
|
||||
- `blog/bp/post/routes.py` — replace `from events.models.calendars import Calendar` + `from market.models.market_place import MarketPlace` with glue service calls
|
||||
- `blog/bp/post/admin/routes.py` — replace Calendar imports with glue service calls
|
||||
- `blog/bp/post/services/entry_associations.py` — **delete file**, moved to glue
|
||||
- `blog/bp/blog/services/posts_data.py` — replace `from events.models.calendars import CalendarEntry, CalendarEntryPost` with glue service call
|
||||
|
||||
---
|
||||
|
||||
## Step 4: Glue service for marketplaces
|
||||
|
||||
New file: `glue/services/marketplaces.py`
|
||||
|
||||
```python
|
||||
async def get_marketplaces_for_page(session, post_id) -> list[MarketPlace]:
|
||||
"""Return active marketplaces for a page."""
|
||||
|
||||
async def create_marketplace(session, post_id, name) -> MarketPlace:
|
||||
"""Create marketplace (validates page exists via pages service)."""
|
||||
|
||||
async def soft_delete_marketplace(session, post_slug, market_slug) -> bool:
|
||||
"""Soft-delete a marketplace."""
|
||||
```
|
||||
|
||||
Move the logic from `blog/bp/post/services/markets.py` and `events/bp/markets/services/markets.py` (they're nearly identical).
|
||||
|
||||
**Files changed:**
|
||||
- `blog/bp/post/services/markets.py` — **delete file**, moved to glue
|
||||
- `blog/bp/post/admin/routes.py` — replace MarketPlace imports + service calls with glue
|
||||
- `blog/bp/post/routes.py` — replace MarketPlace import with glue service
|
||||
- `events/bp/markets/services/markets.py` — **delete file**, moved to glue
|
||||
- `events/bp/markets/routes.py` — replace MarketPlace import, use glue
|
||||
- `events/app.py` — replace MarketPlace import with glue service
|
||||
|
||||
---
|
||||
|
||||
## Step 5: Glue service for cart items (market model access from cart)
|
||||
|
||||
New file: `glue/services/cart_items.py`
|
||||
|
||||
```python
|
||||
async def get_cart_items(session, user_id=None, session_id=None, *, page_post_id=None) -> list[CartItem]:
|
||||
"""Get cart items for identity, optionally scoped to page."""
|
||||
|
||||
async def find_or_create_cart_item(session, product_id, user_id, session_id) -> CartItem | None:
|
||||
"""Find existing or create new cart item. Returns None if product missing."""
|
||||
|
||||
async def clear_cart_for_order(session, order, *, page_post_id=None) -> None:
|
||||
"""Soft-delete cart items for order identity."""
|
||||
|
||||
async def get_calendar_cart_entries(session, user_id=None, session_id=None, *, page_post_id=None) -> list[CalendarEntry]:
|
||||
"""Get pending calendar entries for identity, optionally scoped to page."""
|
||||
```
|
||||
|
||||
Imports `CartItem`, `Product`, `MarketPlace` from market, `CalendarEntry`, `Calendar` from events internally.
|
||||
|
||||
**Files changed:**
|
||||
- `cart/bp/cart/services/get_cart.py` — replace CartItem import with glue call
|
||||
- `cart/bp/cart/services/calendar_cart.py` — replace CalendarEntry import with glue call
|
||||
- `cart/bp/cart/services/clear_cart_for_order.py` — replace CartItem/MarketPlace imports with glue call
|
||||
- `cart/bp/cart/services/checkout.py` — replace CartItem/Product/MarketPlace/CalendarEntry/Calendar imports with glue calls
|
||||
- `cart/bp/cart/api.py` — replace CartItem/MarketPlace/CalendarEntry/Calendar imports with glue calls
|
||||
- `cart/bp/cart/services/page_cart.py` — replace CartItem/MarketPlace/CalendarEntry/Calendar imports with glue calls
|
||||
|
||||
---
|
||||
|
||||
## Step 6: Glue service for products (market access from cart orders)
|
||||
|
||||
New file: `glue/services/products.py`
|
||||
|
||||
```python
|
||||
async def get_product(session, product_id) -> Product | None:
|
||||
"""Get product by ID."""
|
||||
```
|
||||
|
||||
This is minimal — only needed by `cart/bp/order/routes.py` and `cart/bp/orders/routes.py` for search/display. However, `OrderItem.product` relationship already resolves via string forward-ref. We only need Product for the join-based search in orders listing.
|
||||
|
||||
**Files changed:**
|
||||
- `cart/bp/orders/routes.py` — replace `from market.models.market import Product` with glue import or use `OrderItem.product` relationship
|
||||
- `cart/bp/order/routes.py` — replace `from market.models.market import Product` (already uses OrderItem.product relationship for display)
|
||||
|
||||
---
|
||||
|
||||
## Step 7: Glue service for post associations (events-side)
|
||||
|
||||
Move `events/bp/calendar_entry/services/post_associations.py` into glue:
|
||||
|
||||
New additions to `glue/services/pages.py` (or separate file `glue/services/post_associations.py`):
|
||||
|
||||
```python
|
||||
async def add_post_to_entry(session, entry_id, post_id) -> tuple[bool, str | None]:
|
||||
async def remove_post_from_entry(session, entry_id, post_id) -> tuple[bool, str | None]:
|
||||
async def get_entry_posts(session, entry_id) -> list[dict]:
|
||||
async def search_posts_for_entry(session, query, page=1, per_page=10) -> tuple[list[dict], int]:
|
||||
```
|
||||
|
||||
**Files changed:**
|
||||
- `events/bp/calendar_entry/services/post_associations.py` — **delete file**, moved to glue
|
||||
- Update any routes in events that call this service to use glue instead
|
||||
|
||||
---
|
||||
|
||||
## Step 8: Update glue model registration
|
||||
|
||||
`glue/setup.py` needs to ensure all models from all apps are registered in SQLAlchemy's mapper when starting any app. This is because string-based relationship references (like `OrderItem.product → "Product"`) need the target model class registered.
|
||||
|
||||
```python
|
||||
def register_models():
|
||||
"""Import all model modules to register them with SQLAlchemy mapper."""
|
||||
# These are already imported by each app, but ensure completeness:
|
||||
try:
|
||||
import blog.models.ghost_content # noqa
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
import market.models.market # noqa
|
||||
import market.models.market_place # noqa
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
import cart.models.order # noqa
|
||||
import cart.models.page_config # noqa
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
import events.models.calendars # noqa
|
||||
except ImportError:
|
||||
pass
|
||||
```
|
||||
|
||||
Each app's `app.py` calls `register_models()` at startup. The try/except guards handle Docker deployments where only one app's code is present. Note, however, that any glue service which touches another app's models still requires those model files to be importable — see the Docker Consideration section below for how to satisfy that.
|
||||
|
||||
**Note:** In Docker, each container only has its own app + shared + glue. For glue services that import from other apps' models, those models must be available. This means either:
|
||||
- (a) Include all model files in each container (symlinks or copies), or
|
||||
- (b) Have glue services that import other apps' models use try/except at import time
|
||||
|
||||
Since all apps already share one DB and all model files are available in development, option (a) is cleaner for production. Alternatively, the current Docker setup could be extended to include cross-app model files in each image.
|
||||
|
||||
---
|
||||
|
||||
## Step 9: Update existing glue services
|
||||
|
||||
**`glue/services/cart_adoption.py`** — already imports from market and events (correct — this is glue's job). No change needed.
|
||||
|
||||
**`glue/services/order_lifecycle.py`** — already imports from events. No change needed.
|
||||
|
||||
---
|
||||
|
||||
## Step 10: Clean up dead imports and update app.py files
|
||||
|
||||
After all glue services are wired:
|
||||
|
||||
- `cart/app.py` — remove `from blog.models.ghost_content import Post`, use `from glue.services.pages import get_page_by_slug`
|
||||
- `market/app.py` — remove `from blog.models.ghost_content import Post`, use `from glue.services.pages import get_page_by_slug`
|
||||
- `events/app.py` — remove `from blog.models.ghost_content import Post` and `from market.models.market_place import MarketPlace`
|
||||
- Remove any now-empty cross-app model directories if they exist
|
||||
|
||||
---
|
||||
|
||||
## Files Summary
|
||||
|
||||
| Repo | File | Change |
|
||||
|------|------|--------|
|
||||
| **glue** | `services/pages.py` | **NEW** — Post access (slug, id, exists, search) |
|
||||
| **glue** | `services/page_config.py` | **NEW** — PageConfig CRUD |
|
||||
| **glue** | `services/calendars.py` | **NEW** — Calendar queries + entry associations (from blog) |
|
||||
| **glue** | `services/marketplaces.py` | **NEW** — MarketPlace CRUD (from blog+events) |
|
||||
| **glue** | `services/cart_items.py` | **NEW** — CartItem/CalendarEntry queries for cart |
|
||||
| **glue** | `services/products.py` | **NEW** — Product access for cart orders |
|
||||
| **glue** | `services/post_associations.py` | **NEW** — Post-CalendarEntry associations (from events) |
|
||||
| **glue** | `setup.py` | Add `register_models()` |
|
||||
| **cart** | `app.py` | Replace Post import with glue |
|
||||
| **cart** | `bp/cart/api.py` | Replace all 4 cross-app imports with glue |
|
||||
| **cart** | `bp/cart/services/checkout.py` | Replace cross-app imports with glue |
|
||||
| **cart** | `bp/cart/services/page_cart.py` | Replace all cross-app imports with glue |
|
||||
| **cart** | `bp/cart/services/get_cart.py` | Replace CartItem import with glue |
|
||||
| **cart** | `bp/cart/services/calendar_cart.py` | Replace CalendarEntry import with glue |
|
||||
| **cart** | `bp/cart/services/clear_cart_for_order.py` | Replace CartItem/MarketPlace with glue |
|
||||
| **cart** | `bp/orders/routes.py` | Replace Product import with glue |
|
||||
| **cart** | `bp/order/routes.py` | Replace Product import with glue |
|
||||
| **blog** | `bp/post/admin/routes.py` | Replace PageConfig/Calendar/MarketPlace with glue |
|
||||
| **blog** | `bp/post/routes.py` | Replace Calendar/MarketPlace with glue |
|
||||
| **blog** | `bp/post/services/entry_associations.py` | **DELETE** — moved to `glue/services/calendars.py` |
|
||||
| **blog** | `bp/post/services/markets.py` | **DELETE** — moved to `glue/services/marketplaces.py` |
|
||||
| **blog** | `bp/blog/ghost_db.py` | Replace PageConfig import with glue |
|
||||
| **blog** | `bp/blog/ghost/ghost_sync.py` | Replace PageConfig import with glue |
|
||||
| **blog** | `bp/blog/services/posts_data.py` | Replace CalendarEntry/CalendarEntryPost with glue |
|
||||
| **events** | `app.py` | Replace Post + MarketPlace imports with glue |
|
||||
| **events** | `bp/markets/services/markets.py` | **DELETE** — moved to `glue/services/marketplaces.py` |
|
||||
| **events** | `bp/markets/routes.py` | Replace MarketPlace import, use glue |
|
||||
| **events** | `bp/calendars/services/calendars.py` | Replace Post import with glue |
|
||||
| **events** | `bp/calendar_entry/services/post_associations.py` | **DELETE** — moved to `glue/services/post_associations.py` |
|
||||
| **events** | `bp/payments/routes.py` | Replace PageConfig import with glue |
|
||||
| **market** | `app.py` | Replace Post import with glue |
|
||||
|
||||
---
|
||||
|
||||
## Implementation Order
|
||||
|
||||
1. **Step 1** (pages.py) — unlocks Steps 2-4 which depend on page validation
|
||||
2. **Step 2** (page_config.py) — independent after Step 1
|
||||
3. **Steps 3-4** (calendars.py, marketplaces.py) — can be done in parallel, both use pages.py
|
||||
4. **Step 5** (cart_items.py) — depends on steps 1, 3 for calendar queries
|
||||
5. **Step 6** (products.py) — independent
|
||||
6. **Step 7** (post_associations.py) — independent, uses pages.py
|
||||
7. **Steps 8-10** (registration, cleanup) — after all services exist
|
||||
|
||||
---
|
||||
|
||||
## What's NOT changing
|
||||
|
||||
- **CartItem stays in `market/models/market.py`** — moving it creates equal or worse coupling
|
||||
- **OrderItem stays in `cart/models/order.py`** with `product_id` FK — pragmatic exception
|
||||
- **OrderItem.product_id FK** — kept, denormalized `product_title` makes it non-critical
|
||||
- **CartItem.product_id FK** — kept, same DB
|
||||
- **CartItem.market_place_id FK** — kept, same DB
|
||||
- **CartItem.user_id FK** — kept, shared model
|
||||
- **Internal HTTP APIs** (cart/summary, coop/*, events/*) — not changing
|
||||
- **`shared/` models** (User, MagicLink, etc.) — shared across all apps by design
|
||||
|
||||
---
|
||||
|
||||
## Docker Consideration
|
||||
|
||||
For glue services to work in Docker (single app per container), model files from other apps must be importable. Options:
|
||||
1. **Copy model files** into each Docker image during build (just the `models/` dirs)
|
||||
2. **Use try/except** in glue services at import time (degrade gracefully)
|
||||
3. **Mount shared volume** with all model files
|
||||
|
||||
Recommend option 2 for now — each glue service defers its cross-app model imports into the function body, so a service that can't import a model raises ImportError only when it is actually called, which only happens if the service is called from the wrong app (shouldn't happen in practice).
|
||||
|
||||
---
|
||||
|
||||
## Verification
|
||||
|
||||
1. `grep -r "from blog\.models" cart/ market/ events/ glue/` — should return zero results (only in blog/ itself)
|
||||
2. `grep -r "from market\.models" blog/ cart/ events/` — should return zero results (only in market/ and glue/)
|
||||
3. `grep -r "from cart\.models" blog/ market/ events/` — should return zero results (only in cart/ and glue/)
|
||||
4. `grep -r "from events\.models" blog/ cart/ market/` — should return zero results (only in events/ and glue/)
|
||||
5. All 4 apps start without import errors
|
||||
6. Checkout flow works end-to-end
|
||||
7. Blog admin: can toggle features, create/delete markets, manage calendar entries
|
||||
8. Events admin: can create calendars, manage markets, configure payments
|
||||
9. Market app: markets listing page loads correctly
|
||||
325
.claude/plans/hazy-sniffing-sphinx.md
Normal file
325
.claude/plans/hazy-sniffing-sphinx.md
Normal file
@@ -0,0 +1,325 @@
|
||||
# Split Cart into Microservices
|
||||
|
||||
## Context
|
||||
The cart app currently owns too much: CartItem, Order/OrderItem, PageConfig, ContainerRelation, plus all checkout/payment logic. We're splitting it into 4 pieces:
|
||||
|
||||
1. **Relations service** — internal only, owns ContainerRelation
|
||||
2. **Likes service** — internal only, unified generic likes replacing ProductLike + PostLike
|
||||
3. **PageConfig → blog** — move to blog (which already owns pages)
|
||||
4. **Orders service** — public (orders.rose-ash.com), owns Order/OrderItem + SumUp checkout
|
||||
|
||||
After the split, cart becomes a thin CartItem CRUD + inbox service.
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Relations Service (internal only)
|
||||
|
||||
### 1.1 Scaffold `relations/`
|
||||
Create minimal internal-only app (no templates, no context_fn):
|
||||
|
||||
| File | Notes |
|
||||
|------|-------|
|
||||
| `relations/__init__.py` | Empty |
|
||||
| `relations/path_setup.py` | Copy from cart |
|
||||
| `relations/app.py` | `create_base_app("relations")`, register data + actions BPs only |
|
||||
| `relations/services/__init__.py` | Empty `register_domain_services()` |
|
||||
| `relations/models/__init__.py` | `from shared.models.container_relation import ContainerRelation` |
|
||||
| `relations/bp/__init__.py` | Export `register_data`, `register_actions` |
|
||||
| `relations/bp/data/routes.py` | Move `get-children` handler from `cart/bp/data/routes.py:175-198` |
|
||||
| `relations/bp/actions/routes.py` | Move `attach-child` + `detach-child` from `cart/bp/actions/routes.py:112-153` |
|
||||
| `relations/alembic.ini` | Copy from cart, adjust path |
|
||||
| `relations/alembic/env.py` | MODELS=`["shared.models.container_relation"]`, TABLES=`{"container_relations"}` |
|
||||
| `relations/alembic/versions/0001_initial.py` | Create `container_relations` table |
|
||||
| `relations/Dockerfile` | Follow cart pattern, `COPY relations/ ./` |
|
||||
| `relations/entrypoint.sh` | Standard pattern, db=`db_relations` |
|
||||
|
||||
### 1.2 Retarget callers (`"cart"` → `"relations"`)
|
||||
|
||||
| File | Lines | Change |
|
||||
|------|-------|--------|
|
||||
| `events/bp/calendars/services/calendars.py` | 74, 111, 121 | `call_action("cart", ...)` → `call_action("relations", ...)` |
|
||||
| `blog/bp/menu_items/services/menu_items.py` | 83, 137, 141, 157 | Same |
|
||||
| `shared/services/market_impl.py` | 96, 109, 133 | Same |
|
||||
|
||||
### 1.3 Clean up cart
|
||||
- Remove `get-children` from `cart/bp/data/routes.py:175-198`
|
||||
- Remove `attach-child`, `detach-child` from `cart/bp/actions/routes.py:112-153`
|
||||
- Remove `"shared.models.container_relation"` and `"container_relations"` from `cart/alembic/env.py`
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: Likes Service (internal only)
|
||||
|
||||
### 2.1 New unified model
|
||||
Single `likes` table in `db_likes`:
|
||||
|
||||
```python
|
||||
class Like(Base):
|
||||
__tablename__ = "likes"
|
||||
id: Mapped[int] (pk)
|
||||
user_id: Mapped[int] (not null, indexed)
|
||||
target_type: Mapped[str] (String 32, not null) # "product" or "post"
|
||||
target_slug: Mapped[str | None] (String 255) # for products
|
||||
target_id: Mapped[int | None] (Integer) # for posts
|
||||
created_at, updated_at, deleted_at
|
||||
|
||||
UniqueConstraint("user_id", "target_type", "target_slug")
|
||||
UniqueConstraint("user_id", "target_type", "target_id")
|
||||
Index("ix_likes_target", "target_type", "target_slug")
|
||||
```
|
||||
|
||||
Products use `target_type="product"`, `target_slug=slug`. Posts use `target_type="post"`, `target_id=post.id`.
|
||||
|
||||
### 2.2 Scaffold `likes/`
|
||||
|
||||
| File | Notes |
|
||||
|------|-------|
|
||||
| `likes/__init__.py` | Empty |
|
||||
| `likes/path_setup.py` | Standard |
|
||||
| `likes/app.py` | Internal-only, `create_base_app("likes")`, data + actions BPs |
|
||||
| `likes/services/__init__.py` | Empty `register_domain_services()` |
|
||||
| `likes/models/__init__.py` | Import Like |
|
||||
| `likes/models/like.py` | Generic Like model (above) |
|
||||
| `likes/bp/__init__.py` | Export register functions |
|
||||
| `likes/bp/data/routes.py` | `is-liked`, `liked-slugs`, `liked-ids` |
|
||||
| `likes/bp/actions/routes.py` | `toggle` action |
|
||||
| `likes/alembic.ini` | Standard |
|
||||
| `likes/alembic/env.py` | MODELS=`["likes.models.like"]`, TABLES=`{"likes"}` |
|
||||
| `likes/alembic/versions/0001_initial.py` | Create `likes` table |
|
||||
| `likes/Dockerfile` | Standard pattern |
|
||||
| `likes/entrypoint.sh` | Standard, db=`db_likes` |
|
||||
|
||||
### 2.3 Data endpoints (`likes/bp/data/routes.py`)
|
||||
- `is-liked`: params `user_id, target_type, target_slug/target_id` → `{"liked": bool}`
|
||||
- `liked-slugs`: params `user_id, target_type` → `["slug1", "slug2"]`
|
||||
- `liked-ids`: params `user_id, target_type` → `[1, 2, 3]`
|
||||
|
||||
### 2.4 Action endpoints (`likes/bp/actions/routes.py`)
|
||||
- `toggle`: payload `{user_id, target_type, target_slug?, target_id?}` → `{"liked": bool}`
|
||||
|
||||
### 2.5 Retarget market app
|
||||
|
||||
**`market/bp/product/routes.py`** (like_toggle, ~line 119):
|
||||
Replace `toggle_product_like(g.s, user_id, product_slug)` with:
|
||||
```python
|
||||
result = await call_action("likes", "toggle", payload={
|
||||
"user_id": user_id, "target_type": "product", "target_slug": product_slug
|
||||
})
|
||||
liked = result["liked"]
|
||||
```
|
||||
|
||||
**`market/bp/browse/services/db_backend.py`** (most complex):
|
||||
- `db_product_full` / `db_product_full_id`: Replace `ProductLike` subquery with `fetch_data("likes", "is-liked", ...)`. Annotate `is_liked` after query.
|
||||
- `db_products_nocounts` / `db_products_counts`: Fetch `liked_slugs` once via `fetch_data("likes", "liked-slugs", ...)`, filter `Product.slug.in_(liked_slugs)` for `?liked=true`, annotate `is_liked` post-query.
|
||||
|
||||
**Delete**: `toggle_product_like` from `market/bp/product/services/product_operations.py`
|
||||
|
||||
### 2.6 Retarget blog app
|
||||
|
||||
**`blog/bp/post/routes.py`** (like_toggle):
|
||||
Replace `toggle_post_like(g.s, user_id, post_id)` with `call_action("likes", "toggle", payload={...})`.
|
||||
|
||||
**Delete**: `toggle_post_like` from `blog/bp/post/services/post_operations.py`
|
||||
|
||||
### 2.7 Remove old like models
|
||||
- Remove `ProductLike` from `shared/models/market.py` (lines 118-131) + `Product.likes` relationship (lines 110-114)
|
||||
- Remove `PostLike` from `shared/models/ghost_content.py` + `Post.likes` relationship
|
||||
- Remove `product_likes` from market alembic TABLES
|
||||
- Remove `post_likes` from blog alembic TABLES
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: PageConfig → Blog
|
||||
|
||||
### 3.1 Replace blog proxy endpoints with direct DB queries
|
||||
|
||||
**`blog/bp/data/routes.py`** (lines 77-102): Replace the 3 proxy handlers that currently call `fetch_data("cart", ...)` with direct DB queries. Copy logic from `cart/bp/data/routes.py`:
|
||||
- `page-config` (cart lines 114-134)
|
||||
- `page-config-by-id` (cart lines 136-149)
|
||||
- `page-configs-batch` (cart lines 151-172)
|
||||
- `page-config-ensure` (cart lines 49-81) — add new
|
||||
|
||||
Also add the `_page_config_dict` helper (cart lines 203-213).
|
||||
|
||||
### 3.2 Move action to blog
|
||||
|
||||
**`blog/bp/actions/routes.py`** (~line 40): Replace `call_action("cart", "update-page-config", ...)` proxy with direct handler. Copy logic from `cart/bp/actions/routes.py:51-110`.
|
||||
|
||||
### 3.3 Blog callers become local
|
||||
|
||||
| File | Current | After |
|
||||
|------|---------|-------|
|
||||
| `blog/bp/post/admin/routes.py:34` | `fetch_data("cart", "page-config", ...)` | Direct DB query (blog now owns table) |
|
||||
| `blog/bp/post/admin/routes.py:87,132` | `call_action("cart", "update-page-config", ...)` | Direct call to local handler |
|
||||
| `blog/bp/post/services/markets.py:44` | `fetch_data("cart", "page-config", ...)` | Direct DB query |
|
||||
| `blog/bp/blog/ghost_db.py:295` | `fetch_data("cart", "page-configs-batch", ...)` | Direct DB query |
|
||||
|
||||
### 3.4 Retarget cross-service callers (`"cart"` → `"blog"`)
|
||||
|
||||
| File | Change |
|
||||
|------|--------|
|
||||
| `cart/bp/cart/services/page_cart.py:181` | `fetch_data("cart", "page-configs-batch", ...)` → `fetch_data("blog", "page-configs-batch", ...)` |
|
||||
| `cart/bp/cart/global_routes.py:274` | `fetch_data("cart", "page-config-by-id", ...)` → `fetch_data("blog", "page-config-by-id", ...)` |
|
||||
|
||||
(Note: `checkout.py:117` and `cart/app.py:177` already target `"blog"`)
|
||||
|
||||
### 3.5 Update blog alembic
|
||||
**`blog/alembic/env.py`**: Add `"shared.models.page_config"` to MODELS and `"page_configs"` to TABLES.
|
||||
|
||||
### 3.6 Clean up cart
|
||||
- Remove all `page-config*` handlers from `cart/bp/data/routes.py` (lines 49-172)
|
||||
- Remove `update-page-config` from `cart/bp/actions/routes.py` (lines 51-110)
|
||||
- Remove `"shared.models.page_config"` and `"page_configs"` from `cart/alembic/env.py`
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: Orders Service (public, orders.rose-ash.com)
|
||||
|
||||
### 4.1 Scaffold `orders/`
|
||||
|
||||
| File | Notes |
|
||||
|------|-------|
|
||||
| `orders/__init__.py` | Empty |
|
||||
| `orders/path_setup.py` | Standard |
|
||||
| `orders/app.py` | Public app with `context_fn`, templates, fragments, page slug hydration |
|
||||
| `orders/services/__init__.py` | `register_domain_services()` |
|
||||
| `orders/models/__init__.py` | `from shared.models.order import Order, OrderItem` |
|
||||
| `orders/bp/__init__.py` | Export all BPs |
|
||||
| `orders/bp/order/` | Move from `cart/bp/order/` (single order: detail, pay, recheck) |
|
||||
| `orders/bp/orders/` | Move from `cart/bp/orders/` (order list + pagination) |
|
||||
| `orders/bp/checkout/routes.py` | Webhook + return routes from `cart/bp/cart/global_routes.py` |
|
||||
| `orders/bp/data/routes.py` | Minimal |
|
||||
| `orders/bp/actions/routes.py` | `create-order` action (called by cart during checkout) |
|
||||
| `orders/bp/fragments/routes.py` | `account-nav-item` fragment (orders link) |
|
||||
| `orders/templates/` | Move `_types/order/`, `_types/orders/`, checkout templates from cart |
|
||||
| `orders/alembic.ini` | Standard |
|
||||
| `orders/alembic/env.py` | MODELS=`["shared.models.order"]`, TABLES=`{"orders", "order_items"}` |
|
||||
| `orders/alembic/versions/0001_initial.py` | Create `orders` + `order_items` tables |
|
||||
| `orders/Dockerfile` | Standard, public-facing |
|
||||
| `orders/entrypoint.sh` | Standard, db=`db_orders` |
|
||||
|
||||
### 4.2 Move checkout services to orders
|
||||
|
||||
**Move to `orders/services/`:**
|
||||
- `checkout.py` — from `cart/bp/cart/services/checkout.py` (move: `create_order_from_cart`, `resolve_page_config`, `build_sumup_*`, `get_order_with_details`. Keep `find_or_create_cart_item` in cart.)
|
||||
- `check_sumup_status.py` — from `cart/bp/cart/services/check_sumup_status.py`
|
||||
|
||||
**`clear_cart_for_order`** stays in cart as new action:
|
||||
- Add `clear-cart-for-order` to `cart/bp/actions/routes.py`
|
||||
- Orders calls `call_action("cart", "clear-cart-for-order", payload={user_id, session_id, page_post_id})`
|
||||
|
||||
### 4.3 `create-order` action endpoint (`orders/bp/actions/routes.py`)
|
||||
Cart's `POST /checkout/` calls this:
|
||||
```
|
||||
Payload: {cart_items: [{product_id, product_title, product_slug, product_image,
|
||||
product_special_price, product_regular_price, product_price_currency,
|
||||
quantity, market_place_container_id}],
|
||||
calendar_entries, tickets, user_id, session_id,
|
||||
product_total, calendar_total, ticket_total,
|
||||
page_post_id, redirect_url, webhook_base_url}
|
||||
Returns: {order_id, sumup_hosted_url, page_config_id, sumup_reference, description}
|
||||
```
|
||||
|
||||
### 4.4 Refactor cart's checkout route
|
||||
`cart/bp/cart/global_routes.py` `POST /checkout/`:
|
||||
1. Load local cart data (get_cart, calendar entries, tickets, totals)
|
||||
2. Serialize cart items to dicts
|
||||
3. `result = await call_action("orders", "create-order", payload={...})`
|
||||
4. Redirect to `result["sumup_hosted_url"]`
|
||||
|
||||
Same for page-scoped checkout in `cart/bp/cart/page_routes.py`.
|
||||
|
||||
### 4.5 Move webhook + return routes to orders
|
||||
- `POST /checkout/webhook/<order_id>/` → `orders/bp/checkout/routes.py`
|
||||
- `GET /checkout/return/<order_id>/` → `orders/bp/checkout/routes.py`
|
||||
- SumUp redirect/webhook URLs must now point to orders.rose-ash.com
|
||||
|
||||
### 4.6 Move order list/detail routes
|
||||
- `cart/bp/order/` → `orders/bp/order/`
|
||||
- `cart/bp/orders/` → `orders/bp/orders/`
|
||||
|
||||
### 4.7 Move startup reconciliation
|
||||
`_reconcile_pending_orders` from `cart/app.py:209-265` → `orders/app.py`
|
||||
|
||||
### 4.8 Clean up cart
|
||||
- Remove `cart/bp/order/`, `cart/bp/orders/`
|
||||
- Remove checkout webhook/return from `cart/bp/cart/global_routes.py`
|
||||
- Remove `_reconcile_pending_orders` from `cart/app.py`
|
||||
- Remove order templates from `cart/templates/`
|
||||
- Remove `"shared.models.order"` and `"orders", "order_items"` from `cart/alembic/env.py`
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: Infrastructure (applies to all new services)
|
||||
|
||||
### 5.1 docker-compose.yml
|
||||
Add 3 new services (relations, likes, orders) with own DATABASE_URL (db_relations, db_likes, db_orders), own REDIS_URL (Redis DB 7, 8, 9).
|
||||
|
||||
Add to `x-app-env`:
|
||||
```yaml
|
||||
INTERNAL_URL_RELATIONS: http://relations:8000
|
||||
INTERNAL_URL_LIKES: http://likes:8000
|
||||
INTERNAL_URL_ORDERS: http://orders:8000
|
||||
APP_URL_ORDERS: https://orders.rose-ash.com
|
||||
```
|
||||
|
||||
### 5.2 docker-compose.dev.yml
|
||||
Add all 3 services with dev volumes (ports 8008, 8009, 8010).
|
||||
Add to `x-sibling-models` for all 3 new services.
|
||||
|
||||
### 5.3 deploy.sh
|
||||
Add `relations likes orders` to APPS list.
|
||||
|
||||
### 5.4 Caddyfile (`/root/caddy/Caddyfile`)
|
||||
Add only orders (public):
|
||||
```
|
||||
orders.rose-ash.com { reverse_proxy rose-ash-dev-orders-1:8000 }
|
||||
```
|
||||
|
||||
### 5.5 shared/infrastructure/factory.py
|
||||
Add to model import loop: `"relations.models", "likes.models", "orders.models"`
|
||||
|
||||
### 5.6 shared/infrastructure/urls.py
|
||||
Add `orders_url(path)` helper.
|
||||
|
||||
### 5.7 All existing Dockerfiles
|
||||
Add sibling model COPY lines for the 3 new services to every existing Dockerfile (blog, market, cart, events, federation, account).
|
||||
|
||||
### 5.8 CLAUDE.md
|
||||
Update project structure and add notes about the new services.
|
||||
|
||||
---
|
||||
|
||||
## Data Migration (one-time, run before code switch)
|
||||
|
||||
1. `container_relations` from `db_cart` → `db_relations`
|
||||
2. `product_likes` from `db_market` + `post_likes` from `db_blog` → `db_likes.likes`
|
||||
3. `page_configs` from `db_cart` → `db_blog`
|
||||
4. `orders` + `order_items` from `db_cart` → `db_orders`
|
||||
|
||||
Use `pg_dump`/`pg_restore` or direct SQL for migration.
|
||||
|
||||
---
|
||||
|
||||
## Post-Split Cart State
|
||||
|
||||
After all 4 phases, cart owns only:
|
||||
- **Model**: CartItem (table in db_cart)
|
||||
- **Alembic**: `cart_items` only
|
||||
- **Data endpoints**: `cart-summary`, `cart-items`
|
||||
- **Action endpoints**: `adopt-cart-for-user`, `clear-cart-for-order` (new)
|
||||
- **Inbox handlers**: Add/Remove/Update `rose:CartItem`
|
||||
- **Public routes**: cart overview, page cart, add-to-cart, quantity, delete
|
||||
- **Fragments**: `cart-mini`
|
||||
- **Checkout**: POST /checkout/ (creates order via `call_action("orders", "create-order")`, redirects to SumUp)
|
||||
|
||||
---
|
||||
|
||||
## Verification
|
||||
1. **Relations**: Blog attach/detach marketplace to page; events attach/detach calendar
|
||||
2. **Likes**: Toggle product like on market page; toggle post like on blog; `?liked=true` filter
|
||||
3. **PageConfig**: Blog admin page config update; cart checkout resolves page config from blog
|
||||
4. **Orders**: Add to cart → checkout → SumUp redirect → webhook → order paid; order list/detail on orders.rose-ash.com
|
||||
5. No remaining `call_action("cart", "attach-child|detach-child|update-page-config")`
|
||||
6. No remaining `fetch_data("cart", "page-config*|get-children")`
|
||||
7. Cart alembic only manages `cart_items` table
|
||||
149
.claude/plans/rippling-tumbling-cocke.md
Normal file
149
.claude/plans/rippling-tumbling-cocke.md
Normal file
@@ -0,0 +1,149 @@
|
||||
# Ticket UX Improvements: +/- Buttons, Sold Count, Cart Grouping
|
||||
|
||||
## Context
|
||||
The entry page currently uses a numeric input + "Buy Tickets" button, which replaces itself with a confirmation after purchase. The cart lists each ticket individually. The user wants the ticket UX to match the product pattern: +/- buttons, "in basket" count, tickets grouped by event on cart.
|
||||
|
||||
## Requirements
|
||||
1. **Entry page**: Show tickets sold count + current user's "in basket" count
|
||||
2. **Entry page**: Replace qty input with "Add to basket" / +/- buttons (product pattern)
|
||||
3. **Entry page**: Keep form active after adding (don't replace with confirmation)
|
||||
4. **Cart page**: Group tickets by event (entry_id + ticket_type), show quantity with +/- buttons
|
||||
|
||||
---
|
||||
|
||||
## 1. Add `ticket_type_id` to TicketDTO
|
||||
|
||||
**File**: `shared/contracts/dtos.py`
|
||||
- Add `ticket_type_id: int | None = None` field to `TicketDTO`
|
||||
|
||||
**File**: `shared/services/calendar_impl.py`
|
||||
- In `_ticket_to_dto()`, populate `ticket_type_id=ticket.ticket_type_id`
|
||||
|
||||
**Sync**: Copy to all 4 app submodule copies.
|
||||
|
||||
## 2. New ticket service functions
|
||||
|
||||
**File**: `events/bp/tickets/services/tickets.py`
|
||||
- Add `get_user_reserved_count(session, entry_id, user_id, session_id, ticket_type_id=None) -> int`
|
||||
- Counts reserved tickets for this user+entry+type
|
||||
- Add `get_sold_ticket_count(session, entry_id) -> int`
|
||||
- Counts all non-cancelled tickets for this entry
|
||||
- Add `cancel_latest_reserved_ticket(session, entry_id, user_id, session_id, ticket_type_id=None) -> bool`
|
||||
- Finds the most recently created reserved ticket for this user+entry+type, sets state='cancelled'. Returns True if one was cancelled.
|
||||
|
||||
## 3. Add `adjust_quantity` route to events tickets blueprint
|
||||
|
||||
**File**: `events/bp/tickets/routes.py`
|
||||
- New route: `POST /tickets/adjust/`
|
||||
- Form fields: `entry_id`, `ticket_type_id` (optional), `count` (target quantity)
|
||||
- Logic:
|
||||
- Get current user reserved count for this entry/type
|
||||
- If count > current: create `(count - current)` tickets via `create_ticket()`
|
||||
- If count < current: cancel `(current - count)` tickets via `cancel_latest_reserved_ticket()` in a loop
|
||||
- If count == 0: cancel all
|
||||
- Check availability before adding (like existing `buy_tickets`)
|
||||
- Response: re-render `_buy_form.html` (HTMX swap replaces form, keeps it active)
|
||||
- Include OOB cart-mini update: `{{ mini(oob='true') }}`
|
||||
|
||||
## 4. Inject ticket counts into entry page context
|
||||
|
||||
**File**: `events/bp/calendar_entry/routes.py` — `inject_root` context processor
|
||||
- Add `ticket_sold_count`: total non-cancelled tickets for entry (via `get_sold_ticket_count`)
|
||||
- Add `user_ticket_count`: current user's reserved count (via `get_user_reserved_count`)
|
||||
- For multi-type entries, add `user_ticket_counts_by_type`: dict mapping ticket_type_id → count
|
||||
|
||||
## 5. Rewrite entry page buy form
|
||||
|
||||
**File**: `events/templates/_types/tickets/_buy_form.html`
|
||||
- Show "X sold" (from `ticket_sold_count`) alongside "X remaining"
|
||||
- Show "X in basket" for current user
|
||||
|
||||
**For single-price entries (no ticket types)**:
|
||||
- If `user_ticket_count == 0`: show "Add to basket" button (posts to `/tickets/adjust/` with count=1)
|
||||
- If `user_ticket_count > 0`: show `[-]` [count badge] `[+]` buttons
|
||||
- Minus: posts count=user_ticket_count-1
|
||||
- Plus: posts count=user_ticket_count+1
|
||||
- All forms: `hx-post`, `hx-target="#ticket-buy-{{ entry.id }}"`, `hx-swap="outerHTML"`
|
||||
|
||||
**For multi-type entries**:
|
||||
- Same pattern per ticket type row, using `user_ticket_counts_by_type[tt.id]`
|
||||
|
||||
Style: match product pattern exactly — emerald circular buttons, w-8 h-8, cart icon with badge.
|
||||
|
||||
## 6. Add ticket quantity route to cart app
|
||||
|
||||
**File**: `cart/bp/cart/global_routes.py`
|
||||
- New route: `POST /cart/ticket-quantity/`
|
||||
- Form fields: `entry_id`, `ticket_type_id` (optional), `count` (target quantity)
|
||||
- Logic: call into CalendarService or directly use ticket functions
|
||||
- Since cart app uses service contracts, add `adjust_ticket_quantity` to CalendarService protocol
|
||||
|
||||
**File**: `shared/contracts/protocols.py` — CalendarService
|
||||
- Add: `adjust_ticket_quantity(session, entry_id, count, *, user_id, session_id, ticket_type_id=None) -> int`
|
||||
|
||||
**File**: `shared/services/calendar_impl.py`
|
||||
- Implement `adjust_ticket_quantity`:
|
||||
- Same logic as events adjust route (create/cancel to match target count)
|
||||
- Return new count
|
||||
|
||||
**File**: `shared/services/stubs.py`
|
||||
- Add stub: returns 0
|
||||
|
||||
Response: `HX-Refresh: true` (same as product quantity route).
|
||||
|
||||
## 7. Cart page: group tickets by event with +/- buttons
|
||||
|
||||
**File**: `cart/templates/_types/cart/_cart.html` — ticket section (lines 63-95)
|
||||
- Replace individual ticket list with grouped display
|
||||
- Group `ticket_cart_entries` by `(entry_id, ticket_type_id)`:
|
||||
- Use Jinja `groupby` on `entry_id` first, then sub-group by `ticket_type_name`
|
||||
- Or pre-group in the route handler and pass as a dict
|
||||
|
||||
**Approach**: Pre-group in the route handler for cleaner templates.
|
||||
|
||||
**File**: `cart/bp/cart/page_routes.py` — `page_view`
|
||||
- After getting `page_tickets`, group them into a list of dicts:
|
||||
```
|
||||
[{"entry_name": ..., "entry_id": ..., "ticket_type_name": ..., "ticket_type_id": ...,
|
||||
"entry_start_at": ..., "entry_end_at": ..., "price": ..., "quantity": N}]
|
||||
```
|
||||
- Pass as `ticket_groups` to template
|
||||
|
||||
**File**: `cart/bp/cart/global_routes.py` — overview/checkout routes
|
||||
- Same grouping for global cart view if tickets appear there
|
||||
|
||||
**Cart ticket group template**: Each group shows:
|
||||
- Event name + ticket type (if any)
|
||||
- Date/time
|
||||
- Price per ticket
|
||||
- `-` [qty] `+` buttons (posting to `/cart/ticket-quantity/`)
|
||||
- Line total (price × qty)
|
||||
|
||||
Match product `cart_item` macro style (article card with quantity controls).
|
||||
|
||||
## 8. Cart summary update
|
||||
|
||||
**File**: `cart/templates/_types/cart/_cart.html` — `summary` macro
|
||||
- Update Items count: include ticket quantities in total (currently just product quantities)
|
||||
|
||||
## Files to modify (summary)
|
||||
- `shared/contracts/dtos.py` — add ticket_type_id to TicketDTO
|
||||
- `shared/contracts/protocols.py` — add adjust_ticket_quantity to CalendarService
|
||||
- `shared/services/calendar_impl.py` — implement adjust_ticket_quantity, update _ticket_to_dto
|
||||
- `shared/services/stubs.py` — add stub
|
||||
- `events/bp/tickets/services/tickets.py` — add count/cancel functions
|
||||
- `events/bp/tickets/routes.py` — add adjust route
|
||||
- `events/bp/calendar_entry/routes.py` — inject sold/user counts
|
||||
- `events/templates/_types/tickets/_buy_form.html` — rewrite with +/- pattern
|
||||
- `cart/bp/cart/global_routes.py` — add ticket-quantity route
|
||||
- `cart/bp/cart/page_routes.py` — group tickets
|
||||
- `cart/templates/_types/cart/_cart.html` — grouped ticket display with +/-
|
||||
- All 4 app `shared/` submodule copies synced
|
||||
|
||||
## Verification
|
||||
1. Visit entry page → see "X sold", "X in basket", "Add to basket" button
|
||||
2. Click "Add to basket" → form stays, shows `-` [1] `+`, basket count shows "1 in basket"
|
||||
3. Click `+` → count increases, sold count increases
|
||||
4. Click `-` → count decreases, ticket cancelled
|
||||
5. Visit cart page → tickets grouped by event, +/- buttons work
|
||||
6. Checkout flow still works (existing tests)
|
||||
171
.claude/plans/unified-inventing-kay.md
Normal file
171
.claude/plans/unified-inventing-kay.md
Normal file
@@ -0,0 +1,171 @@
|
||||
# Social Network Sharing Integration
|
||||
|
||||
## Context
|
||||
|
||||
Rose Ash already has ActivityPub for federated social sharing. This plan adds OAuth-based sharing to mainstream social networks — Facebook, Instagram, Threads, Twitter/X, LinkedIn, and Mastodon. Users connect their social accounts via the account dashboard, then manually share content (blog posts, events, products) via a share button on content pages.
|
||||
|
||||
All social logic lives in the **account** microservice. Content apps get a share button that opens the account share page.
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Data Model + Encryption
|
||||
|
||||
### 1a. `shared/models/social_connection.py` (NEW)
|
||||
- SQLAlchemy 2.0 model following `oauth_grant.py` pattern
|
||||
- Table `social_connections` in db_account
|
||||
- Columns: `id`, `user_id` (FK to users.id with CASCADE), `platform` (facebook/instagram/threads/twitter/linkedin/mastodon), `platform_user_id`, `platform_username`, `display_name`, `access_token_enc`, `refresh_token_enc`, `token_expires_at`, `scopes`, `extra_data` (JSONB — mastodon instance URL, facebook page ID, etc.), `created_at`, `updated_at`, `revoked_at`
|
||||
- Indexes: `(user_id, platform)`, unique `(platform, platform_user_id)`
|
||||
|
||||
### 1b. `shared/models/__init__.py` (MODIFY)
|
||||
- Add `from .social_connection import SocialConnection`
|
||||
|
||||
### 1c. `shared/infrastructure/social_crypto.py` (NEW)
|
||||
- Fernet encrypt/decrypt using `SOCIAL_ENCRYPTION_KEY` env var
|
||||
- `encrypt_token(plaintext) -> str`, `decrypt_token(ciphertext) -> str`
|
||||
|
||||
### 1d. Alembic migration (NEW)
|
||||
- Creates `social_connections` table
|
||||
|
||||
### 1e. `docker-compose.yml` (MODIFY)
|
||||
- Add to `x-app-env`: `SOCIAL_ENCRYPTION_KEY`, plus per-platform credentials (`SOCIAL_FACEBOOK_APP_ID`, `SOCIAL_FACEBOOK_APP_SECRET`, `SOCIAL_TWITTER_CLIENT_ID`, `SOCIAL_TWITTER_CLIENT_SECRET`, `SOCIAL_LINKEDIN_CLIENT_ID`, `SOCIAL_LINKEDIN_CLIENT_SECRET`)
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: Platform OAuth Clients
|
||||
|
||||
All in `account/services/social_platforms/`:
|
||||
|
||||
### 2a. `base.py` (NEW)
|
||||
- `OAuthResult` dataclass (platform_user_id, tokens, expiry, extra_data)
|
||||
- `ShareResult` dataclass (success, platform_post_id, platform_post_url, error)
|
||||
- `SocialPlatform` abstract base class: `get_authorize_url()`, `exchange_code()`, `refresh_access_token()`, `share_link()`, `verify_token()`
|
||||
|
||||
### 2b. `meta.py` (NEW) — Facebook + Instagram + Threads
|
||||
- **Facebook**: OAuth2 via Graph API, `pages_manage_posts` scope, exchange user token → long-lived → page token, post via `/{page_id}/feed`
|
||||
- **Instagram**: Same Meta OAuth, `instagram_basic` + `instagram_content_publish` scopes, business/creator accounts only, container → publish workflow
|
||||
- **Threads**: Separate OAuth at threads.net, `threads_basic` + `threads_content_publish` scopes, container → publish
|
||||
|
||||
### 2c. `twitter.py` (NEW) — Twitter/X
|
||||
- OAuth 2.0 with PKCE, `tweet.write` + `offline.access` scopes
|
||||
- Post via `POST https://api.twitter.com/2/tweets`
|
||||
|
||||
### 2d. `linkedin.py` (NEW) — LinkedIn
|
||||
- OAuth 2.0, `w_member_social` + `openid` scopes
|
||||
- Post via LinkedIn Posts API
|
||||
|
||||
### 2e. `mastodon.py` (NEW) — Mastodon
|
||||
- Dynamic app registration per instance (`POST /api/v1/apps`)
|
||||
- OAuth 2.0, `write:statuses` scope
|
||||
- Post via `POST /api/v1/statuses`
|
||||
- Instance URL stored in `extra_data["instance_url"]`
|
||||
|
||||
### 2f. `__init__.py` (NEW) — Platform registry
|
||||
- `PLATFORMS` dict, lazy-initialized from env vars
|
||||
- Mastodon always available (no pre-configured credentials)
|
||||
- `get_platform(name)`, `available_platforms()`
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: Account Social Blueprint
|
||||
|
||||
### 3a. `account/bp/social/__init__.py` (NEW)
|
||||
### 3b. `account/bp/social/routes.py` (NEW)
|
||||
|
||||
Routes (all require login):
|
||||
- `GET /social/` — list connected accounts + available platforms
|
||||
- `GET /social/connect/<platform>/` — start OAuth redirect (Mastodon: accept instance URL param)
|
||||
- `GET /social/callback/<platform>/` — OAuth callback, exchange code, encrypt & store tokens
|
||||
- `POST /social/disconnect/<int:id>/` — soft-delete (set revoked_at)
|
||||
- `GET /social/share/` — share page (params: url, title, description, image)
|
||||
- `POST /social/share/` — execute share to selected accounts, return results
|
||||
|
||||
OAuth state stored in session (nonce + platform + redirect params).
|
||||
|
||||
### 3c. `account/bp/__init__.py` (MODIFY)
|
||||
- Add `from .social.routes import register as register_social_bp`
|
||||
|
||||
### 3d. `account/app.py` (MODIFY)
|
||||
- Register social blueprint **before** account blueprint (account has catch-all `/<slug>/`)
|
||||
```python
|
||||
app.register_blueprint(register_auth_bp())
|
||||
app.register_blueprint(register_social_bp()) # <-- NEW, before account
|
||||
app.register_blueprint(register_account_bp())
|
||||
app.register_blueprint(register_fragments())
|
||||
```
|
||||
|
||||
### 3e. `account/templates/_types/auth/_nav.html` (MODIFY)
|
||||
- Add "social" link between newsletters and `account_nav_html`
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: Templates
|
||||
|
||||
### 4a. `account/templates/_types/auth/_social_panel.html` (NEW)
|
||||
- Platform cards with icons (Font Awesome: `fa-facebook`, `fa-instagram`, `fa-threads`, `fa-x-twitter`, `fa-linkedin`, `fa-mastodon`)
|
||||
- Connected accounts per platform: display name, username, disconnect button
|
||||
- "Connect" button per platform
|
||||
- Mastodon: instance URL input before connecting
|
||||
|
||||
### 4b. `account/templates/_types/auth/_share_panel.html` (NEW)
|
||||
- Content preview card (title, image, URL)
|
||||
- Connected accounts as checkboxes grouped by platform
|
||||
- Optional message textarea
|
||||
- Share button → HTMX POST to `/social/share/`
|
||||
|
||||
### 4c. `account/templates/_types/auth/_share_result.html` (NEW)
|
||||
- Per-platform success/failure with links to created posts
|
||||
|
||||
### 4d. `account/templates/_types/auth/_mastodon_connect.html` (NEW)
|
||||
- Instance URL input form
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: Share Button in Content Apps
|
||||
|
||||
### 5a. `account/bp/fragments/routes.py` (MODIFY)
|
||||
- Add `share-button` handler: accepts url, title, description, image params
|
||||
- Returns a share icon/link pointing to `account.rose-ash.com/social/share/?url=...&title=...`
|
||||
|
||||
### 5b. `account/templates/fragments/share_button.html` (NEW)
|
||||
- Small button: `<a href="..." target="_blank"><i class="fa-solid fa-share-nodes"></i> Share</a>`
|
||||
|
||||
### 5c. Content app integration
|
||||
- Blog post detail: fetch `share-button` fragment from account, render in post template
|
||||
- Events detail: same pattern
|
||||
- Market product detail: same pattern
|
||||
- Each passes its own public URL, title, description, image to the fragment
|
||||
|
||||
---
|
||||
|
||||
## Phase 6: Token Refresh + Share History
|
||||
|
||||
### 6a. Token refresh in share flow
|
||||
- Before posting, check `token_expires_at`; if expired, call `refresh_access_token()`
|
||||
- Update encrypted tokens in DB
|
||||
- If refresh fails, mark connection with error and prompt reconnect
|
||||
|
||||
### 6b. `shared/models/social_share.py` (NEW, optional)
|
||||
- Table `social_shares`: connection_id, shared_url, shared_title, platform_post_id, platform_post_url, status, error_message, created_at
|
||||
- Prevents duplicate shares, enables "shared" indicator on content pages
|
||||
|
||||
---
|
||||
|
||||
## Key Patterns to Follow
|
||||
|
||||
| Pattern | Reference File |
|
||||
|---------|---------------|
|
||||
| ORM model (mapped_column, FK, indexes) | `shared/models/oauth_grant.py` |
|
||||
| Blueprint registration + OOB template | `account/bp/account/routes.py` |
|
||||
| Fragment handler dict | `account/bp/fragments/routes.py` |
|
||||
| Account nav link | `account/templates/_types/auth/_nav.html` |
|
||||
| httpx async client | `shared/infrastructure/actions.py` |
|
||||
|
||||
## Verification
|
||||
|
||||
1. Generate `SOCIAL_ENCRYPTION_KEY`, add to `.env`
|
||||
2. Run Alembic migration
|
||||
3. Start account app, navigate to `/social/`
|
||||
4. Connect a test Mastodon account (easiest — no app review needed)
|
||||
5. Navigate to a blog post, click Share, select Mastodon account, verify post appears
|
||||
6. Disconnect account, verify soft-delete
|
||||
7. Test token refresh by connecting Facebook with short-lived token
|
||||
@@ -2,11 +2,11 @@ name: Build and Deploy
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, decoupling]
|
||||
branches: ['**']
|
||||
|
||||
env:
|
||||
REGISTRY: registry.rose-ash.com:5000
|
||||
COOP_DIR: /root/rose-ash
|
||||
APP_DIR: /root/rose-ash
|
||||
|
||||
jobs:
|
||||
build-and-deploy:
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
|
||||
run: |
|
||||
ssh "root@$DEPLOY_HOST" "
|
||||
cd ${{ env.COOP_DIR }}
|
||||
cd ${{ env.APP_DIR }}
|
||||
|
||||
# Save current HEAD before updating
|
||||
OLD_HEAD=\$(git rev-parse HEAD 2>/dev/null || echo none)
|
||||
@@ -58,13 +58,22 @@ jobs:
|
||||
fi
|
||||
fi
|
||||
|
||||
for app in blog market cart events federation account; do
|
||||
# Map compose service name to source directory
|
||||
app_dir() {
|
||||
case \"\$1\" in
|
||||
sx_docs) echo \"sx\" ;;
|
||||
*) echo \"\$1\" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
for app in blog market cart events federation account relations likes orders test sx_docs; do
|
||||
dir=\$(app_dir \"\$app\")
|
||||
IMAGE_EXISTS=\$(docker image ls -q ${{ env.REGISTRY }}/\$app:latest 2>/dev/null)
|
||||
if [ \"\$REBUILD_ALL\" = true ] || echo \"\$CHANGED\" | grep -q \"^\$app/\" || [ -z \"\$IMAGE_EXISTS\" ]; then
|
||||
if [ \"\$REBUILD_ALL\" = true ] || echo \"\$CHANGED\" | grep -q \"^\$dir/\" || [ -z \"\$IMAGE_EXISTS\" ]; then
|
||||
echo \"Building \$app...\"
|
||||
docker build \
|
||||
--build-arg CACHEBUST=\$(date +%s) \
|
||||
-f \$app/Dockerfile \
|
||||
-f \$dir/Dockerfile \
|
||||
-t ${{ env.REGISTRY }}/\$app:latest \
|
||||
-t ${{ env.REGISTRY }}/\$app:${{ github.sha }} \
|
||||
.
|
||||
@@ -75,9 +84,20 @@ jobs:
|
||||
fi
|
||||
done
|
||||
|
||||
source .env
|
||||
docker stack deploy -c docker-compose.yml coop
|
||||
echo 'Waiting for services to update...'
|
||||
sleep 10
|
||||
docker stack services coop
|
||||
# Deploy swarm stack only on main branch
|
||||
if [ '${{ github.ref_name }}' = 'main' ]; then
|
||||
source .env
|
||||
docker stack deploy -c docker-compose.yml rose-ash
|
||||
echo 'Waiting for swarm services to update...'
|
||||
sleep 10
|
||||
docker stack services rose-ash
|
||||
else
|
||||
echo 'Skipping swarm deploy (branch: ${{ github.ref_name }})'
|
||||
fi
|
||||
|
||||
# Dev stack always deployed (bind-mounted source + auto-reload)
|
||||
echo 'Deploying dev stack...'
|
||||
docker compose -p rose-ash-dev -f docker-compose.yml -f docker-compose.dev.yml up -d
|
||||
echo 'Dev stack deployed'
|
||||
docker compose -p rose-ash-dev -f docker-compose.yml -f docker-compose.dev.yml ps
|
||||
"
|
||||
|
||||
144
CLAUDE.md
Normal file
144
CLAUDE.md
Normal file
@@ -0,0 +1,144 @@
|
||||
# Rose Ash Monorepo
|
||||
|
||||
Cooperative web platform: federated content, commerce, events, and media processing. Each domain runs as an independent Quart microservice with its own database, communicating via HMAC-signed internal HTTP and ActivityPub events.
|
||||
|
||||
## Deployment
|
||||
|
||||
- **Do NOT push** until explicitly told to. Pushes reload code to dev automatically.
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
blog/ # Content management, Ghost CMS sync, navigation, WYSIWYG editor
|
||||
market/ # Product catalog, marketplace pages, web scraping
|
||||
cart/ # Shopping cart CRUD, checkout (delegates order creation to orders)
|
||||
events/ # Calendar & event management, ticketing
|
||||
federation/ # ActivityPub social hub, user profiles
|
||||
account/ # OAuth2 authorization server, user dashboard, membership
|
||||
orders/ # Order history, SumUp payment/webhook handling, reconciliation
|
||||
relations/ # (internal) Cross-domain parent/child relationship tracking
|
||||
likes/ # (internal) Unified like/favourite tracking across domains
|
||||
shared/ # Shared library: models, infrastructure, templates, static assets
|
||||
artdag/ # Art DAG — media processing engine (separate codebase, see below)
|
||||
```
|
||||
|
||||
### Shared Library (`shared/`)
|
||||
|
||||
```
|
||||
shared/
|
||||
models/ # Canonical SQLAlchemy ORM models for all domains
|
||||
db/ # Async session management, per-domain DB support, alembic helpers
|
||||
infrastructure/ # App factory, OAuth, ActivityPub, fragments, internal auth, Jinja
|
||||
services/ # Domain service implementations + DI registry
|
||||
contracts/ # DTOs and service protocols
|
||||
browser/ # Middleware, Redis caching, CSRF, error handlers
|
||||
events/ # Activity bus + background processor (AP-shaped events)
|
||||
config/ # YAML config loading (frozen/readonly)
|
||||
static/ # Shared CSS, JS, images
|
||||
templates/ # Base HTML layouts, partials (inherited by all apps)
|
||||
```
|
||||
|
||||
### Art DAG (`artdag/`)
|
||||
|
||||
Federated content-addressed DAG execution engine for distributed media processing.
|
||||
|
||||
```
|
||||
artdag/
|
||||
core/ # DAG engine (artdag package) — nodes, effects, analysis, planning
|
||||
l1/ # L1 Celery rendering server (FastAPI + Celery + Redis + PostgreSQL)
|
||||
l2/ # L2 ActivityPub registry (FastAPI + PostgreSQL)
|
||||
common/ # Shared templates, middleware, models (artdag_common package)
|
||||
client/ # CLI client
|
||||
test/ # Integration & e2e tests
|
||||
```
|
||||
|
||||
## Tech Stack
|
||||
|
||||
**Web platform:** Python 3.11+, Quart (async Flask), SQLAlchemy (asyncpg), Jinja2, HTMX, PostgreSQL, Redis, Docker Swarm, Hypercorn.
|
||||
|
||||
**Art DAG:** FastAPI, Celery, JAX (CPU/GPU), IPFS/Kubo, Pydantic.
|
||||
|
||||
## Key Commands
|
||||
|
||||
### Development
|
||||
```bash
|
||||
./dev.sh # Start all services + infra (db, redis, pgbouncer)
|
||||
./dev.sh blog market # Start specific services + infra
|
||||
./dev.sh --build blog # Rebuild image then start
|
||||
./dev.sh down # Stop everything
|
||||
./dev.sh logs blog # Tail service logs
|
||||
```
|
||||
|
||||
### Deployment
|
||||
```bash
|
||||
./deploy.sh # Auto-detect changed apps, build + push + restart
|
||||
./deploy.sh blog market # Deploy specific apps
|
||||
./deploy.sh --all # Deploy everything
|
||||
```
|
||||
|
||||
### Art DAG
|
||||
```bash
|
||||
cd artdag/l1 && pytest tests/ # L1 unit tests
|
||||
cd artdag/core && pytest tests/ # Core unit tests
|
||||
cd artdag/test && python run.py # Full integration pipeline
|
||||
cd artdag/l1 && ruff check . # Lint
|
||||
cd artdag/l1 && mypy app/types.py app/routers/recipes.py tests/
|
||||
```
|
||||
|
||||
## Architecture Patterns
|
||||
|
||||
### Web Platform
|
||||
|
||||
- **App factory:** `create_base_app(name, context_fn, before_request_fns, domain_services_fn)` in `shared/infrastructure/factory.py` — creates Quart app with DB, Redis, CSRF, OAuth, AP, session management
|
||||
- **Blueprint pattern:** Each blueprint exposes `register() -> Blueprint`, handlers stored in `_handlers` dict
|
||||
- **Per-service database:** Each service has own PostgreSQL DB via PgBouncer; cross-domain data fetched via HTTP
|
||||
- **Alembic per-service:** Each service declares `MODELS` and `TABLES` in `alembic/env.py`, delegates to `shared.db.alembic_env.run_alembic()`
|
||||
- **Inter-service reads:** `fetch_data(service, query, params)` → GET `/internal/data/{query}` (HMAC-signed, 3s timeout)
|
||||
- **Inter-service writes:** `call_action(service, action, payload)` → POST `/internal/actions/{action}` (HMAC-signed, 5s timeout)
|
||||
- **Inter-service AP inbox:** `send_internal_activity()` → POST `/internal/inbox` (HMAC-signed, AP-shaped activities for cross-service writes)
|
||||
- **Fragments:** HTML fragments fetched cross-service via `fetch_fragments()` for composing shared UI (nav, cart mini, auth menu)
|
||||
- **Soft deletes:** Models use `deleted_at` column pattern
|
||||
- **Context processors:** Each app provides its own `context_fn` that assembles template context from local DB + cross-service fragments
|
||||
|
||||
### Auth
|
||||
|
||||
- **Account** is the OAuth2 authorization server; all other apps are OAuth clients
|
||||
- Per-app first-party session cookies (Safari ITP compatible), synchronized via device ID
|
||||
- Grant verification: apps check grant validity against account DB (cached in Redis)
|
||||
- Silent SSO: `prompt=none` OAuth flow for automatic cross-app login
|
||||
- ActivityPub: RSA signatures, per-app virtual actor projections sharing same keypair
|
||||
|
||||
### Art DAG
|
||||
|
||||
- **3-Phase Execution:** Analyze → Plan → Execute (tasks in `artdag/l1/tasks/`)
|
||||
- **Content-Addressed:** All data identified by SHA3-256 hashes or IPFS CIDs
|
||||
- **S-Expression Effects:** Composable effect language in `artdag/l1/sexp_effects/`
|
||||
- **Storage:** Local filesystem, S3, or IPFS backends
|
||||
- L1 ↔ L2: scoped JWT tokens; L2: password + OAuth SSO
|
||||
|
||||
## Domains
|
||||
|
||||
| Service | Public URL | Dev Port |
|
||||
|---------|-----------|----------|
|
||||
| blog | blog.rose-ash.com | 8001 |
|
||||
| market | market.rose-ash.com | 8002 |
|
||||
| cart | cart.rose-ash.com | 8003 |
|
||||
| events | events.rose-ash.com | 8004 |
|
||||
| federation | federation.rose-ash.com | 8005 |
|
||||
| account | account.rose-ash.com | 8006 |
|
||||
| relations | (internal only) | 8008 |
|
||||
| likes | (internal only) | 8009 |
|
||||
| orders | orders.rose-ash.com | 8010 |
|
||||
|
||||
## Key Config Files
|
||||
|
||||
- `docker-compose.yml` / `docker-compose.dev.yml` — service definitions, env vars, volumes
|
||||
- `deploy.sh` / `dev.sh` — deployment and development scripts
|
||||
- `shared/infrastructure/factory.py` — app factory (all services use this)
|
||||
- `{service}/alembic/env.py` — per-service migration config
|
||||
- `_config/app-config.yaml` — runtime YAML config (mounted into containers)
|
||||
|
||||
## Tools
|
||||
|
||||
- Use Context7 MCP for up-to-date library documentation
|
||||
- Playwright MCP is available for browser automation/testing
|
||||
@@ -16,6 +16,9 @@ app_urls:
|
||||
events: "https://events.rose-ash.com"
|
||||
federation: "https://federation.rose-ash.com"
|
||||
account: "https://account.rose-ash.com"
|
||||
sx: "https://sx.rose-ash.com"
|
||||
test: "https://test.rose-ash.com"
|
||||
orders: "https://orders.rose-ash.com"
|
||||
cache:
|
||||
fs_root: /app/_snapshot # <- absolute path to your snapshot dir
|
||||
categories:
|
||||
|
||||
11
_config/init-databases.sql
Normal file
11
_config/init-databases.sql
Normal file
@@ -0,0 +1,11 @@
|
||||
-- Per-domain databases for the coop stack.
-- Run once on fresh deployments (not needed for existing single-DB setups
-- that use the split-databases.sh migration script instead).
--
-- Usage: psql -U postgres -f init-databases.sql

-- NOTE(review): CLAUDE.md's domains table also lists relations/likes/orders
-- services; if those run against their own databases they are not created
-- here — confirm against docker-compose.yml before relying on this script
-- for a fully fresh deployment.
CREATE DATABASE db_account;
CREATE DATABASE db_blog;
CREATE DATABASE db_market; -- also houses cart tables (commerce bounded context)
CREATE DATABASE db_events;
CREATE DATABASE db_federation;
||||
17
_config/move-page-configs.sql
Normal file
17
_config/move-page-configs.sql
Normal file
@@ -0,0 +1,17 @@
|
||||
-- Move page_configs data from db_events to db_blog.
-- Run after split-databases.sh if page_configs data ended up in db_events.
--
-- Usage:
--   PGHOST=db PGUSER=postgres PGPASSWORD=change-me psql -f move-page-configs.sql
--
-- NOTE: COPY ... TO/FROM '/tmp/...' is server-side COPY — it executes as the
-- database server process, requires superuser (or the pg_read/write_server_files
-- roles), and reads/writes the file on the *server* host, not on the psql
-- client. Both \c targets must therefore live on the same server instance;
-- use psql's \copy instead if the client runs on a different host.

-- Step 1: Dump page_configs from db_events into db_blog
\c db_events
COPY page_configs TO '/tmp/page_configs.csv' WITH CSV HEADER;

\c db_blog
-- Destructive: discards any page_configs rows already present in db_blog
-- before loading the db_events snapshot.
TRUNCATE page_configs;
COPY page_configs FROM '/tmp/page_configs.csv' WITH CSV HEADER;

-- Step 2: Verify
SELECT count(*) AS blog_page_configs FROM page_configs;
|
||||
153
_config/split-databases.sh
Executable file
153
_config/split-databases.sh
Executable file
@@ -0,0 +1,153 @@
|
||||
#!/usr/bin/env bash
#
# split-databases.sh — Migrate from single appdb to per-domain databases.
#
# Copies each domain's tables (schema + data) out of the monolithic source
# database into its dedicated db_* database via pg_dump | psql, then stamps
# every target database at the current Alembic head so per-service
# migrations start from a known state.
#
# Prerequisites:
#   - All apps stopped (5-min maintenance window)
#   - init-databases.sql already run (CREATE DATABASE db_*)
#   - Run from a host that can reach the Postgres container
#
# Usage:
#   PGHOST=db PGUSER=postgres PGPASSWORD=change-me bash split-databases.sh
#
set -euo pipefail

# Monolithic source database; override with SOURCE_DB=... if it differs.
SOURCE_DB="${SOURCE_DB:-appdb}"

# ── Table → database mapping ───────────────────────────────────────────────
# Each entry is a whitespace-separated (one-per-line) list of tables owned
# by that domain database.

declare -A DB_TABLES

DB_TABLES[db_account]="
users
magic_links
oauth_codes
oauth_grants
ghost_labels
user_labels
ghost_newsletters
user_newsletters
ghost_tiers
ghost_subscriptions
kv
"

DB_TABLES[db_blog]="
authors
tags
posts
post_authors
post_tags
post_likes
menu_items
menu_nodes
container_relations
page_configs
"

DB_TABLES[db_market]="
products
product_images
product_sections
product_labels
product_stickers
product_attributes
product_nutrition
product_allergens
product_likes
product_logs
market_places
nav_tops
nav_subs
listings
listing_items
link_errors
link_externals
subcategory_redirects
cart_items
orders
order_items
"

# db_cart merged into db_market — cart and market share the same bounded context
# (commerce). Cart needs direct read access to products/market_places.

DB_TABLES[db_events]="
calendars
calendar_slots
calendar_entries
calendar_entry_posts
ticket_types
tickets
"

DB_TABLES[db_federation]="
ap_anchors
ap_actor_profiles
ap_activities
ap_followers
ap_inbox_items
ap_remote_actors
ap_following
ap_remote_posts
ap_local_posts
ap_interactions
ap_notifications
ap_delivery_log
ipfs_pins
"

# ── Migrate each domain ────────────────────────────────────────────────────

for target_db in db_account db_blog db_market db_events db_federation; do
    tables="${DB_TABLES[$target_db]}"
    # Build "--table=a --table=b ..." for pg_dump. $tables is expanded
    # unquoted on purpose: word-splitting turns the one-per-line list into
    # individual table names.
    table_list=""
    for t in $tables; do
        table_list="$table_list --table=$t"
    done

    echo "=== Migrating $target_db ==="
    echo " Tables: $(echo $tables | tr '\n' ' ')"

    # Dump schema + data for these tables from the source DB
    # ($table_list intentionally unquoted so each --table flag is its own word).
    pg_dump "$SOURCE_DB" $table_list --no-owner --no-privileges \
        | psql -q "$target_db"

    echo " Done."
done

# ── Stamp Alembic head in each domain DB ──────────────────────────────────
# NOTE(review): 'w3u1q9r0s1' is hard-coded — confirm it matches the repo's
# current Alembic head revision before running this script.

echo ""
echo "=== Stamping Alembic head in each DB ==="
for target_db in db_account db_blog db_market db_events db_federation; do
    # Create alembic_version table and stamp current head
    # (quoted 'SQL' delimiter: no shell expansion inside the heredoc).
    psql -q "$target_db" <<'SQL'
CREATE TABLE IF NOT EXISTS alembic_version (
    version_num VARCHAR(32) NOT NULL,
    CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num)
);
DELETE FROM alembic_version;
INSERT INTO alembic_version (version_num) VALUES ('w3u1q9r0s1');
SQL
    echo " $target_db stamped at w3u1q9r0s1"
done

echo ""
echo "=== Migration complete ==="
echo ""
echo "Next steps:"
echo " 1. Update docker-compose.yml — set per-app DATABASE_URL to the new DBs"
echo " 2. Remove schema_sql config (no longer needed)"
echo " 3. Redeploy all services"
echo ""
echo "Per-app DATABASE_URL values:"
echo " blog: postgresql+asyncpg://postgres:change-me@db:5432/db_blog"
echo " market: postgresql+asyncpg://postgres:change-me@db:5432/db_market"
echo " cart: postgresql+asyncpg://postgres:change-me@db:5432/db_market (shared with market)"
echo " events: postgresql+asyncpg://postgres:change-me@db:5432/db_events"
echo " federation: postgresql+asyncpg://postgres:change-me@db:5432/db_federation"
echo " account: postgresql+asyncpg://postgres:change-me@db:5432/db_account"
echo ""
echo " DATABASE_URL_ACCOUNT: postgresql+asyncpg://postgres:change-me@db:5432/db_account"
echo " DATABASE_URL_FEDERATION: postgresql+asyncpg://postgres:change-me@db:5432/db_federation"
||||
@@ -38,6 +38,12 @@ COPY events/__init__.py ./events/__init__.py
|
||||
COPY events/models/ ./events/models/
|
||||
COPY federation/__init__.py ./federation/__init__.py
|
||||
COPY federation/models/ ./federation/models/
|
||||
COPY relations/__init__.py ./relations/__init__.py
|
||||
COPY relations/models/ ./relations/models/
|
||||
COPY likes/__init__.py ./likes/__init__.py
|
||||
COPY likes/models/ ./likes/models/
|
||||
COPY orders/__init__.py ./orders/__init__.py
|
||||
COPY orders/models/ ./orders/models/
|
||||
|
||||
# ---------- Runtime setup ----------
|
||||
COPY account/entrypoint.sh /usr/local/bin/entrypoint.sh
|
||||
|
||||
18
account/alembic/env.py
Normal file
18
account/alembic/env.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from alembic import context
|
||||
from shared.db.alembic_env import run_alembic
|
||||
|
||||
MODELS = [
|
||||
"shared.models.user",
|
||||
"shared.models.ghost_membership_entities",
|
||||
"shared.models.magic_link",
|
||||
"shared.models.oauth_code",
|
||||
"shared.models.oauth_grant",
|
||||
]
|
||||
|
||||
TABLES = frozenset({
|
||||
"users", "user_labels", "user_newsletters",
|
||||
"magic_links", "oauth_codes", "oauth_grants",
|
||||
"ghost_labels", "ghost_newsletters", "ghost_tiers", "ghost_subscriptions",
|
||||
})
|
||||
|
||||
run_alembic(context.config, MODELS, TABLES)
|
||||
209
account/alembic/versions/0001_initial.py
Normal file
209
account/alembic/versions/0001_initial.py
Normal file
@@ -0,0 +1,209 @@
|
||||
"""Initial account tables
|
||||
|
||||
Revision ID: acct_0001
|
||||
Revises: -
|
||||
Create Date: 2026-02-26
|
||||
"""
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
|
||||
revision = "acct_0001"
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def _table_exists(conn, name):
|
||||
result = conn.execute(sa.text(
|
||||
"SELECT 1 FROM information_schema.tables WHERE table_schema='public' AND table_name=:t"
|
||||
), {"t": name})
|
||||
return result.scalar() is not None
|
||||
|
||||
|
||||
def upgrade():
|
||||
if _table_exists(op.get_bind(), "users"):
|
||||
return
|
||||
|
||||
# 1. users
|
||||
op.create_table(
|
||||
"users",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("email", sa.String(255), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("now()")),
|
||||
sa.Column("last_login_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column("ghost_id", sa.String(64), nullable=True),
|
||||
sa.Column("name", sa.String(255), nullable=True),
|
||||
sa.Column("ghost_status", sa.String(50), nullable=True),
|
||||
sa.Column("ghost_subscribed", sa.Boolean(), nullable=False, server_default=sa.true()),
|
||||
sa.Column("ghost_note", sa.Text(), nullable=True),
|
||||
sa.Column("avatar_image", sa.Text(), nullable=True),
|
||||
sa.Column("stripe_customer_id", sa.String(255), nullable=True),
|
||||
sa.Column("ghost_raw", JSONB(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index("ix_user_email", "users", ["email"], unique=True)
|
||||
op.create_index(op.f("ix_users_ghost_id"), "users", ["ghost_id"], unique=True)
|
||||
op.create_index(op.f("ix_users_stripe_customer_id"), "users", ["stripe_customer_id"])
|
||||
|
||||
# 2. ghost_labels
|
||||
op.create_table(
|
||||
"ghost_labels",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("ghost_id", sa.String(64), nullable=False),
|
||||
sa.Column("name", sa.String(255), nullable=False),
|
||||
sa.Column("slug", sa.String(255), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index(op.f("ix_ghost_labels_ghost_id"), "ghost_labels", ["ghost_id"], unique=True)
|
||||
|
||||
# 3. user_labels
|
||||
op.create_table(
|
||||
"user_labels",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("user_id", sa.Integer(), nullable=True),
|
||||
sa.Column("label_id", sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
|
||||
sa.ForeignKeyConstraint(["label_id"], ["ghost_labels.id"], ondelete="CASCADE"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("user_id", "label_id", name="uq_user_label"),
|
||||
)
|
||||
op.create_index(op.f("ix_user_labels_user_id"), "user_labels", ["user_id"])
|
||||
op.create_index(op.f("ix_user_labels_label_id"), "user_labels", ["label_id"])
|
||||
|
||||
# 4. ghost_newsletters
|
||||
op.create_table(
|
||||
"ghost_newsletters",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("ghost_id", sa.String(64), nullable=False),
|
||||
sa.Column("name", sa.String(255), nullable=False),
|
||||
sa.Column("slug", sa.String(255), nullable=True),
|
||||
sa.Column("description", sa.Text(), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index(op.f("ix_ghost_newsletters_ghost_id"), "ghost_newsletters", ["ghost_id"], unique=True)
|
||||
|
||||
# 5. user_newsletters
|
||||
op.create_table(
|
||||
"user_newsletters",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("user_id", sa.Integer(), nullable=True),
|
||||
sa.Column("newsletter_id", sa.Integer(), nullable=True),
|
||||
sa.Column("subscribed", sa.Boolean(), nullable=False),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
|
||||
sa.ForeignKeyConstraint(["newsletter_id"], ["ghost_newsletters.id"], ondelete="CASCADE"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("user_id", "newsletter_id", name="uq_user_newsletter"),
|
||||
)
|
||||
op.create_index(op.f("ix_user_newsletters_user_id"), "user_newsletters", ["user_id"])
|
||||
op.create_index(op.f("ix_user_newsletters_newsletter_id"), "user_newsletters", ["newsletter_id"])
|
||||
|
||||
# 6. ghost_tiers
|
||||
op.create_table(
|
||||
"ghost_tiers",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("ghost_id", sa.String(64), nullable=False),
|
||||
sa.Column("name", sa.String(255), nullable=False),
|
||||
sa.Column("slug", sa.String(255), nullable=True),
|
||||
sa.Column("type", sa.String(50), nullable=True),
|
||||
sa.Column("visibility", sa.String(50), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index(op.f("ix_ghost_tiers_ghost_id"), "ghost_tiers", ["ghost_id"], unique=True)
|
||||
|
||||
# 7. ghost_subscriptions
|
||||
op.create_table(
|
||||
"ghost_subscriptions",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("ghost_id", sa.String(64), nullable=False),
|
||||
sa.Column("user_id", sa.Integer(), nullable=True),
|
||||
sa.Column("status", sa.String(50), nullable=True),
|
||||
sa.Column("tier_id", sa.Integer(), nullable=True),
|
||||
sa.Column("cadence", sa.String(50), nullable=True),
|
||||
sa.Column("price_amount", sa.Integer(), nullable=True),
|
||||
sa.Column("price_currency", sa.String(10), nullable=True),
|
||||
sa.Column("stripe_customer_id", sa.String(255), nullable=True),
|
||||
sa.Column("stripe_subscription_id", sa.String(255), nullable=True),
|
||||
sa.Column("raw", JSONB(), nullable=True),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
|
||||
sa.ForeignKeyConstraint(["tier_id"], ["ghost_tiers.id"], ondelete="SET NULL"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index(op.f("ix_ghost_subscriptions_ghost_id"), "ghost_subscriptions", ["ghost_id"], unique=True)
|
||||
op.create_index(op.f("ix_ghost_subscriptions_user_id"), "ghost_subscriptions", ["user_id"])
|
||||
op.create_index(op.f("ix_ghost_subscriptions_tier_id"), "ghost_subscriptions", ["tier_id"])
|
||||
op.create_index(op.f("ix_ghost_subscriptions_stripe_customer_id"), "ghost_subscriptions", ["stripe_customer_id"])
|
||||
op.create_index(op.f("ix_ghost_subscriptions_stripe_subscription_id"), "ghost_subscriptions", ["stripe_subscription_id"])
|
||||
|
||||
# 8. magic_links
|
||||
op.create_table(
|
||||
"magic_links",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("token", sa.String(128), nullable=False),
|
||||
sa.Column("user_id", sa.Integer(), nullable=False),
|
||||
sa.Column("purpose", sa.String(32), nullable=False),
|
||||
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("used_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("now()")),
|
||||
sa.Column("ip", sa.String(64), nullable=True),
|
||||
sa.Column("user_agent", sa.String(256), nullable=True),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index("ix_magic_link_token", "magic_links", ["token"], unique=True)
|
||||
op.create_index("ix_magic_link_user", "magic_links", ["user_id"])
|
||||
|
||||
# 9. oauth_codes
|
||||
op.create_table(
|
||||
"oauth_codes",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("code", sa.String(128), nullable=False),
|
||||
sa.Column("user_id", sa.Integer(), nullable=False),
|
||||
sa.Column("client_id", sa.String(64), nullable=False),
|
||||
sa.Column("redirect_uri", sa.String(512), nullable=False),
|
||||
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("used_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column("grant_token", sa.String(128), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("now()")),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index("ix_oauth_code_code", "oauth_codes", ["code"], unique=True)
|
||||
op.create_index("ix_oauth_code_user", "oauth_codes", ["user_id"])
|
||||
|
||||
# 10. oauth_grants
|
||||
op.create_table(
|
||||
"oauth_grants",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("token", sa.String(128), nullable=False),
|
||||
sa.Column("user_id", sa.Integer(), nullable=False),
|
||||
sa.Column("client_id", sa.String(64), nullable=False),
|
||||
sa.Column("issuer_session", sa.String(128), nullable=False),
|
||||
sa.Column("device_id", sa.String(128), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("now()")),
|
||||
sa.Column("revoked_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index("ix_oauth_grant_token", "oauth_grants", ["token"], unique=True)
|
||||
op.create_index(op.f("ix_oauth_grants_user_id"), "oauth_grants", ["user_id"])
|
||||
op.create_index("ix_oauth_grant_issuer", "oauth_grants", ["issuer_session"])
|
||||
op.create_index("ix_oauth_grant_device", "oauth_grants", ["device_id", "client_id"])
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_table("oauth_grants")
|
||||
op.drop_table("oauth_codes")
|
||||
op.drop_table("magic_links")
|
||||
op.drop_table("ghost_subscriptions")
|
||||
op.drop_table("ghost_tiers")
|
||||
op.drop_table("user_newsletters")
|
||||
op.drop_table("ghost_newsletters")
|
||||
op.drop_table("user_labels")
|
||||
op.drop_table("ghost_labels")
|
||||
op.drop_table("users")
|
||||
86
account/alembic/versions/0002_hash_oauth_tokens.py
Normal file
86
account/alembic/versions/0002_hash_oauth_tokens.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""Add token_hash columns to oauth_grants and oauth_codes
|
||||
|
||||
Revision ID: acct_0002
|
||||
Revises: acct_0001
|
||||
Create Date: 2026-02-26
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
revision = "acct_0002"
|
||||
down_revision = "acct_0001"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def _hash(token: str) -> str:
|
||||
return hashlib.sha256(token.encode()).hexdigest()
|
||||
|
||||
|
||||
def upgrade():
|
||||
# Add new hash columns
|
||||
op.add_column("oauth_grants", sa.Column("token_hash", sa.String(64), nullable=True))
|
||||
op.add_column("oauth_codes", sa.Column("code_hash", sa.String(64), nullable=True))
|
||||
op.add_column("oauth_codes", sa.Column("grant_token_hash", sa.String(64), nullable=True))
|
||||
|
||||
# Backfill hashes from existing plaintext tokens
|
||||
conn = op.get_bind()
|
||||
grants = conn.execute(sa.text("SELECT id, token FROM oauth_grants WHERE token IS NOT NULL"))
|
||||
for row in grants:
|
||||
conn.execute(
|
||||
sa.text("UPDATE oauth_grants SET token_hash = :h WHERE id = :id"),
|
||||
{"h": _hash(row.token), "id": row.id},
|
||||
)
|
||||
|
||||
codes = conn.execute(sa.text("SELECT id, code, grant_token FROM oauth_codes WHERE code IS NOT NULL"))
|
||||
for row in codes:
|
||||
params = {"id": row.id, "ch": _hash(row.code)}
|
||||
params["gh"] = _hash(row.grant_token) if row.grant_token else None
|
||||
conn.execute(
|
||||
sa.text("UPDATE oauth_codes SET code_hash = :ch, grant_token_hash = :gh WHERE id = :id"),
|
||||
params,
|
||||
)
|
||||
|
||||
# Create unique indexes on hash columns
|
||||
op.create_index("ix_oauth_grant_token_hash", "oauth_grants", ["token_hash"], unique=True)
|
||||
op.create_index("ix_oauth_code_code_hash", "oauth_codes", ["code_hash"], unique=True)
|
||||
|
||||
# Make original token columns nullable (keep for rollback safety)
|
||||
op.alter_column("oauth_grants", "token", nullable=True)
|
||||
op.alter_column("oauth_codes", "code", nullable=True)
|
||||
|
||||
# Drop old unique indexes on plaintext columns
|
||||
try:
|
||||
op.drop_index("ix_oauth_grant_token", "oauth_grants")
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
op.drop_index("ix_oauth_code_code", "oauth_codes")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def downgrade():
|
||||
# Restore original NOT NULL constraints
|
||||
op.alter_column("oauth_grants", "token", nullable=False)
|
||||
op.alter_column("oauth_codes", "code", nullable=False)
|
||||
|
||||
# Drop hash columns and indexes
|
||||
try:
|
||||
op.drop_index("ix_oauth_grant_token_hash", "oauth_grants")
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
op.drop_index("ix_oauth_code_code_hash", "oauth_codes")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
op.drop_column("oauth_grants", "token_hash")
|
||||
op.drop_column("oauth_codes", "code_hash")
|
||||
op.drop_column("oauth_codes", "grant_token_hash")
|
||||
|
||||
# Restore original unique indexes
|
||||
op.create_index("ix_oauth_grant_token", "oauth_grants", ["token"], unique=True)
|
||||
op.create_index("ix_oauth_code_code", "oauth_codes", ["code"], unique=True)
|
||||
43
account/alembic/versions/0003_add_user_profile_fields.py
Normal file
43
account/alembic/versions/0003_add_user_profile_fields.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""Add author profile fields to users table.
|
||||
|
||||
Merges Ghost Author profile data into User — bio, profile_image, cover_image,
|
||||
website, location, facebook, twitter, slug, is_admin.
|
||||
|
||||
Revision ID: 0003
|
||||
Revises: 0002_hash_oauth_tokens
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "acct_0003"
|
||||
down_revision = "acct_0002"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.add_column("users", sa.Column("slug", sa.String(191), nullable=True))
|
||||
op.add_column("users", sa.Column("bio", sa.Text(), nullable=True))
|
||||
op.add_column("users", sa.Column("profile_image", sa.Text(), nullable=True))
|
||||
op.add_column("users", sa.Column("cover_image", sa.Text(), nullable=True))
|
||||
op.add_column("users", sa.Column("website", sa.Text(), nullable=True))
|
||||
op.add_column("users", sa.Column("location", sa.Text(), nullable=True))
|
||||
op.add_column("users", sa.Column("facebook", sa.Text(), nullable=True))
|
||||
op.add_column("users", sa.Column("twitter", sa.Text(), nullable=True))
|
||||
op.add_column("users", sa.Column(
|
||||
"is_admin", sa.Boolean(), nullable=False, server_default=sa.text("false"),
|
||||
))
|
||||
op.create_index("ix_users_slug", "users", ["slug"], unique=True)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_index("ix_users_slug")
|
||||
op.drop_column("users", "is_admin")
|
||||
op.drop_column("users", "twitter")
|
||||
op.drop_column("users", "facebook")
|
||||
op.drop_column("users", "location")
|
||||
op.drop_column("users", "website")
|
||||
op.drop_column("users", "cover_image")
|
||||
op.drop_column("users", "profile_image")
|
||||
op.drop_column("users", "bio")
|
||||
op.drop_column("users", "slug")
|
||||
@@ -1,12 +1,12 @@
|
||||
from __future__ import annotations
|
||||
import path_setup # noqa: F401 # adds shared/ to sys.path
|
||||
import sx.sx_components as sx_components # noqa: F401 # ensure Hypercorn --reload watches this file
|
||||
from pathlib import Path
|
||||
|
||||
from quart import g, request
|
||||
from jinja2 import FileSystemLoader, ChoiceLoader
|
||||
|
||||
from shared.infrastructure.factory import create_base_app
|
||||
from shared.services.registry import services
|
||||
|
||||
from bp import register_account_bp, register_auth_bp, register_fragments
|
||||
|
||||
@@ -14,27 +14,45 @@ from bp import register_account_bp, register_auth_bp, register_fragments
|
||||
async def account_context() -> dict:
|
||||
"""Account app context processor."""
|
||||
from shared.infrastructure.context import base_context
|
||||
from shared.services.navigation import get_navigation_tree
|
||||
from shared.infrastructure.cart_identity import current_cart_identity
|
||||
from shared.infrastructure.fragments import fetch_fragment
|
||||
from shared.infrastructure.fragments import fetch_fragments
|
||||
from shared.infrastructure.data_client import fetch_data
|
||||
from shared.contracts.dtos import CartSummaryDTO, dto_from_dict
|
||||
|
||||
ctx = await base_context()
|
||||
|
||||
ctx["nav_tree_html"] = await fetch_fragment(
|
||||
"blog", "nav-tree",
|
||||
params={"app_name": "account", "path": request.path},
|
||||
)
|
||||
# Fallback for _nav.html when nav-tree fragment fetch fails
|
||||
ctx["menu_items"] = await get_navigation_tree(g.s)
|
||||
# menu_nodes lives in db_blog; nav-tree fragment provides the real nav
|
||||
ctx["menu_items"] = []
|
||||
|
||||
# Cart data (consistent with all other apps)
|
||||
# Cart data via internal data endpoint
|
||||
ident = current_cart_identity()
|
||||
summary = await services.cart.cart_summary(
|
||||
g.s, user_id=ident["user_id"], session_id=ident["session_id"],
|
||||
)
|
||||
summary_params = {}
|
||||
if ident["user_id"] is not None:
|
||||
summary_params["user_id"] = ident["user_id"]
|
||||
if ident["session_id"] is not None:
|
||||
summary_params["session_id"] = ident["session_id"]
|
||||
raw = await fetch_data("cart", "cart-summary", params=summary_params, required=False)
|
||||
summary = dto_from_dict(CartSummaryDTO, raw) if raw else CartSummaryDTO()
|
||||
ctx["cart_count"] = summary.count + summary.calendar_count + summary.ticket_count
|
||||
ctx["cart_total"] = float(summary.total + summary.calendar_total + summary.ticket_total)
|
||||
|
||||
# Pre-fetch cross-app HTML fragments concurrently
|
||||
user = getattr(g, "user", None)
|
||||
cart_params = {}
|
||||
if ident["user_id"] is not None:
|
||||
cart_params["user_id"] = ident["user_id"]
|
||||
if ident["session_id"] is not None:
|
||||
cart_params["session_id"] = ident["session_id"]
|
||||
|
||||
cart_mini, auth_menu, nav_tree = await fetch_fragments([
|
||||
("cart", "cart-mini", cart_params or None),
|
||||
("account", "auth-menu", {"email": user.email} if user else None),
|
||||
("blog", "nav-tree", {"app_name": "account", "path": request.path}),
|
||||
])
|
||||
ctx["cart_mini"] = cart_mini
|
||||
ctx["auth_menu"] = auth_menu
|
||||
ctx["nav_tree"] = nav_tree
|
||||
|
||||
return ctx
|
||||
|
||||
|
||||
@@ -54,11 +72,44 @@ def create_app() -> "Quart":
|
||||
app.jinja_loader,
|
||||
])
|
||||
|
||||
# Setup defpage routes
|
||||
import sx.sx_components # noqa: F811 — ensure components loaded
|
||||
from sxc.pages import setup_account_pages
|
||||
setup_account_pages()
|
||||
|
||||
# --- blueprints ---
|
||||
app.register_blueprint(register_auth_bp())
|
||||
app.register_blueprint(register_account_bp())
|
||||
|
||||
account_bp = register_account_bp()
|
||||
from shared.sx.pages import mount_pages
|
||||
mount_pages(account_bp, "account")
|
||||
app.register_blueprint(account_bp)
|
||||
|
||||
app.register_blueprint(register_fragments())
|
||||
|
||||
from bp.actions.routes import register as register_actions
|
||||
app.register_blueprint(register_actions())
|
||||
|
||||
from bp.data.routes import register as register_data
|
||||
app.register_blueprint(register_data())
|
||||
|
||||
# --- Ghost membership sync at startup (background) ---
|
||||
# Runs as a background task to avoid blocking Hypercorn's startup timeout.
|
||||
@app.before_serving
|
||||
async def _schedule_ghost_membership_sync():
|
||||
import asyncio
|
||||
async def _sync():
|
||||
from services.ghost_membership import sync_all_membership_from_ghost
|
||||
from shared.db.session import get_session
|
||||
try:
|
||||
async with get_session() as s:
|
||||
await sync_all_membership_from_ghost(s)
|
||||
await s.commit()
|
||||
print("[account] Ghost membership sync complete")
|
||||
except Exception as e:
|
||||
print(f"[account] Ghost membership sync failed (non-fatal): {e}")
|
||||
asyncio.get_event_loop().create_task(_sync())
|
||||
|
||||
return app
|
||||
|
||||
|
||||
|
||||
@@ -1,15 +1,13 @@
|
||||
"""Account pages blueprint.
|
||||
|
||||
Moved from federation/bp/auth — newsletters, fragment pages (tickets, bookings).
|
||||
Mounted at root /.
|
||||
Mounted at root /. GET page handlers replaced by defpage.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import (
|
||||
Blueprint,
|
||||
request,
|
||||
render_template,
|
||||
make_response,
|
||||
redirect,
|
||||
g,
|
||||
)
|
||||
@@ -19,88 +17,64 @@ from shared.models import UserNewsletter
|
||||
from shared.models.ghost_membership_entities import GhostNewsletter
|
||||
from shared.infrastructure.urls import login_url
|
||||
from shared.infrastructure.fragments import fetch_fragment, fetch_fragments
|
||||
|
||||
oob = {
|
||||
"oob_extends": "oob_elements.html",
|
||||
"extends": "_types/root/_index.html",
|
||||
"parent_id": "root-header-child",
|
||||
"child_id": "auth-header-child",
|
||||
"header": "_types/auth/header/_header.html",
|
||||
"parent_header": "_types/root/header/_header.html",
|
||||
"nav": "_types/auth/_nav.html",
|
||||
"main": "_types/auth/_main_panel.html",
|
||||
}
|
||||
from shared.sx.helpers import sx_response
|
||||
|
||||
|
||||
def register(url_prefix="/"):
|
||||
account_bp = Blueprint("account", __name__, url_prefix=url_prefix)
|
||||
|
||||
@account_bp.context_processor
|
||||
async def context():
|
||||
events_nav, cart_nav = await fetch_fragments([
|
||||
@account_bp.before_request
|
||||
async def _prepare_page_data():
|
||||
"""Fetch account_nav fragments and load data for defpage routes."""
|
||||
# Fetch account nav items for layout (was in context_processor)
|
||||
events_nav, cart_nav, artdag_nav = await fetch_fragments([
|
||||
("events", "account-nav-item", {}),
|
||||
("cart", "account-nav-item", {}),
|
||||
])
|
||||
return {"oob": oob, "account_nav_html": events_nav + cart_nav}
|
||||
("artdag", "nav-item", {}),
|
||||
], required=False)
|
||||
g.account_nav = events_nav + cart_nav + artdag_nav
|
||||
|
||||
@account_bp.get("/")
|
||||
async def account():
|
||||
from shared.browser.app.utils.htmx import is_htmx_request
|
||||
if request.method != "GET":
|
||||
return
|
||||
|
||||
if not g.get("user"):
|
||||
return redirect(login_url("/"))
|
||||
endpoint = request.endpoint or ""
|
||||
|
||||
if not is_htmx_request():
|
||||
html = await render_template("_types/auth/index.html")
|
||||
else:
|
||||
html = await render_template("_types/auth/_oob_elements.html")
|
||||
|
||||
return await make_response(html)
|
||||
|
||||
@account_bp.get("/newsletters/")
|
||||
async def newsletters():
|
||||
from shared.browser.app.utils.htmx import is_htmx_request
|
||||
|
||||
if not g.get("user"):
|
||||
return redirect(login_url("/newsletters/"))
|
||||
|
||||
result = await g.s.execute(
|
||||
select(GhostNewsletter).order_by(GhostNewsletter.name)
|
||||
)
|
||||
all_newsletters = result.scalars().all()
|
||||
|
||||
sub_result = await g.s.execute(
|
||||
select(UserNewsletter).where(
|
||||
UserNewsletter.user_id == g.user.id,
|
||||
# Newsletters page — load newsletter data
|
||||
if endpoint.endswith("defpage_newsletters"):
|
||||
result = await g.s.execute(
|
||||
select(GhostNewsletter).order_by(GhostNewsletter.name)
|
||||
)
|
||||
)
|
||||
user_subs = {un.newsletter_id: un for un in sub_result.scalars().all()}
|
||||
all_newsletters = result.scalars().all()
|
||||
|
||||
newsletter_list = []
|
||||
for nl in all_newsletters:
|
||||
un = user_subs.get(nl.id)
|
||||
newsletter_list.append({
|
||||
"newsletter": nl,
|
||||
"un": un,
|
||||
"subscribed": un.subscribed if un else False,
|
||||
})
|
||||
|
||||
nl_oob = {**oob, "main": "_types/auth/_newsletters_panel.html"}
|
||||
|
||||
if not is_htmx_request():
|
||||
html = await render_template(
|
||||
"_types/auth/index.html",
|
||||
oob=nl_oob,
|
||||
newsletter_list=newsletter_list,
|
||||
)
|
||||
else:
|
||||
html = await render_template(
|
||||
"_types/auth/_oob_elements.html",
|
||||
oob=nl_oob,
|
||||
newsletter_list=newsletter_list,
|
||||
sub_result = await g.s.execute(
|
||||
select(UserNewsletter).where(
|
||||
UserNewsletter.user_id == g.user.id,
|
||||
)
|
||||
)
|
||||
user_subs = {un.newsletter_id: un for un in sub_result.scalars().all()}
|
||||
|
||||
return await make_response(html)
|
||||
newsletter_list = []
|
||||
for nl in all_newsletters:
|
||||
un = user_subs.get(nl.id)
|
||||
newsletter_list.append({
|
||||
"newsletter": nl,
|
||||
"un": un,
|
||||
"subscribed": un.subscribed if un else False,
|
||||
})
|
||||
g.newsletters_data = newsletter_list
|
||||
|
||||
# Fragment page — load fragment from events service
|
||||
elif endpoint.endswith("defpage_fragment_page"):
|
||||
slug = request.view_args.get("slug")
|
||||
if slug and g.get("user"):
|
||||
fragment_html = await fetch_fragment(
|
||||
"events", "account-page",
|
||||
params={"slug": slug, "user_id": str(g.user.id)},
|
||||
)
|
||||
if not fragment_html:
|
||||
from quart import abort
|
||||
abort(404)
|
||||
g.fragment_page_data = fragment_html
|
||||
|
||||
@account_bp.post("/newsletter/<int:newsletter_id>/toggle/")
|
||||
async def toggle_newsletter(newsletter_id: int):
|
||||
@@ -127,42 +101,7 @@ def register(url_prefix="/"):
|
||||
|
||||
await g.s.flush()
|
||||
|
||||
return await render_template(
|
||||
"_types/auth/_newsletter_toggle.html",
|
||||
un=un,
|
||||
)
|
||||
|
||||
# Catch-all for fragment-provided pages — must be last
|
||||
@account_bp.get("/<slug>/")
|
||||
async def fragment_page(slug):
|
||||
from shared.browser.app.utils.htmx import is_htmx_request
|
||||
from quart import abort
|
||||
|
||||
if not g.get("user"):
|
||||
return redirect(login_url(f"/{slug}/"))
|
||||
|
||||
fragment_html = await fetch_fragment(
|
||||
"events", "account-page",
|
||||
params={"slug": slug, "user_id": str(g.user.id)},
|
||||
)
|
||||
if not fragment_html:
|
||||
abort(404)
|
||||
|
||||
w_oob = {**oob, "main": "_types/auth/_fragment_panel.html"}
|
||||
|
||||
if not is_htmx_request():
|
||||
html = await render_template(
|
||||
"_types/auth/index.html",
|
||||
oob=w_oob,
|
||||
page_fragment_html=fragment_html,
|
||||
)
|
||||
else:
|
||||
html = await render_template(
|
||||
"_types/auth/_oob_elements.html",
|
||||
oob=w_oob,
|
||||
page_fragment_html=fragment_html,
|
||||
)
|
||||
|
||||
return await make_response(html)
|
||||
from sx.sx_components import render_newsletter_toggle
|
||||
return sx_response(render_newsletter_toggle(un))
|
||||
|
||||
return account_bp
|
||||
|
||||
67
account/bp/actions/routes.py
Normal file
67
account/bp/actions/routes.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Account app action endpoints.
|
||||
|
||||
Exposes write operations at ``/internal/actions/<action_name>`` for
|
||||
cross-app callers (blog webhooks) via the internal action client.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import Blueprint, g, jsonify, request
|
||||
|
||||
from shared.infrastructure.actions import ACTION_HEADER
|
||||
|
||||
|
||||
def register() -> Blueprint:
|
||||
bp = Blueprint("actions", __name__, url_prefix="/internal/actions")
|
||||
|
||||
@bp.before_request
|
||||
async def _require_action_header():
|
||||
if not request.headers.get(ACTION_HEADER):
|
||||
return jsonify({"error": "forbidden"}), 403
|
||||
from shared.infrastructure.internal_auth import validate_internal_request
|
||||
if not validate_internal_request():
|
||||
return jsonify({"error": "forbidden"}), 403
|
||||
|
||||
_handlers: dict[str, object] = {}
|
||||
|
||||
@bp.post("/<action_name>")
|
||||
async def handle_action(action_name: str):
|
||||
handler = _handlers.get(action_name)
|
||||
if handler is None:
|
||||
return jsonify({"error": "unknown action"}), 404
|
||||
try:
|
||||
result = await handler()
|
||||
return jsonify(result)
|
||||
except Exception as exc:
|
||||
import logging
|
||||
logging.getLogger(__name__).exception("Action %s failed", action_name)
|
||||
return jsonify({"error": str(exc)}), 500
|
||||
|
||||
# --- ghost-sync-member ---
|
||||
async def _ghost_sync_member():
|
||||
"""Sync a single Ghost member into db_account."""
|
||||
data = await request.get_json()
|
||||
ghost_id = data.get("ghost_id")
|
||||
if not ghost_id:
|
||||
return {"error": "ghost_id required"}, 400
|
||||
|
||||
from services.ghost_membership import sync_single_member
|
||||
await sync_single_member(g.s, ghost_id)
|
||||
return {"ok": True}
|
||||
|
||||
_handlers["ghost-sync-member"] = _ghost_sync_member
|
||||
|
||||
# --- ghost-push-member ---
|
||||
async def _ghost_push_member():
|
||||
"""Push a local user's membership data to Ghost."""
|
||||
data = await request.get_json()
|
||||
user_id = data.get("user_id")
|
||||
if not user_id:
|
||||
return {"error": "user_id required"}, 400
|
||||
|
||||
from services.ghost_membership import sync_member_to_ghost
|
||||
result_id = await sync_member_to_ghost(g.s, int(user_id))
|
||||
return {"ok": True, "ghost_id": result_id}
|
||||
|
||||
_handlers["ghost-push-member"] = _ghost_push_member
|
||||
|
||||
return bp
|
||||
@@ -5,13 +5,13 @@ OAuth2 authorize endpoint, grant verification, and SSO logout.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import secrets
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
from quart import (
|
||||
Blueprint,
|
||||
request,
|
||||
render_template,
|
||||
redirect,
|
||||
url_for,
|
||||
session as qsession,
|
||||
@@ -25,9 +25,10 @@ from sqlalchemy.exc import SQLAlchemyError
|
||||
from shared.db.session import get_session
|
||||
from shared.models import User
|
||||
from shared.models.oauth_code import OAuthCode
|
||||
from shared.models.oauth_grant import OAuthGrant
|
||||
from shared.models.oauth_grant import OAuthGrant, hash_token
|
||||
from shared.infrastructure.urls import account_url, app_url
|
||||
from shared.infrastructure.cart_identity import current_cart_identity
|
||||
from shared.infrastructure.rate_limit import rate_limit, check_poll_backoff
|
||||
from shared.events import emit_activity
|
||||
|
||||
from .services import (
|
||||
@@ -43,7 +44,7 @@ from .services import (
|
||||
SESSION_USER_KEY = "uid"
|
||||
ACCOUNT_SESSION_KEY = "account_sid"
|
||||
|
||||
ALLOWED_CLIENTS = {"blog", "market", "cart", "events", "federation", "artdag"}
|
||||
ALLOWED_CLIENTS = {"blog", "market", "cart", "events", "federation", "orders", "test", "sx", "artdag", "artdag_l2"}
|
||||
|
||||
|
||||
def register(url_prefix="/auth"):
|
||||
@@ -97,7 +98,8 @@ def register(url_prefix="/auth"):
|
||||
async with get_session() as s:
|
||||
async with s.begin():
|
||||
grant = OAuthGrant(
|
||||
token=grant_token,
|
||||
token=None,
|
||||
token_hash=hash_token(grant_token),
|
||||
user_id=g.user.id,
|
||||
client_id=client_id,
|
||||
issuer_session=account_sid,
|
||||
@@ -106,19 +108,21 @@ def register(url_prefix="/auth"):
|
||||
s.add(grant)
|
||||
|
||||
oauth_code = OAuthCode(
|
||||
code=code,
|
||||
code=None,
|
||||
code_hash=hash_token(code),
|
||||
user_id=g.user.id,
|
||||
client_id=client_id,
|
||||
redirect_uri=redirect_uri,
|
||||
expires_at=expires,
|
||||
grant_token=grant_token,
|
||||
grant_token=None,
|
||||
grant_token_hash=hash_token(grant_token),
|
||||
)
|
||||
s.add(oauth_code)
|
||||
|
||||
sep = "&" if "?" in redirect_uri else "?"
|
||||
return redirect(
|
||||
f"{redirect_uri}{sep}code={code}&state={state}"
|
||||
f"&account_did={account_did}"
|
||||
f"&account_did={account_did}&grant_token={grant_token}"
|
||||
)
|
||||
|
||||
# --- OAuth2 token exchange (for external clients like artdag) -------------
|
||||
@@ -148,11 +152,15 @@ def register(url_prefix="/auth"):
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
code_h = hash_token(code)
|
||||
async with get_session() as s:
|
||||
async with s.begin():
|
||||
# Look up by hash first (new grants), fall back to plaintext (migration)
|
||||
result = await s.execute(
|
||||
select(OAuthCode)
|
||||
.where(OAuthCode.code == code)
|
||||
.where(
|
||||
(OAuthCode.code_hash == code_h) | (OAuthCode.code == code)
|
||||
)
|
||||
.with_for_update()
|
||||
)
|
||||
oauth_code = result.scalar_one_or_none()
|
||||
@@ -196,13 +204,22 @@ def register(url_prefix="/auth"):
|
||||
if not token:
|
||||
return jsonify({"valid": False}), 200
|
||||
|
||||
token_h = hash_token(token)
|
||||
async with get_session() as s:
|
||||
grant = await s.scalar(
|
||||
select(OAuthGrant).where(OAuthGrant.token == token)
|
||||
select(OAuthGrant).where(
|
||||
(OAuthGrant.token_hash == token_h) | (OAuthGrant.token == token)
|
||||
)
|
||||
)
|
||||
if not grant or grant.revoked_at is not None:
|
||||
return jsonify({"valid": False}), 200
|
||||
return jsonify({"valid": True}), 200
|
||||
user = await s.get(User, grant.user_id)
|
||||
return jsonify({
|
||||
"valid": True,
|
||||
"user_id": grant.user_id,
|
||||
"username": user.email if user else "",
|
||||
"display_name": user.name if user else "",
|
||||
}), 200
|
||||
|
||||
@auth_bp.get("/internal/check-device")
|
||||
async def check_device():
|
||||
@@ -250,12 +267,23 @@ def register(url_prefix="/auth"):
|
||||
store_login_redirect_target()
|
||||
cross_cart_sid = request.args.get("cart_sid")
|
||||
if cross_cart_sid:
|
||||
qsession["cart_sid"] = cross_cart_sid
|
||||
import re
|
||||
# Validate cart_sid is a hex token (32 chars from token_hex(16))
|
||||
if re.fullmatch(r"[0-9a-f]{32}", cross_cart_sid):
|
||||
qsession["cart_sid"] = cross_cart_sid
|
||||
if g.get("user"):
|
||||
redirect_url = pop_login_redirect_target()
|
||||
return redirect(redirect_url)
|
||||
return await render_template("auth/login.html")
|
||||
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_login_page
|
||||
ctx = await get_template_context()
|
||||
return await render_login_page(ctx)
|
||||
|
||||
@rate_limit(
|
||||
key_func=lambda: request.headers.get("X-Forwarded-For", request.remote_addr),
|
||||
max_requests=10, window_seconds=900, scope="magic_ip",
|
||||
)
|
||||
@auth_bp.post("/start/")
|
||||
async def start_login():
|
||||
form = await request.form
|
||||
@@ -263,14 +291,22 @@ def register(url_prefix="/auth"):
|
||||
|
||||
is_valid, email = validate_email(email_input)
|
||||
if not is_valid:
|
||||
return (
|
||||
await render_template(
|
||||
"auth/login.html",
|
||||
error="Please enter a valid email address.",
|
||||
email=email_input,
|
||||
),
|
||||
400,
|
||||
)
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_login_page
|
||||
ctx = await get_template_context(error="Please enter a valid email address.", email=email_input)
|
||||
return await render_login_page(ctx), 400
|
||||
|
||||
# Per-email rate limit: 5 magic links per 15 minutes
|
||||
from shared.infrastructure.rate_limit import _check_rate_limit
|
||||
try:
|
||||
allowed, _ = await _check_rate_limit(f"magic_email:{email}", 5, 900)
|
||||
if not allowed:
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_check_email_page
|
||||
ctx = await get_template_context(email=email, email_error=None)
|
||||
return await render_check_email_page(ctx), 200
|
||||
except Exception:
|
||||
pass # Redis down — allow the request
|
||||
|
||||
user = await find_or_create_user(g.s, email)
|
||||
token, expires = await create_magic_link(g.s, user.id)
|
||||
@@ -288,11 +324,10 @@ def register(url_prefix="/auth"):
|
||||
"Please try again in a moment."
|
||||
)
|
||||
|
||||
return await render_template(
|
||||
"auth/check_email.html",
|
||||
email=email,
|
||||
email_error=email_error,
|
||||
)
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_check_email_page
|
||||
ctx = await get_template_context(email=email, email_error=email_error)
|
||||
return await render_check_email_page(ctx)
|
||||
|
||||
@auth_bp.get("/magic/<token>/")
|
||||
async def magic(token: str):
|
||||
@@ -305,20 +340,17 @@ def register(url_prefix="/auth"):
|
||||
user, error = await validate_magic_link(s, token)
|
||||
|
||||
if error:
|
||||
return (
|
||||
await render_template("auth/login.html", error=error),
|
||||
400,
|
||||
)
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_login_page
|
||||
ctx = await get_template_context(error=error)
|
||||
return await render_login_page(ctx), 400
|
||||
user_id = user.id
|
||||
|
||||
except Exception:
|
||||
return (
|
||||
await render_template(
|
||||
"auth/login.html",
|
||||
error="Could not sign you in right now. Please try again.",
|
||||
),
|
||||
502,
|
||||
)
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_login_page
|
||||
ctx = await get_template_context(error="Could not sign you in right now. Please try again.")
|
||||
return await render_login_page(ctx), 502
|
||||
|
||||
assert user_id is not None
|
||||
|
||||
@@ -364,15 +396,14 @@ def register(url_prefix="/auth"):
|
||||
|
||||
# Signal login for this device so client apps can detect it
|
||||
try:
|
||||
from shared.browser.app.redis_cacher import get_redis
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
import time as _time
|
||||
_redis = get_redis()
|
||||
if _redis:
|
||||
await _redis.set(
|
||||
f"did_auth:{g.device_id}",
|
||||
str(_time.time()).encode(),
|
||||
ex=30 * 24 * 3600,
|
||||
)
|
||||
_auth_r = await get_auth_redis()
|
||||
await _auth_r.set(
|
||||
f"did_auth:{g.device_id}",
|
||||
str(_time.time()).encode(),
|
||||
ex=30 * 24 * 3600,
|
||||
)
|
||||
except Exception:
|
||||
current_app.logger.exception("[auth] failed to set did_auth in Redis")
|
||||
|
||||
@@ -398,10 +429,9 @@ def register(url_prefix="/auth"):
|
||||
|
||||
# Clear login signal for this device
|
||||
try:
|
||||
from shared.browser.app.redis_cacher import get_redis
|
||||
_redis = get_redis()
|
||||
if _redis:
|
||||
await _redis.delete(f"did_auth:{g.device_id}")
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
_auth_r = await get_auth_redis()
|
||||
await _auth_r.delete(f"did_auth:{g.device_id}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -446,10 +476,9 @@ def register(url_prefix="/auth"):
|
||||
|
||||
# Clear login signal for this device
|
||||
try:
|
||||
from shared.browser.app.redis_cacher import get_redis
|
||||
_redis = get_redis()
|
||||
if _redis:
|
||||
await _redis.delete(f"did_auth:{g.device_id}")
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
_auth_r = await get_auth_redis()
|
||||
await _auth_r.delete(f"did_auth:{g.device_id}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -483,4 +512,248 @@ def register(url_prefix="/auth"):
|
||||
resp.delete_cookie("blog_session", domain=".rose-ash.com", path="/")
|
||||
return resp
|
||||
|
||||
# --- Device Authorization Flow (RFC 8628) ---------------------------------
|
||||
|
||||
_DEVICE_ALPHABET = "ABCDEFGHJKMNPQRSTVWXYZ"
|
||||
_DEVICE_CODE_TTL = 900 # 15 minutes
|
||||
_DEVICE_POLL_INTERVAL = 5
|
||||
|
||||
def _generate_user_code() -> str:
|
||||
"""Generate an unambiguous 8-char user code like KBMN-TWRP."""
|
||||
chars = [secrets.choice(_DEVICE_ALPHABET) for _ in range(8)]
|
||||
return "".join(chars[:4]) + "-" + "".join(chars[4:])
|
||||
|
||||
async def _approve_device(device_code: str, user) -> bool:
|
||||
"""Approve a pending device flow and create an OAuthGrant."""
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
|
||||
r = await get_auth_redis()
|
||||
raw = await r.get(f"devflow:{device_code}")
|
||||
if not raw:
|
||||
return False
|
||||
|
||||
blob = json.loads(raw)
|
||||
if blob.get("status") != "pending":
|
||||
return False
|
||||
|
||||
account_sid = qsession.get(ACCOUNT_SESSION_KEY)
|
||||
if not account_sid:
|
||||
account_sid = secrets.token_urlsafe(32)
|
||||
qsession[ACCOUNT_SESSION_KEY] = account_sid
|
||||
|
||||
grant_token = secrets.token_urlsafe(48)
|
||||
|
||||
async with get_session() as s:
|
||||
async with s.begin():
|
||||
grant = OAuthGrant(
|
||||
token=None,
|
||||
token_hash=hash_token(grant_token),
|
||||
user_id=user.id,
|
||||
client_id=blob["client_id"],
|
||||
issuer_session=account_sid,
|
||||
)
|
||||
s.add(grant)
|
||||
|
||||
# Update Redis blob
|
||||
blob["status"] = "approved"
|
||||
blob["user_id"] = user.id
|
||||
blob["grant_token"] = grant_token
|
||||
user_code = blob["user_code"]
|
||||
|
||||
ttl = await r.ttl(f"devflow:{device_code}")
|
||||
if ttl and ttl > 0:
|
||||
await r.set(f"devflow:{device_code}", json.dumps(blob).encode(), ex=ttl)
|
||||
else:
|
||||
await r.set(f"devflow:{device_code}", json.dumps(blob).encode(), ex=_DEVICE_CODE_TTL)
|
||||
|
||||
# Remove reverse lookup (code already used)
|
||||
normalized_uc = user_code.replace("-", "").upper()
|
||||
await r.delete(f"devflow_uc:{normalized_uc}")
|
||||
|
||||
return True
|
||||
|
||||
@rate_limit(
|
||||
key_func=lambda: request.headers.get("X-Forwarded-For", request.remote_addr),
|
||||
max_requests=10, window_seconds=3600, scope="dev_auth",
|
||||
)
|
||||
@csrf_exempt
|
||||
@auth_bp.post("/device/authorize")
|
||||
@auth_bp.post("/device/authorize/")
|
||||
async def device_authorize():
|
||||
"""RFC 8628 — CLI requests a device code."""
|
||||
data = await request.get_json(silent=True) or {}
|
||||
client_id = data.get("client_id", "")
|
||||
|
||||
if client_id not in ALLOWED_CLIENTS:
|
||||
return jsonify({"error": "invalid_client"}), 400
|
||||
|
||||
device_code = secrets.token_urlsafe(32)
|
||||
user_code = _generate_user_code()
|
||||
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
|
||||
r = await get_auth_redis()
|
||||
|
||||
blob = json.dumps({
|
||||
"client_id": client_id,
|
||||
"user_code": user_code,
|
||||
"status": "pending",
|
||||
"user_id": None,
|
||||
"grant_token": None,
|
||||
}).encode()
|
||||
|
||||
normalized_uc = user_code.replace("-", "").upper()
|
||||
pipe = r.pipeline()
|
||||
pipe.set(f"devflow:{device_code}", blob, ex=_DEVICE_CODE_TTL)
|
||||
pipe.set(f"devflow_uc:{normalized_uc}", device_code.encode(), ex=_DEVICE_CODE_TTL)
|
||||
await pipe.execute()
|
||||
|
||||
verification_uri = account_url("/auth/device")
|
||||
|
||||
return jsonify({
|
||||
"device_code": device_code,
|
||||
"user_code": user_code,
|
||||
"verification_uri": verification_uri,
|
||||
"expires_in": _DEVICE_CODE_TTL,
|
||||
"interval": _DEVICE_POLL_INTERVAL,
|
||||
})
|
||||
|
||||
@csrf_exempt
|
||||
@auth_bp.post("/device/token")
|
||||
@auth_bp.post("/device/token/")
|
||||
async def device_token():
|
||||
"""RFC 8628 — CLI polls for the grant token."""
|
||||
data = await request.get_json(silent=True) or {}
|
||||
device_code = data.get("device_code", "")
|
||||
client_id = data.get("client_id", "")
|
||||
|
||||
if not device_code or client_id not in ALLOWED_CLIENTS:
|
||||
return jsonify({"error": "invalid_request"}), 400
|
||||
|
||||
# Enforce polling backoff per RFC 8628
|
||||
try:
|
||||
poll_ok, interval = await check_poll_backoff(device_code)
|
||||
if not poll_ok:
|
||||
return jsonify({"error": "slow_down", "interval": interval}), 400
|
||||
except Exception:
|
||||
pass # Redis down — allow the request
|
||||
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
|
||||
r = await get_auth_redis()
|
||||
raw = await r.get(f"devflow:{device_code}")
|
||||
if not raw:
|
||||
return jsonify({"error": "expired_token"}), 400
|
||||
|
||||
blob = json.loads(raw)
|
||||
|
||||
if blob.get("client_id") != client_id:
|
||||
return jsonify({"error": "invalid_request"}), 400
|
||||
|
||||
if blob["status"] == "pending":
|
||||
return jsonify({"error": "authorization_pending"}), 428
|
||||
|
||||
if blob["status"] == "denied":
|
||||
return jsonify({"error": "access_denied"}), 400
|
||||
|
||||
if blob["status"] == "approved":
|
||||
async with get_session() as s:
|
||||
user = await s.get(User, blob["user_id"])
|
||||
if not user:
|
||||
return jsonify({"error": "access_denied"}), 400
|
||||
|
||||
# Clean up Redis
|
||||
await r.delete(f"devflow:{device_code}")
|
||||
|
||||
return jsonify({
|
||||
"access_token": blob["grant_token"],
|
||||
"token_type": "bearer",
|
||||
"user_id": blob["user_id"],
|
||||
"username": user.email or "",
|
||||
"display_name": user.name or "",
|
||||
})
|
||||
|
||||
return jsonify({"error": "invalid_request"}), 400
|
||||
|
||||
@auth_bp.get("/device")
|
||||
@auth_bp.get("/device/")
|
||||
async def device_form():
|
||||
"""Browser form where user enters the code displayed in terminal."""
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_device_page
|
||||
code = request.args.get("code", "")
|
||||
ctx = await get_template_context(code=code)
|
||||
return await render_device_page(ctx)
|
||||
|
||||
@auth_bp.post("/device")
|
||||
@auth_bp.post("/device/")
|
||||
async def device_submit():
|
||||
"""Browser submit — validates code, approves if logged in."""
|
||||
form = await request.form
|
||||
user_code = (form.get("code") or "").strip().replace("-", "").upper()
|
||||
|
||||
if not user_code or len(user_code) != 8:
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_device_page
|
||||
ctx = await get_template_context(error="Please enter a valid 8-character code.", code=form.get("code", ""))
|
||||
return await render_device_page(ctx), 400
|
||||
|
||||
from shared.infrastructure.auth_redis import get_auth_redis
|
||||
|
||||
r = await get_auth_redis()
|
||||
device_code = await r.get(f"devflow_uc:{user_code}")
|
||||
if not device_code:
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_device_page
|
||||
ctx = await get_template_context(error="Code not found or expired. Please try again.", code=form.get("code", ""))
|
||||
return await render_device_page(ctx), 400
|
||||
|
||||
if isinstance(device_code, bytes):
|
||||
device_code = device_code.decode()
|
||||
|
||||
# Not logged in — redirect to login, then come back to complete
|
||||
if not g.get("user"):
|
||||
complete_url = url_for("auth.device_complete", code=device_code)
|
||||
store_login_redirect_target()
|
||||
return redirect(url_for("auth.login_form", next=complete_url))
|
||||
|
||||
# Logged in — approve immediately
|
||||
ok = await _approve_device(device_code, g.user)
|
||||
if not ok:
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_device_page
|
||||
ctx = await get_template_context(error="Code expired or already used.")
|
||||
return await render_device_page(ctx), 400
|
||||
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_device_approved_page
|
||||
ctx = await get_template_context()
|
||||
return await render_device_approved_page(ctx)
|
||||
|
||||
@auth_bp.get("/device/complete")
|
||||
@auth_bp.get("/device/complete/")
|
||||
async def device_complete():
|
||||
"""Post-login redirect — completes approval after magic link auth."""
|
||||
from shared.sx.page import get_template_context
|
||||
from sx.sx_components import render_device_page, render_device_approved_page
|
||||
|
||||
device_code = request.args.get("code", "")
|
||||
|
||||
if not device_code:
|
||||
return redirect(url_for("auth.device_form"))
|
||||
|
||||
if not g.get("user"):
|
||||
store_login_redirect_target()
|
||||
return redirect(url_for("auth.login_form"))
|
||||
|
||||
ok = await _approve_device(device_code, g.user)
|
||||
if not ok:
|
||||
ctx = await get_template_context(
|
||||
error="Code expired or already used. Please start the login process again in your terminal.",
|
||||
)
|
||||
return await render_device_page(ctx), 400
|
||||
|
||||
ctx = await get_template_context()
|
||||
return await render_device_approved_page(ctx)
|
||||
|
||||
return auth_bp
|
||||
|
||||
0
account/bp/data/__init__.py
Normal file
0
account/bp/data/__init__.py
Normal file
67
account/bp/data/routes.py
Normal file
67
account/bp/data/routes.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Account app data endpoints.
|
||||
|
||||
Exposes read-only JSON queries at ``/internal/data/<query_name>`` for
|
||||
cross-app callers via the internal data client.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import Blueprint, g, jsonify, request
|
||||
|
||||
from shared.infrastructure.data_client import DATA_HEADER
|
||||
from sqlalchemy import select
|
||||
from shared.models import User
|
||||
|
||||
|
||||
def register() -> Blueprint:
|
||||
bp = Blueprint("data", __name__, url_prefix="/internal/data")
|
||||
|
||||
@bp.before_request
|
||||
async def _require_data_header():
|
||||
if not request.headers.get(DATA_HEADER):
|
||||
return jsonify({"error": "forbidden"}), 403
|
||||
from shared.infrastructure.internal_auth import validate_internal_request
|
||||
if not validate_internal_request():
|
||||
return jsonify({"error": "forbidden"}), 403
|
||||
|
||||
_handlers: dict[str, object] = {}
|
||||
|
||||
@bp.get("/<query_name>")
|
||||
async def handle_query(query_name: str):
|
||||
handler = _handlers.get(query_name)
|
||||
if handler is None:
|
||||
return jsonify({"error": "unknown query"}), 404
|
||||
result = await handler()
|
||||
return jsonify(result)
|
||||
|
||||
# --- user-by-email ---
|
||||
async def _user_by_email():
|
||||
"""Return user_id for a given email address."""
|
||||
email = request.args.get("email", "").strip().lower()
|
||||
if not email:
|
||||
return None
|
||||
result = await g.s.execute(
|
||||
select(User.id).where(User.email.ilike(email))
|
||||
)
|
||||
row = result.first()
|
||||
if not row:
|
||||
return None
|
||||
return {"user_id": row[0]}
|
||||
|
||||
_handlers["user-by-email"] = _user_by_email
|
||||
|
||||
# --- newsletters ---
|
||||
async def _newsletters():
|
||||
"""Return all Ghost newsletters (for blog post editor)."""
|
||||
from shared.models.ghost_membership_entities import GhostNewsletter
|
||||
result = await g.s.execute(
|
||||
select(GhostNewsletter.id, GhostNewsletter.ghost_id, GhostNewsletter.name, GhostNewsletter.slug)
|
||||
.order_by(GhostNewsletter.name)
|
||||
)
|
||||
return [
|
||||
{"id": row[0], "ghost_id": row[1], "name": row[2], "slug": row[3]}
|
||||
for row in result.all()
|
||||
]
|
||||
|
||||
_handlers["newsletters"] = _newsletters
|
||||
|
||||
return bp
|
||||
@@ -1,41 +1,23 @@
|
||||
"""Account app fragment endpoints.
|
||||
|
||||
Exposes HTML fragments at ``/internal/fragments/<type>`` for consumption
|
||||
Exposes sx fragments at ``/internal/fragments/<type>`` for consumption
|
||||
by other coop apps via the fragment client.
|
||||
|
||||
Fragments:
|
||||
auth-menu Desktop + mobile auth menu (sign-in or user link)
|
||||
All handlers are defined declaratively in .sx files under
|
||||
``account/sx/handlers/`` and dispatched via the sx handler registry.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import Blueprint, Response, request, render_template
|
||||
from quart import Blueprint, Response, request
|
||||
|
||||
from shared.infrastructure.fragments import FRAGMENT_HEADER
|
||||
from shared.sx.handlers import get_handler, execute_handler
|
||||
|
||||
|
||||
def register():
|
||||
bp = Blueprint("fragments", __name__, url_prefix="/internal/fragments")
|
||||
|
||||
# ---------------------------------------------------------------
|
||||
# Fragment handlers
|
||||
# ---------------------------------------------------------------
|
||||
|
||||
async def _auth_menu():
|
||||
user_email = request.args.get("email", "")
|
||||
return await render_template(
|
||||
"fragments/auth_menu.html",
|
||||
user_email=user_email,
|
||||
)
|
||||
|
||||
_handlers = {
|
||||
"auth-menu": _auth_menu,
|
||||
}
|
||||
|
||||
# ---------------------------------------------------------------
|
||||
# Routing
|
||||
# ---------------------------------------------------------------
|
||||
|
||||
@bp.before_request
|
||||
async def _require_fragment_header():
|
||||
if not request.headers.get(FRAGMENT_HEADER):
|
||||
@@ -43,10 +25,12 @@ def register():
|
||||
|
||||
@bp.get("/<fragment_type>")
|
||||
async def get_fragment(fragment_type: str):
|
||||
handler = _handlers.get(fragment_type)
|
||||
if handler is None:
|
||||
return Response("", status=200, content_type="text/html")
|
||||
html = await handler()
|
||||
return Response(html, status=200, content_type="text/html")
|
||||
handler_def = get_handler("account", fragment_type)
|
||||
if handler_def is not None:
|
||||
result = await execute_handler(
|
||||
handler_def, "account", args=dict(request.args),
|
||||
)
|
||||
return Response(result, status=200, content_type="text/sx")
|
||||
return Response("", status=200, content_type="text/sx")
|
||||
|
||||
return bp
|
||||
|
||||
42
account/entrypoint.sh
Normal file → Executable file
42
account/entrypoint.sh
Normal file → Executable file
@@ -10,17 +10,53 @@ if [[ -n "${DATABASE_HOST:-}" && -n "${DATABASE_PORT:-}" ]]; then
|
||||
done
|
||||
fi
|
||||
|
||||
# Create own database + run own migrations
|
||||
if [[ "${RUN_MIGRATIONS:-}" == "true" && -n "${ALEMBIC_DATABASE_URL:-}" ]]; then
|
||||
python3 -c "
|
||||
import os, re
|
||||
url = os.environ['ALEMBIC_DATABASE_URL']
|
||||
m = re.match(r'postgresql\+\w+://([^:]+):([^@]+)@([^:]+):(\d+)/(.+)', url)
|
||||
if not m:
|
||||
print('Could not parse ALEMBIC_DATABASE_URL, skipping DB creation')
|
||||
exit(0)
|
||||
user, password, host, port, dbname = m.groups()
|
||||
|
||||
import psycopg
|
||||
conn = psycopg.connect(
|
||||
f'postgresql://{user}:{password}@{host}:{port}/postgres',
|
||||
autocommit=True,
|
||||
)
|
||||
cur = conn.execute('SELECT 1 FROM pg_database WHERE datname = %s', (dbname,))
|
||||
if not cur.fetchone():
|
||||
conn.execute(f'CREATE DATABASE {dbname}')
|
||||
print(f'Created database {dbname}')
|
||||
else:
|
||||
print(f'Database {dbname} already exists')
|
||||
conn.close()
|
||||
" || echo "DB creation failed (non-fatal), continuing..."
|
||||
|
||||
echo "Running account Alembic migrations..."
|
||||
if [ -d account ]; then (cd account && alembic upgrade head); else alembic upgrade head; fi
|
||||
fi
|
||||
|
||||
# Clear Redis page cache on deploy
|
||||
if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
|
||||
echo "Flushing Redis cache..."
|
||||
python3 -c "
|
||||
import redis, os
|
||||
r = redis.from_url(os.environ['REDIS_URL'])
|
||||
r.flushall()
|
||||
r.flushdb()
|
||||
print('Redis cache cleared.')
|
||||
" || echo "Redis flush failed (non-fatal), continuing..."
|
||||
fi
|
||||
|
||||
# Start the app
|
||||
echo "Starting Hypercorn (${APP_MODULE:-app:app})..."
|
||||
PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" --bind 0.0.0.0:${PORT:-8000}
|
||||
RELOAD_FLAG=""
|
||||
if [[ "${RELOAD:-}" == "true" ]]; then
|
||||
RELOAD_FLAG="--reload"
|
||||
python3 -m shared.dev_watcher &
|
||||
echo "Starting Hypercorn (${APP_MODULE:-app:app}) with auto-reload..."
|
||||
else
|
||||
echo "Starting Hypercorn (${APP_MODULE:-app:app})..."
|
||||
fi
|
||||
PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" --bind 0.0.0.0:${PORT:-8000} --workers ${WORKERS:-2} --keep-alive 75 ${RELOAD_FLAG}
|
||||
|
||||
@@ -5,23 +5,7 @@ from __future__ import annotations
|
||||
def register_domain_services() -> None:
|
||||
"""Register services for the account app.
|
||||
|
||||
Account needs all domain services since widgets (tickets, bookings)
|
||||
pull data from blog, calendar, market, cart, and federation.
|
||||
Account is a consumer-only dashboard app. It has no own domain.
|
||||
All cross-app data comes via fragments and HTTP data endpoints.
|
||||
"""
|
||||
from shared.services.registry import services
|
||||
from shared.services.federation_impl import SqlFederationService
|
||||
from shared.services.blog_impl import SqlBlogService
|
||||
from shared.services.calendar_impl import SqlCalendarService
|
||||
from shared.services.market_impl import SqlMarketService
|
||||
from shared.services.cart_impl import SqlCartService
|
||||
|
||||
if not services.has("federation"):
|
||||
services.federation = SqlFederationService()
|
||||
if not services.has("blog"):
|
||||
services.blog = SqlBlogService()
|
||||
if not services.has("calendar"):
|
||||
services.calendar = SqlCalendarService()
|
||||
if not services.has("market"):
|
||||
services.market = SqlMarketService()
|
||||
if not services.has("cart"):
|
||||
services.cart = SqlCartService()
|
||||
pass
|
||||
|
||||
621
account/services/ghost_membership.py
Normal file
621
account/services/ghost_membership.py
Normal file
@@ -0,0 +1,621 @@
|
||||
"""Ghost membership sync — account-owned.
|
||||
|
||||
Handles Ghost ↔ DB sync for user/membership data:
|
||||
- Ghost → DB: fetch members from Ghost API, upsert into account tables
|
||||
- DB → Ghost: push local user changes back to Ghost API
|
||||
|
||||
All tables involved (users, ghost_labels, user_labels, ghost_newsletters,
|
||||
user_newsletters, ghost_tiers, ghost_subscriptions) live in db_account.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
import httpx
|
||||
from sqlalchemy import select, delete, or_, and_
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm.attributes import flag_modified
|
||||
|
||||
from shared.models import User
|
||||
from shared.models.ghost_membership_entities import (
|
||||
GhostLabel, UserLabel,
|
||||
GhostNewsletter, UserNewsletter,
|
||||
GhostTier, GhostSubscription,
|
||||
)
|
||||
|
||||
from shared.infrastructure.ghost_admin_token import make_ghost_admin_jwt
|
||||
from urllib.parse import quote
|
||||
|
||||
GHOST_ADMIN_API_URL = os.environ.get("GHOST_ADMIN_API_URL", "")
|
||||
|
||||
|
||||
def _auth_header() -> dict[str, str]:
|
||||
return {"Authorization": f"Ghost {make_ghost_admin_jwt()}"}
|
||||
|
||||
|
||||
def _iso(val: str | None) -> datetime | None:
|
||||
if not val:
|
||||
return None
|
||||
return datetime.fromisoformat(val.replace("Z", "+00:00"))
|
||||
|
||||
|
||||
def _to_str_or_none(v) -> Optional[str]:
|
||||
if v is None:
|
||||
return None
|
||||
if isinstance(v, (dict, list, set, tuple, bytes, bytearray)):
|
||||
return None
|
||||
s = str(v).strip()
|
||||
return s or None
|
||||
|
||||
|
||||
def _sanitize_member_payload(payload: dict) -> dict:
|
||||
"""Coerce types Ghost expects and drop empties to avoid 422/500 quirks."""
|
||||
out: dict = {}
|
||||
|
||||
email = _to_str_or_none(payload.get("email"))
|
||||
if email:
|
||||
out["email"] = email.lower()
|
||||
|
||||
name = _to_str_or_none(payload.get("name"))
|
||||
if name is not None:
|
||||
out["name"] = name
|
||||
|
||||
note = _to_str_or_none(payload.get("note"))
|
||||
if note is not None:
|
||||
out["note"] = note
|
||||
|
||||
if "subscribed" in payload:
|
||||
out["subscribed"] = bool(payload.get("subscribed"))
|
||||
|
||||
labels = []
|
||||
for item in payload.get("labels") or []:
|
||||
gid = _to_str_or_none(item.get("id"))
|
||||
gname = _to_str_or_none(item.get("name"))
|
||||
if gid:
|
||||
labels.append({"id": gid})
|
||||
elif gname:
|
||||
labels.append({"name": gname})
|
||||
if labels:
|
||||
out["labels"] = labels
|
||||
|
||||
newsletters = []
|
||||
for item in payload.get("newsletters") or []:
|
||||
gid = _to_str_or_none(item.get("id"))
|
||||
gname = _to_str_or_none(item.get("name"))
|
||||
row = {"subscribed": bool(item.get("subscribed", True))}
|
||||
if gid:
|
||||
row["id"] = gid
|
||||
newsletters.append(row)
|
||||
elif gname:
|
||||
row["name"] = gname
|
||||
newsletters.append(row)
|
||||
if newsletters:
|
||||
out["newsletters"] = newsletters
|
||||
|
||||
gid = _to_str_or_none(payload.get("id"))
|
||||
if gid:
|
||||
out["id"] = gid
|
||||
|
||||
return out
|
||||
|
||||
|
||||
def _member_email(m: dict[str, Any]) -> Optional[str]:
|
||||
email = (m.get("email") or "").strip().lower() or None
|
||||
return email
|
||||
|
||||
|
||||
# ---- upsert helpers for related entities ----
|
||||
|
||||
async def _upsert_label(sess: AsyncSession, data: dict) -> GhostLabel:
|
||||
res = await sess.execute(select(GhostLabel).where(GhostLabel.ghost_id == data["id"]))
|
||||
obj = res.scalar_one_or_none()
|
||||
if not obj:
|
||||
obj = GhostLabel(ghost_id=data["id"])
|
||||
sess.add(obj)
|
||||
obj.name = data.get("name") or obj.name
|
||||
obj.slug = data.get("slug") or obj.slug
|
||||
await sess.flush()
|
||||
return obj
|
||||
|
||||
|
||||
async def _upsert_newsletter(sess: AsyncSession, data: dict) -> GhostNewsletter:
|
||||
res = await sess.execute(select(GhostNewsletter).where(GhostNewsletter.ghost_id == data["id"]))
|
||||
obj = res.scalar_one_or_none()
|
||||
if not obj:
|
||||
obj = GhostNewsletter(ghost_id=data["id"])
|
||||
sess.add(obj)
|
||||
obj.name = data.get("name") or obj.name
|
||||
obj.slug = data.get("slug") or obj.slug
|
||||
obj.description = data.get("description") or obj.description
|
||||
await sess.flush()
|
||||
return obj
|
||||
|
||||
|
||||
async def _upsert_tier(sess: AsyncSession, data: dict) -> GhostTier:
|
||||
res = await sess.execute(select(GhostTier).where(GhostTier.ghost_id == data["id"]))
|
||||
obj = res.scalar_one_or_none()
|
||||
if not obj:
|
||||
obj = GhostTier(ghost_id=data["id"])
|
||||
sess.add(obj)
|
||||
obj.name = data.get("name") or obj.name
|
||||
obj.slug = data.get("slug") or obj.slug
|
||||
obj.type = data.get("type") or obj.type
|
||||
obj.visibility = data.get("visibility") or obj.visibility
|
||||
await sess.flush()
|
||||
return obj
|
||||
|
||||
|
||||
def _price_cents(sd: dict) -> Optional[int]:
|
||||
try:
|
||||
return int((sd.get("price") or {}).get("amount"))
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
# ---- find/create user by ghost_id or email ----
|
||||
|
||||
async def _find_or_create_user_by_ghost_or_email(sess: AsyncSession, data: dict) -> User:
    """Locate (or create) the local User for a Ghost member payload.

    Lookup order: exact ghost_id first, then case-insensitive email.  A user
    matched by email gets the ghost_id backfilled when it was missing.  When
    neither matches, a new User is created (with a synthetic placeholder
    email if the payload has none).
    """
    ghost_id = data.get("id")
    email = _member_email(data)

    # 1) Prefer the stable Ghost id.
    if ghost_id:
        res = await sess.execute(select(User).where(User.ghost_id == ghost_id))
        u = res.scalar_one_or_none()
        if u:
            return u

    # 2) Fall back to a case-insensitive email match; link the Ghost id
    #    onto the user if it wasn't set yet.
    if email:
        res = await sess.execute(select(User).where(User.email.ilike(email)))
        u = res.scalar_one_or_none()
        if u:
            if ghost_id and not u.ghost_id:
                u.ghost_id = ghost_id
            return u

    # 3) No match: create.  The "@invalid.local" placeholder keeps the email
    #    column populated for Ghost members that carry no email address.
    u = User(email=email or f"_ghost_{ghost_id}@invalid.local")
    if ghost_id:
        u.ghost_id = ghost_id
    sess.add(u)
    await sess.flush()
    return u
|
||||
|
||||
|
||||
# ---- apply membership data to user ----
|
||||
|
||||
async def _apply_user_membership(sess: AsyncSession, user: User, m: dict) -> User:
    """Apply Ghost member payload to local User.

    Updates scalar profile fields, then rebuilds the label and newsletter
    join rows (delete-then-reinsert), then upserts each subscription.
    Flushes after each stage so generated ids are available to the next.
    """
    sess.add(user)

    # Scalar fields: incoming non-empty values win; blanks keep local data.
    user.name = m.get("name") or user.name
    user.ghost_status = m.get("status") or user.ghost_status
    user.ghost_subscribed = bool(m.get("subscribed", True))
    user.ghost_note = m.get("note") or user.ghost_note
    user.avatar_image = m.get("avatar_image") or user.avatar_image
    # Stripe customer id may arrive under several shapes depending on the
    # Ghost payload variant; take the first present.
    user.stripe_customer_id = (
        (m.get("stripe") or {}).get("customer_id")
        or (m.get("customer") or {}).get("id")
        or m.get("stripe_customer_id")
        or user.stripe_customer_id
    )
    user.ghost_raw = dict(m)
    # ghost_raw is a JSON column; flag_modified forces SQLAlchemy to persist
    # the in-place replacement.
    flag_modified(user, "ghost_raw")

    await sess.flush()

    # Labels join: upsert each label first, then replace the join rows
    # wholesale so removed labels disappear.
    label_ids: list[int] = []
    for ld in m.get("labels") or []:
        lbl = await _upsert_label(sess, ld)
        label_ids.append(lbl.id)
    await sess.execute(delete(UserLabel).where(UserLabel.user_id == user.id))
    for lid in label_ids:
        sess.add(UserLabel(user_id=user.id, label_id=lid))
    await sess.flush()

    # Newsletters join with subscribed flag — same replace-all strategy.
    nl_rows: list[tuple[int, bool]] = []
    for nd in m.get("newsletters") or []:
        nl = await _upsert_newsletter(sess, nd)
        nl_rows.append((nl.id, bool(nd.get("subscribed", True))))
    await sess.execute(delete(UserNewsletter).where(UserNewsletter.user_id == user.id))
    for nl_id, subbed in nl_rows:
        sess.add(UserNewsletter(user_id=user.id, newsletter_id=nl_id, subscribed=subbed))
    await sess.flush()

    # Subscriptions: upserted individually (not replace-all) keyed by the
    # Ghost subscription id; entries without an id are skipped.
    for sd in m.get("subscriptions") or []:
        sid = sd.get("id")
        if not sid:
            continue

        tier_id: Optional[int] = None
        if sd.get("tier"):
            tier = await _upsert_tier(sess, sd["tier"])
            await sess.flush()  # ensure tier.id is assigned before linking
            tier_id = tier.id

        res = await sess.execute(select(GhostSubscription).where(GhostSubscription.ghost_id == sid))
        sub = res.scalar_one_or_none()
        if not sub:
            sub = GhostSubscription(ghost_id=sid, user_id=user.id)
            sess.add(sub)

        # Reassign ownership unconditionally in case the member moved.
        sub.user_id = user.id
        sub.status = sd.get("status") or sub.status
        sub.cadence = (sd.get("plan") or {}).get("interval") or sd.get("cadence") or sub.cadence
        sub.price_amount = _price_cents(sd)
        sub.price_currency = (sd.get("price") or {}).get("currency") or sub.price_currency
        sub.stripe_customer_id = (
            (sd.get("customer") or {}).get("id")
            or (sd.get("stripe") or {}).get("customer_id")
            or sub.stripe_customer_id
        )
        sub.stripe_subscription_id = (
            sd.get("stripe_subscription_id")
            or (sd.get("stripe") or {}).get("subscription_id")
            or sub.stripe_subscription_id
        )
        if tier_id is not None:
            sub.tier_id = tier_id
        sub.raw = dict(sd)
        flag_modified(sub, "raw")  # JSON column: force persistence

    await sess.flush()
    return user
|
||||
|
||||
|
||||
# =====================================================
|
||||
# PUSH MEMBERS FROM LOCAL DB -> GHOST (DB -> Ghost)
|
||||
# =====================================================
|
||||
|
||||
def _ghost_member_payload_base(u: User) -> dict:
    """Build the scalar Ghost member payload (email/name/note/subscribed) for a user."""
    payload: dict = {}

    # Optional string fields are included only when present and non-empty.
    email = _to_str_or_none(getattr(u, "email", None))
    if email:
        payload["email"] = email.lower()

    name = _to_str_or_none(getattr(u, "name", None))
    if name:
        payload["name"] = name

    note = _to_str_or_none(getattr(u, "ghost_note", None))
    if note:
        payload["note"] = note

    # Subscribed defaults to True for users that have never synced.
    payload["subscribed"] = bool(getattr(u, "ghost_subscribed", True))
    return payload
|
||||
|
||||
|
||||
async def _newsletters_for_user(sess: AsyncSession, user_id: int) -> list[dict]:
    """Return the user's newsletter rows as Ghost payload dicts.

    Each row carries ``subscribed`` plus either ``id`` (preferred) or
    ``name``; rows with neither are dropped, and duplicates are skipped.
    """
    result = await sess.execute(
        select(GhostNewsletter.ghost_id, UserNewsletter.subscribed, GhostNewsletter.name)
        .join(UserNewsletter, UserNewsletter.newsletter_id == GhostNewsletter.id)
        .where(UserNewsletter.user_id == user_id)
    )
    seen: set = set()
    rows: list[dict] = []
    for ghost_id, subscribed, name in result.all():
        ghost_id = (ghost_id or "").strip() or None
        name = (name or "").strip() or None
        if ghost_id:
            key = ("id", ghost_id)
            payload = {"subscribed": bool(subscribed), "id": ghost_id}
        elif name:
            key = ("name", name.lower())
            payload = {"subscribed": bool(subscribed), "name": name}
        else:
            continue
        if key not in seen:
            seen.add(key)
            rows.append(payload)
    return rows
|
||||
|
||||
|
||||
async def _labels_for_user(sess: AsyncSession, user_id: int) -> list[dict]:
    """Return the user's labels as Ghost payload dicts.

    Prefers ``{"id": ...}`` when a Ghost id exists, else ``{"name": ...}``;
    empty rows are dropped and duplicates skipped.
    """
    result = await sess.execute(
        select(GhostLabel.ghost_id, GhostLabel.name)
        .join(UserLabel, UserLabel.label_id == GhostLabel.id)
        .where(UserLabel.user_id == user_id)
    )
    seen: set = set()
    rows: list[dict] = []
    for ghost_id, name in result.all():
        ghost_id = (ghost_id or "").strip() or None
        name = (name or "").strip() or None
        if ghost_id:
            key = ("id", ghost_id)
            payload = {"id": ghost_id}
        elif name:
            key = ("name", name.lower())
            payload = {"name": name}
        else:
            continue
        if key not in seen:
            seen.add(key)
            rows.append(payload)
    return rows
|
||||
|
||||
|
||||
async def _ghost_find_member_by_email(email: str) -> Optional[dict]:
    """Look up a Ghost member by exact email; return the member dict or None."""
    if not email:
        return None
    url = f"{GHOST_ADMIN_API_URL}/members/?filter=email:{quote(email)}&limit=1"
    async with httpx.AsyncClient(timeout=30) as client:
        response = await client.get(url, headers=_auth_header())
        response.raise_for_status()
    found = response.json().get("members") or []
    return found[0] if found else None
|
||||
|
||||
|
||||
async def _ghost_upsert_member(payload: dict, ghost_id: str | None = None) -> dict:
    """Create/update a member, with sanitization + 5xx retry/backoff.

    Strategy: with a known ghost_id, PUT to that member — falling back on
    404 to an email lookup or a plain POST create.  Without a ghost_id,
    POST with ``?upsert=true``.  On 422 validation errors the offending
    fields (note/name/labels, detected from the error text) are stripped
    and the request retried once.
    """
    # Whitelist only the member fields Ghost's Admin API accepts.
    safe_keys = ("email", "name", "note", "subscribed", "labels", "newsletters", "id")
    pl_raw = {k: v for k, v in payload.items() if k in safe_keys}
    pl = _sanitize_member_payload(pl_raw)

    async def _request_with_retry(client: httpx.AsyncClient, method: str, url: str, json: dict) -> httpx.Response:
        # Up to 3 attempts on 5xx responses with exponential backoff (0.5s, 1s);
        # non-5xx responses (including 4xx) are returned immediately.
        delay = 0.5
        for attempt in range(3):
            r = await client.request(method, url, headers=_auth_header(), json=json)
            if r.status_code >= 500:
                if attempt < 2:
                    await asyncio.sleep(delay)
                    delay *= 2
                    continue
                return r
            return r
        return r

    async with httpx.AsyncClient(timeout=30) as client:

        async def _put(mid: str, p: dict) -> dict:
            # Update an existing member identified by Ghost id `mid`.
            r = await _request_with_retry(
                client, "PUT",
                f"{GHOST_ADMIN_API_URL}/members/{mid}/",
                {"members": [p]},
            )
            if r.status_code == 404:
                # Stale ghost_id: re-find the member by email and PUT there,
                # otherwise create it with a POST.
                existing = await _ghost_find_member_by_email(p.get("email", ""))
                if existing and existing.get("id"):
                    r2 = await _request_with_retry(
                        client, "PUT",
                        f"{GHOST_ADMIN_API_URL}/members/{existing['id']}/",
                        {"members": [p]},
                    )
                    r2.raise_for_status()
                    return (r2.json().get("members") or [None])[0] or {}
                r3 = await _request_with_retry(
                    client, "POST",
                    f"{GHOST_ADMIN_API_URL}/members/",
                    {"members": [p]},
                )
                r3.raise_for_status()
                return (r3.json().get("members") or [None])[0] or {}

            if r.status_code == 422:
                # Validation failure: drop the fields the error text blames
                # (note / name / labels) and retry the PUT once.
                body = (r.text or "").lower()
                retry = dict(p)
                dropped = False
                if '"note"' in body or "for note" in body:
                    retry.pop("note", None); dropped = True
                if '"name"' in body or "for name" in body:
                    retry.pop("name", None); dropped = True
                if "labels.name" in body:
                    retry.pop("labels", None); dropped = True
                if dropped:
                    r2 = await _request_with_retry(
                        client, "PUT",
                        f"{GHOST_ADMIN_API_URL}/members/{mid}/",
                        {"members": [retry]},
                    )
                    if r2.status_code == 404:
                        # Same stale-id fallback as above, with the reduced payload.
                        existing = await _ghost_find_member_by_email(retry.get("email", ""))
                        if existing and existing.get("id"):
                            r3 = await _request_with_retry(
                                client, "PUT",
                                f"{GHOST_ADMIN_API_URL}/members/{existing['id']}/",
                                {"members": [retry]},
                            )
                            r3.raise_for_status()
                            return (r3.json().get("members") or [None])[0] or {}
                        r3 = await _request_with_retry(
                            client, "POST",
                            f"{GHOST_ADMIN_API_URL}/members/",
                            {"members": [retry]},
                        )
                        r3.raise_for_status()
                        return (r3.json().get("members") or [None])[0] or {}
                    r2.raise_for_status()
                    return (r2.json().get("members") or [None])[0] or {}
            r.raise_for_status()
            return (r.json().get("members") or [None])[0] or {}

        async def _post_upsert(p: dict) -> dict:
            # Create-or-update by email via Ghost's upsert endpoint.
            r = await _request_with_retry(
                client, "POST",
                f"{GHOST_ADMIN_API_URL}/members/?upsert=true",
                {"members": [p]},
            )
            if r.status_code == 422:
                lower = (r.text or "").lower()

                # Strip the fields blamed by the validation message and retry once.
                retry = dict(p)
                changed = False
                if '"note"' in lower or "for note" in lower:
                    retry.pop("note", None); changed = True
                if '"name"' in lower or "for name" in lower:
                    retry.pop("name", None); changed = True
                if "labels.name" in lower:
                    retry.pop("labels", None); changed = True

                if changed:
                    r2 = await _request_with_retry(
                        client, "POST",
                        f"{GHOST_ADMIN_API_URL}/members/?upsert=true",
                        {"members": [retry]},
                    )
                    if r2.status_code != 422:
                        r2.raise_for_status()
                        return (r2.json().get("members") or [None])[0] or {}
                    # Still failing validation: fall through with the new error text.
                    lower = (r2.text or "").lower()

                # Duplicate-email conflict: switch to a PUT against the existing member.
                if "already exists" in lower and "email address" in lower:
                    existing = await _ghost_find_member_by_email(p.get("email", ""))
                    if existing and existing.get("id"):
                        return await _put(existing["id"], p)

                raise httpx.HTTPStatusError(
                    "Validation error, cannot edit member.",
                    request=r.request,
                    response=r,
                )
            r.raise_for_status()
            return (r.json().get("members") or [None])[0] or {}

        if ghost_id:
            return await _put(ghost_id, pl)
        return await _post_upsert(pl)
|
||||
|
||||
|
||||
async def sync_member_to_ghost(sess: AsyncSession, user_id: int) -> Optional[str]:
    """Push a single user's membership data to Ghost.

    Returns the Ghost member id (or None when the user does not exist).
    """
    user = (
        await sess.execute(select(User).where(User.id == user_id))
    ).scalar_one_or_none()
    if user is None:
        return None

    payload = _ghost_member_payload_base(user)
    labels = await _labels_for_user(sess, user.id)
    if labels:
        payload["labels"] = labels

    ghost_member = await _ghost_upsert_member(payload, ghost_id=user.ghost_id)
    if not ghost_member:
        return user.ghost_id

    # Record the id Ghost assigned and cache the raw member payload locally.
    gm_id = ghost_member.get("id")
    if gm_id and user.ghost_id != gm_id:
        user.ghost_id = gm_id
    user.ghost_raw = dict(ghost_member)
    flag_modified(user, "ghost_raw")
    await sess.flush()
    return user.ghost_id or gm_id
|
||||
|
||||
|
||||
async def sync_members_to_ghost(
    sess: AsyncSession,
    changed_since: Optional[datetime] = None,
    limit: Optional[int] = None,
) -> int:
    """Upsert a batch of users to Ghost. Returns count processed.

    Args:
        sess: open async DB session.
        changed_since: when given, only users created since this moment —
            or whose last login is since it — are pushed.
        limit: optional cap on how many users to push.

    Per-user failures are logged and skipped so a single bad record cannot
    abort the whole batch.
    """
    stmt = select(User.id)
    if changed_since:
        stmt = stmt.where(
            or_(
                User.created_at >= changed_since,
                # .is_not(None) is the documented SQLAlchemy NULL test;
                # it replaces the linter-flagged `!= None` comparison and
                # generates the same IS NOT NULL SQL.
                and_(User.last_login_at.is_not(None), User.last_login_at >= changed_since),
            )
        )
    if limit:
        stmt = stmt.limit(limit)

    ids = [row[0] for row in (await sess.execute(stmt)).all()]
    processed = 0
    for uid in ids:
        try:
            await sync_member_to_ghost(sess, uid)
            processed += 1
        except httpx.HTTPStatusError as e:
            # Ghost rejected this one member; log and continue with the rest.
            print(f"[ghost sync] failed upsert for user {uid}: {e.response.status_code} {e.response.text}")
        except Exception as e:
            print(f"[ghost sync] failed upsert for user {uid}: {e}")
    return processed
|
||||
|
||||
|
||||
# =====================================================
|
||||
# Membership fetch/sync (Ghost -> DB) bulk + single
|
||||
# =====================================================
|
||||
|
||||
async def fetch_all_members_from_ghost() -> list[dict[str, Any]]:
    """Fetch every Ghost member, including labels/subscriptions/tiers/newsletters."""
    url = (
        f"{GHOST_ADMIN_API_URL}/members/"
        "?include=labels,subscriptions,tiers,newsletters&limit=all"
    )
    async with httpx.AsyncClient(timeout=60) as client:
        resp = await client.get(url, headers=_auth_header())
        resp.raise_for_status()
        return resp.json().get("members", [])
|
||||
|
||||
|
||||
async def sync_all_membership_from_ghost(sess: AsyncSession) -> None:
    """Bulk sync: fetch all members from Ghost, upsert into DB."""
    members = await fetch_all_members_from_ghost()

    # Collect every related entity once, keyed by Ghost id, so each is
    # upserted a single time before the member payloads are applied.
    label_bucket: Dict[str, dict[str, Any]] = {}
    tier_bucket: Dict[str, dict[str, Any]] = {}
    newsletter_bucket: Dict[str, dict[str, Any]] = {}

    for member in members:
        for label in member.get("labels") or []:
            label_bucket[label["id"]] = label
        for newsletter in member.get("newsletters") or []:
            newsletter_bucket[newsletter["id"]] = newsletter
        for subscription in member.get("subscriptions") or []:
            tier = subscription.get("tier")
            if isinstance(tier, dict) and tier.get("id"):
                tier_bucket[tier["id"]] = tier

    for label in label_bucket.values():
        await _upsert_label(sess, label)
    for tier in tier_bucket.values():
        await _upsert_tier(sess, tier)
    for newsletter in newsletter_bucket.values():
        await _upsert_newsletter(sess, newsletter)

    # With related rows in place, apply each member to its local user.
    for member in members:
        user = await _find_or_create_user_by_ghost_or_email(sess, member)
        await _apply_user_membership(sess, user, member)
|
||||
|
||||
|
||||
async def fetch_single_member_from_ghost(ghost_id: str) -> Optional[dict[str, Any]]:
    """Fetch one member by Ghost id; returns None when Ghost reports 404."""
    url = f"{GHOST_ADMIN_API_URL}/members/{ghost_id}/?include=labels,newsletters,subscriptions,tiers"
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.get(url, headers=_auth_header())
    if resp.status_code == 404:
        return None
    resp.raise_for_status()
    data = resp.json()
    items = data.get("members") or data.get("member") or []
    # Some response shapes wrap a single object instead of a list.
    if isinstance(items, dict):
        return items
    return items[0] if items else None
|
||||
|
||||
|
||||
async def sync_single_member(sess: AsyncSession, ghost_id: str) -> None:
    """Sync a single member from Ghost into DB."""
    member = await fetch_single_member_from_ghost(ghost_id)
    if member is None:
        return

    # Upsert related entities first so the membership joins can reference them.
    for label in member.get("labels") or []:
        await _upsert_label(sess, label)
    for newsletter in member.get("newsletters") or []:
        await _upsert_newsletter(sess, newsletter)
    for subscription in member.get("subscriptions") or []:
        tier = subscription.get("tier")
        if isinstance(tier, dict):
            await _upsert_tier(sess, tier)

    user = await _find_or_create_user_by_ghost_or_email(sess, member)
    await _apply_user_membership(sess, user, member)
|
||||
0
account/sx/__init__.py
Normal file
0
account/sx/__init__.py
Normal file
29
account/sx/auth.sx
Normal file
29
account/sx/auth.sx
Normal file
@@ -0,0 +1,29 @@
|
||||
;; Auth page components (device auth — account-specific)
;; Login and check-email components are shared: see shared/sx/templates/auth.sx

;; Red banner shown above the device form; renders nothing when error is empty.
(defcomp ~account-device-error (&key error)
  (when error
    (div :class "bg-red-50 border border-red-200 text-red-700 p-3 rounded mb-4"
      error)))

;; Device-code entry form.  `error` is a pre-rendered banner fragment;
;; `action` is the POST target and `code` pre-fills the input.
(defcomp ~account-device-form (&key error action csrf-token code)
  (div :class "py-8 max-w-md mx-auto"
    (h1 :class "text-2xl font-bold mb-6" "Authorize device")
    (p :class "text-stone-600 mb-4" "Enter the code shown in your terminal to sign in.")
    error
    (form :method "post" :action action :class "space-y-4"
      (input :type "hidden" :name "csrf_token" :value csrf-token)
      (div
        (label :for "code" :class "block text-sm font-medium mb-1" "Device code")
        ;; maxlength 9 matches the XXXX-XXXX code format (8 chars + dash).
        (input :type "text" :name "code" :id "code" :value code :placeholder "XXXX-XXXX"
          :required true :autofocus true :maxlength "9" :autocomplete "off" :spellcheck "false"
          :class "w-full border border-stone-300 rounded px-3 py-3 text-center text-2xl tracking-widest font-mono uppercase focus:outline-none focus:ring-2 focus:ring-stone-500"))
      (button :type "submit"
        :class "w-full bg-stone-800 text-white py-2 px-4 rounded hover:bg-stone-700 transition"
        "Authorize"))))

;; Success screen shown once the device code has been approved.
(defcomp ~account-device-approved ()
  (div :class "py-8 max-w-md mx-auto text-center"
    (h1 :class "text-2xl font-bold mb-4" "Device authorized")
    (p :class "text-stone-600" "You can close this window and return to your terminal.")))
|
||||
|
||||
43
account/sx/dashboard.sx
Normal file
43
account/sx/dashboard.sx
Normal file
@@ -0,0 +1,43 @@
|
||||
;; Account dashboard components

;; Error banner for the dashboard; renders nothing when error is empty.
(defcomp ~account-error-banner (&key error)
  (when error
    (div :class "rounded-lg border border-red-200 bg-red-50 text-red-800 px-4 py-3 text-sm"
      error)))

;; Muted line showing the signed-in user's email (optional).
(defcomp ~account-user-email (&key email)
  (when email
    (p :class "text-sm text-stone-500 mt-1" email)))

;; Muted line showing the signed-in user's display name (optional).
(defcomp ~account-user-name (&key name)
  (when name
    (p :class "text-sm text-stone-600" name)))

;; Sign-out button as a CSRF-protected POST form to /auth/logout/.
(defcomp ~account-logout-form (&key csrf-token)
  (form :action "/auth/logout/" :method "post"
    (input :type "hidden" :name "csrf_token" :value csrf-token)
    (button :type "submit"
      :class "inline-flex items-center gap-2 rounded-full border border-stone-300 px-4 py-2 text-sm font-medium text-stone-700 hover:bg-stone-50 transition"
      (i :class "fa-solid fa-right-from-bracket text-xs") " Sign out")))

;; Single label pill.
(defcomp ~account-label-item (&key name)
  (span :class "inline-flex items-center rounded-full border border-stone-200 px-3 py-1 text-xs font-medium bg-white/60"
    name))

;; "Labels" heading plus a wrapped row of label pills; hidden when empty.
(defcomp ~account-labels-section (&key items)
  (when items
    (div
      (h2 :class "text-base font-semibold tracking-tight mb-3" "Labels")
      (div :class "flex flex-wrap gap-2" items))))

;; Dashboard panel layout: error banner, identity block + logout, labels.
;; All fragment args are optional pre-rendered sub-components.
(defcomp ~account-main-panel (&key error email name logout labels)
  (div :class "w-full max-w-3xl mx-auto px-4 py-6"
    (div :class "bg-white/70 backdrop-blur rounded-2xl shadow border border-stone-200 p-6 sm:p-8 space-y-8"
      error
      (div :class "flex items-center justify-between"
        (div
          (h1 :class "text-xl font-semibold tracking-tight" "Account")
          email
          name)
        logout)
      labels)))
|
||||
8
account/sx/handlers/auth-menu.sx
Normal file
8
account/sx/handlers/auth-menu.sx
Normal file
@@ -0,0 +1,8 @@
|
||||
;; Account auth-menu fragment handler
;;
;; Renders the desktop + mobile auth menu (sign-in or user link).
;; `email` is present when a user is signed in; nil shows the sign-in state.

(defhandler auth-menu (&key email)
  (~auth-menu
    :user-email (when email email)
    :account-url (app-url "account" "")))
|
||||
31
account/sx/newsletters.sx
Normal file
31
account/sx/newsletters.sx
Normal file
@@ -0,0 +1,31 @@
|
||||
;; Newsletter management components

;; Optional one-line newsletter description under the title.
(defcomp ~account-newsletter-desc (&key description)
  (when description
    (p :class "text-xs text-stone-500 mt-0.5 truncate" description)))

;; Subscribe on/off switch.  Posts to `url` via sx and swaps itself
;; (outerHTML) with the re-rendered toggle; `checked` drives aria state.
(defcomp ~account-newsletter-toggle (&key id url hdrs target cls checked knob-cls)
  (div :id id :class "flex items-center"
    (button :sx-post url :sx-headers hdrs :sx-target target :sx-swap "outerHTML"
      :class cls :role "switch" :aria-checked checked
      (span :class knob-cls))))

;; One newsletter row: name + description on the left, toggle on the right.
(defcomp ~account-newsletter-item (&key name desc toggle)
  (div :class "flex items-center justify-between py-4 first:pt-0 last:pb-0"
    (div :class "min-w-0 flex-1"
      (p :class "text-sm font-medium text-stone-800" name)
      desc)
    (div :class "ml-4 flex-shrink-0" toggle)))

;; Divided list wrapper for newsletter rows.
(defcomp ~account-newsletter-list (&key items)
  (div :class "divide-y divide-stone-100" items))

;; Placeholder when there are no newsletters to manage.
(defcomp ~account-newsletter-empty ()
  (p :class "text-sm text-stone-500" "No newsletters available."))

;; Page-level panel framing the newsletter list.
(defcomp ~account-newsletters-panel (&key list)
  (div :class "w-full max-w-3xl mx-auto px-4 py-6"
    (div :class "bg-white/70 backdrop-blur rounded-2xl shadow border border-stone-200 p-6 sm:p-8 space-y-6"
      (h1 :class "text-xl font-semibold tracking-tight" "Newsletters")
      list)))
|
||||
339
account/sx/sx_components.py
Normal file
339
account/sx/sx_components.py
Normal file
@@ -0,0 +1,339 @@
|
||||
"""
|
||||
Account service s-expression page components.
|
||||
|
||||
Renders account dashboard, newsletters, fragment pages, login, and device
|
||||
auth pages. Called from route handlers in place of ``render_template()``.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
from shared.sx.jinja_bridge import load_service_components
|
||||
from shared.sx.helpers import (
|
||||
call_url, sx_call, SxExpr,
|
||||
root_header_sx, full_page_sx,
|
||||
)
|
||||
|
||||
# Load account-specific .sx components + handlers at import time
|
||||
load_service_components(os.path.dirname(os.path.dirname(__file__)),
|
||||
service_name="account")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Header helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _auth_nav_sx(ctx: dict) -> str:
    """Auth section desktop nav items."""
    newsletters_link = sx_call(
        "nav-link",
        href=call_url(ctx, "account_url", "/newsletters/"),
        label="newsletters",
        select_colours=ctx.get("select_colours", ""),
    )
    parts = [newsletters_link]
    # Extra nav fragment supplied by the caller, if any.
    extra = ctx.get("account_nav")
    if extra:
        parts.append(extra)
    return "(<> " + " ".join(parts) + ")"
|
||||
|
||||
|
||||
def _auth_header_sx(ctx: dict, *, oob: bool = False) -> str:
    """Build the account section header row.

    Args:
        ctx: render context supplying account_url and nav fragments.
        oob: when True, render the row as an out-of-band swap fragment.
    """
    return sx_call(
        "menu-row-sx",
        id="auth-row", level=1, colour="sky",
        link_href=call_url(ctx, "account_url", "/"),
        link_label="account", icon="fa-solid fa-user",
        # Desktop nav items are pre-rendered and embedded as a raw sx expr.
        nav=SxExpr(_auth_nav_sx(ctx)),
        child_id="auth-header-child", oob=oob,
    )
|
||||
|
||||
|
||||
def _auth_nav_mobile_sx(ctx: dict) -> str:
    """Mobile nav menu for auth section.

    The mobile menu currently has exactly the same items as the desktop
    one, so delegate to ``_auth_nav_sx`` instead of duplicating its body
    (the two copies were byte-for-byte identical and would inevitably
    drift).  Split this back out only if mobile ever diverges.
    """
    return _auth_nav_sx(ctx)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Account dashboard (GET /)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _account_main_panel_sx(ctx: dict) -> str:
    """Account info panel with user details and logout.

    Reads the authenticated user from ``quart.g`` and assembles the
    ``account-main-panel`` component from optional pre-rendered fragments.
    """
    from quart import g
    from shared.browser.app.csrf import generate_csrf_token

    user = getattr(g, "user", None)
    error = ctx.get("error", "")

    error_sx = sx_call("account-error-banner", error=error) if error else ""

    user_email_sx = ""
    user_name_sx = ""
    if user:
        user_email_sx = sx_call("account-user-email", email=user.email)
        if user.name:
            user_name_sx = sx_call("account-user-name", name=user.name)

    # The logout form needs a fresh CSRF token for its POST.
    logout_sx = sx_call("account-logout-form", csrf_token=generate_csrf_token())

    labels_sx = ""
    # hasattr guard: not every user object carries a labels relationship.
    if user and hasattr(user, "labels") and user.labels:
        label_items = " ".join(
            sx_call("account-label-item", name=label.name)
            for label in user.labels
        )
        labels_sx = sx_call("account-labels-section",
                            items=SxExpr("(<> " + label_items + ")"))

    # Optional fragments are passed as None so the component omits them.
    return sx_call(
        "account-main-panel",
        error=SxExpr(error_sx) if error_sx else None,
        email=SxExpr(user_email_sx) if user_email_sx else None,
        name=SxExpr(user_name_sx) if user_name_sx else None,
        logout=SxExpr(logout_sx),
        labels=SxExpr(labels_sx) if labels_sx else None,
    )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Newsletters (GET /newsletters/)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _newsletter_toggle_sx(un: Any, account_url_fn: Any, csrf_token: str) -> str:
    """Render a single newsletter toggle switch."""
    nid = un.newsletter_id
    toggle_url = account_url_fn(f"/newsletter/{nid}/toggle/")
    # Visual state (colour, knob position, aria flag) follows the
    # current subscription flag.
    if un.subscribed:
        bg, knob_pos, aria = "bg-emerald-500", "translate-x-6", "true"
    else:
        bg, knob_pos, aria = "bg-stone-300", "translate-x-1", "false"
    return sx_call(
        "account-newsletter-toggle",
        id=f"nl-{nid}", url=toggle_url,
        hdrs=f'{{"X-CSRFToken": "{csrf_token}"}}',
        target=f"#nl-{nid}",
        cls=f"relative inline-flex h-6 w-11 items-center rounded-full transition-colors focus:outline-none focus:ring-2 focus:ring-emerald-500 focus:ring-offset-2 {bg}",
        checked=aria,
        knob_cls=f"inline-block h-4 w-4 rounded-full bg-white shadow transform transition-transform {knob_pos}",
    )
|
||||
|
||||
|
||||
def _newsletter_toggle_off_sx(nid: int, toggle_url: str, csrf_token: str) -> str:
    """Render an unsubscribed newsletter toggle (no subscription record yet)."""
    # Always renders the "off" visual state: grey track, knob at the left.
    track_cls = (
        "relative inline-flex h-6 w-11 items-center rounded-full transition-colors "
        "focus:outline-none focus:ring-2 focus:ring-emerald-500 focus:ring-offset-2 bg-stone-300"
    )
    knob_cls = (
        "inline-block h-4 w-4 rounded-full bg-white shadow transform "
        "transition-transform translate-x-1"
    )
    return sx_call(
        "account-newsletter-toggle",
        id=f"nl-{nid}", url=toggle_url,
        hdrs=f'{{"X-CSRFToken": "{csrf_token}"}}',
        target=f"#nl-{nid}",
        cls=track_cls,
        checked="false",
        knob_cls=knob_cls,
    )
|
||||
|
||||
|
||||
def _newsletters_panel_sx(ctx: dict, newsletter_list: list) -> str:
    """Newsletters management panel.

    ``newsletter_list`` items are dicts carrying a ``newsletter`` row and an
    optional ``un`` subscription record for the current user.
    """
    from shared.browser.app.csrf import generate_csrf_token

    # Fall back to identity when the context supplies no URL builder.
    account_url_fn = ctx.get("account_url") or (lambda p: p)
    csrf = generate_csrf_token()

    if newsletter_list:
        items = []
        for item in newsletter_list:
            nl = item["newsletter"]
            un = item.get("un")

            desc_sx = sx_call(
                "account-newsletter-desc", description=nl.description
            ) if nl.description else ""

            # With a subscription record, render its current state; without
            # one, show an "off" toggle (first click creates the record).
            if un:
                toggle = _newsletter_toggle_sx(un, account_url_fn, csrf)
            else:
                toggle_url = account_url_fn(f"/newsletter/{nl.id}/toggle/")
                toggle = _newsletter_toggle_off_sx(nl.id, toggle_url, csrf)

            items.append(sx_call(
                "account-newsletter-item",
                name=nl.name,
                desc=SxExpr(desc_sx) if desc_sx else None,
                toggle=SxExpr(toggle),
            ))
        list_sx = sx_call(
            "account-newsletter-list",
            items=SxExpr("(<> " + " ".join(items) + ")"),
        )
    else:
        list_sx = sx_call("account-newsletter-empty")

    return sx_call("account-newsletters-panel", list=SxExpr(list_sx))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Auth pages (login, device, check_email)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _login_page_content(ctx: dict) -> str:
    """Login form content."""
    from quart import url_for
    from shared.browser.app.csrf import generate_csrf_token

    error = ctx.get("error", "")
    banner = sx_call("auth-error-banner", error=error) if error else ""

    return sx_call(
        "auth-login-form",
        error=SxExpr(banner) if banner else None,
        action=url_for("auth.start_login"),
        csrf_token=generate_csrf_token(),
        email=ctx.get("email", ""),
    )
|
||||
|
||||
|
||||
def _device_page_content(ctx: dict) -> str:
    """Device authorization form content."""
    from quart import url_for
    from shared.browser.app.csrf import generate_csrf_token

    error = ctx.get("error", "")
    banner = sx_call("account-device-error", error=error) if error else ""

    return sx_call(
        "account-device-form",
        error=SxExpr(banner) if banner else None,
        action=url_for("auth.device_submit"),
        csrf_token=generate_csrf_token(),
        code=ctx.get("code", ""),
    )
|
||||
|
||||
|
||||
def _device_approved_content() -> str:
    """Content shown once a device code has been approved."""
    content = sx_call("account-device-approved")
    return content
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API: Account dashboard
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def _fragment_content(frag: object) -> str:
    """Convert a fragment response to sx content string.

    SxExpr (from text/sx responses) is embedded as-is; plain strings
    (from text/html) are wrapped in ``~rich-text``.
    """
    from shared.sx.parser import SxExpr
    if isinstance(frag, SxExpr):
        return frag.source
    text = str(frag) if frag else ""
    if not text:
        return ""
    return f'(~rich-text :html "{_sx_escape(text)}")'
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API: Auth pages (login, device)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def render_login_page(ctx: dict) -> str:
    """Full page: login form."""
    return full_page_sx(
        ctx,
        header_rows=root_header_sx(ctx),
        content=_login_page_content(ctx),
        meta_html='<title>Login \u2014 Rose Ash</title>',
    )
|
||||
|
||||
|
||||
async def render_device_page(ctx: dict) -> str:
    """Render the full device-authorization page."""
    return full_page_sx(
        ctx,
        header_rows=root_header_sx(ctx),
        content=_device_page_content(ctx),
        meta_html='<title>Authorize Device \u2014 Rose Ash</title>',
    )
|
||||
|
||||
|
||||
async def render_device_approved_page(ctx: dict) -> str:
    """Render the full 'device approved' confirmation page."""
    return full_page_sx(
        ctx,
        header_rows=root_header_sx(ctx),
        content=_device_approved_content(),
        meta_html='<title>Device Authorized \u2014 Rose Ash</title>',
    )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API: Check email page (POST /start/ success)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _check_email_content(email: str, email_error: str | None = None) -> str:
    """Build the 'check your email' confirmation content.

    *email* is echoed back to the user (HTML-escaped); an optional
    *email_error* is rendered as a warning banner.
    """
    from markupsafe import escape

    banner = ""
    if email_error:
        banner = sx_call("auth-check-email-error", error=str(escape(email_error)))

    return sx_call(
        "auth-check-email",
        email=str(escape(email)),
        error=SxExpr(banner) if banner else None,
    )
|
||||
|
||||
|
||||
async def render_check_email_page(ctx: dict) -> str:
    """Render the full 'check your email' page after a magic link is sent."""
    return full_page_sx(
        ctx,
        header_rows=root_header_sx(ctx),
        content=_check_email_content(ctx.get("email", ""), ctx.get("email_error")),
        meta_html='<title>Check your email \u2014 Rose Ash</title>',
    )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API: Fragment renderers for POST handlers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def render_newsletter_toggle(un) -> str:
    """Render a newsletter toggle switch for a POST response.

    Resolves the account URL builder from ``g._account_url`` when the
    request set one, falling back to the shared infrastructure helper.
    """
    from shared.browser.app.csrf import generate_csrf_token
    from quart import g

    url_builder = getattr(g, "_account_url", None)
    if url_builder is None:
        from shared.infrastructure.urls import account_url as url_builder
    return _newsletter_toggle_sx(un, url_builder, generate_csrf_token())
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Internal helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _sx_escape(s: str) -> str:
|
||||
"""Escape a string for embedding in sx string literals."""
|
||||
return s.replace("\\", "\\\\").replace('"', '\\"')
|
||||
0
account/sxc/__init__.py
Normal file
0
account/sxc/__init__.py
Normal file
105
account/sxc/pages/__init__.py
Normal file
105
account/sxc/pages/__init__.py
Normal file
@@ -0,0 +1,105 @@
|
||||
"""Account defpage setup — registers layouts, page helpers, and loads .sx pages."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
|
||||
def setup_account_pages() -> None:
    """Register account layouts and page helpers, then load the .sx pages.

    Order matters: layouts and helpers must exist before the page files
    that reference them are parsed.
    """
    _register_account_layouts()
    _register_account_helpers()
    _load_account_page_files()
|
||||
|
||||
|
||||
def _load_account_page_files() -> None:
    """Load every .sx page definition shipped alongside this module."""
    import os
    from shared.sx.pages import load_page_dir

    here = os.path.dirname(__file__)
    load_page_dir(here, "account")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Layouts
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _register_account_layouts() -> None:
    """Register the ``account`` layout (full / OOB / mobile renderers)."""
    from shared.sx.layouts import register_custom_layout

    register_custom_layout(
        "account", _account_full, _account_oob, _account_mobile
    )
|
||||
|
||||
|
||||
def _account_full(ctx: dict, **kw: Any) -> str:
    """Full-page header block: root header plus the auth header child."""
    from shared.sx.helpers import root_header_sx, header_child_sx
    from sx.sx_components import _auth_header_sx

    parts = [root_header_sx(ctx), header_child_sx(_auth_header_sx(ctx))]
    return "(<> " + " ".join(parts) + ")"
|
||||
|
||||
|
||||
def _account_oob(ctx: dict, **kw: Any) -> str:
    """Out-of-band header swaps: auth header first, then root header."""
    from shared.sx.helpers import root_header_sx
    from sx.sx_components import _auth_header_sx

    auth_hdr = _auth_header_sx(ctx, oob=True)
    root_hdr = root_header_sx(ctx, oob=True)
    return "(<> " + auth_hdr + " " + root_hdr + ")"
|
||||
|
||||
|
||||
def _account_mobile(ctx: dict, **kw: Any) -> str:
    """Mobile menu: an 'account' section stacked on the root navigation."""
    from shared.sx.helpers import mobile_menu_sx, mobile_root_nav_sx, sx_call, SxExpr
    from sx.sx_components import _auth_nav_mobile_sx

    ctx = _inject_account_nav(ctx)
    account_section = sx_call(
        "mobile-menu-section",
        label="account",
        href="/",
        level=1,
        colour="sky",
        items=SxExpr(_auth_nav_mobile_sx(ctx)),
    )
    return mobile_menu_sx(account_section, mobile_root_nav_sx(ctx))
|
||||
|
||||
|
||||
def _inject_account_nav(ctx: dict) -> dict:
|
||||
"""Ensure account_nav is in ctx from g.account_nav."""
|
||||
if "account_nav" not in ctx:
|
||||
from quart import g
|
||||
ctx = dict(ctx)
|
||||
ctx["account_nav"] = getattr(g, "account_nav", "")
|
||||
return ctx
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Page helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _register_account_helpers() -> None:
    """Register the page-helper callables referenced by the account .sx pages."""
    from shared.sx.pages import register_page_helpers

    helpers = {
        "account-content": _h_account_content,
        "newsletters-content": _h_newsletters_content,
        "fragment-content": _h_fragment_content,
    }
    register_page_helpers("account", helpers)
|
||||
|
||||
|
||||
def _h_account_content():
    """Page helper: main account dashboard panel (no extra context needed)."""
    from sx.sx_components import _account_main_panel_sx

    return _account_main_panel_sx({})
|
||||
|
||||
|
||||
def _h_newsletters_content():
    """Page helper: newsletters panel built from ``g.newsletters_data``.

    Falls back to an empty-state component when no newsletter data was
    stashed on ``g`` by the request handler.
    """
    from quart import g

    data = getattr(g, "newsletters_data", None)
    if not data:
        from shared.sx.helpers import sx_call
        return sx_call("account-newsletter-empty")

    # NOTE: the original also imported shared.sx.page.get_template_context_sync
    # but never used it; the dead import has been removed.
    from sx.sx_components import _newsletters_panel_sx

    # Minimal ctx: the panel only needs an account_url builder.
    url_builder = getattr(g, "_account_url", None)
    if url_builder is None:
        from shared.infrastructure.urls import account_url as url_builder
    return _newsletters_panel_sx({"account_url": url_builder}, data)
|
||||
|
||||
|
||||
def _h_fragment_content():
    """Page helper: render ``g.fragment_page_data`` via the fragment converter."""
    from quart import g

    frag = getattr(g, "fragment_page_data", None)
    if not frag:
        return ""
    from sx.sx_components import _fragment_content

    return _fragment_content(frag)
|
||||
31
account/sxc/pages/account.sx
Normal file
31
account/sxc/pages/account.sx
Normal file
@@ -0,0 +1,31 @@
|
||||
;; Account app — declarative page definitions
;; Each defpage is loaded by _load_account_page_files and rendered with
;; the :account layout; :auth :login gates every page behind sign-in.

;; ---------------------------------------------------------------------------
;; Account dashboard
;; ---------------------------------------------------------------------------

(defpage account-dashboard
  :path "/"
  :auth :login
  :layout :account
  :content (account-content))

;; ---------------------------------------------------------------------------
;; Newsletters
;; ---------------------------------------------------------------------------

(defpage newsletters
  :path "/newsletters/"
  :auth :login
  :layout :account
  :content (newsletters-content))

;; ---------------------------------------------------------------------------
;; Fragment pages (tickets, bookings, etc. from events service)
;; ---------------------------------------------------------------------------

;; Catch-all: <slug> is resolved to a remote fragment at request time.
(defpage fragment-page
  :path "/<slug>/"
  :auth :login
  :layout :account
  :content (fragment-content))
|
||||
@@ -1,44 +0,0 @@
|
||||
<div class="w-full max-w-3xl mx-auto px-4 py-6">
|
||||
<div class="bg-white/70 backdrop-blur rounded-2xl shadow border border-stone-200 p-6 sm:p-8 space-y-6">
|
||||
|
||||
<h1 class="text-xl font-semibold tracking-tight">Bookings</h1>
|
||||
|
||||
{% if bookings %}
|
||||
<div class="divide-y divide-stone-100">
|
||||
{% for booking in bookings %}
|
||||
<div class="py-4 first:pt-0 last:pb-0">
|
||||
<div class="flex items-start justify-between gap-4">
|
||||
<div class="min-w-0 flex-1">
|
||||
<p class="text-sm font-medium text-stone-800">{{ booking.name }}</p>
|
||||
<div class="mt-1 flex flex-wrap items-center gap-x-3 gap-y-1 text-xs text-stone-500">
|
||||
<span>{{ booking.start_at.strftime('%d %b %Y, %H:%M') }}</span>
|
||||
{% if booking.end_at %}
|
||||
<span>– {{ booking.end_at.strftime('%H:%M') }}</span>
|
||||
{% endif %}
|
||||
{% if booking.calendar_name %}
|
||||
<span>· {{ booking.calendar_name }}</span>
|
||||
{% endif %}
|
||||
{% if booking.cost %}
|
||||
<span>· £{{ booking.cost }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex-shrink-0">
|
||||
{% if booking.state == 'confirmed' %}
|
||||
<span class="inline-flex items-center rounded-full bg-emerald-50 border border-emerald-200 px-2.5 py-0.5 text-xs font-medium text-emerald-700">confirmed</span>
|
||||
{% elif booking.state == 'provisional' %}
|
||||
<span class="inline-flex items-center rounded-full bg-amber-50 border border-amber-200 px-2.5 py-0.5 text-xs font-medium text-amber-700">provisional</span>
|
||||
{% else %}
|
||||
<span class="inline-flex items-center rounded-full bg-stone-50 border border-stone-200 px-2.5 py-0.5 text-xs font-medium text-stone-600">{{ booking.state }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-stone-500">No bookings yet.</p>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
</div>
|
||||
@@ -1 +0,0 @@
|
||||
{{ page_fragment_html | safe }}
|
||||
@@ -1,49 +0,0 @@
|
||||
<div class="w-full max-w-3xl mx-auto px-4 py-6">
|
||||
<div class="bg-white/70 backdrop-blur rounded-2xl shadow border border-stone-200 p-6 sm:p-8 space-y-8">
|
||||
|
||||
{% if error %}
|
||||
<div class="rounded-lg border border-red-200 bg-red-50 text-red-800 px-4 py-3 text-sm">
|
||||
{{ error }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{# Account header #}
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<h1 class="text-xl font-semibold tracking-tight">Account</h1>
|
||||
{% if g.user %}
|
||||
<p class="text-sm text-stone-500 mt-1">{{ g.user.email }}</p>
|
||||
{% if g.user.name %}
|
||||
<p class="text-sm text-stone-600">{{ g.user.name }}</p>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
<form action="/auth/logout/" method="post">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<button
|
||||
type="submit"
|
||||
class="inline-flex items-center gap-2 rounded-full border border-stone-300 px-4 py-2 text-sm font-medium text-stone-700 hover:bg-stone-50 transition"
|
||||
>
|
||||
<i class="fa-solid fa-right-from-bracket text-xs"></i>
|
||||
Sign out
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
{# Labels #}
|
||||
{% set labels = g.user.labels if g.user is defined and g.user.labels is defined else [] %}
|
||||
{% if labels %}
|
||||
<div>
|
||||
<h2 class="text-base font-semibold tracking-tight mb-3">Labels</h2>
|
||||
<div class="flex flex-wrap gap-2">
|
||||
{% for label in labels %}
|
||||
<span class="inline-flex items-center rounded-full border border-stone-200 px-3 py-1 text-xs font-medium bg-white/60">
|
||||
{{ label.name }}
|
||||
</span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
</div>
|
||||
@@ -1,7 +0,0 @@
|
||||
{% import 'macros/links.html' as links %}
|
||||
{% call links.link(account_url('/newsletters/'), hx_select_search, select_colours, True, aclass=styles.nav_button) %}
|
||||
newsletters
|
||||
{% endcall %}
|
||||
{% if account_nav_html %}
|
||||
{{ account_nav_html | safe }}
|
||||
{% endif %}
|
||||
@@ -1,17 +0,0 @@
|
||||
<div id="nl-{{ un.newsletter_id }}" class="flex items-center">
|
||||
<button
|
||||
hx-post="{{ account_url('/newsletter/' ~ un.newsletter_id ~ '/toggle/') }}"
|
||||
hx-headers='{"X-CSRFToken": "{{ csrf_token() }}"}'
|
||||
hx-target="#nl-{{ un.newsletter_id }}"
|
||||
hx-swap="outerHTML"
|
||||
class="relative inline-flex h-6 w-11 items-center rounded-full transition-colors focus:outline-none focus:ring-2 focus:ring-emerald-500 focus:ring-offset-2
|
||||
{% if un.subscribed %}bg-emerald-500{% else %}bg-stone-300{% endif %}"
|
||||
role="switch"
|
||||
aria-checked="{{ 'true' if un.subscribed else 'false' }}"
|
||||
>
|
||||
<span
|
||||
class="inline-block h-4 w-4 rounded-full bg-white shadow transform transition-transform
|
||||
{% if un.subscribed %}translate-x-6{% else %}translate-x-1{% endif %}"
|
||||
></span>
|
||||
</button>
|
||||
</div>
|
||||
@@ -1,46 +0,0 @@
|
||||
<div class="w-full max-w-3xl mx-auto px-4 py-6">
|
||||
<div class="bg-white/70 backdrop-blur rounded-2xl shadow border border-stone-200 p-6 sm:p-8 space-y-6">
|
||||
|
||||
<h1 class="text-xl font-semibold tracking-tight">Newsletters</h1>
|
||||
|
||||
{% if newsletter_list %}
|
||||
<div class="divide-y divide-stone-100">
|
||||
{% for item in newsletter_list %}
|
||||
<div class="flex items-center justify-between py-4 first:pt-0 last:pb-0">
|
||||
<div class="min-w-0 flex-1">
|
||||
<p class="text-sm font-medium text-stone-800">{{ item.newsletter.name }}</p>
|
||||
{% if item.newsletter.description %}
|
||||
<p class="text-xs text-stone-500 mt-0.5 truncate">{{ item.newsletter.description }}</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="ml-4 flex-shrink-0">
|
||||
{% if item.un %}
|
||||
{% with un=item.un %}
|
||||
{% include "_types/auth/_newsletter_toggle.html" %}
|
||||
{% endwith %}
|
||||
{% else %}
|
||||
{# No subscription row yet — show an off toggle that will create one #}
|
||||
<div id="nl-{{ item.newsletter.id }}" class="flex items-center">
|
||||
<button
|
||||
hx-post="{{ account_url('/newsletter/' ~ item.newsletter.id ~ '/toggle/') }}"
|
||||
hx-headers='{"X-CSRFToken": "{{ csrf_token() }}"}'
|
||||
hx-target="#nl-{{ item.newsletter.id }}"
|
||||
hx-swap="outerHTML"
|
||||
class="relative inline-flex h-6 w-11 items-center rounded-full transition-colors focus:outline-none focus:ring-2 focus:ring-emerald-500 focus:ring-offset-2 bg-stone-300"
|
||||
role="switch"
|
||||
aria-checked="false"
|
||||
>
|
||||
<span class="inline-block h-4 w-4 rounded-full bg-white shadow transform transition-transform translate-x-1"></span>
|
||||
</button>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-stone-500">No newsletters available.</p>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
</div>
|
||||
@@ -1,29 +0,0 @@
|
||||
{% extends 'oob_elements.html' %}
|
||||
|
||||
{# OOB elements for HTMX navigation - all elements that need updating #}
|
||||
|
||||
{# Import shared OOB macros #}
|
||||
{% from '_types/root/_oob_menu.html' import mobile_menu with context %}
|
||||
|
||||
{# Header with app title - includes cart-mini, navigation, and market-specific header #}
|
||||
|
||||
{% block oobs %}
|
||||
|
||||
{% from '_types/root/_n/macros.html' import oob_header with context %}
|
||||
{{oob_header('root-header-child', 'auth-header-child', '_types/auth/header/_header.html')}}
|
||||
|
||||
{% from '_types/root/header/_header.html' import header_row with context %}
|
||||
{{ header_row(oob=True) }}
|
||||
{% endblock %}
|
||||
|
||||
|
||||
{% block mobile_menu %}
|
||||
{% include '_types/auth/_nav.html' %}
|
||||
{% endblock %}
|
||||
|
||||
|
||||
{% block content %}
|
||||
{% include oob.main %}
|
||||
{% endblock %}
|
||||
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
<div class="w-full max-w-3xl mx-auto px-4 py-6">
|
||||
<div class="bg-white/70 backdrop-blur rounded-2xl shadow border border-stone-200 p-6 sm:p-8 space-y-6">
|
||||
|
||||
<h1 class="text-xl font-semibold tracking-tight">Tickets</h1>
|
||||
|
||||
{% if tickets %}
|
||||
<div class="divide-y divide-stone-100">
|
||||
{% for ticket in tickets %}
|
||||
<div class="py-4 first:pt-0 last:pb-0">
|
||||
<div class="flex items-start justify-between gap-4">
|
||||
<div class="min-w-0 flex-1">
|
||||
<a href="{{ events_url('/tickets/' ~ ticket.code ~ '/') }}"
|
||||
class="text-sm font-medium text-stone-800 hover:text-emerald-700 transition">
|
||||
{{ ticket.entry_name }}
|
||||
</a>
|
||||
<div class="mt-1 flex flex-wrap items-center gap-x-3 gap-y-1 text-xs text-stone-500">
|
||||
<span>{{ ticket.entry_start_at.strftime('%d %b %Y, %H:%M') }}</span>
|
||||
{% if ticket.calendar_name %}
|
||||
<span>· {{ ticket.calendar_name }}</span>
|
||||
{% endif %}
|
||||
{% if ticket.ticket_type_name %}
|
||||
<span>· {{ ticket.ticket_type_name }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex-shrink-0">
|
||||
{% if ticket.state == 'checked_in' %}
|
||||
<span class="inline-flex items-center rounded-full bg-blue-50 border border-blue-200 px-2.5 py-0.5 text-xs font-medium text-blue-700">checked in</span>
|
||||
{% elif ticket.state == 'confirmed' %}
|
||||
<span class="inline-flex items-center rounded-full bg-emerald-50 border border-emerald-200 px-2.5 py-0.5 text-xs font-medium text-emerald-700">confirmed</span>
|
||||
{% else %}
|
||||
<span class="inline-flex items-center rounded-full bg-amber-50 border border-amber-200 px-2.5 py-0.5 text-xs font-medium text-amber-700">{{ ticket.state }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-stone-500">No tickets yet.</p>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
</div>
|
||||
@@ -1,33 +0,0 @@
|
||||
{% extends "_types/root/index.html" %}
|
||||
{% block content %}
|
||||
<div class="w-full max-w-md">
|
||||
<div class="bg-white/70 dark:bg-neutral-900/70 backdrop-blur rounded-2xl shadow p-6 sm:p-8 border border-neutral-200 dark:border-neutral-800">
|
||||
<h1 class="text-2xl font-semibold tracking-tight">Check your email</h1>
|
||||
|
||||
<p class="text-base text-stone-700 dark:text-stone-300 mt-3">
|
||||
If an account exists for
|
||||
<strong class="text-stone-900 dark:text-white">{{ email }}</strong>,
|
||||
you’ll receive a link to sign in. It expires in 15 minutes.
|
||||
</p>
|
||||
|
||||
{% if email_error %}
|
||||
<div
|
||||
class="mt-4 rounded-lg border border-red-300 bg-red-50 text-red-700 text-sm px-3 py-2 flex items-start gap-2"
|
||||
role="alert"
|
||||
>
|
||||
<span class="font-medium">Heads up:</span>
|
||||
<span>{{ email_error }}</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<p class="mt-6 text-sm">
|
||||
<a
|
||||
href="{{ blog_url('/auth/login/') }}"
|
||||
class="text-stone-600 dark:text-stone-300 hover:underline"
|
||||
>
|
||||
← Back
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -1,12 +0,0 @@
|
||||
{% import 'macros/links.html' as links %}
|
||||
{% macro header_row(oob=False) %}
|
||||
{% call links.menu_row(id='auth-row', oob=oob) %}
|
||||
{% call links.link(account_url('/'), hx_select_search ) %}
|
||||
<i class="fa-solid fa-user"></i>
|
||||
<div>account</div>
|
||||
{% endcall %}
|
||||
{% call links.desktop_nav() %}
|
||||
{% include "_types/auth/_nav.html" %}
|
||||
{% endcall %}
|
||||
{% endcall %}
|
||||
{% endmacro %}
|
||||
@@ -1,18 +0,0 @@
|
||||
{% extends "_types/root/_index.html" %}
|
||||
|
||||
|
||||
{% block root_header_child %}
|
||||
{% from '_types/root/_n/macros.html' import index_row with context %}
|
||||
{% call index_row('auth-header-child', '_types/auth/header/_header.html') %}
|
||||
{% block auth_header_child %}
|
||||
{% endblock %}
|
||||
{% endcall %}
|
||||
{% endblock %}
|
||||
|
||||
{% block _main_mobile_menu %}
|
||||
{% include "_types/auth/_nav.html" %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
{% include '_types/auth/_main_panel.html' %}
|
||||
{% endblock %}
|
||||
@@ -1,18 +0,0 @@
|
||||
{% extends oob.extends %}
|
||||
|
||||
|
||||
{% block root_header_child %}
|
||||
{% from '_types/root/_n/macros.html' import index_row with context %}
|
||||
{% call index_row(oob.child_id, oob.header) %}
|
||||
{% block auth_header_child %}
|
||||
{% endblock %}
|
||||
{% endcall %}
|
||||
{% endblock %}
|
||||
|
||||
{% block _main_mobile_menu %}
|
||||
{% include oob.nav %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
{% include oob.main %}
|
||||
{% endblock %}
|
||||
@@ -1,46 +0,0 @@
|
||||
{% extends "_types/root/index.html" %}
|
||||
{% block content %}
|
||||
<div class="w-full max-w-md">
|
||||
<div class="bg-white/70 dark:bg-neutral-900/70 backdrop-blur rounded-2xl shadow p-6 sm:p-8 border border-neutral-200 dark:border-neutral-800">
|
||||
<h1 class="text-2xl font-semibold tracking-tight">Sign in</h1>
|
||||
<p class="mt-2 text-sm text-neutral-600 dark:text-neutral-400">
|
||||
Enter your email and we’ll email you a one-time sign-in link.
|
||||
</p>
|
||||
|
||||
{% if error %}
|
||||
<div class="mt-4 rounded-lg border border-red-200 bg-red-50 text-red-800 dark:border-red-900/40 dark:bg-red-950/40 dark:text-red-200 px-4 py-3 text-sm">
|
||||
{{ error }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<form
|
||||
method="post" action="{{ blog_url('/auth/start/') }}"
|
||||
class="mt-6 space-y-5"
|
||||
>
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<div>
|
||||
<label for="email" class="block text-sm font-medium text-neutral-700 dark:text-neutral-300">
|
||||
Email
|
||||
</label>
|
||||
<input
|
||||
type="email"
|
||||
id="email"
|
||||
name="email"
|
||||
value="{{ email or '' }}"
|
||||
required
|
||||
class="mt-2 block w-full rounded-lg border border-neutral-300 dark:border-neutral-700 bg-white dark:bg-neutral-900 px-3 py-2 text-neutral-900 dark:text-neutral-100 shadow-sm focus:outline-none focus:ring-2 focus:ring-offset-0 focus:ring-neutral-900 dark:focus:ring-neutral-200"
|
||||
autocomplete="email"
|
||||
inputmode="email"
|
||||
>
|
||||
</div>
|
||||
|
||||
<button
|
||||
type="submit"
|
||||
class="inline-flex w-full items-center justify-center rounded-lg bg-neutral-900 px-4 py-2.5 text-sm font-medium text-white hover:bg-neutral-800 focus:outline-none focus:ring-2 focus:ring-neutral-900 disabled:opacity-50 dark:bg-neutral-50 dark:text-neutral-900 dark:hover:bg-white"
|
||||
>
|
||||
Send link
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -1,19 +0,0 @@
|
||||
{% extends "_types/root/_index.html" %}
|
||||
{% block meta %}{% endblock %}
|
||||
{% block title %}Check your email — Rose Ash{% endblock %}
|
||||
{% block content %}
|
||||
<div class="py-8 max-w-md mx-auto text-center">
|
||||
<h1 class="text-2xl font-bold mb-4">Check your email</h1>
|
||||
<p class="text-stone-600 mb-2">
|
||||
We sent a sign-in link to <strong>{{ email }}</strong>.
|
||||
</p>
|
||||
<p class="text-stone-500 text-sm">
|
||||
Click the link in the email to sign in. The link expires in 15 minutes.
|
||||
</p>
|
||||
{% if email_error %}
|
||||
<div class="bg-yellow-50 border border-yellow-200 text-yellow-700 p-3 rounded mt-4">
|
||||
{{ email_error }}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -1,36 +0,0 @@
|
||||
{% extends "_types/root/_index.html" %}
|
||||
{% block meta %}{% endblock %}
|
||||
{% block title %}Login — Rose Ash{% endblock %}
|
||||
{% block content %}
|
||||
<div class="py-8 max-w-md mx-auto">
|
||||
<h1 class="text-2xl font-bold mb-6">Sign in</h1>
|
||||
|
||||
{% if error %}
|
||||
<div class="bg-red-50 border border-red-200 text-red-700 p-3 rounded mb-4">
|
||||
{{ error }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<form method="post" action="{{ url_for('auth.start_login') }}" class="space-y-4">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<div>
|
||||
<label for="email" class="block text-sm font-medium mb-1">Email address</label>
|
||||
<input
|
||||
type="email"
|
||||
name="email"
|
||||
id="email"
|
||||
value="{{ email | default('') }}"
|
||||
required
|
||||
autofocus
|
||||
class="w-full border border-stone-300 rounded px-3 py-2 focus:outline-none focus:ring-2 focus:ring-stone-500"
|
||||
>
|
||||
</div>
|
||||
<button
|
||||
type="submit"
|
||||
class="w-full bg-stone-800 text-white py-2 px-4 rounded hover:bg-stone-700 transition"
|
||||
>
|
||||
Send magic link
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -1,36 +0,0 @@
|
||||
{# Desktop auth menu #}
|
||||
<span id="auth-menu-desktop" class="hidden md:inline-flex">
|
||||
{% if user_email %}
|
||||
<a
|
||||
href="{{ account_url('/') }}"
|
||||
class="justify-center cursor-pointer flex flex-row items-center p-3 gap-2 rounded bg-stone-200 text-black"
|
||||
data-close-details
|
||||
>
|
||||
<i class="fa-solid fa-user"></i>
|
||||
<span>{{ user_email }}</span>
|
||||
</a>
|
||||
{% else %}
|
||||
<a
|
||||
href="{{ account_url('/') }}"
|
||||
class="justify-center cursor-pointer flex flex-row items-center p-3 gap-2 rounded bg-stone-200 text-black"
|
||||
data-close-details
|
||||
>
|
||||
<i class="fa-solid fa-key"></i>
|
||||
<span>sign in or register</span>
|
||||
</a>
|
||||
{% endif %}
|
||||
</span>
|
||||
{# Mobile auth menu #}
|
||||
<span id="auth-menu-mobile" class="block md:hidden text-md font-bold">
|
||||
{% if user_email %}
|
||||
<a href="{{ account_url('/') }}" data-close-details>
|
||||
<i class="fa-solid fa-user"></i>
|
||||
<span>{{ user_email }}</span>
|
||||
</a>
|
||||
{% else %}
|
||||
<a href="{{ account_url('/') }}">
|
||||
<i class="fa-solid fa-key"></i>
|
||||
<span>sign in or register</span>
|
||||
</a>
|
||||
{% endif %}
|
||||
</span>
|
||||
0
account/tests/__init__.py
Normal file
0
account/tests/__init__.py
Normal file
39
account/tests/test_auth_operations.py
Normal file
39
account/tests/test_auth_operations.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""Unit tests for account auth operations."""
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
from account.bp.auth.services.auth_operations import validate_email
|
||||
|
||||
|
||||
class TestValidateEmail:
    """Behavioural checks for validate_email: normalisation plus a basic
    shape check (the validator only requires an "@")."""

    def test_valid_email(self):
        ok, email = validate_email("user@example.com")
        assert ok is True
        assert email == "user@example.com"

    def test_uppercase_lowered(self):
        # Addresses are normalised to lower case.
        ok, email = validate_email("USER@EXAMPLE.COM")
        assert ok is True
        assert email == "user@example.com"

    def test_whitespace_stripped(self):
        # Surrounding whitespace is trimmed before validation.
        ok, email = validate_email(" user@example.com ")
        assert ok is True
        assert email == "user@example.com"

    def test_empty_string(self):
        ok, email = validate_email("")
        assert ok is False

    def test_no_at_sign(self):
        ok, email = validate_email("notanemail")
        assert ok is False

    def test_just_at(self):
        # Documented limitation: a bare "@" satisfies the basic check.
        ok, email = validate_email("@")
        assert ok is True  # has "@", passes the basic check

    def test_spaces_only(self):
        # Whitespace-only input strips down to empty and is rejected.
        ok, email = validate_email(" ")
        assert ok is False
|
||||
164
account/tests/test_ghost_membership.py
Normal file
164
account/tests/test_ghost_membership.py
Normal file
@@ -0,0 +1,164 @@
|
||||
"""Unit tests for Ghost membership helpers."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import pytest
|
||||
|
||||
from account.services.ghost_membership import (
|
||||
_iso, _to_str_or_none, _member_email,
|
||||
_price_cents, _sanitize_member_payload,
|
||||
)
|
||||
|
||||
|
||||
class TestIso:
    """_iso: lenient ISO-8601 string -> datetime parsing (None on blank input)."""

    def test_none(self):
        assert _iso(None) is None

    def test_empty(self):
        assert _iso("") is None

    def test_z_suffix(self):
        # A trailing "Z" (Zulu/UTC) suffix must be accepted.
        result = _iso("2024-06-15T12:00:00Z")
        assert isinstance(result, datetime)
        assert result.year == 2024

    def test_offset(self):
        # Explicit numeric UTC offsets are accepted too.
        result = _iso("2024-06-15T12:00:00+00:00")
        assert isinstance(result, datetime)
|
||||
|
||||
|
||||
class TestToStrOrNone:
    """_to_str_or_none: scalars become stripped strings; containers,
    bytes-like values, and blank strings all collapse to None."""

    def test_none(self):
        assert _to_str_or_none(None) is None

    def test_dict(self):
        assert _to_str_or_none({"a": 1}) is None

    def test_list(self):
        assert _to_str_or_none([1, 2]) is None

    def test_bytes(self):
        # Raw bytes are rejected rather than decoded.
        assert _to_str_or_none(b"hello") is None

    def test_empty_string(self):
        assert _to_str_or_none("") is None

    def test_whitespace_only(self):
        assert _to_str_or_none(" ") is None

    def test_valid_string(self):
        assert _to_str_or_none("hello") == "hello"

    def test_int(self):
        # Non-container scalars are stringified.
        assert _to_str_or_none(42) == "42"

    def test_strips_whitespace(self):
        assert _to_str_or_none(" hi ") == "hi"

    def test_set(self):
        assert _to_str_or_none({1, 2}) is None

    def test_tuple(self):
        assert _to_str_or_none((1,)) is None

    def test_bytearray(self):
        assert _to_str_or_none(bytearray(b"x")) is None
|
||||
|
||||
|
||||
class TestMemberEmail:
    """_member_email: pull, strip, and lower-case the 'email' key of a
    Ghost member dict; None when missing or blank."""

    def test_normal(self):
        assert _member_email({"email": "USER@EXAMPLE.COM"}) == "user@example.com"

    def test_none(self):
        assert _member_email({"email": None}) is None

    def test_empty(self):
        assert _member_email({"email": ""}) is None

    def test_whitespace(self):
        assert _member_email({"email": " "}) is None

    def test_missing_key(self):
        assert _member_email({}) is None

    def test_strips(self):
        assert _member_email({"email": " a@b.com "}) == "a@b.com"
|
||||
|
||||
|
||||
class TestPriceCents:
    """_price_cents: extract payload["price"]["amount"] as an int,
    tolerating string amounts and returning None on any gap."""

    def test_valid(self):
        assert _price_cents({"price": {"amount": 1500}}) == 1500

    def test_string_amount(self):
        # Numeric strings are coerced to int.
        assert _price_cents({"price": {"amount": "2000"}}) == 2000

    def test_missing_price(self):
        assert _price_cents({}) is None

    def test_missing_amount(self):
        assert _price_cents({"price": {}}) is None

    def test_none_amount(self):
        assert _price_cents({"price": {"amount": None}}) is None

    def test_nested_none(self):
        # A present-but-None price must not raise.
        assert _price_cents({"price": None}) is None
|
||||
|
||||
|
||||
class TestSanitizeMemberPayload:
    """_sanitize_member_payload: whitelist and normalise the fields sent
    to the Ghost Admin API, dropping empty or malformed values."""

    def test_email_lowercased(self):
        result = _sanitize_member_payload({"email": "USER@EXAMPLE.COM"})
        assert result["email"] == "user@example.com"

    def test_empty_email_excluded(self):
        # Blank email is dropped rather than sent as "".
        result = _sanitize_member_payload({"email": ""})
        assert "email" not in result

    def test_name_included(self):
        result = _sanitize_member_payload({"name": "Alice"})
        assert result["name"] == "Alice"

    def test_note_included(self):
        result = _sanitize_member_payload({"note": "VIP"})
        assert result["note"] == "VIP"

    def test_subscribed_bool(self):
        # Truthy values are coerced to a real bool.
        result = _sanitize_member_payload({"subscribed": 1})
        assert result["subscribed"] is True

    def test_labels_with_id(self):
        # Labels survive when carrying either an id or a name.
        result = _sanitize_member_payload({
            "labels": [{"id": "abc"}, {"name": "VIP"}]
        })
        assert result["labels"] == [{"id": "abc"}, {"name": "VIP"}]

    def test_labels_empty_items_excluded(self):
        # Labels with neither id nor name are filtered; the key vanishes
        # entirely if nothing remains.
        result = _sanitize_member_payload({
            "labels": [{"id": None, "name": None}]
        })
        assert "labels" not in result

    def test_newsletters_with_id(self):
        result = _sanitize_member_payload({
            "newsletters": [{"id": "n1", "subscribed": True}]
        })
        assert result["newsletters"] == [{"subscribed": True, "id": "n1"}]

    def test_newsletters_default_subscribed(self):
        # Newsletters default to subscribed=True when unspecified.
        result = _sanitize_member_payload({
            "newsletters": [{"name": "Weekly"}]
        })
        assert result["newsletters"][0]["subscribed"] is True

    def test_dict_email_excluded(self):
        # Non-string email values (malformed input) are dropped.
        result = _sanitize_member_payload({"email": {"bad": "input"}})
        assert "email" not in result

    def test_id_passthrough(self):
        result = _sanitize_member_payload({"id": "ghost-member-123"})
        assert result["id"] == "ghost-member-123"

    def test_empty_payload(self):
        result = _sanitize_member_payload({})
        assert result == {}
|
||||
8
artdag/.dockerignore
Normal file
8
artdag/.dockerignore
Normal file
@@ -0,0 +1,8 @@
|
||||
.git
|
||||
.gitea
|
||||
**/.env
|
||||
**/.env.gpu
|
||||
**/__pycache__
|
||||
**/.pytest_cache
|
||||
**/*.pyc
|
||||
test/
|
||||
114
artdag/.gitea/workflows/ci.yml
Normal file
114
artdag/.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,114 @@
|
||||
name: Build and Deploy
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
env:
|
||||
REGISTRY: registry.rose-ash.com:5000
|
||||
ARTDAG_DIR: /root/art-dag-mono
|
||||
|
||||
jobs:
|
||||
build-and-deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install tools
|
||||
run: |
|
||||
apt-get update && apt-get install -y --no-install-recommends openssh-client
|
||||
|
||||
- name: Set up SSH
|
||||
env:
|
||||
SSH_KEY: ${{ secrets.DEPLOY_SSH_KEY }}
|
||||
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
|
||||
run: |
|
||||
mkdir -p ~/.ssh
|
||||
echo "$SSH_KEY" > ~/.ssh/id_rsa
|
||||
chmod 600 ~/.ssh/id_rsa
|
||||
ssh-keyscan -H "$DEPLOY_HOST" >> ~/.ssh/known_hosts 2>/dev/null || true
|
||||
|
||||
- name: Build and deploy
|
||||
env:
|
||||
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
|
||||
run: |
|
||||
ssh "root@$DEPLOY_HOST" "
|
||||
cd ${{ env.ARTDAG_DIR }}
|
||||
|
||||
OLD_HEAD=\$(git rev-parse HEAD 2>/dev/null || echo none)
|
||||
|
||||
git fetch origin main
|
||||
git reset --hard origin/main
|
||||
|
||||
NEW_HEAD=\$(git rev-parse HEAD)
|
||||
|
||||
# Change detection
|
||||
BUILD_L1=false
|
||||
BUILD_L2=false
|
||||
if [ \"\$OLD_HEAD\" = \"none\" ] || [ \"\$OLD_HEAD\" = \"\$NEW_HEAD\" ]; then
|
||||
BUILD_L1=true
|
||||
BUILD_L2=true
|
||||
else
|
||||
CHANGED=\$(git diff --name-only \$OLD_HEAD \$NEW_HEAD)
|
||||
# common/ or core/ change -> rebuild both
|
||||
if echo \"\$CHANGED\" | grep -qE '^(common|core)/'; then
|
||||
BUILD_L1=true
|
||||
BUILD_L2=true
|
||||
fi
|
||||
if echo \"\$CHANGED\" | grep -q '^l1/'; then
|
||||
BUILD_L1=true
|
||||
fi
|
||||
if echo \"\$CHANGED\" | grep -q '^l2/'; then
|
||||
BUILD_L2=true
|
||||
fi
|
||||
if echo \"\$CHANGED\" | grep -q '^client/'; then
|
||||
BUILD_L1=true
|
||||
fi
|
||||
fi
|
||||
|
||||
# Build L1
|
||||
if [ \"\$BUILD_L1\" = true ]; then
|
||||
echo 'Building L1...'
|
||||
docker build \
|
||||
--build-arg CACHEBUST=\$(date +%s) \
|
||||
-f l1/Dockerfile \
|
||||
-t ${{ env.REGISTRY }}/celery-l1-server:latest \
|
||||
-t ${{ env.REGISTRY }}/celery-l1-server:${{ github.sha }} \
|
||||
.
|
||||
docker push ${{ env.REGISTRY }}/celery-l1-server:latest
|
||||
docker push ${{ env.REGISTRY }}/celery-l1-server:${{ github.sha }}
|
||||
else
|
||||
echo 'Skipping L1 (no changes)'
|
||||
fi
|
||||
|
||||
# Build L2
|
||||
if [ \"\$BUILD_L2\" = true ]; then
|
||||
echo 'Building L2...'
|
||||
docker build \
|
||||
--build-arg CACHEBUST=\$(date +%s) \
|
||||
-f l2/Dockerfile \
|
||||
-t ${{ env.REGISTRY }}/l2-server:latest \
|
||||
-t ${{ env.REGISTRY }}/l2-server:${{ github.sha }} \
|
||||
.
|
||||
docker push ${{ env.REGISTRY }}/l2-server:latest
|
||||
docker push ${{ env.REGISTRY }}/l2-server:${{ github.sha }}
|
||||
else
|
||||
echo 'Skipping L2 (no changes)'
|
||||
fi
|
||||
|
||||
# Deploy stacks (--resolve-image always forces re-pull of :latest)
|
||||
if [ \"\$BUILD_L1\" = true ]; then
|
||||
cd l1 && source .env && docker stack deploy --resolve-image always -c docker-compose.yml celery && cd ..
|
||||
echo 'L1 stack deployed'
|
||||
fi
|
||||
if [ \"\$BUILD_L2\" = true ]; then
|
||||
cd l2 && source .env && docker stack deploy --resolve-image always -c docker-compose.yml activitypub && cd ..
|
||||
echo 'L2 stack deployed'
|
||||
fi
|
||||
|
||||
sleep 10
|
||||
echo '=== L1 Services ==='
|
||||
docker stack services celery
|
||||
echo '=== L2 Services ==='
|
||||
docker stack services activitypub
|
||||
"
|
||||
74
artdag/CLAUDE.md
Normal file
74
artdag/CLAUDE.md
Normal file
@@ -0,0 +1,74 @@
|
||||
# Art DAG Monorepo
|
||||
|
||||
Federated content-addressed DAG execution engine for distributed media processing with ActivityPub ownership and provenance tracking.
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
core/ # DAG engine (artdag package) - nodes, effects, analysis, planning
|
||||
l1/ # L1 Celery rendering server (FastAPI + Celery + Redis + PostgreSQL)
|
||||
l2/ # L2 ActivityPub registry (FastAPI + PostgreSQL)
|
||||
common/ # Shared templates, middleware, models (artdag_common package)
|
||||
client/ # CLI client
|
||||
test/ # Integration & e2e tests
|
||||
```
|
||||
|
||||
## Tech Stack
|
||||
|
||||
Python 3.11+, FastAPI, Celery, Redis, PostgreSQL (asyncpg for L1), SQLAlchemy, Pydantic, JAX (CPU/GPU), IPFS/Kubo, Docker Swarm, HTMX + Jinja2 for web UI.
|
||||
|
||||
## Key Commands
|
||||
|
||||
### Testing
|
||||
```bash
|
||||
cd l1 && pytest tests/ # L1 unit tests
|
||||
cd core && pytest tests/ # Core unit tests
|
||||
cd test && python run.py # Full integration pipeline
|
||||
```
|
||||
- pytest uses `asyncio_mode = "auto"` for async tests
|
||||
- Test files: `test_*.py`, fixtures in `conftest.py`
|
||||
|
||||
### Linting & Type Checking (L1)
|
||||
```bash
|
||||
cd l1 && ruff check . # Lint (E, F, I, UP rules)
|
||||
cd l1 && mypy app/types.py app/routers/recipes.py tests/
|
||||
```
|
||||
- Line length: 100 chars (E501 ignored)
|
||||
- Mypy: strict on `app/types.py`, `app/routers/recipes.py`, `tests/`; gradual elsewhere
|
||||
- Mypy ignores imports for: celery, redis, artdag, artdag_common, ipfs_client
|
||||
|
||||
### Docker
|
||||
```bash
|
||||
docker build -f l1/Dockerfile -t celery-l1-server:latest .
|
||||
docker build -f l1/Dockerfile.gpu -t celery-l1-gpu:latest .
|
||||
docker build -f l2/Dockerfile -t l2-server:latest .
|
||||
./deploy.sh # Build, push, deploy stacks
|
||||
```
|
||||
|
||||
## Architecture Patterns
|
||||
|
||||
- **3-Phase Execution**: Analyze -> Plan -> Execute (tasks in `l1/tasks/`)
|
||||
- **Content-Addressed**: All data identified by SHA3-256 hashes or IPFS CIDs
|
||||
- **Services Pattern**: Business logic in `app/services/`, API endpoints in `app/routers/`
|
||||
- **Types Module**: Pydantic models and TypedDicts in `app/types.py`
|
||||
- **Celery Tasks**: In `l1/tasks/`, decorated with `@app.task`
|
||||
- **S-Expression Effects**: Composable effect language in `l1/sexp_effects/`
|
||||
- **Storage**: Local filesystem, S3, or IPFS backends (`storage_providers.py`)
|
||||
|
||||
## Auth
|
||||
|
||||
- L1 <-> L2: scoped JWT tokens (no shared secrets)
|
||||
- L2: password + OAuth SSO, token revocation in Redis (30-day expiry)
|
||||
- Federation: ActivityPub RSA signatures (`core/artdag/activitypub/`)
|
||||
|
||||
## Key Config Files
|
||||
|
||||
- `l1/pyproject.toml` - mypy, pytest, ruff config for L1
|
||||
- `l1/celery_app.py` - Celery initialization
|
||||
- `l1/database.py` / `l2/db.py` - SQLAlchemy models
|
||||
- `l1/docker-compose.yml` / `l2/docker-compose.yml` - Swarm stacks
|
||||
|
||||
## Tools
|
||||
|
||||
- Use Context7 MCP for up-to-date library documentation
|
||||
- Playwright MCP is available for browser automation/testing
|
||||
5
artdag/client/.gitignore
vendored
Normal file
5
artdag/client/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
.venv/
|
||||
venv/
|
||||
.scripts
|
||||
263
artdag/client/README.md
Normal file
263
artdag/client/README.md
Normal file
@@ -0,0 +1,263 @@
|
||||
# Art DAG Client
|
||||
|
||||
CLI for interacting with the Art DAG L1 rendering server.
|
||||
|
||||
## Setup
|
||||
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
```bash
|
||||
# Set L1 server URL (default: http://localhost:8100)
|
||||
export ARTDAG_SERVER=http://localhost:8100
|
||||
|
||||
# Set L2 server URL for auth (default: http://localhost:8200)
|
||||
export ARTDAG_L2=https://artdag.rose-ash.com
|
||||
|
||||
# Or pass with commands
|
||||
./artdag.py --server http://localhost:8100 --l2 https://artdag.rose-ash.com <command>
|
||||
```
|
||||
|
||||
## Authentication
|
||||
|
||||
Most commands require authentication. Login credentials are stored locally in `~/.artdag/token.json`.
|
||||
|
||||
```bash
|
||||
# Register a new account
|
||||
artdag register <username> [--email user@example.com]
|
||||
|
||||
# Login
|
||||
artdag login <username>
|
||||
|
||||
# Check current user
|
||||
artdag whoami
|
||||
|
||||
# Logout
|
||||
artdag logout
|
||||
```
|
||||
|
||||
## Commands Reference
|
||||
|
||||
### Server & Stats
|
||||
|
||||
```bash
|
||||
# Show server info
|
||||
artdag info
|
||||
|
||||
# Show user stats (counts of runs, recipes, effects, media, storage)
|
||||
artdag stats
|
||||
|
||||
# List known named assets
|
||||
artdag assets
|
||||
```
|
||||
|
||||
### Runs
|
||||
|
||||
```bash
|
||||
# List runs (with pagination)
|
||||
artdag runs [--limit N] [--offset N]
|
||||
|
||||
# Start a run
|
||||
artdag run <recipe> <input_cid> [--name output_name] [--wait]
|
||||
|
||||
# Get run status
|
||||
artdag status <run_id>
|
||||
|
||||
# Get detailed run info
|
||||
artdag status <run_id> --plan # Show execution plan with steps
|
||||
artdag status <run_id> --artifacts # Show output artifacts
|
||||
artdag status <run_id> --analysis # Show audio analysis data
|
||||
|
||||
# Delete a run
|
||||
artdag delete-run <run_id> [--force]
|
||||
```
|
||||
|
||||
### Recipes
|
||||
|
||||
```bash
|
||||
# List recipes (with pagination)
|
||||
artdag recipes [--limit N] [--offset N]
|
||||
|
||||
# Show recipe details
|
||||
artdag recipe <recipe_id>
|
||||
|
||||
# Upload a recipe (YAML or S-expression)
|
||||
artdag upload-recipe <filepath>
|
||||
|
||||
# Run a recipe with inputs
|
||||
artdag run-recipe <recipe_id> -i node_id:cid [--wait]
|
||||
|
||||
# Delete a recipe
|
||||
artdag delete-recipe <recipe_id> [--force]
|
||||
```
|
||||
|
||||
### Effects
|
||||
|
||||
```bash
|
||||
# List effects (with pagination)
|
||||
artdag effects [--limit N] [--offset N]
|
||||
|
||||
# Show effect details
|
||||
artdag effect <cid>
|
||||
|
||||
# Show effect with source code
|
||||
artdag effect <cid> --source
|
||||
|
||||
# Upload an effect (.py file)
|
||||
artdag upload-effect <filepath>
|
||||
```
|
||||
|
||||
### Media / Cache
|
||||
|
||||
```bash
|
||||
# List cached content (with pagination and type filter)
|
||||
artdag cache [--limit N] [--offset N] [--type all|image|video|audio]
|
||||
|
||||
# View/download cached content
|
||||
artdag view <cid> # Show metadata (size, type, friendly name)
|
||||
artdag view <cid> --raw # Get raw content info
|
||||
artdag view <cid> -o output.mp4 # Download raw file
|
||||
artdag view <cid> -o - | mpv - # Pipe raw content to player
|
||||
|
||||
# Upload file to cache and IPFS
|
||||
artdag upload <filepath>
|
||||
|
||||
# Import local file to cache (local server only)
|
||||
artdag import <filepath>
|
||||
|
||||
# View/update metadata
|
||||
artdag meta <cid> # View metadata
|
||||
artdag meta <cid> -d "Description" # Set description
|
||||
artdag meta <cid> -t "tag1,tag2" # Set tags
|
||||
artdag meta <cid> --publish "my-video" # Publish to L2
|
||||
|
||||
# Delete cached content
|
||||
artdag delete-cache <cid> [--force]
|
||||
```
|
||||
|
||||
### Storage Providers
|
||||
|
||||
```bash
|
||||
# List storage providers
|
||||
artdag storage list
|
||||
|
||||
# Add a provider (interactive)
|
||||
artdag storage add <type> [--name friendly_name] [--capacity GB]
|
||||
# Types: pinata, web3storage, nftstorage, infura, filebase, storj, local
|
||||
|
||||
# Test provider connectivity
|
||||
artdag storage test <id>
|
||||
|
||||
# Delete a provider
|
||||
artdag storage delete <id> [--force]
|
||||
```
|
||||
|
||||
### Folders & Collections
|
||||
|
||||
```bash
|
||||
# Folders
|
||||
artdag folder list
|
||||
artdag folder create <path>
|
||||
artdag folder delete <path>
|
||||
|
||||
# Collections
|
||||
artdag collection list
|
||||
artdag collection create <name>
|
||||
artdag collection delete <name>
|
||||
```
|
||||
|
||||
### v2 API (3-Phase Execution)
|
||||
|
||||
```bash
|
||||
# Generate execution plan
|
||||
artdag plan <recipe_file> -i name:cid [--features beats,energy] [--output plan.json]
|
||||
|
||||
# Execute a plan
|
||||
artdag execute-plan <plan_file> [--wait]
|
||||
|
||||
# Run recipe (plan + execute in one step)
|
||||
artdag run-v2 <recipe_file> -i name:cid [--wait]
|
||||
|
||||
# Check v2 run status
|
||||
artdag run-status <run_id>
|
||||
```
|
||||
|
||||
### Publishing to L2
|
||||
|
||||
```bash
|
||||
# Publish a run output to L2
|
||||
artdag publish <run_id> <output_name>
|
||||
```
|
||||
|
||||
### Data Management
|
||||
|
||||
```bash
|
||||
# Clear all user data (preserves storage configs)
|
||||
artdag clear-data [--force]
|
||||
```
|
||||
|
||||
## Example Workflows
|
||||
|
||||
### Basic Rendering
|
||||
|
||||
```bash
|
||||
# Login
|
||||
artdag login myuser
|
||||
|
||||
# Check available assets
|
||||
artdag assets
|
||||
|
||||
# Run an effect on an input
|
||||
artdag run dog cat --wait
|
||||
|
||||
# View runs
|
||||
artdag runs
|
||||
|
||||
# Download result
|
||||
artdag view <output_cid> -o result.mp4
|
||||
```
|
||||
|
||||
### Recipe-Based Processing
|
||||
|
||||
```bash
|
||||
# Upload a recipe
|
||||
artdag upload-recipe my-recipe.yaml
|
||||
|
||||
# View recipes
|
||||
artdag recipes
|
||||
|
||||
# Run with inputs
|
||||
artdag run-recipe <recipe_id> -i video:bafkrei... --wait
|
||||
|
||||
# View run plan
|
||||
artdag status <run_id> --plan
|
||||
```
|
||||
|
||||
### Managing Storage
|
||||
|
||||
```bash
|
||||
# Add Pinata storage
|
||||
artdag storage add pinata --name "My Pinata"
|
||||
|
||||
# Test connection
|
||||
artdag storage test 1
|
||||
|
||||
# View all providers
|
||||
artdag storage list
|
||||
```
|
||||
|
||||
### Browsing Media
|
||||
|
||||
```bash
|
||||
# List all media
|
||||
artdag cache
|
||||
|
||||
# Filter by type
|
||||
artdag cache --type video --limit 20
|
||||
|
||||
# View with pagination
|
||||
artdag cache --offset 20 --limit 20
|
||||
```
|
||||
2316
artdag/client/artdag.py
Executable file
2316
artdag/client/artdag.py
Executable file
File diff suppressed because it is too large
Load Diff
3
artdag/client/requirements.txt
Normal file
3
artdag/client/requirements.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
click>=8.0.0
|
||||
requests>=2.31.0
|
||||
PyYAML>=6.0
|
||||
38
artdag/client/test_gpu_effects.sexp
Normal file
38
artdag/client/test_gpu_effects.sexp
Normal file
@@ -0,0 +1,38 @@
|
||||
;; GPU Effects Performance Test
|
||||
;; Tests rotation, zoom, hue-shift, ripple
|
||||
|
||||
(stream "gpu_effects_test"
|
||||
:fps 30
|
||||
:width 1920
|
||||
:height 1080
|
||||
:seed 42
|
||||
|
||||
;; Load primitives
|
||||
(require-primitives "geometry")
|
||||
(require-primitives "core")
|
||||
(require-primitives "math")
|
||||
(require-primitives "image")
|
||||
(require-primitives "color_ops")
|
||||
|
||||
;; Frame pipeline - test GPU effects
|
||||
(frame
|
||||
(let [;; Create a base gradient image
|
||||
r (+ 0.5 (* 0.5 (math:sin (* t 1))))
|
||||
g (+ 0.5 (* 0.5 (math:sin (* t 1.3))))
|
||||
b (+ 0.5 (* 0.5 (math:sin (* t 1.7))))
|
||||
color [(* r 255) (* g 255) (* b 255)]
|
||||
base (image:make-image 1920 1080 color)
|
||||
|
||||
;; Apply rotation (this is the main GPU bottleneck we optimized)
|
||||
angle (* t 30)
|
||||
rotated (geometry:rotate base angle)
|
||||
|
||||
;; Apply hue shift
|
||||
hue-shift (* 180 (math:sin (* t 0.5)))
|
||||
hued (color_ops:hue-shift rotated hue-shift)
|
||||
|
||||
;; Apply brightness based on time
|
||||
brightness (+ 0.8 (* 0.4 (math:sin (* t 2))))
|
||||
bright (color_ops:brightness hued brightness)]
|
||||
|
||||
bright)))
|
||||
26
artdag/client/test_simple.sexp
Normal file
26
artdag/client/test_simple.sexp
Normal file
@@ -0,0 +1,26 @@
|
||||
;; Simple Test - No external assets required
|
||||
;; Just generates a color gradient that changes over time
|
||||
|
||||
(stream "simple_test"
|
||||
:fps 30
|
||||
:width 720
|
||||
:height 720
|
||||
:seed 42
|
||||
|
||||
;; Load standard primitives
|
||||
(require-primitives "geometry")
|
||||
(require-primitives "core")
|
||||
(require-primitives "math")
|
||||
(require-primitives "image")
|
||||
(require-primitives "color_ops")
|
||||
|
||||
;; Frame pipeline - animated gradient
|
||||
(frame
|
||||
(let [;; Time-based color cycling (0-1 range)
|
||||
r (+ 0.5 (* 0.5 (math:sin (* t 1))))
|
||||
g (+ 0.5 (* 0.5 (math:sin (* t 1.3))))
|
||||
b (+ 0.5 (* 0.5 (math:sin (* t 1.7))))
|
||||
;; Convert to 0-255 range and create solid color frame
|
||||
color [(* r 255) (* g 255) (* b 255)]
|
||||
frame (image:make-image 720 720 color)]
|
||||
frame)))
|
||||
293
artdag/common/README.md
Normal file
293
artdag/common/README.md
Normal file
@@ -0,0 +1,293 @@
|
||||
# artdag-common
|
||||
|
||||
Shared components for Art-DAG L1 (celery) and L2 (activity-pub) servers.
|
||||
|
||||
## Features
|
||||
|
||||
- **Jinja2 Templating**: Unified template environment with shared base templates
|
||||
- **Reusable Components**: Cards, tables, pagination, DAG visualization, media preview
|
||||
- **Authentication Middleware**: Cookie and JWT token parsing
|
||||
- **Content Negotiation**: HTML/JSON/ActivityPub format detection
|
||||
- **Utility Functions**: Hash truncation, file size formatting, status colors
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
pip install -e /path/to/artdag-common
|
||||
|
||||
# Or add to requirements.txt
|
||||
-e file:../common
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
```python
|
||||
from fastapi import FastAPI, Request
|
||||
from artdag_common import create_jinja_env, render
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
# Initialize templates with app-specific directory
|
||||
templates = create_jinja_env("app/templates")
|
||||
|
||||
@app.get("/")
|
||||
async def home(request: Request):
|
||||
return render(templates, "home.html", request, title="Home")
|
||||
```
|
||||
|
||||
## Package Structure
|
||||
|
||||
```
|
||||
artdag_common/
|
||||
├── __init__.py # Package exports
|
||||
├── constants.py # CDN URLs, colors, configs
|
||||
├── rendering.py # Jinja2 environment and helpers
|
||||
├── middleware/
|
||||
│ ├── auth.py # Authentication utilities
|
||||
│ └── content_negotiation.py # Accept header parsing
|
||||
├── models/
|
||||
│ ├── requests.py # Shared request models
|
||||
│ └── responses.py # Shared response models
|
||||
├── utils/
|
||||
│ ├── formatting.py # Text/date formatting
|
||||
│ ├── media.py # Media type detection
|
||||
│ └── pagination.py # Pagination helpers
|
||||
└── templates/
|
||||
├── base.html # Base layout template
|
||||
└── components/
|
||||
├── badge.html # Status/type badges
|
||||
├── card.html # Info cards
|
||||
├── dag.html # Cytoscape DAG visualization
|
||||
├── media_preview.html # Video/image/audio preview
|
||||
├── pagination.html # HTMX pagination
|
||||
└── table.html # Styled tables
|
||||
```
|
||||
|
||||
## Jinja2 Templates
|
||||
|
||||
### Base Template
|
||||
|
||||
The `base.html` template provides:
|
||||
- Dark theme with Tailwind CSS
|
||||
- HTMX integration
|
||||
- Navigation slot
|
||||
- Content block
|
||||
- Optional Cytoscape.js block
|
||||
|
||||
```html
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}My Page{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<h1>Hello World</h1>
|
||||
{% endblock %}
|
||||
```
|
||||
|
||||
### Reusable Components
|
||||
|
||||
#### Card
|
||||
|
||||
```html
|
||||
{% include "components/card.html" %}
|
||||
```
|
||||
|
||||
```html
|
||||
<!-- Usage in your template -->
|
||||
<div class="...card styles...">
|
||||
{% block card_title %}Title{% endblock %}
|
||||
{% block card_content %}Content{% endblock %}
|
||||
</div>
|
||||
```
|
||||
|
||||
#### Badge
|
||||
|
||||
Status and type badges with appropriate colors:
|
||||
|
||||
```html
|
||||
{% from "components/badge.html" import status_badge, type_badge %}
|
||||
|
||||
{{ status_badge("completed") }} <!-- Green -->
|
||||
{{ status_badge("failed") }} <!-- Red -->
|
||||
{{ type_badge("video") }}
|
||||
```
|
||||
|
||||
#### DAG Visualization
|
||||
|
||||
Interactive Cytoscape.js graph:
|
||||
|
||||
```html
|
||||
{% include "components/dag.html" %}
|
||||
```
|
||||
|
||||
Requires passing `nodes` and `edges` data to template context.
|
||||
|
||||
#### Media Preview
|
||||
|
||||
Responsive media preview with format detection:
|
||||
|
||||
```html
|
||||
{% include "components/media_preview.html" %}
|
||||
```
|
||||
|
||||
Supports video, audio, and image formats.
|
||||
|
||||
#### Pagination
|
||||
|
||||
HTMX-powered infinite scroll pagination:
|
||||
|
||||
```html
|
||||
{% include "components/pagination.html" %}
|
||||
```
|
||||
|
||||
## Template Rendering
|
||||
|
||||
### Full Page Render
|
||||
|
||||
```python
|
||||
from artdag_common import render
|
||||
|
||||
@app.get("/runs/{run_id}")
|
||||
async def run_detail(run_id: str, request: Request):
|
||||
run = get_run(run_id)
|
||||
return render(templates, "runs/detail.html", request, run=run)
|
||||
```
|
||||
|
||||
### Fragment Render (HTMX)
|
||||
|
||||
```python
|
||||
from artdag_common import render_fragment
|
||||
|
||||
@app.get("/runs/{run_id}/status")
|
||||
async def run_status_fragment(run_id: str):
|
||||
run = get_run(run_id)
|
||||
html = render_fragment(templates, "components/status.html", status=run.status)
|
||||
return HTMLResponse(html)
|
||||
```
|
||||
|
||||
## Authentication Middleware
|
||||
|
||||
### UserContext
|
||||
|
||||
```python
|
||||
from artdag_common.middleware.auth import UserContext, get_user_from_cookie
|
||||
|
||||
@app.get("/profile")
|
||||
async def profile(request: Request):
|
||||
user = get_user_from_cookie(request)
|
||||
if not user:
|
||||
return RedirectResponse("/login")
|
||||
return {"username": user.username, "actor_id": user.actor_id}
|
||||
```
|
||||
|
||||
### Token Parsing
|
||||
|
||||
```python
|
||||
from artdag_common.middleware.auth import get_user_from_header, decode_jwt_claims
|
||||
|
||||
@app.get("/api/me")
|
||||
async def api_me(request: Request):
|
||||
user = get_user_from_header(request)
|
||||
if not user:
|
||||
raise HTTPException(401, "Not authenticated")
|
||||
return {"user": user.username}
|
||||
```
|
||||
|
||||
## Content Negotiation
|
||||
|
||||
Detect what response format the client wants:
|
||||
|
||||
```python
|
||||
from artdag_common.middleware.content_negotiation import wants_html, wants_json, wants_activity_json
|
||||
|
||||
@app.get("/users/{username}")
|
||||
async def user_profile(username: str, request: Request):
|
||||
user = get_user(username)
|
||||
|
||||
if wants_activity_json(request):
|
||||
return ActivityPubActor(user)
|
||||
elif wants_json(request):
|
||||
return user.dict()
|
||||
else:
|
||||
return render(templates, "users/profile.html", request, user=user)
|
||||
```
|
||||
|
||||
## Constants
|
||||
|
||||
### CDN URLs
|
||||
|
||||
```python
|
||||
from artdag_common import TAILWIND_CDN, HTMX_CDN, CYTOSCAPE_CDN
|
||||
|
||||
# Available in templates as globals:
|
||||
# {{ TAILWIND_CDN }}
|
||||
# {{ HTMX_CDN }}
|
||||
# {{ CYTOSCAPE_CDN }}
|
||||
```
|
||||
|
||||
### Node Colors
|
||||
|
||||
```python
|
||||
from artdag_common import NODE_COLORS
|
||||
|
||||
# {
|
||||
# "SOURCE": "#3b82f6", # Blue
|
||||
# "EFFECT": "#22c55e", # Green
|
||||
# "OUTPUT": "#a855f7", # Purple
|
||||
# "ANALYSIS": "#f59e0b", # Amber
|
||||
# "_LIST": "#6366f1", # Indigo
|
||||
# "default": "#6b7280", # Gray
|
||||
# }
|
||||
```
|
||||
|
||||
### Status Colors
|
||||
|
||||
```python
|
||||
STATUS_COLORS = {
|
||||
"completed": "bg-green-600",
|
||||
"cached": "bg-blue-600",
|
||||
"running": "bg-yellow-600",
|
||||
"pending": "bg-gray-600",
|
||||
"failed": "bg-red-600",
|
||||
}
|
||||
```
|
||||
|
||||
## Custom Jinja2 Filters
|
||||
|
||||
The following filters are available in all templates:
|
||||
|
||||
| Filter | Usage | Description |
|
||||
|--------|-------|-------------|
|
||||
| `truncate_hash` | `{{ hash\|truncate_hash }}` | Shorten hash to 16 chars with ellipsis |
|
||||
| `format_size` | `{{ bytes\|format_size }}` | Format bytes as KB/MB/GB |
|
||||
| `status_color` | `{{ status\|status_color }}` | Get Tailwind class for status |
|
||||
|
||||
Example:
|
||||
|
||||
```html
|
||||
<span class="{{ run.status|status_color }}">
|
||||
{{ run.status }}
|
||||
</span>
|
||||
|
||||
<code>{{ content_hash|truncate_hash }}</code>
|
||||
|
||||
<span>{{ file_size|format_size }}</span>
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
```bash
|
||||
cd /root/art-dag/common
|
||||
|
||||
# Install in development mode
|
||||
pip install -e .
|
||||
|
||||
# Run tests
|
||||
pytest
|
||||
```
|
||||
|
||||
## Dependencies
|
||||
|
||||
- `fastapi>=0.100.0` - Web framework
|
||||
- `jinja2>=3.1.0` - Templating engine
|
||||
- `pydantic>=2.0.0` - Data validation
|
||||
18
artdag/common/artdag_common/__init__.py
Normal file
18
artdag/common/artdag_common/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""
|
||||
Art-DAG Common Library
|
||||
|
||||
Shared components for L1 (celery) and L2 (activity-pub) servers.
|
||||
"""
|
||||
|
||||
from .constants import NODE_COLORS, TAILWIND_CDN, HTMX_CDN, CYTOSCAPE_CDN
|
||||
from .rendering import create_jinja_env, render, render_fragment
|
||||
|
||||
__all__ = [
|
||||
"NODE_COLORS",
|
||||
"TAILWIND_CDN",
|
||||
"HTMX_CDN",
|
||||
"CYTOSCAPE_CDN",
|
||||
"create_jinja_env",
|
||||
"render",
|
||||
"render_fragment",
|
||||
]
|
||||
76
artdag/common/artdag_common/constants.py
Normal file
76
artdag/common/artdag_common/constants.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""
|
||||
Shared constants for Art-DAG servers.
|
||||
"""
|
||||
|
||||
# CDN URLs
|
||||
TAILWIND_CDN = "https://cdn.tailwindcss.com?plugins=typography"
|
||||
HTMX_CDN = "https://unpkg.com/htmx.org@1.9.10"
|
||||
CYTOSCAPE_CDN = "https://cdnjs.cloudflare.com/ajax/libs/cytoscape/3.28.1/cytoscape.min.js"
|
||||
DAGRE_CDN = "https://cdnjs.cloudflare.com/ajax/libs/dagre/0.8.5/dagre.min.js"
|
||||
CYTOSCAPE_DAGRE_CDN = "https://cdn.jsdelivr.net/npm/cytoscape-dagre@2.5.0/cytoscape-dagre.min.js"
|
||||
|
||||
# Node colors for DAG visualization
|
||||
NODE_COLORS = {
|
||||
"SOURCE": "#3b82f6", # Blue - input sources
|
||||
"EFFECT": "#22c55e", # Green - processing effects
|
||||
"OUTPUT": "#a855f7", # Purple - final outputs
|
||||
"ANALYSIS": "#f59e0b", # Amber - analysis nodes
|
||||
"_LIST": "#6366f1", # Indigo - list aggregation
|
||||
"default": "#6b7280", # Gray - unknown types
|
||||
}
|
||||
|
||||
# Status colors
|
||||
STATUS_COLORS = {
|
||||
"completed": "bg-green-600",
|
||||
"cached": "bg-blue-600",
|
||||
"running": "bg-yellow-600",
|
||||
"pending": "bg-gray-600",
|
||||
"failed": "bg-red-600",
|
||||
}
|
||||
|
||||
# Tailwind dark theme configuration
|
||||
TAILWIND_CONFIG = """
|
||||
<script>
|
||||
tailwind.config = {
|
||||
darkMode: 'class',
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
dark: {
|
||||
600: '#374151',
|
||||
700: '#1f2937',
|
||||
800: '#111827',
|
||||
900: '#030712',
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
<style type="text/tailwindcss">
|
||||
@layer utilities {
|
||||
.prose-invert {
|
||||
--tw-prose-body: #d1d5db;
|
||||
--tw-prose-headings: #f9fafb;
|
||||
--tw-prose-lead: #9ca3af;
|
||||
--tw-prose-links: #60a5fa;
|
||||
--tw-prose-bold: #f9fafb;
|
||||
--tw-prose-counters: #9ca3af;
|
||||
--tw-prose-bullets: #6b7280;
|
||||
--tw-prose-hr: #374151;
|
||||
--tw-prose-quotes: #f3f4f6;
|
||||
--tw-prose-quote-borders: #374151;
|
||||
--tw-prose-captions: #9ca3af;
|
||||
--tw-prose-code: #f9fafb;
|
||||
--tw-prose-pre-code: #e5e7eb;
|
||||
--tw-prose-pre-bg: #1f2937;
|
||||
--tw-prose-th-borders: #4b5563;
|
||||
--tw-prose-td-borders: #374151;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
"""
|
||||
|
||||
# Default pagination settings
|
||||
DEFAULT_PAGE_SIZE = 20
|
||||
MAX_PAGE_SIZE = 100
|
||||
91
artdag/common/artdag_common/fragments.py
Normal file
91
artdag/common/artdag_common/fragments.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""Fragment client for fetching HTML fragments from coop apps.
|
||||
|
||||
Lightweight httpx-based client (no Quart dependency) for Art-DAG to consume
|
||||
coop app fragments like nav-tree, auth-menu, and cart-mini.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
from typing import Sequence
|
||||
|
||||
import httpx
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
FRAGMENT_HEADER = "X-Fragment-Request"
|
||||
|
||||
_client: httpx.AsyncClient | None = None
|
||||
_DEFAULT_TIMEOUT = 2.0
|
||||
|
||||
|
||||
def _get_client() -> httpx.AsyncClient:
|
||||
global _client
|
||||
if _client is None or _client.is_closed:
|
||||
_client = httpx.AsyncClient(
|
||||
timeout=httpx.Timeout(_DEFAULT_TIMEOUT),
|
||||
follow_redirects=False,
|
||||
)
|
||||
return _client
|
||||
|
||||
|
||||
def _internal_url(app_name: str) -> str:
|
||||
"""Resolve internal base URL for a coop app.
|
||||
|
||||
Looks up ``INTERNAL_URL_{APP}`` first, falls back to ``http://{app}:8000``.
|
||||
"""
|
||||
env_key = f"INTERNAL_URL_{app_name.upper()}"
|
||||
return os.getenv(env_key, f"http://{app_name}:8000").rstrip("/")
|
||||
|
||||
|
||||
async def fetch_fragment(
|
||||
app_name: str,
|
||||
fragment_type: str,
|
||||
*,
|
||||
params: dict | None = None,
|
||||
timeout: float = _DEFAULT_TIMEOUT,
|
||||
required: bool = False,
|
||||
) -> str:
|
||||
"""Fetch an HTML fragment from a coop app.
|
||||
|
||||
Returns empty string on failure by default (required=False).
|
||||
"""
|
||||
base = _internal_url(app_name)
|
||||
url = f"{base}/internal/fragments/{fragment_type}"
|
||||
try:
|
||||
resp = await _get_client().get(
|
||||
url,
|
||||
params=params,
|
||||
headers={FRAGMENT_HEADER: "1"},
|
||||
timeout=timeout,
|
||||
)
|
||||
if resp.status_code == 200:
|
||||
return resp.text
|
||||
msg = f"Fragment {app_name}/{fragment_type} returned {resp.status_code}"
|
||||
log.warning(msg)
|
||||
if required:
|
||||
raise RuntimeError(msg)
|
||||
return ""
|
||||
except RuntimeError:
|
||||
raise
|
||||
except Exception as exc:
|
||||
msg = f"Fragment {app_name}/{fragment_type} failed: {exc}"
|
||||
log.warning(msg)
|
||||
if required:
|
||||
raise RuntimeError(msg) from exc
|
||||
return ""
|
||||
|
||||
|
||||
async def fetch_fragments(
|
||||
requests: Sequence[tuple[str, str, dict | None]],
|
||||
*,
|
||||
timeout: float = _DEFAULT_TIMEOUT,
|
||||
required: bool = False,
|
||||
) -> list[str]:
|
||||
"""Fetch multiple fragments concurrently."""
|
||||
return list(await asyncio.gather(*(
|
||||
fetch_fragment(app, ftype, params=params, timeout=timeout, required=required)
|
||||
for app, ftype, params in requests
|
||||
)))
|
||||
16
artdag/common/artdag_common/middleware/__init__.py
Normal file
16
artdag/common/artdag_common/middleware/__init__.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""
|
||||
Middleware and FastAPI dependencies for Art-DAG servers.
|
||||
"""
|
||||
|
||||
from .auth import UserContext, get_user_from_cookie, get_user_from_header, require_auth
|
||||
from .content_negotiation import wants_html, wants_json, ContentType
|
||||
|
||||
# Public API of the middleware package; keep in sync with the imports above.
__all__ = [
    "UserContext",
    "get_user_from_cookie",
    "get_user_from_header",
    "require_auth",
    "wants_html",
    "wants_json",
    "ContentType",
]
|
||||
Binary file not shown.
276
artdag/common/artdag_common/middleware/auth.py
Normal file
276
artdag/common/artdag_common/middleware/auth.py
Normal file
@@ -0,0 +1,276 @@
|
||||
"""
|
||||
Authentication middleware and dependencies.
|
||||
|
||||
Provides common authentication patterns for L1 and L2 servers.
|
||||
Each server can extend or customize these as needed.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Callable, Optional, Awaitable, Any
|
||||
import base64
|
||||
import json
|
||||
|
||||
from fastapi import Request, HTTPException, Depends
|
||||
from fastapi.responses import RedirectResponse
|
||||
|
||||
|
||||
@dataclass
class UserContext:
    """User context extracted from authentication."""
    # Short username; may or may not carry a leading "@" depending on source.
    username: str
    actor_id: str  # Full actor ID like "@user@server.com"
    # Raw token when auth came from a Bearer header or auth_token cookie.
    token: Optional[str] = None
    l2_server: Optional[str] = None  # L2 server URL for this user
    email: Optional[str] = None  # User's email address

    @property
    def display_name(self) -> str:
        """Get display name (username without @ prefix)."""
        # lstrip strips *all* leading "@" characters, not just one.
        return self.username.lstrip("@")
|
||||
|
||||
|
||||
def get_user_from_cookie(request: Request) -> Optional[UserContext]:
    """
    Extract user context from session cookie.

    Supports two cookie formats:
    1. artdag_session: base64-encoded JSON {"username": "user", "actor_id": "@user@server.com"}
    2. auth_token: raw JWT token (used by L1 servers)

    Args:
        request: FastAPI request

    Returns:
        UserContext if valid cookie found, None otherwise
    """
    # Try artdag_session cookie first (base64-encoded JSON)
    cookie = request.cookies.get("artdag_session")
    if cookie:
        try:
            data = json.loads(base64.b64decode(cookie))
            # Fix: valid JSON that is not an object (a list, number, string)
            # previously crashed with AttributeError on data.get(); only
            # accept a JSON object, otherwise fall through to auth_token.
            if isinstance(data, dict):
                username = data.get("username", "")
                actor_id = data.get("actor_id", "")
                if not actor_id and username:
                    actor_id = f"@{username}"
                return UserContext(
                    username=username,
                    actor_id=actor_id,
                    email=data.get("email", ""),
                )
        except (json.JSONDecodeError, ValueError, KeyError):
            # Malformed base64 / JSON: ignore and try the other cookie.
            pass

    # Try auth_token cookie (raw JWT, used by L1)
    token = request.cookies.get("auth_token")
    if token:
        claims = decode_jwt_claims(token)
        if claims:
            username = claims.get("username") or claims.get("sub", "")
            actor_id = claims.get("actor_id") or claims.get("actor")
            if not actor_id and username:
                actor_id = f"@{username}"
            return UserContext(
                username=username,
                actor_id=actor_id or "",
                token=token,
                email=claims.get("email", ""),
            )

    return None
|
||||
|
||||
|
||||
def get_user_from_header(request: Request) -> Optional[UserContext]:
    """
    Extract user context from Authorization header.

    Supports:
    - Bearer <token> format (JWT or opaque token)

    Note: ``Basic`` credentials are NOT handled here (the previous docstring
    claimed they were); only bearer tokens are inspected.

    Args:
        request: FastAPI request

    Returns:
        UserContext if valid header found, None otherwise
    """
    auth_header = request.headers.get("Authorization", "")

    if auth_header.startswith("Bearer "):
        token = auth_header[7:]
        # Attempt to decode JWT claims (no signature verification here)
        claims = decode_jwt_claims(token)
        if claims:
            username = claims.get("username") or claims.get("sub", "")
            actor_id = claims.get("actor_id") or claims.get("actor")
            # Default actor_id to @username if not provided
            if not actor_id and username:
                actor_id = f"@{username}"
            return UserContext(
                username=username,
                actor_id=actor_id or "",
                token=token,
                # Consistency fix: surface the email claim like
                # get_user_from_cookie already does.
                email=claims.get("email", ""),
            )

    return None
|
||||
|
||||
|
||||
def decode_jwt_claims(token: str) -> Optional[dict]:
    """
    Decode JWT claims without verification.

    This is useful for extracting user info from a token
    when full verification is handled elsewhere.

    Args:
        token: JWT token string

    Returns:
        Claims dict if valid JWT format, None otherwise
    """
    segments = token.split(".")
    if len(segments) != 3:
        return None

    # Claims live in the middle segment; JWTs strip base64 padding,
    # so restore it before decoding.
    body = segments[1]
    remainder = len(body) % 4
    if remainder:
        body += "=" * (4 - remainder)

    try:
        return json.loads(base64.urlsafe_b64decode(body))
    except (json.JSONDecodeError, ValueError):
        # Covers bad base64 (binascii.Error is a ValueError) and bad JSON.
        return None
|
||||
|
||||
|
||||
def create_auth_dependency(
    token_validator: Optional[Callable[[str], Awaitable[Optional[dict]]]] = None,
    allow_cookie: bool = True,
    allow_header: bool = True,
):
    """
    Create a customized auth dependency for a specific server.

    Args:
        token_validator: Optional async function to validate tokens with backend
        allow_cookie: Whether to check cookies for auth
        allow_header: Whether to check Authorization header

    Returns:
        FastAPI dependency function
    """
    async def get_current_user(request: Request) -> Optional[UserContext]:
        user: Optional[UserContext] = None

        # Header credentials take priority (API clients).
        if allow_header:
            user = get_user_from_header(request)
            if user is not None and token_validator is not None:
                # Reject tokens the backend does not recognise.
                if not await token_validator(user.token):
                    user = None

        # Browser session cookie is the fallback.
        if user is None and allow_cookie:
            user = get_user_from_cookie(request)

        return user

    return get_current_user
|
||||
|
||||
|
||||
async def require_auth(request: Request) -> UserContext:
    """
    Dependency that requires authentication.

    Raises HTTPException 401 if not authenticated.
    Use with Depends() in route handlers.

    Example:
        @app.get("/protected")
        async def protected_route(user: UserContext = Depends(require_auth)):
            return {"user": user.username}
    """
    # Header credentials win; cookie session is the fallback.
    user = get_user_from_header(request) or get_user_from_cookie(request)
    if user is not None:
        return user

    # Browsers (Accept: text/html) get bounced to the login page;
    # API clients get a plain 401.
    if "text/html" in request.headers.get("accept", ""):
        raise HTTPException(
            status_code=302,
            headers={"Location": "/login"}
        )
    raise HTTPException(
        status_code=401,
        detail="Authentication required"
    )
|
||||
|
||||
|
||||
def require_owner(resource_owner_field: str = "username"):
    """
    Dependency factory that requires the user to own the resource.

    Args:
        resource_owner_field: Field name on the resource that contains owner username

    Returns:
        Dependency function

    Example:
        @app.delete("/items/{item_id}")
        async def delete_item(
            item: Item = Depends(get_item),
            user: UserContext = Depends(require_owner("created_by"))
        ):
            ...
    """
    # NOTE(review): resource_owner_field is currently unused — the returned
    # dependency only enforces authentication. Confirm whether the ownership
    # comparison was intended to live here or in each route.
    async def check_ownership(
        request: Request,
        user: UserContext = Depends(require_auth),
    ) -> UserContext:
        # The actual ownership check must be done in the route
        # after fetching the resource
        return user

    return check_ownership
|
||||
|
||||
|
||||
def set_auth_cookie(response: Any, user: UserContext, max_age: int = 86400 * 30) -> None:
    """
    Set authentication cookie on response.

    Args:
        response: FastAPI response object
        user: User context to store
        max_age: Cookie max age in seconds (default 30 days)
    """
    # Serialize the minimal session payload; email only when present.
    payload = {
        "username": user.username,
        "actor_id": user.actor_id,
    }
    if user.email:
        payload["email"] = user.email

    encoded = base64.b64encode(json.dumps(payload).encode()).decode()

    response.set_cookie(
        key="artdag_session",
        value=encoded,
        max_age=max_age,
        httponly=True,
        samesite="lax",
        secure=True,  # Require HTTPS in production
    )
|
||||
|
||||
|
||||
def clear_auth_cookie(response: Any) -> None:
    """Clear authentication cookie (logs the browser session out)."""
    response.delete_cookie(key="artdag_session")
|
||||
174
artdag/common/artdag_common/middleware/content_negotiation.py
Normal file
174
artdag/common/artdag_common/middleware/content_negotiation.py
Normal file
@@ -0,0 +1,174 @@
|
||||
"""
|
||||
Content negotiation utilities.
|
||||
|
||||
Helps determine what response format the client wants.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import Request
|
||||
|
||||
|
||||
class ContentType(Enum):
    """Response content types."""
    # Values are the canonical MIME type strings used in Accept headers.
    HTML = "text/html"
    JSON = "application/json"
    ACTIVITY_JSON = "application/activity+json"
    XML = "application/xml"
|
||||
|
||||
|
||||
def wants_html(request: Request) -> bool:
    """
    Check if the client wants HTML response.

    Returns True if:
    - Accept header contains text/html
    - Accept header contains application/xhtml+xml
    - No Accept header (browser default)

    Args:
        request: FastAPI request

    Returns:
        True if HTML is preferred
    """
    accept = request.headers.get("accept", "")

    # An absent Accept header usually means a browser.
    if not accept:
        return True

    # Substring match is intentional: browsers send long composite headers.
    return "text/html" in accept or "application/xhtml" in accept
|
||||
|
||||
|
||||
def wants_json(request: Request) -> bool:
    """
    Check if the client wants JSON response.

    Returns True if:
    - Accept header contains application/json
    - Accept header does NOT contain text/html
    - Request has .json suffix (convention)

    Args:
        request: FastAPI request

    Returns:
        True if JSON is preferred
    """
    accept = request.headers.get("accept", "")

    # JSON wins only when HTML is not also acceptable — browsers often
    # advertise both, in which case HTML is preferred.
    if "application/json" in accept and "text/html" not in accept:
        return True

    # A ".json" URL suffix is an explicit opt-in regardless of headers.
    return request.url.path.endswith(".json")
|
||||
|
||||
|
||||
def wants_activity_json(request: Request) -> bool:
    """
    Check if the client wants ActivityPub JSON-LD response.

    Used for federation with other ActivityPub servers.

    Args:
        request: FastAPI request

    Returns:
        True if ActivityPub format is preferred
    """
    accept = request.headers.get("accept", "")
    # Either the dedicated ActivityPub type or generic JSON-LD counts.
    return "application/activity+json" in accept or "application/ld+json" in accept
|
||||
|
||||
|
||||
def get_preferred_type(request: Request) -> ContentType:
    """
    Determine the preferred content type from Accept header.

    Args:
        request: FastAPI request

    Returns:
        ContentType enum value
    """
    # Most specific first: federation JSON-LD, then plain JSON, else HTML.
    if wants_activity_json(request):
        return ContentType.ACTIVITY_JSON
    return ContentType.JSON if wants_json(request) else ContentType.HTML
|
||||
|
||||
|
||||
def is_htmx_request(request: Request) -> bool:
    """
    Check if this is an HTMX request (partial page update).

    HTMX requests set the HX-Request header to the literal string "true".

    Args:
        request: FastAPI request

    Returns:
        True if this is an HTMX request
    """
    header_value = request.headers.get("HX-Request")
    return header_value == "true"
|
||||
|
||||
|
||||
def get_htmx_target(request: Request) -> Optional[str]:
    """
    Get the HTMX target element ID.

    Args:
        request: FastAPI request

    Returns:
        Target element ID or None
    """
    # None when the header is absent (non-HTMX request).
    target = request.headers.get("HX-Target")
    return target
|
||||
|
||||
|
||||
def get_htmx_trigger(request: Request) -> Optional[str]:
    """
    Get the HTMX trigger element ID.

    Args:
        request: FastAPI request

    Returns:
        Trigger element ID or None
    """
    # None when the header is absent (non-HTMX request).
    trigger = request.headers.get("HX-Trigger")
    return trigger
|
||||
|
||||
|
||||
def is_ios_request(request: Request) -> bool:
    """
    Check if request is from iOS device.

    Useful for video format selection (iOS prefers MP4).

    NOTE(review): modern iPadOS often reports a macOS user agent, so iPads
    may slip through this check — confirm whether that matters for callers.

    Args:
        request: FastAPI request

    Returns:
        True if iOS user agent detected
    """
    ua = request.headers.get("user-agent", "").lower()
    return any(marker in ua for marker in ("iphone", "ipad"))
|
||||
25
artdag/common/artdag_common/models/__init__.py
Normal file
25
artdag/common/artdag_common/models/__init__.py
Normal file
@@ -0,0 +1,25 @@
|
||||
"""
|
||||
Shared Pydantic models for Art-DAG servers.
|
||||
"""
|
||||
|
||||
from .requests import (
|
||||
PaginationParams,
|
||||
PublishRequest,
|
||||
StorageConfigRequest,
|
||||
MetadataUpdateRequest,
|
||||
)
|
||||
from .responses import (
|
||||
PaginatedResponse,
|
||||
ErrorResponse,
|
||||
SuccessResponse,
|
||||
)
|
||||
|
||||
# Public API of the models package; keep in sync with the imports above.
__all__ = [
    "PaginationParams",
    "PublishRequest",
    "StorageConfigRequest",
    "MetadataUpdateRequest",
    "PaginatedResponse",
    "ErrorResponse",
    "SuccessResponse",
]
|
||||
74
artdag/common/artdag_common/models/requests.py
Normal file
74
artdag/common/artdag_common/models/requests.py
Normal file
@@ -0,0 +1,74 @@
|
||||
"""
|
||||
Request models shared across L1 and L2 servers.
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ..constants import DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE
|
||||
|
||||
|
||||
class PaginationParams(BaseModel):
    """Common pagination parameters."""
    page: int = Field(default=1, ge=1, description="Page number (1-indexed)")
    limit: int = Field(
        default=DEFAULT_PAGE_SIZE,
        ge=1,
        le=MAX_PAGE_SIZE,
        description="Items per page"
    )

    @property
    def offset(self) -> int:
        """Calculate offset for database queries."""
        # page is 1-indexed, so page 1 maps to offset 0.
        return (self.page - 1) * self.limit
|
||||
|
||||
|
||||
class PublishRequest(BaseModel):
    """Request to publish content to L2/storage."""
    # Human-readable title; required and non-empty.
    name: str = Field(..., min_length=1, max_length=255)
    description: Optional[str] = Field(default=None, max_length=2000)
    tags: List[str] = Field(default_factory=list)
    # When None, the server picks the default storage provider.
    storage_id: Optional[str] = Field(default=None, description="Target storage provider")
|
||||
|
||||
|
||||
class MetadataUpdateRequest(BaseModel):
    """Request to update content metadata.

    All fields are optional; None means "leave unchanged".
    """
    name: Optional[str] = Field(default=None, max_length=255)
    description: Optional[str] = Field(default=None, max_length=2000)
    tags: Optional[List[str]] = Field(default=None)
    metadata: Optional[Dict[str, Any]] = Field(default=None)
|
||||
|
||||
|
||||
class StorageConfigRequest(BaseModel):
    """Request to configure a storage provider."""
    provider_type: str = Field(..., description="Provider type (pinata, web3storage, local, etc.)")
    name: str = Field(..., min_length=1, max_length=100)
    # Credentials are optional: some providers (e.g. local) need none.
    api_key: Optional[str] = Field(default=None)
    api_secret: Optional[str] = Field(default=None)
    endpoint: Optional[str] = Field(default=None)
    # Provider-specific extra settings.
    config: Optional[Dict[str, Any]] = Field(default_factory=dict)
    is_default: bool = Field(default=False)
|
||||
|
||||
|
||||
class RecipeRunRequest(BaseModel):
    """Request to run a recipe."""
    recipe_id: str = Field(..., description="Recipe content hash or ID")
    inputs: Dict[str, str] = Field(..., description="Map of input name to content hash")
    # Mutable default is safe here: pydantic copies Field defaults per model
    # instance (unlike plain Python default arguments).
    features: List[str] = Field(
        default=["beats", "energy"],
        description="Analysis features to extract"
    )
|
||||
|
||||
|
||||
class PlanRequest(BaseModel):
    """Request to generate an execution plan."""
    recipe_yaml: str = Field(..., description="Recipe YAML content")
    input_hashes: Dict[str, str] = Field(..., description="Map of input name to content hash")
    # Same default feature set as RecipeRunRequest; pydantic copies it per instance.
    features: List[str] = Field(default=["beats", "energy"])
|
||||
|
||||
|
||||
class ExecutePlanRequest(BaseModel):
    """Request to execute a generated plan."""
    plan_json: str = Field(..., description="JSON-serialized execution plan")
    run_id: Optional[str] = Field(default=None, description="Optional run ID for tracking")
|
||||
96
artdag/common/artdag_common/models/responses.py
Normal file
96
artdag/common/artdag_common/models/responses.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""
|
||||
Response models shared across L1 and L2 servers.
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any, Generic, TypeVar
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class PaginatedResponse(BaseModel, Generic[T]):
    """Generic paginated response."""
    # NOTE(review): declared Generic[T] but data is typed List[Any], so T is
    # effectively unused — items are not validated against T. Confirm intent.
    data: List[Any] = Field(default_factory=list)
    pagination: Dict[str, Any] = Field(default_factory=dict)

    @classmethod
    def create(
        cls,
        items: List[Any],
        page: int,
        limit: int,
        total: int,
    ) -> "PaginatedResponse":
        """Create a paginated response.

        Assumes ``page`` is 1-indexed; ``total_pages`` rounds up.
        """
        return cls(
            data=items,
            pagination={
                "page": page,
                "limit": limit,
                "total": total,
                "has_more": page * limit < total,
                "total_pages": (total + limit - 1) // limit,
            }
        )
|
||||
|
||||
|
||||
class ErrorResponse(BaseModel):
    """Standard error response."""
    error: str = Field(..., description="Error message")
    detail: Optional[str] = Field(default=None, description="Detailed error info")
    code: Optional[str] = Field(default=None, description="Error code")
|
||||
|
||||
|
||||
class SuccessResponse(BaseModel):
    """Standard success response."""
    success: bool = Field(default=True)
    message: Optional[str] = Field(default=None)
    # Optional payload accompanying the success acknowledgement.
    data: Optional[Dict[str, Any]] = Field(default=None)
|
||||
|
||||
|
||||
class RunStatus(BaseModel):
    """Run execution status."""
    run_id: str
    status: str = Field(..., description="pending, running, completed, failed")
    recipe: Optional[str] = None
    plan_id: Optional[str] = None
    # Populated once the run produces output.
    output_hash: Optional[str] = None
    output_ipfs_cid: Optional[str] = None
    # Step counters for progress reporting.
    total_steps: int = 0
    cached_steps: int = 0
    completed_steps: int = 0
    # Set when status is "failed".
    error: Optional[str] = None
|
||||
|
||||
|
||||
class CacheItemResponse(BaseModel):
    """Cached content item response."""
    content_hash: str
    media_type: Optional[str] = None
    size: Optional[int] = None  # bytes, when known
    name: Optional[str] = None
    description: Optional[str] = None
    tags: List[str] = Field(default_factory=list)
    ipfs_cid: Optional[str] = None
    created_at: Optional[str] = None
|
||||
|
||||
|
||||
class RecipeResponse(BaseModel):
    """Recipe response."""
    recipe_id: str
    name: str
    description: Optional[str] = None
    # Declared recipe inputs (name, type, etc.) and output names.
    inputs: List[Dict[str, Any]] = Field(default_factory=list)
    outputs: List[str] = Field(default_factory=list)
    node_count: int = 0
    created_at: Optional[str] = None
|
||||
|
||||
|
||||
class StorageProviderResponse(BaseModel):
    """Storage provider configuration response."""
    storage_id: str
    provider_type: str
    name: str
    is_default: bool = False
    is_connected: bool = False
    # Usage stats; None when the provider does not report them.
    usage_bytes: Optional[int] = None
    pin_count: int = 0
|
||||
160
artdag/common/artdag_common/rendering.py
Normal file
160
artdag/common/artdag_common/rendering.py
Normal file
@@ -0,0 +1,160 @@
|
||||
"""
|
||||
Jinja2 template rendering system for Art-DAG servers.
|
||||
|
||||
Provides a unified template environment that can load from:
|
||||
1. The shared artdag_common/templates directory
|
||||
2. App-specific template directories
|
||||
|
||||
Usage:
|
||||
from artdag_common import create_jinja_env, render
|
||||
|
||||
# In app initialization
|
||||
templates = create_jinja_env("app/templates")
|
||||
|
||||
# In route handler
|
||||
return render(templates, "runs/detail.html", request, run=run, user=user)
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from fastapi import Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
from jinja2 import Environment, ChoiceLoader, FileSystemLoader, PackageLoader, select_autoescape
|
||||
|
||||
from .constants import (
|
||||
TAILWIND_CDN,
|
||||
HTMX_CDN,
|
||||
CYTOSCAPE_CDN,
|
||||
DAGRE_CDN,
|
||||
CYTOSCAPE_DAGRE_CDN,
|
||||
TAILWIND_CONFIG,
|
||||
NODE_COLORS,
|
||||
STATUS_COLORS,
|
||||
)
|
||||
|
||||
|
||||
def create_jinja_env(*template_dirs: Union[str, Path]) -> Environment:
    """
    Create a Jinja2 environment with the shared templates and optional app-specific dirs.

    Args:
        *template_dirs: Additional template directories to search (app-specific)

    Returns:
        Configured Jinja2 Environment

    Example:
        env = create_jinja_env("/app/templates", "/app/custom")
    """
    # App-specific directories are listed first so apps can shadow the
    # shared templates; the package templates act as the fallback.
    search_chain = [
        FileSystemLoader(str(directory))
        for directory in map(Path, template_dirs)
        if directory.exists()
    ]
    search_chain.append(PackageLoader("artdag_common", "templates"))

    env = Environment(
        loader=ChoiceLoader(search_chain),
        autoescape=select_autoescape(["html", "xml"]),
        trim_blocks=True,
        lstrip_blocks=True,
    )

    # Globals (CDN URLs, color maps) available to every template.
    env.globals.update({
        "TAILWIND_CDN": TAILWIND_CDN,
        "HTMX_CDN": HTMX_CDN,
        "CYTOSCAPE_CDN": CYTOSCAPE_CDN,
        "DAGRE_CDN": DAGRE_CDN,
        "CYTOSCAPE_DAGRE_CDN": CYTOSCAPE_DAGRE_CDN,
        "TAILWIND_CONFIG": TAILWIND_CONFIG,
        "NODE_COLORS": NODE_COLORS,
        "STATUS_COLORS": STATUS_COLORS,
    })

    # Filters used by the shared templates.
    env.filters["truncate_hash"] = truncate_hash
    env.filters["format_size"] = format_size
    env.filters["status_color"] = status_color

    return env
|
||||
|
||||
|
||||
def render(
    env: Environment,
    template_name: str,
    request: Request,
    status_code: int = 200,
    **context: Any,
) -> HTMLResponse:
    """
    Render a template to an HTMLResponse.

    Args:
        env: Jinja2 environment
        template_name: Template file path (e.g., "runs/detail.html")
        request: FastAPI request object
        status_code: HTTP status code (default 200)
        **context: Template context variables

    Returns:
        HTMLResponse with rendered content
    """
    # The request object is always injected so templates can build URLs, etc.
    body = env.get_template(template_name).render(request=request, **context)
    return HTMLResponse(body, status_code=status_code)
|
||||
|
||||
|
||||
def render_fragment(
    env: Environment,
    template_name: str,
    **context: Any,
) -> str:
    """
    Render a template fragment to a string (for HTMX partial updates).

    Unlike :func:`render`, no request object is injected and no HTTP
    response is built — callers embed the returned HTML themselves.

    Args:
        env: Jinja2 environment
        template_name: Template file path
        **context: Template context variables

    Returns:
        Rendered HTML string
    """
    return env.get_template(template_name).render(**context)
|
||||
|
||||
|
||||
# Custom Jinja2 filters
|
||||
|
||||
def truncate_hash(value: str, length: int = 16) -> str:
    """Truncate a hash to specified length with ellipsis."""
    # Falsy input (empty string, None) renders as the empty string.
    if not value:
        return ""
    return value if len(value) <= length else f"{value[:length]}..."
|
||||
|
||||
|
||||
def format_size(size_bytes: Optional[int]) -> str:
    """Format file size in human-readable form (B / KB / MB / GB)."""
    if size_bytes is None:
        return "Unknown"
    kib = 1024
    mib = kib * 1024
    gib = mib * 1024
    if size_bytes < kib:
        return f"{size_bytes} B"
    if size_bytes < mib:
        return f"{size_bytes / kib:.1f} KB"
    if size_bytes < gib:
        return f"{size_bytes / mib:.1f} MB"
    return f"{size_bytes / gib:.1f} GB"
|
||||
|
||||
|
||||
def status_color(status: str) -> str:
    """Get Tailwind CSS class for a status."""
    # Unknown statuses fall back to the "pending" styling.
    return STATUS_COLORS.get(status, STATUS_COLORS["pending"])
|
||||
96
artdag/common/artdag_common/templates/_base.html
Normal file
96
artdag/common/artdag_common/templates/_base.html
Normal file
@@ -0,0 +1,96 @@
|
||||
{# Shared base layout for all Art-DAG app pages. Apps extend this template
   and override the blocks (title, head, header, sub_nav, content, footer,
   scripts). Jinja comments like this one are not emitted to the browser. #}
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>{% block title %}Art-DAG{% endblock %}</title>

    <!-- Tailwind CSS (same CDN as coop) -->
    <script src="https://cdn.tailwindcss.com?plugins=typography"></script>
    <script>
        tailwind.config = {
            theme: {
                extend: {
                    colors: {
                        dark: {
                            600: '#374151',
                            700: '#1f2937',
                            800: '#111827',
                            900: '#030712',
                        }
                    }
                }
            }
        }
    </script>
    <!-- HTMX -->
    <script src="https://unpkg.com/htmx.org@2.0.8"></script>
    <!-- Hyperscript (for nav-tree scrolling arrows) -->
    <script src="https://unpkg.com/hyperscript.org@0.9.12"></script>
    <!-- Font Awesome (for auth-menu + nav icons) -->
    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.1/css/all.min.css">

    {% block head %}{% endblock %}

    <style>
        /* HTMX loading indicator */
        .htmx-indicator { display: none; }
        .htmx-request .htmx-indicator { display: inline-flex; }
        img { max-width: 100%; height: auto; }
        .no-scrollbar::-webkit-scrollbar { display: none; }
        .no-scrollbar { -ms-overflow-style: none; scrollbar-width: none; }
        .scrollbar-hide::-webkit-scrollbar { display: none; }
        .scrollbar-hide { -ms-overflow-style: none; scrollbar-width: none; }
    </style>
    <script>
        if (matchMedia('(hover: hover) and (pointer: fine)').matches) {
            document.documentElement.classList.add('hover-capable');
        }
    </script>
</head>
<body class="bg-stone-50 text-stone-900 min-h-screen">
<div class="max-w-screen-2xl mx-auto py-1 px-1">
    {% block header %}
    {# Coop-style header: sky banner with title, nav-tree, auth-menu, cart-mini #}
    <div class="w-full">
        <div class="flex flex-col items-center md:flex-row justify-center md:justify-between w-full p-1 bg-sky-500">
            <div class="w-full flex flex-row items-top">
                {# Cart mini #}
                {% block cart_mini %}{% endblock %}

                {# Site title #}
                <div class="font-bold text-5xl flex-1">
                    <a href="/" class="flex justify-center md:justify-start">
                        <h1>{% block brand %}Art-DAG{% endblock %}</h1>
                    </a>
                </div>

                {# Desktop nav: nav-tree + auth-menu #}
                <nav class="hidden md:flex gap-4 text-sm ml-2 justify-end items-center flex-0">
                    {% block nav_tree %}{% endblock %}
                    {% block auth_menu %}{% endblock %}
                </nav>
            </div>
        </div>
        {# Mobile auth #}
        <div class="block md:hidden text-md font-bold">
            {% block auth_menu_mobile %}{% endblock %}
        </div>
    </div>
    {% endblock %}

    {# App-specific sub-nav (Runs, Recipes, Effects, etc.) #}
    {% block sub_nav %}{% endblock %}

</div>{# close max-w-screen-2xl wrapper #}

<main class="bg-dark-800 text-gray-100 min-h-screen">
    <div class="max-w-screen-2xl mx-auto px-4 py-4">
        {% block content %}{% endblock %}
    </div>
</main>

{% block footer %}{% endblock %}
{% block scripts %}{% endblock %}
</body>
</html>
|
||||
64
artdag/common/artdag_common/templates/components/badge.html
Normal file
64
artdag/common/artdag_common/templates/components/badge.html
Normal file
@@ -0,0 +1,64 @@
|
||||
{#
    Badge component for status and type indicators.

    Usage:
        {% from "components/badge.html" import badge, status_badge, type_badge %}

        {{ badge("Active", "green") }}
        {{ status_badge("completed") }}
        {{ type_badge("EFFECT") }}

    Also exported: role_badge("input"|"output"|"intermediate").
#}

{# Generic pill badge; color is a Tailwind palette name (e.g. "green"). #}
{% macro badge(text, color="gray", class="") %}
<span class="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-{{ color }}-600/20 text-{{ color }}-400 {{ class }}">
    {{ text }}
</span>
{% endmacro %}

{# Badge colored by run/step status; "running" gets a spinner icon. #}
{% macro status_badge(status, class="") %}
{% set colors = {
    "completed": "green",
    "cached": "blue",
    "running": "yellow",
    "pending": "gray",
    "failed": "red",
    "active": "green",
    "inactive": "gray",
} %}
{% set color = colors.get(status, "gray") %}
<span class="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-{{ color }}-600/20 text-{{ color }}-400 {{ class }}">
    {% if status == "running" %}
    <svg class="animate-spin -ml-0.5 mr-1.5 h-3 w-3" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
        <circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"></circle>
        <path class="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
    </svg>
    {% endif %}
    {{ status | capitalize }}
</span>
{% endmacro %}

{# Badge colored by DAG node type (SOURCE, EFFECT, OUTPUT, ...). #}
{% macro type_badge(node_type, class="") %}
{% set colors = {
    "SOURCE": "blue",
    "EFFECT": "green",
    "OUTPUT": "purple",
    "ANALYSIS": "amber",
    "_LIST": "indigo",
} %}
{% set color = colors.get(node_type, "gray") %}
<span class="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-{{ color }}-600/20 text-{{ color }}-400 {{ class }}">
    {{ node_type }}
</span>
{% endmacro %}

{# Badge colored by content role within a plan. #}
{% macro role_badge(role, class="") %}
{% set colors = {
    "input": "blue",
    "output": "purple",
    "intermediate": "gray",
} %}
{% set color = colors.get(role, "gray") %}
<span class="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-{{ color }}-600/20 text-{{ color }}-400 {{ class }}">
    {{ role | capitalize }}
</span>
{% endmacro %}
|
||||
45
artdag/common/artdag_common/templates/components/card.html
Normal file
45
artdag/common/artdag_common/templates/components/card.html
Normal file
@@ -0,0 +1,45 @@
|
||||
{#
|
||||
Card component for displaying information.
|
||||
|
||||
Usage:
|
||||
{% include "components/card.html" with title="Status", content="Active", class="col-span-2" %}
|
||||
|
||||
Or as a block:
|
||||
{% call card(title="Details") %}
|
||||
<p>Card content here</p>
|
||||
{% endcall %}
|
||||
#}
|
||||
|
||||
{# Generic card: optional title plus the caller's {% call %} block as body. #}
{% macro card(title=None, class="") %}
<div class="bg-dark-600 rounded-lg p-4 {{ class }}">
    {% if title %}
    <h3 class="text-sm font-medium text-gray-400 mb-2">{{ title }}</h3>
    {% endif %}
    <div class="text-white">
        {# Guard allows invoking the macro without a {% call %} block. #}
        {{ caller() if caller else "" }}
    </div>
</div>
{% endmacro %}
|
||||
|
||||
{# Single-statistic card: large colored value above a small gray label. #}
{% macro stat_card(title, value, color="white", class="") %}
<div class="bg-dark-600 rounded-lg p-4 text-center {{ class }}">
    <div class="text-2xl font-bold text-{{ color }}-400">{{ value }}</div>
    <div class="text-sm text-gray-400">{{ title }}</div>
</div>
{% endmacro %}
|
||||
|
||||
{# Definition-list card; `items` is an iterable of (label, value) pairs. #}
{% macro info_card(title, items, class="") %}
<div class="bg-dark-600 rounded-lg p-4 {{ class }}">
    {% if title %}
    <h3 class="text-sm font-medium text-gray-400 mb-3">{{ title }}</h3>
    {% endif %}
    <dl class="space-y-2">
        {% for label, value in items %}
        <div class="flex justify-between">
            <dt class="text-gray-400">{{ label }}</dt>
            <dd class="text-white font-mono text-sm">{{ value }}</dd>
        </div>
        {% endfor %}
    </dl>
</div>
{% endmacro %}
|
||||
176
artdag/common/artdag_common/templates/components/dag.html
Normal file
176
artdag/common/artdag_common/templates/components/dag.html
Normal file
@@ -0,0 +1,176 @@
|
||||
{#
|
||||
Cytoscape.js DAG visualization component.
|
||||
|
||||
Usage:
|
||||
{% from "components/dag.html" import dag_container, dag_scripts, dag_legend %}
|
||||
|
||||
{# In head block #}
|
||||
{{ dag_scripts() }}
|
||||
|
||||
{# In content #}
|
||||
{{ dag_container(id="plan-dag", height="400px") }}
|
||||
{{ dag_legend() }}
|
||||
|
||||
{# In scripts block #}
|
||||
<script>
|
||||
initDag('plan-dag', {{ nodes | tojson }}, {{ edges | tojson }});
|
||||
</script>
|
||||
#}
|
||||
|
||||
{# Emits the Cytoscape.js <script> tags plus initDag/showNodeDetails/updateNodeStatus
   helpers. CDN URLs and NODE_COLORS are template globals injected by the server. #}
{% macro dag_scripts() %}
<script src="{{ CYTOSCAPE_CDN }}"></script>
<script src="{{ DAGRE_CDN }}"></script>
<script src="{{ CYTOSCAPE_DAGRE_CDN }}"></script>
<script>
// Global Cytoscape instance for WebSocket updates
window.artdagCy = null;

// Build the graph in #containerId from pre-serialized node/edge arrays and
// return (and stash globally) the Cytoscape instance.
function initDag(containerId, nodes, edges) {
    const nodeColors = {{ NODE_COLORS | tojson }};

    window.artdagCy = cytoscape({
        container: document.getElementById(containerId),
        elements: {
            nodes: nodes,
            edges: edges
        },
        style: [
            {
                selector: 'node',
                style: {
                    'label': 'data(label)',
                    'text-valign': 'center',
                    'text-halign': 'center',
                    // Color each node by its nodeType, falling back to the
                    // 'default' entry of NODE_COLORS.
                    'background-color': function(ele) {
                        return nodeColors[ele.data('nodeType')] || nodeColors['default'];
                    },
                    'color': '#fff',
                    'font-size': '10px',
                    'width': 80,
                    'height': 40,
                    'shape': 'round-rectangle',
                    'text-wrap': 'wrap',
                    'text-max-width': '70px',
                }
            },
            {
                // Solid green border marks finished (cached/completed) nodes.
                selector: 'node[status="cached"], node[status="completed"]',
                style: {
                    'border-width': 3,
                    'border-color': '#22c55e'
                }
            },
            {
                // Dashed yellow border marks in-flight nodes.
                selector: 'node[status="running"]',
                style: {
                    'border-width': 3,
                    'border-color': '#eab308',
                    'border-style': 'dashed'
                }
            },
            {
                selector: 'node:selected',
                style: {
                    'border-width': 3,
                    'border-color': '#3b82f6'
                }
            },
            {
                selector: 'edge',
                style: {
                    'width': 2,
                    'line-color': '#6b7280',
                    'target-arrow-color': '#6b7280',
                    'target-arrow-shape': 'triangle',
                    'curve-style': 'bezier'
                }
            }
        ],
        // Top-to-bottom dagre layout for DAG rank ordering.
        layout: {
            name: 'dagre',
            rankDir: 'TB',
            nodeSep: 50,
            rankSep: 80,
            padding: 20
        },
        userZoomingEnabled: true,
        userPanningEnabled: true,
        boxSelectionEnabled: false
    });

    // Click handler for node details
    window.artdagCy.on('tap', 'node', function(evt) {
        const node = evt.target;
        const data = node.data();
        showNodeDetails(data);
    });

    return window.artdagCy;
}

// Fill and reveal the #node-details panel (rendered by dag_container) for a
// tapped node. No-ops when the panel is absent from the page.
function showNodeDetails(data) {
    const panel = document.getElementById('node-details');
    if (!panel) return;

    panel.innerHTML = `
        <h4 class="font-medium text-white mb-2">${data.label || data.id}</h4>
        <dl class="space-y-1 text-sm">
            <div class="flex justify-between">
                <dt class="text-gray-400">Type</dt>
                <dd class="text-white">${data.nodeType || 'Unknown'}</dd>
            </div>
            <div class="flex justify-between">
                <dt class="text-gray-400">Status</dt>
                <dd class="text-white">${data.status || 'pending'}</dd>
            </div>
            ${data.cacheId ? `
            <div class="flex justify-between">
                <dt class="text-gray-400">Cache ID</dt>
                <dd class="text-white font-mono text-xs">${data.cacheId.substring(0, 16)}...</dd>
            </div>
            ` : ''}
            ${data.level !== undefined ? `
            <div class="flex justify-between">
                <dt class="text-gray-400">Level</dt>
                <dd class="text-white">${data.level}</dd>
            </div>
            ` : ''}
        </dl>
    `;
    panel.classList.remove('hidden');
}

// Future WebSocket support: update node status in real-time
function updateNodeStatus(stepId, status, cacheId) {
    if (!window.artdagCy) return;
    const node = window.artdagCy.getElementById(stepId);
    if (node && node.length > 0) {
        node.data('status', status);
        if (cacheId) {
            node.data('cacheId', cacheId);
        }
    }
}
</script>
{% endmacro %}
|
||||
|
||||
{# DAG canvas plus the (initially hidden) #node-details panel that
   showNodeDetails() from dag_scripts() fills in on node tap. #}
{% macro dag_container(id="dag-container", height="400px", class="") %}
<div id="{{ id }}" class="w-full bg-dark-700 rounded-lg {{ class }}" style="height: {{ height }};"></div>
<div id="node-details" class="hidden mt-4 p-4 bg-dark-600 rounded-lg"></div>
{% endmacro %}
|
||||
|
||||
{# Color legend for the DAG view; swatches come from the NODE_COLORS global.
   A fixed "Cached" entry mirrors the green border style in dag_scripts(). #}
{% macro dag_legend(node_types=None) %}
{% set types = node_types or ["SOURCE", "EFFECT", "_LIST"] %}
<div class="flex gap-4 text-sm flex-wrap mt-4">
    {% for type in types %}
    <span class="flex items-center gap-2">
        <span class="w-4 h-4 rounded" style="background-color: {{ NODE_COLORS.get(type, NODE_COLORS.default) }}"></span>
        {{ type }}
    </span>
    {% endfor %}
    <span class="flex items-center gap-2">
        <span class="w-4 h-4 rounded border-2 border-green-500 bg-dark-600"></span>
        Cached
    </span>
</div>
{% endmacro %}
|
||||
@@ -0,0 +1,98 @@
|
||||
{#
|
||||
Media preview component for videos, images, and audio.
|
||||
|
||||
Usage:
|
||||
{% from "components/media_preview.html" import media_preview, video_player, image_preview, audio_player %}
|
||||
|
||||
{{ media_preview(content_hash, media_type, title="Preview") }}
|
||||
{{ video_player(src="/cache/abc123/mp4", poster="/cache/abc123/thumb") }}
|
||||
#}
|
||||
|
||||
{# Media preview card: dispatches to the video/image/audio player macros by
   media_type, with a file-icon fallback and an optional download footer.
   URLs follow the /cache/<hash>/<variant> route convention. #}
{% macro media_preview(content_hash, media_type, title=None, class="", show_download=True) %}
<div class="bg-dark-600 rounded-lg overflow-hidden {{ class }}">
    {% if title %}
    <div class="px-4 py-2 border-b border-dark-500">
        <h3 class="text-sm font-medium text-gray-400">{{ title }}</h3>
    </div>
    {% endif %}

    <div class="aspect-video bg-dark-700 flex items-center justify-center">
        {% if media_type == "video" %}
        {{ video_player("/cache/" + content_hash + "/mp4") }}
        {% elif media_type == "image" %}
        {{ image_preview("/cache/" + content_hash + "/raw") }}
        {% elif media_type == "audio" %}
        {{ audio_player("/cache/" + content_hash + "/raw") }}
        {% else %}
        <div class="text-gray-400 text-center p-4">
            <svg class="w-12 h-12 mx-auto mb-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"/>
            </svg>
            <p>Preview not available</p>
        </div>
        {% endif %}
    </div>

    {% if show_download %}
    <div class="px-4 py-2 border-t border-dark-500">
        <a href="/cache/{{ content_hash }}/raw" download
           class="text-blue-400 hover:text-blue-300 text-sm">
            Download original
        </a>
    </div>
    {% endif %}
</div>
{% endmacro %}
|
||||
|
||||
{# HTML5 video element; boolean attributes are emitted only when truthy.
   Source is always declared as video/mp4. #}
{% macro video_player(src, poster=None, autoplay=False, muted=True, loop=False, class="") %}
<video
    class="w-full h-full object-contain {{ class }}"
    controls
    playsinline
    {% if poster %}poster="{{ poster }}"{% endif %}
    {% if autoplay %}autoplay{% endif %}
    {% if muted %}muted{% endif %}
    {% if loop %}loop{% endif %}
>
    <source src="{{ src }}" type="video/mp4">
    Your browser does not support the video tag.
</video>
{% endmacro %}
|
||||
|
||||
{# Lazy-loaded image that letterboxes inside its container. #}
{% macro image_preview(src, alt="", class="") %}
<img
    src="{{ src }}"
    alt="{{ alt }}"
    class="w-full h-full object-contain {{ class }}"
    loading="lazy"
>
{% endmacro %}
|
||||
|
||||
{# Full-width HTML5 audio element; no type attribute, browser sniffs format. #}
{% macro audio_player(src, class="") %}
<div class="w-full px-4 {{ class }}">
    <audio controls class="w-full">
        <source src="{{ src }}">
        Your browser does not support the audio element.
    </audio>
</div>
{% endmacro %}
|
||||
|
||||
{# Square thumbnail: real image for images; play/music/file glyphs for
   video, audio, and unknown types respectively. #}
{% macro thumbnail(content_hash, media_type, size="w-24 h-24", class="") %}
<div class="bg-dark-700 rounded {{ size }} flex items-center justify-center overflow-hidden {{ class }}">
    {% if media_type == "image" %}
    <img src="/cache/{{ content_hash }}/raw" alt="" class="w-full h-full object-cover" loading="lazy">
    {% elif media_type == "video" %}
    <svg class="w-8 h-8 text-gray-400" fill="currentColor" viewBox="0 0 24 24">
        <path d="M8 5v14l11-7z"/>
    </svg>
    {% elif media_type == "audio" %}
    <svg class="w-8 h-8 text-gray-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
        <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 19V6l12-3v13M9 19c0 1.105-1.343 2-3 2s-3-.895-3-2 1.343-2 3-2 3 .895 3 2zm12-3c0 1.105-1.343 2-3 2s-3-.895-3-2 1.343-2 3-2 3 .895 3 2zM9 10l12-3"/>
    </svg>
    {% else %}
    <svg class="w-8 h-8 text-gray-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
        <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"/>
    </svg>
    {% endif %}
</div>
{% endmacro %}
|
||||
@@ -0,0 +1,82 @@
|
||||
{#
|
||||
Pagination component with HTMX infinite scroll support.
|
||||
|
||||
Usage:
|
||||
{% from "components/pagination.html" import infinite_scroll_trigger, page_links %}
|
||||
|
||||
{# Infinite scroll (HTMX) #}
|
||||
{{ infinite_scroll_trigger(url="/items?page=2", colspan=3, has_more=True) }}
|
||||
|
||||
{# Traditional pagination #}
|
||||
{{ page_links(current_page=1, total_pages=5, base_url="/items") }}
|
||||
#}
|
||||
|
||||
{# Sentinel table row for HTMX infinite scroll: when revealed in the viewport
   it GETs the next page and swaps the response in after itself. Renders
   nothing when has_more is false. #}
{% macro infinite_scroll_trigger(url, colspan=1, has_more=True, target=None) %}
{% if has_more %}
<tr hx-get="{{ url }}"
    hx-trigger="revealed"
    hx-swap="afterend"
    {% if target %}hx-target="{{ target }}"{% endif %}
    class="htmx-indicator-row">
    <td colspan="{{ colspan }}" class="text-center py-4">
        <span class="text-gray-400 htmx-indicator">
            <svg class="animate-spin h-5 w-5 inline mr-2" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
                <circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"></circle>
                <path class="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
            </svg>
            Loading more...
        </span>
    </td>
</tr>
{% endif %}
{% endmacro %}
|
||||
|
||||
{# Traditional pagination bar: prev/next plus a windowed page list showing
   the first page, the last page, current ± 2, and "..." gaps exactly at
   current ± 3. Disabled ends render as non-link spans. #}
{% macro page_links(current_page, total_pages, base_url, class="") %}
<nav class="flex items-center justify-center space-x-2 {{ class }}">
    {# Previous button #}
    {% if current_page > 1 %}
    <a href="{{ base_url }}?page={{ current_page - 1 }}"
       class="px-3 py-2 rounded-lg bg-dark-600 text-gray-300 hover:bg-dark-500 transition-colors">
        ← Previous
    </a>
    {% else %}
    <span class="px-3 py-2 rounded-lg bg-dark-700 text-gray-500 cursor-not-allowed">
        ← Previous
    </span>
    {% endif %}

    {# Page numbers #}
    <div class="flex items-center space-x-1">
        {% for page in range(1, total_pages + 1) %}
            {% if page == current_page %}
            <span class="px-3 py-2 rounded-lg bg-blue-600 text-white">{{ page }}</span>
            {% elif page == 1 or page == total_pages or (page >= current_page - 2 and page <= current_page + 2) %}
            <a href="{{ base_url }}?page={{ page }}"
               class="px-3 py-2 rounded-lg bg-dark-600 text-gray-300 hover:bg-dark-500 transition-colors">
                {{ page }}
            </a>
            {% elif page == current_page - 3 or page == current_page + 3 %}
            <span class="px-2 text-gray-500">...</span>
            {% endif %}
        {% endfor %}
    </div>

    {# Next button #}
    {% if current_page < total_pages %}
    <a href="{{ base_url }}?page={{ current_page + 1 }}"
       class="px-3 py-2 rounded-lg bg-dark-600 text-gray-300 hover:bg-dark-500 transition-colors">
        Next →
    </a>
    {% else %}
    <span class="px-3 py-2 rounded-lg bg-dark-700 text-gray-500 cursor-not-allowed">
        Next →
    </span>
    {% endif %}
</nav>
{% endmacro %}
|
||||
|
||||
{# "Showing X-Y of N" summary; the min filter clamps the end index on the
   last page. NOTE(review): with total == 0 this renders "Showing 1-0 of 0"
   — confirm callers hide it for empty result sets. #}
{% macro page_info(page, limit, total) %}
<div class="text-sm text-gray-400">
    Showing {{ (page - 1) * limit + 1 }}-{{ [page * limit, total] | min }} of {{ total }}
</div>
{% endmacro %}
|
||||
51
artdag/common/artdag_common/templates/components/table.html
Normal file
51
artdag/common/artdag_common/templates/components/table.html
Normal file
@@ -0,0 +1,51 @@
|
||||
{#
|
||||
Table component with dark theme styling.
|
||||
|
||||
Usage:
|
||||
{% from "components/table.html" import table, table_row %}
|
||||
|
||||
{% call table(columns=["Name", "Status", "Actions"]) %}
|
||||
{% for item in items %}
|
||||
{{ table_row([item.name, item.status, actions_html]) }}
|
||||
{% endfor %}
|
||||
{% endcall %}
|
||||
#}
|
||||
|
||||
{# Dark-themed table shell: renders the header from `columns` and the
   caller's {% call %} block as the body rows. Must be used with call. #}
{% macro table(columns, class="", id="") %}
<div class="overflow-x-auto {{ class }}" {% if id %}id="{{ id }}"{% endif %}>
    <table class="w-full text-sm">
        <thead class="text-gray-400 border-b border-dark-600">
            <tr>
                {% for col in columns %}
                <th class="text-left py-3 px-4 font-medium">{{ col }}</th>
                {% endfor %}
            </tr>
        </thead>
        <tbody class="divide-y divide-dark-600">
            {{ caller() }}
        </tbody>
    </table>
</div>
{% endmacro %}
|
||||
|
||||
{# One table row; when href is given the first cell becomes a link.
   NOTE(review): non-link cells are rendered with |safe, so callers must
   escape untrusted data before passing it in — confirm call sites. #}
{% macro table_row(cells, class="", href=None) %}
<tr class="hover:bg-dark-600/50 transition-colors {{ class }}">
    {% for cell in cells %}
    <td class="py-3 px-4">
        {% if href and loop.first %}
        <a href="{{ href }}" class="text-blue-400 hover:text-blue-300">{{ cell }}</a>
        {% else %}
        {{ cell | safe }}
        {% endif %}
    </td>
    {% endfor %}
</tr>
{% endmacro %}
|
||||
|
||||
{# Placeholder row spanning all columns for empty result sets. #}
{% macro empty_row(colspan, message="No items found") %}
<tr>
    <td colspan="{{ colspan }}" class="py-8 text-center text-gray-400">
        {{ message }}
    </td>
</tr>
{% endmacro %}
|
||||
19
artdag/common/artdag_common/utils/__init__.py
Normal file
19
artdag/common/artdag_common/utils/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""
|
||||
Utility functions shared across Art-DAG servers.
|
||||
"""
|
||||
|
||||
from .pagination import paginate, get_pagination_params
|
||||
from .media import detect_media_type, get_media_extension, is_streamable
|
||||
from .formatting import format_date, format_size, truncate_hash, format_duration
|
||||
|
||||
__all__ = [
|
||||
"paginate",
|
||||
"get_pagination_params",
|
||||
"detect_media_type",
|
||||
"get_media_extension",
|
||||
"is_streamable",
|
||||
"format_date",
|
||||
"format_size",
|
||||
"truncate_hash",
|
||||
"format_duration",
|
||||
]
|
||||
165
artdag/common/artdag_common/utils/formatting.py
Normal file
165
artdag/common/artdag_common/utils/formatting.py
Normal file
@@ -0,0 +1,165 @@
|
||||
"""
|
||||
Formatting utilities for display.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional, Union
|
||||
|
||||
|
||||
def format_date(
    value: Optional[Union[str, datetime]],
    length: int = 10,
    include_time: bool = False,
) -> str:
    """Format a date or datetime for display.

    Args:
        value: ISO-format date string or datetime object.
        length: Truncation length for plain (non-ISO-datetime) strings;
            the default 10 keeps just the YYYY-MM-DD portion.
        include_time: Whether to append the HH:MM portion.

    Returns:
        Formatted date string; empty string for None.
    """
    if value is None:
        return ""

    if isinstance(value, str):
        # Strings without a "T" separator are treated as plain dates and
        # truncated as-is. NOTE(review): include_time has no effect on that
        # path, so a "YYYY-MM-DD HH:MM" input loses its time — confirm intent.
        if "T" not in value:
            return value[:length]
        try:
            dt = datetime.fromisoformat(value.replace("Z", "+00:00"))
        except ValueError:
            # Unparseable ISO-looking string: fall back to truncation.
            return value[:length]
    else:
        dt = value

    fmt = "%Y-%m-%d %H:%M" if include_time else "%Y-%m-%d"
    return dt.strftime(fmt)
||||
|
||||
|
||||
def format_size(size_bytes: Optional[int]) -> str:
    """Render a byte count as a human-readable size string.

    Args:
        size_bytes: Size in bytes; None or negative yields "Unknown".

    Returns:
        Human-readable size, e.g. "512 B", "1.5 MB"; bytes are shown as
        whole numbers, larger units with one decimal place.
    """
    if size_bytes is None or size_bytes < 0:
        return "Unknown"
    if size_bytes == 0:
        return "0 B"

    magnitude = float(size_bytes)
    # Walk up the units, dividing by 1024 each step; anything past GB
    # stays in TB (the final bucket, uncapped).
    for unit in ("B", "KB", "MB", "GB"):
        if magnitude < 1024:
            break
        magnitude /= 1024
    else:
        unit = "TB"

    if unit == "B":
        return f"{int(magnitude)} {unit}"
    return f"{magnitude:.1f} {unit}"
|
||||
|
||||
|
||||
def truncate_hash(value: str, length: int = 16, suffix: str = "...") -> str:
    """Shorten a hash or other long string, appending *suffix* when cut.

    Args:
        value: String to shorten; falsy values yield "".
        length: Maximum number of characters kept before the suffix.
        suffix: Marker appended only when truncation occurred.

    Returns:
        The original string when short enough, otherwise the prefix plus suffix.
    """
    if not value:
        return ""
    return value if len(value) <= length else value[:length] + suffix
|
||||
|
||||
|
||||
def format_duration(seconds: Optional[float]) -> str:
    """Render a duration in seconds as a compact human-readable string.

    Args:
        seconds: Duration in seconds; None or negative yields "Unknown".

    Returns:
        "Nms" under one second, "N.Ns" under a minute, then "Nm[ Ns]"
        and "Nh[ Nm]" with zero remainders omitted.
    """
    if seconds is None or seconds < 0:
        return "Unknown"

    if seconds < 1:
        return f"{int(seconds * 1000)}ms"
    if seconds < 60:
        return f"{seconds:.1f}s"

    # From a minute up, work with whole seconds only.
    total_minutes, leftover_seconds = divmod(int(seconds), 60)

    if total_minutes < 60:
        if leftover_seconds:
            return f"{total_minutes}m {leftover_seconds}s"
        return f"{total_minutes}m"

    hours, leftover_minutes = divmod(total_minutes, 60)
    if leftover_minutes:
        return f"{hours}h {leftover_minutes}m"
    return f"{hours}h"
|
||||
|
||||
|
||||
def format_count(count: int) -> str:
    """Abbreviate a count for display ("1.2K", "3.5M", "1.0B").

    Args:
        count: Number to format; values under 1000 are returned verbatim.

    Returns:
        Abbreviated string with one decimal place for K/M/B magnitudes.
    """
    # Check the largest magnitude first so each value gets its tightest label.
    for threshold, label in ((1_000_000_000, "B"), (1_000_000, "M"), (1_000, "K")):
        if count >= threshold:
            return f"{count / threshold:.1f}{label}"
    return str(count)
|
||||
|
||||
|
||||
def format_percentage(value: float, decimals: int = 1) -> str:
    """Format a percentage value for display.

    Args:
        value: Percentage, on either the 0-1 or 0-100 scale.
        decimals: Number of decimal places; 0 truncates to an integer.

    Returns:
        Formatted percentage string ending in "%".
    """
    # Heuristic: anything <= 1 is assumed to be a fraction and scaled by 100.
    # NOTE(review): this makes an exact 1 mean 100%, so a literal "1%"
    # cannot be expressed on the 0-100 scale — confirm callers expect this.
    scaled = value * 100 if value <= 1 else value

    if decimals == 0:
        return f"{int(scaled)}%"
    return f"{scaled:.{decimals}f}%"
|
||||
166
artdag/common/artdag_common/utils/media.py
Normal file
166
artdag/common/artdag_common/utils/media.py
Normal file
@@ -0,0 +1,166 @@
|
||||
"""
|
||||
Media type detection and handling utilities.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
import mimetypes
|
||||
|
||||
# Warm up the stdlib mimetypes database once at import time.
mimetypes.init()

# MIME types grouped by media category (used for exact-match fallback).
VIDEO_TYPES = {"video/mp4", "video/webm", "video/quicktime", "video/x-msvideo", "video/avi"}
IMAGE_TYPES = {"image/jpeg", "image/png", "image/gif", "image/webp", "image/svg+xml"}
AUDIO_TYPES = {"audio/mpeg", "audio/wav", "audio/ogg", "audio/flac", "audio/aac", "audio/mp3"}

# Fast path: direct lowercase-extension -> category lookup.
EXTENSION_TO_CATEGORY = {
    # Video
    ".mp4": "video",
    ".webm": "video",
    ".mov": "video",
    ".avi": "video",
    ".mkv": "video",
    # Image
    ".jpg": "image",
    ".jpeg": "image",
    ".png": "image",
    ".gif": "image",
    ".webp": "image",
    ".svg": "image",
    # Audio
    ".mp3": "audio",
    ".wav": "audio",
    ".ogg": "audio",
    ".flac": "audio",
    ".aac": "audio",
    ".m4a": "audio",
}


def detect_media_type(path: Path) -> str:
    """Classify a file into a broad media category.

    Args:
        path: Path to the file (None yields "unknown").

    Returns:
        One of "video", "image", "audio", or "unknown".
    """
    if not path:
        return "unknown"

    # Extension lookup is cheapest and covers the common cases.
    category = EXTENSION_TO_CATEGORY.get(path.suffix.lower())
    if category is not None:
        return category

    # Fall back to the mimetypes guess, accepting either a known exact
    # MIME type or anything in the matching top-level family.
    guessed, _ = mimetypes.guess_type(str(path))
    if guessed:
        families = (
            ("video", VIDEO_TYPES),
            ("image", IMAGE_TYPES),
            ("audio", AUDIO_TYPES),
        )
        for family, known in families:
            if guessed in known or guessed.startswith(family + "/"):
                return family

    return "unknown"
|
||||
|
||||
|
||||
def get_mime_type(path: Path) -> str:
    """Guess a file's MIME type from its name.

    Args:
        path: Path to the file.

    Returns:
        Guessed MIME type, or "application/octet-stream" when unrecognized.
    """
    guessed, _ = mimetypes.guess_type(str(path))
    return guessed or "application/octet-stream"
|
||||
|
||||
|
||||
def get_media_extension(media_type: str) -> str:
    """Map a media category or MIME type to a typical file extension.

    Args:
        media_type: Category name ("video"/"image"/"audio") or a MIME type.

    Returns:
        Extension including the dot (e.g. ".mp4"), or "" when unknown.
    """
    # Category names map to an opinionated default container.
    category_defaults = {"video": ".mp4", "image": ".png", "audio": ".mp3"}
    if media_type in category_defaults:
        return category_defaults[media_type]

    # Anything else is interpreted as a MIME type.
    return mimetypes.guess_extension(media_type) or ""
|
||||
|
||||
|
||||
def is_streamable(path: Path) -> bool:
    """Report whether a file's media category can be streamed.

    Args:
        path: Path to the file.

    Returns:
        True for video and audio files, False otherwise.
    """
    return detect_media_type(path) in ("video", "audio")
|
||||
|
||||
|
||||
def needs_conversion(path: Path, target_format: str = "mp4") -> bool:
    """Decide whether a video file must be converted to the target container.

    Args:
        path: Path to the file.
        target_format: Desired container format, without the dot.

    Returns:
        True only for video files whose extension differs from the target;
        non-video files never need conversion.
    """
    if detect_media_type(path) != "video":
        return False

    # Compare the bare (dotless, lowercase) extension to the target.
    current_format = path.suffix.lower().lstrip(".")
    return current_format != target_format
|
||||
|
||||
|
||||
def get_video_src(
    content_hash: str,
    original_path: Optional[Path] = None,
    is_ios: bool = False,
) -> str:
    """Pick the video source URL for a cached asset.

    iOS clients always get the MP4 variant; other clients get the raw
    original when it is already in a browser-native container.

    Args:
        content_hash: Content hash identifying the cached video.
        original_path: Optional path of the original file.
        is_ios: Whether the requesting client is iOS.

    Returns:
        URL path of the form "/cache/<hash>/mp4" or "/cache/<hash>/raw".
    """
    # iOS playback is most reliable with the MP4 transcode.
    if is_ios:
        return f"/cache/{content_hash}/mp4"

    browser_native = (".mp4", ".webm")
    if original_path and original_path.suffix.lower() in browser_native:
        return f"/cache/{content_hash}/raw"

    return f"/cache/{content_hash}/mp4"
||||
85
artdag/common/artdag_common/utils/pagination.py
Normal file
85
artdag/common/artdag_common/utils/pagination.py
Normal file
@@ -0,0 +1,85 @@
|
||||
"""
|
||||
Pagination utilities.
|
||||
"""
|
||||
|
||||
from typing import List, Any, Tuple, Optional
|
||||
|
||||
from fastapi import Request
|
||||
|
||||
from ..constants import DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE
|
||||
|
||||
|
||||
def get_pagination_params(request: Request) -> Tuple[int, int]:
    """Read and sanitize pagination parameters from the query string.

    Args:
        request: Incoming FastAPI request.

    Returns:
        Tuple of (page, limit): page is at least 1; limit is clamped to
        [1, MAX_PAGE_SIZE]. Non-numeric values fall back to the defaults.
    """
    params = request.query_params

    try:
        page = max(1, int(params.get("page", 1)))
    except ValueError:
        page = 1

    try:
        limit = max(1, min(int(params.get("limit", DEFAULT_PAGE_SIZE)), MAX_PAGE_SIZE))
    except ValueError:
        limit = DEFAULT_PAGE_SIZE

    return page, limit
|
||||
|
||||
|
||||
def paginate(
    items: List[Any],
    page: int = 1,
    limit: int = DEFAULT_PAGE_SIZE,
) -> Tuple[List[Any], dict]:
    """
    Paginate a list of items.

    Args:
        items: Full list of items
        page: Page number (1-indexed)
        limit: Items per page

    Returns:
        Tuple of (paginated items, pagination info dict); the dict has the
        keys produced by build_pagination_info (page, limit, total,
        has_more, total_pages).
    """
    start = (page - 1) * limit
    # Delegate the info dict to build_pagination_info so the two code paths
    # cannot drift apart. The original "end < total" check is equivalent to
    # its "page * limit < total" since end == page * limit.
    return items[start:start + limit], build_pagination_info(page, limit, len(items))
|
||||
|
||||
|
||||
def calculate_offset(page: int, limit: int) -> int:
    """Translate a 1-indexed page number into a database row offset."""
    return limit * (page - 1)
|
||||
|
||||
|
||||
def build_pagination_info(
    page: int,
    limit: int,
    total: int,
) -> dict:
    """Assemble the standard pagination metadata dictionary.

    Keys: page, limit, total, has_more, total_pages. An empty result set
    still reports one (empty) page.
    """
    # Ceiling division for the page count; empty sets count as one page.
    total_pages = (total + limit - 1) // limit if total > 0 else 1
    return {
        "page": page,
        "limit": limit,
        "total": total,
        "has_more": total > page * limit,
        "total_pages": total_pages,
    }
|
||||
22
artdag/common/pyproject.toml
Normal file
22
artdag/common/pyproject.toml
Normal file
@@ -0,0 +1,22 @@
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "artdag-common"
|
||||
version = "0.1.3"
|
||||
description = "Shared components for Art-DAG L1 and L2 servers"
|
||||
requires-python = ">=3.10"
|
||||
dependencies = [
|
||||
"fastapi>=0.100.0",
|
||||
"jinja2>=3.1.0",
|
||||
"pydantic>=2.0.0",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.0.0",
|
||||
]
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["artdag_common"]
|
||||
47
artdag/core/.gitignore
vendored
Normal file
47
artdag/core/.gitignore
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# Testing
|
||||
.pytest_cache/
|
||||
.coverage
|
||||
htmlcov/
|
||||
|
||||
# IDE
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# Environment
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
|
||||
# Private keys (ActivityPub secrets)
|
||||
.cache/
|
||||
|
||||
# Test outputs
|
||||
test_cache/
|
||||
test_plan_output.json
|
||||
analysis.json
|
||||
plan.json
|
||||
plan_with_analysis.json
|
||||
110
artdag/core/README.md
Normal file
110
artdag/core/README.md
Normal file
@@ -0,0 +1,110 @@
|
||||
# artdag
|
||||
|
||||
Content-addressed DAG execution engine with ActivityPub ownership.
|
||||
|
||||
## Features
|
||||
|
||||
- **Content-addressed nodes**: `node_id = SHA3-256(type + config + inputs)` for automatic deduplication
|
||||
- **Quantum-resistant hashing**: SHA-3 throughout for future-proof integrity
|
||||
- **ActivityPub ownership**: Cryptographically signed ownership claims
|
||||
- **Federated identity**: `@user@artdag.rose-ash.com` style identities
|
||||
- **Pluggable executors**: Register custom node types
|
||||
- **Built-in video primitives**: SOURCE, SEGMENT, RESIZE, TRANSFORM, SEQUENCE, MUX, BLEND
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
pip install -e .
|
||||
```
|
||||
|
||||
### Optional: External Effect Tools
|
||||
|
||||
Some effects can use external tools for better performance:
|
||||
|
||||
**Pixelsort** (glitch art pixel sorting):
|
||||
```bash
|
||||
# Rust CLI (recommended - fast)
|
||||
cargo install --git https://github.com/Void-ux/pixelsort.git pixelsort
|
||||
|
||||
# Or Python CLI
|
||||
pip install git+https://github.com/Blotz/pixelsort-cli
|
||||
```
|
||||
|
||||
**Datamosh** (video glitch/corruption):
|
||||
```bash
|
||||
# FFglitch (recommended)
|
||||
./scripts/install-ffglitch.sh
|
||||
|
||||
# Or Python CLI
|
||||
pip install git+https://github.com/tiberiuiancu/datamoshing
|
||||
```
|
||||
|
||||
Check available tools:
|
||||
```bash
|
||||
python -m artdag.sexp.external_tools
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
```python
|
||||
from artdag import Engine, DAGBuilder, Registry
|
||||
from artdag.activitypub import OwnershipManager
|
||||
|
||||
# Create ownership manager
|
||||
manager = OwnershipManager("./my_registry")
|
||||
|
||||
# Create your identity
|
||||
actor = manager.create_actor("alice", "Alice")
|
||||
print(f"Created: {actor.handle}") # @alice@artdag.rose-ash.com
|
||||
|
||||
# Register an asset with ownership
|
||||
asset, activity = manager.register_asset(
|
||||
actor=actor,
|
||||
name="my_image",
|
||||
path="/path/to/image.jpg",
|
||||
tags=["photo", "art"],
|
||||
)
|
||||
print(f"Owned: {asset.name} (hash: {asset.content_hash})")
|
||||
|
||||
# Build and execute a DAG
|
||||
engine = Engine("./cache")
|
||||
builder = DAGBuilder()
|
||||
|
||||
source = builder.source(str(asset.path))
|
||||
resized = builder.resize(source, width=1920, height=1080)
|
||||
builder.set_output(resized)
|
||||
|
||||
result = engine.execute(builder.build())
|
||||
print(f"Output: {result.output_path}")
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
artdag/
|
||||
├── dag.py # Node, DAG, DAGBuilder
|
||||
├── cache.py # Content-addressed file cache
|
||||
├── executor.py # Base executor + registry
|
||||
├── engine.py # DAG execution engine
|
||||
├── activitypub/ # Identity + ownership
|
||||
│ ├── actor.py # Actor identity with RSA keys
|
||||
│ ├── activity.py # Create, Announce activities
|
||||
│ ├── signatures.py # RSA signing/verification
|
||||
│ └── ownership.py # Links actors to assets
|
||||
├── nodes/ # Built-in executors
|
||||
│ ├── source.py # SOURCE
|
||||
│ ├── transform.py # SEGMENT, RESIZE, TRANSFORM
|
||||
│ ├── compose.py # SEQUENCE, LAYER, MUX, BLEND
|
||||
│ └── effect.py # EFFECT (identity, etc.)
|
||||
└── effects/ # Effect implementations
|
||||
└── identity/ # The foundational identity effect
|
||||
```
|
||||
|
||||
## Related Repos
|
||||
|
||||
- **Registry**: https://git.rose-ash.com/art-dag/registry - Asset registry with ownership proofs
|
||||
- **Recipes**: https://git.rose-ash.com/art-dag/recipes - DAG recipes using effects
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
61
artdag/core/artdag/__init__.py
Normal file
61
artdag/core/artdag/__init__.py
Normal file
@@ -0,0 +1,61 @@
|
||||
# artdag - Content-addressed DAG execution engine with ActivityPub ownership
|
||||
#
|
||||
# A standalone execution engine that processes directed acyclic graphs (DAGs)
|
||||
# where each node represents an operation. Nodes are content-addressed for
|
||||
# automatic caching and deduplication.
|
||||
#
|
||||
# Core concepts:
|
||||
# - Node: An operation with type, config, and inputs
|
||||
# - DAG: A graph of nodes with a designated output node
|
||||
# - Executor: Implements the actual operation for a node type
|
||||
# - Engine: Executes DAGs by resolving dependencies and running executors
|
||||
|
||||
from .dag import Node, DAG, DAGBuilder, NodeType
|
||||
from .cache import Cache, CacheEntry
|
||||
from .executor import Executor, register_executor, get_executor
|
||||
from .engine import Engine
|
||||
from .registry import Registry, Asset
|
||||
from .activities import Activity, ActivityStore, ActivityManager, make_is_shared_fn
|
||||
|
||||
# Analysis and planning modules (optional, require extra dependencies)
# On ImportError the names are bound to None instead of propagating, so the
# core engine stays importable without the analysis/planning extras and
# callers can feature-detect with `if Analyzer is not None:`.
try:
    from .analysis import Analyzer, AnalysisResult
except ImportError:
    Analyzer = None
    AnalysisResult = None

try:
    from .planning import RecipePlanner, ExecutionPlan, ExecutionStep
except ImportError:
    RecipePlanner = None
    ExecutionPlan = None
    ExecutionStep = None

# Public API. Optional names are exported even when their import failed
# above (they are then None).
__all__ = [
    # Core
    "Node",
    "DAG",
    "DAGBuilder",
    "NodeType",
    "Cache",
    "CacheEntry",
    "Executor",
    "register_executor",
    "get_executor",
    "Engine",
    "Registry",
    "Asset",
    "Activity",
    "ActivityStore",
    "ActivityManager",
    "make_is_shared_fn",
    # Analysis (optional)
    "Analyzer",
    "AnalysisResult",
    # Planning (optional)
    "RecipePlanner",
    "ExecutionPlan",
    "ExecutionStep",
]

__version__ = "0.1.0"
|
||||
371
artdag/core/artdag/activities.py
Normal file
371
artdag/core/artdag/activities.py
Normal file
@@ -0,0 +1,371 @@
|
||||
# artdag/activities.py
|
||||
"""
|
||||
Persistent activity (job) tracking for cache management.
|
||||
|
||||
Activities represent executions of DAGs. They track:
|
||||
- Input node IDs (sources)
|
||||
- Output node ID (terminal node)
|
||||
- Intermediate node IDs (everything in between)
|
||||
|
||||
This enables deletion rules:
|
||||
- Shared items (ActivityPub published) cannot be deleted
|
||||
- Inputs/outputs of activities cannot be deleted
|
||||
- Intermediates can be deleted (reconstructible)
|
||||
- Activities can only be discarded if no items are shared
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Dict, List, Optional, Set
|
||||
|
||||
from .cache import Cache, CacheEntry
|
||||
from .dag import DAG
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def make_is_shared_fn(activitypub_store: "ActivityStore") -> Callable[[str], bool]:
    """
    Build a predicate reporting whether a content hash has been published.

    Wraps an ActivityPub ActivityStore (from artdag.activitypub.activity)
    in a closure so callers depend only on the `(cid) -> bool` interface.

    Args:
        activitypub_store: The ActivityPub activity store to query.

    Returns:
        Function returning True when at least one "Create" activity
        references the given cid.
    """
    def is_shared(cid: str) -> bool:
        for activity in activitypub_store.find_by_object_hash(cid):
            if activity.activity_type == "Create":
                return True
        return False

    return is_shared
|
||||
|
||||
|
||||
@dataclass
class Activity:
    """
    A recorded execution of a DAG.

    Tracks which cache entries are inputs, outputs, and intermediates
    to enforce deletion rules.

    Attributes:
        activity_id: Unique identifier (UUID4 string unless supplied).
        input_ids: Source node cache IDs (nodes with no inputs).
        output_id: Terminal node cache ID.
        intermediate_ids: Everything in between (reconstructible).
        created_at: Unix timestamp of recording.
        status: One of pending|running|completed|failed.
        dag_snapshot: Serialized DAG for reconstruction, if available.
    """
    activity_id: str
    input_ids: List[str]  # Source node cache IDs
    output_id: str  # Terminal node cache ID
    intermediate_ids: List[str]  # Everything in between
    created_at: float
    status: str = "completed"  # pending|running|completed|failed
    dag_snapshot: Optional[Dict[str, Any]] = None  # Serialized DAG for reconstruction

    def to_dict(self) -> Dict[str, Any]:
        """Serialize for storage."""
        return {
            "activity_id": self.activity_id,
            "input_ids": self.input_ids,
            "output_id": self.output_id,
            "intermediate_ids": self.intermediate_ids,
            "created_at": self.created_at,
            "status": self.status,
            "dag_snapshot": self.dag_snapshot,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Activity":
        """Deserialize from storage; tolerates records missing optional fields."""
        return cls(
            activity_id=data["activity_id"],
            input_ids=data["input_ids"],
            output_id=data["output_id"],
            intermediate_ids=data["intermediate_ids"],
            created_at=data["created_at"],
            status=data.get("status", "completed"),
            dag_snapshot=data.get("dag_snapshot"),
        )

    @classmethod
    def from_dag(cls, dag: "DAG", activity_id: Optional[str] = None) -> "Activity":
        """
        Create an Activity from a DAG.

        Classifies nodes as inputs (no upstream nodes), output (the DAG's
        terminal node), or intermediates (everything else).

        Args:
            dag: The executed DAG to record.
            activity_id: Optional explicit ID; a UUID4 is generated if omitted.
        """
        if activity_id is None:
            activity_id = str(uuid.uuid4())

        # Input nodes are the sources (no inputs). Collected as a set so the
        # intermediate classification below is O(1) per membership test
        # instead of a linear scan of a list.
        input_ids = {
            node_id for node_id, node in dag.nodes.items() if not node.inputs
        }

        # Output is the terminal node.
        output_id = dag.output_id

        # Intermediates are everything that is neither a source nor the output.
        intermediate_ids = [
            node_id
            for node_id in dag.nodes
            if node_id not in input_ids and node_id != output_id
        ]

        return cls(
            activity_id=activity_id,
            input_ids=sorted(input_ids),
            output_id=output_id,
            intermediate_ids=sorted(intermediate_ids),
            created_at=time.time(),
            status="completed",
            dag_snapshot=dag.to_dict(),
        )

    @property
    def all_node_ids(self) -> List[str]:
        """All node IDs involved in this activity (inputs, output, intermediates)."""
        return self.input_ids + [self.output_id] + self.intermediate_ids
|
||||
|
||||
|
||||
class ActivityStore:
    """
    Persistent storage for activities.

    Activities are held in memory keyed by ID and mirrored to a single
    JSON index file on every mutation. Provides methods to check deletion
    eligibility and perform deletions.
    """

    def __init__(self, store_dir: Path | str):
        self.store_dir = Path(store_dir)
        self.store_dir.mkdir(parents=True, exist_ok=True)
        self._activities: Dict[str, Activity] = {}
        self._load()

    def _index_path(self) -> Path:
        # Single JSON file holding the whole index.
        return self.store_dir / "activities.json"

    def _load(self):
        """Load activities from disk, resetting the index on corruption."""
        index_path = self._index_path()
        if not index_path.exists():
            return
        try:
            with open(index_path) as f:
                raw = json.load(f)
            loaded = {}
            for record in raw.get("activities", []):
                loaded[record["activity_id"]] = Activity.from_dict(record)
            self._activities = loaded
        except (json.JSONDecodeError, KeyError) as e:
            logger.warning(f"Failed to load activities: {e}")
            self._activities = {}

    def _save(self):
        """Persist the full index to disk."""
        payload = {
            "version": "1.0",
            "activities": [activity.to_dict() for activity in self._activities.values()],
        }
        with open(self._index_path(), "w") as f:
            json.dump(payload, f, indent=2)

    def add(self, activity: Activity) -> None:
        """Add an activity."""
        self._activities[activity.activity_id] = activity
        self._save()

    def get(self, activity_id: str) -> Optional[Activity]:
        """Get an activity by ID."""
        return self._activities.get(activity_id)

    def remove(self, activity_id: str) -> bool:
        """Remove an activity record (does not delete cache entries)."""
        if self._activities.pop(activity_id, None) is None:
            return False
        self._save()
        return True

    def list(self) -> List[Activity]:
        """List all activities."""
        return list(self._activities.values())

    def find_by_input_ids(self, input_ids: List[str]) -> List[Activity]:
        """Find activities with the same inputs (for UI grouping)."""
        wanted = sorted(input_ids)
        matches = []
        for activity in self._activities.values():
            if sorted(activity.input_ids) == wanted:
                matches.append(activity)
        return matches

    def find_using_node(self, node_id: str) -> List[Activity]:
        """Find all activities that reference a node ID."""
        return [
            activity
            for activity in self._activities.values()
            if node_id in activity.all_node_ids
        ]

    def __len__(self) -> int:
        return len(self._activities)
|
||||
|
||||
|
||||
class ActivityManager:
    """
    Manages activities and cache deletion with sharing rules.

    Deletion rules:
    1. Shared items (ActivityPub published) cannot be deleted
    2. Inputs/outputs of activities cannot be deleted
    3. Intermediates can be deleted (reconstructible)
    4. Activities can only be discarded if no items are shared
    """

    def __init__(
        self,
        cache: Cache,
        activity_store: ActivityStore,
        is_shared_fn: Callable[[str], bool],
    ):
        """
        Args:
            cache: The L1 cache
            activity_store: Activity persistence
            is_shared_fn: Function that checks if a cid is shared
                (published via ActivityPub)
        """
        self.cache = cache
        self.activities = activity_store
        self._is_shared = is_shared_fn

    def record_activity(self, dag: DAG) -> Activity:
        """Record a completed DAG execution as an activity."""
        activity = Activity.from_dag(dag)
        self.activities.add(activity)
        return activity

    def is_shared(self, node_id: str) -> bool:
        """Check if a cache entry is shared (published via ActivityPub)."""
        entry = self.cache.get_entry(node_id)
        # An entry without a content id cannot have been published.
        if not entry or not entry.cid:
            return False
        return self._is_shared(entry.cid)

    def can_delete_cache_entry(self, node_id: str) -> bool:
        """
        Check if a cache entry can be deleted.

        Returns False if:
        - Entry is shared (ActivityPub published)
        - Entry is an input or output of any activity
        """
        # Check if shared
        if self.is_shared(node_id):
            return False

        # Check if it's an input or output of any activity
        for activity in self.activities.list():
            if node_id in activity.input_ids:
                return False
            if node_id == activity.output_id:
                return False

        # It's either an intermediate or orphaned - can delete
        return True

    def can_discard_activity(self, activity_id: str) -> bool:
        """
        Check if an activity can be discarded.

        Returns False if any cache entry (input, output, or intermediate)
        is shared via ActivityPub.
        """
        activity = self.activities.get(activity_id)
        if not activity:
            # Unknown ID: nothing to discard.
            return False

        # Check if any item is shared
        for node_id in activity.all_node_ids:
            if self.is_shared(node_id):
                return False

        return True

    def discard_activity(self, activity_id: str) -> bool:
        """
        Discard an activity and delete its intermediate cache entries.

        Returns False if the activity cannot be discarded (has shared items).

        When discarded:
        - Intermediate cache entries are deleted
        - The activity record is removed
        - Inputs remain (may be used by other activities)
        - Output is deleted if orphaned (not shared, not used elsewhere)
        """
        if not self.can_discard_activity(activity_id):
            return False

        activity = self.activities.get(activity_id)
        if not activity:
            return False

        output_id = activity.output_id
        intermediate_ids = list(activity.intermediate_ids)

        # Remove the activity record first, so the _is_orphaned checks below
        # no longer count this activity's own references.
        self.activities.remove(activity_id)

        # Delete intermediates
        for node_id in intermediate_ids:
            self.cache.remove(node_id)
            logger.debug(f"Deleted intermediate: {node_id}")

        # Check if output is now orphaned
        if self._is_orphaned(output_id) and not self.is_shared(output_id):
            self.cache.remove(output_id)
            logger.debug(f"Deleted orphaned output: {output_id}")

        # Inputs remain - they may be used by other activities
        # But check if any are orphaned now
        for input_id in activity.input_ids:
            if self._is_orphaned(input_id) and not self.is_shared(input_id):
                self.cache.remove(input_id)
                logger.debug(f"Deleted orphaned input: {input_id}")

        return True

    def _is_orphaned(self, node_id: str) -> bool:
        """Check if a node is not referenced by any activity."""
        for activity in self.activities.list():
            if node_id in activity.all_node_ids:
                return False
        return True

    def get_deletable_entries(self) -> List[CacheEntry]:
        """Get all cache entries that can be deleted."""
        deletable = []
        for entry in self.cache.list_entries():
            if self.can_delete_cache_entry(entry.node_id):
                deletable.append(entry)
        return deletable

    def get_discardable_activities(self) -> List[Activity]:
        """Get all activities that can be discarded."""
        return [
            a for a in self.activities.list()
            if self.can_discard_activity(a.activity_id)
        ]

    def cleanup_intermediates(self) -> int:
        """
        Delete all intermediate cache entries.

        Intermediates are safe to delete as they can be reconstructed
        from inputs using the DAG.

        Returns:
            Number of entries deleted
        """
        deleted = 0
        for activity in self.activities.list():
            for node_id in activity.intermediate_ids:
                if self.cache.has(node_id):
                    self.cache.remove(node_id)
                    deleted += 1
        return deleted
|
||||
33
artdag/core/artdag/activitypub/__init__.py
Normal file
33
artdag/core/artdag/activitypub/__init__.py
Normal file
@@ -0,0 +1,33 @@
|
||||
# artdag/activitypub/__init__.py
|
||||
"""
|
||||
ActivityPub implementation for Art DAG.
|
||||
|
||||
Provides decentralized identity and ownership for assets.
|
||||
Domain: artdag.rose-ash.com
|
||||
|
||||
Core concepts:
|
||||
- Actor: A user identity with cryptographic keys
|
||||
- Object: An asset (image, video, etc.)
|
||||
- Activity: An action (Create, Announce, Like, etc.)
|
||||
- Signature: Cryptographic proof of authorship
|
||||
"""
|
||||
|
||||
from .actor import Actor, ActorStore
|
||||
from .activity import Activity, CreateActivity, ActivityStore
|
||||
from .signatures import sign_activity, verify_signature, verify_activity_ownership
|
||||
from .ownership import OwnershipManager, OwnershipRecord
|
||||
|
||||
__all__ = [
|
||||
"Actor",
|
||||
"ActorStore",
|
||||
"Activity",
|
||||
"CreateActivity",
|
||||
"ActivityStore",
|
||||
"sign_activity",
|
||||
"verify_signature",
|
||||
"verify_activity_ownership",
|
||||
"OwnershipManager",
|
||||
"OwnershipRecord",
|
||||
]
|
||||
|
||||
DOMAIN = "artdag.rose-ash.com"
|
||||
203
artdag/core/artdag/activitypub/activity.py
Normal file
203
artdag/core/artdag/activitypub/activity.py
Normal file
@@ -0,0 +1,203 @@
|
||||
# artdag/activitypub/activity.py
|
||||
"""
|
||||
ActivityPub Activity types.
|
||||
|
||||
Activities represent actions taken by actors on objects.
|
||||
Key activity types for Art DAG:
|
||||
- Create: Actor creates/claims ownership of an object
|
||||
- Announce: Actor shares/boosts an object
|
||||
- Like: Actor endorses an object
|
||||
"""
|
||||
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from .actor import Actor, DOMAIN
|
||||
|
||||
|
||||
def _generate_id() -> str:
|
||||
"""Generate unique activity ID."""
|
||||
return str(uuid.uuid4())
|
||||
|
||||
|
||||
@dataclass
class Activity:
    """
    Base ActivityPub Activity.

    An activity records an actor performing an action on an object
    (Create, Announce, Like, ...). Instances are plain data; signing
    happens elsewhere and fills in `signature` afterwards.

    Attributes:
        activity_id: Unique identifier
        activity_type: Type (Create, Announce, Like, etc.)
        actor_id: ID of the actor performing the activity
        object_data: The object of the activity
        published: ISO timestamp
        signature: Cryptographic signature (added after signing)
    """
    activity_id: str
    activity_type: str
    actor_id: str
    object_data: Dict[str, Any]
    published: str = field(default_factory=lambda: time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()))
    signature: Optional[Dict[str, Any]] = None

    def to_activitypub(self) -> Dict[str, Any]:
        """Return ActivityPub JSON-LD representation."""
        payload = {
            "@context": "https://www.w3.org/ns/activitystreams",
            "type": self.activity_type,
            "id": f"https://{DOMAIN}/activities/{self.activity_id}",
            "actor": self.actor_id,
            "object": self.object_data,
            "published": self.published,
        }
        # Only attach the signature once the activity has been signed.
        if self.signature:
            payload["signature"] = self.signature
        return payload

    def to_dict(self) -> Dict[str, Any]:
        """Serialize for storage."""
        keys = (
            "activity_id",
            "activity_type",
            "actor_id",
            "object_data",
            "published",
            "signature",
        )
        return {key: getattr(self, key) for key in keys}

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Activity":
        """Deserialize from storage."""
        return cls(
            data["activity_id"],
            data["activity_type"],
            data["actor_id"],
            data["object_data"],
            data.get("published", ""),
            data.get("signature"),
        )
|
||||
|
||||
|
||||
@dataclass
class CreateActivity(Activity):
    """
    Create activity - establishes ownership of an object.

    Used when an actor creates or claims an asset.
    """
    # The type is fixed for this subclass; callers never pass it.
    activity_type: str = field(default="Create", init=False)

    @classmethod
    def for_asset(
        cls,
        actor: Actor,
        asset_name: str,
        cid: str,
        asset_type: str = "Image",
        metadata: Dict[str, Any] = None,
    ) -> "CreateActivity":
        """
        Create a Create activity for an asset.

        Args:
            actor: The actor claiming ownership
            asset_name: Name of the asset
            cid: SHA-3 hash of the asset content
            asset_type: ActivityPub object type (Image, Video, Audio, etc.)
            metadata: Additional metadata

        Returns:
            CreateActivity establishing ownership
        """
        obj = {
            "type": asset_type,
            "name": asset_name,
            "id": f"https://{DOMAIN}/objects/{cid}",
            "contentHash": {
                "algorithm": "sha3-256",
                "value": cid,
            },
            "attributedTo": actor.id,
        }
        # Optional metadata is attached only when provided and non-empty.
        if metadata:
            obj["metadata"] = metadata

        return cls(
            activity_id=_generate_id(),
            actor_id=actor.id,
            object_data=obj,
        )
|
||||
|
||||
|
||||
class ActivityStore:
|
||||
"""
|
||||
Persistent storage for activities.
|
||||
|
||||
Activities are stored as an append-only log for auditability.
|
||||
"""
|
||||
|
||||
def __init__(self, store_dir: Path | str):
|
||||
self.store_dir = Path(store_dir)
|
||||
self.store_dir.mkdir(parents=True, exist_ok=True)
|
||||
self._activities: List[Activity] = []
|
||||
self._load()
|
||||
|
||||
def _log_path(self) -> Path:
|
||||
return self.store_dir / "activities.json"
|
||||
|
||||
def _load(self):
|
||||
"""Load activities from disk."""
|
||||
log_path = self._log_path()
|
||||
if log_path.exists():
|
||||
with open(log_path) as f:
|
||||
data = json.load(f)
|
||||
self._activities = [
|
||||
Activity.from_dict(a) for a in data.get("activities", [])
|
||||
]
|
||||
|
||||
def _save(self):
|
||||
"""Save activities to disk."""
|
||||
data = {
|
||||
"version": "1.0",
|
||||
"activities": [a.to_dict() for a in self._activities],
|
||||
}
|
||||
with open(self._log_path(), "w") as f:
|
||||
json.dump(data, f, indent=2)
|
||||
|
||||
def add(self, activity: Activity) -> None:
|
||||
"""Add an activity to the log."""
|
||||
self._activities.append(activity)
|
||||
self._save()
|
||||
|
||||
def get(self, activity_id: str) -> Optional[Activity]:
|
||||
"""Get an activity by ID."""
|
||||
for a in self._activities:
|
||||
if a.activity_id == activity_id:
|
||||
return a
|
||||
return None
|
||||
|
||||
def list(self) -> List[Activity]:
|
||||
"""List all activities."""
|
||||
return list(self._activities)
|
||||
|
||||
def find_by_actor(self, actor_id: str) -> List[Activity]:
|
||||
"""Find activities by actor."""
|
||||
return [a for a in self._activities if a.actor_id == actor_id]
|
||||
|
||||
def find_by_object_hash(self, cid: str) -> List[Activity]:
|
||||
"""Find activities referencing an object by hash."""
|
||||
results = []
|
||||
for a in self._activities:
|
||||
obj_hash = a.object_data.get("contentHash", {})
|
||||
if isinstance(obj_hash, dict) and obj_hash.get("value") == cid:
|
||||
results.append(a)
|
||||
elif a.object_data.get("contentHash") == cid:
|
||||
results.append(a)
|
||||
return results
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self._activities)
|
||||
206
artdag/core/artdag/activitypub/actor.py
Normal file
206
artdag/core/artdag/activitypub/actor.py
Normal file
@@ -0,0 +1,206 @@
|
||||
# artdag/activitypub/actor.py
|
||||
"""
|
||||
ActivityPub Actor management.
|
||||
|
||||
An Actor is an identity with:
|
||||
- Username and display name
|
||||
- RSA key pair for signing
|
||||
- ActivityPub-compliant JSON-LD representation
|
||||
"""
|
||||
|
||||
import json
|
||||
import time
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa, padding
|
||||
|
||||
DOMAIN = "artdag.rose-ash.com"
|
||||
|
||||
|
||||
def _generate_keypair() -> tuple[bytes, bytes]:
    """
    Generate an RSA key pair for signing.

    Returns:
        (private_pem, public_pem): PEM-encoded PKCS8 private key and
        SubjectPublicKeyInfo public key.
    """
    # 2048-bit RSA with the standard public exponent 65537.
    private_key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
    )
    private_pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        # NOTE(review): the private key is serialized unencrypted; acceptable
        # for a local key store, but confirm before exposing the store.
        encryption_algorithm=serialization.NoEncryption(),
    )
    public_pem = private_key.public_key().public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    return private_pem, public_pem
|
||||
|
||||
|
||||
@dataclass
class Actor:
    """
    An ActivityPub Actor (identity).

    Attributes:
        username: Unique username (e.g., "giles")
        display_name: Human-readable name
        public_key: PEM-encoded public key
        private_key: PEM-encoded private key (kept secret)
        created_at: Timestamp of creation
        domain: Home domain used to build all ActivityPub URLs
    """
    username: str
    display_name: str
    public_key: bytes
    private_key: bytes
    created_at: float = field(default_factory=time.time)
    domain: str = DOMAIN

    @property
    def id(self) -> str:
        """ActivityPub actor ID (URL)."""
        return f"https://{self.domain}/users/{self.username}"

    @property
    def handle(self) -> str:
        """Fediverse handle."""
        return f"@{self.username}@{self.domain}"

    @property
    def inbox(self) -> str:
        """ActivityPub inbox URL."""
        return f"{self.id}/inbox"

    @property
    def outbox(self) -> str:
        """ActivityPub outbox URL."""
        return f"{self.id}/outbox"

    @property
    def key_id(self) -> str:
        """Key ID for HTTP Signatures."""
        return f"{self.id}#main-key"

    def to_activitypub(self) -> Dict[str, Any]:
        """Return ActivityPub JSON-LD representation."""
        public_key_doc = {
            "id": self.key_id,
            "owner": self.id,
            "publicKeyPem": self.public_key.decode("utf-8"),
        }
        return {
            "@context": [
                "https://www.w3.org/ns/activitystreams",
                "https://w3id.org/security/v1",
            ],
            "type": "Person",
            "id": self.id,
            "preferredUsername": self.username,
            "name": self.display_name,
            "inbox": self.inbox,
            "outbox": self.outbox,
            "publicKey": public_key_doc,
        }

    def to_dict(self) -> Dict[str, Any]:
        """Serialize for storage (keys stored as PEM text)."""
        record = {
            "username": self.username,
            "display_name": self.display_name,
            "public_key": self.public_key.decode("utf-8"),
            "private_key": self.private_key.decode("utf-8"),
            "created_at": self.created_at,
            "domain": self.domain,
        }
        return record

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Actor":
        """Deserialize from storage."""
        return cls(
            data["username"],
            data["display_name"],
            data["public_key"].encode("utf-8"),
            data["private_key"].encode("utf-8"),
            data.get("created_at", time.time()),
            data.get("domain", DOMAIN),
        )

    @classmethod
    def create(cls, username: str, display_name: str = None) -> "Actor":
        """Create a new actor with freshly generated RSA keys."""
        private_pem, public_pem = _generate_keypair()
        return cls(
            username=username,
            display_name=display_name if display_name else username,
            public_key=public_pem,
            private_key=private_pem,
        )
|
||||
|
||||
|
||||
class ActorStore:
    """
    Persistent storage for actors.

    Structure:
        store_dir/
            actors.json        # Index of all actors
            keys/
                <username>.private.pem
                <username>.public.pem
    """

    def __init__(self, store_dir: Path | str):
        self.store_dir = Path(store_dir)
        self.store_dir.mkdir(parents=True, exist_ok=True)
        self._actors: Dict[str, Actor] = {}
        self._load()

    def _index_path(self) -> Path:
        return self.store_dir / "actors.json"

    def _load(self):
        """Load actors from the JSON index, if one exists."""
        index_path = self._index_path()
        if not index_path.exists():
            return
        with open(index_path) as f:
            raw = json.load(f)
        self._actors = {
            name: Actor.from_dict(record)
            for name, record in raw.get("actors", {}).items()
        }

    def _save(self):
        """Write the full actor index to disk."""
        payload = {
            "version": "1.0",
            "domain": DOMAIN,
            "actors": {
                name: actor.to_dict()
                for name, actor in self._actors.items()
            },
        }
        with open(self._index_path(), "w") as f:
            json.dump(payload, f, indent=2)

    def create(self, username: str, display_name: str = None) -> Actor:
        """Create and store a new actor; usernames must be unique."""
        if username in self._actors:
            raise ValueError(f"Actor {username} already exists")

        new_actor = Actor.create(username, display_name)
        self._actors[username] = new_actor
        self._save()
        return new_actor

    def get(self, username: str) -> Optional[Actor]:
        """Get an actor by username."""
        return self._actors.get(username)

    def list(self) -> list[Actor]:
        """List all actors."""
        return list(self._actors.values())

    def __contains__(self, username: str) -> bool:
        return username in self._actors

    def __len__(self) -> int:
        return len(self._actors)
|
||||
226
artdag/core/artdag/activitypub/ownership.py
Normal file
226
artdag/core/artdag/activitypub/ownership.py
Normal file
@@ -0,0 +1,226 @@
|
||||
# artdag/activitypub/ownership.py
|
||||
"""
|
||||
Ownership integration between ActivityPub and Registry.
|
||||
|
||||
Connects actors, activities, and assets to establish provable ownership.
|
||||
"""
|
||||
|
||||
import json
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from .actor import Actor, ActorStore
|
||||
from .activity import Activity, CreateActivity, ActivityStore
|
||||
from .signatures import sign_activity, verify_activity_ownership
|
||||
from ..registry import Registry, Asset
|
||||
|
||||
|
||||
@dataclass
class OwnershipRecord:
    """
    A verified ownership record linking actor to asset.

    Attributes:
        actor_handle: The actor's fediverse handle
        asset_name: Name of the owned asset
        cid: SHA-3 hash of the asset
        activity_id: ID of the Create activity establishing ownership
        verified: Whether the signature has been verified
    """
    actor_handle: str  # e.g. "@alice@artdag.rose-ash.com"
    asset_name: str
    cid: str  # SHA-3-256 content hash of the asset
    activity_id: str  # links back to the signed Create activity
    verified: bool = False  # set True only after signature verification
|
||||
|
||||
|
||||
class OwnershipManager:
    """
    Manages ownership relationships between actors and assets.

    Integrates:
    - ActorStore: Identity management
    - Registry: Asset storage
    - ActivityStore: Ownership activities
    """

    def __init__(self, base_dir: Path | str):
        """Initialize the manager, creating the backing stores under *base_dir*."""
        self.base_dir = Path(base_dir)
        self.base_dir.mkdir(parents=True, exist_ok=True)

        # Each store lives in its own subdirectory of base_dir.
        self.actors = ActorStore(self.base_dir / "actors")
        self.activities = ActivityStore(self.base_dir / "activities")
        self.registry = Registry(self.base_dir / "registry")

    def create_actor(self, username: str, display_name: Optional[str] = None) -> Actor:
        """Create a new actor identity."""
        return self.actors.create(username, display_name)

    def get_actor(self, username: str) -> Optional[Actor]:
        """Get an actor by username, or None if not found."""
        return self.actors.get(username)

    def register_asset(
        self,
        actor: Actor,
        name: str,
        cid: str,
        url: Optional[str] = None,
        local_path: Optional[Path | str] = None,
        tags: Optional[List[str]] = None,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> tuple[Asset, Activity]:
        """
        Register an asset and establish ownership.

        Creates the asset in the registry and a signed Create activity
        proving the actor's ownership.

        Args:
            actor: The actor claiming ownership
            name: Name for the asset
            cid: SHA-3-256 hash of the content
            url: Public URL (canonical location)
            local_path: Optional local path
            tags: Optional tags
            metadata: Optional metadata

        Returns:
            Tuple of (Asset, signed CreateActivity)
        """
        # Add to registry first so the activity signs the canonical stored CID.
        asset = self.registry.add(
            name=name,
            cid=cid,
            url=url,
            local_path=local_path,
            tags=tags,
            metadata=metadata,
        )

        # Create the ownership activity referencing the stored asset.
        activity = CreateActivity.for_asset(
            actor=actor,
            asset_name=name,
            cid=asset.cid,
            asset_type=self._asset_type_to_ap(asset.asset_type),
            metadata=metadata,
        )

        # Sign, persist, and return both halves of the ownership claim.
        signed_activity = sign_activity(activity, actor)
        self.activities.add(signed_activity)

        return asset, signed_activity

    def _asset_type_to_ap(self, asset_type: str) -> str:
        """Convert registry asset type to ActivityPub object type.

        Unrecognized types fall back to the generic "Document".
        """
        type_map = {
            "image": "Image",
            "video": "Video",
            "audio": "Audio",
            "unknown": "Document",
        }
        return type_map.get(asset_type, "Document")

    # ---- internal helpers (deduplicated parsing logic) ----

    @staticmethod
    def _username_from_actor_id(actor_id: str) -> Optional[str]:
        """Extract the username from an actor ID of the form .../users/{username}.

        Returns None when the ID does not contain a "/users/" segment.
        """
        if "/users/" in actor_id:
            return actor_id.split("/users/")[-1]
        return None

    @staticmethod
    def _hash_value(object_data: Dict[str, Any]) -> Optional[str]:
        """Pull the content hash out of an activity's object data.

        The "contentHash" field may be either a {"value": ...} mapping or a
        bare string; returns None when absent.
        """
        obj_hash = object_data.get("contentHash", {})
        if isinstance(obj_hash, dict):
            return obj_hash.get("value")
        return obj_hash

    def get_owner(self, asset_name: str) -> Optional[Actor]:
        """
        Get the owner of an asset.

        Finds the earliest Create activity for the asset and returns
        the actor if the signature is valid.

        Returns:
            The verified owning Actor, or None when the asset is unknown,
            no Create activity exists, or verification fails.
        """
        asset = self.registry.get(asset_name)
        if not asset:
            return None

        # Ownership claims are Create activities referencing the asset's CID.
        activities = self.activities.find_by_object_hash(asset.cid)
        create_activities = [a for a in activities if a.activity_type == "Create"]

        if not create_activities:
            return None

        # First-to-publish wins: the earliest Create establishes ownership.
        earliest = min(create_activities, key=lambda a: a.published)

        # Actor IDs look like https://artdag.rose-ash.com/users/{username}.
        username = self._username_from_actor_id(earliest.actor_id)
        if username is not None:
            actor = self.actors.get(username)
            if actor and verify_activity_ownership(earliest, actor):
                return actor

        return None

    def verify_ownership(self, asset_name: str, actor: Actor) -> bool:
        """
        Verify that an actor owns an asset.

        Checks for a valid signed Create activity linking the actor
        to the asset.
        """
        asset = self.registry.get(asset_name)
        if not asset:
            return False

        activities = self.activities.find_by_object_hash(asset.cid)
        for activity in activities:
            if activity.activity_type == "Create" and activity.actor_id == actor.id:
                if verify_activity_ownership(activity, actor):
                    return True

        return False

    def list_owned_assets(self, actor: Actor) -> List[Asset]:
        """List all assets owned by an actor.

        Walks the actor's Create activities and resolves each embedded
        content hash back to a registry asset; activities whose hash does
        not resolve are skipped.
        """
        owned: List[Asset] = []

        for activity in self.activities.find_by_actor(actor.id):
            if activity.activity_type != "Create":
                continue
            hash_value = self._hash_value(activity.object_data)
            if hash_value:
                asset = self.registry.find_by_hash(hash_value)
                if asset:
                    owned.append(asset)

        return owned

    def get_ownership_records(self) -> List[OwnershipRecord]:
        """Get all ownership records.

        Builds one OwnershipRecord per Create activity in the store,
        verifying the signature whenever the local actor is known.
        """
        records: List[OwnershipRecord] = []

        for activity in self.activities.list():
            if activity.activity_type != "Create":
                continue

            # Resolve the local actor, if any, from the activity's actor ID.
            username = self._username_from_actor_id(activity.actor_id)
            if username is None:
                username = "unknown"
            actor = self.actors.get(username)

            hash_value = self._hash_value(activity.object_data)

            records.append(OwnershipRecord(
                actor_handle=actor.handle if actor else f"@{username}@unknown",
                asset_name=activity.object_data.get("name", "unknown"),
                cid=hash_value or "unknown",
                activity_id=activity.activity_id,
                verified=verify_activity_ownership(activity, actor) if actor else False,
            ))

        return records
|
||||
163
artdag/core/artdag/activitypub/signatures.py
Normal file
163
artdag/core/artdag/activitypub/signatures.py
Normal file
@@ -0,0 +1,163 @@
|
||||
# primitive/activitypub/signatures.py
|
||||
"""
|
||||
Cryptographic signatures for ActivityPub.
|
||||
|
||||
Uses RSA-SHA256 signatures compatible with HTTP Signatures spec
|
||||
and Linked Data Signatures for ActivityPub.
|
||||
"""
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import time
|
||||
from typing import Any, Dict
|
||||
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, rsa
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
|
||||
from .actor import Actor
|
||||
from .activity import Activity
|
||||
|
||||
|
||||
def _canonicalize(data: Dict[str, Any]) -> str:
|
||||
"""
|
||||
Canonicalize JSON for signing.
|
||||
|
||||
Uses JCS (JSON Canonicalization Scheme) - sorted keys, no whitespace.
|
||||
"""
|
||||
return json.dumps(data, sort_keys=True, separators=(",", ":"))
|
||||
|
||||
|
||||
def _hash_sha256(data: str) -> bytes:
|
||||
"""Hash string with SHA-256."""
|
||||
return hashlib.sha256(data.encode()).digest()
|
||||
|
||||
|
||||
def sign_activity(activity: Activity, actor: Actor) -> Activity:
    """
    Sign an activity with the actor's private key.

    Implements Linked Data Signatures (RsaSignature2017): the payload is
    SHA-256(canonical signature options) + SHA-256(canonical document),
    signed with RSA PKCS#1 v1.5 / SHA-256.

    Args:
        activity: The activity to sign (its `signature` field is set in place)
        actor: The actor whose key signs the activity

    Returns:
        Activity with signature attached
    """
    # Load the actor's PEM-encoded private key.
    key = serialization.load_pem_private_key(
        actor.private_key,
        password=None,
    )

    # Signature creation timestamp, UTC, ISO-8601 with trailing Z.
    timestamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

    # Signature options that get hashed alongside the document.
    sig_options = {
        "@context": "https://w3id.org/security/v1",
        "type": "RsaSignature2017",
        "creator": actor.key_id,
        "created": timestamp,
    }

    # Canonical form of the activity document, minus any prior signature.
    document = activity.to_activitypub()
    document.pop("signature", None)

    # Payload = hash(options) || hash(document).
    payload = _hash_sha256(_canonicalize(sig_options)) + _hash_sha256(
        _canonicalize(document)
    )

    # RSA PKCS#1 v1.5 with SHA-256.
    raw_signature = key.sign(
        payload,
        padding.PKCS1v15(),
        hashes.SHA256(),
    )

    # Attach the base64-encoded signature to the activity.
    activity.signature = {
        "type": "RsaSignature2017",
        "creator": actor.key_id,
        "created": timestamp,
        "signatureValue": base64.b64encode(raw_signature).decode("utf-8"),
    }

    return activity
|
||||
|
||||
|
||||
def verify_signature(activity: Activity, public_key_pem: bytes) -> bool:
    """
    Verify an activity's Linked Data Signature.

    Args:
        activity: The activity carrying a `signature` mapping
        public_key_pem: PEM-encoded public key

    Returns:
        True if the signature is valid; False for missing, malformed,
        or cryptographically invalid signatures.
    """
    signature = activity.signature
    if not signature:
        return False

    try:
        public_key = serialization.load_pem_public_key(public_key_pem)

        # Rebuild the exact options object that was hashed at signing time.
        sig_options = {
            "@context": "https://w3id.org/security/v1",
            "type": signature["type"],
            "creator": signature["creator"],
            "created": signature["created"],
        }

        # Canonical form of the activity, excluding the signature itself.
        document = activity.to_activitypub()
        document.pop("signature", None)

        # Expected payload = hash(options) || hash(document).
        expected = _hash_sha256(_canonicalize(sig_options)) + _hash_sha256(
            _canonicalize(document)
        )

        # verify() raises InvalidSignature on mismatch.
        public_key.verify(
            base64.b64decode(signature["signatureValue"]),
            expected,
            padding.PKCS1v15(),
            hashes.SHA256(),
        )
    except (InvalidSignature, KeyError, ValueError):
        # Bad key, missing signature fields, or failed verification.
        return False

    return True
|
||||
|
||||
|
||||
def verify_activity_ownership(activity: Activity, actor: Actor) -> bool:
    """
    Check that *activity* was signed by the claimed *actor*.

    Two conditions must hold: the signature's `creator` equals the actor's
    key ID, and the signature verifies against the actor's public key.

    Args:
        activity: The activity to verify
        actor: The claimed actor

    Returns:
        True only when both the creator check and the signature check pass
    """
    signature = activity.signature
    if not signature:
        return False

    # Signed by a different key: reject before doing any crypto work.
    if signature.get("creator") != actor.key_id:
        return False

    return verify_signature(activity, actor.public_key)
|
||||
26
artdag/core/artdag/analysis/__init__.py
Normal file
26
artdag/core/artdag/analysis/__init__.py
Normal file
@@ -0,0 +1,26 @@
|
||||
# artdag/analysis - Audio and video feature extraction
#
# Provides the Analysis phase of the 3-phase execution model:
# 1. ANALYZE - Extract features from inputs
# 2. PLAN - Generate execution plan with cache IDs
# 3. EXECUTE - Run steps with caching

# Re-export the result schema and the analyzer entry point so callers can
# import everything from the package root.
from .schema import (
    AnalysisResult,
    AudioFeatures,
    VideoFeatures,
    BeatInfo,
    EnergyEnvelope,
    SpectrumBands,
)
from .analyzer import Analyzer

# Explicit public API of the analysis package.
__all__ = [
    "Analyzer",
    "AnalysisResult",
    "AudioFeatures",
    "VideoFeatures",
    "BeatInfo",
    "EnergyEnvelope",
    "SpectrumBands",
]
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user