Import L1 (celery) as l1/

This commit is contained in:
giles
2026-02-24 23:07:19 +00:00
225 changed files with 57298 additions and 0 deletions

22
l1/.dockerignore Normal file
View File

@@ -0,0 +1,22 @@
# Don't copy local clones - Dockerfile will clone fresh
artdag-effects/
# Python cache
__pycache__/
*.py[cod]
*.egg-info/
.pytest_cache/
# Virtual environments
.venv/
venv/
# Local env
.env
# Git
.git/
# IDE
.vscode/
.idea/

20
l1/.env.example Normal file
View File

@@ -0,0 +1,20 @@
# L1 Server Configuration
# PostgreSQL password (REQUIRED - no default)
POSTGRES_PASSWORD=changeme-generate-with-openssl-rand-hex-16
# Admin token for purge operations (REQUIRED - no default)
# Generate with: openssl rand -hex 32
ADMIN_TOKEN=changeme-generate-with-openssl-rand-hex-32
# L1 host IP/hostname for GPU worker cross-VPC access
L1_HOST=your-l1-server-ip
# This L1 server's public URL (sent to L2 when publishing)
L1_PUBLIC_URL=https://l1.artdag.rose-ash.com
# L2 server URL (for authentication and publishing)
L2_SERVER=https://artdag.rose-ash.com
# L2 domain for ActivityPub actor IDs (e.g., @user@domain)
L2_DOMAIN=artdag.rose-ash.com

11
l1/.env.gpu Normal file
View File

@@ -0,0 +1,11 @@
# GPU worker env - connects to L1 host via public IP (cross-VPC)
REDIS_URL=redis://138.68.142.139:16379/5
# SECURITY(review): a plaintext database password is committed here in version
# control — rotate this credential and inject it via a secret store or deploy-time
# environment instead of committing .env.gpu.
DATABASE_URL=postgresql://artdag:f960bcc61d8b2155a1d57f7dd72c1c58@138.68.142.139:15432/artdag
IPFS_API=/ip4/138.68.142.139/tcp/15001
IPFS_GATEWAYS=https://ipfs.io,https://cloudflare-ipfs.com,https://dweb.link
IPFS_GATEWAY_URL=https://celery-artdag.rose-ash.com/ipfs
CACHE_DIR=/data/cache
C_FORCE_ROOT=true
ARTDAG_CLUSTER_KEY=
NVIDIA_VISIBLE_DEVICES=all
STREAMING_GPU_PERSIST=0

View File

@@ -0,0 +1,63 @@
name: Build and Deploy

# NOTE(review): there is no branch filter — a push to *any* branch builds and
# deploys straight to production. Confirm this is intended; otherwise restrict
# with `branches: [main]` under `push:`.
on:
  push:

env:
  REGISTRY: registry.rose-ash.com:5000
  IMAGE_CPU: celery-l1-server

jobs:
  # Single job: the runner only installs an SSH client; all git/docker work
  # happens on the deploy host itself over SSH (as root — NOTE(review):
  # consider a dedicated deploy user).
  build-and-deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install tools
        run: |
          apt-get update && apt-get install -y --no-install-recommends openssh-client

      - name: Set up SSH
        env:
          SSH_KEY: ${{ secrets.DEPLOY_SSH_KEY }}
          DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
        run: |
          mkdir -p ~/.ssh
          echo "$SSH_KEY" > ~/.ssh/id_rsa
          chmod 600 ~/.ssh/id_rsa
          # Trust-on-first-use: failures are ignored, so a MITM at scan time
          # would be silently trusted. Consider pinning the host key in a secret.
          ssh-keyscan -H "$DEPLOY_HOST" >> ~/.ssh/known_hosts 2>/dev/null || true

      - name: Pull latest code on server
        env:
          DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
          BRANCH: ${{ github.ref_name }}
        run: |
          # $BRANCH expands on the runner (inside the double-quoted command)
          # before being sent to the server — intentional: BRANCH is runner-side.
          ssh "root@$DEPLOY_HOST" "
          cd /root/art-dag/celery
          git fetch origin $BRANCH
          git checkout $BRANCH
          git reset --hard origin/$BRANCH
          "

      - name: Build and push image
        env:
          DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
        run: |
          # CACHEBUST forces a fresh effects-repo clone inside the Dockerfile.
          # The \$ keeps $(date +%s) from expanding on the runner; it runs on
          # the deploy host.
          ssh "root@$DEPLOY_HOST" "
          cd /root/art-dag/celery
          docker build --build-arg CACHEBUST=\$(date +%s) -t ${{ env.REGISTRY }}/${{ env.IMAGE_CPU }}:latest -t ${{ env.REGISTRY }}/${{ env.IMAGE_CPU }}:${{ github.sha }} .
          docker push ${{ env.REGISTRY }}/${{ env.IMAGE_CPU }}:latest
          docker push ${{ env.REGISTRY }}/${{ env.IMAGE_CPU }}:${{ github.sha }}
          "

      - name: Deploy stack
        env:
          DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
        run: |
          # Swarm rolling update; the sleep is a crude settle delay before
          # printing service status (not a health gate).
          ssh "root@$DEPLOY_HOST" "
          cd /root/art-dag/celery
          docker stack deploy -c docker-compose.yml celery
          echo 'Waiting for services to update...'
          sleep 10
          docker stack services celery
          "

8
l1/.gitignore vendored Normal file
View File

@@ -0,0 +1,8 @@
__pycache__/
*.py[cod]
.pytest_cache/
*.egg-info/
.venv/
venv/
.env
# .env.gpu currently lives in the repo with live credentials (see l1/.env.gpu);
# once those are rotated and injected at deploy time, keep the file local only:
.env.gpu
artdag-effects/

31
l1/Dockerfile Normal file
View File

@@ -0,0 +1,31 @@
FROM python:3.11-slim

WORKDIR /app

# Install git (to clone the effects repo) and ffmpeg (for video transcoding);
# clean the apt lists in the same layer so they don't persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ffmpeg \
        git \
    && rm -rf /var/lib/apt/lists/*

# Install Python dependencies first so this layer stays cached until
# requirements.txt itself changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code (invalidates cache on any source change).
COPY . .

# BUGFIX: CACHEBUST was previously declared *before* `pip install`, so CI's
# --build-arg CACHEBUST=$(date +%s) forced a full dependency reinstall on
# every build. It only needs to invalidate the effects clone below, so it is
# declared as late as possible.
ARG CACHEBUST=1

# Clone effects repo — fresh on every build via CACHEBUST.
RUN git clone https://git.rose-ash.com/art-dag/effects.git /app/artdag-effects

# Build client tarball for download (script is part of the app source).
RUN ./build-client.sh

# Create the runtime cache directory (CACHE_DIR points here in Docker).
RUN mkdir -p /data/cache

ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    EFFECTS_PATH=/app/artdag-effects \
    PYTHONPATH=/app

# Default command runs the server. Exec form: python is PID 1 and receives
# SIGTERM from `docker stop`. NOTE(review): container runs as root — consider
# adding a non-root USER.
CMD ["python", "server.py"]

98
l1/Dockerfile.gpu Normal file
View File

@@ -0,0 +1,98 @@
# GPU-enabled worker image
# Multi-stage build: use devel image for compiling, runtime for final image

# Stage 1: Build decord with CUDA
FROM nvidia/cuda:12.1.1-cudnn8-devel-ubuntu22.04 AS builder

# Python 3.11 toolchain plus FFmpeg dev libraries required to compile decord;
# symlinks make `python3`/`python` resolve to 3.11 for the rest of the stage.
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3.11 \
    python3.11-venv \
    python3.11-dev \
    python3-pip \
    git \
    cmake \
    build-essential \
    pkg-config \
    libavcodec-dev \
    libavformat-dev \
    libavutil-dev \
    libavdevice-dev \
    libavfilter-dev \
    libswresample-dev \
    libswscale-dev \
    && rm -rf /var/lib/apt/lists/* \
    && ln -sf /usr/bin/python3.11 /usr/bin/python3 \
    && ln -sf /usr/bin/python3 /usr/bin/python

# Download Video Codec SDK headers for NVDEC/NVCUVID.
# NOTE(review): clone is unpinned — a header change upstream could break the
# decord build; consider pinning a tag.
RUN git clone https://github.com/FFmpeg/nv-codec-headers.git /tmp/nv-codec-headers && \
    cd /tmp/nv-codec-headers && make install && rm -rf /tmp/nv-codec-headers

# Create stub for libnvcuvid (real library comes from driver at runtime).
# The stub exists only to satisfy the linker during the decord build.
RUN echo 'void* __nvcuvid_stub__;' | gcc -shared -x c - -o /usr/local/cuda/lib64/libnvcuvid.so

# Build decord with CUDA support for Volta→Hopper (SM 70–90).
# pip --target lands the package in /decord-install so the runtime stage can
# COPY it without rebuilding.
RUN git clone --recursive https://github.com/dmlc/decord /tmp/decord && \
    cd /tmp/decord && \
    mkdir build && cd build && \
    cmake .. -DUSE_CUDA=ON -DCMAKE_BUILD_TYPE=Release \
    -DCMAKE_CUDA_ARCHITECTURES="70;75;80;86;89;90" && \
    make -j$(nproc) && \
    cd ../python && pip install --target=/decord-install .

# Stage 2: Runtime image
FROM nvidia/cuda:12.1.1-cudnn8-runtime-ubuntu22.04

WORKDIR /app

# Install Python 3.11 and system dependencies (ffmpeg for transcoding,
# git for the effects clone below).
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3.11 \
    python3.11-venv \
    python3-pip \
    git \
    ffmpeg \
    && rm -rf /var/lib/apt/lists/* \
    && ln -sf /usr/bin/python3.11 /usr/bin/python3 \
    && ln -sf /usr/bin/python3 /usr/bin/python

# Upgrade pip
RUN python3 -m pip install --upgrade pip

# Install CPU dependencies first (cached until requirements.txt changes)
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Install GPU-specific dependencies (CuPy for CUDA 12.x)
# NOTE(review): unpinned — consider pinning for reproducible builds.
RUN pip install --no-cache-dir cupy-cuda12x

# Install PyNvVideoCodec for zero-copy GPU encoding
RUN pip install --no-cache-dir PyNvVideoCodec

# Copy decord from builder stage. Ubuntu's system python3.11 resolves
# dist-packages, hence the target path — presumably this is the interpreter
# used at runtime; TODO confirm.
COPY --from=builder /decord-install /usr/local/lib/python3.11/dist-packages/
COPY --from=builder /tmp/decord/build/libdecord.so /usr/local/lib/
RUN ldconfig

# Clone effects repo (before COPY so it gets cached)
RUN git clone https://git.rose-ash.com/art-dag/effects.git /app/artdag-effects

# Copy application (this invalidates cache for any code change)
COPY . .

# Create cache directory
RUN mkdir -p /data/cache

ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
ENV EFFECTS_PATH=/app/artdag-effects
ENV PYTHONPATH=/app

# GPU persistence enabled - frames stay on GPU throughout pipeline
ENV STREAMING_GPU_PERSIST=1

# Preload libnvcuvid for decord NVDEC GPU decode.
# NOTE(review): assumes the NVIDIA container runtime mounts the driver's
# libnvcuvid.so at exactly this path on the target hosts — confirm.
ENV LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libnvcuvid.so

# Use cluster's public IPFS gateway for HLS segment URLs
ENV IPFS_GATEWAY_URL=https://celery-artdag.rose-ash.com/ipfs

# Default command runs celery worker on the gpu + default queues (exec form).
# Worker runs as root (C_FORCE_ROOT comes from the env file) —
# NOTE(review): consider a non-root USER.
CMD ["celery", "-A", "celery_app", "worker", "--loglevel=info", "-E", "-Q", "gpu,celery"]

329
l1/README.md Normal file
View File

@@ -0,0 +1,329 @@
# Art DAG L1 Server
L1 rendering server for the Art DAG system. Manages distributed rendering jobs via Celery workers with content-addressable caching and optional IPFS integration.
## Features
- **3-Phase Execution**: Analyze → Plan → Execute pipeline for recipe-based rendering
- **Content-Addressable Caching**: IPFS CIDs with deduplication
- **IPFS Integration**: Optional IPFS-primary mode for distributed storage
- **Storage Providers**: S3, IPFS, and local storage backends
- **DAG Visualization**: Interactive graph visualization of execution plans
- **SPA-Style Navigation**: Smooth URL-based navigation without full page reloads
- **L2 Federation**: Publish outputs to ActivityPub registry
## Dependencies
- **artdag** (GitHub): Core DAG execution engine
- **artdag-effects** (rose-ash): Effect implementations
- **artdag-common**: Shared templates and middleware
- **Redis**: Message broker, result backend, and run persistence
- **PostgreSQL**: Metadata storage
- **IPFS** (optional): Distributed content storage
## Quick Start
```bash
# Install dependencies
pip install -r requirements.txt
# Start Redis
redis-server
# Start a worker
celery -A celery_app worker --loglevel=info -E
# Start the L1 server
python server.py
```
## Docker Swarm Deployment
```bash
docker stack deploy -c docker-compose.yml artdag
```
The stack includes:
- **redis**: Message broker (Redis 7)
- **postgres**: Metadata database (PostgreSQL 16)
- **ipfs**: IPFS node (Kubo)
- **l1-server**: FastAPI web server
- **l1-worker**: Celery workers (2 replicas)
- **flower**: Celery task monitoring
## Configuration
### Environment Variables
| Variable | Default | Description |
|----------|---------|-------------|
| `HOST` | `0.0.0.0` | Server bind address |
| `PORT` | `8000` | Server port |
| `REDIS_URL` | `redis://localhost:6379/5` | Redis connection |
| `DATABASE_URL` | **(required)** | PostgreSQL connection |
| `CACHE_DIR` | `~/.artdag/cache` | Local cache directory |
| `IPFS_API` | `/dns/localhost/tcp/5001` | IPFS API multiaddr |
| `IPFS_GATEWAY_URL` | `https://ipfs.io/ipfs` | Public IPFS gateway |
| `IPFS_PRIMARY` | `false` | Enable IPFS-primary mode |
| `L1_PUBLIC_URL` | `http://localhost:8100` | Public URL for redirects |
| `L2_SERVER` | - | L2 ActivityPub server URL |
| `L2_DOMAIN` | - | L2 domain for federation |
| `ARTDAG_CLUSTER_KEY` | - | Cluster key for trust domains |
### IPFS-Primary Mode
When `IPFS_PRIMARY=true`, all content is stored on IPFS:
- Input files are added to IPFS on upload
- Analysis results stored as JSON on IPFS
- Execution plans stored on IPFS
- Step outputs pinned to IPFS
- Local cache becomes a read-through cache
This enables distributed execution across multiple L1 nodes sharing the same IPFS network.
## Web UI
| Path | Description |
|------|-------------|
| `/` | Home page with server info |
| `/runs` | View and manage rendering runs |
| `/run/{id}` | Run detail with tabs: Plan, Analysis, Artifacts |
| `/run/{id}/plan` | Interactive DAG visualization |
| `/run/{id}/analysis` | Audio/video analysis data |
| `/run/{id}/artifacts` | Cached step outputs |
| `/recipes` | Browse and run available recipes |
| `/recipe/{id}` | Recipe detail page |
| `/recipe/{id}/dag` | Recipe DAG visualization |
| `/media` | Browse cached media files |
| `/storage` | Manage storage providers |
| `/auth` | Receive auth token from L2 |
| `/logout` | Log out |
| `/download/client` | Download CLI client |
## API Reference
Interactive docs: http://localhost:8100/docs
### Runs
| Method | Path | Description |
|--------|------|-------------|
| POST | `/runs` | Start a rendering run |
| GET | `/runs` | List all runs (paginated) |
| GET | `/runs/{run_id}` | Get run status |
| DELETE | `/runs/{run_id}` | Delete a run |
| GET | `/api/run/{run_id}` | Get run as JSON |
| GET | `/api/run/{run_id}/plan` | Get execution plan JSON |
| GET | `/api/run/{run_id}/analysis` | Get analysis data JSON |
### Recipes
| Method | Path | Description |
|--------|------|-------------|
| POST | `/recipes/upload` | Upload recipe YAML |
| GET | `/recipes` | List recipes (paginated) |
| GET | `/recipes/{recipe_id}` | Get recipe details |
| DELETE | `/recipes/{recipe_id}` | Delete recipe |
| POST | `/recipes/{recipe_id}/run` | Execute recipe |
### Cache
| Method | Path | Description |
|--------|------|-------------|
| GET | `/cache/{cid}` | Get cached content (with preview) |
| GET | `/cache/{cid}/raw` | Download raw content |
| GET | `/cache/{cid}/mp4` | Get MP4 video |
| GET | `/cache/{cid}/meta` | Get content metadata |
| PATCH | `/cache/{cid}/meta` | Update metadata |
| POST | `/cache/{cid}/publish` | Publish to L2 |
| DELETE | `/cache/{cid}` | Delete from cache |
| POST | `/cache/import?path=` | Import local file |
| POST | `/cache/upload` | Upload file |
| GET | `/media` | Browse media gallery |
### IPFS
| Method | Path | Description |
|--------|------|-------------|
| GET | `/ipfs/{cid}` | Redirect to IPFS gateway |
| GET | `/ipfs/{cid}/raw` | Fetch raw content from IPFS |
### Storage Providers
| Method | Path | Description |
|--------|------|-------------|
| GET | `/storage` | List storage providers |
| POST | `/storage` | Add provider (form) |
| POST | `/storage/add` | Add provider (JSON) |
| GET | `/storage/{id}` | Get provider details |
| PATCH | `/storage/{id}` | Update provider |
| DELETE | `/storage/{id}` | Delete provider |
| POST | `/storage/{id}/test` | Test connection |
| GET | `/storage/type/{type}` | Get form for provider type |
### 3-Phase API
| Method | Path | Description |
|--------|------|-------------|
| POST | `/api/plan` | Generate execution plan |
| POST | `/api/execute` | Execute a plan |
| POST | `/api/run-recipe` | Full pipeline (analyze+plan+execute) |
### Authentication
| Method | Path | Description |
|--------|------|-------------|
| GET | `/auth` | Receive auth token from L2 |
| GET | `/logout` | Log out |
| POST | `/auth/revoke` | Revoke a specific token |
| POST | `/auth/revoke-user` | Revoke all user tokens |
## 3-Phase Execution
Recipes are executed in three phases:
### Phase 1: Analyze
Extract features from input files:
- **Audio/Video**: Tempo, beat times, energy levels
- Results cached by CID
### Phase 2: Plan
Generate an execution plan:
- Parse recipe YAML
- Resolve dependencies between steps
- Compute cache IDs for each step
- Skip already-cached steps
### Phase 3: Execute
Run the plan level by level:
- Steps at each level run in parallel
- Results cached with content-addressable hashes
- Progress tracked in Redis
## Recipe Format
Recipes define reusable DAG pipelines:
```yaml
name: beat-sync
version: "1.0"
description: "Synchronize video to audio beats"
inputs:
video:
type: video
description: "Source video"
audio:
type: audio
description: "Audio track"
steps:
- id: analyze_audio
type: ANALYZE
inputs: [audio]
config:
features: [beats, energy]
- id: sync_video
type: BEAT_SYNC
inputs: [video, analyze_audio]
config:
mode: stretch
output: sync_video
```
## Storage
### Local Cache
- Location: `~/.artdag/cache/` (or `CACHE_DIR`)
- Content-addressed by IPFS CID
- Subdirectories: `plans/`, `analysis/`
### Redis
- Database 5 (configurable via `REDIS_URL`)
- Keys:
- `artdag:run:*` - Run state
- `artdag:recipe:*` - Recipe definitions
- `artdag:revoked:*` - Token revocation
- `artdag:user_tokens:*` - User token tracking
### PostgreSQL
- Content metadata
- Storage provider configurations
- Provenance records
## Authentication
L1 servers authenticate via L2 (ActivityPub registry). No shared secrets required.
### Flow
1. User clicks "Attach" on L2's Renderers page
2. L2 creates a scoped token bound to this L1
3. User redirected to L1's `/auth?auth_token=...`
4. L1 calls L2's `/auth/verify` to validate
5. L1 sets local cookie and records token
### Token Revocation
- Tokens tracked per-user in Redis
- L2 calls `/auth/revoke-user` on logout
- Revoked hashes stored with 30-day expiry
- Every request checks revocation list
## CLI Usage
```bash
# Quick render (effect mode)
python render.py dog cat --sync
# Submit async
python render.py dog cat
# Run a recipe
curl -X POST http://localhost:8100/recipes/beat-sync/run \
-H "Content-Type: application/json" \
-H "Authorization: Bearer <token>" \
-d '{"inputs": {"video": "abc123...", "audio": "def456..."}}'
```
## Architecture
```
L1 Server (FastAPI)
├── Web UI (Jinja2 + HTMX + Tailwind)
├── POST /runs → Celery tasks
│ │
│ └── celery_app.py
│ ├── tasks/analyze.py (Phase 1)
│ ├── tasks/execute.py (Phase 3 steps)
│ └── tasks/orchestrate.py (Full pipeline)
├── cache_manager.py
│ │
│ ├── Local filesystem (CACHE_DIR)
│ ├── IPFS (ipfs_client.py)
│ └── S3/Storage providers
└── database.py (PostgreSQL metadata)
```
## Provenance
Every render produces a provenance record:
```json
{
"task_id": "celery-task-uuid",
"rendered_at": "2026-01-07T...",
"rendered_by": "@giles@artdag.rose-ash.com",
"output": {"name": "...", "cid": "Qm..."},
"inputs": [...],
"effects": [...],
"infrastructure": {
"software": {"name": "infra:artdag", "cid": "Qm..."},
"hardware": {"name": "infra:giles-hp", "cid": "Qm..."}
}
}
```

237
l1/app/__init__.py Normal file
View File

@@ -0,0 +1,237 @@
"""
Art-DAG L1 Server Application Factory.
Creates and configures the FastAPI application with all routers and middleware.
"""
import secrets
import time
from pathlib import Path
from urllib.parse import quote
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles
from artdag_common import create_jinja_env
from artdag_common.middleware.auth import get_user_from_cookie
from .config import settings
# Paths that should never trigger a silent auth check (static assets, APIs,
# federation inbox, health probes, and the auth flow itself).
_SKIP_PREFIXES = ("/auth/", "/static/", "/api/", "/ipfs/", "/download/", "/inbox", "/health", "/internal/", "/oembed")
_SILENT_CHECK_COOLDOWN = 300  # 5 minutes between silent prompt=none checks
_DEVICE_COOKIE = "artdag_did"  # opaque per-browser device identifier cookie
_DEVICE_COOKIE_MAX_AGE = 30 * 24 * 3600  # 30 days
# Derive external base URL from oauth_redirect_uri (e.g. https://celery-artdag.rose-ash.com).
# Assumes the redirect URI ends in "/auth/callback"; if it does not, rsplit
# leaves the URI unchanged and the derived base is wrong — TODO confirm config.
_EXTERNAL_BASE = settings.oauth_redirect_uri.rsplit("/auth/callback", 1)[0]


def _external_url(request: Request) -> str:
    """Build external URL from request path + query, using configured base domain.

    Used when redirecting through the OAuth provider so the `next` parameter
    points at the public hostname rather than the internal bind address.
    """
    url = f"{_EXTERNAL_BASE}{request.url.path}"
    if request.url.query:
        url += f"?{request.url.query}"
    return url
def create_app() -> FastAPI:
    """
    Create and configure the L1 FastAPI application.

    Wires up, in order: DB lifecycle hooks, three HTTP middlewares
    (silent auth, device-id, co-op fragment prefetch), Jinja templates,
    a 404 handler, and all feature routers.

    Middleware note: FastAPI/Starlette middlewares run in *reverse*
    registration order, so device_id_middleware (registered second) runs
    before silent_auth_check — silent_auth_check relies on
    request.state.device_id being set by it.

    Returns:
        Configured FastAPI instance
    """
    app = FastAPI(
        title="Art-DAG L1 Server",
        description="Content-addressed media processing with distributed execution",
        version="1.0.0",
    )

    # Database lifecycle events
    from database import init_db, close_db

    @app.on_event("startup")
    async def startup():
        await init_db()

    @app.on_event("shutdown")
    async def shutdown():
        await close_db()

    # Silent auth check — auto-login via prompt=none OAuth.
    # NOTE: registered BEFORE device_id so device_id is outermost (runs first).
    @app.middleware("http")
    async def silent_auth_check(request: Request, call_next):
        path = request.url.path
        # Only plain GET page loads are eligible for the silent check.
        if (
            request.method != "GET"
            or any(path.startswith(p) for p in _SKIP_PREFIXES)
            or request.headers.get("hx-request")  # skip HTMX
        ):
            return await call_next(request)
        # Already logged in — but verify account hasn't logged out elsewhere
        # (Redis key did_auth:<device_id> is the liveness signal).
        if get_user_from_cookie(request):
            device_id = getattr(request.state, "device_id", None)
            if device_id:
                try:
                    from .dependencies import get_redis_client
                    r = get_redis_client()
                    if not r.get(f"did_auth:{device_id}"):
                        # Account logged out — clear our cookie
                        response = await call_next(request)
                        response.delete_cookie("artdag_session")
                        response.delete_cookie("pnone_at")
                        return response
                except Exception:
                    # Best-effort: Redis being down must not block the page.
                    pass
            return await call_next(request)
        # Check cooldown — don't re-check within 5 minutes.
        # pnone_at cookie holds the unix timestamp of the last silent check.
        pnone_at = request.cookies.get("pnone_at")
        if pnone_at:
            try:
                pnone_ts = float(pnone_at)
                if (time.time() - pnone_ts) < _SILENT_CHECK_COOLDOWN:
                    # But first check if account signalled a login via inbox delivery
                    device_id = getattr(request.state, "device_id", None)
                    if device_id:
                        try:
                            from .dependencies import get_redis_client
                            r = get_redis_client()
                            auth_ts = r.get(f"did_auth:{device_id}")
                            if auth_ts and float(auth_ts) > pnone_ts:
                                # Login happened since our last check — retry
                                current_url = _external_url(request)
                                return RedirectResponse(
                                    url=f"/auth/login?prompt=none&next={quote(current_url, safe='')}",
                                    status_code=302,
                                )
                        except Exception:
                            pass
                    return await call_next(request)
            except (ValueError, TypeError):
                # Malformed cookie value — fall through to a fresh check.
                pass
        # Redirect to silent OAuth check
        current_url = _external_url(request)
        return RedirectResponse(
            url=f"/auth/login?prompt=none&next={quote(current_url, safe='')}",
            status_code=302,
        )

    # Device ID middleware — track browser identity across domains.
    # Registered AFTER silent_auth_check so it's outermost (always runs).
    @app.middleware("http")
    async def device_id_middleware(request: Request, call_next):
        did = request.cookies.get(_DEVICE_COOKIE)
        if did:
            request.state.device_id = did
            request.state._new_device_id = False
        else:
            # First visit from this browser: mint an id and set it below.
            request.state.device_id = secrets.token_urlsafe(32)
            request.state._new_device_id = True
        response = await call_next(request)
        if getattr(request.state, "_new_device_id", False):
            response.set_cookie(
                key=_DEVICE_COOKIE,
                value=request.state.device_id,
                max_age=_DEVICE_COOKIE_MAX_AGE,
                httponly=True,
                samesite="lax",
                secure=True,
            )
        return response

    # Coop fragment pre-fetch — inject nav-tree, auth-menu, cart-mini into
    # request.state for full-page HTML renders. Skips HTMX, API, and
    # internal paths. Failures are silent (fragments default to "").
    _FRAG_SKIP = ("/auth/", "/api/", "/internal/", "/health", "/oembed",
    "/ipfs/", "/download/", "/inbox", "/static/")

    @app.middleware("http")
    async def coop_fragments_middleware(request: Request, call_next):
        path = request.url.path
        # NOTE: `fragments` here is the router module bound later in this
        # function (the `from .routers import ...` below); the closure cell is
        # populated by the time requests are served.
        if (
            request.method != "GET"
            or any(path.startswith(p) for p in _FRAG_SKIP)
            or request.headers.get("hx-request")
            or request.headers.get(fragments.FRAGMENT_HEADER)
        ):
            request.state.nav_tree_html = ""
            request.state.auth_menu_html = ""
            request.state.cart_mini_html = ""
            return await call_next(request)
        from artdag_common.fragments import fetch_fragments as _fetch_frags
        user = get_user_from_cookie(request)
        auth_params = {"email": user.email} if user and user.email else {}
        nav_params = {"app_name": "artdag", "path": path}
        try:
            nav_tree_html, auth_menu_html, cart_mini_html = await _fetch_frags([
                ("blog", "nav-tree", nav_params),
                ("account", "auth-menu", auth_params or None),
                ("cart", "cart-mini", None),
            ])
        except Exception:
            # Fragment services unavailable — render the page without them.
            nav_tree_html = auth_menu_html = cart_mini_html = ""
        request.state.nav_tree_html = nav_tree_html
        request.state.auth_menu_html = auth_menu_html
        request.state.cart_mini_html = cart_mini_html
        return await call_next(request)

    # Initialize Jinja2 templates
    template_dir = Path(__file__).parent / "templates"
    app.state.templates = create_jinja_env(template_dir)

    # Custom 404 handler: HTML page for browsers, JSON for API clients.
    @app.exception_handler(404)
    async def not_found_handler(request: Request, exc):
        from artdag_common.middleware import wants_html
        if wants_html(request):
            from artdag_common import render
            return render(app.state.templates, "404.html", request,
            user=None,
            status_code=404,
            )
        return JSONResponse({"detail": "Not found"}, status_code=404)

    # Include routers
    from .routers import auth, storage, api, recipes, cache, runs, home, effects, inbox, fragments, oembed
    # Home and auth routers (root level)
    app.include_router(home.router, tags=["home"])
    app.include_router(auth.router, prefix="/auth", tags=["auth"])
    app.include_router(inbox.router, tags=["inbox"])
    app.include_router(fragments.router, tags=["fragments"])
    app.include_router(oembed.router, tags=["oembed"])
    # Feature routers
    app.include_router(storage.router, prefix="/storage", tags=["storage"])
    app.include_router(api.router, prefix="/api", tags=["api"])
    # Runs and recipes routers
    app.include_router(runs.router, prefix="/runs", tags=["runs"])
    app.include_router(recipes.router, prefix="/recipes", tags=["recipes"])
    # Cache router - handles /cache and /media
    app.include_router(cache.router, prefix="/cache", tags=["cache"])
    # Also mount cache router at /media for convenience
    app.include_router(cache.router, prefix="/media", tags=["media"])
    # Effects router
    app.include_router(effects.router, prefix="/effects", tags=["effects"])
    return app


# Create the default app instance
app = create_app()

116
l1/app/config.py Normal file
View File

@@ -0,0 +1,116 @@
"""
L1 Server Configuration.
Environment-based configuration with sensible defaults.
All config should go through this module - no direct os.environ calls elsewhere.
"""
import os
import sys
from pathlib import Path
from dataclasses import dataclass, field
from typing import Optional
@dataclass
class Settings:
"""Application settings loaded from environment."""
# Server
host: str = field(default_factory=lambda: os.environ.get("HOST", "0.0.0.0"))
port: int = field(default_factory=lambda: int(os.environ.get("PORT", "8000")))
debug: bool = field(default_factory=lambda: os.environ.get("DEBUG", "").lower() == "true")
# Cache (use /data/cache in Docker via env var, ~/.artdag/cache locally)
cache_dir: Path = field(
default_factory=lambda: Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
)
# Redis
redis_url: str = field(
default_factory=lambda: os.environ.get("REDIS_URL", "redis://localhost:6379/5")
)
# Database
database_url: str = field(
default_factory=lambda: os.environ.get("DATABASE_URL", "")
)
# IPFS
ipfs_api: str = field(
default_factory=lambda: os.environ.get("IPFS_API", "/dns/localhost/tcp/5001")
)
ipfs_gateway_url: str = field(
default_factory=lambda: os.environ.get("IPFS_GATEWAY_URL", "https://ipfs.io/ipfs")
)
# OAuth SSO (replaces L2 auth)
oauth_authorize_url: str = field(
default_factory=lambda: os.environ.get("OAUTH_AUTHORIZE_URL", "https://account.rose-ash.com/auth/oauth/authorize")
)
oauth_token_url: str = field(
default_factory=lambda: os.environ.get("OAUTH_TOKEN_URL", "https://account.rose-ash.com/auth/oauth/token")
)
oauth_client_id: str = field(
default_factory=lambda: os.environ.get("OAUTH_CLIENT_ID", "artdag")
)
oauth_redirect_uri: str = field(
default_factory=lambda: os.environ.get("OAUTH_REDIRECT_URI", "https://celery-artdag.rose-ash.com/auth/callback")
)
oauth_logout_url: str = field(
default_factory=lambda: os.environ.get("OAUTH_LOGOUT_URL", "https://account.rose-ash.com/auth/sso-logout/")
)
secret_key: str = field(
default_factory=lambda: os.environ.get("SECRET_KEY", "change-me-in-production")
)
# GPU/Streaming settings
streaming_gpu_persist: bool = field(
default_factory=lambda: os.environ.get("STREAMING_GPU_PERSIST", "0") == "1"
)
ipfs_gateways: str = field(
default_factory=lambda: os.environ.get(
"IPFS_GATEWAYS", "https://ipfs.io,https://cloudflare-ipfs.com,https://dweb.link"
)
)
# Derived paths
@property
def plan_cache_dir(self) -> Path:
return self.cache_dir / "plans"
@property
def analysis_cache_dir(self) -> Path:
return self.cache_dir / "analysis"
def ensure_dirs(self) -> None:
"""Create required directories."""
self.cache_dir.mkdir(parents=True, exist_ok=True)
self.plan_cache_dir.mkdir(parents=True, exist_ok=True)
self.analysis_cache_dir.mkdir(parents=True, exist_ok=True)
def log_config(self, logger=None) -> None:
"""Log all configuration values for debugging."""
output = logger.info if logger else lambda x: print(x, file=sys.stderr)
output("=" * 60)
output("CONFIGURATION")
output("=" * 60)
output(f" cache_dir: {self.cache_dir}")
output(f" redis_url: {self.redis_url}")
output(f" database_url: {self.database_url[:50]}...")
output(f" ipfs_api: {self.ipfs_api}")
output(f" ipfs_gateway_url: {self.ipfs_gateway_url}")
output(f" ipfs_gateways: {self.ipfs_gateways[:50]}...")
output(f" streaming_gpu_persist: {self.streaming_gpu_persist}")
output(f" oauth_client_id: {self.oauth_client_id}")
output(f" oauth_authorize_url: {self.oauth_authorize_url}")
output("=" * 60)
# Singleton settings instance
settings = Settings()
# Log config on import if DEBUG or SHOW_CONFIG is set
if os.environ.get("DEBUG") or os.environ.get("SHOW_CONFIG"):
settings.log_config()

186
l1/app/dependencies.py Normal file
View File

@@ -0,0 +1,186 @@
"""
FastAPI dependency injection container.
Provides shared resources and services to route handlers.
"""
from functools import lru_cache
from typing import Optional
import asyncio
from fastapi import Request, Depends, HTTPException
from jinja2 import Environment
from artdag_common.middleware.auth import UserContext, get_user_from_cookie, get_user_from_header
from .config import settings
# Lazy singletons — imports are deferred to first use to avoid circular
# dependencies, and so that importing this module has no side effects
# (no connections are opened at import time).
_redis_client = None
_cache_manager = None
_database = None
def get_redis_client():
    """Return the shared Redis client, creating it lazily on first use."""
    global _redis_client
    if _redis_client is not None:
        return _redis_client
    import redis
    _redis_client = redis.from_url(settings.redis_url, decode_responses=True)
    return _redis_client
def get_cache_manager():
    """Return the cache-manager singleton, constructing it on first use."""
    global _cache_manager
    if _cache_manager is not None:
        return _cache_manager
    from cache_manager import get_cache_manager as _factory
    _cache_manager = _factory()
    return _cache_manager
def get_database():
    """Return the database module singleton (imported lazily)."""
    global _database
    if _database is not None:
        return _database
    import database
    _database = database
    return _database
def get_templates(request: Request) -> Environment:
    """Fetch the Jinja2 environment that create_app() stored on app state."""
    env = request.app.state.templates
    return env
async def get_current_user(request: Request) -> Optional[UserContext]:
    """
    Resolve the requesting user, or None when unauthenticated.

    Header credentials (API clients) take precedence over the browser
    cookie. Permissive by design — use require_auth for routes that must
    be authenticated.
    """
    header_ctx = get_user_from_header(request)
    return header_ctx if header_ctx else get_user_from_cookie(request)
async def require_auth(request: Request) -> UserContext:
    """
    Dependency that rejects unauthenticated requests.

    Raises:
        HTTPException: 302 with a Location header pointing at /auth/login
            for browser (HTML-accepting) requests; 401 otherwise.
    """
    ctx = await get_current_user(request)
    if ctx is not None:
        return ctx
    wants_html_page = "text/html" in request.headers.get("accept", "")
    if wants_html_page:
        raise HTTPException(
            status_code=302,
            headers={"Location": "/auth/login"}
        )
    raise HTTPException(status_code=401, detail="Authentication required")
async def get_user_context_from_cookie(request: Request) -> Optional[UserContext]:
    """
    Legacy compatibility shim: resolve the user from the session cookie only.

    The cookie is currently trusted as-is; validation against a configured
    L2 server could be added here later.
    """
    return get_user_from_cookie(request)
# Service dependencies (lazy loading)
def get_run_service():
    """Construct a RunService wired to the shared db/redis/cache singletons."""
    from .services.run_service import RunService
    deps = {
        "database": get_database(),
        "redis": get_redis_client(),
        "cache": get_cache_manager(),
    }
    return RunService(**deps)
def get_recipe_service():
    """Construct a RecipeService over the cache manager."""
    from .services.recipe_service import RecipeService
    redis_client = get_redis_client()  # kept for API compatibility, not used by the service
    cache = get_cache_manager()
    return RecipeService(redis=redis_client, cache=cache)
def get_cache_service():
    """Construct a CacheService over the cache manager and database."""
    from .services.cache_service import CacheService
    manager = get_cache_manager()
    db = get_database()
    return CacheService(cache_manager=manager, database=db)
async def get_nav_counts(actor_id: Optional[str] = None) -> dict:
    """
    Collect item counts for the navigation bar.

    Each count is gathered independently and best-effort: a failure in one
    backend must not break page rendering, so errors are swallowed and the
    count falls back to 0.

    BUGFIX: previously a failed lookup silently *omitted* its key from the
    returned dict, which could KeyError in consumers; all keys are now
    initialized to 0 up front so they are always present.

    Args:
        actor_id: Optional actor identifier; per-user counts are 0 when absent.

    Returns:
        Dict with keys: media, recipes, runs, effects, storage.
    """
    counts = {"media": 0, "recipes": 0, "runs": 0, "effects": 0, "storage": 0}
    try:
        import database
        counts["media"] = await database.count_user_items(actor_id) if actor_id else 0
    except Exception:
        pass
    try:
        recipe_service = get_recipe_service()
        recipes = await recipe_service.list_recipes(actor_id)
        counts["recipes"] = len(recipes)
    except Exception:
        pass
    try:
        run_service = get_run_service()
        runs = await run_service.list_runs(actor_id)
        counts["runs"] = len(runs)
    except Exception:
        pass
    try:
        # Effects are stored in the cache dir's _effects/ subdirectory,
        # one directory per effect — not in the content cache itself.
        from pathlib import Path
        cache_mgr = get_cache_manager()
        effects_dir = Path(cache_mgr.cache_dir) / "_effects"
        if effects_dir.exists():
            counts["effects"] = len([d for d in effects_dir.iterdir() if d.is_dir()])
    except Exception:
        pass
    try:
        import database
        storage_providers = await database.get_user_storage_providers(actor_id) if actor_id else []
        counts["storage"] = len(storage_providers) if storage_providers else 0
    except Exception:
        pass
    return counts

View File

@@ -0,0 +1,10 @@
"""
L1 Server Repositories.
Data access layer for persistence operations.
"""
# TODO: Implement repositories
# - RunRepository - Redis-backed run storage
# - RecipeRepository - Redis-backed recipe storage
# - CacheRepository - Filesystem + PostgreSQL cache metadata

View File

@@ -0,0 +1,23 @@
"""
L1 Server Routers.
Each router handles a specific domain of functionality.
"""
from . import auth
from . import storage
from . import api
from . import recipes
from . import cache
from . import runs
from . import home
__all__ = [
"auth",
"storage",
"api",
"recipes",
"cache",
"runs",
"home",
]

257
l1/app/routers/api.py Normal file
View File

@@ -0,0 +1,257 @@
"""
3-phase API routes for L1 server.
Provides the plan/execute/run-recipe endpoints for programmatic access.
"""
import hashlib
import json
import logging
import uuid
from datetime import datetime, timezone
from typing import Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from artdag_common.middleware.auth import UserContext
from ..dependencies import require_auth, get_redis_client, get_cache_manager
router = APIRouter()
logger = logging.getLogger(__name__)
# Redis key prefix
RUNS_KEY_PREFIX = "artdag:run:"
class PlanRequest(BaseModel):
    """Request body for POST /plan: recipe source plus its named input hashes."""
    # S-expression source of the recipe to analyze/plan.
    recipe_sexp: str
    # Maps input name -> content hash.
    input_hashes: Dict[str, str]
class ExecutePlanRequest(BaseModel):
    """Request body for POST /execute: a pre-generated plan to run."""
    # JSON-serialized plan produced by the /plan endpoint.
    plan_json: str
    # Optional caller-supplied run id; a random UUID is minted when absent.
    run_id: Optional[str] = None
class RecipeRunRequest(BaseModel):
    """Request body for POST /run-recipe: full 3-phase execution."""
    # S-expression source of the recipe to run.
    recipe_sexp: str
    # Maps input name -> content hash.
    input_hashes: Dict[str, str]
def compute_run_id(input_hashes: List[str], recipe: str, recipe_hash: Optional[str] = None) -> str:
    """
    Compute a deterministic run_id from inputs and recipe.

    Input hashes are sorted so the id is independent of argument order, and
    the JSON encoding uses sorted keys with compact separators so the
    serialization (and therefore the digest) is canonical.

    Args:
        input_hashes: Content hashes of the recipe inputs.
        recipe: Effect/recipe name, used when recipe_hash is not given.
        recipe_hash: Optional hash of the full recipe source; preferred over
            the name-derived "effect:<name>" identifier when provided.
            (Annotation fixed to Optional[str] — the old bare `str = None`
            violated PEP 484 implicit-Optional rules.)

    Returns:
        Hex-encoded SHA3-256 digest (64 characters).
    """
    data = {
        "inputs": sorted(input_hashes),
        "recipe": recipe_hash or f"effect:{recipe}",
        "version": "1",
    }
    json_str = json.dumps(data, sort_keys=True, separators=(",", ":"))
    return hashlib.sha3_256(json_str.encode()).hexdigest()
@router.post("/plan")
async def generate_plan_endpoint(
    request: PlanRequest,
    ctx: UserContext = Depends(require_auth),
):
    """
    Generate an execution plan without executing it.

    Phase 1 (Analyze) + Phase 2 (Plan) of the 3-phase model. Returns the
    plan with cache status for each step; failures surface as HTTP 500.
    """
    from tasks.orchestrate import generate_plan
    try:
        async_result = generate_plan.delay(
            recipe_sexp=request.recipe_sexp,
            input_hashes=request.input_hashes,
        )
        # Plan generation is usually fast, so block on the Celery result.
        plan = async_result.get(timeout=60)
        # Echo back only the documented subset of the task result.
        fields = ("status", "recipe", "plan_id", "total_steps",
                  "cached_steps", "pending_steps", "steps")
        return {key: plan.get(key) for key in fields}
    except Exception as e:
        logger.error(f"Plan generation failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/execute")
async def execute_plan_endpoint(
    request: ExecutePlanRequest,
    ctx: UserContext = Depends(require_auth),
):
    """
    Execute a pre-generated execution plan.

    Phase 3 (Execute) of the 3-phase model: submits the plan to Celery for
    parallel execution and returns immediately with the run/task ids.
    """
    from tasks.orchestrate import run_plan
    # Honor a caller-supplied run id; otherwise mint a random one.
    run_id = request.run_id if request.run_id else str(uuid.uuid4())
    try:
        submitted = run_plan.delay(plan_json=request.plan_json, run_id=run_id)
        return {
            "status": "submitted",
            "run_id": run_id,
            "celery_task_id": submitted.id,
        }
    except Exception as e:
        logger.error(f"Plan execution failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/run-recipe")
async def run_recipe_endpoint(
    request: RecipeRunRequest,
    ctx: UserContext = Depends(require_auth),
):
    """
    Run a complete recipe through all 3 phases.

    1. Analyze: Extract features from inputs
    2. Plan: Generate execution plan with cache IDs
    3. Execute: Run steps with parallel execution

    Returns immediately with run_id. Poll /api/run/{run_id} for status.
    The run_id is deterministic (inputs + recipe-source hash), so
    re-submitting the same recipe with the same inputs hits the run cache.
    """
    from tasks.orchestrate import run_recipe
    from artdag.sexp import compile_string
    import database
    redis = get_redis_client()
    cache = get_cache_manager()
    # Parse recipe name from S-expression; the name is informational only
    # (status display), so any parse failure falls back to "unknown".
    try:
        compiled = compile_string(request.recipe_sexp)
        recipe_name = compiled.name or "unknown"
    except Exception:
        recipe_name = "unknown"
    # Compute deterministic run_id from the input hashes plus the SHA3-256
    # of the full recipe source (see compute_run_id).
    run_id = compute_run_id(
        list(request.input_hashes.values()),
        recipe_name,
        hashlib.sha3_256(request.recipe_sexp.encode()).hexdigest()
    )
    # Fast path: if this exact run already completed AND its output is still
    # present in the local cache, answer without touching Celery.
    cached = await database.get_run_cache(run_id)
    if cached:
        output_cid = cached.get("output_cid")
        if cache.has_content(output_cid):
            return {
                "status": "completed",
                "run_id": run_id,
                "output_cid": output_cid,
                "output_ipfs_cid": cache.get_ipfs_cid(output_cid),
                "cached": True,
            }
    # Submit to Celery for asynchronous execution.
    try:
        task = run_recipe.delay(
            recipe_sexp=request.recipe_sexp,
            input_hashes=request.input_hashes,
            run_id=run_id,
        )
        # Store run status in Redis with a 24h TTL so /run/{run_id} can
        # report progress; after expiry, status falls back to the DB cache.
        run_data = {
            "run_id": run_id,
            "status": "pending",
            "recipe": recipe_name,
            "inputs": list(request.input_hashes.values()),
            "celery_task_id": task.id,
            "created_at": datetime.now(timezone.utc).isoformat(),
            "username": ctx.actor_id,
        }
        redis.setex(
            f"{RUNS_KEY_PREFIX}{run_id}",
            86400,  # 24 hours
            json.dumps(run_data)
        )
        return {
            "status": "submitted",
            "run_id": run_id,
            "celery_task_id": task.id,
            "recipe": recipe_name,
        }
    except Exception as e:
        logger.error(f"Recipe run failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/run/{run_id}")
async def get_run_status(
    run_id: str,
    ctx: UserContext = Depends(require_auth),
):
    """
    Get status of a recipe execution run.

    Resolution order: the Redis run record (live/pending runs, 24h TTL),
    then the durable database run cache (completed runs whose Redis entry
    has expired). Raises 404 if neither source knows the run_id.
    """
    import database
    from celery.result import AsyncResult
    redis = get_redis_client()
    # Check Redis for run status
    run_data = redis.get(f"{RUNS_KEY_PREFIX}{run_id}")
    if run_data:
        data = json.loads(run_data)
        # If still marked pending, poll the Celery task for fresh state.
        if data.get("status") == "pending" and data.get("celery_task_id"):
            result = AsyncResult(data["celery_task_id"])
            if result.ready():
                if result.successful():
                    task_result = result.get()
                    data["status"] = task_result.get("status", "completed")
                    data["output_cid"] = task_result.get("output_cache_id")
                    data["output_ipfs_cid"] = task_result.get("output_ipfs_cid")
                    data["total_steps"] = task_result.get("total_steps")
                    data["cached"] = task_result.get("cached")
                    data["executed"] = task_result.get("executed")
                    # Persist the resolved state back to Redis (fresh 24h TTL)
                    # so later polls skip the Celery round-trip.
                    redis.setex(
                        f"{RUNS_KEY_PREFIX}{run_id}",
                        86400,
                        json.dumps(data)
                    )
                else:
                    # Task raised; result.result holds the exception object.
                    # NOTE(review): the failed state is not written back to
                    # Redis, so each poll recomputes it — confirm intended.
                    data["status"] = "failed"
                    data["error"] = str(result.result)
            else:
                # Not finished yet — surface Celery's own state string.
                data["celery_status"] = result.status
        return data
    # Redis entry expired — fall back to the durable database cache.
    cached = await database.get_run_cache(run_id)
    if cached:
        return {
            "run_id": run_id,
            "status": "completed",
            "output_cid": cached.get("output_cid"),
            "cached": True,
        }
    raise HTTPException(status_code=404, detail="Run not found")

165
l1/app/routers/auth.py Normal file
View File

@@ -0,0 +1,165 @@
"""
Authentication routes — OAuth2 authorization code flow via account.rose-ash.com.
GET /auth/login — redirect to account OAuth authorize
GET /auth/callback — exchange code for user info, set session cookie
GET /auth/logout — clear cookie, redirect through account SSO logout
"""
import secrets
import time
import httpx
from fastapi import APIRouter, Request
from fastapi.responses import RedirectResponse
from itsdangerous import URLSafeSerializer
from artdag_common.middleware.auth import UserContext, set_auth_cookie, clear_auth_cookie
from ..config import settings
router = APIRouter()
_signer = None
def _get_signer() -> URLSafeSerializer:
global _signer
if _signer is None:
_signer = URLSafeSerializer(settings.secret_key, salt="oauth-state")
return _signer
@router.get("/login")
async def login(request: Request):
    """
    Start the OAuth2 authorization-code flow.

    Stores state + next + prompt in a signed, short-lived cookie, then
    redirects the browser to the account server's authorize endpoint.
    """
    from urllib.parse import urlencode

    next_url = request.query_params.get("next", "/")
    prompt = request.query_params.get("prompt", "")
    # CSRF protection: the random state round-trips through the provider
    # and is validated in /auth/callback against the signed cookie copy.
    state = secrets.token_urlsafe(32)
    signer = _get_signer()
    state_payload = signer.dumps({"state": state, "next": next_url, "prompt": prompt})
    device_id = getattr(request.state, "device_id", "")
    # Percent-encode every query parameter. The previous f-string
    # interpolation sent redirect_uri (and any reserved characters in other
    # params) unencoded, which RFC 6749 / RFC 3986 forbid and which can
    # corrupt the authorize URL.
    params = {
        "client_id": settings.oauth_client_id,
        "redirect_uri": settings.oauth_redirect_uri,
        "device_id": device_id,
        "state": state,
    }
    if prompt:
        params["prompt"] = prompt
    authorize_url = f"{settings.oauth_authorize_url}?{urlencode(params)}"
    response = RedirectResponse(url=authorize_url, status_code=302)
    response.set_cookie(
        key="oauth_state",
        value=state_payload,
        max_age=600,  # 10 minutes — enough to complete the round-trip
        httponly=True,
        samesite="lax",
        secure=True,
    )
    return response
@router.get("/callback")
async def callback(request: Request):
    """
    OAuth2 callback: validate state, exchange the code via the account
    token endpoint, and set the session cookie.

    Every failure path redirects (to next_url or "/") rather than erroring,
    so the browser never lands on a dead page mid-login.
    """
    code = request.query_params.get("code", "")
    state = request.query_params.get("state", "")
    error = request.query_params.get("error", "")
    account_did = request.query_params.get("account_did", "")
    # Adopt account's device ID as our own (one identity across all apps)
    if account_did:
        request.state.device_id = account_did
        request.state._new_device_id = True  # device_id middleware will set cookie
    # Recover state from signed cookie; a bad/forged cookie degrades to {}.
    state_cookie = request.cookies.get("oauth_state", "")
    signer = _get_signer()
    try:
        payload = signer.loads(state_cookie) if state_cookie else {}
    except Exception:
        payload = {}
    next_url = payload.get("next", "/")
    # Handle prompt=none rejection (user not logged in on account)
    if error == "login_required":
        response = RedirectResponse(url=next_url, status_code=302)
        response.delete_cookie("oauth_state")
        # Set cooldown cookie — don't re-check for 5 minutes
        response.set_cookie(
            key="pnone_at",
            value=str(time.time()),
            max_age=300,
            httponly=True,
            samesite="lax",
            secure=True,
        )
        # Set device cookie if adopted
        if account_did:
            response.set_cookie(
                key="artdag_did",
                value=account_did,
                max_age=30 * 24 * 3600,  # 30 days
                httponly=True,
                samesite="lax",
                secure=True,
            )
        return response
    # Normal callback — require the cookie, code, and state to all be present,
    # and the echoed state to match the signed cookie copy (CSRF check).
    if not state_cookie or not code or not state:
        return RedirectResponse(url="/", status_code=302)
    if payload.get("state") != state:
        return RedirectResponse(url="/", status_code=302)
    # Exchange code for user info via account's token endpoint
    async with httpx.AsyncClient(timeout=10) as client:
        try:
            resp = await client.post(
                settings.oauth_token_url,
                json={
                    "code": code,
                    "client_id": settings.oauth_client_id,
                    "redirect_uri": settings.oauth_redirect_uri,
                },
            )
        except httpx.HTTPError:
            return RedirectResponse(url="/", status_code=302)
    if resp.status_code != 200:
        return RedirectResponse(url="/", status_code=302)
    data = resp.json()
    if "error" in data:
        return RedirectResponse(url="/", status_code=302)
    # Map OAuth response to artdag UserContext
    # Note: account token endpoint returns user.email as "username"
    display_name = data.get("display_name", "")
    username = data.get("username", "")
    email = username  # OAuth response "username" is the user's email
    actor_id = f"@{username}"
    user = UserContext(username=username, actor_id=actor_id, email=email)
    response = RedirectResponse(url=next_url, status_code=302)
    set_auth_cookie(response, user)
    # Login complete — drop the one-shot state and the prompt=none cooldown.
    response.delete_cookie("oauth_state")
    response.delete_cookie("pnone_at")
    return response
@router.get("/logout")
async def logout():
    """End the local session and bounce through the account SSO logout."""
    response = RedirectResponse(url=settings.oauth_logout_url, status_code=302)
    clear_auth_cookie(response)
    # Remove any in-flight OAuth state and the prompt=none cooldown marker.
    for stale_cookie in ("oauth_state", "pnone_at"):
        response.delete_cookie(stale_cookie)
    return response

515
l1/app/routers/cache.py Normal file
View File

@@ -0,0 +1,515 @@
"""
Cache and media routes for L1 server.
Handles content retrieval, metadata, media preview, and publishing.
"""
import logging
from pathlib import Path
from typing import Optional, Dict, Any
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
from fastapi.responses import HTMLResponse, FileResponse
from pydantic import BaseModel
from artdag_common import render
from artdag_common.middleware import wants_html, wants_json
from artdag_common.middleware.auth import UserContext
from ..dependencies import (
require_auth, get_templates, get_redis_client,
get_cache_manager, get_current_user
)
from ..services.auth_service import AuthService
from ..services.cache_service import CacheService
router = APIRouter()
logger = logging.getLogger(__name__)
class UpdateMetadataRequest(BaseModel):
    """Payload for PATCH /{cid}/meta; any field left as None is not changed."""
    title: Optional[str] = None
    description: Optional[str] = None
    tags: Optional[list] = None
    # Free-form user-defined metadata merged into the record.
    custom: Optional[Dict[str, Any]] = None
def get_cache_service():
    """Construct a CacheService over the database module and cache manager."""
    import database
    manager = get_cache_manager()
    return CacheService(database, manager)
@router.get("/{cid}")
async def get_cached(
    cid: str,
    request: Request,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Get cached content by hash. Content negotiation: HTML for browsers, JSON for APIs."""
    # Auth is optional here; anonymous requests still resolve the item.
    ctx = await get_current_user(request)
    # Pass actor_id to get friendly name and user-specific metadata
    actor_id = ctx.actor_id if ctx else None
    cache_item = await cache_service.get_cache_item(cid, actor_id=actor_id)
    if not cache_item:
        if wants_html(request):
            templates = get_templates(request)
            return render(templates, "cache/not_found.html", request,
                cid=cid,
                user=ctx,
                active_tab="media",
            )
        raise HTTPException(404, f"Content {cid} not in cache")
    # JSON response
    # NOTE(review): this returns before the check_access() call below, so
    # API/JSON clients are never subject to the access check — confirm
    # content-addressed items are meant to be publicly readable via JSON.
    if wants_json(request):
        return cache_item
    # HTML response requires a logged-in user; redirect anonymous browsers.
    if not ctx:
        from fastapi.responses import RedirectResponse
        return RedirectResponse(url="/auth", status_code=302)
    # Check access
    has_access = await cache_service.check_access(cid, ctx.actor_id, ctx.username)
    if not has_access:
        raise HTTPException(403, "Access denied")
    from ..dependencies import get_nav_counts
    nav_counts = await get_nav_counts(ctx.actor_id)
    templates = get_templates(request)
    return render(templates, "cache/detail.html", request,
        cache=cache_item,
        user=ctx,
        nav_counts=nav_counts,
        active_tab="media",
    )
@router.get("/{cid}/raw")
async def get_cached_raw(
    cid: str,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Stream the raw bytes of a cached item as a file download."""
    file_path, media_type, filename = await cache_service.get_raw_file(cid)
    if not file_path:
        # Nothing on disk for this hash.
        raise HTTPException(404, f"Content {cid} not in cache")
    return FileResponse(file_path, media_type=media_type, filename=filename)
@router.get("/{cid}/mp4")
async def get_cached_mp4(
    cid: str,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Serve cached content as MP4, transcoding from MKV on first request."""
    mp4_path, error = await cache_service.get_as_mp4(cid)
    if error:
        # "not a video" is a client mistake (400); otherwise the content is missing.
        status = 400 if "not a video" in error else 404
        raise HTTPException(status, error)
    return FileResponse(mp4_path, media_type="video/mp4")
@router.get("/{cid}/meta")
async def get_metadata(
    cid: str,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Return the metadata record for a cached item, scoped to the caller."""
    record = await cache_service.get_metadata(cid, ctx.actor_id)
    if record is None:
        raise HTTPException(404, "Content not found")
    return record
@router.patch("/{cid}/meta")
async def update_metadata(
    cid: str,
    req: UpdateMetadataRequest,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Apply a partial metadata update; fields left as None are untouched."""
    _, error = await cache_service.update_metadata(
        cid=cid,
        actor_id=ctx.actor_id,
        title=req.title,
        description=req.description,
        tags=req.tags,
        custom=req.custom,
    )
    if error:
        raise HTTPException(400, error)
    return {"updated": True}
@router.post("/{cid}/publish")
async def publish_content(
    cid: str,
    request: Request,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """
    Publish content to L2 and IPFS.

    Returns an HTMX-friendly HTML fragment for browser clients and JSON
    for API clients.
    """
    import html as html_mod

    ipfs_cid, error = await cache_service.publish_to_l2(
        cid=cid,
        actor_id=ctx.actor_id,
        l2_server=ctx.l2_server,
        auth_token=request.cookies.get("auth_token"),
    )
    if error:
        if wants_html(request):
            # Escape the error text — it may echo upstream/user-influenced
            # data and was previously interpolated into markup unescaped (XSS).
            return HTMLResponse(f'<span class="text-red-400">{html_mod.escape(error)}</span>')
        raise HTTPException(400, error)
    if wants_html(request):
        return HTMLResponse(
            f'<span class="text-green-400">Published: {html_mod.escape(ipfs_cid[:16])}...</span>'
        )
    return {"ipfs_cid": ipfs_cid, "published": True}
@router.delete("/{cid}")
async def delete_content(
    cid: str,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Remove an item from the cache, subject to ownership/pinning rules."""
    _, error = await cache_service.delete_content(cid, ctx.actor_id)
    if error:
        # Policy refusals ("Cannot ...", pinned content) are 400; otherwise 404.
        blocked = "Cannot" in error or "pinned" in error
        raise HTTPException(400 if blocked else 404, error)
    return {"deleted": True}
@router.post("/import")
async def import_from_ipfs(
    ipfs_cid: str,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Fetch content from IPFS by CID and register it in the local cache."""
    local_cid, error = await cache_service.import_from_ipfs(ipfs_cid, ctx.actor_id)
    if error:
        raise HTTPException(400, error)
    return {"cid": local_cid, "imported": True}
@router.post("/upload/chunk")
async def upload_chunk(
    request: Request,
    chunk: UploadFile = File(...),
    upload_id: str = Form(...),
    chunk_index: int = Form(...),
    total_chunks: int = Form(...),
    filename: str = Form(...),
    display_name: Optional[str] = Form(None),
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """
    Upload one chunk of a large file; assembles and ingests the file once
    all chunks have arrived.

    Security: upload_id and filename are client-controlled and were
    previously joined into filesystem paths verbatim, allowing ``../``
    path traversal out of the temp directory. Both are now sanitized, and
    chunk_index/total_chunks are bounds-checked.
    """
    import re as _re
    import tempfile

    # Reject ids containing separators or parent references ("../", "/", …).
    if not _re.fullmatch(r"[A-Za-z0-9._-]{1,128}", upload_id):
        raise HTTPException(400, "Invalid upload_id")
    # Keep only the basename of the client-supplied filename.
    safe_filename = Path(filename).name
    if not safe_filename or safe_filename in (".", ".."):
        raise HTTPException(400, "Invalid filename")
    if total_chunks < 1 or not (0 <= chunk_index < total_chunks):
        raise HTTPException(400, "Invalid chunk index")

    # Create temp dir for this upload
    chunk_dir = Path(tempfile.gettempdir()) / "uploads" / upload_id
    chunk_dir.mkdir(parents=True, exist_ok=True)
    # Save this chunk (zero-padded so glob order matches index order)
    chunk_path = chunk_dir / f"chunk_{chunk_index:05d}"
    chunk_data = await chunk.read()
    chunk_path.write_bytes(chunk_data)
    # Check if all chunks received
    received = len(list(chunk_dir.glob("chunk_*")))
    if received < total_chunks:
        return {"status": "partial", "received": received, "total": total_chunks}
    # All chunks received - assemble in index order, deleting chunks as we go
    final_path = chunk_dir / safe_filename
    with open(final_path, 'wb') as f:
        for i in range(total_chunks):
            cp = chunk_dir / f"chunk_{i:05d}"
            f.write(cp.read_bytes())
            cp.unlink()  # Clean up chunk
    # Read assembled file, then remove the temp artifacts
    content = final_path.read_bytes()
    final_path.unlink()
    chunk_dir.rmdir()
    # Now do the normal upload flow
    cid, ipfs_cid, error = await cache_service.upload_content(
        content=content,
        filename=safe_filename,
        actor_id=ctx.actor_id,
    )
    if error:
        raise HTTPException(400, error)
    # Assign friendly name (prefer the IPFS CID when available)
    final_cid = ipfs_cid or cid
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly_entry = await naming.assign_name(
        cid=final_cid,
        actor_id=ctx.actor_id,
        item_type="media",
        display_name=display_name,
        filename=safe_filename,
    )
    return {
        "status": "complete",
        "cid": final_cid,
        "friendly_name": friendly_entry["friendly_name"],
        "filename": safe_filename,
        "size": len(content),
        "uploaded": True,
    }
@router.post("/upload")
async def upload_content(
    file: UploadFile = File(...),
    display_name: Optional[str] = Form(None),
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Upload content to cache and IPFS.

    Args:
        file: The file to upload
        display_name: Optional custom name for the media (used as friendly name)

    Returns:
        Dict with the preferred cid (IPFS CID when pinning succeeded, else
        the local content hash), the legacy content_hash, the assigned
        friendly name, filename, and size.
    """
    # Whole file is buffered in memory; the chunked endpoint exists for
    # large uploads.
    content = await file.read()
    cid, ipfs_cid, error = await cache_service.upload_content(
        content=content,
        filename=file.filename,
        actor_id=ctx.actor_id,
    )
    if error:
        raise HTTPException(400, error)
    # Assign friendly name (use IPFS CID if available, otherwise local hash)
    final_cid = ipfs_cid or cid
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly_entry = await naming.assign_name(
        cid=final_cid,
        actor_id=ctx.actor_id,
        item_type="media",
        display_name=display_name,  # Use custom name if provided
        filename=file.filename,
    )
    return {
        "cid": final_cid,
        "content_hash": cid,  # Legacy, for backwards compatibility
        "friendly_name": friendly_entry["friendly_name"],
        "filename": file.filename,
        "size": len(content),
        "uploaded": True,
    }
# Media listing endpoint
@router.get("")
async def list_media(
    request: Request,
    offset: int = 0,
    limit: int = 24,
    media_type: Optional[str] = None,
    cache_service: CacheService = Depends(get_cache_service),
    ctx: UserContext = Depends(require_auth),
):
    """List the caller's cached media with offset/limit paging."""
    page = await cache_service.list_media(
        actor_id=ctx.actor_id,
        username=ctx.username,
        offset=offset,
        limit=limit,
        media_type=media_type,
    )
    # A full page implies there may be more items beyond this window.
    more = len(page) >= limit
    if wants_json(request):
        return {"items": page, "offset": offset, "limit": limit, "has_more": more}
    from ..dependencies import get_nav_counts
    nav_counts = await get_nav_counts(ctx.actor_id)
    templates = get_templates(request)
    return render(templates, "cache/media_list.html", request,
        items=page,
        user=ctx,
        nav_counts=nav_counts,
        offset=offset,
        limit=limit,
        has_more=more,
        active_tab="media",
    )
# HTMX metadata form
@router.get("/{cid}/meta-form", response_class=HTMLResponse)
async def get_metadata_form(
    cid: str,
    request: Request,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Render the HTMX metadata-editing form for a cached item."""
    import html as html_mod

    ctx = await get_current_user(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Login required</div>')
    meta = await cache_service.get_metadata(cid, ctx.actor_id)
    # Escape stored values before embedding them in markup: title and
    # description are user-supplied and were previously interpolated raw
    # into an attribute / textarea (stored XSS).
    title = html_mod.escape(meta.get('title', '') if meta else '', quote=True)
    description = html_mod.escape(meta.get('description', '') if meta else '')
    return HTMLResponse(f'''
    <h2 class="text-lg font-semibold mb-4">Metadata</h2>
    <form hx-patch="/cache/{cid}/meta"
          hx-target="#metadata-section"
          hx-swap="innerHTML"
          class="space-y-4">
        <div>
            <label class="block text-gray-400 text-sm mb-1">Title</label>
            <input type="text" name="title" value="{title}"
                   class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
        </div>
        <div>
            <label class="block text-gray-400 text-sm mb-1">Description</label>
            <textarea name="description" rows="3"
                      class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white"
            >{description}</textarea>
        </div>
        <button type="submit"
                class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
            Save Metadata
        </button>
    </form>
    ''')
@router.patch("/{cid}/meta", response_class=HTMLResponse)
async def update_metadata_htmx(
    cid: str,
    request: Request,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Update metadata from the HTMX form and return an inline status fragment."""
    import html as html_mod

    ctx = await get_current_user(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Login required</div>')
    form_data = await request.form()
    success, error = await cache_service.update_metadata(
        cid=cid,
        actor_id=ctx.actor_id,
        title=form_data.get("title"),
        description=form_data.get("description"),
    )
    if error:
        # Escape the error text before embedding it in HTML (XSS guard).
        return HTMLResponse(f'<div class="text-red-400">{html_mod.escape(error)}</div>')
    return HTMLResponse('''
    <div class="text-green-400 mb-4">Metadata saved!</div>
    <script>setTimeout(() => location.reload(), 1000);</script>
    ''')
# Friendly name editing
@router.get("/{cid}/name-form", response_class=HTMLResponse)
async def get_name_form(
    cid: str,
    request: Request,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Render the HTMX friendly-name editing form for a cached item."""
    import html as html_mod

    ctx = await get_current_user(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Login required</div>')
    # Get current friendly name
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    entry = await naming.get_by_cid(ctx.actor_id, cid)
    # Escape the stored name before embedding it in the value attribute: it
    # is user-supplied and was previously interpolated raw (stored XSS).
    current_name = html_mod.escape(
        entry.get("base_name", "") if entry else "", quote=True
    )
    return HTMLResponse(f'''
    <form hx-post="/cache/{cid}/name"
          hx-target="#friendly-name-section"
          hx-swap="innerHTML"
          class="space-y-3">
        <div>
            <label class="block text-gray-400 text-sm mb-1">Friendly Name</label>
            <input type="text" name="display_name" value="{current_name}"
                   placeholder="e.g., my-background-video"
                   class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
            <p class="text-gray-500 text-xs mt-1">A name to reference this media in recipes</p>
        </div>
        <div class="flex space-x-2">
            <button type="submit"
                    class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
                Save
            </button>
            <button type="button"
                    onclick="location.reload()"
                    class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
                Cancel
            </button>
        </div>
    </form>
    ''')
@router.post("/{cid}/name", response_class=HTMLResponse)
async def update_friendly_name(
    cid: str,
    request: Request,
):
    """Update a media item's friendly name from the HTMX form."""
    import html as html_mod

    ctx = await get_current_user(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Login required</div>')
    form_data = await request.form()
    display_name = form_data.get("display_name", "").strip()
    if not display_name:
        return HTMLResponse('<div class="text-red-400">Name cannot be empty</div>')
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    try:
        # Return value not needed; assign_name persists the new name.
        await naming.assign_name(
            cid=cid,
            actor_id=ctx.actor_id,
            item_type="media",
            display_name=display_name,
        )
        return HTMLResponse('''
        <div class="text-green-400 mb-2">Name updated!</div>
        <script>setTimeout(() => location.reload(), 1000);</script>
        ''')
    except Exception as e:
        # Escape the exception text before embedding it in HTML (XSS guard).
        return HTMLResponse(f'<div class="text-red-400">Error: {html_mod.escape(str(e))}</div>')

415
l1/app/routers/effects.py Normal file
View File

@@ -0,0 +1,415 @@
"""
Effects routes for L1 server.
Handles effect upload, listing, and metadata.
Effects are S-expression files stored in IPFS like all other content-addressed data.
"""
import json
import logging
import re
import time
from pathlib import Path
from typing import Optional
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
from fastapi.responses import HTMLResponse, PlainTextResponse
from artdag_common import render
from artdag_common.middleware import wants_html, wants_json
from artdag_common.middleware.auth import UserContext
from ..dependencies import (
require_auth, get_templates, get_redis_client,
get_cache_manager,
)
from ..services.auth_service import AuthService
import ipfs_client
router = APIRouter()
logger = logging.getLogger(__name__)
def get_effects_dir() -> Path:
    """Return (creating if needed) the directory where effects are cached."""
    base = Path(get_cache_manager().cache_dir) / "_effects"
    base.mkdir(parents=True, exist_ok=True)
    return base
def parse_effect_metadata(source: str) -> dict:
    """
    Extract effect metadata from S-expression source code.

    Reads ';; @key value' comment headers that precede the first
    S-expression, then falls back to (defeffect "name" ...) /
    (effect "name" ...) and finally the first (define name ...) form to
    fill in a missing name.
    """
    info = {
        "name": "",
        "version": "1.0.0",
        "author": "",
        "temporal": False,
        "description": "",
        "params": [],
    }
    # Scan the comment header; stop at the first S-expression line.
    for raw_line in source.split("\n"):
        text = raw_line.strip()
        if not text.startswith(";"):
            # Non-comment, non-paren content is skipped; a paren ends the header.
            if text and not text.startswith("("):
                continue
            if text.startswith("("):
                break
        body = text.lstrip(";").strip()
        if body.startswith("@effect "):
            info["name"] = body[8:].strip()
        elif body.startswith("@name "):
            info["name"] = body[6:].strip()
        elif body.startswith("@version "):
            info["version"] = body[9:].strip()
        elif body.startswith("@author "):
            info["author"] = body[8:].strip()
        elif body.startswith("@temporal"):
            # Bare '@temporal' means true; otherwise parse the trailing value.
            flag = body[9:].strip().lower() if len(body) > 9 else "true"
            info["temporal"] = flag in ("true", "yes", "1", "")
        elif body.startswith("@description "):
            info["description"] = body[13:].strip()
        elif body.startswith("@param "):
            # Format: @param name type [description]
            pieces = body[7:].split(None, 2)
            if len(pieces) >= 2:
                spec = {"name": pieces[0], "type": pieces[1]}
                if len(pieces) > 2:
                    spec["description"] = pieces[2]
                info["params"].append(spec)
    # Name fallbacks, in priority order.
    if not info["name"]:
        match = re.search(r'\((defeffect|effect)\s+"([^"]+)"', source)
        if match:
            info["name"] = match.group(2)
    if not info["name"]:
        match = re.search(r'\(define\s+(\w+)', source)
        if match:
            info["name"] = match.group(1)
    return info
@router.post("/upload")
async def upload_effect(
    file: UploadFile = File(...),
    display_name: Optional[str] = Form(None),
    ctx: UserContext = Depends(require_auth),
):
    """
    Upload an S-expression effect to IPFS.

    Parses metadata from comment headers, stores the source in IPFS (and in
    a local cache keyed by CID for fast worker access), records ownership,
    and assigns a friendly name. Returns the IPFS CID for use in recipes.

    Args:
        file: The .sexp effect file
        display_name: Optional custom friendly name for the effect

    Raises:
        HTTPException: 400 if the file is not valid UTF-8; 500 if the IPFS
            add fails.
    """
    content = await file.read()
    try:
        source = content.decode("utf-8")
    except UnicodeDecodeError:
        raise HTTPException(400, "Effect must be valid UTF-8 text")
    # Parse metadata from sexp source; a parse failure degrades to just
    # the filename-derived name rather than rejecting the upload.
    try:
        meta = parse_effect_metadata(source)
    except Exception as e:
        logger.warning(f"Failed to parse effect metadata: {e}")
        meta = {"name": file.filename or "unknown"}
    if not meta.get("name"):
        meta["name"] = Path(file.filename).stem if file.filename else "unknown"
    # Store effect source in IPFS — the CID is the effect's identity.
    cid = ipfs_client.add_bytes(content)
    if not cid:
        raise HTTPException(500, "Failed to store effect in IPFS")
    # Also keep local cache for fast worker access
    effects_dir = get_effects_dir()
    effect_dir = effects_dir / cid
    effect_dir.mkdir(parents=True, exist_ok=True)
    (effect_dir / "effect.sexp").write_text(source, encoding="utf-8")
    # Store metadata (locally and in IPFS)
    full_meta = {
        "cid": cid,
        "meta": meta,
        "uploader": ctx.actor_id,
        "uploaded_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
        "filename": file.filename,
    }
    (effect_dir / "metadata.json").write_text(json.dumps(full_meta, indent=2))
    # Also store metadata in IPFS for discoverability
    meta_cid = ipfs_client.add_json(full_meta)
    # Track ownership in item_types
    import database
    await database.save_item_metadata(
        cid=cid,
        actor_id=ctx.actor_id,
        item_type="effect",
        filename=file.filename,
    )
    # Assign friendly name (use custom display_name if provided, else from metadata)
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly_entry = await naming.assign_name(
        cid=cid,
        actor_id=ctx.actor_id,
        item_type="effect",
        display_name=display_name or meta.get("name"),
        filename=file.filename,
    )
    logger.info(f"Uploaded effect '{meta.get('name')}' cid={cid} friendly_name='{friendly_entry['friendly_name']}' by {ctx.actor_id}")
    return {
        "cid": cid,
        "metadata_cid": meta_cid,
        "name": meta.get("name"),
        "friendly_name": friendly_entry["friendly_name"],
        "version": meta.get("version"),
        "temporal": meta.get("temporal", False),
        "params": meta.get("params", []),
        "uploaded": True,
    }
@router.get("/{cid}")
async def get_effect(
    cid: str,
    request: Request,
    ctx: UserContext = Depends(require_auth),
):
    """
    Get effect metadata by CID.

    Serves from the local metadata cache when present; otherwise fetches
    the source from IPFS, re-parses its metadata, and caches both locally.
    Content negotiation: JSON for API clients, HTML detail page otherwise.
    """
    effects_dir = get_effects_dir()
    effect_dir = effects_dir / cid
    metadata_path = effect_dir / "metadata.json"
    # Try local cache first
    if metadata_path.exists():
        meta = json.loads(metadata_path.read_text())
    else:
        # Fetch from IPFS
        source_bytes = ipfs_client.get_bytes(cid)
        if not source_bytes:
            raise HTTPException(404, f"Effect {cid[:16]}... not found")
        # Cache locally so the next request skips IPFS
        effect_dir.mkdir(parents=True, exist_ok=True)
        source = source_bytes.decode("utf-8")
        (effect_dir / "effect.sexp").write_text(source)
        # Parse metadata from source; note this cached record lacks the
        # uploader/uploaded_at fields that an original upload writes.
        parsed_meta = parse_effect_metadata(source)
        meta = {"cid": cid, "meta": parsed_meta}
        (effect_dir / "metadata.json").write_text(json.dumps(meta, indent=2))
    # Add the caller's friendly name for this CID, if one is assigned
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly = await naming.get_by_cid(ctx.actor_id, cid)
    if friendly:
        meta["friendly_name"] = friendly["friendly_name"]
        meta["base_name"] = friendly["base_name"]
        meta["version_id"] = friendly["version_id"]
    if wants_json(request):
        return meta
    # HTML response
    from ..dependencies import get_nav_counts
    nav_counts = await get_nav_counts(ctx.actor_id)
    templates = get_templates(request)
    return render(templates, "effects/detail.html", request,
        effect=meta,
        user=ctx,
        nav_counts=nav_counts,
        active_tab="effects",
    )
@router.get("/{cid}/source")
async def get_effect_source(
    cid: str,
    ctx: UserContext = Depends(require_auth),
):
    """Return an effect's raw S-expression source as plain text."""
    effect_dir = get_effects_dir() / cid
    # Local cache first: current .sexp layout, then the legacy .py layout.
    for candidate in (effect_dir / "effect.sexp", effect_dir / "effect.py"):
        if candidate.exists():
            return PlainTextResponse(candidate.read_text())
    # Not cached locally — pull from IPFS and cache for next time.
    source_bytes = ipfs_client.get_bytes(cid)
    if not source_bytes:
        raise HTTPException(404, f"Effect {cid[:16]}... not found")
    source_path = effect_dir / "effect.sexp"
    source_path.parent.mkdir(parents=True, exist_ok=True)
    source = source_bytes.decode("utf-8")
    source_path.write_text(source)
    return PlainTextResponse(source)
@router.get("")
async def list_effects(
    request: Request,
    offset: int = 0,
    limit: int = 20,
    ctx: UserContext = Depends(require_auth),
):
    """List the current user's effects, newest first, with pagination.

    Ownership comes from the ``item_types`` table; each owned CID is
    hydrated from its locally cached ``metadata.json`` and decorated with
    a friendly name when one exists. Responds with JSON or HTML depending
    on content negotiation.
    """
    import database
    from ..services.naming_service import get_naming_service

    effects_dir = get_effects_dir()
    naming = get_naming_service()

    # The user's effect CIDs, as recorded in item_types.
    user_items = await database.get_user_items(ctx.actor_id, item_type="effect", limit=1000)

    effects = []
    for item in user_items:
        cid = item["cid"]
        metadata_path = effects_dir / cid / "metadata.json"
        if not metadata_path.exists():
            continue
        try:
            meta = json.loads(metadata_path.read_text())
        except json.JSONDecodeError:
            # Corrupt cached metadata — skip the entry rather than fail the page.
            continue
        friendly = await naming.get_by_cid(ctx.actor_id, cid)
        if friendly:
            meta["friendly_name"] = friendly["friendly_name"]
            meta["base_name"] = friendly["base_name"]
        effects.append(meta)

    # Newest uploads first; entries without a timestamp sort to the end.
    effects.sort(key=lambda e: e.get("uploaded_at", ""), reverse=True)

    # In-memory pagination over the hydrated list.
    total = len(effects)
    page = effects[offset:offset + limit]
    has_more = offset + limit < total

    if wants_json(request):
        return {"effects": page, "offset": offset, "limit": limit, "has_more": has_more}

    from ..dependencies import get_nav_counts
    nav_counts = await get_nav_counts(ctx.actor_id)
    templates = get_templates(request)
    return render(templates, "effects/list.html", request,
        effects=page,
        user=ctx,
        nav_counts=nav_counts,
        active_tab="effects",
        offset=offset,
        limit=limit,
        has_more=has_more,
    )
@router.post("/{cid}/publish")
async def publish_effect(
    cid: str,
    request: Request,
    ctx: UserContext = Depends(require_auth),
):
    """Publish effect to L2 ActivityPub server.

    Verifies the effect exists in the local cache, then delegates the
    publish to ``CacheService.publish_to_l2`` using the caller's auth
    cookie. HTML clients (HTMX) get an inline status ``<span>``; API
    clients get JSON on success or an HTTPException on failure.
    """
    from ..services.cache_service import CacheService
    import database
    # Verify effect exists in the local cache before attempting to publish.
    effects_dir = get_effects_dir()
    effect_dir = effects_dir / cid
    if not effect_dir.exists():
        error = "Effect not found"
        if wants_html(request):
            return HTMLResponse(f'<span class="text-red-400">{error}</span>')
        raise HTTPException(404, error)
    # Use cache service to publish; auth_token forwards the user's L2 session.
    cache_service = CacheService(database, get_cache_manager())
    ipfs_cid, error = await cache_service.publish_to_l2(
        cid=cid,
        actor_id=ctx.actor_id,
        l2_server=ctx.l2_server,
        auth_token=request.cookies.get("auth_token"),
    )
    if error:
        # NOTE(review): `error` is interpolated into HTML unescaped — confirm
        # publish_to_l2 error strings can never carry user-controlled markup.
        if wants_html(request):
            return HTMLResponse(f'<span class="text-red-400">{error}</span>')
        raise HTTPException(400, error)
    logger.info(f"Published effect {cid[:16]}... to L2 by {ctx.actor_id}")
    if wants_html(request):
        return HTMLResponse(f'<span class="text-green-400">Shared: {ipfs_cid[:16]}...</span>')
    return {"ipfs_cid": ipfs_cid, "cid": cid, "published": True}
@router.delete("/{cid}")
async def delete_effect(
    cid: str,
    ctx: UserContext = Depends(require_auth),
):
    """Drop the caller's ownership of an effect; garbage-collect if orphaned.

    The effect's files and IPFS pin are removed only when no other user
    still owns the CID.
    """
    import database

    # Sever this user's ownership link and friendly-name entry for the CID.
    await database.delete_item_type(cid, ctx.actor_id, "effect")
    await database.delete_friendly_name(ctx.actor_id, cid)

    # If nobody owns the effect any more, reclaim local disk and the IPFS pin.
    if not await database.get_item_types(cid):
        effect_dir = get_effects_dir() / cid
        if effect_dir.exists():
            import shutil
            shutil.rmtree(effect_dir)
        ipfs_client.unpin(cid)
        logger.info(f"Garbage collected effect {cid[:16]}... (no remaining owners)")

    logger.info(f"Removed effect {cid[:16]}... ownership for {ctx.actor_id}")
    return {"deleted": True}

143
l1/app/routers/fragments.py Normal file
View File

@@ -0,0 +1,143 @@
"""
Art-DAG fragment endpoints.
Exposes HTML fragments at ``/internal/fragments/{type}`` for consumption
by coop apps via the fragment client.
"""
import os
from fastapi import APIRouter, Request, Response
router = APIRouter()
# Registry of fragment handlers: type -> async callable(request) returning HTML str
_handlers: dict[str, object] = {}
FRAGMENT_HEADER = "X-Fragment-Request"
@router.get("/internal/fragments/{fragment_type}")
async def get_fragment(fragment_type: str, request: Request):
    """Serve a registered HTML fragment.

    Requests lacking the fragment header are rejected with 403; unknown
    fragment types degrade to an empty (but successful) HTML response.
    """
    if not request.headers.get(FRAGMENT_HEADER):
        return Response(content="", status_code=403)
    handler = _handlers.get(fragment_type)
    if handler is None:
        return Response(content="", media_type="text/html", status_code=200)
    rendered = await handler(request)
    return Response(content=rendered, media_type="text/html", status_code=200)
# --- nav-item fragment ---
async def _nav_item_handler(request: Request) -> str:
    """Render the nav-item fragment pointing at the configured Art-DAG URL."""
    from artdag_common import render_fragment
    target = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
    return render_fragment(request.app.state.templates, "fragments/nav_item.html", artdag_url=target)
_handlers["nav-item"] = _nav_item_handler
# --- link-card fragment ---
async def _link_card_handler(request: Request) -> str:
    """Render link-card fragment(s) for the requested content.

    Query params:
        cid / slug: lookup key for single-card mode (``cid`` wins if both set).
        type: content type, default ``"media"``.
        keys: comma-separated keys for batch mode; each card is preceded by a
            ``<!-- fragment:{key} -->`` marker so the caller can split the
            response back into individual cards.

    Returns rendered HTML, or an empty string when no key is supplied.
    """
    # Note: rendering and DB access happen in _render_single_link_card, which
    # does its own imports — the previously unused local imports were removed.
    templates = request.app.state.templates
    cid = request.query_params.get("cid", "")
    content_type = request.query_params.get("type", "media")
    slug = request.query_params.get("slug", "")
    keys_raw = request.query_params.get("keys", "")
    # Batch mode: return multiple cards separated by markers
    if keys_raw:
        keys = [k.strip() for k in keys_raw.split(",") if k.strip()]
        parts = []
        for key in keys:
            parts.append(f"<!-- fragment:{key} -->")
            card_html = await _render_single_link_card(
                templates, key, content_type,
            )
            parts.append(card_html)
        return "\n".join(parts)
    # Single mode: use cid or slug
    lookup_cid = cid or slug
    if not lookup_cid:
        return ""
    return await _render_single_link_card(templates, lookup_cid, content_type)
async def _render_single_link_card(templates, cid: str, content_type: str) -> str:
    """Render one link-card fragment for a CID.

    Resolution order for the title: friendly name > item metadata > run
    cache entry > truncated CID. The link target depends on content_type
    (run/recipe/effect each get their own route; everything else maps to
    the generic /cache route). Returns "" for an empty cid.
    """
    import database
    from artdag_common import render_fragment
    if not cid:
        return ""
    artdag_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
    # Try item_types first (has metadata)
    item = await database.get_item_types(cid)
    # get_item_types returns a list; pick best match for content_type
    meta = None
    if item:
        for it in item:
            if it.get("type") == content_type:
                meta = it
                break
        if not meta:
            # No exact type match — fall back to the first owner's entry.
            meta = item[0]
    # Try friendly name for display
    friendly = None
    if meta and meta.get("actor_id"):
        friendly = await database.get_friendly_name_by_cid(meta["actor_id"], cid)
    # Try run cache if type is "run"
    run = None
    if content_type == "run":
        run = await database.get_run_cache(cid)
    title = ""
    description = ""
    link = ""
    # Title preference: friendly name, then item metadata, then run, then CID.
    if friendly:
        title = friendly.get("display_name") or friendly.get("base_name", cid[:12])
    elif meta:
        title = meta.get("filename") or meta.get("description", cid[:12])
    elif run:
        title = f"Run {cid[:12]}"
    else:
        title = cid[:16]
    if meta:
        description = meta.get("description", "")
    # Route selection by content type; unknown types fall back to /cache.
    if content_type == "run":
        link = f"{artdag_url}/runs/{cid}"
    elif content_type == "recipe":
        link = f"{artdag_url}/recipes/{cid}"
    elif content_type == "effect":
        link = f"{artdag_url}/effects/{cid}"
    else:
        link = f"{artdag_url}/cache/{cid}"
    return render_fragment(
        templates, "fragments/link_card.html",
        title=title,
        description=description,
        link=link,
        cid=cid,
        content_type=content_type,
        artdag_url=artdag_url,
    )
_handlers["link-card"] = _link_card_handler

253
l1/app/routers/home.py Normal file
View File

@@ -0,0 +1,253 @@
"""
Home and root routes for L1 server.
"""
from pathlib import Path
import markdown
from fastapi import APIRouter, Request, Depends, HTTPException
from fastapi.responses import HTMLResponse, RedirectResponse, FileResponse
from artdag_common import render
from artdag_common.middleware import wants_html
from ..dependencies import get_templates, get_current_user
router = APIRouter()
@router.get("/health")
async def health():
    """Liveness probe; unconditionally responds 200 with a static payload."""
    payload = {"status": "ok"}
    return payload
async def get_user_stats(actor_id: str) -> dict:
    """Get stats for a user.

    Returns a dict with counts for ``media``, ``recipes``, ``runs``,
    ``storage`` (providers) and ``effects``. Every lookup is deliberately
    best-effort: any failure degrades that single counter to 0 so the
    home page still renders when a backing service is unavailable.
    """
    import database
    from ..services.run_service import RunService
    from ..dependencies import get_redis_client, get_cache_manager
    stats = {}
    try:
        # Count only actual media types (video, image, audio), not effects/recipes
        media_count = 0
        for media_type in ["video", "image", "audio", "unknown"]:
            media_count += await database.count_user_items(actor_id, item_type=media_type)
        stats["media"] = media_count
    except Exception:
        stats["media"] = 0
    try:
        # Count user's recipes from database (ownership-based)
        stats["recipes"] = await database.count_user_items(actor_id, item_type="recipe")
    except Exception:
        stats["recipes"] = 0
    try:
        # Runs are counted by listing them; no dedicated count API is used here.
        run_service = RunService(database, get_redis_client(), get_cache_manager())
        runs = await run_service.list_runs(actor_id)
        stats["runs"] = len(runs)
    except Exception:
        stats["runs"] = 0
    try:
        storage_providers = await database.get_user_storage_providers(actor_id)
        stats["storage"] = len(storage_providers) if storage_providers else 0
    except Exception:
        stats["storage"] = 0
    try:
        # Count user's effects from database (ownership-based)
        stats["effects"] = await database.count_user_items(actor_id, item_type="effect")
    except Exception:
        stats["effects"] = 0
    return stats
@router.get("/api/stats")
async def api_stats(request: Request):
    """Return the authenticated user's stats as JSON; 401 when anonymous."""
    user = await get_current_user(request)
    if user is None:
        raise HTTPException(401, "Authentication required")
    return await get_user_stats(user.actor_id)
@router.delete("/api/clear-data")
async def clear_user_data(request: Request):
    """
    Clear all user L1 data except storage configuration.
    Deletes: runs, recipes, effects, media/cache items.
    Preserves: storage provider configurations.

    Each category is deleted best-effort: per-item failures are collected
    in ``errors`` (only the first 10 are logged/returned) and never abort
    the remaining categories.
    """
    import logging
    logger = logging.getLogger(__name__)
    user = await get_current_user(request)
    if not user:
        raise HTTPException(401, "Authentication required")
    import database
    from ..services.recipe_service import RecipeService
    from ..services.run_service import RunService
    from ..dependencies import get_redis_client, get_cache_manager
    actor_id = user.actor_id
    username = user.username
    # Per-category success counters returned to the client.
    deleted = {
        "runs": 0,
        "recipes": 0,
        "effects": 0,
        "media": 0,
    }
    errors = []
    # Delete all runs
    try:
        run_service = RunService(database, get_redis_client(), get_cache_manager())
        runs = await run_service.list_runs(actor_id, offset=0, limit=10000)
        for run in runs:
            try:
                await run_service.discard_run(run["run_id"], actor_id, username)
                deleted["runs"] += 1
            except Exception as e:
                errors.append(f"Run {run['run_id']}: {e}")
    except Exception as e:
        errors.append(f"Failed to list runs: {e}")
    # Delete all recipes
    try:
        recipe_service = RecipeService(get_redis_client(), get_cache_manager())
        recipes = await recipe_service.list_recipes(actor_id, offset=0, limit=10000)
        for recipe in recipes:
            try:
                success, error = await recipe_service.delete_recipe(recipe["recipe_id"], actor_id)
                if success:
                    deleted["recipes"] += 1
                else:
                    errors.append(f"Recipe {recipe['recipe_id']}: {error}")
            except Exception as e:
                errors.append(f"Recipe {recipe['recipe_id']}: {e}")
    except Exception as e:
        errors.append(f"Failed to list recipes: {e}")
    # Delete all effects (uses ownership model)
    cache_manager = get_cache_manager()
    try:
        # Get user's effects from item_types
        effect_items = await database.get_user_items(actor_id, item_type="effect", limit=10000)
        for item in effect_items:
            cid = item.get("cid")
            if cid:
                try:
                    # Remove ownership link
                    await database.delete_item_type(cid, actor_id, "effect")
                    await database.delete_friendly_name(actor_id, cid)
                    # Check if orphaned
                    remaining = await database.get_item_types(cid)
                    if not remaining:
                        # Garbage collect: local files plus the IPFS pin.
                        effects_dir = Path(cache_manager.cache_dir) / "_effects" / cid
                        if effects_dir.exists():
                            import shutil
                            shutil.rmtree(effects_dir)
                        import ipfs_client
                        ipfs_client.unpin(cid)
                    deleted["effects"] += 1
                except Exception as e:
                    errors.append(f"Effect {cid[:16]}...: {e}")
    except Exception as e:
        errors.append(f"Failed to delete effects: {e}")
    # Delete all media/cache items for user (uses ownership model)
    try:
        from ..services.cache_service import CacheService
        cache_service = CacheService(database, cache_manager)
        # Get user's media items (video, image, audio)
        for media_type in ["video", "image", "audio", "unknown"]:
            items = await database.get_user_items(actor_id, item_type=media_type, limit=10000)
            for item in items:
                cid = item.get("cid")
                if cid:
                    try:
                        success, error = await cache_service.delete_content(cid, actor_id)
                        if success:
                            deleted["media"] += 1
                        elif error:
                            errors.append(f"Media {cid[:16]}...: {error}")
                    except Exception as e:
                        errors.append(f"Media {cid[:16]}...: {e}")
    except Exception as e:
        errors.append(f"Failed to delete media: {e}")
    logger.info(f"Cleared data for {actor_id}: {deleted}")
    if errors:
        logger.warning(f"Errors during clear: {errors[:10]}")  # Log first 10 errors
    return {
        "message": "User data cleared",
        "deleted": deleted,
        "errors": errors[:10] if errors else [],  # Return first 10 errors
        "storage_preserved": True,
    }
@router.get("/")
async def home(request: Request):
    """Render the home page: project README plus the current user's stats."""
    user = await get_current_user(request)

    # Render README.md to HTML; a missing or unreadable file leaves it blank.
    readme_html = ""
    try:
        readme_path = Path(__file__).parent.parent.parent / "README.md"
        if readme_path.exists():
            readme_html = markdown.markdown(
                readme_path.read_text(), extensions=['tables', 'fenced_code']
            )
    except Exception:
        pass

    # Anonymous visitors get no stats.
    stats = await get_user_stats(user.actor_id) if user else {}

    templates = get_templates(request)
    return render(templates, "home.html", request,
        user=user,
        readme_html=readme_html,
        stats=stats,
        nav_counts=stats,  # nav badge counts reuse the same numbers
        active_tab="home",
    )
@router.get("/login")
async def login_redirect(request: Request):
    """Convenience alias: bounce /login to the OAuth flow at /auth/login."""
    target = "/auth/login"
    return RedirectResponse(url=target, status_code=302)
# Client tarball path
CLIENT_TARBALL = Path(__file__).parent.parent.parent / "artdag-client.tar.gz"
@router.get("/download/client")
async def download_client():
    """Serve the pre-built CLI client tarball; 404 if it was never built."""
    if not CLIENT_TARBALL.exists():
        raise HTTPException(404, "Client package not found. Run build-client.sh to create it.")
    return FileResponse(
        CLIENT_TARBALL,
        filename="artdag-client.tar.gz",
        media_type="application/gzip",
    )

125
l1/app/routers/inbox.py Normal file
View File

@@ -0,0 +1,125 @@
"""AP-style inbox endpoint for receiving signed activities from the coop.
POST /inbox — verify HTTP Signature, dispatch by activity type.
"""
from __future__ import annotations
import logging
import time
import httpx
from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse
from ..dependencies import get_redis_client
from ..utils.http_signatures import verify_request_signature, parse_key_id
log = logging.getLogger(__name__)
router = APIRouter()
# Cache fetched public keys in Redis for 24 hours
_KEY_CACHE_TTL = 86400
async def _fetch_actor_public_key(actor_url: str) -> str | None:
    """Fetch an actor's public key, with Redis caching.

    Returns the PEM string, or None when the actor document cannot be
    fetched or has no ``publicKey.publicKeyPem`` entry. Keys are cached
    for _KEY_CACHE_TTL seconds.
    """
    redis = get_redis_client()
    cache_key = f"actor_pubkey:{actor_url}"
    # Check cache
    # NOTE(review): assumes the Redis client decodes responses to str —
    # otherwise a cache hit returns bytes while a fresh fetch returns str;
    # confirm the client is constructed with decode_responses=True.
    cached = redis.get(cache_key)
    if cached:
        return cached
    # Fetch actor JSON with AP content negotiation.
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(
                actor_url,
                headers={"Accept": "application/activity+json, application/ld+json"},
            )
            if resp.status_code != 200:
                log.warning("Failed to fetch actor %s: %d", actor_url, resp.status_code)
                return None
            data = resp.json()
    except Exception:
        log.warning("Error fetching actor %s", actor_url, exc_info=True)
        return None
    pub_key_pem = (data.get("publicKey") or {}).get("publicKeyPem")
    if not pub_key_pem:
        log.warning("No publicKey in actor %s", actor_url)
        return None
    # Cache it
    redis.set(cache_key, pub_key_pem, ex=_KEY_CACHE_TTL)
    return pub_key_pem
@router.post("/inbox")
async def inbox(request: Request):
    """Receive signed AP activities from the coop platform.

    Flow: require a Signature header, resolve the signing actor's public
    key (Redis-cached), verify the HTTP Signature over the request, then
    parse the JSON body and dispatch by activity type. Successful
    delivery always returns 202 per ActivityPub convention, even for
    unhandled activity types.
    """
    sig_header = request.headers.get("signature", "")
    if not sig_header:
        return JSONResponse({"error": "missing signature"}, status_code=401)
    # Read body
    # NOTE(review): `body` is read but not passed to the verifier — no
    # Digest-header check against the payload is visible here; confirm
    # verify_request_signature covers it or add digest validation.
    body = await request.body()
    # Verify HTTP Signature
    actor_url = parse_key_id(sig_header)
    if not actor_url:
        return JSONResponse({"error": "invalid keyId"}, status_code=401)
    pub_key = await _fetch_actor_public_key(actor_url)
    if not pub_key:
        return JSONResponse({"error": "could not fetch public key"}, status_code=401)
    req_headers = dict(request.headers)
    path = request.url.path
    valid = verify_request_signature(
        public_key_pem=pub_key,
        signature_header=sig_header,
        method="POST",
        path=path,
        headers=req_headers,
    )
    if not valid:
        log.warning("Invalid signature from %s", actor_url)
        return JSONResponse({"error": "invalid signature"}, status_code=401)
    # Parse and dispatch
    try:
        activity = await request.json()
    except Exception:
        return JSONResponse({"error": "invalid json"}, status_code=400)
    activity_type = activity.get("type", "")
    log.info("Inbox received: %s from %s", activity_type, actor_url)
    # Only rose:DeviceAuth is currently handled; other types are accepted and dropped.
    if activity_type == "rose:DeviceAuth":
        _handle_device_auth(activity)
    # Always 202 — AP convention
    return JSONResponse({"status": "accepted"}, status_code=202)
def _handle_device_auth(activity: dict) -> None:
    """Apply a rose:DeviceAuth activity to local Redis.

    ``login`` stores ``did_auth:{device_id}`` with a 30-day expiry,
    ``logout`` removes it, and any other action is logged and ignored.
    """
    obj = activity.get("object", {})
    device_id = obj.get("device_id", "")
    action = obj.get("action", "")
    if not device_id:
        log.warning("rose:DeviceAuth missing device_id")
        return
    redis = get_redis_client()
    key = f"did_auth:{device_id}"
    if action == "login":
        # TTL of 30 days, expressed in seconds.
        redis.set(key, str(time.time()), ex=30 * 24 * 3600)
        log.info("did_auth set for device %s...", device_id[:16])
        return
    if action == "logout":
        redis.delete(key)
        log.info("did_auth cleared for device %s...", device_id[:16])
        return
    log.warning("rose:DeviceAuth unknown action: %s", action)

74
l1/app/routers/oembed.py Normal file
View File

@@ -0,0 +1,74 @@
"""Art-DAG oEmbed endpoint.
Returns oEmbed JSON responses for Art-DAG content (media, recipes, effects, runs).
"""
import os
from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse
router = APIRouter()
@router.get("/oembed")
async def oembed(request: Request):
    """oEmbed endpoint for Art-DAG content URLs.

    Accepts ``?url=`` pointing at /cache/{cid}, /recipes/{cid},
    /effects/{cid} or /runs/{cid}, looks up a display title (friendly
    name > item metadata > run entry > truncated CID) and returns a
    version-1.0 "link"-type oEmbed JSON document.
    """
    url = request.query_params.get("url", "")
    if not url:
        return JSONResponse({"error": "url parameter required"}, status_code=400)
    # Parse URL to extract content type and CID
    # URL patterns: /cache/{cid}, /recipes/{cid}, /effects/{cid}, /runs/{cid}
    from urllib.parse import urlparse
    parsed = urlparse(url)
    parts = [p for p in parsed.path.strip("/").split("/") if p]
    if len(parts) < 2:
        return JSONResponse({"error": "could not parse content URL"}, status_code=404)
    # NOTE(review): rstrip("s") strips *all* trailing 's' characters (it is a
    # character-set strip, not a suffix removal) — fine for the four expected
    # segments, but confirm no other path segments can reach this endpoint.
    content_type = parts[0].rstrip("s")  # recipes -> recipe, runs -> run
    cid = parts[1]
    import database
    title = cid[:16]
    thumbnail_url = None
    # Look up metadata
    items = await database.get_item_types(cid)
    if items:
        meta = items[0]
        title = meta.get("filename") or meta.get("description") or title
        # Try friendly name
        actor_id = meta.get("actor_id")
        if actor_id:
            friendly = await database.get_friendly_name_by_cid(actor_id, cid)
            if friendly:
                title = friendly.get("display_name") or friendly.get("base_name", title)
        # Media items get a thumbnail
        # NOTE(review): this compares against the literal type "media", while
        # other routes record media as video/image/audio — confirm which
        # values item_types actually stores, or the thumbnail may never fire.
        if meta.get("type") == "media":
            artdag_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
            thumbnail_url = f"{artdag_url}/cache/{cid}/raw"
    elif content_type == "run":
        run = await database.get_run_cache(cid)
        if run:
            title = f"Run {cid[:12]}"
    artdag_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
    resp = {
        "version": "1.0",
        "type": "link",
        "title": title,
        "provider_name": "art-dag",
        "provider_url": artdag_url,
        "url": url,
    }
    if thumbnail_url:
        resp["thumbnail_url"] = thumbnail_url
    return JSONResponse(resp)

686
l1/app/routers/recipes.py Normal file
View File

@@ -0,0 +1,686 @@
"""
Recipe management routes for L1 server.
Handles recipe upload, listing, viewing, and execution.
"""
import json
import logging
from typing import Any, Dict, List, Optional, Tuple
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File
from fastapi.responses import HTMLResponse
from pydantic import BaseModel
from artdag_common import render
from artdag_common.middleware import wants_html, wants_json
from artdag_common.middleware.auth import UserContext
from ..dependencies import require_auth, get_current_user, get_templates, get_redis_client, get_cache_manager
from ..services.auth_service import AuthService
from ..services.recipe_service import RecipeService
from ..types import (
CompiledNode, TransformedNode, Registry, Recipe,
is_variable_input, get_effect_cid,
)
router = APIRouter()
logger = logging.getLogger(__name__)
class RecipeUploadRequest(BaseModel):
    """Request body for uploading a recipe as raw text."""
    content: str  # S-expression or YAML
    # Optional human-readable name; falls back to the filename when absent.
    name: Optional[str] = None
    description: Optional[str] = None
class RecipeRunRequest(BaseModel):
    """Request to run a recipe with variable inputs."""
    # Pydantic deep-copies mutable defaults per instance, so {} is safe here.
    inputs: Dict[str, str] = {}  # Map input names to CIDs
def get_recipe_service() -> RecipeService:
    """Build a RecipeService backed by the shared Redis client and cache manager."""
    redis = get_redis_client()
    cache = get_cache_manager()
    return RecipeService(redis, cache)
def transform_node(
    node: CompiledNode,
    assets: Dict[str, Dict[str, Any]],
    effects: Dict[str, Dict[str, Any]],
) -> TransformedNode:
    """
    Convert a compiled node to the artdag execution format.

    SOURCE nodes have their ``asset`` reference resolved to a CID and
    EFFECT nodes their ``effect`` reference; ``id``/``type`` are renamed
    to ``node_id``/``node_type``. The input node is never mutated.
    """
    node_type = node.get("type")
    # Copy the config so the caller's dict is left untouched.
    config = dict(node.get("config", {}))

    # Registry lookups: map symbolic names to content CIDs where known.
    if node_type == "SOURCE" and "asset" in config:
        ref = config["asset"]
        if ref in assets:
            config["cid"] = assets[ref].get("cid")
    if node_type == "EFFECT" and "effect" in config:
        ref = config["effect"]
        if ref in effects:
            config["cid"] = effects[ref].get("cid")

    return {
        "node_id": node.get("id", ""),
        "node_type": node.get("type", "EFFECT"),
        "config": config,
        "inputs": node.get("inputs", []),
        "name": node.get("name"),
    }
def build_input_name_mapping(
    nodes: Dict[str, TransformedNode],
) -> Dict[str, str]:
    """
    Map every accepted alias of a variable input to its node ID.

    Aliases generated per variable SOURCE node:
    - the node_id itself
    - config.name verbatim, plus its snake_case and kebab-case forms
    - the node's def-binding name, plus its underscore form
    """
    mapping: Dict[str, str] = {}
    for node_id, node in nodes.items():
        if node.get("node_type") != "SOURCE":
            continue
        config = node.get("config", {})
        if not is_variable_input(config):
            continue
        # The node ID always maps to itself.
        mapping[node_id] = node_id
        display_name = config.get("name")
        if display_name:
            lowered = display_name.lower()
            mapping[display_name] = node_id
            mapping[lowered.replace(" ", "_")] = node_id
            mapping[lowered.replace(" ", "-")] = node_id
        binding = node.get("name")
        if binding:
            mapping[binding] = node_id
            mapping[binding.replace("-", "_")] = node_id
    return mapping
def bind_inputs(
    nodes: Dict[str, TransformedNode],
    input_name_to_node: Dict[str, str],
    user_inputs: Dict[str, str],
) -> List[str]:
    """
    Write user-supplied CIDs into the matching SOURCE nodes' configs.

    Each input is matched first as a literal node ID (only bound when that
    node is a SOURCE), then via the alias mapping. Inputs that match
    neither produce a warning string; the warning list is returned.
    """
    warnings: List[str] = []
    for input_name, cid in user_inputs.items():
        # Direct node-ID match, restricted to SOURCE nodes.
        direct = nodes.get(input_name)
        if direct is not None and direct.get("node_type") == "SOURCE":
            direct["config"]["cid"] = cid
            logger.info(f"Bound input {input_name} directly to node, cid={cid[:16]}...")
            continue
        # Alias-table match.
        mapped_id = input_name_to_node.get(input_name)
        if mapped_id is not None:
            nodes[mapped_id]["config"]["cid"] = cid
            logger.info(f"Bound input {input_name} via lookup to node {mapped_id}, cid={cid[:16]}...")
            continue
        warnings.append(f"Input '{input_name}' not found in recipe")
        logger.warning(f"Input {input_name} not found in nodes or input_name_to_node")
    return warnings
async def resolve_friendly_names_in_registry(
    registry: dict,
    actor_id: str,
) -> dict:
    """
    Resolve friendly names to CIDs in the registry.

    Friendly names are identified by containing a space (e.g., "brightness 01hw3x9k")
    or by not being a valid CID format. Returns a new registry dict with
    ``assets`` and ``effects``; resolved entries are copies carrying a
    ``_resolved_from`` key, unresolvable/CID-shaped entries pass through.
    """
    from ..services.naming_service import get_naming_service
    import re
    naming = get_naming_service()
    resolved = {"assets": {}, "effects": {}}
    # CID patterns: IPFS CID (Qm..., bafy...) or SHA256 hash (64 hex chars)
    # NOTE(review): CIDv1 strings may start with prefixes other than "bafy"
    # (e.g. other multibase/multicodec combinations) — confirm this pattern
    # covers every CID shape this system produces.
    cid_pattern = re.compile(r'^(Qm[a-zA-Z0-9]{44}|bafy[a-zA-Z0-9]+|[a-f0-9]{64})$')
    for asset_name, asset_info in registry.get("assets", {}).items():
        cid = asset_info.get("cid", "")
        if cid and not cid_pattern.match(cid):
            # Looks like a friendly name, resolve it
            resolved_cid = await naming.resolve(actor_id, cid, item_type="media")
            if resolved_cid:
                # Copy before mutating so the caller's registry is untouched.
                asset_info = dict(asset_info)
                asset_info["cid"] = resolved_cid
                asset_info["_resolved_from"] = cid
        resolved["assets"][asset_name] = asset_info
    for effect_name, effect_info in registry.get("effects", {}).items():
        cid = effect_info.get("cid", "")
        if cid and not cid_pattern.match(cid):
            # Looks like a friendly name, resolve it
            resolved_cid = await naming.resolve(actor_id, cid, item_type="effect")
            if resolved_cid:
                effect_info = dict(effect_info)
                effect_info["cid"] = resolved_cid
                effect_info["_resolved_from"] = cid
        resolved["effects"][effect_name] = effect_info
    return resolved
async def prepare_dag_for_execution(
    recipe: Recipe,
    user_inputs: Dict[str, str],
    actor_id: Optional[str] = None,
) -> Tuple[str, List[str]]:
    """
    Prepare a recipe DAG for execution by transforming nodes and binding inputs.

    Resolves friendly names to CIDs if actor_id is provided.
    Returns (dag_json, warnings) where dag_json is the serialized execution
    DAG and warnings lists inputs that could not be bound.

    Raises:
        ValueError: if the recipe has no dict-shaped "dag" entry.
    """
    recipe_dag = recipe.get("dag")
    if not recipe_dag or not isinstance(recipe_dag, dict):
        raise ValueError("Recipe has no DAG definition")
    # Deep copy to avoid mutating original (JSON round-trip also drops
    # any non-JSON-serializable leftovers).
    dag_copy = json.loads(json.dumps(recipe_dag))
    nodes = dag_copy.get("nodes", {})
    # Get registry for resolving references
    registry = recipe.get("registry", {})
    # Resolve friendly names to CIDs
    if actor_id and registry:
        registry = await resolve_friendly_names_in_registry(registry, actor_id)
    assets = registry.get("assets", {}) if registry else {}
    effects = registry.get("effects", {}) if registry else {}
    # Transform nodes from list to dict if needed
    if isinstance(nodes, list):
        nodes_dict: Dict[str, TransformedNode] = {}
        for node in nodes:
            node_id = node.get("id")
            if node_id:
                nodes_dict[node_id] = transform_node(node, assets, effects)
        nodes = nodes_dict
        dag_copy["nodes"] = nodes
    # Build input name mapping and bind user inputs
    input_name_to_node = build_input_name_mapping(nodes)
    logger.info(f"Input name to node mapping: {input_name_to_node}")
    logger.info(f"User-provided inputs: {user_inputs}")
    warnings = bind_inputs(nodes, input_name_to_node, user_inputs)
    # Log final SOURCE node configs for debugging
    for nid, n in nodes.items():
        if n.get("node_type") == "SOURCE":
            logger.info(f"Final SOURCE node {nid}: config={n.get('config')}")
    # Transform output to output_id (execution format uses output_id)
    if "output" in dag_copy:
        dag_copy["output_id"] = dag_copy.pop("output")
    # Add metadata if not present
    if "metadata" not in dag_copy:
        dag_copy["metadata"] = {}
    return json.dumps(dag_copy), warnings
@router.post("/upload")
async def upload_recipe(
    file: UploadFile = File(...),
    ctx: UserContext = Depends(require_auth),
    recipe_service: RecipeService = Depends(get_recipe_service),
):
    """Upload a new recipe from S-expression or YAML file.

    Format is auto-detected (first non-comment line starting with '(' is
    treated as S-expression, otherwise YAML). The recipe is parsed to
    extract name/version/description and to classify inputs as variable
    or fixed, then stored via RecipeService under the caller's actor_id.
    """
    import yaml
    # Read content from the uploaded file
    content = (await file.read()).decode("utf-8")
    # Detect format (skip comments starting with ;)
    def is_sexp_format(text):
        # Decide based on the first non-blank, non-comment line only.
        for line in text.split('\n'):
            stripped = line.strip()
            if not stripped or stripped.startswith(';'):
                continue
            return stripped.startswith('(')
        return False
    is_sexp = is_sexp_format(content)
    # artdag.sexp is an optional dependency; degrade gracefully if absent.
    try:
        from artdag.sexp import compile_string, ParseError, CompileError
        SEXP_AVAILABLE = True
    except ImportError:
        SEXP_AVAILABLE = False
    recipe_name = None
    recipe_version = "1.0"
    recipe_description = None
    variable_inputs = []
    fixed_inputs = []
    if is_sexp:
        if not SEXP_AVAILABLE:
            raise HTTPException(500, "S-expression recipes require artdag.sexp module (not installed on server)")
        # Parse S-expression
        try:
            compiled = compile_string(content)
            recipe_name = compiled.name
            recipe_version = compiled.version
            recipe_description = compiled.description
            # SOURCE nodes with an "input" flag are variable; "asset" refs are fixed.
            for node in compiled.nodes:
                if node.get("type") == "SOURCE":
                    config = node.get("config", {})
                    if config.get("input"):
                        variable_inputs.append(config.get("name", node.get("id")))
                    elif config.get("asset"):
                        fixed_inputs.append(config.get("asset"))
        except Exception as e:
            raise HTTPException(400, f"Parse error: {e}")
    else:
        # Parse YAML
        try:
            recipe_data = yaml.safe_load(content)
            recipe_name = recipe_data.get("name")
            recipe_version = recipe_data.get("version", "1.0")
            recipe_description = recipe_data.get("description")
            inputs = recipe_data.get("inputs", {})
            # Inputs marked {"fixed": true} are fixed; everything else is variable.
            for input_name, input_def in inputs.items():
                if isinstance(input_def, dict) and input_def.get("fixed"):
                    fixed_inputs.append(input_name)
                else:
                    variable_inputs.append(input_name)
        except yaml.YAMLError as e:
            raise HTTPException(400, f"Invalid YAML: {e}")
    # Use filename as recipe name if not specified
    if not recipe_name and file.filename:
        recipe_name = file.filename.rsplit(".", 1)[0]
    recipe_id, error = await recipe_service.upload_recipe(
        content=content,
        uploader=ctx.actor_id,
        name=recipe_name,
        description=recipe_description,
    )
    if error:
        raise HTTPException(400, error)
    return {
        "recipe_id": recipe_id,
        "name": recipe_name or "unnamed",
        "version": recipe_version,
        "variable_inputs": variable_inputs,
        "fixed_inputs": fixed_inputs,
        "message": "Recipe uploaded successfully",
    }
@router.get("")
async def list_recipes(
    request: Request,
    offset: int = 0,
    limit: int = 20,
    recipe_service: RecipeService = Depends(get_recipe_service),
    ctx: UserContext = Depends(require_auth),
):
    """List the caller's recipes with pagination (JSON or HTML by negotiation)."""
    recipes = await recipe_service.list_recipes(ctx.actor_id, offset=offset, limit=limit)
    # A full page implies more results may follow (heuristic; no total count).
    has_more = len(recipes) >= limit
    if wants_json(request):
        return {"recipes": recipes, "offset": offset, "limit": limit, "has_more": has_more}
    from ..dependencies import get_nav_counts
    counts = await get_nav_counts(ctx.actor_id)
    templates = get_templates(request)
    return render(
        templates, "recipes/list.html", request,
        recipes=recipes,
        user=ctx,
        nav_counts=counts,
        active_tab="recipes",
        offset=offset,
        limit=limit,
        has_more=has_more,
    )
@router.get("/{recipe_id}")
async def get_recipe(
recipe_id: str,
request: Request,
recipe_service: RecipeService = Depends(get_recipe_service),
ctx: UserContext = Depends(require_auth),
):
"""Get recipe details."""
recipe = await recipe_service.get_recipe(recipe_id)
if not recipe:
raise HTTPException(404, "Recipe not found")
# Add friendly name if available
from ..services.naming_service import get_naming_service
naming = get_naming_service()
friendly = await naming.get_by_cid(ctx.actor_id, recipe_id)
if friendly:
recipe["friendly_name"] = friendly["friendly_name"]
recipe["base_name"] = friendly["base_name"]
recipe["version_id"] = friendly["version_id"]
if wants_json(request):
return recipe
# Build DAG elements for visualization and convert nodes to steps format
dag_elements = []
steps = []
node_colors = {
"SOURCE": "#3b82f6",
"EFFECT": "#8b5cf6",
"SEQUENCE": "#ec4899",
"transform": "#10b981",
"output": "#f59e0b",
}
# Debug: log recipe structure
logger.info(f"Recipe keys: {list(recipe.keys())}")
# Get nodes from dag - can be list or dict, can be under "dag" or directly on recipe
dag = recipe.get("dag", {})
logger.info(f"DAG type: {type(dag)}, keys: {list(dag.keys()) if isinstance(dag, dict) else 'not dict'}")
nodes = dag.get("nodes", []) if isinstance(dag, dict) else []
logger.info(f"Nodes from dag.nodes: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
# Also check for nodes directly on recipe (alternative formats)
if not nodes:
nodes = recipe.get("nodes", [])
logger.info(f"Nodes from recipe.nodes: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
if not nodes:
nodes = recipe.get("pipeline", [])
logger.info(f"Nodes from recipe.pipeline: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
if not nodes:
nodes = recipe.get("steps", [])
logger.info(f"Nodes from recipe.steps: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
logger.info(f"Final nodes count: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
# Convert list of nodes to steps format
if isinstance(nodes, list):
for node in nodes:
node_id = node.get("id", "")
node_type = node.get("type", "EFFECT")
inputs = node.get("inputs", [])
config = node.get("config", {})
steps.append({
"id": node_id,
"name": node_id,
"type": node_type,
"inputs": inputs,
"params": config,
})
dag_elements.append({
"data": {
"id": node_id,
"label": node_id,
"color": node_colors.get(node_type, "#6b7280"),
}
})
for inp in inputs:
if isinstance(inp, str):
dag_elements.append({
"data": {"source": inp, "target": node_id}
})
elif isinstance(nodes, dict):
for node_id, node in nodes.items():
node_type = node.get("type", "EFFECT")
inputs = node.get("inputs", [])
config = node.get("config", {})
steps.append({
"id": node_id,
"name": node_id,
"type": node_type,
"inputs": inputs,
"params": config,
})
dag_elements.append({
"data": {
"id": node_id,
"label": node_id,
"color": node_colors.get(node_type, "#6b7280"),
}
})
for inp in inputs:
if isinstance(inp, str):
dag_elements.append({
"data": {"source": inp, "target": node_id}
})
# Add steps to recipe for template
recipe["steps"] = steps
# Use S-expression source if available
if "sexp" not in recipe:
recipe["sexp"] = "; No S-expression source available"
from ..dependencies import get_nav_counts
nav_counts = await get_nav_counts(ctx.actor_id)
templates = get_templates(request)
return render(templates, "recipes/detail.html", request,
recipe=recipe,
dag_elements=dag_elements,
user=ctx,
nav_counts=nav_counts,
active_tab="recipes",
)
@router.delete("/{recipe_id}")
async def delete_recipe(
recipe_id: str,
ctx: UserContext = Depends(require_auth),
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""Delete a recipe."""
success, error = await recipe_service.delete_recipe(recipe_id, ctx.actor_id)
if error:
raise HTTPException(400 if "Cannot" in error else 404, error)
return {"deleted": True, "recipe_id": recipe_id}
@router.post("/{recipe_id}/run")
async def run_recipe(
recipe_id: str,
req: RecipeRunRequest,
ctx: UserContext = Depends(require_auth),
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""Run a recipe with given inputs."""
from ..services.run_service import RunService
from ..dependencies import get_cache_manager
import database
recipe = await recipe_service.get_recipe(recipe_id)
if not recipe:
raise HTTPException(404, "Recipe not found")
try:
# Create run using run service
run_service = RunService(database, get_redis_client(), get_cache_manager())
# Prepare DAG for execution (transform nodes, bind inputs, resolve friendly names)
dag_json = None
if recipe.get("dag"):
dag_json, warnings = await prepare_dag_for_execution(recipe, req.inputs, actor_id=ctx.actor_id)
for warning in warnings:
logger.warning(warning)
run, error = await run_service.create_run(
recipe=recipe_id, # Use recipe hash as primary identifier
inputs=req.inputs,
use_dag=True,
dag_json=dag_json,
actor_id=ctx.actor_id,
l2_server=ctx.l2_server,
recipe_name=recipe.get("name"), # Store name for display
recipe_sexp=recipe.get("sexp"), # S-expression for code-addressed execution
)
if error:
raise HTTPException(400, error)
if not run:
raise HTTPException(500, "Run creation returned no result")
return {
"run_id": run["run_id"] if isinstance(run, dict) else run.run_id,
"status": run.get("status", "pending") if isinstance(run, dict) else run.status,
"message": "Recipe execution started",
}
except HTTPException:
raise
except Exception as e:
logger.exception(f"Error running recipe {recipe_id}")
raise HTTPException(500, f"Run failed: {e}")
@router.get("/{recipe_id}/dag")
async def recipe_dag(
recipe_id: str,
request: Request,
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""Get recipe DAG visualization data."""
recipe = await recipe_service.get_recipe(recipe_id)
if not recipe:
raise HTTPException(404, "Recipe not found")
dag_elements = []
node_colors = {
"input": "#3b82f6",
"effect": "#8b5cf6",
"analyze": "#ec4899",
"transform": "#10b981",
"output": "#f59e0b",
}
for i, step in enumerate(recipe.get("steps", [])):
step_id = step.get("id", f"step-{i}")
dag_elements.append({
"data": {
"id": step_id,
"label": step.get("name", f"Step {i+1}"),
"color": node_colors.get(step.get("type", "effect"), "#6b7280"),
}
})
for inp in step.get("inputs", []):
dag_elements.append({
"data": {"source": inp, "target": step_id}
})
return {"elements": dag_elements}
@router.delete("/{recipe_id}/ui", response_class=HTMLResponse)
async def ui_discard_recipe(
recipe_id: str,
request: Request,
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""HTMX handler: discard a recipe."""
ctx = await get_current_user(request)
if not ctx:
return HTMLResponse('<div class="text-red-400">Login required</div>', status_code=401)
success, error = await recipe_service.delete_recipe(recipe_id, ctx.actor_id)
if error:
return HTMLResponse(f'<div class="text-red-400">{error}</div>')
return HTMLResponse(
'<div class="text-green-400">Recipe deleted</div>'
'<script>setTimeout(() => window.location.href = "/recipes", 1500);</script>'
)
@router.post("/{recipe_id}/publish")
async def publish_recipe(
recipe_id: str,
request: Request,
ctx: UserContext = Depends(require_auth),
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""Publish recipe to L2 and IPFS."""
from ..services.cache_service import CacheService
from ..dependencies import get_cache_manager
import database
# Verify recipe exists
recipe = await recipe_service.get_recipe(recipe_id)
if not recipe:
raise HTTPException(404, "Recipe not found")
# Use cache service to publish (recipes are stored in cache)
cache_service = CacheService(database, get_cache_manager())
ipfs_cid, error = await cache_service.publish_to_l2(
cid=recipe_id,
actor_id=ctx.actor_id,
l2_server=ctx.l2_server,
auth_token=request.cookies.get("auth_token"),
)
if error:
if wants_html(request):
return HTMLResponse(f'<span class="text-red-400">{error}</span>')
raise HTTPException(400, error)
if wants_html(request):
return HTMLResponse(f'<span class="text-green-400">Shared: {ipfs_cid[:16]}...</span>')
return {"ipfs_cid": ipfs_cid, "published": True}

1704
l1/app/routers/runs.py Normal file

File diff suppressed because it is too large Load Diff

264
l1/app/routers/storage.py Normal file
View File

@@ -0,0 +1,264 @@
"""
Storage provider routes for L1 server.
Manages user storage backends (Pinata, web3.storage, local, etc.)
"""
from typing import Optional, Dict, Any
from fastapi import APIRouter, Request, Depends, HTTPException, Form
from fastapi.responses import HTMLResponse, RedirectResponse
from pydantic import BaseModel
from artdag_common import render
from artdag_common.middleware import wants_html, wants_json
from artdag_common.middleware.auth import UserContext
from ..dependencies import get_database, get_current_user, require_auth, get_templates
from ..services.storage_service import StorageService, STORAGE_PROVIDERS_INFO, VALID_PROVIDER_TYPES
router = APIRouter()
# Import storage_providers module
import storage_providers as sp_module
def get_storage_service():
    """Build a StorageService wired to the shared database and provider modules."""
    import database as db
    return StorageService(db, sp_module)
class AddStorageRequest(BaseModel):
    """Payload for registering a new storage backend for the current user."""
    # Backend identifier - presumably validated against VALID_PROVIDER_TYPES
    # by the storage service (confirm in StorageService.add_storage).
    provider_type: str
    # Provider-specific settings/credentials (API keys, bucket, path, ...).
    config: Dict[str, Any]
    # Advertised capacity in gigabytes.
    capacity_gb: int = 5
    # Optional display name for this configuration.
    provider_name: Optional[str] = None
class UpdateStorageRequest(BaseModel):
    """Partial update for an existing storage backend.

    Fields left as None are passed through to the service unchanged -
    presumably treated as "no change" (confirm in StorageService.update_storage).
    """
    config: Optional[Dict[str, Any]] = None
    capacity_gb: Optional[int] = None
    is_active: Optional[bool] = None
@router.get("")
async def list_storage(
request: Request,
storage_service: StorageService = Depends(get_storage_service),
ctx: UserContext = Depends(require_auth),
):
"""List user's storage providers. HTML for browsers, JSON for API."""
storages = await storage_service.list_storages(ctx.actor_id)
if wants_json(request):
return {"storages": storages}
# Render HTML template
from ..dependencies import get_nav_counts
nav_counts = await get_nav_counts(ctx.actor_id)
templates = get_templates(request)
return render(templates, "storage/list.html", request,
storages=storages,
user=ctx,
nav_counts=nav_counts,
providers_info=STORAGE_PROVIDERS_INFO,
active_tab="storage",
)
@router.post("")
async def add_storage(
req: AddStorageRequest,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
):
"""Add a storage provider via API."""
ctx = await require_auth(request)
storage_id, error = await storage_service.add_storage(
actor_id=ctx.actor_id,
provider_type=req.provider_type,
config=req.config,
capacity_gb=req.capacity_gb,
provider_name=req.provider_name,
)
if error:
raise HTTPException(400, error)
return {"id": storage_id, "message": "Storage provider added"}
@router.post("/add")
async def add_storage_form(
request: Request,
provider_type: str = Form(...),
provider_name: Optional[str] = Form(None),
description: Optional[str] = Form(None),
capacity_gb: int = Form(5),
api_key: Optional[str] = Form(None),
secret_key: Optional[str] = Form(None),
api_token: Optional[str] = Form(None),
project_id: Optional[str] = Form(None),
project_secret: Optional[str] = Form(None),
access_key: Optional[str] = Form(None),
bucket: Optional[str] = Form(None),
path: Optional[str] = Form(None),
storage_service: StorageService = Depends(get_storage_service),
):
"""Add a storage provider via HTML form."""
ctx = await get_current_user(request)
if not ctx:
return HTMLResponse('<div class="text-red-400">Not authenticated</div>', status_code=401)
# Build config from form
form_data = {
"api_key": api_key,
"secret_key": secret_key,
"api_token": api_token,
"project_id": project_id,
"project_secret": project_secret,
"access_key": access_key,
"bucket": bucket,
"path": path,
}
config, error = storage_service.build_config_from_form(provider_type, form_data)
if error:
return HTMLResponse(f'<div class="text-red-400">{error}</div>')
storage_id, error = await storage_service.add_storage(
actor_id=ctx.actor_id,
provider_type=provider_type,
config=config,
capacity_gb=capacity_gb,
provider_name=provider_name,
description=description,
)
if error:
return HTMLResponse(f'<div class="text-red-400">{error}</div>')
return HTMLResponse(f'''
<div class="text-green-400 mb-2">Storage provider added successfully!</div>
<script>setTimeout(() => window.location.href = '/storage/type/{provider_type}', 1500);</script>
''')
@router.get("/{storage_id}")
async def get_storage(
storage_id: int,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
):
"""Get a specific storage provider."""
ctx = await require_auth(request)
storage = await storage_service.get_storage(storage_id, ctx.actor_id)
if not storage:
raise HTTPException(404, "Storage provider not found")
return storage
@router.patch("/{storage_id}")
async def update_storage(
storage_id: int,
req: UpdateStorageRequest,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
):
"""Update a storage provider."""
ctx = await require_auth(request)
success, error = await storage_service.update_storage(
storage_id=storage_id,
actor_id=ctx.actor_id,
config=req.config,
capacity_gb=req.capacity_gb,
is_active=req.is_active,
)
if error:
raise HTTPException(400, error)
return {"message": "Storage provider updated"}
@router.delete("/{storage_id}")
async def delete_storage(
storage_id: int,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
ctx: UserContext = Depends(require_auth),
):
"""Remove a storage provider."""
success, error = await storage_service.delete_storage(storage_id, ctx.actor_id)
if error:
raise HTTPException(400, error)
if wants_html(request):
return HTMLResponse("")
return {"message": "Storage provider removed"}
@router.post("/{storage_id}/test")
async def test_storage(
storage_id: int,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
):
"""Test storage provider connectivity."""
ctx = await get_current_user(request)
if not ctx:
if wants_html(request):
return HTMLResponse('<span class="text-red-400">Not authenticated</span>', status_code=401)
raise HTTPException(401, "Not authenticated")
success, message = await storage_service.test_storage(storage_id, ctx.actor_id)
if wants_html(request):
color = "green" if success else "red"
return HTMLResponse(f'<span class="text-{color}-400">{message}</span>')
return {"success": success, "message": message}
@router.get("/type/{provider_type}")
async def storage_type_page(
provider_type: str,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
ctx: UserContext = Depends(require_auth),
):
"""Page for managing storage configs of a specific type."""
if provider_type not in STORAGE_PROVIDERS_INFO:
raise HTTPException(404, "Invalid provider type")
storages = await storage_service.list_by_type(ctx.actor_id, provider_type)
provider_info = STORAGE_PROVIDERS_INFO[provider_type]
if wants_json(request):
return {
"provider_type": provider_type,
"provider_info": provider_info,
"storages": storages,
}
from ..dependencies import get_nav_counts
nav_counts = await get_nav_counts(ctx.actor_id)
templates = get_templates(request)
return render(templates, "storage/type.html", request,
provider_type=provider_type,
provider_info=provider_info,
storages=storages,
user=ctx,
nav_counts=nav_counts,
active_tab="storage",
)

View File

@@ -0,0 +1,15 @@
"""
L1 Server Services.
Business logic layer between routers and repositories.
"""
from .run_service import RunService
from .recipe_service import RecipeService
from .cache_service import CacheService
__all__ = [
"RunService",
"RecipeService",
"CacheService",
]

View File

@@ -0,0 +1,138 @@
"""
Auth Service - token management and user verification.
"""
import hashlib
import base64
import json
from typing import Optional, Dict, Any, TYPE_CHECKING
import httpx
from artdag_common.middleware.auth import UserContext
from ..config import settings
if TYPE_CHECKING:
import redis
from starlette.requests import Request
# Token expiry (30 days to match token lifetime)
TOKEN_EXPIRY_SECONDS = 60 * 60 * 24 * 30
# Redis key prefixes
REVOKED_KEY_PREFIX = "artdag:revoked:"
USER_TOKENS_PREFIX = "artdag:user_tokens:"
class AuthService:
    """Token management: registration, revocation, and claim extraction.

    Revocation state lives in Redis under REVOKED_KEY_PREFIX; per-user
    token tracking lives under USER_TOKENS_PREFIX. Keys expire after
    TOKEN_EXPIRY_SECONDS to match the token lifetime.
    """

    def __init__(self, redis_client: "redis.Redis[bytes]") -> None:
        self.redis = redis_client

    def register_user_token(self, username: str, token: str) -> None:
        """Track a token for a user (for later revocation by username)."""
        digest = hashlib.sha256(token.encode()).hexdigest()
        user_key = f"{USER_TOKENS_PREFIX}{username}"
        self.redis.sadd(user_key, digest)
        self.redis.expire(user_key, TOKEN_EXPIRY_SECONDS)

    def revoke_token(self, token: str) -> bool:
        """Add a token to the revocation set. Returns True if newly revoked."""
        digest = hashlib.sha256(token.encode()).hexdigest()
        return self.revoke_token_hash(digest)

    def revoke_token_hash(self, token_hash: str) -> bool:
        """Add a token hash to the revocation set. Returns True if newly revoked."""
        # NX means the SET only succeeds if the key is new, which is how
        # "newly revoked" is detected.
        was_set = self.redis.set(
            f"{REVOKED_KEY_PREFIX}{token_hash}", "1",
            ex=TOKEN_EXPIRY_SECONDS, nx=True,
        )
        return was_set is not None

    def revoke_all_user_tokens(self, username: str) -> int:
        """Revoke every tracked token for a user. Returns how many were revoked."""
        user_key = f"{USER_TOKENS_PREFIX}{username}"
        revoked = 0
        for member in self.redis.smembers(user_key):
            digest = member.decode() if isinstance(member, bytes) else member
            if self.revoke_token_hash(digest):
                revoked += 1
        self.redis.delete(user_key)
        return revoked

    def is_token_revoked(self, token: str) -> bool:
        """True if the token appears in the revocation set."""
        digest = hashlib.sha256(token.encode()).hexdigest()
        return self.redis.exists(f"{REVOKED_KEY_PREFIX}{digest}") > 0

    def decode_token_claims(self, token: str) -> Optional[Dict[str, Any]]:
        """Decode JWT claims WITHOUT signature verification.

        Returns None for anything that is not a well-formed three-part
        token with a base64url/JSON payload.
        """
        pieces = token.split(".")
        if len(pieces) != 3:
            return None
        body = pieces[1]
        # Restore the base64 padding that JWT encoding strips.
        remainder = len(body) % 4
        if remainder:
            body += "=" * (4 - remainder)
        try:
            return json.loads(base64.urlsafe_b64decode(body))
        except (json.JSONDecodeError, ValueError):
            return None

    def get_user_context_from_token(self, token: str) -> "Optional[UserContext]":
        """Build a UserContext from a token, unless it is revoked or malformed."""
        if self.is_token_revoked(token):
            return None
        claims = self.decode_token_claims(token)
        if not claims:
            return None
        username = claims.get("username") or claims.get("sub")
        if not username:
            return None
        actor = claims.get("actor_id") or claims.get("actor")
        return UserContext(
            username=username,
            actor_id=actor or f"@{username}",
            token=token,
            l2_server=settings.l2_server,
        )

    async def verify_token_with_l2(self, token: str) -> "Optional[UserContext]":
        """Verify a token against the L2 server (best effort)."""
        ctx = self.get_user_context_from_token(token)
        if ctx is None:
            return None
        if settings.l2_server:
            try:
                async with httpx.AsyncClient() as client:
                    resp = await client.get(
                        f"{settings.l2_server}/auth/verify",
                        headers={"Authorization": f"Bearer {token}"},
                        timeout=5.0,
                    )
                    if resp.status_code != 200:
                        return None
            except httpx.RequestError:
                # L2 unreachable: fall back to trusting the local decode.
                pass
        return ctx

    def get_user_from_cookie(self, request: "Request") -> "Optional[UserContext]":
        """Extract a UserContext from the auth_token cookie, if present."""
        cookie = request.cookies.get("auth_token")
        return self.get_user_context_from_token(cookie) if cookie else None

View File

@@ -0,0 +1,618 @@
"""
Cache Service - business logic for cache and media management.
"""
import asyncio
import json
import logging
import os
import subprocess
from pathlib import Path
from typing import Optional, List, Dict, Any, Tuple, TYPE_CHECKING
import httpx
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from database import Database
from cache_manager import L1CacheManager
def detect_media_type(cache_path: Path) -> str:
    """Classify a file as "video", "image", "audio", or "unknown".

    Detection is by magic bytes only, so it works on extensionless
    cache files. Returns "unknown" when the file cannot be read or no
    known signature matches.
    """
    try:
        with open(cache_path, "rb") as f:
            header = f.read(32)
    except Exception:
        return "unknown"
    # Video signatures
    if header[:4] == b'\x1a\x45\xdf\xa3':  # WebM/MKV (EBML)
        return "video"
    if len(header) > 8 and header[4:8] == b'ftyp':  # MP4/MOV
        return "video"
    if header[:4] == b'RIFF' and len(header) > 12 and header[8:12] == b'AVI ':  # AVI
        return "video"
    # Image signatures
    if header[:8] == b'\x89PNG\r\n\x1a\n':  # PNG
        return "image"
    if header[:2] == b'\xff\xd8':  # JPEG
        return "image"
    if header[:6] in (b'GIF87a', b'GIF89a'):  # GIF
        return "image"
    if header[:4] == b'RIFF' and len(header) > 12 and header[8:12] == b'WEBP':  # WebP
        return "image"
    # Audio signatures
    if header[:4] == b'RIFF' and len(header) > 12 and header[8:12] == b'WAVE':  # WAV
        return "audio"
    # MP3: ID3 tag, or an MPEG audio frame sync. The previous check only
    # matched 0xFFFB (MPEG-1 Layer III, no CRC); 0xFFF3/0xFFF2
    # (MPEG-2 Layer III) are equally common in the wild.
    if header[:3] == b'ID3' or header[:2] in (b'\xff\xfb', b'\xff\xf3', b'\xff\xf2'):
        return "audio"
    if header[:4] == b'fLaC':  # FLAC
        return "audio"
    if header[:4] == b'OggS':  # Ogg container (Vorbis/Opus)
        return "audio"
    return "unknown"
def get_mime_type(path: Path) -> str:
    """Best-effort MIME type from a file's magic bytes.

    Reads the header once (the previous implementation re-opened the
    file up to three times via detect_media_type) and fixes two wrong
    mappings: WebP was reported as image/jpeg and WAV/FLAC as
    audio/mpeg. Falls back to application/octet-stream for anything
    unrecognized.
    """
    try:
        with open(path, "rb") as f:
            header = f.read(32)
    except Exception:
        return "application/octet-stream"
    # Video
    if header[:4] == b'\x1a\x45\xdf\xa3':  # WebM/MKV (EBML)
        return "video/x-matroska"
    if len(header) > 8 and header[4:8] == b'ftyp':  # MP4/MOV
        return "video/mp4"
    if header[:4] == b'RIFF' and len(header) > 12 and header[8:12] == b'AVI ':
        # Kept from the old behavior: AVI is served as video/mp4.
        return "video/mp4"
    # Images
    if header[:8] == b'\x89PNG\r\n\x1a\n':
        return "image/png"
    if header[:2] == b'\xff\xd8':
        return "image/jpeg"
    if header[:6] in (b'GIF87a', b'GIF89a'):
        return "image/gif"
    if header[:4] == b'RIFF' and len(header) > 12 and header[8:12] == b'WEBP':
        return "image/webp"
    # Audio
    if header[:4] == b'RIFF' and len(header) > 12 and header[8:12] == b'WAVE':
        return "audio/wav"
    if header[:3] == b'ID3' or header[:2] in (b'\xff\xfb', b'\xff\xf3', b'\xff\xf2'):
        return "audio/mpeg"
    if header[:4] == b'fLaC':
        return "audio/flac"
    return "application/octet-stream"
class CacheService:
"""
Service for managing cached content.
Handles content retrieval, metadata, and media type detection.
"""
def __init__(self, database: "Database", cache_manager: "L1CacheManager") -> None:
    # Database handle for metadata and ownership queries.
    self.db = database
    # Content-addressed cache backend.
    self.cache = cache_manager
    # On-disk cache root; also holds legacy .meta.json sidecars and
    # cached MP4 transcodes.
    self.cache_dir = Path(os.environ.get("CACHE_DIR", "/tmp/artdag-cache"))
async def get_cache_item(self, cid: str, actor_id: str = None) -> Optional[Dict[str, Any]]:
    """Get cached item with full metadata for display.

    Combines database metadata with local-file inspection (media/MIME
    type, size). When no local copy exists, falls back to the type
    recorded in the item_types table. Returns None when the cid has no
    row in the cache table. ``remote_only`` is True when the file is
    not on disk (it may still be fetchable via its IPFS CID).
    """
    # Get metadata from database first
    meta = await self.db.load_item_metadata(cid, actor_id)
    cache_item = await self.db.get_cache_item(cid)
    # Check if content exists locally
    path = self.cache.get_by_cid(cid) if self.cache.has_content(cid) else None
    if path and path.exists():
        # Local file exists - detect type from magic bytes
        media_type = detect_media_type(path)
        mime_type = get_mime_type(path)
        size = path.stat().st_size
    else:
        # File not local - magic-byte detection needs the bytes, so use
        # the coarse type stored in item_types instead.
        media_type = "unknown"
        mime_type = "application/octet-stream"
        size = 0
        if actor_id:
            try:
                item_types = await self.db.get_item_types(cid, actor_id)
                if item_types:
                    media_type = item_types[0].get("type", "unknown")
                    # Map the coarse category to a representative MIME type.
                    if media_type == "video":
                        mime_type = "video/mp4"
                    elif media_type == "image":
                        mime_type = "image/png"
                    elif media_type == "audio":
                        mime_type = "audio/mpeg"
            except Exception:
                # Best effort: missing type info just leaves "unknown".
                pass
    # No cache row means the cid is unknown entirely.
    if not cache_item:
        return None
    result = {
        "cid": cid,
        "path": str(path) if path else None,
        "media_type": media_type,
        "mime_type": mime_type,
        "size": size,
        "ipfs_cid": cache_item.get("ipfs_cid") if cache_item else None,
        "meta": meta,
        "remote_only": path is None or not path.exists(),
    }
    # Unpack meta fields to top level for template convenience
    if meta:
        result["title"] = meta.get("title")
        result["description"] = meta.get("description")
        result["tags"] = meta.get("tags", [])
        result["source_type"] = meta.get("source_type")
        result["source_note"] = meta.get("source_note")
        result["created_at"] = meta.get("created_at")
        result["filename"] = meta.get("filename")
    # Get friendly name if actor_id provided
    if actor_id:
        from .naming_service import get_naming_service
        naming = get_naming_service()
        friendly = await naming.get_by_cid(actor_id, cid)
        if friendly:
            result["friendly_name"] = friendly["friendly_name"]
            result["base_name"] = friendly["base_name"]
            result["version_id"] = friendly["version_id"]
    return result
async def check_access(self, cid: str, actor_id: str, username: str) -> bool:
    """True when the user owns or is otherwise associated with the content."""
    owned = await self._get_user_cache_hashes(username, actor_id)
    return cid in owned
async def _get_user_cache_hashes(self, username: str, actor_id: Optional[str] = None) -> set:
    """Get all cache hashes owned by or associated with a user.

    Union of three sources: ownership rows in the database, legacy
    .meta.json sidecar files whose uploader matches, and the inputs
    and outputs of the user's runs. Each source is best-effort.
    """
    # Identities a legacy sidecar's "uploader" field may match.
    match_values = [username]
    if actor_id:
        match_values.append(actor_id)
    hashes = set()
    # Query database for items owned by user
    if actor_id:
        try:
            db_items = await self.db.get_user_items(actor_id)
            for item in db_items:
                hashes.add(item["cid"])
        except Exception:
            # Best effort: fall back to the filesystem/run sources below.
            pass
    # Legacy: Files uploaded by user (JSON metadata sidecars on disk)
    if self.cache_dir.exists():
        for f in self.cache_dir.iterdir():
            if f.name.endswith('.meta.json'):
                try:
                    with open(f, 'r') as mf:
                        meta = json.load(mf)
                        if meta.get("uploader") in match_values:
                            # Sidecar is named <cid>.meta.json.
                            hashes.add(f.name.replace('.meta.json', ''))
                except Exception:
                    # Unreadable or corrupt sidecar: skip it.
                    pass
    # Files from user's runs (inputs and outputs)
    runs = await self._list_user_runs(username, actor_id)
    for run in runs:
        inputs = run.get("inputs", [])
        # Inputs may be a name->cid mapping; only the cids matter here.
        if isinstance(inputs, dict):
            inputs = list(inputs.values())
        hashes.update(inputs)
        if run.get("output_cid"):
            hashes.add(run["output_cid"])
    return hashes
async def _list_user_runs(self, username: str, actor_id: Optional[str]) -> List[Dict]:
    """List runs for a user by scanning the run keys in Redis."""
    from ..dependencies import get_redis_client
    import json
    redis = get_redis_client()
    identities = (username, actor_id)
    matches = []
    cursor = 0
    while True:
        cursor, keys = redis.scan(cursor=cursor, match="artdag:run:*", count=100)
        for key in keys:
            raw = redis.get(key)
            if not raw:
                continue
            run = json.loads(raw)
            if run.get("actor_id") in identities or run.get("username") in identities:
                matches.append(run)
        # SCAN signals completion by returning cursor 0.
        if cursor == 0:
            break
    return matches
async def get_raw_file(self, cid: str) -> Tuple[Optional[Path], Optional[str], Optional[str]]:
    """Resolve (path, mime_type, download_filename) for raw download.

    Returns (None, None, None) when the content is not cached locally.
    """
    if not self.cache.has_content(cid):
        return None, None, None
    path = self.cache.get_by_cid(cid)
    if path is None or not path.exists():
        return None, None, None
    kind = detect_media_type(path)
    mime = get_mime_type(path)
    # Pick a download extension from the magic bytes; .bin for anything
    # that cannot be identified.
    extension = "bin"
    if kind == "video":
        extension = "mp4"
        try:
            with open(path, "rb") as f:
                lead = f.read(12)
            if lead[:4] == b'\x1a\x45\xdf\xa3':
                extension = "mkv"
        except Exception:
            pass
    elif kind == "image":
        extension = "jpg"
        try:
            with open(path, "rb") as f:
                lead = f.read(8)
            if lead[:8] == b'\x89PNG\r\n\x1a\n':
                extension = "png"
        except Exception:
            pass
    return path, mime, f"{cid}.{extension}"
async def get_as_mp4(self, cid: str) -> Tuple[Optional[Path], Optional[str]]:
    """Get content as MP4, transcoding if necessary. Returns (path, error).

    Resolution order: a previously transcoded <cid>.mp4 in the cache
    dir, then the original file if ffprobe reports it already MP4/MOV,
    otherwise an ffmpeg H.264/AAC transcode that is cached for reuse.
    """
    if not self.cache.has_content(cid):
        return None, f"Content {cid} not in cache"
    path = self.cache.get_by_cid(cid)
    if not path or not path.exists():
        return None, f"Content {cid} not in cache"
    # Check if video
    media_type = detect_media_type(path)
    if media_type != "video":
        return None, "Content is not a video"
    # Check for cached MP4
    mp4_path = self.cache_dir / f"{cid}.mp4"
    if mp4_path.exists():
        return mp4_path, None
    # Check if already MP4 format (ffprobe reports container format names)
    try:
        result = subprocess.run(
            ["ffprobe", "-v", "error", "-select_streams", "v:0",
             "-show_entries", "format=format_name", "-of", "csv=p=0", str(path)],
            capture_output=True, text=True, timeout=10
        )
        if "mp4" in result.stdout.lower() or "mov" in result.stdout.lower():
            return path, None
    except Exception:
        # ffprobe missing or timed out: fall through and try transcoding.
        pass
    # Transcode to MP4. Write to a temp name and rename on success so a
    # partial file is never served as <cid>.mp4.
    transcode_path = self.cache_dir / f"{cid}.transcoding.mp4"
    try:
        result = subprocess.run(
            ["ffmpeg", "-y", "-i", str(path),
             "-c:v", "libx264", "-preset", "fast", "-crf", "23",
             "-c:a", "aac", "-b:a", "128k",
             "-movflags", "+faststart",
             str(transcode_path)],
            capture_output=True, text=True, timeout=600
        )
        if result.returncode != 0:
            return None, f"Transcoding failed: {result.stderr[:200]}"
        transcode_path.rename(mp4_path)
        return mp4_path, None
    except subprocess.TimeoutExpired:
        # Clean up the partial output before reporting the timeout.
        if transcode_path.exists():
            transcode_path.unlink()
        return None, "Transcoding timed out"
    except Exception as e:
        if transcode_path.exists():
            transcode_path.unlink()
        return None, f"Transcoding failed: {e}"
async def get_metadata(self, cid: str, actor_id: str) -> Optional[Dict[str, Any]]:
    """Return item metadata, or None when the content is not cached."""
    if self.cache.has_content(cid):
        return await self.db.load_item_metadata(cid, actor_id)
    return None
async def update_metadata(
    self,
    cid: str,
    actor_id: str,
    title: Optional[str] = None,
    description: Optional[str] = None,
    tags: Optional[List[str]] = None,
    custom: Optional[Dict[str, Any]] = None,
) -> Tuple[bool, Optional[str]]:
    """Update content metadata. Returns (success, error).

    Only the fields that are explicitly provided are written; None
    means "leave unchanged".
    """
    if not self.cache.has_content(cid):
        return False, "Content not found"
    candidates = {
        "title": title,
        "description": description,
        "tags": tags,
        "custom": custom,
    }
    updates = {field: value for field, value in candidates.items() if value is not None}
    try:
        await self.db.update_item_metadata(cid, actor_id, **updates)
        return True, None
    except Exception as e:
        return False, str(e)
async def publish_to_l2(
    self,
    cid: str,
    actor_id: str,
    l2_server: str,
    auth_token: str,
) -> Tuple[Optional[str], Optional[str]]:
    """Publish content to L2 and IPFS. Returns (ipfs_cid, error).

    The item's metadata must carry an ``origin`` with a ``type`` before
    publishing is allowed. On success the share is recorded locally and
    the item is pinned with reason "published".
    """
    if not self.cache.has_content(cid):
        return None, "Content not found"
    # Get IPFS CID (may be None if the item was never pushed to IPFS)
    cache_item = await self.db.get_cache_item(cid)
    ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None
    # Get metadata for origin info
    meta = await self.db.load_item_metadata(cid, actor_id)
    origin = meta.get("origin") if meta else None
    if not origin or "type" not in origin:
        return None, "Origin must be set before publishing"
    if not auth_token:
        return None, "Authentication token required"
    # Call L2 publish-cache endpoint
    try:
        async with httpx.AsyncClient(timeout=30) as client:
            resp = await client.post(
                f"{l2_server}/assets/publish-cache",
                headers={"Authorization": f"Bearer {auth_token}"},
                json={
                    "cid": cid,
                    "ipfs_cid": ipfs_cid,
                    "asset_name": meta.get("title") or cid[:16],
                    "asset_type": detect_media_type(self.cache.get_by_cid(cid)),
                    "origin": origin,
                    "description": meta.get("description"),
                    "tags": meta.get("tags", []),
                }
            )
            resp.raise_for_status()
            l2_result = resp.json()
    except httpx.HTTPStatusError as e:
        # Prefer the JSON "detail" field from the L2 error response.
        error_detail = str(e)
        try:
            error_detail = e.response.json().get("detail", str(e))
        except Exception:
            pass
        return None, f"L2 publish failed: {error_detail}"
    except Exception as e:
        return None, f"L2 publish failed: {e}"
    # Record the share and pin locally so the published item cannot be
    # garbage-collected.
    await self.db.save_l2_share(
        cid=cid,
        actor_id=actor_id,
        l2_server=l2_server,
        asset_name=meta.get("title") or cid[:16],
        content_type=detect_media_type(self.cache.get_by_cid(cid))
    )
    await self.db.update_item_metadata(
        cid=cid,
        actor_id=actor_id,
        pinned=True,
        pin_reason="published"
    )
    # Prefer the ipfs_cid reported by L2, falling back to the local one.
    return l2_result.get("ipfs_cid") or ipfs_cid, None
async def delete_content(self, cid: str, actor_id: str) -> Tuple[bool, Optional[str]]:
    """
    Remove user's ownership link to cached content.

    This removes the item_types entries linking the user to the
    content; the cached file itself is only garbage-collected when no
    other user owns it AND the cache manager's deletion rules allow it.
    Returns (success, error).
    """
    # NOTE: the previous version re-imported logging and created a local
    # logger here, shadowing the module-level logger - removed.
    # Pinned items must be unpinned before they can be discarded.
    meta = await self.db.load_item_metadata(cid, actor_id)
    if meta and meta.get("pinned"):
        pin_reason = meta.get("pin_reason", "unknown")
        return False, f"Cannot discard pinned item (reason: {pin_reason})"
    # Get the item type to delete the right ownership entry
    item_types = await self.db.get_item_types(cid, actor_id)
    if not item_types:
        return False, "You don't own this content"
    # Remove user's ownership links (all types for this user)
    for item in item_types:
        item_type = item.get("type", "media")
        await self.db.delete_item_type(cid, actor_id, item_type)
    # Remove friendly name
    await self.db.delete_friendly_name(actor_id, cid)
    # Check if anyone else still owns this content
    remaining_owners = await self.db.get_item_types(cid)
    # Only delete the actual file if no one owns it anymore
    if not remaining_owners:
        # Deletion rules (e.g. pin policy) are enforced by the cache manager.
        can_delete, reason = self.cache.can_delete(cid)
        if can_delete:
            self.cache.delete_by_cid(cid)
            # Clean up legacy metadata sidecars and cached transcodes.
            meta_path = self.cache_dir / f"{cid}.meta.json"
            if meta_path.exists():
                meta_path.unlink()
            mp4_path = self.cache_dir / f"{cid}.mp4"
            if mp4_path.exists():
                mp4_path.unlink()
            # Delete from database
            await self.db.delete_cache_item(cid)
            logger.info(f"Garbage collected content {cid[:16]}... (no remaining owners)")
        else:
            logger.info(f"Content {cid[:16]}... orphaned but cannot delete: {reason}")
    logger.info(f"Removed content {cid[:16]}... ownership for {actor_id}")
    return True, None
async def import_from_ipfs(self, ipfs_cid: str, actor_id: str) -> Tuple[Optional[str], Optional[str]]:
    """Import content from IPFS into the local cache.

    Downloads the object behind ``ipfs_cid`` via the ipfs_client helper,
    detects its media type, stores it in the content-addressed cache and
    records ownership metadata for ``actor_id``.

    Returns:
        (cid, error) -- exactly one of the two is non-None.
    """
    tmp_path = None
    try:
        import ipfs_client
        # Download into a scratch area inside the cache directory.
        legacy_dir = self.cache_dir / "legacy"
        legacy_dir.mkdir(parents=True, exist_ok=True)
        tmp_path = legacy_dir / f"import-{ipfs_cid[:16]}"
        if not ipfs_client.get_file(ipfs_cid, str(tmp_path)):
            # Fix: remove any partially written file so failed imports
            # don't leave stale scratch data behind.
            if tmp_path.exists():
                tmp_path.unlink()
            return None, f"Could not fetch CID {ipfs_cid} from IPFS"
        # Detect media type before storing (the file is moved by cache.put).
        media_type = detect_media_type(tmp_path)
        # Store in cache; move=True consumes the scratch file on success.
        cached, new_ipfs_cid = self.cache.put(tmp_path, node_type="import", move=True)
        cid = new_ipfs_cid or cached.cid  # Prefer IPFS CID
        # Save to database with detected media type.
        await self.db.create_cache_item(cid, new_ipfs_cid)
        await self.db.save_item_metadata(
            cid=cid,
            actor_id=actor_id,
            item_type=media_type,  # Use detected type for filtering
            filename=f"ipfs-{ipfs_cid[:16]}"
        )
        return cid, None
    except Exception as e:
        # Fix: best-effort cleanup of the scratch file on any failure
        # (previously a failed put/DB write leaked the downloaded file).
        if tmp_path is not None and tmp_path.exists():
            tmp_path.unlink()
        return None, f"Import failed: {e}"
async def upload_content(
    self,
    content: bytes,
    filename: str,
    actor_id: str,
) -> Tuple[Optional[str], Optional[str], Optional[str]]:
    """Upload raw bytes into the cache and pin them to IPFS.

    The payload is written to a temporary file, its media type detected,
    then stored in the content-addressed cache. The IPFS upload is done
    synchronously so the IPFS CID is immediately usable by other nodes.

    Returns:
        (cid, ipfs_cid, error) -- ``cid`` is the preferred identifier
        (IPFS CID when available, local content hash otherwise).
    """
    import tempfile
    tmp_path = None
    try:
        # Write to a temp file so the cache can ingest it by path.
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            tmp.write(content)
            tmp_path = Path(tmp.name)
        # Detect media type (video/image/audio) before moving the file.
        media_type = detect_media_type(tmp_path)
        # Store locally AND upload to IPFS synchronously so the IPFS CID
        # is available immediately for distributed access.
        cached, ipfs_cid = self.cache.put(tmp_path, node_type="upload", move=True, skip_ipfs=False)
        cid = ipfs_cid or cached.cid  # Prefer IPFS CID, fall back to local hash
        # NOTE(review): the cache row is keyed by cached.cid while the
        # metadata row uses cid (possibly the IPFS CID); import_from_ipfs
        # keys both by cid -- confirm this asymmetry is intentional.
        await self.db.create_cache_item(cached.cid, ipfs_cid)
        await self.db.save_item_metadata(
            cid=cid,
            actor_id=actor_id,
            item_type=media_type,
            filename=filename
        )
        if ipfs_cid:
            logger.info(f"Uploaded to IPFS: {ipfs_cid[:16]}...")
        else:
            logger.warning(f"IPFS upload failed, using local hash: {cid[:16]}...")
        return cid, ipfs_cid, None
    except Exception as e:
        # Fix: clean up the temp file when ingestion failed before
        # cache.put consumed it (move=True normally removes it); the
        # original leaked the temp file on every failure path.
        if tmp_path is not None and tmp_path.exists():
            tmp_path.unlink()
        return None, None, f"Upload failed: {e}"
async def list_media(
    self,
    actor_id: Optional[str] = None,
    username: Optional[str] = None,
    offset: int = 0,
    limit: int = 24,
    media_type: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Return a page of media items owned by the given user.

    ``media_type`` filters by "video", "image" or "audio"; None lists all
    types. Friendly names are attached only when ``actor_id`` is supplied.
    """
    # Ownership lookup goes through the item_types table.
    results = await self.db.get_user_items(
        actor_id=actor_id or username,
        item_type=media_type,
        limit=limit,
        offset=offset,
    )
    if not actor_id:
        return results
    # Decorate each entry with its friendly name, when one exists.
    from .naming_service import get_naming_service
    naming = get_naming_service()
    for entry in results:
        entry_cid = entry.get("cid")
        if not entry_cid:
            continue
        named = await naming.get_by_cid(actor_id, entry_cid)
        if named:
            entry["friendly_name"] = named["friendly_name"]
            entry["base_name"] = named["base_name"]
    return results
# Legacy compatibility methods
def has_content(self, cid: str) -> bool:
    """Return True when the content identified by *cid* is present in the cache.

    Legacy-compatibility shim; delegates to the cache manager.
    """
    present = self.cache.has_content(cid)
    return present
def get_ipfs_cid(self, cid: str) -> Optional[str]:
    """Return the IPFS CID recorded for cached content, or None when unknown.

    Legacy-compatibility shim; delegates to the cache manager.
    """
    ipfs_cid = self.cache.get_ipfs_cid(cid)
    return ipfs_cid

View File

@@ -0,0 +1,234 @@
"""
Naming service for friendly names.
Handles:
- Name normalization (My Cool Effect -> my-cool-effect)
- Version ID generation (server-signed timestamps)
- Friendly name assignment and resolution
"""
import hmac
import os
import re
import time
from typing import Optional, Tuple
import database
# Base32 Crockford alphabet (excludes I, L, O, U to avoid confusion).
# Lowercase variant; index position == digit value (0..31). Used by
# _base32_crockford_encode below to render version IDs.
CROCKFORD_ALPHABET = "0123456789abcdefghjkmnpqrstvwxyz"
def _get_server_secret() -> bytes:
"""Get server secret for signing version IDs."""
secret = os.environ.get("SERVER_SECRET", "")
if not secret:
# Fall back to a derived secret from other env vars
# In production, SERVER_SECRET should be set explicitly
secret = os.environ.get("SECRET_KEY", "default-dev-secret")
return secret.encode("utf-8")
def _base32_crockford_encode(data: bytes) -> str:
    """Encode *data* as lowercase base32-Crockford.

    The bytes are interpreted as one big-endian integer, so leading zero
    bytes do not lengthen the output (there is no fixed-width padding).
    An all-zero input encodes to a single "0".
    """
    value = int.from_bytes(data, "big")
    if value == 0:
        return CROCKFORD_ALPHABET[0]
    digits = []
    while value:
        value, rem = divmod(value, 32)
        digits.append(CROCKFORD_ALPHABET[rem])
    digits.reverse()
    return "".join(digits)
def generate_version_id() -> str:
    """Create a sortable, server-verifiable version identifier.

    Layout: 6 bytes of millisecond timestamp followed by the first 2 bytes
    of an HMAC-SHA256 over that timestamp. The timestamp prefix keeps IDs
    increasing over time; the HMAC suffix lets this server recognise IDs
    it minted.

    Note: the base32-Crockford encoding has no fixed-width padding, so the
    result is *up to* 13 characters (current timestamps yield 12).
    """
    now_ms = int(time.time() * 1000)
    stamp = now_ms.to_bytes(6, "big")
    # Sign the timestamp so the suffix proves this server generated the ID.
    mac = hmac.new(_get_server_secret(), stamp, "sha256").digest()
    return _base32_crockford_encode(stamp + mac[:2])
def normalize_name(name: str) -> str:
    """Normalize a display name to a URL-safe base name.

    Rules:
      - lowercase
      - spaces, underscores and any other non-alphanumeric character
        become dashes (previously punctuation was dropped outright, so
        "Test!!!Effect" produced "testeffect" instead of the documented
        "test-effect")
      - runs of dashes collapse to one
      - leading/trailing dashes are stripped
      - an empty result falls back to "unnamed"

    Examples:
        "My Cool Effect" -> "my-cool-effect"
        "Brightness_V2"  -> "brightness-v2"
        "Test!!!Effect"  -> "test-effect"
    """
    name = name.lower()
    # Map every run of disallowed characters (whitespace, underscores,
    # punctuation) to a single dash so word boundaries survive.
    name = re.sub(r"[^a-z0-9]+", "-", name)
    # Collapse any remaining multiple dashes (defensive; the sub above
    # already merges runs).
    name = re.sub(r"-+", "-", name)
    # Strip leading/trailing dashes
    name = name.strip("-")
    return name or "unnamed"
def parse_friendly_name(friendly_name: str) -> Tuple[str, Optional[str]]:
    """Split a friendly name into its base name and optional version id.

    Examples:
        "my-effect"          -> ("my-effect", None)
        "my-effect 01hw3x9k" -> ("my-effect", "01hw3x9k")
    """
    # Split at the first space only; everything after it is the version.
    base, _, version = friendly_name.strip().partition(" ")
    return base, version or None
def format_friendly_name(base_name: str, version_id: str) -> str:
    """Join a base name and version id into the canonical "<base> <version>" form."""
    return " ".join((base_name, version_id))
def format_l2_name(actor_id: str, base_name: str, version_id: str) -> str:
    """Render the L2-sharing form of a friendly name.

    Format: "@user@domain base-name version-id"
    """
    return " ".join((actor_id, base_name, version_id))
class NamingService:
    """Manages friendly (human-readable) names for content items.

    Thin async facade over the ``database`` module: creation, lookup,
    resolution, listing and deletion of friendly-name entries.
    """

    async def assign_name(
        self,
        cid: str,
        actor_id: str,
        item_type: str,
        display_name: Optional[str] = None,
        filename: Optional[str] = None,
    ) -> dict:
        """Create and persist a friendly name for *cid*.

        When no display name is given, falls back to the filename stem,
        then to "unnamed-<item_type>". Returns the created database entry.
        """
        if not display_name:
            # Prefer the original filename (minus extension) as the label.
            display_name = (
                os.path.splitext(filename)[0] if filename else f"unnamed-{item_type}"
            )
        base_name = normalize_name(display_name)
        version_id = generate_version_id()
        return await database.create_friendly_name(
            actor_id=actor_id,
            base_name=base_name,
            version_id=version_id,
            cid=cid,
            item_type=item_type,
            display_name=display_name,
        )

    async def get_by_cid(self, actor_id: str, cid: str) -> Optional[dict]:
        """Look up the friendly-name entry attached to *cid* for this user."""
        return await database.get_friendly_name_by_cid(actor_id, cid)

    async def resolve(
        self,
        actor_id: str,
        name: str,
        item_type: Optional[str] = None,
    ) -> Optional[str]:
        """Resolve "base-name" or "base-name version" to a CID, or None."""
        return await database.resolve_friendly_name(actor_id, name, item_type)

    async def list_names(
        self,
        actor_id: str,
        item_type: Optional[str] = None,
        latest_only: bool = False,
    ) -> list:
        """List this user's friendly names, optionally filtered by type."""
        return await database.list_friendly_names(
            actor_id=actor_id,
            item_type=item_type,
            latest_only=latest_only,
        )

    async def delete(self, actor_id: str, cid: str) -> bool:
        """Remove the friendly-name entry for *cid*; True when one was deleted."""
        return await database.delete_friendly_name(actor_id, cid)
# Module-level singleton instance, created lazily by get_naming_service().
_naming_service: Optional[NamingService] = None
def get_naming_service() -> NamingService:
    """Get the naming service singleton.

    Lazily constructs the NamingService on first call and returns the same
    instance thereafter. Not guarded by a lock; fine for single-threaded
    (asyncio) use.
    """
    global _naming_service
    if _naming_service is None:
        _naming_service = NamingService()
    return _naming_service

View File

@@ -0,0 +1,337 @@
"""
Recipe Service - business logic for recipe management.
Recipes are S-expressions stored in the content-addressed cache (and IPFS).
The recipe ID is the content hash of the file.
"""
import tempfile
from pathlib import Path
from typing import Optional, List, Dict, Any, Tuple, TYPE_CHECKING
from artdag.sexp import compile_string, parse, serialize, CompileError, ParseError
if TYPE_CHECKING:
import redis
from cache_manager import L1CacheManager
from ..types import Recipe, CompiledDAG, VisualizationDAG, VisNode, VisEdge
class RecipeService:
    """
    Service for managing recipes.

    Recipes are S-expressions (or legacy YAML) stored in the
    content-addressed cache; a recipe's ID is the content hash of its file.
    """
    def __init__(self, redis: "redis.Redis", cache: "L1CacheManager") -> None:
        # Redis kept for compatibility but not used for recipe storage
        self.redis = redis
        self.cache = cache
    async def get_recipe(self, recipe_id: str) -> Optional[Recipe]:
        """Get a recipe by ID (content hash).

        Loads the cached file, sniffs its format (S-expression, streaming
        S-expression, or YAML), and returns the parsed recipe dict enriched
        with bookkeeping keys: recipe_id, format, step_count and, when
        known, ipfs_cid. Returns None when the CID is not in the cache.

        NOTE(review): parse failures return a dict carrying an "error" key
        rather than None, despite the Optional[Recipe] signature; callers
        appear to check .get("error") -- confirm before tightening.
        """
        import yaml
        import logging
        logger = logging.getLogger(__name__)
        # Get from cache (content-addressed storage)
        logger.info(f"get_recipe: Looking up recipe_id={recipe_id[:16]}...")
        path = self.cache.get_by_cid(recipe_id)
        logger.info(f"get_recipe: cache.get_by_cid returned path={path}")
        if not path or not path.exists():
            logger.warning(f"get_recipe: Recipe {recipe_id[:16]}... not found in cache")
            return None
        with open(path) as f:
            content = f.read()
        # Detect format - check if it starts with ( after skipping comments.
        # ';' starts a comment line in the S-expression dialect.
        def is_sexp_format(text):
            for line in text.split('\n'):
                stripped = line.strip()
                if not stripped or stripped.startswith(';'):
                    continue
                return stripped.startswith('(')
            return False
        # NOTE(review): logging was already imported and logger assigned
        # above; this duplicate is harmless but redundant.
        import logging
        logger = logging.getLogger(__name__)
        if is_sexp_format(content):
            # Detect if this is a streaming recipe (starts with (stream ...))
            def is_streaming_recipe(text):
                for line in text.split('\n'):
                    stripped = line.strip()
                    if not stripped or stripped.startswith(';'):
                        continue
                    return stripped.startswith('(stream')
                return False
            if is_streaming_recipe(content):
                # Streaming recipes have different format - parse manually:
                # only the stream name is extracted via regex; the body is
                # kept verbatim under "sexp".
                import re
                name_match = re.search(r'\(stream\s+"([^"]+)"', content)
                recipe_name = name_match.group(1) if name_match else "streaming"
                recipe_data = {
                    "name": recipe_name,
                    "sexp": content,
                    "format": "sexp",
                    "type": "streaming",
                    "dag": {"nodes": []},  # Streaming recipes don't have traditional DAG
                }
                logger.info(f"Parsed streaming recipe {recipe_id[:16]}..., name: {recipe_name}")
            else:
                # Parse traditional (recipe ...) S-expression
                try:
                    compiled = compile_string(content)
                    recipe_data = compiled.to_dict()
                    recipe_data["sexp"] = content
                    recipe_data["format"] = "sexp"
                    logger.info(f"Parsed sexp recipe {recipe_id[:16]}..., keys: {list(recipe_data.keys())}")
                except (ParseError, CompileError) as e:
                    logger.warning(f"Failed to parse sexp recipe {recipe_id[:16]}...: {e}")
                    return {"error": str(e), "recipe_id": recipe_id}
        else:
            # Parse YAML (legacy recipe format)
            try:
                recipe_data = yaml.safe_load(content)
                if not isinstance(recipe_data, dict):
                    return {"error": "Invalid YAML: expected dictionary", "recipe_id": recipe_id}
                recipe_data["yaml"] = content
                recipe_data["format"] = "yaml"
            except yaml.YAMLError as e:
                return {"error": f"YAML parse error: {e}", "recipe_id": recipe_id}
        # Add the recipe_id to the data for convenience
        recipe_data["recipe_id"] = recipe_id
        # Get IPFS CID if available
        ipfs_cid = self.cache.get_ipfs_cid(recipe_id)
        if ipfs_cid:
            recipe_data["ipfs_cid"] = ipfs_cid
        # Compute step_count from nodes (handle both formats)
        if recipe_data.get("format") == "sexp":
            nodes = recipe_data.get("dag", {}).get("nodes", [])
        else:
            # YAML format: nodes might be at top level or under dag
            nodes = recipe_data.get("nodes", recipe_data.get("dag", {}).get("nodes", []))
        recipe_data["step_count"] = len(nodes) if isinstance(nodes, (list, dict)) else 0
        return recipe_data
    async def list_recipes(self, actor_id: Optional[str] = None, offset: int = 0, limit: int = 20) -> List[Recipe]:
        """
        List recipes owned by a user.

        Queries the item_types table for the user's recipe links, loads and
        parses each one via get_recipe (skipping those with parse errors),
        attaches friendly names, sorts by name, and applies offset/limit
        pagination in memory.
        """
        import logging
        import database
        logger = logging.getLogger(__name__)
        recipes = []
        if not actor_id:
            logger.warning("list_recipes called without actor_id")
            return []
        # Get user's recipe CIDs from item_types
        user_items = await database.get_user_items(actor_id, item_type="recipe", limit=1000)
        recipe_cids = [item["cid"] for item in user_items]
        logger.info(f"Found {len(recipe_cids)} recipe CIDs for user {actor_id}")
        for cid in recipe_cids:
            recipe = await self.get_recipe(cid)
            if recipe and not recipe.get("error"):
                recipes.append(recipe)
            elif recipe and recipe.get("error"):
                logger.warning(f"Recipe {cid[:16]}... has error: {recipe.get('error')}")
        # Add friendly names
        from .naming_service import get_naming_service
        naming = get_naming_service()
        for recipe in recipes:
            recipe_id = recipe.get("recipe_id")
            if recipe_id:
                friendly = await naming.get_by_cid(actor_id, recipe_id)
                if friendly:
                    recipe["friendly_name"] = friendly["friendly_name"]
                    recipe["base_name"] = friendly["base_name"]
        # Sort by name, then paginate the already-loaded list.
        recipes.sort(key=lambda r: r.get("name", ""))
        return recipes[offset:offset + limit]
    async def upload_recipe(
        self,
        content: str,
        uploader: str,
        name: str = None,
        description: str = None,
    ) -> Tuple[Optional[str], Optional[str]]:
        """
        Upload a recipe from S-expression content.

        The content is validated by compiling it, then stored in the
        content-addressed cache (which pins it to IPFS). When an uploader
        is given, an ownership row and a friendly name are also created.

        Returns (recipe_id, error_message) -- exactly one is non-None.
        """
        # Validate S-expression before touching storage.
        try:
            compiled = compile_string(content)
        except ParseError as e:
            return None, f"Parse error: {e}"
        except CompileError as e:
            return None, f"Compile error: {e}"
        # Write to temp file for caching
        import logging
        logger = logging.getLogger(__name__)
        try:
            with tempfile.NamedTemporaryFile(delete=False, suffix=".sexp", mode="w") as tmp:
                tmp.write(content)
                tmp_path = Path(tmp.name)
            # Store in cache (content-addressed, auto-pins to IPFS);
            # move=True consumes the temp file on success.
            logger.info(f"upload_recipe: Storing recipe in cache from {tmp_path}")
            cached, ipfs_cid = self.cache.put(tmp_path, node_type="recipe", move=True)
            recipe_id = ipfs_cid or cached.cid  # Prefer IPFS CID
            logger.info(f"upload_recipe: Stored recipe, cached.cid={cached.cid[:16]}..., ipfs_cid={ipfs_cid[:16] if ipfs_cid else None}, recipe_id={recipe_id[:16]}...")
            # Track ownership in item_types and assign friendly name
            if uploader:
                import database
                display_name = name or compiled.name or "unnamed-recipe"
                # Create item_types entry (ownership link)
                await database.save_item_metadata(
                    cid=recipe_id,
                    actor_id=uploader,
                    item_type="recipe",
                    description=description,
                    filename=f"{display_name}.sexp",
                )
                # Assign friendly name
                from .naming_service import get_naming_service
                naming = get_naming_service()
                await naming.assign_name(
                    cid=recipe_id,
                    actor_id=uploader,
                    item_type="recipe",
                    display_name=display_name,
                )
            return recipe_id, None
        except Exception as e:
            # NOTE(review): if cache.put raises, the temp file is leaked.
            return None, f"Failed to cache recipe: {e}"
    async def delete_recipe(self, recipe_id: str, actor_id: str = None) -> Tuple[bool, Optional[str]]:
        """
        Remove user's ownership link to a recipe.

        This removes the item_types entry linking the user to the recipe
        and the user's friendly name. The cached file itself is only
        deleted when no other users own it (delete_cache_item no-ops while
        item_types rows remain).
        Returns (success, error_message).
        """
        import database
        if not actor_id:
            return False, "actor_id required"
        # Remove user's ownership link
        try:
            await database.delete_item_type(recipe_id, actor_id, "recipe")
            # Also remove friendly name
            await database.delete_friendly_name(actor_id, recipe_id)
            # Try to garbage collect if no one owns it anymore
            # (delete_cache_item only deletes if no item_types remain)
            await database.delete_cache_item(recipe_id)
            return True, None
        except Exception as e:
            return False, f"Failed to delete: {e}"
    def parse_recipe(self, content: str) -> CompiledDAG:
        """Parse recipe S-expression content into its compiled-dict form.

        Raises ParseError/CompileError on invalid input (not caught here).
        """
        compiled = compile_string(content)
        return compiled.to_dict()
    def build_dag(self, recipe: Recipe) -> VisualizationDAG:
        """
        Build DAG visualization data from recipe.

        Handles both node layouts: a list of node dicts (compiled
        S-expression) and a mapping of node-id -> node dict (YAML).
        Returns {"nodes": [...], "edges": [...]} for Cytoscape.js.
        """
        vis_nodes: List[VisNode] = []
        edges: List[VisEdge] = []
        dag = recipe.get("dag", {})
        dag_nodes = dag.get("nodes", [])
        output_node = dag.get("output")
        # Handle list format (compiled S-expression)
        if isinstance(dag_nodes, list):
            for node_def in dag_nodes:
                node_id = node_def.get("id")
                node_type = node_def.get("type", "EFFECT")
                vis_nodes.append({
                    "data": {
                        "id": node_id,
                        "label": node_id,
                        "nodeType": node_type,
                        "isOutput": node_id == output_node,
                    }
                })
                # Inputs may be plain ids or {"node"/"input": id} dicts.
                for input_ref in node_def.get("inputs", []):
                    if isinstance(input_ref, dict):
                        source = input_ref.get("node") or input_ref.get("input")
                    else:
                        source = input_ref
                    if source:
                        edges.append({
                            "data": {
                                "source": source,
                                "target": node_id,
                            }
                        })
        # Handle dict format
        elif isinstance(dag_nodes, dict):
            for node_id, node_def in dag_nodes.items():
                node_type = node_def.get("type", "EFFECT")
                vis_nodes.append({
                    "data": {
                        "id": node_id,
                        "label": node_id,
                        "nodeType": node_type,
                        "isOutput": node_id == output_node,
                    }
                })
                for input_ref in node_def.get("inputs", []):
                    if isinstance(input_ref, dict):
                        source = input_ref.get("node") or input_ref.get("input")
                    else:
                        source = input_ref
                    if source:
                        edges.append({
                            "data": {
                                "source": source,
                                "target": node_id,
                            }
                        })
        return {"nodes": vis_nodes, "edges": edges}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,232 @@
"""
Storage Service - business logic for storage provider management.
"""
import json
from typing import Optional, List, Dict, Any, Tuple, TYPE_CHECKING
if TYPE_CHECKING:
from database import Database
from storage_providers import StorageProvidersModule
# Display metadata for each supported storage provider type. The keys
# double as the canonical provider_type identifiers stored in the database;
# "color" presumably drives UI badge styling -- confirm against templates.
STORAGE_PROVIDERS_INFO = {
    "pinata": {"name": "Pinata", "desc": "1GB free, IPFS pinning", "color": "blue"},
    "web3storage": {"name": "web3.storage", "desc": "IPFS + Filecoin", "color": "green"},
    "nftstorage": {"name": "NFT.Storage", "desc": "Free for NFTs", "color": "pink"},
    "infura": {"name": "Infura IPFS", "desc": "5GB free", "color": "orange"},
    "filebase": {"name": "Filebase", "desc": "5GB free, S3+IPFS", "color": "cyan"},
    "storj": {"name": "Storj", "desc": "25GB free", "color": "indigo"},
    "local": {"name": "Local Storage", "desc": "Your own disk", "color": "purple"},
}
# Canonical list of accepted provider_type values, derived from the table above.
VALID_PROVIDER_TYPES = list(STORAGE_PROVIDERS_INFO.keys())
class StorageService:
    """Service for managing user storage providers.

    Wraps the database layer (storage rows and usage stats) and the
    provider plugin module (provider instantiation, connection tests).
    """

    def __init__(self, database: "Database", storage_providers_module: "StorageProvidersModule") -> None:
        # Database access for storage rows / usage stats.
        self.db = database
        # Factory for concrete provider backends (pinata, storj, local, ...).
        self.providers = storage_providers_module

    @staticmethod
    def _masked_config(config: Dict[str, Any]) -> Dict[str, Any]:
        """Return a display-safe copy of *config*.

        Values whose key looks secret-ish (contains "key", "token" or
        "secret") are truncated to their first/last 4 characters, or
        replaced by "****" when too short to truncate safely.
        """
        masked: Dict[str, Any] = {}
        for k, v in config.items():
            key_lower = k.lower()
            if "key" in key_lower or "token" in key_lower or "secret" in key_lower:
                # Fix: coerce to str before slicing -- the original sliced
                # v directly while length-checking str(v), raising
                # TypeError for non-string secret values.
                s = str(v)
                masked[k] = s[:4] + "..." + s[-4:] if len(s) > 8 else "****"
            else:
                masked[k] = v
        return masked

    async def list_storages(self, actor_id: str) -> List[Dict[str, Any]]:
        """List all storage providers for a user with usage stats.

        Each row is augmented with used_bytes, pin_count, donated_gb and a
        secret-masked config_display.
        """
        storages = await self.db.get_user_storage(actor_id)
        for storage in storages:
            usage = await self.db.get_storage_usage(storage["id"])
            storage["used_bytes"] = usage["used_bytes"]
            storage["pin_count"] = usage["pin_count"]
            # Half the declared capacity (integer division) -- presumably
            # the share offered to the network; confirm with product docs.
            storage["donated_gb"] = storage["capacity_gb"] // 2
            # Mask sensitive config keys for display (config may be stored
            # as a JSON string or an already-decoded dict).
            if storage.get("config"):
                config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"])
                storage["config_display"] = self._masked_config(config)
        return storages

    async def get_storage(self, storage_id: int, actor_id: str) -> Optional[Dict[str, Any]]:
        """Get a specific storage provider owned by *actor_id*.

        Returns None both when the row does not exist and when it belongs
        to a different user.
        """
        storage = await self.db.get_storage_by_id(storage_id)
        if not storage:
            return None
        if storage["actor_id"] != actor_id:
            return None
        usage = await self.db.get_storage_usage(storage_id)
        storage["used_bytes"] = usage["used_bytes"]
        storage["pin_count"] = usage["pin_count"]
        storage["donated_gb"] = storage["capacity_gb"] // 2
        return storage

    async def add_storage(
        self,
        actor_id: str,
        provider_type: str,
        config: Dict[str, Any],
        capacity_gb: int = 5,
        provider_name: Optional[str] = None,
        description: Optional[str] = None,
    ) -> Tuple[Optional[int], Optional[str]]:
        """Add a new storage provider. Returns (storage_id, error_message).

        The provider connection is tested before the row is persisted, so
        invalid credentials never reach the database.
        """
        if provider_type not in VALID_PROVIDER_TYPES:
            return None, f"Invalid provider type: {provider_type}"
        # Test connection before saving
        provider = self.providers.create_provider(provider_type, {
            **config,
            "capacity_gb": capacity_gb
        })
        if not provider:
            return None, "Failed to create provider with given config"
        success, message = await provider.test_connection()
        if not success:
            return None, f"Provider connection failed: {message}"
        # Generate a name like "<type>-<n>" when none is provided.
        if not provider_name:
            existing = await self.db.get_user_storage_by_type(actor_id, provider_type)
            provider_name = f"{provider_type}-{len(existing) + 1}"
        storage_id = await self.db.add_user_storage(
            actor_id=actor_id,
            provider_type=provider_type,
            provider_name=provider_name,
            config=config,
            capacity_gb=capacity_gb,
            description=description
        )
        if not storage_id:
            return None, "Failed to save storage provider"
        return storage_id, None

    async def update_storage(
        self,
        storage_id: int,
        actor_id: str,
        config: Optional[Dict[str, Any]] = None,
        capacity_gb: Optional[int] = None,
        is_active: Optional[bool] = None,
    ) -> Tuple[bool, Optional[str]]:
        """Update a storage provider. Returns (success, error_message).

        When a config patch is supplied it is merged over the stored config
        and the resulting provider is connection-tested before saving.
        """
        storage = await self.db.get_storage_by_id(storage_id)
        if not storage:
            return False, "Storage provider not found"
        if storage["actor_id"] != actor_id:
            return False, "Not authorized"
        # Test new config if provided
        if config:
            existing_config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"])
            new_config = {**existing_config, **config}
            provider = self.providers.create_provider(storage["provider_type"], {
                **new_config,
                "capacity_gb": capacity_gb or storage["capacity_gb"]
            })
            # NOTE(review): when the provider cannot be constructed the
            # connection test is silently skipped and the update proceeds;
            # confirm this lenient behavior is intended.
            if provider:
                success, message = await provider.test_connection()
                if not success:
                    return False, f"Provider connection failed: {message}"
        success = await self.db.update_user_storage(
            storage_id,
            config=config,
            capacity_gb=capacity_gb,
            is_active=is_active
        )
        return success, None if success else "Failed to update storage provider"

    async def delete_storage(self, storage_id: int, actor_id: str) -> Tuple[bool, Optional[str]]:
        """Delete a storage provider. Returns (success, error_message)."""
        storage = await self.db.get_storage_by_id(storage_id)
        if not storage:
            return False, "Storage provider not found"
        if storage["actor_id"] != actor_id:
            return False, "Not authorized"
        success = await self.db.remove_user_storage(storage_id)
        return success, None if success else "Failed to remove storage provider"

    async def test_storage(self, storage_id: int, actor_id: str) -> Tuple[bool, str]:
        """Test storage provider connectivity. Returns (success, message)."""
        storage = await self.db.get_storage_by_id(storage_id)
        if not storage:
            return False, "Storage not found"
        if storage["actor_id"] != actor_id:
            return False, "Not authorized"
        config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"])
        provider = self.providers.create_provider(storage["provider_type"], {
            **config,
            "capacity_gb": storage["capacity_gb"]
        })
        if not provider:
            return False, "Failed to create provider"
        return await provider.test_connection()

    async def list_by_type(self, actor_id: str, provider_type: str) -> List[Dict[str, Any]]:
        """List storage providers of a specific type."""
        return await self.db.get_user_storage_by_type(actor_id, provider_type)

    def build_config_from_form(self, provider_type: str, form_data: Dict[str, Any]) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
        """Build provider config from submitted form data.

        Validates that the fields required by *provider_type* are present.
        Returns (config, error) -- exactly one of the two is non-None.
        """
        api_key = form_data.get("api_key")
        secret_key = form_data.get("secret_key")
        api_token = form_data.get("api_token")
        project_id = form_data.get("project_id")
        project_secret = form_data.get("project_secret")
        access_key = form_data.get("access_key")
        bucket = form_data.get("bucket")
        path = form_data.get("path")
        if provider_type == "pinata":
            if not api_key or not secret_key:
                return None, "Pinata requires API Key and Secret Key"
            return {"api_key": api_key, "secret_key": secret_key}, None
        elif provider_type == "web3storage":
            if not api_token:
                return None, "web3.storage requires API Token"
            return {"api_token": api_token}, None
        elif provider_type == "nftstorage":
            if not api_token:
                return None, "NFT.Storage requires API Token"
            return {"api_token": api_token}, None
        elif provider_type == "infura":
            if not project_id or not project_secret:
                return None, "Infura requires Project ID and Project Secret"
            return {"project_id": project_id, "project_secret": project_secret}, None
        elif provider_type == "filebase":
            if not access_key or not secret_key or not bucket:
                return None, "Filebase requires Access Key, Secret Key, and Bucket"
            return {"access_key": access_key, "secret_key": secret_key, "bucket": bucket}, None
        elif provider_type == "storj":
            if not access_key or not secret_key or not bucket:
                return None, "Storj requires Access Key, Secret Key, and Bucket"
            return {"access_key": access_key, "secret_key": secret_key, "bucket": bucket}, None
        elif provider_type == "local":
            if not path:
                return None, "Local storage requires a path"
            return {"path": path}, None
        return None, f"Unknown provider type: {provider_type}"

14
l1/app/templates/404.html Normal file
View File

@@ -0,0 +1,14 @@
{% extends "base.html" %}
{# 404 error page: centered message with a link back to the home page. #}
{% block title %}Not Found - Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-2xl mx-auto text-center py-16">
  <h1 class="text-6xl font-bold text-gray-400 mb-4">404</h1>
  <h2 class="text-2xl font-semibold mb-4">Page Not Found</h2>
  <p class="text-gray-400 mb-8">The page you're looking for doesn't exist or has been moved.</p>
  <a href="/" class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded-lg font-medium">
    Go Home
  </a>
</div>
{% endblock %}

View File

@@ -0,0 +1,46 @@
{% extends "_base.html" %}
{# L1 site chrome: brand links, server-injected nav/auth fragments, and the sub-navigation tab bar. #}
{% block brand %}
<a href="https://blog.rose-ash.com/" class="no-underline text-stone-900">Rose Ash</a>
<span class="text-stone-400 mx-1">|</span>
<a href="/" class="no-underline text-stone-900">Art-DAG</a>
{% endblock %}
{# The fragments below are pre-rendered server-side and stashed on
   request.state; they are emitted with |safe, so they must only ever
   contain trusted, server-generated HTML -- never user input. #}
{% block cart_mini %}
{% if request and request.state.cart_mini_html %}
{{ request.state.cart_mini_html | safe }}
{% endif %}
{% endblock %}
{% block nav_tree %}
{% if request and request.state.nav_tree_html %}
{{ request.state.nav_tree_html | safe }}
{% endif %}
{% endblock %}
{% block auth_menu %}
{% if request and request.state.auth_menu_html %}
{{ request.state.auth_menu_html | safe }}
{% endif %}
{% endblock %}
{% block auth_menu_mobile %}
{% if request and request.state.auth_menu_html %}
{{ request.state.auth_menu_html | safe }}
{% endif %}
{% endblock %}
{# Sub-navigation tabs; active_tab highlights the current section and
   nav_counts (when provided) appends item counts to each label. #}
{% block sub_nav %}
<div class="bg-stone-200 border-b border-stone-300">
  <div class="max-w-screen-2xl mx-auto px-4">
    <nav class="flex items-center gap-4 py-2 text-sm overflow-x-auto no-scrollbar">
      <a href="/runs" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'runs' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Runs{% if nav_counts and nav_counts.runs %} ({{ nav_counts.runs }}){% endif %}</a>
      <a href="/recipes" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'recipes' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Recipes{% if nav_counts and nav_counts.recipes %} ({{ nav_counts.recipes }}){% endif %}</a>
      <a href="/effects" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'effects' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Effects{% if nav_counts and nav_counts.effects %} ({{ nav_counts.effects }}){% endif %}</a>
      <a href="/media" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'media' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Media{% if nav_counts and nav_counts.media %} ({{ nav_counts.media }}){% endif %}</a>
      <a href="/storage" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'storage' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Storage{% if nav_counts and nav_counts.storage %} ({{ nav_counts.storage }}){% endif %}</a>
      <a href="/download/client" class="whitespace-nowrap px-3 py-1.5 rounded text-stone-700 hover:bg-stone-300" title="Download CLI client">Client</a>
    </nav>
  </div>
</div>
{% endblock %}

182
l1/app/templates/cache/detail.html vendored Normal file
View File

@@ -0,0 +1,182 @@
{% extends "base.html" %}
{% block title %}{{ cache.cid[:16] }} - Cache - Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-4xl mx-auto">
<!-- Header -->
<div class="flex items-center space-x-4 mb-6">
<a href="/media" class="text-gray-400 hover:text-white">&larr; Media</a>
<h1 class="text-xl font-bold font-mono">{{ cache.cid[:24] }}...</h1>
</div>
<!-- Preview -->
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6 overflow-hidden">
{% if cache.mime_type and cache.mime_type.startswith('image/') %}
{% if cache.remote_only and cache.ipfs_cid %}
<img src="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}" alt=""
class="w-full max-h-96 object-contain bg-gray-900">
{% else %}
<img src="/cache/{{ cache.cid }}/raw" alt=""
class="w-full max-h-96 object-contain bg-gray-900">
{% endif %}
{% elif cache.mime_type and cache.mime_type.startswith('video/') %}
{% if cache.remote_only and cache.ipfs_cid %}
<video src="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}" controls
class="w-full max-h-96 bg-gray-900">
</video>
{% else %}
<video src="/cache/{{ cache.cid }}/raw" controls
class="w-full max-h-96 bg-gray-900">
</video>
{% endif %}
{% elif cache.mime_type and cache.mime_type.startswith('audio/') %}
<div class="p-8 bg-gray-900">
{% if cache.remote_only and cache.ipfs_cid %}
<audio src="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}" controls class="w-full"></audio>
{% else %}
<audio src="/cache/{{ cache.cid }}/raw" controls class="w-full"></audio>
{% endif %}
</div>
{% elif cache.mime_type == 'application/json' %}
<div class="p-4 bg-gray-900 max-h-96 overflow-auto">
<pre class="text-sm text-gray-300">{{ cache.content_preview }}</pre>
</div>
{% else %}
<div class="p-8 bg-gray-900 text-center text-gray-500">
<div class="text-4xl mb-2">{{ cache.mime_type or 'Unknown type' }}</div>
<div>{{ cache.size | filesizeformat if cache.size else 'Unknown size' }}</div>
</div>
{% endif %}
</div>
<!-- Friendly Name -->
<div id="friendly-name-section" class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">
<div class="flex items-center justify-between mb-2">
<span class="text-gray-500 text-sm">Friendly Name</span>
<button hx-get="/cache/{{ cache.cid }}/name-form"
hx-target="#friendly-name-section"
hx-swap="innerHTML"
class="text-blue-400 hover:text-blue-300 text-sm">
Edit
</button>
</div>
{% if cache.friendly_name %}
<p class="text-blue-400 font-medium text-lg">{{ cache.friendly_name }}</p>
<p class="text-gray-500 text-xs mt-1">Use in recipes: <code class="bg-gray-900 px-2 py-0.5 rounded">{{ cache.base_name }}</code></p>
{% else %}
<p class="text-gray-500 text-sm">No friendly name assigned. Click Edit to add one.</p>
{% endif %}
</div>
<!-- User Metadata (editable) -->
<div id="metadata-section" class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">
<div class="flex items-center justify-between mb-3">
<h3 class="text-lg font-semibold">Details</h3>
<button hx-get="/cache/{{ cache.cid }}/meta-form"
hx-target="#metadata-section"
hx-swap="innerHTML"
class="text-blue-400 hover:text-blue-300 text-sm">
Edit
</button>
</div>
{% if cache.title or cache.description or cache.filename %}
<div class="space-y-2 mb-4">
{% if cache.title %}
<h4 class="text-white font-medium">{{ cache.title }}</h4>
{% elif cache.filename %}
<h4 class="text-white font-medium">{{ cache.filename }}</h4>
{% endif %}
{% if cache.description %}
<p class="text-gray-400">{{ cache.description }}</p>
{% endif %}
</div>
{% else %}
<p class="text-gray-500 text-sm mb-4">No title or description set. Click Edit to add metadata.</p>
{% endif %}
{% if cache.tags %}
<div class="flex flex-wrap gap-2 mb-4">
{% for tag in cache.tags %}
<span class="bg-gray-700 text-gray-300 px-2 py-1 rounded text-sm">{{ tag }}</span>
{% endfor %}
</div>
{% endif %}
{% if cache.source_type or cache.source_note %}
<div class="text-sm text-gray-500">
{% if cache.source_type %}Source: {{ cache.source_type }}{% endif %}
{% if cache.source_note %} - {{ cache.source_note }}{% endif %}
</div>
{% endif %}
</div>
<!-- Technical Metadata -->
<div class="grid grid-cols-2 gap-4 mb-6">
<div class="bg-gray-800 rounded-lg p-4">
<div class="text-gray-500 text-sm">CID</div>
<div class="font-mono text-sm text-white break-all">{{ cache.cid }}</div>
</div>
<div class="bg-gray-800 rounded-lg p-4">
<div class="text-gray-500 text-sm">Content Type</div>
<div class="text-white">{{ cache.mime_type or 'Unknown' }}</div>
</div>
<div class="bg-gray-800 rounded-lg p-4">
<div class="text-gray-500 text-sm">Size</div>
<div class="text-white">{{ cache.size | filesizeformat if cache.size else 'Unknown' }}</div>
</div>
<div class="bg-gray-800 rounded-lg p-4">
<div class="text-gray-500 text-sm">Created</div>
<div class="text-white">{{ cache.created_at or 'Unknown' }}</div>
</div>
</div>
<!-- IPFS -->
{% if cache.ipfs_cid %}
<div class="bg-gray-800 rounded-lg p-4 mb-6">
<div class="text-gray-500 text-sm mb-1">IPFS CID</div>
<div class="flex items-center justify-between">
<span class="font-mono text-sm text-white">{{ cache.ipfs_cid }}</span>
<a href="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}"
target="_blank"
class="text-blue-400 hover:text-blue-300 text-sm">
View on IPFS Gateway &rarr;
</a>
</div>
</div>
{% endif %}
<!-- Related Runs -->
{% if cache.runs %}
<h2 class="text-lg font-semibold mb-4">Related Runs</h2>
<div class="space-y-2">
{% for run in cache.runs %}
<a href="/runs/{{ run.run_id }}"
   class="block bg-gray-800 rounded p-3 hover:bg-gray-700 transition-colors">
<div class="flex items-center justify-between">
<span class="font-mono text-sm">{{ run.run_id[:16] }}...</span>
<span class="text-gray-500 text-sm">{{ run.created_at }}</span>
</div>
</a>
{% endfor %}
</div>
{% endif %}
<!-- Actions -->
<div class="flex items-center space-x-4 mt-8">
<a href="/cache/{{ cache.cid }}/raw"
download
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Download
</a>
<button hx-post="/cache/{{ cache.cid }}/publish"
hx-target="#share-result"
class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded font-medium">
Share to L2
</button>
<span id="share-result"></span>
</div>
</div>
{% endblock %}

325
l1/app/templates/cache/media_list.html vendored Normal file
View File

@@ -0,0 +1,325 @@
{% extends "base.html" %}
{% block title %}Media - Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-6xl mx-auto">
<div class="flex items-center justify-between mb-6">
<h1 class="text-3xl font-bold">Media</h1>
<div class="flex items-center space-x-4">
<button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Upload Media
</button>
<select id="type-filter" onchange="filterMedia()"
class="bg-gray-800 border border-gray-600 rounded px-3 py-2 text-white">
<option value="">All Types</option>
<option value="image">Images</option>
<option value="video">Videos</option>
<option value="audio">Audio</option>
</select>
</div>
</div>
<!-- Upload Modal -->
<div id="upload-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
<div class="bg-gray-800 rounded-lg p-6 w-full max-w-md border border-gray-700">
<div class="flex justify-between items-center mb-4">
<h2 class="text-xl font-semibold">Upload Media</h2>
<button onclick="document.getElementById('upload-modal').classList.add('hidden')"
class="text-gray-400 hover:text-white">
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
</svg>
</button>
</div>
<form id="upload-form" enctype="multipart/form-data" class="space-y-4">
<div>
<label class="block text-gray-400 text-sm mb-1">Files</label>
<input type="file" name="files" id="upload-file" required multiple
accept="image/*,video/*,audio/*"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white file:mr-4 file:py-2 file:px-4 file:rounded file:border-0 file:bg-blue-600 file:text-white hover:file:bg-blue-700">
<p class="text-gray-500 text-xs mt-1">Select one or more files to upload</p>
</div>
<div id="single-name-field">
<label class="block text-gray-400 text-sm mb-1">Name (optional, for single file)</label>
<input type="text" name="display_name" id="upload-name" placeholder="e.g., my-background-video"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
<p class="text-gray-500 text-xs mt-1">A friendly name to reference this media in recipes</p>
</div>
<div id="upload-progress" class="hidden">
<div class="bg-gray-700 rounded-full h-2">
<div id="progress-bar" class="bg-blue-600 h-2 rounded-full transition-all" style="width: 0%"></div>
</div>
<p id="progress-text" class="text-gray-400 text-sm mt-1">Uploading...</p>
</div>
<div id="upload-result" class="hidden max-h-48 overflow-y-auto"></div>
<div class="flex justify-end space-x-3">
<button type="button" onclick="document.getElementById('upload-modal').classList.add('hidden')"
class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
Cancel
</button>
<button type="submit" id="upload-btn"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Upload
</button>
</div>
</form>
</div>
</div>
{% if items %}
<div class="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4" id="media-grid">
{% for item in items %}
{# Determine media category from type or filename #}
{% set is_image = item.type in ('image', 'image/jpeg', 'image/png', 'image/gif', 'image/webp') or (item.filename and item.filename.lower().endswith(('.jpg', '.jpeg', '.png', '.gif', '.webp'))) %}
{% set is_video = item.type in ('video', 'video/mp4', 'video/webm', 'video/x-matroska') or (item.filename and item.filename.lower().endswith(('.mp4', '.mkv', '.webm', '.mov'))) %}
{% set is_audio = item.type in ('audio', 'audio/mpeg', 'audio/wav', 'audio/flac') or (item.filename and item.filename.lower().endswith(('.mp3', '.wav', '.flac', '.ogg'))) %}
<a href="/cache/{{ item.cid }}"
class="media-item bg-gray-800 rounded-lg overflow-hidden hover:ring-2 hover:ring-blue-500 transition-all"
data-type="{% if is_image %}image{% elif is_video %}video{% elif is_audio %}audio{% else %}other{% endif %}">
{% if is_image %}
<img src="/cache/{{ item.cid }}/raw"
alt=""
loading="lazy"
class="w-full h-40 object-cover">
{% elif is_video %}
<div class="relative">
<video src="/cache/{{ item.cid }}/raw"
class="w-full h-40 object-cover"
muted
onmouseover="this.play()"
onmouseout="this.pause(); this.currentTime=0;">
</video>
<div class="absolute inset-0 flex items-center justify-center pointer-events-none">
<div class="bg-black bg-opacity-50 rounded-full p-2">
<svg class="w-6 h-6" fill="currentColor" viewBox="0 0 20 20">
<path d="M6.3 2.841A1.5 1.5 0 004 4.11V15.89a1.5 1.5 0 002.3 1.269l9.344-5.89a1.5 1.5 0 000-2.538L6.3 2.84z"/>
</svg>
</div>
</div>
</div>
{% elif is_audio %}
<div class="w-full h-40 bg-gray-900 flex flex-col items-center justify-center">
<svg class="w-12 h-12 text-gray-600 mb-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"
d="M9 19V6l12-3v13M9 19c0 1.105-1.343 2-3 2s-3-.895-3-2 1.343-2 3-2 3 .895 3 2zm12-3c0 1.105-1.343 2-3 2s-3-.895-3-2 1.343-2 3-2 3 .895 3 2zM9 10l12-3"/>
</svg>
<span class="text-gray-500 text-sm">Audio</span>
</div>
{% else %}
<div class="w-full h-40 bg-gray-900 flex items-center justify-center">
<span class="text-gray-600 text-sm">{{ item.type or 'Media' }}</span>
</div>
{% endif %}
<div class="p-3">
{% if item.friendly_name %}
<div class="text-xs text-blue-400 font-medium truncate">{{ item.friendly_name }}</div>
{% else %}
<div class="font-mono text-xs text-gray-500 truncate">{{ item.cid[:16] }}...</div>
{% endif %}
{% if item.filename %}
<div class="text-xs text-gray-600 truncate">{{ item.filename }}</div>
{% endif %}
</div>
</a>
{% endfor %}
</div>
{% if has_more %}
<div hx-get="/media?offset={{ offset + limit }}"
hx-trigger="revealed"
hx-swap="beforeend"
hx-target="#media-grid"
hx-select=".media-item"
class="h-20 flex items-center justify-center text-gray-500 mt-4">
Loading more...
</div>
{% endif %}
{% else %}
<div class="bg-gray-800 border border-gray-700 rounded-lg p-12 text-center">
<svg class="w-16 h-16 mx-auto mb-4 text-gray-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"
d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z"/>
</svg>
<p class="text-gray-500 mb-4">No media files yet</p>
<p class="text-gray-600 text-sm">Run a recipe to generate media artifacts.</p>
</div>
{% endif %}
</div>
<script>
// Show only media cards whose data-type matches the dropdown selection;
// an empty selection shows everything.
function filterMedia() {
    const selected = document.getElementById('type-filter').value;
    for (const card of document.querySelectorAll('.media-item')) {
        const visible = !selected || card.dataset.type === selected;
        card.classList.toggle('hidden', !visible);
    }
}
// Show/hide the friendly-name field based on file count: the optional
// display name only applies when exactly one file is selected, so the
// field is hidden for multi-file uploads.
document.getElementById('upload-file').addEventListener('change', function(e) {
    const nameField = document.getElementById('single-name-field');
    if (e.target.files.length > 1) {
        nameField.style.display = 'none';
    } else {
        nameField.style.display = 'block';
    }
});
// Handle upload form: uploads each selected file sequentially, using a
// chunked protocol (/media/upload/chunk) for files over ~2MB and a single
// multipart POST (/media/upload) for small ones, then reports per-file
// success/failure and reloads the page if anything succeeded.
document.getElementById('upload-form').addEventListener('submit', async function(e) {
    e.preventDefault();
    const form = e.target;
    const fileInput = document.getElementById('upload-file');
    const files = fileInput.files;
    const displayName = document.getElementById('upload-name').value;
    const progressDiv = document.getElementById('upload-progress');
    const progressBar = document.getElementById('progress-bar');
    const progressText = document.getElementById('progress-text');
    const resultDiv = document.getElementById('upload-result');
    const uploadBtn = document.getElementById('upload-btn');
    // Show progress UI and lock the submit button for the duration.
    progressDiv.classList.remove('hidden');
    resultDiv.classList.add('hidden');
    uploadBtn.disabled = true;
    const results = [];
    const errors = [];
    const CHUNK_SIZE = 1024 * 1024; // 1MB chunks
    for (let i = 0; i < files.length; i++) {
        const file = files[i];
        const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
        // Fresh id per file so the server can group this file's chunks.
        const uploadId = crypto.randomUUID();
        const useChunked = file.size > CHUNK_SIZE * 2; // Use chunked for files > 2MB
        progressText.textContent = `Uploading ${i + 1} of ${files.length}: ${file.name}`;
        try {
            let data;
            if (useChunked && totalChunks > 1) {
                // Chunked upload for large files: slice the file and POST each
                // piece with its index so the server can reassemble it.
                for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
                    const start = chunkIndex * CHUNK_SIZE;
                    const end = Math.min(start + CHUNK_SIZE, file.size);
                    const chunk = file.slice(start, end);
                    const chunkForm = new FormData();
                    chunkForm.append('chunk', chunk);
                    chunkForm.append('upload_id', uploadId);
                    chunkForm.append('chunk_index', chunkIndex);
                    chunkForm.append('total_chunks', totalChunks);
                    chunkForm.append('filename', file.name);
                    // The optional friendly name only applies to single-file uploads.
                    if (files.length === 1 && displayName) {
                        chunkForm.append('display_name', displayName);
                    }
                    // Progress combines per-file progress with intra-file chunk progress.
                    const chunkProgress = ((i + (chunkIndex + 1) / totalChunks) / files.length) * 100;
                    progressBar.style.width = `${chunkProgress}%`;
                    progressText.textContent = `Uploading ${i + 1} of ${files.length}: ${file.name} (${chunkIndex + 1}/${totalChunks} chunks)`;
                    const response = await fetch('/media/upload/chunk', {
                        method: 'POST',
                        body: chunkForm,
                    });
                    // Guard against non-JSON error pages (proxy/server errors)
                    // that would otherwise make response.json() throw opaquely.
                    const contentType = response.headers.get('content-type') || '';
                    if (!contentType.includes('application/json')) {
                        const text = await response.text();
                        throw new Error(`Server error (${response.status}): ${text.substring(0, 100)}`);
                    }
                    data = await response.json();
                    if (!response.ok) {
                        throw new Error(data.detail || 'Chunk upload failed');
                    }
                }
            } else {
                // Regular upload for small files: one multipart POST.
                const formData = new FormData();
                formData.append('file', file);
                if (files.length === 1 && displayName) {
                    formData.append('display_name', displayName);
                }
                progressBar.style.width = `${((i + 0.5) / files.length) * 100}%`;
                const response = await fetch('/media/upload', {
                    method: 'POST',
                    body: formData,
                });
                const contentType = response.headers.get('content-type') || '';
                if (!contentType.includes('application/json')) {
                    const text = await response.text();
                    throw new Error(`Server error (${response.status}): ${text.substring(0, 100)}`);
                }
                data = await response.json();
                if (!response.ok) {
                    throw new Error(data.detail || 'Upload failed');
                }
            }
            results.push({ filename: file.name, friendly_name: data.friendly_name, cid: data.cid });
        } catch (err) {
            // Record the failure and continue with the remaining files.
            errors.push({ filename: file.name, error: err.message });
        }
        progressBar.style.width = `${((i + 1) / files.length) * 100}%`;
    }
    progressText.textContent = 'Upload complete!';
    // Show results: green panel for successes, red panel for failures.
    let html = '';
    if (results.length > 0) {
        html += '<div class="bg-green-900 border border-green-700 rounded p-3 text-green-300 mb-2">';
        html += `<p class="font-medium">${results.length} file(s) uploaded successfully!</p>`;
        for (const r of results) {
            html += `<p class="text-sm mt-1">${r.filename} → <span class="font-mono">${r.friendly_name}</span></p>`;
        }
        html += '</div>';
    }
    if (errors.length > 0) {
        html += '<div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">';
        html += `<p class="font-medium">${errors.length} file(s) failed:</p>`;
        for (const e of errors) {
            html += `<p class="text-sm mt-1">${e.filename}: ${e.error}</p>`;
        }
        html += '</div>';
    }
    resultDiv.innerHTML = html;
    resultDiv.classList.remove('hidden');
    if (results.length > 0) {
        // Reload page after 2 seconds so the new items appear in the grid.
        setTimeout(() => location.reload(), 2000);
    } else {
        // Nothing succeeded: re-enable the form so the user can retry.
        uploadBtn.disabled = false;
        uploadBtn.textContent = 'Upload';
    }
});
</script>
{% endblock %}

21
l1/app/templates/cache/not_found.html vendored Normal file
View File

@@ -0,0 +1,21 @@
{% extends "base.html" %}
{% block title %}Content Not Found - Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-2xl mx-auto text-center py-16">
<h1 class="text-6xl font-bold text-gray-400 mb-4">404</h1>
<h2 class="text-2xl font-semibold mb-4">Content Not Found</h2>
<p class="text-gray-400 mb-8">
The content with hash <code class="bg-gray-800 px-2 py-1 rounded">{{ cid[:24] if cid else 'unknown' }}...</code> was not found in the cache.
</p>
<div class="flex justify-center gap-4">
<a href="/media" class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded-lg font-medium">
Browse Media
</a>
<a href="/" class="bg-gray-700 hover:bg-gray-600 px-6 py-3 rounded-lg font-medium">
Go Home
</a>
</div>
</div>
{% endblock %}

View File

@@ -0,0 +1,203 @@
{% extends "base.html" %}
{% set meta = effect.meta or effect %}
{% block title %}{{ meta.name or 'Effect' }} - Effects - Art-DAG L1{% endblock %}
{% block head %}
{{ super() }}
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github-dark.min.css">
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/lisp.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/scheme.min.js"></script>
{% endblock %}
{% block content %}
<div class="max-w-6xl mx-auto">
<!-- Header -->
<div class="flex items-center space-x-4 mb-6">
<a href="/effects" class="text-gray-400 hover:text-white">&larr; Effects</a>
<h1 class="text-2xl font-bold">{{ meta.name or 'Unnamed Effect' }}</h1>
<span class="text-gray-500">v{{ meta.version or '1.0.0' }}</span>
{% if meta.temporal %}
<span class="bg-purple-900 text-purple-300 px-2 py-1 rounded text-sm">temporal</span>
{% endif %}
</div>
{% if meta.author %}
<p class="text-gray-500 mb-2">by {{ meta.author }}</p>
{% endif %}
{% if meta.description %}
<p class="text-gray-400 mb-6">{{ meta.description }}</p>
{% endif %}
<!-- Friendly Name & CID Info -->
<div class="bg-gray-800 rounded-lg p-4 border border-gray-700 mb-6">
{% if effect.friendly_name %}
<div class="mb-4 pb-4 border-b border-gray-700">
<span class="text-gray-500 text-sm">Friendly Name</span>
<p class="text-blue-400 font-medium text-lg mt-1">{{ effect.friendly_name }}</p>
<p class="text-gray-500 text-xs mt-1">Use in recipes: <code class="bg-gray-900 px-2 py-0.5 rounded">(effect {{ effect.base_name }})</code></p>
</div>
{% endif %}
<div class="flex items-center justify-between">
<div>
<span class="text-gray-500 text-sm">Content ID (CID)</span>
<p class="font-mono text-sm text-gray-300 mt-1" id="effect-cid">{{ effect.cid }}</p>
</div>
<button onclick="copyToClipboard('{{ effect.cid }}')"
class="bg-gray-700 hover:bg-gray-600 px-3 py-1 rounded text-sm">
Copy
</button>
</div>
{% if effect.uploaded_at %}
<div class="mt-3 text-gray-500 text-sm">
Uploaded: {{ effect.uploaded_at }}
{% if effect.uploader %}
by {{ effect.uploader }}
{% endif %}
</div>
{% endif %}
</div>
<div class="grid grid-cols-1 lg:grid-cols-3 gap-6">
<!-- Left Column: Parameters & Dependencies -->
<div class="lg:col-span-1 space-y-6">
<!-- Parameters -->
{% if meta.params %}
<div class="bg-gray-800 rounded-lg border border-gray-700">
<div class="border-b border-gray-700 px-4 py-2">
<span class="text-gray-400 text-sm font-medium">Parameters</span>
</div>
<div class="p-4 space-y-4">
{% for param in meta.params %}
<div>
<div class="flex items-center space-x-2 mb-1">
<span class="font-medium text-white">{{ param.name }}</span>
<span class="bg-blue-900 text-blue-300 px-2 py-0.5 rounded text-xs">{{ param.type }}</span>
</div>
{% if param.description %}
<p class="text-gray-400 text-sm">{{ param.description }}</p>
{% endif %}
<div class="flex flex-wrap gap-2 mt-1 text-xs">
{% if param.range %}
<span class="text-gray-500">range: {{ param.range[0] }} - {{ param.range[1] }}</span>
{% endif %}
{% if param.default is defined %}
<span class="text-gray-500">default: {{ param.default }}</span>
{% endif %}
</div>
</div>
{% endfor %}
</div>
</div>
{% endif %}
<!-- Usage in Recipe -->
<div class="bg-gray-800 rounded-lg border border-gray-700">
<div class="border-b border-gray-700 px-4 py-2">
<span class="text-gray-400 text-sm font-medium">Usage in Recipe</span>
</div>
<div class="p-4">
{% if effect.base_name %}
<pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">({{ effect.base_name }} ...)</code></pre>
<p class="text-gray-500 text-xs mt-2">
Use the friendly name to reference this effect.
</p>
{% else %}
<pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">(effect :cid "{{ effect.cid }}")</code></pre>
<p class="text-gray-500 text-xs mt-2">
Reference this effect by CID in your recipe.
</p>
{% endif %}
</div>
</div>
</div>
<!-- Right Column: Source Code -->
<div class="lg:col-span-2">
<div class="bg-gray-800 rounded-lg border border-gray-700">
<div class="border-b border-gray-700 px-4 py-2 flex items-center justify-between">
<span class="text-gray-400 text-sm font-medium">Source Code (S-expression)</span>
<div class="flex items-center space-x-2">
<a href="/effects/{{ effect.cid }}/source"
class="text-gray-400 hover:text-white text-sm"
download="{{ meta.name or 'effect' }}.sexp">
Download
</a>
</div>
</div>
<div class="p-4">
<pre class="text-sm overflow-x-auto rounded bg-gray-900"><code class="language-lisp" id="source-code">Loading...</code></pre>
</div>
</div>
</div>
</div>
<!-- Actions -->
<div class="flex items-center space-x-4 mt-8">
{% if effect.cid.startswith('Qm') or effect.cid.startswith('bafy') %}
<a href="https://ipfs.io/ipfs/{{ effect.cid }}"
target="_blank"
class="bg-cyan-600 hover:bg-cyan-700 px-4 py-2 rounded font-medium">
View on IPFS
</a>
{% endif %}
<button hx-post="/effects/{{ effect.cid }}/publish"
hx-target="#action-result"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Share to L2
</button>
<button onclick="deleteEffect('{{ effect.cid }}')"
class="bg-red-600 hover:bg-red-700 px-4 py-2 rounded font-medium">
Delete
</button>
<span id="action-result"></span>
</div>
</div>
<script>
// Fetch the effect's S-expression source once the DOM is ready and render
// it with syntax highlighting; show a fallback message if the fetch fails.
document.addEventListener('DOMContentLoaded', async function() {
    const codeEl = document.getElementById('source-code');
    try {
        const response = await fetch('/effects/{{ effect.cid }}/source');
        const source = await response.text();
        codeEl.textContent = source;
        hljs.highlightElement(codeEl);
    } catch (error) {
        codeEl.textContent = 'Failed to load source code';
    }
});
// Copy text to the clipboard and flash "Copied!" feedback on the button
// that triggered the call.
// Fix: the original read the implicit global `event` inside the promise
// callback; `window.event` is only defined during event dispatch, so by
// the time the clipboard promise resolves it is undefined and
// `event.target` throws. Capture the trigger element synchronously instead.
function copyToClipboard(text) {
    const btn = (typeof event !== 'undefined' && event) ? event.target : null;
    navigator.clipboard.writeText(text).then(() => {
        if (!btn) return; // clipboard write still happened; just skip feedback
        const originalText = btn.textContent;
        btn.textContent = 'Copied!';
        setTimeout(() => { btn.textContent = originalText; }, 1500);
    });
}
// Delete an effect from the local cache after user confirmation.
// On success, shows a notice and redirects to the effects list after 1s;
// on failure, renders the error message into #action-result.
function deleteEffect(cid) {
    if (!confirm('Delete this effect from local cache? IPFS copies will persist.')) return;
    fetch('/effects/' + cid, { method: 'DELETE' })
        .then(response => {
            // Treat any non-2xx status as a failure.
            if (!response.ok) throw new Error('Delete failed');
            return response.json();
        })
        .then(data => {
            document.getElementById('action-result').innerHTML =
                '<span class="text-green-400">Deleted. Redirecting...</span>';
            setTimeout(() => { window.location.href = '/effects'; }, 1000);
        })
        .catch(error => {
            document.getElementById('action-result').innerHTML =
                '<span class="text-red-400">' + error.message + '</span>';
        });
}
</script>
{% endblock %}

View File

@@ -0,0 +1,200 @@
{% extends "base.html" %}
{% block title %}Effects - Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-6xl mx-auto">
<div class="flex items-center justify-between mb-6">
<h1 class="text-3xl font-bold">Effects</h1>
<button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Upload Effect
</button>
</div>
<!-- Upload Modal -->
<div id="upload-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
<div class="bg-gray-800 rounded-lg p-6 w-full max-w-md border border-gray-700">
<div class="flex justify-between items-center mb-4">
<h2 class="text-xl font-semibold">Upload Effect</h2>
<button onclick="document.getElementById('upload-modal').classList.add('hidden')"
class="text-gray-400 hover:text-white">
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
</svg>
</button>
</div>
<form id="upload-form" enctype="multipart/form-data" class="space-y-4">
<div>
<label class="block text-gray-400 text-sm mb-1">Effect File (.sexp)</label>
<input type="file" name="file" id="upload-file" required
accept=".sexp,.lisp"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white file:mr-4 file:py-2 file:px-4 file:rounded file:border-0 file:bg-blue-600 file:text-white hover:file:bg-blue-700">
</div>
<div>
<label class="block text-gray-400 text-sm mb-1">Friendly Name (optional)</label>
<input type="text" name="display_name" id="upload-name" placeholder="e.g., color-shift"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
<p class="text-gray-500 text-xs mt-1">A name to reference this effect in recipes</p>
</div>
<div id="upload-result" class="hidden"></div>
<div class="flex justify-end space-x-3">
<button type="button" onclick="document.getElementById('upload-modal').classList.add('hidden')"
class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
Cancel
</button>
<button type="submit" id="upload-btn"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Upload
</button>
</div>
</form>
</div>
</div>
<p class="text-gray-400 mb-8">
Effects are S-expression files that define video processing operations.
Each effect is stored in IPFS and can be referenced by name in recipes.
</p>
{% if effects %}
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4" id="effects-list">
{% for effect in effects %}
{% set meta = effect.meta or effect %}
<a href="/effects/{{ effect.cid }}"
class="effect-card bg-gray-800 border border-gray-700 rounded-lg p-4 hover:border-gray-600 transition-colors">
<div class="flex items-center justify-between mb-2">
<span class="font-medium text-white">{{ meta.name or 'Unnamed' }}</span>
<span class="text-gray-500 text-sm">v{{ meta.version or '1.0.0' }}</span>
</div>
{% if meta.description %}
<p class="text-gray-400 text-sm mb-3 line-clamp-2">{{ meta.description }}</p>
{% endif %}
<div class="flex items-center justify-between text-sm mb-2">
{% if meta.author %}
<span class="text-gray-500">by {{ meta.author }}</span>
{% else %}
<span></span>
{% endif %}
{% if meta.temporal %}
<span class="bg-purple-900 text-purple-300 px-2 py-0.5 rounded text-xs">temporal</span>
{% endif %}
</div>
{% if meta.params %}
<div class="text-gray-500 text-sm">
{{ meta.params | length }} parameter{{ 's' if meta.params | length != 1 else '' }}
</div>
{% endif %}
<div class="mt-3 text-xs">
{% if effect.friendly_name %}
<span class="text-blue-400 font-medium">{{ effect.friendly_name }}</span>
{% else %}
<span class="text-gray-600 font-mono truncate">{{ effect.cid[:24] }}...</span>
{% endif %}
</div>
</a>
{% endfor %}
</div>
{% if has_more %}
<div hx-get="/effects?offset={{ offset + limit }}&limit={{ limit }}"
hx-trigger="revealed"
hx-swap="afterend"
hx-select="#effects-list > *"
class="h-20 flex items-center justify-center text-gray-500">
Loading more...
</div>
{% endif %}
{% else %}
<div class="bg-gray-800 border border-gray-700 rounded-lg p-12 text-center">
<svg class="w-16 h-16 mx-auto mb-4 text-gray-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"
d="M10 20l4-16m4 4l4 4-4 4M6 16l-4-4 4-4"/>
</svg>
<p class="text-gray-500 mb-4">No effects uploaded yet.</p>
<p class="text-gray-600 text-sm mb-6">
Effects are S-expression files with metadata in comment headers.
</p>
<button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded font-medium">
Upload Your First Effect
</button>
</div>
{% endif %}
</div>
<script>
// Handle upload form: POST the selected .sexp effect file (plus optional
// friendly name) to /effects/upload, render a success or error panel, and
// reload the page shortly after a successful upload.
document.getElementById('upload-form').addEventListener('submit', async function(e) {
    e.preventDefault();
    const form = e.target;
    const fileInput = document.getElementById('upload-file');
    const displayName = document.getElementById('upload-name').value;
    const resultDiv = document.getElementById('upload-result');
    const uploadBtn = document.getElementById('upload-btn');
    const file = fileInput.files[0];
    if (!file) return;
    const formData = new FormData();
    formData.append('file', file);
    // Friendly name is optional; only send it when provided.
    if (displayName) {
        formData.append('display_name', displayName);
    }
    // Lock the button while the request is in flight.
    uploadBtn.disabled = true;
    uploadBtn.textContent = 'Uploading...';
    resultDiv.classList.add('hidden');
    try {
        const response = await fetch('/effects/upload', {
            method: 'POST',
            body: formData
        });
        const data = await response.json();
        if (response.ok) {
            resultDiv.innerHTML = `
                <div class="bg-green-900 border border-green-700 rounded p-3 text-green-300">
                    <p class="font-medium">Effect uploaded!</p>
                    <p class="text-sm mt-1">${data.name} <span class="font-mono">${data.friendly_name}</span></p>
                </div>
            `;
            resultDiv.classList.remove('hidden');
            // Brief pause so the user sees the confirmation before reload.
            setTimeout(() => location.reload(), 1500);
        } else {
            resultDiv.innerHTML = `
                <div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">
                    <p class="font-medium">Upload failed</p>
                    <p class="text-sm mt-1">${data.detail || 'Unknown error'}</p>
                </div>
            `;
            resultDiv.classList.remove('hidden');
            uploadBtn.disabled = false;
            uploadBtn.textContent = 'Upload';
        }
    } catch (error) {
        // Network failure or non-JSON response from the server.
        resultDiv.innerHTML = `
            <div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">
                <p class="font-medium">Upload failed</p>
                <p class="text-sm mt-1">${error.message}</p>
            </div>
        `;
        resultDiv.classList.remove('hidden');
        uploadBtn.disabled = false;
        uploadBtn.textContent = 'Upload';
    }
});
</script>
{% endblock %}

View File

@@ -0,0 +1,22 @@
<a href="{{ link }}" class="block rounded border border-stone-200 bg-white hover:bg-stone-50 transition-colors no-underline" data-fragment="link-card" data-app="artdag" data-hx-disable>
<div class="flex flex-row items-center gap-3 p-3">
<div class="flex-shrink-0 w-10 h-10 rounded bg-stone-100 flex items-center justify-center text-stone-500">
{% if content_type == "recipe" %}
<i class="fas fa-scroll text-sm"></i>
{% elif content_type == "effect" %}
<i class="fas fa-magic text-sm"></i>
{% elif content_type == "run" %}
<i class="fas fa-play-circle text-sm"></i>
{% else %}
<i class="fas fa-cube text-sm"></i>
{% endif %}
</div>
<div class="flex-1 min-w-0">
<div class="font-medium text-stone-900 text-sm truncate">{{ title }}</div>
{% if description %}
<div class="text-xs text-stone-500 clamp-2">{{ description }}</div>
{% endif %}
<div class="text-xs text-stone-400 mt-0.5">{{ content_type }} &middot; {{ cid[:12] }}&hellip;</div>
</div>
</div>
</a>

View File

@@ -0,0 +1,7 @@
<div class="relative nav-group">
<a href="{{ artdag_url }}"
class="justify-center cursor-pointer flex flex-row items-center gap-2 rounded bg-stone-200 text-black p-3"
data-hx-disable>
<i class="fas fa-project-diagram text-sm"></i> art-dag
</a>
</div>

View File

@@ -0,0 +1,51 @@
{% extends "base.html" %}
{% block title %}Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-4xl mx-auto text-center py-12">
<h1 class="text-4xl font-bold mb-4">Art-DAG L1</h1>
<p class="text-xl text-gray-400 mb-8">Content-Addressable Media Processing</p>
<div class="grid grid-cols-1 md:grid-cols-3 gap-6 max-w-3xl mx-auto mb-12">
<a href="/runs"
class="bg-gray-800 border border-gray-700 rounded-lg p-6 hover:border-blue-500 transition-colors">
<div class="text-blue-400 text-3xl font-bold mb-2">{{ stats.runs or 0 }}</div>
<div class="text-gray-400">Execution Runs</div>
</a>
<a href="/recipes"
class="bg-gray-800 border border-gray-700 rounded-lg p-6 hover:border-green-500 transition-colors">
<div class="text-green-400 text-3xl font-bold mb-2">{{ stats.recipes or 0 }}</div>
<div class="text-gray-400">Recipes</div>
</a>
<a href="/effects"
class="bg-gray-800 border border-gray-700 rounded-lg p-6 hover:border-cyan-500 transition-colors">
<div class="text-cyan-400 text-3xl font-bold mb-2">{{ stats.effects or 0 }}</div>
<div class="text-gray-400">Effects</div>
</a>
<a href="/media"
class="bg-gray-800 border border-gray-700 rounded-lg p-6 hover:border-purple-500 transition-colors">
<div class="text-purple-400 text-3xl font-bold mb-2">{{ stats.media or 0 }}</div>
<div class="text-gray-400">Media Files</div>
</a>
<a href="/storage"
class="bg-gray-800 border border-gray-700 rounded-lg p-6 hover:border-orange-500 transition-colors">
<div class="text-orange-400 text-3xl font-bold mb-2">{{ stats.storage or 0 }}</div>
<div class="text-gray-400">Storage Providers</div>
</a>
</div>
{% if not user %}
<div class="bg-gray-800 border border-gray-700 rounded-lg p-8 max-w-md mx-auto mb-12">
<p class="text-gray-400 mb-4">Sign in through your L2 server to access all features.</p>
<a href="/auth" class="text-blue-400 hover:text-blue-300">Sign In &rarr;</a>
</div>
{% endif %}
{% if readme_html %}
<div class="text-left bg-gray-800 border border-gray-700 rounded-lg p-8 prose prose-invert max-w-none">
{{ readme_html | safe }}
</div>
{% endif %}
</div>
{% endblock %}

View File

@@ -0,0 +1,265 @@
{% extends "base.html" %}
{% block title %}{{ recipe.name }} - Recipe - Art-DAG L1{% endblock %}
{% block head %}
{{ super() }}
<script src="https://cdnjs.cloudflare.com/ajax/libs/cytoscape/3.23.0/cytoscape.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/dagre/0.8.5/dagre.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/cytoscape-dagre@2.5.0/cytoscape-dagre.min.js"></script>
{% endblock %}
{% block content %}
<div class="max-w-6xl mx-auto">
<!-- Header -->
<div class="flex items-center space-x-4 mb-6">
<a href="/recipes" class="text-gray-400 hover:text-white">&larr; Recipes</a>
<h1 class="text-2xl font-bold">{{ recipe.name or 'Unnamed Recipe' }}</h1>
{% if recipe.version %}
<span class="text-gray-500">v{{ recipe.version }}</span>
{% endif %}
</div>
{% if recipe.description %}
<p class="text-gray-400 mb-4">{{ recipe.description }}</p>
{% endif %}
<!-- Metadata -->
<div class="bg-gray-800 rounded-lg p-4 border border-gray-700 mb-6">
<div class="grid grid-cols-2 md:grid-cols-4 gap-4 text-sm">
<div>
<span class="text-gray-500">Recipe ID</span>
<p class="text-gray-300 font-mono text-xs truncate" title="{{ recipe.recipe_id }}">{{ recipe.recipe_id[:16] }}...</p>
</div>
{% if recipe.ipfs_cid %}
<div>
<span class="text-gray-500">IPFS CID</span>
<p class="text-gray-300 font-mono text-xs truncate" title="{{ recipe.ipfs_cid }}">{{ recipe.ipfs_cid[:16] }}...</p>
</div>
{% endif %}
<div>
<span class="text-gray-500">Steps</span>
<p class="text-gray-300">{{ recipe.step_count or recipe.steps|length }}</p>
</div>
{% if recipe.author %}
<div>
<span class="text-gray-500">Author</span>
<p class="text-gray-300">{{ recipe.author }}</p>
</div>
{% endif %}
</div>
</div>
{% if recipe.type == 'streaming' %}
<!-- Streaming Recipe Info -->
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6 p-4">
<div class="flex items-center space-x-2 mb-2">
<span class="bg-purple-900 text-purple-300 px-2 py-1 rounded text-sm">Streaming Recipe</span>
</div>
<p class="text-gray-400 text-sm">
This recipe uses frame-by-frame streaming rendering. The pipeline is defined as an S-expression that generates frames dynamically.
</p>
</div>
{% else %}
<!-- DAG Visualization -->
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6">
<div class="border-b border-gray-700 px-4 py-2 flex items-center justify-between">
<span class="text-gray-400 text-sm">Pipeline DAG</span>
<span class="text-gray-500 text-sm">{{ recipe.steps | length }} steps</span>
</div>
<div id="dag-container" class="h-80"></div>
</div>
<!-- Steps -->
<h2 class="text-lg font-semibold mb-4">Steps</h2>
<div class="space-y-3 mb-8">
{% for step in recipe.steps %}
{% set colors = {
'effect': 'blue',
'analyze': 'purple',
'transform': 'green',
'combine': 'orange',
'output': 'cyan'
} %}
{% set color = colors.get(step.type, 'gray') %}
<div class="bg-gray-800 rounded-lg p-4 border border-gray-700">
<div class="flex items-center justify-between mb-2">
<div class="flex items-center space-x-3">
<span class="w-8 h-8 rounded bg-{{ color }}-900 text-{{ color }}-300 flex items-center justify-center font-mono text-sm">
{{ loop.index }}
</span>
<span class="font-medium">{{ step.name }}</span>
<span class="bg-{{ color }}-900 text-{{ color }}-300 px-2 py-0.5 rounded text-xs">
{{ step.type }}
</span>
</div>
</div>
{% if step.inputs %}
<div class="text-sm text-gray-400 mb-1">
Inputs: {{ step.inputs | join(', ') }}
</div>
{% endif %}
{% if step.params %}
<div class="mt-2 bg-gray-900 rounded p-2">
<code class="text-xs text-gray-400">{{ step.params | tojson }}</code>
</div>
{% endif %}
</div>
{% endfor %}
</div>
{% endif %}
<!-- Source Code -->
<h2 class="text-lg font-semibold mb-4">Recipe (S-expression)</h2>
<div class="bg-gray-900 rounded-lg p-4 border border-gray-700">
{% if recipe.sexp %}
<pre class="text-sm font-mono text-gray-300 overflow-x-auto whitespace-pre-wrap sexp-code">{{ recipe.sexp }}</pre>
{% else %}
<p class="text-gray-500">No source available</p>
{% endif %}
</div>
<script>
// Single-pass S-expression syntax highlighter. Scans the text once,
// character by character, so overlapping regex replacements can never
// corrupt earlier markup. Returns an HTML string of <span>-wrapped tokens.
function highlightSexp(text) {
    // Special forms get the "keyword" treatment when they open a list.
    const KEYWORDS = new Set(['plan','recipe','def','->','stream','let','lambda','if','cond','define']);
    // Built-in pipeline primitives, styled distinctly from special forms.
    const BUILTINS = new Set(['source','effect','sequence','segment','resize','transform','layer','blend','mux','analyze','fused-pipeline']);
    const escapeHtml = (s) => s.replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;');
    const wrap = (cls, s) => '<span class="' + cls + '">' + escapeHtml(s) + '</span>';

    const size = text.length;
    let html = '';
    let pos = 0;
    while (pos < size) {
        const ch = text[pos];
        if (ch === ';' && pos + 1 < size && text[pos+1] === ';') {
            // ;; line comment — runs to end of line (or end of input).
            let nl = text.indexOf('\n', pos);
            if (nl === -1) nl = size;
            html += wrap('text-gray-500', text.slice(pos, nl));
            pos = nl;
        }
        else if (ch === '"') {
            // String literal, honoring backslash escapes.
            let end = pos + 1;
            while (end < size && text[end] !== '"') { if (text[end] === '\\') end++; end++; }
            if (end < size) end++; // include the closing quote
            html += wrap('text-green-400', text.slice(pos, end));
            pos = end;
        }
        else if (ch === ':' && pos + 1 < size && /[a-zA-Z_-]/.test(text[pos+1])) {
            // :keyword argument.
            let end = pos + 1;
            while (end < size && /[a-zA-Z0-9_-]/.test(text[end])) end++;
            html += wrap('text-purple-400', text.slice(pos, end));
            pos = end;
        }
        else if (ch === '(') {
            html += wrap('text-yellow-500', '(');
            pos++;
            // Preserve any whitespace between '(' and the head symbol verbatim.
            let pad = '';
            while (pos < size && (text[pos] === ' ' || text[pos] === '\t')) { pad += text[pos]; pos++; }
            html += escapeHtml(pad);
            if (pos < size && /[a-zA-Z_>-]/.test(text[pos])) {
                let end = pos;
                while (end < size && /[a-zA-Z0-9_>-]/.test(text[end])) end++;
                const head = text.slice(pos, end);
                if (KEYWORDS.has(head)) html += wrap('text-pink-400 font-semibold', head);
                else if (BUILTINS.has(head)) html += wrap('text-blue-400', head);
                else html += escapeHtml(head);
                pos = end;
            }
        }
        else if (ch === ')') {
            html += wrap('text-yellow-500', ')');
            pos++;
        }
        else if (/[0-9]/.test(ch) && (pos === 0 || /[\s(]/.test(text[pos-1]))) {
            // Numeric literal — only when preceded by whitespace or '('.
            let end = pos;
            while (end < size && /[0-9.]/.test(text[end])) end++;
            html += wrap('text-orange-300', text.slice(pos, end));
            pos = end;
        }
        else {
            // Plain run: advance until the next character that starts a token.
            let end = pos;
            while (end < size && !'(;":)'.includes(text[end])) {
                if (text[end] === ':' && end + 1 < size && /[a-zA-Z_-]/.test(text[end+1])) break;
                if (/[0-9]/.test(text[end]) && (end === 0 || /[\s(]/.test(text[end-1]))) break;
                end++;
            }
            if (end === pos) { html += escapeHtml(ch); pos++; }
            else { html += escapeHtml(text.slice(pos, end)); pos = end; }
        }
    }
    return html;
}
// Apply the highlighter in place to every rendered S-expression block;
// textContent is read (pre-escape), highlightSexp escapes on output.
document.querySelectorAll('.sexp-code').forEach(el => {
    el.innerHTML = highlightSexp(el.textContent);
});
</script>
<!-- Actions -->
<div class="flex items-center space-x-4 mt-8">
<button hx-post="/runs/rerun/{{ recipe.recipe_id }}"
hx-target="#action-result"
hx-swap="innerHTML"
class="bg-green-600 hover:bg-green-700 px-4 py-2 rounded font-medium">
Run Recipe
</button>
{% if recipe.ipfs_cid %}
<a href="https://ipfs.io/ipfs/{{ recipe.ipfs_cid }}"
target="_blank"
class="bg-cyan-600 hover:bg-cyan-700 px-4 py-2 rounded font-medium">
View on IPFS
</a>
{% elif recipe.recipe_id.startswith('Qm') or recipe.recipe_id.startswith('bafy') %}
<a href="https://ipfs.io/ipfs/{{ recipe.recipe_id }}"
target="_blank"
class="bg-cyan-600 hover:bg-cyan-700 px-4 py-2 rounded font-medium">
View on IPFS
</a>
{% endif %}
<button hx-post="/recipes/{{ recipe.recipe_id }}/publish"
hx-target="#action-result"
class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded font-medium">
Share to L2
</button>
<button hx-delete="/recipes/{{ recipe.recipe_id }}/ui"
hx-target="#action-result"
hx-confirm="Delete this recipe? This cannot be undone."
class="bg-red-600 hover:bg-red-700 px-4 py-2 rounded font-medium">
Delete
</button>
<span id="action-result"></span>
</div>
</div>
<script>
document.addEventListener('DOMContentLoaded', function() {
const cy = cytoscape({
container: document.getElementById('dag-container'),
style: [
{ selector: 'node', style: {
'label': 'data(label)',
'background-color': 'data(color)',
'color': '#fff',
'text-valign': 'center',
'text-halign': 'center',
'font-size': '11px',
'width': 'label',
'height': 35,
'padding': '10px',
'shape': 'round-rectangle'
}},
{ selector: 'edge', style: {
'width': 2,
'line-color': '#4b5563',
'target-arrow-color': '#4b5563',
'target-arrow-shape': 'triangle',
'curve-style': 'bezier'
}}
],
elements: {{ dag_elements | tojson }},
layout: { name: 'dagre', rankDir: 'LR', padding: 30 }
});
});
</script>
{% endblock %}

View File

@@ -0,0 +1,136 @@
{% extends "base.html" %}
{% block title %}Recipes - Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-6xl mx-auto">
<div class="flex items-center justify-between mb-6">
<h1 class="text-3xl font-bold">Recipes</h1>
<label class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium cursor-pointer">
Upload Recipe
<input type="file" accept=".sexp,.yaml,.yml" class="hidden" id="recipe-upload" />
</label>
</div>
<p class="text-gray-400 mb-8">
Recipes define processing pipelines for audio and media. Each recipe is a DAG of effects.
</p>
{% if recipes %}
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4" id="recipes-list">
{% for recipe in recipes %}
<a href="/recipes/{{ recipe.recipe_id }}"
class="recipe-card bg-gray-800 border border-gray-700 rounded-lg p-4 hover:border-gray-600 transition-colors">
<div class="flex items-center justify-between mb-2">
<span class="font-medium text-white">{{ recipe.name }}</span>
{% if recipe.version %}
<span class="text-gray-500 text-sm">v{{ recipe.version }}</span>
{% endif %}
</div>
{% if recipe.description %}
<p class="text-gray-400 text-sm mb-3 line-clamp-2">{{ recipe.description }}</p>
{% endif %}
<div class="flex items-center justify-between text-sm">
<span class="text-gray-500">{{ recipe.step_count or 0 }} steps</span>
{% if recipe.last_run %}
<span class="text-gray-500">Last run: {{ recipe.last_run }}</span>
{% endif %}
</div>
{% if recipe.tags %}
<div class="mt-2 flex flex-wrap gap-1">
{% for tag in recipe.tags %}
<span class="bg-gray-700 text-gray-300 px-2 py-0.5 rounded text-xs">{{ tag }}</span>
{% endfor %}
</div>
{% endif %}
<div class="mt-3 text-xs">
{% if recipe.friendly_name %}
<span class="text-blue-400 font-medium">{{ recipe.friendly_name }}</span>
{% else %}
<span class="text-gray-600 font-mono truncate">{{ recipe.recipe_id[:24] }}...</span>
{% endif %}
</div>
</a>
{% endfor %}
</div>
{% if has_more %}
<div hx-get="/recipes?offset={{ offset + limit }}&limit={{ limit }}"
hx-trigger="revealed"
hx-swap="afterend"
hx-select="#recipes-list > *"
class="h-20 flex items-center justify-center text-gray-500">
Loading more...
</div>
{% endif %}
{% else %}
<div class="bg-gray-800 border border-gray-700 rounded-lg p-12 text-center">
<p class="text-gray-500 mb-4">No recipes available.</p>
<p class="text-gray-600 text-sm mb-6">
Recipes are S-expression files (.sexp) that define processing pipelines.
</p>
<label class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded font-medium cursor-pointer inline-block">
Upload Your First Recipe
<input type="file" accept=".sexp,.yaml,.yml" class="hidden" id="recipe-upload-empty" />
</label>
</div>
{% endif %}
</div>
<div id="upload-result" class="fixed bottom-4 right-4 max-w-sm"></div>
<script>
// HTML-escape untrusted text before innerHTML interpolation (XSS guard):
// recipe names/versions/ids echo back user-supplied upload content.
function escapeRecipeText(s) {
    return String(s)
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;');
}

// Upload the selected recipe file to /recipes/upload and render a toast
// in #upload-result; reloads on success so the new recipe card appears.
function handleRecipeUpload(input) {
    const file = input.files[0];
    if (!file) return;
    const formData = new FormData();
    formData.append('file', file);
    fetch('/recipes/upload', {
        method: 'POST',
        body: formData
    })
    .then(response => {
        if (!response.ok) throw new Error('Upload failed');
        return response.json();
    })
    .then(data => {
        const resultDiv = document.getElementById('upload-result');
        resultDiv.innerHTML = `
            <div class="bg-green-900 border border-green-700 rounded-lg p-4">
                <p class="text-green-300 font-medium">Recipe uploaded!</p>
                <p class="text-green-400 text-sm mt-1">${escapeRecipeText(data.name)} v${escapeRecipeText(data.version)}</p>
                <p class="text-gray-400 text-xs mt-2 font-mono">${escapeRecipeText(data.recipe_id)}</p>
            </div>
        `;
        setTimeout(() => {
            window.location.reload();
        }, 1500);
    })
    .catch(error => {
        const resultDiv = document.getElementById('upload-result');
        resultDiv.innerHTML = `
            <div class="bg-red-900 border border-red-700 rounded-lg p-4">
                <p class="text-red-300 font-medium">Upload failed</p>
                <p class="text-red-400 text-sm mt-1">${escapeRecipeText(error.message)}</p>
            </div>
        `;
    });
    // Clear immediately so picking the same file again re-fires 'change'.
    input.value = '';
}
// Wire both upload inputs (header button and empty-state button); either
// may be absent depending on whether any recipes exist, hence ?.
document.getElementById('recipe-upload')?.addEventListener('change', function() {
    handleRecipeUpload(this);
});
document.getElementById('recipe-upload-empty')?.addEventListener('change', function() {
    handleRecipeUpload(this);
});
</script>
{% endblock %}

View File

@@ -0,0 +1,89 @@
{# Run card partial - expects 'run' variable #}
{% set status_colors = {
'completed': 'green',
'running': 'blue',
'pending': 'yellow',
'failed': 'red',
'cached': 'purple'
} %}
{% set color = status_colors.get(run.status, 'gray') %}
<a href="/runs/{{ run.run_id }}"
class="block bg-gray-800 border border-gray-700 rounded-lg p-4 hover:border-gray-600 transition-colors">
<div class="flex items-center justify-between mb-2">
<div class="flex items-center space-x-3">
<span class="font-mono text-sm text-gray-400">{{ run.run_id[:12] }}...</span>
<span class="bg-{{ color }}-900 text-{{ color }}-300 px-2 py-0.5 rounded text-xs uppercase">
{{ run.status }}
</span>
{% if run.cached %}
<span class="bg-purple-900 text-purple-300 px-2 py-0.5 rounded text-xs">cached</span>
{% endif %}
</div>
<span class="text-gray-500 text-sm">{{ run.created_at }}</span>
</div>
<div class="flex items-center justify-between mb-3">
<div class="flex items-center space-x-4 text-sm">
<span class="text-gray-400">
Recipe: <span class="text-white">{{ run.recipe_name or (run.recipe[:12] ~ '...' if run.recipe and run.recipe|length > 12 else run.recipe) or 'Unknown' }}</span>
</span>
{% if run.total_steps %}
<span class="text-gray-400">
Steps: <span class="text-white">{{ run.executed or 0 }}/{{ run.total_steps }}</span>
</span>
{% endif %}
</div>
</div>
{# Media previews row #}
<div class="flex items-center space-x-4">
{# Input previews #}
{% if run.input_previews %}
<div class="flex items-center space-x-1">
<span class="text-xs text-gray-500 mr-1">In:</span>
{% for inp in run.input_previews %}
{% if inp.media_type and inp.media_type.startswith('image/') %}
<img src="/cache/{{ inp.cid }}/raw" alt="" class="w-10 h-10 object-cover rounded">
{% elif inp.media_type and inp.media_type.startswith('video/') %}
<video src="/cache/{{ inp.cid }}/raw" class="w-10 h-10 object-cover rounded" muted></video>
{% else %}
<div class="w-10 h-10 bg-gray-700 rounded flex items-center justify-center text-gray-500 text-xs">?</div>
{% endif %}
{% endfor %}
{% if run.inputs and run.inputs|length > 3 %}
<span class="text-xs text-gray-500">+{{ run.inputs|length - 3 }}</span>
{% endif %}
</div>
{% elif run.inputs %}
<div class="text-xs text-gray-500">
{{ run.inputs|length }} input(s)
</div>
{% endif %}
{# Arrow #}
<span class="text-gray-600">&rarr;</span>
{# Output preview - prefer IPFS URLs when available #}
{% if run.output_cid %}
<div class="flex items-center space-x-1">
<span class="text-xs text-gray-500 mr-1">Out:</span>
{% if run.output_media_type and run.output_media_type.startswith('image/') %}
<img src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" alt="" class="w-10 h-10 object-cover rounded">
{% elif run.output_media_type and run.output_media_type.startswith('video/') %}
<video src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" class="w-10 h-10 object-cover rounded" muted></video>
{% else %}
<div class="w-10 h-10 bg-gray-700 rounded flex items-center justify-center text-gray-500 text-xs">?</div>
{% endif %}
</div>
{% else %}
<span class="text-xs text-gray-500">No output yet</span>
{% endif %}
<div class="flex-grow"></div>
{% if run.output_cid %}
<span class="font-mono text-xs text-gray-600">{{ run.output_cid[:12] }}...</span>
{% endif %}
</div>
</a>

View File

@@ -0,0 +1,62 @@
{% extends "base.html" %}
{% block title %}Run Artifacts{% endblock %}
{% block content %}
<div class="mb-6">
<a href="/runs/{{ run_id }}/detail" class="inline-flex items-center text-blue-400 hover:text-blue-300">
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
</svg>
Back to Run
</a>
</div>
<h1 class="text-2xl font-bold text-white mb-6">Run Artifacts</h1>
{% if artifacts %}
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
{% for artifact in artifacts %}
<div class="bg-gray-800 rounded-lg p-4">
<div class="flex items-center justify-between mb-3">
<span class="px-2 py-1 text-xs rounded
{% if artifact.role == 'input' %}bg-blue-600
{% elif artifact.role == 'output' %}bg-green-600
{% else %}bg-purple-600{% endif %}">
{{ artifact.role }}
</span>
<span class="text-sm text-gray-400">{{ artifact.step_name }}</span>
</div>
<div class="mb-3">
<p class="text-xs text-gray-500 mb-1">Content Hash</p>
<p class="font-mono text-xs text-gray-300 truncate">{{ artifact.hash }}</p>
</div>
<div class="flex items-center justify-between text-sm">
<span class="text-gray-400">
{% if artifact.media_type == 'video' %}Video
{% elif artifact.media_type == 'image' %}Image
{% elif artifact.media_type == 'audio' %}Audio
{% else %}File{% endif %}
</span>
<span class="text-gray-500">{{ (artifact.size_bytes / 1024)|round(1) }} KB</span>
</div>
<div class="mt-3 flex gap-2">
<a href="/cache/{{ artifact.hash }}" class="flex-1 px-3 py-1 bg-gray-700 hover:bg-gray-600 text-center text-sm rounded transition-colors">
View
</a>
<a href="/cache/{{ artifact.hash }}/raw" class="flex-1 px-3 py-1 bg-blue-600 hover:bg-blue-700 text-center text-sm rounded transition-colors">
Download
</a>
</div>
</div>
{% endfor %}
</div>
{% else %}
<div class="bg-gray-800 rounded-lg p-6 text-center">
<p class="text-gray-400">No artifacts found for this run.</p>
</div>
{% endif %}
{% endblock %}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,45 @@
{% extends "base.html" %}
{% block title %}Runs - Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-6xl mx-auto">
<div class="flex items-center justify-between mb-6">
<h1 class="text-3xl font-bold">Execution Runs</h1>
<a href="/recipes" class="text-gray-400 hover:text-white">Browse Recipes &rarr;</a>
</div>
{% if runs %}
<div class="space-y-4" id="runs-list">
{% for run in runs %}
{% include "runs/_run_card.html" %}
{% endfor %}
</div>
{% if has_more %}
<div hx-get="/runs?offset={{ offset + limit }}"
hx-trigger="revealed"
hx-swap="afterend"
hx-select="#runs-list > *"
class="h-20 flex items-center justify-center text-gray-500">
Loading more...
</div>
{% endif %}
{% else %}
<div class="bg-gray-800 border border-gray-700 rounded-lg p-12 text-center">
<div class="text-gray-500 mb-4">
<svg class="w-16 h-16 mx-auto mb-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"
d="M13 10V3L4 14h7v7l9-11h-7z"/>
</svg>
<p class="text-xl">No runs yet</p>
</div>
<p class="text-gray-600 mb-6">Execute a recipe to see your runs here.</p>
<a href="/recipes" class="bg-blue-600 hover:bg-blue-700 px-6 py-2 rounded font-medium">
Browse Recipes
</a>
</div>
{% endif %}
</div>
{% endblock %}

View File

@@ -0,0 +1,99 @@
{% extends "base.html" %}
{% block title %}Run Plan - {{ run_id[:16] }}{% endblock %}
{% block head %}
<script src="https://unpkg.com/cytoscape@3.25.0/dist/cytoscape.min.js"></script>
{% endblock %}
{% block content %}
<div class="mb-6">
<a href="/runs/{{ run_id }}/detail" class="inline-flex items-center text-blue-400 hover:text-blue-300">
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
</svg>
Back to Run
</a>
</div>
<h1 class="text-2xl font-bold text-white mb-6">Execution Plan</h1>
{% if plan %}
<div class="grid grid-cols-1 lg:grid-cols-2 gap-6">
<!-- DAG Visualization -->
<div class="bg-gray-800 rounded-lg p-4">
<h2 class="text-lg font-semibold text-white mb-4">DAG Visualization</h2>
<div id="dag-container" class="w-full h-96 bg-gray-900 rounded"></div>
</div>
<!-- Steps List -->
<div class="bg-gray-800 rounded-lg p-4">
<h2 class="text-lg font-semibold text-white mb-4">Steps ({{ plan.steps|length if plan.steps else 0 }})</h2>
<div class="space-y-3 max-h-96 overflow-y-auto">
{% for step in plan.get('steps', []) %}
<div class="bg-gray-900 rounded-lg p-3">
<div class="flex items-center justify-between mb-2">
<span class="font-medium text-white">{{ step.name or step.id or 'Step ' ~ loop.index }}</span>
<span class="px-2 py-0.5 text-xs rounded {% if step.status == 'completed' %}bg-green-600{% elif step.cached %}bg-blue-600{% else %}bg-gray-600{% endif %}">
{{ step.status or ('cached' if step.cached else 'pending') }}
</span>
</div>
{% if step.cache_id %}
<div class="text-xs text-gray-400 font-mono truncate">
{{ step.cache_id[:24] }}...
</div>
{% endif %}
</div>
{% else %}
<p class="text-gray-500">No steps defined</p>
{% endfor %}
</div>
</div>
</div>
<script>
// Render the execution-plan DAG once the page is ready. This script is
// emitted inside the {% if plan %} branch, so #dag-container is present.
document.addEventListener('DOMContentLoaded', function() {
// Node/edge list serialized server-side; empty when the plan has no steps.
const elements = {{ dag_elements | tojson | safe }};
if (elements.length > 0) {
cytoscape({
container: document.getElementById('dag-container'),
elements: elements,
style: [
{
selector: 'node',
style: {
// Per-node color/label come from each element's data payload.
'background-color': 'data(color)',
'label': 'data(label)',
'color': '#fff',
'text-valign': 'bottom',
'text-margin-y': 5,
'font-size': '10px'
}
},
{
selector: 'edge',
style: {
'width': 2,
'line-color': '#6b7280',
'target-arrow-color': '#6b7280',
'target-arrow-shape': 'triangle',
'curve-style': 'bezier'
}
}
],
// breadthfirst gives a simple layered view without the dagre plugin,
// which this page does not load.
layout: {
name: 'breadthfirst',
directed: true,
padding: 20
}
});
}
});
</script>
{% else %}
<div class="bg-gray-800 rounded-lg p-6 text-center">
<p class="text-gray-400">No execution plan available for this run.</p>
</div>
{% endif %}
{% endblock %}

View File

@@ -0,0 +1,99 @@
{# Plan node detail panel - loaded via HTMX #}
{% set status_color = 'green' if status in ('cached', 'completed') else 'yellow' %}
<div class="flex justify-between items-start mb-4">
<div>
<h4 class="text-lg font-semibold text-white">{{ step.name or step.step_id[:20] }}</h4>
<div class="flex items-center gap-2 mt-1">
<span class="px-2 py-0.5 rounded text-xs text-white" style="background-color: {{ node_color }}">
{{ step.node_type or 'EFFECT' }}
</span>
<span class="text-{{ status_color }}-400 text-xs">{{ status }}</span>
<span class="text-gray-500 text-xs">Level {{ step.level or 0 }}</span>
</div>
</div>
<button onclick="closeNodeDetail()" class="text-gray-400 hover:text-white p-1">
<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
</svg>
</button>
</div>
{# Output preview #}
{% if output_preview %}
<div class="mb-4">
<h5 class="text-sm font-medium text-gray-400 mb-2">Output</h5>
{% if output_media_type == 'video' %}
<video src="/cache/{{ cache_id }}/raw" controls muted class="w-full max-h-48 rounded-lg"></video>
{% elif output_media_type == 'image' %}
<img src="/cache/{{ cache_id }}/raw" class="w-full max-h-48 rounded-lg object-contain">
{% elif output_media_type == 'audio' %}
<audio src="/cache/{{ cache_id }}/raw" controls class="w-full"></audio>
{% endif %}
</div>
{% elif ipfs_cid %}
<div class="mb-4">
<h5 class="text-sm font-medium text-gray-400 mb-2">Output (IPFS)</h5>
<video src="{{ ipfs_gateway }}/{{ ipfs_cid }}" controls muted class="w-full max-h-48 rounded-lg"></video>
</div>
{% endif %}
{# Output link #}
{% if ipfs_cid %}
<a href="/ipfs/{{ ipfs_cid }}" class="flex items-center justify-between bg-gray-800 rounded p-2 hover:bg-gray-700 transition-colors text-xs mb-4">
<span class="font-mono text-gray-300 truncate">{{ ipfs_cid[:24] }}...</span>
<span class="px-2 py-1 bg-blue-600 text-white rounded ml-2">View</span>
</a>
{% elif has_cached and cache_id %}
<a href="/cache/{{ cache_id }}" class="flex items-center justify-between bg-gray-800 rounded p-2 hover:bg-gray-700 transition-colors text-xs mb-4">
<span class="font-mono text-gray-300 truncate">{{ cache_id[:24] }}...</span>
<span class="px-2 py-1 bg-blue-600 text-white rounded ml-2">View</span>
</a>
{% endif %}
{# Input media previews #}
{% if inputs %}
<div class="mt-4">
<h5 class="text-sm font-medium text-gray-400 mb-2">Inputs ({{ inputs|length }})</h5>
<div class="grid grid-cols-2 gap-2">
{% for inp in inputs %}
<a href="/cache/{{ inp.cache_id }}" class="block bg-gray-800 rounded-lg overflow-hidden hover:bg-gray-700 transition-colors">
{% if inp.media_type == 'video' %}
<video src="/cache/{{ inp.cache_id }}/raw" class="w-full h-20 object-cover rounded-t" muted></video>
{% elif inp.media_type == 'image' %}
<img src="/cache/{{ inp.cache_id }}/raw" class="w-full h-20 object-cover rounded-t">
{% else %}
<div class="w-full h-20 bg-gray-700 rounded-t flex items-center justify-center text-xs text-gray-400">
{{ inp.media_type or 'File' }}
</div>
{% endif %}
<div class="p-2">
<div class="text-xs text-white truncate">{{ inp.name }}</div>
<div class="text-xs text-gray-500 font-mono truncate">{{ inp.cache_id[:12] }}...</div>
</div>
</a>
{% endfor %}
</div>
</div>
{% endif %}
{# Parameters/Config #}
{% if config %}
<div class="mt-4">
<h5 class="text-sm font-medium text-gray-400 mb-2">Parameters</h5>
<div class="bg-gray-800 rounded p-3 text-xs space-y-1">
{% for key, value in config.items() %}
<div class="flex justify-between">
<span class="text-gray-400">{{ key }}:</span>
<span class="text-white">{{ value if value is string else value|tojson }}</span>
</div>
{% endfor %}
</div>
</div>
{% endif %}
{# Metadata #}
<div class="mt-4 text-xs text-gray-500 space-y-1">
<div><span class="text-gray-400">Step ID:</span> <span class="font-mono">{{ step.step_id[:32] }}...</span></div>
<div><span class="text-gray-400">Cache ID:</span> <span class="font-mono">{{ cache_id[:32] }}...</span></div>
</div>

View File

@@ -0,0 +1,90 @@
{% extends "base.html" %}
{% block title %}Storage Providers - Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-6xl mx-auto">
<h1 class="text-3xl font-bold mb-6">Storage Providers</h1>
<p class="text-gray-400 mb-8">
Configure your IPFS pinning services. Data is pinned to your accounts, giving you full control.
</p>
<!-- Provider Grid -->
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4 mb-8">
{% for key, info in providers_info.items() %}
<a href="/storage/type/{{ key }}"
class="bg-gray-800 border border-gray-700 rounded-lg p-4 hover:border-{{ info.color }}-500 transition-colors">
<div class="flex items-center justify-between mb-2">
<span class="text-lg font-medium text-{{ info.color }}-400">{{ info.name }}</span>
{% set count = storages | selectattr('provider_type', 'equalto', key) | list | length %}
{% if count > 0 %}
<span class="bg-{{ info.color }}-900 text-{{ info.color }}-300 px-2 py-0.5 rounded text-sm">
{{ count }} configured
</span>
{% endif %}
</div>
<p class="text-gray-400 text-sm">{{ info.desc }}</p>
</a>
{% endfor %}
</div>
<!-- Configured Providers -->
{% if storages %}
<h2 class="text-xl font-semibold mb-4">Your Storage Providers</h2>
<div class="space-y-4">
{% for storage in storages %}
{% set info = providers_info.get(storage.provider_type, {'name': storage.provider_type, 'color': 'gray'}) %}
<div class="bg-gray-800 border border-gray-700 rounded-lg p-4" id="storage-{{ storage.id }}">
<div class="flex items-center justify-between mb-3">
<div class="flex items-center space-x-3">
<span class="text-{{ info.color }}-400 font-medium">{{ storage.provider_name or info.name }}</span>
{% if storage.is_active %}
<span class="bg-green-900 text-green-300 px-2 py-0.5 rounded text-xs">Active</span>
{% else %}
<span class="bg-gray-700 text-gray-400 px-2 py-0.5 rounded text-xs">Inactive</span>
{% endif %}
</div>
<div class="flex items-center space-x-2">
<button hx-post="/storage/{{ storage.id }}/test"
hx-target="#test-result-{{ storage.id }}"
class="text-gray-400 hover:text-white text-sm">
Test
</button>
<button hx-delete="/storage/{{ storage.id }}"
hx-target="#storage-{{ storage.id }}"
hx-swap="outerHTML"
hx-confirm="Remove this storage provider?"
class="text-red-400 hover:text-red-300 text-sm">
Remove
</button>
</div>
</div>
<div class="grid grid-cols-3 gap-4 text-sm">
<div>
<span class="text-gray-500">Capacity:</span>
<span class="text-gray-300">{{ storage.capacity_gb }} GB</span>
</div>
<div>
<span class="text-gray-500">Used:</span>
<span class="text-gray-300">{{ (storage.used_bytes / 1024 / 1024 / 1024) | round(2) }} GB</span>
</div>
<div>
<span class="text-gray-500">Pins:</span>
<span class="text-gray-300">{{ storage.pin_count }}</span>
</div>
</div>
<div id="test-result-{{ storage.id }}" class="mt-2 text-sm"></div>
</div>
{% endfor %}
</div>
{% else %}
<div class="bg-gray-800 border border-gray-700 rounded-lg p-8 text-center">
<p class="text-gray-400 mb-4">No storage providers configured yet.</p>
<p class="text-gray-500 text-sm">Click on a provider above to add your first one.</p>
</div>
{% endif %}
</div>
{% endblock %}

View File

@@ -0,0 +1,152 @@
{% extends "base.html" %}
{% block title %}{{ provider_info.name }} - Storage - Art-DAG L1{% endblock %}
{% block content %}
<div class="max-w-4xl mx-auto">
<div class="flex items-center space-x-4 mb-6">
<a href="/storage" class="text-gray-400 hover:text-white">&larr; All Providers</a>
<h1 class="text-2xl font-bold text-{{ provider_info.color }}-400">{{ provider_info.name }}</h1>
</div>
<p class="text-gray-400 mb-8">{{ provider_info.desc }}</p>
<!-- Add New -->
<div class="bg-gray-800 border border-gray-700 rounded-lg p-6 mb-8">
<h2 class="text-lg font-semibold mb-4">Add {{ provider_info.name }} Account</h2>
<form hx-post="/storage/add"
hx-target="#add-result"
class="space-y-4">
<input type="hidden" name="provider_type" value="{{ provider_type }}">
<div>
<label class="block text-gray-400 text-sm mb-1">Name (optional)</label>
<input type="text" name="provider_name"
placeholder="{{ provider_type }}-1"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
{% if provider_type == 'pinata' %}
<div class="grid grid-cols-2 gap-4">
<div>
<label class="block text-gray-400 text-sm mb-1">API Key *</label>
<input type="text" name="api_key" required
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
<div>
<label class="block text-gray-400 text-sm mb-1">Secret Key *</label>
<input type="password" name="secret_key" required
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
</div>
{% elif provider_type in ['web3storage', 'nftstorage'] %}
<div>
<label class="block text-gray-400 text-sm mb-1">API Token *</label>
<input type="password" name="api_token" required
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
{% elif provider_type == 'infura' %}
<div class="grid grid-cols-2 gap-4">
<div>
<label class="block text-gray-400 text-sm mb-1">Project ID *</label>
<input type="text" name="project_id" required
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
<div>
<label class="block text-gray-400 text-sm mb-1">Project Secret *</label>
<input type="password" name="project_secret" required
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
</div>
{% elif provider_type in ['filebase', 'storj'] %}
<div class="grid grid-cols-2 gap-4">
<div>
<label class="block text-gray-400 text-sm mb-1">Access Key *</label>
<input type="text" name="access_key" required
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
<div>
<label class="block text-gray-400 text-sm mb-1">Secret Key *</label>
<input type="password" name="secret_key" required
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
</div>
<div>
<label class="block text-gray-400 text-sm mb-1">Bucket *</label>
<input type="text" name="bucket" required
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
{% elif provider_type == 'local' %}
<div>
<label class="block text-gray-400 text-sm mb-1">Path *</label>
<input type="text" name="path" required placeholder="/data/ipfs"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
{% endif %}
<div>
<label class="block text-gray-400 text-sm mb-1">Capacity (GB)</label>
<input type="number" name="capacity_gb" value="5" min="1"
class="w-32 bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
</div>
<div class="pt-2">
<button type="submit"
class="bg-{{ provider_info.color }}-600 hover:bg-{{ provider_info.color }}-700 px-4 py-2 rounded font-medium">
Add Provider
</button>
</div>
<div id="add-result"></div>
</form>
</div>
<!-- Existing Configs -->
{% if storages %}
<h2 class="text-lg font-semibold mb-4">Configured Accounts</h2>
<div class="space-y-4">
{% for storage in storages %}
<div class="bg-gray-800 border border-gray-700 rounded-lg p-4" id="storage-{{ storage.id }}">
<div class="flex items-center justify-between mb-3">
<div class="flex items-center space-x-3">
<span class="font-medium">{{ storage.provider_name }}</span>
{% if storage.is_active %}
<span class="bg-green-900 text-green-300 px-2 py-0.5 rounded text-xs">Active</span>
{% endif %}
</div>
<div class="flex items-center space-x-3">
<button hx-post="/storage/{{ storage.id }}/test"
hx-target="#test-{{ storage.id }}"
class="text-gray-400 hover:text-white text-sm">
Test Connection
</button>
<button hx-delete="/storage/{{ storage.id }}"
hx-target="#storage-{{ storage.id }}"
hx-swap="outerHTML"
hx-confirm="Remove this storage provider?"
class="text-red-400 hover:text-red-300 text-sm">
Remove
</button>
</div>
</div>
{% if storage.config_display %}
<div class="text-sm text-gray-400 space-x-4">
{% for key, value in storage.config_display.items() %}
<span>{{ key }}: <code class="text-gray-300">{{ value }}</code></span>
{% endfor %}
</div>
{% endif %}
<div id="test-{{ storage.id }}" class="mt-2 text-sm"></div>
</div>
{% endfor %}
</div>
{% endif %}
</div>
{% endblock %}

197
l1/app/types.py Normal file
View File

@@ -0,0 +1,197 @@
"""
Type definitions for Art DAG L1 server.
Uses TypedDict for configuration structures to enable mypy checking.
"""
from typing import Any, Dict, List, Optional, TypedDict, Union
from typing_extensions import NotRequired
# === Node Config Types ===
class SourceConfig(TypedDict, total=False):
    """Config for SOURCE nodes."""
    cid: str  # Content ID (IPFS CID or SHA3-256 hash)
    asset: str  # Asset name from registry
    input: bool  # True if this is a variable input
    name: str  # Human-readable name for variable inputs
    description: str  # Description for variable inputs


class EffectConfig(TypedDict, total=False):
    """Config for EFFECT nodes.

    Effect parameters are carried as additional, effect-specific keys;
    ``intensity`` and ``level`` are just two commonly-used examples.
    """
    effect: str  # Effect name
    cid: str  # Effect CID (for cached/IPFS effects)
    # Effect parameters are additional keys
    intensity: float
    level: float


class SequenceConfig(TypedDict, total=False):
    """Config for SEQUENCE nodes."""
    transition: Dict[str, Any]  # Transition config


class SegmentConfig(TypedDict, total=False):
    """Config for SEGMENT nodes."""
    start: float  # segment start -- presumably seconds; confirm against executor
    end: float
    duration: float


# Union of all config types; the trailing Dict[str, Any] covers free-form
# configs (e.g. effect parameters) not expressible as one of the TypedDicts.
NodeConfig = Union[SourceConfig, EffectConfig, SequenceConfig, SegmentConfig, Dict[str, Any]]
# === Node Types ===
class CompiledNode(TypedDict):
    """Node as produced by the S-expression compiler."""
    id: str
    type: str  # "SOURCE", "EFFECT", "SEQUENCE", etc.
    config: Dict[str, Any]
    inputs: List[str]
    name: NotRequired[str]


class TransformedNode(TypedDict):
    """Node after transformation for artdag execution.

    Same shape as CompiledNode but with renamed keys
    (``id`` -> ``node_id``, ``type`` -> ``node_type``).
    """
    node_id: str
    node_type: str
    config: Dict[str, Any]
    inputs: List[str]
    name: NotRequired[str]
# === DAG Types ===
class CompiledDAG(TypedDict):
    """DAG as produced by the S-expression compiler."""
    nodes: List[CompiledNode]
    output: str  # id of the DAG's output node


class TransformedDAG(TypedDict):
    """DAG after transformation for artdag execution (nodes keyed by node id)."""
    nodes: Dict[str, TransformedNode]
    output_id: str
    metadata: NotRequired[Dict[str, Any]]
# === Registry Types ===
class AssetEntry(TypedDict, total=False):
    """Asset in the recipe registry."""
    cid: str
    url: str


class EffectEntry(TypedDict, total=False):
    """Effect in the recipe registry."""
    cid: str
    url: str
    temporal: bool


class Registry(TypedDict):
    """Recipe registry containing assets and effects, keyed by name."""
    assets: Dict[str, AssetEntry]
    effects: Dict[str, EffectEntry]
# === Visualization Types ===
class VisNodeData(TypedDict, total=False):
    """Data for a visualization node (Cytoscape.js format)."""
    id: str
    label: str
    nodeType: str
    isOutput: bool


class VisNode(TypedDict):
    """Visualization node wrapper (Cytoscape elements use a ``data`` envelope)."""
    data: VisNodeData


class VisEdgeData(TypedDict):
    """Data for a visualization edge."""
    source: str
    target: str


class VisEdge(TypedDict):
    """Visualization edge wrapper."""
    data: VisEdgeData


class VisualizationDAG(TypedDict):
    """DAG structure for Cytoscape.js visualization."""
    nodes: List[VisNode]
    edges: List[VisEdge]
# === Recipe Types ===
class Recipe(TypedDict, total=False):
    """Compiled recipe structure (all fields optional; populated as available)."""
    name: str
    version: str
    description: str
    owner: str
    registry: Registry  # assets/effects referenced by the DAG
    dag: CompiledDAG
    recipe_id: str
    ipfs_cid: str
    sexp: str  # S-expression form of the recipe
    step_count: int
    error: str  # present when compilation failed -- TODO confirm semantics
# === API Request/Response Types ===
# Mapping of input names to CIDs for recipe execution.
# BUG FIX: this was previously declared as an *empty* TypedDict (whose own
# comment admitted it is "actually just Dict[str, str]"); an empty TypedDict
# tells type checkers that no keys are allowed, which rejects every real use.
# A plain alias expresses the arbitrary-key mapping correctly and is
# backward-compatible as an annotation.
RecipeRunInputs = Dict[str, str]
class RunResult(TypedDict, total=False):
    """Result of a recipe run (fields are filled in as the run progresses)."""
    run_id: str
    status: str  # "pending", "running", "completed", "failed"
    recipe: str
    recipe_name: str
    inputs: List[str]
    output_cid: str
    ipfs_cid: str
    provenance_cid: str
    error: str
    created_at: str
    completed_at: str
    actor_id: str
    celery_task_id: str
    output_name: str
# === Helper functions for type narrowing ===
def is_source_node(node: "TransformedNode") -> bool:
    """Return True when the transformed node has node_type SOURCE."""
    node_type = node.get("node_type")
    return node_type == "SOURCE"
def is_effect_node(node: "TransformedNode") -> bool:
    """Return True when the transformed node has node_type EFFECT."""
    node_type = node.get("node_type")
    return node_type == "EFFECT"
def is_variable_input(config: Dict[str, Any]) -> bool:
    """Return True when a SOURCE node config marks a variable (user-supplied) input."""
    return True if config.get("input") else False
def get_effect_cid(config: Dict[str, Any]) -> Optional[str]:
    """Return the effect's CID, preferring the 'cid' key over the legacy 'hash' key."""
    primary = config.get("cid")
    if primary:
        return primary
    return config.get("hash")

0
l1/app/utils/__init__.py Normal file
View File

View File

@@ -0,0 +1,84 @@
"""HTTP Signature verification for incoming AP-style inbox requests.
Implements the same RSA-SHA256 / PKCS1v15 scheme used by the coop's
shared/utils/http_signatures.py, but only the verification side.
"""
from __future__ import annotations
import base64
import re
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding
def verify_request_signature(
    public_key_pem: str,
    signature_header: str,
    method: str,
    path: str,
    headers: dict[str, str],
) -> bool:
    """Verify an incoming HTTP Signature (RSA-SHA256 / PKCS#1 v1.5).

    Reconstructs the signed string from the header names listed in the
    signature's ``headers`` parameter (defaulting to ``date``), then checks
    the base64 signature against the sender's public key.

    Args:
        public_key_pem: PEM-encoded public key of the sender.
        signature_header: Value of the ``Signature`` header.
        method: HTTP method (GET, POST, etc.).
        path: Request path (e.g. ``/inbox``).
        headers: All request headers (case-insensitive keys).

    Returns:
        True if the signature is valid; False on any parse, key-load or
        verification failure (the function fails closed rather than raising).

    NOTE(review): only the signature itself is checked here; Digest and
    date-freshness validation, if required, are the caller's responsibility.
    """
    parts = _parse_signature_header(signature_header)
    signed_headers = parts.get("headers", "date").split()
    signature_b64 = parts.get("signature", "")
    if not signature_b64:
        # No signature parameter at all - nothing to verify.
        return False
    # Reconstruct the signed string
    lc_headers = {k.lower(): v for k, v in headers.items()}
    lines: list[str] = []
    for h in signed_headers:
        if h == "(request-target)":
            lines.append(f"(request-target): {method.lower()} {path}")
        else:
            lines.append(f"{h}: {lc_headers.get(h, '')}")
    signed_string = "\n".join(lines)
    try:
        # BUG FIX: key loading used to sit outside the try, so a malformed
        # PEM raised out of this function instead of failing verification.
        public_key = serialization.load_pem_public_key(public_key_pem.encode())
        public_key.verify(
            base64.b64decode(signature_b64),
            signed_string.encode(),
            padding.PKCS1v15(),
            hashes.SHA256(),
        )
        return True
    except Exception:
        return False
def parse_key_id(signature_header: str) -> str:
    """Extract the actor URL from a Signature header's keyId.

    keyId is typically ``https://domain/users/username#main-key``; the
    fragment (``#main-key``) is stripped before returning.
    """
    fields = _parse_signature_header(signature_header)
    raw_key_id = fields.get("keyId", "")
    return re.sub(r"#.*$", "", raw_key_id)
def _parse_signature_header(header: str) -> dict[str, str]:
"""Parse a Signature header into its component parts."""
parts: dict[str, str] = {}
for part in header.split(","):
part = part.strip()
eq = part.find("=")
if eq < 0:
continue
key = part[:eq]
val = part[eq + 1:].strip('"')
parts[key] = val
return parts

BIN
l1/artdag-client.tar.gz Normal file

Binary file not shown.

37
l1/build-client.sh Executable file
View File

@@ -0,0 +1,37 @@
#!/bin/bash
# Build the artdag-client tarball.
# Run during deployment to create the downloadable client package.
#
# BUG FIX: the scratch directory is now removed via an EXIT trap, so it is
# cleaned up even when a step after the clone fails (previously a tar or ls
# failure leaked the mktemp directory).
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CLIENT_REPO="https://git.rose-ash.com/art-dag/client.git"
TEMP_DIR=$(mktemp -d)
OUTPUT_FILE="$SCRIPT_DIR/artdag-client.tar.gz"

# Always clean up the scratch directory, whatever happens.
trap 'rm -rf "$TEMP_DIR"' EXIT

echo "Building artdag-client.tar.gz..."

# Clone the client repo (internal remote first, GitHub mirror as fallback).
git clone --depth 1 "$CLIENT_REPO" "$TEMP_DIR/artdag-client" 2>/dev/null || {
    echo "Failed to clone client repo, trying alternative..."
    git clone --depth 1 "https://github.com/gilesbradshaw/art-client.git" "$TEMP_DIR/artdag-client" 2>/dev/null || {
        echo "Error: Could not clone client repository"
        exit 1
    }
}

# Strip VCS metadata and Python caches from the package.
rm -rf "$TEMP_DIR/artdag-client/.git"
rm -rf "$TEMP_DIR/artdag-client/__pycache__"

# Create the tarball without changing the caller's working directory.
tar -czf "$OUTPUT_FILE" -C "$TEMP_DIR" artdag-client

echo "Created: $OUTPUT_FILE"
ls -lh "$OUTPUT_FILE"

872
l1/cache_manager.py Normal file
View File

@@ -0,0 +1,872 @@
# art-celery/cache_manager.py
"""
Cache management for Art DAG L1 server.
Integrates artdag's Cache, ActivityStore, and ActivityManager to provide:
- Content-addressed caching with both node_id and cid
- Activity tracking for runs (input/output/intermediate relationships)
- Deletion rules enforcement (shared items protected)
- L2 ActivityPub integration for "shared" status checks
- IPFS as durable backing store (local cache as hot storage)
- Redis-backed indexes for multi-worker consistency
"""
import hashlib
import json
import logging
import os
import shutil
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Callable, Dict, List, Optional, Set, TYPE_CHECKING
import requests
if TYPE_CHECKING:
import redis
from artdag import Cache, CacheEntry, DAG, Node, NodeType
from artdag.activities import Activity, ActivityStore, ActivityManager, make_is_shared_fn
import ipfs_client
logger = logging.getLogger(__name__)
def file_hash(path: Path, algorithm: str = "sha3_256") -> str:
    """Compute the local content hash of a file (fallback when IPFS is unavailable).

    Args:
        path: File to hash; symlinks are resolved to their target first.
        algorithm: Any algorithm name accepted by hashlib.new().

    Returns:
        The hex digest of the file contents.
    """
    digest = hashlib.new(algorithm)
    target = path.resolve() if path.is_symlink() else path
    with open(target, "rb") as fh:
        while True:
            block = fh.read(65536)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
@dataclass
class CachedFile:
    """
    A cached file with both identifiers.

    Provides a unified view combining:
    - node_id: computation identity (for DAG caching)
    - cid: file content identity (for external references)
    """
    node_id: str  # computation identity (DAG cache key)
    cid: str  # content identity (IPFS CID or local hash)
    path: Path  # local path of the cached file
    size_bytes: int
    node_type: str  # e.g. "upload", "effect", "legacy"
    created_at: float  # unix timestamp

    @classmethod
    def from_cache_entry(cls, entry: CacheEntry) -> "CachedFile":
        """Build a CachedFile view from an artdag CacheEntry."""
        return cls(
            node_id=entry.node_id,
            cid=entry.cid,
            path=entry.output_path,
            size_bytes=entry.size_bytes,
            node_type=entry.node_type,
            created_at=entry.created_at,
        )
class L2SharedChecker:
    """
    Checks whether content is shared (published) via the L2 ActivityPub server.

    Results are memoised for ``cache_ttl`` seconds to avoid repeated API calls.
    """

    def __init__(self, l2_server: str, cache_ttl: int = 300):
        self.l2_server = l2_server
        self.cache_ttl = cache_ttl
        # cid -> (is_shared, unix timestamp when cached)
        self._cache: Dict[str, tuple[bool, float]] = {}

    def is_shared(self, cid: str) -> bool:
        """Check if cid has been published to L2 (cached, fail-safe on errors)."""
        import time
        now = time.time()
        # Serve from the memo while it is still fresh.
        cached = self._cache.get(cid)
        if cached is not None:
            is_shared, cached_at = cached
            if now - cached_at < self.cache_ttl:
                logger.debug(f"L2 check (cached): {cid[:16]}... = {is_shared}")
                return is_shared
        # Ask L2 directly.
        try:
            url = f"{self.l2_server}/assets/by-hash/{cid}"
            logger.info(f"L2 check: GET {url}")
            resp = requests.get(url, timeout=5)
            logger.info(f"L2 check response: {resp.status_code}")
            is_shared = resp.status_code == 200
        except Exception as e:
            logger.warning(f"Failed to check L2 for {cid}: {e}")
            # On error, assume IS shared (safer - prevents accidental deletion)
            is_shared = True
        self._cache[cid] = (is_shared, now)
        return is_shared

    def invalidate(self, cid: str):
        """Drop the memoised answer for cid (call after publishing)."""
        self._cache.pop(cid, None)

    def mark_shared(self, cid: str):
        """Record cid as shared without querying (call after successful publish)."""
        import time
        self._cache[cid] = (True, time.time())
class L1CacheManager:
"""
Unified cache manager for Art DAG L1 server.
Combines:
- artdag Cache for file storage
- ActivityStore for run tracking
- ActivityManager for deletion rules
- L2 integration for shared status
Provides both node_id and cid based access.
"""
    def __init__(
        self,
        cache_dir: Path | str,
        l2_server: str = "http://localhost:8200",
        redis_client: Optional["redis.Redis"] = None,
    ):
        """Wire together the cache, activity store, L2 checker and indexes.

        Args:
            cache_dir: Root directory for all cached data (created if missing).
            l2_server: Base URL of the L2 ActivityPub server.
            redis_client: Optional Redis client shared between workers.
        """
        self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        # Redis for shared state between workers
        self._redis = redis_client
        self._redis_content_key = "artdag:content_index"
        self._redis_ipfs_key = "artdag:ipfs_index"
        # artdag components
        self.cache = Cache(self.cache_dir / "nodes")
        self.activity_store = ActivityStore(self.cache_dir / "activities")
        # L2 shared checker
        self.l2_checker = L2SharedChecker(l2_server)
        # Activity manager with L2-based is_shared
        self.activity_manager = ActivityManager(
            cache=self.cache,
            activity_store=self.activity_store,
            is_shared_fn=self._is_shared_by_node_id,
        )
        # Legacy files directory (for files uploaded directly by cid)
        self.legacy_dir = self.cache_dir / "legacy"
        self.legacy_dir.mkdir(parents=True, exist_ok=True)

    # ============ Redis Index (no JSON files) ============
    #
    # Content index maps: CID (content hash or IPFS CID) -> node_id (code hash)
    # IPFS index maps: node_id -> IPFS CID
    #
    # Database is the ONLY source of truth for cache_id -> ipfs_cid mapping.
    # No fallbacks - failures raise exceptions.
def _run_async(self, coro):
"""Run async coroutine from sync context.
Always creates a fresh event loop to avoid issues with Celery's
prefork workers where loops may be closed by previous tasks.
"""
import asyncio
# Check if we're already in an async context
try:
asyncio.get_running_loop()
# We're in an async context - use a thread with its own loop
import threading
result = [None]
error = [None]
def run_in_thread():
try:
new_loop = asyncio.new_event_loop()
asyncio.set_event_loop(new_loop)
try:
result[0] = new_loop.run_until_complete(coro)
finally:
new_loop.close()
except Exception as e:
error[0] = e
thread = threading.Thread(target=run_in_thread)
thread.start()
thread.join(timeout=30)
if error[0]:
raise error[0]
return result[0]
except RuntimeError:
# No running loop - create a fresh one (don't reuse potentially closed loops)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
def _set_content_index(self, cache_id: str, ipfs_cid: str):
"""Set content index entry in database (cache_id -> ipfs_cid)."""
import database
async def save_to_db():
import asyncpg
conn = await asyncpg.connect(database.DATABASE_URL)
try:
await conn.execute(
"""
INSERT INTO cache_items (cid, ipfs_cid)
VALUES ($1, $2)
ON CONFLICT (cid) DO UPDATE SET ipfs_cid = $2
""",
cache_id, ipfs_cid
)
finally:
await conn.close()
self._run_async(save_to_db())
logger.info(f"Indexed in database: {cache_id[:16]}... -> {ipfs_cid}")
def _get_content_index(self, cache_id: str) -> Optional[str]:
"""Get content index entry (cache_id -> ipfs_cid) from database."""
import database
async def get_from_db():
import asyncpg
conn = await asyncpg.connect(database.DATABASE_URL)
try:
row = await conn.fetchrow(
"SELECT ipfs_cid FROM cache_items WHERE cid = $1",
cache_id
)
return {"ipfs_cid": row["ipfs_cid"]} if row else None
finally:
await conn.close()
result = self._run_async(get_from_db())
if result and result.get("ipfs_cid"):
return result["ipfs_cid"]
return None
def _del_content_index(self, cache_id: str):
"""Delete content index entry from database."""
import database
async def delete_from_db():
import asyncpg
conn = await asyncpg.connect(database.DATABASE_URL)
try:
await conn.execute("DELETE FROM cache_items WHERE cid = $1", cache_id)
finally:
await conn.close()
self._run_async(delete_from_db())
def _set_ipfs_index(self, cid: str, ipfs_cid: str):
"""Set IPFS index entry in Redis."""
if self._redis:
try:
self._redis.hset(self._redis_ipfs_key, cid, ipfs_cid)
except Exception as e:
logger.warning(f"Failed to set IPFS index in Redis: {e}")
def _get_ipfs_cid_from_index(self, cid: str) -> Optional[str]:
"""Get IPFS CID from Redis."""
if self._redis:
try:
val = self._redis.hget(self._redis_ipfs_key, cid)
if val:
return val.decode() if isinstance(val, bytes) else val
except Exception as e:
logger.warning(f"Failed to get IPFS CID from Redis: {e}")
return None
    def get_ipfs_cid(self, cid: str) -> Optional[str]:
        """Get the IPFS CID recorded for a content hash, or None if unknown."""
        return self._get_ipfs_cid_from_index(cid)
    def _is_shared_by_node_id(self, cid: str) -> bool:
        """Check if a cid is shared (published) via L2; used by ActivityManager."""
        return self.l2_checker.is_shared(cid)
def _load_meta(self, cid: str) -> dict:
"""Load metadata for a cached file."""
meta_path = self.cache_dir / f"{cid}.meta.json"
if meta_path.exists():
with open(meta_path) as f:
return json.load(f)
return {}
def is_pinned(self, cid: str) -> tuple[bool, str]:
"""
Check if a cid is pinned (non-deletable).
Returns:
(is_pinned, reason) tuple
"""
meta = self._load_meta(cid)
if meta.get("pinned"):
return True, meta.get("pin_reason", "published")
return False, ""
def _save_meta(self, cid: str, **updates) -> dict:
"""Save/update metadata for a cached file."""
meta = self._load_meta(cid)
meta.update(updates)
meta_path = self.cache_dir / f"{cid}.meta.json"
with open(meta_path, "w") as f:
json.dump(meta, f, indent=2)
return meta
    def pin(self, cid: str, reason: str = "published") -> None:
        """Mark an item as pinned (non-deletable); reason is stored in metadata."""
        self._save_meta(cid, pinned=True, pin_reason=reason)
    # ============ File Storage ============
    def put(
        self,
        source_path: Path,
        node_type: str = "upload",
        node_id: Optional[str] = None,
        cache_id: Optional[str] = None,
        execution_time: float = 0.0,
        move: bool = False,
        skip_ipfs: bool = False,
    ) -> tuple[CachedFile, Optional[str]]:
        """
        Store a file in the cache and optionally upload to IPFS.

        Files are stored by IPFS CID when skip_ipfs=False (default), or by
        local content hash when skip_ipfs=True. The cache_id parameter creates
        an index from cache_id -> CID for code-addressed lookups.

        Args:
            source_path: Path to file to cache
            node_type: Type of node (e.g., "upload", "source", "effect")
            node_id: DEPRECATED - ignored, always uses CID
            cache_id: Optional code-addressed cache ID to index
            execution_time: How long the operation took
            move: If True, move instead of copy
            skip_ipfs: If True, skip IPFS upload and use local hash (faster for large files)

        Returns:
            Tuple of (CachedFile with both node_id and cid, CID or None if skip_ipfs)

        Raises:
            RuntimeError: If skip_ipfs is False and the IPFS upload fails.
        """
        if skip_ipfs:
            # Use local content hash instead of IPFS CID (much faster)
            cid = file_hash(source_path)
            ipfs_cid = None
            logger.info(f"put: Using local hash (skip_ipfs=True): {cid[:16]}...")
        else:
            # Upload to IPFS first to get the CID (primary identifier)
            cid = ipfs_client.add_file(source_path)
            if not cid:
                raise RuntimeError(f"IPFS upload failed for {source_path}. IPFS is required.")
            ipfs_cid = cid
        # Always store by IPFS CID (node_id parameter is deprecated)
        node_id = cid
        # Check if already cached (by node_id)
        existing = self.cache.get_entry(node_id)
        if existing and existing.output_path.exists():
            return CachedFile.from_cache_entry(existing), ipfs_cid
        # Compute local hash BEFORE moving the file (for dual-indexing)
        # Only needed if we uploaded to IPFS (to map local hash -> IPFS CID)
        local_hash = None
        if not skip_ipfs and self._is_ipfs_cid(cid):
            local_hash = file_hash(source_path)
        # Store in local cache
        logger.info(f"put: Storing in cache with node_id={node_id[:16]}...")
        self.cache.put(
            node_id=node_id,
            source_path=source_path,
            node_type=node_type,
            execution_time=execution_time,
            move=move,
        )
        entry = self.cache.get_entry(node_id)
        logger.info(f"put: After cache.put, get_entry(node_id={node_id[:16]}...) returned entry={entry is not None}, path={entry.output_path if entry else None}")
        # Verify we can retrieve it (diagnostic only; the result is just logged)
        verify_path = self.cache.get(node_id)
        logger.info(f"put: Verify cache.get(node_id={node_id[:16]}...) = {verify_path}")
        # Index by cache_id if provided (code-addressed cache lookup)
        # This allows get_by_cid(cache_id) to find files stored by IPFS CID
        if cache_id and cache_id != cid:
            self._set_content_index(cache_id, cid)
            logger.info(f"put: Indexed cache_id {cache_id[:16]}... -> IPFS {cid}")
        # Also index by local hash for content-based lookup
        if local_hash and local_hash != cid:
            self._set_content_index(local_hash, cid)
            logger.debug(f"Indexed local hash {local_hash[:16]}... -> IPFS {cid}")
        logger.info(f"Cached: {cid[:16]}..." + (" (local only)" if skip_ipfs else " (IPFS)"))
        return CachedFile.from_cache_entry(entry), ipfs_cid if not skip_ipfs else None
    def get_by_node_id(self, node_id: str) -> Optional[Path]:
        """Get cached file path by node_id (local cache only, no IPFS fallback)."""
        return self.cache.get(node_id)
def _is_ipfs_cid(self, identifier: str) -> bool:
"""Check if identifier looks like an IPFS CID."""
# CIDv0 starts with "Qm", CIDv1 starts with "bafy" or other multibase prefixes
return identifier.startswith("Qm") or identifier.startswith("bafy") or identifier.startswith("baf")
    def get_by_cid(self, cid: str) -> Optional[Path]:
        """Get cached file path by cid or IPFS CID. Falls back to IPFS if not in local cache.

        Lookup order (first hit wins):
          1. content index (cid -> node_id), then the structured cache / filesystem
          2. direct cache lookup treating cid as node_id (for uploads, node_id == cid)
          3. scan of cache entries by cid
          4. legacy flat file at {cache_dir}/{cid}
          5. fetch from IPFS - directly, or via a mapped IPFS CID from the index

        Returns:
            Local Path of the content, or None when it cannot be found anywhere.
        """
        logger.info(f"get_by_cid: Looking for cid={cid[:16]}...")
        # Check index first (Redis then local)
        node_id = self._get_content_index(cid)
        logger.info(f"get_by_cid: Index lookup returned node_id={node_id[:16] if node_id else None}...")
        if node_id:
            path = self.cache.get(node_id)
            logger.info(f"get_by_cid: cache.get(node_id={node_id[:16]}...) returned path={path}")
            if path and path.exists():
                logger.info(f"get_by_cid: Found via index: {path}")
                return path
            # artdag Cache doesn't know about entry - check filesystem directly
            # Files are stored at {cache_dir}/nodes/{node_id}/output.*
            nodes_dir = self.cache_dir / "nodes" / node_id
            if nodes_dir.exists():
                for f in nodes_dir.iterdir():
                    if f.name.startswith("output."):
                        logger.info(f"get_by_cid: Found on filesystem: {f}")
                        return f
        # For uploads, node_id == cid, so try direct lookup
        # This works even if cache index hasn't been reloaded
        path = self.cache.get(cid)
        logger.info(f"get_by_cid: Direct cache.get({cid[:16]}...) returned: {path}")
        if path and path.exists():
            self._set_content_index(cid, cid)
            return path
        # Check filesystem directly for cid as node_id
        nodes_dir = self.cache_dir / "nodes" / cid
        if nodes_dir.exists():
            for f in nodes_dir.iterdir():
                if f.name.startswith("output."):
                    logger.info(f"get_by_cid: Found on filesystem (direct): {f}")
                    self._set_content_index(cid, cid)
                    return f
        # Scan cache entries (fallback for new structure)
        entry = self.cache.find_by_cid(cid)
        logger.info(f"get_by_cid: find_by_cid({cid[:16]}...) returned entry={entry}")
        if entry and entry.output_path.exists():
            logger.info(f"get_by_cid: Found via scan: {entry.output_path}")
            self._set_content_index(cid, entry.node_id)
            return entry.output_path
        # Check legacy location (files stored directly as CACHE_DIR/{cid})
        legacy_path = self.cache_dir / cid
        logger.info(f"get_by_cid: Checking legacy path: {legacy_path} exists={legacy_path.exists()}")
        if legacy_path.exists() and legacy_path.is_file():
            logger.info(f"get_by_cid: Found at legacy path: {legacy_path}")
            return legacy_path
        # Fetch from IPFS - this is the source of truth for all content
        if self._is_ipfs_cid(cid):
            logger.info(f"get_by_cid: Fetching from IPFS: {cid[:16]}...")
            recovery_path = self.legacy_dir / cid
            recovery_path.parent.mkdir(parents=True, exist_ok=True)
            if ipfs_client.get_file(cid, str(recovery_path)):
                logger.info(f"get_by_cid: Fetched from IPFS: {recovery_path}")
                self._set_content_index(cid, cid)
                return recovery_path
            else:
                logger.warning(f"get_by_cid: IPFS fetch failed for {cid[:16]}...")
        # Also try with a mapped IPFS CID if different from cid
        ipfs_cid = self._get_ipfs_cid_from_index(cid)
        if ipfs_cid and ipfs_cid != cid:
            logger.info(f"get_by_cid: Fetching from IPFS via mapping: {ipfs_cid[:16]}...")
            recovery_path = self.legacy_dir / cid
            recovery_path.parent.mkdir(parents=True, exist_ok=True)
            if ipfs_client.get_file(ipfs_cid, str(recovery_path)):
                logger.info(f"get_by_cid: Fetched from IPFS: {recovery_path}")
                return recovery_path
        return None
    def has_content(self, cid: str) -> bool:
        """Check if content exists locally or can be fetched (delegates to get_by_cid)."""
        return self.get_by_cid(cid) is not None
def get_entry_by_cid(self, cid: str) -> Optional[CacheEntry]:
"""Get cache entry by cid."""
node_id = self._get_content_index(cid)
if node_id:
return self.cache.get_entry(node_id)
return self.cache.find_by_cid(cid)
def list_all(self) -> List[CachedFile]:
"""List all cached files."""
files = []
seen_hashes = set()
# New cache structure entries
for entry in self.cache.list_entries():
files.append(CachedFile.from_cache_entry(entry))
if entry.cid:
seen_hashes.add(entry.cid)
# Legacy files stored directly in cache_dir (old structure)
# These are files named by cid directly in CACHE_DIR
for f in self.cache_dir.iterdir():
# Skip directories and special files
if not f.is_file():
continue
# Skip metadata/auxiliary files
if f.suffix in ('.json', '.mp4'):
continue
# Skip if name doesn't look like a hash (64 hex chars)
if len(f.name) != 64 or not all(c in '0123456789abcdef' for c in f.name):
continue
# Skip if already seen via new cache
if f.name in seen_hashes:
continue
files.append(CachedFile(
node_id=f.name,
cid=f.name,
path=f,
size_bytes=f.stat().st_size,
node_type="legacy",
created_at=f.stat().st_mtime,
))
seen_hashes.add(f.name)
return files
def list_by_type(self, node_type: str) -> List[str]:
"""
List CIDs of all cached files of a specific type.
Args:
node_type: Type to filter by (e.g., "recipe", "upload", "effect")
Returns:
List of CIDs (IPFS CID if available, otherwise node_id)
"""
cids = []
for entry in self.cache.list_entries():
if entry.node_type == node_type:
# Return node_id which is the IPFS CID for uploaded content
cids.append(entry.node_id)
return cids
    # ============ Activity Tracking ============
    def record_activity(self, dag: DAG, run_id: Optional[str] = None) -> Activity:
        """
        Record a DAG execution as an activity.

        Args:
            dag: The executed DAG
            run_id: Optional run ID to use as activity_id

        Returns:
            The created Activity
        """
        activity = Activity.from_dag(dag, activity_id=run_id)
        self.activity_store.add(activity)
        return activity
def record_simple_activity(
self,
input_hashes: List[str],
output_cid: str,
run_id: str = None,
) -> Activity:
"""
Record a simple (non-DAG) execution as an activity.
For legacy single-effect runs that don't use full DAG execution.
Uses cid as node_id.
"""
activity = Activity(
activity_id=run_id or str(hash((tuple(input_hashes), output_cid))),
input_ids=sorted(input_hashes),
output_id=output_cid,
intermediate_ids=[],
created_at=datetime.now(timezone.utc).timestamp(),
status="completed",
)
self.activity_store.add(activity)
return activity
    def get_activity(self, activity_id: str) -> Optional[Activity]:
        """Get activity by ID, or None if unknown."""
        return self.activity_store.get(activity_id)

    def list_activities(self) -> List[Activity]:
        """List all recorded activities."""
        return self.activity_store.list()

    def find_activities_by_inputs(self, input_hashes: List[str]) -> List[Activity]:
        """Find activities with matching inputs (for UI grouping)."""
        return self.activity_store.find_by_input_ids(input_hashes)
# ============ Deletion Rules ============
def can_delete(self, cid: str) -> tuple[bool, str]:
"""
Check if a cached item can be deleted.
Returns:
(can_delete, reason) tuple
"""
# Check if pinned (published or input to published)
pinned, reason = self.is_pinned(cid)
if pinned:
return False, f"Item is pinned ({reason})"
# Find node_id for this content
node_id = self._get_content_index(cid) or cid
# Check if it's an input or output of any activity
for activity in self.activity_store.list():
if node_id in activity.input_ids:
return False, f"Item is input to activity {activity.activity_id}"
if node_id == activity.output_id:
return False, f"Item is output of activity {activity.activity_id}"
return True, "OK"
def can_discard_activity(self, activity_id: str) -> tuple[bool, str]:
    """Decide whether an activity record may be discarded.

    Discarding is refused when the activity is unknown or when any of its
    participating nodes maps to a pinned cache entry.

    Returns:
        (can_discard, reason) tuple
    """
    activity = self.activity_store.get(activity_id)
    if activity is None:
        return False, "Activity not found"
    for node_id in activity.all_node_ids:
        entry = self.cache.get_entry(node_id)
        if entry is None:
            continue
        pinned, reason = self.is_pinned(entry.cid)
        if pinned:
            return False, f"Item {node_id} is pinned ({reason})"
    return True, "OK"
def delete_by_cid(self, cid: str) -> tuple[bool, str]:
    """Delete a cached item by cid, honouring the deletion rules.

    Returns:
        (success, message) tuple
    """
    allowed, reason = self.can_delete(cid)
    if not allowed:
        return False, reason
    node_id = self._get_content_index(cid)
    if node_id:
        # Indexed entry: drop it from the cache and the content index.
        self.cache.remove(node_id)
        self._del_content_index(cid)
        return True, "Deleted"
    # Fall back to the legacy flat-file layout.
    legacy_path = self.legacy_dir / cid
    if legacy_path.exists():
        legacy_path.unlink()
        return True, "Deleted (legacy)"
    return False, "Not found"
def discard_activity(self, activity_id: str) -> tuple[bool, str]:
    """Discard an activity and clean up its cache entries, honouring the
    deletion rules.

    Returns:
        (success, message) tuple
    """
    allowed, reason = self.can_discard_activity(activity_id)
    if not allowed:
        return False, reason
    if self.activity_manager.discard_activity(activity_id):
        return True, "Activity discarded"
    return False, "Failed to discard"
def _is_used_by_other_activities(self, node_id: str, exclude_activity_id: str) -> bool:
    """Return True when node_id appears (as input, output, or intermediate)
    in any activity other than the excluded one."""
    return any(
        node_id in a.input_ids
        or node_id == a.output_id
        or node_id in a.intermediate_ids
        for a in self.activity_store.list()
        if a.activity_id != exclude_activity_id
    )
def discard_activity_outputs_only(self, activity_id: str) -> tuple[bool, str]:
    """
    Discard an activity, deleting only outputs and intermediates.

    Inputs (cache items, configs) are preserved.
    Outputs/intermediates used by other activities are preserved.

    Args:
        activity_id: Id of the activity to discard.

    Returns:
        (success, message) tuple
    """
    activity = self.activity_store.get(activity_id)
    if not activity:
        return False, "Activity not found"
    # Refuse when the activity's output is pinned (published).
    if activity.output_id:
        entry = self.cache.get_entry(activity.output_id)
        if entry:
            pinned, reason = self.is_pinned(entry.cid)
            if pinned:
                return False, f"Output is pinned ({reason})"
    deleted_outputs = 0
    preserved_shared = 0
    # Output first, then intermediates - same order as before the refactor.
    candidates = []
    if activity.output_id:
        candidates.append(activity.output_id)
    candidates.extend(activity.intermediate_ids)
    for node_id in candidates:
        # Nodes shared with other activities are preserved.
        if self._is_used_by_other_activities(node_id, activity_id):
            preserved_shared += 1
            continue
        if self._remove_cached_node(node_id):
            deleted_outputs += 1
    # Remove the activity record (inputs remain in cache).
    self.activity_store.remove(activity_id)
    msg = f"Activity discarded (deleted {deleted_outputs} outputs"
    if preserved_shared > 0:
        msg += f", preserved {preserved_shared} shared items"
    msg += ")"
    return True, msg

def _remove_cached_node(self, node_id: str) -> bool:
    """Remove one node's cache entry, content index, and legacy file.

    Factored out of discard_activity_outputs_only, which previously
    duplicated this sequence for the output and for every intermediate.

    Returns:
        True when a cache entry existed and was removed, False otherwise.
    """
    entry = self.cache.get_entry(node_id)
    if not entry:
        return False
    self.cache.remove(node_id)
    # Remove from content index (Redis + local).
    self._del_content_index(entry.cid)
    # Delete from the legacy flat-file layout if present.
    legacy_path = self.legacy_dir / entry.cid
    if legacy_path.exists():
        legacy_path.unlink()
    return True
def cleanup_intermediates(self) -> int:
    """Delete every intermediate cache entry (they can be reconstructed);
    returns the number of entries removed."""
    return self.activity_manager.cleanup_intermediates()
def get_deletable_items(self) -> List[CachedFile]:
    """Return every cached item that is currently eligible for deletion."""
    return [
        CachedFile.from_cache_entry(entry)
        for entry in self.activity_manager.get_deletable_entries()
    ]
# ============ L2 Integration ============
def mark_published(self, cid: str):
    """Flag a cid as published/shared on the L2 checker."""
    self.l2_checker.mark_shared(cid)
def invalidate_shared_cache(self, cid: str):
    """Drop the cached shared status for a cid (use when the item may have
    been unpublished on L2)."""
    self.l2_checker.invalidate(cid)
# ============ Stats ============
def get_stats(self) -> dict:
    """Return cache hit/size statistics plus the recorded-activity count."""
    s = self.cache.get_stats()
    summary = {
        "total_entries": s.total_entries,
        "total_size_bytes": s.total_size_bytes,
        "hits": s.hits,
        "misses": s.misses,
        "hit_rate": s.hit_rate,
    }
    summary["activities"] = len(self.activity_store)
    return summary
# Singleton instance (initialized on first import with env vars)
_manager: Optional[L1CacheManager] = None


def get_cache_manager() -> L1CacheManager:
    """Get the singleton cache manager instance.

    Configuration is read from environment variables on first call:
      CACHE_DIR - local cache directory (default: ~/.artdag/cache)
      L2_SERVER - L2 server base URL (default: http://localhost:8200)
      REDIS_URL - Redis URL for the shared cache index (default: db 5)
    """
    global _manager
    if _manager is None:
        import redis

        cache_dir = Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
        l2_server = os.environ.get("L2_SERVER", "http://localhost:8200")
        # redis.from_url honours the db index, password and TLS options
        # embedded in the URL. The previous hand-rolled urlparse dropped any
        # credentials from REDIS_URL; this also matches how claiming.py
        # builds its client.
        redis_client = redis.from_url(
            os.environ.get('REDIS_URL', 'redis://localhost:6379/5'),
            socket_timeout=5,
            socket_connect_timeout=5,
        )
        _manager = L1CacheManager(cache_dir=cache_dir, l2_server=l2_server, redis_client=redis_client)
    return _manager
def reset_cache_manager():
    """Drop the cached singleton so the next get_cache_manager() call
    rebuilds it (testing helper)."""
    global _manager
    _manager = None

51
l1/celery_app.py Normal file
View File

@@ -0,0 +1,51 @@
"""
Art DAG Celery Application
Streaming video rendering for the Art DAG system.
Uses S-expression recipes with frame-by-frame processing.
"""
import os
import sys
from celery import Celery
from celery.signals import worker_ready
# Use central config
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from app.config import settings
app = Celery(
'art_celery',
broker=settings.redis_url,
backend=settings.redis_url,
include=['tasks', 'tasks.streaming', 'tasks.ipfs_upload']
)
@worker_ready.connect
def log_config_on_startup(sender, **kwargs):
    """Dump the effective settings to stderr when a worker comes online."""
    banner = "=" * 60
    print(banner, file=sys.stderr)
    print("WORKER STARTED - CONFIGURATION", file=sys.stderr)
    print(banner, file=sys.stderr)
    settings.log_config()
    print(f"Worker: {sender}", file=sys.stderr)
    print(banner, file=sys.stderr)
# Celery runtime configuration. The reliability knobs (late acks plus
# requeue-on-worker-loss) give at-least-once delivery, so a task may run
# twice after a crash - tasks are expected to tolerate re-execution.
app.conf.update(
    result_expires=86400 * 7,  # 7 days - allow time for recovery after restarts
    task_serializer='json',
    accept_content=['json', 'pickle'],  # pickle needed for internal Celery messages
    result_serializer='json',
    event_serializer='json',
    timezone='UTC',
    enable_utc=True,
    task_track_started=True,
    task_acks_late=True,  # Don't ack until task completes - survives worker restart
    worker_prefetch_multiplier=1,
    task_reject_on_worker_lost=True,  # Re-queue if worker dies
    task_acks_on_failure_or_timeout=True,  # Ack failed tasks so they don't retry forever
)
if __name__ == '__main__':
    # Allows `python celery_app.py worker ...` as an alternative entry point.
    app.start()

12
l1/check_redis.py Normal file
View File

@@ -0,0 +1,12 @@
#!/usr/bin/env python3
"""Check Redis connectivity.

Honours REDIS_URL so the check targets the same instance the workers use
(the rest of the stack - claiming.py, the cache manager - reads REDIS_URL;
previously this script always probed localhost:6379/0).
"""
import os

import redis

REDIS_URL = os.environ.get('REDIS_URL', 'redis://localhost:6379/0')

try:
    # Short connect timeout so the check fails fast instead of hanging.
    r = redis.from_url(REDIS_URL, socket_connect_timeout=5)
    r.ping()
    print("Redis: OK")
except redis.ConnectionError:
    print("Redis: Not running")
    print("Start with: sudo systemctl start redis-server")

421
l1/claiming.py Normal file
View File

@@ -0,0 +1,421 @@
"""
Hash-based task claiming for distributed execution.
Prevents duplicate work when multiple workers process the same plan.
Uses Redis Lua scripts for atomic claim operations.
"""
import json
import logging
import os
import time
from dataclasses import dataclass
from datetime import datetime, timezone
from enum import Enum
from typing import Optional
import redis
logger = logging.getLogger(__name__)

# Redis connection URL; db 5 matches the rest of the L1 stack (docker-compose
# sets REDIS_URL=redis://redis:6379/5).
REDIS_URL = os.environ.get('REDIS_URL', 'redis://localhost:6379/5')
# Key prefix for task claims
CLAIM_PREFIX = "artdag:claim:"
# Default TTL for claims (5 minutes). A claim that is never completed or
# released expires on its own, so a crashed worker cannot wedge a task forever.
DEFAULT_CLAIM_TTL = 300
# TTL for completed results (1 hour). Lets duplicate submissions discover the
# finished output without re-rendering.
COMPLETED_TTL = 3600
class ClaimStatus(Enum):
    """Status of a task claim.

    The claim lifecycle written to Redis is CLAIMED -> RUNNING ->
    COMPLETED/FAILED; CACHED marks work discovered to be already done.
    """
    PENDING = "pending"      # NOTE(review): never written in this module - confirm it is still used
    CLAIMED = "claimed"      # a worker holds the claim but has not started
    RUNNING = "running"      # claim owner is executing the task
    COMPLETED = "completed"  # claim owner finished the work
    CACHED = "cached"        # result already existed; no processing needed
    FAILED = "failed"        # claim owner reported an error
@dataclass
class ClaimInfo:
    """Snapshot of a single task claim, stored as JSON in Redis."""
    cache_id: str                     # content-addressed task id
    status: ClaimStatus
    worker_id: Optional[str] = None   # owner of the claim, if any
    task_id: Optional[str] = None     # Celery task id, if known
    claimed_at: Optional[str] = None  # ISO-8601 UTC timestamp (set by TaskClaimer)
    completed_at: Optional[str] = None
    output_path: Optional[str] = None  # set for completed/cached claims
    error: Optional[str] = None        # set for failed claims

    def to_dict(self) -> dict:
        """Serialize to a JSON-compatible dict (status flattened to its string value)."""
        return {
            key: (self.status.value if key == "status" else getattr(self, key))
            for key in (
                "cache_id", "status", "worker_id", "task_id",
                "claimed_at", "completed_at", "output_path", "error",
            )
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ClaimInfo":
        """Rebuild a ClaimInfo from a dict produced by to_dict()."""
        optional = {
            key: data.get(key)
            for key in (
                "worker_id", "task_id", "claimed_at",
                "completed_at", "output_path", "error",
            )
        }
        return cls(
            cache_id=data["cache_id"],
            status=ClaimStatus(data["status"]),
            **optional,
        )
# Lua script for atomic task claiming
# Returns 1 if claim successful, 0 if already claimed/completed
CLAIM_TASK_SCRIPT = """
local key = KEYS[1]
local data = redis.call('GET', key)
if data then
local status = cjson.decode(data)
local s = status['status']
-- Already claimed, running, completed, or cached - don't claim
if s == 'claimed' or s == 'running' or s == 'completed' or s == 'cached' then
return 0
end
end
-- Claim the task
local claim_data = ARGV[1]
local ttl = tonumber(ARGV[2])
redis.call('SETEX', key, ttl, claim_data)
return 1
"""
# Lua script for releasing a claim (e.g., on failure)
RELEASE_CLAIM_SCRIPT = """
local key = KEYS[1]
local worker_id = ARGV[1]
local data = redis.call('GET', key)
if data then
local status = cjson.decode(data)
-- Only release if we own the claim
if status['worker_id'] == worker_id then
redis.call('DEL', key)
return 1
end
end
return 0
"""
# Lua script for updating claim status (claimed -> running -> completed)
UPDATE_STATUS_SCRIPT = """
local key = KEYS[1]
local worker_id = ARGV[1]
local new_status = ARGV[2]
local new_data = ARGV[3]
local ttl = tonumber(ARGV[4])
local data = redis.call('GET', key)
if not data then
return 0
end
local status = cjson.decode(data)
-- Only update if we own the claim
if status['worker_id'] ~= worker_id then
return 0
end
redis.call('SETEX', key, ttl, new_data)
return 1
"""
class TaskClaimer:
    """
    Manages hash-based task claiming for distributed execution.

    Uses Redis for coordination between workers. Each task is identified by
    its cache_id (content-addressed), so every worker processing the same
    plan contends on the same claim key.

    The Redis connection is lazy: the Lua scripts are only registered the
    first time the ``redis`` property is accessed. Any method that invokes a
    script must therefore resolve ``self.redis`` *before* reading the script
    attribute - see the inline note in ``claim()``.
    """

    def __init__(self, redis_url: str = None):
        """
        Initialize the claimer.

        Args:
            redis_url: Redis connection URL (defaults to the REDIS_URL env var)
        """
        self.redis_url = redis_url or REDIS_URL
        self._redis: Optional[redis.Redis] = None
        # Lua script handles; populated lazily by the `redis` property.
        self._claim_script = None
        self._release_script = None
        self._update_script = None

    @property
    def redis(self) -> redis.Redis:
        """Get Redis connection (lazy initialization; registers the Lua scripts)."""
        if self._redis is None:
            self._redis = redis.from_url(self.redis_url, decode_responses=True)
            # Register Lua scripts
            self._claim_script = self._redis.register_script(CLAIM_TASK_SCRIPT)
            self._release_script = self._redis.register_script(RELEASE_CLAIM_SCRIPT)
            self._update_script = self._redis.register_script(UPDATE_STATUS_SCRIPT)
        return self._redis

    def _key(self, cache_id: str) -> str:
        """Get Redis key for a cache_id."""
        return f"{CLAIM_PREFIX}{cache_id}"

    def claim(
        self,
        cache_id: str,
        worker_id: str,
        task_id: Optional[str] = None,
        ttl: int = DEFAULT_CLAIM_TTL,
    ) -> bool:
        """
        Attempt to claim a task.

        Args:
            cache_id: The cache ID of the task to claim
            worker_id: Identifier for the claiming worker
            task_id: Optional Celery task ID
            ttl: Time-to-live for the claim in seconds

        Returns:
            True if claim successful, False if already claimed
        """
        claim_info = ClaimInfo(
            cache_id=cache_id,
            status=ClaimStatus.CLAIMED,
            worker_id=worker_id,
            task_id=task_id,
            claimed_at=datetime.now(timezone.utc).isoformat(),
        )
        # BUG FIX: resolve the connection BEFORE reading self._claim_script.
        # Python evaluates the callee expression first, so on a fresh
        # TaskClaimer `self._claim_script(..., client=self.redis)` looked up
        # None before the lazy `self.redis` in the argument list could
        # register the scripts, raising TypeError on the first call.
        client = self.redis
        result = self._claim_script(
            keys=[self._key(cache_id)],
            args=[json.dumps(claim_info.to_dict()), ttl],
            client=client,
        )
        if result == 1:
            logger.debug(f"Claimed task {cache_id[:16]}... for worker {worker_id}")
            return True
        else:
            logger.debug(f"Task {cache_id[:16]}... already claimed")
            return False

    def update_status(
        self,
        cache_id: str,
        worker_id: str,
        status: ClaimStatus,
        output_path: Optional[str] = None,
        error: Optional[str] = None,
        ttl: Optional[int] = None,
    ) -> bool:
        """
        Update the status of a claimed task.

        Args:
            cache_id: The cache ID of the task
            worker_id: Worker ID that owns the claim
            status: New status
            output_path: Path to output (for completed)
            error: Error message (for failed)
            ttl: New TTL (defaults based on status)

        Returns:
            True if update successful (only the claim owner may update)
        """
        if ttl is None:
            # Finished work lingers longer (COMPLETED_TTL) so duplicate
            # submissions can find the result; in-flight claims keep the
            # short default TTL.
            if status in (ClaimStatus.COMPLETED, ClaimStatus.CACHED):
                ttl = COMPLETED_TTL
            else:
                ttl = DEFAULT_CLAIM_TTL
        # Get existing claim info. This also touches self.redis, which
        # guarantees the Lua scripts are registered before use below.
        existing = self.get_status(cache_id)
        if not existing:
            logger.warning(f"No claim found for {cache_id[:16]}...")
            return False
        claim_info = ClaimInfo(
            cache_id=cache_id,
            status=status,
            worker_id=worker_id,
            task_id=existing.task_id,
            claimed_at=existing.claimed_at,
            completed_at=datetime.now(timezone.utc).isoformat() if status in (
                ClaimStatus.COMPLETED, ClaimStatus.CACHED, ClaimStatus.FAILED
            ) else None,
            output_path=output_path,
            error=error,
        )
        result = self._update_script(
            keys=[self._key(cache_id)],
            args=[worker_id, status.value, json.dumps(claim_info.to_dict()), ttl],
            client=self.redis,
        )
        if result == 1:
            logger.debug(f"Updated task {cache_id[:16]}... to {status.value}")
            return True
        else:
            logger.warning(f"Failed to update task {cache_id[:16]}... (not owner?)")
            return False

    def release(self, cache_id: str, worker_id: str) -> bool:
        """
        Release a claim (e.g., on task failure before completion).

        Args:
            cache_id: The cache ID of the task
            worker_id: Worker ID that owns the claim

        Returns:
            True if release successful (only the claim owner may release)
        """
        # Same lazy-initialization ordering fix as in claim().
        client = self.redis
        result = self._release_script(
            keys=[self._key(cache_id)],
            args=[worker_id],
            client=client,
        )
        if result == 1:
            logger.debug(f"Released claim on {cache_id[:16]}...")
            return True
        return False

    def get_status(self, cache_id: str) -> Optional[ClaimInfo]:
        """
        Get the current status of a task.

        Args:
            cache_id: The cache ID of the task

        Returns:
            ClaimInfo if task has been claimed, None otherwise
        """
        data = self.redis.get(self._key(cache_id))
        if data:
            return ClaimInfo.from_dict(json.loads(data))
        return None

    def is_completed(self, cache_id: str) -> bool:
        """Check if a task is completed or cached."""
        info = self.get_status(cache_id)
        return info is not None and info.status in (
            ClaimStatus.COMPLETED, ClaimStatus.CACHED
        )

    def wait_for_completion(
        self,
        cache_id: str,
        timeout: float = 300,
        poll_interval: float = 0.5,
    ) -> Optional[ClaimInfo]:
        """
        Wait (by polling) for a task to reach a terminal state.

        Args:
            cache_id: The cache ID of the task
            timeout: Maximum time to wait in seconds
            poll_interval: How often to check status

        Returns:
            ClaimInfo if completed/cached/failed within the window, None on timeout
        """
        start_time = time.time()
        while time.time() - start_time < timeout:
            info = self.get_status(cache_id)
            if info and info.status in (
                ClaimStatus.COMPLETED, ClaimStatus.CACHED, ClaimStatus.FAILED
            ):
                return info
            time.sleep(poll_interval)
        logger.warning(f"Timeout waiting for {cache_id[:16]}...")
        return None

    def mark_cached(self, cache_id: str, output_path: str) -> None:
        """
        Mark a task as already cached (no processing needed).

        This is used when we discover the result already exists before
        attempting to claim. Written with a plain SETEX (no ownership check),
        since there is no claim to protect.

        Args:
            cache_id: The cache ID of the task
            output_path: Path to the cached output
        """
        claim_info = ClaimInfo(
            cache_id=cache_id,
            status=ClaimStatus.CACHED,
            output_path=output_path,
            completed_at=datetime.now(timezone.utc).isoformat(),
        )
        self.redis.setex(
            self._key(cache_id),
            COMPLETED_TTL,
            json.dumps(claim_info.to_dict()),
        )

    def clear_all(self) -> int:
        """
        Clear all claims (for testing/reset).

        Returns:
            Number of claims cleared
        """
        pattern = f"{CLAIM_PREFIX}*"
        keys = list(self.redis.scan_iter(match=pattern))
        if keys:
            return self.redis.delete(*keys)
        return 0
# Global claimer instance
_claimer: Optional[TaskClaimer] = None


def get_claimer() -> TaskClaimer:
    """Return the process-wide TaskClaimer, creating it on first use."""
    global _claimer
    if _claimer is None:
        _claimer = TaskClaimer()
    return _claimer
def claim_task(cache_id: str, worker_id: str, task_id: str = None) -> bool:
    """Module-level shortcut for TaskClaimer.claim on the global claimer."""
    return get_claimer().claim(cache_id, worker_id, task_id)
def complete_task(cache_id: str, worker_id: str, output_path: str) -> bool:
    """Module-level shortcut: mark a claimed task COMPLETED with its output path."""
    claimer = get_claimer()
    return claimer.update_status(
        cache_id, worker_id, ClaimStatus.COMPLETED, output_path=output_path
    )
def fail_task(cache_id: str, worker_id: str, error: str) -> bool:
    """Module-level shortcut: mark a claimed task FAILED with an error message."""
    claimer = get_claimer()
    return claimer.update_status(
        cache_id, worker_id, ClaimStatus.FAILED, error=error
    )

View File

@@ -0,0 +1,17 @@
;; Audio Configuration - dizzy.mp3
;;
;; Defines audio analyzer and playback for a recipe.
;; Pass to recipe with: --audio configs/audio-dizzy.sexp
;;
;; Provides:
;;   - music: audio analyzer for beat/energy detection
;;   - audio-playback: path for synchronized playback

(require-primitives "streaming")

;; Audio analyzer (provides beat detection and energy levels)
;; Paths relative to working directory (project root)
(def music (streaming:make-audio-analyzer "dizzy.mp3"))

;; Audio playback path (for sync with video output)
;; NOTE(review): analyzer and playback reference the same file; presumably
;; both must be changed together when swapping tracks - confirm.
(audio-playback "dizzy.mp3")

View File

@@ -0,0 +1,17 @@
;; Audio Configuration - woods-audio
;; (header previously said "dizzy.mp3" - stale copy-paste from audio-dizzy.sexp)
;;
;; Defines audio analyzer and playback for a recipe.
;; Pass to recipe with: --audio configs/<this file>
;;
;; Provides:
;;   - music: audio analyzer for beat/energy detection
;;   - audio-playback: path for synchronized playback

(require-primitives "streaming")

;; Audio analyzer (provides beat detection and energy levels)
;; Using friendly name for asset resolution
(def music (streaming:make-audio-analyzer "woods-audio"))

;; Audio playback path (for sync with video output)
(audio-playback "woods-audio")

View File

@@ -0,0 +1,38 @@
;; Default Sources Configuration
;;
;; Defines video sources and per-pair effect configurations.
;; Pass to recipe with: --sources configs/sources-default.sexp
;;
;; Required by recipes using process-pair macro:
;;   - sources: array of video sources
;;   - pair-configs: array of effect configurations per source
;;
;; NOTE(review): sources and pair-configs are kept the same length here
;; (8 each); presumably process-pair pairs them by index - confirm.

(require-primitives "streaming")

;; Video sources array
;; Paths relative to working directory (project root)
;; Second argument is the source frame rate (fps).
(def sources [
  (streaming:make-video-source "monday.webm" 30)
  (streaming:make-video-source "escher.webm" 30)
  (streaming:make-video-source "2.webm" 30)
  (streaming:make-video-source "disruptors.webm" 30)
  (streaming:make-video-source "4.mp4" 30)
  (streaming:make-video-source "ecstacy.mp4" 30)
  (streaming:make-video-source "dopple.webm" 30)
  (streaming:make-video-source "5.mp4" 30)
])

;; Per-pair effect config: rotation direction, rotation ranges, zoom ranges
;;   :dir = rotation direction (1 or -1)
;;   :rot-a, :rot-b = max rotation angles for clip A and B
;;   :zoom-a, :zoom-b = max zoom amounts for clip A and B
(def pair-configs [
  {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}  ;; 0: monday
  {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 1: escher
  {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 2: vid2
  {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}  ;; 3: disruptors (reversed)
  {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}  ;; 4: vid4
  {:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}   ;; 5: ecstacy (smaller)
  {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}  ;; 6: dopple (reversed)
  {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 7: vid5
])

View File

@@ -0,0 +1,19 @@
;; Half-resolution Woods Sources (960x540)
;;
;; Pass to recipe with: --sources configs/sources-woods-half.sexp
;; Four sources, four matching per-pair effect configs (paired by index).

(require-primitives "streaming")

;; Second argument is the source frame rate (fps).
(def sources [
  (streaming:make-video-source "woods_half/1.webm" 30)
  (streaming:make-video-source "woods_half/2.webm" 30)
  (streaming:make-video-source "woods_half/3.webm" 30)
  (streaming:make-video-source "woods_half/4.webm" 30)
])

;; :dir = rotation direction, :rot-* = max rotation, :zoom-* = max zoom
(def pair-configs [
  {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}  ;; 0
  {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 1
  {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 2
  {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}  ;; 3 (reversed)
])

View File

@@ -0,0 +1,39 @@
;; Woods Sources Configuration
;; (header previously said "Default Sources" / sources-default.sexp -
;;  stale copy-paste; this file defines the woods-1..woods-8 sources)
;;
;; Defines video sources and per-pair effect configurations.
;; Pass to recipe with: --sources configs/<this file>
;;
;; Required by recipes using process-pair macro:
;;   - sources: array of video sources
;;   - pair-configs: array of effect configurations per source

(require-primitives "streaming")

;; Video sources array
;; Using friendly names for asset resolution
;; Second argument is the source frame rate (fps).
(def sources [
  (streaming:make-video-source "woods-1" 10)
  (streaming:make-video-source "woods-2" 10)
  (streaming:make-video-source "woods-3" 10)
  (streaming:make-video-source "woods-4" 10)
  (streaming:make-video-source "woods-5" 10)
  (streaming:make-video-source "woods-6" 10)
  (streaming:make-video-source "woods-7" 10)
  (streaming:make-video-source "woods-8" 10)
])

;; Per-pair effect config: rotation direction, rotation ranges, zoom ranges
;;   :dir = rotation direction (1 or -1)
;;   :rot-a, :rot-b = max rotation angles for clip A and B
;;   :zoom-a, :zoom-b = max zoom amounts for clip A and B
;; (index comments previously named monday/escher - stale copy-paste from
;;  sources-default; they are corrected to the woods sources above)
(def pair-configs [
  {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}  ;; 0: woods-1
  {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 1: woods-2
  {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 2: woods-3
  {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}  ;; 3: woods-4 (reversed)
  {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}  ;; 4: woods-5
  {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 5: woods-6
  {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}  ;; 6: woods-7
  {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 7: woods-8
])

2144
l1/database.py Normal file

File diff suppressed because it is too large Load Diff

19
l1/deploy.sh Executable file
View File

@@ -0,0 +1,19 @@
#!/bin/bash
# Build the celery L1 server image, push it to the private registry, and
# redeploy the swarm stack.
#
# -e: abort on error; -u: abort on unset variables; pipefail: a failing
# command in a pipeline fails the pipeline.
set -euo pipefail

cd "$(dirname "$0")"

# Single source of truth for the image reference (was repeated three times).
IMAGE="registry.rose-ash.com:5000/celery-l1-server:latest"

echo "=== Pulling latest code ==="
git pull

echo "=== Building Docker image ==="
# CACHEBUST invalidates the build cache from that ARG onward so a fresh
# clone/copy happens on every deploy.
docker build --build-arg "CACHEBUST=$(date +%s)" -t "$IMAGE" .

echo "=== Pushing to registry ==="
docker push "$IMAGE"

echo "=== Redeploying celery stack ==="
docker stack deploy -c docker-compose.yml celery

echo "=== Done ==="
docker stack services celery

249
l1/diagnose_gpu.py Executable file
View File

@@ -0,0 +1,249 @@
#!/usr/bin/env python3
"""
GPU Rendering Diagnostic Script
Checks for common issues that cause GPU rendering slowdowns in art-dag.
Run this script to identify potential performance bottlenecks.
"""
import sys
import subprocess
import os
def print_section(title):
    """Print a banner heading for a diagnostic section."""
    bar = "=" * 60
    print(f"\n{bar}")
    print(f" {title}")
    print(bar)

def _report(tag, msg):
    """Print one uniformly formatted diagnostic line."""
    print(f" [{tag}] {msg}")

def check_pass(msg):
    """Report a passed check."""
    _report("PASS", msg)

def check_fail(msg):
    """Report a failed check."""
    _report("FAIL", msg)

def check_warn(msg):
    """Report a non-fatal warning."""
    _report("WARN", msg)

def check_info(msg):
    """Report an informational detail."""
    _report("INFO", msg)
# ============================================================
# 1. Check GPU Availability
# ============================================================
print_section("1. GPU AVAILABILITY")

# Check nvidia-smi; a short timeout keeps the diagnostic from hanging when
# the driver is wedged.
try:
    result = subprocess.run(["nvidia-smi", "--query-gpu=name,memory.total,memory.free,utilization.gpu",
                             "--format=csv,noheader"], capture_output=True, text=True, timeout=5)
    if result.returncode == 0:
        # One CSV line per GPU.
        for line in result.stdout.strip().split('\n'):
            check_pass(f"GPU found: {line}")
    else:
        check_fail("nvidia-smi failed - no GPU detected")
except FileNotFoundError:
    check_fail("nvidia-smi not found - NVIDIA drivers not installed")
except Exception as e:
    check_fail(f"nvidia-smi error: {e}")

# ============================================================
# 2. Check CuPy
# ============================================================
print_section("2. CUPY (GPU ARRAY LIBRARY)")
try:
    import cupy as cp
    check_pass(f"CuPy available, version {cp.__version__}")
    # Test basic GPU operation (allocation + explicit sync forces real
    # device work rather than lazy success).
    try:
        a = cp.zeros((100, 100), dtype=cp.uint8)
        cp.cuda.Stream.null.synchronize()
        check_pass("CuPy GPU operations working")
        # Check memory
        mempool = cp.get_default_memory_pool()
        check_info(f"GPU memory pool: {mempool.used_bytes() / 1024**2:.1f} MB used, "
                   f"{mempool.total_bytes() / 1024**2:.1f} MB total")
    except Exception as e:
        check_fail(f"CuPy GPU test failed: {e}")
except ImportError:
    check_fail("CuPy not installed - GPU rendering disabled")

# ============================================================
# 3. Check PyNvVideoCodec (GPU Encoding)
# ============================================================
print_section("3. PYNVVIDEOCODEC (GPU ENCODING)")
try:
    import PyNvVideoCodec as nvc
    check_pass("PyNvVideoCodec available - zero-copy GPU encoding enabled")
except ImportError:
    check_warn("PyNvVideoCodec not available - using FFmpeg NVENC (slower)")

# ============================================================
# 4. Check Decord GPU (Hardware Decode)
# ============================================================
print_section("4. DECORD GPU (HARDWARE DECODE)")
try:
    import decord
    from decord import gpu
    # Constructing the context probes for NVDEC support.
    ctx = gpu(0)
    check_pass(f"Decord GPU (NVDEC) available - hardware video decode enabled")
except ImportError:
    check_warn("Decord not installed - using FFmpeg decode")
except Exception as e:
    check_warn(f"Decord GPU not available ({e}) - using FFmpeg decode")

# ============================================================
# 5. Check DLPack Support
# ============================================================
print_section("5. DLPACK (ZERO-COPY TRANSFER)")
try:
    import decord
    from decord import VideoReader, gpu
    import cupy as cp
    # Need a test video file; scan the cache dir first, then /tmp, and use
    # the first video found.
    test_video = None
    for path in ["/data/cache", "/tmp"]:
        if os.path.exists(path):
            for f in os.listdir(path):
                if f.endswith(('.mp4', '.webm', '.mkv')):
                    test_video = os.path.join(path, f)
                    break
        if test_video:
            break
    if test_video:
        try:
            # Decode one frame on the GPU and hand it to CuPy via DLPack
            # without a host round-trip.
            vr = VideoReader(test_video, ctx=gpu(0))
            frame = vr[0]
            dlpack = frame.to_dlpack()
            gpu_frame = cp.from_dlpack(dlpack)
            check_pass(f"DLPack zero-copy working (tested with {os.path.basename(test_video)})")
        except Exception as e:
            check_fail(f"DLPack FAILED: {e}")
            check_info("This means every frame does GPU->CPU->GPU copy (SLOW)")
    else:
        check_warn("No test video found - cannot verify DLPack")
except ImportError:
    check_warn("Cannot test DLPack - decord or cupy not available")
# ============================================================
# 6. Check Fast CUDA Kernels
# ============================================================
print_section("6. FAST CUDA KERNELS (JIT COMPILED)")
try:
    # NOTE(review): hardcoded source path, repeated below; fine for a
    # diagnostic, but each insert adds a duplicate sys.path entry.
    sys.path.insert(0, '/root/art-dag/celery')
    from streaming.jit_compiler import (
        fast_rotate, fast_zoom, fast_blend, fast_hue_shift,
        fast_invert, fast_ripple, get_fast_ops
    )
    check_pass("Fast CUDA kernels loaded successfully")
    # Test one kernel
    try:
        import cupy as cp
        test_img = cp.zeros((720, 1280, 3), dtype=cp.uint8)
        result = fast_rotate(test_img, 45.0)
        cp.cuda.Stream.null.synchronize()
        check_pass("Fast rotate kernel working")
    except Exception as e:
        check_fail(f"Fast kernel execution failed: {e}")
except ImportError as e:
    check_warn(f"Fast CUDA kernels not available: {e}")
    check_info("Fallback to slower CuPy operations")

# ============================================================
# 7. Check Fused Pipeline Compiler
# ============================================================
print_section("7. FUSED PIPELINE COMPILER")
try:
    sys.path.insert(0, '/root/art-dag/celery')
    from streaming.sexp_to_cuda import compile_frame_pipeline, compile_autonomous_pipeline
    check_pass("Fused CUDA pipeline compiler available")
except ImportError as e:
    check_warn(f"Fused pipeline compiler not available: {e}")
    check_info("Using per-operation fallback (slower for multi-effect pipelines)")

# ============================================================
# 8. Check FFmpeg NVENC
# ============================================================
print_section("8. FFMPEG NVENC (HARDWARE ENCODE)")
try:
    result = subprocess.run(["ffmpeg", "-encoders"], capture_output=True, text=True, timeout=5)
    if "h264_nvenc" in result.stdout:
        check_pass("FFmpeg h264_nvenc encoder available")
    else:
        check_warn("FFmpeg h264_nvenc not available - using libx264 (CPU)")
    if "hevc_nvenc" in result.stdout:
        check_pass("FFmpeg hevc_nvenc encoder available")
except Exception as e:
    check_fail(f"FFmpeg check failed: {e}")

# ============================================================
# 9. Check FFmpeg NVDEC
# ============================================================
print_section("9. FFMPEG NVDEC (HARDWARE DECODE)")
try:
    result = subprocess.run(["ffmpeg", "-hwaccels"], capture_output=True, text=True, timeout=5)
    if "cuda" in result.stdout:
        check_pass("FFmpeg CUDA hwaccel available")
    else:
        check_warn("FFmpeg CUDA hwaccel not available - using CPU decode")
except Exception as e:
    check_fail(f"FFmpeg hwaccel check failed: {e}")

# ============================================================
# 10. Check Pipeline Cache Status
# ============================================================
print_section("10. PIPELINE CACHE STATUS")
try:
    sys.path.insert(0, '/root/art-dag/celery')
    # Reading private module-level caches; only meaningful inside a worker
    # process that has already rendered something.
    from sexp_effects.primitive_libs.streaming_gpu import (
        _FUSED_PIPELINE_CACHE, _AUTONOMOUS_PIPELINE_CACHE
    )
    fused_count = len(_FUSED_PIPELINE_CACHE)
    auto_count = len(_AUTONOMOUS_PIPELINE_CACHE)
    if fused_count > 0 or auto_count > 0:
        check_info(f"Fused pipeline cache: {fused_count} entries")
        check_info(f"Autonomous pipeline cache: {auto_count} entries")
        if fused_count > 100 or auto_count > 100:
            check_warn("Large pipeline cache - may cause memory pressure")
    else:
        check_info("Pipeline caches empty (no rendering done yet)")
except Exception as e:
    check_info(f"Could not check pipeline cache: {e}")

# ============================================================
# Summary
# ============================================================
print_section("SUMMARY")
print("""
Optimal GPU rendering requires:
1. [CRITICAL] CuPy with working GPU operations
2. [CRITICAL] DLPack zero-copy transfer (decord -> CuPy)
3. [HIGH] Fast CUDA kernels from jit_compiler
4. [MEDIUM] Fused pipeline compiler for multi-effect recipes
5. [MEDIUM] PyNvVideoCodec for zero-copy encoding
6. [LOW] FFmpeg NVENC/NVDEC as fallback
If DLPack is failing, check:
- decord version (needs 0.6.0+ with DLPack support)
- CuPy version compatibility
- CUDA toolkit version match
If fast kernels are not loading:
- Check if streaming/jit_compiler.py exists
- Verify CUDA compiler (nvcc) is available
""")

View File

@@ -0,0 +1,36 @@
# GPU Worker Development Override
#
# Usage: docker stack deploy -c docker-compose.yml -c docker-compose.gpu-dev.yml celery
# Or for quick testing: docker-compose -f docker-compose.yml -f docker-compose.gpu-dev.yml up l1-gpu-worker
#
# Features:
# - Mounts source code for instant changes (no rebuild needed)
# - Uses watchmedo for auto-reload on file changes
# - Shows config on startup

# NOTE(review): `version` is ignored by modern docker-compose but still read
# by `docker stack deploy`.
version: '3.8'

services:
  l1-gpu-worker:
    # Override command to use watchmedo for auto-reload.
    # watchmedo restarts the celery worker whenever a *.py file under /app
    # changes; the pip install is best-effort (|| true) so a missing network
    # does not break startup.
    command: >
      sh -c "
      pip install -q watchdog[watchmedo] 2>/dev/null || true;
      echo '=== GPU WORKER DEV MODE ===';
      echo 'Source mounted - changes take effect on restart';
      echo 'Auto-reload enabled via watchmedo';
      env | grep -E 'STREAMING_GPU|IPFS_GATEWAY|REDIS|DATABASE' | sort;
      echo '===========================';
      watchmedo auto-restart --directory=/app --pattern='*.py' --recursive -- \
      celery -A celery_app worker --loglevel=info -E -Q gpu,celery
      "
    environment:
      # Development defaults (can override with .env)
      - STREAMING_GPU_PERSIST=0
      - IPFS_GATEWAY_URL=https://celery-artdag.rose-ash.com/ipfs
      - SHOW_CONFIG=1
    volumes:
      # Mount source code for hot reload (read-only: the container never
      # writes back into the checkout)
      - ./:/app:ro
      # Keep cache local
      - gpu_cache:/data/cache

191
l1/docker-compose.yml Normal file
View File

@@ -0,0 +1,191 @@
version: "3.8"

services:

  # Celery broker / result backend. Published on host port 16379 with
  # mode: host so the GPU worker on a different VPC can reach it directly.
  redis:
    image: redis:7-alpine
    ports:
      - target: 6379
        published: 16379
        mode: host # Bypass swarm routing mesh
    volumes:
      - redis_data:/data
    networks:
      - celery
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.labels.gpu != true

  # Metadata store. POSTGRES_PASSWORD is supplied via .env (env_file).
  postgres:
    image: postgres:16-alpine
    env_file:
      - .env
    environment:
      - POSTGRES_USER=artdag
      - POSTGRES_DB=artdag
    ports:
      - target: 5432
        published: 15432
        mode: host # Expose for GPU worker on different VPC
    volumes:
      - postgres_data:/var/lib/postgresql/data
    networks:
      - celery
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.labels.gpu != true

  # Kubo IPFS node: swarm ports published for peering, API port published
  # for the remote GPU worker.
  # NOTE(review): ipfs/kubo:latest is an unpinned tag; consider pinning a
  # specific version for reproducible deploys. The API port is exposed on
  # the host - confirm it is firewalled to the GPU worker's address.
  ipfs:
    image: ipfs/kubo:latest
    ports:
      - "4001:4001" # Swarm TCP
      - "4001:4001/udp" # Swarm UDP
      - target: 5001
        published: 15001
        mode: host # API port for GPU worker on different VPC
    volumes:
      - ipfs_data:/data/ipfs
      - l1_cache:/data/cache:ro # Read-only access to cache for adding files
    networks:
      - celery
      - externalnet # For gateway access
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.labels.gpu != true

  # HTTP front end (serves /health on port 8100 per the healthcheck below).
  l1-server:
    image: registry.rose-ash.com:5000/celery-l1-server:latest
    env_file:
      - .env
    environment:
      - REDIS_URL=redis://redis:6379/5
      # IPFS_API multiaddr - used for all IPFS operations (add, cat, pin)
      - IPFS_API=/dns/ipfs/tcp/5001
      - CACHE_DIR=/data/cache
      # Coop app internal URLs for fragment composition
      - INTERNAL_URL_BLOG=http://blog:8000
      - INTERNAL_URL_CART=http://cart:8000
      - INTERNAL_URL_ACCOUNT=http://account:8000
      # DATABASE_URL, ADMIN_TOKEN, ARTDAG_CLUSTER_KEY,
      # L2_SERVER, L2_DOMAIN, IPFS_GATEWAY_URL from .env file
    healthcheck:
      # Uses the image's own python - no curl/wget needed in the image.
      test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8100/health')"]
      interval: 10s
      timeout: 5s
      retries: 3
      start_period: 15s
    volumes:
      - l1_cache:/data/cache
    # NOTE(review): depends_on is ignored by `docker stack deploy` (swarm
    # mode); services must tolerate redis/postgres/ipfs starting later.
    depends_on:
      - redis
      - postgres
      - ipfs
    networks:
      - celery
      - externalnet
    deploy:
      replicas: 1
      update_config:
        order: start-first # New task starts before the old one stops
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.labels.gpu != true

  # CPU Celery workers. The find/rm clears stale __pycache__ before boot.
  l1-worker:
    image: registry.rose-ash.com:5000/celery-l1-server:latest
    command: sh -c "find /app -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null; celery -A celery_app worker --loglevel=info -E"
    env_file:
      - .env
    environment:
      - REDIS_URL=redis://redis:6379/5
      # IPFS_API multiaddr - used for all IPFS operations (add, cat, pin)
      - IPFS_API=/dns/ipfs/tcp/5001
      - CACHE_DIR=/data/cache
      - C_FORCE_ROOT=true
      # DATABASE_URL, ARTDAG_CLUSTER_KEY from .env file
    volumes:
      - l1_cache:/data/cache
    depends_on:
      - redis
      - postgres
      - ipfs
    networks:
      - celery
    deploy:
      replicas: 2
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.labels.gpu != true

  # Celery monitoring UI (port 5555, reachable via externalnet).
  flower:
    image: mher/flower:2.0
    command: celery --broker=redis://redis:6379/5 flower --port=5555
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/5
      - FLOWER_PORT=5555
    depends_on:
      - redis
    networks:
      - celery
      - externalnet
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.labels.gpu != true

  # GPU worker for streaming/rendering tasks
  # Build: docker build -f Dockerfile.gpu -t registry.rose-ash.com:5000/celery-l1-gpu-server:latest .
  # Requires: docker node update --label-add gpu=true <gpu-node-name>
  l1-gpu-worker:
    image: registry.rose-ash.com:5000/celery-l1-gpu-server:latest
    command: sh -c "cd /app && celery -A celery_app worker --loglevel=info -E -Q gpu,celery"
    env_file:
      - .env.gpu
    volumes:
      # Local cache - ephemeral, just for working files
      - gpu_cache:/data/cache
      # Note: No source mount - GPU worker uses code from image
    depends_on:
      - redis
      - postgres
      - ipfs
    networks:
      - celery
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.labels.gpu == true

volumes:
  redis_data:
  postgres_data:
  ipfs_data:
  l1_cache:
  gpu_cache: # Ephemeral cache for GPU workers

networks:
  celery:
    driver: overlay
  externalnet:
    external: true

View File

@@ -0,0 +1,150 @@
;; Quick Test - Fully Explicit Streaming Version
;;
;; The interpreter is completely generic - knows nothing about video/audio.
;; All domain logic is explicit via primitives.
;;
;; Run with built-in sources/audio:
;;   python3 -m streaming.stream_sexp_generic effects/quick_test_explicit.sexp --fps 30
;;
;; Run with external config files:
;;   python3 -m streaming.stream_sexp_generic effects/quick_test_explicit.sexp \
;;     --sources configs/sources-default.sexp \
;;     --audio configs/audio-dizzy.sexp \
;;     --fps 30
(stream "quick_test_explicit"
  :fps 30
  :width 1920
  :height 1080
  :seed 42

  ;; Load standard primitives and effects
  (include :path "../templates/standard-primitives.sexp")
  (include :path "../templates/standard-effects.sexp")
  ;; Load reusable templates
  (include :path "../templates/stream-process-pair.sexp")
  (include :path "../templates/crossfade-zoom.sexp")

  ;; === SOURCES AS ARRAY ===
  (def sources [
    (streaming:make-video-source "monday.webm" 30)
    (streaming:make-video-source "escher.webm" 30)
    (streaming:make-video-source "2.webm" 30)
    (streaming:make-video-source "disruptors.webm" 30)
    (streaming:make-video-source "4.mp4" 30)
    (streaming:make-video-source "ecstacy.mp4" 30)
    (streaming:make-video-source "dopple.webm" 30)
    (streaming:make-video-source "5.mp4" 30)
  ])

  ;; Per-pair config: [rot-dir, rot-a-max, rot-b-max, zoom-a-max, zoom-b-max]
  ;; Pairs 3,6: reversed (negative rot-a, positive rot-b, shrink zoom-a, grow zoom-b)
  ;; Pair 5: smaller ranges
  (def pair-configs [
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 0: monday
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}    ;; 1: escher
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}    ;; 2: vid2
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}   ;; 3: disruptors (reversed)
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 4: vid4
    {:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}    ;; 5: ecstacy (smaller)
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}   ;; 6: dopple (reversed)
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}    ;; 7: vid5
  ])

  ;; Audio analyzer
  (def music (streaming:make-audio-analyzer "dizzy.mp3"))
  ;; Audio playback
  (audio-playback "../dizzy.mp3")

  ;; === GLOBAL SCANS ===
  ;; NOTE(review): 'scan' appears to fold :step over each trigger event
  ;; (here a beat), with (bind <scan> <key>) reading the current state in
  ;; (frame) - confirm against the interpreter docs.
  ;; Cycle state: which source is active (recipe-specific)
  ;; clen = beats per source (8-24 beats = ~4-12 seconds)
  (scan cycle (streaming:audio-beat music t)
    :init {:active 0 :beat 0 :clen 16}
    :step (if (< (+ beat 1) clen)
            (dict :active active :beat (+ beat 1) :clen clen)
            (dict :active (mod (+ active 1) (len sources)) :beat 0
                  :clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))

  ;; Reusable scans from templates (require 'music' to be defined)
  (include :path "../templates/scan-oscillating-spin.sexp")
  (include :path "../templates/scan-ripple-drops.sexp")

  ;; === PER-PAIR STATE (dynamically sized based on sources) ===
  ;; Each pair has: inv-a, inv-b, hue-a, hue-b, mix, rot-angle
  (scan pairs (streaming:audio-beat music t)
    :init {:states (map (core:range (len sources)) (lambda (_)
            {:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
    :step (dict :states (map states (lambda (p)
            (let [;; Invert toggles (10% chance, lasts 1-4 beats)
                  new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
                  new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
                  ;; Hue shifts (10% chance, lasts 1-4 beats) - use countdown like invert
                  old-hue-a (get p :hue-a)
                  old-hue-b (get p :hue-b)
                  new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
                  new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
                  ;; Pick random hue value when triggering (stored separately)
                  new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
                  new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
                  ;; Mix (holds for 1-10 beats, then picks 0, 0.5, or 1)
                  mix-rem (get p :mix-rem)
                  old-mix (get p :mix)
                  new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
                  new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
                  ;; Rotation (accumulates, reverses direction when cycle completes)
                  rot-beat (get p :rot-beat)
                  rot-clen (get p :rot-clen)
                  old-angle (get p :angle)
                  ;; Note: dir comes from pair-configs, but we store rotation state here
                  new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
                  new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
                  new-angle (+ old-angle (/ 360 rot-clen))]
              (dict :inv-a new-inv-a :inv-b new-inv-b
                    :hue-a new-hue-a :hue-b new-hue-b
                    :hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
                    :mix new-mix :mix-rem new-mix-rem
                    :angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))

  ;; === FRAME PIPELINE ===
  (frame
    (let [now t
          e (streaming:audio-energy music now)
          ;; Get cycle state
          active (bind cycle :active)
          beat-pos (bind cycle :beat)
          clen (bind cycle :clen)
          ;; Transition logic: last third of cycle crossfades to next
          phase3 (* beat-pos 3)
          fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
          fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
          next-idx (mod (+ active 1) (len sources))
          ;; Get pair states array (required by process-pair macro)
          pair-states (bind pairs :states)
          ;; Process active pair using macro from template
          active-frame (process-pair active)
          ;; Crossfade with zoom during transition (using macro)
          result (if fading
                   (crossfade-zoom active-frame (process-pair next-idx) fade-amt)
                   active-frame)
          ;; Final: global spin + ripple
          spun (rotate result :angle (bind spin :angle))
          rip-gate (bind ripple-state :gate)
          rip-amp (* rip-gate (core:map-range e 0 1 5 50))]
      (ripple spun
        :amplitude rip-amp
        :center_x (bind ripple-state :cx)
        :center_y (bind ripple-state :cy)
        :frequency 8
        :decay 2
        :speed 5))))

345
l1/ipfs_client.py Normal file
View File

@@ -0,0 +1,345 @@
# art-celery/ipfs_client.py
"""
IPFS client for Art DAG L1 server.
Provides functions to add, retrieve, and pin files on IPFS.
Uses direct HTTP API calls for compatibility with all Kubo versions.
"""
import json
import logging
import os
import re
from pathlib import Path
from typing import Optional, Union

import requests
logger = logging.getLogger(__name__)
# IPFS API multiaddr - default to local, docker uses /dns/ipfs/tcp/5001
IPFS_API = os.getenv("IPFS_API", "/ip4/127.0.0.1/tcp/5001")
# Connection timeout in seconds (increased for large files)
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "120"))
# IPFS gateway URLs for fallback when local node doesn't have content
# Comma-separated list of gateway URLs (without /ipfs/ suffix)
IPFS_GATEWAYS = [g.strip() for g in os.getenv(
"IPFS_GATEWAYS",
"https://ipfs.io,https://cloudflare-ipfs.com,https://dweb.link"
).split(",") if g.strip()]
# Gateway timeout (shorter than API timeout for faster fallback)
GATEWAY_TIMEOUT = int(os.getenv("GATEWAY_TIMEOUT", "30"))
def _multiaddr_to_url(multiaddr: str) -> str:
"""Convert IPFS multiaddr to HTTP URL."""
# Handle /dns/hostname/tcp/port format
dns_match = re.match(r"/dns[46]?/([^/]+)/tcp/(\d+)", multiaddr)
if dns_match:
return f"http://{dns_match.group(1)}:{dns_match.group(2)}"
# Handle /ip4/address/tcp/port format
ip4_match = re.match(r"/ip4/([^/]+)/tcp/(\d+)", multiaddr)
if ip4_match:
return f"http://{ip4_match.group(1)}:{ip4_match.group(2)}"
# Fallback: assume it's already a URL or use default
if multiaddr.startswith("http"):
return multiaddr
return "http://127.0.0.1:5001"
# Base URL for IPFS API
IPFS_BASE_URL = _multiaddr_to_url(IPFS_API)
def add_file(file_path: Union[Path, str], pin: bool = True) -> Optional[str]:
    """
    Add a file to IPFS and optionally pin it.

    Args:
        file_path: Path to the file to add (Path object or string)
        pin: Whether to pin the file (default: True)

    Returns:
        IPFS CID (content identifier) or None on failure
    """
    try:
        # Accept either a str or a Path.
        if isinstance(file_path, str):
            file_path = Path(file_path)
        with open(file_path, "rb") as handle:
            response = requests.post(
                f"{IPFS_BASE_URL}/api/v0/add",
                params={"pin": str(pin).lower()},
                files={"file": (file_path.name, handle)},
                timeout=IPFS_TIMEOUT,
            )
        response.raise_for_status()
        cid = response.json()["Hash"]
        logger.info(f"Added to IPFS: {file_path.name} -> {cid}")
        return cid
    except Exception as e:
        # Best-effort API: callers check for None rather than catching.
        logger.error(f"Failed to add to IPFS: {e}")
        return None
def add_bytes(data: bytes, pin: bool = True) -> Optional[str]:
    """
    Add bytes data to IPFS and optionally pin it.

    Args:
        data: Bytes to add
        pin: Whether to pin the data (default: True)

    Returns:
        IPFS CID or None on failure
    """
    try:
        response = requests.post(
            f"{IPFS_BASE_URL}/api/v0/add",
            params={"pin": str(pin).lower()},
            # Kubo only needs some filename; "data" is a placeholder.
            files={"file": ("data", data)},
            timeout=IPFS_TIMEOUT,
        )
        response.raise_for_status()
        cid = response.json()["Hash"]
        logger.info(f"Added bytes to IPFS: {len(data)} bytes -> {cid}")
        return cid
    except Exception as e:
        logger.error(f"Failed to add bytes to IPFS: {e}")
        return None
def add_json(data: dict, pin: bool = True) -> Optional[str]:
    """
    Serialize dict to JSON and add to IPFS.

    The encoding is canonical (sorted keys, fixed 2-space indent), so equal
    dicts always serialize to identical bytes and therefore identical CIDs.

    Args:
        data: Dictionary to serialize and store
        pin: Whether to pin the data (default: True)

    Returns:
        IPFS CID or None on failure
    """
    # json is imported at module level (hoisted from the former
    # function-local import for consistency with the other imports).
    json_bytes = json.dumps(data, indent=2, sort_keys=True).encode('utf-8')
    return add_bytes(json_bytes, pin=pin)
def add_string(content: str, pin: bool = True) -> Optional[str]:
    """
    Add a string to IPFS and optionally pin it.

    Args:
        content: String content to add (e.g., S-expression)
        pin: Whether to pin the data (default: True)

    Returns:
        IPFS CID or None on failure
    """
    # Thin wrapper: UTF-8 encode and delegate to add_bytes.
    return add_bytes(content.encode('utf-8'), pin=pin)
def get_file(cid: str, dest_path: Union[Path, str]) -> bool:
    """
    Retrieve a file from IPFS and save to destination.

    Args:
        cid: IPFS CID to retrieve
        dest_path: Path to save the file (Path object or string)

    Returns:
        True on success, False on failure
    """
    try:
        payload = get_bytes(cid)
        if payload is None:
            # get_bytes already logged the failure (local node + gateways).
            return False
        target = Path(dest_path) if isinstance(dest_path, str) else dest_path
        # Create any missing parent directories before writing.
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_bytes(payload)
        logger.info(f"Retrieved from IPFS: {cid} -> {target}")
        return True
    except Exception as e:
        logger.error(f"Failed to get from IPFS: {e}")
        return False
def get_bytes_from_gateway(cid: str) -> Optional[bytes]:
    """
    Retrieve bytes from IPFS via public gateways (fallback).

    Tries each configured gateway in order until one succeeds.

    Args:
        cid: IPFS CID to retrieve

    Returns:
        File content as bytes or None if all gateways fail
    """
    for gateway in IPFS_GATEWAYS:
        try:
            url = f"{gateway}/ipfs/{cid}"
            logger.info(f"Trying gateway: {url}")
            # GATEWAY_TIMEOUT is deliberately shorter than IPFS_TIMEOUT so a
            # dead gateway fails fast and the next one gets a chance.
            response = requests.get(url, timeout=GATEWAY_TIMEOUT)
            response.raise_for_status()
            data = response.content
            logger.info(f"Retrieved from gateway {gateway}: {cid} ({len(data)} bytes)")
            return data
        except Exception as e:
            # Any failure (timeout, HTTP error) -> try the next gateway.
            logger.warning(f"Gateway {gateway} failed for {cid}: {e}")
            continue
    logger.error(f"All gateways failed for {cid}")
    return None
def get_bytes(cid: str, use_gateway_fallback: bool = True) -> Optional[bytes]:
    """
    Retrieve bytes data from IPFS.

    Tries local IPFS node first, then falls back to public gateways
    if configured and use_gateway_fallback is True.

    Args:
        cid: IPFS CID to retrieve
        use_gateway_fallback: If True, try public gateways on local failure

    Returns:
        File content as bytes or None on failure
    """
    # Try local IPFS node first
    try:
        url = f"{IPFS_BASE_URL}/api/v0/cat"
        params = {"arg": cid}
        response = requests.post(url, params=params, timeout=IPFS_TIMEOUT)
        response.raise_for_status()
        data = response.content
        logger.info(f"Retrieved from IPFS: {cid} ({len(data)} bytes)")
        return data
    except Exception as e:
        logger.warning(f"Local IPFS failed for {cid}: {e}")
        # The fallback must live inside this except block: the final
        # logger.error below references `e`, which Python deletes once the
        # except block exits.
        # Try gateway fallback
        if use_gateway_fallback and IPFS_GATEWAYS:
            logger.info(f"Trying gateway fallback for {cid}")
            # Gateway result (possibly None) is returned as-is; gateway
            # failures are logged inside get_bytes_from_gateway.
            return get_bytes_from_gateway(cid)
        logger.error(f"Failed to get bytes from IPFS: {e}")
        return None
def pin(cid: str) -> bool:
    """
    Pin a CID on IPFS.

    Args:
        cid: IPFS CID to pin

    Returns:
        True on success, False on failure
    """
    try:
        response = requests.post(
            f"{IPFS_BASE_URL}/api/v0/pin/add",
            params={"arg": cid},
            timeout=IPFS_TIMEOUT,
        )
        response.raise_for_status()
    except Exception as e:
        logger.error(f"Failed to pin on IPFS: {e}")
        return False
    logger.info(f"Pinned on IPFS: {cid}")
    return True
def unpin(cid: str) -> bool:
    """
    Unpin a CID on IPFS.

    Args:
        cid: IPFS CID to unpin

    Returns:
        True on success, False on failure
    """
    try:
        response = requests.post(
            f"{IPFS_BASE_URL}/api/v0/pin/rm",
            params={"arg": cid},
            timeout=IPFS_TIMEOUT,
        )
        response.raise_for_status()
    except Exception as e:
        logger.error(f"Failed to unpin on IPFS: {e}")
        return False
    logger.info(f"Unpinned on IPFS: {cid}")
    return True
def is_pinned(cid: str) -> bool:
    """
    Check if a CID is pinned on IPFS.

    Args:
        cid: IPFS CID to check

    Returns:
        True if pinned, False otherwise
    """
    try:
        response = requests.post(
            f"{IPFS_BASE_URL}/api/v0/pin/ls",
            params={"arg": cid, "type": "recursive"},
            timeout=IPFS_TIMEOUT,
        )
        if response.status_code != 200:
            return False
        # Kubo reports pins under the "Keys" mapping keyed by CID.
        pinned_keys = response.json().get("Keys", {})
        return cid in pinned_keys
    except Exception as e:
        logger.error(f"Failed to check pin status: {e}")
        return False
def is_available() -> bool:
    """
    Check if IPFS daemon is available.

    Returns:
        True if IPFS is available, False otherwise
    """
    # Short 5s timeout: this is a liveness probe, not a data transfer.
    try:
        response = requests.post(f"{IPFS_BASE_URL}/api/v0/id", timeout=5)
    except Exception:
        return False
    return response.status_code == 200
def get_node_id() -> Optional[str]:
    """
    Get this IPFS node's peer ID.

    Returns:
        Peer ID string or None on failure
    """
    try:
        response = requests.post(f"{IPFS_BASE_URL}/api/v0/id", timeout=IPFS_TIMEOUT)
        response.raise_for_status()
        node_info = response.json()
        return node_info.get("ID")
    except Exception as e:
        logger.error(f"Failed to get node ID: {e}")
        return None

477
l1/path_registry.py Normal file
View File

@@ -0,0 +1,477 @@
"""
Path Registry - Maps human-friendly paths to content-addressed IDs.
This module provides a bidirectional mapping between:
- Human-friendly paths (e.g., "effects/ascii_fx_zone.sexp")
- Content-addressed IDs (IPFS CIDs or SHA3-256 hashes)
The registry is useful for:
- Looking up effects by their friendly path name
- Resolving cids back to the original path for debugging
- Maintaining a stable naming scheme across cache updates
Storage:
- Uses the existing item_types table in the database (path column)
- Caches in Redis for fast lookups across distributed workers
The registry uses a system actor (@system@local) for global path mappings,
allowing effects to be resolved by path without requiring user context.
"""
import logging
import os
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from dataclasses import dataclass
logger = logging.getLogger(__name__)
# System actor for global path mappings (effects, recipes, analyzers)
SYSTEM_ACTOR = "@system@local"
@dataclass
class PathEntry:
    """A registered path with its content-addressed ID."""
    path: str  # Human-friendly path (relative or normalized)
    cid: str  # Content-addressed ID (IPFS CID or hash)
    content_type: str  # Type: "effect", "recipe", "analyzer", etc.
    actor_id: str = SYSTEM_ACTOR  # Owner (system actor for global mappings)
    description: Optional[str] = None  # Optional free-text description
    created_at: float = 0.0  # Unix timestamp (UTC) of registration; 0.0 = unset
class PathRegistry:
"""
Registry for mapping paths to content-addressed IDs.
Uses the existing item_types table for persistence and Redis
for fast lookups in distributed Celery workers.
"""
def __init__(self, redis_client=None):
self._redis = redis_client
self._redis_path_to_cid_key = "artdag:path_to_cid"
self._redis_cid_to_path_key = "artdag:cid_to_path"
    def _run_async(self, coro):
        """Run async coroutine from sync context.

        Two situations are handled:
        - A loop is already running in this thread (e.g. called from an
          async handler): a running loop cannot be re-entered, so the
          coroutine is driven on a brand-new loop in a helper thread.
        - No running loop: reuse (or create) this thread's default loop
          and run the coroutine to completion directly.
        """
        import asyncio
        try:
            loop = asyncio.get_running_loop()
            import threading
            # One-element lists let the worker thread hand its
            # result/exception back to the caller.
            result = [None]
            error = [None]
            def run_in_thread():
                try:
                    new_loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(new_loop)
                    try:
                        result[0] = new_loop.run_until_complete(coro)
                    finally:
                        new_loop.close()
                except Exception as e:
                    error[0] = e
            thread = threading.Thread(target=run_in_thread)
            thread.start()
            # NOTE(review): on a 30s join timeout the coroutine may still
            # be running and result stays None - no timeout error is raised.
            thread.join(timeout=30)
            if error[0]:
                # NOTE(review): if the coroutine itself raised RuntimeError,
                # this re-raise is caught by the except RuntimeError below
                # and the (already-consumed) coroutine is awaited again -
                # confirm whether that path can occur in practice.
                raise error[0]
            return result[0]
        except RuntimeError:
            # get_running_loop() raised -> no loop running in this thread.
            try:
                loop = asyncio.get_event_loop()
            except RuntimeError:
                loop = asyncio.new_event_loop()
                asyncio.set_event_loop(loop)
            return loop.run_until_complete(coro)
def _normalize_path(self, path: str) -> str:
"""Normalize a path for consistent storage."""
# Remove leading ./ or /
path = path.lstrip('./')
# Normalize separators
path = path.replace('\\', '/')
# Remove duplicate slashes
while '//' in path:
path = path.replace('//', '/')
return path
    def register(
        self,
        path: str,
        cid: str,
        content_type: str = "effect",
        actor_id: str = SYSTEM_ACTOR,
        description: Optional[str] = None,
    ) -> PathEntry:
        """
        Register a path -> cid mapping.

        Persists to the item_types table first (best effort), then updates
        the Redis cache so other distributed workers see the mapping.

        Args:
            path: Human-friendly path (e.g., "effects/ascii_fx_zone.sexp")
            cid: Content-addressed ID (IPFS CID or hash)
            content_type: Type of content ("effect", "recipe", "analyzer")
            actor_id: Owner (default: system for global mappings)
            description: Optional description

        Returns:
            The created PathEntry
        """
        norm_path = self._normalize_path(path)
        now = datetime.now(timezone.utc).timestamp()
        entry = PathEntry(
            path=norm_path,
            cid=cid,
            content_type=content_type,
            actor_id=actor_id,
            description=description,
            created_at=now,
        )
        # Store in database (item_types table); failures are logged, not raised.
        self._save_to_db(entry)
        # Update Redis cache (best effort).
        self._update_redis_cache(norm_path, cid)
        logger.info(f"Registered path '{norm_path}' -> {cid[:16]}...")
        return entry
    def _save_to_db(self, entry: PathEntry):
        """Save entry to database using item_types table.

        Best effort: failures are logged as warnings rather than raised,
        so register() can still update the Redis cache.
        """
        import database
        async def save():
            import asyncpg
            conn = await asyncpg.connect(database.DATABASE_URL)
            try:
                # Ensure a cache_items row exists first - item_types rows
                # presumably reference cache_items.cid (verify schema).
                await conn.execute(
                    "INSERT INTO cache_items (cid) VALUES ($1) ON CONFLICT DO NOTHING",
                    entry.cid
                )
                # Upsert the item_type row; an existing description is kept
                # when the new one is NULL (COALESCE).
                await conn.execute(
                    """
                    INSERT INTO item_types (cid, actor_id, type, path, description)
                    VALUES ($1, $2, $3, $4, $5)
                    ON CONFLICT (cid, actor_id, type, path) DO UPDATE SET
                        description = COALESCE(EXCLUDED.description, item_types.description)
                    """,
                    entry.cid, entry.actor_id, entry.content_type, entry.path, entry.description
                )
            finally:
                await conn.close()
        try:
            self._run_async(save())
        except Exception as e:
            logger.warning(f"Failed to save path registry to DB: {e}")
def _update_redis_cache(self, path: str, cid: str):
"""Update Redis cache with mapping."""
if self._redis:
try:
self._redis.hset(self._redis_path_to_cid_key, path, cid)
self._redis.hset(self._redis_cid_to_path_key, cid, path)
except Exception as e:
logger.warning(f"Failed to update Redis cache: {e}")
    def get_cid(self, path: str, content_type: str = None) -> Optional[str]:
        """
        Get the cid for a path.

        Args:
            path: Human-friendly path
            content_type: Optional type filter (only applied on the DB path;
                the Redis fast path is keyed by path alone)

        Returns:
            The cid, or None if not found
        """
        norm_path = self._normalize_path(path)
        # Try Redis first (fast path)
        if self._redis:
            try:
                val = self._redis.hget(self._redis_path_to_cid_key, norm_path)
                if val:
                    # redis-py may return bytes depending on decode_responses.
                    return val.decode() if isinstance(val, bytes) else val
            except Exception as e:
                logger.warning(f"Redis lookup failed: {e}")
        # Fall back to database
        return self._get_cid_from_db(norm_path, content_type)
    def _get_cid_from_db(self, path: str, content_type: str = None) -> Optional[str]:
        """Get cid from database using item_types table.

        On a hit the Redis cache is refreshed so subsequent lookups take
        the fast path.
        """
        import database
        async def get():
            import asyncpg
            conn = await asyncpg.connect(database.DATABASE_URL)
            try:
                if content_type:
                    row = await conn.fetchrow(
                        "SELECT cid FROM item_types WHERE path = $1 AND type = $2",
                        path, content_type
                    )
                else:
                    # NOTE(review): without a type filter and with no ORDER BY,
                    # an arbitrary matching row is returned if the same path is
                    # registered under multiple types - confirm intent.
                    row = await conn.fetchrow(
                        "SELECT cid FROM item_types WHERE path = $1",
                        path
                    )
                return row["cid"] if row else None
            finally:
                await conn.close()
        try:
            result = self._run_async(get())
            # Update Redis cache if found
            if result and self._redis:
                self._update_redis_cache(path, result)
            return result
        except Exception as e:
            logger.warning(f"Failed to get from DB: {e}")
            return None
def get_path(self, cid: str) -> Optional[str]:
"""
Get the path for a cid.
Args:
cid: Content-addressed ID
Returns:
The path, or None if not found
"""
# Try Redis first
if self._redis:
try:
val = self._redis.hget(self._redis_cid_to_path_key, cid)
if val:
return val.decode() if isinstance(val, bytes) else val
except Exception as e:
logger.warning(f"Redis lookup failed: {e}")
# Fall back to database
return self._get_path_from_db(cid)
    def _get_path_from_db(self, cid: str) -> Optional[str]:
        """Get path from database using item_types table.

        When the same cid is registered under several paths, the oldest
        registration wins (ORDER BY created_at ... LIMIT 1). A hit also
        refreshes the Redis cache.
        """
        import database
        async def get():
            import asyncpg
            conn = await asyncpg.connect(database.DATABASE_URL)
            try:
                row = await conn.fetchrow(
                    "SELECT path FROM item_types WHERE cid = $1 AND path IS NOT NULL ORDER BY created_at LIMIT 1",
                    cid
                )
                return row["path"] if row else None
            finally:
                await conn.close()
        try:
            result = self._run_async(get())
            # Update Redis cache if found (note argument order: path, cid).
            if result and self._redis:
                self._update_redis_cache(result, cid)
            return result
        except Exception as e:
            logger.warning(f"Failed to get from DB: {e}")
            return None
    def list_by_type(self, content_type: str, actor_id: str = None) -> List[PathEntry]:
        """
        List all entries of a given type.

        Args:
            content_type: Type to filter by ("effect", "recipe", etc.)
            actor_id: Optional actor filter (None = all, SYSTEM_ACTOR = global)

        Returns:
            List of PathEntry objects, ordered by path; empty list on DB failure.
        """
        import database
        async def list_entries():
            import asyncpg
            conn = await asyncpg.connect(database.DATABASE_URL)
            try:
                if actor_id:
                    rows = await conn.fetch(
                        """
                        SELECT cid, path, type, actor_id, description,
                               EXTRACT(EPOCH FROM created_at) as created_at
                        FROM item_types
                        WHERE type = $1 AND actor_id = $2 AND path IS NOT NULL
                        ORDER BY path
                        """,
                        content_type, actor_id
                    )
                else:
                    rows = await conn.fetch(
                        """
                        SELECT cid, path, type, actor_id, description,
                               EXTRACT(EPOCH FROM created_at) as created_at
                        FROM item_types
                        WHERE type = $1 AND path IS NOT NULL
                        ORDER BY path
                        """,
                        content_type
                    )
                # Map rows onto PathEntry; NULL created_at becomes 0.
                return [
                    PathEntry(
                        path=row["path"],
                        cid=row["cid"],
                        content_type=row["type"],
                        actor_id=row["actor_id"],
                        description=row["description"],
                        created_at=row["created_at"] or 0,
                    )
                    for row in rows
                ]
            finally:
                await conn.close()
        try:
            return self._run_async(list_entries())
        except Exception as e:
            logger.warning(f"Failed to list from DB: {e}")
            return []
    def delete(self, path: str, content_type: str = None) -> bool:
        """
        Delete a path registration.

        Args:
            path: The path to delete
            content_type: Optional type filter

        Returns:
            True if deleted, False if not found
        """
        norm_path = self._normalize_path(path)
        # Look up the cid BEFORE deleting so the Redis reverse index can be
        # cleaned up afterwards.
        cid = self.get_cid(norm_path, content_type)
        # Delete from database (authoritative store).
        deleted = self._delete_from_db(norm_path, content_type)
        # Clean up Redis (best effort).
        if deleted and cid and self._redis:
            try:
                self._redis.hdel(self._redis_path_to_cid_key, norm_path)
                self._redis.hdel(self._redis_cid_to_path_key, cid)
            except Exception as e:
                logger.warning(f"Failed to clean up Redis: {e}")
        return deleted
def _delete_from_db(self, path: str, content_type: str = None) -> bool:
"""Delete from database."""
import database
async def delete():
import asyncpg
conn = await asyncpg.connect(database.DATABASE_URL)
try:
if content_type:
result = await conn.execute(
"DELETE FROM item_types WHERE path = $1 AND type = $2",
path, content_type
)
else:
result = await conn.execute(
"DELETE FROM item_types WHERE path = $1",
path
)
return "DELETE" in result
finally:
await conn.close()
try:
return self._run_async(delete())
except Exception as e:
logger.warning(f"Failed to delete from DB: {e}")
return False
    def register_effect(
        self,
        path: str,
        cid: str,
        description: Optional[str] = None,
    ) -> PathEntry:
        """
        Convenience method to register an effect.

        Equivalent to register() with content_type="effect" and the global
        system actor as owner.

        Args:
            path: Effect path (e.g., "effects/ascii_fx_zone.sexp")
            cid: IPFS CID of the effect file
            description: Optional description

        Returns:
            The created PathEntry
        """
        return self.register(
            path=path,
            cid=cid,
            content_type="effect",
            actor_id=SYSTEM_ACTOR,
            description=description,
        )

    def get_effect_cid(self, path: str) -> Optional[str]:
        """
        Get CID for an effect by path.

        Args:
            path: Effect path

        Returns:
            IPFS CID or None
        """
        # Thin wrapper over get_cid() with the "effect" type filter.
        return self.get_cid(path, content_type="effect")

    def list_effects(self) -> List[PathEntry]:
        """List all registered effects (global/system-owned only)."""
        return self.list_by_type("effect", actor_id=SYSTEM_ACTOR)
# Singleton instance
_registry: Optional[PathRegistry] = None
def get_path_registry() -> PathRegistry:
    """Get the singleton path registry instance.

    The Redis client is built lazily from REDIS_URL on first call.

    Returns:
        The process-wide PathRegistry singleton.
    """
    global _registry
    if _registry is None:
        import redis

        redis_url = os.environ.get('REDIS_URL', 'redis://localhost:6379/5')
        # Redis.from_url honours every URL component (host, port, db and any
        # username/password/TLS options). The previous manual urlparse-based
        # construction silently dropped credentials from the URL.
        redis_client = redis.Redis.from_url(
            redis_url,
            socket_timeout=5,
            socket_connect_timeout=5,
        )
        _registry = PathRegistry(redis_client=redis_client)
    return _registry
def reset_path_registry():
    """Reset the singleton (for testing).

    The next get_path_registry() call rebuilds the registry and its Redis
    client from the current environment.
    """
    global _registry
    _registry = None

51
l1/pyproject.toml Normal file
View File

@@ -0,0 +1,51 @@
[project]
name = "art-celery"
version = "0.1.0"
description = "Art DAG L1 Server and Celery Workers"
requires-python = ">=3.11"
[tool.mypy]
python_version = "3.11"
warn_return_any = true
warn_unused_ignores = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
strict_optional = true
no_implicit_optional = true
# Start strict on new code, gradually enable for existing
files = [
"app/types.py",
"app/routers/recipes.py",
"tests/",
]
# Ignore missing imports for third-party packages without stubs
[[tool.mypy.overrides]]
module = [
"celery.*",
"redis.*",
"artdag.*",
"artdag_common.*",
"ipfs_client.*",
]
ignore_missing_imports = true
[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py"]
python_functions = ["test_*"]
asyncio_mode = "auto"
addopts = "-v --tb=short"
filterwarnings = [
"ignore::DeprecationWarning",
]
[tool.ruff]
line-length = 100
target-version = "py311"
[tool.ruff.lint]
select = ["E", "F", "I", "UP"]
ignore = ["E501"] # Line length handled separately

View File

@@ -0,0 +1,223 @@
;; Woods Recipe - OPTIMIZED VERSION
;;
;; Uses fused-pipeline for GPU acceleration when available,
;; falls back to individual primitives on CPU.
;;
;; Key optimizations:
;; 1. Uses streaming_gpu primitives with fast CUDA kernels
;; 2. Uses fused-pipeline to batch effects into single kernel passes
;; 3. GPU persistence - frames stay on GPU throughout pipeline
;;
;; Low-resolution variant: renders 640x360 @ 30fps and resizes each
;; source inside the fused pipeline before applying effects.
(stream "woods-lowres"
:fps 30
:width 640
:height 360
:seed 42
;; Load standard primitives (includes proper asset resolution)
;; Auto-selects GPU versions when available, falls back to CPU
(include :name "tpl-standard-primitives")
;; === SOURCES (using streaming: which has proper asset resolution) ===
(def sources [
(streaming:make-video-source "woods-1" 30)
(streaming:make-video-source "woods-2" 30)
(streaming:make-video-source "woods-3" 30)
(streaming:make-video-source "woods-4" 30)
(streaming:make-video-source "woods-5" 30)
(streaming:make-video-source "woods-6" 30)
(streaming:make-video-source "woods-7" 30)
(streaming:make-video-source "woods-8" 30)
])
;; Per-pair config
;; :dir = rotation direction, :rot-a/:rot-b = per-source rotation scale,
;; :zoom-a/:zoom-b = max energy-driven zoom for each source of the pair.
(def pair-configs [
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
])
;; Audio
(def music (streaming:make-audio-analyzer "woods-audio"))
(audio-playback "woods-audio")
;; === SCANS ===
;; Cycle state
;; :active = index of the current source pair, :beat = beats elapsed in
;; the current cycle, :clen = cycle length in beats (re-rolled per cycle).
(scan cycle (streaming:audio-beat music t)
:init {:active 0 :beat 0 :clen 16}
:step (if (< (+ beat 1) clen)
(dict :active active :beat (+ beat 1) :clen clen)
(dict :active (mod (+ active 1) (len sources)) :beat 0
:clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))
;; Spin scan
;; Direction flips with 5% probability per beat; speed re-rolls with 10%.
(scan spin (streaming:audio-beat music t)
:init {:angle 0 :dir 1 :speed 2}
:step (let [new-dir (if (< (core:rand) 0.05) (* dir -1) dir)
new-speed (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) speed)]
(dict :angle (+ angle (* new-dir new-speed))
:dir new-dir
:speed new-speed)))
;; Ripple scan - raindrop style, all params randomized
;; Higher freq = bigger gaps between waves (formula is dist/freq)
;; :gate counts down each beat; a re-trigger (20% chance) re-rolls the
;; ripple centre, frequency, decay and amplitude multiplier.
(scan ripple-state (streaming:audio-beat music t)
:init {:gate 0 :cx 320 :cy 180 :freq 20 :decay 6 :amp-mult 1.0}
:step (let [new-gate (if (< (core:rand) 0.2) (+ 2 (core:rand-int 0 4)) (core:max 0 (- gate 1)))
triggered (> new-gate gate)
new-cx (if triggered (core:rand-int 50 590) cx)
new-cy (if triggered (core:rand-int 50 310) cy)
new-freq (if triggered (+ 15 (core:rand-int 0 20)) freq)
new-decay (if triggered (+ 5 (core:rand-int 0 4)) decay)
new-amp-mult (if triggered (+ 0.8 (* (core:rand) 1.2)) amp-mult)]
(dict :gate new-gate :cx new-cx :cy new-cy :freq new-freq :decay new-decay :amp-mult new-amp-mult)))
;; Pair states
;; Per-pair beat-countdown gates for invert/hue, a blend mix that re-rolls
;; when :mix-rem expires, and a rotation angle advanced by 360/rot-clen.
(scan pairs (streaming:audio-beat music t)
:init {:states (map (core:range (len sources)) (lambda (_)
{:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
:step (dict :states (map states (lambda (p)
(let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
old-hue-a (get p :hue-a)
old-hue-b (get p :hue-b)
new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
mix-rem (get p :mix-rem)
old-mix (get p :mix)
new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
rot-beat (get p :rot-beat)
rot-clen (get p :rot-clen)
old-angle (get p :angle)
new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
new-angle (+ old-angle (/ 360 rot-clen))]
(dict :inv-a new-inv-a :inv-b new-inv-b
:hue-a new-hue-a :hue-b new-hue-b
:hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
:mix new-mix :mix-rem new-mix-rem
:angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))
;; === OPTIMIZED PROCESS-PAIR MACRO ===
;; Uses fused-pipeline to batch rotate+hue+invert into single kernel
;; NOTE(review): `e` (audio energy) is a free variable in this macro; it
;; is only bound where the macro expands inside the frame let below --
;; confirm the macro system intends this capture.
(defmacro process-pair-fast (idx)
(let [;; Get sources for this pair (with safe modulo indexing)
num-sources (len sources)
src-a (nth sources (mod (* idx 2) num-sources))
src-b (nth sources (mod (+ (* idx 2) 1) num-sources))
cfg (nth pair-configs idx)
pstate (nth (bind pairs :states) idx)
;; Read frames (GPU decode, stays on GPU)
frame-a (streaming:source-read src-a t)
frame-b (streaming:source-read src-b t)
;; Get state values
dir (get cfg :dir)
rot-max-a (get cfg :rot-a)
rot-max-b (get cfg :rot-b)
zoom-max-a (get cfg :zoom-a)
zoom-max-b (get cfg :zoom-b)
pair-angle (get pstate :angle)
inv-a-on (> (get pstate :inv-a) 0)
inv-b-on (> (get pstate :inv-b) 0)
hue-a-on (> (get pstate :hue-a) 0)
hue-b-on (> (get pstate :hue-b) 0)
hue-a-val (get pstate :hue-a-val)
hue-b-val (get pstate :hue-b-val)
mix-ratio (get pstate :mix)
;; Calculate rotation angles
angle-a (* dir pair-angle rot-max-a 0.01)
angle-b (* dir pair-angle rot-max-b 0.01)
;; Energy-driven zoom (maps audio energy 0-1 to 1-max)
zoom-a (core:map-range e 0 1 1 zoom-max-a)
zoom-b (core:map-range e 0 1 1 zoom-max-b)
;; Define effect pipelines for each source
;; These get compiled to single CUDA kernels!
;; First resize to target resolution, then apply effects
effects-a [{:op "resize" :width 640 :height 360}
{:op "zoom" :amount zoom-a}
{:op "rotate" :angle angle-a}
{:op "hue_shift" :degrees (if hue-a-on hue-a-val 0)}
{:op "invert" :amount (if inv-a-on 1 0)}]
effects-b [{:op "resize" :width 640 :height 360}
{:op "zoom" :amount zoom-b}
{:op "rotate" :angle angle-b}
{:op "hue_shift" :degrees (if hue-b-on hue-b-val 0)}
{:op "invert" :amount (if inv-b-on 1 0)}]
;; Apply fused pipelines (single kernel per source!)
processed-a (streaming:fused-pipeline frame-a effects-a)
processed-b (streaming:fused-pipeline frame-b effects-b)]
;; Blend the two processed frames
(blending:blend-images processed-a processed-b mix-ratio)))
;; === FRAME PIPELINE ===
(frame
(let [now t
e (streaming:audio-energy music now)
;; Get cycle state
active (bind cycle :active)
beat-pos (bind cycle :beat)
clen (bind cycle :clen)
;; Transition logic
;; `fading` holds for the last third of the cycle: beat*3 in [2*clen, 3*clen).
phase3 (* beat-pos 3)
fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
next-idx (mod (+ active 1) (len sources))
;; Process active pair with fused pipeline
active-frame (process-pair-fast active)
;; Crossfade with zoom during transition
;; Old pair: zooms out (1.0 -> 2.0) and fades out
;; New pair: starts small (0.1), zooms in (-> 1.0) and fades in
result (if fading
(let [next-frame (process-pair-fast next-idx)
;; Active zooms out as it fades
active-zoom (+ 1.0 fade-amt)
active-zoomed (streaming:fused-pipeline active-frame
[{:op "zoom" :amount active-zoom}])
;; Next starts small and zooms in
next-zoom (+ 0.1 (* fade-amt 0.9))
next-zoomed (streaming:fused-pipeline next-frame
[{:op "zoom" :amount next-zoom}])]
(blending:blend-images active-zoomed next-zoomed fade-amt))
active-frame)
;; Final effects pipeline (fused!)
spin-angle (bind spin :angle)
;; Ripple params - all randomized per ripple trigger
rip-gate (bind ripple-state :gate)
rip-amp-mult (bind ripple-state :amp-mult)
rip-amp (* rip-gate rip-amp-mult (core:map-range e 0 1 50 200))
rip-cx (bind ripple-state :cx)
rip-cy (bind ripple-state :cy)
rip-freq (bind ripple-state :freq)
rip-decay (bind ripple-state :decay)
;; Fused final effects
final-effects [{:op "rotate" :angle spin-angle}
{:op "ripple" :amplitude rip-amp :frequency rip-freq :decay rip-decay
:phase (* now 5) :center_x rip-cx :center_y rip-cy}]]
;; Apply final fused pipeline
;; NOTE(review): the rotate/ripple values appear both inside
;; final-effects and as keyword args here -- confirm fused-pipeline
;; actually expects both forms.
(streaming:fused-pipeline result final-effects
:rotate_angle spin-angle
:ripple_phase (* now 5)
:ripple_amplitude rip-amp))))

View File

@@ -0,0 +1,211 @@
;; Woods Recipe - OPTIMIZED VERSION
;;
;; Uses fused-pipeline for GPU acceleration when available,
;; falls back to individual primitives on CPU.
;;
;; Key optimizations:
;; 1. Uses streaming_gpu primitives with fast CUDA kernels
;; 2. Uses fused-pipeline to batch effects into single kernel passes
;; 3. GPU persistence - frames stay on GPU throughout pipeline
;;
;; Full-resolution variant (1920x1080 @ 30fps). The "woods-lowres"
;; sibling recipe adds a resize step and richer ripple randomisation.
(stream "woods-recipe-optimized"
:fps 30
:width 1920
:height 1080
:seed 42
;; Load standard primitives (includes proper asset resolution)
;; Auto-selects GPU versions when available, falls back to CPU
(include :name "tpl-standard-primitives")
;; === SOURCES (using streaming: which has proper asset resolution) ===
(def sources [
(streaming:make-video-source "woods-1" 30)
(streaming:make-video-source "woods-2" 30)
(streaming:make-video-source "woods-3" 30)
(streaming:make-video-source "woods-4" 30)
(streaming:make-video-source "woods-5" 30)
(streaming:make-video-source "woods-6" 30)
(streaming:make-video-source "woods-7" 30)
(streaming:make-video-source "woods-8" 30)
])
;; Per-pair config
;; :dir = rotation direction, :rot-a/:rot-b = per-source rotation scale,
;; :zoom-a/:zoom-b = max energy-driven zoom for each source of the pair.
(def pair-configs [
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
])
;; Audio
(def music (streaming:make-audio-analyzer "woods-audio"))
(audio-playback "woods-audio")
;; === SCANS ===
;; Cycle state
;; :active = index of the current source pair, :beat = beats elapsed in
;; the current cycle, :clen = cycle length in beats (re-rolled per cycle).
(scan cycle (streaming:audio-beat music t)
:init {:active 0 :beat 0 :clen 16}
:step (if (< (+ beat 1) clen)
(dict :active active :beat (+ beat 1) :clen clen)
(dict :active (mod (+ active 1) (len sources)) :beat 0
:clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))
;; Spin scan
;; Direction flips with 5% probability per beat; speed re-rolls with 10%.
(scan spin (streaming:audio-beat music t)
:init {:angle 0 :dir 1 :speed 2}
:step (let [new-dir (if (< (core:rand) 0.05) (* dir -1) dir)
new-speed (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) speed)]
(dict :angle (+ angle (* new-dir new-speed))
:dir new-dir
:speed new-speed)))
;; Ripple scan
;; :gate counts down each beat; a re-trigger (15% chance) re-rolls the
;; ripple centre within a 200px margin of the 1920x1080 frame.
(scan ripple-state (streaming:audio-beat music t)
:init {:gate 0 :cx 960 :cy 540}
:step (let [new-gate (if (< (core:rand) 0.15) (+ 3 (core:rand-int 0 5)) (core:max 0 (- gate 1)))
new-cx (if (> new-gate gate) (+ 200 (core:rand-int 0 1520)) cx)
new-cy (if (> new-gate gate) (+ 200 (core:rand-int 0 680)) cy)]
(dict :gate new-gate :cx new-cx :cy new-cy)))
;; Pair states
;; Per-pair beat-countdown gates for invert/hue, a blend mix that re-rolls
;; when :mix-rem expires, and a rotation angle advanced by 360/rot-clen.
(scan pairs (streaming:audio-beat music t)
:init {:states (map (core:range (len sources)) (lambda (_)
{:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
:step (dict :states (map states (lambda (p)
(let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
old-hue-a (get p :hue-a)
old-hue-b (get p :hue-b)
new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
mix-rem (get p :mix-rem)
old-mix (get p :mix)
new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
rot-beat (get p :rot-beat)
rot-clen (get p :rot-clen)
old-angle (get p :angle)
new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
new-angle (+ old-angle (/ 360 rot-clen))]
(dict :inv-a new-inv-a :inv-b new-inv-b
:hue-a new-hue-a :hue-b new-hue-b
:hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
:mix new-mix :mix-rem new-mix-rem
:angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))
;; === OPTIMIZED PROCESS-PAIR MACRO ===
;; Uses fused-pipeline to batch rotate+hue+invert into single kernel
;; NOTE(review): `e` (audio energy) is a free variable in this macro; it
;; is only bound where the macro expands inside the frame let below --
;; confirm the macro system intends this capture.
(defmacro process-pair-fast (idx)
(let [;; Get sources for this pair (with safe modulo indexing)
num-sources (len sources)
src-a (nth sources (mod (* idx 2) num-sources))
src-b (nth sources (mod (+ (* idx 2) 1) num-sources))
cfg (nth pair-configs idx)
pstate (nth (bind pairs :states) idx)
;; Read frames (GPU decode, stays on GPU)
frame-a (streaming:source-read src-a t)
frame-b (streaming:source-read src-b t)
;; Get state values
dir (get cfg :dir)
rot-max-a (get cfg :rot-a)
rot-max-b (get cfg :rot-b)
zoom-max-a (get cfg :zoom-a)
zoom-max-b (get cfg :zoom-b)
pair-angle (get pstate :angle)
inv-a-on (> (get pstate :inv-a) 0)
inv-b-on (> (get pstate :inv-b) 0)
hue-a-on (> (get pstate :hue-a) 0)
hue-b-on (> (get pstate :hue-b) 0)
hue-a-val (get pstate :hue-a-val)
hue-b-val (get pstate :hue-b-val)
mix-ratio (get pstate :mix)
;; Calculate rotation angles
angle-a (* dir pair-angle rot-max-a 0.01)
angle-b (* dir pair-angle rot-max-b 0.01)
;; Energy-driven zoom (maps audio energy 0-1 to 1-max)
zoom-a (core:map-range e 0 1 1 zoom-max-a)
zoom-b (core:map-range e 0 1 1 zoom-max-b)
;; Define effect pipelines for each source
;; These get compiled to single CUDA kernels!
effects-a [{:op "zoom" :amount zoom-a}
{:op "rotate" :angle angle-a}
{:op "hue_shift" :degrees (if hue-a-on hue-a-val 0)}
{:op "invert" :amount (if inv-a-on 1 0)}]
effects-b [{:op "zoom" :amount zoom-b}
{:op "rotate" :angle angle-b}
{:op "hue_shift" :degrees (if hue-b-on hue-b-val 0)}
{:op "invert" :amount (if inv-b-on 1 0)}]
;; Apply fused pipelines (single kernel per source!)
processed-a (streaming:fused-pipeline frame-a effects-a)
processed-b (streaming:fused-pipeline frame-b effects-b)]
;; Blend the two processed frames
(blending:blend-images processed-a processed-b mix-ratio)))
;; === FRAME PIPELINE ===
(frame
(let [now t
e (streaming:audio-energy music now)
;; Get cycle state
active (bind cycle :active)
beat-pos (bind cycle :beat)
clen (bind cycle :clen)
;; Transition logic
;; `fading` holds for the last third of the cycle: beat*3 in [2*clen, 3*clen).
phase3 (* beat-pos 3)
fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
next-idx (mod (+ active 1) (len sources))
;; Process active pair with fused pipeline
active-frame (process-pair-fast active)
;; Crossfade with zoom during transition
;; Old pair: zooms out (1.0 -> 2.0) and fades out
;; New pair: starts small (0.1), zooms in (-> 1.0) and fades in
result (if fading
(let [next-frame (process-pair-fast next-idx)
;; Active zooms out as it fades
active-zoom (+ 1.0 fade-amt)
active-zoomed (streaming:fused-pipeline active-frame
[{:op "zoom" :amount active-zoom}])
;; Next starts small and zooms in
next-zoom (+ 0.1 (* fade-amt 0.9))
next-zoomed (streaming:fused-pipeline next-frame
[{:op "zoom" :amount next-zoom}])]
(blending:blend-images active-zoomed next-zoomed fade-amt))
active-frame)
;; Final effects pipeline (fused!)
spin-angle (bind spin :angle)
rip-gate (bind ripple-state :gate)
rip-amp (* rip-gate (core:map-range e 0 1 5 50))
rip-cx (bind ripple-state :cx)
rip-cy (bind ripple-state :cy)
;; Fused final effects
final-effects [{:op "rotate" :angle spin-angle}
{:op "ripple" :amplitude rip-amp :frequency 8 :decay 2
:phase (* now 5) :center_x rip-cx :center_y rip-cy}]]
;; Apply final fused pipeline
;; NOTE(review): the rotate/ripple values appear both inside
;; final-effects and as keyword args here -- confirm fused-pipeline
;; actually expects both forms.
(streaming:fused-pipeline result final-effects
:rotate_angle spin-angle
:ripple_phase (* now 5)
:ripple_amplitude rip-amp))))

View File

@@ -0,0 +1,134 @@
;; Woods Recipe - Using friendly names for all assets
;;
;; Requires uploaded:
;; - Media: woods-1 through woods-8 (videos), woods-audio (audio)
;; - Effects: fx-rotate, fx-zoom, fx-blend, fx-ripple, fx-invert, fx-hue-shift
;; - Templates: tpl-standard-primitives, tpl-standard-effects, tpl-process-pair,
;; tpl-crossfade-zoom, tpl-scan-spin, tpl-scan-ripple
;;
;; Template-based variant: pair processing, crossfade and the spin/ripple
;; scans come from the included templates rather than being inlined.
(stream "woods-recipe"
:fps 30
:width 1920
:height 1080
:seed 42
;; Load standard primitives and effects via friendly names
(include :name "tpl-standard-primitives")
(include :name "tpl-standard-effects")
;; Load reusable templates
(include :name "tpl-process-pair")
(include :name "tpl-crossfade-zoom")
;; === SOURCES AS ARRAY (using friendly names) ===
(def sources [
(streaming:make-video-source "woods-1" 30)
(streaming:make-video-source "woods-2" 30)
(streaming:make-video-source "woods-3" 30)
(streaming:make-video-source "woods-4" 30)
(streaming:make-video-source "woods-5" 30)
(streaming:make-video-source "woods-6" 30)
(streaming:make-video-source "woods-7" 30)
(streaming:make-video-source "woods-8" 30)
])
;; Per-pair config: [rot-dir, rot-a-max, rot-b-max, zoom-a-max, zoom-b-max]
(def pair-configs [
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
])
;; Audio analyzer (using friendly name)
(def music (streaming:make-audio-analyzer "woods-audio"))
;; Audio playback (friendly name resolved by streaming primitives)
(audio-playback "woods-audio")
;; === GLOBAL SCANS ===
;; Cycle state: which source is active
;; :active = current pair index, :beat = beats elapsed, :clen = cycle length.
(scan cycle (streaming:audio-beat music t)
:init {:active 0 :beat 0 :clen 16}
:step (if (< (+ beat 1) clen)
(dict :active active :beat (+ beat 1) :clen clen)
(dict :active (mod (+ active 1) (len sources)) :beat 0
:clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))
;; Reusable scans from templates
;; These provide the `spin` and `ripple-state` scans bound in the frame below.
(include :name "tpl-scan-spin")
(include :name "tpl-scan-ripple")
;; === PER-PAIR STATE ===
;; Per-pair beat-countdown gates for invert/hue, a blend mix that re-rolls
;; when :mix-rem expires, and a rotation angle advanced by 360/rot-clen.
(scan pairs (streaming:audio-beat music t)
:init {:states (map (core:range (len sources)) (lambda (_)
{:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
:step (dict :states (map states (lambda (p)
(let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
old-hue-a (get p :hue-a)
old-hue-b (get p :hue-b)
new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
mix-rem (get p :mix-rem)
old-mix (get p :mix)
new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
rot-beat (get p :rot-beat)
rot-clen (get p :rot-clen)
old-angle (get p :angle)
new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
new-angle (+ old-angle (/ 360 rot-clen))]
(dict :inv-a new-inv-a :inv-b new-inv-b
:hue-a new-hue-a :hue-b new-hue-b
:hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
:mix new-mix :mix-rem new-mix-rem
:angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))
;; === FRAME PIPELINE ===
(frame
(let [now t
e (streaming:audio-energy music now)
;; Get cycle state
active (bind cycle :active)
beat-pos (bind cycle :beat)
clen (bind cycle :clen)
;; Transition logic
;; `fading` holds for the last third of the cycle: beat*3 in [2*clen, 3*clen).
phase3 (* beat-pos 3)
fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
next-idx (mod (+ active 1) (len sources))
;; Get pair states array
;; NOTE(review): pair-states is not referenced later in this let --
;; presumably the tpl-process-pair macro captures it; confirm before
;; removing.
pair-states (bind pairs :states)
;; Process active pair using macro from template
active-frame (process-pair active)
;; Crossfade with zoom during transition
result (if fading
(crossfade-zoom active-frame (process-pair next-idx) fade-amt)
active-frame)
;; Final: global spin + ripple
spun (rotate result :angle (bind spin :angle))
rip-gate (bind ripple-state :gate)
rip-amp (* rip-gate (core:map-range e 0 1 5 50))]
(ripple spun
:amplitude rip-amp
:center_x (bind ripple-state :cx)
:center_y (bind ripple-state :cy)
:frequency 8
:decay 2
:speed 5))))

16
l1/requirements-dev.txt Normal file
View File

@@ -0,0 +1,16 @@
# Development dependencies
-r requirements.txt
# Type checking
mypy>=1.8.0
types-requests>=2.31.0
types-PyYAML>=6.0.0
typing_extensions>=4.9.0
# Testing
pytest>=8.0.0
pytest-asyncio>=0.23.0
pytest-cov>=4.1.0
# Linting
ruff>=0.2.0

21
l1/requirements.txt Normal file
View File

@@ -0,0 +1,21 @@
celery[redis]>=5.3.0
redis>=5.0.0
requests>=2.31.0
httpx>=0.27.0
itsdangerous>=2.0
cryptography>=41.0
fastapi>=0.109.0
uvicorn>=0.27.0
python-multipart>=0.0.6
PyYAML>=6.0
asyncpg>=0.29.0
markdown>=3.5.0
# Common effect dependencies (used by uploaded effects)
numpy>=1.24.0
opencv-python-headless>=4.8.0
# Core artdag from GitHub (tracks main branch)
git+https://github.com/gilesbradshaw/art-dag.git@main
# Shared components (tracks master branch)
git+https://git.rose-ash.com/art-dag/common.git@master
psycopg2-binary
nest_asyncio

View File

@@ -0,0 +1,77 @@
#!/bin/bash
# Cloud-init startup script for GPU droplet (RTX 6000 Ada, etc.)
# Paste this into DigitalOcean "User data" field when creating droplet
set -e
export DEBIAN_FRONTEND=noninteractive
# All output goes to a log file so the unattended run can be inspected later.
exec > /var/log/artdag-setup.log 2>&1

echo "=== ArtDAG GPU Setup Started $(date) ==="

# Update system (non-interactive, keep existing configs)
apt-get update
apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" upgrade

# Install essentials
apt-get install -y \
    python3 python3-venv python3-pip \
    git curl wget \
    ffmpeg \
    vulkan-tools \
    build-essential

# Create venv
VENV_DIR="/opt/artdag-gpu"
python3 -m venv "$VENV_DIR"
source "$VENV_DIR/bin/activate"

# Install Python packages
pip install --upgrade pip
# "celery[redis]" is quoted so the shell cannot glob-expand the brackets
# (unquoted pkg[extra] breaks if a matching file exists, and always breaks
# under shells with strict globbing).
pip install \
    numpy \
    opencv-python-headless \
    wgpu \
    httpx \
    pyyaml \
    "celery[redis]" \
    fastapi \
    uvicorn \
    asyncpg

# Create code directory
mkdir -p "$VENV_DIR/celery/sexp_effects/effects"
mkdir -p "$VENV_DIR/celery/sexp_effects/primitive_libs"
mkdir -p "$VENV_DIR/celery/streaming"

# Add SSH key for easier access (optional - add your key here)
# echo "ssh-ed25519 AAAA... your-key" >> /root/.ssh/authorized_keys

# Test GPU
echo "=== GPU Info ==="
nvidia-smi || echo "nvidia-smi not available yet"
echo "=== NVENC Check ==="
ffmpeg -encoders 2>/dev/null | grep -E "nvenc|cuda" || echo "NVENC not detected"
echo "=== wgpu Check ==="
"$VENV_DIR/bin/python3" -c "
import wgpu
try:
    adapter = wgpu.gpu.request_adapter_sync(power_preference='high-performance')
    print(f'GPU: {adapter.info}')
except Exception as e:
    print(f'wgpu error: {e}')
" || echo "wgpu test failed"

# Add environment setup.
# Overwrite (>) instead of append (>>): user-data scripts can be re-run on
# some images, and appending would duplicate these lines on every run.
cat > /etc/profile.d/artdag-gpu.sh << 'ENVEOF'
export WGPU_BACKEND_TYPE=Vulkan
export PATH="/opt/artdag-gpu/bin:$PATH"
ENVEOF

# Mark setup complete
touch /opt/artdag-gpu/.setup-complete
echo "=== Setup Complete $(date) ==="
echo "Venv: /opt/artdag-gpu"
echo "Activate: source /opt/artdag-gpu/bin/activate"
echo "Vulkan: export WGPU_BACKEND_TYPE=Vulkan"
51
l1/scripts/deploy-to-gpu.sh Executable file
View File

@@ -0,0 +1,51 @@
#!/bin/bash
# Deploy art-dag GPU code to a remote droplet
# Usage: ./deploy-to-gpu.sh <droplet-ip>
#
# Copies the sexp compiler/interpreter, the streaming backend and the
# effect/primitive files to /opt/artdag-gpu/celery on the droplet, then
# runs a remote smoke-test compile. Requires root SSH access; the droplet
# must already have the venv at /opt/artdag-gpu (see setup script).
set -e
if [ -z "$1" ]; then
echo "Usage: $0 <droplet-ip>"
echo "Example: $0 159.223.7.100"
exit 1
fi
DROPLET_IP="$1"
REMOTE_DIR="/opt/artdag-gpu/celery"
# This script lives in scripts/, so the repo root is one level up.
LOCAL_DIR="$(dirname "$0")/.."
echo "=== Deploying to $DROPLET_IP ==="
# Create remote directory
echo "[1/4] Creating remote directory..."
ssh "root@$DROPLET_IP" "mkdir -p $REMOTE_DIR/sexp_effects $REMOTE_DIR/streaming $REMOTE_DIR/scripts"
# Copy core files
echo "[2/4] Copying core files..."
scp "$LOCAL_DIR/sexp_effects/wgsl_compiler.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
scp "$LOCAL_DIR/sexp_effects/parser.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
scp "$LOCAL_DIR/sexp_effects/interpreter.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
scp "$LOCAL_DIR/sexp_effects/__init__.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
scp "$LOCAL_DIR/streaming/backends.py" "root@$DROPLET_IP:$REMOTE_DIR/streaming/"
# Copy effects
echo "[3/4] Copying effects..."
ssh "root@$DROPLET_IP" "mkdir -p $REMOTE_DIR/sexp_effects/effects $REMOTE_DIR/sexp_effects/primitive_libs"
# `|| true`: a checkout with no *.sexp / *.py files here is not an error.
scp -r "$LOCAL_DIR/sexp_effects/effects/"*.sexp "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/effects/" 2>/dev/null || true
scp -r "$LOCAL_DIR/sexp_effects/primitive_libs/"*.py "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/primitive_libs/" 2>/dev/null || true
# Test
echo "[4/4] Testing deployment..."
# Remote smoke test: compile one effect file with the droplet's venv python.
ssh "root@$DROPLET_IP" "cd $REMOTE_DIR && /opt/artdag-gpu/bin/python3 -c '
import sys
sys.path.insert(0, \".\")
from sexp_effects.wgsl_compiler import compile_effect_file
result = compile_effect_file(\"sexp_effects/effects/invert.sexp\")
print(f\"Compiled effect: {result.name}\")
print(\"Deployment OK\")
'" || echo "Test failed - may need to run setup script first"
echo ""
echo "=== Deployment complete ==="
echo "SSH: ssh root@$DROPLET_IP"
echo "Test: ssh root@$DROPLET_IP 'cd $REMOTE_DIR && /opt/artdag-gpu/bin/python3 -c \"from streaming.backends import get_backend; b=get_backend(\\\"wgpu\\\"); print(b)\"'"

34
l1/scripts/gpu-dev-deploy.sh Executable file
View File

@@ -0,0 +1,34 @@
#!/bin/bash
# Quick deploy to GPU node with hot reload
# Usage: ./scripts/gpu-dev-deploy.sh
set -e
GPU_HOST="${GPU_HOST:-root@138.197.163.123}"
REMOTE_DIR="/root/art-dag/celery"
echo "=== GPU Dev Deploy ==="
echo "Syncing code to $GPU_HOST..."
# Sync code (excluding cache, git, __pycache__)
rsync -avz --delete \
--exclude '.git' \
--exclude '__pycache__' \
--exclude '*.pyc' \
--exclude '.pytest_cache' \
--exclude 'node_modules' \
--exclude '.env' \
./ "$GPU_HOST:$REMOTE_DIR/"
echo "Restarting GPU worker..."
ssh "$GPU_HOST" "docker kill \$(docker ps -q -f name=l1-gpu-worker) 2>/dev/null || true"
echo "Waiting for new container..."
sleep 10
# Show new container logs
ssh "$GPU_HOST" "docker logs --tail 30 \$(docker ps -q -f name=l1-gpu-worker)"
echo ""
echo "=== Deploy Complete ==="
echo "Use 'ssh $GPU_HOST docker logs -f \$(docker ps -q -f name=l1-gpu-worker)' to follow logs"

108
l1/scripts/setup-gpu-droplet.sh Executable file
View File

@@ -0,0 +1,108 @@
#!/bin/bash
# Setup script for GPU droplet with NVENC support
# Run as root on a fresh Ubuntu droplet with NVIDIA GPU
set -e
echo "=== ArtDAG GPU Droplet Setup ==="
# 1. System updates
echo "[1/7] Updating system..."
apt-get update
apt-get upgrade -y
# 2. Install NVIDIA drivers (if not already installed)
echo "[2/7] Checking NVIDIA drivers..."
if ! command -v nvidia-smi &> /dev/null; then
echo "Installing NVIDIA drivers..."
apt-get install -y nvidia-driver-535 nvidia-utils-535
echo "NVIDIA drivers installed. Reboot required."
echo "After reboot, run this script again."
exit 0
fi
nvidia-smi
echo "NVIDIA drivers OK"
# 3. Install FFmpeg with NVENC support
echo "[3/7] Installing FFmpeg with NVENC..."
apt-get install -y ffmpeg
# Verify NVENC
if ffmpeg -encoders 2>/dev/null | grep -q nvenc; then
echo "NVENC available:"
ffmpeg -encoders 2>/dev/null | grep nvenc
else
echo "WARNING: NVENC not available. GPU may not support hardware encoding."
fi
# 4. Install Python and create venv
echo "[4/7] Setting up Python environment..."
apt-get install -y python3 python3-venv python3-pip git
VENV_DIR="/opt/artdag-gpu"
python3 -m venv "$VENV_DIR"
source "$VENV_DIR/bin/activate"
# 5. Install Python dependencies
echo "[5/7] Installing Python packages..."
pip install --upgrade pip
pip install \
numpy \
opencv-python-headless \
wgpu \
httpx \
pyyaml \
celery[redis] \
fastapi \
uvicorn
# 6. Clone/update art-dag code
echo "[6/7] Setting up art-dag code..."
ARTDAG_DIR="$VENV_DIR/celery"
if [ -d "$ARTDAG_DIR" ]; then
echo "Updating existing code..."
cd "$ARTDAG_DIR"
git pull || true
else
echo "Cloning art-dag..."
git clone https://git.rose-ash.com/art-dag/celery.git "$ARTDAG_DIR" || {
echo "Git clone failed. You may need to copy code manually."
}
fi
# 7. Test GPU compute
echo "[7/7] Testing GPU compute..."
"$VENV_DIR/bin/python3" << 'PYTEST'
import sys
try:
import wgpu
adapter = wgpu.gpu.request_adapter_sync(power_preference="high-performance")
print(f"GPU Adapter: {adapter.info.get('device', 'unknown')}")
device = adapter.request_device_sync()
print("wgpu device created successfully")
# Check for NVENC via FFmpeg
import subprocess
result = subprocess.run(['ffmpeg', '-encoders'], capture_output=True, text=True)
if 'h264_nvenc' in result.stdout:
print("NVENC H.264 encoder: AVAILABLE")
else:
print("NVENC H.264 encoder: NOT AVAILABLE")
if 'hevc_nvenc' in result.stdout:
print("NVENC HEVC encoder: AVAILABLE")
else:
print("NVENC HEVC encoder: NOT AVAILABLE")
except Exception as e:
print(f"Error: {e}")
sys.exit(1)
PYTEST
echo ""
echo "=== Setup Complete ==="
echo "Venv: $VENV_DIR"
echo "Code: $ARTDAG_DIR"
echo ""
echo "To activate: source $VENV_DIR/bin/activate"
echo "To test: cd $ARTDAG_DIR && python -c 'from streaming.backends import get_backend; print(get_backend(\"wgpu\"))'"

26
l1/server.py Normal file
View File

@@ -0,0 +1,26 @@
#!/usr/bin/env python3
"""
Art DAG L1 Server

Minimal entry point that uses the modular app factory.
All routes are defined in app/routers/.
All templates are in app/templates/.
"""
import logging
import os

# Root logger: timestamp, level and logger name on every record.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(levelname)s %(name)s: %(message)s'
)

# The ASGI application is assembled by the app factory package; importing
# it here re-exports it as ``server:app`` for uvicorn.
from app import app


def _serve() -> None:
    """Start uvicorn with host/port taken from the environment."""
    import uvicorn

    bind_host = os.environ.get("HOST", "0.0.0.0")
    bind_port = int(os.environ.get("PORT", "8100"))
    uvicorn.run("server:app", host=bind_host, port=bind_port, workers=4)


if __name__ == "__main__":
    _serve()

View File

@@ -0,0 +1,32 @@
"""
S-Expression Effects System
Safe, shareable effects defined in S-expressions.
"""
from .parser import parse, parse_file, Symbol, Keyword
from .interpreter import (
Interpreter,
get_interpreter,
load_effect,
load_effects_dir,
run_effect,
list_effects,
make_process_frame,
)
from .primitives import PRIMITIVES
__all__ = [
'parse',
'parse_file',
'Symbol',
'Keyword',
'Interpreter',
'get_interpreter',
'load_effect',
'load_effects_dir',
'run_effect',
'list_effects',
'make_process_frame',
'PRIMITIVES',
]

View File

@@ -0,0 +1,206 @@
;; Derived Operations
;;
;; These are built from true primitives using S-expressions.
;; Load with: (require "derived")
;; =============================================================================
;; Math Helpers (derivable from where + basic ops)
;; =============================================================================
;; Absolute value (branchless: select -x where x < 0, else x)
(define (abs x) (where (< x 0) (- x) x))
;; Minimum of two values
(define (min2 a b) (where (< a b) a b))
;; Maximum of two values
(define (max2 a b) (where (> a b) a b))
;; Clamp x to range [lo, hi]
(define (clamp x lo hi) (max2 lo (min2 hi x)))
;; Square of x
(define (sq x) (* x x))
;; Linear interpolation: a*(1-t) + b*t
(define (lerp a b t) (+ (* a (- 1 t)) (* b t)))
;; Hermite smooth interpolation: 0 at edge0, 1 at edge1, eased between
;; (t*t*(3 - 2t) after clamping t to [0, 1])
(define (smoothstep edge0 edge1 x)
(let ((t (clamp (/ (- x edge0) (- edge1 edge0)) 0 1)))
(* t (* t (- 3 (* 2 t))))))
;; =============================================================================
;; Channel Shortcuts (derivable from channel primitive)
;; =============================================================================
;; Extract red channel as xector (channel index 0)
(define (red frame) (channel frame 0))
;; Extract green channel as xector (channel index 1)
(define (green frame) (channel frame 1))
;; Extract blue channel as xector (channel index 2)
(define (blue frame) (channel frame 2))
;; Convert to grayscale xector using ITU-R BT.601 luma weights
;; (0.299 R + 0.587 G + 0.114 B)
(define (gray frame)
(+ (* (red frame) 0.299)
(* (green frame) 0.587)
(* (blue frame) 0.114)))
;; Alias for gray
(define (luminance frame) (gray frame))
;; =============================================================================
;; Coordinate Generators (derivable from iota + repeat/tile)
;; =============================================================================
;; X coordinate for each pixel [0, width) - one 0..w-1 row tiled down
(define (x-coords frame) (tile (iota (width frame)) (height frame)))
;; Y coordinate for each pixel [0, height) - each row index repeated across
(define (y-coords frame) (repeat (iota (height frame)) (width frame)))
;; Normalized X coordinate [0, 1]; max2 guards divide-by-zero on 1px-wide frames
(define (x-norm frame) (/ (x-coords frame) (max2 1 (- (width frame) 1))))
;; Normalized Y coordinate [0, 1]; max2 guards divide-by-zero on 1px-tall frames
(define (y-norm frame) (/ (y-coords frame) (max2 1 (- (height frame) 1))))
;; Euclidean distance from frame center for each pixel
(define (dist-from-center frame)
(let* ((cx (/ (width frame) 2))
(cy (/ (height frame) 2))
(dx (- (x-coords frame) cx))
(dy (- (y-coords frame) cy)))
(sqrt (+ (sq dx) (sq dy)))))
;; Normalized distance from center [0, ~1]
;; NOTE(review): βmax is presumably the reduction-max primitive over a
;; xector - confirm against the primitive library.
(define (dist-norm frame)
(let ((d (dist-from-center frame)))
(/ d (max2 1 (βmax d)))))
;; =============================================================================
;; Cell/Grid Operations (derivable from floor + basic math)
;; =============================================================================
;; Cell row index for each pixel
(define (cell-row frame cell-size) (floor (/ (y-coords frame) cell-size)))
;; Cell column index for each pixel
(define (cell-col frame cell-size) (floor (/ (x-coords frame) cell-size)))
;; Number of cell rows (any partial bottom row is truncated by floor)
(define (num-rows frame cell-size) (floor (/ (height frame) cell-size)))
;; Number of cell columns (any partial right column is truncated by floor)
(define (num-cols frame cell-size) (floor (/ (width frame) cell-size)))
;; Flat row-major cell index for each pixel: row * num-cols + col
(define (cell-indices frame cell-size)
(+ (* (cell-row frame cell-size) (num-cols frame cell-size))
(cell-col frame cell-size)))
;; Total number of cells
(define (num-cells frame cell-size)
(* (num-rows frame cell-size) (num-cols frame cell-size)))
;; X position within cell [0, cell-size)
(define (local-x frame cell-size) (mod (x-coords frame) cell-size))
;; Y position within cell [0, cell-size)
(define (local-y frame cell-size) (mod (y-coords frame) cell-size))
;; Normalized X within cell [0, 1]; max2 guards a cell-size of 1
(define (local-x-norm frame cell-size)
(/ (local-x frame cell-size) (max2 1 (- cell-size 1))))
;; Normalized Y within cell [0, 1]; max2 guards a cell-size of 1
(define (local-y-norm frame cell-size)
(/ (local-y frame cell-size) (max2 1 (- cell-size 1))))
;; =============================================================================
;; Fill Operations (derivable from iota)
;; =============================================================================
;; Xector of n zeros (any length-n xector times 0)
(define (zeros n) (* (iota n) 0))
;; Xector of n ones
(define (ones n) (+ (zeros n) 1))
;; Xector of n copies of val
(define (fill val n) (+ (zeros n) val))
;; Xector of zeros matching x's length
(define (zeros-like x) (* x 0))
;; Xector of ones matching x's length
(define (ones-like x) (+ (zeros-like x) 1))
;; =============================================================================
;; Pooling (derivable from group-reduce)
;; =============================================================================
;; Pool a channel by cell index: mean over the pixels of each cell
(define (pool-channel chan cell-idx num-cells)
(group-reduce chan cell-idx num-cells "mean"))
;; Pool red channel to cells (one mean value per cell)
(define (pool-red frame cell-size)
(pool-channel (red frame)
(cell-indices frame cell-size)
(num-cells frame cell-size)))
;; Pool green channel to cells
(define (pool-green frame cell-size)
(pool-channel (green frame)
(cell-indices frame cell-size)
(num-cells frame cell-size)))
;; Pool blue channel to cells
(define (pool-blue frame cell-size)
(pool-channel (blue frame)
(cell-indices frame cell-size)
(num-cells frame cell-size)))
;; Pool grayscale (BT.601 luma via gray) to cells
(define (pool-gray frame cell-size)
(pool-channel (gray frame)
(cell-indices frame cell-size)
(num-cells frame cell-size)))
;; =============================================================================
;; Blending (derivable from math)
;; =============================================================================
;; Additive blend, clamped to the valid channel range [0, 255]
(define (blend-add a b) (clamp (+ a b) 0 255))
;; Multiply blend (a normalized to [0,1] first; result stays in range)
(define (blend-multiply a b) (* (/ a 255) b))
;; Screen blend: 255 - (255-a)(255-b)/255 - lightens, never exceeds 255
(define (blend-screen a b) (- 255 (* (/ (- 255 a) 255) (- 255 b))))
;; Overlay blend: multiply in shadows (a < 128), screen in highlights
(define (blend-overlay a b)
(where (< a 128)
(* 2 (/ (* a b) 255))
(- 255 (* 2 (/ (* (- 255 a) (- 255 b)) 255)))))
;; =============================================================================
;; Simple Effects (derivable from primitives)
;; =============================================================================
;; Invert a channel (255 - c)
(define (invert-channel c) (- 255 c))
;; Binary threshold: 255 where c > thresh, else 0
(define (threshold-channel c thresh) (where (> c thresh) 255 0))
;; Reduce to n levels by snapping each value to the nearest of n evenly
;; spaced steps. The max2 guard fixes a divide-by-zero when levels = 1
;; (step was 255/(levels-1)); with the guard that case degenerates to a
;; 0/255 snap instead of erroring.
(define (posterize-channel c levels)
(let ((step (/ 255 (max2 1 (- levels 1)))))
(* (round (/ c step)) step)))

View File

@@ -0,0 +1,17 @@
;; ASCII Art effect - converts image to ASCII characters
;;
;; Pipeline: sample the frame into char_size x char_size cells, map each
;; cell's luminance to a character from the chosen alphabet, then render
;; the character grid back over the frame.
(require-primitives "ascii")
(define-effect ascii_art
:params (
(char_size :type int :default 8 :range [4 32])
(alphabet :type string :default "standard")
(color_mode :type string :default "color" :desc "color, mono, invert, or any color name/hex")
(background_color :type string :default "black" :desc "background color name/hex")
(invert_colors :type int :default 0 :desc "swap foreground and background colors")
(contrast :type float :default 1.5 :range [1 3])
)
;; cell-sample returns per-cell (colors luminances); contrast skews the
;; luminance-to-character mapping towards the extremes.
(let* ((sample (cell-sample frame char_size))
(colors (nth sample 0))
(luminances (nth sample 1))
(chars (luminance-to-chars luminances alphabet contrast)))
(render-char-grid frame chars colors char_size color_mode background_color invert_colors)))

View File

@@ -0,0 +1,52 @@
;; ASCII Art FX - converts image to ASCII characters with per-character effects
;;
;; Like ascii_art, but each rendered character can additionally be jittered,
;; scaled, rotated, and hue-shifted. Each effect amount is modulated by a
;; selectable source (cell luminance, saturation, position, randomness, or
;; distance from center); "none" leaves the effect at its constant value.
(require-primitives "ascii")
(define-effect ascii_art_fx
:params (
;; Basic parameters
(char_size :type int :default 8 :range [4 32]
:desc "Size of each character cell in pixels")
(alphabet :type string :default "standard"
:desc "Character set to use")
(color_mode :type string :default "color"
:choices [color mono invert]
:desc "Color mode: color, mono, invert, or any color name/hex")
(background_color :type string :default "black"
:desc "Background color name or hex value")
(invert_colors :type int :default 0 :range [0 1]
:desc "Swap foreground and background colors (0/1)")
(contrast :type float :default 1.5 :range [1 3]
:desc "Character selection contrast")
;; Per-character effects
(char_jitter :type float :default 0 :range [0 20]
:desc "Position jitter amount in pixels")
(char_scale :type float :default 1.0 :range [0.5 2.0]
:desc "Character scale factor")
(char_rotation :type float :default 0 :range [0 180]
:desc "Rotation amount in degrees")
(char_hue_shift :type float :default 0 :range [0 360]
:desc "Hue shift in degrees")
;; Modulation sources
(jitter_source :type string :default "none"
:choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
:desc "What drives jitter modulation")
(scale_source :type string :default "none"
:choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
:desc "What drives scale modulation")
(rotation_source :type string :default "none"
:choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
:desc "What drives rotation modulation")
(hue_source :type string :default "none"
:choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
:desc "What drives hue shift modulation")
)
;; Sample cells, pick characters by luminance, then render with the
;; per-character effect amounts and their modulation sources.
(let* ((sample (cell-sample frame char_size))
(colors (nth sample 0))
(luminances (nth sample 1))
(chars (luminance-to-chars luminances alphabet contrast)))
(render-char-grid-fx frame chars colors luminances char_size
color_mode background_color invert_colors
char_jitter char_scale char_rotation char_hue_shift
jitter_source scale_source rotation_source hue_source)))

View File

@@ -0,0 +1,102 @@
;; Composable ASCII Art with Per-Zone Expression-Driven Effects
;; Requires ascii primitive library for the ascii-fx-zone primitive
(require-primitives "ascii")
;; Two modes of operation:
;;
;; 1. EXPRESSION MODE: Use zone-* variables in expression parameters
;; Zone variables available:
;; zone-row, zone-col: Grid position (integers)
;; zone-row-norm, zone-col-norm: Normalized position (0-1)
;; zone-lum: Cell luminance (0-1)
;; zone-sat: Cell saturation (0-1)
;; zone-hue: Cell hue (0-360)
;; zone-r, zone-g, zone-b: RGB components (0-1)
;;
;; Example:
;; (ascii-fx-zone frame
;; :cols 80
;; :char_hue (* zone-lum 180)
;; :char_rotation (* zone-col-norm 30))
;;
;; 2. CELL EFFECT MODE: Pass a lambda to apply arbitrary effects per-cell
;; The lambda receives (cell-image zone-dict) and returns modified cell.
;; Zone dict contains: row, col, row-norm, col-norm, lum, sat, hue, r, g, b,
;; char, color, cell_size, plus any bound analysis values.
;;
;; Any loaded sexp effect can be called on cells - each cell is just a small frame:
;; (blur cell radius) - Gaussian blur
;; (rotate cell angle) - Rotate by angle degrees
;; (brightness cell factor) - Adjust brightness
;; (contrast cell factor) - Adjust contrast
;; (saturation cell factor) - Adjust saturation
;; (hue_shift cell degrees) - Shift hue
;; (rgb_split cell offset_x offset_y) - RGB channel split
;; (invert cell) - Invert colors
;; (pixelate cell block_size) - Pixelate
;; (wave cell amplitude freq) - Wave distortion
;; ... and any other loaded effect
;;
;; Example:
;; (ascii-fx-zone frame
;; :cols 60
;; :cell_effect (lambda [cell zone]
;; (blur (rotate cell (* (get zone "energy") 45))
;; (if (> (get zone "lum") 0.5) 3 0))))
(define-effect ascii_fx_zone
:params (
;; Expression-typed (:type any) params default to nil, meaning
;; "effect inactive"; the special form skips nil expressions.
(cols :type int :default 80 :range [20 200]
:desc "Number of character columns")
(char_size :type int :default nil :range [4 32]
:desc "Character cell size in pixels (overrides cols if set)")
(alphabet :type string :default "standard"
:desc "Character set: standard, blocks, simple, digits, or custom string")
(color_mode :type string :default "color"
:desc "Color mode: color, mono, invert, or any color name/hex")
(background :type string :default "black"
:desc "Background color name or hex value")
(contrast :type float :default 1.5 :range [0.5 3.0]
:desc "Contrast for character selection")
(char_hue :type any :default nil
:desc "Hue shift expression (evaluated per-zone with zone-* vars)")
(char_saturation :type any :default nil
:desc "Saturation multiplier expression (1.0 = unchanged)")
(char_brightness :type any :default nil
:desc "Brightness multiplier expression (1.0 = unchanged)")
(char_scale :type any :default nil
:desc "Character scale expression (1.0 = normal size)")
(char_rotation :type any :default nil
:desc "Character rotation expression (degrees)")
(char_jitter :type any :default nil
:desc "Position jitter expression (pixels)")
(cell_effect :type any :default nil
:desc "Lambda (cell zone) -> cell for arbitrary per-cell effects")
;; Convenience params for staged recipes (avoids compile-time expression issues)
(energy :type float :default nil
:desc "Energy multiplier (0-1) from audio analysis bind")
(rotation_scale :type float :default 0
:desc "Max rotation at top-right when energy=1 (degrees)")
)
;; The ascii-fx-zone special form handles expression params
;; If energy + rotation_scale provided, it builds: energy * scale * position_factor
;; where position_factor = 0 at bottom-left, 3 at top-right
;; If cell_effect provided, each character is rendered to a cell image,
;; passed to the lambda, and the result composited back
;; Note: this effect is a thin pass-through - every declared param is
;; forwarded unchanged to the ascii-fx-zone special form.
(ascii-fx-zone frame
:cols cols
:char_size char_size
:alphabet alphabet
:color_mode color_mode
:background background
:contrast contrast
:char_hue char_hue
:char_saturation char_saturation
:char_brightness char_brightness
:char_scale char_scale
:char_rotation char_rotation
:char_jitter char_jitter
:cell_effect cell_effect
:energy energy
:rotation_scale rotation_scale))

View File

@@ -0,0 +1,30 @@
;; ASCII Zones effect - different character sets for different brightness zones
;; Dark areas use simple chars, mid uses standard, bright uses blocks
(require-primitives "ascii")
(define-effect ascii_zones
:params (
(char_size :type int :default 8 :range [4 32])
(dark_threshold :type int :default 80 :range [0 128])
(bright_threshold :type int :default 180 :range [128 255])
(color_mode :type string :default "color")
)
(let* ((sample (cell-sample frame char_size))
(colors (nth sample 0))
(luminances (nth sample 1))
;; Start with simple chars as base
(base-chars (luminance-to-chars luminances "simple" 1.2))
;; Map each cell to appropriate alphabet based on brightness zone
(zoned-chars (map-char-grid base-chars luminances
(lambda (r c ch lum)
(cond
;; Bright zones: use block characters
;; Index (lum - bright_threshold)/15 spans 0..5 at the default
;; threshold; NOTE(review): assumes alphabet-char clamps
;; out-of-range indices - confirm. E.g. with dark_threshold
;; near 128 the " .-" index below can reach 4 for a 3-char set.
((> lum bright_threshold)
(alphabet-char "blocks" (floor (/ (- lum bright_threshold) 15))))
;; Dark zones: use simple sparse chars
((< lum dark_threshold)
(alphabet-char " .-" (floor (/ lum 30))))
;; Mid zones: use standard ASCII
(else
(alphabet-char "standard" (floor (/ lum 4)))))))))
;; Background passed as an RGB list here, whereas the other ascii
;; effects pass a color-name string; presumably render-char-grid
;; accepts both forms - verify against the ascii primitives.
(render-char-grid frame zoned-chars colors char_size color_mode (list 0 0 0))))

View File

@@ -0,0 +1,31 @@
;; Blend effect - combines two video frames
;; Streaming-compatible: frame is background, overlay is second frame
;; Usage: (blend background overlay :opacity 0.5 :mode "alpha")
;;
;; Params:
;; mode - blend mode (add, multiply, screen, overlay, difference, lighten, darken, alpha)
;; opacity - blend amount (0-1)
(require-primitives "image" "blending" "core")
(define-effect blend
:params (
(overlay :type frame :default nil)
(mode :type string :default "alpha")
(opacity :type float :default 0.5)
)
;; No overlay bound (e.g. single-input pipelines): pass frame through.
(if (core:is-nil overlay)
frame
(let [a frame
b overlay
a-h (image:height a)
a-w (image:width a)
b-h (image:height b)
b-w (image:width b)
;; Resize b to match a if needed (stretch; aspect not preserved)
b-sized (if (and (= a-w b-w) (= a-h b-h))
b
(image:resize b a-w a-h "linear"))]
;; "alpha" is a plain opacity mix; any other mode computes the
;; blended layer first, then mixes it in at the given opacity.
(if (= mode "alpha")
(blending:blend-images a b-sized opacity)
(blending:blend-images a (blending:blend-mode a b-sized mode) opacity)))))

View File

@@ -0,0 +1,58 @@
;; N-way weighted blend effect
;; Streaming-compatible: pass inputs as a list of frames
;; Usage: (blend_multi :inputs [(read a) (read b) (read c)] :weights [0.3 0.4 0.3])
;;
;; Parameters:
;; inputs - list of N frames to blend
;; weights - list of N floats, one per input (resolved per-frame)
;; mode - blend mode applied when folding each frame in:
;; "alpha" — pure weighted average (default)
;; "multiply" — darken by multiplication
;; "screen" — lighten (inverse multiply)
;; "overlay" — contrast-boosting midtone blend
;; "soft-light" — gentle dodge/burn
;; "hard-light" — strong dodge/burn
;; "color-dodge" — brightens towards white
;; "color-burn" — darkens towards black
;; "difference" — absolute pixel difference
;; "exclusion" — softer difference
;; "add" — additive (clamped)
;; "subtract" — subtractive (clamped)
;; "darken" — per-pixel minimum
;; "lighten" — per-pixel maximum
;; resize_mode - how to match frame dimensions (fit, crop, stretch)
;;
;; Uses a left-fold over inputs[1..N-1]. At each step the running
;; opacity is: w[i] / (w[0] + w[1] + ... + w[i])
;; which produces the correct normalised weighted result.
;;
;; NOTE(review): assumes inputs is non-empty and weights has at least as
;; many entries as inputs - (nth inputs 0) would fail otherwise. Also,
;; resize_mode is declared but the body always stretches via image:resize
;; "linear"; confirm whether fit/crop handling is implemented elsewhere.
(require-primitives "image" "blending")
(define-effect blend_multi
:params (
(inputs :type list :default [])
(weights :type list :default [])
(mode :type string :default "alpha")
(resize_mode :type string :default "fit")
)
(let [n (len inputs)
;; Target dimensions from first frame
target-w (image:width (nth inputs 0))
target-h (image:height (nth inputs 0))
;; Fold over indices 1..n-1
;; Accumulator is (list blended-frame running-weight-sum)
seed (list (nth inputs 0) (nth weights 0))
result (reduce (range 1 n) seed
(lambda (pair i)
(let [acc (nth pair 0)
running (nth pair 1)
w (nth weights i)
new-running (+ running w)
;; 0.001 floor guards divide-by-zero when all weights so far are 0
opacity (/ w (max new-running 0.001))
f (image:resize (nth inputs i) target-w target-h "linear")
;; Apply blend mode then mix with opacity
blended (if (= mode "alpha")
(blending:blend-images acc f opacity)
(blending:blend-images acc (blending:blend-mode acc f mode) opacity))]
(list blended new-running))))]
(nth result 0)))

View File

@@ -0,0 +1,16 @@
;; Bloom effect - glow on bright areas
;; Extracts pixels above the luminance threshold (the "bright pass"),
;; blurs them, and adds the blurred layer back onto the original frame.
(require-primitives "image" "blending")
(define-effect bloom
:params (
(intensity :type float :default 0.5 :range [0 2])
(threshold :type int :default 200 :range [0 255])
(radius :type int :default 15 :range [1 50])
)
;; NOTE(review): intensity is declared but never referenced in the body -
;; presumably it should scale the blurred layer before the "add" blend;
;; confirm intended behavior before relying on the parameter.
(let* ((bright (map-pixels frame
(lambda (x y c)
(if (> (luminance c) threshold)
c
(rgb 0 0 0)))))
(blurred (image:blur bright radius)))
(blending:blend-mode frame blurred "add")))

View File

@@ -0,0 +1,8 @@
;; Blur effect - gaussian blur
;; radius is clamped to at least 1 so an out-of-range value cannot
;; reach image:blur with a zero or negative kernel.
(require-primitives "image")
(define-effect blur
:params (
(radius :type int :default 5 :range [1 50])
)
(image:blur frame (max 1 radius)))

View File

@@ -0,0 +1,9 @@
;; Brightness effect - adjusts overall brightness
;; Uses vectorized adjust primitive for fast processing
;; amount is an additive per-channel offset: negative darkens, positive brightens.
(require-primitives "color_ops")
(define-effect brightness
:params (
(amount :type int :default 0 :range [-255 255])
)
(color_ops:adjust-brightness frame amount))

View File

@@ -0,0 +1,65 @@
;; Cell Pattern effect - custom patterns within cells
;;
;; Demonstrates building arbitrary per-cell visuals from primitives.
;; Uses local coordinates within cells to draw patterns scaled by luminance.
(require-primitives "xector")
(define-effect cell_pattern
:params (
(cell-size :type int :default 16 :range [8 48] :desc "Cell size")
(pattern :type string :default "diagonal" :desc "Pattern: diagonal, cross, ring")
)
(let* (
;; Pool to get cell colors
(pooled (pool-frame frame cell-size))
(cell-r (nth pooled 0))
(cell-g (nth pooled 1))
(cell-b (nth pooled 2))
(cell-lum (α/ (nth pooled 3) 255))
;; Cell indices for each pixel
(cell-idx (cell-indices frame cell-size))
;; Look up cell values for each pixel
(pix-r (gather cell-r cell-idx))
(pix-g (gather cell-g cell-idx))
(pix-b (gather cell-b cell-idx))
(pix-lum (gather cell-lum cell-idx))
;; Local position within cell [0, 1]
(lx (local-x-norm frame cell-size))
(ly (local-y-norm frame cell-size))
;; Pattern mask based on pattern type
(mask
(cond
;; Diagonal lines - thickness based on luminance
((= pattern "diagonal")
(let* ((diag (αmod (α+ lx ly) 0.25))
(thickness (α* pix-lum 0.125)))
(α< diag thickness)))
;; Cross pattern
((= pattern "cross")
(let* ((cx (αabs (α- lx 0.5)))
(cy (αabs (α- ly 0.5)))
(thickness (α* pix-lum 0.25)))
(αor (α< cx thickness) (α< cy thickness))))
;; Ring pattern
((= pattern "ring")
(let* ((dx (α- lx 0.5))
(dy (α- ly 0.5))
(dist (αsqrt (α+ (α² dx) (α² dy))))
(target (α* pix-lum 0.4))
(thickness 0.05))
(α< (αabs (α- dist target)) thickness)))
;; Default: solid
(else (α> pix-lum 0)))))
;; Apply mask: show cell color where mask is true, black elsewhere
(rgb (where mask pix-r 0)
(where mask pix-g 0)
(where mask pix-b 0))))

View File

@@ -0,0 +1,13 @@
;; Color adjustment effect - replaces TRANSFORM node
;; Applies brightness, then contrast, then saturation in that order.
(require-primitives "color_ops")
(define-effect color-adjust
:params (
(brightness :type int :default 0 :range [-255 255] :desc "Brightness adjustment")
(contrast :type float :default 1 :range [0 3] :desc "Contrast multiplier")
(saturation :type float :default 1 :range [0 2] :desc "Saturation multiplier")
)
;; -> threads frame through each adjustment as the first argument
(-> frame
(color_ops:adjust-brightness brightness)
(color_ops:adjust-contrast contrast)
(color_ops:adjust-saturation saturation)))

View File

@@ -0,0 +1,13 @@
;; Color Cycle effect - animated hue rotation
;; NOTE(review): t is presumably the normalized time variable supplied by
;; the effect runtime - confirm; shift = t * speed * 360 degrees per cycle.
(require-primitives "color_ops")
(define-effect color_cycle
:params (
(speed :type int :default 1 :range [0 10])
)
(let ((shift (* t speed 360)))
;; Per-pixel: RGB -> HSV, rotate hue modulo 360, convert back
(map-pixels frame
(lambda (x y c)
(let* ((hsv (rgb->hsv c))
(new-h (mod (+ (first hsv) shift) 360)))
(hsv->rgb (list new-h (nth hsv 1) (nth hsv 2))))))))

View File

@@ -0,0 +1,9 @@
;; Contrast effect - adjusts image contrast
;; Uses vectorized adjust primitive for fast processing
(require-primitives "color_ops")
(define-effect contrast
:params (
;; Contrast multiplier: 1.0 = unchanged, <1 reduces, >1 boosts.
;; Declared :type float (was :type int, which contradicted the
;; fractional range [0.5 3] and the float type used for the same
;; parameter in color-adjust - int coercion would truncate 0.5 to 0).
(amount :type float :default 1 :range [0.5 3])
)
(color_ops:adjust-contrast frame amount))

View File

@@ -0,0 +1,30 @@
;; CRT effect - old monitor simulation
;; Darkens every line_spacing-th scanline and applies a radial vignette.
(require-primitives "image")
(define-effect crt
:params (
(line_spacing :type int :default 2 :range [1 10])
(line_opacity :type float :default 0.3 :range [0 1])
;; Range added for consistency with line_opacity: values above 1
;; would drive the vignette factor below negative at the corners,
;; producing negative channel values in the rgb call.
(vignette_amount :type float :default 0.2 :range [0 1])
)
(let* ((w (image:width frame))
(h (image:height frame))
(cx (/ w 2))
(cy (/ h 2))
;; Distance from center to a corner; normalizes the vignette falloff
(max-dist (sqrt (+ (* cx cx) (* cy cy)))))
(map-pixels frame
(lambda (x y c)
(let* (;; Scanline darkening on every line_spacing-th row
(scanline-factor (if (= 0 (mod y line_spacing))
(- 1 line_opacity)
1))
;; Vignette: linear falloff with distance from center
(dx (- x cx))
(dy (- y cy))
(dist (sqrt (+ (* dx dx) (* dy dy))))
(vignette-factor (- 1 (* (/ dist max-dist) vignette_amount)))
;; Combined
(factor (* scanline-factor vignette-factor)))
(rgb (* (red c) factor)
(* (green c) factor)
(* (blue c) factor)))))))

Some files were not shown because too many files have changed in this diff Show More