Files
celery/server.py
2026-01-11 10:24:58 +00:00

6890 lines
280 KiB
Python

#!/usr/bin/env python3
"""
Art DAG L1 Server
Manages rendering runs and provides access to the cache.
- POST /runs - start a run (recipe + inputs)
- GET /runs/{run_id} - get run status/result
- GET /cache/{content_hash} - get cached content
"""
import asyncio
import base64
import hashlib
import json
import logging
import os
import time
import uuid
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional
# Configure logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s %(levelname)s %(name)s: %(message)s'
)
logger = logging.getLogger(__name__)
from fastapi import FastAPI, HTTPException, UploadFile, File, Depends, Form, Request
from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from pydantic import BaseModel
import redis
import requests as http_requests
from urllib.parse import urlparse
import yaml
from celery_app import app as celery_app
from legacy_tasks import render_effect, execute_dag, build_effect_dag
from contextlib import asynccontextmanager
from cache_manager import L1CacheManager, get_cache_manager
import database
import storage_providers
# L1 public URL for redirects
L1_PUBLIC_URL = os.environ.get("L1_PUBLIC_URL", "http://localhost:8100")
def compute_run_id(input_hashes: list[str], recipe: str, recipe_hash: Optional[str] = None) -> str:
    """
    Compute a deterministic run_id from inputs and recipe.

    The run_id is a SHA3-256 hash of:
    - Sorted input content hashes (order-independent)
    - Recipe identifier (recipe_hash if provided, else "effect:{recipe}")

    This makes runs content-addressable: same inputs + recipe = same run_id.

    Args:
        input_hashes: Content hashes of the run's inputs (any order).
        recipe: Recipe name, used only when recipe_hash is not given.
        recipe_hash: Optional content hash of the recipe; takes precedence
            over the recipe name when provided.

    Returns:
        64-character lowercase hex SHA3-256 digest.
    """
    data = {
        "inputs": sorted(input_hashes),
        "recipe": recipe_hash or f"effect:{recipe}",
        "version": "1",  # For future schema changes
    }
    # Canonical JSON (sorted keys, no whitespace) so equal payloads hash equally.
    json_str = json.dumps(data, sort_keys=True, separators=(",", ":"))
    return hashlib.sha3_256(json_str.encode()).hexdigest()
# IPFS gateway URL for public access to IPFS content (empty string disables gateway links)
IPFS_GATEWAY_URL = os.environ.get("IPFS_GATEWAY_URL", "")
# IPFS-primary mode: everything stored on IPFS, no local cache
# Set to "true" (or "1"/"yes") to enable
IPFS_PRIMARY = os.environ.get("IPFS_PRIMARY", "").lower() in ("true", "1", "yes")
# Cache directory (use /data/cache in Docker, ~/.artdag/cache locally)
CACHE_DIR = Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
CACHE_DIR.mkdir(parents=True, exist_ok=True)
# Redis for persistent run storage and shared cache index (multi-worker support)
REDIS_URL = os.environ.get('REDIS_URL', 'redis://localhost:6379/5')
parsed = urlparse(REDIS_URL)
# Build the client from the parsed URL parts; short timeouts so a dead
# Redis fails fast instead of hanging request handlers.
redis_client = redis.Redis(
    host=parsed.hostname or 'localhost',
    port=parsed.port or 6379,
    db=int(parsed.path.lstrip('/') or 0),
    socket_timeout=5,
    socket_connect_timeout=5
)
# Redis key namespaces used by this module
RUNS_KEY_PREFIX = "artdag:run:"
RECIPES_KEY_PREFIX = "artdag:recipe:"
REVOKED_KEY_PREFIX = "artdag:revoked:"
USER_TOKENS_PREFIX = "artdag:user_tokens:"
# Token revocation entries expire after 30 days to match token lifetime
TOKEN_EXPIRY_SECONDS = 60 * 60 * 24 * 30
def register_user_token(username: str, token: str) -> None:
    """Record a token's SHA-256 hash in the user's Redis set.

    Lets us later revoke every token belonging to a username without
    storing the raw tokens themselves.
    """
    digest = hashlib.sha256(token.encode()).hexdigest()
    user_key = f"{USER_TOKENS_PREFIX}{username}"
    redis_client.sadd(user_key, digest)
    # Refresh the set's TTL so it lives as long as the newest token.
    redis_client.expire(user_key, TOKEN_EXPIRY_SECONDS)
def revoke_token(token: str) -> bool:
    """Add a raw token to the revocation set. Returns True if newly revoked.

    Delegates to revoke_token_hash so the Redis key format, expiry, and
    NX semantics live in exactly one place (previously this function
    duplicated that logic).
    """
    token_hash = hashlib.sha256(token.encode()).hexdigest()
    return revoke_token_hash(token_hash)
def revoke_token_hash(token_hash: str) -> bool:
    """Mark a token hash as revoked. Returns True only when it was not already.

    The NX flag makes the SET a no-op for an already-revoked hash, so the
    return value distinguishes "newly revoked" from "was revoked before".
    """
    outcome = redis_client.set(
        f"{REVOKED_KEY_PREFIX}{token_hash}",
        "1",
        ex=TOKEN_EXPIRY_SECONDS,
        nx=True,
    )
    return outcome is not None
def revoke_all_user_tokens(username: str) -> int:
    """Revoke every tracked token hash for a user.

    Returns the number of hashes that were newly revoked, then clears the
    user's tracking set.
    """
    user_key = f"{USER_TOKENS_PREFIX}{username}"
    stored_hashes = redis_client.smembers(user_key)
    revoked = 0
    for stored in stored_hashes:
        # redis-py may return bytes; normalize before building the key.
        as_text = stored.decode() if isinstance(stored, bytes) else stored
        if revoke_token_hash(as_text):
            revoked += 1
    # Drop the tracking set now that all members are revoked.
    redis_client.delete(user_key)
    return revoked
def is_token_revoked(token: str) -> bool:
    """Return True when the token's SHA-256 hash is in the revocation keyspace."""
    digest = hashlib.sha256(token.encode()).hexdigest()
    revoked_key = f"{REVOKED_KEY_PREFIX}{digest}"
    # EXISTS returns a count of matching keys; nonzero means revoked.
    return redis_client.exists(revoked_key) > 0
# Initialize the L1 cache manager; it shares state between workers via
# the same Redis instance used for run persistence.
cache_manager = L1CacheManager(cache_dir=CACHE_DIR, redis_client=redis_client)
def save_run(run: "RunStatus"):
    """Persist a run record to Redis, keyed by its run_id."""
    run_key = RUNS_KEY_PREFIX + run.run_id
    redis_client.set(run_key, run.model_dump_json())
def load_run(run_id: str) -> Optional["RunStatus"]:
    """Fetch a run record from Redis; None when no such run is stored."""
    raw = redis_client.get(RUNS_KEY_PREFIX + run_id)
    return RunStatus.model_validate_json(raw) if raw else None
def list_all_runs() -> list["RunStatus"]:
    """Return every stored run, newest first (sorted by created_at)."""
    found = [
        RunStatus.model_validate_json(payload)
        for key in redis_client.scan_iter(f"{RUNS_KEY_PREFIX}*")
        if (payload := redis_client.get(key))
    ]
    # created_at is an ISO-8601 string, so lexical sort equals chronological.
    return sorted(found, key=lambda r: r.created_at, reverse=True)
def find_runs_using_content(content_hash: str) -> list[tuple["RunStatus", str]]:
    """Find every run that references content_hash.

    Returns (run, role) pairs where role is 'input' or 'output'. A run
    appears twice when the hash is both one of its inputs and its output.
    """
    matches: list[tuple["RunStatus", str]] = []
    for candidate in list_all_runs():
        if candidate.inputs and content_hash in candidate.inputs:
            matches.append((candidate, "input"))
        if candidate.output_hash == content_hash:
            matches.append((candidate, "output"))
    return matches
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: open the database on startup, close on shutdown."""
    # Startup: initialize database
    await database.init_db()
    yield
    # Shutdown: close database
    await database.close_db()
# FastAPI application instance; lifespan handles DB setup/teardown.
app = FastAPI(
    title="Art DAG L1 Server",
    description="Distributed rendering server for Art DAG",
    version="0.1.0",
    lifespan=lifespan
)
@app.exception_handler(404)
async def not_found_handler(request: Request, exc):
    """Custom 404 handler.

    Content-negotiates on the Accept header: browsers (text/html) get a
    styled dark-theme page; everything else gets a JSON detail body.
    """
    from fastapi.responses import JSONResponse
    accept = request.headers.get("accept", "")
    if "text/html" in accept:
        content = '''
        <div class="text-center py-16">
            <h2 class="text-6xl font-bold text-gray-600 mb-4">404</h2>
            <p class="text-xl text-gray-400 mb-8">Page not found</p>
            <a href="/" class="text-blue-400 hover:text-blue-300">Go to home page</a>
        </div>
        '''
        # Page shell is built inline (rather than via a shared render helper)
        # to avoid a circular import at module load time.
        html = f"""<!DOCTYPE html>
<html class="dark">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Not Found | Art DAG L1 Server</title>
    <script src="https://cdn.tailwindcss.com"></script>
    <script>
        tailwind.config = {{
            darkMode: 'class',
            theme: {{
                extend: {{
                    colors: {{
                        dark: {{ 900: '#0a0a0a', 800: '#111', 700: '#1a1a1a', 600: '#222', 500: '#333' }}
                    }}
                }}
            }}
        }}
    </script>
</head>
<body class="bg-dark-900 text-gray-100 min-h-screen">
    <div class="max-w-6xl mx-auto px-4 py-6 sm:px-6 lg:px-8">
        <header class="flex flex-wrap items-center justify-between gap-4 mb-6">
            <h1 class="text-2xl font-bold">
                <a href="/" class="text-white hover:text-gray-200">Art DAG L1 Server</a>
            </h1>
        </header>
        <main class="bg-dark-700 rounded-lg p-6">
            {content}
        </main>
    </div>
</body>
</html>"""
        return HTMLResponse(html, status_code=404)
    return JSONResponse({"detail": "Not found"}, status_code=404)
class RunRequest(BaseModel):
    """Request body for POST /runs: which recipe to run on which inputs."""
    recipe: str  # Recipe name (e.g., "dog", "identity") or "dag" for custom DAG
    inputs: list[str]  # List of content hashes (must already be in cache)
    output_name: Optional[str] = None  # Auto-generated from recipe + run_id when omitted
    use_dag: bool = False  # Use DAG engine instead of legacy effect runner
    dag_json: Optional[str] = None  # Custom DAG JSON (required if recipe="dag")
class RunStatus(BaseModel):
    """Persisted status/result record of a rendering run.

    Serialized to Redis via model_dump_json and also returned directly
    from the /runs endpoints.
    """
    run_id: str  # Content-addressable id (see compute_run_id)
    status: str  # pending, running, completed, failed
    recipe: str
    inputs: list[str]  # Input content hashes
    output_name: str
    created_at: str  # ISO-8601 timestamp
    completed_at: Optional[str] = None  # ISO-8601 timestamp, set on completion/failure
    output_hash: Optional[str] = None  # Content hash of the rendered output
    output_ipfs_cid: Optional[str] = None  # IPFS CID of output (IPFS_PRIMARY mode)
    error: Optional[str] = None  # Failure message when status == "failed"
    celery_task_id: Optional[str] = None  # Backing Celery task, while running
    effects_commit: Optional[str] = None  # Commit of the effects repo used (legacy mode)
    effect_url: Optional[str] = None  # URL to effect source code
    username: Optional[str] = None  # Owner of the run (ActivityPub actor ID)
    infrastructure: Optional[dict] = None  # Hardware/software used for rendering
    provenance_cid: Optional[str] = None  # IPFS CID of provenance record
    # Plan execution tracking
    plan_id: Optional[str] = None  # ID of the execution plan
    plan_name: Optional[str] = None  # Human-readable plan name
    step_results: Optional[dict] = None  # step_id -> result dict (status, cache_id, outputs)
    all_outputs: Optional[list] = None  # All outputs from all steps
# ============ Recipe Models ============
class VariableInput(BaseModel):
    """A recipe input slot that must be filled with a content hash at run time."""
    node_id: str  # SOURCE node id in the recipe DAG
    name: str  # Display name (defaults to node_id when parsing)
    description: Optional[str] = None
    required: bool = True
class FixedInput(BaseModel):
    """A recipe input pre-resolved from the recipe's asset registry."""
    node_id: str  # SOURCE node id in the recipe DAG
    asset: str  # Asset name as declared in the registry
    content_hash: str  # Resolved hash (empty string when missing from registry)
class RecipeStatus(BaseModel):
    """Parsed metadata for an uploaded recipe, persisted in Redis."""
    recipe_id: str  # Content hash of the YAML file
    name: str
    version: str
    description: Optional[str] = None
    variable_inputs: list[VariableInput]  # Slots filled at run time
    fixed_inputs: list[FixedInput]  # Slots resolved from the registry
    output_node: str  # DAG node whose output is the recipe result
    owner: Optional[str] = None  # Declared in the YAML
    uploaded_at: str  # ISO-8601 timestamp
    uploader: Optional[str] = None  # Actor who uploaded the recipe
class RecipeRunRequest(BaseModel):
    """Request to run a recipe, supplying its variable inputs."""
    inputs: dict[str, str]  # node_id -> content_hash
def save_recipe(recipe: RecipeStatus):
    """Persist a recipe record to Redis, keyed by its recipe_id."""
    recipe_key = RECIPES_KEY_PREFIX + recipe.recipe_id
    redis_client.set(recipe_key, recipe.model_dump_json())
def load_recipe(recipe_id: str) -> Optional[RecipeStatus]:
    """Fetch a recipe record from Redis; None when it does not exist."""
    raw = redis_client.get(RECIPES_KEY_PREFIX + recipe_id)
    return RecipeStatus.model_validate_json(raw) if raw else None
def list_all_recipes() -> list[RecipeStatus]:
    """Return every stored recipe, most recently uploaded first."""
    found = [
        RecipeStatus.model_validate_json(payload)
        for key in redis_client.scan_iter(f"{RECIPES_KEY_PREFIX}*")
        if (payload := redis_client.get(key))
    ]
    # uploaded_at is ISO-8601, so lexical order matches chronological order.
    return sorted(found, key=lambda c: c.uploaded_at, reverse=True)
def delete_recipe_from_redis(recipe_id: str) -> bool:
    """Delete a recipe record; True when a key was actually removed."""
    deleted_count = redis_client.delete(f"{RECIPES_KEY_PREFIX}{recipe_id}")
    return deleted_count > 0
def parse_recipe_yaml(yaml_content: str, recipe_hash: str, uploader: str) -> RecipeStatus:
    """Parse a recipe YAML document into a RecipeStatus record.

    SOURCE nodes whose config sets `input` become variable inputs (filled
    at run time); SOURCE nodes with an `asset` key are resolved against the
    recipe's registry into fixed inputs. All other node types, and SOURCE
    nodes with neither marker, are ignored.
    """
    config = yaml.safe_load(yaml_content)
    registry_assets = config.get("registry", {}).get("assets", {})
    dag_section = config.get("dag", {})
    variable_inputs: list[VariableInput] = []
    fixed_inputs: list[FixedInput] = []
    for node in dag_section.get("nodes", []):
        if node.get("type") != "SOURCE":
            continue
        node_id = node.get("id")
        cfg = node.get("config", {})
        if cfg.get("input"):
            # Variable input slot, filled by the caller at run time.
            variable_inputs.append(VariableInput(
                node_id=node_id,
                name=cfg.get("name", node_id),
                description=cfg.get("description"),
                required=cfg.get("required", True),
            ))
        elif "asset" in cfg:
            # Fixed input: resolve the asset's content hash from the registry
            # (empty string when the registry lacks an entry).
            asset_name = cfg["asset"]
            fixed_inputs.append(FixedInput(
                node_id=node_id,
                asset=asset_name,
                content_hash=registry_assets.get(asset_name, {}).get("hash", ""),
            ))
    return RecipeStatus(
        recipe_id=recipe_hash,
        name=config.get("name", "unnamed"),
        version=config.get("version", "1.0"),
        description=config.get("description"),
        variable_inputs=variable_inputs,
        fixed_inputs=fixed_inputs,
        output_node=dag_section.get("output") or "",
        owner=config.get("owner"),
        uploaded_at=datetime.now(timezone.utc).isoformat(),
        uploader=uploader,
    )
# ============ Auth ============
# Bearer-token extractor; auto_error=False so endpoints can treat a missing
# header as "anonymous" instead of an immediate 403.
security = HTTPBearer(auto_error=False)
@dataclass
class UserContext:
    """Authenticated user identity extracted from a JWT token."""
    username: str
    # The L2 server that issued the token (e.g., "https://artdag.rose-ash.com")
    l2_server: str
    # Domain part used to build the actor_id (e.g., "artdag.rose-ash.com")
    l2_domain: str

    @property
    def actor_id(self) -> str:
        """ActivityPub-style actor ID: @username@domain"""
        return "@" + self.username + "@" + self.l2_domain
def decode_token_claims(token: str) -> Optional[dict]:
    """Decode the payload segment of a JWT without verifying the signature.

    Signature verification is the issuing L2 server's job; here we only
    need to read the claims. Returns None for anything that is not a
    three-part JWT with a base64url-decodable JSON payload.
    """
    try:
        # A JWT is header.payload.signature; reject any other shape.
        segments = token.split(".")
        if len(segments) != 3:
            return None
        payload = segments[1]
        # base64url strips '=' padding; restore it so decoding succeeds.
        remainder = len(payload) % 4
        if remainder:
            payload += "=" * (4 - remainder)
        return json.loads(base64.urlsafe_b64decode(payload))
    except Exception:
        # Malformed base64 or non-JSON payload: treat as "no claims".
        return None
def get_user_context_from_token(token: str) -> Optional[UserContext]:
    """Extract user context from JWT claims (no signature verification here).

    Requires both a username ("username" or "sub" claim) and an
    "l2_server" claim; returns None when either is missing or the token
    cannot be decoded.
    """
    claims = decode_token_claims(token)
    if not claims:
        return None
    username = claims.get("username") or claims.get("sub")
    l2_server = claims.get("l2_server")  # e.g., "https://artdag.rose-ash.com"
    if not username or not l2_server:
        return None
    # Derive the domain for the ActivityPub-style actor_id. urlparse is
    # already imported at module level (the previous function-local
    # re-import was redundant). Fall back to the raw claim when it is a
    # bare hostname rather than a URL.
    parsed_l2 = urlparse(l2_server)
    l2_domain = parsed_l2.netloc or l2_server
    return UserContext(username=username, l2_server=l2_server, l2_domain=l2_domain)
def _verify_token_with_l2_sync(token: str, l2_server: str) -> Optional[str]:
    """Blocking check of a token against the L2 server that issued it.

    Returns the verified username on success; None on any failure
    (non-200 response, timeout, network error).
    """
    try:
        response = http_requests.post(
            f"{l2_server}/auth/verify",
            headers={"Authorization": f"Bearer {token}"},
            json={"l1_server": L1_PUBLIC_URL},  # Identify ourselves to L2
            timeout=5,
        )
    except Exception:
        # Best-effort: treat any transport failure as "not verified".
        return None
    if response.status_code != 200:
        return None
    return response.json().get("username")
async def verify_token_with_l2(token: str, l2_server: str) -> Optional[str]:
    """Async wrapper for _verify_token_with_l2_sync; runs the blocking HTTP
    call in a worker thread so the event loop is not stalled."""
    return await asyncio.to_thread(_verify_token_with_l2_sync, token, l2_server)
async def get_verified_user_context(token: str) -> Optional[UserContext]:
    """Return a fully verified UserContext for a token, or None.

    Rejects revoked tokens first, then decodes the claims, then asks the
    issuing L2 server (named in the token itself) to confirm validity.
    """
    # Locally revoked tokens never reach the L2 server.
    if is_token_revoked(token):
        return None
    ctx = get_user_context_from_token(token)
    if ctx is None:
        return None
    # Remote verification runs in a worker thread (non-blocking).
    confirmed = await verify_token_with_l2(token, ctx.l2_server)
    return ctx if confirmed else None
async def get_optional_user(
    credentials: HTTPAuthorizationCredentials = Depends(security)
) -> Optional[str]:
    """FastAPI dependency: username when authenticated, None otherwise."""
    if credentials is None:
        return None
    ctx = await get_verified_user_context(credentials.credentials)
    if ctx is None:
        return None
    return ctx.username
async def get_required_user(
    credentials: HTTPAuthorizationCredentials = Depends(security)
) -> str:
    """FastAPI dependency: username, or 401 when missing/invalid credentials."""
    if credentials is None:
        raise HTTPException(401, "Not authenticated")
    verified = await get_verified_user_context(credentials.credentials)
    if verified is None:
        raise HTTPException(401, "Invalid token")
    return verified.username
async def get_required_user_context(
    credentials: HTTPAuthorizationCredentials = Depends(security)
) -> UserContext:
    """FastAPI dependency: full UserContext, or 401 when missing/invalid."""
    if credentials is None:
        raise HTTPException(401, "Not authenticated")
    verified = await get_verified_user_context(credentials.credentials)
    if verified is None:
        raise HTTPException(401, "Invalid token")
    return verified
def file_hash(path: Path) -> str:
    """Return the SHA3-256 hex digest of the file at *path*.

    Reads in 64 KiB chunks so arbitrarily large files hash in constant memory.
    """
    digest = hashlib.sha3_256()
    with open(path, "rb") as fh:
        while chunk := fh.read(65536):
            digest.update(chunk)
    return digest.hexdigest()
async def cache_file(source: Path, node_type: str = "output") -> str:
    """
    Copy file to cache using L1CacheManager, return content hash.

    Uses artdag's Cache internally for proper tracking, then records the
    item (with its IPFS CID, when available) in the cache_items table.

    Args:
        source: Path of the file to ingest.
        node_type: Cache node classification passed through to the manager.

    Returns:
        The content hash of the cached file.
    """
    cached, ipfs_cid = cache_manager.put(source, node_type=node_type)
    # Save to cache_items table (with IPFS CID)
    await database.create_cache_item(cached.content_hash, ipfs_cid)
    return cached.content_hash
def get_cache_path(content_hash: str) -> Optional[Path]:
    """Get the local filesystem path for a cached file by content_hash.

    Returns None when the hash is not present in the cache (delegated to
    the cache manager's index).
    """
    return cache_manager.get_by_content_hash(content_hash)
@app.get("/api")
async def api_info():
    """Server info (JSON): name, version, cache location, run count."""
    # Redis scan runs in a worker thread to keep the event loop responsive.
    all_runs = await asyncio.to_thread(list_all_runs)
    return {
        "name": "Art DAG L1 Server",
        "version": "0.1.0",
        "cache_dir": str(CACHE_DIR),
        "runs_count": len(all_runs),
    }
def render_home_html(actor_id: Optional[str] = None) -> str:
    """Render the home page HTML.

    Args:
        actor_id: Optional ActivityPub-style id ("@username@domain"); when
            given, the nav shows a "Logged in as" link to the user's L2
            profile, otherwise a "Not logged in" label.

    Returns:
        A complete HTML document as a string (Tailwind-styled, dark theme).
    """
    if actor_id:
        # Extract username and domain from @username@domain format
        parts = actor_id.lstrip("@").split("@")
        username = parts[0] if parts else actor_id
        domain = parts[1] if len(parts) > 1 else ""
        # Link to the user's profile on their L2 server; '#' when we
        # cannot determine a domain.
        l2_user_url = f"https://{domain}/users/{username}" if domain else "#"
        user_section = f'''<div class="ml-auto flex items-center gap-2 text-sm text-gray-300">
            Logged in as <a href="{l2_user_url}" class="text-blue-400 hover:text-blue-300">{actor_id}</a>
        </div>'''
    else:
        user_section = '''<span class="ml-auto text-sm text-gray-400">Not logged in</span>'''
    return f"""
<!DOCTYPE html>
<html class="dark">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Art DAG L1 Server</title>
    <script src="https://cdn.tailwindcss.com"></script>
    <script>
        tailwind.config = {{
            darkMode: 'class',
            theme: {{ extend: {{ colors: {{ dark: {{ 900: '#0a0a0a', 800: '#111', 700: '#1a1a1a', 600: '#222', 500: '#333' }} }} }} }}
        }}
    </script>
</head>
<body class="bg-dark-900 text-gray-100 min-h-screen">
    <div class="max-w-4xl mx-auto px-4 py-8 sm:px-6 lg:px-8">
        <nav class="flex flex-wrap gap-3 mb-8 p-4 bg-dark-700 rounded-lg">
            <a href="/runs" class="px-4 py-2 bg-dark-500 hover:bg-dark-600 rounded-md text-blue-400 hover:text-blue-300 font-medium transition-colors">Runs</a>
            <a href="/recipes" class="px-4 py-2 bg-dark-500 hover:bg-dark-600 rounded-md text-blue-400 hover:text-blue-300 font-medium transition-colors">Recipes</a>
            <a href="/media" class="px-4 py-2 bg-dark-500 hover:bg-dark-600 rounded-md text-blue-400 hover:text-blue-300 font-medium transition-colors">Media</a>
            <a href="/storage" class="px-4 py-2 bg-dark-500 hover:bg-dark-600 rounded-md text-blue-400 hover:text-blue-300 font-medium transition-colors">Storage</a>
            <a href="/docs" class="px-4 py-2 bg-dark-500 hover:bg-dark-600 rounded-md text-blue-400 hover:text-blue-300 font-medium transition-colors">API Docs</a>
            {user_section}
        </nav>
        <h1 class="text-3xl font-bold text-white border-b border-dark-500 pb-4 mb-6">Art DAG L1 Server</h1>
        <p class="text-gray-300 mb-8">L1 rendering server for the Art DAG system. Manages distributed rendering jobs via Celery workers.</p>
        <h2 class="text-xl font-semibold text-gray-200 mt-8 mb-4">Dependencies</h2>
        <ul class="list-disc list-inside space-y-2 text-gray-300 mb-8">
            <li><strong class="text-white">artdag</strong> (GitHub): Core DAG execution engine</li>
            <li><strong class="text-white">artdag-effects</strong> (rose-ash): Effect implementations</li>
            <li><strong class="text-white">Redis</strong>: Message broker, result backend, and run persistence</li>
        </ul>
        <h2 class="text-xl font-semibold text-gray-200 mt-8 mb-4">API Endpoints</h2>
        <div class="overflow-x-auto mb-8">
            <table class="w-full text-sm">
                <thead>
                    <tr class="bg-dark-600">
                        <th class="px-4 py-3 text-left border border-dark-500">Method</th>
                        <th class="px-4 py-3 text-left border border-dark-500">Path</th>
                        <th class="px-4 py-3 text-left border border-dark-500">Description</th>
                    </tr>
                </thead>
                <tbody class="divide-y divide-dark-500">
                    <tr class="bg-dark-800"><td class="px-4 py-2 border border-dark-500">GET</td><td class="px-4 py-2 border border-dark-500"><code class="bg-dark-600 px-2 py-0.5 rounded text-blue-300">/ui</code></td><td class="px-4 py-2 border border-dark-500">Web UI for viewing runs</td></tr>
                    <tr class="bg-dark-800"><td class="px-4 py-2 border border-dark-500">POST</td><td class="px-4 py-2 border border-dark-500"><code class="bg-dark-600 px-2 py-0.5 rounded text-blue-300">/runs</code></td><td class="px-4 py-2 border border-dark-500">Start a rendering run</td></tr>
                    <tr class="bg-dark-800"><td class="px-4 py-2 border border-dark-500">GET</td><td class="px-4 py-2 border border-dark-500"><code class="bg-dark-600 px-2 py-0.5 rounded text-blue-300">/runs</code></td><td class="px-4 py-2 border border-dark-500">List all runs</td></tr>
                    <tr class="bg-dark-800"><td class="px-4 py-2 border border-dark-500">GET</td><td class="px-4 py-2 border border-dark-500"><code class="bg-dark-600 px-2 py-0.5 rounded text-blue-300">/runs/{{run_id}}</code></td><td class="px-4 py-2 border border-dark-500">Get run status</td></tr>
                    <tr class="bg-dark-800"><td class="px-4 py-2 border border-dark-500">GET</td><td class="px-4 py-2 border border-dark-500"><code class="bg-dark-600 px-2 py-0.5 rounded text-blue-300">/media</code></td><td class="px-4 py-2 border border-dark-500">List media items</td></tr>
                    <tr class="bg-dark-800"><td class="px-4 py-2 border border-dark-500">GET</td><td class="px-4 py-2 border border-dark-500"><code class="bg-dark-600 px-2 py-0.5 rounded text-blue-300">/recipes</code></td><td class="px-4 py-2 border border-dark-500">List recipes</td></tr>
                    <tr class="bg-dark-800"><td class="px-4 py-2 border border-dark-500">GET</td><td class="px-4 py-2 border border-dark-500"><code class="bg-dark-600 px-2 py-0.5 rounded text-blue-300">/cache/{{hash}}</code></td><td class="px-4 py-2 border border-dark-500">Download cached content</td></tr>
                    <tr class="bg-dark-800"><td class="px-4 py-2 border border-dark-500">POST</td><td class="px-4 py-2 border border-dark-500"><code class="bg-dark-600 px-2 py-0.5 rounded text-blue-300">/cache/upload</code></td><td class="px-4 py-2 border border-dark-500">Upload file to cache</td></tr>
                    <tr class="bg-dark-800"><td class="px-4 py-2 border border-dark-500">GET</td><td class="px-4 py-2 border border-dark-500"><code class="bg-dark-600 px-2 py-0.5 rounded text-blue-300">/assets</code></td><td class="px-4 py-2 border border-dark-500">List known assets</td></tr>
                </tbody>
            </table>
        </div>
        <h2 class="text-xl font-semibold text-gray-200 mt-8 mb-4">Start a Run</h2>
        <pre class="bg-dark-700 p-4 rounded-lg overflow-x-auto border border-dark-500 mb-8"><code class="text-green-300">curl -X POST /runs \\
    -H "Content-Type: application/json" \\
    -d '{{"recipe": "dog", "inputs": ["33268b6e..."]}}'</code></pre>
        <h2 class="text-xl font-semibold text-gray-200 mt-8 mb-4">Provenance</h2>
        <p class="text-gray-300 mb-4">Every render produces a provenance record linking inputs, effects, and infrastructure:</p>
        <pre class="bg-dark-700 p-4 rounded-lg overflow-x-auto border border-dark-500"><code class="text-green-300">{{
    "output": {{"content_hash": "..."}},
    "inputs": [...],
    "effects": [...],
    "infrastructure": {{...}}
}}</code></pre>
    </div>
</body>
</html>
"""
@app.get("/", response_class=HTMLResponse)
async def root(request: Request):
    """Home page. Shows login state from the session cookie when present."""
    # get_user_context_from_cookie is defined elsewhere in this module
    # (outside this excerpt); returns None when no valid session cookie.
    ctx = await get_user_context_from_cookie(request)
    actor_id = ctx.actor_id if ctx else None
    return render_home_html(actor_id)
@app.post("/runs", response_model=RunStatus)
async def create_run(request: RunRequest, ctx: UserContext = Depends(get_required_user_context)):
    """Start a new rendering run. Checks cache before executing.

    Resolution order:
    1. L1 run cache (database) — return completed immediately on hit.
    2. The user's L2 server — if the run exists there, pull the output
       from IPFS into the L1 cache and return completed.
    3. Otherwise submit a Celery task (DAG engine or legacy single-effect
       mode) and return the run in "running" state.

    Raises:
        HTTPException 400: legacy mode requested with != 1 input.
    """
    # Compute content-addressable run_id (same inputs + recipe = same id)
    run_id = compute_run_id(request.inputs, request.recipe)
    # Generate output name if not provided
    output_name = request.output_name or f"{request.recipe}-{run_id[:8]}"
    # Use actor_id from user context
    actor_id = ctx.actor_id
    # Check L1 cache first
    cached_run = await database.get_run_cache(run_id)
    if cached_run:
        output_hash = cached_run["output_hash"]
        # Verify the output file still exists in cache
        if cache_manager.has_content(output_hash):
            logger.info(f"create_run: Cache hit for run_id={run_id[:16]}... output={output_hash[:16]}...")
            return RunStatus(
                run_id=run_id,
                status="completed",
                recipe=request.recipe,
                inputs=request.inputs,
                output_name=output_name,
                created_at=cached_run.get("created_at", datetime.now(timezone.utc).isoformat()),
                completed_at=cached_run.get("created_at", datetime.now(timezone.utc).isoformat()),
                output_hash=output_hash,
                username=actor_id,
                provenance_cid=cached_run.get("provenance_cid"),
            )
        else:
            # Stale cache entry (file evicted/removed): fall through to re-run
            logger.info(f"create_run: Cache entry exists but output missing, will re-run")
    # Check L2 if not in L1
    l2_server = ctx.l2_server
    try:
        l2_resp = http_requests.get(
            f"{l2_server}/assets/by-run-id/{run_id}",
            timeout=10
        )
        if l2_resp.status_code == 200:
            l2_data = l2_resp.json()
            output_hash = l2_data.get("output_hash")
            ipfs_cid = l2_data.get("ipfs_cid")
            if output_hash and ipfs_cid:
                logger.info(f"create_run: Found on L2, pulling from IPFS: {ipfs_cid}")
                # Pull from IPFS to L1 cache
                import ipfs_client
                legacy_dir = CACHE_DIR / "legacy"
                legacy_dir.mkdir(parents=True, exist_ok=True)
                recovery_path = legacy_dir / output_hash
                if ipfs_client.get_file(ipfs_cid, str(recovery_path)):
                    # File retrieved - put() updates indexes, but file is already in legacy location
                    # Just update the content and IPFS indexes manually
                    cache_manager._set_content_index(output_hash, output_hash)
                    cache_manager._set_ipfs_index(output_hash, ipfs_cid)
                    # Save to run cache so future requests hit step 1
                    await database.save_run_cache(
                        run_id=run_id,
                        output_hash=output_hash,
                        recipe=request.recipe,
                        inputs=request.inputs,
                        ipfs_cid=ipfs_cid,
                        provenance_cid=l2_data.get("provenance_cid"),
                        actor_id=actor_id,
                    )
                    logger.info(f"create_run: Recovered from L2/IPFS: {output_hash[:16]}...")
                    return RunStatus(
                        run_id=run_id,
                        status="completed",
                        recipe=request.recipe,
                        inputs=request.inputs,
                        output_name=output_name,
                        created_at=datetime.now(timezone.utc).isoformat(),
                        completed_at=datetime.now(timezone.utc).isoformat(),
                        output_hash=output_hash,
                        username=actor_id,
                        provenance_cid=l2_data.get("provenance_cid"),
                    )
    except Exception as e:
        # Best-effort: L2/IPFS recovery failure is not fatal, just re-render
        logger.warning(f"create_run: L2 lookup failed (will run Celery): {e}")
    # Not cached anywhere - create run record and submit to Celery
    run = RunStatus(
        run_id=run_id,
        status="pending",
        recipe=request.recipe,
        inputs=request.inputs,
        output_name=output_name,
        created_at=datetime.now(timezone.utc).isoformat(),
        username=actor_id
    )
    # Submit to Celery
    if request.use_dag or request.recipe == "dag":
        # DAG mode - use artdag engine
        if request.dag_json:
            # Custom DAG provided
            dag_json = request.dag_json
        else:
            # Build simple effect DAG from recipe and inputs
            dag = build_effect_dag(request.inputs, request.recipe)
            dag_json = dag.to_json()
        task = execute_dag.delay(dag_json, run.run_id)
    else:
        # Legacy mode - single effect
        if len(request.inputs) != 1:
            raise HTTPException(400, "Legacy mode only supports single-input recipes. Use use_dag=true for multi-input.")
        input_hash = request.inputs[0]
        task = render_effect.delay(input_hash, request.recipe, output_name)
    run.celery_task_id = task.id
    run.status = "running"
    # Persist before returning so GET /runs/{id} can poll immediately
    await asyncio.to_thread(save_run, run)
    return run
def _check_celery_task_sync(task_id: str) -> tuple[bool, bool, Optional[dict], Optional[str]]:
    """Blocking Celery status probe.

    Returns (is_ready, is_successful, result, error):
    - not ready yet:   (False, False, None, None)
    - finished OK:     (True, True, result_dict, None)
    - finished failed: (True, False, None, stringified exception)
    """
    async_result = celery_app.AsyncResult(task_id)
    if not async_result.ready():
        return (False, False, None, None)
    if async_result.successful():
        return (True, True, async_result.result, None)
    # On failure, .result holds the raised exception; report it as text.
    return (True, False, None, str(async_result.result))
@app.get("/runs/{run_id}", response_model=RunStatus)
async def get_run(run_id: str):
    """Get status of a run.

    For runs still marked "running", polls the backing Celery task; when
    the task has finished, folds its result into the run record (handling
    the IPFS-primary, DAG/plan, and legacy result formats), caches the
    output, and persists the updated status.

    Raises:
        HTTPException 404: unknown run_id.
    """
    start = time.time()
    logger.info(f"get_run: Starting for {run_id}")
    t0 = time.time()
    run = await asyncio.to_thread(load_run, run_id)
    logger.info(f"get_run: load_run took {time.time()-t0:.3f}s, status={run.status if run else 'None'}")
    if not run:
        raise HTTPException(404, f"Run {run_id} not found")
    # Check Celery task status if running
    if run.status == "running" and run.celery_task_id:
        t0 = time.time()
        is_ready, is_successful, result, error = await asyncio.to_thread(
            _check_celery_task_sync, run.celery_task_id
        )
        logger.info(f"get_run: Celery check took {time.time()-t0:.3f}s, ready={is_ready}")
        if is_ready:
            if is_successful:
                run.status = "completed"
                run.completed_at = datetime.now(timezone.utc).isoformat()
                # Handle both legacy (render_effect) and new (execute_dag/run_plan) result formats
                if "output_cid" in result:
                    # IPFS-primary mode: everything on IPFS
                    run.output_ipfs_cid = result.get("output_cid")
                    run.plan_id = result.get("plan_id")
                    # Store step CIDs for UI
                    run.step_results = {
                        step_id: {"cid": cid, "status": "completed"}
                        for step_id, cid in result.get("step_cids", {}).items()
                    }
                    # Try to get content_hash from cache_id mapping in Redis
                    # (cache_id is often the same as content_hash)
                    output_path = None
                elif "output_hash" in result or "output_cache_id" in result:
                    # New DAG/plan result format
                    run.output_hash = result.get("output_hash") or result.get("output_cache_id")
                    run.provenance_cid = result.get("provenance_cid")
                    output_path = Path(result.get("output_path", "")) if result.get("output_path") else None
                    # Store plan execution data
                    run.plan_id = result.get("plan_id")
                    run.plan_name = result.get("plan_name")
                    run.step_results = result.get("results")  # step_id -> result dict
                    run.all_outputs = result.get("outputs")  # All outputs from all steps
                elif "output" in result:
                    # Legacy render_effect format
                    run.output_hash = result.get("output", {}).get("content_hash")
                    run.provenance_cid = result.get("provenance_cid")
                    output_path = Path(result.get("output", {}).get("local_path", ""))
                    # Extract effects info from provenance (legacy only)
                    effects = result.get("effects", [])
                    if effects:
                        run.effects_commit = effects[0].get("repo_commit")
                        run.effect_url = effects[0].get("repo_url")
                    # Extract infrastructure info (legacy only)
                    run.infrastructure = result.get("infrastructure")
                # NOTE(review): if the result dict matches none of the three
                # formats above, output_path is never bound and the next line
                # would raise NameError — assumes workers always return one of
                # these shapes; confirm against the task implementations.
                # Cache the output (legacy mode - DAG/plan already caches via cache_manager)
                is_plan_result = "output_hash" in result or "output_cache_id" in result
                if output_path and output_path.exists() and not is_plan_result:
                    t0 = time.time()
                    await cache_file(output_path, node_type="effect_output")
                    logger.info(f"get_run: cache_file took {time.time()-t0:.3f}s")
                # Record activity for deletion tracking (legacy mode)
                if run.output_hash and run.inputs:
                    await asyncio.to_thread(
                        cache_manager.record_simple_activity,
                        input_hashes=run.inputs,
                        output_hash=run.output_hash,
                        run_id=run.run_id,
                    )
                # Save to run cache for content-addressable lookup
                if run.output_hash:
                    ipfs_cid = cache_manager._get_ipfs_cid_from_index(run.output_hash)
                    await database.save_run_cache(
                        run_id=run.run_id,
                        output_hash=run.output_hash,
                        recipe=run.recipe,
                        inputs=run.inputs,
                        ipfs_cid=ipfs_cid,
                        provenance_cid=run.provenance_cid,
                        actor_id=run.username,
                    )
                    logger.info(f"get_run: Saved run cache for {run.run_id[:16]}...")
            else:
                run.status = "failed"
                run.error = error
            # Save updated status (both success and failure paths)
            t0 = time.time()
            await asyncio.to_thread(save_run, run)
            logger.info(f"get_run: save_run took {time.time()-t0:.3f}s")
    logger.info(f"get_run: Total time {time.time()-start:.3f}s")
    return run
@app.delete("/runs/{run_id}")
async def discard_run(run_id: str, ctx: UserContext = Depends(get_required_user_context)):
    """
    Discard (delete) a run and its outputs.

    Enforces deletion rules:
    - Only the run's owner may discard it
    - Cannot discard if output is published to L2 (pinned)
    - Deletes outputs and intermediate cache entries
    - Preserves inputs (cache items and recipes are NOT deleted)

    Raises:
        HTTPException 404: unknown run_id.
        HTTPException 403: caller is not the run's owner.
        HTTPException 400: output is pinned, or activity discard failed.
    """
    run = await asyncio.to_thread(load_run, run_id)
    if not run:
        raise HTTPException(404, f"Run {run_id} not found")
    # Check ownership (run.username may hold either the bare username or
    # the full actor id, depending on when the run was created)
    if run.username not in (ctx.username, ctx.actor_id):
        raise HTTPException(403, "Access denied")
    # Failed runs can always be deleted (no output to protect)
    if run.status != "failed":
        # Only check if output is pinned - inputs are preserved, not deleted
        if run.output_hash:
            meta = await database.load_item_metadata(run.output_hash, ctx.actor_id)
            if meta.get("pinned"):
                pin_reason = meta.get("pin_reason", "published")
                raise HTTPException(400, f"Cannot discard run: output {run.output_hash[:16]}... is pinned ({pin_reason})")
    # Check if activity exists for this run
    activity = await asyncio.to_thread(cache_manager.get_activity, run_id)
    if activity:
        # Discard the activity - only delete outputs, preserve inputs
        success, msg = await asyncio.to_thread(cache_manager.discard_activity_outputs_only, run_id)
        if not success:
            raise HTTPException(400, f"Cannot discard run: {msg}")
    # Remove from Redis
    await asyncio.to_thread(redis_client.delete, f"{RUNS_KEY_PREFIX}{run_id}")
    return {"discarded": True, "run_id": run_id}
@app.delete("/ui/runs/{run_id}/discard", response_class=HTMLResponse)
async def ui_discard_run(run_id: str, request: Request):
    """HTMX handler: discard a run. Only deletes outputs, preserves inputs."""
    def _error(msg: str) -> str:
        # Red alert box used by every failure path of this HTMX fragment.
        return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">{msg}</div>'

    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return _error('Login required')
    run = await asyncio.to_thread(load_run, run_id)
    if run is None:
        return _error('Run not found')
    # Only the owning user (by username or actor id) may discard.
    if run.username not in (ctx.username, ctx.actor_id):
        return _error('Access denied')
    # Failed runs can always be deleted; otherwise a pinned (e.g. published)
    # output blocks deletion. Inputs are preserved, so only the output matters.
    if run.status != "failed" and run.output_hash:
        meta = await database.load_item_metadata(run.output_hash, ctx.actor_id)
        if meta.get("pinned"):
            pin_reason = meta.get("pin_reason", "published")
            return _error(f'Cannot discard: output is pinned ({pin_reason})')
    # If an activity is recorded for this run, remove its outputs only —
    # inputs (cache items, recipes) are deliberately preserved.
    activity = await asyncio.to_thread(cache_manager.get_activity, run_id)
    if activity:
        success, msg = await asyncio.to_thread(cache_manager.discard_activity_outputs_only, run_id)
        if not success:
            return _error(f'Cannot discard: {msg}')
    # Drop the run record from Redis and confirm to the HTMX caller.
    await asyncio.to_thread(redis_client.delete, f"{RUNS_KEY_PREFIX}{run_id}")
    return '''
    <div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg mb-4">
        Run deleted. <a href="/runs" class="underline">Back to runs</a>
    </div>
    '''
@app.get("/run/{run_id}")
async def run_detail(run_id: str, request: Request):
    """Run detail. HTML for browsers, JSON for APIs.

    Side effect: if the run is still marked "running", its Celery task is
    polled and the run record is updated (status, output hash, provenance)
    and re-saved before rendering.
    """
    run = await asyncio.to_thread(load_run, run_id)
    if not run:
        if wants_html(request):
            content = f'<p class="text-red-400">Run not found: {run_id}</p>'
            return HTMLResponse(render_page("Not Found", content, None, active_tab="runs"), status_code=404)
        raise HTTPException(404, f"Run {run_id} not found")
    # Check Celery task status if running
    if run.status == "running" and run.celery_task_id:
        is_ready, is_successful, result, error = await asyncio.to_thread(
            _check_celery_task_sync, run.celery_task_id
        )
        if is_ready:
            if is_successful:
                run.status = "completed"
                run.completed_at = datetime.now(timezone.utc).isoformat()
                run.output_hash = result.get("output", {}).get("content_hash")
                effects = result.get("effects", [])
                if effects:
                    # Provenance comes from the first effect in the result.
                    run.effects_commit = effects[0].get("repo_commit")
                    run.effect_url = effects[0].get("repo_url")
                run.infrastructure = result.get("infrastructure")
                # Ingest the worker's local output file into the cache if present.
                output_path = Path(result.get("output", {}).get("local_path", ""))
                if output_path.exists():
                    await cache_file(output_path)
                # Save to run cache for content-addressable lookup
                if run.output_hash:
                    # NOTE(review): reaches into a private cache_manager helper;
                    # consider a public accessor.
                    ipfs_cid = cache_manager._get_ipfs_cid_from_index(run.output_hash)
                    await database.save_run_cache(
                        run_id=run.run_id,
                        output_hash=run.output_hash,
                        recipe=run.recipe,
                        inputs=run.inputs,
                        ipfs_cid=ipfs_cid,
                        provenance_cid=run.provenance_cid,
                        actor_id=run.username,
                    )
            else:
                run.status = "failed"
                run.error = error
            # Persist the reconciled status back to the run store.
            await asyncio.to_thread(save_run, run)
    if wants_html(request):
        ctx = await get_user_context_from_cookie(request)
        if not ctx:
            content = '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
            return HTMLResponse(render_page("Login Required", content, None, active_tab="runs"), status_code=401)
        # Check user owns this run
        if run.username not in (ctx.username, ctx.actor_id):
            content = '<p class="text-red-400 py-8 text-center">Access denied.</p>'
            return HTMLResponse(render_page("Access Denied", content, ctx.actor_id, active_tab="runs"), status_code=403)
        # Build effect URL: prefer the recorded URL, then a commit-pinned link,
        # then the branch tip as a last resort.
        if run.effect_url:
            effect_url = run.effect_url
        elif run.effects_commit and run.effects_commit != "unknown":
            effect_url = f"https://git.rose-ash.com/art-dag/effects/src/commit/{run.effects_commit}/{run.recipe}"
        else:
            effect_url = f"https://git.rose-ash.com/art-dag/effects/src/branch/main/{run.recipe}"
        # Status badge colors
        status_colors = {
            "completed": "bg-green-600 text-white",
            "running": "bg-yellow-600 text-white",
            "failed": "bg-red-600 text-white",
            "pending": "bg-gray-600 text-white"
        }
        status_badge = status_colors.get(run.status, "bg-gray-600 text-white")
        # Try to get input names from recipe: maps each input content hash to a
        # human-readable name (variable inputs by position, then fixed inputs).
        input_names = {}
        recipe_name = run.recipe.replace("recipe:", "") if run.recipe.startswith("recipe:") else run.recipe
        for recipe in list_all_recipes():
            if recipe.name == recipe_name:
                # Match variable inputs first, then fixed inputs
                for i, var_input in enumerate(recipe.variable_inputs):
                    if i < len(run.inputs):
                        input_names[run.inputs[i]] = var_input.name
                # Fixed inputs follow variable inputs
                offset = len(recipe.variable_inputs)
                for i, fixed_input in enumerate(recipe.fixed_inputs):
                    idx = offset + i
                    if idx < len(run.inputs):
                        input_names[run.inputs[idx]] = fixed_input.asset
                break
        # Build media HTML for inputs and output
        media_html = ""
        available_inputs = [inp for inp in run.inputs if cache_manager.has_content(inp)]
        has_output = run.status == "completed" and run.output_hash and cache_manager.has_content(run.output_hash)
        # IPFS-only output: no local cache copy but a CID is recorded.
        has_ipfs_output = run.status == "completed" and run.output_ipfs_cid and not has_output
        if available_inputs or has_output or has_ipfs_output:
            # Flexible grid - more columns for more items
            num_items = len(available_inputs) + (1 if has_output else 0)
            grid_cols = min(num_items, 3)  # Max 3 columns
            media_html = f'<div class="grid gap-6 md:grid-cols-{grid_cols} mb-8">'
            for idx, input_hash in enumerate(available_inputs):
                input_media_type = detect_media_type(get_cache_path(input_hash))
                input_video_src = video_src_for_request(input_hash, request)
                if input_media_type == "video":
                    input_elem = f'<video src="{input_video_src}" controls muted loop playsinline class="max-w-full max-h-64 rounded-lg"></video>'
                elif input_media_type == "image":
                    input_elem = f'<img src="/cache/{input_hash}/raw" alt="input" class="max-w-full max-h-64 rounded-lg">'
                else:
                    input_elem = '<p class="text-gray-400">Unknown format</p>'
                # Get input name or fall back to "Input N"
                input_name = input_names.get(input_hash, f"Input {idx + 1}")
                media_html += f'''
                <div class="bg-dark-600 rounded-lg p-4">
                    <div class="text-sm text-gray-400 mb-2">{input_name}</div>
                    <a href="/cache/{input_hash}" class="text-blue-400 hover:text-blue-300 font-mono text-xs">{input_hash[:24]}...</a>
                    <div class="mt-3 flex justify-center">{input_elem}</div>
                </div>
                '''
            if has_output:
                output_hash = run.output_hash
                output_media_type = detect_media_type(get_cache_path(output_hash))
                output_video_src = video_src_for_request(output_hash, request)
                if output_media_type == "video":
                    output_elem = f'<video src="{output_video_src}" controls autoplay muted loop playsinline class="max-w-full max-h-64 rounded-lg"></video>'
                elif output_media_type == "image":
                    output_elem = f'<img src="/cache/{output_hash}/raw" alt="output" class="max-w-full max-h-64 rounded-lg">'
                else:
                    output_elem = '<p class="text-gray-400">Unknown format</p>'
                media_html += f'''
                <div class="bg-dark-600 rounded-lg p-4">
                    <div class="text-sm text-gray-400 mb-2">Output</div>
                    <a href="/cache/{output_hash}" class="text-blue-400 hover:text-blue-300 font-mono text-xs">{output_hash[:24]}...</a>
                    <div class="mt-3 flex justify-center">{output_elem}</div>
                </div>
                '''
            elif has_ipfs_output:
                # IPFS-only output (IPFS_PRIMARY mode)
                output_cid = run.output_ipfs_cid
                ipfs_gateway = IPFS_GATEWAY_URL.rstrip('/') if IPFS_GATEWAY_URL else "https://ipfs.io/ipfs"
                output_elem = f'<video src="{ipfs_gateway}/{output_cid}" controls autoplay muted loop playsinline class="max-w-full max-h-64 rounded-lg"></video>'
                media_html += f'''
                <div class="bg-dark-600 rounded-lg p-4">
                    <div class="text-sm text-gray-400 mb-2">Output (IPFS)</div>
                    <a href="/ipfs/{output_cid}" class="text-blue-400 hover:text-blue-300 font-mono text-xs">{output_cid}</a>
                    <div class="mt-3 flex justify-center">{output_elem}</div>
                </div>
                '''
            media_html += '</div>'
        # Build inputs list with names
        inputs_html = ''.join([
            f'<div class="flex gap-2 items-baseline"><span class="text-gray-400 text-xs">{input_names.get(inp, f"Input {i+1}")}:</span> <a href="/cache/{inp}" class="text-blue-400 hover:text-blue-300 font-mono text-xs">{inp}</a></div>'
            for i, inp in enumerate(run.inputs)
        ])
        # Infrastructure section
        infra_html = ""
        if run.infrastructure:
            software = run.infrastructure.get("software", {})
            hardware = run.infrastructure.get("hardware", {})
            infra_html = f'''
            <div class="bg-dark-600 rounded-lg p-4">
                <div class="text-sm text-gray-400 mb-1">Infrastructure</div>
                <div class="text-gray-200 text-sm">
                    Software: {software.get("name", "unknown")} ({software.get("content_hash", "unknown")[:16]}...)<br>
                    Hardware: {hardware.get("name", "unknown")} ({hardware.get("content_hash", "unknown")[:16]}...)
                </div>
            </div>
            '''
        # Error display
        error_html = ""
        if run.error:
            error_html = f'''
            <div class="bg-red-900/30 border border-red-700 rounded-lg p-4 mb-6">
                <div class="text-sm text-red-400 mb-1">Error</div>
                <div class="text-red-300">{run.error}</div>
            </div>
            '''
        # Publish section - check if already published to L2
        publish_html = ""
        if run.status == "completed" and run.output_hash:
            l2_shares = await database.get_l2_shares(run.output_hash, ctx.actor_id)
            if l2_shares:
                # Already published - show link to L2
                share = l2_shares[0]
                l2_server = share.get("l2_server", "")
                # Force HTTPS for the outbound link.
                l2_https = l2_server.replace("http://", "https://")
                asset_name = share.get("asset_name", "")
                activity_id = share.get("activity_id")
                # Link to activity if available, otherwise fall back to asset
                l2_link = f"{l2_https}/activities/{activity_id}" if activity_id else f"{l2_https}/assets/{asset_name}"
                publish_html = f'''
                <div class="border-t border-dark-500 pt-6 mt-6">
                    <h2 class="text-lg font-semibold text-white mb-3">Published to L2</h2>
                    <div class="bg-green-900/30 border border-green-700 rounded-lg p-4">
                        <p class="text-green-300">
                            Published as <strong>{asset_name[:16]}...</strong>
                            <a href="{l2_link}" target="_blank" class="underline ml-2">View on L2</a>
                        </p>
                    </div>
                </div>
                '''
            else:
                # Not published - show publish form
                publish_html = f'''
                <div class="border-t border-dark-500 pt-6 mt-6">
                    <h2 class="text-lg font-semibold text-white mb-3">Publish to L2</h2>
                    <p class="text-sm text-gray-400 mb-4">Register this run (inputs, recipe, output) on the L2 ActivityPub server. Assets are identified by their content hash.</p>
                    <div id="publish-result"></div>
                    <form hx-post="/ui/publish-run/{run.run_id}" hx-target="#publish-result" hx-swap="innerHTML"
                          class="flex flex-wrap gap-3 items-center">
                        <button type="submit"
                                class="px-4 py-2 bg-blue-600 hover:bg-blue-700 text-white font-medium rounded-lg transition-colors">
                            Publish to L2
                        </button>
                    </form>
                </div>
                '''
        # Delete section
        delete_html = f'''
        <div class="border-t border-dark-500 pt-6 mt-6">
            <h2 class="text-lg font-semibold text-white mb-3">Delete Run</h2>
            <p class="text-sm text-gray-400 mb-4">
                {"This run failed and can be deleted." if run.status == "failed" else "Delete this run and its associated cache entries."}
            </p>
            <div id="delete-result"></div>
            <button hx-delete="/ui/runs/{run.run_id}/discard" hx-target="#delete-result" hx-swap="innerHTML"
                    hx-confirm="Are you sure you want to delete this run? This cannot be undone."
                    class="px-4 py-2 bg-red-600 hover:bg-red-700 text-white font-medium rounded-lg transition-colors">
                Delete Run
            </button>
        </div>
        '''
        # Output link card: local cache hash preferred, IPFS CID otherwise.
        output_link = ""
        if run.output_hash:
            output_link = f'''<div class="bg-dark-600 rounded-lg p-4">
                <div class="text-sm text-gray-400 mb-1">Output</div>
                <a href="/cache/{run.output_hash}" class="text-blue-400 hover:text-blue-300 font-mono text-xs">{run.output_hash}</a>
            </div>'''
        elif run.output_ipfs_cid:
            output_link = f'''<div class="bg-dark-600 rounded-lg p-4">
                <div class="text-sm text-gray-400 mb-1">Output (IPFS)</div>
                <a href="/ipfs/{run.output_ipfs_cid}" class="text-blue-400 hover:text-blue-300 font-mono text-xs">{run.output_ipfs_cid}</a>
            </div>'''
        completed_html = ""
        if run.completed_at:
            completed_html = f'''<div class="bg-dark-600 rounded-lg p-4">
                <div class="text-sm text-gray-400 mb-1">Completed</div>
                <div class="text-gray-200">{run.completed_at[:19].replace('T', ' ')}</div>
            </div>'''
        # Sub-navigation tabs for run detail pages
        sub_tabs_html = render_run_sub_tabs(run_id, active="overview")
        content = f'''
        <a href="/runs" class="inline-flex items-center text-blue-400 hover:text-blue-300 mb-6">
            <svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
            </svg>
            Back to runs
        </a>
        {sub_tabs_html}
        <div class="bg-dark-700 rounded-lg p-6">
            <div class="flex flex-wrap items-center justify-between gap-4 mb-6">
                <div class="flex items-center gap-3">
                    <a href="{effect_url}" target="_blank"
                       class="px-3 py-1 bg-blue-600 hover:bg-blue-700 text-white text-sm font-medium rounded-full transition-colors">
                        {run.recipe}
                    </a>
                    <span class="text-gray-400 font-mono text-sm">{run.run_id[:16]}...</span>
                </div>
                <span class="px-3 py-1 {status_badge} text-sm font-medium rounded-full">{run.status}</span>
            </div>
            {error_html}
            {media_html}
            <div class="border-t border-dark-500 pt-6">
                <h2 class="text-lg font-semibold text-white mb-4">Provenance</h2>
                <div class="grid gap-4 sm:grid-cols-2">
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Owner</div>
                        <div class="text-gray-200">{run.username or "anonymous"}</div>
                    </div>
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Effect</div>
                        <a href="{effect_url}" target="_blank" class="text-blue-400 hover:text-blue-300">{run.recipe}</a>
                    </div>
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Effects Commit</div>
                        <div class="text-gray-200 font-mono text-xs">{run.effects_commit or "N/A"}</div>
                    </div>
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Input(s)</div>
                        <div>{inputs_html}</div>
                    </div>
                    {output_link}
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Run ID</div>
                        <div class="text-gray-200 font-mono text-xs">{run.run_id}</div>
                    </div>
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Created</div>
                        <div class="text-gray-200">{run.created_at[:19].replace('T', ' ')}</div>
                    </div>
                    {completed_html}
                    {infra_html}
                </div>
            </div>
            {publish_html}
            {delete_html}
        </div>
        '''
        return HTMLResponse(render_page(f"Run: {run.recipe}", content, ctx.actor_id, active_tab="runs"))
    # JSON response
    return run.model_dump()
# Plan/Analysis cache directories (match tasks/orchestrate.py)
# Plans are stored as "{plan_id}.json"; analysis results as "{content_hash}.json".
PLAN_CACHE_DIR = CACHE_DIR / 'plans'
ANALYSIS_CACHE_DIR = CACHE_DIR / 'analysis'
def load_plan_for_run(run: RunStatus) -> Optional[dict]:
    """Load plan data for a run, trying plan_id first, then matching by inputs."""
    PLAN_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    logger.info(f"[load_plan] run_id={run.run_id[:16]}, plan_id={run.plan_id}, inputs={run.inputs}")
    logger.info(f"[load_plan] PLAN_CACHE_DIR={PLAN_CACHE_DIR}")
    # Strategy 1: direct lookup by plan_id.
    if run.plan_id:
        candidate = PLAN_CACHE_DIR / f"{run.plan_id}.json"
        logger.info(f"[load_plan] Trying plan_id file: {candidate}, exists={candidate.exists()}")
        if candidate.exists():
            try:
                return json.loads(candidate.read_text())
            except (json.JSONDecodeError, IOError) as e:
                logger.warning(f"[load_plan] Failed to load plan file: {e}")
    # Strategy 2: scan all cached plans and match on the set of input hashes.
    plan_files = list(PLAN_CACHE_DIR.glob("*.json"))
    logger.info(f"[load_plan] Available plan files: {len(plan_files)}")
    for candidate in plan_files:
        try:
            data = json.loads(candidate.read_text())
        except (json.JSONDecodeError, IOError):
            continue
        plan_inputs = data.get("input_hashes", {})
        if run.inputs and set(plan_inputs.values()) == set(run.inputs):
            logger.info(f"[load_plan] Found matching plan by inputs: {candidate}")
            return data
    # Strategy 3 (IPFS_PRIMARY mode): a step result may carry the plan inline.
    if run.step_results:
        logger.info(f"[load_plan] Checking step_results for embedded plan, keys={list(run.step_results.keys())[:5]}")
        for step_id, result in run.step_results.items():
            if isinstance(result, dict) and "plan_json" in result:
                logger.info(f"[load_plan] Found embedded plan_json in step {step_id}")
                try:
                    return json.loads(result["plan_json"])
                except (json.JSONDecodeError, TypeError):
                    pass
    logger.warning(f"[load_plan] No plan found for run {run.run_id[:16]}")
    return None
def load_plan_for_run_with_fallback(run: RunStatus) -> Optional[dict]:
    """Load plan data for a run, with fallback to generate from recipe.

    NOTE: This is deliberately a *sync* function. Every call site invokes it
    via ``asyncio.to_thread(load_plan_for_run_with_fallback, run)``, which
    calls it as a plain function in a worker thread. Declaring it ``async``
    (as it previously was) made ``to_thread`` return an un-awaited coroutine
    object instead of the plan dict, so callers would then fail on
    ``plan_data.get(...)``. It contains no awaits, so sync is correct.

    Returns the plan dict, or None if no cached plan exists and one cannot
    be regenerated from the recipe.
    """
    # First try cached plans (by plan_id, then by input-hash match).
    plan_data = load_plan_for_run(run)
    if plan_data:
        return plan_data
    # Fallback: regenerate the plan from the recipe definition.
    recipe_name = run.recipe.replace("recipe:", "") if run.recipe.startswith("recipe:") else run.recipe
    recipe_status = None
    for recipe in list_all_recipes():
        if recipe.name == recipe_name:
            recipe_status = recipe
            break
    if recipe_status:
        recipe_path = cache_manager.get_by_content_hash(recipe_status.recipe_id)
        if recipe_path and recipe_path.exists():
            try:
                recipe_yaml = recipe_path.read_text()
                # Build input_hashes mapping from run inputs: variable inputs
                # are matched positionally against the run's input list.
                input_hashes = {}
                for i, var_input in enumerate(recipe_status.variable_inputs):
                    if i < len(run.inputs):
                        input_hashes[var_input.node_id] = run.inputs[i]
                # Try to generate plan (orchestrator may not be importable in
                # all deployments — treat that as "no plan available").
                try:
                    from tasks.orchestrate import generate_plan as gen_plan_task
                    plan_result = gen_plan_task(recipe_yaml, input_hashes)
                    if plan_result and plan_result.get("status") == "planned":
                        return plan_result
                except ImportError:
                    pass
            except Exception as e:
                # Best-effort fallback: log and return None rather than fail
                # the page render.
                logger.warning(f"Failed to generate plan for run {run.run_id}: {e}")
    return None
@app.get("/run/{run_id}/plan/node/{step_id}", response_class=HTMLResponse)
async def run_plan_node_detail(run_id: str, step_id: str, request: Request):
    """HTMX partial: Get node detail HTML fragment.

    Renders a detail card for one step of the run's execution plan:
    media preview, output link (local cache or IPFS), config JSON and
    input-step chips.

    NOTE(review): unlike /run/{run_id}/plan, this endpoint does not check
    run ownership — confirm that is intentional.
    """
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return HTMLResponse('<p class="text-red-400">Login required</p>', status_code=401)
    run = await asyncio.to_thread(load_run, run_id)
    if not run:
        return HTMLResponse(f'<p class="text-red-400">Run not found</p>', status_code=404)
    # Load plan data (with fallback to generate from recipe)
    plan_data = await asyncio.to_thread(load_plan_for_run_with_fallback, run)
    if not plan_data:
        return HTMLResponse('<p class="text-gray-400">Plan not found</p>')
    # Find the step
    step = None
    for s in plan_data.get("steps", []):
        if s.get("step_id") == step_id:
            step = s
            break
    if not step:
        return HTMLResponse(f'<p class="text-gray-400">Step {step_id} not found</p>')
    # Get step info
    step_name = step.get("name", step_id[:20])
    node_type = step.get("node_type", "EFFECT")
    cache_id = step.get("cache_id", "")
    config = step.get("config", {})
    level = step.get("level", 0)
    input_steps = step.get("input_steps", [])
    # Check for IPFS CID recorded for this step (IPFS_PRIMARY mode).
    step_cid = None
    if run.step_results:
        res = run.step_results.get(step_id)
        if isinstance(res, dict) and res.get("cid"):
            step_cid = res["cid"]
    has_cached = cache_manager.has_content(cache_id) if cache_id else False
    color = NODE_COLORS.get(node_type, NODE_COLORS["default"])
    # Build preview HTML: local cache preferred, IPFS gateway otherwise.
    preview_html = ""
    if has_cached and cache_id:
        media_type = detect_media_type(get_cache_path(cache_id))
        if media_type == "video":
            preview_html = f'''
            <div class="mb-4">
                <video src="/cache/{cache_id}/raw" controls muted class="w-full max-h-64 rounded-lg"></video>
            </div>'''
        elif media_type == "image":
            preview_html = f'''
            <div class="mb-4">
                <img src="/cache/{cache_id}/raw" class="w-full max-h-64 rounded-lg object-contain">
            </div>'''
    elif step_cid:
        ipfs_gateway = IPFS_GATEWAY_URL.rstrip('/') if IPFS_GATEWAY_URL else "https://ipfs.io/ipfs"
        preview_html = f'''
        <div class="mb-4">
            <video src="{ipfs_gateway}/{step_cid}" controls muted class="w-full max-h-64 rounded-lg"></video>
        </div>'''
    # Build output link (IPFS CID takes precedence over local cache).
    output_html = ""
    if step_cid:
        output_html = f'''
        <div class="mt-4">
            <h5 class="text-sm font-medium text-gray-400 mb-2">Output (IPFS)</h5>
            <a href="/ipfs/{step_cid}" class="flex items-center justify-between bg-dark-600 rounded p-3 hover:bg-dark-500 transition-colors">
                <span class="font-mono text-xs text-gray-300 truncate">{step_cid}</span>
                <span class="px-3 py-1 bg-blue-600 text-white text-xs rounded ml-2">View</span>
            </a>
        </div>'''
    elif has_cached and cache_id:
        output_html = f'''
        <div class="mt-4">
            <h5 class="text-sm font-medium text-gray-400 mb-2">Output</h5>
            <a href="/cache/{cache_id}" class="flex items-center justify-between bg-dark-600 rounded p-3 hover:bg-dark-500 transition-colors">
                <span class="font-mono text-xs text-gray-300 truncate">{cache_id}</span>
                <span class="px-3 py-1 bg-blue-600 text-white text-xs rounded ml-2">View</span>
            </a>
        </div>'''
    # Config display
    config_html = ""
    if config:
        config_json = json.dumps(config, indent=2)
        config_html = f'''
        <div class="mt-4">
            <h5 class="text-sm font-medium text-gray-400 mb-2">Config</h5>
            <pre class="text-xs bg-dark-600 p-3 rounded overflow-x-auto max-h-40">{config_json}</pre>
        </div>'''
    # Input steps
    inputs_html = ""
    if input_steps:
        inputs_list = "".join([
            f'<span class="px-2 py-1 bg-dark-500 rounded text-xs font-mono">{inp[:16]}...</span>'
            for inp in input_steps
        ])
        inputs_html = f'''
        <div class="mt-4">
            <h5 class="text-sm font-medium text-gray-400 mb-2">Input Steps</h5>
            <div class="flex flex-wrap gap-2">{inputs_list}</div>
        </div>'''
    # Derive display status: "cached" when output exists anywhere, else
    # follow the overall run status.
    status = "cached" if (has_cached or step_cid) else ("completed" if run.status == "completed" else "pending")
    status_color = "green" if status in ("cached", "completed") else "yellow"
    return HTMLResponse(f'''
    <div class="flex justify-between items-start mb-4">
        <div>
            <h4 class="text-lg font-semibold text-white">{step_name}</h4>
            <div class="flex items-center gap-2 mt-1">
                <span class="px-2 py-0.5 rounded text-xs" style="background-color: {color}; color: white">{node_type}</span>
                <span class="text-{status_color}-400 text-xs">{status}</span>
                <span class="text-gray-500 text-xs">Level {level}</span>
            </div>
        </div>
        <button onclick="closeNodeDetail()" class="text-gray-400 hover:text-white p-1">
            <svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
            </svg>
        </button>
    </div>
    {preview_html}
    <div class="text-sm space-y-2">
        <div><span class="text-gray-400">Step ID:</span> <span class="font-mono text-xs">{step_id}</span></div>
        <div><span class="text-gray-400">Cache ID:</span> <span class="font-mono text-xs">{cache_id[:32] if cache_id else "N/A"}...</span></div>
    </div>
    {output_html}
    {inputs_html}
    {config_html}
    ''')
@app.get("/run/{run_id}/plan", response_class=HTMLResponse)
async def run_plan_visualization(run_id: str, request: Request, node: Optional[str] = None):
    """Visualize execution plan as interactive DAG.

    Builds a Cytoscape graph (nodes + edges) from the run's plan, plus a
    stats summary, a per-step list with cache/IPFS links, and a collapsible
    raw-plan JSON section. `node` optionally pre-selects a node in the graph.
    """
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        content = '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
        return HTMLResponse(render_page("Login Required", content, None, active_tab="runs"), status_code=401)
    run = await asyncio.to_thread(load_run, run_id)
    if not run:
        content = f'<p class="text-red-400">Run not found: {run_id}</p>'
        return HTMLResponse(render_page("Not Found", content, ctx.actor_id, active_tab="runs"), status_code=404)
    # Check user owns this run
    if run.username not in (ctx.username, ctx.actor_id):
        content = '<p class="text-red-400 py-8 text-center">Access denied.</p>'
        return HTMLResponse(render_page("Access Denied", content, ctx.actor_id, active_tab="runs"), status_code=403)
    # Load plan data (with fallback to generate from recipe)
    plan_data = await asyncio.to_thread(load_plan_for_run_with_fallback, run)
    # Build sub-navigation tabs
    tabs_html = render_run_sub_tabs(run_id, active="plan")
    if not plan_data:
        # Show a simpler visualization based on the run's recipe structure
        content = f'''
        <a href="/runs" class="inline-flex items-center text-blue-400 hover:text-blue-300 mb-6">
            <svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
            </svg>
            Back to runs
        </a>
        {tabs_html}
        <div class="bg-dark-700 rounded-lg p-6">
            <h2 class="text-xl font-bold text-white mb-4">Execution Plan</h2>
            <p class="text-gray-400">Could not generate execution plan for this run.</p>
            <p class="text-gray-500 text-sm mt-2">This may be a legacy effect-based run without a recipe, or the recipe is no longer available.</p>
        </div>
        '''
        return HTMLResponse(render_page_with_cytoscape(f"Plan: {run_id[:16]}...", content, ctx.actor_id, active_tab="runs"))
    # Build Cytoscape nodes and edges from plan
    nodes = []
    edges = []
    steps = plan_data.get("steps", [])
    for step in steps:
        node_type = step.get("node_type", "EFFECT")
        color = NODE_COLORS.get(node_type, NODE_COLORS["default"])
        step_id = step.get("step_id", "")
        cache_id = step.get("cache_id", "")
        # Check if this step's output exists in cache (completed)
        # For completed runs, check the actual cache
        has_cached = cache_manager.has_content(cache_id) if cache_id else False
        if has_cached:
            status = "cached"
        elif run.status == "completed":
            # Run completed but this step not in cache - still mark as done
            status = "cached"
        elif run.status == "running":
            status = "running"
        else:
            status = "pending"
        # Use human-readable name if available, otherwise short step_id
        step_name = step.get("name", "")
        if step_name:
            # Use last part of dotted name for label
            label_parts = step_name.split(".")
            label = label_parts[-1] if label_parts else step_name
        else:
            label = step_id[:12] + "..." if len(step_id) > 12 else step_id
        nodes.append({
            "data": {
                "id": step_id,
                "label": label,
                "name": step_name,
                "nodeType": node_type,
                "level": step.get("level", 0),
                "cacheId": cache_id,
                "status": status,
                "color": color,
                "config": step.get("config"),
                "hasCached": has_cached,
            }
        })
    # Build edges from the full plan JSON if available (it carries the
    # complete input_steps wiring), otherwise from the summarized steps.
    if "plan_json" in plan_data:
        try:
            full_plan = json.loads(plan_data["plan_json"])
            for step in full_plan.get("steps", []):
                step_id = step.get("step_id", "")
                for input_step in step.get("input_steps", []):
                    edges.append({
                        "data": {
                            "source": input_step,
                            "target": step_id
                        }
                    })
        except json.JSONDecodeError:
            pass
    else:
        # Build edges directly from steps
        for step in steps:
            step_id = step.get("step_id", "")
            for input_step in step.get("input_steps", []):
                edges.append({
                    "data": {
                        "source": input_step,
                        "target": step_id
                    }
                })
    nodes_json = json.dumps(nodes)
    edges_json = json.dumps(edges)
    dag_html = render_dag_cytoscape(nodes_json, edges_json, run_id=run_id, initial_node=node or "")
    # Stats summary - count from built nodes to reflect actual execution status
    # NOTE(review): the loop above only ever assigns "cached"/"running"/"pending",
    # so completed_count is always 0 here — confirm that is intended.
    total = len(nodes)
    cached_count = sum(1 for n in nodes if n["data"]["status"] == "cached")
    completed_count = sum(1 for n in nodes if n["data"]["status"] == "completed")
    running_count = sum(1 for n in nodes if n["data"]["status"] == "running")
    pending_count = total - cached_count - completed_count - running_count
    # Plan name for display
    plan_name = plan_data.get("recipe", run.recipe.replace("recipe:", ""))
    content = f'''
    <a href="/runs" class="inline-flex items-center text-blue-400 hover:text-blue-300 mb-6">
        <svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
        </svg>
        Back to runs
    </a>
    {tabs_html}
    <div class="bg-dark-700 rounded-lg p-6">
        <h2 class="text-xl font-bold text-white mb-4">Execution Plan: {plan_name}</h2>
        <div class="grid grid-cols-4 gap-4 mb-6">
            <div class="bg-dark-600 rounded-lg p-4 text-center">
                <div class="text-2xl font-bold text-white">{total}</div>
                <div class="text-sm text-gray-400">Total Steps</div>
            </div>
            <div class="bg-dark-600 rounded-lg p-4 text-center">
                <div class="text-2xl font-bold text-green-400">{completed_count}</div>
                <div class="text-sm text-gray-400">Completed</div>
            </div>
            <div class="bg-dark-600 rounded-lg p-4 text-center">
                <div class="text-2xl font-bold text-blue-400">{cached_count}</div>
                <div class="text-sm text-gray-400">Cached</div>
            </div>
            <div class="bg-dark-600 rounded-lg p-4 text-center">
                <div class="text-2xl font-bold text-yellow-400">{pending_count}</div>
                <div class="text-sm text-gray-400">Pending</div>
            </div>
        </div>
        <div class="mb-4">
            <div class="flex gap-4 text-sm flex-wrap">
                <span class="flex items-center gap-2">
                    <span class="w-4 h-4 rounded" style="background-color: #3b82f6"></span> SOURCE
                </span>
                <span class="flex items-center gap-2">
                    <span class="w-4 h-4 rounded" style="background-color: #22c55e"></span> EFFECT
                </span>
                <span class="flex items-center gap-2">
                    <span class="w-4 h-4 rounded" style="background-color: #6366f1"></span> _LIST
                </span>
                <span class="flex items-center gap-2">
                    <span class="w-4 h-4 rounded border-2 border-green-500 bg-dark-600"></span> Cached
                </span>
            </div>
        </div>
        {dag_html}
        <!-- Steps List with Cache IDs -->
        <div class="mt-8">
            <h3 class="text-lg font-semibold text-white mb-4">Execution Steps</h3>
            <div class="space-y-2">
    '''
    # Build steps list with cache_id links
    # Check if we have step CIDs from IPFS_PRIMARY mode
    step_cids = {}
    if run.step_results:
        for sid, res in run.step_results.items():
            if isinstance(res, dict) and res.get("cid"):
                step_cids[sid] = res["cid"]
    for i, step in enumerate(steps):
        step_id = step.get("step_id", "")
        step_name = step.get("name", step_id[:20])
        node_type = step.get("node_type", "EFFECT")
        cache_id = step.get("cache_id", "")
        step_cid = step_cids.get(step_id, "")  # CID from IPFS_PRIMARY mode
        has_cached = cache_manager.has_content(cache_id) if cache_id else False
        color = NODE_COLORS.get(node_type, NODE_COLORS["default"])
        status_badge = ""
        if has_cached or step_cid:
            status_badge = '<span class="text-green-400 text-xs">cached</span>'
        elif run.status == "completed":
            status_badge = '<span class="text-green-400 text-xs">completed</span>'
        # Output link: IPFS CID preferred, then local cache; a plain hash
        # snippet when the output is not available anywhere.
        cache_link = ""
        if step_cid:
            # IPFS_PRIMARY mode - show CID link
            cache_link = f'''
            <div class="mt-1 ml-8 flex items-center gap-2">
                <span class="text-gray-500 text-xs">Output (IPFS):</span>
                <a href="/ipfs/{step_cid}" class="font-mono text-xs text-blue-400 hover:text-blue-300">{step_cid}</a>
            </div>'''
        elif cache_id:
            if has_cached:
                cache_link = f'''
                <div class="mt-1 ml-8 flex items-center gap-2">
                    <span class="text-gray-500 text-xs">Output:</span>
                    <a href="/cache/{cache_id}" class="font-mono text-xs text-blue-400 hover:text-blue-300">{cache_id}</a>
                </div>'''
            else:
                cache_link = f'''
                <div class="mt-1 ml-8">
                    <span class="text-gray-600 text-xs font-mono">{cache_id[:32]}...</span>
                </div>'''
        content += f'''
        <div class="bg-dark-600 rounded p-3">
            <div class="flex items-center justify-between">
                <div class="flex items-center gap-3">
                    <span class="w-6 h-6 rounded-full flex items-center justify-center text-xs text-white" style="background-color: {color}">{i + 1}</span>
                    <span class="font-medium text-white">{step_name}</span>
                    <span class="text-gray-500 text-sm">{node_type}</span>
                </div>
                <div class="flex items-center gap-2">
                    {status_badge}
                </div>
            </div>
            {cache_link}
        </div>
        '''
    content += '''
            </div>
        </div>
    '''
    # Add collapsible Plan JSON section
    # Parse nested plan_json if present (it's double-encoded as a string)
    display_plan = plan_data.copy()
    if "plan_json" in display_plan and isinstance(display_plan["plan_json"], str):
        try:
            display_plan["plan_json"] = json.loads(display_plan["plan_json"])
        except json.JSONDecodeError:
            pass
    plan_json_str = json.dumps(display_plan, indent=2)
    # Escape HTML entities in JSON
    plan_json_str = plan_json_str.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
    content += f'''
    <!-- Plan JSON -->
    <details class="mt-6">
        <summary class="cursor-pointer text-gray-400 hover:text-white text-sm mb-2">
            Show Plan JSON
        </summary>
        <div class="bg-dark-600 rounded-lg border border-dark-500 p-4 overflow-x-auto">
            <pre class="text-sm text-gray-300 whitespace-pre-wrap">{plan_json_str}</pre>
        </div>
    </details>
    </div>
    '''
    return HTMLResponse(render_page_with_cytoscape(f"Plan: {run_id[:16]}...", content, ctx.actor_id, active_tab="runs"))
@app.get("/run/{run_id}/analysis", response_class=HTMLResponse)
async def run_analysis_page(run_id: str, request: Request):
"""Show analysis results for run inputs."""
ctx = await get_user_context_from_cookie(request)
if not ctx:
content = '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
return HTMLResponse(render_page("Login Required", content, None, active_tab="runs"), status_code=401)
run = await asyncio.to_thread(load_run, run_id)
if not run:
content = f'<p class="text-red-400">Run not found: {run_id}</p>'
return HTMLResponse(render_page("Not Found", content, ctx.actor_id, active_tab="runs"), status_code=404)
# Check user owns this run
if run.username not in (ctx.username, ctx.actor_id):
content = '<p class="text-red-400 py-8 text-center">Access denied.</p>'
return HTMLResponse(render_page("Access Denied", content, ctx.actor_id, active_tab="runs"), status_code=403)
tabs_html = render_run_sub_tabs(run_id, active="analysis")
# Load analysis results for each input
analysis_html = ""
ANALYSIS_CACHE_DIR.mkdir(parents=True, exist_ok=True)
for i, input_hash in enumerate(run.inputs):
analysis_path = ANALYSIS_CACHE_DIR / f"{input_hash}.json"
analysis_data = None
if analysis_path.exists():
try:
with open(analysis_path) as f:
analysis_data = json.load(f)
except (json.JSONDecodeError, IOError):
pass
input_name = f"Input {i + 1}"
if analysis_data:
tempo = analysis_data.get("tempo", "N/A")
if isinstance(tempo, float):
tempo = f"{tempo:.1f}"
beat_times = analysis_data.get("beat_times", [])
beat_count = len(beat_times)
energy = analysis_data.get("energy")
# Beat visualization (simple bar chart showing beat positions)
beat_bars = ""
if beat_times and len(beat_times) > 0:
# Show first 50 beats as vertical bars
display_beats = beat_times[:50]
max_time = max(display_beats) if display_beats else 1
for bt in display_beats:
# Normalize to percentage
pos = (bt / max_time) * 100 if max_time > 0 else 0
beat_bars += f'<div class="w-1 bg-blue-500" style="height: 100%; margin-left: {pos * 0.8}%"></div>'
energy_bar = ""
if energy is not None:
try:
energy_pct = min(float(energy) * 100, 100)
energy_bar = f'''
<div class="mt-4">
<div class="text-xs text-gray-400 mb-1">Energy Level</div>
<div class="w-full bg-dark-500 rounded-full h-3">
<div class="bg-gradient-to-r from-green-500 to-yellow-500 h-3 rounded-full" style="width: {energy_pct}%"></div>
</div>
<div class="text-xs text-gray-500 mt-1">{energy_pct:.1f}%</div>
</div>
'''
except (TypeError, ValueError):
pass
analysis_html += f'''
<div class="bg-dark-700 rounded-lg p-6 mb-4">
<div class="flex justify-between items-start mb-4">
<div>
<h3 class="text-lg font-semibold text-white">{input_name}</h3>
<a href="/cache/{input_hash}" class="text-blue-400 hover:text-blue-300 font-mono text-xs">{input_hash[:24]}...</a>
</div>
<span class="px-2 py-1 bg-green-600 text-white text-xs rounded-full">Analyzed</span>
</div>
<div class="grid grid-cols-2 gap-4 mb-4">
<div class="bg-dark-600 rounded-lg p-4">
<div class="text-2xl font-bold text-white">{tempo}</div>
<div class="text-sm text-gray-400">BPM (Tempo)</div>
</div>
<div class="bg-dark-600 rounded-lg p-4">
<div class="text-2xl font-bold text-white">{beat_count}</div>
<div class="text-sm text-gray-400">Beats Detected</div>
</div>
</div>
{energy_bar}
<div class="mt-4">
<div class="text-xs text-gray-400 mb-2">Beat Timeline (first 50 beats)</div>
<div class="relative h-8 bg-dark-600 rounded overflow-hidden">
<div class="absolute inset-0 flex items-end">
{beat_bars if beat_bars else '<span class="text-gray-500 text-xs p-2">No beats detected</span>'}
</div>
</div>
</div>
</div>
'''
else:
analysis_html += f'''
<div class="bg-dark-700 rounded-lg p-6 mb-4">
<div class="flex justify-between items-start">
<div>
<h3 class="text-lg font-semibold text-white">{input_name}</h3>
<a href="/cache/{input_hash}" class="text-blue-400 hover:text-blue-300 font-mono text-xs">{input_hash[:24]}...</a>
</div>
<span class="px-2 py-1 bg-gray-600 text-white text-xs rounded-full">Not Analyzed</span>
</div>
<p class="text-gray-400 mt-4">No analysis data available for this input.</p>
<p class="text-gray-500 text-sm mt-1">Analysis is performed when using recipe-based runs.</p>
</div>
'''
if not run.inputs:
analysis_html = '<p class="text-gray-400">No inputs found for this run.</p>'
content = f'''
<a href="/runs" class="inline-flex items-center text-blue-400 hover:text-blue-300 mb-6">
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
</svg>
Back to runs
</a>
{tabs_html}
<h2 class="text-xl font-bold text-white mb-4">Analysis Results</h2>
{analysis_html}
'''
return HTMLResponse(render_page(f"Analysis: {run_id[:16]}...", content, ctx.actor_id, active_tab="runs"))
@app.get("/run/{run_id}/artifacts", response_class=HTMLResponse)
async def run_artifacts_page(run_id: str, request: Request):
"""Show all cached artifacts produced by this run."""
ctx = await get_user_context_from_cookie(request)
if not ctx:
content = '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
return HTMLResponse(render_page("Login Required", content, None, active_tab="runs"), status_code=401)
run = await asyncio.to_thread(load_run, run_id)
if not run:
content = f'<p class="text-red-400">Run not found: {run_id}</p>'
return HTMLResponse(render_page("Not Found", content, ctx.actor_id, active_tab="runs"), status_code=404)
# Check user owns this run
if run.username not in (ctx.username, ctx.actor_id):
content = '<p class="text-red-400 py-8 text-center">Access denied.</p>'
return HTMLResponse(render_page("Access Denied", content, ctx.actor_id, active_tab="runs"), status_code=403)
tabs_html = render_run_sub_tabs(run_id, active="artifacts")
# Collect all artifacts: inputs + output
artifacts = []
# Add inputs
for i, content_hash in enumerate(run.inputs):
cache_path = get_cache_path(content_hash)
if cache_path and cache_path.exists():
size = cache_path.stat().st_size
media_type = detect_media_type(cache_path)
artifacts.append({
"hash": content_hash,
"path": cache_path,
"size": size,
"media_type": media_type,
"role": "input",
"role_color": "blue",
"name": f"Input {i + 1}",
})
# Add output
if run.output_hash:
cache_path = get_cache_path(run.output_hash)
if cache_path and cache_path.exists():
size = cache_path.stat().st_size
media_type = detect_media_type(cache_path)
artifacts.append({
"hash": run.output_hash,
"path": cache_path,
"size": size,
"media_type": media_type,
"role": "output",
"role_color": "green",
"name": "Output",
})
# Build artifacts HTML
artifacts_html = ""
for artifact in artifacts:
size_kb = artifact["size"] / 1024
if size_kb < 1024:
size_str = f"{size_kb:.1f} KB"
else:
size_str = f"{size_kb/1024:.1f} MB"
# Thumbnail for media
thumb = ""
if artifact["media_type"] == "video":
thumb = f'<video src="/cache/{artifact["hash"]}/raw" class="w-16 h-16 object-cover rounded" muted></video>'
elif artifact["media_type"] == "image":
thumb = f'<img src="/cache/{artifact["hash"]}/raw" class="w-16 h-16 object-cover rounded" alt="">'
else:
thumb = '<div class="w-16 h-16 bg-dark-500 rounded flex items-center justify-center text-gray-400 text-xs">File</div>'
role_color = artifact["role_color"]
artifacts_html += f'''
<div class="bg-dark-700 rounded-lg p-4 flex items-center gap-4">
{thumb}
<div class="flex-1 min-w-0">
<div class="text-sm text-gray-300 mb-1">{artifact["name"]}</div>
<a href="/cache/{artifact["hash"]}" class="text-blue-400 hover:text-blue-300 font-mono text-xs truncate block">{artifact["hash"][:32]}...</a>
<div class="flex gap-4 mt-1 text-xs text-gray-400">
<span>{size_str}</span>
<span>{artifact["media_type"]}</span>
</div>
</div>
<span class="px-2 py-1 bg-{role_color}-600 text-white text-xs rounded-full">{artifact["role"]}</span>
</div>
'''
if not artifacts:
artifacts_html = '<p class="text-gray-400">No cached artifacts found for this run.</p>'
content = f'''
<a href="/runs" class="inline-flex items-center text-blue-400 hover:text-blue-300 mb-6">
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
</svg>
Back to runs
</a>
{tabs_html}
<h2 class="text-xl font-bold text-white mb-4">Cached Artifacts</h2>
<div class="space-y-3">
{artifacts_html}
</div>
'''
return HTMLResponse(render_page(f"Artifacts: {run_id[:16]}...", content, ctx.actor_id, active_tab="runs"))
# JSON API endpoints for future WebSocket support
@app.get("/api/run/{run_id}/plan")
async def api_run_plan(run_id: str, request: Request):
"""Get execution plan data as JSON for programmatic access."""
ctx = await get_user_context_from_cookie(request)
if not ctx:
raise HTTPException(401, "Not logged in")
run = await asyncio.to_thread(load_run, run_id)
if not run:
raise HTTPException(404, f"Run {run_id} not found")
if run.username not in (ctx.username, ctx.actor_id):
raise HTTPException(403, "Access denied")
# Look for plan in cache
PLAN_CACHE_DIR.mkdir(parents=True, exist_ok=True)
for plan_file in PLAN_CACHE_DIR.glob("*.json"):
try:
with open(plan_file) as f:
data = json.load(f)
plan_inputs = data.get("input_hashes", {})
if set(plan_inputs.values()) == set(run.inputs):
return data
except (json.JSONDecodeError, IOError):
continue
return {"status": "not_found", "message": "No plan found for this run"}
@app.get("/api/run/{run_id}/analysis")
async def api_run_analysis(run_id: str, request: Request):
"""Get analysis data as JSON for programmatic access."""
ctx = await get_user_context_from_cookie(request)
if not ctx:
raise HTTPException(401, "Not logged in")
run = await asyncio.to_thread(load_run, run_id)
if not run:
raise HTTPException(404, f"Run {run_id} not found")
if run.username not in (ctx.username, ctx.actor_id):
raise HTTPException(403, "Access denied")
ANALYSIS_CACHE_DIR.mkdir(parents=True, exist_ok=True)
results = {}
for input_hash in run.inputs:
analysis_path = ANALYSIS_CACHE_DIR / f"{input_hash}.json"
if analysis_path.exists():
try:
with open(analysis_path) as f:
results[input_hash] = json.load(f)
except (json.JSONDecodeError, IOError):
results[input_hash] = None
else:
results[input_hash] = None
return {"run_id": run_id, "inputs": run.inputs, "analysis": results}
@app.get("/runs")
async def list_runs(request: Request, page: int = 1, limit: int = 20):
"""List runs. HTML for browsers (with infinite scroll), JSON for APIs (with pagination)."""
ctx = await get_user_context_from_cookie(request)
all_runs = await asyncio.to_thread(list_all_runs)
total = len(all_runs)
# Filter by user if logged in for HTML
if wants_html(request) and ctx:
all_runs = [r for r in all_runs if r.username in (ctx.username, ctx.actor_id)]
total = len(all_runs)
# Pagination
start = (page - 1) * limit
end = start + limit
runs_page = all_runs[start:end]
has_more = end < total
if wants_html(request):
if not ctx:
content = '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
return HTMLResponse(render_page("Runs", content, None, active_tab="runs"))
if not runs_page:
if page == 1:
content = '<p class="text-gray-400 py-8 text-center">You have no runs yet. Use the CLI to start a run.</p>'
else:
return HTMLResponse("") # Empty for infinite scroll
else:
# Status badge colors
status_colors = {
"completed": "bg-green-600 text-white",
"running": "bg-yellow-600 text-white",
"failed": "bg-red-600 text-white",
"pending": "bg-gray-600 text-white"
}
html_parts = []
for run in runs_page:
status_badge = status_colors.get(run.status, "bg-gray-600 text-white")
html_parts.append(f'''
<a href="/run/{run.run_id}" class="block">
<div class="bg-dark-700 rounded-lg p-4 hover:bg-dark-600 transition-colors">
<div class="flex flex-wrap items-center justify-between gap-3 mb-3">
<div class="flex items-center gap-3">
<span class="px-3 py-1 bg-blue-600 text-white text-sm font-medium rounded-full">{run.recipe}</span>
<span class="text-gray-400 font-mono text-xs hidden sm:inline">{run.run_id[:16]}...</span>
</div>
<span class="px-3 py-1 {status_badge} text-xs font-medium rounded-full">{run.status}</span>
</div>
<div class="text-sm text-gray-400 mb-3">
Created: {run.created_at[:19].replace('T', ' ')}
</div>
''')
# Show input and output thumbnails
has_input = run.inputs and cache_manager.has_content(run.inputs[0])
has_output = run.status == "completed" and run.output_hash and cache_manager.has_content(run.output_hash)
if has_input or has_output:
html_parts.append('<div class="grid gap-4 sm:grid-cols-2">')
if has_input:
input_hash = run.inputs[0]
input_media_type = detect_media_type(get_cache_path(input_hash))
html_parts.append(f'''
<div class="bg-dark-600 rounded-lg p-3">
<div class="text-xs text-gray-400 mb-2">Input</div>
<div class="flex justify-center">
''')
if input_media_type == "video":
html_parts.append(f'<video src="{video_src_for_request(input_hash, request)}" muted loop playsinline class="max-h-24 rounded"></video>')
else:
html_parts.append(f'<img src="/cache/{input_hash}/raw" alt="input" class="max-h-24 rounded">')
html_parts.append('</div></div>')
if has_output:
output_hash = run.output_hash
output_media_type = detect_media_type(get_cache_path(output_hash))
html_parts.append(f'''
<div class="bg-dark-600 rounded-lg p-3">
<div class="text-xs text-gray-400 mb-2">Output</div>
<div class="flex justify-center">
''')
if output_media_type == "video":
html_parts.append(f'<video src="{video_src_for_request(output_hash, request)}" autoplay muted loop playsinline class="max-h-24 rounded"></video>')
else:
html_parts.append(f'<img src="/cache/{output_hash}/raw" alt="output" class="max-h-24 rounded">')
html_parts.append('</div></div>')
html_parts.append('</div>')
if run.status == "failed" and run.error:
html_parts.append(f'<div class="mt-3 text-sm text-red-400">Error: {run.error[:100]}</div>')
html_parts.append('</div></a>')
# For infinite scroll, just return cards if not first page
if page > 1:
if has_more:
html_parts.append(f'''
<div hx-get="/runs?page={page + 1}" hx-trigger="revealed" hx-swap="afterend">
<p class="py-4 text-center text-gray-400">Loading more...</p>
</div>
''')
return HTMLResponse('\n'.join(html_parts))
# First page - full content
infinite_scroll_trigger = ""
if has_more:
infinite_scroll_trigger = f'''
<div hx-get="/runs?page=2" hx-trigger="revealed" hx-swap="afterend">
<p class="py-4 text-center text-gray-400">Loading more...</p>
</div>
'''
content = f'''
<h2 class="text-xl font-semibold text-white mb-6">Runs ({total} total)</h2>
<div class="space-y-4">
{''.join(html_parts)}
{infinite_scroll_trigger}
</div>
'''
return HTMLResponse(render_page("Runs", content, ctx.actor_id, active_tab="runs"))
# JSON response for APIs
return {
"runs": [r.model_dump() for r in runs_page],
"pagination": {
"page": page,
"limit": limit,
"total": total,
"has_more": has_more
}
}
# ============ Recipe Endpoints ============
@app.post("/recipes/upload")
async def upload_recipe(file: UploadFile = File(...), ctx: UserContext = Depends(get_required_user_context)):
"""Upload a recipe YAML file. Requires authentication."""
import tempfile
# Read file content
content = await file.read()
try:
yaml_content = content.decode('utf-8')
except UnicodeDecodeError:
raise HTTPException(400, "Recipe file must be valid UTF-8 text")
# Validate YAML
try:
yaml.safe_load(yaml_content)
except yaml.YAMLError as e:
raise HTTPException(400, f"Invalid YAML: {e}")
# Store YAML file in cache
with tempfile.NamedTemporaryFile(delete=False, suffix=".yaml") as tmp:
tmp.write(content)
tmp_path = Path(tmp.name)
cached, ipfs_cid = cache_manager.put(tmp_path, node_type="recipe", move=True)
recipe_hash = cached.content_hash
# Parse and save metadata
actor_id = ctx.actor_id
try:
recipe_status = parse_recipe_yaml(yaml_content, recipe_hash, actor_id)
except Exception as e:
raise HTTPException(400, f"Failed to parse recipe: {e}")
await asyncio.to_thread(save_recipe, recipe_status)
# Save cache metadata to database
await database.save_item_metadata(
content_hash=recipe_hash,
actor_id=actor_id,
item_type="recipe",
filename=file.filename,
description=recipe_status.name # Use recipe name as description
)
return {
"recipe_id": recipe_hash,
"name": recipe_status.name,
"version": recipe_status.version,
"variable_inputs": len(recipe_status.variable_inputs),
"fixed_inputs": len(recipe_status.fixed_inputs)
}
@app.get("/recipes")
async def list_recipes_api(request: Request, page: int = 1, limit: int = 20):
"""List recipes. HTML for browsers, JSON for APIs."""
ctx = await get_user_context_from_cookie(request)
all_recipes = await asyncio.to_thread(list_all_recipes)
if wants_html(request):
# HTML response
if not ctx:
return HTMLResponse(render_page(
"Recipes",
'<p class="text-gray-400 py-8 text-center">Not logged in.</p>',
None,
active_tab="recipes"
))
# Filter to user's recipes
user_recipes = [c for c in all_recipes if c.uploader in (ctx.username, ctx.actor_id)]
total = len(user_recipes)
if not user_recipes:
content = '''
<h2 class="text-xl font-semibold text-white mb-6">Recipes (0)</h2>
<p class="text-gray-400 py-8 text-center">No recipes yet. Upload a recipe YAML file to get started.</p>
'''
return HTMLResponse(render_page("Recipes", content, ctx.actor_id, active_tab="recipes"))
html_parts = []
for recipe in user_recipes:
var_count = len(recipe.variable_inputs)
fixed_count = len(recipe.fixed_inputs)
input_info = []
if var_count:
input_info.append(f"{var_count} variable")
if fixed_count:
input_info.append(f"{fixed_count} fixed")
inputs_str = ", ".join(input_info) if input_info else "no inputs"
html_parts.append(f'''
<a href="/recipe/{recipe.recipe_id}" class="block">
<div class="bg-dark-700 rounded-lg p-4 hover:bg-dark-600 transition-colors">
<div class="flex flex-wrap items-center justify-between gap-3 mb-3">
<div class="flex items-center gap-3">
<span class="px-3 py-1 bg-purple-600 text-white text-sm font-medium rounded-full">{recipe.name}</span>
<span class="text-gray-400 text-xs">v{recipe.version}</span>
</div>
<span class="text-xs text-gray-400">{inputs_str}</span>
</div>
<div class="text-sm text-gray-400 mb-2">
{recipe.description or "No description"}
</div>
<div class="text-xs text-gray-500 font-mono truncate">
{recipe.recipe_id[:24]}...
</div>
</div>
</a>
''')
content = f'''
<h2 class="text-xl font-semibold text-white mb-6">Recipes ({total})</h2>
<div class="space-y-4">
{''.join(html_parts)}
</div>
'''
return HTMLResponse(render_page("Recipes", content, ctx.actor_id, active_tab="recipes"))
# JSON response for APIs
total = len(all_recipes)
start = (page - 1) * limit
end = start + limit
recipes_page = all_recipes[start:end]
has_more = end < total
return {
"recipes": [c.model_dump() for c in recipes_page],
"pagination": {
"page": page,
"limit": limit,
"total": total,
"has_more": has_more
}
}
@app.get("/recipes/{recipe_id}")
async def get_recipe_api(recipe_id: str):
"""Get recipe details."""
recipe = load_recipe(recipe_id)
if not recipe:
raise HTTPException(404, f"Recipe {recipe_id} not found")
return recipe
@app.delete("/recipes/{recipe_id}")
async def remove_recipe(recipe_id: str, ctx: UserContext = Depends(get_required_user_context)):
"""Delete a recipe. Requires authentication."""
recipe = load_recipe(recipe_id)
if not recipe:
raise HTTPException(404, f"Recipe {recipe_id} not found")
# Check ownership
if recipe.uploader not in (ctx.username, ctx.actor_id):
raise HTTPException(403, "Access denied")
# Check if pinned
pinned, reason = cache_manager.is_pinned(recipe_id)
if pinned:
raise HTTPException(400, f"Cannot delete pinned recipe: {reason}")
# Delete from Redis and cache
delete_recipe_from_redis(recipe_id)
cache_manager.delete_by_content_hash(recipe_id)
return {"deleted": True, "recipe_id": recipe_id}
@app.post("/recipes/{recipe_id}/run")
async def run_recipe(recipe_id: str, request: RecipeRunRequest, ctx: UserContext = Depends(get_required_user_context)):
"""Run a recipe with provided variable inputs. Requires authentication."""
recipe = await asyncio.to_thread(load_recipe, recipe_id)
if not recipe:
raise HTTPException(404, f"Recipe {recipe_id} not found")
# Validate all required inputs are provided
for var_input in recipe.variable_inputs:
if var_input.required and var_input.node_id not in request.inputs:
raise HTTPException(400, f"Missing required input: {var_input.name}")
# Load recipe YAML
recipe_path = await asyncio.to_thread(cache_manager.get_by_content_hash, recipe_id)
if not recipe_path:
raise HTTPException(500, "Recipe YAML not found in cache")
with open(recipe_path) as f:
yaml_config = yaml.safe_load(f)
# Build DAG from recipe
dag = build_dag_from_recipe(yaml_config, request.inputs, recipe)
actor_id = ctx.actor_id
# Collect all input hashes
all_inputs = list(request.inputs.values())
for fixed in recipe.fixed_inputs:
if fixed.content_hash:
all_inputs.append(fixed.content_hash)
# Compute content-addressable run_id
run_id = compute_run_id(all_inputs, f"recipe:{recipe.name}")
output_name = f"{recipe.name}-{run_id[:8]}"
# Check L1 cache first
cached_run = await database.get_run_cache(run_id)
if cached_run:
output_hash = cached_run["output_hash"]
if cache_manager.has_content(output_hash):
logger.info(f"run_recipe: Cache hit for run_id={run_id[:16]}...")
return RunStatus(
run_id=run_id,
status="completed",
recipe=f"recipe:{recipe.name}",
inputs=all_inputs,
output_name=output_name,
created_at=cached_run.get("created_at", datetime.now(timezone.utc).isoformat()),
completed_at=cached_run.get("created_at", datetime.now(timezone.utc).isoformat()),
output_hash=output_hash,
username=actor_id,
provenance_cid=cached_run.get("provenance_cid"),
)
# Check L2 if not in L1
l2_server = ctx.l2_server
try:
l2_resp = http_requests.get(f"{l2_server}/assets/by-run-id/{run_id}", timeout=10)
if l2_resp.status_code == 200:
l2_data = l2_resp.json()
output_hash = l2_data.get("output_hash")
ipfs_cid = l2_data.get("ipfs_cid")
if output_hash and ipfs_cid:
logger.info(f"run_recipe: Found on L2, pulling from IPFS")
import ipfs_client
legacy_dir = CACHE_DIR / "legacy"
legacy_dir.mkdir(parents=True, exist_ok=True)
recovery_path = legacy_dir / output_hash
if ipfs_client.get_file(ipfs_cid, str(recovery_path)):
cache_manager._set_content_index(output_hash, output_hash)
cache_manager._set_ipfs_index(output_hash, ipfs_cid)
await database.save_run_cache(
run_id=run_id, output_hash=output_hash,
recipe=f"recipe:{recipe.name}", inputs=all_inputs,
ipfs_cid=ipfs_cid, provenance_cid=l2_data.get("provenance_cid"),
actor_id=actor_id,
)
return RunStatus(
run_id=run_id, status="completed",
recipe=f"recipe:{recipe.name}", inputs=all_inputs,
output_name=output_name,
created_at=datetime.now(timezone.utc).isoformat(),
completed_at=datetime.now(timezone.utc).isoformat(),
output_hash=output_hash, username=actor_id,
provenance_cid=l2_data.get("provenance_cid"),
)
except Exception as e:
logger.warning(f"run_recipe: L2 lookup failed: {e}")
# Not cached - run Celery
run = RunStatus(
run_id=run_id,
status="pending",
recipe=f"recipe:{recipe.name}",
inputs=all_inputs,
output_name=output_name,
created_at=datetime.now(timezone.utc).isoformat(),
username=actor_id
)
# Submit to Celery
dag_json = dag.to_json()
task = execute_dag.delay(dag_json, run.run_id)
run.celery_task_id = task.id
run.status = "running"
await asyncio.to_thread(save_run, run)
return run
def build_dag_from_recipe(yaml_config: dict, user_inputs: dict[str, str], recipe: RecipeStatus) -> "DAG":
    """Build a DAG from recipe YAML with user-provided inputs.

    Args:
        yaml_config: Parsed recipe YAML (expects "registry" and "dag" keys).
        user_inputs: Maps variable-input node names to content hashes.
        recipe: Parsed recipe metadata (currently unused here beyond the
            signature; validation of required inputs happens in the caller).

    Returns:
        A DAG whose SOURCE nodes carry content hashes (user-provided for
        variable inputs, registry-provided for fixed assets) and whose
        output node is set from the recipe's "dag.output" name.

    Raises:
        HTTPException(400) for a variable input with no user hash or a
        fixed asset missing from the registry.

    NOTE(review): non-SOURCE nodes are created in YAML order in the
    second pass, and input names not yet in name_to_id are silently
    dropped — a node referencing a non-SOURCE node defined later in the
    list would lose that edge. Confirm recipes are always topologically
    ordered.
    """
    from artdag import DAG, Node
    dag = DAG()
    name_to_id = {}  # Map YAML node names to content-addressed IDs
    registry = yaml_config.get("registry", {})
    assets = registry.get("assets", {})
    effects = registry.get("effects", {})
    dag_config = yaml_config.get("dag", {})
    nodes = dag_config.get("nodes", [])
    output_node = dag_config.get("output")
    # First pass: create all nodes and map names to IDs
    for node_def in nodes:
        node_name = node_def.get("id")
        node_type = node_def.get("type")
        node_config = node_def.get("config", {})
        if node_type == "SOURCE":
            if node_config.get("input"):
                # Variable input - use user-provided hash
                content_hash = user_inputs.get(node_name)
                if not content_hash:
                    raise HTTPException(400, f"Missing input for node {node_name}")
                node = Node(
                    node_type="SOURCE",
                    config={"content_hash": content_hash},
                    inputs=[],
                    name=node_name
                )
            else:
                # Fixed input - use registry hash
                asset_name = node_config.get("asset")
                asset_info = assets.get(asset_name, {})
                content_hash = asset_info.get("hash")
                if not content_hash:
                    raise HTTPException(400, f"Asset {asset_name} not found in registry")
                node = Node(
                    node_type="SOURCE",
                    config={"content_hash": content_hash},
                    inputs=[],
                    name=node_name
                )
            name_to_id[node_name] = node.node_id
            dag.add_node(node)
    # Second pass: create nodes with inputs (now we can resolve input names to IDs)
    for node_def in nodes:
        node_name = node_def.get("id")
        node_type = node_def.get("type")
        node_config = node_def.get("config", {})
        input_names = node_def.get("inputs", [])
        # Skip SOURCE nodes (already added)
        if node_type == "SOURCE":
            continue
        # Resolve input names to content-addressed IDs
        input_ids = [name_to_id[name] for name in input_names if name in name_to_id]
        if node_type == "EFFECT":
            effect_name = node_config.get("effect")
            effect_info = effects.get(effect_name, {})
            effect_hash = effect_info.get("hash")
            node = Node(
                node_type="EFFECT",
                config={"effect": effect_name, "effect_hash": effect_hash},
                inputs=input_ids,
                name=node_name
            )
        else:
            node = Node(
                node_type=node_type,
                config=node_config,
                inputs=input_ids,
                name=node_name
            )
        name_to_id[node_name] = node.node_id
        dag.add_node(node)
    # Set output node
    if output_node and output_node in name_to_id:
        dag.set_output(name_to_id[output_node])
    return dag
# ============ Recipe UI Pages ============
@app.get("/recipe/{recipe_id}", response_class=HTMLResponse)
async def recipe_detail_page(recipe_id: str, request: Request):
"""Recipe detail page with run form."""
ctx = await get_user_context_from_cookie(request)
recipe = load_recipe(recipe_id)
if not recipe:
return HTMLResponse(render_page(
"Recipe Not Found",
f'<p class="text-red-400">Recipe {recipe_id} not found.</p>',
ctx.actor_id if ctx else None,
active_tab="recipes"
), status_code=404)
# Build variable inputs form
var_inputs_html = ""
if recipe.variable_inputs:
var_inputs_html = '<div class="space-y-4 mb-6">'
for var_input in recipe.variable_inputs:
required = "required" if var_input.required else ""
var_inputs_html += f'''
<div>
<label class="block text-sm font-medium text-gray-300 mb-2">
{var_input.name} {'*' if var_input.required else ''}
</label>
<input type="text" name="{var_input.node_id}" {required}
placeholder="Content hash..."
class="w-full px-4 py-2 bg-dark-600 border border-dark-500 rounded-lg text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent">
<p class="text-xs text-gray-400 mt-1">{var_input.description or 'Enter a content hash from your cache'}</p>
</div>
'''
var_inputs_html += '</div>'
else:
var_inputs_html = '<p class="text-gray-400 mb-4">This recipe has no variable inputs - it uses fixed assets only.</p>'
# Build fixed inputs display
fixed_inputs_html = ""
if recipe.fixed_inputs:
fixed_inputs_html = '<div class="mt-4"><h4 class="text-sm font-medium text-gray-300 mb-2">Fixed Inputs</h4><ul class="text-sm text-gray-400 space-y-1">'
for fixed in recipe.fixed_inputs:
fixed_inputs_html += f'<li><span class="text-gray-500">{fixed.asset}:</span> <span class="font-mono text-xs">{fixed.content_hash[:16]}...</span></li>'
fixed_inputs_html += '</ul></div>'
# Check if pinned
pinned, pin_reason = cache_manager.is_pinned(recipe_id)
pinned_badge = ""
if pinned:
pinned_badge = f'<span class="px-2 py-1 bg-yellow-600 text-white text-xs rounded-full ml-2">Pinned: {pin_reason}</span>'
# Check if shared to L2
l2_shares = await database.get_l2_shares(recipe_id, ctx.actor_id if ctx else None)
l2_link_html = ""
if l2_shares:
share = l2_shares[0]
l2_server = share.get("l2_server", "").replace("http://", "https://")
asset_name = share.get("asset_name", "")
l2_link_html = f'<a href="{l2_server}/ui/asset/{asset_name}" target="_blank" class="px-2 py-1 bg-blue-600 text-white text-xs rounded-full ml-2 hover:bg-blue-700">View on L2</a>'
# Load recipe source YAML
recipe_path = cache_manager.get_by_content_hash(recipe_id)
recipe_source = ""
recipe_config = {}
if recipe_path and recipe_path.exists():
try:
recipe_source = recipe_path.read_text()
recipe_config = yaml.safe_load(recipe_source)
except Exception:
recipe_source = "(Could not load recipe source)"
# Escape HTML in source for display
import html
recipe_source_escaped = html.escape(recipe_source)
# Build DAG visualization for this recipe
dag_nodes = []
dag_edges = []
dag_config = recipe_config.get("dag", {})
dag_node_defs = dag_config.get("nodes", [])
output_node = dag_config.get("output")
for node_def in dag_node_defs:
node_id = node_def.get("id", "")
node_type = node_def.get("type", "EFFECT")
node_config = node_def.get("config", {})
input_names = node_def.get("inputs", [])
is_output = node_id == output_node
if is_output:
color = NODE_COLORS.get("OUTPUT", NODE_COLORS["default"])
else:
color = NODE_COLORS.get(node_type, NODE_COLORS["default"])
label = node_id
if node_type == "EFFECT" and "effect" in node_config:
label = node_config["effect"]
dag_nodes.append({
"data": {
"id": node_id,
"label": label,
"nodeType": node_type,
"isOutput": is_output,
"color": color,
"config": node_config
}
})
for input_name in input_names:
dag_edges.append({
"data": {
"source": input_name,
"target": node_id
}
})
nodes_json = json.dumps(dag_nodes)
edges_json = json.dumps(dag_edges)
dag_html = render_dag_cytoscape(nodes_json, edges_json) if dag_nodes else '<p class="text-gray-400">No DAG structure found in recipe.</p>'
content = f'''
<div class="mb-6">
<a href="/recipes" class="text-blue-400 hover:text-blue-300 text-sm">&larr; Back to recipes</a>
</div>
<div class="bg-dark-700 rounded-lg p-6 mb-6">
<div class="flex items-center gap-3 mb-4 flex-wrap">
<h2 class="text-2xl font-bold text-white">{recipe.name}</h2>
<span class="px-2 py-1 bg-gray-600 text-white text-xs rounded-full">v{recipe.version}</span>
{pinned_badge}
{l2_link_html}
</div>
<p class="text-gray-400 mb-4">{recipe.description or 'No description'}</p>
<div class="text-xs text-gray-500 font-mono mb-4">{recipe.recipe_id}</div>
{fixed_inputs_html}
</div>
<div class="bg-dark-700 rounded-lg p-6 mb-6">
<div class="flex gap-4 mb-4 border-b border-dark-500">
<button id="tab-dag" onclick="showRecipeTab('dag')" class="px-4 py-2 text-white font-medium border-b-2 border-blue-500">
DAG View
</button>
<button id="tab-yaml" onclick="showRecipeTab('yaml')" class="px-4 py-2 text-gray-400 hover:text-white">
YAML Source
</button>
</div>
<div id="panel-dag">
<div class="mb-4">
<div class="flex gap-4 text-sm flex-wrap">
<span class="flex items-center gap-2">
<span class="w-4 h-4 rounded" style="background-color: #3b82f6"></span> SOURCE
</span>
<span class="flex items-center gap-2">
<span class="w-4 h-4 rounded" style="background-color: #22c55e"></span> EFFECT
</span>
<span class="flex items-center gap-2">
<span class="w-4 h-4 rounded" style="background-color: #a855f7"></span> OUTPUT
</span>
</div>
</div>
<p class="text-sm text-gray-400 mb-4">Click on a node to see its configuration.</p>
{dag_html}
</div>
<div id="panel-yaml" class="hidden">
<pre class="bg-dark-900 p-4 rounded-lg overflow-x-auto text-sm text-gray-300 font-mono whitespace-pre-wrap"><code>{recipe_source_escaped}</code></pre>
</div>
</div>
<script>
function showRecipeTab(tab) {{
document.getElementById('panel-dag').classList.toggle('hidden', tab !== 'dag');
document.getElementById('panel-yaml').classList.toggle('hidden', tab !== 'yaml');
document.getElementById('tab-dag').classList.toggle('border-blue-500', tab === 'dag');
document.getElementById('tab-dag').classList.toggle('text-white', tab === 'dag');
document.getElementById('tab-dag').classList.toggle('text-gray-400', tab !== 'dag');
document.getElementById('tab-dag').classList.toggle('border-b-2', tab === 'dag');
document.getElementById('tab-yaml').classList.toggle('border-blue-500', tab === 'yaml');
document.getElementById('tab-yaml').classList.toggle('text-white', tab === 'yaml');
document.getElementById('tab-yaml').classList.toggle('text-gray-400', tab !== 'yaml');
document.getElementById('tab-yaml').classList.toggle('border-b-2', tab === 'yaml');
// Trigger resize for cytoscape
if (tab === 'dag' && window.cy) {{
setTimeout(function() {{ window.cy.resize(); window.cy.fit(); }}, 100);
}}
}}
</script>
<div class="bg-dark-700 rounded-lg p-6">
<h3 class="text-lg font-semibold text-white mb-4">Run this Recipe</h3>
<form hx-post="/ui/recipes/{recipe_id}/run" hx-target="#run-result" hx-swap="innerHTML">
{var_inputs_html}
<div id="run-result"></div>
<button type="submit"
class="px-6 py-2 bg-green-600 hover:bg-green-700 text-white font-medium rounded-lg transition-colors">
Run Recipe
</button>
</form>
</div>
'''
return HTMLResponse(render_page_with_cytoscape(f"Recipe: {recipe.name}", content, ctx.actor_id if ctx else None, active_tab="recipes"))
@app.get("/recipe/{recipe_id}/dag", response_class=HTMLResponse)
async def recipe_dag_visualization(recipe_id: str, request: Request):
    """Visualize recipe structure as DAG.

    Loads the recipe's YAML from the content-addressed cache (``recipe_id``
    doubles as the YAML's content hash), converts the ``dag.nodes`` section
    into Cytoscape.js node/edge JSON, and renders the interactive graph page.
    Error cases (unknown recipe, missing cache file, unparseable YAML) render
    an HTML error page with an appropriate status code instead of raising.
    """
    ctx = await get_user_context_from_cookie(request)
    recipe = load_recipe(recipe_id)
    if not recipe:
        return HTMLResponse(render_page_with_cytoscape(
            "Recipe Not Found",
            f'<p class="text-red-400">Recipe {recipe_id} not found.</p>',
            ctx.actor_id if ctx else None,
            active_tab="recipes"
        ), status_code=404)
    # Load recipe YAML from the cache by its content hash.
    recipe_path = cache_manager.get_by_content_hash(recipe_id)
    if not recipe_path or not recipe_path.exists():
        return HTMLResponse(render_page_with_cytoscape(
            "Recipe Not Found",
            '<p class="text-red-400">Recipe file not found in cache.</p>',
            ctx.actor_id if ctx else None,
            active_tab="recipes"
        ), status_code=404)
    try:
        recipe_yaml = recipe_path.read_text()
        config = yaml.safe_load(recipe_yaml)
    except Exception as e:
        # Parse failures surface as a 500 error page (file exists but is bad).
        return HTMLResponse(render_page_with_cytoscape(
            "Error",
            f'<p class="text-red-400">Failed to parse recipe: {e}</p>',
            ctx.actor_id if ctx else None,
            active_tab="recipes"
        ), status_code=500)
    dag_config = config.get("dag", {})
    dag_nodes = dag_config.get("nodes", [])
    output_node = dag_config.get("output")
    # Build Cytoscape nodes and edges from the recipe's DAG definition.
    nodes = []
    edges = []
    for node_def in dag_nodes:
        node_id = node_def.get("id", "")
        node_type = node_def.get("type", "EFFECT")
        node_config = node_def.get("config", {})
        input_names = node_def.get("inputs", [])
        # Determine if this is the output node (colored differently below).
        is_output = node_id == output_node
        if is_output:
            color = NODE_COLORS.get("OUTPUT", NODE_COLORS["default"])
        else:
            color = NODE_COLORS.get(node_type, NODE_COLORS["default"])
        # Get effect name if it's an effect node; it makes a better label
        # than the raw node id.
        label = node_id
        if node_type == "EFFECT" and "effect" in node_config:
            label = node_config["effect"]
        nodes.append({
            "data": {
                "id": node_id,
                "label": label,
                "nodeType": node_type,
                "isOutput": is_output,
                "color": color,
                "config": node_config
            }
        })
        # Create edges from inputs: each named input becomes an edge
        # input -> this node.
        for input_name in input_names:
            edges.append({
                "data": {
                    "source": input_name,
                    "target": node_id
                }
            })
    nodes_json = json.dumps(nodes)
    edges_json = json.dumps(edges)
    dag_html = render_dag_cytoscape(nodes_json, edges_json)
    content = f'''
    <div class="mb-6">
        <a href="/recipe/{recipe_id}" class="text-blue-400 hover:text-blue-300 text-sm">&larr; Back to recipe</a>
    </div>
    <div class="bg-dark-700 rounded-lg p-6 mb-6">
        <div class="flex items-center gap-3 mb-4">
            <h2 class="text-2xl font-bold text-white">{recipe.name}</h2>
            <span class="px-2 py-1 bg-gray-600 text-white text-xs rounded-full">v{recipe.version}</span>
        </div>
        <p class="text-gray-400 mb-4">{recipe.description or 'No description'}</p>
    </div>
    <div class="bg-dark-700 rounded-lg p-6">
        <h3 class="text-lg font-semibold text-white mb-4">DAG Structure</h3>
        <div class="mb-4">
            <div class="flex gap-4 text-sm flex-wrap">
                <span class="flex items-center gap-2">
                    <span class="w-4 h-4 rounded" style="background-color: #3b82f6"></span> SOURCE
                </span>
                <span class="flex items-center gap-2">
                    <span class="w-4 h-4 rounded" style="background-color: #22c55e"></span> EFFECT
                </span>
                <span class="flex items-center gap-2">
                    <span class="w-4 h-4 rounded" style="background-color: #a855f7"></span> OUTPUT
                </span>
            </div>
        </div>
        <p class="text-sm text-gray-400 mb-4">Click on a node to see its configuration. The purple-bordered node is the output.</p>
        {dag_html}
    </div>
    '''
    return HTMLResponse(render_page_with_cytoscape(f"DAG: {recipe.name}", content, ctx.actor_id if ctx else None, active_tab="recipes"))
@app.post("/ui/recipes/{recipe_id}/run", response_class=HTMLResponse)
async def ui_run_recipe(recipe_id: str, request: Request):
    """HTMX handler: run a recipe with form inputs using 3-phase orchestration.

    Flow:
      1. Validate login, recipe existence, and required variable inputs.
      2. Compute the deterministic run_id from inputs + recipe hash; if a
         completed run with a cached output already exists, short-circuit.
      3. Otherwise record a RunStatus and submit the orchestrated
         ``run_recipe`` Celery task (analyze / plan / execute phases).

    Returns an HTML fragment (success banner with run link, or error banner)
    suitable for swapping into the form's result target.
    """
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Login required</div>'
    recipe = load_recipe(recipe_id)
    if not recipe:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Recipe not found</div>'
    # Parse form data: one field per variable input, keyed by node_id.
    form_data = await request.form()
    input_hashes = {}
    for var_input in recipe.variable_inputs:
        value = form_data.get(var_input.node_id, "").strip()
        if var_input.required and not value:
            return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Missing required input: {var_input.name}</div>'
        if value:
            input_hashes[var_input.node_id] = value
    # Load recipe YAML (recipe_id is the YAML's content hash).
    recipe_path = cache_manager.get_by_content_hash(recipe_id)
    if not recipe_path:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Recipe YAML not found in cache</div>'
    try:
        recipe_yaml = recipe_path.read_text()
        # Compute deterministic run_id: same inputs + recipe => same run.
        run_id = compute_run_id(
            list(input_hashes.values()),
            recipe.name,
            recipe_id  # recipe_id is already the content hash
        )
        # Check if already completed; only short-circuit if the output
        # content is actually still present in the cache.
        cached = await database.get_run_cache(run_id)
        if cached:
            output_hash = cached.get("output_hash")
            if cache_manager.has_content(output_hash):
                return f'''
                <div class="bg-blue-900/50 border border-blue-700 text-blue-300 px-4 py-3 rounded-lg mb-4">
                    Already completed! <a href="/run/{run_id}" class="underline">View run</a>
                </div>
                '''
        # Collect all input hashes (variable + fixed) for RunStatus.
        all_inputs = list(input_hashes.values())
        for fixed in recipe.fixed_inputs:
            if fixed.content_hash:
                all_inputs.append(fixed.content_hash)
        # Create run status record.
        run = RunStatus(
            run_id=run_id,
            status="pending",
            recipe=f"recipe:{recipe.name}",
            inputs=all_inputs,
            output_name=f"{recipe.name}-{run_id[:8]}",
            created_at=datetime.now(timezone.utc).isoformat(),
            username=ctx.actor_id
        )
        # Submit to orchestrated run_recipe task (3-phase: analyze, plan, execute)
        from tasks.orchestrate import run_recipe
        task = run_recipe.delay(
            recipe_yaml=recipe_yaml,
            input_hashes=input_hashes,
            features=["beats", "energy"],
            run_id=run_id,
        )
        run.celery_task_id = task.id
        run.status = "running"
        save_run(run)
        return f'''
        <div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg mb-4">
            Run started! <a href="/run/{run_id}" class="underline">View run</a>
        </div>
        '''
    except Exception as e:
        # NOTE(review): logger.exception would also capture the traceback
        # here — consider switching; confirm against project logging style.
        logger.error(f"Recipe run failed: {e}")
        return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Error: {str(e)}</div>'
@app.get("/ui/recipes-list", response_class=HTMLResponse)
async def ui_recipes_list(request: Request):
    """HTMX partial: render the logged-in user's recipes as a card list."""
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
    # Keep only recipes uploaded by this user (by username or actor id).
    owners = (ctx.username, ctx.actor_id)
    mine = [r for r in list_all_recipes() if r.uploader in owners]
    if not mine:
        return '<p class="text-gray-400 py-8 text-center">No recipes yet. Upload a recipe YAML file to get started.</p>'
    cards = ['<div class="space-y-4">']
    for rec in mine:
        # Summarize input counts, e.g. "2 variable, 1 fixed" or "no inputs".
        counts = [
            f"{n} {kind}"
            for n, kind in (
                (len(rec.variable_inputs), "variable"),
                (len(rec.fixed_inputs), "fixed"),
            )
            if n
        ]
        inputs_str = ", ".join(counts) if counts else "no inputs"
        cards.append(f'''
    <a href="/recipe/{rec.recipe_id}" class="block">
        <div class="bg-dark-700 rounded-lg p-4 hover:bg-dark-600 transition-colors">
            <div class="flex flex-wrap items-center justify-between gap-3 mb-3">
                <div class="flex items-center gap-3">
                    <span class="px-3 py-1 bg-purple-600 text-white text-sm font-medium rounded-full">{rec.name}</span>
                    <span class="text-gray-400 text-xs">v{rec.version}</span>
                </div>
                <span class="text-xs text-gray-400">{inputs_str}</span>
            </div>
            <div class="text-sm text-gray-400 mb-2">
                {rec.description or "No description"}
            </div>
            <div class="text-xs text-gray-500 font-mono truncate">
                {rec.recipe_id[:24]}...
            </div>
        </div>
    </a>
    ''')
    cards.append('</div>')
    return '\n'.join(cards)
@app.delete("/ui/recipes/{recipe_id}/discard", response_class=HTMLResponse)
async def ui_discard_recipe(recipe_id: str, request: Request):
    """HTMX handler: delete an owned, unpinned recipe from Redis and cache."""
    # All failure paths render the same red alert box with a varying message.
    alert = '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">{}</div>'
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return alert.format("Login required")
    recipe = load_recipe(recipe_id)
    if not recipe:
        return alert.format("Recipe not found")
    # Only the uploader (by username or actor id) may discard it.
    if recipe.uploader not in (ctx.username, ctx.actor_id):
        return alert.format("Access denied")
    # Pinned recipes are protected from deletion.
    is_pinned, pin_reason = cache_manager.is_pinned(recipe_id)
    if is_pinned:
        return alert.format(f"Cannot delete: recipe is pinned ({pin_reason})")
    # Remove the index entry first, then the cached YAML content.
    delete_recipe_from_redis(recipe_id)
    cache_manager.delete_by_content_hash(recipe_id)
    return '''
    <div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg mb-4">
        Recipe deleted. <a href="/recipes" class="underline">Back to recipes</a>
    </div>
    '''
@app.get("/cache/{content_hash}")
async def get_cached(content_hash: str, request: Request):
    """Get cached content by hash. Content negotiation: HTML for browsers, JSON for APIs, file for downloads.

    - Accept containing application/json (and not text/html): returns a JSON
      metadata summary (size, media type, IPFS CID, stored metadata).
    - Everything else: renders an HTML detail page (requires login and
      ownership of the item). Raw bytes are only served by
      /cache/{hash}/raw, never from this endpoint.
    """
    start = time.time()
    accept = request.headers.get("accept", "")
    logger.info(f"get_cached: {content_hash[:16]}... Accept={accept[:50]}")
    ctx = await get_user_context_from_cookie(request)
    cache_path = get_cache_path(content_hash)
    if not cache_path:
        logger.info(f"get_cached: Not found, took {time.time()-start:.3f}s")
        if wants_html(request):
            content = f'<p class="text-red-400">Content not found: {content_hash}</p>'
            return HTMLResponse(render_page("Not Found", content, ctx.actor_id if ctx else None, active_tab="media"), status_code=404)
        raise HTTPException(404, f"Content {content_hash} not in cache")
    # JSON response only if explicitly requested
    if "application/json" in accept and "text/html" not in accept:
        # The t0 timers below feed debug-level latency logging per DB call.
        t0 = time.time()
        meta = await database.load_item_metadata(content_hash, ctx.actor_id if ctx else None)
        logger.debug(f"get_cached: load_item_metadata took {time.time()-t0:.3f}s")
        t0 = time.time()
        cache_item = await database.get_cache_item(content_hash)
        logger.debug(f"get_cached: get_cache_item took {time.time()-t0:.3f}s")
        ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None
        file_size = cache_path.stat().st_size
        # Use stored type from metadata, fall back to auto-detection
        stored_type = meta.get("type") if meta else None
        if stored_type == "recipe":
            media_type = "recipe"
        else:
            media_type = detect_media_type(cache_path)
        logger.info(f"get_cached: JSON response, ipfs_cid={ipfs_cid[:16] if ipfs_cid else 'None'}..., took {time.time()-start:.3f}s")
        return {
            "content_hash": content_hash,
            "size": file_size,
            "media_type": media_type,
            "ipfs_cid": ipfs_cid,
            "meta": meta
        }
    # HTML response for browsers (default for all non-JSON requests)
    # Raw data is only served from /cache/{hash}/raw endpoint
    # NOTE(review): `if True` is a leftover guard (the branch is now
    # unconditional); kept as-is pending a cleanup pass.
    if True: # Always show HTML page, raw data via /raw endpoint
        if not ctx:
            content = '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
            return HTMLResponse(render_page("Login Required", content, None, active_tab="media"), status_code=401)
        # Check user has access: item must be in this user's cache set.
        user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
        if content_hash not in user_hashes:
            content = '<p class="text-red-400 py-8 text-center">Access denied.</p>'
            return HTMLResponse(render_page("Access Denied", content, ctx.actor_id, active_tab="media"), status_code=403)
        media_type = detect_media_type(cache_path)
        file_size = cache_path.stat().st_size
        # Human-readable size string (bytes / KB / MB).
        size_str = f"{file_size:,} bytes"
        if file_size > 1024*1024:
            size_str = f"{file_size/(1024*1024):.1f} MB"
        elif file_size > 1024:
            size_str = f"{file_size/1024:.1f} KB"
        # Get IPFS CID from database
        cache_item = await database.get_cache_item(content_hash)
        ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None
        # Build media display HTML: inline player/viewer, or a download link
        # for unrecognized types.
        if media_type == "video":
            video_src = video_src_for_request(content_hash, request)
            media_html = f'<video src="{video_src}" controls autoplay muted loop playsinline class="max-w-full max-h-96 rounded-lg"></video>'
        elif media_type == "image":
            media_html = f'<img src="/cache/{content_hash}/raw" alt="{content_hash}" class="max-w-full max-h-96 rounded-lg">'
        else:
            media_html = f'<p class="text-gray-400">Unknown file type. <a href="/cache/{content_hash}/raw" download class="text-blue-400 hover:text-blue-300">Download file</a></p>'
        content = f'''
        <a href="/media" class="inline-flex items-center text-blue-400 hover:text-blue-300 mb-6">
            <svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
            </svg>
            Back to media
        </a>
        <div class="bg-dark-700 rounded-lg p-6">
            <div class="flex flex-wrap items-center justify-between gap-4 mb-6">
                <div class="flex items-center gap-3">
                    <span class="px-3 py-1 bg-blue-600 text-white text-sm font-medium rounded-full">{media_type.capitalize()}</span>
                    <span class="text-gray-400 font-mono text-sm">{content_hash[:24]}...</span>
                </div>
                <a href="/cache/{content_hash}/raw" download
                   class="px-4 py-2 bg-green-600 hover:bg-green-700 text-white font-medium rounded-lg transition-colors">
                    Download
                </a>
            </div>
            <div class="flex justify-center mb-8">
                {media_html}
            </div>
            <div class="border-t border-dark-500 pt-6">
                <h2 class="text-lg font-semibold text-white mb-4">Details</h2>
                <div class="grid gap-4 sm:grid-cols-2">
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Content Hash (SHA3-256)</div>
                        <div class="font-mono text-xs text-gray-200 break-all">{content_hash}</div>
                    </div>
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Type</div>
                        <div class="text-gray-200">{media_type}</div>
                    </div>
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Size</div>
                        <div class="text-gray-200">{size_str}</div>
                    </div>
                    <div class="bg-dark-600 rounded-lg p-4">
                        <div class="text-sm text-gray-400 mb-1">Raw URL</div>
                        <div class="text-blue-400 text-sm truncate">
                            <a href="/cache/{content_hash}/raw" class="hover:text-blue-300">/cache/{content_hash}/raw</a>
                        </div>
                    </div>
                </div>
            </div>
        '''
        # Add IPFS section if we have a CID: a local gateway link (when
        # configured) plus well-known public gateways.
        if ipfs_cid:
            gateway_links = []
            if IPFS_GATEWAY_URL:
                gateway_links.append(f'''
                <a href="{IPFS_GATEWAY_URL}/ipfs/{ipfs_cid}" target="_blank" rel="noopener"
                   class="px-3 py-1 bg-green-600 hover:bg-green-700 text-white text-sm rounded-lg transition-colors">
                    Local Gateway
                </a>''')
            gateway_links.extend([
                f'''<a href="https://ipfs.io/ipfs/{ipfs_cid}" target="_blank" rel="noopener"
                   class="px-3 py-1 bg-purple-600 hover:bg-purple-700 text-white text-sm rounded-lg transition-colors">
                    ipfs.io
                </a>''',
                f'''<a href="https://dweb.link/ipfs/{ipfs_cid}" target="_blank" rel="noopener"
                   class="px-3 py-1 bg-purple-600 hover:bg-purple-700 text-white text-sm rounded-lg transition-colors">
                    dweb.link
                </a>''',
                f'''<a href="https://cloudflare-ipfs.com/ipfs/{ipfs_cid}" target="_blank" rel="noopener"
                   class="px-3 py-1 bg-purple-600 hover:bg-purple-700 text-white text-sm rounded-lg transition-colors">
                    Cloudflare
                </a>''',
            ])
            gateways_html = '\n'.join(gateway_links)
            content += f'''
            <div class="border-t border-dark-500 pt-6 mt-6">
                <h2 class="text-lg font-semibold text-white mb-4">IPFS</h2>
                <div class="bg-dark-600 rounded-lg p-4 mb-4">
                    <div class="text-sm text-gray-400 mb-1">Content Identifier (CID)</div>
                    <div class="font-mono text-xs text-gray-200 break-all">{ipfs_cid}</div>
                </div>
                <div class="text-sm text-gray-400 mb-2">Gateways:</div>
                <div class="flex flex-wrap gap-2">
                    {gateways_html}
                </div>
            </div>
            '''
        else:
            content += '''
            <div class="border-t border-dark-500 pt-6 mt-6">
                <h2 class="text-lg font-semibold text-white mb-4">IPFS</h2>
                <div class="text-gray-400 text-sm">Not yet uploaded to IPFS</div>
            </div>
            '''
        # Metadata section is lazy-loaded by HTMX from the meta-form endpoint.
        content += f'''
        <!-- Metadata Section -->
        <div class="border-t border-dark-500 pt-6 mt-6" id="metadata-section"
             hx-get="/cache/{content_hash}/meta-form" hx-trigger="load" hx-swap="innerHTML">
            <div class="text-gray-400">Loading metadata...</div>
        </div>
        </div>
        '''
        return HTMLResponse(render_page(f"Cache: {content_hash[:16]}...", content, ctx.actor_id, active_tab="media"))
@app.get("/ipfs/{cid}")
async def ipfs_redirect(cid: str):
    """Redirect to IPFS gateway for content viewing."""
    from fastapi.responses import RedirectResponse
    # Prefer the configured gateway, falling back to the public ipfs.io one.
    # NOTE(review): the CID is appended directly to the configured URL, so
    # IPFS_GATEWAY_URL is presumably expected to already contain the /ipfs
    # path segment — confirm against deployment config.
    target = (
        f"{IPFS_GATEWAY_URL.rstrip('/')}/{cid}"
        if IPFS_GATEWAY_URL
        else f"https://ipfs.io/ipfs/{cid}"
    )
    return RedirectResponse(url=target, status_code=302)
@app.get("/ipfs/{cid}/raw")
async def ipfs_raw(cid: str):
    """Fetch content from IPFS and serve it.

    The payload is downloaded into a temporary file, sniffed for media type,
    and streamed back. The temporary file is deleted after the response has
    been sent (or immediately if the fetch fails), so repeated requests no
    longer leak files in the temp directory — previously the file was
    created with ``delete=False`` and never removed.
    """
    import tempfile
    from starlette.background import BackgroundTask

    # Create a placeholder path; the file is closed immediately so
    # ipfs_client can write to it.
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        tmp_path = Path(tmp.name)
    if not ipfs_client.get_file(cid, tmp_path):
        # Fetch failed: remove the empty placeholder before erroring out.
        tmp_path.unlink(missing_ok=True)
        raise HTTPException(404, f"Could not fetch CID {cid} from IPFS")
    # Delete the temp file once the response body has been streamed.
    cleanup = BackgroundTask(tmp_path.unlink)
    # Best-effort content sniffing; the filename extension is cosmetic since
    # content is addressed by CID.
    media_type_name = detect_media_type(tmp_path)
    if media_type_name == "video":
        return FileResponse(tmp_path, media_type="video/mp4", filename=f"{cid[:16]}.mp4", background=cleanup)
    elif media_type_name == "image":
        return FileResponse(tmp_path, media_type="image/jpeg", filename=f"{cid[:16]}.jpg", background=cleanup)
    return FileResponse(tmp_path, filename=f"{cid[:16]}.bin", background=cleanup)
@app.get("/cache/{content_hash}/raw")
async def get_cached_raw(content_hash: str):
    """Serve the raw bytes of a cached item as a downloadable file.

    The container format is sniffed from the file's magic bytes so the
    response carries a sensible Content-Type and download filename.
    """
    path = get_cache_path(content_hash)
    if not path:
        raise HTTPException(404, f"Content {content_hash} not in cache")
    kind = detect_media_type(path)
    if kind == "video":
        with open(path, "rb") as fh:
            magic = fh.read(12)
        # EBML magic -> Matroska/WebM container; anything else (including
        # an "ftyp" box) is treated as MP4.
        if magic[:4] == b'\x1a\x45\xdf\xa3':
            mime, ext = "video/x-matroska", "mkv"
        else:
            mime, ext = "video/mp4", "mp4"
        return FileResponse(path, media_type=mime, filename=f"{content_hash}.{ext}")
    if kind == "image":
        with open(path, "rb") as fh:
            magic = fh.read(8)
        # PNG signature check; everything else defaults to JPEG.
        if magic[:8] == b'\x89PNG\r\n\x1a\n':
            mime, ext = "image/png", "png"
        else:
            mime, ext = "image/jpeg", "jpg"
        return FileResponse(path, media_type=mime, filename=f"{content_hash}.{ext}")
    # Unknown content: generic binary download.
    return FileResponse(path, filename=f"{content_hash}.bin")
@app.get("/cache/{content_hash}/mp4")
async def get_cached_mp4(content_hash: str):
    """Get cached content as MP4 (transcodes MKV on first request, caches result).

    Fixes over the previous version:
    - The HTTPException raised when ffmpeg exits non-zero was caught by the
      broad ``except Exception`` below it and re-wrapped, garbling the error
      detail ("Transcoding failed: 500: Transcoding failed: ..."). It is now
      re-raised unchanged.
    - The in-progress transcode file used a fixed name shared by all
      requests, so concurrent requests for the same hash could clobber each
      other's output. Each request now writes to a unique temp name and
      atomically renames it into place on success.
    """
    cache_path = get_cache_path(content_hash)
    if not cache_path:
        raise HTTPException(404, f"Content {content_hash} not in cache")
    # MP4 transcodes stored alongside original in CACHE_DIR
    mp4_path = CACHE_DIR / f"{content_hash}.mp4"
    # If MP4 already cached, serve it
    if mp4_path.exists():
        return FileResponse(mp4_path, media_type="video/mp4")
    media_type = detect_media_type(cache_path)
    if media_type != "video":
        raise HTTPException(400, "Content is not a video")
    # Probe the container: if it's already MP4/MOV, serve the original as-is.
    import subprocess
    try:
        result = subprocess.run(
            ["ffprobe", "-v", "error", "-select_streams", "v:0",
             "-show_entries", "format=format_name", "-of", "csv=p=0", str(cache_path)],
            capture_output=True, text=True, timeout=10
        )
        if "mp4" in result.stdout.lower() or "mov" in result.stdout.lower():
            # Already MP4-compatible, just serve original
            return FileResponse(cache_path, media_type="video/mp4")
    except Exception:
        pass  # Probe failure is non-fatal; fall through to transcoding.
    # Transcode to MP4 (H.264 + AAC). Unique temp name per request so
    # concurrent transcodes of the same hash don't write over each other.
    transcode_path = CACHE_DIR / f"{content_hash}.transcoding.{uuid.uuid4().hex}.mp4"
    try:
        result = subprocess.run(
            ["ffmpeg", "-y", "-i", str(cache_path),
             "-c:v", "libx264", "-preset", "fast", "-crf", "23",
             "-c:a", "aac", "-b:a", "128k",
             "-movflags", "+faststart",
             str(transcode_path)],
            capture_output=True, text=True, timeout=600  # 10 min timeout
        )
        if result.returncode != 0:
            raise HTTPException(500, f"Transcoding failed: {result.stderr[:200]}")
        # Rename is atomic within the same filesystem, so readers only ever
        # see a complete file at mp4_path.
        transcode_path.rename(mp4_path)
    except HTTPException:
        # Our own error above: clean up the partial file and propagate
        # unchanged (don't let the broad handler below re-wrap it).
        if transcode_path.exists():
            transcode_path.unlink()
        raise
    except subprocess.TimeoutExpired:
        if transcode_path.exists():
            transcode_path.unlink()
        raise HTTPException(500, "Transcoding timed out")
    except Exception as e:
        if transcode_path.exists():
            transcode_path.unlink()
        raise HTTPException(500, f"Transcoding failed: {e}")
    return FileResponse(mp4_path, media_type="video/mp4")
@app.get("/cache/{content_hash}/meta-form", response_class=HTMLResponse)
async def cache_meta_form(content_hash: str, request: Request):
    """Clean URL redirect to the HTMX meta form."""
    from starlette.responses import RedirectResponse
    # Delegate to the legacy /ui/... endpoint until the form moves here.
    target = f"/ui/cache/{content_hash}/meta-form"
    return RedirectResponse(target, status_code=302)
@app.get("/ui/cache/{content_hash}/meta-form", response_class=HTMLResponse)
async def ui_cache_meta_form(content_hash: str, request: Request):
    """HTMX partial: metadata editing form for a cached item.

    Renders three sections for an item the logged-in user owns:
      1. Editable metadata (origin, description, tags) — PATCHed back via
         the /ui/cache/{hash}/meta endpoint.
      2. L2 (ActivityPub) publishing — either the list of existing shares
         with a re-publish button, a publish form (only once an origin is
         set), or a prompt to set the origin first.
      3. Status/actions — pinned state and, for unpinned items, a discard
         button.
    """
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return '<div class="text-red-400">Login required to edit metadata</div>'
    # Check ownership: only items in the user's own cache set are editable.
    user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in user_hashes:
        return '<div class="text-red-400">Access denied</div>'
    # Load metadata and unpack the fields the form displays.
    meta = await database.load_item_metadata(content_hash, ctx.actor_id)
    origin = meta.get("origin", {})
    origin_type = origin.get("type", "")
    origin_url = origin.get("url", "")
    origin_note = origin.get("note", "")
    description = meta.get("description", "")
    tags = meta.get("tags", [])
    tags_str = ", ".join(tags) if tags else ""
    l2_shares = meta.get("l2_shares", [])
    pinned = meta.get("pinned", False)
    pin_reason = meta.get("pin_reason", "")
    # Detect media type to pre-fill the publish form's asset type.
    cache_path = get_cache_path(content_hash)
    media_type = detect_media_type(cache_path) if cache_path else "unknown"
    asset_type = "video" if media_type == "video" else "image"
    # Origin radio checked states (pre-select the stored choice).
    self_checked = 'checked' if origin_type == "self" else ''
    external_checked = 'checked' if origin_type == "external" else ''
    # Build publish section - show list of L2 shares
    if l2_shares:
        shares_html = ""
        for share in l2_shares:
            l2_server = share.get("l2_server", "Unknown")
            asset_name = share.get("asset_name", "")
            # Dates are truncated to YYYY-MM-DD for display.
            published_at = share.get("published_at", "")[:10] if share.get("published_at") else ""
            last_synced = share.get("last_synced_at", "")[:10] if share.get("last_synced_at") else ""
            asset_url = f"{l2_server}/assets/{asset_name}"
            shares_html += f'''
            <div class="flex justify-between items-start py-2 border-b border-green-800 last:border-0">
                <div>
                    <a href="{asset_url}" target="_blank" class="text-white font-medium hover:text-blue-300">{asset_name}</a>
                    <div class="text-xs text-gray-400">{l2_server}</div>
                </div>
                <div class="text-right text-xs text-gray-400">
                    Published: {published_at}<br>
                    Synced: {last_synced}
                </div>
            </div>
            '''
        publish_html = f'''
        <div class="bg-green-900/30 border border-green-700 rounded-lg p-4 mb-4">
            <div class="text-green-400 font-medium mb-2">Published to L2 ({len(l2_shares)} share{"s" if len(l2_shares) != 1 else ""})</div>
            <div class="text-sm">
                {shares_html}
            </div>
        </div>
        <div id="republish-result"></div>
        <button hx-patch="/ui/cache/{content_hash}/republish" hx-target="#republish-result" hx-swap="innerHTML"
                class="px-4 py-2 bg-blue-600 hover:bg-blue-700 text-white font-medium rounded-lg transition-colors">
            Update on L2
        </button>
        '''
    else:
        # Show publish form only if origin is set (publishing requires an
        # origin, see ui_publish_cache).
        if origin_type:
            publish_html = f'''
            <div id="publish-result"></div>
            <form hx-post="/ui/cache/{content_hash}/publish" hx-target="#publish-result" hx-swap="innerHTML"
                  class="flex flex-wrap gap-3 items-end">
                <div>
                    <label class="block text-sm text-gray-400 mb-1">Asset Name</label>
                    <input type="text" name="asset_name" placeholder="my-{asset_type}" required
                           class="px-4 py-2 bg-dark-600 border border-dark-500 rounded-lg text-white placeholder-gray-500 focus:border-blue-500 focus:outline-none min-w-[200px]">
                </div>
                <input type="hidden" name="asset_type" value="{asset_type}">
                <button type="submit"
                        class="px-4 py-2 bg-blue-600 hover:bg-blue-700 text-white font-medium rounded-lg transition-colors">
                    Publish to L2
                </button>
            </form>
            '''
        else:
            publish_html = '''
            <div class="bg-yellow-900/30 border border-yellow-700 text-yellow-300 px-4 py-3 rounded-lg">
                Set an origin (self or external URL) before publishing.
            </div>
            '''
    return f'''
    <h2 class="text-lg font-semibold text-white mb-4">Metadata</h2>
    <div id="meta-save-result"></div>
    <form hx-patch="/ui/cache/{content_hash}/meta" hx-target="#meta-save-result" hx-swap="innerHTML" class="space-y-4 mb-6">
        <!-- Origin -->
        <div class="bg-dark-600 rounded-lg p-4">
            <label class="block text-sm font-medium text-gray-300 mb-3">Origin</label>
            <div class="space-y-3">
                <label class="flex items-center gap-3 cursor-pointer">
                    <input type="radio" name="origin_type" value="self" {self_checked}
                           class="w-4 h-4 text-blue-600 bg-dark-500 border-dark-400">
                    <span class="text-gray-200">Created by me (original content)</span>
                </label>
                <label class="flex items-center gap-3 cursor-pointer">
                    <input type="radio" name="origin_type" value="external" {external_checked}
                           class="w-4 h-4 text-blue-600 bg-dark-500 border-dark-400">
                    <span class="text-gray-200">External source</span>
                </label>
                <div class="ml-7 space-y-2">
                    <input type="url" name="origin_url" value="{origin_url}" placeholder="https://example.com/source"
                           class="w-full px-3 py-2 bg-dark-500 border border-dark-400 rounded-lg text-white placeholder-gray-500 focus:border-blue-500 focus:outline-none text-sm">
                    <input type="text" name="origin_note" value="{origin_note}" placeholder="Note (optional)"
                           class="w-full px-3 py-2 bg-dark-500 border border-dark-400 rounded-lg text-white placeholder-gray-500 focus:border-blue-500 focus:outline-none text-sm">
                </div>
            </div>
        </div>
        <!-- Description -->
        <div class="bg-dark-600 rounded-lg p-4">
            <label class="block text-sm font-medium text-gray-300 mb-2">Description</label>
            <textarea name="description" rows="2" placeholder="Optional description..."
                      class="w-full px-3 py-2 bg-dark-500 border border-dark-400 rounded-lg text-white placeholder-gray-500 focus:border-blue-500 focus:outline-none text-sm">{description}</textarea>
        </div>
        <!-- Tags -->
        <div class="bg-dark-600 rounded-lg p-4">
            <label class="block text-sm font-medium text-gray-300 mb-2">Tags</label>
            <input type="text" name="tags" value="{tags_str}" placeholder="tag1, tag2, tag3"
                   class="w-full px-3 py-2 bg-dark-500 border border-dark-400 rounded-lg text-white placeholder-gray-500 focus:border-blue-500 focus:outline-none text-sm">
            <p class="text-xs text-gray-500 mt-1">Comma-separated list</p>
        </div>
        <button type="submit"
                class="px-4 py-2 bg-green-600 hover:bg-green-700 text-white font-medium rounded-lg transition-colors">
            Save Metadata
        </button>
    </form>
    <!-- Publishing Section -->
    <div class="border-t border-dark-500 pt-6">
        <h3 class="text-lg font-semibold text-white mb-4">Publish to L2 (ActivityPub)</h3>
        {publish_html}
    </div>
    <!-- Status & Actions Section -->
    <div class="border-t border-dark-500 pt-6 mt-6">
        <h3 class="text-lg font-semibold text-white mb-4">Status</h3>
        <div class="bg-dark-600 rounded-lg p-4 mb-4">
            <div class="flex items-center gap-2 mb-2">
                <span class="text-sm text-gray-400">Pinned:</span>
                {'<span class="text-green-400">Yes</span>' if pinned else '<span class="text-gray-500">No</span>'}
                {f'<span class="text-xs text-gray-500 ml-2">({pin_reason})</span>' if pinned and pin_reason else ''}
            </div>
            <p class="text-xs text-gray-500">Pinned items cannot be discarded. Items are pinned when published or used as inputs to published content.</p>
        </div>
        <div id="discard-result"></div>
        {'<p class="text-gray-500 text-sm">Cannot discard pinned items.</p>' if pinned else f"""
        <button hx-delete="/ui/cache/{content_hash}/discard" hx-target="#discard-result" hx-swap="innerHTML"
                hx-confirm="Are you sure you want to discard this item? This cannot be undone."
                class="px-4 py-2 bg-red-600 hover:bg-red-700 text-white font-medium rounded-lg transition-colors">
            Discard Item
        </button>
        """}
    </div>
    '''
@app.patch("/ui/cache/{content_hash}/meta", response_class=HTMLResponse)
async def ui_update_cache_meta(content_hash: str, request: Request):
    """HTMX handler: persist origin/description/tags edits for a cached item."""
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Login required</div>'
    # Only the owner of the item may change its metadata.
    owned = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in owned:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Access denied</div>'
    # Gather the stripped text fields from the form in one pass.
    form = await request.form()
    fields = {
        name: form.get(name, "").strip()
        for name in ("origin_url", "origin_note", "description", "tags")
    }
    origin_choice = form.get("origin_type", "")
    # "self" and "external" are the recognized origin kinds; "external"
    # additionally requires a URL.
    source_type = None
    if origin_choice == "self":
        source_type = "self"
    elif origin_choice == "external":
        if not fields["origin_url"]:
            return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">External origin requires a URL</div>'
        source_type = "external"
    # Comma-separated tags, empty entries dropped.
    tag_list = [t.strip() for t in fields["tags"].split(",") if t.strip()] if fields["tags"] else []
    # Persist; empty strings are normalized to None.
    await database.update_item_metadata(
        content_hash=content_hash,
        actor_id=ctx.actor_id,
        item_type="media",
        description=fields["description"] or None,
        source_type=source_type,
        source_url=fields["origin_url"] or None,
        source_note=fields["origin_note"] or None,
        tags=tag_list
    )
    return '<div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg mb-4">Metadata saved!</div>'
@app.post("/ui/cache/{content_hash}/publish", response_class=HTMLResponse)
async def ui_publish_cache(content_hash: str, request: Request):
    """HTMX handler: publish cache item to L2.

    Validates login, ownership, form input, and that an origin has been
    set, then forwards the item's metadata (and IPFS CID, if any) to the
    user's L2 server. On success the item is recorded as shared and pinned
    locally. Always returns an HTML fragment for HTMX to swap in.
    """
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Login required</div>'
    # The raw bearer token is forwarded to L2 for authentication there.
    token = request.cookies.get("auth_token")
    if not token:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Auth token required</div>'
    # Check ownership
    user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in user_hashes:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Access denied</div>'
    # Parse form
    form = await request.form()
    asset_name = form.get("asset_name", "").strip()
    asset_type = form.get("asset_type", "image")
    if not asset_name:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Asset name required</div>'
    # Load metadata from database
    meta = await database.load_item_metadata(content_hash, ctx.actor_id)
    # Publishing requires a declared origin (type "self" or "external").
    origin = meta.get("origin")
    if not origin or "type" not in origin:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Set origin before publishing</div>'
    # Get IPFS CID from cache item
    cache_item = await database.get_cache_item(content_hash)
    ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None
    # Call the L2 server from user's context
    l2_server = ctx.l2_server
    try:
        resp = http_requests.post(
            f"{l2_server}/assets/publish-cache",
            headers={"Authorization": f"Bearer {token}"},
            json={
                "content_hash": content_hash,
                "ipfs_cid": ipfs_cid,
                "asset_name": asset_name,
                "asset_type": asset_type,
                "origin": origin,
                "description": meta.get("description"),
                "tags": meta.get("tags", []),
                "metadata": {
                    "filename": meta.get("filename"),
                    "folder": meta.get("folder"),
                    "collections": meta.get("collections", [])
                }
            },
            timeout=30
        )
        resp.raise_for_status()
    except http_requests.exceptions.HTTPError as e:
        # Prefer L2's structured error detail when its body is JSON.
        error_detail = ""
        try:
            error_detail = e.response.json().get("detail", str(e))
        except Exception:
            error_detail = str(e)
        return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Error: {error_detail}</div>'
    except Exception as e:
        return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Error: {e}</div>'
    # Save L2 share to database and pin the item
    await database.save_l2_share(
        content_hash=content_hash,
        actor_id=ctx.actor_id,
        l2_server=l2_server,
        asset_name=asset_name,
        content_type=asset_type
    )
    # Pin so a published item cannot be discarded out from under L2.
    await database.update_item_metadata(
        content_hash=content_hash,
        actor_id=ctx.actor_id,
        pinned=True,
        pin_reason="published"
    )
    # Use HTTPS for L2 links
    l2_https = l2_server.replace("http://", "https://")
    return f'''
    <div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg mb-4">
        Published to L2 as <strong>{asset_name}</strong>!
        <a href="{l2_https}/asset/{asset_name}" target="_blank" class="underline">View on L2</a>
    </div>
    '''
@app.patch("/ui/cache/{content_hash}/republish", response_class=HTMLResponse)
async def ui_republish_cache(content_hash: str, request: Request):
    """HTMX handler: re-publish (update) cache item on L2.

    HTML twin of ``republish_cache_to_l2``: finds the existing share record
    for the user's L2 server, PATCHes the asset on L2 with the current local
    metadata (including the IPFS CID, matching the JSON API path), then
    refreshes the local share record. Returns an HTML fragment.
    """
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Login required</div>'
    token = request.cookies.get("auth_token")
    if not token:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Auth token required</div>'
    # Check ownership
    user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in user_hashes:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Access denied</div>'
    # Load metadata and locate the share record for the user's L2 server
    meta = await database.load_item_metadata(content_hash, ctx.actor_id)
    l2_server = ctx.l2_server
    current_share = next(
        (share for share in meta.get("l2_shares", [])
         if share.get("l2_server") == l2_server),
        None
    )
    if not current_share:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Item not published to this L2 yet</div>'
    asset_name = current_share.get("asset_name")
    if not asset_name:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">No asset name found</div>'
    # Include the IPFS CID so this path stays consistent with the JSON API
    # republish endpoint (which sends ipfs_cid in the same PATCH payload).
    cache_item = await database.get_cache_item(content_hash)
    ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None
    # Call L2 update
    try:
        resp = http_requests.patch(
            f"{l2_server}/assets/{asset_name}",
            headers={"Authorization": f"Bearer {token}"},
            json={
                "description": meta.get("description"),
                "tags": meta.get("tags"),
                "origin": meta.get("origin"),
                "ipfs_cid": ipfs_cid,
                "metadata": {
                    "filename": meta.get("filename"),
                    "folder": meta.get("folder"),
                    "collections": meta.get("collections", [])
                }
            },
            timeout=30
        )
        resp.raise_for_status()
    except http_requests.exceptions.HTTPError as e:
        # Prefer L2's structured error detail when its body is JSON.
        error_detail = ""
        try:
            error_detail = e.response.json().get("detail", str(e))
        except Exception:
            error_detail = str(e)
        return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Error: {error_detail}</div>'
    except Exception as e:
        return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Error: {e}</div>'
    # Update local metadata - save_l2_share updates last_synced_at on conflict
    await database.save_l2_share(
        content_hash=content_hash,
        actor_id=ctx.actor_id,
        l2_server=l2_server,
        asset_name=asset_name,
        content_type=current_share.get("content_type", "media")
    )
    return '<div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg mb-4">Updated on L2!</div>'
@app.get("/media")
async def list_media(
request: Request,
page: int = 1,
limit: int = 20,
folder: Optional[str] = None,
collection: Optional[str] = None,
tag: Optional[str] = None
):
"""List media items. HTML for browsers (with infinite scroll), JSON for APIs (with pagination)."""
ctx = await get_user_context_from_cookie(request)
if wants_html(request):
# Require login for HTML media view
if not ctx:
content = '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
return HTMLResponse(render_page("Media", content, None, active_tab="media"))
# Get hashes owned by/associated with this user
user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
# Get cache items that belong to the user (from cache_manager)
cache_items = []
seen_hashes = set() # Deduplicate by content_hash
for cached_file in cache_manager.list_all():
content_hash = cached_file.content_hash
if content_hash not in user_hashes:
continue
# Skip duplicates (same content from multiple runs)
if content_hash in seen_hashes:
continue
seen_hashes.add(content_hash)
# Skip recipes - they have their own section
if cached_file.node_type == "recipe":
continue
meta = await database.load_item_metadata(content_hash, ctx.actor_id)
# Apply folder filter
if folder:
item_folder = meta.get("folder", "/")
if folder != "/" and not item_folder.startswith(folder):
continue
if folder == "/" and item_folder != "/":
continue
# Apply collection filter
if collection:
if collection not in meta.get("collections", []):
continue
# Apply tag filter
if tag:
if tag not in meta.get("tags", []):
continue
cache_items.append({
"hash": content_hash,
"size": cached_file.size_bytes,
"mtime": cached_file.created_at,
"meta": meta
})
# Sort by modification time (newest first)
cache_items.sort(key=lambda x: x["mtime"], reverse=True)
total = len(cache_items)
# Pagination
start = (page - 1) * limit
end = start + limit
items_page = cache_items[start:end]
has_more = end < total
if not items_page:
if page == 1:
filter_msg = ""
if folder:
filter_msg = f" in folder {folder}"
elif collection:
filter_msg = f" in collection '{collection}'"
elif tag:
filter_msg = f" with tag '{tag}'"
content = f'<p class="text-gray-400 py-8 text-center">No media{filter_msg}. Upload files or run effects to see them here.</p>'
else:
return HTMLResponse("") # Empty for infinite scroll
else:
html_parts = []
for item in items_page:
content_hash = item["hash"]
cache_path = get_cache_path(content_hash)
media_type = detect_media_type(cache_path) if cache_path else "unknown"
# Format size
size = item["size"]
if size > 1024*1024:
size_str = f"{size/(1024*1024):.1f} MB"
elif size > 1024:
size_str = f"{size/1024:.1f} KB"
else:
size_str = f"{size} bytes"
html_parts.append(f'''
<a href="/cache/{content_hash}" class="block">
<div class="bg-dark-700 rounded-lg p-4 hover:bg-dark-600 transition-colors">
<div class="flex items-center justify-between gap-2 mb-3">
<span class="px-2 py-1 bg-blue-600 text-white text-xs font-medium rounded-full">{media_type}</span>
<span class="text-xs text-gray-400">{size_str}</span>
</div>
<div class="text-xs text-gray-400 font-mono mb-3 truncate">{content_hash[:24]}...</div>
<div class="flex justify-center bg-dark-600 rounded-lg p-2">
''')
if media_type == "video":
video_src = video_src_for_request(content_hash, request)
html_parts.append(f'<video src="{video_src}" controls muted loop playsinline class="max-h-32 rounded"></video>')
elif media_type == "image":
html_parts.append(f'<img src="/cache/{content_hash}/raw" alt="{content_hash[:16]}" class="max-h-32 rounded object-contain">')
else:
html_parts.append('<p class="text-gray-400 text-sm py-4">Unknown file type</p>')
html_parts.append('</div></div></a>')
# For infinite scroll, just return cards if not first page
if page > 1:
if has_more:
query_params = f"page={page + 1}"
if folder:
query_params += f"&folder={folder}"
if collection:
query_params += f"&collection={collection}"
if tag:
query_params += f"&tag={tag}"
html_parts.append(f'''
<div hx-get="/media?{query_params}" hx-trigger="revealed" hx-swap="afterend">
<p class="py-4 text-center text-gray-400">Loading more...</p>
</div>
''')
return HTMLResponse('\n'.join(html_parts))
# First page - full content
infinite_scroll_trigger = ""
if has_more:
query_params = "page=2"
if folder:
query_params += f"&folder={folder}"
if collection:
query_params += f"&collection={collection}"
if tag:
query_params += f"&tag={tag}"
infinite_scroll_trigger = f'''
<div hx-get="/media?{query_params}" hx-trigger="revealed" hx-swap="afterend">
<p class="py-4 text-center text-gray-400">Loading more...</p>
</div>
'''
content = f'''
<div class="flex items-center justify-between mb-6">
<h2 class="text-xl font-semibold text-white">Media ({total} items)</h2>
<div class="flex items-center gap-3">
<div id="sync-result"></div>
<button hx-post="/ui/sync-l2" hx-target="#sync-result" hx-swap="innerHTML"
class="px-3 py-1.5 bg-blue-600 hover:bg-blue-700 text-white text-sm font-medium rounded-lg transition-colors">
Sync with L2
</button>
</div>
</div>
<div class="grid gap-4 sm:grid-cols-2 lg:grid-cols-3">
{''.join(html_parts)}
{infinite_scroll_trigger}
</div>
'''
return HTMLResponse(render_page("Media", content, ctx.actor_id, active_tab="media"))
# JSON response for APIs - list all hashes with optional pagination
all_hashes = [cf.content_hash for cf in cache_manager.list_all()]
total = len(all_hashes)
start = (page - 1) * limit
end = start + limit
hashes_page = all_hashes[start:end]
has_more = end < total
return {
"hashes": hashes_page,
"pagination": {
"page": page,
"limit": limit,
"total": total,
"has_more": has_more
}
}
@app.delete("/cache/{content_hash}")
async def discard_cache(content_hash: str, ctx: UserContext = Depends(get_required_user_context)):
    """
    Discard (delete) a cached item.

    Enforces deletion rules:
    - Cannot delete items published to L2 (shared)
    - Cannot delete inputs/outputs of activities (runs)
    - Cannot delete pinned items

    Raises:
        HTTPException: 404 if not cached, 403 if not owned by the caller,
            400 if any deletion rule forbids removal.
    """
    # Check if content exists. Run the blocking cache check in a worker
    # thread (consistent with ui_discard_cache) so the event loop is not
    # stalled by filesystem access.
    has_content = await asyncio.to_thread(cache_manager.has_content, content_hash)
    if not has_content:
        raise HTTPException(404, "Content not found")
    # Check ownership
    user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in user_hashes:
        raise HTTPException(403, "Access denied")
    # Check if pinned
    meta = await database.load_item_metadata(content_hash, ctx.actor_id)
    if meta.get("pinned"):
        pin_reason = meta.get("pin_reason", "unknown")
        raise HTTPException(400, f"Cannot discard pinned item (reason: {pin_reason})")
    # Check if used by any run (Redis runs, not just activity store)
    runs_using = await asyncio.to_thread(find_runs_using_content, content_hash)
    if runs_using:
        run, role = runs_using[0]
        raise HTTPException(400, f"Cannot discard: item is {role} of run {run.run_id}")
    # Check deletion rules via cache_manager (L2 shared status, activity store)
    can_delete, reason = await asyncio.to_thread(cache_manager.can_delete, content_hash)
    if not can_delete:
        raise HTTPException(400, f"Cannot discard: {reason}")
    # Delete via cache_manager
    success, msg = await asyncio.to_thread(cache_manager.delete_by_content_hash, content_hash)
    if not success:
        # Fallback to legacy deletion
        cache_path = get_cache_path(content_hash)
        if cache_path and cache_path.exists():
            cache_path.unlink()
        # Clean up legacy metadata files
        meta_path = CACHE_DIR / f"{content_hash}.meta.json"
        if meta_path.exists():
            meta_path.unlink()
        mp4_path = CACHE_DIR / f"{content_hash}.mp4"
        if mp4_path.exists():
            mp4_path.unlink()
    return {"discarded": True, "content_hash": content_hash}
@app.delete("/ui/cache/{content_hash}/discard", response_class=HTMLResponse)
async def ui_discard_cache(content_hash: str, request: Request):
    """HTMX handler: discard a cached item.

    HTML twin of the JSON ``DELETE /cache/{content_hash}`` endpoint: the
    same guard checks run in the same order, but each failure is returned
    as an inline HTML error banner instead of an HTTPException.
    """
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Login required</div>'
    # Check ownership
    user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in user_hashes:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Access denied</div>'
    # Check if content exists (blocking check runs in a worker thread)
    has_content = await asyncio.to_thread(cache_manager.has_content, content_hash)
    if not has_content:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Content not found</div>'
    # Check if pinned
    meta = await database.load_item_metadata(content_hash, ctx.actor_id)
    if meta.get("pinned"):
        pin_reason = meta.get("pin_reason", "unknown")
        return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Cannot discard: item is pinned ({pin_reason})</div>'
    # Check if used by any run (Redis runs, not just activity store)
    runs_using = await asyncio.to_thread(find_runs_using_content, content_hash)
    if runs_using:
        run, role = runs_using[0]
        return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Cannot discard: item is {role} of run {run.run_id}</div>'
    # Check deletion rules via cache_manager (L2 shared status, activity store)
    can_delete, reason = await asyncio.to_thread(cache_manager.can_delete, content_hash)
    if not can_delete:
        return f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg mb-4">Cannot discard: {reason}</div>'
    # Delete via cache_manager
    success, msg = await asyncio.to_thread(cache_manager.delete_by_content_hash, content_hash)
    if not success:
        # Fallback to legacy deletion
        cache_path = get_cache_path(content_hash)
        if cache_path and cache_path.exists():
            cache_path.unlink()
        # Clean up legacy metadata files
        meta_path = CACHE_DIR / f"{content_hash}.meta.json"
        if meta_path.exists():
            meta_path.unlink()
        mp4_path = CACHE_DIR / f"{content_hash}.mp4"
        if mp4_path.exists():
            mp4_path.unlink()
    return '''
    <div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg mb-4">
        Item discarded. <a href="/media" class="underline">Back to media</a>
    </div>
    '''
# Known assets (bootstrap data)
# Maps a friendly asset name -> content hash expected to be in the cache.
KNOWN_ASSETS = {
    "cat": "33268b6e167deaf018cc538de12dbe562612b33e89a749391cef855b320a269b",
}
@app.get("/assets")
async def list_assets():
"""List known assets."""
return KNOWN_ASSETS
@app.post("/cache/import")
async def import_to_cache(path: str):
    """Import a file from the local filesystem into the content cache.

    NOTE(review): accepts an arbitrary server-side path and declares no
    auth dependency — confirm this endpoint is only reachable by trusted
    callers.
    """
    source = Path(path)
    if not source.exists():
        raise HTTPException(404, f"File not found: {path}")
    digest = await cache_file(source)
    return {"content_hash": digest, "cached": True}
def save_cache_meta(content_hash: str, uploader: str = None, filename: str = None, **updates):
    """Create or update the legacy JSON metadata sidecar for a cached file.

    On first write the sidecar records uploader, uploaded_at, and filename.
    Subsequent calls merge ``updates`` into the existing data, but the
    uploader and uploaded_at fields are never overwritten.

    Returns:
        The resulting metadata dict.
    """
    meta_path = CACHE_DIR / f"{content_hash}.meta.json"
    if meta_path.exists():
        meta = json.loads(meta_path.read_text())
    else:
        meta = {
            "uploader": uploader,
            "uploaded_at": datetime.now(timezone.utc).isoformat(),
            "filename": filename
        }
    # Merge updates, protecting the immutable provenance fields.
    immutable = ("uploader", "uploaded_at")
    for key, value in updates.items():
        if key not in immutable:
            meta[key] = value
    meta_path.write_text(json.dumps(meta, indent=2))
    return meta
def load_cache_meta(content_hash: str) -> dict:
    """Return the legacy JSON metadata for a cached file, or {} if absent."""
    meta_path = CACHE_DIR / f"{content_hash}.meta.json"
    if not meta_path.exists():
        return {}
    return json.loads(meta_path.read_text())
# User data storage (folders, collections)
# Per-user JSON files live under CACHE_DIR/.user-data/<safe_username>.json.
USER_DATA_DIR = CACHE_DIR / ".user-data"
def load_user_data(username: str) -> dict:
    """Load a user's folders and collections from their JSON file.

    Falls back to the default layout (root folder only, no collections)
    when the user has no saved data yet.
    """
    USER_DATA_DIR.mkdir(parents=True, exist_ok=True)
    # Strip the @ prefix and path separators so the name is filesystem-safe.
    safe_name = username.replace("@", "").replace("/", "_")
    user_file = USER_DATA_DIR / f"{safe_name}.json"
    if not user_file.exists():
        return {"folders": ["/"], "collections": []}
    return json.loads(user_file.read_text())
def save_user_data(username: str, data: dict):
    """Persist a user's folders and collections to their JSON file."""
    USER_DATA_DIR.mkdir(parents=True, exist_ok=True)
    # Same filesystem-safe name normalization as load_user_data.
    safe_name = username.replace("@", "").replace("/", "_")
    (USER_DATA_DIR / f"{safe_name}.json").write_text(json.dumps(data, indent=2))
async def get_user_cache_hashes(username: str, actor_id: Optional[str] = None) -> set:
    """Get all cache hashes owned by or associated with a user.

    Combines three sources:
      1. Items recorded for the actor in the database (new system).
      2. Legacy ``*.meta.json`` sidecar files whose uploader matches.
      3. Inputs/outputs of the user's runs.

    Args:
        username: The plain username.
        actor_id: The full actor ID (@user@server), if available.

    Returns:
        Set of content-hash strings.
    """
    # Match against both formats for backwards compatibility
    match_values = [username]
    if actor_id:
        match_values.append(actor_id)
    hashes = set()
    # 1. Query database for items owned by user (new system)
    if actor_id:
        try:
            db_items = await database.get_user_items(actor_id)
            for item in db_items:
                hashes.add(item["content_hash"])
        except Exception:
            pass  # Database may not be initialized
    # 2. Legacy: files uploaded by user (JSON sidecar metadata).
    #    json is imported at module level; f is already the full path.
    if CACHE_DIR.exists():
        for f in CACHE_DIR.iterdir():
            if not f.name.endswith('.meta.json'):
                continue
            try:
                with open(f, 'r') as mf:
                    meta = json.load(mf)
                if meta.get("uploader") in match_values:
                    # Strip only the trailing suffix to recover the hash.
                    hashes.add(f.name.removesuffix('.meta.json'))
            except Exception:
                pass
    # 3. Files from user's runs (inputs and outputs)
    for run in list_all_runs():
        if run.username in match_values:
            hashes.update(run.inputs)
            if run.output_hash:
                hashes.add(run.output_hash)
    return hashes
@app.post("/cache/upload")
async def upload_to_cache(file: UploadFile = File(...), ctx: UserContext = Depends(get_required_user_context)):
    """Upload a file to cache. Requires authentication.

    The upload is spooled to a temp file, ingested into the cache (the
    cache manager consumes the file via move=True), and recorded in the
    database with the uploader's actor id and original filename.
    """
    # Write to temp file first
    import tempfile
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        content = await file.read()
        tmp.write(content)
        tmp_path = Path(tmp.name)
    try:
        # Store in cache via cache_manager (move=True consumes the temp file)
        cached, ipfs_cid = cache_manager.put(tmp_path, node_type="upload", move=True)
    finally:
        # If put() failed before moving the file, don't leak the temp file.
        if tmp_path.exists():
            tmp_path.unlink(missing_ok=True)
    content_hash = cached.content_hash
    # Save to cache_items table (with IPFS CID)
    await database.create_cache_item(content_hash, ipfs_cid)
    # Save uploader metadata to database
    await database.save_item_metadata(
        content_hash=content_hash,
        actor_id=ctx.actor_id,
        item_type="media",
        filename=file.filename
    )
    return {"content_hash": content_hash, "filename": file.filename, "size": len(content)}
class CacheMetaUpdate(BaseModel):
    """Request to update cache metadata.

    All fields are optional; only fields that are present (non-None) are
    applied by the PATCH handler.
    """
    origin: Optional[dict] = None  # {"type": "self"|"external", "url": "...", "note": "..."}
    description: Optional[str] = None
    tags: Optional[list[str]] = None
    folder: Optional[str] = None  # must already exist in the user's folder list
    collections: Optional[list[str]] = None  # each must already exist for the user
class PublishRequest(BaseModel):
    """Request to publish a cache item to L2."""
    asset_name: str  # name the asset will be published under on L2
    asset_type: str = "image"  # image, video, etc.
class AddStorageRequest(BaseModel):
    """Request to add a storage provider."""
    provider_type: str  # 'pinata', 'web3storage', 'local', etc.
    provider_name: Optional[str] = None  # User-friendly name
    config: dict  # Provider-specific config (api_key, path, etc.)
    capacity_gb: int  # Storage capacity in GB
class UpdateStorageRequest(BaseModel):
    """Request to update a storage provider.

    All fields optional; only supplied fields are changed.
    """
    config: Optional[dict] = None
    capacity_gb: Optional[int] = None
    is_active: Optional[bool] = None
@app.get("/cache/{content_hash}/meta")
async def get_cache_meta(content_hash: str, ctx: UserContext = Depends(get_required_user_context)):
    """Return the database metadata for a cached file.

    404s when the content is not cached; 403s unless the caller owns (or
    is associated with) the item.
    """
    if not get_cache_path(content_hash):
        raise HTTPException(404, "Content not found")
    owned = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in owned:
        raise HTTPException(403, "Access denied")
    return await database.load_item_metadata(content_hash, ctx.actor_id)
@app.patch("/cache/{content_hash}/meta")
async def update_cache_meta(content_hash: str, update: CacheMetaUpdate, ctx: UserContext = Depends(get_required_user_context)):
    """Update metadata for a cached file.

    Only the fields present (non-None) in the request are applied. Folder
    and collection values must already exist in the user's saved layout.
    """
    if not get_cache_path(content_hash):
        raise HTTPException(404, "Content not found")
    # Only the owner may edit metadata.
    owned = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in owned:
        raise HTTPException(403, "Access denied")
    # Collect just the fields the caller actually supplied.
    updates = {}
    if update.origin is not None:
        updates["origin"] = update.origin
    if update.description is not None:
        updates["description"] = update.description
    if update.tags is not None:
        updates["tags"] = update.tags
    if update.folder is not None:
        # The target folder must already exist for this user.
        user_data = load_user_data(ctx.username)
        if update.folder not in user_data["folders"]:
            raise HTTPException(400, f"Folder does not exist: {update.folder}")
        updates["folder"] = update.folder
    if update.collections is not None:
        # Every referenced collection must already exist.
        user_data = load_user_data(ctx.username)
        known = {c["name"] for c in user_data["collections"]}
        for col in update.collections:
            if col not in known:
                raise HTTPException(400, f"Collection does not exist: {col}")
        updates["collections"] = update.collections
    return await database.update_item_metadata(content_hash, ctx.actor_id, **updates)
@app.post("/cache/{content_hash}/publish")
async def publish_cache_to_l2(
    content_hash: str,
    req: PublishRequest,
    request: Request,
    ctx: UserContext = Depends(get_required_user_context)
):
    """
    Publish a cache item to L2 (ActivityPub).

    Requires origin to be set in metadata before publishing.

    Raises:
        HTTPException: 404 if not cached, 403 if not owned, 400 if origin
            is missing or L2 rejects the publish, 401 without a token,
            500 on other L2 failures.
    """
    # Check file exists
    cache_path = get_cache_path(content_hash)
    if not cache_path:
        raise HTTPException(404, "Content not found")
    # Check ownership
    user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in user_hashes:
        raise HTTPException(403, "Access denied")
    # Load metadata
    meta = await database.load_item_metadata(content_hash, ctx.actor_id)
    # Check origin is set (type "self" or "external")
    origin = meta.get("origin")
    if not origin or "type" not in origin:
        raise HTTPException(400, "Origin must be set before publishing. Use --origin self or --origin-url <url>")
    # Get IPFS CID from cache item
    cache_item = await database.get_cache_item(content_hash)
    ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None
    # Get auth token to pass to L2 (cookie first, then Authorization header)
    token = request.cookies.get("auth_token")
    if not token:
        # Try from header
        auth_header = request.headers.get("Authorization", "")
        if auth_header.startswith("Bearer "):
            token = auth_header[7:]
    if not token:
        raise HTTPException(401, "Authentication token required")
    # Call L2 publish-cache endpoint (use user's L2 server)
    l2_server = ctx.l2_server
    try:
        resp = http_requests.post(
            f"{l2_server}/assets/publish-cache",
            headers={"Authorization": f"Bearer {token}"},
            json={
                "content_hash": content_hash,
                "ipfs_cid": ipfs_cid,
                "asset_name": req.asset_name,
                "asset_type": req.asset_type,
                "origin": origin,
                "description": meta.get("description"),
                "tags": meta.get("tags", []),
                "metadata": {
                    "filename": meta.get("filename"),
                    "folder": meta.get("folder"),
                    "collections": meta.get("collections", [])
                }
            },
            timeout=10
        )
        resp.raise_for_status()
        l2_result = resp.json()
    except http_requests.exceptions.HTTPError as e:
        # Prefer L2's structured error detail when its body is JSON.
        error_detail = ""
        try:
            error_detail = e.response.json().get("detail", str(e))
        except Exception:
            error_detail = str(e)
        raise HTTPException(400, f"L2 publish failed: {error_detail}")
    except Exception as e:
        raise HTTPException(500, f"L2 publish failed: {e}")
    # Update local metadata with publish status and pin
    await database.save_l2_share(
        content_hash=content_hash,
        actor_id=ctx.actor_id,
        l2_server=l2_server,
        asset_name=req.asset_name,
        content_type=req.asset_type
    )
    # Pin so a published item cannot be discarded locally.
    await database.update_item_metadata(
        content_hash=content_hash,
        actor_id=ctx.actor_id,
        pinned=True,
        pin_reason="published"
    )
    return {
        "published": True,
        "asset_name": req.asset_name,
        "l2_result": l2_result
    }
@app.patch("/cache/{content_hash}/republish")
async def republish_cache_to_l2(
    content_hash: str,
    request: Request,
    ctx: UserContext = Depends(get_required_user_context)
):
    """
    Re-publish (update) a cache item on L2 after metadata changes.
    Only works for already-published items.

    Raises:
        HTTPException: 404 if not cached, 403 if not owned, 400 if never
            published to this L2 or L2 rejects the update, 401 without a
            token, 500 on other L2 failures.
    """
    # Check file exists
    cache_path = get_cache_path(content_hash)
    if not cache_path:
        raise HTTPException(404, "Content not found")
    # Check ownership
    user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
    if content_hash not in user_hashes:
        raise HTTPException(403, "Access denied")
    # Load metadata and locate the share record for the user's L2 server.
    # (l2_server is bound once here; the original redundantly reassigned it.)
    meta = await database.load_item_metadata(content_hash, ctx.actor_id)
    l2_server = ctx.l2_server
    current_share = next(
        (share for share in meta.get("l2_shares", [])
         if share.get("l2_server") == l2_server),
        None
    )
    if not current_share:
        raise HTTPException(400, "Item not published yet. Use publish first.")
    asset_name = current_share.get("asset_name")
    if not asset_name:
        raise HTTPException(400, "No asset name found in publish info")
    # Get auth token (cookie first, then Authorization header)
    token = request.cookies.get("auth_token")
    if not token:
        auth_header = request.headers.get("Authorization", "")
        if auth_header.startswith("Bearer "):
            token = auth_header[7:]
    if not token:
        raise HTTPException(401, "Authentication token required")
    # Get IPFS CID from cache item
    cache_item = await database.get_cache_item(content_hash)
    ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None
    # Call L2 update endpoint on the user's L2 server
    try:
        resp = http_requests.patch(
            f"{l2_server}/assets/{asset_name}",
            headers={"Authorization": f"Bearer {token}"},
            json={
                "description": meta.get("description"),
                "tags": meta.get("tags"),
                "origin": meta.get("origin"),
                "ipfs_cid": ipfs_cid,
                "metadata": {
                    "filename": meta.get("filename"),
                    "folder": meta.get("folder"),
                    "collections": meta.get("collections", [])
                }
            },
            timeout=10
        )
        resp.raise_for_status()
        l2_result = resp.json()
    except http_requests.exceptions.HTTPError as e:
        # Prefer L2's structured error detail when its body is JSON.
        error_detail = ""
        try:
            error_detail = e.response.json().get("detail", str(e))
        except Exception:
            error_detail = str(e)
        raise HTTPException(400, f"L2 update failed: {error_detail}")
    except Exception as e:
        raise HTTPException(500, f"L2 update failed: {e}")
    # Update local metadata - save_l2_share updates last_synced_at on conflict
    await database.save_l2_share(
        content_hash=content_hash,
        actor_id=ctx.actor_id,
        l2_server=l2_server,
        asset_name=asset_name,
        content_type=current_share.get("content_type", "media")
    )
    return {
        "updated": True,
        "asset_name": asset_name,
        "l2_result": l2_result
    }
# ============ L2 Sync ============
def _fetch_l2_outbox_sync(l2_server: str, username: str) -> list:
    """Fetch a user's outbox activities from L2.

    Blocking helper intended to be run via asyncio.to_thread. Returns the
    outbox page's orderedItems list, or [] on any failure.
    """
    try:
        # Request the outbox page that embeds the activities.
        resp = http_requests.get(
            f"{l2_server}/users/{username}/outbox?page=true",
            headers={"Accept": "application/activity+json"},
            timeout=10
        )
    except Exception as e:
        logger.error(f"Failed to fetch L2 outbox: {e}")
        return []
    if resp.status_code != 200:
        logger.warning(f"L2 outbox fetch failed: {resp.status_code}")
        return []
    try:
        return resp.json().get("orderedItems", [])
    except Exception as e:
        logger.error(f"Failed to fetch L2 outbox: {e}")
        return []
@app.post("/user/sync-l2")
async def sync_with_l2(ctx: UserContext = Depends(get_required_user_context)):
    """
    Sync local L2 share records with user's L2 outbox.

    Fetches user's published assets from their L2 server and updates local tracking.
    Only "Create" activities whose object resolves to locally cached content
    are recorded.

    Returns:
        dict with ``synced`` (share records written) plus either ``message``
        (nothing fetched) or ``total_activities``.
    """
    l2_server = ctx.l2_server
    username = ctx.username
    # Fetch outbox activities (blocking HTTP runs in a worker thread)
    activities = await asyncio.to_thread(_fetch_l2_outbox_sync, l2_server, username)
    if not activities:
        return {"synced": 0, "message": "No activities found or L2 unavailable"}
    # Process Create activities for assets
    synced_count = 0
    for activity in activities:
        if activity.get("type") != "Create":
            continue
        obj = activity.get("object", {})
        if not isinstance(obj, dict):
            continue
        # Get asset info - look for content_hash in attachment or directly
        content_hash = None
        asset_name = obj.get("name", "")
        # Check attachments for content hash
        for attachment in obj.get("attachment", []):
            if attachment.get("name") == "content_hash":
                content_hash = attachment.get("value")
                break
        # Also check if there's a hash in the object URL or ID
        if not content_hash:
            # Try to extract from object ID like /objects/{hash}
            obj_id = obj.get("id", "")
            if "/objects/" in obj_id:
                content_hash = obj_id.split("/objects/")[-1].split("/")[0]
        if not content_hash or not asset_name:
            continue
        # Check if we have this content locally
        cache_path = get_cache_path(content_hash)
        if not cache_path:
            continue  # We don't have this content, skip
        # Determine content type from object type
        obj_type = obj.get("type", "")
        if obj_type == "Video":
            content_type = "video"
        elif obj_type == "Image":
            content_type = "image"
        else:
            content_type = "media"
        # Update local L2 share record (upsert; refreshes last_synced_at)
        await database.save_l2_share(
            content_hash=content_hash,
            actor_id=ctx.actor_id,
            l2_server=l2_server,
            asset_name=asset_name,
            content_type=content_type
        )
        synced_count += 1
    return {"synced": synced_count, "total_activities": len(activities)}
@app.post("/ui/sync-l2", response_class=HTMLResponse)
async def ui_sync_with_l2(request: Request):
    """HTMX handler: trigger an L2 sync and render the outcome as a banner."""
    ctx = await get_user_context_from_cookie(request)
    if ctx is None:
        return '<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg">Login required</div>'
    try:
        outcome = await sync_with_l2(ctx)
    except Exception as e:
        logger.error(f"L2 sync failed: {e}")
        return f'''
        <div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg">
            Sync failed: {str(e)}
        </div>
        '''
    synced = outcome.get("synced", 0)
    total = outcome.get("total_activities", 0)
    if synced > 0:
        return f'''
        <div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg">
            Synced {synced} asset(s) from L2 ({total} activities found)
        </div>
        '''
    return f'''
    <div class="bg-yellow-900/50 border border-yellow-700 text-yellow-300 px-4 py-3 rounded-lg">
        No new assets to sync ({total} activities found)
    </div>
    '''
# ============ Folder & Collection Management ============
@app.get("/user/folders")
async def list_folders(username: str = Depends(get_required_user)):
    """Return the authenticated user's folder list."""
    return {"folders": load_user_data(username)["folders"]}
@app.post("/user/folders")
async def create_folder(folder_path: str, username: str = Depends(get_required_user)):
    """Create a new folder for the user.

    The path must be absolute (start with "/"), its parent must already
    exist, and it must not already be present.
    """
    user_data = load_user_data(username)
    if not folder_path.startswith("/"):
        raise HTTPException(400, "Folder path must start with /")
    # Derive the parent path; top-level folders have parent "/".
    parent = "/".join(folder_path.rsplit("/", 1)[:-1]) or "/"
    if parent != "/" and parent not in user_data["folders"]:
        raise HTTPException(400, f"Parent folder does not exist: {parent}")
    if folder_path in user_data["folders"]:
        raise HTTPException(400, f"Folder already exists: {folder_path}")
    # Keep the folder list sorted after insertion.
    user_data["folders"] = sorted(user_data["folders"] + [folder_path])
    save_user_data(username, user_data)
    return {"folder": folder_path, "created": True}
@app.delete("/user/folders")
async def delete_folder(folder_path: str, ctx: UserContext = Depends(get_required_user_context)):
"""Delete a folder (must be empty)."""
if folder_path == "/":
raise HTTPException(400, "Cannot delete root folder")
user_data = load_user_data(ctx.username)
if folder_path not in user_data["folders"]:
raise HTTPException(404, "Folder not found")
# Check no subfolders
for f in user_data["folders"]:
if f.startswith(folder_path + "/"):
raise HTTPException(400, f"Folder has subfolders: {f}")
# Check no items in folder
user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
for h in user_hashes:
meta = await database.load_item_metadata(h, ctx.actor_id)
if meta.get("folder") == folder_path:
raise HTTPException(400, "Folder is not empty")
user_data["folders"].remove(folder_path)
save_user_data(ctx.username, user_data)
return {"folder": folder_path, "deleted": True}
@app.get("/user/collections")
async def list_collections(username: str = Depends(get_required_user)):
"""List user's collections."""
user_data = load_user_data(username)
return {"collections": user_data["collections"]}
@app.post("/user/collections")
async def create_collection(name: str, username: str = Depends(get_required_user)):
"""Create a new collection."""
user_data = load_user_data(username)
# Check doesn't already exist
for col in user_data["collections"]:
if col["name"] == name:
raise HTTPException(400, f"Collection already exists: {name}")
user_data["collections"].append({
"name": name,
"created_at": datetime.now(timezone.utc).isoformat()
})
save_user_data(username, user_data)
return {"collection": name, "created": True}
@app.delete("/user/collections")
async def delete_collection(name: str, ctx: UserContext = Depends(get_required_user_context)):
"""Delete a collection."""
user_data = load_user_data(ctx.username)
# Find and remove
for i, col in enumerate(user_data["collections"]):
if col["name"] == name:
user_data["collections"].pop(i)
save_user_data(ctx.username, user_data)
# Remove from all cache items
user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
for h in user_hashes:
meta = await database.load_item_metadata(h, ctx.actor_id)
if name in meta.get("collections", []):
new_collections = [c for c in meta.get("collections", []) if c != name]
await database.update_item_metadata(h, ctx.actor_id, collections=new_collections)
return {"collection": name, "deleted": True}
raise HTTPException(404, "Collection not found")
def is_ios_request(request: Request) -> bool:
    """Return True when the User-Agent header identifies an iOS device."""
    user_agent = request.headers.get("user-agent", "").lower()
    return any(marker in user_agent for marker in ("iphone", "ipad"))
def video_src_for_request(content_hash: str, request: Request) -> str:
    """Pick the video URL variant: transcoded MP4 for iOS clients, raw bytes otherwise."""
    variant = "mp4" if is_ios_request(request) else "raw"
    return f"/cache/{content_hash}/{variant}"
def detect_media_type(cache_path: Path) -> str:
    """Classify a cached file as "image", "video", or "unknown" by magic bytes.

    Only the first 32 bytes are inspected; files too short to match any
    signature fall through to "unknown" (slicing never raises).
    """
    with open(cache_path, "rb") as fh:
        head = fh.read(32)
    is_riff = head[:4] == b'RIFF'
    # Video container signatures
    if head[:4] == b'\x1a\x45\xdf\xa3':  # EBML header -> WebM/MKV
        return "video"
    if head[4:8] == b'ftyp':  # ISO BMFF -> MP4/MOV
        return "video"
    if is_riff and head[8:12] == b'AVI ':  # RIFF-wrapped AVI
        return "video"
    # Still-image signatures
    if head[:8] == b'\x89PNG\r\n\x1a\n':  # PNG
        return "image"
    if head[:2] == b'\xff\xd8':  # JPEG SOI marker
        return "image"
    if head[:6] in (b'GIF87a', b'GIF89a'):  # GIF
        return "image"
    if is_riff and head[8:12] == b'WEBP':  # RIFF-wrapped WebP
        return "image"
    return "unknown"
async def get_user_context_from_cookie(request) -> Optional[UserContext]:
    """Resolve the auth_token cookie to a verified UserContext (with actor_id
    and l2_server), or None when the cookie is missing or invalid."""
    token = request.cookies.get("auth_token")
    if not token:
        return None
    return await get_verified_user_context(token)
async def get_user_from_cookie(request) -> Optional[str]:
    """Return the cookie-authenticated username, or None.

    Backwards-compat shim; new code should use get_user_context_from_cookie.
    """
    context = await get_user_context_from_cookie(request)
    if context is None:
        return None
    return context.username
def wants_html(request: Request) -> bool:
    """Heuristic: does this request expect an HTML page rather than JSON?"""
    accept_header = request.headers.get("accept", "")
    # Explicit HTML Accept header that does not also ask for JSON
    html_only = "text/html" in accept_header and "application/json" not in accept_header
    # Browser top-level navigation (address bar / link click)
    navigating = request.headers.get("sec-fetch-mode", "") == "navigate"
    return html_only or navigating
# Tailwind CSS config for all L1 templates
# Injected into every page <head>; loads Tailwind and htmx from public CDNs,
# so the UI requires outbound network access from the browser.
TAILWIND_CONFIG = '''
<script src="https://cdn.tailwindcss.com"></script>
<script>
tailwind.config = {
darkMode: 'class',
theme: {
extend: {
colors: {
dark: { 900: '#0a0a0a', 800: '#111', 700: '#1a1a1a', 600: '#222', 500: '#333' }
}
}
}
}
</script>
<script src="https://unpkg.com/htmx.org@1.9.10"></script>
'''
# Cytoscape.js for DAG visualization (extends TAILWIND_CONFIG)
CYTOSCAPE_CONFIG = TAILWIND_CONFIG + '''
<script src="https://cdnjs.cloudflare.com/ajax/libs/cytoscape/3.28.1/cytoscape.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/dagre/0.8.5/dagre.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/cytoscape-dagre@2.5.0/cytoscape-dagre.min.js"></script>
'''
# Node colors for DAG visualization
# Keys are plan-node types; "default" is the fallback for unrecognized types.
NODE_COLORS = {
"SOURCE": "#3b82f6", # Blue
"EFFECT": "#22c55e", # Green
"OUTPUT": "#a855f7", # Purple
"ANALYSIS": "#f59e0b", # Amber
"_LIST": "#6366f1", # Indigo
"default": "#6b7280" # Gray
}
def render_run_sub_tabs(run_id: str, active: str = "overview") -> str:
    """Build the sub-navigation tab bar HTML for a run's detail pages.

    The tab whose id equals `active` is highlighted with a blue underline;
    the others render as muted links.
    """
    tab_specs = (
        ("overview", "Overview", f"/run/{run_id}"),
        ("plan", "Plan", f"/run/{run_id}/plan"),
        ("analysis", "Analysis", f"/run/{run_id}/analysis"),
        ("artifacts", "Artifacts", f"/run/{run_id}/artifacts"),
    )
    links = []
    for tab_id, label, url in tab_specs:
        css = (
            "border-b-2 border-blue-500 text-white"
            if tab_id == active
            else "text-gray-400 hover:text-white"
        )
        links.append(f'<a href="{url}" class="pb-3 px-2 font-medium transition-colors {css}">{label}</a>')
    return '<div class="flex gap-4 mb-6 border-b border-dark-500">' + ''.join(links) + '</div>'
def render_dag_cytoscape(nodes_json: str, edges_json: str, container_id: str = "cy", run_id: str = "", initial_node: str = "") -> str:
    """Render Cytoscape.js DAG visualization HTML with HTMX SPA-style navigation.

    Args:
        nodes_json: pre-serialized JSON array for cytoscape `elements.nodes`
            (injected verbatim into the inline script — caller must serialize).
        edges_json: pre-serialized JSON array for `elements.edges`.
        container_id: DOM id for the graph container and its detail panel.
        run_id: run whose /run/{run_id}/plan/node/... endpoints serve node details.
        initial_node: optional step id to select and load on first render.

    Returns:
        An HTML fragment (container div, detail panel, inline script). The
        script exposes window.artdagCy (the cytoscape instance) and
        window.updateNodeStatus(stepId, status, cacheId, hasCached) so a
        future WebSocket can push live status changes. Node clicks fetch
        detail HTML and push /run/{run_id}/plan?node=... onto history.
    """
    return f'''
<div id="{container_id}" class="w-full h-96 bg-dark-800 rounded-lg border border-dark-500"></div>
<div id="{container_id}-details" class="mt-4 p-4 bg-dark-700 rounded-lg {"" if initial_node else "hidden"}">
<div id="{container_id}-details-content">
{"Loading..." if initial_node else ""}
</div>
</div>
<script>
// SPA-style node detail functions
var currentRunId = '{run_id}';
var currentNodeId = '{initial_node}';
function loadNodeDetail(stepId) {{
if (!currentRunId || !stepId) return;
currentNodeId = stepId;
var detailsEl = document.getElementById('{container_id}-details');
var contentEl = document.getElementById('{container_id}-details-content');
detailsEl.classList.remove('hidden');
contentEl.innerHTML = '<div class="text-gray-400">Loading...</div>';
// Update URL without full page reload
var newUrl = '/run/' + currentRunId + '/plan?node=' + encodeURIComponent(stepId);
history.pushState({{ node: stepId }}, '', newUrl);
// Fetch node details via HTMX-style fetch
fetch('/run/' + currentRunId + '/plan/node/' + encodeURIComponent(stepId))
.then(function(response) {{ return response.text(); }})
.then(function(html) {{
contentEl.innerHTML = html;
// Trigger HTMX to process any dynamic content
if (typeof htmx !== 'undefined') {{
htmx.process(contentEl);
}}
}})
.catch(function(err) {{
contentEl.innerHTML = '<p class="text-red-400">Error loading node details</p>';
}});
// Select the node in cytoscape
if (window.artdagCy) {{
window.artdagCy.nodes().unselect();
var node = window.artdagCy.getElementById(stepId);
if (node && node.length) {{
node.select();
}}
}}
}}
function closeNodeDetail() {{
var detailsEl = document.getElementById('{container_id}-details');
detailsEl.classList.add('hidden');
currentNodeId = '';
// Update URL to remove node parameter
var newUrl = '/run/' + currentRunId + '/plan';
history.pushState({{ node: null }}, '', newUrl);
// Unselect in cytoscape
if (window.artdagCy) {{
window.artdagCy.nodes().unselect();
}}
}}
// Handle browser back/forward
window.addEventListener('popstate', function(event) {{
if (event.state && event.state.node) {{
loadNodeDetail(event.state.node);
}} else {{
closeNodeDetail();
}}
}});
document.addEventListener('DOMContentLoaded', function() {{
// Register dagre layout
if (typeof cytoscape !== 'undefined' && typeof cytoscapeDagre !== 'undefined') {{
cytoscape.use(cytoscapeDagre);
}}
// Global instance for future WebSocket updates
window.artdagCy = cytoscape({{
container: document.getElementById('{container_id}'),
elements: {{
nodes: {nodes_json},
edges: {edges_json}
}},
style: [
{{
selector: 'node',
style: {{
'label': 'data(label)',
'text-valign': 'center',
'text-halign': 'center',
'background-color': 'data(color)',
'color': '#fff',
'font-size': '10px',
'text-outline-color': '#000',
'text-outline-width': 1,
'width': 80,
'height': 40,
'shape': 'roundrectangle',
'border-width': 2,
'border-color': '#444'
}}
}},
{{
selector: 'node[status="cached"]',
style: {{
'border-color': '#22c55e',
'border-width': 3
}}
}},
{{
selector: 'node[status="completed"]',
style: {{
'border-color': '#22c55e',
'border-width': 3
}}
}},
{{
selector: 'node[status="running"]',
style: {{
'border-color': '#eab308',
'border-width': 3
}}
}},
{{
selector: 'node[isOutput]',
style: {{
'border-color': '#a855f7',
'border-width': 3
}}
}},
{{
selector: 'edge',
style: {{
'width': 2,
'line-color': '#666',
'target-arrow-color': '#666',
'target-arrow-shape': 'triangle',
'curve-style': 'bezier'
}}
}},
{{
selector: ':selected',
style: {{
'border-color': '#3b82f6',
'border-width': 4
}}
}}
],
layout: {{
name: 'dagre',
rankDir: 'TB',
nodeSep: 50,
rankSep: 80,
padding: 20
}}
}});
// Node click handler - SPA style with URL update
window.artdagCy.on('tap', 'node', function(evt) {{
var node = evt.target;
var stepId = node.data('id');
loadNodeDetail(stepId);
}});
// Click on background closes detail
window.artdagCy.on('tap', function(evt) {{
if (evt.target === window.artdagCy) {{
closeNodeDetail();
}}
}});
// Load initial node from URL if specified
if (currentNodeId) {{
loadNodeDetail(currentNodeId);
}}
// WebSocket update function for real-time status updates
window.updateNodeStatus = function(stepId, status, cacheId, hasCached) {{
if (!window.artdagCy) return;
var node = window.artdagCy.getElementById(stepId);
if (node && node.length) {{
node.data('status', status);
if (cacheId) node.data('cacheId', cacheId);
if (hasCached !== undefined) node.data('hasCached', hasCached);
}}
}};
}});
</script>
'''
def render_page_with_cytoscape(title: str, content: str, actor_id: Optional[str] = None, active_tab: str = None) -> str:
    """Render a full page with Cytoscape.js support for DAG visualization.

    Same chrome as render_page() but uses CYTOSCAPE_CONFIG so the
    cytoscape/dagre scripts are loaded.

    actor_id: the user's "@user@domain" handle, or None when not logged in.
    active_tab: which top-nav tab ("runs"/"recipes"/"media"/"storage") to highlight.
    """
    user_info = ""
    if actor_id:
        # Split "@username@domain" to build a link back to the user's L2 profile.
        parts = actor_id.lstrip("@").split("@")
        username = parts[0] if parts else actor_id
        domain = parts[1] if len(parts) > 1 else ""
        l2_user_url = f"https://{domain}/users/{username}" if domain else "#"
        user_info = f'''
<div class="flex items-center gap-4 text-sm text-gray-400">
Logged in as <a href="{l2_user_url}" class="text-white hover:text-blue-300">{actor_id}</a>
</div>
'''
    else:
        user_info = '''
<div class="text-sm text-gray-400">
Not logged in
</div>
'''
    # Highlight only the active nav tab
    runs_active = "border-b-2 border-blue-500 text-white" if active_tab == "runs" else "text-gray-400 hover:text-white"
    recipes_active = "border-b-2 border-blue-500 text-white" if active_tab == "recipes" else "text-gray-400 hover:text-white"
    media_active = "border-b-2 border-blue-500 text-white" if active_tab == "media" else "text-gray-400 hover:text-white"
    storage_active = "border-b-2 border-blue-500 text-white" if active_tab == "storage" else "text-gray-400 hover:text-white"
    return f"""
<!DOCTYPE html>
<html class="dark">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{title} | Art DAG L1 Server</title>
{CYTOSCAPE_CONFIG}
</head>
<body class="bg-dark-900 text-gray-100 min-h-screen">
<div class="max-w-6xl mx-auto px-4 py-6 sm:px-6 lg:px-8">
<header class="flex flex-wrap items-center justify-between gap-4 mb-6">
<h1 class="text-2xl font-bold">
<a href="/" class="text-white hover:text-gray-200">Art DAG L1 Server</a>
</h1>
{user_info}
</header>
<nav class="flex gap-6 mb-6 border-b border-dark-500 pb-0">
<a href="/runs" class="pb-3 px-1 font-medium transition-colors {runs_active}">Runs</a>
<a href="/recipes" class="pb-3 px-1 font-medium transition-colors {recipes_active}">Recipes</a>
<a href="/media" class="pb-3 px-1 font-medium transition-colors {media_active}">Media</a>
<a href="/storage" class="pb-3 px-1 font-medium transition-colors {storage_active}">Storage</a>
<a href="/download/client" class="pb-3 px-1 font-medium transition-colors text-gray-400 hover:text-white ml-auto" title="Download CLI client">Download Client</a>
</nav>
<main>
{content}
</main>
</div>
</body>
</html>
"""
def render_page(title: str, content: str, actor_id: Optional[str] = None, active_tab: str = None) -> str:
    """Render a page with nav bar and content. Used for clean URL pages.

    actor_id: The user's actor ID (@user@server) or None if not logged in.
    active_tab: which top-nav tab ("runs"/"recipes"/"media"/"storage") to highlight.
    """
    user_info = ""
    if actor_id:
        # Extract username and domain from @username@domain format
        parts = actor_id.lstrip("@").split("@")
        username = parts[0] if parts else actor_id
        domain = parts[1] if len(parts) > 1 else ""
        l2_user_url = f"https://{domain}/users/{username}" if domain else "#"
        user_info = f'''
<div class="flex items-center gap-4 text-sm text-gray-400">
Logged in as <a href="{l2_user_url}" class="text-white hover:text-blue-300">{actor_id}</a>
</div>
'''
    else:
        user_info = '''
<div class="text-sm text-gray-400">
Not logged in
</div>
'''
    # Highlight only the active nav tab
    runs_active = "border-b-2 border-blue-500 text-white" if active_tab == "runs" else "text-gray-400 hover:text-white"
    recipes_active = "border-b-2 border-blue-500 text-white" if active_tab == "recipes" else "text-gray-400 hover:text-white"
    media_active = "border-b-2 border-blue-500 text-white" if active_tab == "media" else "text-gray-400 hover:text-white"
    storage_active = "border-b-2 border-blue-500 text-white" if active_tab == "storage" else "text-gray-400 hover:text-white"
    return f"""
<!DOCTYPE html>
<html class="dark">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{title} | Art DAG L1 Server</title>
{TAILWIND_CONFIG}
</head>
<body class="bg-dark-900 text-gray-100 min-h-screen">
<div class="max-w-6xl mx-auto px-4 py-6 sm:px-6 lg:px-8">
<header class="flex flex-wrap items-center justify-between gap-4 mb-6">
<h1 class="text-2xl font-bold">
<a href="/" class="text-white hover:text-gray-200">Art DAG L1 Server</a>
</h1>
{user_info}
</header>
<nav class="flex gap-6 mb-6 border-b border-dark-500 pb-0">
<a href="/runs" class="pb-3 px-1 font-medium transition-colors {runs_active}">Runs</a>
<a href="/recipes" class="pb-3 px-1 font-medium transition-colors {recipes_active}">Recipes</a>
<a href="/media" class="pb-3 px-1 font-medium transition-colors {media_active}">Media</a>
<a href="/storage" class="pb-3 px-1 font-medium transition-colors {storage_active}">Storage</a>
<a href="/download/client" class="pb-3 px-1 font-medium transition-colors text-gray-400 hover:text-white ml-auto" title="Download CLI client">Download Client</a>
</nav>
<main>
{content}
</main>
</div>
</body>
</html>
"""
def render_ui_html(actor_id: Optional[str] = None, tab: str = "runs") -> str:
    """Render main UI HTML with optional user context.

    actor_id: The user's actor ID (@user@server) or None if not logged in.
    tab: initial top-nav tab; the page body lazy-loads its content via htmx
        from the matching /ui/... partial endpoint on load.
    """
    user_info = ""
    if actor_id:
        # Extract username and domain from @username@domain format
        parts = actor_id.lstrip("@").split("@")
        username = parts[0] if parts else actor_id
        domain = parts[1] if len(parts) > 1 else ""
        l2_user_url = f"https://{domain}/users/{username}" if domain else "#"
        user_info = f'''
<div class="flex items-center gap-4 text-sm text-gray-400">
Logged in as <a href="{l2_user_url}" class="text-white hover:text-blue-300">{actor_id}</a>
</div>
'''
    else:
        user_info = '''
<div class="text-sm text-gray-400">
Not logged in
</div>
'''
    runs_active = "border-b-2 border-blue-500 text-white" if tab == "runs" else "text-gray-400 hover:text-white"
    recipes_active = "border-b-2 border-blue-500 text-white" if tab == "recipes" else "text-gray-400 hover:text-white"
    media_active = "border-b-2 border-blue-500 text-white" if tab == "media" else "text-gray-400 hover:text-white"
    storage_active = "border-b-2 border-blue-500 text-white" if tab == "storage" else "text-gray-400 hover:text-white"
    # NOTE(review): tab == "storage" falls through to the media-list partial —
    # confirm whether a dedicated /ui/storage partial should be wired here.
    if tab == "runs":
        content_url = "/ui/runs"
    elif tab == "recipes":
        content_url = "/ui/recipes-list"
    else:
        content_url = "/ui/media-list"
    return f"""
<!DOCTYPE html>
<html class="dark">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Art DAG L1 Server</title>
{TAILWIND_CONFIG}
</head>
<body class="bg-dark-900 text-gray-100 min-h-screen">
<div class="max-w-6xl mx-auto px-4 py-6 sm:px-6 lg:px-8">
<header class="flex flex-wrap items-center justify-between gap-4 mb-6">
<h1 class="text-2xl font-bold">
<a href="/runs" class="text-white hover:text-gray-200">Art DAG L1 Server</a>
</h1>
{user_info}
</header>
<nav class="flex gap-6 mb-6 border-b border-dark-500 pb-0">
<a href="/runs" class="pb-3 px-1 font-medium transition-colors {runs_active}">Runs</a>
<a href="/recipes" class="pb-3 px-1 font-medium transition-colors {recipes_active}">Recipes</a>
<a href="/media" class="pb-3 px-1 font-medium transition-colors {media_active}">Media</a>
<a href="/storage" class="pb-3 px-1 font-medium transition-colors {storage_active}">Storage</a>
<a href="/download/client" class="pb-3 px-1 font-medium transition-colors text-gray-400 hover:text-white ml-auto" title="Download CLI client">Download Client</a>
</nav>
<div id="content" hx-get="{content_url}" hx-trigger="load" hx-swap="innerHTML">
<div class="flex items-center justify-center py-12">
<div class="animate-pulse text-gray-400">Loading...</div>
</div>
</div>
</div>
</body>
</html>
"""
# Auth - L1 doesn't handle login (user logs in at their L2 server)
# Token can be passed via URL from L2 redirect, then L1 sets its own cookie
@app.get("/auth")
async def auth_callback(auth_token: str = None):
"""
Receive auth token from L2 redirect and set local cookie.
This enables cross-subdomain auth on iOS Safari which blocks shared cookies.
"""
if not auth_token:
return RedirectResponse(url="/", status_code=302)
# Verify the token is valid
ctx = await get_verified_user_context(auth_token)
if not ctx:
return RedirectResponse(url="/", status_code=302)
# Register token for this user (for revocation by username later)
register_user_token(ctx.username, auth_token)
# Set local first-party cookie and redirect to home
response = RedirectResponse(url="/runs", status_code=302)
response.set_cookie(
key="auth_token",
value=auth_token,
httponly=True,
max_age=60 * 60 * 24 * 30, # 30 days
samesite="lax",
secure=True
)
return response
@app.get("/logout")
async def logout():
"""Logout - clear local cookie and redirect to home."""
response = RedirectResponse(url="/", status_code=302)
response.delete_cookie("auth_token")
return response
@app.post("/auth/revoke")
async def auth_revoke(credentials: HTTPAuthorizationCredentials = Depends(security)):
"""
Revoke a token. Called by L2 when user logs out.
The token to revoke is passed in the Authorization header.
"""
if not credentials:
raise HTTPException(401, "No token provided")
token = credentials.credentials
# Verify token is valid before revoking (ensures caller has the token)
ctx = get_user_context_from_token(token)
if not ctx:
raise HTTPException(401, "Invalid token")
# Revoke the token
newly_revoked = revoke_token(token)
return {"revoked": True, "newly_revoked": newly_revoked}
class RevokeUserRequest(BaseModel):
    # Request body for POST /auth/revoke-user.
    username: str  # user whose registered tokens should all be revoked
    l2_server: str  # L2 server requesting the revocation
@app.post("/auth/revoke-user")
async def auth_revoke_user(request: RevokeUserRequest):
"""
Revoke all tokens for a user. Called by L2 when user logs out.
This handles the case where L2 issued scoped tokens that differ from L2's own token.
"""
# Verify the L2 server is authorized (must be in L1's known list or match token's l2_server)
# For now, we trust any request since this only affects users already on this L1
# Revoke all tokens registered for this user
count = revoke_all_user_tokens(request.username)
return {"revoked": True, "tokens_revoked": count, "username": request.username}
@app.post("/ui/publish-run/{run_id}", response_class=HTMLResponse)
async def ui_publish_run(run_id: str, request: Request):
"""Publish a run to L2 from the web UI. Assets are named by content_hash."""
ctx = await get_user_context_from_cookie(request)
if not ctx:
return HTMLResponse('<div class="error">Not logged in</div>')
token = request.cookies.get("auth_token")
if not token:
return HTMLResponse('<div class="error">Not logged in</div>')
# Get the run to pin its output and inputs
run = load_run(run_id)
if not run:
return HTMLResponse('<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg">Run not found</div>')
# Call L2 to publish the run, including this L1's public URL
# Assets are named by their content_hash - no output_name needed
l2_server = ctx.l2_server
try:
resp = http_requests.post(
f"{l2_server}/assets/record-run",
json={"run_id": run_id, "l1_server": L1_PUBLIC_URL},
headers={"Authorization": f"Bearer {token}"},
timeout=30
)
if resp.status_code == 400:
error = resp.json().get("detail", "Bad request")
return HTMLResponse(f'<div class="error">Error: {error}</div>')
resp.raise_for_status()
result = resp.json()
# Pin the output and record L2 share
if run.output_hash and result.get("asset"):
await database.update_item_metadata(run.output_hash, ctx.actor_id, pinned=True, pin_reason="published")
# Record L2 share so UI shows published status
cache_path = get_cache_path(run.output_hash)
media_type = detect_media_type(cache_path) if cache_path else "image"
content_type = "video" if media_type == "video" else "image"
# Get activity_id for linking to the published run
activity = result.get("activity")
activity_id = activity.get("activity_id") if activity else None
await database.save_l2_share(
content_hash=run.output_hash,
actor_id=ctx.actor_id,
l2_server=l2_server,
asset_name=result["asset"]["name"],
content_type=content_type,
activity_id=activity_id
)
# Pin the inputs (for provenance)
for input_hash in run.inputs:
await database.update_item_metadata(input_hash, ctx.actor_id, pinned=True, pin_reason="input_to_published")
# If this was a recipe-based run, pin the recipe and its fixed inputs
if run.recipe.startswith("recipe:"):
config_name = run.recipe.replace("recipe:", "")
for recipe in list_all_recipes():
if recipe.name == config_name:
# Pin the recipe YAML
cache_manager.pin(recipe.recipe_id, reason="recipe_for_published")
# Pin all fixed inputs referenced by the recipe
for fixed in recipe.fixed_inputs:
if fixed.content_hash:
cache_manager.pin(fixed.content_hash, reason="fixed_input_in_published_recipe")
break
# Use HTTPS for L2 links
l2_https = l2_server.replace("http://", "https://")
asset_name = result["asset"]["name"]
short_name = asset_name[:16] + "..." if len(asset_name) > 20 else asset_name
# Link to activity (the published run) rather than just the asset
activity = result.get("activity")
activity_id = activity.get("activity_id") if activity else None
l2_link = f"{l2_https}/activities/{activity_id}" if activity_id else f"{l2_https}/assets/{asset_name}"
return HTMLResponse(f'''
<div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg mb-4">
Published to L2 as <strong>{short_name}</strong>!
<a href="{l2_link}" target="_blank" class="underline">View on L2</a>
</div>
''')
except http_requests.exceptions.HTTPError as e:
error_detail = ""
try:
error_detail = e.response.json().get("detail", str(e))
except Exception:
error_detail = str(e)
return HTMLResponse(f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg">Error: {error_detail}</div>')
except Exception as e:
return HTMLResponse(f'<div class="bg-red-900/50 border border-red-700 text-red-300 px-4 py-3 rounded-lg">Error: {e}</div>')
@app.get("/ui/runs", response_class=HTMLResponse)
async def ui_runs(request: Request):
"""HTMX partial: list of runs."""
ctx = await get_user_context_from_cookie(request)
runs = list_all_runs()
# Require login to see runs
if not ctx:
return '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
# Filter runs by user - match both plain username and ActivityPub format (@user@domain)
runs = [r for r in runs if r.username in (ctx.username, ctx.actor_id)]
if not runs:
return '<p class="text-gray-400 py-8 text-center">You have no runs yet. Use the CLI to start a run.</p>'
# Status badge colors
status_colors = {
"completed": "bg-green-600 text-white",
"running": "bg-yellow-600 text-white",
"failed": "bg-red-600 text-white",
"pending": "bg-gray-600 text-white"
}
html_parts = ['<div class="space-y-4">']
for run in runs[:20]: # Limit to 20 most recent
status_badge = status_colors.get(run.status, "bg-gray-600 text-white")
html_parts.append(f'''
<a href="/ui/detail/{run.run_id}" class="block">
<div class="bg-dark-700 rounded-lg p-4 hover:bg-dark-600 transition-colors" hx-get="/ui/run/{run.run_id}" hx-trigger="every 2s[classList.contains('status-running')]" hx-swap="outerHTML">
<div class="flex flex-wrap items-center justify-between gap-3 mb-3">
<div class="flex items-center gap-3">
<span class="px-3 py-1 bg-blue-600 text-white text-sm font-medium rounded-full">{run.recipe}</span>
<span class="text-gray-400 font-mono text-xs hidden sm:inline">{run.run_id[:16]}...</span>
</div>
<span class="px-3 py-1 {status_badge} text-xs font-medium rounded-full">{run.status}</span>
</div>
<div class="text-sm text-gray-400 mb-3">
Created: {run.created_at[:19].replace('T', ' ')}
</div>
''')
# Show input and output side by side
has_input = run.inputs and cache_manager.has_content(run.inputs[0])
has_output = run.status == "completed" and run.output_hash and cache_manager.has_content(run.output_hash)
if has_input or has_output:
html_parts.append('<div class="grid gap-4 sm:grid-cols-2">')
# Input box
if has_input:
input_hash = run.inputs[0]
input_media_type = detect_media_type(get_cache_path(input_hash))
html_parts.append(f'''
<div class="bg-dark-600 rounded-lg p-3">
<div class="text-xs text-gray-400 mb-2">Input: {input_hash[:16]}...</div>
<div class="flex justify-center">
''')
if input_media_type == "video":
input_video_src = video_src_for_request(input_hash, request)
html_parts.append(f'<video src="{input_video_src}" controls muted loop playsinline class="max-h-32 rounded"></video>')
elif input_media_type == "image":
html_parts.append(f'<img src="/cache/{input_hash}/raw" alt="input" class="max-h-32 rounded">')
html_parts.append('</div></div>')
# Output box
if has_output:
output_hash = run.output_hash
output_media_type = detect_media_type(get_cache_path(output_hash))
html_parts.append(f'''
<div class="bg-dark-600 rounded-lg p-3">
<div class="text-xs text-gray-400 mb-2">Output: {output_hash[:16]}...</div>
<div class="flex justify-center">
''')
if output_media_type == "video":
output_video_src = video_src_for_request(output_hash, request)
html_parts.append(f'<video src="{output_video_src}" controls autoplay muted loop playsinline class="max-h-32 rounded"></video>')
elif output_media_type == "image":
html_parts.append(f'<img src="/cache/{output_hash}/raw" alt="output" class="max-h-32 rounded">')
html_parts.append('</div></div>')
html_parts.append('</div>')
# Show error if failed
if run.status == "failed" and run.error:
html_parts.append(f'<div class="mt-3 text-sm text-red-400">Error: {run.error}</div>')
html_parts.append('</div></a>')
html_parts.append('</div>')
return '\n'.join(html_parts)
@app.get("/ui/media-list", response_class=HTMLResponse)
async def ui_media_list(
request: Request,
folder: Optional[str] = None,
collection: Optional[str] = None,
tag: Optional[str] = None
):
"""HTMX partial: list of media items with optional filtering."""
ctx = await get_user_context_from_cookie(request)
# Require login to see media
if not ctx:
return '<p class="text-gray-400 py-8 text-center">Not logged in.</p>'
# Get hashes owned by/associated with this user
user_hashes = await get_user_cache_hashes(ctx.username, ctx.actor_id)
# Get cache items that belong to the user (from cache_manager)
cache_items = []
seen_hashes = set() # Deduplicate by content_hash
for cached_file in cache_manager.list_all():
content_hash = cached_file.content_hash
if content_hash not in user_hashes:
continue
# Skip duplicates (same content from multiple runs)
if content_hash in seen_hashes:
continue
seen_hashes.add(content_hash)
# Skip recipes - they have their own section
if cached_file.node_type == "recipe":
continue
# Load metadata for filtering
meta = await database.load_item_metadata(content_hash, ctx.actor_id)
# Apply folder filter
if folder:
item_folder = meta.get("folder", "/")
if folder != "/" and not item_folder.startswith(folder):
continue
if folder == "/" and item_folder != "/":
continue
# Apply collection filter
if collection:
if collection not in meta.get("collections", []):
continue
# Apply tag filter
if tag:
if tag not in meta.get("tags", []):
continue
cache_items.append({
"hash": content_hash,
"size": cached_file.size_bytes,
"mtime": cached_file.created_at,
"meta": meta
})
# Sort by modification time (newest first)
cache_items.sort(key=lambda x: x["mtime"], reverse=True)
if not cache_items:
filter_msg = ""
if folder:
filter_msg = f" in folder {folder}"
elif collection:
filter_msg = f" in collection '{collection}'"
elif tag:
filter_msg = f" with tag '{tag}'"
return f'<p class="text-gray-400 py-8 text-center">No cached files{filter_msg}. Upload files or run effects to see them here.</p>'
html_parts = ['<div class="grid gap-4 sm:grid-cols-2 lg:grid-cols-3">']
for item in cache_items[:50]: # Limit to 50 items
content_hash = item["hash"]
cache_path = get_cache_path(content_hash)
media_type = detect_media_type(cache_path) if cache_path else "unknown"
# Check IPFS status
cache_item = await database.get_cache_item(content_hash)
ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None
ipfs_badge = '<span class="px-2 py-1 bg-purple-600 text-white text-xs font-medium rounded-full" title="On IPFS">IPFS</span>' if ipfs_cid else ''
# Check L2 publish status
l2_shares = item["meta"].get("l2_shares", [])
if l2_shares:
first_share = l2_shares[0]
l2_server = first_share.get("l2_server", "")
asset_name = first_share.get("asset_name", "")
asset_url = f"{l2_server}/assets/{asset_name}"
published_badge = f'<span class="px-2 py-1 bg-green-600 text-white text-xs font-medium rounded-full" title="Published to L2">L2</span>'
else:
published_badge = ''
# Format size
size = item["size"]
if size > 1024*1024:
size_str = f"{size/(1024*1024):.1f} MB"
elif size > 1024:
size_str = f"{size/1024:.1f} KB"
else:
size_str = f"{size} bytes"
html_parts.append(f'''
<a href="/cache/{content_hash}" class="block">
<div class="bg-dark-700 rounded-lg p-4 hover:bg-dark-600 transition-colors">
<div class="flex items-center justify-between gap-2 mb-3">
<div class="flex items-center gap-2">
<span class="px-2 py-1 bg-blue-600 text-white text-xs font-medium rounded-full">{media_type}</span>
{ipfs_badge}
{published_badge}
</div>
<span class="text-xs text-gray-400">{size_str}</span>
</div>
<div class="text-xs text-gray-400 font-mono mb-3 truncate">{content_hash[:24]}...</div>
<div class="flex justify-center bg-dark-600 rounded-lg p-2">
''')
if media_type == "video":
video_src = video_src_for_request(content_hash, request)
html_parts.append(f'<video src="{video_src}" controls muted loop playsinline class="max-h-32 rounded"></video>')
elif media_type == "image":
html_parts.append(f'<img src="/cache/{content_hash}/raw" alt="{content_hash[:16]}" class="max-h-32 rounded object-contain">')
else:
html_parts.append('<p class="text-gray-400 text-sm py-4">Unknown file type</p>')
html_parts.append('''
</div>
</div>
</a>
''')
html_parts.append('</div>')
return '\n'.join(html_parts)
@app.get("/ui/detail/{run_id}")
async def ui_detail_page(run_id: str):
    """Legacy detail URL: redirect to the clean /run/{run_id} location."""
    target = f"/run/{run_id}"
    return RedirectResponse(url=target, status_code=302)
@app.get("/ui/run/{run_id}", response_class=HTMLResponse)
async def ui_run_partial(run_id: str, request: Request):
    """HTMX partial: single run (for polling updates).

    Renders one run as an HTML card. While the run is "running" the card
    carries hx-trigger="every 2s" so the browser re-requests this endpoint;
    each request also reconciles the stored run record with its Celery task.
    """
    run = load_run(run_id)
    if not run:
        return '<div class="bg-dark-700 rounded-lg p-4 text-gray-400">Run not found</div>'
    # Check Celery task status if running: fold a finished task's result
    # (or failure) back into the persisted run record.
    if run.status == "running" and run.celery_task_id:
        task = celery_app.AsyncResult(run.celery_task_id)
        if task.ready():
            if task.successful():
                result = task.result
                run.status = "completed"
                run.completed_at = datetime.now(timezone.utc).isoformat()
                run.output_hash = result.get("output", {}).get("content_hash")
                # Extract effects info from provenance (first effect only)
                effects = result.get("effects", [])
                if effects:
                    run.effects_commit = effects[0].get("repo_commit")
                    run.effect_url = effects[0].get("repo_url")
                # Extract infrastructure info
                run.infrastructure = result.get("infrastructure")
                # Ingest the worker's output file into the content cache if
                # it exists locally on this host.
                output_path = Path(result.get("output", {}).get("local_path", ""))
                if output_path.exists():
                    await cache_file(output_path)
            else:
                run.status = "failed"
                run.error = str(task.result)
            save_run(run)
    # Status badge colors (Tailwind classes keyed by run status)
    status_colors = {
        "completed": "bg-green-600 text-white",
        "running": "bg-yellow-600 text-white",
        "failed": "bg-red-600 text-white",
        "pending": "bg-gray-600 text-white"
    }
    status_badge = status_colors.get(run.status, "bg-gray-600 text-white")
    # Only running cards keep polling; finished cards are static.
    poll_attr = f'hx-get="/ui/run/{run_id}" hx-trigger="every 2s" hx-swap="outerHTML"' if run.status == "running" else ""
    html = f'''
    <a href="/ui/detail/{run.run_id}" class="block">
        <div class="bg-dark-700 rounded-lg p-4 hover:bg-dark-600 transition-colors" {poll_attr}>
            <div class="flex flex-wrap items-center justify-between gap-3 mb-3">
                <div class="flex items-center gap-3">
                    <span class="px-3 py-1 bg-blue-600 text-white text-sm font-medium rounded-full">{run.recipe}</span>
                    <span class="text-gray-400 font-mono text-xs hidden sm:inline">{run.run_id[:16]}...</span>
                </div>
                <span class="px-3 py-1 {status_badge} text-xs font-medium rounded-full">{run.status}</span>
            </div>
            <div class="text-sm text-gray-400 mb-3">
                Created: {run.created_at[:19].replace('T', ' ')}
            </div>
    '''
    # Show input and output side by side (only those present in the cache)
    has_input = run.inputs and cache_manager.has_content(run.inputs[0])
    has_output = run.status == "completed" and run.output_hash and cache_manager.has_content(run.output_hash)
    if has_input or has_output:
        html += '<div class="grid gap-4 sm:grid-cols-2">'
        if has_input:
            input_hash = run.inputs[0]
            input_media_type = detect_media_type(get_cache_path(input_hash))
            html += f'''
            <div class="bg-dark-600 rounded-lg p-3">
                <div class="text-xs text-gray-400 mb-2">Input: {input_hash[:16]}...</div>
                <div class="flex justify-center">
            '''
            if input_media_type == "video":
                input_video_src = video_src_for_request(input_hash, request)
                html += f'<video src="{input_video_src}" controls muted loop playsinline class="max-h-32 rounded"></video>'
            elif input_media_type == "image":
                html += f'<img src="/cache/{input_hash}/raw" alt="input" class="max-h-32 rounded">'
            html += '</div></div>'
        if has_output:
            output_hash = run.output_hash
            output_media_type = detect_media_type(get_cache_path(output_hash))
            html += f'''
            <div class="bg-dark-600 rounded-lg p-3">
                <div class="text-xs text-gray-400 mb-2">Output: {output_hash[:16]}...</div>
                <div class="flex justify-center">
            '''
            if output_media_type == "video":
                output_video_src = video_src_for_request(output_hash, request)
                html += f'<video src="{output_video_src}" controls autoplay muted loop playsinline class="max-h-32 rounded"></video>'
            elif output_media_type == "image":
                html += f'<img src="/cache/{output_hash}/raw" alt="output" class="max-h-32 rounded">'
            html += '</div></div>'
        html += '</div>'
    if run.status == "failed" and run.error:
        html += f'<div class="mt-3 text-sm text-red-400">Error: {run.error}</div>'
    html += '</div></a>'
    return html
# ============ User Storage Configuration ============
# Display metadata for each supported storage provider type, keyed by the
# provider_type string used across the /storage endpoints.
#   name  - human-readable label shown in the UI
#   desc  - short free-tier/feature blurb for the provider card
#   color - Tailwind color family used for badges and buttons
STORAGE_PROVIDERS_INFO = {
    "pinata": {"name": "Pinata", "desc": "1GB free, IPFS pinning", "color": "blue"},
    "web3storage": {"name": "web3.storage", "desc": "IPFS + Filecoin", "color": "green"},
    "nftstorage": {"name": "NFT.Storage", "desc": "Free for NFTs", "color": "pink"},
    "infura": {"name": "Infura IPFS", "desc": "5GB free", "color": "orange"},
    "filebase": {"name": "Filebase", "desc": "5GB free, S3+IPFS", "color": "cyan"},
    "storj": {"name": "Storj", "desc": "25GB free", "color": "indigo"},
    "local": {"name": "Local Storage", "desc": "Your own disk", "color": "purple"},
}
@app.get("/storage")
async def list_storage(request: Request):
    """List user's storage providers. HTML for browsers, JSON for API.

    Content negotiation: a pure-JSON Accept header gets JSON; anything else
    (browsers) gets the rendered storage page. Unauthenticated browser
    requests are redirected to /auth instead of getting a 401.
    """
    accept = request.headers.get("accept", "")
    wants_json = "application/json" in accept and "text/html" not in accept
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        if wants_json:
            raise HTTPException(401, "Authentication required")
        return RedirectResponse(url="/auth", status_code=302)
    storages = await database.get_user_storage(ctx.actor_id)
    # Add usage stats to each storage
    for storage in storages:
        usage = await database.get_storage_usage(storage["id"])
        storage["used_bytes"] = usage["used_bytes"]
        storage["pin_count"] = usage["pin_count"]
        storage["donated_gb"] = storage["capacity_gb"] // 2
        # Mask sensitive config keys for display
        if storage.get("config"):
            config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"])
            masked = {}
            for k, v in config.items():
                if "key" in k.lower() or "token" in k.lower() or "secret" in k.lower():
                    # Coerce to str before slicing: the length check already
                    # used str(v), but slicing the raw value raised TypeError
                    # for non-string config values (e.g. numeric IDs).
                    s = str(v)
                    masked[k] = s[:4] + "..." + s[-4:] if len(s) > 8 else "****"
                else:
                    masked[k] = v
            storage["config_display"] = masked
    if wants_json:
        return {"storages": storages}
    return await ui_storage_page(ctx.username, storages, request)
@app.post("/storage")
async def add_storage(req: AddStorageRequest, ctx: UserContext = Depends(get_required_user_context)):
    """Add a storage provider.

    Validates the provider type, verifies connectivity with the supplied
    config, then persists the provider for the authenticated user.
    """
    known_types = ["pinata", "web3storage", "nftstorage", "infura", "filebase", "storj", "local"]
    if req.provider_type not in known_types:
        raise HTTPException(400, f"Invalid provider type: {req.provider_type}")
    # Verify the connection works before anything is written.
    candidate = storage_providers.create_provider(
        req.provider_type,
        {**req.config, "capacity_gb": req.capacity_gb},
    )
    if not candidate:
        raise HTTPException(400, "Failed to create provider with given config")
    ok, detail = await candidate.test_connection()
    if not ok:
        raise HTTPException(400, f"Provider connection failed: {detail}")
    # Persist the validated provider.
    display_name = req.provider_name or f"{req.provider_type}-{ctx.username}"
    new_id = await database.add_user_storage(
        actor_id=ctx.actor_id,
        provider_type=req.provider_type,
        provider_name=display_name,
        config=req.config,
        capacity_gb=req.capacity_gb,
    )
    if not new_id:
        raise HTTPException(500, "Failed to save storage provider")
    return {"id": new_id, "message": f"Storage provider added: {display_name}"}
@app.post("/storage/add")
async def add_storage_form(
    request: Request,
    provider_type: str = Form(...),
    provider_name: Optional[str] = Form(None),
    description: Optional[str] = Form(None),
    capacity_gb: int = Form(5),
    api_key: Optional[str] = Form(None),
    secret_key: Optional[str] = Form(None),
    api_token: Optional[str] = Form(None),
    project_id: Optional[str] = Form(None),
    project_secret: Optional[str] = Form(None),
    access_key: Optional[str] = Form(None),
    bucket: Optional[str] = Form(None),
    path: Optional[str] = Form(None),
):
    """Add a storage provider via HTML form (cookie auth).

    Each provider type requires its own credential fields; a missing field
    returns an inline HTML error for HTMX to display.
    """
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Not authenticated</div>', status_code=401)
    # Per-type credential fields and the error shown when any is missing.
    field_specs = {
        "pinata": ({"api_key": api_key, "secret_key": secret_key},
                   "Pinata requires API Key and Secret Key"),
        "web3storage": ({"api_token": api_token},
                        "web3.storage requires API Token"),
        "nftstorage": ({"api_token": api_token},
                       "NFT.Storage requires API Token"),
        "infura": ({"project_id": project_id, "project_secret": project_secret},
                   "Infura requires Project ID and Project Secret"),
        "filebase": ({"access_key": access_key, "secret_key": secret_key, "bucket": bucket},
                     "Filebase requires Access Key, Secret Key, and Bucket"),
        "storj": ({"access_key": access_key, "secret_key": secret_key, "bucket": bucket},
                  "Storj requires Access Key, Secret Key, and Bucket"),
        "local": ({"path": path},
                  "Local storage requires a path"),
    }
    if provider_type not in field_specs:
        return HTMLResponse(f'<div class="text-red-400">Invalid provider type: {provider_type}</div>')
    config, missing_msg = field_specs[provider_type]
    if not all(config.values()):
        return HTMLResponse(f'<div class="text-red-400">{missing_msg}</div>')
    # Verify the connection works before anything is written.
    provider = storage_providers.create_provider(provider_type, {**config, "capacity_gb": capacity_gb})
    if not provider:
        return HTMLResponse('<div class="text-red-400">Failed to create provider with given config</div>')
    ok, message = await provider.test_connection()
    if not ok:
        return HTMLResponse(f'<div class="text-red-400">Provider connection failed: {message}</div>')
    # Save to database; auto-number the name when none was supplied.
    name = provider_name or f"{provider_type}-{ctx.username}-{len(await database.get_user_storage_by_type(ctx.actor_id, provider_type)) + 1}"
    storage_id = await database.add_user_storage(
        actor_id=ctx.actor_id,
        provider_type=provider_type,
        provider_name=name,
        config=config,
        capacity_gb=capacity_gb,
        description=description,
    )
    if not storage_id:
        return HTMLResponse('<div class="text-red-400">Failed to save storage provider</div>')
    return HTMLResponse(f'''
    <div class="text-green-400 mb-2">Storage provider "{name}" added successfully!</div>
    <script>setTimeout(() => window.location.href = '/storage/type/{provider_type}', 1500);</script>
    ''')
@app.get("/storage/{storage_id}")
async def get_storage(storage_id: int, ctx: UserContext = Depends(get_required_user_context)):
    """Get a specific storage provider.

    404 if it does not exist, 403 if owned by a different user; the record
    is augmented with live usage stats before being returned.
    """
    record = await database.get_storage_by_id(storage_id)
    if not record:
        raise HTTPException(404, "Storage provider not found")
    if record["actor_id"] != ctx.actor_id:
        raise HTTPException(403, "Not authorized")
    stats = await database.get_storage_usage(storage_id)
    record["used_bytes"] = stats["used_bytes"]
    record["pin_count"] = stats["pin_count"]
    record["donated_gb"] = record["capacity_gb"] // 2
    return record
@app.patch("/storage/{storage_id}")
async def update_storage(storage_id: int, req: UpdateStorageRequest, ctx: UserContext = Depends(get_required_user_context)):
    """Update a storage provider.

    When the config changes, the merged (existing + new) config is
    connection-tested before the update is written.
    """
    record = await database.get_storage_by_id(storage_id)
    if not record:
        raise HTTPException(404, "Storage provider not found")
    if record["actor_id"] != ctx.actor_id:
        raise HTTPException(403, "Not authorized")
    # If updating config, test the merged connection first.
    if req.config:
        raw = record["config"]
        current = raw if isinstance(raw, dict) else json.loads(raw)
        merged = {**current, **req.config}
        provider = storage_providers.create_provider(
            record["provider_type"],
            {**merged, "capacity_gb": req.capacity_gb or record["capacity_gb"]},
        )
        if provider:
            ok, detail = await provider.test_connection()
            if not ok:
                raise HTTPException(400, f"Provider connection failed: {detail}")
    updated = await database.update_user_storage(
        storage_id,
        config=req.config,
        capacity_gb=req.capacity_gb,
        is_active=req.is_active,
    )
    if not updated:
        raise HTTPException(500, "Failed to update storage provider")
    return {"message": "Storage provider updated"}
@app.delete("/storage/{storage_id}")
async def remove_storage(storage_id: int, request: Request):
    """Remove a storage provider (cookie auth)."""
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        raise HTTPException(401, "Not authenticated")
    record = await database.get_storage_by_id(storage_id)
    if not record:
        raise HTTPException(404, "Storage provider not found")
    if record["actor_id"] != ctx.actor_id:
        raise HTTPException(403, "Not authorized")
    if not await database.remove_user_storage(storage_id):
        raise HTTPException(500, "Failed to remove storage provider")
    # HTMX callers swap the card away with an empty body; API callers get JSON.
    if wants_html(request):
        return HTMLResponse("")
    return {"message": "Storage provider removed"}
@app.post("/storage/{storage_id}/test")
async def test_storage(storage_id: int, request: Request):
    """Test storage provider connectivity.

    HTMX callers get a colored <span> fragment; API callers get JSON on
    success or an HTTPException on failure.
    """
    as_html = wants_html(request)
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        if as_html:
            return HTMLResponse('<span class="text-red-400">Not authenticated</span>', status_code=401)
        raise HTTPException(401, "Not authenticated")
    record = await database.get_storage_by_id(storage_id)
    if not record:
        if as_html:
            return HTMLResponse('<span class="text-red-400">Storage not found</span>', status_code=404)
        raise HTTPException(404, "Storage provider not found")
    if record["actor_id"] != ctx.actor_id:
        if as_html:
            return HTMLResponse('<span class="text-red-400">Not authorized</span>', status_code=403)
        raise HTTPException(403, "Not authorized")
    raw = record["config"]
    config = raw if isinstance(raw, dict) else json.loads(raw)
    provider = storage_providers.create_provider(
        record["provider_type"],
        {**config, "capacity_gb": record["capacity_gb"]},
    )
    if not provider:
        if as_html:
            # NOTE: the HTML variant of this error has always returned 200.
            return HTMLResponse('<span class="text-red-400">Failed to create provider</span>')
        raise HTTPException(500, "Failed to create provider")
    ok, message = await provider.test_connection()
    if as_html:
        color = "text-green-400" if ok else "text-red-400"
        return HTMLResponse(f'<span class="{color}">{message}</span>')
    return {"success": ok, "message": message}
@app.get("/storage/type/{provider_type}")
async def storage_type_page(provider_type: str, request: Request):
    """Page for managing storage configs of a specific type.

    Redirects anonymous browsers to /auth; 404 for unknown provider types.
    Each config is augmented with usage stats and a masked config view.
    """
    ctx = await get_user_context_from_cookie(request)
    if not ctx:
        return RedirectResponse(url="/auth", status_code=302)
    if provider_type not in STORAGE_PROVIDERS_INFO:
        raise HTTPException(404, "Invalid provider type")
    storages = await database.get_user_storage_by_type(ctx.actor_id, provider_type)
    # Add usage stats and mask config
    for storage in storages:
        usage = await database.get_storage_usage(storage["id"])
        storage["used_bytes"] = usage["used_bytes"]
        storage["pin_count"] = usage["pin_count"]
        # Mask sensitive config keys
        if storage.get("config"):
            config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"])
            masked = {}
            for k, v in config.items():
                if "key" in k.lower() or "token" in k.lower() or "secret" in k.lower():
                    # Coerce to str before slicing: the length check already
                    # used str(v), but slicing the raw value raised TypeError
                    # for non-string config values (e.g. numeric IDs).
                    s = str(v)
                    masked[k] = s[:4] + "..." + s[-4:] if len(s) > 8 else "****"
                else:
                    masked[k] = v
            storage["config_display"] = masked
    info = STORAGE_PROVIDERS_INFO[provider_type]
    return await ui_storage_type_page(ctx.username, provider_type, info, storages, request)
async def ui_storage_page(username: str, storages: list, request: Request) -> HTMLResponse:
    """Render the main storage management page.

    Shows one card per supported provider type (with a count badge for types
    the user has configured) plus aggregate capacity/usage/pin totals.
    Assumes each entry in `storages` already carries used_bytes/pin_count
    (populated by the /storage handler before calling this).
    """
    # Count by provider type
    type_counts = {}
    for s in storages:
        ptype = s["provider_type"]
        type_counts[ptype] = type_counts.get(ptype, 0) + 1
    # Build provider type cards
    cards = ""
    for ptype, info in STORAGE_PROVIDERS_INFO.items():
        count = type_counts.get(ptype, 0)
        # Badge only rendered when the user has at least one config of this type
        count_badge = f'<span class="ml-2 px-2 py-0.5 bg-{info["color"]}-600 text-white text-xs rounded-full">{count}</span>' if count > 0 else ""
        cards += f'''
        <a href="/storage/type/{ptype}" class="block p-4 bg-gray-800 rounded-lg hover:bg-gray-700 transition">
            <div class="flex items-center justify-between">
                <div>
                    <h3 class="text-lg font-medium text-{info["color"]}-400">{info["name"]}</h3>
                    <p class="text-sm text-gray-400">{info["desc"]}</p>
                </div>
                {count_badge}
            </div>
        </a>
        '''
    # Total stats
    total_capacity = sum(s["capacity_gb"] for s in storages)
    total_used = sum(s["used_bytes"] for s in storages)
    total_pins = sum(s["pin_count"] for s in storages)
    html = f'''
    <!DOCTYPE html>
    <html class="dark">
    <head>
        <title>Storage - Art DAG L1</title>
        <script src="https://cdn.tailwindcss.com"></script>
        <script src="https://unpkg.com/htmx.org@1.9.10"></script>
    </head>
    <body class="bg-gray-900 text-white min-h-screen">
        <nav class="bg-gray-800 border-b border-gray-700 px-6 py-3">
            <div class="flex items-center justify-between">
                <a href="/" class="text-xl font-bold text-blue-400">Art DAG L1</a>
                <div class="flex items-center gap-4">
                    <a href="/media" class="text-gray-300 hover:text-white">Media</a>
                    <a href="/ui/runs" class="text-gray-300 hover:text-white">Runs</a>
                    <a href="/storage" class="text-blue-400">Storage</a>
                    <span class="text-gray-400">@{username}</span>
                </div>
            </div>
        </nav>
        <main class="max-w-4xl mx-auto px-6 py-8">
            <h1 class="text-2xl font-bold mb-6">Storage Providers</h1>
            <div class="mb-8 p-4 bg-gray-800 rounded-lg">
                <div class="flex items-center justify-between">
                    <div>
                        <h2 class="text-lg font-medium">Total Storage</h2>
                        <p class="text-gray-400">{len(storages)} providers configured</p>
                    </div>
                    <div class="text-right">
                        <p class="text-2xl font-bold">{total_used / (1024**3):.1f} / {total_capacity} GB</p>
                        <p class="text-sm text-gray-400">{total_pins} items pinned</p>
                    </div>
                </div>
            </div>
            <div class="grid grid-cols-1 md:grid-cols-2 gap-4">
                {cards}
            </div>
        </main>
    </body>
    </html>
    '''
    return HTMLResponse(html)
async def ui_storage_type_page(username: str, provider_type: str, info: dict, storages: list, request: Request) -> HTMLResponse:
    """Render storage management page for a specific provider type.

    Lists the user's existing configs for this type (with Test/Remove HTMX
    buttons) and an add-new form whose credential fields depend on the
    provider type. Assumes each entry in `storages` already carries
    used_bytes/pin_count/config_display (populated by the route handler).
    """
    # Build storage list
    storage_rows = ""
    for s in storages:
        used_gb = s["used_bytes"] / (1024**3)
        status_class = "bg-green-600" if s.get("is_active", True) else "bg-gray-600"
        status_text = "Active" if s.get("is_active", True) else "Inactive"
        # Masked config key/value lines prepared by the caller
        config_display = ""
        if s.get("config_display"):
            for k, v in s["config_display"].items():
                config_display += f'<span class="text-gray-500">{k}:</span> <span class="text-gray-300">{v}</span><br>'
        storage_rows += f'''
        <div id="storage-{s["id"]}" class="p-4 bg-gray-800 rounded-lg mb-4">
            <div class="flex items-center justify-between mb-2">
                <h3 class="text-lg font-medium">{s.get("provider_name", "Unnamed")}</h3>
                <div class="flex items-center gap-2">
                    <span class="px-2 py-1 {status_class} text-white text-xs rounded-full">{status_text}</span>
                    <button hx-post="/storage/{s["id"]}/test" hx-target="#test-result-{s["id"]}" hx-swap="innerHTML"
                            class="px-2 py-1 bg-blue-600 hover:bg-blue-700 text-white text-xs rounded">Test</button>
                    <button hx-delete="/storage/{s["id"]}" hx-confirm="Remove this storage config?"
                            hx-target="#storage-{s["id"]}" hx-swap="outerHTML"
                            class="px-2 py-1 bg-red-600 hover:bg-red-700 text-white text-xs rounded">Remove</button>
                </div>
            </div>
            <div class="text-sm text-gray-400 mb-2">
                {used_gb:.2f} / {s["capacity_gb"]} GB used ({s["pin_count"]} items)
            </div>
            <div class="text-xs font-mono">{config_display}</div>
            <div id="test-result-{s["id"]}" class="mt-2 text-sm"></div>
        </div>
        '''
    if not storages:
        storage_rows = f'<p class="text-gray-400 text-center py-8">No {info["name"]} configs yet. Add one below.</p>'
    # Build form fields based on provider type (mirrors the validation in
    # the /storage/add handler)
    form_fields = ""
    if provider_type == "pinata":
        form_fields = '''
        <input type="text" name="api_key" placeholder="API Key" required
               class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
        <input type="password" name="secret_key" placeholder="Secret Key" required
               class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
        '''
    elif provider_type in ["web3storage", "nftstorage"]:
        form_fields = '''
        <input type="password" name="api_token" placeholder="API Token" required
               class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
        '''
    elif provider_type == "infura":
        form_fields = '''
        <input type="text" name="project_id" placeholder="Project ID" required
               class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
        <input type="password" name="project_secret" placeholder="Project Secret" required
               class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
        '''
    elif provider_type in ["filebase", "storj"]:
        form_fields = '''
        <input type="text" name="access_key" placeholder="Access Key" required
               class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
        <input type="password" name="secret_key" placeholder="Secret Key" required
               class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
        <input type="text" name="bucket" placeholder="Bucket Name" required
               class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
        '''
    elif provider_type == "local":
        form_fields = '''
        <input type="text" name="path" placeholder="/path/to/storage" required
               class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
        '''
    html = f'''
    <!DOCTYPE html>
    <html class="dark">
    <head>
        <title>{info["name"]} Storage - Art DAG L1</title>
        <script src="https://cdn.tailwindcss.com"></script>
        <script src="https://unpkg.com/htmx.org@1.9.10"></script>
    </head>
    <body class="bg-gray-900 text-white min-h-screen">
        <nav class="bg-gray-800 border-b border-gray-700 px-6 py-3">
            <div class="flex items-center justify-between">
                <a href="/" class="text-xl font-bold text-blue-400">Art DAG L1</a>
                <div class="flex items-center gap-4">
                    <a href="/media" class="text-gray-300 hover:text-white">Media</a>
                    <a href="/ui/runs" class="text-gray-300 hover:text-white">Runs</a>
                    <a href="/storage" class="text-gray-300 hover:text-white">Storage</a>
                    <span class="text-gray-400">@{username}</span>
                </div>
            </div>
        </nav>
        <main class="max-w-3xl mx-auto px-6 py-8">
            <div class="flex items-center gap-4 mb-6">
                <a href="/storage" class="text-gray-400 hover:text-white">&larr; Back</a>
                <h1 class="text-2xl font-bold text-{info["color"]}-400">{info["name"]}</h1>
            </div>
            <div class="mb-8">
                {storage_rows}
            </div>
            <div class="p-4 bg-gray-800 rounded-lg">
                <h2 class="text-lg font-medium mb-4">Add New {info["name"]} Config</h2>
                <form hx-post="/storage/add" hx-target="#add-result" hx-swap="innerHTML" class="space-y-3">
                    <input type="hidden" name="provider_type" value="{provider_type}">
                    <input type="text" name="provider_name" placeholder="Name (optional)"
                           class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
                    {form_fields}
                    <input type="number" name="capacity_gb" value="5" min="1"
                           class="w-full p-2 bg-gray-700 border border-gray-600 rounded">
                    <button type="submit" class="w-full py-2 bg-{info["color"]}-600 hover:bg-{info["color"]}-700 text-white rounded">
                        Add Storage Provider
                    </button>
                </form>
                <div id="add-result" class="mt-3"></div>
            </div>
        </main>
    </body>
    </html>
    '''
    return HTMLResponse(html)
# ============ Client Download ============
# Pre-built CLI client tarball expected to sit next to this source file.
CLIENT_TARBALL = Path(__file__).parent / "artdag-client.tar.gz"
@app.get("/download/client")
async def download_client():
    """Download the Art DAG CLI client.

    Streams the pre-built tarball from disk; 404 if the package has not
    been built/deployed alongside the server.
    """
    if not CLIENT_TARBALL.exists():
        raise HTTPException(404, "Client package not found")
    return FileResponse(
        CLIENT_TARBALL,
        media_type="application/gzip",
        filename="artdag-client.tar.gz"
    )
# ============================================================================
# Documentation Routes
# ============================================================================
# Documentation paths:
#   DOCS_DIR        - directory containing this server's README
#   COMMON_DOCS_DIR - sibling "common" package's docs directory
#   DOCS_MAP        - URL slug (/docs/{slug}) -> markdown file on disk
DOCS_DIR = Path(__file__).parent
COMMON_DOCS_DIR = Path(__file__).parent.parent / "common"
DOCS_MAP = {
    "l1": DOCS_DIR / "README.md",
    "common": COMMON_DOCS_DIR / "README.md",
}
def render_markdown(content: str) -> str:
    """Convert markdown to HTML with basic styling."""
    import re
    # Escape raw HTML first so markdown source cannot inject markup.
    text = content.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")

    # Fenced code blocks (``` ... ```)
    def _fence(m):
        language = m.group(1) or ""
        body = m.group(2)
        return f'<pre class="bg-gray-800 p-4 rounded-lg overflow-x-auto text-sm"><code class="language-{language}">{body}</code></pre>'
    text = re.sub(r'```(\w*)\n(.*?)```', _fence, text, flags=re.DOTALL)

    # Inline code
    text = re.sub(r'`([^`]+)`', r'<code class="bg-gray-700 px-1 rounded text-sm">\1</code>', text)

    # Headers — most specific first so '###' is not consumed by '##'.
    for pattern, repl in (
        (r'^### (.+)$', r'<h3 class="text-lg font-semibold text-white mt-6 mb-2">\1</h3>'),
        (r'^## (.+)$', r'<h2 class="text-xl font-bold text-white mt-8 mb-3 border-b border-gray-700 pb-2">\1</h2>'),
        (r'^# (.+)$', r'<h1 class="text-2xl font-bold text-white mb-4">\1</h1>'),
    ):
        text = re.sub(pattern, repl, text, flags=re.MULTILINE)

    # Emphasis (bold before italic so ** pairs win)
    text = re.sub(r'\*\*([^*]+)\*\*', r'<strong class="font-semibold">\1</strong>', text)
    text = re.sub(r'\*([^*]+)\*', r'<em>\1</em>', text)

    # Links
    text = re.sub(r'\[([^\]]+)\]\(([^)]+)\)', r'<a href="\2" class="text-blue-400 hover:underline">\1</a>', text)

    # Pipe tables: first row = header, second row = separator, rest = body.
    def _table(m):
        rows = m.group(0).strip().split('\n')
        if len(rows) < 2:
            return m.group(0)
        head = rows[0]
        body_rows = rows[2:] if len(rows) > 2 else []
        head_cells = [c.strip() for c in head.split('|')[1:-1]]
        head_html = ''.join(f'<th class="px-4 py-2 text-left border-b border-gray-600">{c}</th>' for c in head_cells)
        body_html = ''
        for r in body_rows:
            cells = [c.strip() for c in r.split('|')[1:-1]]
            cell_html = ''.join(f'<td class="px-4 py-2 border-b border-gray-700">{c}</td>' for c in cells)
            body_html += f'<tr class="hover:bg-gray-700">{cell_html}</tr>'
        return f'<table class="w-full text-sm mb-4"><thead><tr class="bg-gray-700">{head_html}</tr></thead><tbody>{body_html}</tbody></table>'
    text = re.sub(r'(\|[^\n]+\|\n)+', _table, text)

    # Bullet points, then wrap consecutive <li> runs in a <ul>.
    text = re.sub(r'^- (.+)$', r'<li class="ml-4 list-disc">\1</li>', text, flags=re.MULTILINE)
    text = re.sub(r'(<li[^>]*>.*</li>\n?)+', r'<ul class="mb-4">\g<0></ul>', text)

    # Wrap remaining plain lines (not starting with '<') in paragraphs.
    out = []
    open_p = False
    for raw in text.split('\n'):
        line = raw.strip()
        if not line:
            if open_p:
                out.append('</p>')
                open_p = False
            out.append('')
        elif line.startswith('<'):
            if open_p:
                out.append('</p>')
                open_p = False
            out.append(raw)
        else:
            if not open_p:
                out.append('<p class="mb-4 text-gray-300">')
                open_p = True
            out.append(raw)
    if open_p:
        out.append('</p>')
    return '\n'.join(out)
@app.get("/docs", response_class=HTMLResponse)
async def docs_index(request: Request):
    """Documentation index page.

    Static HTML listing the markdown docs served by /docs/{doc_name}. The
    page is identical for anonymous and signed-in visitors.
    """
    # The lookup result was previously bound to an unused local (`user`);
    # the call is kept in case it has auth-related side effects — TODO
    # confirm and drop the call entirely if it is pure.
    await get_optional_user(request)
    html = f"""<!DOCTYPE html>
    <html class="dark">
    <head>
        <title>Documentation - Art DAG L1</title>
        <script src="https://cdn.tailwindcss.com"></script>
        <script>tailwind.config = {{ darkMode: 'class' }}</script>
    </head>
    <body class="bg-gray-900 text-gray-100 min-h-screen">
        <nav class="bg-gray-800 border-b border-gray-700 px-6 py-4">
            <div class="flex items-center justify-between">
                <a href="/" class="text-xl font-bold text-white">Art DAG L1</a>
                <div class="flex items-center gap-4">
                    <a href="/runs" class="text-gray-300 hover:text-white">Runs</a>
                    <a href="/recipes" class="text-gray-300 hover:text-white">Recipes</a>
                    <a href="/media" class="text-gray-300 hover:text-white">Media</a>
                    <a href="/docs" class="text-white font-semibold">Docs</a>
                </div>
            </div>
        </nav>
        <main class="max-w-4xl mx-auto p-8">
            <h1 class="text-3xl font-bold mb-8">Documentation</h1>
            <div class="grid gap-4">
                <a href="/docs/l1" class="block p-6 bg-gray-800 rounded-lg hover:bg-gray-700 transition">
                    <h2 class="text-xl font-semibold text-white mb-2">L1 Server (Celery)</h2>
                    <p class="text-gray-400">Distributed rendering server with Celery workers, IPFS integration, and 3-phase execution.</p>
                </a>
                <a href="/docs/common" class="block p-6 bg-gray-800 rounded-lg hover:bg-gray-700 transition">
                    <h2 class="text-xl font-semibold text-white mb-2">Common Library</h2>
                    <p class="text-gray-400">Shared components: Jinja2 templates, middleware, content negotiation, and utilities.</p>
                </a>
            </div>
        </main>
    </body>
    </html>"""
    return HTMLResponse(html)
@app.get("/docs/{doc_name}", response_class=HTMLResponse)
async def docs_page(doc_name: str, request: Request):
    """Render a markdown documentation file as HTML.

    Looks up the slug in DOCS_MAP; 404 for unknown slugs or missing files.
    """
    if doc_name not in DOCS_MAP:
        raise HTTPException(404, f"Documentation '{doc_name}' not found")
    doc_path = DOCS_MAP[doc_name]
    if not doc_path.exists():
        raise HTTPException(404, f"Documentation file not found: {doc_path}")
    # Read explicitly as UTF-8: the default encoding is platform-dependent
    # and can mangle or reject non-ASCII content in the docs.
    content = doc_path.read_text(encoding="utf-8")
    html_content = render_markdown(content)
    html = f"""<!DOCTYPE html>
    <html class="dark">
    <head>
        <title>{doc_name.upper()} - Art DAG Documentation</title>
        <script src="https://cdn.tailwindcss.com"></script>
        <script>tailwind.config = {{ darkMode: 'class' }}</script>
    </head>
    <body class="bg-gray-900 text-gray-100 min-h-screen">
        <nav class="bg-gray-800 border-b border-gray-700 px-6 py-4">
            <div class="flex items-center justify-between">
                <a href="/" class="text-xl font-bold text-white">Art DAG L1</a>
                <div class="flex items-center gap-4">
                    <a href="/runs" class="text-gray-300 hover:text-white">Runs</a>
                    <a href="/recipes" class="text-gray-300 hover:text-white">Recipes</a>
                    <a href="/media" class="text-gray-300 hover:text-white">Media</a>
                    <a href="/docs" class="text-white font-semibold">Docs</a>
                </div>
            </div>
        </nav>
        <main class="max-w-4xl mx-auto p-8">
            <div class="mb-4">
                <a href="/docs" class="text-blue-400 hover:underline">&larr; Back to Documentation</a>
            </div>
            <article class="prose prose-invert max-w-none">
                {html_content}
            </article>
        </main>
    </body>
    </html>"""
    return HTMLResponse(html)
# ============================================================================
# 3-Phase Execution API (Analyze → Plan → Execute)
# ============================================================================
class RecipeRunRequest(BaseModel):
    """Request to run a recipe with the 3-phase execution model."""
    recipe_yaml: str  # Recipe YAML content
    input_hashes: dict  # Mapping from input name to content hash
    features: Optional[list[str]] = None  # Features to extract (default: beats, energy)
class PlanRequest(BaseModel):
    """Request to generate an execution plan (analyze + plan, no execute)."""
    recipe_yaml: str  # Recipe YAML content
    input_hashes: dict  # Mapping from input name to content hash
    features: Optional[list[str]] = None  # Features to extract from inputs
class ExecutePlanRequest(BaseModel):
    """Request to execute a pre-generated plan (phase 3 only)."""
    plan_json: str  # JSON-serialized ExecutionPlan
@app.post("/api/plan")
async def generate_plan_endpoint(
    request: PlanRequest,
    ctx: UserContext = Depends(get_required_user_context)
):
    """
    Generate an execution plan without executing it.
    Phase 1 (Analyze) + Phase 2 (Plan) of the 3-phase model.
    Returns the plan with cache status for each step.
    """
    from tasks.orchestrate import generate_plan
    try:
        # Submit to Celery
        task = generate_plan.delay(
            recipe_yaml=request.recipe_yaml,
            input_hashes=request.input_hashes,
            features=request.features,
        )
        # Wait for result (plan generation is usually fast). task.get() is a
        # blocking call, so run it in a worker thread — calling it directly
        # inside this async endpoint would stall the whole event loop for up
        # to 60 seconds.
        result = await asyncio.to_thread(task.get, timeout=60)
        return {
            "status": result.get("status"),
            "recipe": result.get("recipe"),
            "plan_id": result.get("plan_id"),
            "total_steps": result.get("total_steps"),
            "cached_steps": result.get("cached_steps"),
            "pending_steps": result.get("pending_steps"),
            "steps": result.get("steps"),
        }
    except Exception as e:
        logger.error(f"Plan generation failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/api/execute")
async def execute_plan_endpoint(
    request: ExecutePlanRequest,
    ctx: UserContext = Depends(get_required_user_context)
):
    """
    Execute a pre-generated execution plan.

    Phase 3 (Execute) of the 3-phase model: the plan is handed off to
    Celery for parallel execution and this endpoint returns immediately
    with the new run_id.
    """
    from tasks.orchestrate import run_plan
    run_id = str(uuid.uuid4())
    try:
        # Fire-and-forget: run_plan executes asynchronously on the workers.
        submitted = run_plan.delay(plan_json=request.plan_json, run_id=run_id)
        return {
            "status": "submitted",
            "run_id": run_id,
            "celery_task_id": submitted.id,
        }
    except Exception as e:
        logger.error(f"Plan execution failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/api/run-recipe")
async def run_recipe_endpoint(
    request: RecipeRunRequest,
    ctx: UserContext = Depends(get_required_user_context)
):
    """
    Run a complete recipe through all 3 phases.
    1. Analyze: Extract features from inputs
    2. Plan: Generate execution plan with cache IDs
    3. Execute: Run steps with parallel execution
    Returns immediately with run_id. Poll /api/run/{run_id} for status.
    Set IPFS_PRIMARY=true to use IPFS-primary mode (everything on IPFS).

    Raises:
        HTTPException(400): an input is not available on IPFS (IPFS-primary mode).
        HTTPException(500): recipe registration or task submission failed.
    """
    # Compute run_id from inputs and recipe. run_id is content-addressable:
    # the same inputs + recipe text always map to the same run_id.
    try:
        recipe_data = yaml.safe_load(request.recipe_yaml)
        recipe_name = recipe_data.get("name", "unknown")
    except Exception:
        # Best-effort: a malformed recipe still gets a run_id here;
        # real validation happens in the worker.
        recipe_name = "unknown"
    run_id = compute_run_id(
        list(request.input_hashes.values()),
        recipe_name,
        hashlib.sha3_256(request.recipe_yaml.encode()).hexdigest()
    )
    # Fast path: an identical run already completed and its output is still cached.
    cached = await database.get_run_cache(run_id)
    if cached:
        output_hash = cached.get("output_hash")
        if cache_manager.has_content(output_hash):
            return {
                "status": "completed",
                "run_id": run_id,
                "output_hash": output_hash,
                "output_ipfs_cid": cache_manager.get_ipfs_cid(output_hash),
                "cached": True,
            }
    # Submit to Celery
    try:
        if IPFS_PRIMARY:
            # IPFS-primary mode: register recipe and get input CIDs
            from tasks.orchestrate_cid import run_recipe_cid
            import ipfs_client
            # Register recipe on IPFS
            recipe_cid = ipfs_client.add_bytes(request.recipe_yaml.encode('utf-8'))
            if not recipe_cid:
                raise HTTPException(status_code=500, detail="Failed to register recipe on IPFS")
            # Get input CIDs from cache manager
            input_cids = {}
            for name, content_hash in request.input_hashes.items():
                cid = cache_manager.get_ipfs_cid(content_hash)
                if cid:
                    input_cids[name] = cid
                else:
                    raise HTTPException(
                        status_code=400,
                        detail=f"Input '{name}' not found on IPFS. Upload first."
                    )
            task = run_recipe_cid.delay(
                recipe_cid=recipe_cid,
                input_cids=input_cids,
                input_hashes=request.input_hashes,
                features=request.features,
            )
        else:
            # Standard mode: local cache + IPFS backup
            from tasks.orchestrate import run_recipe
            task = run_recipe.delay(
                recipe_yaml=request.recipe_yaml,
                input_hashes=request.input_hashes,
                features=request.features,
                run_id=run_id,
            )
        # Store run status in Redis so /api/run/{run_id} can report progress.
        run_data = {
            "run_id": run_id,
            "status": "pending",
            "recipe": recipe_name,
            "inputs": list(request.input_hashes.values()),
            "celery_task_id": task.id,
            "created_at": datetime.now(timezone.utc).isoformat(),
            "username": ctx.actor_id,
        }
        redis_client.setex(
            f"{RUNS_KEY_PREFIX}{run_id}",
            86400,  # 24 hour expiry
            json.dumps(run_data)
        )
        return {
            "status": "submitted",
            "run_id": run_id,
            "celery_task_id": task.id,
            "recipe": recipe_name,
        }
    except HTTPException:
        # Bug fix: the deliberate HTTP errors above (400 for a missing input
        # CID, 500 for IPFS registration failure) were previously swallowed
        # by the generic handler below and re-wrapped as opaque 500s.
        # Re-raise them so clients see the intended status codes.
        raise
    except Exception as e:
        logger.error(f"Recipe run failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/run/{run_id}")
async def get_run_api(run_id: str, ctx: UserContext = Depends(get_required_user_context)):
    """
    Get status of a recipe execution run.

    Checks Redis first (live/pending runs), refreshing the stored record
    from the Celery result backend once the task has finished; falls back
    to the database run cache for completed runs whose Redis entry has
    expired; 404s when neither store knows the run.
    """
    redis_key = f"{RUNS_KEY_PREFIX}{run_id}"
    raw = redis_client.get(redis_key)
    if raw:
        record = json.loads(raw)
        task_id = record.get("celery_task_id")
        # Only pending records need a round-trip to the Celery backend.
        if record.get("status") == "pending" and task_id:
            from celery.result import AsyncResult
            outcome = AsyncResult(task_id)
            if not outcome.ready():
                # Still running -- surface the raw Celery state to the caller.
                record["celery_status"] = outcome.status
            elif outcome.successful():
                payload = outcome.get()
                record["status"] = payload.get("status", "completed")
                record["output_hash"] = payload.get("output_cache_id")
                record["output_ipfs_cid"] = payload.get("output_ipfs_cid")
                record["total_steps"] = payload.get("total_steps")
                record["cached"] = payload.get("cached")
                record["executed"] = payload.get("executed")
                # Persist the refreshed record so later polls skip the backend.
                redis_client.setex(redis_key, 86400, json.dumps(record))
            else:
                record["status"] = "failed"
                record["error"] = str(outcome.result)
        return record
    # Redis entry expired (or never existed) -- try the durable run cache.
    cached = await database.get_run_cache(run_id)
    if cached:
        return {
            "run_id": run_id,
            "status": "completed",
            "output_hash": cached.get("output_hash"),
            "cached": True,
        }
    raise HTTPException(status_code=404, detail="Run not found")
# Entry point for running the server directly.
if __name__ == "__main__":
    import uvicorn
    # Workers enabled - cache indexes shared via Redis.
    # The app is given as an import string ("server:app") rather than the
    # object itself, which uvicorn requires when workers > 1.
    uvicorn.run("server:app", host="0.0.0.0", port=8100, workers=4)