Import L1 (celery) as l1/

This commit is contained in:
giles
2026-02-24 23:07:19 +00:00
225 changed files with 57298 additions and 0 deletions

View File

@@ -0,0 +1,23 @@
"""
L1 Server Routers.
Each router handles a specific domain of functionality.
"""
from . import auth
from . import storage
from . import api
from . import recipes
from . import cache
from . import runs
from . import home
__all__ = [
"auth",
"storage",
"api",
"recipes",
"cache",
"runs",
"home",
]

257
l1/app/routers/api.py Normal file
View File

@@ -0,0 +1,257 @@
"""
3-phase API routes for L1 server.
Provides the plan/execute/run-recipe endpoints for programmatic access.
"""
import hashlib
import json
import logging
import uuid
from datetime import datetime, timezone
from typing import Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from artdag_common.middleware.auth import UserContext
from ..dependencies import require_auth, get_redis_client, get_cache_manager
router = APIRouter()
logger = logging.getLogger(__name__)
# Redis key prefix
RUNS_KEY_PREFIX = "artdag:run:"
class PlanRequest(BaseModel):
    """Request body for POST /plan: the recipe source plus resolved input hashes."""
    # S-expression source of the recipe to analyze and plan.
    recipe_sexp: str
    # Maps input names to their content hashes.
    input_hashes: Dict[str, str]
class ExecutePlanRequest(BaseModel):
    """Request body for POST /execute: a previously generated plan."""
    # JSON-serialized plan produced by the /plan endpoint.
    plan_json: str
    # Optional caller-supplied run id; a random UUID is minted when absent.
    run_id: Optional[str] = None
class RecipeRunRequest(BaseModel):
    """Request body for POST /run-recipe: run all three phases in one call."""
    # S-expression source of the recipe to execute.
    recipe_sexp: str
    # Maps input names to their content hashes.
    input_hashes: Dict[str, str]
def compute_run_id(input_hashes: List[str], recipe: str, recipe_hash: Optional[str] = None) -> str:
    """Compute a deterministic run_id from inputs and recipe.

    Args:
        input_hashes: Content hashes of the recipe inputs. They are sorted,
            so the id does not depend on input ordering.
        recipe: Recipe name; used only when ``recipe_hash`` is not given.
        recipe_hash: Content hash of the recipe source, if available.
            (Annotation fixed: the default is ``None``, so this is Optional.)

    Returns:
        64-char hex SHA3-256 digest of the canonicalized run description.
    """
    data = {
        "inputs": sorted(input_hashes),
        "recipe": recipe_hash or f"effect:{recipe}",
        "version": "1",  # bump to invalidate all previously computed run ids
    }
    # Canonical JSON (sorted keys, no whitespace) keeps the digest stable
    # across dict orderings and serializer defaults.
    json_str = json.dumps(data, sort_keys=True, separators=(",", ":"))
    return hashlib.sha3_256(json_str.encode()).hexdigest()
@router.post("/plan")
async def generate_plan_endpoint(
    request: PlanRequest,
    ctx: UserContext = Depends(require_auth),
):
    """
    Generate an execution plan without executing it.

    Phase 1 (Analyze) + Phase 2 (Plan) of the 3-phase model.
    Returns the plan with cache status for each step.
    """
    from tasks.orchestrate import generate_plan

    try:
        pending = generate_plan.delay(
            recipe_sexp=request.recipe_sexp,
            input_hashes=request.input_hashes,
        )
        # Plan generation is normally fast, so block on the Celery result.
        outcome = pending.get(timeout=60)
        # Project only the fields the API contract exposes.
        return {
            field: outcome.get(field)
            for field in (
                "status",
                "recipe",
                "plan_id",
                "total_steps",
                "cached_steps",
                "pending_steps",
                "steps",
            )
        }
    except Exception as e:
        logger.error(f"Plan generation failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/execute")
async def execute_plan_endpoint(
    request: ExecutePlanRequest,
    ctx: UserContext = Depends(require_auth),
):
    """
    Execute a pre-generated execution plan.

    Phase 3 (Execute) of the 3-phase model.
    Submits the plan to Celery for parallel execution.
    """
    from tasks.orchestrate import run_plan

    # Honour a caller-supplied run id; otherwise mint a random one.
    run_id = request.run_id if request.run_id else str(uuid.uuid4())
    try:
        submitted = run_plan.delay(plan_json=request.plan_json, run_id=run_id)
        return {
            "status": "submitted",
            "run_id": run_id,
            "celery_task_id": submitted.id,
        }
    except Exception as e:
        logger.error(f"Plan execution failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/run-recipe")
async def run_recipe_endpoint(
    request: RecipeRunRequest,
    ctx: UserContext = Depends(require_auth),
):
    """
    Run a complete recipe through all 3 phases.
    1. Analyze: Extract features from inputs
    2. Plan: Generate execution plan with cache IDs
    3. Execute: Run steps with parallel execution
    Returns immediately with run_id. Poll /api/run/{run_id} for status.
    Short-circuits with the cached result when an identical run already completed.
    """
    from tasks.orchestrate import run_recipe
    from artdag.sexp import compile_string
    import database
    redis = get_redis_client()
    cache = get_cache_manager()
    # Parse recipe name from S-expression; the name is informational only,
    # so any parse failure degrades to "unknown" rather than erroring.
    try:
        compiled = compile_string(request.recipe_sexp)
        recipe_name = compiled.name or "unknown"
    except Exception:
        recipe_name = "unknown"
    # Compute deterministic run_id: same recipe source + same inputs
    # always map to the same id, which enables the cache check below.
    run_id = compute_run_id(
        list(request.input_hashes.values()),
        recipe_name,
        hashlib.sha3_256(request.recipe_sexp.encode()).hexdigest()
    )
    # Check if already completed: only trust the DB record when the output
    # bytes are still present in the content cache.
    cached = await database.get_run_cache(run_id)
    if cached:
        output_cid = cached.get("output_cid")
        if cache.has_content(output_cid):
            return {
                "status": "completed",
                "run_id": run_id,
                "output_cid": output_cid,
                "output_ipfs_cid": cache.get_ipfs_cid(output_cid),
                "cached": True,
            }
    # Submit to Celery and record a pending status for polling.
    try:
        task = run_recipe.delay(
            recipe_sexp=request.recipe_sexp,
            input_hashes=request.input_hashes,
            run_id=run_id,
        )
        # Store run status in Redis with a 24h TTL.
        # NOTE(review): setex is not awaited — assumes a synchronous Redis
        # client; confirm against get_redis_client().
        run_data = {
            "run_id": run_id,
            "status": "pending",
            "recipe": recipe_name,
            "inputs": list(request.input_hashes.values()),
            "celery_task_id": task.id,
            "created_at": datetime.now(timezone.utc).isoformat(),
            "username": ctx.actor_id,
        }
        redis.setex(
            f"{RUNS_KEY_PREFIX}{run_id}",
            86400,  # 24 hours
            json.dumps(run_data)
        )
        return {
            "status": "submitted",
            "run_id": run_id,
            "celery_task_id": task.id,
            "recipe": recipe_name,
        }
    except Exception as e:
        logger.error(f"Recipe run failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/run/{run_id}")
async def get_run_status(
    run_id: str,
    ctx: UserContext = Depends(require_auth),
):
    """Get status of a recipe execution run.

    Lookup order: Redis run record (refreshed from Celery when still
    pending), then the database run cache, then 404.
    """
    import database
    from celery.result import AsyncResult
    redis = get_redis_client()
    # Check Redis for run status (written by /run-recipe on submission).
    run_data = redis.get(f"{RUNS_KEY_PREFIX}{run_id}")
    if run_data:
        data = json.loads(run_data)
        # If pending, check Celery task status and fold the result into
        # the cached record.
        # NOTE(review): AsyncResult is built without an explicit app —
        # assumes the default Celery app is configured; confirm.
        if data.get("status") == "pending" and data.get("celery_task_id"):
            result = AsyncResult(data["celery_task_id"])
            if result.ready():
                if result.successful():
                    task_result = result.get()
                    data["status"] = task_result.get("status", "completed")
                    data["output_cid"] = task_result.get("output_cache_id")
                    data["output_ipfs_cid"] = task_result.get("output_ipfs_cid")
                    data["total_steps"] = task_result.get("total_steps")
                    data["cached"] = task_result.get("cached")
                    data["executed"] = task_result.get("executed")
                    # Update Redis so later polls skip the Celery lookup;
                    # TTL is reset to 24h.
                    redis.setex(
                        f"{RUNS_KEY_PREFIX}{run_id}",
                        86400,
                        json.dumps(data)
                    )
                else:
                    # Task raised — surface the failure to the caller.
                    # NOTE(review): failure state is not written back to
                    # Redis, so it is recomputed on every poll.
                    data["status"] = "failed"
                    data["error"] = str(result.result)
            else:
                # Still running: expose the raw Celery state alongside ours.
                data["celery_status"] = result.status
        return data
    # Redis entry expired — fall back to the durable database cache.
    cached = await database.get_run_cache(run_id)
    if cached:
        return {
            "run_id": run_id,
            "status": "completed",
            "output_cid": cached.get("output_cid"),
            "cached": True,
        }
    raise HTTPException(status_code=404, detail="Run not found")

165
l1/app/routers/auth.py Normal file
View File

@@ -0,0 +1,165 @@
"""
Authentication routes — OAuth2 authorization code flow via account.rose-ash.com.
GET /auth/login — redirect to account OAuth authorize
GET /auth/callback — exchange code for user info, set session cookie
GET /auth/logout — clear cookie, redirect through account SSO logout
"""
import secrets
import time
import httpx
from fastapi import APIRouter, Request
from fastapi.responses import RedirectResponse
from itsdangerous import URLSafeSerializer
from artdag_common.middleware.auth import UserContext, set_auth_cookie, clear_auth_cookie
from ..config import settings
router = APIRouter()
_signer = None
def _get_signer() -> URLSafeSerializer:
    """Return the module-wide OAuth-state serializer, creating it lazily.

    The signer is keyed on the app secret with a dedicated salt so state
    cookies cannot be confused with other signed values.
    """
    global _signer
    if _signer is not None:
        return _signer
    _signer = URLSafeSerializer(settings.secret_key, salt="oauth-state")
    return _signer
@router.get("/login")
async def login(request: Request):
    """Store state + next in signed cookie, redirect to account OAuth authorize.

    Fix: query-string values are now percent-encoded via urlencode —
    redirect_uri in particular contains ':' and '/' and must be encoded
    in an OAuth authorization request (RFC 6749 §4.1.1).
    """
    from urllib.parse import urlencode

    next_url = request.query_params.get("next", "/")
    prompt = request.query_params.get("prompt", "")
    state = secrets.token_urlsafe(32)
    signer = _get_signer()
    # The signed cookie lets /callback validate state and recover "next".
    state_payload = signer.dumps({"state": state, "next": next_url, "prompt": prompt})
    device_id = getattr(request.state, "device_id", "")
    params = {
        "client_id": settings.oauth_client_id,
        "redirect_uri": settings.oauth_redirect_uri,
        "device_id": device_id,
        "state": state,
    }
    if prompt:
        params["prompt"] = prompt
    authorize_url = f"{settings.oauth_authorize_url}?{urlencode(params)}"
    response = RedirectResponse(url=authorize_url, status_code=302)
    response.set_cookie(
        key="oauth_state",
        value=state_payload,
        max_age=600,  # 10 minutes — long enough for one round trip
        httponly=True,
        samesite="lax",
        secure=True,
    )
    return response
@router.get("/callback")
async def callback(request: Request):
    """Validate state, exchange code via token endpoint, set session cookie.

    All failure paths redirect to "/" (or the saved "next" URL for the
    prompt=none case) rather than surfacing an error page.
    """
    code = request.query_params.get("code", "")
    state = request.query_params.get("state", "")
    error = request.query_params.get("error", "")
    account_did = request.query_params.get("account_did", "")
    # Adopt account's device ID as our own (one identity across all apps)
    if account_did:
        request.state.device_id = account_did
        request.state._new_device_id = True  # device_id middleware will set cookie
    # Recover state from signed cookie; a tampered/expired cookie simply
    # yields an empty payload and the request falls through to the guards.
    state_cookie = request.cookies.get("oauth_state", "")
    signer = _get_signer()
    try:
        payload = signer.loads(state_cookie) if state_cookie else {}
    except Exception:
        payload = {}
    next_url = payload.get("next", "/")
    # Handle prompt=none rejection (user not logged in on account)
    if error == "login_required":
        response = RedirectResponse(url=next_url, status_code=302)
        response.delete_cookie("oauth_state")
        # Set cooldown cookie — don't re-check for 5 minutes
        response.set_cookie(
            key="pnone_at",
            value=str(time.time()),
            max_age=300,
            httponly=True,
            samesite="lax",
            secure=True,
        )
        # Set device cookie if adopted
        if account_did:
            response.set_cookie(
                key="artdag_did",
                value=account_did,
                max_age=30 * 24 * 3600,  # 30 days
                httponly=True,
                samesite="lax",
                secure=True,
            )
        return response
    # Normal callback — validate state + code; the state must match the
    # value we signed into the cookie at /login (CSRF protection).
    if not state_cookie or not code or not state:
        return RedirectResponse(url="/", status_code=302)
    if payload.get("state") != state:
        return RedirectResponse(url="/", status_code=302)
    # Exchange code for user info via account's token endpoint
    async with httpx.AsyncClient(timeout=10) as client:
        try:
            resp = await client.post(
                settings.oauth_token_url,
                json={
                    "code": code,
                    "client_id": settings.oauth_client_id,
                    "redirect_uri": settings.oauth_redirect_uri,
                },
            )
        except httpx.HTTPError:
            return RedirectResponse(url="/", status_code=302)
    if resp.status_code != 200:
        return RedirectResponse(url="/", status_code=302)
    data = resp.json()
    if "error" in data:
        return RedirectResponse(url="/", status_code=302)
    # Map OAuth response to artdag UserContext
    # Note: account token endpoint returns user.email as "username"
    # NOTE(review): display_name is read but currently unused.
    display_name = data.get("display_name", "")
    username = data.get("username", "")
    email = username  # OAuth response "username" is the user's email
    actor_id = f"@{username}"
    user = UserContext(username=username, actor_id=actor_id, email=email)
    response = RedirectResponse(url=next_url, status_code=302)
    set_auth_cookie(response, user)
    # One-shot cookies: the state has been consumed and the prompt=none
    # cooldown no longer applies once logged in.
    response.delete_cookie("oauth_state")
    response.delete_cookie("pnone_at")
    return response
@router.get("/logout")
async def logout():
    """Clear session cookie, redirect through account SSO logout."""
    response = RedirectResponse(url=settings.oauth_logout_url, status_code=302)
    clear_auth_cookie(response)
    # Also drop any in-flight OAuth state and the prompt=none cooldown marker.
    for stale_cookie in ("oauth_state", "pnone_at"):
        response.delete_cookie(stale_cookie)
    return response

515
l1/app/routers/cache.py Normal file
View File

@@ -0,0 +1,515 @@
"""
Cache and media routes for L1 server.
Handles content retrieval, metadata, media preview, and publishing.
"""
import logging
from pathlib import Path
from typing import Optional, Dict, Any
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
from fastapi.responses import HTMLResponse, FileResponse
from pydantic import BaseModel
from artdag_common import render
from artdag_common.middleware import wants_html, wants_json
from artdag_common.middleware.auth import UserContext
from ..dependencies import (
require_auth, get_templates, get_redis_client,
get_cache_manager, get_current_user
)
from ..services.auth_service import AuthService
from ..services.cache_service import CacheService
router = APIRouter()
logger = logging.getLogger(__name__)
class UpdateMetadataRequest(BaseModel):
    """Partial-update body for PATCH /{cid}/meta; omitted fields are untouched."""
    title: Optional[str] = None
    description: Optional[str] = None
    tags: Optional[list] = None
    # Free-form user-defined metadata.
    custom: Optional[Dict[str, Any]] = None
def get_cache_service():
    """Build a CacheService wired to the database module and the shared cache manager."""
    import database

    manager = get_cache_manager()
    return CacheService(database, manager)
@router.get("/{cid}")
async def get_cached(
    cid: str,
    request: Request,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Get cached content by hash. Content negotiation: HTML for browsers, JSON for APIs.

    Anonymous requests get JSON metadata or a redirect to /auth for HTML;
    HTML detail pages additionally require an access check.
    """
    ctx = await get_current_user(request)
    # Pass actor_id to get friendly name and user-specific metadata
    actor_id = ctx.actor_id if ctx else None
    cache_item = await cache_service.get_cache_item(cid, actor_id=actor_id)
    if not cache_item:
        if wants_html(request):
            templates = get_templates(request)
            return render(templates, "cache/not_found.html", request,
                cid=cid,
                user=ctx,
                active_tab="media",
            )
        raise HTTPException(404, f"Content {cid} not in cache")
    # JSON response
    # NOTE(review): the JSON path returns metadata without a login or
    # access check — confirm this is intended for anonymous API clients.
    if wants_json(request):
        return cache_item
    # HTML response — requires a logged-in user.
    if not ctx:
        from fastapi.responses import RedirectResponse
        return RedirectResponse(url="/auth", status_code=302)
    # Check access
    has_access = await cache_service.check_access(cid, ctx.actor_id, ctx.username)
    if not has_access:
        raise HTTPException(403, "Access denied")
    from ..dependencies import get_nav_counts
    nav_counts = await get_nav_counts(ctx.actor_id)
    templates = get_templates(request)
    return render(templates, "cache/detail.html", request,
        cache=cache_item,
        user=ctx,
        nav_counts=nav_counts,
        active_tab="media",
    )
@router.get("/{cid}/raw")
async def get_cached_raw(
    cid: str,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Stream the raw bytes of a cached item as a file download."""
    file_path, media_type, filename = await cache_service.get_raw_file(cid)
    if file_path:
        return FileResponse(file_path, media_type=media_type, filename=filename)
    raise HTTPException(404, f"Content {cid} not in cache")
@router.get("/{cid}/mp4")
async def get_cached_mp4(
    cid: str,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Serve cached content as MP4 (transcodes MKV on first request)."""
    mp4_path, error = await cache_service.get_as_mp4(cid)
    if error:
        # A non-video item is a bad request; anything else is missing content.
        if "not a video" in error:
            raise HTTPException(400, error)
        raise HTTPException(404, error)
    return FileResponse(mp4_path, media_type="video/mp4")
@router.get("/{cid}/meta")
async def get_metadata(
    cid: str,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Return the metadata record for a cached item."""
    meta = await cache_service.get_metadata(cid, ctx.actor_id)
    if meta is not None:
        return meta
    raise HTTPException(404, "Content not found")
@router.patch("/{cid}/meta")
async def update_metadata(
    cid: str,
    req: UpdateMetadataRequest,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Apply a partial metadata update to a cached item (JSON API)."""
    _ok, error = await cache_service.update_metadata(
        cid=cid,
        actor_id=ctx.actor_id,
        title=req.title,
        description=req.description,
        tags=req.tags,
        custom=req.custom,
    )
    if error:
        raise HTTPException(400, error)
    return {"updated": True}
@router.post("/{cid}/publish")
async def publish_content(
    cid: str,
    request: Request,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Publish content to L2 and IPFS.

    Returns an HTMX-friendly HTML snippet for browser requests and JSON
    for API clients, on both success and failure.
    """
    # The session auth token is forwarded so the L2 call acts as this user.
    ipfs_cid, error = await cache_service.publish_to_l2(
        cid=cid,
        actor_id=ctx.actor_id,
        l2_server=ctx.l2_server,
        auth_token=request.cookies.get("auth_token"),
    )
    if error:
        if wants_html(request):
            return HTMLResponse(f'<span class="text-red-400">{error}</span>')
        raise HTTPException(400, error)
    if wants_html(request):
        # Show a truncated CID in the inline success message.
        return HTMLResponse(f'<span class="text-green-400">Published: {ipfs_cid[:16]}...</span>')
    return {"ipfs_cid": ipfs_cid, "published": True}
@router.delete("/{cid}")
async def delete_content(
    cid: str,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Remove an item from the cache."""
    _deleted, error = await cache_service.delete_content(cid, ctx.actor_id)
    if not error:
        return {"deleted": True}
    # Policy refusals ("Cannot ...", pinned content) are client errors;
    # any other failure means the item was not found.
    status = 400 if ("Cannot" in error or "pinned" in error) else 404
    raise HTTPException(status, error)
@router.post("/import")
async def import_from_ipfs(
    ipfs_cid: str,
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Pull content from IPFS into the local cache."""
    local_cid, failure = await cache_service.import_from_ipfs(ipfs_cid, ctx.actor_id)
    if failure:
        raise HTTPException(400, failure)
    return {"cid": local_cid, "imported": True}
@router.post("/upload/chunk")
async def upload_chunk(
    request: Request,
    chunk: UploadFile = File(...),
    upload_id: str = Form(...),
    chunk_index: int = Form(...),
    total_chunks: int = Form(...),
    filename: str = Form(...),
    display_name: Optional[str] = Form(None),
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Upload a file chunk. Assembles file when all chunks received.

    Fix: upload_id and filename are client-controlled but were used to
    build filesystem paths verbatim — a path-traversal vector (e.g.
    upload_id="../../x"). Both are reduced to bare names first.
    """
    import tempfile

    safe_upload_id = Path(upload_id).name
    safe_filename = Path(filename).name
    if not safe_upload_id or not safe_filename:
        raise HTTPException(400, "Invalid upload_id or filename")
    # Create temp dir for this upload
    chunk_dir = Path(tempfile.gettempdir()) / "uploads" / safe_upload_id
    chunk_dir.mkdir(parents=True, exist_ok=True)
    # Save this chunk; zero-padded names make glob order == index order.
    chunk_path = chunk_dir / f"chunk_{chunk_index:05d}"
    chunk_path.write_bytes(await chunk.read())
    # Keep replying "partial" until the full set has arrived.
    received = len(list(chunk_dir.glob("chunk_*")))
    if received < total_chunks:
        return {"status": "partial", "received": received, "total": total_chunks}
    # All chunks received — assemble in index order, deleting chunks as we go.
    final_path = chunk_dir / safe_filename
    with open(final_path, 'wb') as f:
        for i in range(total_chunks):
            cp = chunk_dir / f"chunk_{i:05d}"
            f.write(cp.read_bytes())
            cp.unlink()  # Clean up chunk
    content = final_path.read_bytes()
    final_path.unlink()
    chunk_dir.rmdir()
    # Hand the assembled bytes to the normal upload flow.
    cid, ipfs_cid, error = await cache_service.upload_content(
        content=content,
        filename=safe_filename,
        actor_id=ctx.actor_id,
    )
    if error:
        raise HTTPException(400, error)
    # Assign friendly name against the preferred (IPFS, else local) id.
    final_cid = ipfs_cid or cid
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly_entry = await naming.assign_name(
        cid=final_cid,
        actor_id=ctx.actor_id,
        item_type="media",
        display_name=display_name,
        filename=safe_filename,
    )
    return {
        "status": "complete",
        "cid": final_cid,
        "friendly_name": friendly_entry["friendly_name"],
        "filename": safe_filename,
        "size": len(content),
        "uploaded": True,
    }
@router.post("/upload")
async def upload_content(
    file: UploadFile = File(...),
    display_name: Optional[str] = Form(None),
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Upload content to cache and IPFS.

    Args:
        file: The file to upload
        display_name: Optional custom name for the media (used as friendly name)
    """
    payload = await file.read()
    cid, ipfs_cid, error = await cache_service.upload_content(
        content=payload,
        filename=file.filename,
        actor_id=ctx.actor_id,
    )
    if error:
        raise HTTPException(400, error)
    # Prefer the IPFS CID as the canonical id; fall back to the local hash.
    final_cid = ipfs_cid if ipfs_cid else cid
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly_entry = await naming.assign_name(
        cid=final_cid,
        actor_id=ctx.actor_id,
        item_type="media",
        display_name=display_name,  # Use custom name if provided
        filename=file.filename,
    )
    return {
        "cid": final_cid,
        "content_hash": cid,  # Legacy, for backwards compatibility
        "friendly_name": friendly_entry["friendly_name"],
        "filename": file.filename,
        "size": len(payload),
        "uploaded": True,
    }
# Media listing endpoint
@router.get("")
async def list_media(
request: Request,
offset: int = 0,
limit: int = 24,
media_type: Optional[str] = None,
cache_service: CacheService = Depends(get_cache_service),
ctx: UserContext = Depends(require_auth),
):
"""List all media in cache."""
items = await cache_service.list_media(
actor_id=ctx.actor_id,
username=ctx.username,
offset=offset,
limit=limit,
media_type=media_type,
)
has_more = len(items) >= limit
if wants_json(request):
return {"items": items, "offset": offset, "limit": limit, "has_more": has_more}
from ..dependencies import get_nav_counts
nav_counts = await get_nav_counts(ctx.actor_id)
templates = get_templates(request)
return render(templates, "cache/media_list.html", request,
items=items,
user=ctx,
nav_counts=nav_counts,
offset=offset,
limit=limit,
has_more=has_more,
active_tab="media",
)
# HTMX metadata form
@router.get("/{cid}/meta-form", response_class=HTMLResponse)
async def get_metadata_form(
    cid: str,
    request: Request,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Get metadata editing form (HTMX).

    Fix: stored title/description (and the cid path segment) are now
    HTML-escaped before interpolation — previously a stored value
    containing markup could inject script into the page (XSS).
    """
    import html

    ctx = await get_current_user(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Login required</div>')
    meta = await cache_service.get_metadata(cid, ctx.actor_id)
    # html.escape (quote=True by default) is attribute-safe.
    title = html.escape(str(meta.get('title', ''))) if meta else ''
    description = html.escape(str(meta.get('description', ''))) if meta else ''
    safe_cid = html.escape(cid)
    return HTMLResponse(f'''
    <h2 class="text-lg font-semibold mb-4">Metadata</h2>
    <form hx-patch="/cache/{safe_cid}/meta"
          hx-target="#metadata-section"
          hx-swap="innerHTML"
          class="space-y-4">
        <div>
            <label class="block text-gray-400 text-sm mb-1">Title</label>
            <input type="text" name="title" value="{title}"
                   class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
        </div>
        <div>
            <label class="block text-gray-400 text-sm mb-1">Description</label>
            <textarea name="description" rows="3"
                      class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white"
            >{description}</textarea>
        </div>
        <button type="submit"
                class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
            Save Metadata
        </button>
    </form>
    ''')
@router.patch("/{cid}/meta", response_class=HTMLResponse)
async def update_metadata_htmx(
    cid: str,
    request: Request,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Update metadata (HTMX form handler)."""
    ctx = await get_current_user(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Login required</div>')
    form = await request.form()
    _ok, error = await cache_service.update_metadata(
        cid=cid,
        actor_id=ctx.actor_id,
        title=form.get("title"),
        description=form.get("description"),
    )
    if error:
        return HTMLResponse(f'<div class="text-red-400">{error}</div>')
    # Brief confirmation, then reload so the detail view shows fresh data.
    return HTMLResponse('''
    <div class="text-green-400 mb-4">Metadata saved!</div>
    <script>setTimeout(() => location.reload(), 1000);</script>
    ''')
# Friendly name editing
@router.get("/{cid}/name-form", response_class=HTMLResponse)
async def get_name_form(
    cid: str,
    request: Request,
    cache_service: CacheService = Depends(get_cache_service),
):
    """Get friendly name editing form (HTMX).

    Fix: the stored base_name (and the cid path segment) are now
    HTML-escaped before being interpolated into the value attribute —
    previously a crafted name could break out of the attribute (XSS).
    """
    import html

    ctx = await get_current_user(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Login required</div>')
    # Get current friendly name
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    entry = await naming.get_by_cid(ctx.actor_id, cid)
    current_name = entry.get("base_name", "") if entry else ""
    safe_name = html.escape(str(current_name))
    safe_cid = html.escape(cid)
    return HTMLResponse(f'''
    <form hx-post="/cache/{safe_cid}/name"
          hx-target="#friendly-name-section"
          hx-swap="innerHTML"
          class="space-y-3">
        <div>
            <label class="block text-gray-400 text-sm mb-1">Friendly Name</label>
            <input type="text" name="display_name" value="{safe_name}"
                   placeholder="e.g., my-background-video"
                   class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
            <p class="text-gray-500 text-xs mt-1">A name to reference this media in recipes</p>
        </div>
        <div class="flex space-x-2">
            <button type="submit"
                    class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
                Save
            </button>
            <button type="button"
                    onclick="location.reload()"
                    class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
                Cancel
            </button>
        </div>
    </form>
    ''')
@router.post("/{cid}/name", response_class=HTMLResponse)
async def update_friendly_name(
    cid: str,
    request: Request,
):
    """Update friendly name (HTMX form handler)."""
    ctx = await get_current_user(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Login required</div>')
    form = await request.form()
    display_name = form.get("display_name", "").strip()
    if not display_name:
        return HTMLResponse('<div class="text-red-400">Name cannot be empty</div>')
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    try:
        # Re-assigning a name for an existing cid updates its entry.
        await naming.assign_name(
            cid=cid,
            actor_id=ctx.actor_id,
            item_type="media",
            display_name=display_name,
        )
    except Exception as e:
        return HTMLResponse(f'<div class="text-red-400">Error: {e}</div>')
    return HTMLResponse('''
        <div class="text-green-400 mb-2">Name updated!</div>
        <script>setTimeout(() => location.reload(), 1000);</script>
        ''')

415
l1/app/routers/effects.py Normal file
View File

@@ -0,0 +1,415 @@
"""
Effects routes for L1 server.
Handles effect upload, listing, and metadata.
Effects are S-expression files stored in IPFS like all other content-addressed data.
"""
import json
import logging
import re
import time
from pathlib import Path
from typing import Optional
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
from fastapi.responses import HTMLResponse, PlainTextResponse
from artdag_common import render
from artdag_common.middleware import wants_html, wants_json
from artdag_common.middleware.auth import UserContext
from ..dependencies import (
require_auth, get_templates, get_redis_client,
get_cache_manager,
)
from ..services.auth_service import AuthService
import ipfs_client
router = APIRouter()
logger = logging.getLogger(__name__)
def get_effects_dir() -> Path:
    """Return the local effects cache directory, creating it on first use."""
    root = Path(get_cache_manager().cache_dir) / "_effects"
    root.mkdir(parents=True, exist_ok=True)
    return root
def parse_effect_metadata(source: str) -> dict:
    """
    Parse effect metadata from S-expression source code.

    Reads ``;; @key value`` comment headers that precede the first form,
    then falls back to names found in ``(defeffect "...")`` /
    ``(effect "...")`` or the first ``(define name ...)``.
    """
    meta = {
        "name": "",
        "version": "1.0.0",
        "author": "",
        "temporal": False,
        "description": "",
        "params": [],
    }
    # "@key " prefixes that map directly onto string metadata fields.
    simple_tags = (
        ("@effect ", "name"),
        ("@name ", "name"),
        ("@version ", "version"),
        ("@author ", "author"),
        ("@description ", "description"),
    )
    for raw_line in source.split("\n"):
        text = raw_line.strip()
        if text.startswith("("):
            # The first real form ends the metadata header.
            break
        if text and not text.startswith(";"):
            # Stray non-comment line — ignore and keep scanning.
            continue
        body = text.lstrip(";").strip()
        if body.startswith("@temporal"):
            # Bare "@temporal" means true; otherwise parse the value.
            flag = body[9:].strip().lower() if len(body) > 9 else "true"
            meta["temporal"] = flag in ("true", "yes", "1", "")
        elif body.startswith("@param "):
            # Format: @param name type [description]
            pieces = body[7:].split(None, 2)
            if len(pieces) >= 2:
                entry = {"name": pieces[0], "type": pieces[1]}
                if len(pieces) > 2:
                    entry["description"] = pieces[2]
                meta["params"].append(entry)
        else:
            for prefix, field in simple_tags:
                if body.startswith(prefix):
                    meta[field] = body[len(prefix):].strip()
                    break
    # Fall back to (defeffect "name" ...) or (effect "name" ...) forms.
    if not meta["name"]:
        form_match = re.search(r'\((defeffect|effect)\s+"([^"]+)"', source)
        if form_match:
            meta["name"] = form_match.group(2)
    # Last resort: the first (define name ...) form.
    if not meta["name"]:
        define_match = re.search(r'\(define\s+(\w+)', source)
        if define_match:
            meta["name"] = define_match.group(1)
    return meta
@router.post("/upload")
async def upload_effect(
    file: UploadFile = File(...),
    display_name: Optional[str] = Form(None),
    ctx: UserContext = Depends(require_auth),
):
    """
    Upload an S-expression effect to IPFS.
    Parses metadata from comment headers.
    Returns IPFS CID for use in recipes.
    Args:
        file: The .sexp effect file
        display_name: Optional custom friendly name for the effect
    """
    content = await file.read()
    try:
        source = content.decode("utf-8")
    except UnicodeDecodeError:
        raise HTTPException(400, "Effect must be valid UTF-8 text")
    # Parse metadata from sexp source; a parse failure is non-fatal and
    # degrades to using the filename as the effect name.
    try:
        meta = parse_effect_metadata(source)
    except Exception as e:
        logger.warning(f"Failed to parse effect metadata: {e}")
        meta = {"name": file.filename or "unknown"}
    if not meta.get("name"):
        meta["name"] = Path(file.filename).stem if file.filename else "unknown"
    # Store effect source in IPFS — the CID is the effect's identity.
    cid = ipfs_client.add_bytes(content)
    if not cid:
        raise HTTPException(500, "Failed to store effect in IPFS")
    # Also keep local cache for fast worker access
    effects_dir = get_effects_dir()
    effect_dir = effects_dir / cid
    effect_dir.mkdir(parents=True, exist_ok=True)
    (effect_dir / "effect.sexp").write_text(source, encoding="utf-8")
    # Store metadata (locally and in IPFS)
    full_meta = {
        "cid": cid,
        "meta": meta,
        "uploader": ctx.actor_id,
        "uploaded_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),  # UTC ISO-8601
        "filename": file.filename,
    }
    (effect_dir / "metadata.json").write_text(json.dumps(full_meta, indent=2))
    # Also store metadata in IPFS for discoverability
    meta_cid = ipfs_client.add_json(full_meta)
    # Track ownership in item_types
    import database
    await database.save_item_metadata(
        cid=cid,
        actor_id=ctx.actor_id,
        item_type="effect",
        filename=file.filename,
    )
    # Assign friendly name (use custom display_name if provided, else from metadata)
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly_entry = await naming.assign_name(
        cid=cid,
        actor_id=ctx.actor_id,
        item_type="effect",
        display_name=display_name or meta.get("name"),
        filename=file.filename,
    )
    logger.info(f"Uploaded effect '{meta.get('name')}' cid={cid} friendly_name='{friendly_entry['friendly_name']}' by {ctx.actor_id}")
    return {
        "cid": cid,
        "metadata_cid": meta_cid,
        "name": meta.get("name"),
        "friendly_name": friendly_entry["friendly_name"],
        "version": meta.get("version"),
        "temporal": meta.get("temporal", False),
        "params": meta.get("params", []),
        "uploaded": True,
    }
@router.get("/{cid}")
async def get_effect(
    cid: str,
    request: Request,
    ctx: UserContext = Depends(require_auth),
):
    """Get effect metadata by CID.

    Serves from the local cache when available, otherwise fetches the
    source from IPFS, parses its metadata, and caches both locally.
    """
    effects_dir = get_effects_dir()
    effect_dir = effects_dir / cid
    metadata_path = effect_dir / "metadata.json"
    # Try local cache first
    if metadata_path.exists():
        meta = json.loads(metadata_path.read_text())
    else:
        # Fetch from IPFS
        source_bytes = ipfs_client.get_bytes(cid)
        if not source_bytes:
            raise HTTPException(404, f"Effect {cid[:16]}... not found")
        # Cache locally so later requests and workers skip the IPFS fetch.
        effect_dir.mkdir(parents=True, exist_ok=True)
        source = source_bytes.decode("utf-8")
        (effect_dir / "effect.sexp").write_text(source)
        # Parse metadata from source; the reconstructed record lacks the
        # uploader/uploaded_at fields that an original upload would have.
        parsed_meta = parse_effect_metadata(source)
        meta = {"cid": cid, "meta": parsed_meta}
        (effect_dir / "metadata.json").write_text(json.dumps(meta, indent=2))
    # Add friendly name if available (per-user naming)
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly = await naming.get_by_cid(ctx.actor_id, cid)
    if friendly:
        meta["friendly_name"] = friendly["friendly_name"]
        meta["base_name"] = friendly["base_name"]
        meta["version_id"] = friendly["version_id"]
    if wants_json(request):
        return meta
    # HTML response
    from ..dependencies import get_nav_counts
    nav_counts = await get_nav_counts(ctx.actor_id)
    templates = get_templates(request)
    return render(templates, "effects/detail.html", request,
        effect=meta,
        user=ctx,
        nav_counts=nav_counts,
        active_tab="effects",
    )
@router.get("/{cid}/source")
async def get_effect_source(
    cid: str,
    ctx: UserContext = Depends(require_auth),
):
    """Get effect source code."""
    effect_dir = get_effects_dir() / cid
    # Prefer locally cached copies: current .sexp first, then legacy .py.
    for candidate in (effect_dir / "effect.sexp", effect_dir / "effect.py"):
        if candidate.exists():
            return PlainTextResponse(candidate.read_text())
    # Not cached locally — fetch from IPFS.
    raw = ipfs_client.get_bytes(cid)
    if not raw:
        raise HTTPException(404, f"Effect {cid[:16]}... not found")
    # Cache for subsequent requests, then serve.
    text = raw.decode("utf-8")
    sexp_path = effect_dir / "effect.sexp"
    sexp_path.parent.mkdir(parents=True, exist_ok=True)
    sexp_path.write_text(text)
    return PlainTextResponse(text)
@router.get("")
async def list_effects(
    request: Request,
    offset: int = 0,
    limit: int = 20,
    ctx: UserContext = Depends(require_auth),
):
    """List the authenticated user's effects with offset/limit pagination."""
    import database
    from ..services.naming_service import get_naming_service

    effects_dir = get_effects_dir()
    naming = get_naming_service()
    # Ownership is tracked in item_types; pull the user's effect CIDs first.
    owned = await database.get_user_items(ctx.actor_id, item_type="effect", limit=1000)
    effects = []
    for cid in (item["cid"] for item in owned):
        meta_file = effects_dir / cid / "metadata.json"
        if not meta_file.exists():
            continue
        try:
            meta = json.loads(meta_file.read_text())
        except json.JSONDecodeError:
            # Corrupt metadata is skipped rather than failing the listing.
            continue
        # Decorate with the user's friendly name when one exists.
        friendly = await naming.get_by_cid(ctx.actor_id, cid)
        if friendly:
            meta["friendly_name"] = friendly["friendly_name"]
            meta["base_name"] = friendly["base_name"]
        effects.append(meta)
    # Newest uploads first.
    effects.sort(key=lambda e: e.get("uploaded_at", ""), reverse=True)
    total = len(effects)
    page = effects[offset:offset + limit]
    has_more = offset + limit < total
    if wants_json(request):
        return {"effects": page, "offset": offset, "limit": limit, "has_more": has_more}
    from ..dependencies import get_nav_counts
    nav_counts = await get_nav_counts(ctx.actor_id)
    templates = get_templates(request)
    return render(templates, "effects/list.html", request,
        effects=page,
        user=ctx,
        nav_counts=nav_counts,
        active_tab="effects",
        offset=offset,
        limit=limit,
        has_more=has_more,
    )
@router.post("/{cid}/publish")
async def publish_effect(
    cid: str,
    request: Request,
    ctx: UserContext = Depends(require_auth),
):
    """Publish effect to L2 ActivityPub server.

    Returns an HTML fragment for htmx-style callers and JSON otherwise.
    Service-supplied error text is HTML-escaped before being embedded in a
    fragment so an upstream message cannot inject markup into the page.
    """
    import html as html_mod
    from ..services.cache_service import CacheService
    import database
    # Verify effect exists locally before attempting to publish.
    effects_dir = get_effects_dir()
    effect_dir = effects_dir / cid
    if not effect_dir.exists():
        error = "Effect not found"
        if wants_html(request):
            return HTMLResponse(f'<span class="text-red-400">{error}</span>')
        raise HTTPException(404, error)
    # Delegate the actual publish to the cache service.
    cache_service = CacheService(database, get_cache_manager())
    ipfs_cid, error = await cache_service.publish_to_l2(
        cid=cid,
        actor_id=ctx.actor_id,
        l2_server=ctx.l2_server,
        auth_token=request.cookies.get("auth_token"),
    )
    if error:
        if wants_html(request):
            # Escape: error text may echo upstream/user-provided content.
            return HTMLResponse(f'<span class="text-red-400">{html_mod.escape(error)}</span>')
        raise HTTPException(400, error)
    logger.info(f"Published effect {cid[:16]}... to L2 by {ctx.actor_id}")
    if wants_html(request):
        return HTMLResponse(f'<span class="text-green-400">Shared: {ipfs_cid[:16]}...</span>')
    return {"ipfs_cid": ipfs_cid, "cid": cid, "published": True}
@router.delete("/{cid}")
async def delete_effect(
    cid: str,
    ctx: UserContext = Depends(require_auth),
):
    """Remove user's ownership link to an effect."""
    import database
    # Drop this user's ownership record and any friendly name they assigned.
    await database.delete_item_type(cid, ctx.actor_id, "effect")
    await database.delete_friendly_name(ctx.actor_id, cid)
    # Garbage-collect the local copy and IPFS pin once the last owner is gone.
    remaining_owners = await database.get_item_types(cid)
    if not remaining_owners:
        target = get_effects_dir() / cid
        if target.exists():
            import shutil
            shutil.rmtree(target)
        ipfs_client.unpin(cid)
        logger.info(f"Garbage collected effect {cid[:16]}... (no remaining owners)")
    logger.info(f"Removed effect {cid[:16]}... ownership for {ctx.actor_id}")
    return {"deleted": True}

143
l1/app/routers/fragments.py Normal file
View File

@@ -0,0 +1,143 @@
"""
Art-DAG fragment endpoints.
Exposes HTML fragments at ``/internal/fragments/{type}`` for consumption
by coop apps via the fragment client.
"""
import os
from fastapi import APIRouter, Request, Response
router = APIRouter()
# Registry of fragment handlers: type -> async callable(request) returning HTML str
_handlers: dict[str, object] = {}
FRAGMENT_HEADER = "X-Fragment-Request"
@router.get("/internal/fragments/{fragment_type}")
async def get_fragment(fragment_type: str, request: Request):
    # Refuse requests that don't carry the fragment marker header.
    if not request.headers.get(FRAGMENT_HEADER):
        return Response(content="", status_code=403)
    handler = _handlers.get(fragment_type)
    # Unknown fragment types yield an empty — but successful — HTML response.
    if handler is None:
        return Response(content="", media_type="text/html", status_code=200)
    rendered = await handler(request)
    return Response(content=rendered, media_type="text/html", status_code=200)
# --- nav-item fragment ---
async def _nav_item_handler(request: Request) -> str:
    """Render the nav-item fragment pointing at the Art-DAG app URL."""
    from artdag_common import render_fragment
    templates = request.app.state.templates
    base_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
    return render_fragment(templates, "fragments/nav_item.html", artdag_url=base_url)
_handlers["nav-item"] = _nav_item_handler
# --- link-card fragment ---
async def _link_card_handler(request: Request) -> str:
    """Render one or more link-card fragments from query parameters."""
    from artdag_common import render_fragment
    import database
    templates = request.app.state.templates
    params = request.query_params
    cid = params.get("cid", "")
    content_type = params.get("type", "media")
    slug = params.get("slug", "")
    keys_raw = params.get("keys", "")
    # Batch mode: one card per comma-separated key, each preceded by a
    # marker comment so the client can split the response back apart.
    if keys_raw:
        chunks = []
        for key in (k.strip() for k in keys_raw.split(",")):
            if not key:
                continue
            chunks.append(f"<!-- fragment:{key} -->")
            chunks.append(await _render_single_link_card(templates, key, content_type))
        return "\n".join(chunks)
    # Single mode: prefer the explicit cid, fall back to slug.
    lookup = cid or slug
    if not lookup:
        return ""
    return await _render_single_link_card(templates, lookup, content_type)
async def _render_single_link_card(templates, cid: str, content_type: str) -> str:
    """Render a single link-card fragment for one CID.

    Title resolution priority: user friendly name > item metadata
    (filename/description) > run-cache entry > truncated CID. The link
    target is chosen from content_type (run/recipe/effect, else cache).
    Returns "" for an empty cid.
    """
    import database
    from artdag_common import render_fragment
    if not cid:
        return ""
    artdag_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
    # Try item_types first (has metadata)
    item = await database.get_item_types(cid)
    # get_item_types returns a list; pick best match for content_type
    meta = None
    if item:
        for it in item:
            if it.get("type") == content_type:
                meta = it
                break
        if not meta:
            # No exact type match — fall back to the first record.
            meta = item[0]
    # Try friendly name for display
    friendly = None
    if meta and meta.get("actor_id"):
        friendly = await database.get_friendly_name_by_cid(meta["actor_id"], cid)
    # Try run cache if type is "run"
    run = None
    if content_type == "run":
        run = await database.get_run_cache(cid)
    title = ""
    description = ""
    link = ""
    # Title priority: friendly name > metadata > run > raw CID prefix.
    if friendly:
        title = friendly.get("display_name") or friendly.get("base_name", cid[:12])
    elif meta:
        title = meta.get("filename") or meta.get("description", cid[:12])
    elif run:
        title = f"Run {cid[:12]}"
    else:
        title = cid[:16]
    if meta:
        description = meta.get("description", "")
    # Route the card's link to the matching detail page.
    if content_type == "run":
        link = f"{artdag_url}/runs/{cid}"
    elif content_type == "recipe":
        link = f"{artdag_url}/recipes/{cid}"
    elif content_type == "effect":
        link = f"{artdag_url}/effects/{cid}"
    else:
        link = f"{artdag_url}/cache/{cid}"
    return render_fragment(
        templates, "fragments/link_card.html",
        title=title,
        description=description,
        link=link,
        cid=cid,
        content_type=content_type,
        artdag_url=artdag_url,
    )
# Register the handler so /internal/fragments/link-card resolves to it.
_handlers["link-card"] = _link_card_handler

253
l1/app/routers/home.py Normal file
View File

@@ -0,0 +1,253 @@
"""
Home and root routes for L1 server.
"""
from pathlib import Path
import markdown
from fastapi import APIRouter, Request, Depends, HTTPException
from fastapi.responses import HTMLResponse, RedirectResponse, FileResponse
from artdag_common import render
from artdag_common.middleware import wants_html
from ..dependencies import get_templates, get_current_user
router = APIRouter()
@router.get("/health")
async def health():
    """Liveness probe — unconditionally reports OK."""
    return {"status": "ok"}
async def get_user_stats(actor_id: str) -> dict:
    """Collect per-user dashboard counts; any backend failure degrades to 0."""
    import database
    from ..services.run_service import RunService
    from ..dependencies import get_redis_client, get_cache_manager

    stats: dict = {}

    # Media counts cover actual content types only — effects and recipes are
    # counted separately below.
    try:
        total = 0
        for kind in ("video", "image", "audio", "unknown"):
            total += await database.count_user_items(actor_id, item_type=kind)
        stats["media"] = total
    except Exception:
        stats["media"] = 0

    try:
        stats["recipes"] = await database.count_user_items(actor_id, item_type="recipe")
    except Exception:
        stats["recipes"] = 0

    try:
        svc = RunService(database, get_redis_client(), get_cache_manager())
        stats["runs"] = len(await svc.list_runs(actor_id))
    except Exception:
        stats["runs"] = 0

    try:
        providers = await database.get_user_storage_providers(actor_id)
        stats["storage"] = len(providers) if providers else 0
    except Exception:
        stats["storage"] = 0

    try:
        stats["effects"] = await database.count_user_items(actor_id, item_type="effect")
    except Exception:
        stats["effects"] = 0

    return stats
@router.get("/api/stats")
async def api_stats(request: Request):
    """Get user stats as JSON for CLI and API clients."""
    user = await get_current_user(request)
    if user is None:
        raise HTTPException(401, "Authentication required")
    return await get_user_stats(user.actor_id)
@router.delete("/api/clear-data")
async def clear_user_data(request: Request):
    """
    Clear all user L1 data except storage configuration.

    Deletes: runs, recipes, effects, media/cache items.
    Preserves: storage provider configurations.

    Deletion is best-effort: each category and each item is wrapped in its
    own try/except so one failure does not abort the rest. Per-item errors
    are accumulated and the first 10 are returned to the caller.
    """
    import logging
    logger = logging.getLogger(__name__)
    user = await get_current_user(request)
    if not user:
        raise HTTPException(401, "Authentication required")
    import database
    from ..services.recipe_service import RecipeService
    from ..services.run_service import RunService
    from ..dependencies import get_redis_client, get_cache_manager
    actor_id = user.actor_id
    username = user.username
    # Per-category deletion tallies, returned to the caller.
    deleted = {
        "runs": 0,
        "recipes": 0,
        "effects": 0,
        "media": 0,
    }
    errors = []
    # Delete all runs
    try:
        run_service = RunService(database, get_redis_client(), get_cache_manager())
        runs = await run_service.list_runs(actor_id, offset=0, limit=10000)
        for run in runs:
            try:
                await run_service.discard_run(run["run_id"], actor_id, username)
                deleted["runs"] += 1
            except Exception as e:
                errors.append(f"Run {run['run_id']}: {e}")
    except Exception as e:
        errors.append(f"Failed to list runs: {e}")
    # Delete all recipes
    try:
        recipe_service = RecipeService(get_redis_client(), get_cache_manager())
        recipes = await recipe_service.list_recipes(actor_id, offset=0, limit=10000)
        for recipe in recipes:
            try:
                success, error = await recipe_service.delete_recipe(recipe["recipe_id"], actor_id)
                if success:
                    deleted["recipes"] += 1
                else:
                    errors.append(f"Recipe {recipe['recipe_id']}: {error}")
            except Exception as e:
                errors.append(f"Recipe {recipe['recipe_id']}: {e}")
    except Exception as e:
        errors.append(f"Failed to list recipes: {e}")
    # Delete all effects (uses ownership model)
    cache_manager = get_cache_manager()
    try:
        # Get user's effects from item_types
        effect_items = await database.get_user_items(actor_id, item_type="effect", limit=10000)
        for item in effect_items:
            cid = item.get("cid")
            if cid:
                try:
                    # Remove ownership link
                    await database.delete_item_type(cid, actor_id, "effect")
                    await database.delete_friendly_name(actor_id, cid)
                    # Check if orphaned
                    remaining = await database.get_item_types(cid)
                    if not remaining:
                        # Garbage collect: local files plus the IPFS pin.
                        effects_dir = Path(cache_manager.cache_dir) / "_effects" / cid
                        if effects_dir.exists():
                            import shutil
                            shutil.rmtree(effects_dir)
                        import ipfs_client
                        ipfs_client.unpin(cid)
                    deleted["effects"] += 1
                except Exception as e:
                    errors.append(f"Effect {cid[:16]}...: {e}")
    except Exception as e:
        errors.append(f"Failed to delete effects: {e}")
    # Delete all media/cache items for user (uses ownership model)
    try:
        from ..services.cache_service import CacheService
        cache_service = CacheService(database, cache_manager)
        # Get user's media items (video, image, audio)
        for media_type in ["video", "image", "audio", "unknown"]:
            items = await database.get_user_items(actor_id, item_type=media_type, limit=10000)
            for item in items:
                cid = item.get("cid")
                if cid:
                    try:
                        success, error = await cache_service.delete_content(cid, actor_id)
                        if success:
                            deleted["media"] += 1
                        elif error:
                            errors.append(f"Media {cid[:16]}...: {error}")
                    except Exception as e:
                        errors.append(f"Media {cid[:16]}...: {e}")
    except Exception as e:
        errors.append(f"Failed to delete media: {e}")
    logger.info(f"Cleared data for {actor_id}: {deleted}")
    if errors:
        logger.warning(f"Errors during clear: {errors[:10]}")  # Log first 10 errors
    return {
        "message": "User data cleared",
        "deleted": deleted,
        "errors": errors[:10] if errors else [],  # Return first 10 errors
        "storage_preserved": True,
    }
@router.get("/")
async def home(request: Request):
    """Render the landing page: README content plus per-user stats."""
    user = await get_current_user(request)
    # Render README.md to HTML; a missing file or parse failure simply
    # leaves the section blank.
    readme_html = ""
    try:
        readme_path = Path(__file__).parent.parent.parent / "README.md"
        if readme_path.exists():
            readme_html = markdown.markdown(
                readme_path.read_text(), extensions=['tables', 'fenced_code']
            )
    except Exception:
        pass
    # Anonymous visitors get an empty stats block.
    stats = await get_user_stats(user.actor_id) if user else {}
    templates = get_templates(request)
    return render(templates, "home.html", request,
        user=user,
        readme_html=readme_html,
        stats=stats,
        nav_counts=stats,  # Reuse stats for nav counts
        active_tab="home",
    )
@router.get("/login")
async def login_redirect(request: Request):
    """Send the browser to the OAuth login flow."""
    return RedirectResponse("/auth/login", status_code=302)
# Pre-built CLI client tarball, produced by build-client.sh at the repo root.
CLIENT_TARBALL = Path(__file__).parent.parent.parent / "artdag-client.tar.gz"
@router.get("/download/client")
async def download_client():
    """Download the Art DAG CLI client."""
    if not CLIENT_TARBALL.exists():
        raise HTTPException(404, "Client package not found. Run build-client.sh to create it.")
    return FileResponse(
        CLIENT_TARBALL,
        media_type="application/gzip",
        filename="artdag-client.tar.gz",
    )

125
l1/app/routers/inbox.py Normal file
View File

@@ -0,0 +1,125 @@
"""AP-style inbox endpoint for receiving signed activities from the coop.
POST /inbox — verify HTTP Signature, dispatch by activity type.
"""
from __future__ import annotations
import logging
import time
import httpx
from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse
from ..dependencies import get_redis_client
from ..utils.http_signatures import verify_request_signature, parse_key_id
log = logging.getLogger(__name__)
router = APIRouter()
# Cache fetched public keys in Redis for 24 hours
_KEY_CACHE_TTL = 86400
async def _fetch_actor_public_key(actor_url: str) -> str | None:
    """Fetch an actor's public key, with Redis caching."""
    redis = get_redis_client()
    cache_key = f"actor_pubkey:{actor_url}"
    hit = redis.get(cache_key)
    if hit:
        return hit
    # Cache miss: fetch the actor document and extract the PEM key.
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(
                actor_url,
                headers={"Accept": "application/activity+json, application/ld+json"},
            )
        if resp.status_code != 200:
            log.warning("Failed to fetch actor %s: %d", actor_url, resp.status_code)
            return None
        data = resp.json()
    except Exception:
        log.warning("Error fetching actor %s", actor_url, exc_info=True)
        return None
    pem = (data.get("publicKey") or {}).get("publicKeyPem")
    if not pem:
        log.warning("No publicKey in actor %s", actor_url)
        return None
    # Cache for subsequent verifications.
    redis.set(cache_key, pem, ex=_KEY_CACHE_TTL)
    return pem
@router.post("/inbox")
async def inbox(request: Request):
    """Receive signed AP activities from the coop platform.

    Flow: extract the Signature header, resolve the signing actor from its
    keyId, fetch that actor's public key (Redis-cached), verify the HTTP
    Signature, then dispatch on the activity's "type". Per AP convention a
    verified delivery always returns 202, even for unhandled types.
    """
    sig_header = request.headers.get("signature", "")
    if not sig_header:
        return JSONResponse({"error": "missing signature"}, status_code=401)
    # Read body
    # NOTE(review): the body is consumed here but no Digest header check is
    # visible in this handler — presumably verify_request_signature covers
    # the signed headers only; confirm body integrity is enforced somewhere.
    body = await request.body()
    # Verify HTTP Signature
    actor_url = parse_key_id(sig_header)
    if not actor_url:
        return JSONResponse({"error": "invalid keyId"}, status_code=401)
    pub_key = await _fetch_actor_public_key(actor_url)
    if not pub_key:
        return JSONResponse({"error": "could not fetch public key"}, status_code=401)
    req_headers = dict(request.headers)
    path = request.url.path
    valid = verify_request_signature(
        public_key_pem=pub_key,
        signature_header=sig_header,
        method="POST",
        path=path,
        headers=req_headers,
    )
    if not valid:
        log.warning("Invalid signature from %s", actor_url)
        return JSONResponse({"error": "invalid signature"}, status_code=401)
    # Parse and dispatch
    try:
        activity = await request.json()
    except Exception:
        return JSONResponse({"error": "invalid json"}, status_code=400)
    activity_type = activity.get("type", "")
    log.info("Inbox received: %s from %s", activity_type, actor_url)
    if activity_type == "rose:DeviceAuth":
        _handle_device_auth(activity)
    # Always 202 — AP convention
    return JSONResponse({"status": "accepted"}, status_code=202)
def _handle_device_auth(activity: dict) -> None:
"""Set or delete did_auth:{device_id} in local Redis."""
obj = activity.get("object", {})
device_id = obj.get("device_id", "")
action = obj.get("action", "")
if not device_id:
log.warning("rose:DeviceAuth missing device_id")
return
redis = get_redis_client()
if action == "login":
redis.set(f"did_auth:{device_id}", str(time.time()), ex=30 * 24 * 3600)
log.info("did_auth set for device %s...", device_id[:16])
elif action == "logout":
redis.delete(f"did_auth:{device_id}")
log.info("did_auth cleared for device %s...", device_id[:16])
else:
log.warning("rose:DeviceAuth unknown action: %s", action)

74
l1/app/routers/oembed.py Normal file
View File

@@ -0,0 +1,74 @@
"""Art-DAG oEmbed endpoint.
Returns oEmbed JSON responses for Art-DAG content (media, recipes, effects, runs).
"""
import os
from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse
router = APIRouter()
@router.get("/oembed")
async def oembed(request: Request):
    """Return an oEmbed JSON document for an Art-DAG content URL.

    Supported URL shapes: /cache/{cid}, /recipes/{cid}, /effects/{cid},
    /runs/{cid}. Responds 400 when the `url` parameter is missing and 404
    when the path cannot be parsed into a (type, cid) pair.
    """
    url = request.query_params.get("url", "")
    if not url:
        return JSONResponse({"error": "url parameter required"}, status_code=400)
    # Parse URL to extract content type and CID
    from urllib.parse import urlparse
    parsed = urlparse(url)
    parts = [p for p in parsed.path.strip("/").split("/") if p]
    if len(parts) < 2:
        return JSONResponse({"error": "could not parse content URL"}, status_code=404)
    # Singularize the route segment: recipes -> recipe, runs -> run.
    # removesuffix strips at most one trailing "s", unlike rstrip("s"),
    # which strips *every* trailing "s" character.
    content_type = parts[0].removesuffix("s")
    cid = parts[1]
    import database
    title = cid[:16]
    thumbnail_url = None
    # Prefer stored item metadata for the title.
    items = await database.get_item_types(cid)
    if items:
        meta = items[0]
        title = meta.get("filename") or meta.get("description") or title
        # A user-assigned friendly name wins over raw metadata.
        actor_id = meta.get("actor_id")
        if actor_id:
            friendly = await database.get_friendly_name_by_cid(actor_id, cid)
            if friendly:
                title = friendly.get("display_name") or friendly.get("base_name", title)
        # Media items get a thumbnail pointing at the raw cache endpoint.
        if meta.get("type") == "media":
            artdag_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
            thumbnail_url = f"{artdag_url}/cache/{cid}/raw"
    elif content_type == "run":
        run = await database.get_run_cache(cid)
        if run:
            title = f"Run {cid[:12]}"
    artdag_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
    resp = {
        "version": "1.0",
        "type": "link",
        "title": title,
        "provider_name": "art-dag",
        "provider_url": artdag_url,
        "url": url,
    }
    if thumbnail_url:
        resp["thumbnail_url"] = thumbnail_url
    return JSONResponse(resp)

686
l1/app/routers/recipes.py Normal file
View File

@@ -0,0 +1,686 @@
"""
Recipe management routes for L1 server.
Handles recipe upload, listing, viewing, and execution.
"""
import json
import logging
from typing import Any, Dict, List, Optional, Tuple
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File
from fastapi.responses import HTMLResponse
from pydantic import BaseModel
from artdag_common import render
from artdag_common.middleware import wants_html, wants_json
from artdag_common.middleware.auth import UserContext
from ..dependencies import require_auth, get_current_user, get_templates, get_redis_client, get_cache_manager
from ..services.auth_service import AuthService
from ..services.recipe_service import RecipeService
from ..types import (
CompiledNode, TransformedNode, Registry, Recipe,
is_variable_input, get_effect_cid,
)
router = APIRouter()
logger = logging.getLogger(__name__)
class RecipeUploadRequest(BaseModel):
    """Payload for uploading a recipe as raw text."""
    # Format (S-expression vs YAML) is auto-detected by the upload handler.
    content: str  # S-expression or YAML
    name: Optional[str] = None
    description: Optional[str] = None
class RecipeRunRequest(BaseModel):
    """Request to run a recipe with variable inputs."""
    # Pydantic deep-copies mutable defaults per instance, so {} is safe here.
    inputs: Dict[str, str] = {}  # Map input names to CIDs
def get_recipe_service() -> RecipeService:
    """Build a RecipeService wired to the shared Redis and cache clients."""
    redis = get_redis_client()
    cache = get_cache_manager()
    return RecipeService(redis, cache)
def transform_node(
    node: CompiledNode,
    assets: Dict[str, Dict[str, Any]],
    effects: Dict[str, Dict[str, Any]],
) -> TransformedNode:
    """
    Convert a compiled recipe node into the artdag execution format.

    SOURCE nodes get their ``asset`` reference resolved to a CID and EFFECT
    nodes get their ``effect`` reference resolved, using the supplied
    registries. The compiled ``type``/``id`` keys are emitted as
    ``node_type``/``node_id``. The input node is never mutated.
    """
    cfg = dict(node.get("config", {}))  # shallow copy so the caller's node stays intact
    kind = node.get("type")
    if kind == "SOURCE" and cfg.get("asset") in assets:
        cfg["cid"] = assets[cfg["asset"]].get("cid")
    elif kind == "EFFECT" and cfg.get("effect") in effects:
        cfg["cid"] = effects[cfg["effect"]].get("cid")
    return {
        "node_id": node.get("id", ""),
        "node_type": node.get("type", "EFFECT"),
        "config": cfg,
        "inputs": node.get("inputs", []),
        "name": node.get("name"),
    }
def build_input_name_mapping(
    nodes: Dict[str, TransformedNode],
) -> Dict[str, str]:
    """
    Map every alias a variable input can be referenced by to its node ID.

    Aliases for each variable SOURCE node: the node_id itself; the config
    name verbatim plus its snake_case and kebab-case forms; and the ``def``
    binding name verbatim plus a dashes-to-underscores form.
    """
    mapping: Dict[str, str] = {}
    for node_id, node in nodes.items():
        if node.get("node_type") != "SOURCE":
            continue
        cfg = node.get("config", {})
        if not is_variable_input(cfg):
            continue
        # The node's own ID is always a valid reference.
        mapping[node_id] = node_id
        display = cfg.get("name")
        if display:
            lowered = display.lower()
            mapping[display] = node_id
            mapping[lowered.replace(" ", "_")] = node_id
            mapping[lowered.replace(" ", "-")] = node_id
        binding = node.get("name")
        if binding:
            mapping[binding] = node_id
            mapping[binding.replace("-", "_")] = node_id
    return mapping
def bind_inputs(
    nodes: Dict[str, TransformedNode],
    input_name_to_node: Dict[str, str],
    user_inputs: Dict[str, str],
) -> List[str]:
    """
    Bind user-provided input CIDs onto the recipe's SOURCE nodes.

    Each entry in ``user_inputs`` is matched first as a literal node ID,
    then through the alias mapping. Returns warnings for names that
    matched neither.
    """
    warnings: List[str] = []
    for input_name, cid in user_inputs.items():
        # Literal node-ID match (only honored for SOURCE nodes).
        direct = nodes.get(input_name)
        if direct is not None and direct.get("node_type") == "SOURCE":
            direct["config"]["cid"] = cid
            logger.info(f"Bound input {input_name} directly to node, cid={cid[:16]}...")
            continue
        # Alias lookup built by build_input_name_mapping.
        mapped_id = input_name_to_node.get(input_name)
        if mapped_id is not None:
            nodes[mapped_id]["config"]["cid"] = cid
            logger.info(f"Bound input {input_name} via lookup to node {mapped_id}, cid={cid[:16]}...")
            continue
        warnings.append(f"Input '{input_name}' not found in recipe")
        logger.warning(f"Input {input_name} not found in nodes or input_name_to_node")
    return warnings
async def resolve_friendly_names_in_registry(
    registry: dict,
    actor_id: str,
) -> dict:
    """
    Resolve friendly names to CIDs in the registry.

    Entries whose "cid" field does not look like a real CID (IPFS Qm.../bafy...
    or a 64-char SHA256 hex digest) are treated as friendly names and resolved
    through the naming service. Resolved entries are copied and annotated with
    "_resolved_from"; unresolved entries pass through unchanged.
    """
    from ..services.naming_service import get_naming_service
    import re
    naming = get_naming_service()
    cid_re = re.compile(r'^(Qm[a-zA-Z0-9]{44}|bafy[a-zA-Z0-9]+|[a-f0-9]{64})$')

    async def _resolve_section(section: Dict[str, Any], item_type: str) -> Dict[str, Any]:
        # Resolve one registry section (assets or effects) entry by entry.
        out: Dict[str, Any] = {}
        for name, info in section.items():
            cid = info.get("cid", "")
            if cid and not cid_re.match(cid):
                replacement = await naming.resolve(actor_id, cid, item_type=item_type)
                if replacement:
                    info = dict(info)
                    info["cid"] = replacement
                    info["_resolved_from"] = cid
            out[name] = info
        return out

    return {
        "assets": await _resolve_section(registry.get("assets", {}), "media"),
        "effects": await _resolve_section(registry.get("effects", {}), "effect"),
    }
async def prepare_dag_for_execution(
    recipe: Recipe,
    user_inputs: Dict[str, str],
    actor_id: Optional[str] = None,
) -> Tuple[str, List[str]]:
    """
    Prepare a recipe DAG for execution by transforming nodes and binding inputs.

    Steps: deep-copy the recipe's DAG, resolve registry friendly names to
    CIDs (only when actor_id is given), convert a list-shaped node set to a
    dict keyed by node ID via transform_node, bind user-supplied input CIDs
    to variable SOURCE nodes, and normalize "output" -> "output_id".

    Returns (dag_json, warnings) where warnings lists unbound input names.
    Raises ValueError when the recipe has no dict-shaped "dag".
    """
    recipe_dag = recipe.get("dag")
    if not recipe_dag or not isinstance(recipe_dag, dict):
        raise ValueError("Recipe has no DAG definition")
    # Deep copy (via JSON round-trip) to avoid mutating the original recipe
    dag_copy = json.loads(json.dumps(recipe_dag))
    nodes = dag_copy.get("nodes", {})
    # Get registry for resolving references
    registry = recipe.get("registry", {})
    # Resolve friendly names to CIDs
    if actor_id and registry:
        registry = await resolve_friendly_names_in_registry(registry, actor_id)
    assets = registry.get("assets", {}) if registry else {}
    effects = registry.get("effects", {}) if registry else {}
    # Transform nodes from list to dict if needed (nodes without an "id" are dropped)
    if isinstance(nodes, list):
        nodes_dict: Dict[str, TransformedNode] = {}
        for node in nodes:
            node_id = node.get("id")
            if node_id:
                nodes_dict[node_id] = transform_node(node, assets, effects)
        nodes = nodes_dict
        dag_copy["nodes"] = nodes
    # Build input name mapping and bind user inputs
    input_name_to_node = build_input_name_mapping(nodes)
    logger.info(f"Input name to node mapping: {input_name_to_node}")
    logger.info(f"User-provided inputs: {user_inputs}")
    warnings = bind_inputs(nodes, input_name_to_node, user_inputs)
    # Log final SOURCE node configs for debugging
    for nid, n in nodes.items():
        if n.get("node_type") == "SOURCE":
            logger.info(f"Final SOURCE node {nid}: config={n.get('config')}")
    # Transform output to output_id (execution-format key name)
    if "output" in dag_copy:
        dag_copy["output_id"] = dag_copy.pop("output")
    # Add metadata if not present
    if "metadata" not in dag_copy:
        dag_copy["metadata"] = {}
    return json.dumps(dag_copy), warnings
@router.post("/upload")
async def upload_recipe(
    file: UploadFile = File(...),
    ctx: UserContext = Depends(require_auth),
    recipe_service: RecipeService = Depends(get_recipe_service),
):
    """Upload a new recipe from S-expression or YAML file.

    The format is auto-detected: the first non-comment line starting with
    "(" marks an S-expression recipe; anything else is parsed as YAML.
    Variable and fixed inputs are extracted for the response. Raises 400 on
    parse errors or a rejected upload, 500 when an S-expression recipe is
    submitted but the artdag.sexp compiler is not installed.
    """
    import yaml
    # Read content from the uploaded file
    content = (await file.read()).decode("utf-8")
    # Detect format (skip comments starting with ;)
    def is_sexp_format(text):
        # First non-blank, non-comment line decides the format.
        for line in text.split('\n'):
            stripped = line.strip()
            if not stripped or stripped.startswith(';'):
                continue
            return stripped.startswith('(')
        return False
    is_sexp = is_sexp_format(content)
    # The S-expression compiler is an optional server dependency.
    try:
        from artdag.sexp import compile_string, ParseError, CompileError
        SEXP_AVAILABLE = True
    except ImportError:
        SEXP_AVAILABLE = False
    recipe_name = None
    recipe_version = "1.0"
    recipe_description = None
    variable_inputs = []
    fixed_inputs = []
    if is_sexp:
        if not SEXP_AVAILABLE:
            raise HTTPException(500, "S-expression recipes require artdag.sexp module (not installed on server)")
        # Parse S-expression
        try:
            compiled = compile_string(content)
            recipe_name = compiled.name
            recipe_version = compiled.version
            recipe_description = compiled.description
            # SOURCE nodes with an "input" config are variable inputs;
            # those with an "asset" config are fixed.
            for node in compiled.nodes:
                if node.get("type") == "SOURCE":
                    config = node.get("config", {})
                    if config.get("input"):
                        variable_inputs.append(config.get("name", node.get("id")))
                    elif config.get("asset"):
                        fixed_inputs.append(config.get("asset"))
        except Exception as e:
            raise HTTPException(400, f"Parse error: {e}")
    else:
        # Parse YAML
        try:
            recipe_data = yaml.safe_load(content)
            recipe_name = recipe_data.get("name")
            recipe_version = recipe_data.get("version", "1.0")
            recipe_description = recipe_data.get("description")
            inputs = recipe_data.get("inputs", {})
            # Inputs declared with fixed: true are fixed; everything else is variable.
            for input_name, input_def in inputs.items():
                if isinstance(input_def, dict) and input_def.get("fixed"):
                    fixed_inputs.append(input_name)
                else:
                    variable_inputs.append(input_name)
        except yaml.YAMLError as e:
            raise HTTPException(400, f"Invalid YAML: {e}")
    # Use filename as recipe name if not specified
    if not recipe_name and file.filename:
        recipe_name = file.filename.rsplit(".", 1)[0]
    recipe_id, error = await recipe_service.upload_recipe(
        content=content,
        uploader=ctx.actor_id,
        name=recipe_name,
        description=recipe_description,
    )
    if error:
        raise HTTPException(400, error)
    return {
        "recipe_id": recipe_id,
        "name": recipe_name or "unnamed",
        "version": recipe_version,
        "variable_inputs": variable_inputs,
        "fixed_inputs": fixed_inputs,
        "message": "Recipe uploaded successfully",
    }
@router.get("")
async def list_recipes(
    request: Request,
    offset: int = 0,
    limit: int = 20,
    recipe_service: RecipeService = Depends(get_recipe_service),
    ctx: UserContext = Depends(require_auth),
):
    """List available recipes."""
    recipes = await recipe_service.list_recipes(ctx.actor_id, offset=offset, limit=limit)
    # A full page implies there may be another one after it.
    has_more = len(recipes) >= limit
    if wants_json(request):
        return {"recipes": recipes, "offset": offset, "limit": limit, "has_more": has_more}
    from ..dependencies import get_nav_counts
    templates = get_templates(request)
    return render(
        templates, "recipes/list.html", request,
        recipes=recipes,
        user=ctx,
        nav_counts=await get_nav_counts(ctx.actor_id),
        active_tab="recipes",
        offset=offset,
        limit=limit,
        has_more=has_more,
    )
@router.get("/{recipe_id}")
async def get_recipe(
recipe_id: str,
request: Request,
recipe_service: RecipeService = Depends(get_recipe_service),
ctx: UserContext = Depends(require_auth),
):
"""Get recipe details."""
recipe = await recipe_service.get_recipe(recipe_id)
if not recipe:
raise HTTPException(404, "Recipe not found")
# Add friendly name if available
from ..services.naming_service import get_naming_service
naming = get_naming_service()
friendly = await naming.get_by_cid(ctx.actor_id, recipe_id)
if friendly:
recipe["friendly_name"] = friendly["friendly_name"]
recipe["base_name"] = friendly["base_name"]
recipe["version_id"] = friendly["version_id"]
if wants_json(request):
return recipe
# Build DAG elements for visualization and convert nodes to steps format
dag_elements = []
steps = []
node_colors = {
"SOURCE": "#3b82f6",
"EFFECT": "#8b5cf6",
"SEQUENCE": "#ec4899",
"transform": "#10b981",
"output": "#f59e0b",
}
# Debug: log recipe structure
logger.info(f"Recipe keys: {list(recipe.keys())}")
# Get nodes from dag - can be list or dict, can be under "dag" or directly on recipe
dag = recipe.get("dag", {})
logger.info(f"DAG type: {type(dag)}, keys: {list(dag.keys()) if isinstance(dag, dict) else 'not dict'}")
nodes = dag.get("nodes", []) if isinstance(dag, dict) else []
logger.info(f"Nodes from dag.nodes: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
# Also check for nodes directly on recipe (alternative formats)
if not nodes:
nodes = recipe.get("nodes", [])
logger.info(f"Nodes from recipe.nodes: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
if not nodes:
nodes = recipe.get("pipeline", [])
logger.info(f"Nodes from recipe.pipeline: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
if not nodes:
nodes = recipe.get("steps", [])
logger.info(f"Nodes from recipe.steps: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
logger.info(f"Final nodes count: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
# Convert list of nodes to steps format
if isinstance(nodes, list):
for node in nodes:
node_id = node.get("id", "")
node_type = node.get("type", "EFFECT")
inputs = node.get("inputs", [])
config = node.get("config", {})
steps.append({
"id": node_id,
"name": node_id,
"type": node_type,
"inputs": inputs,
"params": config,
})
dag_elements.append({
"data": {
"id": node_id,
"label": node_id,
"color": node_colors.get(node_type, "#6b7280"),
}
})
for inp in inputs:
if isinstance(inp, str):
dag_elements.append({
"data": {"source": inp, "target": node_id}
})
elif isinstance(nodes, dict):
for node_id, node in nodes.items():
node_type = node.get("type", "EFFECT")
inputs = node.get("inputs", [])
config = node.get("config", {})
steps.append({
"id": node_id,
"name": node_id,
"type": node_type,
"inputs": inputs,
"params": config,
})
dag_elements.append({
"data": {
"id": node_id,
"label": node_id,
"color": node_colors.get(node_type, "#6b7280"),
}
})
for inp in inputs:
if isinstance(inp, str):
dag_elements.append({
"data": {"source": inp, "target": node_id}
})
# Add steps to recipe for template
recipe["steps"] = steps
# Use S-expression source if available
if "sexp" not in recipe:
recipe["sexp"] = "; No S-expression source available"
from ..dependencies import get_nav_counts
nav_counts = await get_nav_counts(ctx.actor_id)
templates = get_templates(request)
return render(templates, "recipes/detail.html", request,
recipe=recipe,
dag_elements=dag_elements,
user=ctx,
nav_counts=nav_counts,
active_tab="recipes",
)
@router.delete("/{recipe_id}")
async def delete_recipe(
recipe_id: str,
ctx: UserContext = Depends(require_auth),
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""Delete a recipe."""
success, error = await recipe_service.delete_recipe(recipe_id, ctx.actor_id)
if error:
raise HTTPException(400 if "Cannot" in error else 404, error)
return {"deleted": True, "recipe_id": recipe_id}
@router.post("/{recipe_id}/run")
async def run_recipe(
recipe_id: str,
req: RecipeRunRequest,
ctx: UserContext = Depends(require_auth),
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""Run a recipe with given inputs."""
from ..services.run_service import RunService
from ..dependencies import get_cache_manager
import database
recipe = await recipe_service.get_recipe(recipe_id)
if not recipe:
raise HTTPException(404, "Recipe not found")
try:
# Create run using run service
run_service = RunService(database, get_redis_client(), get_cache_manager())
# Prepare DAG for execution (transform nodes, bind inputs, resolve friendly names)
dag_json = None
if recipe.get("dag"):
dag_json, warnings = await prepare_dag_for_execution(recipe, req.inputs, actor_id=ctx.actor_id)
for warning in warnings:
logger.warning(warning)
run, error = await run_service.create_run(
recipe=recipe_id, # Use recipe hash as primary identifier
inputs=req.inputs,
use_dag=True,
dag_json=dag_json,
actor_id=ctx.actor_id,
l2_server=ctx.l2_server,
recipe_name=recipe.get("name"), # Store name for display
recipe_sexp=recipe.get("sexp"), # S-expression for code-addressed execution
)
if error:
raise HTTPException(400, error)
if not run:
raise HTTPException(500, "Run creation returned no result")
return {
"run_id": run["run_id"] if isinstance(run, dict) else run.run_id,
"status": run.get("status", "pending") if isinstance(run, dict) else run.status,
"message": "Recipe execution started",
}
except HTTPException:
raise
except Exception as e:
logger.exception(f"Error running recipe {recipe_id}")
raise HTTPException(500, f"Run failed: {e}")
@router.get("/{recipe_id}/dag")
async def recipe_dag(
recipe_id: str,
request: Request,
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""Get recipe DAG visualization data."""
recipe = await recipe_service.get_recipe(recipe_id)
if not recipe:
raise HTTPException(404, "Recipe not found")
dag_elements = []
node_colors = {
"input": "#3b82f6",
"effect": "#8b5cf6",
"analyze": "#ec4899",
"transform": "#10b981",
"output": "#f59e0b",
}
for i, step in enumerate(recipe.get("steps", [])):
step_id = step.get("id", f"step-{i}")
dag_elements.append({
"data": {
"id": step_id,
"label": step.get("name", f"Step {i+1}"),
"color": node_colors.get(step.get("type", "effect"), "#6b7280"),
}
})
for inp in step.get("inputs", []):
dag_elements.append({
"data": {"source": inp, "target": step_id}
})
return {"elements": dag_elements}
@router.delete("/{recipe_id}/ui", response_class=HTMLResponse)
async def ui_discard_recipe(
recipe_id: str,
request: Request,
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""HTMX handler: discard a recipe."""
ctx = await get_current_user(request)
if not ctx:
return HTMLResponse('<div class="text-red-400">Login required</div>', status_code=401)
success, error = await recipe_service.delete_recipe(recipe_id, ctx.actor_id)
if error:
return HTMLResponse(f'<div class="text-red-400">{error}</div>')
return HTMLResponse(
'<div class="text-green-400">Recipe deleted</div>'
'<script>setTimeout(() => window.location.href = "/recipes", 1500);</script>'
)
@router.post("/{recipe_id}/publish")
async def publish_recipe(
recipe_id: str,
request: Request,
ctx: UserContext = Depends(require_auth),
recipe_service: RecipeService = Depends(get_recipe_service),
):
"""Publish recipe to L2 and IPFS."""
from ..services.cache_service import CacheService
from ..dependencies import get_cache_manager
import database
# Verify recipe exists
recipe = await recipe_service.get_recipe(recipe_id)
if not recipe:
raise HTTPException(404, "Recipe not found")
# Use cache service to publish (recipes are stored in cache)
cache_service = CacheService(database, get_cache_manager())
ipfs_cid, error = await cache_service.publish_to_l2(
cid=recipe_id,
actor_id=ctx.actor_id,
l2_server=ctx.l2_server,
auth_token=request.cookies.get("auth_token"),
)
if error:
if wants_html(request):
return HTMLResponse(f'<span class="text-red-400">{error}</span>')
raise HTTPException(400, error)
if wants_html(request):
return HTMLResponse(f'<span class="text-green-400">Shared: {ipfs_cid[:16]}...</span>')
return {"ipfs_cid": ipfs_cid, "published": True}

1704
l1/app/routers/runs.py Normal file

File diff suppressed because it is too large Load Diff

264
l1/app/routers/storage.py Normal file
View File

@@ -0,0 +1,264 @@
"""
Storage provider routes for L1 server.
Manages user storage backends (Pinata, web3.storage, local, etc.)
"""
from typing import Optional, Dict, Any
from fastapi import APIRouter, Request, Depends, HTTPException, Form
from fastapi.responses import HTMLResponse, RedirectResponse
from pydantic import BaseModel
from artdag_common import render
from artdag_common.middleware import wants_html, wants_json
from artdag_common.middleware.auth import UserContext
from ..dependencies import get_database, get_current_user, require_auth, get_templates
from ..services.storage_service import StorageService, STORAGE_PROVIDERS_INFO, VALID_PROVIDER_TYPES
router = APIRouter()
# Import storage_providers module
import storage_providers as sp_module
def get_storage_service():
    """Build a StorageService bound to the app database and provider module.

    ``database`` is imported at call time rather than module import time.
    """
    import database
    return StorageService(database, sp_module)
class AddStorageRequest(BaseModel):
    """JSON body for registering a new storage provider (POST '')."""
    provider_type: str  # backend kind, e.g. "pinata"; presumably validated by the service — confirm
    config: Dict[str, Any]  # provider-specific settings/credentials
    capacity_gb: int = 5  # capacity in gigabytes
    provider_name: Optional[str] = None  # optional display name
class UpdateStorageRequest(BaseModel):
    """JSON body for PATCH /{storage_id}; all fields optional (partial update)."""
    config: Optional[Dict[str, Any]] = None  # replacement provider config
    capacity_gb: Optional[int] = None  # new capacity in gigabytes
    is_active: Optional[bool] = None  # enable or disable the provider
@router.get("")
async def list_storage(
request: Request,
storage_service: StorageService = Depends(get_storage_service),
ctx: UserContext = Depends(require_auth),
):
"""List user's storage providers. HTML for browsers, JSON for API."""
storages = await storage_service.list_storages(ctx.actor_id)
if wants_json(request):
return {"storages": storages}
# Render HTML template
from ..dependencies import get_nav_counts
nav_counts = await get_nav_counts(ctx.actor_id)
templates = get_templates(request)
return render(templates, "storage/list.html", request,
storages=storages,
user=ctx,
nav_counts=nav_counts,
providers_info=STORAGE_PROVIDERS_INFO,
active_tab="storage",
)
@router.post("")
async def add_storage(
req: AddStorageRequest,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
):
"""Add a storage provider via API."""
ctx = await require_auth(request)
storage_id, error = await storage_service.add_storage(
actor_id=ctx.actor_id,
provider_type=req.provider_type,
config=req.config,
capacity_gb=req.capacity_gb,
provider_name=req.provider_name,
)
if error:
raise HTTPException(400, error)
return {"id": storage_id, "message": "Storage provider added"}
@router.post("/add")
async def add_storage_form(
request: Request,
provider_type: str = Form(...),
provider_name: Optional[str] = Form(None),
description: Optional[str] = Form(None),
capacity_gb: int = Form(5),
api_key: Optional[str] = Form(None),
secret_key: Optional[str] = Form(None),
api_token: Optional[str] = Form(None),
project_id: Optional[str] = Form(None),
project_secret: Optional[str] = Form(None),
access_key: Optional[str] = Form(None),
bucket: Optional[str] = Form(None),
path: Optional[str] = Form(None),
storage_service: StorageService = Depends(get_storage_service),
):
"""Add a storage provider via HTML form."""
ctx = await get_current_user(request)
if not ctx:
return HTMLResponse('<div class="text-red-400">Not authenticated</div>', status_code=401)
# Build config from form
form_data = {
"api_key": api_key,
"secret_key": secret_key,
"api_token": api_token,
"project_id": project_id,
"project_secret": project_secret,
"access_key": access_key,
"bucket": bucket,
"path": path,
}
config, error = storage_service.build_config_from_form(provider_type, form_data)
if error:
return HTMLResponse(f'<div class="text-red-400">{error}</div>')
storage_id, error = await storage_service.add_storage(
actor_id=ctx.actor_id,
provider_type=provider_type,
config=config,
capacity_gb=capacity_gb,
provider_name=provider_name,
description=description,
)
if error:
return HTMLResponse(f'<div class="text-red-400">{error}</div>')
return HTMLResponse(f'''
<div class="text-green-400 mb-2">Storage provider added successfully!</div>
<script>setTimeout(() => window.location.href = '/storage/type/{provider_type}', 1500);</script>
''')
@router.get("/{storage_id}")
async def get_storage(
storage_id: int,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
):
"""Get a specific storage provider."""
ctx = await require_auth(request)
storage = await storage_service.get_storage(storage_id, ctx.actor_id)
if not storage:
raise HTTPException(404, "Storage provider not found")
return storage
@router.patch("/{storage_id}")
async def update_storage(
storage_id: int,
req: UpdateStorageRequest,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
):
"""Update a storage provider."""
ctx = await require_auth(request)
success, error = await storage_service.update_storage(
storage_id=storage_id,
actor_id=ctx.actor_id,
config=req.config,
capacity_gb=req.capacity_gb,
is_active=req.is_active,
)
if error:
raise HTTPException(400, error)
return {"message": "Storage provider updated"}
@router.delete("/{storage_id}")
async def delete_storage(
storage_id: int,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
ctx: UserContext = Depends(require_auth),
):
"""Remove a storage provider."""
success, error = await storage_service.delete_storage(storage_id, ctx.actor_id)
if error:
raise HTTPException(400, error)
if wants_html(request):
return HTMLResponse("")
return {"message": "Storage provider removed"}
@router.post("/{storage_id}/test")
async def test_storage(
storage_id: int,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
):
"""Test storage provider connectivity."""
ctx = await get_current_user(request)
if not ctx:
if wants_html(request):
return HTMLResponse('<span class="text-red-400">Not authenticated</span>', status_code=401)
raise HTTPException(401, "Not authenticated")
success, message = await storage_service.test_storage(storage_id, ctx.actor_id)
if wants_html(request):
color = "green" if success else "red"
return HTMLResponse(f'<span class="text-{color}-400">{message}</span>')
return {"success": success, "message": message}
@router.get("/type/{provider_type}")
async def storage_type_page(
provider_type: str,
request: Request,
storage_service: StorageService = Depends(get_storage_service),
ctx: UserContext = Depends(require_auth),
):
"""Page for managing storage configs of a specific type."""
if provider_type not in STORAGE_PROVIDERS_INFO:
raise HTTPException(404, "Invalid provider type")
storages = await storage_service.list_by_type(ctx.actor_id, provider_type)
provider_info = STORAGE_PROVIDERS_INFO[provider_type]
if wants_json(request):
return {
"provider_type": provider_type,
"provider_info": provider_info,
"storages": storages,
}
from ..dependencies import get_nav_counts
nav_counts = await get_nav_counts(ctx.actor_id)
templates = get_templates(request)
return render(templates, "storage/type.html", request,
provider_type=provider_type,
provider_info=provider_info,
storages=storages,
user=ctx,
nav_counts=nav_counts,
active_tab="storage",
)