Fix completed runs not appearing in list + add purge-failed endpoint

- Update save_run_cache to also update actor_id, recipe, inputs on conflict
- Add logging for actor_id when saving runs to run_cache
- Add admin endpoint DELETE /runs/admin/purge-failed to delete all failed runs

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
giles
2026-02-02 23:24:39 +00:00
parent 581da68b3b
commit d20eef76ad
24 changed files with 1671 additions and 453 deletions

View File

@@ -8,7 +8,7 @@ import logging
from pathlib import Path
from typing import Optional, Dict, Any
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
from fastapi.responses import HTMLResponse, FileResponse
from pydantic import BaseModel
@@ -208,13 +208,95 @@ async def import_from_ipfs(
return {"cid": cid, "imported": True}
@router.post("/upload")
async def upload_content(
file: UploadFile = File(...),
@router.post("/upload/chunk")
async def upload_chunk(
    request: Request,
    chunk: UploadFile = File(...),
    upload_id: str = Form(...),
    chunk_index: int = Form(...),
    total_chunks: int = Form(...),
    filename: str = Form(...),
    display_name: Optional[str] = Form(None),
    ctx: UserContext = Depends(require_auth),
    cache_service: CacheService = Depends(get_cache_service),
):
    """Upload a file chunk. Assembles file when all chunks received.

    Args:
        chunk: Raw bytes of this chunk.
        upload_id: Client-generated ID grouping the chunks of one file.
        chunk_index: Zero-based position of this chunk.
        total_chunks: Total number of chunks the client will send.
        filename: Original filename; used for the assembled file.
        display_name: Optional friendly name for the media.

    Returns:
        {"status": "partial", ...} until every chunk has arrived, then
        {"status": "complete", ...} with the CID and friendly name.
    """
    import re
    import tempfile
    # upload_id and filename are client-controlled and are used to build
    # filesystem paths -- reject anything that could traverse directories.
    if not re.fullmatch(r"[A-Za-z0-9_-]+", upload_id):
        raise HTTPException(400, "Invalid upload_id")
    safe_filename = Path(filename).name
    if not safe_filename:
        raise HTTPException(400, "Invalid filename")
    # Create temp dir for this upload
    chunk_dir = Path(tempfile.gettempdir()) / "uploads" / upload_id
    chunk_dir.mkdir(parents=True, exist_ok=True)
    # Save this chunk
    chunk_path = chunk_dir / f"chunk_{chunk_index:05d}"
    chunk_path.write_bytes(await chunk.read())
    # Only assemble once every expected index is present. (A re-sent
    # duplicate chunk must not make a bare file count reach total_chunks
    # while some other index is still missing.)
    expected = [chunk_dir / f"chunk_{i:05d}" for i in range(total_chunks)]
    received = sum(1 for p in expected if p.exists())
    if received < total_chunks:
        return {"status": "partial", "received": received, "total": total_chunks}
    # All chunks received - assemble file in index order
    final_path = chunk_dir / safe_filename
    with open(final_path, 'wb') as f:
        for cp in expected:
            f.write(cp.read_bytes())
            cp.unlink()  # Clean up chunk
    # Read assembled file and remove temp artifacts
    content = final_path.read_bytes()
    final_path.unlink()
    chunk_dir.rmdir()
    # Now do the normal upload flow
    cid, ipfs_cid, error = await cache_service.upload_content(
        content=content,
        filename=safe_filename,
        actor_id=ctx.actor_id,
    )
    if error:
        raise HTTPException(400, error)
    # Assign friendly name
    final_cid = ipfs_cid or cid
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly_entry = await naming.assign_name(
        cid=final_cid,
        actor_id=ctx.actor_id,
        item_type="media",
        display_name=display_name,
        filename=safe_filename,
    )
    return {
        "status": "complete",
        "cid": final_cid,
        "friendly_name": friendly_entry["friendly_name"],
        "filename": safe_filename,
        "size": len(content),
        "uploaded": True,
    }
@router.post("/upload")
async def upload_content(
file: UploadFile = File(...),
display_name: Optional[str] = Form(None),
ctx: UserContext = Depends(require_auth),
cache_service: CacheService = Depends(get_cache_service),
):
"""Upload content to cache and IPFS.
Args:
file: The file to upload
display_name: Optional custom name for the media (used as friendly name)
"""
content = await file.read()
cid, ipfs_cid, error = await cache_service.upload_content(
content=content,
@@ -233,6 +315,7 @@ async def upload_content(
cid=final_cid,
actor_id=ctx.actor_id,
item_type="media",
display_name=display_name, # Use custom name if provided
filename=file.filename,
)
@@ -350,3 +433,83 @@ async def update_metadata_htmx(
<div class="text-green-400 mb-4">Metadata saved!</div>
<script>setTimeout(() => location.reload(), 1000);</script>
''')
# Friendly name editing
@router.get("/{cid}/name-form", response_class=HTMLResponse)
async def get_name_form(
cid: str,
request: Request,
cache_service: CacheService = Depends(get_cache_service),
):
"""Get friendly name editing form (HTMX)."""
ctx = await get_current_user(request)
if not ctx:
return HTMLResponse('<div class="text-red-400">Login required</div>')
# Get current friendly name
from ..services.naming_service import get_naming_service
naming = get_naming_service()
entry = await naming.get_by_cid(ctx.actor_id, cid)
current_name = entry.get("base_name", "") if entry else ""
return HTMLResponse(f'''
<form hx-post="/cache/{cid}/name"
hx-target="#friendly-name-section"
hx-swap="innerHTML"
class="space-y-3">
<div>
<label class="block text-gray-400 text-sm mb-1">Friendly Name</label>
<input type="text" name="display_name" value="{current_name}"
placeholder="e.g., my-background-video"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
<p class="text-gray-500 text-xs mt-1">A name to reference this media in recipes</p>
</div>
<div class="flex space-x-2">
<button type="submit"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Save
</button>
<button type="button"
onclick="location.reload()"
class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
Cancel
</button>
</div>
</form>
''')
@router.post("/{cid}/name", response_class=HTMLResponse)
async def update_friendly_name(
    cid: str,
    request: Request,
):
    """Update friendly name (HTMX form handler).

    Reads ``display_name`` from the submitted form and assigns it to the
    given CID for the current user. Returns an HTML fragment describing
    the outcome.
    """
    import html
    ctx = await get_current_user(request)
    if not ctx:
        return HTMLResponse('<div class="text-red-400">Login required</div>')
    form_data = await request.form()
    display_name = form_data.get("display_name", "").strip()
    if not display_name:
        return HTMLResponse('<div class="text-red-400">Name cannot be empty</div>')
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    try:
        await naming.assign_name(
            cid=cid,
            actor_id=ctx.actor_id,
            item_type="media",
            display_name=display_name,
        )
        return HTMLResponse('''
        <div class="text-green-400 mb-2">Name updated!</div>
        <script>setTimeout(() => location.reload(), 1000);</script>
        ''')
    except Exception as e:
        # Escape before embedding: the exception text can echo
        # user-supplied input (reflected XSS otherwise).
        return HTMLResponse(f'<div class="text-red-400">Error: {html.escape(str(e))}</div>')

View File

@@ -2,17 +2,17 @@
Effects routes for L1 server.
Handles effect upload, listing, and metadata.
Effects are stored in IPFS like all other content-addressed data.
Effects are S-expression files stored in IPFS like all other content-addressed data.
"""
import hashlib
import json
import logging
import re
import time
from pathlib import Path
from typing import Optional
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
from fastapi.responses import HTMLResponse, PlainTextResponse
from artdag_common import render
@@ -40,12 +40,11 @@ def get_effects_dir() -> Path:
def parse_effect_metadata(source: str) -> dict:
"""
Parse effect metadata from source code.
Parse effect metadata from S-expression source code.
Extracts PEP 723 dependencies and @-tag metadata from docstring.
Extracts metadata from comment headers (;; @key value format)
or from (defeffect name ...) form.
"""
import re
metadata = {
"name": "",
"version": "1.0.0",
@@ -53,97 +52,54 @@ def parse_effect_metadata(source: str) -> dict:
"temporal": False,
"description": "",
"params": [],
"dependencies": [],
"requires_python": ">=3.10",
}
# Parse PEP 723 dependencies
pep723_match = re.search(r"# /// script\n(.*?)# ///", source, re.DOTALL)
if pep723_match:
block = pep723_match.group(1)
deps_match = re.search(r'# dependencies = \[(.*?)\]', block, re.DOTALL)
if deps_match:
metadata["dependencies"] = re.findall(r'"([^"]+)"', deps_match.group(1))
python_match = re.search(r'# requires-python = "([^"]+)"', block)
if python_match:
metadata["requires_python"] = python_match.group(1)
# Parse docstring @-tags
docstring_match = re.search(r'"""(.*?)"""', source, re.DOTALL)
if not docstring_match:
docstring_match = re.search(r"'''(.*?)'''", source, re.DOTALL)
if docstring_match:
docstring = docstring_match.group(1)
lines = docstring.split("\n")
current_param = None
desc_lines = []
in_description = False
for line in lines:
# Parse comment-based metadata (;; @key value)
for line in source.split("\n"):
stripped = line.strip()
if not stripped.startswith(";"):
# Stop parsing metadata at first non-comment line
if stripped and not stripped.startswith("("):
continue
if stripped.startswith("("):
break
if stripped.startswith("@effect "):
metadata["name"] = stripped[8:].strip()
in_description = False
# Remove comment prefix
comment = stripped.lstrip(";").strip()
elif stripped.startswith("@version "):
metadata["version"] = stripped[9:].strip()
elif stripped.startswith("@author "):
metadata["author"] = stripped[8:].strip()
elif stripped.startswith("@temporal "):
val = stripped[10:].strip().lower()
metadata["temporal"] = val in ("true", "yes", "1")
elif stripped.startswith("@description"):
in_description = True
desc_lines = []
elif stripped.startswith("@param "):
in_description = False
if current_param:
metadata["params"].append(current_param)
parts = stripped[7:].split()
if comment.startswith("@effect "):
metadata["name"] = comment[8:].strip()
elif comment.startswith("@name "):
metadata["name"] = comment[6:].strip()
elif comment.startswith("@version "):
metadata["version"] = comment[9:].strip()
elif comment.startswith("@author "):
metadata["author"] = comment[8:].strip()
elif comment.startswith("@temporal"):
val = comment[9:].strip().lower() if len(comment) > 9 else "true"
metadata["temporal"] = val in ("true", "yes", "1", "")
elif comment.startswith("@description "):
metadata["description"] = comment[13:].strip()
elif comment.startswith("@param "):
# Format: @param name type [description]
parts = comment[7:].split(None, 2)
if len(parts) >= 2:
current_param = {
"name": parts[0],
"type": parts[1],
"description": "",
}
else:
current_param = None
param = {"name": parts[0], "type": parts[1]}
if len(parts) > 2:
param["description"] = parts[2]
metadata["params"].append(param)
elif stripped.startswith("@range ") and current_param:
range_parts = stripped[7:].split()
if len(range_parts) >= 2:
try:
current_param["range"] = [float(range_parts[0]), float(range_parts[1])]
except ValueError:
pass
# Also try to extract name from (defeffect "name" ...) or (effect "name" ...)
if not metadata["name"]:
name_match = re.search(r'\((defeffect|effect)\s+"([^"]+)"', source)
if name_match:
metadata["name"] = name_match.group(2)
elif stripped.startswith("@default ") and current_param:
current_param["default"] = stripped[9:].strip()
elif stripped.startswith("@example"):
in_description = False
if current_param:
metadata["params"].append(current_param)
current_param = None
elif in_description and stripped:
desc_lines.append(stripped)
elif current_param and stripped and not stripped.startswith("@"):
current_param["description"] = stripped
if in_description:
metadata["description"] = " ".join(desc_lines)
if current_param:
metadata["params"].append(current_param)
# Try to extract name from first (define ...) form
if not metadata["name"]:
define_match = re.search(r'\(define\s+(\w+)', source)
if define_match:
metadata["name"] = define_match.group(1)
return metadata
@@ -151,13 +107,18 @@ def parse_effect_metadata(source: str) -> dict:
@router.post("/upload")
async def upload_effect(
file: UploadFile = File(...),
display_name: Optional[str] = Form(None),
ctx: UserContext = Depends(require_auth),
):
"""
Upload an effect to IPFS.
Upload an S-expression effect to IPFS.
Parses PEP 723 metadata and @-tag docstring.
Parses metadata from comment headers.
Returns IPFS CID for use in recipes.
Args:
file: The .sexp effect file
display_name: Optional custom friendly name for the effect
"""
content = await file.read()
@@ -166,7 +127,7 @@ async def upload_effect(
except UnicodeDecodeError:
raise HTTPException(400, "Effect must be valid UTF-8 text")
# Parse metadata
# Parse metadata from sexp source
try:
meta = parse_effect_metadata(source)
except Exception as e:
@@ -185,7 +146,7 @@ async def upload_effect(
effects_dir = get_effects_dir()
effect_dir = effects_dir / cid
effect_dir.mkdir(parents=True, exist_ok=True)
(effect_dir / "effect.py").write_text(source, encoding="utf-8")
(effect_dir / "effect.sexp").write_text(source, encoding="utf-8")
# Store metadata (locally and in IPFS)
full_meta = {
@@ -209,14 +170,14 @@ async def upload_effect(
filename=file.filename,
)
# Assign friendly name
# Assign friendly name (use custom display_name if provided, else from metadata)
from ..services.naming_service import get_naming_service
naming = get_naming_service()
friendly_entry = await naming.assign_name(
cid=cid,
actor_id=ctx.actor_id,
item_type="effect",
display_name=meta.get("name"),
display_name=display_name or meta.get("name"),
filename=file.filename,
)
@@ -230,7 +191,6 @@ async def upload_effect(
"version": meta.get("version"),
"temporal": meta.get("temporal", False),
"params": meta.get("params", []),
"dependencies": meta.get("dependencies", []),
"uploaded": True,
}
@@ -258,7 +218,7 @@ async def get_effect(
# Cache locally
effect_dir.mkdir(parents=True, exist_ok=True)
source = source_bytes.decode("utf-8")
(effect_dir / "effect.py").write_text(source)
(effect_dir / "effect.sexp").write_text(source)
# Parse metadata from source
parsed_meta = parse_effect_metadata(source)
@@ -297,12 +257,16 @@ async def get_effect_source(
):
"""Get effect source code."""
effects_dir = get_effects_dir()
source_path = effects_dir / cid / "effect.py"
source_path = effects_dir / cid / "effect.sexp"
# Try local cache first
# Try local cache first (check both .sexp and legacy .py)
if source_path.exists():
return PlainTextResponse(source_path.read_text())
legacy_path = effects_dir / cid / "effect.py"
if legacy_path.exists():
return PlainTextResponse(legacy_path.read_text())
# Fetch from IPFS
source_bytes = ipfs_client.get_bytes(cid)
if not source_bytes:

View File

@@ -156,7 +156,6 @@ async def create_run(
async def create_stream_run(
request: StreamRequest,
ctx: UserContext = Depends(require_auth),
redis = Depends(get_redis_client),
):
"""Start a streaming video render.
@@ -166,13 +165,57 @@ async def create_stream_run(
Assets can be referenced by CID or friendly name in the recipe.
"""
import uuid
import tempfile
from pathlib import Path
import database
from tasks.streaming import run_stream
# Generate run ID
run_id = str(uuid.uuid4())
created_at = datetime.now(timezone.utc).isoformat()
# Store recipe in cache so it appears on /recipes page
recipe_id = None
try:
cache_manager = get_cache_manager()
with tempfile.NamedTemporaryFile(delete=False, suffix=".sexp", mode="w") as tmp:
tmp.write(request.recipe_sexp)
tmp_path = Path(tmp.name)
cached, ipfs_cid = cache_manager.put(tmp_path, node_type="recipe", move=True)
recipe_id = cached.cid
# Extract recipe name from S-expression (look for (stream "name" ...) pattern)
import re
name_match = re.search(r'\(stream\s+"([^"]+)"', request.recipe_sexp)
recipe_name = name_match.group(1) if name_match else f"stream-{run_id[:8]}"
# Track ownership in item_types
await database.save_item_metadata(
cid=recipe_id,
actor_id=ctx.actor_id,
item_type="recipe",
description=f"Streaming recipe: {recipe_name}",
filename=f"{recipe_name}.sexp",
)
# Assign friendly name
from ..services.naming_service import get_naming_service
naming = get_naming_service()
await naming.assign_name(
cid=recipe_id,
actor_id=ctx.actor_id,
item_type="recipe",
display_name=recipe_name,
)
logger.info(f"Stored streaming recipe {recipe_id[:16]}... as '{recipe_name}'")
except Exception as e:
logger.warning(f"Failed to store recipe in cache: {e}")
# Continue anyway - run will still work, just won't appear in /recipes
# Submit Celery task
task = run_stream.delay(
run_id=run_id,
recipe_sexp=request.recipe_sexp,
output_name=request.output_name,
duration=request.duration,
@@ -182,21 +225,15 @@ async def create_stream_run(
audio_sexp=request.audio_sexp,
)
# Store run metadata in Redis
run_data = {
"run_id": run_id,
"status": "pending",
"recipe": "streaming",
"actor_id": ctx.actor_id,
"created_at": created_at,
"celery_task_id": task.id,
"output_name": request.output_name,
}
await redis.set(
f"{RUNS_KEY_PREFIX}{run_id}",
json.dumps(run_data),
ex=86400 * 7 # 7 days
# Store in database for durability
pending = await database.create_pending_run(
run_id=run_id,
celery_task_id=task.id,
recipe=recipe_id or "streaming", # Use recipe CID if available
inputs=[], # Streaming recipes don't have traditional inputs
actor_id=ctx.actor_id,
dag_json=request.recipe_sexp, # Store recipe content for viewing
output_name=request.output_name,
)
logger.info(f"Started stream run {run_id} with task {task.id}")
@@ -204,8 +241,8 @@ async def create_stream_run(
return RunStatus(
run_id=run_id,
status="pending",
recipe="streaming",
created_at=created_at,
recipe=recipe_id or "streaming",
created_at=pending.get("created_at"),
celery_task_id=task.id,
)
@@ -305,6 +342,32 @@ async def get_run(
except Exception as e:
logger.warning(f"Failed to load recipe for plan: {e}")
# Handle streaming runs - detect by recipe_sexp content or legacy "streaming" marker
recipe_sexp_content = run.get("recipe_sexp")
is_streaming = run.get("recipe") == "streaming" # Legacy marker
if not is_streaming and recipe_sexp_content:
# Check if content starts with (stream after skipping comments
for line in recipe_sexp_content.split('\n'):
stripped = line.strip()
if not stripped or stripped.startswith(';'):
continue
is_streaming = stripped.startswith('(stream')
break
if is_streaming and recipe_sexp_content and not plan:
plan_sexp = recipe_sexp_content
plan = {
"steps": [{
"id": "stream",
"type": "STREAM",
"name": "Streaming Recipe",
"inputs": [],
"config": {},
"status": "completed" if run.get("status") == "completed" else "pending",
}]
}
run["total_steps"] = 1
run["executed"] = 1 if run.get("status") == "completed" else 0
# Helper to convert simple type to MIME type prefix for template
def type_to_mime(simple_type: str) -> str:
if simple_type == "video":
@@ -564,10 +627,14 @@ async def run_detail(
"analysis": analysis,
}
# Extract plan_sexp for streaming runs
plan_sexp = plan.get("sexp") if plan else None
templates = get_templates(request)
return render(templates, "runs/detail.html", request,
run=run,
plan=plan,
plan_sexp=plan_sexp,
artifacts=artifacts,
analysis=analysis,
dag_elements=dag_elements,
@@ -824,3 +891,26 @@ async def publish_run(
return HTMLResponse(f'<span class="text-green-400">Shared: {ipfs_cid[:16]}...</span>')
return {"ipfs_cid": ipfs_cid, "output_cid": output_cid, "published": True}
@router.delete("/admin/purge-failed")
async def purge_failed_runs(
    ctx: UserContext = Depends(require_auth),
):
    """Delete all failed runs from pending_runs table.

    Returns:
        {"purged": <count of deleted runs>, "run_ids": [<deleted ids>]}

    NOTE(review): despite the /admin/ path this only requires
    authentication (require_auth), not an admin role, and it purges
    failed runs belonging to ALL users -- confirm whether an admin
    check or per-actor filtering is required here.
    """
    import database
    # Get all failed runs
    failed_runs = await database.list_pending_runs(status="failed")
    deleted = []
    for run in failed_runs:
        run_id = run.get("run_id")
        try:
            await database.delete_pending_run(run_id)
            deleted.append(run_id)
        except Exception as e:
            # Best-effort purge: log the failure and continue with the rest.
            logger.warning(f"Failed to delete run {run_id}: {e}")
    logger.info(f"Purged {len(deleted)} failed runs")
    return {"purged": len(deleted), "run_ids": deleted}

View File

@@ -4,6 +4,7 @@ Cache Service - business logic for cache and media management.
import asyncio
import json
import logging
import os
import subprocess
from pathlib import Path
@@ -11,6 +12,8 @@ from typing import Optional, List, Dict, Any, Tuple, TYPE_CHECKING
import httpx
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from database import Database
from cache_manager import L1CacheManager
@@ -513,7 +516,11 @@ class CacheService:
filename: str,
actor_id: str,
) -> Tuple[Optional[str], Optional[str], Optional[str]]:
"""Upload content to cache. Returns (cid, ipfs_cid, error)."""
"""Upload content to cache. Returns (cid, ipfs_cid, error).
Files are stored locally first for fast response, then uploaded
to IPFS in the background.
"""
import tempfile
try:
@@ -525,21 +532,28 @@ class CacheService:
# Detect media type (video/image/audio) before moving file
media_type = detect_media_type(tmp_path)
# Store in cache (also stores in IPFS)
cached, ipfs_cid = self.cache.put(tmp_path, node_type="upload", move=True)
cid = ipfs_cid or cached.cid # Prefer IPFS CID
# Store locally first (skip_ipfs=True for fast response)
# IPFS upload happens in background
cached, ipfs_cid = self.cache.put(tmp_path, node_type="upload", move=True, skip_ipfs=True)
cid = cached.cid # Use local hash since we skipped IPFS
# Save to database with media category type
# Using media_type ("video", "image", "audio") not mime_type ("video/mp4")
# so list_media filtering works correctly
await self.db.create_cache_item(cid, ipfs_cid)
await self.db.create_cache_item(cid, ipfs_cid) # ipfs_cid is None initially
await self.db.save_item_metadata(
cid=cid,
actor_id=actor_id,
item_type=media_type, # Store media category for filtering
item_type=media_type,
filename=filename
)
# Queue background IPFS upload
try:
from tasks.ipfs_upload import upload_to_ipfs
upload_to_ipfs.delay(cid, actor_id)
logger.info(f"Queued background IPFS upload for {cid[:16]}...")
except Exception as e:
logger.warning(f"Failed to queue IPFS upload (will retry manually): {e}")
return cid, ipfs_cid, None
except Exception as e:
return None, None, f"Upload failed: {e}"

View File

@@ -60,7 +60,31 @@ class RecipeService:
logger = logging.getLogger(__name__)
if is_sexp_format(content):
# Parse S-expression
# Detect if this is a streaming recipe (starts with (stream ...))
def is_streaming_recipe(text):
for line in text.split('\n'):
stripped = line.strip()
if not stripped or stripped.startswith(';'):
continue
return stripped.startswith('(stream')
return False
if is_streaming_recipe(content):
# Streaming recipes have different format - parse manually
import re
name_match = re.search(r'\(stream\s+"([^"]+)"', content)
recipe_name = name_match.group(1) if name_match else "streaming"
recipe_data = {
"name": recipe_name,
"sexp": content,
"format": "sexp",
"type": "streaming",
"dag": {"nodes": []}, # Streaming recipes don't have traditional DAG
}
logger.info(f"Parsed streaming recipe {recipe_id[:16]}..., name: {recipe_name}")
else:
# Parse traditional (recipe ...) S-expression
try:
compiled = compile_string(content)
recipe_data = compiled.to_dict()

View File

@@ -128,10 +128,25 @@ class RunService:
# Only return as completed if we have an output
# (runs with no output should be re-executed)
if output_cid:
# Also fetch recipe content from pending_runs for streaming runs
recipe_sexp = None
recipe_name = None
pending = await self.db.get_pending_run(run_id)
if pending:
recipe_sexp = pending.get("dag_json")
# Extract recipe name from streaming recipe content
if recipe_sexp:
import re
name_match = re.search(r'\(stream\s+"([^"]+)"', recipe_sexp)
if name_match:
recipe_name = name_match.group(1)
return {
"run_id": run_id,
"status": "completed",
"recipe": cached.get("recipe"),
"recipe_name": recipe_name,
"inputs": self._ensure_inputs_list(cached.get("inputs")),
"output_cid": output_cid,
"ipfs_cid": cached.get("ipfs_cid"),
@@ -140,6 +155,7 @@ class RunService:
"actor_id": cached.get("actor_id"),
"created_at": cached.get("created_at"),
"completed_at": cached.get("created_at"),
"recipe_sexp": recipe_sexp,
}
# Check database for pending run
@@ -175,6 +191,7 @@ class RunService:
"output_name": pending.get("output_name"),
"created_at": pending.get("created_at"),
"error": pending.get("error"),
"recipe_sexp": pending.get("dag_json"), # Recipe content for streaming runs
}
# If task completed, get result
@@ -209,6 +226,7 @@ class RunService:
"actor_id": pending.get("actor_id"),
"created_at": pending.get("created_at"),
"error": pending.get("error"),
"recipe_sexp": pending.get("dag_json"), # Recipe content for streaming runs
}
# Fallback: Check Redis for backwards compatibility
@@ -714,12 +732,21 @@ class RunService:
"""Get execution plan for a run.
Plans are just node outputs - cached by content hash like everything else.
For streaming runs, returns the recipe content as the plan.
"""
# Get run to find plan_cache_id
run = await self.get_run(run_id)
if not run:
return None
# For streaming runs, return the recipe as the plan
if run.get("recipe") == "streaming" and run.get("recipe_sexp"):
return {
"steps": [{"id": "stream", "type": "STREAM", "name": "Streaming Recipe"}],
"sexp": run.get("recipe_sexp"),
"format": "sexp",
}
# Check plan_cid (stored in database) or plan_cache_id (legacy)
plan_cid = run.get("plan_cid") or run.get("plan_cache_id")
if plan_cid:

View File

@@ -40,15 +40,23 @@
</div>
<!-- Friendly Name -->
{% if cache.friendly_name %}
<div class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">
<div class="mb-2">
<div id="friendly-name-section" class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">
<div class="flex items-center justify-between mb-2">
<span class="text-gray-500 text-sm">Friendly Name</span>
<p class="text-blue-400 font-medium text-lg mt-1">{{ cache.friendly_name }}</p>
</div>
<p class="text-gray-500 text-xs">Use in recipes: <code class="bg-gray-900 px-2 py-0.5 rounded">{{ cache.base_name }}</code></p>
<button hx-get="/cache/{{ cache.cid }}/name-form"
hx-target="#friendly-name-section"
hx-swap="innerHTML"
class="text-blue-400 hover:text-blue-300 text-sm">
Edit
</button>
</div>
{% if cache.friendly_name %}
<p class="text-blue-400 font-medium text-lg">{{ cache.friendly_name }}</p>
<p class="text-gray-500 text-xs mt-1">Use in recipes: <code class="bg-gray-900 px-2 py-0.5 rounded">{{ cache.base_name }}</code></p>
{% else %}
<p class="text-gray-500 text-sm">No friendly name assigned. Click Edit to add one.</p>
{% endif %}
</div>
<!-- User Metadata (editable) -->
<div id="metadata-section" class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">

View File

@@ -7,6 +7,10 @@
<div class="flex items-center justify-between mb-6">
<h1 class="text-3xl font-bold">Media</h1>
<div class="flex items-center space-x-4">
<button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Upload Media
</button>
<select id="type-filter" onchange="filterMedia()"
class="bg-gray-800 border border-gray-600 rounded px-3 py-2 text-white">
<option value="">All Types</option>
@@ -17,6 +21,58 @@
</div>
</div>
<!-- Upload Modal -->
<div id="upload-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
<div class="bg-gray-800 rounded-lg p-6 w-full max-w-md border border-gray-700">
<div class="flex justify-between items-center mb-4">
<h2 class="text-xl font-semibold">Upload Media</h2>
<button onclick="document.getElementById('upload-modal').classList.add('hidden')"
class="text-gray-400 hover:text-white">
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
</svg>
</button>
</div>
<form id="upload-form" enctype="multipart/form-data" class="space-y-4">
<div>
<label class="block text-gray-400 text-sm mb-1">Files</label>
<input type="file" name="files" id="upload-file" required multiple
accept="image/*,video/*,audio/*"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white file:mr-4 file:py-2 file:px-4 file:rounded file:border-0 file:bg-blue-600 file:text-white hover:file:bg-blue-700">
<p class="text-gray-500 text-xs mt-1">Select one or more files to upload</p>
</div>
<div id="single-name-field">
<label class="block text-gray-400 text-sm mb-1">Name (optional, for single file)</label>
<input type="text" name="display_name" id="upload-name" placeholder="e.g., my-background-video"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
<p class="text-gray-500 text-xs mt-1">A friendly name to reference this media in recipes</p>
</div>
<div id="upload-progress" class="hidden">
<div class="bg-gray-700 rounded-full h-2">
<div id="progress-bar" class="bg-blue-600 h-2 rounded-full transition-all" style="width: 0%"></div>
</div>
<p id="progress-text" class="text-gray-400 text-sm mt-1">Uploading...</p>
</div>
<div id="upload-result" class="hidden max-h-48 overflow-y-auto"></div>
<div class="flex justify-end space-x-3">
<button type="button" onclick="document.getElementById('upload-modal').classList.add('hidden')"
class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
Cancel
</button>
<button type="submit" id="upload-btn"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Upload
</button>
</div>
</form>
</div>
</div>
{% if items %}
<div class="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4" id="media-grid">
{% for item in items %}
@@ -115,5 +171,155 @@ function filterMedia() {
}
});
}
// Show/hide name field based on file count
// (the friendly-name input only makes sense for a single file).
document.getElementById('upload-file').addEventListener('change', function(e) {
    const nameField = document.getElementById('single-name-field');
    if (e.target.files.length > 1) {
        nameField.style.display = 'none';
    } else {
        nameField.style.display = 'block';
    }
});
// Handle upload form.
// Uploads each selected file sequentially: files larger than 2MB are sent
// in 1MB chunks to /media/upload/chunk; smaller files go to /media/upload
// in one request. Progress bar and per-file results update as it goes.
document.getElementById('upload-form').addEventListener('submit', async function(e) {
    e.preventDefault();
    const form = e.target;
    const fileInput = document.getElementById('upload-file');
    const files = fileInput.files;
    const displayName = document.getElementById('upload-name').value;
    const progressDiv = document.getElementById('upload-progress');
    const progressBar = document.getElementById('progress-bar');
    const progressText = document.getElementById('progress-text');
    const resultDiv = document.getElementById('upload-result');
    const uploadBtn = document.getElementById('upload-btn');
    // Show progress
    progressDiv.classList.remove('hidden');
    resultDiv.classList.add('hidden');
    uploadBtn.disabled = true;
    const results = [];
    const errors = [];
    const CHUNK_SIZE = 1024 * 1024; // 1MB chunks
    for (let i = 0; i < files.length; i++) {
        const file = files[i];
        const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
        const uploadId = crypto.randomUUID();
        const useChunked = file.size > CHUNK_SIZE * 2; // Use chunked for files > 2MB
        progressText.textContent = `Uploading ${i + 1} of ${files.length}: ${file.name}`;
        try {
            let data;
            if (useChunked && totalChunks > 1) {
                // Chunked upload for large files
                for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
                    const start = chunkIndex * CHUNK_SIZE;
                    const end = Math.min(start + CHUNK_SIZE, file.size);
                    const chunk = file.slice(start, end);
                    const chunkForm = new FormData();
                    chunkForm.append('chunk', chunk);
                    chunkForm.append('upload_id', uploadId);
                    chunkForm.append('chunk_index', chunkIndex);
                    chunkForm.append('total_chunks', totalChunks);
                    chunkForm.append('filename', file.name);
                    // Friendly name only applies to single-file uploads.
                    if (files.length === 1 && displayName) {
                        chunkForm.append('display_name', displayName);
                    }
                    // Per-chunk progress within this file's share of the bar.
                    const chunkProgress = ((i + (chunkIndex + 1) / totalChunks) / files.length) * 100;
                    progressBar.style.width = `${chunkProgress}%`;
                    progressText.textContent = `Uploading ${i + 1} of ${files.length}: ${file.name} (${chunkIndex + 1}/${totalChunks} chunks)`;
                    const response = await fetch('/media/upload/chunk', {
                        method: 'POST',
                        body: chunkForm,
                    });
                    // Guard against non-JSON responses (proxy error pages,
                    // redirects) that would otherwise break response.json().
                    const contentType = response.headers.get('content-type') || '';
                    if (!contentType.includes('application/json')) {
                        const text = await response.text();
                        throw new Error(`Server error (${response.status}): ${text.substring(0, 100)}`);
                    }
                    data = await response.json();
                    if (!response.ok) {
                        throw new Error(data.detail || 'Chunk upload failed');
                    }
                }
            } else {
                // Regular upload for small files
                const formData = new FormData();
                formData.append('file', file);
                if (files.length === 1 && displayName) {
                    formData.append('display_name', displayName);
                }
                progressBar.style.width = `${((i + 0.5) / files.length) * 100}%`;
                const response = await fetch('/media/upload', {
                    method: 'POST',
                    body: formData,
                });
                const contentType = response.headers.get('content-type') || '';
                if (!contentType.includes('application/json')) {
                    const text = await response.text();
                    throw new Error(`Server error (${response.status}): ${text.substring(0, 100)}`);
                }
                data = await response.json();
                if (!response.ok) {
                    throw new Error(data.detail || 'Upload failed');
                }
            }
            results.push({ filename: file.name, friendly_name: data.friendly_name, cid: data.cid });
        } catch (err) {
            // Record the failure and keep going with the remaining files.
            errors.push({ filename: file.name, error: err.message });
        }
        progressBar.style.width = `${((i + 1) / files.length) * 100}%`;
    }
    progressText.textContent = 'Upload complete!';
    // Show results
    let html = '';
    if (results.length > 0) {
        html += '<div class="bg-green-900 border border-green-700 rounded p-3 text-green-300 mb-2">';
        html += `<p class="font-medium">${results.length} file(s) uploaded successfully!</p>`;
        for (const r of results) {
            html += `<p class="text-sm mt-1">${r.filename} → <span class="font-mono">${r.friendly_name}</span></p>`;
        }
        html += '</div>';
    }
    if (errors.length > 0) {
        html += '<div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">';
        html += `<p class="font-medium">${errors.length} file(s) failed:</p>`;
        for (const e of errors) {
            html += `<p class="text-sm mt-1">${e.filename}: ${e.error}</p>`;
        }
        html += '</div>';
    }
    resultDiv.innerHTML = html;
    resultDiv.classList.remove('hidden');
    if (results.length > 0) {
        // Reload page after 2 seconds
        setTimeout(() => location.reload(), 2000);
    } else {
        // Nothing succeeded: leave the modal open so the user can retry.
        uploadBtn.disabled = false;
        uploadBtn.textContent = 'Upload';
    }
});
</script>
{% endblock %}

View File

@@ -8,7 +8,8 @@
{{ super() }}
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github-dark.min.css">
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/python.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/lisp.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/scheme.min.js"></script>
{% endblock %}
{% block content %}
@@ -93,35 +94,23 @@
</div>
{% endif %}
<!-- Dependencies -->
{% if meta.dependencies %}
<div class="bg-gray-800 rounded-lg border border-gray-700">
<div class="border-b border-gray-700 px-4 py-2">
<span class="text-gray-400 text-sm font-medium">Dependencies</span>
</div>
<div class="p-4">
<div class="flex flex-wrap gap-2">
{% for dep in meta.dependencies %}
<span class="bg-gray-700 text-gray-300 px-3 py-1 rounded">{{ dep }}</span>
{% endfor %}
</div>
{% if meta.requires_python %}
<p class="text-gray-500 text-sm mt-3">Python {{ meta.requires_python }}</p>
{% endif %}
</div>
</div>
{% endif %}
<!-- Usage in Recipe -->
<div class="bg-gray-800 rounded-lg border border-gray-700">
<div class="border-b border-gray-700 px-4 py-2">
<span class="text-gray-400 text-sm font-medium">Usage in Recipe</span>
</div>
<div class="p-4">
<pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">(effect {{ meta.name or 'effect' }} :cid "{{ effect.cid }}")</code></pre>
{% if effect.base_name %}
<pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">({{ effect.base_name }} ...)</code></pre>
<p class="text-gray-500 text-xs mt-2">
Reference this effect in your recipe S-expression.
Use the friendly name to reference this effect.
</p>
{% else %}
<pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">(effect :cid "{{ effect.cid }}")</code></pre>
<p class="text-gray-500 text-xs mt-2">
Reference this effect by CID in your recipe.
</p>
{% endif %}
</div>
</div>
</div>
@@ -130,17 +119,17 @@
<div class="lg:col-span-2">
<div class="bg-gray-800 rounded-lg border border-gray-700">
<div class="border-b border-gray-700 px-4 py-2 flex items-center justify-between">
<span class="text-gray-400 text-sm font-medium">Source Code</span>
<span class="text-gray-400 text-sm font-medium">Source Code (S-expression)</span>
<div class="flex items-center space-x-2">
<a href="/effects/{{ effect.cid }}/source"
class="text-gray-400 hover:text-white text-sm"
download="{{ meta.name or 'effect' }}.py">
download="{{ meta.name or 'effect' }}.sexp">
Download
</a>
</div>
</div>
<div class="p-4">
<pre class="text-sm overflow-x-auto rounded bg-gray-900"><code class="language-python" id="source-code">Loading...</code></pre>
<pre class="text-sm overflow-x-auto rounded bg-gray-900"><code class="language-lisp" id="source-code">Loading...</code></pre>
</div>
</div>
</div>

View File

@@ -6,15 +6,59 @@
<div class="max-w-6xl mx-auto">
<div class="flex items-center justify-between mb-6">
<h1 class="text-3xl font-bold">Effects</h1>
<label class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium cursor-pointer">
<button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Upload Effect
<input type="file" accept=".py" class="hidden" id="effect-upload" />
</label>
</button>
</div>
<!-- Upload Modal -->
<div id="upload-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
<div class="bg-gray-800 rounded-lg p-6 w-full max-w-md border border-gray-700">
<div class="flex justify-between items-center mb-4">
<h2 class="text-xl font-semibold">Upload Effect</h2>
<button onclick="document.getElementById('upload-modal').classList.add('hidden')"
class="text-gray-400 hover:text-white">
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
</svg>
</button>
</div>
<form id="upload-form" enctype="multipart/form-data" class="space-y-4">
<div>
<label class="block text-gray-400 text-sm mb-1">Effect File (.sexp)</label>
<input type="file" name="file" id="upload-file" required
accept=".sexp,.lisp"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white file:mr-4 file:py-2 file:px-4 file:rounded file:border-0 file:bg-blue-600 file:text-white hover:file:bg-blue-700">
</div>
<div>
<label class="block text-gray-400 text-sm mb-1">Friendly Name (optional)</label>
<input type="text" name="display_name" id="upload-name" placeholder="e.g., color-shift"
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
<p class="text-gray-500 text-xs mt-1">A name to reference this effect in recipes</p>
</div>
<div id="upload-result" class="hidden"></div>
<div class="flex justify-end space-x-3">
<button type="button" onclick="document.getElementById('upload-modal').classList.add('hidden')"
class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
Cancel
</button>
<button type="submit" id="upload-btn"
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
Upload
</button>
</div>
</form>
</div>
</div>
<p class="text-gray-400 mb-8">
Effects are Python scripts that process video frames or whole videos.
Each effect is stored in IPFS and can be referenced by CID in recipes.
Effects are S-expression files that define video processing operations.
Each effect is stored in IPFS and can be referenced by name in recipes.
</p>
{% if effects %}
@@ -49,17 +93,6 @@
</div>
{% endif %}
{% if meta.dependencies %}
<div class="mt-2 flex flex-wrap gap-1">
{% for dep in meta.dependencies[:3] %}
<span class="bg-gray-700 text-gray-300 px-2 py-0.5 rounded text-xs">{{ dep }}</span>
{% endfor %}
{% if meta.dependencies | length > 3 %}
<span class="text-gray-500 text-xs">+{{ meta.dependencies | length - 3 }} more</span>
{% endif %}
</div>
{% endif %}
<div class="mt-3 text-xs">
{% if effect.friendly_name %}
<span class="text-blue-400 font-medium">{{ effect.friendly_name }}</span>
@@ -83,67 +116,85 @@
{% else %}
<div class="bg-gray-800 border border-gray-700 rounded-lg p-12 text-center">
<svg class="w-16 h-16 mx-auto mb-4 text-gray-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"
d="M10 20l4-16m4 4l4 4-4 4M6 16l-4-4 4-4"/>
</svg>
<p class="text-gray-500 mb-4">No effects uploaded yet.</p>
<p class="text-gray-600 text-sm mb-6">
Effects are Python files with @effect metadata in a docstring.
Effects are S-expression files with metadata in comment headers.
</p>
<label class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded font-medium cursor-pointer inline-block">
<button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded font-medium">
Upload Your First Effect
<input type="file" accept=".py" class="hidden" id="effect-upload-empty" />
</label>
</button>
</div>
{% endif %}
</div>
<div id="upload-result" class="fixed bottom-4 right-4 max-w-sm"></div>
<script>
function handleEffectUpload(input) {
const file = input.files[0];
// Handle upload form
document.getElementById('upload-form').addEventListener('submit', async function(e) {
e.preventDefault();
const form = e.target;
const fileInput = document.getElementById('upload-file');
const displayName = document.getElementById('upload-name').value;
const resultDiv = document.getElementById('upload-result');
const uploadBtn = document.getElementById('upload-btn');
const file = fileInput.files[0];
if (!file) return;
const formData = new FormData();
formData.append('file', file);
if (displayName) {
formData.append('display_name', displayName);
}
fetch('/effects/upload', {
uploadBtn.disabled = true;
uploadBtn.textContent = 'Uploading...';
resultDiv.classList.add('hidden');
try {
const response = await fetch('/effects/upload', {
method: 'POST',
body: formData
})
.then(response => {
if (!response.ok) throw new Error('Upload failed');
return response.json();
})
.then(data => {
const resultDiv = document.getElementById('upload-result');
resultDiv.innerHTML = `
<div class="bg-green-900 border border-green-700 rounded-lg p-4">
<p class="text-green-300 font-medium">Effect uploaded!</p>
<p class="text-green-400 text-sm mt-1">${data.name} v${data.version}</p>
<p class="text-gray-400 text-xs mt-2 font-mono">${data.cid}</p>
</div>
`;
setTimeout(() => {
window.location.reload();
}, 1500);
})
.catch(error => {
const resultDiv = document.getElementById('upload-result');
resultDiv.innerHTML = `
<div class="bg-red-900 border border-red-700 rounded-lg p-4">
<p class="text-red-300 font-medium">Upload failed</p>
<p class="text-red-400 text-sm mt-1">${error.message}</p>
</div>
`;
});
input.value = '';
}
const data = await response.json();
document.getElementById('effect-upload')?.addEventListener('change', function() {
handleEffectUpload(this);
});
document.getElementById('effect-upload-empty')?.addEventListener('change', function() {
handleEffectUpload(this);
if (response.ok) {
resultDiv.innerHTML = `
<div class="bg-green-900 border border-green-700 rounded p-3 text-green-300">
<p class="font-medium">Effect uploaded!</p>
<p class="text-sm mt-1">${data.name} <span class="font-mono">${data.friendly_name}</span></p>
</div>
`;
resultDiv.classList.remove('hidden');
setTimeout(() => location.reload(), 1500);
} else {
resultDiv.innerHTML = `
<div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">
<p class="font-medium">Upload failed</p>
<p class="text-sm mt-1">${data.detail || 'Unknown error'}</p>
</div>
`;
resultDiv.classList.remove('hidden');
uploadBtn.disabled = false;
uploadBtn.textContent = 'Upload';
}
} catch (error) {
resultDiv.innerHTML = `
<div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">
<p class="font-medium">Upload failed</p>
<p class="text-sm mt-1">${error.message}</p>
</div>
`;
resultDiv.classList.remove('hidden');
uploadBtn.disabled = false;
uploadBtn.textContent = 'Upload';
}
});
</script>
{% endblock %}

View File

@@ -50,6 +50,17 @@
</div>
</div>
{% if recipe.type == 'streaming' %}
<!-- Streaming Recipe Info -->
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6 p-4">
<div class="flex items-center space-x-2 mb-2">
<span class="bg-purple-900 text-purple-300 px-2 py-1 rounded text-sm">Streaming Recipe</span>
</div>
<p class="text-gray-400 text-sm">
This recipe uses frame-by-frame streaming rendering. The pipeline is defined as an S-expression that generates frames dynamically.
</p>
</div>
{% else %}
<!-- DAG Visualization -->
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6">
<div class="border-b border-gray-700 px-4 py-2 flex items-center justify-between">
@@ -99,11 +110,16 @@
</div>
{% endfor %}
</div>
{% endif %}
<!-- YAML Source -->
<h2 class="text-lg font-semibold mb-4">Source</h2>
<!-- Source Code -->
<h2 class="text-lg font-semibold mb-4">Recipe (S-expression)</h2>
<div class="bg-gray-900 rounded-lg p-4 border border-gray-700">
<pre class="text-sm text-gray-300 overflow-x-auto whitespace-pre-wrap">{{ recipe.yaml }}</pre>
{% if recipe.sexp %}
<pre class="text-sm font-mono text-gray-300 overflow-x-auto whitespace-pre-wrap">{{ recipe.sexp }}</pre>
{% else %}
<p class="text-gray-500">No source available</p>
{% endif %}
</div>
<!-- Actions -->

View File

@@ -24,6 +24,9 @@
{% if run.cached %}
<span class="bg-purple-900 text-purple-300 px-3 py-1 rounded text-sm">Cached</span>
{% endif %}
{% if run.error %}
<span class="text-red-400 text-sm ml-2">{{ run.error }}</span>
{% endif %}
<div class="flex-grow"></div>
<button hx-post="/runs/{{ run.run_id }}/publish"
hx-target="#share-result"
@@ -50,7 +53,11 @@
<div class="bg-gray-800 rounded-lg p-4">
<div class="text-gray-500 text-sm">Steps</div>
<div class="text-white font-medium">
{% if run.recipe == 'streaming' %}
{% if run.status == 'completed' %}1 / 1{% else %}0 / 1{% endif %}
{% else %}
{{ run.executed or 0 }} / {{ run.total_steps or (plan.steps|length if plan and plan.steps else '?') }}
{% endif %}
{% if run.cached_steps %}
<span class="text-purple-400 text-sm">({{ run.cached_steps }} cached)</span>
{% endif %}

View File

@@ -175,15 +175,17 @@ class L1CacheManager:
# No fallbacks - failures raise exceptions.
def _run_async(self, coro):
"""Run async coroutine from sync context."""
"""Run async coroutine from sync context.
Always creates a fresh event loop to avoid issues with Celery's
prefork workers where loops may be closed by previous tasks.
"""
import asyncio
# Check if we're already in an async context
try:
loop = asyncio.get_running_loop()
# Already in async context - schedule on the running loop
future = asyncio.ensure_future(coro, loop=loop)
# Can't block here, so we need a different approach
# Use a new thread with its own loop
asyncio.get_running_loop()
# We're in an async context - use a thread with its own loop
import threading
result = [None]
error = [None]
@@ -206,13 +208,13 @@ class L1CacheManager:
raise error[0]
return result[0]
except RuntimeError:
# No running loop - safe to use run_until_complete
try:
loop = asyncio.get_event_loop()
except RuntimeError:
# No running loop - create a fresh one (don't reuse potentially closed loops)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
def _set_content_index(self, cache_id: str, ipfs_cid: str):
"""Set content index entry in database (cache_id -> ipfs_cid)."""
@@ -341,28 +343,38 @@ class L1CacheManager:
cache_id: str = None,
execution_time: float = 0.0,
move: bool = False,
skip_ipfs: bool = False,
) -> tuple[CachedFile, Optional[str]]:
"""
Store a file in the cache and upload to IPFS.
Store a file in the cache and optionally upload to IPFS.
Files are ALWAYS stored by IPFS CID. The cache_id parameter creates
an index from cache_id -> IPFS CID for code-addressed lookups.
Files are stored by IPFS CID when skip_ipfs=False (default), or by
local content hash when skip_ipfs=True. The cache_id parameter creates
an index from cache_id -> CID for code-addressed lookups.
Args:
source_path: Path to file to cache
node_type: Type of node (e.g., "upload", "source", "effect")
node_id: DEPRECATED - ignored, always uses IPFS CID
node_id: DEPRECATED - ignored, always uses CID
cache_id: Optional code-addressed cache ID to index
execution_time: How long the operation took
move: If True, move instead of copy
skip_ipfs: If True, skip IPFS upload and use local hash (faster for large files)
Returns:
Tuple of (CachedFile with both node_id and cid, CID)
Tuple of (CachedFile with both node_id and cid, CID or None if skip_ipfs)
"""
if skip_ipfs:
# Use local content hash instead of IPFS CID (much faster)
cid = file_hash(source_path)
ipfs_cid = None
logger.info(f"put: Using local hash (skip_ipfs=True): {cid[:16]}...")
else:
# Upload to IPFS first to get the CID (primary identifier)
cid = ipfs_client.add_file(source_path)
if not cid:
raise RuntimeError(f"IPFS upload failed for {source_path}. IPFS is required.")
ipfs_cid = cid
# Always store by IPFS CID (node_id parameter is deprecated)
node_id = cid
@@ -370,11 +382,12 @@ class L1CacheManager:
# Check if already cached (by node_id)
existing = self.cache.get_entry(node_id)
if existing and existing.output_path.exists():
return CachedFile.from_cache_entry(existing), cid
return CachedFile.from_cache_entry(existing), ipfs_cid
# Compute local hash BEFORE moving the file (for dual-indexing)
# Only needed if we uploaded to IPFS (to map local hash -> IPFS CID)
local_hash = None
if self._is_ipfs_cid(cid):
if not skip_ipfs and self._is_ipfs_cid(cid):
local_hash = file_hash(source_path)
# Store in local cache
@@ -405,9 +418,9 @@ class L1CacheManager:
self._set_content_index(local_hash, cid)
logger.debug(f"Indexed local hash {local_hash[:16]}... -> IPFS {cid}")
logger.info(f"Cached: {cid[:16]}...")
logger.info(f"Cached: {cid[:16]}..." + (" (local only)" if skip_ipfs else " (IPFS)"))
return CachedFile.from_cache_entry(entry), cid
return CachedFile.from_cache_entry(entry), ipfs_cid if not skip_ipfs else None
def get_by_node_id(self, node_id: str) -> Optional[Path]:
"""Get cached file path by node_id."""

View File

@@ -14,7 +14,7 @@ app = Celery(
'art_celery',
broker=REDIS_URL,
backend=REDIS_URL,
include=['tasks', 'tasks.streaming']
include=['tasks', 'tasks.streaming', 'tasks.ipfs_upload']
)
app.conf.update(

View File

@@ -1129,7 +1129,10 @@ async def save_run_cache(
output_cid = EXCLUDED.output_cid,
ipfs_cid = COALESCE(EXCLUDED.ipfs_cid, run_cache.ipfs_cid),
provenance_cid = COALESCE(EXCLUDED.provenance_cid, run_cache.provenance_cid),
plan_cid = COALESCE(EXCLUDED.plan_cid, run_cache.plan_cid)
plan_cid = COALESCE(EXCLUDED.plan_cid, run_cache.plan_cid),
actor_id = COALESCE(EXCLUDED.actor_id, run_cache.actor_id),
recipe = COALESCE(EXCLUDED.recipe, run_cache.recipe),
inputs = COALESCE(EXCLUDED.inputs, run_cache.inputs)
RETURNING run_id, output_cid, ipfs_cid, provenance_cid, plan_cid, recipe, inputs, actor_id, created_at
""",
run_id, output_cid, ipfs_cid, provenance_cid, plan_cid, recipe, _json.dumps(inputs), actor_id

View File

@@ -19,8 +19,8 @@ logger = logging.getLogger(__name__)
# IPFS API multiaddr - default to local, docker uses /dns/ipfs/tcp/5001
IPFS_API = os.getenv("IPFS_API", "/ip4/127.0.0.1/tcp/5001")
# Connection timeout in seconds
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "30"))
# Connection timeout in seconds (increased for large files)
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "120"))
def _multiaddr_to_url(multiaddr: str) -> str:

134
recipes/woods-recipe.sexp Normal file
View File

@@ -0,0 +1,134 @@
;; Woods Recipe - Using friendly names for all assets
;;
;; Requires uploaded:
;; - Media: woods-1 through woods-8 (videos), woods-audio (audio)
;; - Effects: fx-rotate, fx-zoom, fx-blend, fx-ripple, fx-invert, fx-hue-shift
;; - Templates: tpl-standard-primitives, tpl-standard-effects, tpl-process-pair,
;; tpl-crossfade-zoom, tpl-scan-spin, tpl-scan-ripple
(stream "woods-recipe"
  :fps 30
  :width 1920
  :height 1080
  :seed 42

  ;; Load standard primitives and effects via friendly names
  (include :name "tpl-standard-primitives")
  (include :name "tpl-standard-effects")

  ;; Load reusable templates
  (include :name "tpl-process-pair")
  (include :name "tpl-crossfade-zoom")

  ;; === SOURCES AS ARRAY (using friendly names) ===
  (def sources [
    (streaming:make-video-source "woods-1" 30)
    (streaming:make-video-source "woods-2" 30)
    (streaming:make-video-source "woods-3" 30)
    (streaming:make-video-source "woods-4" 30)
    (streaming:make-video-source "woods-5" 30)
    (streaming:make-video-source "woods-6" 30)
    (streaming:make-video-source "woods-7" 30)
    (streaming:make-video-source "woods-8" 30)
  ])

  ;; Per-pair config: [rot-dir, rot-a-max, rot-b-max, zoom-a-max, zoom-b-max]
  ;; NOTE(review): pair-configs is not referenced anywhere else in this
  ;; recipe — presumably consumed by the tpl-process-pair template; confirm.
  (def pair-configs [
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
  ])

  ;; Audio analyzer (using friendly name)
  (def music (streaming:make-audio-analyzer "woods-audio"))

  ;; Audio playback (friendly name resolved by streaming primitives)
  (audio-playback "woods-audio")

  ;; === GLOBAL SCANS ===
  ;; Cycle state: which source is active.  Advances one beat per step;
  ;; when the cycle length :clen is reached it moves to the next source
  ;; and picks a new cycle length of 8 + ((beat-count * 7) mod 17),
  ;; i.e. 8..24 beats.
  (scan cycle (streaming:audio-beat music t)
    :init {:active 0 :beat 0 :clen 16}
    :step (if (< (+ beat 1) clen)
            (dict :active active :beat (+ beat 1) :clen clen)
            (dict :active (mod (+ active 1) (len sources)) :beat 0
                  :clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))

  ;; Reusable scans from templates
  (include :name "tpl-scan-spin")
  (include :name "tpl-scan-ripple")

  ;; === PER-PAIR STATE ===
  ;; One state dict per source: invert/hue-shift countdown timers (randomly
  ;; retriggered with probability 0.1 per beat), a blend mix that snaps to
  ;; 0 or 0.5 when its timer expires, and a rotation angle that accumulates
  ;; 360/rot-clen per beat (one full turn per rotation cycle).
  (scan pairs (streaming:audio-beat music t)
    :init {:states (map (core:range (len sources)) (lambda (_)
            {:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
    :step (dict :states (map states (lambda (p)
      (let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
            new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
            old-hue-a (get p :hue-a)
            old-hue-b (get p :hue-b)
            new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
            new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
            new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
            new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
            mix-rem (get p :mix-rem)
            old-mix (get p :mix)
            new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
            new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
            rot-beat (get p :rot-beat)
            rot-clen (get p :rot-clen)
            old-angle (get p :angle)
            new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
            new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
            new-angle (+ old-angle (/ 360 rot-clen))]
        (dict :inv-a new-inv-a :inv-b new-inv-b
              :hue-a new-hue-a :hue-b new-hue-b
              :hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
              :mix new-mix :mix-rem new-mix-rem
              :angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))

  ;; === FRAME PIPELINE ===
  (frame
    (let [now t
          e (streaming:audio-energy music now)
          ;; Get cycle state
          active (bind cycle :active)
          beat-pos (bind cycle :beat)
          clen (bind cycle :clen)
          ;; Transition logic: fading is true during the final third of the
          ;; cycle (2*clen <= 3*beat-pos < 3*clen); fade-amt ramps 0 -> 1.
          phase3 (* beat-pos 3)
          fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
          fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
          next-idx (mod (+ active 1) (len sources))
          ;; Get pair states array
          pair-states (bind pairs :states)
          ;; Process active pair using macro from template
          active-frame (process-pair active)
          ;; Crossfade with zoom during transition
          result (if fading
                   (crossfade-zoom active-frame (process-pair next-idx) fade-amt)
                   active-frame)
          ;; Final: global spin + audio-gated ripple (amplitude 5..50
          ;; scaled by current audio energy)
          spun (rotate result :angle (bind spin :angle))
          rip-gate (bind ripple-state :gate)
          rip-amp (* rip-gate (core:map-range e 0 1 5 50))]
      (ripple spun
        :amplitude rip-amp
        :center_x (bind ripple-state :cx)
        :center_y (bind ripple-state :cy)
        :frequency 8
        :decay 2
        :speed 5))))

View File

@@ -1,168 +1,395 @@
"""
S-Expression Parser
S-expression parser for ArtDAG recipes and plans.
Parses S-expressions into Python data structures:
- Lists become Python lists
- Symbols become Symbol objects
- Numbers become int/float
- Strings become str
- Keywords (:foo) become Keyword objects
Supports:
- Lists: (a b c)
- Symbols: foo, bar-baz, ->
- Keywords: :key
- Strings: "hello world"
- Numbers: 42, 3.14, -1.5
- Comments: ; to end of line
- Vectors: [a b c] (syntactic sugar for lists)
- Maps: {:key1 val1 :key2 val2} (parsed as Python dicts)
"""
import re
from dataclasses import dataclass
from typing import Any, List, Union
from typing import Any, Dict, List, Union
import re
@dataclass(frozen=True)
@dataclass
class Symbol:
"""A symbol (identifier) in the S-expression."""
"""An unquoted symbol/identifier."""
name: str
def __repr__(self):
return self.name
return f"Symbol({self.name!r})"
def __eq__(self, other):
if isinstance(other, Symbol):
return self.name == other.name
if isinstance(other, str):
return self.name == other
return False
def __hash__(self):
return hash(self.name)
@dataclass(frozen=True)
@dataclass
class Keyword:
"""A keyword like :foo in the S-expression."""
"""A keyword starting with colon."""
name: str
def __repr__(self):
return f":{self.name}"
return f"Keyword({self.name!r})"
def __eq__(self, other):
if isinstance(other, Keyword):
return self.name == other.name
return False
def __hash__(self):
return hash((':' , self.name))
# Token patterns
TOKEN_PATTERNS = [
(r'\s+', None), # Whitespace (skip)
(r';[^\n]*', None), # Comments (skip)
(r'\(', 'LPAREN'),
(r'\)', 'RPAREN'),
(r'\[', 'LBRACKET'),
(r'\]', 'RBRACKET'),
(r"'", 'QUOTE'),
(r'"([^"\\]|\\.)*"', 'STRING'),
(r':[a-zA-Z_][a-zA-Z0-9_\-]*', 'KEYWORD'),
(r'-?[0-9]+\.[0-9]+', 'FLOAT'),
(r'-?[0-9]+', 'INT'),
(r'#t|#f|true|false', 'BOOL'),
(r'[a-zA-Z_+\-*/<>=!?][a-zA-Z0-9_+\-*/<>=!?]*', 'SYMBOL'),
]
TOKEN_REGEX = '|'.join(f'(?P<{name}>{pattern})' if name else f'(?:{pattern})'
for pattern, name in TOKEN_PATTERNS)
class ParseError(Exception):
"""Error during S-expression parsing."""
def __init__(self, message: str, position: int = 0, line: int = 1, col: int = 1):
self.position = position
self.line = line
self.col = col
super().__init__(f"{message} at line {line}, column {col}")
def tokenize(source: str) -> List[tuple]:
"""Tokenize S-expression source code."""
tokens = []
for match in re.finditer(TOKEN_REGEX, source):
kind = match.lastgroup
value = match.group()
if kind:
tokens.append((kind, value))
return tokens
class Tokenizer:
"""Tokenize S-expression text into tokens."""
# Token patterns
WHITESPACE = re.compile(r'\s+')
COMMENT = re.compile(r';[^\n]*')
STRING = re.compile(r'"(?:[^"\\]|\\.)*"')
NUMBER = re.compile(r'-?(?:\d+\.?\d*|\.\d+)(?:[eE][+-]?\d+)?')
KEYWORD = re.compile(r':[a-zA-Z_][a-zA-Z0-9_-]*')
SYMBOL = re.compile(r'[a-zA-Z_*+\-><=/!?][a-zA-Z0-9_*+\-><=/!?.:]*')
def parse(source: str) -> Any:
"""Parse S-expression source into Python data structures."""
tokens = tokenize(source)
pos = [0] # Use list for mutability in nested function
def parse_expr():
if pos[0] >= len(tokens):
raise SyntaxError("Unexpected end of input")
kind, value = tokens[pos[0]]
if kind == 'LPAREN':
pos[0] += 1
items = []
while pos[0] < len(tokens) and tokens[pos[0]][0] != 'RPAREN':
items.append(parse_expr())
if pos[0] >= len(tokens):
raise SyntaxError("Missing closing parenthesis")
pos[0] += 1 # Skip RPAREN
return items
if kind == 'LBRACKET':
pos[0] += 1
items = []
while pos[0] < len(tokens) and tokens[pos[0]][0] != 'RBRACKET':
items.append(parse_expr())
if pos[0] >= len(tokens):
raise SyntaxError("Missing closing bracket")
pos[0] += 1 # Skip RBRACKET
return items
elif kind == 'RPAREN':
raise SyntaxError("Unexpected closing parenthesis")
elif kind == 'QUOTE':
pos[0] += 1
return [Symbol('quote'), parse_expr()]
elif kind == 'STRING':
pos[0] += 1
# Remove quotes and unescape
return value[1:-1].replace('\\"', '"').replace('\\n', '\n')
elif kind == 'INT':
pos[0] += 1
return int(value)
elif kind == 'FLOAT':
pos[0] += 1
return float(value)
elif kind == 'BOOL':
pos[0] += 1
return value in ('#t', 'true')
elif kind == 'KEYWORD':
pos[0] += 1
return Keyword(value[1:]) # Remove leading :
elif kind == 'SYMBOL':
pos[0] += 1
return Symbol(value)
def __init__(self, text: str):
self.text = text
self.pos = 0
self.line = 1
self.col = 1
def _advance(self, count: int = 1):
"""Advance position, tracking line/column."""
for _ in range(count):
if self.pos < len(self.text):
if self.text[self.pos] == '\n':
self.line += 1
self.col = 1
else:
raise SyntaxError(f"Unknown token: {kind} {value}")
self.col += 1
self.pos += 1
result = parse_expr()
def _skip_whitespace_and_comments(self):
"""Skip whitespace and comments."""
while self.pos < len(self.text):
# Whitespace
match = self.WHITESPACE.match(self.text, self.pos)
if match:
self._advance(match.end() - self.pos)
continue
# Check for multiple top-level expressions
if pos[0] < len(tokens):
# Allow multiple top-level expressions, return as list
results = [result]
while pos[0] < len(tokens):
results.append(parse_expr())
return results
# Comments
match = self.COMMENT.match(self.text, self.pos)
if match:
self._advance(match.end() - self.pos)
continue
break
def peek(self) -> str | None:
"""Peek at current character."""
self._skip_whitespace_and_comments()
if self.pos >= len(self.text):
return None
return self.text[self.pos]
def next_token(self) -> Any:
"""Get the next token."""
self._skip_whitespace_and_comments()
if self.pos >= len(self.text):
return None
char = self.text[self.pos]
start_line, start_col = self.line, self.col
# Single-character tokens (parens, brackets, braces)
if char in '()[]{}':
self._advance()
return char
# String
if char == '"':
match = self.STRING.match(self.text, self.pos)
if not match:
raise ParseError("Unterminated string", self.pos, self.line, self.col)
self._advance(match.end() - self.pos)
# Parse escape sequences
content = match.group()[1:-1]
content = content.replace('\\n', '\n')
content = content.replace('\\t', '\t')
content = content.replace('\\"', '"')
content = content.replace('\\\\', '\\')
return content
# Keyword
if char == ':':
match = self.KEYWORD.match(self.text, self.pos)
if match:
self._advance(match.end() - self.pos)
return Keyword(match.group()[1:]) # Strip leading colon
raise ParseError(f"Invalid keyword", self.pos, self.line, self.col)
# Number (must check before symbol due to - prefix)
if char.isdigit() or (char == '-' and self.pos + 1 < len(self.text) and
(self.text[self.pos + 1].isdigit() or self.text[self.pos + 1] == '.')):
match = self.NUMBER.match(self.text, self.pos)
if match:
self._advance(match.end() - self.pos)
num_str = match.group()
if '.' in num_str or 'e' in num_str or 'E' in num_str:
return float(num_str)
return int(num_str)
# Symbol
match = self.SYMBOL.match(self.text, self.pos)
if match:
self._advance(match.end() - self.pos)
return Symbol(match.group())
raise ParseError(f"Unexpected character: {char!r}", self.pos, self.line, self.col)
def parse(text: str) -> Any:
    """Parse exactly one S-expression from *text*.

    Returns nested Python structures: lists for lists/vectors, dicts for
    maps, Symbol/Keyword objects for identifiers, and str/int/float atoms.

    Raises:
        ParseError: if the input is malformed, or anything other than
            whitespace/comments follows the first expression.

    Example:
        >>> parse('(recipe "test" :version "1.0")')
        [Symbol('recipe'), 'test', Keyword('version'), '1.0']
    """
    tok = Tokenizer(text)
    expr = _parse_expr(tok)
    # Anything left beyond whitespace/comments is an error.
    if tok.peek() is None:
        return expr
    raise ParseError("Unexpected content after expression",
                     tok.pos, tok.line, tok.col)
def parse_all(text: str) -> List[Any]:
    """Parse every top-level S-expression in *text*.

    Returns the parsed expressions in source order; an empty (or
    comment-only) input yields an empty list.
    """
    tok = Tokenizer(text)
    exprs: List[Any] = []
    # peek() skips whitespace/comments; None means the input is exhausted.
    while tok.peek() is not None:
        exprs.append(_parse_expr(tok))
    return exprs
def _parse_expr(tokenizer: Tokenizer) -> Any:
    """Parse and return one expression from the token stream."""
    token = tokenizer.next_token()
    if token is None:
        raise ParseError("Unexpected end of input",
                         tokenizer.pos, tokenizer.line, tokenizer.col)
    # Compound openers: '(' is a list, '[' is a vector (sugar for a list).
    delimiters = {'(': ')', '[': ']'}
    for opener, closer in delimiters.items():
        if token == opener:
            return _parse_list(tokenizer, closer)
    # Map literal: {:key1 val1 :key2 val2}
    if token == '{':
        return _parse_map(tokenizer)
    # A stray closer here means the input is unbalanced.
    if isinstance(token, str) and token in ')]}':
        raise ParseError(f"Unexpected {token!r}",
                         tokenizer.pos, tokenizer.line, tokenizer.col)
    # Otherwise the token is already an atom (symbol/keyword/number/string).
    return token
def _parse_list(tokenizer: Tokenizer, closer: str) -> List[Any]:
    """Collect expressions until *closer* is seen; return them as a list."""
    items: List[Any] = []
    while tokenizer.peek() != closer:
        if tokenizer.peek() is None:
            raise ParseError(f"Unterminated list, expected {closer!r}",
                             tokenizer.pos, tokenizer.line, tokenizer.col)
        items.append(_parse_expr(tokenizer))
    tokenizer.next_token()  # Consume the closing delimiter
    return items
def _parse_map(tokenizer: Tokenizer) -> Dict[str, Any]:
    """Parse a map/dict: {:key1 val1 :key2 val2} -> {"key1": val1, "key2": val2}."""
    mapping: Dict[str, Any] = {}
    while True:
        nxt = tokenizer.peek()
        if nxt is None:
            raise ParseError("Unterminated map, expected '}'",
                             tokenizer.pos, tokenizer.line, tokenizer.col)
        if nxt == '}':
            tokenizer.next_token()  # Consume the closing brace
            return mapping
        # Keys must be keywords (:key) or plain strings.
        key_expr = _parse_expr(tokenizer)
        if isinstance(key_expr, Keyword):
            key = key_expr.name
        elif isinstance(key_expr, str):
            key = key_expr
        else:
            raise ParseError(
                f"Map key must be keyword or string, got {type(key_expr).__name__}",
                tokenizer.pos, tokenizer.line, tokenizer.col)
        # The value immediately follows its key.
        mapping[key] = _parse_expr(tokenizer)
def serialize(expr: Any, indent: int = 0, pretty: bool = False) -> str:
    """
    Serialize a Python data structure back to S-expression format.

    Args:
        expr: The expression to serialize
        indent: Current indentation level (used by pretty printing)
        pretty: Whether to emit multi-line, indented output

    Returns:
        S-expression string

    Raises:
        ValueError: If *expr* is of an unsupported type.
    """
    if isinstance(expr, list):
        if not expr:
            return "()"
        if pretty:
            return _serialize_pretty(expr, indent)
        return "(" + " ".join(serialize(x, indent, False) for x in expr) + ")"
    if isinstance(expr, Symbol):
        return expr.name
    if isinstance(expr, Keyword):
        return f":{expr.name}"
    if isinstance(expr, str):
        # Escape backslash first so the later escapes are not double-processed.
        escaped = (expr.replace('\\', '\\\\')
                       .replace('"', '\\"')
                       .replace('\n', '\\n')
                       .replace('\t', '\\t'))
        return f'"{escaped}"'
    # bool must be tested before int, since bool is a subclass of int.
    if isinstance(expr, bool):
        return "true" if expr else "false"
    if isinstance(expr, (int, float)):
        return str(expr)
    if expr is None:
        return "nil"
    if isinstance(expr, dict):
        # Dicts serialize as a property list: {:key1 val1 :key2 val2}
        pairs = []
        for key, value in expr.items():
            pairs.append(f":{key}")
            pairs.append(serialize(value, indent, pretty))
        return "{" + " ".join(pairs) + "}"
    raise ValueError(f"Cannot serialize {type(expr).__name__}: {expr!r}")
def _serialize_pretty(expr: List, indent: int) -> str:
    """Pretty-print a list expression with smart formatting.

    Short forms are kept on a single line; longer forms are split across
    lines, grouping each keyword with its value when the value is short.
    """
    if not expr:
        return "()"
    inner_prefix = " " * (indent + 1)
    # If the flat rendering is short enough, keep it on one line.
    simple = serialize(expr, indent, False)
    if len(simple) < 60 and '\n' not in simple:
        return simple
    # Start building multiline output: head stays on the opening line.
    head = serialize(expr[0], indent + 1, False)
    parts = [f"({head}"]
    i = 1
    while i < len(expr):
        item = expr[i]
        # Group keyword-value pairs on the same line when possible.
        if isinstance(item, Keyword) and i + 1 < len(expr):
            key = serialize(item, 0, False)
            val = serialize(expr[i + 1], indent + 1, False)
            if len(val) < 50 and '\n' not in val:
                # Value is short, put it on the same line as its keyword.
                parts.append(f"{inner_prefix}{key} {val}")
            else:
                # Value is complex, render it pretty on its own.
                val_pretty = serialize(expr[i + 1], indent + 1, True)
                parts.append(f"{inner_prefix}{key} {val_pretty}")
            i += 2
        else:
            # Regular (non-keyword) item gets its own line.
            item_str = serialize(item, indent + 1, True)
            parts.append(f"{inner_prefix}{item_str}")
            i += 1
    return "\n".join(parts) + ")"
def parse_file(path: str) -> Any:
    """Parse an S-expression file and return its (single) top-level expression.

    Raises:
        ParseError: On malformed content or trailing expressions.
        OSError: If the file cannot be read.
    """
    # Explicit UTF-8 so parsing does not depend on the platform's
    # locale-default encoding.
    with open(path, 'r', encoding='utf-8') as f:
        return parse(f.read())
# Convenience single-line rendering (legacy formatting; see serialize()).
def to_sexp(obj: Any) -> str:
    """Convert a Python object back to an S-expression string.

    NOTE: unlike serialize(), this renders booleans as #t/#f, does not
    escape special characters inside strings, and falls back to repr()
    for unsupported types instead of raising.
    """
    if isinstance(obj, list):
        return '(' + ' '.join(to_sexp(x) for x in obj) + ')'
    elif isinstance(obj, Symbol):
        return obj.name
    elif isinstance(obj, Keyword):
        return f':{obj.name}'
    elif isinstance(obj, str):
        # NOTE(review): embedded quotes/newlines are not escaped here —
        # round-tripping through parse() is not guaranteed; confirm intent.
        return f'"{obj}"'
    elif isinstance(obj, bool):
        return '#t' if obj else '#f'
    elif isinstance(obj, (int, float)):
        return str(obj)
    else:
        return repr(obj)

View File

@@ -55,9 +55,13 @@ class VideoSource:
self._proc.kill()
self._proc = None
# Check file exists before trying to open
if not self.path.exists():
raise FileNotFoundError(f"Video file not found: {self.path}")
w, h = self._frame_size
cmd = [
"ffmpeg", "-v", "quiet",
"ffmpeg", "-v", "error", # Show errors instead of quiet
"-ss", f"{seek_time:.3f}",
"-i", str(self.path),
"-f", "rawvideo", "-pix_fmt", "rgb24",
@@ -65,9 +69,18 @@ class VideoSource:
"-r", str(self.fps), # Output at specified fps
"-"
]
self._proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
self._proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self._stream_time = seek_time
# Check if process started successfully by reading first bit of stderr
import select
import sys
readable, _, _ = select.select([self._proc.stderr], [], [], 0.5)
if readable:
err = self._proc.stderr.read(4096).decode('utf-8', errors='ignore')
if err:
print(f"ffmpeg error for {self.path.name}: {err}", file=sys.stderr)
def _read_frame_from_stream(self) -> np.ndarray:
"""Read one frame from the stream."""
w, h = self._frame_size
@@ -130,8 +143,12 @@ class VideoSource:
frame = self._read_frame_from_stream()
if frame is None:
import sys
print(f"NULL FRAME {self.path.name}: t={t:.2f} seek={seek_time:.2f}", file=sys.stderr)
frame = np.zeros((h, w, 3), dtype=np.uint8)
# Check for ffmpeg errors
if self._proc and self._proc.stderr:
err = self._proc.stderr.read(4096).decode('utf-8', errors='ignore')
if err:
raise RuntimeError(f"Failed to read video frame from {self.path.name}: {err}")
raise RuntimeError(f"Failed to read video frame from {self.path.name} at t={t:.2f} - file may be corrupted or inaccessible")
else:
self._stream_time += self._frame_time

View File

@@ -30,12 +30,9 @@ from pathlib import Path
from dataclasses import dataclass
from typing import Dict, List, Any, Optional, Tuple
# Try pip-installed artdag first, fall back to local path
try:
from artdag.sexp.parser import parse, parse_all, Symbol, Keyword
except ImportError:
sys.path.insert(0, str(Path(__file__).parent.parent.parent / "artdag"))
from artdag.sexp.parser import parse, parse_all, Symbol, Keyword
# Use local sexp_effects parser (supports namespaced symbols like math:sin)
sys.path.insert(0, str(Path(__file__).parent.parent))
from sexp_effects.parser import parse, parse_all, Symbol, Keyword
@dataclass
@@ -54,9 +51,10 @@ class StreamInterpreter:
and calls primitives.
"""
def __init__(self, sexp_path: str):
def __init__(self, sexp_path: str, actor_id: Optional[str] = None):
self.sexp_path = Path(sexp_path)
self.sexp_dir = self.sexp_path.parent
self.actor_id = actor_id # For friendly name resolution
text = self.sexp_path.read_text()
self.ast = parse(text)
@@ -84,6 +82,26 @@ class StreamInterpreter:
self.sources_config: Optional[Path] = None
self.audio_config: Optional[Path] = None
# Error tracking
self.errors: List[str] = []
def _resolve_name(self, name: str) -> Optional[Path]:
"""Resolve a friendly name to a file path using the naming service."""
try:
# Import here to avoid circular imports
from tasks.streaming import resolve_asset
path = resolve_asset(name, self.actor_id)
if path:
return path
except Exception as e:
print(f"Warning: failed to resolve name '{name}': {e}", file=sys.stderr)
return None
def _record_error(self, msg: str):
"""Record an error that occurred during evaluation."""
self.errors.append(msg)
print(f"ERROR: {msg}", file=sys.stderr)
import random
self.rng = random.Random(self.config.get('seed', 42))
@@ -241,27 +259,50 @@ class StreamInterpreter:
self.macros[name] = {'params': params, 'body': body}
elif cmd == 'effect':
# Handle (effect name :path "...") in included files - recursive
# Handle (effect name :path "...") or (effect name :name "...") in included files
i = 2
while i < len(form):
if isinstance(form[i], Keyword) and form[i].name == 'path':
if isinstance(form[i], Keyword):
kw = form[i].name
if kw == 'path':
path = str(form[i + 1]).strip('"')
# Resolve relative to the file being loaded
full = (effect_path.parent / path).resolve()
self._load_effect(full)
i += 2
elif kw == 'name':
fname = str(form[i + 1]).strip('"')
resolved = self._resolve_name(fname)
if resolved:
self._load_effect(resolved)
else:
raise RuntimeError(f"Could not resolve effect name '{fname}' - make sure it's uploaded and you're logged in")
i += 2
else:
i += 1
else:
i += 1
elif cmd == 'include':
# Handle (include :path "...") in included files - recursive
# Handle (include :path "...") or (include :name "...") in included files
i = 1
while i < len(form):
if isinstance(form[i], Keyword) and form[i].name == 'path':
if isinstance(form[i], Keyword):
kw = form[i].name
if kw == 'path':
path = str(form[i + 1]).strip('"')
full = (effect_path.parent / path).resolve()
self._load_effect(full)
i += 2
elif kw == 'name':
fname = str(form[i + 1]).strip('"')
resolved = self._resolve_name(fname)
if resolved:
self._load_effect(resolved)
else:
raise RuntimeError(f"Could not resolve include name '{fname}' - make sure it's uploaded and you're logged in")
i += 2
else:
i += 1
else:
i += 1
@@ -313,22 +354,49 @@ class StreamInterpreter:
name = form[1].name if isinstance(form[1], Symbol) else str(form[1])
i = 2
while i < len(form):
if isinstance(form[i], Keyword) and form[i].name == 'path':
if isinstance(form[i], Keyword):
kw = form[i].name
if kw == 'path':
path = str(form[i + 1]).strip('"')
full = (self.sexp_dir / path).resolve()
self._load_effect(full)
i += 2
elif kw == 'name':
# Resolve friendly name to path
fname = str(form[i + 1]).strip('"')
resolved = self._resolve_name(fname)
if resolved:
self._load_effect(resolved)
else:
raise RuntimeError(f"Could not resolve effect name '{fname}' - make sure it's uploaded and you're logged in")
i += 2
else:
i += 1
else:
i += 1
elif cmd == 'include':
i = 1
while i < len(form):
if isinstance(form[i], Keyword) and form[i].name == 'path':
if isinstance(form[i], Keyword):
kw = form[i].name
if kw == 'path':
path = str(form[i + 1]).strip('"')
full = (self.sexp_dir / path).resolve()
self._load_effect(full)
i += 2
elif kw == 'name':
# Resolve friendly name to path
fname = str(form[i + 1]).strip('"')
resolved = self._resolve_name(fname)
if resolved:
self._load_effect(resolved)
else:
raise RuntimeError(f"Could not resolve include name '{fname}' - make sure it's uploaded and you're logged in")
i += 2
else:
i += 1
else:
i += 1
@@ -337,6 +405,12 @@ class StreamInterpreter:
# Skip if already set by config file
if self.audio_playback is None:
path = str(form[1]).strip('"')
# Try to resolve as friendly name first
resolved = self._resolve_name(path)
if resolved:
self.audio_playback = str(resolved)
else:
# Fall back to relative path
self.audio_playback = str((self.sexp_dir / path).resolve())
print(f"Audio playback: {self.audio_playback}", file=sys.stderr)
@@ -419,6 +493,10 @@ class StreamInterpreter:
if isinstance(expr, Keyword):
return expr.name
# Handle dicts from new parser - evaluate values
if isinstance(expr, dict):
return {k: self._eval(v, env) for k, v in expr.items()}
if not isinstance(expr, list) or not expr:
return expr
@@ -685,8 +763,8 @@ class StreamInterpreter:
return prim_func(*evaluated_args, **kwargs)
return prim_func(*evaluated_args)
except Exception as e:
print(f"Primitive {op} error: {e}", file=sys.stderr)
return None
self._record_error(f"Primitive {op} error: {e}")
raise RuntimeError(f"Primitive {op} failed: {e}")
# === Macros (function-like: args evaluated before binding) ===
@@ -720,8 +798,8 @@ class StreamInterpreter:
return prim_func(*evaluated_args, **kwargs)
return prim_func(*evaluated_args)
except Exception as e:
print(f"Primitive {op} error: {e}", file=sys.stderr)
return None
self._record_error(f"Primitive {op} error: {e}")
raise RuntimeError(f"Primitive {op} failed: {e}")
# Unknown - return as-is
return expr

View File

@@ -2,9 +2,12 @@
#
# Tasks:
# 1. run_stream - Execute a streaming S-expression recipe
# 2. upload_to_ipfs - Background IPFS upload for media files
from .streaming import run_stream
from .ipfs_upload import upload_to_ipfs
__all__ = [
"run_stream",
"upload_to_ipfs",
]

83
tasks/ipfs_upload.py Normal file
View File

@@ -0,0 +1,83 @@
"""
Background IPFS upload task.
Uploads files to IPFS in the background after initial local storage.
This allows fast uploads while still getting IPFS CIDs eventually.
"""
import logging
import os
import sys
from pathlib import Path
from typing import Optional
# Add parent directory to path for imports
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from celery_app import app
import ipfs_client
logger = logging.getLogger(__name__)
@app.task(bind=True, max_retries=3, default_retry_delay=60)
def upload_to_ipfs(self, local_cid: str, actor_id: str) -> Optional[str]:
"""
Upload a locally cached file to IPFS in the background.
Args:
local_cid: The local content hash of the file
actor_id: The user who uploaded the file
Returns:
IPFS CID if successful, None if failed
"""
from cache_manager import get_cache_manager
import asyncio
import database
logger.info(f"Background IPFS upload starting for {local_cid[:16]}...")
try:
cache_mgr = get_cache_manager()
# Get the file path from local cache
file_path = cache_mgr.get_by_cid(local_cid)
if not file_path or not file_path.exists():
logger.error(f"File not found for local CID {local_cid[:16]}...")
return None
# Upload to IPFS
logger.info(f"Uploading {file_path} to IPFS...")
ipfs_cid = ipfs_client.add_file(file_path)
if not ipfs_cid:
logger.error(f"IPFS upload failed for {local_cid[:16]}...")
raise self.retry(exc=Exception("IPFS upload failed"))
logger.info(f"IPFS upload successful: {local_cid[:16]}... -> {ipfs_cid[:16]}...")
# Update database with IPFS CID
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
# Initialize database pool if needed
loop.run_until_complete(database.init_pool())
# Update cache_items table
loop.run_until_complete(
database.update_cache_item_ipfs_cid(local_cid, ipfs_cid)
)
# Create index from IPFS CID to local cache
cache_mgr._set_content_index(ipfs_cid, local_cid)
logger.info(f"Database updated with IPFS CID for {local_cid[:16]}...")
finally:
loop.close()
return ipfs_cid
except Exception as e:
logger.error(f"Background IPFS upload error: {e}")
raise self.retry(exc=e)

View File

@@ -24,6 +24,11 @@ from cache_manager import get_cache_manager
logger = logging.getLogger(__name__)
# Module-level event loop for database operations
_resolve_loop = None
_db_initialized = False
def resolve_asset(ref: str, actor_id: Optional[str] = None) -> Optional[Path]:
"""
Resolve an asset reference (CID or friendly name) to a file path.
@@ -35,6 +40,7 @@ def resolve_asset(ref: str, actor_id: Optional[str] = None) -> Optional[Path]:
Returns:
Path to the asset file, or None if not found
"""
global _resolve_loop, _db_initialized
cache_mgr = get_cache_manager()
# Try as direct CID first
@@ -46,15 +52,22 @@ def resolve_asset(ref: str, actor_id: Optional[str] = None) -> Optional[Path]:
# Try as friendly name if actor_id provided
if actor_id:
import asyncio
import database
from database import resolve_friendly_name
try:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
cid = loop.run_until_complete(resolve_friendly_name(actor_id, ref))
finally:
loop.close()
# Reuse event loop for database operations
if _resolve_loop is None or _resolve_loop.is_closed():
_resolve_loop = asyncio.new_event_loop()
asyncio.set_event_loop(_resolve_loop)
_db_initialized = False
# Initialize database pool once per loop
if not _db_initialized:
_resolve_loop.run_until_complete(database.init_db())
_db_initialized = True
cid = _resolve_loop.run_until_complete(resolve_friendly_name(actor_id, ref))
if cid:
path = cache_mgr.get_by_cid(cid)
@@ -173,6 +186,7 @@ def create_cid_primitives(actor_id: Optional[str] = None):
@app.task(bind=True, name='tasks.run_stream')
def run_stream(
self,
run_id: str,
recipe_sexp: str,
output_name: str = "output.mp4",
duration: Optional[float] = None,
@@ -185,6 +199,7 @@ def run_stream(
Execute a streaming S-expression recipe.
Args:
run_id: The run ID for database tracking
recipe_sexp: The recipe S-expression content
output_name: Name for the output file
duration: Optional duration override (seconds)
@@ -197,7 +212,7 @@ def run_stream(
Dict with output_cid, output_path, and status
"""
task_id = self.request.id
logger.info(f"Starting stream task {task_id}")
logger.info(f"Starting stream task {task_id} for run {run_id}")
self.update_state(state='INITIALIZING', meta={'progress': 0})
@@ -237,8 +252,8 @@ def run_stream(
# Import the streaming interpreter
from streaming.stream_sexp_generic import StreamInterpreter
# Create interpreter
interp = StreamInterpreter(str(recipe_path))
# Create interpreter (pass actor_id for friendly name resolution)
interp = StreamInterpreter(str(recipe_path), actor_id=actor_id)
# Set primitive library directory explicitly
interp.primitive_lib_dir = sexp_effects_dir / "primitive_libs"
@@ -258,8 +273,17 @@ def run_stream(
logger.info(f"Rendering to {output_path}")
interp.run(duration=duration, output=str(output_path))
# Check for interpreter errors
if interp.errors:
error_msg = f"Rendering failed with {len(interp.errors)} errors: {interp.errors[0]}"
raise RuntimeError(error_msg)
self.update_state(state='CACHING', meta={'progress': 90})
# Validate output file (must be > 1KB to have actual frames)
if output_path.exists() and output_path.stat().st_size < 1024:
raise RuntimeError(f"Output file is too small ({output_path.stat().st_size} bytes) - rendering likely failed")
# Store output in cache
if output_path.exists():
cache_mgr = get_cache_manager()
@@ -271,16 +295,73 @@ def run_stream(
logger.info(f"Stream output cached: CID={cached_file.cid}, IPFS={ipfs_cid}")
# Save to database
import asyncio
import database
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
# Initialize database pool if needed
if database.pool is None:
loop.run_until_complete(database.init_db())
# Get recipe CID from pending_run
pending = loop.run_until_complete(database.get_pending_run(run_id))
recipe_cid = pending.get("recipe", "streaming") if pending else "streaming"
# Save to run_cache for completed runs
logger.info(f"Saving run {run_id} to run_cache with actor_id={actor_id}")
loop.run_until_complete(database.save_run_cache(
run_id=run_id,
output_cid=cached_file.cid,
recipe=recipe_cid,
inputs=[],
ipfs_cid=ipfs_cid,
actor_id=actor_id,
))
# Update pending run status
loop.run_until_complete(database.update_pending_run_status(
run_id=run_id,
status="completed",
))
logger.info(f"Saved run {run_id} to database with actor_id={actor_id}")
except Exception as db_err:
logger.warning(f"Failed to save run to database: {db_err}")
finally:
loop.close()
return {
"status": "completed",
"run_id": run_id,
"task_id": task_id,
"output_cid": cached_file.cid,
"ipfs_cid": ipfs_cid,
"output_path": str(cached_file.path),
}
else:
# Update pending run status to failed
import asyncio
import database
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
if database.pool is None:
loop.run_until_complete(database.init_db())
loop.run_until_complete(database.update_pending_run_status(
run_id=run_id,
status="failed",
error="Output file not created",
))
except Exception as db_err:
logger.warning(f"Failed to update run status: {db_err}")
finally:
loop.close()
return {
"status": "failed",
"run_id": run_id,
"task_id": task_id,
"error": "Output file not created",
}
@@ -290,8 +371,28 @@ def run_stream(
import traceback
traceback.print_exc()
# Update pending run status to failed
import asyncio
import database
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
if database.pool is None:
loop.run_until_complete(database.init_db())
loop.run_until_complete(database.update_pending_run_status(
run_id=run_id,
status="failed",
error=str(e),
))
except Exception as db_err:
logger.warning(f"Failed to update run status: {db_err}")
finally:
loop.close()
return {
"status": "failed",
"run_id": run_id,
"task_id": task_id,
"error": str(e),
}

View File

@@ -14,9 +14,9 @@
;; Usage:
;; (include :path "../templates/standard-effects.sexp")
(effect rotate :path "../sexp_effects/effects/rotate.sexp")
(effect zoom :path "../sexp_effects/effects/zoom.sexp")
(effect blend :path "../sexp_effects/effects/blend.sexp")
(effect ripple :path "../sexp_effects/effects/ripple.sexp")
(effect invert :path "../sexp_effects/effects/invert.sexp")
(effect hue_shift :path "../sexp_effects/effects/hue_shift.sexp")
(effect rotate :name "fx-rotate")
(effect zoom :name "fx-zoom")
(effect blend :name "fx-blend")
(effect ripple :name "fx-ripple")
(effect invert :name "fx-invert")
(effect hue_shift :name "fx-hue-shift")