Compare commits

427de25e13...main — 159 commits

SHA1:

ca4e86d07e 589ea77b98 f5ef9cddd2 6e8e8f8de9 a29841f3c5 b322e003be 146db1c60f fc9597456f
dbc4ece2cc 88ded8c927 48018d09b7 7411aa74c4 0534081e44 2f56ffc472 4647dd52c8 9a73dffaa6
5835344e30 3dda5f5f50 cdbc962b12 76e4a002a0 e2761798a8 81dc40534c 43d73c7bf7 d900df5aa0
13415fb420 d6c575760b 9a8a701492 b15e381f81 baf79f453f cd95e62899 514ee89cca 0a6dc0099b
180d6a874e 949d716d9a d5f30035da 4b0f1b0bcd 9583ecb81a 6ee8d72d24 ed617fcdd6 ef3638d3cf
dd169635ca 92f8e8a98c 48eed99a28 9ce64ea797 70530e5c92 76bf19b8ab 1bd171b892 e4349ba501
6e20d19a23 e64ca9fe3a def62de578 f858e25246 a162171025 234fbdbee2 1442216a15 b773689814
2d20a6f452 8b9309a90b 3b964ba18d 4d95ec5a32 ad1d7893f8 75f9d8fb11 b96e8ca4d2 8051ef9ba9
3adf927ca1 9bdad268a5 1cb9c3ac8a 36c4afeb84 b6292268fa 3a02fca7fd c4004b3f5d 41adf058bd
b7e3827fa2 771fb8cebc ef4bc24eda 0bd8ee71c7 9151d2c2a8 ed5ef2bf39 bbcb79cc1e 11bcafee55
b49d109a51 9096824444 fe6730ce72 6ea39d633b 0847b733a9 7009840712 92eeb58c71 2c1728c6ce
6e0ee65e40 3116a70c3e 09d5359725 4930eb99ad 86830019ad 5bc655f8c8 8f4d6d12bc 82599eff1e
a57be27907 9302283e86 487acdd606 6b2991bf24 3ec045c533 3bff130e57 414cbddd66 89b2fd3d2e
3d5a08a7dc d8360e5945 44066e9bdd b2be9ff976 32474380fa 5bd055f031 cbdae70b66 2d520cf256
e6dd6e851c 2081092ce8 d20eef76ad 581da68b3b bb458aa924 270eeb3fcf f290c9f01e 396bacc89b
b599b59d44 cf83952a19 ca2d4a17a4 4f3eccd4d3 d685518c4c 529c173722 d7d7cd28c2 c46fcd2308
2c3f943e5a 7813eb081a 0f4817e3a8 3ee4dc1efb c5d70f61c8 c3d6427883 ad15ef1ce7 164f1291ac
59c72500ac 84d465b264 f28edf7ab6 be4d0da84f bfe96a431c e1c0ebc0a2 6c4b850487 d08fbfc0bd
bf188f4671 d603485d40 2c27eacb12 1ad8fe9890 3e3df6ff2a faf794ef35 a4bf0eae24 9a1ed2adea
f67aacdceb da4e2e9d3d f7fa683bcf 6c973203fc 7e38b4a0c8 8bf6f87c2a abe89c9177

.dockerignore — new file (+22)

@@ -0,0 +1,22 @@
# Don't copy local clones - Dockerfile will clone fresh
artdag-effects/

# Python cache
__pycache__/
*.py[cod]
*.egg-info/
.pytest_cache/

# Virtual environments
.venv/
venv/

# Local env
.env

# Git
.git/

# IDE
.vscode/
.idea/

.env.example (+10)

@@ -1,5 +1,15 @@
# L1 Server Configuration

# PostgreSQL password (REQUIRED - no default)
POSTGRES_PASSWORD=changeme-generate-with-openssl-rand-hex-16

# Admin token for purge operations (REQUIRED - no default)
# Generate with: openssl rand -hex 32
ADMIN_TOKEN=changeme-generate-with-openssl-rand-hex-32

# L1 host IP/hostname for GPU worker cross-VPC access
L1_HOST=your-l1-server-ip

# This L1 server's public URL (sent to L2 when publishing)
L1_PUBLIC_URL=https://l1.artdag.rose-ash.com

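For the two REQUIRED values above, the comments suggest `openssl rand -hex N`; a Python equivalent, as a sketch — it only prints candidate values, nothing is written to `.env`:

```python
import secrets

# Same entropy as `openssl rand -hex 16` / `openssl rand -hex 32`
print("POSTGRES_PASSWORD=" + secrets.token_hex(16))
print("ADMIN_TOKEN=" + secrets.token_hex(32))
```
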
.env.gpu — new file (+11)

@@ -0,0 +1,11 @@
# GPU worker env - connects to L1 host via public IP (cross-VPC)
REDIS_URL=redis://138.68.142.139:16379/5
DATABASE_URL=postgresql://artdag:f960bcc61d8b2155a1d57f7dd72c1c58@138.68.142.139:15432/artdag
IPFS_API=/ip4/138.68.142.139/tcp/15001
IPFS_GATEWAYS=https://ipfs.io,https://cloudflare-ipfs.com,https://dweb.link
IPFS_GATEWAY_URL=https://celery-artdag.rose-ash.com/ipfs
CACHE_DIR=/data/cache
C_FORCE_ROOT=true
ARTDAG_CLUSTER_KEY=
NVIDIA_VISIBLE_DEVICES=all
STREAMING_GPU_PERSIST=0

.gitea/workflows/ci.yml — new file (+62)

@@ -0,0 +1,62 @@
name: Build and Deploy

on:
  push:
    branches: [main]

env:
  REGISTRY: registry.rose-ash.com:5000
  IMAGE_CPU: celery-l1-server

jobs:
  build-and-deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install tools
        run: |
          apt-get update && apt-get install -y --no-install-recommends openssh-client

      - name: Set up SSH
        env:
          SSH_KEY: ${{ secrets.DEPLOY_SSH_KEY }}
          DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
        run: |
          mkdir -p ~/.ssh
          echo "$SSH_KEY" > ~/.ssh/id_rsa
          chmod 600 ~/.ssh/id_rsa
          ssh-keyscan -H "$DEPLOY_HOST" >> ~/.ssh/known_hosts 2>/dev/null || true

      - name: Pull latest code on server
        env:
          DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
        run: |
          ssh "root@$DEPLOY_HOST" "
            cd /root/art-dag/celery
            git fetch origin main
            git reset --hard origin/main
          "

      - name: Build and push image
        env:
          DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
        run: |
          ssh "root@$DEPLOY_HOST" "
            cd /root/art-dag/celery
            docker build --build-arg CACHEBUST=\$(date +%s) -t ${{ env.REGISTRY }}/${{ env.IMAGE_CPU }}:latest -t ${{ env.REGISTRY }}/${{ env.IMAGE_CPU }}:${{ github.sha }} .
            docker push ${{ env.REGISTRY }}/${{ env.IMAGE_CPU }}:latest
            docker push ${{ env.REGISTRY }}/${{ env.IMAGE_CPU }}:${{ github.sha }}
          "

      - name: Deploy stack
        env:
          DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
        run: |
          ssh "root@$DEPLOY_HOST" "
            cd /root/art-dag/celery
            docker stack deploy -c docker-compose.yml celery
            echo 'Waiting for services to update...'
            sleep 10
            docker stack services celery
          "

.gitignore (vendored, +1)

@@ -5,3 +5,4 @@ __pycache__/
 .venv/
 venv/
 .env
+artdag-effects/

@@ -23,6 +23,7 @@ RUN ./build-client.sh
RUN mkdir -p /data/cache

ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
ENV EFFECTS_PATH=/app/artdag-effects
ENV PYTHONPATH=/app

Dockerfile.gpu — new file (+98)

@@ -0,0 +1,98 @@
# GPU-enabled worker image
# Multi-stage build: use devel image for compiling, runtime for final image

# Stage 1: Build decord with CUDA
FROM nvidia/cuda:12.1.1-cudnn8-devel-ubuntu22.04 AS builder

RUN apt-get update && apt-get install -y --no-install-recommends \
    python3.11 \
    python3.11-venv \
    python3.11-dev \
    python3-pip \
    git \
    cmake \
    build-essential \
    pkg-config \
    libavcodec-dev \
    libavformat-dev \
    libavutil-dev \
    libavdevice-dev \
    libavfilter-dev \
    libswresample-dev \
    libswscale-dev \
    && rm -rf /var/lib/apt/lists/* \
    && ln -sf /usr/bin/python3.11 /usr/bin/python3 \
    && ln -sf /usr/bin/python3 /usr/bin/python

# Download Video Codec SDK headers for NVDEC/NVCUVID
RUN git clone https://github.com/FFmpeg/nv-codec-headers.git /tmp/nv-codec-headers && \
    cd /tmp/nv-codec-headers && make install && rm -rf /tmp/nv-codec-headers

# Create stub for libnvcuvid (real library comes from driver at runtime)
RUN echo 'void* __nvcuvid_stub__;' | gcc -shared -x c - -o /usr/local/cuda/lib64/libnvcuvid.so

# Build decord with CUDA support
RUN git clone --recursive https://github.com/dmlc/decord /tmp/decord && \
    cd /tmp/decord && \
    mkdir build && cd build && \
    cmake .. -DUSE_CUDA=ON -DCMAKE_BUILD_TYPE=Release \
        -DCMAKE_CUDA_ARCHITECTURES="70;75;80;86;89;90" && \
    make -j$(nproc) && \
    cd ../python && pip install --target=/decord-install .

# Stage 2: Runtime image
FROM nvidia/cuda:12.1.1-cudnn8-runtime-ubuntu22.04

WORKDIR /app

# Install Python 3.11 and system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3.11 \
    python3.11-venv \
    python3-pip \
    git \
    ffmpeg \
    && rm -rf /var/lib/apt/lists/* \
    && ln -sf /usr/bin/python3.11 /usr/bin/python3 \
    && ln -sf /usr/bin/python3 /usr/bin/python

# Upgrade pip
RUN python3 -m pip install --upgrade pip

# Install CPU dependencies first
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Install GPU-specific dependencies (CuPy for CUDA 12.x)
RUN pip install --no-cache-dir cupy-cuda12x

# Install PyNvVideoCodec for zero-copy GPU encoding
RUN pip install --no-cache-dir PyNvVideoCodec

# Copy decord from builder stage
COPY --from=builder /decord-install /usr/local/lib/python3.11/dist-packages/
COPY --from=builder /tmp/decord/build/libdecord.so /usr/local/lib/
RUN ldconfig

# Clone effects repo (before COPY so it gets cached)
RUN git clone https://git.rose-ash.com/art-dag/effects.git /app/artdag-effects

# Copy application (this invalidates cache for any code change)
COPY . .

# Create cache directory
RUN mkdir -p /data/cache

ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
ENV EFFECTS_PATH=/app/artdag-effects
ENV PYTHONPATH=/app
# GPU persistence enabled - frames stay on GPU throughout pipeline
ENV STREAMING_GPU_PERSIST=1
# Preload libnvcuvid for decord NVDEC GPU decode
ENV LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libnvcuvid.so
# Use cluster's public IPFS gateway for HLS segment URLs
ENV IPFS_GATEWAY_URL=https://celery-artdag.rose-ash.com/ipfs

# Default command runs celery worker
CMD ["celery", "-A", "celery_app", "worker", "--loglevel=info", "-E", "-Q", "gpu,celery"]

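A quick smoke test for the GPU pieces this image assembles — a sketch, assuming it runs inside the container with GPUs attached (`docker run --gpus all ...`); `sample.mp4` is a placeholder path, and the decord/CuPy calls are their stock public APIs, not anything specific to this repo:

```python
import cupy as cp
import decord

# CuPy should see at least one CUDA device when run with --gpus all
print("CUDA devices:", cp.cuda.runtime.getDeviceCount())

# decord built with -DUSE_CUDA=ON exposes a GPU context; opening a reader on it
# exercises the NVDEC decode path that the libnvcuvid LD_PRELOAD enables.
reader = decord.VideoReader("sample.mp4", ctx=decord.gpu(0))  # placeholder file
print("first frame:", reader[0].shape)
```
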
@@ -60,7 +60,7 @@ The stack includes:
 | `HOST` | `0.0.0.0` | Server bind address |
 | `PORT` | `8000` | Server port |
 | `REDIS_URL` | `redis://localhost:6379/5` | Redis connection |
-| `DATABASE_URL` | `postgresql://artdag:artdag@localhost:5432/artdag` | PostgreSQL connection |
+| `DATABASE_URL` | **(required)** | PostgreSQL connection |
 | `CACHE_DIR` | `~/.artdag/cache` | Local cache directory |
 | `IPFS_API` | `/dns/localhost/tcp/5001` | IPFS API multiaddr |
 | `IPFS_GATEWAY_URL` | `https://ipfs.io/ipfs` | Public IPFS gateway |

@@ -2,9 +2,11 @@
 L1 Server Configuration.

 Environment-based configuration with sensible defaults.
+All config should go through this module - no direct os.environ calls elsewhere.
 """

 import os
+import sys
 from pathlib import Path
 from dataclasses import dataclass, field
 from typing import Optional

@@ -31,9 +33,7 @@ class Settings:

     # Database
     database_url: str = field(
-        default_factory=lambda: os.environ.get(
-            "DATABASE_URL", "postgresql://artdag:artdag@localhost:5432/artdag"
-        )
+        default_factory=lambda: os.environ.get("DATABASE_URL", "")
     )

     # IPFS

@@ -52,6 +52,16 @@ class Settings:
         default_factory=lambda: os.environ.get("L2_DOMAIN")
     )

+    # GPU/Streaming settings
+    streaming_gpu_persist: bool = field(
+        default_factory=lambda: os.environ.get("STREAMING_GPU_PERSIST", "0") == "1"
+    )
+    ipfs_gateways: str = field(
+        default_factory=lambda: os.environ.get(
+            "IPFS_GATEWAYS", "https://ipfs.io,https://cloudflare-ipfs.com,https://dweb.link"
+        )
+    )
+
     # Derived paths
     @property
     def plan_cache_dir(self) -> Path:

@@ -68,5 +78,26 @@ class Settings:
         self.analysis_cache_dir.mkdir(parents=True, exist_ok=True)

+    def log_config(self, logger=None) -> None:
+        """Log all configuration values for debugging."""
+        output = logger.info if logger else lambda x: print(x, file=sys.stderr)
+        output("=" * 60)
+        output("CONFIGURATION")
+        output("=" * 60)
+        output(f"  cache_dir: {self.cache_dir}")
+        output(f"  redis_url: {self.redis_url}")
+        output(f"  database_url: {self.database_url[:50]}...")
+        output(f"  ipfs_api: {self.ipfs_api}")
+        output(f"  ipfs_gateway_url: {self.ipfs_gateway_url}")
+        output(f"  ipfs_gateways: {self.ipfs_gateways[:50]}...")
+        output(f"  streaming_gpu_persist: {self.streaming_gpu_persist}")
+        output(f"  l2_server: {self.l2_server}")
+        output("=" * 60)
+

 # Singleton settings instance
 settings = Settings()
+
+# Log config on import if DEBUG or SHOW_CONFIG is set
+if os.environ.get("DEBUG") or os.environ.get("SHOW_CONFIG"):
+    settings.log_config()

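To see the dump in practice — a sketch; `config` refers to the settings module shown above, though its actual import path inside the repo may differ:

```python
import os

# Must be set before the module is first imported - the check runs at import time
os.environ["SHOW_CONFIG"] = "1"

import config  # prints the CONFIGURATION block to stderr via settings.log_config()
```
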
@@ -8,7 +8,7 @@ import logging
 from pathlib import Path
 from typing import Optional, Dict, Any

-from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File
+from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
 from fastapi.responses import HTMLResponse, FileResponse
 from pydantic import BaseModel

@@ -208,13 +208,95 @@ async def import_from_ipfs(
     return {"cid": cid, "imported": True}


-@router.post("/upload")
-async def upload_content(
-    file: UploadFile = File(...),
+@router.post("/upload/chunk")
+async def upload_chunk(
+    request: Request,
+    chunk: UploadFile = File(...),
+    upload_id: str = Form(...),
+    chunk_index: int = Form(...),
+    total_chunks: int = Form(...),
+    filename: str = Form(...),
+    display_name: Optional[str] = Form(None),
     ctx: UserContext = Depends(require_auth),
     cache_service: CacheService = Depends(get_cache_service),
 ):
-    """Upload content to cache and IPFS."""
+    """Upload a file chunk. Assembles file when all chunks received."""
+    import tempfile
+    import os
+
+    # Create temp dir for this upload
+    chunk_dir = Path(tempfile.gettempdir()) / "uploads" / upload_id
+    chunk_dir.mkdir(parents=True, exist_ok=True)
+
+    # Save this chunk
+    chunk_path = chunk_dir / f"chunk_{chunk_index:05d}"
+    chunk_data = await chunk.read()
+    chunk_path.write_bytes(chunk_data)
+
+    # Check if all chunks received
+    received = len(list(chunk_dir.glob("chunk_*")))
+
+    if received < total_chunks:
+        return {"status": "partial", "received": received, "total": total_chunks}
+
+    # All chunks received - assemble file
+    final_path = chunk_dir / filename
+    with open(final_path, 'wb') as f:
+        for i in range(total_chunks):
+            cp = chunk_dir / f"chunk_{i:05d}"
+            f.write(cp.read_bytes())
+            cp.unlink()  # Clean up chunk
+
+    # Read assembled file
+    content = final_path.read_bytes()
+    final_path.unlink()
+    chunk_dir.rmdir()
+
+    # Now do the normal upload flow
+    cid, ipfs_cid, error = await cache_service.upload_content(
+        content=content,
+        filename=filename,
+        actor_id=ctx.actor_id,
+    )
+
+    if error:
+        raise HTTPException(400, error)
+
+    # Assign friendly name
+    final_cid = ipfs_cid or cid
+    from ..services.naming_service import get_naming_service
+    naming = get_naming_service()
+    friendly_entry = await naming.assign_name(
+        cid=final_cid,
+        actor_id=ctx.actor_id,
+        item_type="media",
+        display_name=display_name,
+        filename=filename,
+    )
+
+    return {
+        "status": "complete",
+        "cid": final_cid,
+        "friendly_name": friendly_entry["friendly_name"],
+        "filename": filename,
+        "size": len(content),
+        "uploaded": True,
+    }
+
+
+@router.post("/upload")
+async def upload_content(
+    file: UploadFile = File(...),
+    display_name: Optional[str] = Form(None),
+    ctx: UserContext = Depends(require_auth),
+    cache_service: CacheService = Depends(get_cache_service),
+):
+    """Upload content to cache and IPFS.
+
+    Args:
+        file: The file to upload
+        display_name: Optional custom name for the media (used as friendly name)
+    """
     content = await file.read()
     cid, ipfs_cid, error = await cache_service.upload_content(
         content=content,

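For reference, a client-side sketch of driving this chunk protocol. Only the `/cache/upload/chunk` path (the router is mounted under `/cache`, per the HTMX forms later in this file) and its form fields come from the endpoint; the base URL, chunk size, and absence of auth handling are illustrative assumptions:

```python
import math
import uuid
from pathlib import Path

import httpx

CHUNK_SIZE = 8 * 1024 * 1024  # arbitrary client-side choice

def upload_in_chunks(path: str, base_url: str = "http://localhost:8000") -> dict:
    data = Path(path).read_bytes()
    total = max(1, math.ceil(len(data) / CHUNK_SIZE))
    upload_id = uuid.uuid4().hex  # groups this file's chunks server-side

    with httpx.Client(base_url=base_url) as client:
        for i in range(total):
            part = data[i * CHUNK_SIZE:(i + 1) * CHUNK_SIZE]
            resp = client.post(
                "/cache/upload/chunk",
                files={"chunk": (f"chunk_{i}", part)},
                data={
                    "upload_id": upload_id,
                    "chunk_index": str(i),
                    "total_chunks": str(total),
                    "filename": Path(path).name,
                },
            )
            resp.raise_for_status()
    return resp.json()  # {"status": "partial", ...} until the last chunk lands
```
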
@@ -233,6 +315,7 @@ async def upload_content(
         cid=final_cid,
         actor_id=ctx.actor_id,
         item_type="media",
+        display_name=display_name,  # Use custom name if provided
         filename=file.filename,
     )

@@ -350,3 +433,83 @@ async def update_metadata_htmx(
         <div class="text-green-400 mb-4">Metadata saved!</div>
         <script>setTimeout(() => location.reload(), 1000);</script>
     ''')
+
+
+# Friendly name editing
+@router.get("/{cid}/name-form", response_class=HTMLResponse)
+async def get_name_form(
+    cid: str,
+    request: Request,
+    cache_service: CacheService = Depends(get_cache_service),
+):
+    """Get friendly name editing form (HTMX)."""
+    ctx = await get_current_user(request)
+    if not ctx:
+        return HTMLResponse('<div class="text-red-400">Login required</div>')
+
+    # Get current friendly name
+    from ..services.naming_service import get_naming_service
+    naming = get_naming_service()
+    entry = await naming.get_by_cid(ctx.actor_id, cid)
+    current_name = entry.get("base_name", "") if entry else ""
+
+    return HTMLResponse(f'''
+        <form hx-post="/cache/{cid}/name"
+              hx-target="#friendly-name-section"
+              hx-swap="innerHTML"
+              class="space-y-3">
+            <div>
+                <label class="block text-gray-400 text-sm mb-1">Friendly Name</label>
+                <input type="text" name="display_name" value="{current_name}"
+                       placeholder="e.g., my-background-video"
+                       class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
+                <p class="text-gray-500 text-xs mt-1">A name to reference this media in recipes</p>
+            </div>
+            <div class="flex space-x-2">
+                <button type="submit"
+                        class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
+                    Save
+                </button>
+                <button type="button"
+                        onclick="location.reload()"
+                        class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
+                    Cancel
+                </button>
+            </div>
+        </form>
+    ''')
+
+
+@router.post("/{cid}/name", response_class=HTMLResponse)
+async def update_friendly_name(
+    cid: str,
+    request: Request,
+):
+    """Update friendly name (HTMX form handler)."""
+    ctx = await get_current_user(request)
+    if not ctx:
+        return HTMLResponse('<div class="text-red-400">Login required</div>')
+
+    form_data = await request.form()
+    display_name = form_data.get("display_name", "").strip()
+
+    if not display_name:
+        return HTMLResponse('<div class="text-red-400">Name cannot be empty</div>')
+
+    from ..services.naming_service import get_naming_service
+    naming = get_naming_service()
+
+    try:
+        entry = await naming.assign_name(
+            cid=cid,
+            actor_id=ctx.actor_id,
+            item_type="media",
+            display_name=display_name,
+        )
+
+        return HTMLResponse(f'''
+            <div class="text-green-400 mb-2">Name updated!</div>
+            <script>setTimeout(() => location.reload(), 1000);</script>
+        ''')
+    except Exception as e:
+        return HTMLResponse(f'<div class="text-red-400">Error: {e}</div>')

@@ -2,17 +2,17 @@
 Effects routes for L1 server.

 Handles effect upload, listing, and metadata.
-Effects are stored in IPFS like all other content-addressed data.
+Effects are S-expression files stored in IPFS like all other content-addressed data.
 """

 import hashlib
 import json
 import logging
 import re
 import time
 from pathlib import Path
 from typing import Optional

-from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File
+from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
 from fastapi.responses import HTMLResponse, PlainTextResponse

 from artdag_common import render

@@ -40,12 +40,11 @@ def get_effects_dir() -> Path:

 def parse_effect_metadata(source: str) -> dict:
     """
-    Parse effect metadata from source code.
+    Parse effect metadata from S-expression source code.

-    Extracts PEP 723 dependencies and @-tag metadata from docstring.
+    Extracts metadata from comment headers (;; @key value format)
+    or from (defeffect name ...) form.
     """
-    import re

     metadata = {
         "name": "",
         "version": "1.0.0",

@@ -53,97 +52,54 @@ def parse_effect_metadata(source: str) -> dict:
         "temporal": False,
         "description": "",
         "params": [],
-        "dependencies": [],
-        "requires_python": ">=3.10",
     }

-    # Parse PEP 723 dependencies
-    pep723_match = re.search(r"# /// script\n(.*?)# ///", source, re.DOTALL)
-    if pep723_match:
-        block = pep723_match.group(1)
-        deps_match = re.search(r'# dependencies = \[(.*?)\]', block, re.DOTALL)
-        if deps_match:
-            metadata["dependencies"] = re.findall(r'"([^"]+)"', deps_match.group(1))
-        python_match = re.search(r'# requires-python = "([^"]+)"', block)
-        if python_match:
-            metadata["requires_python"] = python_match.group(1)
-
-    # Parse docstring @-tags
-    docstring_match = re.search(r'"""(.*?)"""', source, re.DOTALL)
-    if not docstring_match:
-        docstring_match = re.search(r"'''(.*?)'''", source, re.DOTALL)
-
-    if docstring_match:
-        docstring = docstring_match.group(1)
-        lines = docstring.split("\n")
-
-        current_param = None
-        desc_lines = []
-        in_description = False
-
-        for line in lines:
-            stripped = line.strip()
-
-            if stripped.startswith("@effect "):
-                metadata["name"] = stripped[8:].strip()
-                in_description = False
-
-            elif stripped.startswith("@version "):
-                metadata["version"] = stripped[9:].strip()
-
-            elif stripped.startswith("@author "):
-                metadata["author"] = stripped[8:].strip()
-
-            elif stripped.startswith("@temporal "):
-                val = stripped[10:].strip().lower()
-                metadata["temporal"] = val in ("true", "yes", "1")
-
-            elif stripped.startswith("@description"):
-                in_description = True
-                desc_lines = []
-
-            elif stripped.startswith("@param "):
-                in_description = False
-                if current_param:
-                    metadata["params"].append(current_param)
-                parts = stripped[7:].split()
-                if len(parts) >= 2:
-                    current_param = {
-                        "name": parts[0],
-                        "type": parts[1],
-                        "description": "",
-                    }
-                else:
-                    current_param = None
-
-            elif stripped.startswith("@range ") and current_param:
-                range_parts = stripped[7:].split()
-                if len(range_parts) >= 2:
-                    try:
-                        current_param["range"] = [float(range_parts[0]), float(range_parts[1])]
-                    except ValueError:
-                        pass
-
-            elif stripped.startswith("@default ") and current_param:
-                current_param["default"] = stripped[9:].strip()
-
-            elif stripped.startswith("@example"):
-                in_description = False
-                if current_param:
-                    metadata["params"].append(current_param)
-                current_param = None
-
-            elif in_description and stripped:
-                desc_lines.append(stripped)
-
-            elif current_param and stripped and not stripped.startswith("@"):
-                current_param["description"] = stripped
-
-        if in_description:
-            metadata["description"] = " ".join(desc_lines)
-
-        if current_param:
-            metadata["params"].append(current_param)
+    # Parse comment-based metadata (;; @key value)
+    for line in source.split("\n"):
+        stripped = line.strip()
+        if not stripped.startswith(";"):
+            # Stop parsing metadata at first non-comment line
+            if stripped and not stripped.startswith("("):
+                continue
+            if stripped.startswith("("):
+                break
+
+        # Remove comment prefix
+        comment = stripped.lstrip(";").strip()
+
+        if comment.startswith("@effect "):
+            metadata["name"] = comment[8:].strip()
+        elif comment.startswith("@name "):
+            metadata["name"] = comment[6:].strip()
+        elif comment.startswith("@version "):
+            metadata["version"] = comment[9:].strip()
+        elif comment.startswith("@author "):
+            metadata["author"] = comment[8:].strip()
+        elif comment.startswith("@temporal"):
+            val = comment[9:].strip().lower() if len(comment) > 9 else "true"
+            metadata["temporal"] = val in ("true", "yes", "1", "")
+        elif comment.startswith("@description "):
+            metadata["description"] = comment[13:].strip()
+        elif comment.startswith("@param "):
+            # Format: @param name type [description]
+            parts = comment[7:].split(None, 2)
+            if len(parts) >= 2:
+                param = {"name": parts[0], "type": parts[1]}
+                if len(parts) > 2:
+                    param["description"] = parts[2]
+                metadata["params"].append(param)
+
+    # Also try to extract name from (defeffect "name" ...) or (effect "name" ...)
+    if not metadata["name"]:
+        name_match = re.search(r'\((defeffect|effect)\s+"([^"]+)"', source)
+        if name_match:
+            metadata["name"] = name_match.group(2)
+
+    # Try to extract name from first (define ...) form
+    if not metadata["name"]:
+        define_match = re.search(r'\(define\s+(\w+)', source)
+        if define_match:
+            metadata["name"] = define_match.group(1)

     return metadata

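As an illustration of the header format this parser expects — hedged: the effect name, tags, and body below are invented for the example; the parsing behavior is just what the function above implements:

```python
sample = """\
;; @effect glitch-wave
;; @version 1.2.0
;; @author rose
;; @temporal true
;; @description Horizontal sine displacement per scanline
;; @param amplitude float displacement strength in pixels
;; @param speed float oscillation speed
(defeffect "glitch-wave" ...)
"""

meta = parse_effect_metadata(sample)
assert meta["name"] == "glitch-wave"
assert meta["temporal"] is True
assert [p["name"] for p in meta["params"]] == ["amplitude", "speed"]
```
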
@@ -151,13 +107,18 @@ def parse_effect_metadata(source: str) -> dict:
 @router.post("/upload")
 async def upload_effect(
     file: UploadFile = File(...),
+    display_name: Optional[str] = Form(None),
     ctx: UserContext = Depends(require_auth),
 ):
     """
-    Upload an effect to IPFS.
+    Upload an S-expression effect to IPFS.

-    Parses PEP 723 metadata and @-tag docstring.
+    Parses metadata from comment headers.
     Returns IPFS CID for use in recipes.
+
+    Args:
+        file: The .sexp effect file
+        display_name: Optional custom friendly name for the effect
     """
     content = await file.read()

@@ -166,7 +127,7 @@ async def upload_effect(
     except UnicodeDecodeError:
         raise HTTPException(400, "Effect must be valid UTF-8 text")

-    # Parse metadata
+    # Parse metadata from sexp source
     try:
         meta = parse_effect_metadata(source)
     except Exception as e:

@@ -185,7 +146,7 @@ async def upload_effect(
     effects_dir = get_effects_dir()
     effect_dir = effects_dir / cid
     effect_dir.mkdir(parents=True, exist_ok=True)
-    (effect_dir / "effect.py").write_text(source, encoding="utf-8")
+    (effect_dir / "effect.sexp").write_text(source, encoding="utf-8")

     # Store metadata (locally and in IPFS)
     full_meta = {

@@ -200,14 +161,23 @@ async def upload_effect(
     # Also store metadata in IPFS for discoverability
     meta_cid = ipfs_client.add_json(full_meta)

-    # Assign friendly name
+    # Track ownership in item_types
+    import database
+    await database.save_item_metadata(
+        cid=cid,
+        actor_id=ctx.actor_id,
+        item_type="effect",
+        filename=file.filename,
+    )
+
+    # Assign friendly name (use custom display_name if provided, else from metadata)
     from ..services.naming_service import get_naming_service
     naming = get_naming_service()
     friendly_entry = await naming.assign_name(
         cid=cid,
         actor_id=ctx.actor_id,
         item_type="effect",
-        display_name=meta.get("name"),
+        display_name=display_name or meta.get("name"),
         filename=file.filename,
     )

@@ -221,7 +191,6 @@ async def upload_effect(
         "version": meta.get("version"),
         "temporal": meta.get("temporal", False),
         "params": meta.get("params", []),
-        "dependencies": meta.get("dependencies", []),
         "uploaded": True,
     }

@@ -249,7 +218,7 @@ async def get_effect(
     # Cache locally
     effect_dir.mkdir(parents=True, exist_ok=True)
     source = source_bytes.decode("utf-8")
-    (effect_dir / "effect.py").write_text(source)
+    (effect_dir / "effect.sexp").write_text(source)

     # Parse metadata from source
     parsed_meta = parse_effect_metadata(source)

@@ -288,12 +257,16 @@ async def get_effect_source(
 ):
     """Get effect source code."""
     effects_dir = get_effects_dir()
-    source_path = effects_dir / cid / "effect.py"
+    source_path = effects_dir / cid / "effect.sexp"

-    # Try local cache first
+    # Try local cache first (check both .sexp and legacy .py)
     if source_path.exists():
         return PlainTextResponse(source_path.read_text())

+    legacy_path = effects_dir / cid / "effect.py"
+    if legacy_path.exists():
+        return PlainTextResponse(legacy_path.read_text())
+
     # Fetch from IPFS
     source_bytes = ipfs_client.get_bytes(cid)
     if not source_bytes:

@@ -314,31 +287,33 @@ async def list_effects(
     limit: int = 20,
     ctx: UserContext = Depends(require_auth),
 ):
-    """List uploaded effects with pagination."""
+    """List user's effects with pagination."""
+    import database
     effects_dir = get_effects_dir()
     effects = []

+    # Get user's effect CIDs from item_types
+    user_items = await database.get_user_items(ctx.actor_id, item_type="effect", limit=1000)
+    effect_cids = [item["cid"] for item in user_items]
+
     # Get naming service for friendly name lookup
     from ..services.naming_service import get_naming_service
     naming = get_naming_service()

-    if effects_dir.exists():
-        for effect_dir in effects_dir.iterdir():
-            if effect_dir.is_dir():
-                metadata_path = effect_dir / "metadata.json"
-                if metadata_path.exists():
-                    try:
-                        meta = json.loads(metadata_path.read_text())
-                        # Add friendly name if available
-                        cid = meta.get("cid")
-                        if cid:
-                            friendly = await naming.get_by_cid(ctx.actor_id, cid)
-                            if friendly:
-                                meta["friendly_name"] = friendly["friendly_name"]
-                                meta["base_name"] = friendly["base_name"]
-                        effects.append(meta)
-                    except json.JSONDecodeError:
-                        pass
+    for cid in effect_cids:
+        effect_dir = effects_dir / cid
+        metadata_path = effect_dir / "metadata.json"
+        if metadata_path.exists():
+            try:
+                meta = json.loads(metadata_path.read_text())
+                # Add friendly name if available
+                friendly = await naming.get_by_cid(ctx.actor_id, cid)
+                if friendly:
+                    meta["friendly_name"] = friendly["friendly_name"]
+                    meta["base_name"] = friendly["base_name"]
+                effects.append(meta)
+            except json.JSONDecodeError:
+                pass

     # Sort by upload time (newest first)
     effects.sort(key=lambda e: e.get("uploaded_at", ""), reverse=True)

@@ -412,25 +387,29 @@ async def delete_effect(
     cid: str,
     ctx: UserContext = Depends(require_auth),
 ):
-    """Delete an effect from local cache (IPFS content is immutable)."""
-    effects_dir = get_effects_dir()
-    effect_dir = effects_dir / cid
+    """Remove user's ownership link to an effect."""
+    import database

-    if not effect_dir.exists():
-        raise HTTPException(404, f"Effect {cid[:16]}... not found in local cache")
+    # Remove user's ownership link from item_types
+    await database.delete_item_type(cid, ctx.actor_id, "effect")

-    # Check ownership
-    metadata_path = effect_dir / "metadata.json"
-    if metadata_path.exists():
-        meta = json.loads(metadata_path.read_text())
-        if meta.get("uploader") != ctx.actor_id:
-            raise HTTPException(403, "Can only delete your own effects")
+    # Remove friendly name
+    await database.delete_friendly_name(ctx.actor_id, cid)

-    import shutil
-    shutil.rmtree(effect_dir)
+    # Check if anyone still owns this effect
+    remaining_owners = await database.get_item_types(cid)

-    # Unpin from IPFS (content remains available if pinned elsewhere)
-    ipfs_client.unpin(cid)
+    # Only delete local files if no one owns it anymore
+    if not remaining_owners:
+        effects_dir = get_effects_dir()
+        effect_dir = effects_dir / cid
+        if effect_dir.exists():
+            import shutil
+            shutil.rmtree(effect_dir)

-    logger.info(f"Deleted effect {cid[:16]}... by {ctx.actor_id}")
-    return {"deleted": True, "note": "Unpinned from local IPFS; content may still exist on other nodes"}
+        # Unpin from IPFS
+        ipfs_client.unpin(cid)
+        logger.info(f"Garbage collected effect {cid[:16]}... (no remaining owners)")
+
+    logger.info(f"Removed effect {cid[:16]}... ownership for {ctx.actor_id}")
+    return {"deleted": True}

@@ -19,21 +19,23 @@ router = APIRouter()
 async def get_user_stats(actor_id: str) -> dict:
     """Get stats for a user."""
     import database
-    from ..services.recipe_service import RecipeService
     from ..services.run_service import RunService
     from ..dependencies import get_redis_client, get_cache_manager

     stats = {}

     try:
-        stats["media"] = await database.count_user_items(actor_id)
+        # Count only actual media types (video, image, audio), not effects/recipes
+        media_count = 0
+        for media_type in ["video", "image", "audio", "unknown"]:
+            media_count += await database.count_user_items(actor_id, item_type=media_type)
+        stats["media"] = media_count
     except Exception:
         stats["media"] = 0

     try:
-        recipe_service = RecipeService(get_redis_client(), get_cache_manager())
-        recipes = await recipe_service.list_recipes(actor_id)
-        stats["recipes"] = len(recipes)
+        # Count user's recipes from database (ownership-based)
+        stats["recipes"] = await database.count_user_items(actor_id, item_type="recipe")
     except Exception:
         stats["recipes"] = 0

@@ -51,11 +53,8 @@ async def get_user_stats(actor_id: str) -> dict:
         stats["storage"] = 0

     try:
-        effects_dir = Path(get_cache_manager().cache_dir) / "_effects"
-        if effects_dir.exists():
-            stats["effects"] = len([d for d in effects_dir.iterdir() if d.is_dir()])
-        else:
-            stats["effects"] = 0
+        # Count user's effects from database (ownership-based)
+        stats["effects"] = await database.count_user_items(actor_id, item_type="effect")
     except Exception:
         stats["effects"] = 0

@@ -132,35 +131,56 @@ async def clear_user_data(request: Request):
     except Exception as e:
         errors.append(f"Failed to list recipes: {e}")

-    # Delete all effects
+    # Delete all effects (uses ownership model)
+    cache_manager = get_cache_manager()
     try:
-        cache_manager = get_cache_manager()
-        effects_dir = Path(cache_manager.cache_dir) / "_effects"
-        if effects_dir.exists():
-            import shutil
-            for effect_dir in effects_dir.iterdir():
-                if effect_dir.is_dir():
-                    try:
-                        shutil.rmtree(effect_dir)
-                        deleted["effects"] += 1
-                    except Exception as e:
-                        errors.append(f"Effect {effect_dir.name}: {e}")
+        # Get user's effects from item_types
+        effect_items = await database.get_user_items(actor_id, item_type="effect", limit=10000)
+        for item in effect_items:
+            cid = item.get("cid")
+            if cid:
+                try:
+                    # Remove ownership link
+                    await database.delete_item_type(cid, actor_id, "effect")
+                    await database.delete_friendly_name(actor_id, cid)
+
+                    # Check if orphaned
+                    remaining = await database.get_item_types(cid)
+                    if not remaining:
+                        # Garbage collect
+                        effects_dir = Path(cache_manager.cache_dir) / "_effects" / cid
+                        if effects_dir.exists():
+                            import shutil
+                            shutil.rmtree(effects_dir)
+                        import ipfs_client
+                        ipfs_client.unpin(cid)
+                    deleted["effects"] += 1
+                except Exception as e:
+                    errors.append(f"Effect {cid[:16]}...: {e}")
     except Exception as e:
         errors.append(f"Failed to delete effects: {e}")

-    # Delete all media/cache items for user
+    # Delete all media/cache items for user (uses ownership model)
     try:
-        items = await database.get_user_items(actor_id, limit=10000)
-        for item in items:
-            try:
-                cid = item.get("cid")
-                if cid:
-                    await database.delete_cache_item(cid)
-                    deleted["media"] += 1
-            except Exception as e:
-                errors.append(f"Media {item.get('cid', 'unknown')}: {e}")
+        from ..services.cache_service import CacheService
+        cache_service = CacheService(database, cache_manager)
+
+        # Get user's media items (video, image, audio)
+        for media_type in ["video", "image", "audio", "unknown"]:
+            items = await database.get_user_items(actor_id, item_type=media_type, limit=10000)
+            for item in items:
+                cid = item.get("cid")
+                if cid:
+                    try:
+                        success, error = await cache_service.delete_content(cid, actor_id)
+                        if success:
+                            deleted["media"] += 1
+                        elif error:
+                            errors.append(f"Media {cid[:16]}...: {error}")
+                    except Exception as e:
+                        errors.append(f"Media {cid[:16]}...: {e}")
     except Exception as e:
-        errors.append(f"Failed to list media: {e}")
+        errors.append(f"Failed to delete media: {e}")

     logger.info(f"Cleared data for {actor_id}: {deleted}")
     if errors:

@@ -569,6 +569,7 @@ async def run_recipe(
         actor_id=ctx.actor_id,
         l2_server=ctx.l2_server,
         recipe_name=recipe.get("name"),  # Store name for display
+        recipe_sexp=recipe.get("sexp"),  # S-expression for code-addressed execution
     )

     if error:

(File diff suppressed because it is too large.)

@@ -4,6 +4,7 @@ Cache Service - business logic for cache and media management.

 import asyncio
+import json
 import logging
 import os
 import subprocess
 from pathlib import Path

@@ -11,6 +12,8 @@ from typing import Optional, List, Dict, Any, Tuple, TYPE_CHECKING

 import httpx

+logger = logging.getLogger(__name__)
+
 if TYPE_CHECKING:
     from database import Database
     from cache_manager import L1CacheManager

@@ -97,30 +100,52 @@ class CacheService:

     async def get_cache_item(self, cid: str, actor_id: str = None) -> Optional[Dict[str, Any]]:
         """Get cached item with full metadata for display."""
-        # Check if content exists
-        if not self.cache.has_content(cid):
-            return None
-
-        path = self.cache.get_by_cid(cid)
-        if not path or not path.exists():
-            return None
-
-        # Get metadata from database
+        # Get metadata from database first
         meta = await self.db.load_item_metadata(cid, actor_id)
         cache_item = await self.db.get_cache_item(cid)

-        media_type = detect_media_type(path)
-        mime_type = get_mime_type(path)
-        size = path.stat().st_size
+        # Check if content exists locally
+        path = self.cache.get_by_cid(cid) if self.cache.has_content(cid) else None
+
+        if path and path.exists():
+            # Local file exists - detect type from file
+            media_type = detect_media_type(path)
+            mime_type = get_mime_type(path)
+            size = path.stat().st_size
+        else:
+            # File not local - check database for type info
+            # Try to get type from item_types table
+            media_type = "unknown"
+            mime_type = "application/octet-stream"
+            size = 0
+
+            if actor_id:
+                try:
+                    item_types = await self.db.get_item_types(cid, actor_id)
+                    if item_types:
+                        media_type = item_types[0].get("type", "unknown")
+                        if media_type == "video":
+                            mime_type = "video/mp4"
+                        elif media_type == "image":
+                            mime_type = "image/png"
+                        elif media_type == "audio":
+                            mime_type = "audio/mpeg"
+                except Exception:
+                    pass
+
+            # If no local path but we have IPFS CID, content is available remotely
+            if not cache_item:
+                return None

         result = {
             "cid": cid,
-            "path": str(path),
+            "path": str(path) if path else None,
             "media_type": media_type,
             "mime_type": mime_type,
             "size": size,
             "ipfs_cid": cache_item.get("ipfs_cid") if cache_item else None,
             "meta": meta,
+            "remote_only": path is None or not path.exists(),
         }

         # Unpack meta fields to top level for template convenience

@@ -416,32 +441,62 @@ class CacheService:
         return l2_result.get("ipfs_cid") or ipfs_cid, None

     async def delete_content(self, cid: str, actor_id: str) -> Tuple[bool, Optional[str]]:
-        """Delete content from cache. Returns (success, error)."""
-        if not self.cache.has_content(cid):
-            return False, "Content not found"
+        """
+        Remove user's ownership link to cached content.
+
+        This removes the item_types entry linking the user to the content.
+        The cached file is only deleted if no other users own it.
+        Returns (success, error).
+        """
+        import logging
+        logger = logging.getLogger(__name__)

-        # Check if pinned
+        # Check if pinned for this user
         meta = await self.db.load_item_metadata(cid, actor_id)
         if meta and meta.get("pinned"):
             pin_reason = meta.get("pin_reason", "unknown")
             return False, f"Cannot discard pinned item (reason: {pin_reason})"

-        # Check deletion rules via cache_manager
-        can_delete, reason = self.cache.can_delete(cid)
-        if not can_delete:
-            return False, f"Cannot discard: {reason}"
+        # Get the item type to delete the right ownership entry
+        item_types = await self.db.get_item_types(cid, actor_id)
+        if not item_types:
+            return False, "You don't own this content"

-        # Delete via cache_manager
-        success, msg = self.cache.delete_by_cid(cid)
+        # Remove user's ownership links (all types for this user)
+        for item in item_types:
+            item_type = item.get("type", "media")
+            await self.db.delete_item_type(cid, actor_id, item_type)

-        # Clean up legacy metadata files
-        meta_path = self.cache_dir / f"{cid}.meta.json"
-        if meta_path.exists():
-            meta_path.unlink()
-        mp4_path = self.cache_dir / f"{cid}.mp4"
-        if mp4_path.exists():
-            mp4_path.unlink()
+        # Remove friendly name
+        await self.db.delete_friendly_name(actor_id, cid)
+
+        # Check if anyone else still owns this content
+        remaining_owners = await self.db.get_item_types(cid)
+
+        # Only delete the actual file if no one owns it anymore
+        if not remaining_owners:
+            # Check deletion rules via cache_manager
+            can_delete, reason = self.cache.can_delete(cid)
+            if can_delete:
+                # Delete via cache_manager
+                self.cache.delete_by_cid(cid)
+
+                # Clean up legacy metadata files
+                meta_path = self.cache_dir / f"{cid}.meta.json"
+                if meta_path.exists():
+                    meta_path.unlink()
+                mp4_path = self.cache_dir / f"{cid}.mp4"
+                if mp4_path.exists():
+                    mp4_path.unlink()
+
+                # Delete from database
+                await self.db.delete_cache_item(cid)
+
+                logger.info(f"Garbage collected content {cid[:16]}... (no remaining owners)")
+            else:
+                logger.info(f"Content {cid[:16]}... orphaned but cannot delete: {reason}")

+        logger.info(f"Removed content {cid[:16]}... ownership for {actor_id}")
         return True, None

     async def import_from_ipfs(self, ipfs_cid: str, actor_id: str) -> Tuple[Optional[str], Optional[str]]:

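The net effect of this rewrite is reference-counted deletion. A sketch of the intended semantics (the actor ids, the CID, and the two-owner setup are hypothetical):

```python
async def demo(cache_service, cid: str):
    # Both "alice" and "bob" have item_types rows for the same CID.
    ok, err = await cache_service.delete_content(cid, actor_id="alice")
    # alice's ownership link and friendly name are gone; the file stays,
    # because bob still owns it.

    ok, err = await cache_service.delete_content(cid, actor_id="bob")
    # No owners remain: the cached file, legacy metadata files, and the
    # cache_items row are removed (subject to cache_manager.can_delete()).
```
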
@@ -483,7 +538,11 @@ class CacheService:
         filename: str,
         actor_id: str,
     ) -> Tuple[Optional[str], Optional[str], Optional[str]]:
-        """Upload content to cache. Returns (cid, ipfs_cid, error)."""
+        """Upload content to cache. Returns (cid, ipfs_cid, error).
+
+        Files are stored locally first for fast response, then uploaded
+        to IPFS in the background.
+        """
         import tempfile

         try:

@@ -495,21 +554,25 @@ class CacheService:
             # Detect media type (video/image/audio) before moving file
             media_type = detect_media_type(tmp_path)

-            # Store in cache (also stores in IPFS)
-            cached, ipfs_cid = self.cache.put(tmp_path, node_type="upload", move=True)
-            cid = ipfs_cid or cached.cid  # Prefer IPFS CID
+            # Store locally AND upload to IPFS synchronously
+            # This ensures the IPFS CID is available immediately for distributed access
+            cached, ipfs_cid = self.cache.put(tmp_path, node_type="upload", move=True, skip_ipfs=False)
+            cid = ipfs_cid or cached.cid  # Prefer IPFS CID, fall back to local hash

             # Save to database with media category type
             # Using media_type ("video", "image", "audio") not mime_type ("video/mp4")
             # so list_media filtering works correctly
-            await self.db.create_cache_item(cid, ipfs_cid)
+            await self.db.create_cache_item(cached.cid, ipfs_cid)
             await self.db.save_item_metadata(
                 cid=cid,
                 actor_id=actor_id,
-                item_type=media_type,  # Store media category for filtering
+                item_type=media_type,
                 filename=filename
             )

+            if ipfs_cid:
+                logger.info(f"Uploaded to IPFS: {ipfs_cid[:16]}...")
+            else:
+                logger.warning(f"IPFS upload failed, using local hash: {cid[:16]}...")
+
             return cid, ipfs_cid, None
         except Exception as e:
             return None, None, f"Upload failed: {e}"

@@ -33,10 +33,15 @@ class RecipeService:
     async def get_recipe(self, recipe_id: str) -> Optional[Recipe]:
         """Get a recipe by ID (content hash)."""
         import yaml
+        import logging
+        logger = logging.getLogger(__name__)

         # Get from cache (content-addressed storage)
+        logger.info(f"get_recipe: Looking up recipe_id={recipe_id[:16]}...")
         path = self.cache.get_by_cid(recipe_id)
+        logger.info(f"get_recipe: cache.get_by_cid returned path={path}")
         if not path or not path.exists():
+            logger.warning(f"get_recipe: Recipe {recipe_id[:16]}... not found in cache")
             return None

         with open(path) as f:

@@ -55,16 +60,40 @@ class RecipeService:
         logger = logging.getLogger(__name__)

         if is_sexp_format(content):
-            # Parse S-expression
-            try:
-                compiled = compile_string(content)
-                recipe_data = compiled.to_dict()
-                recipe_data["sexp"] = content
-                recipe_data["format"] = "sexp"
-                logger.info(f"Parsed sexp recipe {recipe_id[:16]}..., keys: {list(recipe_data.keys())}")
-            except (ParseError, CompileError) as e:
-                logger.warning(f"Failed to parse sexp recipe {recipe_id[:16]}...: {e}")
-                return {"error": str(e), "recipe_id": recipe_id}
+            # Detect if this is a streaming recipe (starts with (stream ...))
+            def is_streaming_recipe(text):
+                for line in text.split('\n'):
+                    stripped = line.strip()
+                    if not stripped or stripped.startswith(';'):
+                        continue
+                    return stripped.startswith('(stream')
+                return False
+
+            if is_streaming_recipe(content):
+                # Streaming recipes have different format - parse manually
+                import re
+                name_match = re.search(r'\(stream\s+"([^"]+)"', content)
+                recipe_name = name_match.group(1) if name_match else "streaming"
+
+                recipe_data = {
+                    "name": recipe_name,
+                    "sexp": content,
+                    "format": "sexp",
+                    "type": "streaming",
+                    "dag": {"nodes": []},  # Streaming recipes don't have traditional DAG
+                }
+                logger.info(f"Parsed streaming recipe {recipe_id[:16]}..., name: {recipe_name}")
+            else:
+                # Parse traditional (recipe ...) S-expression
+                try:
+                    compiled = compile_string(content)
+                    recipe_data = compiled.to_dict()
+                    recipe_data["sexp"] = content
+                    recipe_data["format"] = "sexp"
+                    logger.info(f"Parsed sexp recipe {recipe_id[:16]}..., keys: {list(recipe_data.keys())}")
+                except (ParseError, CompileError) as e:
+                    logger.warning(f"Failed to parse sexp recipe {recipe_id[:16]}...: {e}")
+                    return {"error": str(e), "recipe_id": recipe_id}
         else:
             # Parse YAML
             try:

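A quick check of the detection rule — a sketch that assumes the nested helper were lifted to module scope, with placeholder recipe bodies: the first non-blank, non-comment line decides.

```python
streaming = """\
; seeded demo recipe
(stream "nebula-loop"
  ...)
"""
traditional = '(recipe "fade-in" ...)'

assert is_streaming_recipe(streaming) is True
assert is_streaming_recipe(traditional) is False
```
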
@@ -160,14 +189,18 @@ class RecipeService:
             return None, f"Compile error: {e}"

         # Write to temp file for caching
+        import logging
+        logger = logging.getLogger(__name__)
         try:
             with tempfile.NamedTemporaryFile(delete=False, suffix=".sexp", mode="w") as tmp:
                 tmp.write(content)
                 tmp_path = Path(tmp.name)

             # Store in cache (content-addressed, auto-pins to IPFS)
+            logger.info(f"upload_recipe: Storing recipe in cache from {tmp_path}")
             cached, ipfs_cid = self.cache.put(tmp_path, node_type="recipe", move=True)
             recipe_id = ipfs_cid or cached.cid  # Prefer IPFS CID
+            logger.info(f"upload_recipe: Stored recipe, cached.cid={cached.cid[:16]}..., ipfs_cid={ipfs_cid[:16] if ipfs_cid else None}, recipe_id={recipe_id[:16]}...")

             # Track ownership in item_types and assign friendly name
             if uploader:

@@ -124,19 +124,40 @@ class RunService:
         # Check database for completed run
         cached = await self.db.get_run_cache(run_id)
         if cached:
-            return {
-                "run_id": run_id,
-                "status": "completed",
-                "recipe": cached.get("recipe"),
-                "inputs": self._ensure_inputs_list(cached.get("inputs")),
-                "output_cid": cached.get("output_cid"),
-                "ipfs_cid": cached.get("ipfs_cid"),
-                "provenance_cid": cached.get("provenance_cid"),
-                "plan_cid": cached.get("plan_cid"),
-                "actor_id": cached.get("actor_id"),
-                "created_at": cached.get("created_at"),
-                "completed_at": cached.get("created_at"),
-            }
+            output_cid = cached.get("output_cid")
+            # Only return as completed if we have an output
+            # (runs with no output should be re-executed)
+            if output_cid:
+                # Also fetch recipe content from pending_runs for streaming runs
+                recipe_sexp = None
+                recipe_name = None
+                pending = await self.db.get_pending_run(run_id)
+                if pending:
+                    recipe_sexp = pending.get("dag_json")
+
+                    # Extract recipe name from streaming recipe content
+                    if recipe_sexp:
+                        import re
+                        name_match = re.search(r'\(stream\s+"([^"]+)"', recipe_sexp)
+                        if name_match:
+                            recipe_name = name_match.group(1)
+
+                return {
+                    "run_id": run_id,
+                    "status": "completed",
+                    "recipe": cached.get("recipe"),
+                    "recipe_name": recipe_name,
+                    "inputs": self._ensure_inputs_list(cached.get("inputs")),
+                    "output_cid": output_cid,
+                    "ipfs_cid": cached.get("ipfs_cid"),
+                    "ipfs_playlist_cid": cached.get("ipfs_playlist_cid") or (pending.get("ipfs_playlist_cid") if pending else None),
+                    "provenance_cid": cached.get("provenance_cid"),
+                    "plan_cid": cached.get("plan_cid"),
+                    "actor_id": cached.get("actor_id"),
+                    "created_at": cached.get("created_at"),
+                    "completed_at": cached.get("created_at"),
+                    "recipe_sexp": recipe_sexp,
+                }

         # Check database for pending run
         pending = await self.db.get_pending_run(run_id)

@@ -154,6 +175,7 @@ class RunService:
         status_map = {
             "pending": "pending",
             "started": "running",
+            "rendering": "running",  # Custom status from streaming task
             "success": "completed",
             "failure": "failed",
             "retry": "running",

@@ -171,15 +193,33 @@ class RunService:
             "output_name": pending.get("output_name"),
             "created_at": pending.get("created_at"),
             "error": pending.get("error"),
+            "recipe_sexp": pending.get("dag_json"),  # Recipe content for streaming runs
+            # Checkpoint fields for resumable renders
+            "checkpoint_frame": pending.get("checkpoint_frame"),
+            "checkpoint_t": pending.get("checkpoint_t"),
+            "total_frames": pending.get("total_frames"),
+            "resumable": pending.get("resumable", True),
+            # IPFS streaming info
+            "ipfs_playlist_cid": pending.get("ipfs_playlist_cid"),
+            "quality_playlists": pending.get("quality_playlists"),
         }

         # If task completed, get result
         if result.ready():
             if result.successful():
-                run_data["status"] = "completed"
                 task_result = result.result
                 if isinstance(task_result, dict):
-                    run_data["output_cid"] = task_result.get("output_cid")
+                    # Check task's own success flag and output_cid
+                    task_success = task_result.get("success", True)
+                    output_cid = task_result.get("output_cid")
+                    if task_success and output_cid:
+                        run_data["status"] = "completed"
+                        run_data["output_cid"] = output_cid
+                    else:
+                        run_data["status"] = "failed"
+                        run_data["error"] = task_result.get("error", "No output produced")
+                else:
+                    run_data["status"] = "completed"
             else:
                 run_data["status"] = "failed"
                 run_data["error"] = str(result.result)

@@ -196,6 +236,15 @@ class RunService:
             "actor_id": pending.get("actor_id"),
             "created_at": pending.get("created_at"),
             "error": pending.get("error"),
+            "recipe_sexp": pending.get("dag_json"),  # Recipe content for streaming runs
+            # Checkpoint fields for resumable renders
+            "checkpoint_frame": pending.get("checkpoint_frame"),
+            "checkpoint_t": pending.get("checkpoint_t"),
+            "total_frames": pending.get("total_frames"),
+            "resumable": pending.get("resumable", True),
+            # IPFS streaming info
+            "ipfs_playlist_cid": pending.get("ipfs_playlist_cid"),
+            "quality_playlists": pending.get("quality_playlists"),
         }

     # Fallback: Check Redis for backwards compatibility

@@ -241,6 +290,7 @@ class RunService:
         status_map = {
             "pending": "pending",
             "started": "running",
+            "rendering": "running",  # Custom status from streaming task
             "success": "completed",
             "failure": "failed",
             "retry": "running",

@@ -263,10 +313,19 @@ class RunService:
         # If task completed, get result
         if result.ready():
             if result.successful():
-                run_data["status"] = "completed"
                 task_result = result.result
                 if isinstance(task_result, dict):
-                    run_data["output_cid"] = task_result.get("output_cid")
+                    # Check task's own success flag and output_cid
+                    task_success = task_result.get("success", True)
+                    output_cid = task_result.get("output_cid")
+                    if task_success and output_cid:
+                        run_data["status"] = "completed"
+                        run_data["output_cid"] = output_cid
+                    else:
+                        run_data["status"] = "failed"
+                        run_data["error"] = task_result.get("error", "No output produced")
+                else:
+                    run_data["status"] = "completed"
             else:
                 run_data["status"] = "failed"
                 run_data["error"] = str(result.result)

@@ -291,9 +350,9 @@ class RunService:
|
||||
if any(r.get("run_id") == run_id for r in completed_runs):
|
||||
continue
|
||||
|
||||
# Get live status
|
||||
# Get live status - include pending, running, rendering, and failed runs
|
||||
run = await self.get_run(run_id)
|
||||
if run and run.get("status") in ("pending", "running"):
|
||||
if run and run.get("status") in ("pending", "running", "rendering", "failed"):
|
||||
pending.append(run)
|
||||
|
||||
# Combine and sort
|
||||
@@ -312,15 +371,19 @@ class RunService:
|
||||
actor_id: Optional[str] = None,
|
||||
l2_server: Optional[str] = None,
|
||||
recipe_name: Optional[str] = None,
|
||||
recipe_sexp: Optional[str] = None,
|
||||
) -> Tuple[Optional[RunResult], Optional[str]]:
|
||||
"""
|
||||
Create a new rendering run. Checks cache before executing.
|
||||
|
||||
If recipe_sexp is provided, uses the new S-expression execution path
|
||||
which generates code-addressed cache IDs before execution.
|
||||
|
||||
Returns (run_dict, error_message).
|
||||
"""
|
||||
import httpx
|
||||
try:
|
||||
from legacy_tasks import render_effect, execute_dag, build_effect_dag
|
||||
from legacy_tasks import render_effect, execute_dag, build_effect_dag, execute_recipe
|
||||
except ImportError as e:
|
||||
return None, f"Celery tasks not available: {e}"
|
||||
|
||||
@@ -401,7 +464,17 @@ class RunService:
|
||||
|
||||
# Not cached - submit to Celery
|
||||
try:
|
||||
if use_dag or recipe == "dag":
|
||||
# Prefer S-expression execution path (code-addressed cache IDs)
|
||||
if recipe_sexp:
|
||||
# Convert inputs to dict if needed
|
||||
if isinstance(inputs, dict):
|
||||
input_hashes = inputs
|
||||
else:
|
||||
# Legacy list format - use positional names
|
||||
input_hashes = {f"input_{i}": cid for i, cid in enumerate(input_list)}
|
||||
|
||||
task = execute_recipe.delay(recipe_sexp, input_hashes, run_id)
|
||||
elif use_dag or recipe == "dag":
|
||||
if dag_json:
|
||||
dag_data = dag_json
|
||||
else:
|
||||
@@ -467,10 +540,13 @@ class RunService:
|
||||
username: str,
|
||||
) -> Tuple[bool, Optional[str]]:
|
||||
"""
|
||||
Discard (delete) a run record.
|
||||
Discard (delete) a run record and clean up outputs/intermediates.
|
||||
|
||||
Note: This removes the run record but not the output content.
|
||||
Outputs and intermediates are only deleted if not used by other runs.
|
||||
"""
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
run = await self.get_run(run_id)
|
||||
if not run:
|
||||
return False, f"Run {run_id} not found"
|
||||
@@ -480,6 +556,18 @@ class RunService:
|
||||
if run_owner and run_owner not in (username, actor_id):
|
||||
return False, "Access denied"
|
||||
|
||||
# Clean up activity outputs/intermediates (only if orphaned)
|
||||
# The activity_id is the same as run_id
|
||||
try:
|
||||
success, msg = self.cache.discard_activity_outputs_only(run_id)
|
||||
if success:
|
||||
logger.info(f"Cleaned up run {run_id}: {msg}")
|
||||
else:
|
||||
# Activity might not exist (old runs), that's OK
|
||||
logger.debug(f"No activity cleanup for {run_id}: {msg}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to cleanup activity for {run_id}: {e}")
|
||||
|
||||
# Remove task_id mapping from Redis
|
||||
self.redis.delete(f"{self.task_key_prefix}{run_id}")
|
||||
|
||||
@@ -487,8 +575,7 @@ class RunService:
|
||||
try:
|
||||
await self.db.delete_run_cache(run_id)
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.getLogger(__name__).warning(f"Failed to delete run_cache for {run_id}: {e}")
|
||||
logger.warning(f"Failed to delete run_cache for {run_id}: {e}")
|
||||
|
||||
# Remove pending run if exists
|
||||
try:
|
||||
@@ -498,43 +585,214 @@ class RunService:
|
||||
|
||||
return True, None
|
||||
|
||||
def _dag_to_steps(self, dag: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Convert DAG nodes dict format to steps list format.
|
||||
|
||||
DAG format: {"nodes": {"id": {...}}, "output_id": "..."}
|
||||
Steps format: {"steps": [{"id": "...", "type": "...", ...}], "output_id": "..."}
|
||||
"""
|
||||
if "steps" in dag:
|
||||
# Already in steps format
|
||||
return dag
|
||||
|
||||
if "nodes" not in dag:
|
||||
return dag
|
||||
|
||||
nodes = dag.get("nodes", {})
|
||||
steps = []
|
||||
|
||||
# Sort by topological order (sources first, then by input dependencies)
|
||||
def get_level(node_id: str, visited: set = None) -> int:
|
||||
if visited is None:
|
||||
visited = set()
|
||||
if node_id in visited:
|
||||
return 0
|
||||
visited.add(node_id)
|
||||
node = nodes.get(node_id, {})
|
||||
inputs = node.get("inputs", [])
|
||||
if not inputs:
|
||||
return 0
|
||||
return 1 + max(get_level(inp, visited) for inp in inputs)
|
||||
|
||||
sorted_ids = sorted(nodes.keys(), key=lambda nid: (get_level(nid), nid))
|
||||
|
||||
for node_id in sorted_ids:
|
||||
node = nodes[node_id]
|
||||
steps.append({
|
||||
"id": node_id,
|
||||
"step_id": node_id,
|
||||
"type": node.get("node_type", "EFFECT"),
|
||||
"config": node.get("config", {}),
|
||||
"inputs": node.get("inputs", []),
|
||||
"name": node.get("name"),
|
||||
"cache_id": node_id, # In code-addressed system, node_id IS the cache_id
|
||||
})
|
||||
|
||||
return {
|
||||
"steps": steps,
|
||||
"output_id": dag.get("output_id"),
|
||||
"metadata": dag.get("metadata", {}),
|
||||
"format": "json",
|
||||
}
|
||||
|
||||
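Editor's note: for orientation, a minimal sketch of what the _dag_to_steps conversion above produces for a two-node graph. The node names and configs below are invented for illustration; they are not taken from the repository.

# Hypothetical input: one source node feeding one effect node.
dag = {
    "nodes": {
        "src1": {"node_type": "SOURCE", "config": {"path": "clip.mp4"}, "inputs": []},
        "fx1": {"node_type": "EFFECT", "config": {"amount": 2}, "inputs": ["src1"]},
    },
    "output_id": "fx1",
}

# get_level("src1") == 0 and get_level("fx1") == 1, so sorted order is
# ["src1", "fx1"] and the converted plan comes out as:
expected = {
    "steps": [
        {"id": "src1", "step_id": "src1", "type": "SOURCE",
         "config": {"path": "clip.mp4"}, "inputs": [], "name": None, "cache_id": "src1"},
        {"id": "fx1", "step_id": "fx1", "type": "EFFECT",
         "config": {"amount": 2}, "inputs": ["src1"], "name": None, "cache_id": "fx1"},
    ],
    "output_id": "fx1",
    "metadata": {},
    "format": "json",
}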
def _sexp_to_steps(self, sexp_content: str) -> Dict[str, Any]:
    """Convert S-expression plan to steps list format for UI.

    Parses the S-expression plan format:
        (plan :id <id> :recipe <name> :recipe-hash <hash>
              (inputs (input_name hash) ...)
              (step step_id :cache-id <hash> :level <int> (node-type :key val ...))
              ...
              :output <output_step_id>)

    Returns steps list compatible with UI visualization.
    """
    try:
        from artdag.sexp import parse, Symbol, Keyword
    except ImportError:
        return {"sexp": sexp_content, "steps": [], "format": "sexp"}

    try:
        parsed = parse(sexp_content)
    except Exception:
        return {"sexp": sexp_content, "steps": [], "format": "sexp"}

    if not isinstance(parsed, list) or not parsed:
        return {"sexp": sexp_content, "steps": [], "format": "sexp"}

    steps = []
    output_step_id = None
    plan_id = None
    recipe_name = None

    # Parse plan structure
    i = 0
    while i < len(parsed):
        item = parsed[i]

        if isinstance(item, Keyword):
            key = item.name
            if i + 1 < len(parsed):
                value = parsed[i + 1]
                if key == "id":
                    plan_id = value
                elif key == "recipe":
                    recipe_name = value
                elif key == "output":
                    output_step_id = value
            i += 2
            continue

        if isinstance(item, list) and item:
            first = item[0]
            if isinstance(first, Symbol) and first.name == "step":
                # Parse step: (step step_id :cache-id <hash> :level <int> (node-expr))
                step_id = item[1] if len(item) > 1 else None
                cache_id = None
                level = 0
                node_type = "EFFECT"
                config = {}
                inputs = []

                j = 2
                while j < len(item):
                    part = item[j]
                    if isinstance(part, Keyword):
                        key = part.name
                        if j + 1 < len(item):
                            val = item[j + 1]
                            if key == "cache-id":
                                cache_id = val
                            elif key == "level":
                                level = val
                        j += 2
                        continue
                    elif isinstance(part, list) and part:
                        # Node expression: (node-type :key val ...)
                        if isinstance(part[0], Symbol):
                            node_type = part[0].name.upper()
                        k = 1
                        while k < len(part):
                            if isinstance(part[k], Keyword):
                                kname = part[k].name
                                if k + 1 < len(part):
                                    kval = part[k + 1]
                                    if kname == "inputs":
                                        inputs = kval if isinstance(kval, list) else [kval]
                                    else:
                                        config[kname] = kval
                                k += 2
                                continue
                            k += 1
                    j += 1

                steps.append({
                    "id": step_id,
                    "step_id": step_id,
                    "type": node_type,
                    "config": config,
                    "inputs": inputs,
                    "cache_id": cache_id or step_id,
                    "level": level,
                })

        i += 1

    return {
        "sexp": sexp_content,
        "steps": steps,
        "output_id": output_step_id,
        "plan_id": plan_id,
        "recipe": recipe_name,
        "format": "sexp",
    }

async def get_run_plan(self, run_id: str) -> Optional[Dict[str, Any]]:
    """Get execution plan for a run.

    Plans are just node outputs - cached by content hash like everything else.
    For streaming runs, returns the recipe content as the plan.
    """
    # Get run to find plan_cache_id
    run = await self.get_run(run_id)
    if not run:
        return None

    plan_cache_id = run.get("plan_cache_id")
    if plan_cache_id:
    # For streaming runs, return the recipe as the plan
    if run.get("recipe") == "streaming" and run.get("recipe_sexp"):
        return {
            "steps": [{"id": "stream", "type": "STREAM", "name": "Streaming Recipe"}],
            "sexp": run.get("recipe_sexp"),
            "format": "sexp",
        }

    # Check plan_cid (stored in database) or plan_cache_id (legacy)
    plan_cid = run.get("plan_cid") or run.get("plan_cache_id")
    if plan_cid:
        # Get plan from cache by content hash
        plan_path = self.cache.get_by_cid(plan_cache_id)
        plan_path = self.cache.get_by_cid(plan_cid)
        if plan_path and plan_path.exists():
            with open(plan_path) as f:
                content = f.read()
            # Detect format
            if content.strip().startswith("("):
                return {"sexp": content, "format": "sexp"}
                # S-expression format - parse for UI
                return self._sexp_to_steps(content)
            else:
                plan = json.loads(content)
                plan["format"] = "json"
                return plan
                return self._dag_to_steps(plan)

    # Fall back to legacy plans directory
    sexp_path = self.cache_dir / "plans" / f"{run_id}.sexp"
    if sexp_path.exists():
        with open(sexp_path) as f:
            return {"sexp": f.read(), "format": "sexp"}
            return self._sexp_to_steps(f.read())

    json_path = self.cache_dir / "plans" / f"{run_id}.json"
    if json_path.exists():
        with open(json_path) as f:
            plan = json.load(f)
            plan["format"] = "json"
            return plan
            return self._dag_to_steps(plan)

    return None
35
app/templates/cache/detail.html
vendored
@@ -13,17 +13,32 @@
<!-- Preview -->
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6 overflow-hidden">
  {% if cache.mime_type and cache.mime_type.startswith('image/') %}
    {% if cache.remote_only and cache.ipfs_cid %}
      <img src="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}" alt=""
           class="w-full max-h-96 object-contain bg-gray-900">
    {% else %}
      <img src="/cache/{{ cache.cid }}/raw" alt=""
           class="w-full max-h-96 object-contain bg-gray-900">
    {% endif %}

  {% elif cache.mime_type and cache.mime_type.startswith('video/') %}
    {% if cache.remote_only and cache.ipfs_cid %}
      <video src="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}" controls
             class="w-full max-h-96 bg-gray-900">
      </video>
    {% else %}
      <video src="/cache/{{ cache.cid }}/raw" controls
             class="w-full max-h-96 bg-gray-900">
      </video>
    {% endif %}

  {% elif cache.mime_type and cache.mime_type.startswith('audio/') %}
    <div class="p-8 bg-gray-900">
      {% if cache.remote_only and cache.ipfs_cid %}
        <audio src="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}" controls class="w-full"></audio>
      {% else %}
        <audio src="/cache/{{ cache.cid }}/raw" controls class="w-full"></audio>
      {% endif %}
    </div>

  {% elif cache.mime_type == 'application/json' %}
@@ -40,15 +55,23 @@
</div>

<!-- Friendly Name -->
{% if cache.friendly_name %}
<div class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">
  <div class="mb-2">
<div id="friendly-name-section" class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">
  <div class="flex items-center justify-between mb-2">
    <span class="text-gray-500 text-sm">Friendly Name</span>
    <p class="text-blue-400 font-medium text-lg mt-1">{{ cache.friendly_name }}</p>
    <button hx-get="/cache/{{ cache.cid }}/name-form"
            hx-target="#friendly-name-section"
            hx-swap="innerHTML"
            class="text-blue-400 hover:text-blue-300 text-sm">
      Edit
    </button>
  </div>
  <p class="text-gray-500 text-xs">Use in recipes: <code class="bg-gray-900 px-2 py-0.5 rounded">{{ cache.base_name }}</code></p>
  {% if cache.friendly_name %}
    <p class="text-blue-400 font-medium text-lg">{{ cache.friendly_name }}</p>
    <p class="text-gray-500 text-xs mt-1">Use in recipes: <code class="bg-gray-900 px-2 py-0.5 rounded">{{ cache.base_name }}</code></p>
  {% else %}
    <p class="text-gray-500 text-sm">No friendly name assigned. Click Edit to add one.</p>
  {% endif %}
</div>
{% endif %}

<!-- User Metadata (editable) -->
<div id="metadata-section" class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">
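Editor's note: the preview markup above applies one rule throughout - remote-only entries are served from the public ipfs.io gateway, everything else from the local raw-content route. As a sketch, the same rule written as a plain Python helper; the function name and dict shape are illustrative, not from the codebase.

def preview_url(cache: dict) -> str:
    # Mirrors the template logic: prefer the IPFS gateway only when the
    # content is remote-only and has a CID; otherwise serve locally.
    if cache.get("remote_only") and cache.get("ipfs_cid"):
        return f"https://ipfs.io/ipfs/{cache['ipfs_cid']}"
    return f"/cache/{cache['cid']}/raw"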
206
app/templates/cache/media_list.html
vendored
@@ -7,6 +7,10 @@
<div class="flex items-center justify-between mb-6">
  <h1 class="text-3xl font-bold">Media</h1>
  <div class="flex items-center space-x-4">
    <button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
            class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
      Upload Media
    </button>
    <select id="type-filter" onchange="filterMedia()"
            class="bg-gray-800 border border-gray-600 rounded px-3 py-2 text-white">
      <option value="">All Types</option>
@@ -17,6 +21,58 @@
  </div>
</div>

<!-- Upload Modal -->
<div id="upload-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
  <div class="bg-gray-800 rounded-lg p-6 w-full max-w-md border border-gray-700">
    <div class="flex justify-between items-center mb-4">
      <h2 class="text-xl font-semibold">Upload Media</h2>
      <button onclick="document.getElementById('upload-modal').classList.add('hidden')"
              class="text-gray-400 hover:text-white">
        <svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
          <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
        </svg>
      </button>
    </div>

    <form id="upload-form" enctype="multipart/form-data" class="space-y-4">
      <div>
        <label class="block text-gray-400 text-sm mb-1">Files</label>
        <input type="file" name="files" id="upload-file" required multiple
               accept="image/*,video/*,audio/*"
               class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white file:mr-4 file:py-2 file:px-4 file:rounded file:border-0 file:bg-blue-600 file:text-white hover:file:bg-blue-700">
        <p class="text-gray-500 text-xs mt-1">Select one or more files to upload</p>
      </div>

      <div id="single-name-field">
        <label class="block text-gray-400 text-sm mb-1">Name (optional, for single file)</label>
        <input type="text" name="display_name" id="upload-name" placeholder="e.g., my-background-video"
               class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
        <p class="text-gray-500 text-xs mt-1">A friendly name to reference this media in recipes</p>
      </div>

      <div id="upload-progress" class="hidden">
        <div class="bg-gray-700 rounded-full h-2">
          <div id="progress-bar" class="bg-blue-600 h-2 rounded-full transition-all" style="width: 0%"></div>
        </div>
        <p id="progress-text" class="text-gray-400 text-sm mt-1">Uploading...</p>
      </div>

      <div id="upload-result" class="hidden max-h-48 overflow-y-auto"></div>

      <div class="flex justify-end space-x-3">
        <button type="button" onclick="document.getElementById('upload-modal').classList.add('hidden')"
                class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
          Cancel
        </button>
        <button type="submit" id="upload-btn"
                class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
          Upload
        </button>
      </div>
    </form>
  </div>
</div>

{% if items %}
<div class="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4" id="media-grid">
  {% for item in items %}
@@ -115,5 +171,155 @@ function filterMedia() {
    }
  });
}

// Show/hide name field based on file count
document.getElementById('upload-file').addEventListener('change', function(e) {
  const nameField = document.getElementById('single-name-field');
  if (e.target.files.length > 1) {
    nameField.style.display = 'none';
  } else {
    nameField.style.display = 'block';
  }
});

// Handle upload form
document.getElementById('upload-form').addEventListener('submit', async function(e) {
  e.preventDefault();

  const form = e.target;
  const fileInput = document.getElementById('upload-file');
  const files = fileInput.files;
  const displayName = document.getElementById('upload-name').value;
  const progressDiv = document.getElementById('upload-progress');
  const progressBar = document.getElementById('progress-bar');
  const progressText = document.getElementById('progress-text');
  const resultDiv = document.getElementById('upload-result');
  const uploadBtn = document.getElementById('upload-btn');

  // Show progress
  progressDiv.classList.remove('hidden');
  resultDiv.classList.add('hidden');
  uploadBtn.disabled = true;

  const results = [];
  const errors = [];

  const CHUNK_SIZE = 1024 * 1024; // 1MB chunks

  for (let i = 0; i < files.length; i++) {
    const file = files[i];
    const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
    const uploadId = crypto.randomUUID();
    const useChunked = file.size > CHUNK_SIZE * 2; // Use chunked for files > 2MB

    progressText.textContent = `Uploading ${i + 1} of ${files.length}: ${file.name}`;

    try {
      let data;

      if (useChunked && totalChunks > 1) {
        // Chunked upload for large files
        for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
          const start = chunkIndex * CHUNK_SIZE;
          const end = Math.min(start + CHUNK_SIZE, file.size);
          const chunk = file.slice(start, end);

          const chunkForm = new FormData();
          chunkForm.append('chunk', chunk);
          chunkForm.append('upload_id', uploadId);
          chunkForm.append('chunk_index', chunkIndex);
          chunkForm.append('total_chunks', totalChunks);
          chunkForm.append('filename', file.name);
          if (files.length === 1 && displayName) {
            chunkForm.append('display_name', displayName);
          }

          const chunkProgress = ((i + (chunkIndex + 1) / totalChunks) / files.length) * 100;
          progressBar.style.width = `${chunkProgress}%`;
          progressText.textContent = `Uploading ${i + 1} of ${files.length}: ${file.name} (${chunkIndex + 1}/${totalChunks} chunks)`;

          const response = await fetch('/media/upload/chunk', {
            method: 'POST',
            body: chunkForm,
          });

          const contentType = response.headers.get('content-type') || '';
          if (!contentType.includes('application/json')) {
            const text = await response.text();
            throw new Error(`Server error (${response.status}): ${text.substring(0, 100)}`);
          }

          data = await response.json();
          if (!response.ok) {
            throw new Error(data.detail || 'Chunk upload failed');
          }
        }
      } else {
        // Regular upload for small files
        const formData = new FormData();
        formData.append('file', file);
        if (files.length === 1 && displayName) {
          formData.append('display_name', displayName);
        }

        progressBar.style.width = `${((i + 0.5) / files.length) * 100}%`;

        const response = await fetch('/media/upload', {
          method: 'POST',
          body: formData,
        });

        const contentType = response.headers.get('content-type') || '';
        if (!contentType.includes('application/json')) {
          const text = await response.text();
          throw new Error(`Server error (${response.status}): ${text.substring(0, 100)}`);
        }

        data = await response.json();
        if (!response.ok) {
          throw new Error(data.detail || 'Upload failed');
        }
      }

      results.push({ filename: file.name, friendly_name: data.friendly_name, cid: data.cid });
    } catch (err) {
      errors.push({ filename: file.name, error: err.message });
    }

    progressBar.style.width = `${((i + 1) / files.length) * 100}%`;
  }

  progressText.textContent = 'Upload complete!';

  // Show results
  let html = '';
  if (results.length > 0) {
    html += '<div class="bg-green-900 border border-green-700 rounded p-3 text-green-300 mb-2">';
    html += `<p class="font-medium">${results.length} file(s) uploaded successfully!</p>`;
    for (const r of results) {
      html += `<p class="text-sm mt-1">${r.filename} → <span class="font-mono">${r.friendly_name}</span></p>`;
    }
    html += '</div>';
  }
  if (errors.length > 0) {
    html += '<div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">';
    html += `<p class="font-medium">${errors.length} file(s) failed:</p>`;
    for (const e of errors) {
      html += `<p class="text-sm mt-1">${e.filename}: ${e.error}</p>`;
    }
    html += '</div>';
  }

  resultDiv.innerHTML = html;
  resultDiv.classList.remove('hidden');

  if (results.length > 0) {
    // Reload page after 2 seconds
    setTimeout(() => location.reload(), 2000);
  } else {
    uploadBtn.disabled = false;
    uploadBtn.textContent = 'Upload';
  }
});
</script>
{% endblock %}
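Editor's note: the client above splits files larger than 2 MB into 1 MB chunks and posts each one to /media/upload/chunk with upload_id, chunk_index, total_chunks, and filename fields, expecting a JSON body with cid and friendly_name on the final chunk. The server side of that endpoint is not part of this diff; the following is a minimal reassembly sketch assuming FastAPI, with the router, staging directory, ingest step, and response values all illustrative rather than taken from the repository.

import shutil
import tempfile
from pathlib import Path

from fastapi import APIRouter, File, Form, UploadFile

router = APIRouter()
STAGING = Path(tempfile.gettempdir()) / "chunked-uploads"  # hypothetical staging area

@router.post("/media/upload/chunk")
async def upload_chunk(
    chunk: UploadFile = File(...),
    upload_id: str = Form(...),
    chunk_index: int = Form(...),
    total_chunks: int = Form(...),
    filename: str = Form(...),
):
    # Stage each chunk under its upload_id; the client sends indexes in order.
    upload_dir = STAGING / upload_id
    upload_dir.mkdir(parents=True, exist_ok=True)
    with open(upload_dir / f"{chunk_index:06d}.part", "wb") as f:
        shutil.copyfileobj(chunk.file, f)

    # On the last chunk, concatenate the staged parts into the final file.
    if chunk_index + 1 == total_chunks:
        final_path = upload_dir / filename
        with open(final_path, "wb") as out:
            for i in range(total_chunks):
                out.write((upload_dir / f"{i:06d}.part").read_bytes())
        # A real handler would hand final_path to the normal media ingest path
        # (content hashing, cache insertion) and return the resulting values.
        return {"cid": "<computed-content-hash>", "friendly_name": filename}

    return {"received": chunk_index}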
@@ -8,7 +8,8 @@
{{ super() }}
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github-dark.min.css">
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/python.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/lisp.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/scheme.min.js"></script>
{% endblock %}

{% block content %}
@@ -93,35 +94,23 @@
  </div>
  {% endif %}

  <!-- Dependencies -->
  {% if meta.dependencies %}
  <div class="bg-gray-800 rounded-lg border border-gray-700">
    <div class="border-b border-gray-700 px-4 py-2">
      <span class="text-gray-400 text-sm font-medium">Dependencies</span>
    </div>
    <div class="p-4">
      <div class="flex flex-wrap gap-2">
        {% for dep in meta.dependencies %}
        <span class="bg-gray-700 text-gray-300 px-3 py-1 rounded">{{ dep }}</span>
        {% endfor %}
      </div>
      {% if meta.requires_python %}
      <p class="text-gray-500 text-sm mt-3">Python {{ meta.requires_python }}</p>
      {% endif %}
    </div>
  </div>
  {% endif %}

  <!-- Usage in Recipe -->
  <div class="bg-gray-800 rounded-lg border border-gray-700">
    <div class="border-b border-gray-700 px-4 py-2">
      <span class="text-gray-400 text-sm font-medium">Usage in Recipe</span>
    </div>
    <div class="p-4">
      <pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">(effect {{ meta.name or 'effect' }} :cid "{{ effect.cid }}")</code></pre>
      {% if effect.base_name %}
      <pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">({{ effect.base_name }} ...)</code></pre>
      <p class="text-gray-500 text-xs mt-2">
        Reference this effect in your recipe S-expression.
        Use the friendly name to reference this effect.
      </p>
      {% else %}
      <pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">(effect :cid "{{ effect.cid }}")</code></pre>
      <p class="text-gray-500 text-xs mt-2">
        Reference this effect by CID in your recipe.
      </p>
      {% endif %}
    </div>
  </div>
</div>
@@ -130,17 +119,17 @@
<div class="lg:col-span-2">
  <div class="bg-gray-800 rounded-lg border border-gray-700">
    <div class="border-b border-gray-700 px-4 py-2 flex items-center justify-between">
      <span class="text-gray-400 text-sm font-medium">Source Code</span>
      <span class="text-gray-400 text-sm font-medium">Source Code (S-expression)</span>
      <div class="flex items-center space-x-2">
        <a href="/effects/{{ effect.cid }}/source"
           class="text-gray-400 hover:text-white text-sm"
           download="{{ meta.name or 'effect' }}.py">
           download="{{ meta.name or 'effect' }}.sexp">
          Download
        </a>
      </div>
    </div>
    <div class="p-4">
      <pre class="text-sm overflow-x-auto rounded bg-gray-900"><code class="language-python" id="source-code">Loading...</code></pre>
      <pre class="text-sm overflow-x-auto rounded bg-gray-900"><code class="language-lisp" id="source-code">Loading...</code></pre>
    </div>
  </div>
</div>
@@ -148,12 +137,18 @@

<!-- Actions -->
<div class="flex items-center space-x-4 mt-8">
  {% if effect.cid.startswith('Qm') or effect.cid.startswith('bafy') %}
  <a href="https://ipfs.io/ipfs/{{ effect.cid }}"
     target="_blank"
     class="bg-cyan-600 hover:bg-cyan-700 px-4 py-2 rounded font-medium">
    View on IPFS
  </a>
  {% endif %}
  <button hx-post="/effects/{{ effect.cid }}/publish"
          hx-target="#share-result"
          hx-target="#action-result"
          class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
    Share to L2
  </button>
  <span id="share-result"></span>
  <button onclick="deleteEffect('{{ effect.cid }}')"
          class="bg-red-600 hover:bg-red-700 px-4 py-2 rounded font-medium">
    Delete
@@ -6,15 +6,59 @@
<div class="max-w-6xl mx-auto">
  <div class="flex items-center justify-between mb-6">
    <h1 class="text-3xl font-bold">Effects</h1>
    <label class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium cursor-pointer">
    <button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
            class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
      Upload Effect
      <input type="file" accept=".py" class="hidden" id="effect-upload" />
    </label>
    </button>
  </div>

  <!-- Upload Modal -->
  <div id="upload-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
    <div class="bg-gray-800 rounded-lg p-6 w-full max-w-md border border-gray-700">
      <div class="flex justify-between items-center mb-4">
        <h2 class="text-xl font-semibold">Upload Effect</h2>
        <button onclick="document.getElementById('upload-modal').classList.add('hidden')"
                class="text-gray-400 hover:text-white">
          <svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
          </svg>
        </button>
      </div>

      <form id="upload-form" enctype="multipart/form-data" class="space-y-4">
        <div>
          <label class="block text-gray-400 text-sm mb-1">Effect File (.sexp)</label>
          <input type="file" name="file" id="upload-file" required
                 accept=".sexp,.lisp"
                 class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white file:mr-4 file:py-2 file:px-4 file:rounded file:border-0 file:bg-blue-600 file:text-white hover:file:bg-blue-700">
        </div>

        <div>
          <label class="block text-gray-400 text-sm mb-1">Friendly Name (optional)</label>
          <input type="text" name="display_name" id="upload-name" placeholder="e.g., color-shift"
                 class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
          <p class="text-gray-500 text-xs mt-1">A name to reference this effect in recipes</p>
        </div>

        <div id="upload-result" class="hidden"></div>

        <div class="flex justify-end space-x-3">
          <button type="button" onclick="document.getElementById('upload-modal').classList.add('hidden')"
                  class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
            Cancel
          </button>
          <button type="submit" id="upload-btn"
                  class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
            Upload
          </button>
        </div>
      </form>
    </div>
  </div>

  <p class="text-gray-400 mb-8">
    Effects are Python scripts that process video frames or whole videos.
    Each effect is stored in IPFS and can be referenced by CID in recipes.
    Effects are S-expression files that define video processing operations.
    Each effect is stored in IPFS and can be referenced by name in recipes.
  </p>

  {% if effects %}
@@ -49,17 +93,6 @@
  </div>
  {% endif %}

  {% if meta.dependencies %}
  <div class="mt-2 flex flex-wrap gap-1">
    {% for dep in meta.dependencies[:3] %}
    <span class="bg-gray-700 text-gray-300 px-2 py-0.5 rounded text-xs">{{ dep }}</span>
    {% endfor %}
    {% if meta.dependencies | length > 3 %}
    <span class="text-gray-500 text-xs">+{{ meta.dependencies | length - 3 }} more</span>
    {% endif %}
  </div>
  {% endif %}

  <div class="mt-3 text-xs">
    {% if effect.friendly_name %}
    <span class="text-blue-400 font-medium">{{ effect.friendly_name }}</span>
@@ -83,67 +116,85 @@

  {% else %}
  <div class="bg-gray-800 border border-gray-700 rounded-lg p-12 text-center">
    <svg class="w-16 h-16 mx-auto mb-4 text-gray-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
      <path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"
            d="M10 20l4-16m4 4l4 4-4 4M6 16l-4-4 4-4"/>
    </svg>
    <p class="text-gray-500 mb-4">No effects uploaded yet.</p>
    <p class="text-gray-600 text-sm mb-6">
      Effects are Python files with @effect metadata in a docstring.
      Effects are S-expression files with metadata in comment headers.
    </p>
    <label class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded font-medium cursor-pointer inline-block">
    <button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
            class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded font-medium">
      Upload Your First Effect
      <input type="file" accept=".py" class="hidden" id="effect-upload-empty" />
    </label>
    </button>
  </div>
  {% endif %}
</div>

<div id="upload-result" class="fixed bottom-4 right-4 max-w-sm"></div>

<script>
function handleEffectUpload(input) {
  const file = input.files[0];
// Handle upload form
document.getElementById('upload-form').addEventListener('submit', async function(e) {
  e.preventDefault();

  const form = e.target;
  const fileInput = document.getElementById('upload-file');
  const displayName = document.getElementById('upload-name').value;
  const resultDiv = document.getElementById('upload-result');
  const uploadBtn = document.getElementById('upload-btn');

  const file = fileInput.files[0];
  if (!file) return;

  const formData = new FormData();
  formData.append('file', file);
  if (displayName) {
    formData.append('display_name', displayName);
  }

  fetch('/effects/upload', {
    method: 'POST',
    body: formData
  })
  .then(response => {
    if (!response.ok) throw new Error('Upload failed');
    return response.json();
  })
  .then(data => {
    const resultDiv = document.getElementById('upload-result');
  uploadBtn.disabled = true;
  uploadBtn.textContent = 'Uploading...';
  resultDiv.classList.add('hidden');

  try {
    const response = await fetch('/effects/upload', {
      method: 'POST',
      body: formData
    });

    const data = await response.json();

    if (response.ok) {
      resultDiv.innerHTML = `
        <div class="bg-green-900 border border-green-700 rounded p-3 text-green-300">
          <p class="font-medium">Effect uploaded!</p>
          <p class="text-sm mt-1">${data.name} <span class="font-mono">${data.friendly_name}</span></p>
        </div>
      `;
      resultDiv.classList.remove('hidden');
      setTimeout(() => location.reload(), 1500);
    } else {
      resultDiv.innerHTML = `
        <div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">
          <p class="font-medium">Upload failed</p>
          <p class="text-sm mt-1">${data.detail || 'Unknown error'}</p>
        </div>
      `;
      resultDiv.classList.remove('hidden');
      uploadBtn.disabled = false;
      uploadBtn.textContent = 'Upload';
    }
  } catch (error) {
    resultDiv.innerHTML = `
      <div class="bg-green-900 border border-green-700 rounded-lg p-4">
        <p class="text-green-300 font-medium">Effect uploaded!</p>
        <p class="text-green-400 text-sm mt-1">${data.name} v${data.version}</p>
        <p class="text-gray-400 text-xs mt-2 font-mono">${data.cid}</p>
      <div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">
        <p class="font-medium">Upload failed</p>
        <p class="text-sm mt-1">${error.message}</p>
      </div>
    `;
    setTimeout(() => {
      window.location.reload();
    }, 1500);
  })
  .catch(error => {
    const resultDiv = document.getElementById('upload-result');
    resultDiv.innerHTML = `
      <div class="bg-red-900 border border-red-700 rounded-lg p-4">
        <p class="text-red-300 font-medium">Upload failed</p>
        <p class="text-red-400 text-sm mt-1">${error.message}</p>
      </div>
    `;
  });

  input.value = '';
}

document.getElementById('effect-upload')?.addEventListener('change', function() {
  handleEffectUpload(this);
});
document.getElementById('effect-upload-empty')?.addEventListener('change', function() {
  handleEffectUpload(this);
    resultDiv.classList.remove('hidden');
    uploadBtn.disabled = false;
    uploadBtn.textContent = 'Upload';
  }
});
</script>
{% endblock %}
@@ -50,6 +50,17 @@
  </div>
</div>

{% if recipe.type == 'streaming' %}
<!-- Streaming Recipe Info -->
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6 p-4">
  <div class="flex items-center space-x-2 mb-2">
    <span class="bg-purple-900 text-purple-300 px-2 py-1 rounded text-sm">Streaming Recipe</span>
  </div>
  <p class="text-gray-400 text-sm">
    This recipe uses frame-by-frame streaming rendering. The pipeline is defined as an S-expression that generates frames dynamically.
  </p>
</div>
{% else %}
<!-- DAG Visualization -->
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6">
  <div class="border-b border-gray-700 px-4 py-2 flex items-center justify-between">
@@ -99,21 +110,125 @@
  </div>
  {% endfor %}
</div>
{% endif %}

<!-- YAML Source -->
<h2 class="text-lg font-semibold mb-4">Source</h2>
<!-- Source Code -->
<h2 class="text-lg font-semibold mb-4">Recipe (S-expression)</h2>
<div class="bg-gray-900 rounded-lg p-4 border border-gray-700">
  <pre class="text-sm text-gray-300 overflow-x-auto whitespace-pre-wrap">{{ recipe.yaml }}</pre>
  {% if recipe.sexp %}
  <pre class="text-sm font-mono text-gray-300 overflow-x-auto whitespace-pre-wrap sexp-code">{{ recipe.sexp }}</pre>
  {% else %}
  <p class="text-gray-500">No source available</p>
  {% endif %}
</div>

<script>
// Single-pass S-expression syntax highlighter (avoids regex corruption)
function highlightSexp(text) {
  const SPECIAL = new Set(['plan','recipe','def','->','stream','let','lambda','if','cond','define']);
  const PRIMS = new Set(['source','effect','sequence','segment','resize','transform','layer','blend','mux','analyze','fused-pipeline']);
  function esc(s) { return s.replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;'); }
  function span(cls, s) { return '<span class="' + cls + '">' + esc(s) + '</span>'; }

  let out = '', i = 0, len = text.length;
  while (i < len) {
    if (text[i] === ';' && i + 1 < len && text[i+1] === ';') {
      let end = text.indexOf('\n', i);
      if (end === -1) end = len;
      out += span('text-gray-500', text.slice(i, end));
      i = end;
    }
    else if (text[i] === '"') {
      let j = i + 1;
      while (j < len && text[j] !== '"') { if (text[j] === '\\') j++; j++; }
      if (j < len) j++;
      out += span('text-green-400', text.slice(i, j));
      i = j;
    }
    else if (text[i] === ':' && i + 1 < len && /[a-zA-Z_-]/.test(text[i+1])) {
      let j = i + 1;
      while (j < len && /[a-zA-Z0-9_-]/.test(text[j])) j++;
      out += span('text-purple-400', text.slice(i, j));
      i = j;
    }
    else if (text[i] === '(') {
      out += span('text-yellow-500', '(');
      i++;
      let ws = '';
      while (i < len && (text[i] === ' ' || text[i] === '\t')) { ws += text[i]; i++; }
      out += esc(ws);
      if (i < len && /[a-zA-Z_>-]/.test(text[i])) {
        let j = i;
        while (j < len && /[a-zA-Z0-9_>-]/.test(text[j])) j++;
        let word = text.slice(i, j);
        if (SPECIAL.has(word)) out += span('text-pink-400 font-semibold', word);
        else if (PRIMS.has(word)) out += span('text-blue-400', word);
        else out += esc(word);
        i = j;
      }
    }
    else if (text[i] === ')') {
      out += span('text-yellow-500', ')');
      i++;
    }
    else if (/[0-9]/.test(text[i]) && (i === 0 || /[\s(]/.test(text[i-1]))) {
      let j = i;
      while (j < len && /[0-9.]/.test(text[j])) j++;
      out += span('text-orange-300', text.slice(i, j));
      i = j;
    }
    else {
      let j = i;
      while (j < len && !'(;":)'.includes(text[j])) {
        if (text[j] === ':' && j + 1 < len && /[a-zA-Z_-]/.test(text[j+1])) break;
        if (/[0-9]/.test(text[j]) && (j === 0 || /[\s(]/.test(text[j-1]))) break;
        j++;
      }
      if (j === i) { out += esc(text[i]); i++; }
      else { out += esc(text.slice(i, j)); i = j; }
    }
  }
  return out;
}

document.querySelectorAll('.sexp-code').forEach(el => {
  el.innerHTML = highlightSexp(el.textContent);
});
</script>

<!-- Actions -->
<div class="flex items-center space-x-4 mt-8">
  <button hx-post="/runs/rerun/{{ recipe.recipe_id }}"
          hx-target="#action-result"
          hx-swap="innerHTML"
          class="bg-green-600 hover:bg-green-700 px-4 py-2 rounded font-medium">
    Run Recipe
  </button>
  {% if recipe.ipfs_cid %}
  <a href="https://ipfs.io/ipfs/{{ recipe.ipfs_cid }}"
     target="_blank"
     class="bg-cyan-600 hover:bg-cyan-700 px-4 py-2 rounded font-medium">
    View on IPFS
  </a>
  {% elif recipe.recipe_id.startswith('Qm') or recipe.recipe_id.startswith('bafy') %}
  <a href="https://ipfs.io/ipfs/{{ recipe.recipe_id }}"
     target="_blank"
     class="bg-cyan-600 hover:bg-cyan-700 px-4 py-2 rounded font-medium">
    View on IPFS
  </a>
  {% endif %}
  <button hx-post="/recipes/{{ recipe.recipe_id }}/publish"
          hx-target="#share-result"
          hx-target="#action-result"
          class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded font-medium">
    Share to L2
  </button>
  <span id="share-result"></span>
  <button hx-delete="/recipes/{{ recipe.recipe_id }}/ui"
          hx-target="#action-result"
          hx-confirm="Delete this recipe? This cannot be undone."
          class="bg-red-600 hover:bg-red-700 px-4 py-2 rounded font-medium">
    Delete
  </button>
  <span id="action-result"></span>
</div>
</div>
@@ -64,14 +64,14 @@
{# Arrow #}
<span class="text-gray-600">-></span>

{# Output preview #}
{# Output preview - prefer IPFS URLs when available #}
{% if run.output_cid %}
<div class="flex items-center space-x-1">
  <span class="text-xs text-gray-500 mr-1">Out:</span>
  {% if run.output_media_type and run.output_media_type.startswith('image/') %}
  <img src="/cache/{{ run.output_cid }}/raw" alt="" class="w-10 h-10 object-cover rounded">
  <img src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" alt="" class="w-10 h-10 object-cover rounded">
  {% elif run.output_media_type and run.output_media_type.startswith('video/') %}
  <video src="/cache/{{ run.output_cid }}/raw" class="w-10 h-10 object-cover rounded" muted></video>
  <video src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" class="w-10 h-10 object-cover rounded" muted></video>
  {% else %}
  <div class="w-10 h-10 bg-gray-700 rounded flex items-center justify-center text-gray-500 text-xs">?</div>
  {% endif %}
62
app/templates/runs/artifacts.html
Normal file
@@ -0,0 +1,62 @@
{% extends "base.html" %}

{% block title %}Run Artifacts{% endblock %}

{% block content %}
<div class="mb-6">
  <a href="/runs/{{ run_id }}/detail" class="inline-flex items-center text-blue-400 hover:text-blue-300">
    <svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
      <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
    </svg>
    Back to Run
  </a>
</div>

<h1 class="text-2xl font-bold text-white mb-6">Run Artifacts</h1>

{% if artifacts %}
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
  {% for artifact in artifacts %}
  <div class="bg-gray-800 rounded-lg p-4">
    <div class="flex items-center justify-between mb-3">
      <span class="px-2 py-1 text-xs rounded
        {% if artifact.role == 'input' %}bg-blue-600
        {% elif artifact.role == 'output' %}bg-green-600
        {% else %}bg-purple-600{% endif %}">
        {{ artifact.role }}
      </span>
      <span class="text-sm text-gray-400">{{ artifact.step_name }}</span>
    </div>

    <div class="mb-3">
      <p class="text-xs text-gray-500 mb-1">Content Hash</p>
      <p class="font-mono text-xs text-gray-300 truncate">{{ artifact.hash }}</p>
    </div>

    <div class="flex items-center justify-between text-sm">
      <span class="text-gray-400">
        {% if artifact.media_type == 'video' %}Video
        {% elif artifact.media_type == 'image' %}Image
        {% elif artifact.media_type == 'audio' %}Audio
        {% else %}File{% endif %}
      </span>
      <span class="text-gray-500">{{ (artifact.size_bytes / 1024)|round(1) }} KB</span>
    </div>

    <div class="mt-3 flex gap-2">
      <a href="/cache/{{ artifact.hash }}" class="flex-1 px-3 py-1 bg-gray-700 hover:bg-gray-600 text-center text-sm rounded transition-colors">
        View
      </a>
      <a href="/cache/{{ artifact.hash }}/raw" class="flex-1 px-3 py-1 bg-blue-600 hover:bg-blue-700 text-center text-sm rounded transition-colors">
        Download
      </a>
    </div>
  </div>
  {% endfor %}
</div>
{% else %}
<div class="bg-gray-800 rounded-lg p-6 text-center">
  <p class="text-gray-400">No artifacts found for this run.</p>
</div>
{% endif %}
{% endblock %}
@@ -7,10 +7,11 @@
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/cytoscape/3.23.0/cytoscape.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/dagre/0.8.5/dagre.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/cytoscape-dagre@2.5.0/cytoscape-dagre.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/hls.js@1.4.12/dist/hls.min.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
{% set status_colors = {'completed': 'green', 'running': 'blue', 'pending': 'yellow', 'failed': 'red'} %}
|
||||
{% set status_colors = {'completed': 'green', 'running': 'blue', 'pending': 'yellow', 'failed': 'red', 'paused': 'yellow'} %}
|
||||
{% set color = status_colors.get(run.status, 'gray') %}
|
||||
|
||||
<div class="max-w-6xl mx-auto">
|
||||
@@ -24,13 +25,65 @@
|
||||
{% if run.cached %}
|
||||
<span class="bg-purple-900 text-purple-300 px-3 py-1 rounded text-sm">Cached</span>
|
||||
{% endif %}
|
||||
{% if run.error %}
|
||||
<span class="text-red-400 text-sm ml-2">{{ run.error }}</span>
|
||||
{% endif %}
|
||||
{% if run.checkpoint_frame %}
|
||||
<span class="text-gray-400 text-sm ml-2">
|
||||
Checkpoint: {{ run.checkpoint_frame }}{% if run.total_frames %} / {{ run.total_frames }}{% endif %} frames
|
||||
</span>
|
||||
{% endif %}
|
||||
<div class="flex-grow"></div>
|
||||
|
||||
<!-- Pause button for running renders -->
|
||||
{% if run.status == 'running' %}
|
||||
<button hx-post="/runs/{{ run.run_id }}/pause"
|
||||
hx-target="#action-result"
|
||||
hx-swap="innerHTML"
|
||||
class="bg-yellow-600 hover:bg-yellow-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Pause
|
||||
</button>
|
||||
{% endif %}
|
||||
|
||||
<!-- Resume/Restart buttons for failed/paused renders -->
|
||||
{% if run.status in ['failed', 'paused'] %}
|
||||
{% if run.checkpoint_frame %}
|
||||
<button hx-post="/runs/{{ run.run_id }}/resume"
|
||||
hx-target="#action-result"
|
||||
hx-swap="innerHTML"
|
||||
class="bg-green-600 hover:bg-green-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Resume{% if run.total_frames %} ({{ ((run.checkpoint_frame / run.total_frames) * 100)|round|int }}%){% endif %}
|
||||
</button>
|
||||
{% endif %}
|
||||
<button hx-post="/runs/{{ run.run_id }}/restart"
|
||||
hx-target="#action-result"
|
||||
hx-swap="innerHTML"
|
||||
hx-confirm="Discard progress and start over?"
|
||||
class="bg-yellow-600 hover:bg-yellow-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Restart
|
||||
</button>
|
||||
{% endif %}
|
||||
|
||||
{% if run.recipe %}
|
||||
<button hx-post="/runs/rerun/{{ run.recipe }}"
|
||||
hx-target="#action-result"
|
||||
hx-swap="innerHTML"
|
||||
class="bg-blue-600 hover:bg-blue-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Run Again
|
||||
</button>
|
||||
{% endif %}
|
||||
<button hx-post="/runs/{{ run.run_id }}/publish"
|
||||
hx-target="#share-result"
|
||||
hx-target="#action-result"
|
||||
class="bg-purple-600 hover:bg-purple-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Share to L2
|
||||
</button>
|
||||
<span id="share-result"></span>
|
||||
<button hx-delete="/runs/{{ run.run_id }}/ui"
|
||||
hx-target="#action-result"
|
||||
hx-confirm="Delete this run and all its artifacts? This cannot be undone."
|
||||
class="bg-red-600 hover:bg-red-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Delete
|
||||
</button>
|
||||
<span id="action-result"></span>
|
||||
</div>
|
||||
|
||||
<!-- Info Grid -->
|
||||
@@ -50,7 +103,11 @@
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<div class="text-gray-500 text-sm">Steps</div>
|
||||
<div class="text-white font-medium">
|
||||
{{ run.executed or 0 }} / {{ run.total_steps or (plan.steps|length if plan and plan.steps else '?') }}
|
||||
{% if run.recipe == 'streaming' %}
|
||||
{% if run.status == 'completed' %}1 / 1{% else %}0 / 1{% endif %}
|
||||
{% else %}
|
||||
{{ run.executed or 0 }} / {{ run.total_steps or (plan.steps|length if plan and plan.steps else '?') }}
|
||||
{% endif %}
|
||||
{% if run.cached_steps %}
|
||||
<span class="text-purple-400 text-sm">({{ run.cached_steps }} cached)</span>
|
||||
{% endif %}
|
||||
@@ -66,6 +123,309 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Unified HLS Player (shown during rendering, for paused/failed runs with checkpoint, OR for completed HLS streams) -->
|
||||
{% if run.status == 'rendering' or run.ipfs_playlist_cid or (run.status in ['paused', 'failed'] and run.checkpoint_frame) %}
|
||||
<div id="hls-player-container" class="mb-6 bg-gray-800 rounded-lg p-4">
|
||||
<div class="flex items-center justify-between mb-4">
|
||||
<h3 class="text-lg font-semibold flex items-center">
|
||||
{% if run.status == 'rendering' %}
|
||||
<span id="live-indicator" class="w-3 h-3 bg-red-500 rounded-full mr-2 animate-pulse"></span>
|
||||
<span id="player-title">Live Preview</span>
|
||||
{% elif run.status == 'paused' %}
|
||||
<span id="live-indicator" class="w-3 h-3 bg-yellow-500 rounded-full mr-2"></span>
|
||||
<span id="player-title">Partial Output (Paused)</span>
|
||||
{% elif run.status == 'failed' and run.checkpoint_frame %}
|
||||
<span id="live-indicator" class="w-3 h-3 bg-red-500 rounded-full mr-2"></span>
|
||||
<span id="player-title">Partial Output (Failed)</span>
|
||||
{% else %}
|
||||
<span id="live-indicator" class="w-3 h-3 bg-green-500 rounded-full mr-2 hidden"></span>
|
||||
<span id="player-title">Video</span>
|
||||
{% endif %}
|
||||
</h3>
|
||||
<div class="flex items-center space-x-4">
|
||||
<!-- Mode toggle -->
|
||||
<div class="flex items-center space-x-2 text-sm">
|
||||
<button id="mode-replay" onclick="setPlayerMode('replay')"
|
||||
class="px-2 py-1 rounded {% if run.status != 'rendering' %}bg-blue-600 text-white{% else %}bg-gray-700 text-gray-400 hover:bg-gray-600{% endif %}">
|
||||
From Start
|
||||
</button>
|
||||
<button id="mode-live" onclick="setPlayerMode('live')"
|
||||
class="px-2 py-1 rounded {% if run.status == 'rendering' %}bg-blue-600 text-white{% else %}bg-gray-700 text-gray-400 hover:bg-gray-600{% endif %}">
|
||||
Live Edge
|
||||
</button>
|
||||
</div>
|
||||
<div id="stream-status" class="text-sm text-gray-400">Connecting...</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="relative bg-black rounded-lg overflow-hidden" style="aspect-ratio: 16/9;">
|
||||
<video id="hls-video" class="w-full h-full" controls autoplay muted playsinline></video>
|
||||
<div id="stream-loading" class="absolute inset-0 flex items-center justify-center bg-gray-900/80">
|
||||
<div class="text-center">
|
||||
<div class="animate-spin w-8 h-8 border-2 border-blue-500 border-t-transparent rounded-full mx-auto mb-2"></div>
|
||||
<div class="text-gray-400">Waiting for stream...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="mt-2 flex items-center justify-between text-xs text-gray-500">
|
||||
<span>Stream: <code class="bg-gray-900 px-1 rounded">/runs/{{ run.run_id }}/playlist.m3u8</code></span>
|
||||
<span id="stream-info"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
(function() {
|
||||
const video = document.getElementById('hls-video');
|
||||
const statusEl = document.getElementById('stream-status');
|
||||
const loadingEl = document.getElementById('stream-loading');
|
||||
const streamInfoEl = document.getElementById('stream-info');
|
||||
const liveIndicator = document.getElementById('live-indicator');
|
||||
const playerTitle = document.getElementById('player-title');
|
||||
const modeReplayBtn = document.getElementById('mode-replay');
|
||||
const modeLiveBtn = document.getElementById('mode-live');
|
||||
|
||||
const baseUrl = '/runs/{{ run.run_id }}/playlist.m3u8';
|
||||
const isRendering = {{ 'true' if run.status == 'rendering' else 'false' }};
|
||||
const isPausedOrFailed = {{ 'true' if run.status in ['paused', 'failed'] else 'false' }};
|
||||
|
||||
let hls = null;
|
||||
let retryCount = 0;
|
||||
const maxRetries = 120;
|
||||
let segmentsLoaded = 0;
|
||||
// Start in replay mode for paused/failed (shows partial output from start)
|
||||
// Start in live mode for rendering (follows the render progress)
|
||||
let currentMode = isRendering ? 'live' : 'replay';
|
||||
|
||||
function getHlsUrl() {
|
||||
return baseUrl + '?_t=' + Date.now();
|
||||
}
|
||||
|
||||
// Custom playlist loader that adds cache-busting to every request
|
||||
class CacheBustingPlaylistLoader extends Hls.DefaultConfig.loader {
|
||||
load(context, config, callbacks) {
|
||||
if (context.type === 'manifest' || context.type === 'level') {
|
||||
const url = new URL(context.url, window.location.origin);
|
||||
url.searchParams.set('_t', Date.now());
|
||||
context.url = url.toString();
|
||||
}
|
||||
super.load(context, config, callbacks);
|
||||
}
|
||||
}
|
||||
|
||||
function getHlsConfig(mode) {
|
||||
const baseConfig = {
|
||||
maxBufferLength: 120,
|
||||
maxMaxBufferLength: 180,
|
||||
maxBufferSize: 100 * 1024 * 1024,
|
||||
maxBufferHole: 0.5,
|
||||
backBufferLength: 60,
|
||||
manifestLoadingTimeOut: 10000,
|
||||
manifestLoadingMaxRetry: 4,
|
||||
levelLoadingTimeOut: 10000,
|
||||
levelLoadingMaxRetry: 4,
|
||||
fragLoadingTimeOut: 20000,
|
||||
fragLoadingMaxRetry: 6,
|
||||
startLevel: 0,
|
||||
abrEwmaDefaultEstimate: 500000,
|
||||
};
|
||||
|
||||
if (mode === 'live') {
|
||||
// Live mode: follow the edge, cache-bust playlists
|
||||
return {
|
||||
...baseConfig,
|
||||
pLoader: CacheBustingPlaylistLoader,
|
||||
liveSyncDurationCount: 10,
|
||||
liveMaxLatencyDurationCount: 20,
|
||||
liveDurationInfinity: true,
|
||||
};
|
||||
} else {
|
||||
// Replay mode: start from beginning, no live sync
|
||||
return {
|
||||
...baseConfig,
|
||||
pLoader: CacheBustingPlaylistLoader, // Still bust cache for fresh playlist
|
||||
startPosition: 0,
|
||||
liveDurationInfinity: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function updateModeUI(mode) {
|
||||
currentMode = mode;
|
||||
if (mode === 'live') {
|
||||
modeLiveBtn.classList.add('bg-blue-600', 'text-white');
|
||||
modeLiveBtn.classList.remove('bg-gray-700', 'text-gray-400');
|
||||
modeReplayBtn.classList.remove('bg-blue-600', 'text-white');
|
||||
modeReplayBtn.classList.add('bg-gray-700', 'text-gray-400');
|
||||
liveIndicator.classList.remove('hidden', 'bg-green-500');
|
||||
liveIndicator.classList.add('bg-red-500', 'animate-pulse');
|
||||
playerTitle.textContent = isRendering ? 'Live Preview' : 'Live Edge';
|
||||
} else {
|
||||
modeReplayBtn.classList.add('bg-blue-600', 'text-white');
|
||||
modeReplayBtn.classList.remove('bg-gray-700', 'text-gray-400');
|
||||
modeLiveBtn.classList.remove('bg-blue-600', 'text-white');
|
||||
modeLiveBtn.classList.add('bg-gray-700', 'text-gray-400');
|
||||
liveIndicator.classList.add('hidden');
|
||||
liveIndicator.classList.remove('animate-pulse');
|
||||
playerTitle.textContent = 'Replay';
|
||||
}
|
||||
}
|
||||
|
||||
window.setPlayerMode = function(mode) {
|
||||
if (mode === currentMode) return;
|
||||
|
||||
const currentTime = video.currentTime;
|
||||
const wasPlaying = !video.paused;
|
||||
|
||||
// Destroy current HLS instance
|
||||
if (hls) {
|
||||
hls.destroy();
|
||||
hls = null;
|
||||
}
|
||||
|
||||
updateModeUI(mode);
|
||||
segmentsLoaded = 0;
|
||||
retryCount = 0;
|
||||
|
||||
// Reinitialize with new config
|
||||
initHls(mode, mode === 'replay' ? 0 : null); // Start from 0 in replay, live edge in live
|
||||
};
|
||||
|
||||
function initHls(mode, startPosition) {
|
||||
mode = mode || currentMode;
|
||||
|
||||
if (Hls.isSupported()) {
|
||||
const config = getHlsConfig(mode);
|
||||
if (startPosition !== null && startPosition !== undefined) {
|
||||
config.startPosition = startPosition;
|
||||
}
|
||||
hls = new Hls(config);
|
||||
|
||||
hls.on(Hls.Events.MANIFEST_PARSED, function(event, data) {
|
||||
loadingEl.classList.add('hidden');
|
||||
statusEl.textContent = 'Buffering...';
|
||||
statusEl.classList.remove('text-gray-400');
|
||||
statusEl.classList.add('text-yellow-400');
|
||||
streamInfoEl.textContent = `${data.levels.length} quality level(s)`;
|
||||
video.play().catch(() => {});
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.FRAG_LOADED, function(event, data) {
|
||||
retryCount = 0;
|
||||
segmentsLoaded++;
|
||||
const modeLabel = currentMode === 'live' ? 'Live' : 'Replay';
|
||||
statusEl.textContent = `${modeLabel} (${segmentsLoaded} segments)`;
|
||||
statusEl.classList.remove('text-yellow-400', 'text-gray-400');
|
||||
statusEl.classList.add('text-green-400');
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.BUFFER_APPENDED, function() {
|
||||
loadingEl.classList.add('hidden');
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.ERROR, function(event, data) {
|
||||
console.log('HLS error:', data.type, data.details, data.fatal);
|
||||
|
||||
if (data.fatal) {
|
||||
switch (data.type) {
|
||||
case Hls.ErrorTypes.NETWORK_ERROR:
|
||||
if (retryCount < maxRetries) {
|
||||
retryCount++;
|
||||
statusEl.textContent = `Waiting for stream... (${retryCount})`;
|
||||
statusEl.classList.remove('text-green-400');
|
||||
statusEl.classList.add('text-yellow-400');
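// Capped exponential backoff: 1s * 1.5^n (n capped at 6), clamped to 10s,
// plus up to 1s of random jitter so stalled players don't retry in lockstep.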
|
||||
const delay = Math.min(1000 * Math.pow(1.5, Math.min(retryCount, 6)), 10000);
|
||||
setTimeout(() => {
|
||||
hls.loadSource(getHlsUrl());
|
||||
}, delay + Math.random() * 1000);
|
||||
} else {
|
||||
statusEl.textContent = 'Stream unavailable';
|
||||
statusEl.classList.add('text-red-400');
|
||||
}
|
||||
break;
|
||||
case Hls.ErrorTypes.MEDIA_ERROR:
|
||||
console.log('Media error, attempting recovery');
|
||||
hls.recoverMediaError();
|
||||
break;
|
||||
default:
|
||||
statusEl.textContent = 'Stream error';
|
||||
statusEl.classList.add('text-red-400');
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
if (data.details === 'bufferStalledError') {
|
||||
statusEl.textContent = 'Buffering...';
|
||||
statusEl.classList.remove('text-green-400');
|
||||
statusEl.classList.add('text-yellow-400');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
video.addEventListener('waiting', function() {
|
||||
if (currentMode === 'live' && hls && hls.liveSyncPosition) {
|
||||
const liveEdge = hls.liveSyncPosition;
|
||||
const behindLive = liveEdge - video.currentTime;
|
||||
if (behindLive < 8) {
|
||||
statusEl.textContent = 'Waiting for rendering...';
|
||||
} else {
|
||||
statusEl.textContent = 'Buffering...';
|
||||
}
|
||||
} else {
|
||||
statusEl.textContent = 'Buffering...';
|
||||
}
|
||||
statusEl.classList.remove('text-green-400');
|
||||
statusEl.classList.add('text-yellow-400');
|
||||
});
|
||||
|
||||
video.addEventListener('playing', function() {
|
||||
const modeLabel = currentMode === 'live' ? 'Live' : 'Replay';
|
||||
statusEl.textContent = `${modeLabel} (${segmentsLoaded} segments)`;
|
||||
statusEl.classList.remove('text-yellow-400');
|
||||
statusEl.classList.add('text-green-400');
|
||||
});
|
||||
|
||||
// Live mode: periodic check for catching up to live edge
|
||||
if (currentMode === 'live') {
|
||||
setInterval(function() {
|
||||
if (hls && !video.paused && hls.levels && hls.levels.length > 0) {
|
||||
const buffered = video.buffered;
|
||||
if (buffered.length > 0) {
|
||||
const bufferEnd = buffered.end(buffered.length - 1);
|
||||
const bufferAhead = bufferEnd - video.currentTime;
|
||||
if (bufferAhead < 4) {
|
||||
statusEl.textContent = 'Waiting for rendering...';
|
||||
statusEl.classList.remove('text-green-400');
|
||||
statusEl.classList.add('text-yellow-400');
|
||||
}
|
||||
}
|
||||
}
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
hls.loadSource(getHlsUrl());
|
||||
hls.attachMedia(video);
|
||||
} else if (video.canPlayType('application/vnd.apple.mpegurl')) {
|
||||
video.src = getHlsUrl();
|
||||
video.addEventListener('loadedmetadata', function() {
|
||||
loadingEl.classList.add('hidden');
|
||||
statusEl.textContent = 'Playing';
|
||||
video.play().catch(() => {});
|
||||
});
|
||||
} else {
|
||||
statusEl.textContent = 'HLS not supported';
|
||||
statusEl.classList.add('text-red-400');
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize with appropriate mode
|
||||
updateModeUI(currentMode);
|
||||
initHls(currentMode);
|
||||
|
||||
window.addEventListener('beforeunload', function() {
|
||||
if (hls) hls.destroy();
|
||||
});
|
||||
})();
|
||||
</script>
|
||||
{% endif %}
|
||||
|
||||
<!-- Tabs -->
|
||||
<div class="border-b border-gray-700 mb-6">
|
||||
<nav class="flex space-x-8">
|
||||
@@ -190,23 +550,85 @@
|
||||
}
|
||||
</style>
|
||||
<script>
|
||||
-        // Syntax highlight S-expressions
+        // Single-pass S-expression syntax highlighter (avoids regex corruption)
|
||||
function highlightSexp(text) {
|
||||
const SPECIAL = new Set(['plan','recipe','def','->','stream','let','lambda','if','cond','define']);
|
||||
const PRIMS = new Set(['source','effect','sequence','segment','resize','transform','layer','blend','mux','analyze','fused-pipeline']);
|
||||
        function esc(s) { return s.replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;'); }
|
||||
function span(cls, s) { return '<span class="' + cls + '">' + esc(s) + '</span>'; }
|
||||
|
||||
let out = '', i = 0, len = text.length;
|
||||
while (i < len) {
|
||||
// Comments
|
||||
if (text[i] === ';' && i + 1 < len && text[i+1] === ';') {
|
||||
let end = text.indexOf('\n', i);
|
||||
if (end === -1) end = len;
|
||||
out += span('text-gray-500', text.slice(i, end));
|
||||
i = end;
|
||||
}
|
||||
// Strings
|
||||
else if (text[i] === '"') {
|
||||
let j = i + 1;
|
||||
while (j < len && text[j] !== '"') { if (text[j] === '\\') j++; j++; }
|
||||
if (j < len) j++; // closing quote
|
||||
out += span('text-green-400', text.slice(i, j));
|
||||
i = j;
|
||||
}
|
||||
// Keywords (:keyword)
|
||||
else if (text[i] === ':' && i + 1 < len && /[a-zA-Z_-]/.test(text[i+1])) {
|
||||
let j = i + 1;
|
||||
while (j < len && /[a-zA-Z0-9_-]/.test(text[j])) j++;
|
||||
out += span('text-purple-400', text.slice(i, j));
|
||||
i = j;
|
||||
}
|
||||
// Open paren - check for primitive/special after it
|
||||
else if (text[i] === '(') {
|
||||
out += span('text-yellow-500', '(');
|
||||
i++;
|
||||
// Skip whitespace after paren
|
||||
let ws = '';
|
||||
while (i < len && (text[i] === ' ' || text[i] === '\t')) { ws += text[i]; i++; }
|
||||
out += esc(ws);
|
||||
// Check if next word is a special form or primitive
|
||||
if (i < len && /[a-zA-Z_>-]/.test(text[i])) {
|
||||
let j = i;
|
||||
while (j < len && /[a-zA-Z0-9_>-]/.test(text[j])) j++;
|
||||
let word = text.slice(i, j);
|
||||
if (SPECIAL.has(word)) out += span('text-pink-400 font-semibold', word);
|
||||
else if (PRIMS.has(word)) out += span('text-blue-400', word);
|
||||
else out += esc(word);
|
||||
i = j;
|
||||
}
|
||||
}
|
||||
// Close paren
|
||||
else if (text[i] === ')') {
|
||||
out += span('text-yellow-500', ')');
|
||||
i++;
|
||||
}
|
||||
// Numbers
|
||||
else if (/[0-9]/.test(text[i]) && (i === 0 || /[\s(]/.test(text[i-1]))) {
|
||||
let j = i;
|
||||
while (j < len && /[0-9.]/.test(text[j])) j++;
|
||||
out += span('text-orange-300', text.slice(i, j));
|
||||
i = j;
|
||||
}
|
||||
// Regular text
|
||||
else {
|
||||
let j = i;
|
||||
while (j < len && !'(;":)'.includes(text[j])) {
|
||||
if (text[j] === ':' && j + 1 < len && /[a-zA-Z_-]/.test(text[j+1])) break;
|
||||
if (/[0-9]/.test(text[j]) && (j === 0 || /[\s(]/.test(text[j-1]))) break;
|
||||
j++;
|
||||
}
|
||||
if (j === i) { out += esc(text[i]); i++; } // safety: advance at least 1 char
|
||||
else { out += esc(text.slice(i, j)); i = j; }
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
        document.querySelectorAll('.sexp-code').forEach(el => {
-            let html = el.textContent;
-            // Comments
-            html = html.replace(/(;;.*)/g, '<span class="text-gray-500">$1</span>');
-            // Keywords (:keyword)
-            html = html.replace(/(:[a-zA-Z_-]+)/g, '<span class="text-purple-400">$1</span>');
-            // Strings
-            html = html.replace(/("(?:[^"\\]|\\.)*")/g, '<span class="text-green-400">$1</span>');
-            // Special forms
-            html = html.replace(/\b(plan|recipe|def|->)\b/g, '<span class="text-pink-400 font-semibold">$1</span>');
-            // Primitives
-            html = html.replace(/\((source|effect|sequence|segment|resize|transform|layer|blend|mux|analyze)\b/g,
-                '(<span class="text-blue-400">$1</span>');
-            // Parentheses
-            html = html.replace(/(\(|\))/g, '<span class="text-yellow-500">$1</span>');
-            el.innerHTML = html;
+            el.innerHTML = highlightSexp(el.textContent);
        });
|
||||
</script>
|
||||
{% else %}
|
||||
@@ -396,18 +818,26 @@
|
||||
<div class="mt-8 bg-gray-800 rounded-lg p-6">
|
||||
<h3 class="text-lg font-semibold mb-4">Output</h3>
|
||||
|
||||
-    {# Inline media preview #}
+    {# Inline media preview - prefer IPFS URLs when available #}
|
||||
<div class="mb-4">
|
||||
{% if output_media_type and output_media_type.startswith('image/') %}
|
||||
<a href="/cache/{{ run.output_cid }}" class="block">
|
||||
<img src="/cache/{{ run.output_cid }}/raw" alt="Output"
|
||||
<a href="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}{% endif %}" class="block">
|
||||
<img src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" alt="Output"
|
||||
class="max-w-full max-h-96 rounded-lg mx-auto">
|
||||
</a>
|
||||
{% elif output_media_type and output_media_type.startswith('video/') %}
|
||||
<video src="/cache/{{ run.output_cid }}/raw" controls
|
||||
{# HLS streams use the unified player above; show direct video for non-HLS #}
|
||||
{% if run.ipfs_playlist_cid %}
|
||||
<div class="text-gray-400 text-sm py-4">
|
||||
HLS stream available in player above. Use "From Start" to watch from beginning or "Live Edge" to follow rendering progress.
|
||||
</div>
|
||||
{% else %}
|
||||
{# Direct video file #}
|
||||
<video src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" controls
|
||||
class="max-w-full max-h-96 rounded-lg mx-auto"></video>
|
||||
{% endif %}
|
||||
{% elif output_media_type and output_media_type.startswith('audio/') %}
|
||||
<audio src="/cache/{{ run.output_cid }}/raw" controls class="w-full"></audio>
|
||||
<audio src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" controls class="w-full"></audio>
|
||||
{% else %}
|
||||
<div class="bg-gray-900 rounded-lg p-8 text-center text-gray-500">
|
||||
<div class="text-4xl mb-2">?</div>
|
||||
@@ -417,16 +847,25 @@
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-between">
|
||||
<a href="/cache/{{ run.output_cid }}" class="font-mono text-sm text-blue-400 hover:text-blue-300">
|
||||
{{ run.output_cid }}
|
||||
<a href="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}{% endif %}"
|
||||
class="font-mono text-sm text-blue-400 hover:text-blue-300">
|
||||
{% if run.ipfs_cid %}{{ run.ipfs_cid }}{% else %}{{ run.output_cid }}{% endif %}
|
||||
</a>
|
||||
{% if run.output_ipfs_cid %}
|
||||
<a href="https://ipfs.io/ipfs/{{ run.output_ipfs_cid }}"
|
||||
target="_blank"
|
||||
class="text-gray-400 hover:text-white text-sm">
|
||||
IPFS: {{ run.output_ipfs_cid[:16] }}...
|
||||
</a>
|
||||
{% endif %}
|
||||
<div class="flex items-center space-x-4">
|
||||
{% if run.ipfs_playlist_cid %}
|
||||
<a href="/ipfs/{{ run.ipfs_playlist_cid }}"
|
||||
class="text-gray-400 hover:text-white text-sm">
|
||||
HLS Playlist
|
||||
</a>
|
||||
{% endif %}
|
||||
{% if run.ipfs_cid %}
|
||||
<a href="https://ipfs.io/ipfs/{{ run.ipfs_cid }}"
|
||||
target="_blank"
|
||||
class="text-gray-400 hover:text-white text-sm">
|
||||
View on IPFS Gateway
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
99
app/templates/runs/plan.html
Normal file
@@ -0,0 +1,99 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Run Plan - {{ run_id[:16] }}{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<script src="https://unpkg.com/cytoscape@3.25.0/dist/cytoscape.min.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="mb-6">
|
||||
<a href="/runs/{{ run_id }}/detail" class="inline-flex items-center text-blue-400 hover:text-blue-300">
|
||||
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
|
||||
</svg>
|
||||
Back to Run
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<h1 class="text-2xl font-bold text-white mb-6">Execution Plan</h1>
|
||||
|
||||
{% if plan %}
|
||||
<div class="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||
<!-- DAG Visualization -->
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<h2 class="text-lg font-semibold text-white mb-4">DAG Visualization</h2>
|
||||
<div id="dag-container" class="w-full h-96 bg-gray-900 rounded"></div>
|
||||
</div>
|
||||
|
||||
<!-- Steps List -->
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<h2 class="text-lg font-semibold text-white mb-4">Steps ({{ plan.steps|length if plan.steps else 0 }})</h2>
|
||||
<div class="space-y-3 max-h-96 overflow-y-auto">
|
||||
{% for step in plan.get('steps', []) %}
|
||||
<div class="bg-gray-900 rounded-lg p-3">
|
||||
<div class="flex items-center justify-between mb-2">
|
||||
<span class="font-medium text-white">{{ step.name or step.id or 'Step ' ~ loop.index }}</span>
|
||||
<span class="px-2 py-0.5 text-xs rounded {% if step.status == 'completed' %}bg-green-600{% elif step.cached %}bg-blue-600{% else %}bg-gray-600{% endif %}">
|
||||
{{ step.status or ('cached' if step.cached else 'pending') }}
|
||||
</span>
|
||||
</div>
|
||||
{% if step.cache_id %}
|
||||
<div class="text-xs text-gray-400 font-mono truncate">
|
||||
{{ step.cache_id[:24] }}...
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-gray-500">No steps defined</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
const elements = {{ dag_elements | tojson | safe }};
|
||||
|
||||
if (elements.length > 0) {
|
||||
cytoscape({
|
||||
container: document.getElementById('dag-container'),
|
||||
elements: elements,
|
||||
style: [
|
||||
{
|
||||
selector: 'node',
|
||||
style: {
|
||||
'background-color': 'data(color)',
|
||||
'label': 'data(label)',
|
||||
'color': '#fff',
|
||||
'text-valign': 'bottom',
|
||||
'text-margin-y': 5,
|
||||
'font-size': '10px'
|
||||
}
|
||||
},
|
||||
{
|
||||
selector: 'edge',
|
||||
style: {
|
||||
'width': 2,
|
||||
'line-color': '#6b7280',
|
||||
'target-arrow-color': '#6b7280',
|
||||
'target-arrow-shape': 'triangle',
|
||||
'curve-style': 'bezier'
|
||||
}
|
||||
}
|
||||
],
|
||||
layout: {
|
||||
name: 'breadthfirst',
|
||||
directed: true,
|
||||
padding: 20
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% else %}
|
||||
<div class="bg-gray-800 rounded-lg p-6 text-center">
|
||||
<p class="text-gray-400">No execution plan available for this run.</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endblock %}
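For reference, a sketch of the server-side `dag_elements` this template expects: a flat list of Cytoscape.js elements where nodes carry `data.id`/`label`/`color` and edges carry `data.source`/`target`. The `depends_on` field name and the colors are assumptions, not confirmed by the diff:

def build_dag_elements(plan: dict) -> list:
    """Build Cytoscape.js elements from a plan dict (illustrative only)."""
    elements = []
    for step in plan.get("steps", []):
        node_id = step.get("id") or step.get("name")
        # Green for completed steps, gray otherwise (colors are arbitrary here)
        color = "#16a34a" if step.get("status") == "completed" else "#4b5563"
        elements.append({"data": {"id": node_id, "label": step.get("name") or node_id, "color": color}})
        for dep in step.get("depends_on", []):  # assumed dependency field
            elements.append({"data": {"source": dep, "target": node_id}})
    return elements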
|
||||
398
cache_manager.py
@@ -162,153 +162,127 @@ class L1CacheManager:
|
||||
is_shared_fn=self._is_shared_by_node_id,
|
||||
)
|
||||
|
||||
# Content hash index: cid -> node_id
|
||||
# Uses Redis if available, falls back to in-memory dict
|
||||
self._content_index: Dict[str, str] = {}
|
||||
self._load_content_index()
|
||||
|
||||
# IPFS CID index: cid -> ipfs_cid
|
||||
self._ipfs_cids: Dict[str, str] = {}
|
||||
self._load_ipfs_index()
|
||||
|
||||
# Legacy files directory (for files uploaded directly by cid)
|
||||
self.legacy_dir = self.cache_dir / "legacy"
|
||||
self.legacy_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
-    def _index_path(self) -> Path:
-        return self.cache_dir / "content_index.json"
-
-    def _load_content_index(self):
-        """Load cid -> node_id index from Redis or JSON file."""
-        # If Redis available and has data, use it
-        if self._redis:
-            try:
-                redis_data = self._redis.hgetall(self._redis_content_key)
-                if redis_data:
-                    self._content_index = {
-                        k.decode() if isinstance(k, bytes) else k:
-                        v.decode() if isinstance(v, bytes) else v
-                        for k, v in redis_data.items()
-                    }
-                    logger.info(f"Loaded {len(self._content_index)} content index entries from Redis")
-                    return
-            except Exception as e:
-                logger.warning(f"Failed to load content index from Redis: {e}")
-
-        # Fall back to JSON file
-        if self._index_path().exists():
-            try:
-                with open(self._index_path()) as f:
-                    self._content_index = json.load(f)
-            except (json.JSONDecodeError, IOError) as e:
-                logger.warning(f"Failed to load content index: {e}")
-                self._content_index = {}
-
-        # Also index from existing cache entries
-        for entry in self.cache.list_entries():
-            if entry.cid:
-                self._content_index[entry.cid] = entry.node_id
-
-        # Migrate to Redis if available
-        if self._redis and self._content_index:
-            try:
-                self._redis.hset(self._redis_content_key, mapping=self._content_index)
-                logger.info(f"Migrated {len(self._content_index)} content index entries to Redis")
-            except Exception as e:
-                logger.warning(f"Failed to migrate content index to Redis: {e}")
-
-    def _save_content_index(self):
-        """Save cid -> node_id index to Redis and JSON file."""
-        # Always save to JSON as backup
-        with open(self._index_path(), "w") as f:
-            json.dump(self._content_index, f, indent=2)
-
-    def _set_content_index(self, cid: str, node_id: str):
-        """Set a single content index entry (Redis + in-memory)."""
-        self._content_index[cid] = node_id
-        if self._redis:
-            try:
-                self._redis.hset(self._redis_content_key, cid, node_id)
-            except Exception as e:
-                logger.warning(f"Failed to set content index in Redis: {e}")
-        self._save_content_index()
-
-    def _get_content_index(self, cid: str) -> Optional[str]:
-        """Get a content index entry (Redis-first, then in-memory)."""
-        if self._redis:
-            try:
-                val = self._redis.hget(self._redis_content_key, cid)
-                if val:
-                    return val.decode() if isinstance(val, bytes) else val
-            except Exception as e:
-                logger.warning(f"Failed to get content index from Redis: {e}")
-        return self._content_index.get(cid)
-
-    def _del_content_index(self, cid: str):
-        """Delete a content index entry."""
-        if cid in self._content_index:
-            del self._content_index[cid]
-        if self._redis:
-            try:
-                self._redis.hdel(self._redis_content_key, cid)
-            except Exception as e:
-                logger.warning(f"Failed to delete content index from Redis: {e}")
-        self._save_content_index()
-
-    def _ipfs_index_path(self) -> Path:
-        return self.cache_dir / "ipfs_index.json"
-
-    def _load_ipfs_index(self):
-        """Load cid -> ipfs_cid index from Redis or JSON file."""
-        # If Redis available and has data, use it
-        if self._redis:
-            try:
-                redis_data = self._redis.hgetall(self._redis_ipfs_key)
-                if redis_data:
-                    self._ipfs_cids = {
-                        k.decode() if isinstance(k, bytes) else k:
-                        v.decode() if isinstance(v, bytes) else v
-                        for k, v in redis_data.items()
-                    }
-                    logger.info(f"Loaded {len(self._ipfs_cids)} IPFS index entries from Redis")
-                    return
-            except Exception as e:
-                logger.warning(f"Failed to load IPFS index from Redis: {e}")
-
-        # Fall back to JSON file
-        if self._ipfs_index_path().exists():
-            try:
-                with open(self._ipfs_index_path()) as f:
-                    self._ipfs_cids = json.load(f)
-            except (json.JSONDecodeError, IOError) as e:
-                logger.warning(f"Failed to load IPFS index: {e}")
-                self._ipfs_cids = {}
-
-        # Migrate to Redis if available
-        if self._redis and self._ipfs_cids:
-            try:
-                self._redis.hset(self._redis_ipfs_key, mapping=self._ipfs_cids)
-                logger.info(f"Migrated {len(self._ipfs_cids)} IPFS index entries to Redis")
-            except Exception as e:
-                logger.warning(f"Failed to migrate IPFS index to Redis: {e}")
-
-    def _save_ipfs_index(self):
-        """Save cid -> ipfs_cid index to JSON file (backup)."""
-        with open(self._ipfs_index_path(), "w") as f:
-            json.dump(self._ipfs_cids, f, indent=2)
+    # ============ Redis Index (no JSON files) ============
+    #
+    # Content index maps: CID (content hash or IPFS CID) -> node_id (code hash)
+    # IPFS index maps: node_id -> IPFS CID
+    #
+    # Database is the ONLY source of truth for cache_id -> ipfs_cid mapping.
+    # No fallbacks - failures raise exceptions.
+
+    def _run_async(self, coro):
+        """Run async coroutine from sync context.
+
+        Always creates a fresh event loop to avoid issues with Celery's
+        prefork workers where loops may be closed by previous tasks.
+        """
+        import asyncio
+
+        # Check if we're already in an async context
+        try:
+            asyncio.get_running_loop()
+            # We're in an async context - use a thread with its own loop
+            import threading
+            result = [None]
+            error = [None]
+
+            def run_in_thread():
+                try:
+                    new_loop = asyncio.new_event_loop()
+                    asyncio.set_event_loop(new_loop)
+                    try:
+                        result[0] = new_loop.run_until_complete(coro)
+                    finally:
+                        new_loop.close()
+                except Exception as e:
+                    error[0] = e
+
+            thread = threading.Thread(target=run_in_thread)
+            thread.start()
+            thread.join(timeout=30)
+            if error[0]:
+                raise error[0]
+            return result[0]
+        except RuntimeError:
+            # No running loop - create a fresh one (don't reuse potentially closed loops)
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            try:
+                return loop.run_until_complete(coro)
+            finally:
+                loop.close()
+
+    def _set_content_index(self, cache_id: str, ipfs_cid: str):
+        """Set content index entry in database (cache_id -> ipfs_cid)."""
+        import database
+
+        async def save_to_db():
+            import asyncpg
+            conn = await asyncpg.connect(database.DATABASE_URL)
+            try:
+                await conn.execute(
+                    """
+                    INSERT INTO cache_items (cid, ipfs_cid)
+                    VALUES ($1, $2)
+                    ON CONFLICT (cid) DO UPDATE SET ipfs_cid = $2
+                    """,
+                    cache_id, ipfs_cid
+                )
+            finally:
+                await conn.close()
+
+        self._run_async(save_to_db())
+        logger.info(f"Indexed in database: {cache_id[:16]}... -> {ipfs_cid}")
+
+    def _get_content_index(self, cache_id: str) -> Optional[str]:
+        """Get content index entry (cache_id -> ipfs_cid) from database."""
+        import database
+
+        async def get_from_db():
+            import asyncpg
+            conn = await asyncpg.connect(database.DATABASE_URL)
+            try:
+                row = await conn.fetchrow(
+                    "SELECT ipfs_cid FROM cache_items WHERE cid = $1",
+                    cache_id
+                )
+                return {"ipfs_cid": row["ipfs_cid"]} if row else None
+            finally:
+                await conn.close()
+
+        result = self._run_async(get_from_db())
+        if result and result.get("ipfs_cid"):
+            return result["ipfs_cid"]
+        return None
+
+    def _del_content_index(self, cache_id: str):
+        """Delete content index entry from database."""
+        import database
+
+        async def delete_from_db():
+            import asyncpg
+            conn = await asyncpg.connect(database.DATABASE_URL)
+            try:
+                await conn.execute("DELETE FROM cache_items WHERE cid = $1", cache_id)
+            finally:
+                await conn.close()
+
+        self._run_async(delete_from_db())

    def _set_ipfs_index(self, cid: str, ipfs_cid: str):
-        """Set a single IPFS index entry (Redis + in-memory)."""
-        self._ipfs_cids[cid] = ipfs_cid
+        """Set IPFS index entry in Redis."""
        if self._redis:
            try:
                self._redis.hset(self._redis_ipfs_key, cid, ipfs_cid)
            except Exception as e:
                logger.warning(f"Failed to set IPFS index in Redis: {e}")
-        self._save_ipfs_index()

    def _get_ipfs_cid_from_index(self, cid: str) -> Optional[str]:
-        """Get IPFS CID from index (Redis-first, then in-memory)."""
+        """Get IPFS CID from Redis."""
        if self._redis:
            try:
                val = self._redis.hget(self._redis_ipfs_key, cid)
@@ -316,7 +290,7 @@ class L1CacheManager:
                    return val.decode() if isinstance(val, bytes) else val
            except Exception as e:
                logger.warning(f"Failed to get IPFS CID from Redis: {e}")
-        return self._ipfs_cids.get(cid)
+        return None
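A minimal standalone sketch of the same sync-over-async bridge used by `_run_async`, standard library only; error propagation from the worker thread is elided here (the method above re-raises via its `error` box):

import asyncio
import threading

def run_async(coro, timeout=30):
    """Run coro to completion whether or not a loop is already running."""
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        # No loop in this thread: a fresh loop is safe after Celery prefork
        loop = asyncio.new_event_loop()
        try:
            return loop.run_until_complete(coro)
        finally:
            loop.close()
    # A loop is already running here: park the coroutine on its own loop
    box = {}
    worker = threading.Thread(target=lambda: box.update(value=asyncio.run(coro)))
    worker.start()
    worker.join(timeout)
    return box.get("value")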
|
||||
|
||||
def get_ipfs_cid(self, cid: str) -> Optional[str]:
|
||||
"""Get IPFS CID for a content hash."""
|
||||
@@ -366,44 +340,58 @@ class L1CacheManager:
|
||||
        source_path: Path,
        node_type: str = "upload",
        node_id: str = None,
+       cache_id: str = None,
        execution_time: float = 0.0,
        move: bool = False,
+       skip_ipfs: bool = False,
    ) -> tuple[CachedFile, Optional[str]]:
        """
-       Store a file in the cache and upload to IPFS.
+       Store a file in the cache and optionally upload to IPFS.
+
+       Files are stored by IPFS CID when skip_ipfs=False (default), or by
+       local content hash when skip_ipfs=True. The cache_id parameter creates
+       an index from cache_id -> CID for code-addressed lookups.

        Args:
            source_path: Path to file to cache
            node_type: Type of node (e.g., "upload", "source", "effect")
-           node_id: Optional node_id; if not provided, uses CID
+           node_id: DEPRECATED - ignored, always uses CID
+           cache_id: Optional code-addressed cache ID to index
            execution_time: How long the operation took
            move: If True, move instead of copy
+           skip_ipfs: If True, skip IPFS upload and use local hash (faster for large files)

        Returns:
-           Tuple of (CachedFile with both node_id and cid, CID)
+           Tuple of (CachedFile with both node_id and cid, CID or None if skip_ipfs)
        """
-       # Upload to IPFS first to get the CID (primary identifier)
-       cid = ipfs_client.add_file(source_path)
-       if not cid:
-           # Fallback to local hash if IPFS unavailable
-           cid = file_hash(source_path)
-           logger.warning(f"IPFS unavailable, using local hash: {cid[:16]}...")
+       if skip_ipfs:
+           # Use local content hash instead of IPFS CID (much faster)
+           cid = file_hash(source_path)
+           ipfs_cid = None
+           logger.info(f"put: Using local hash (skip_ipfs=True): {cid[:16]}...")
+       else:
+           # Upload to IPFS first to get the CID (primary identifier)
+           cid = ipfs_client.add_file(source_path)
+           if not cid:
+               raise RuntimeError(f"IPFS upload failed for {source_path}. IPFS is required.")
+           ipfs_cid = cid

-       # Use CID as node_id if not provided
-       if node_id is None:
-           node_id = cid
+       # Always store by IPFS CID (node_id parameter is deprecated)
+       node_id = cid

        # Check if already cached (by node_id)
        existing = self.cache.get_entry(node_id)
        if existing and existing.output_path.exists():
-           return CachedFile.from_cache_entry(existing), cid
+           return CachedFile.from_cache_entry(existing), ipfs_cid

        # Compute local hash BEFORE moving the file (for dual-indexing)
+       # Only needed if we uploaded to IPFS (to map local hash -> IPFS CID)
        local_hash = None
-       if self._is_ipfs_cid(cid):
+       if not skip_ipfs and self._is_ipfs_cid(cid):
            local_hash = file_hash(source_path)

        # Store in local cache
+       logger.info(f"put: Storing in cache with node_id={node_id[:16]}...")
        self.cache.put(
            node_id=node_id,
            source_path=source_path,
@@ -413,19 +401,26 @@
        )

        entry = self.cache.get_entry(node_id)
+       logger.info(f"put: After cache.put, get_entry(node_id={node_id[:16]}...) returned entry={entry is not None}, path={entry.output_path if entry else None}")

-       # Update content index (CID -> node_id mapping)
-       self._set_content_index(cid, node_id)
+       # Verify we can retrieve it
+       verify_path = self.cache.get(node_id)
+       logger.info(f"put: Verify cache.get(node_id={node_id[:16]}...) = {verify_path}")

-       # Also index by local hash if cid is an IPFS CID
-       # This ensures both IPFS CID and local hash can be used to find the file
+       # Index by cache_id if provided (code-addressed cache lookup)
+       # This allows get_by_cid(cache_id) to find files stored by IPFS CID
+       if cache_id and cache_id != cid:
+           self._set_content_index(cache_id, cid)
+           logger.info(f"put: Indexed cache_id {cache_id[:16]}... -> IPFS {cid}")

+       # Also index by local hash for content-based lookup
        if local_hash and local_hash != cid:
-           self._set_content_index(local_hash, node_id)
-           logger.debug(f"Dual-indexed: {local_hash[:16]}... -> {node_id}")
+           self._set_content_index(local_hash, cid)
+           logger.debug(f"Indexed local hash {local_hash[:16]}... -> IPFS {cid}")

-       logger.info(f"Cached: {cid[:16]}...")
+       logger.info(f"Cached: {cid[:16]}..." + (" (local only)" if skip_ipfs else " (IPFS)"))

-       return CachedFile.from_cache_entry(entry), cid
+       return CachedFile.from_cache_entry(entry), ipfs_cid if not skip_ipfs else None
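A hypothetical call site for `put` as it stands after this change; the path, the `cache_manager` instance, and `plan_step_cache_id` are assumptions for illustration:

from pathlib import Path

cached, ipfs_cid = cache_manager.put(
    Path("/tmp/render/output.mp4"),
    node_type="effect",
    cache_id=plan_step_cache_id,  # code-addressed ID from the plan (assumed)
    skip_ipfs=True,               # large intermediate: index by local hash only
)
assert ipfs_cid is None  # skip_ipfs=True never returns an IPFS CID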
|
||||
|
||||
def get_by_node_id(self, node_id: str) -> Optional[Path]:
|
||||
"""Get cached file path by node_id."""
|
||||
@@ -438,42 +433,79 @@ class L1CacheManager:
|
||||
|
||||
    def get_by_cid(self, cid: str) -> Optional[Path]:
        """Get cached file path by cid or IPFS CID. Falls back to IPFS if not in local cache."""
+       logger.info(f"get_by_cid: Looking for cid={cid[:16]}...")

        # Check index first (Redis then local)
        node_id = self._get_content_index(cid)
+       logger.info(f"get_by_cid: Index lookup returned node_id={node_id[:16] if node_id else None}...")
        if node_id:
            path = self.cache.get(node_id)
+           logger.info(f"get_by_cid: cache.get(node_id={node_id[:16]}...) returned path={path}")
            if path and path.exists():
-               logger.debug(f"  Found via index: {path}")
+               logger.info(f"get_by_cid: Found via index: {path}")
                return path

+           # artdag Cache doesn't know about entry - check filesystem directly
+           # Files are stored at {cache_dir}/nodes/{node_id}/output.*
+           nodes_dir = self.cache_dir / "nodes" / node_id
+           if nodes_dir.exists():
+               for f in nodes_dir.iterdir():
+                   if f.name.startswith("output."):
+                       logger.info(f"get_by_cid: Found on filesystem: {f}")
+                       return f

        # For uploads, node_id == cid, so try direct lookup
        # This works even if cache index hasn't been reloaded
        path = self.cache.get(cid)
-       logger.debug(f"  cache.get({cid[:16]}...) returned: {path}")
+       logger.info(f"get_by_cid: Direct cache.get({cid[:16]}...) returned: {path}")
        if path and path.exists():
            self._set_content_index(cid, cid)
            return path

+       # Check filesystem directly for cid as node_id
+       nodes_dir = self.cache_dir / "nodes" / cid
+       if nodes_dir.exists():
+           for f in nodes_dir.iterdir():
+               if f.name.startswith("output."):
+                   logger.info(f"get_by_cid: Found on filesystem (direct): {f}")
+                   self._set_content_index(cid, cid)
+                   return f

        # Scan cache entries (fallback for new structure)
        entry = self.cache.find_by_cid(cid)
+       logger.info(f"get_by_cid: find_by_cid({cid[:16]}...) returned entry={entry}")
        if entry and entry.output_path.exists():
-           logger.debug(f"  Found via scan: {entry.output_path}")
+           logger.info(f"get_by_cid: Found via scan: {entry.output_path}")
            self._set_content_index(cid, entry.node_id)
            return entry.output_path

+       # Check legacy location (files stored directly as CACHE_DIR/{cid})
+       legacy_path = self.cache_dir / cid
+       logger.info(f"get_by_cid: Checking legacy path: {legacy_path} exists={legacy_path.exists()}")
+       if legacy_path.exists() and legacy_path.is_file():
+           logger.info(f"get_by_cid: Found at legacy path: {legacy_path}")
+           return legacy_path

-       # Try to recover from IPFS if we have a CID
-       ipfs_cid = self._get_ipfs_cid_from_index(cid)
-       if ipfs_cid:
-           logger.info(f"Recovering from IPFS: {cid[:16]}... ({ipfs_cid})")
-           recovery_path = self.legacy_dir / cid
-           if ipfs_client.get_file(ipfs_cid, recovery_path):
-               logger.info(f"Recovered from IPFS: {recovery_path}")
-               return recovery_path
+       # Fetch from IPFS - this is the source of truth for all content
+       if self._is_ipfs_cid(cid):
+           logger.info(f"get_by_cid: Fetching from IPFS: {cid[:16]}...")
+           recovery_path = self.legacy_dir / cid
+           recovery_path.parent.mkdir(parents=True, exist_ok=True)
+           if ipfs_client.get_file(cid, str(recovery_path)):
+               logger.info(f"get_by_cid: Fetched from IPFS: {recovery_path}")
+               self._set_content_index(cid, cid)
+               return recovery_path
+           else:
+               logger.warning(f"get_by_cid: IPFS fetch failed for {cid[:16]}...")

+       # Also try with a mapped IPFS CID if different from cid
+       ipfs_cid = self._get_ipfs_cid_from_index(cid)
+       if ipfs_cid and ipfs_cid != cid:
+           logger.info(f"get_by_cid: Fetching from IPFS via mapping: {ipfs_cid[:16]}...")
+           recovery_path = self.legacy_dir / cid
+           recovery_path.parent.mkdir(parents=True, exist_ok=True)
+           if ipfs_client.get_file(ipfs_cid, str(recovery_path)):
+               logger.info(f"get_by_cid: Fetched from IPFS: {recovery_path}")
+               return recovery_path

        return None
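For reference, the fallback chain the new version implements, in order; this constant is purely illustrative:

# Illustrative summary only - each entry mirrors one branch in get_by_cid above.
GET_BY_CID_LOOKUP_ORDER = (
    "content index: cache_id / local hash -> node_id (CID)",
    "direct cache.get(cid)",
    "filesystem: {cache_dir}/nodes/{cid}/output.*",
    "cache.find_by_cid(cid) scan",
    "legacy flat file: {cache_dir}/{cid}",
    "IPFS fetch by cid, then by mapped ipfs_cid",
)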
|
||||
@@ -690,11 +722,26 @@ class L1CacheManager:
|
||||
return True, "Activity discarded"
|
||||
return False, "Failed to discard"
|
||||
|
||||
def _is_used_by_other_activities(self, node_id: str, exclude_activity_id: str) -> bool:
|
||||
"""Check if a node is used by any activity other than the excluded one."""
|
||||
for other_activity in self.activity_store.list():
|
||||
if other_activity.activity_id == exclude_activity_id:
|
||||
continue
|
||||
# Check if used as input, output, or intermediate
|
||||
if node_id in other_activity.input_ids:
|
||||
return True
|
||||
if node_id == other_activity.output_id:
|
||||
return True
|
||||
if node_id in other_activity.intermediate_ids:
|
||||
return True
|
||||
return False
|
||||
|
||||
def discard_activity_outputs_only(self, activity_id: str) -> tuple[bool, str]:
|
||||
"""
|
||||
Discard an activity, deleting only outputs and intermediates.
|
||||
|
||||
Inputs (cache items, configs) are preserved.
|
||||
Outputs/intermediates used by other activities are preserved.
|
||||
|
||||
Returns:
|
||||
(success, message) tuple
|
||||
@@ -711,21 +758,31 @@ class L1CacheManager:
|
||||
if pinned:
|
||||
return False, f"Output is pinned ({reason})"
|
||||
|
||||
-       # Delete output
-       if activity.output_id:
-           entry = self.cache.get_entry(activity.output_id)
-           if entry:
-               # Remove from cache
-               self.cache.remove(activity.output_id)
-               # Remove from content index (Redis + local)
-               self._del_content_index(entry.cid)
-               # Delete from legacy dir if exists
-               legacy_path = self.legacy_dir / entry.cid
-               if legacy_path.exists():
-                   legacy_path.unlink()
+       deleted_outputs = 0
+       preserved_shared = 0

-       # Delete intermediates
+       # Delete output (only if not used by other activities)
+       if activity.output_id:
+           if self._is_used_by_other_activities(activity.output_id, activity_id):
+               preserved_shared += 1
+           else:
+               entry = self.cache.get_entry(activity.output_id)
+               if entry:
+                   # Remove from cache
+                   self.cache.remove(activity.output_id)
+                   # Remove from content index (Redis + local)
+                   self._del_content_index(entry.cid)
+                   # Delete from legacy dir if exists
+                   legacy_path = self.legacy_dir / entry.cid
+                   if legacy_path.exists():
+                       legacy_path.unlink()
+                   deleted_outputs += 1

+       # Delete intermediates (only if not used by other activities)
        for node_id in activity.intermediate_ids:
+           if self._is_used_by_other_activities(node_id, activity_id):
+               preserved_shared += 1
+               continue
            entry = self.cache.get_entry(node_id)
            if entry:
                self.cache.remove(node_id)
@@ -733,11 +790,16 @@
                legacy_path = self.legacy_dir / entry.cid
                if legacy_path.exists():
                    legacy_path.unlink()
+               deleted_outputs += 1

        # Remove activity record (inputs remain in cache)
        self.activity_store.remove(activity_id)

-       return True, "Activity discarded (outputs only)"
+       msg = f"Activity discarded (deleted {deleted_outputs} outputs"
+       if preserved_shared > 0:
+           msg += f", preserved {preserved_shared} shared items"
+       msg += ")"
+       return True, msg
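A hypothetical call site for the new return shape; `manager` is assumed to be an L1CacheManager instance:

ok, message = manager.discard_activity_outputs_only(activity_id)
if ok:
    logger.info(message)  # e.g. "Activity discarded (deleted 3 outputs, preserved 1 shared items)"
else:
    logger.warning(f"Discard failed: {message}")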
|
||||
|
||||
def cleanup_intermediates(self) -> int:
|
||||
"""Delete all intermediate cache entries (reconstructible)."""
|
||||
|
||||
@@ -1,22 +1,37 @@
|
||||
"""
|
||||
Art DAG Celery Application
|
||||
|
||||
-Distributed rendering for the Art DAG system.
-Uses the foundational artdag language from GitHub.
+Streaming video rendering for the Art DAG system.
+Uses S-expression recipes with frame-by-frame processing.
"""

import os
+import sys
from celery import Celery
+from celery.signals import worker_ready

-REDIS_URL = os.environ.get('REDIS_URL', 'redis://localhost:6379/5')
+# Use central config
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+from app.config import settings

app = Celery(
    'art_celery',
-   broker=REDIS_URL,
-   backend=REDIS_URL,
-   include=['legacy_tasks', 'tasks', 'tasks.analyze', 'tasks.execute', 'tasks.orchestrate', 'tasks.execute_sexp']
+   broker=settings.redis_url,
+   backend=settings.redis_url,
+   include=['tasks', 'tasks.streaming', 'tasks.ipfs_upload']
|
||||
)
|
||||
|
||||
|
||||
+@worker_ready.connect
+def log_config_on_startup(sender, **kwargs):
+    """Log configuration when worker starts."""
+    print("=" * 60, file=sys.stderr)
+    print("WORKER STARTED - CONFIGURATION", file=sys.stderr)
+    print("=" * 60, file=sys.stderr)
+    settings.log_config()
+    print(f"Worker: {sender}", file=sys.stderr)
+    print("=" * 60, file=sys.stderr)
|
||||
|
||||
app.conf.update(
|
||||
result_expires=86400 * 7, # 7 days - allow time for recovery after restarts
|
||||
task_serializer='json',
|
||||
|
||||
17
configs/audio-dizzy.sexp
Normal file
@@ -0,0 +1,17 @@
|
||||
;; Audio Configuration - dizzy.mp3
|
||||
;;
|
||||
;; Defines audio analyzer and playback for a recipe.
|
||||
;; Pass to recipe with: --audio configs/audio-dizzy.sexp
|
||||
;;
|
||||
;; Provides:
|
||||
;; - music: audio analyzer for beat/energy detection
|
||||
;; - audio-playback: path for synchronized playback
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
;; Audio analyzer (provides beat detection and energy levels)
|
||||
;; Paths relative to working directory (project root)
|
||||
(def music (streaming:make-audio-analyzer "dizzy.mp3"))
|
||||
|
||||
;; Audio playback path (for sync with video output)
|
||||
(audio-playback "dizzy.mp3")
|
||||
17
configs/audio-halleluwah.sexp
Normal file
@@ -0,0 +1,17 @@
|
||||
;; Audio Configuration - woods-audio
|
||||
;;
|
||||
;; Defines audio analyzer and playback for a recipe.
|
||||
;; Pass to recipe with: --audio configs/audio-halleluwah.sexp
|
||||
;;
|
||||
;; Provides:
|
||||
;; - music: audio analyzer for beat/energy detection
|
||||
;; - audio-playback: path for synchronized playback
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
;; Audio analyzer (provides beat detection and energy levels)
|
||||
;; Using friendly name for asset resolution
|
||||
(def music (streaming:make-audio-analyzer "woods-audio"))
|
||||
|
||||
;; Audio playback path (for sync with video output)
|
||||
(audio-playback "woods-audio")
|
||||
38
configs/sources-default.sexp
Normal file
@@ -0,0 +1,38 @@
|
||||
;; Default Sources Configuration
|
||||
;;
|
||||
;; Defines video sources and per-pair effect configurations.
|
||||
;; Pass to recipe with: --sources configs/sources-default.sexp
|
||||
;;
|
||||
;; Required by recipes using process-pair macro:
|
||||
;; - sources: array of video sources
|
||||
;; - pair-configs: array of effect configurations per source
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
;; Video sources array
|
||||
;; Paths relative to working directory (project root)
|
||||
(def sources [
|
||||
(streaming:make-video-source "monday.webm" 30)
|
||||
(streaming:make-video-source "escher.webm" 30)
|
||||
(streaming:make-video-source "2.webm" 30)
|
||||
(streaming:make-video-source "disruptors.webm" 30)
|
||||
(streaming:make-video-source "4.mp4" 30)
|
||||
(streaming:make-video-source "ecstacy.mp4" 30)
|
||||
(streaming:make-video-source "dopple.webm" 30)
|
||||
(streaming:make-video-source "5.mp4" 30)
|
||||
])
|
||||
|
||||
;; Per-pair effect config: rotation direction, rotation ranges, zoom ranges
|
||||
;; :dir = rotation direction (1 or -1)
|
||||
;; :rot-a, :rot-b = max rotation angles for clip A and B
|
||||
;; :zoom-a, :zoom-b = max zoom amounts for clip A and B
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 0: monday
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 1: escher
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 2: vid2
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5} ;; 3: disruptors (reversed)
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 4: vid4
|
||||
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7} ;; 5: ecstacy (smaller)
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5} ;; 6: dopple (reversed)
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 7: vid5
|
||||
])
|
||||
19
configs/sources-woods-half.sexp
Normal file
@@ -0,0 +1,19 @@
|
||||
;; Half-resolution Woods Sources (960x540)
|
||||
;;
|
||||
;; Pass to recipe with: --sources configs/sources-woods-half.sexp
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
(def sources [
|
||||
(streaming:make-video-source "woods_half/1.webm" 30)
|
||||
(streaming:make-video-source "woods_half/2.webm" 30)
|
||||
(streaming:make-video-source "woods_half/3.webm" 30)
|
||||
(streaming:make-video-source "woods_half/4.webm" 30)
|
||||
])
|
||||
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
|
||||
])
|
||||
39
configs/sources-woods.sexp
Normal file
@@ -0,0 +1,39 @@
|
||||
;; Woods Sources Configuration
|
||||
;;
|
||||
;; Defines video sources and per-pair effect configurations.
|
||||
;; Pass to recipe with: --sources configs/sources-woods.sexp
|
||||
;;
|
||||
;; Required by recipes using process-pair macro:
|
||||
;; - sources: array of video sources
|
||||
;; - pair-configs: array of effect configurations per source
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
;; Video sources array
|
||||
;; Using friendly names for asset resolution
|
||||
(def sources [
|
||||
(streaming:make-video-source "woods-1" 10)
|
||||
(streaming:make-video-source "woods-2" 10)
|
||||
(streaming:make-video-source "woods-3" 10)
|
||||
(streaming:make-video-source "woods-4" 10)
|
||||
(streaming:make-video-source "woods-5" 10)
|
||||
(streaming:make-video-source "woods-6" 10)
|
||||
(streaming:make-video-source "woods-7" 10)
|
||||
(streaming:make-video-source "woods-8" 10)
|
||||
])
|
||||
|
||||
;; Per-pair effect config: rotation direction, rotation ranges, zoom ranges
|
||||
;; :dir = rotation direction (1 or -1)
|
||||
;; :rot-a, :rot-b = max rotation angles for clip A and B
|
||||
;; :zoom-a, :zoom-b = max zoom amounts for clip A and B
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}  ;; 0: woods-1
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 1: woods-2
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 2: woods-3
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}  ;; 3: woods-4 (reversed)
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}  ;; 4: woods-5
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 5: woods-6
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}  ;; 6: woods-7
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}   ;; 7: woods-8
|
||||
|
||||
])
|
||||
364
database.py
@@ -11,7 +11,9 @@ from typing import List, Optional
|
||||
|
||||
import asyncpg
|
||||
|
||||
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://artdag:artdag@localhost:5432/artdag")
|
||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||
if not DATABASE_URL:
|
||||
raise RuntimeError("DATABASE_URL environment variable is required")
|
||||
|
||||
pool: Optional[asyncpg.Pool] = None
|
||||
|
||||
@@ -90,13 +92,50 @@ CREATE TABLE IF NOT EXISTS pending_runs (
|
||||
recipe VARCHAR(255) NOT NULL,
|
||||
inputs JSONB NOT NULL,
|
||||
dag_json TEXT,
|
||||
plan_cid VARCHAR(128),
|
||||
output_name VARCHAR(255),
|
||||
actor_id VARCHAR(255),
|
||||
error TEXT,
|
||||
ipfs_playlist_cid VARCHAR(128), -- For streaming: IPFS CID of HLS playlist
|
||||
quality_playlists JSONB, -- For streaming: quality-level playlist CIDs {quality_name: {cid, width, height, bitrate}}
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Add ipfs_playlist_cid if table exists but column doesn't (migration)
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'pending_runs' AND column_name = 'ipfs_playlist_cid') THEN
|
||||
ALTER TABLE pending_runs ADD COLUMN ipfs_playlist_cid VARCHAR(128);
|
||||
END IF;
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'pending_runs' AND column_name = 'quality_playlists') THEN
|
||||
ALTER TABLE pending_runs ADD COLUMN quality_playlists JSONB;
|
||||
END IF;
|
||||
-- Checkpoint columns for resumable renders
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'pending_runs' AND column_name = 'checkpoint_frame') THEN
|
||||
ALTER TABLE pending_runs ADD COLUMN checkpoint_frame INTEGER;
|
||||
END IF;
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'pending_runs' AND column_name = 'checkpoint_t') THEN
|
||||
ALTER TABLE pending_runs ADD COLUMN checkpoint_t FLOAT;
|
||||
END IF;
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'pending_runs' AND column_name = 'checkpoint_scans') THEN
|
||||
ALTER TABLE pending_runs ADD COLUMN checkpoint_scans JSONB;
|
||||
END IF;
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'pending_runs' AND column_name = 'total_frames') THEN
|
||||
ALTER TABLE pending_runs ADD COLUMN total_frames INTEGER;
|
||||
END IF;
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'pending_runs' AND column_name = 'resumable') THEN
|
||||
ALTER TABLE pending_runs ADD COLUMN resumable BOOLEAN DEFAULT TRUE;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_pending_runs_status ON pending_runs(status);
|
||||
CREATE INDEX IF NOT EXISTS idx_pending_runs_actor ON pending_runs(actor_id);
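The DO $$ block above adds columns idempotently so existing tables migrate in place. The same pattern from Python, e.g. for one-off migrations run outside SCHEMA_SQL; the helper name is illustrative, while asyncpg's connect/fetchval/execute are real APIs:

import asyncpg

async def add_column_if_missing(conn, table: str, column: str, ddl: str) -> None:
    """Idempotently add one column, mirroring the DO $$ ... $$ block."""
    exists = await conn.fetchval(
        "SELECT 1 FROM information_schema.columns "
        "WHERE table_name = $1 AND column_name = $2",
        table, column,
    )
    if not exists:
        await conn.execute(f"ALTER TABLE {table} ADD COLUMN {column} {ddl}")

# e.g.: await add_column_if_missing(conn, "pending_runs", "checkpoint_frame", "INTEGER")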
|
||||
|
||||
@@ -165,11 +204,27 @@ CREATE INDEX IF NOT EXISTS idx_friendly_names_latest ON friendly_names(actor_id,
|
||||
|
||||
|
||||
async def init_db():
|
||||
"""Initialize database connection pool and create schema."""
|
||||
"""Initialize database connection pool and create schema.
|
||||
|
||||
Raises:
|
||||
asyncpg.PostgresError: If database connection fails
|
||||
RuntimeError: If pool creation fails
|
||||
"""
|
||||
global pool
|
||||
pool = await asyncpg.create_pool(DATABASE_URL)
|
||||
async with pool.acquire() as conn:
|
||||
await conn.execute(SCHEMA_SQL)
|
||||
if pool is not None:
|
||||
return # Already initialized
|
||||
try:
|
||||
pool = await asyncpg.create_pool(DATABASE_URL, min_size=2, max_size=10)
|
||||
if pool is None:
|
||||
raise RuntimeError(f"Failed to create database pool for {DATABASE_URL}")
|
||||
async with pool.acquire() as conn:
|
||||
await conn.execute(SCHEMA_SQL)
|
||||
except asyncpg.PostgresError as e:
|
||||
pool = None
|
||||
raise RuntimeError(f"Database connection failed: {e}") from e
|
||||
except Exception as e:
|
||||
pool = None
|
||||
raise RuntimeError(f"Database initialization failed: {e}") from e
|
||||
|
||||
|
||||
async def close_db():
|
||||
@@ -217,6 +272,15 @@ async def update_cache_item_ipfs_cid(cid: str, ipfs_cid: str) -> bool:
|
||||
return result == "UPDATE 1"
|
||||
|
||||
|
||||
async def get_ipfs_cid(cid: str) -> Optional[str]:
|
||||
"""Get the IPFS CID for a cache item by its internal CID."""
|
||||
async with pool.acquire() as conn:
|
||||
return await conn.fetchval(
|
||||
"SELECT ipfs_cid FROM cache_items WHERE cid = $1",
|
||||
cid
|
||||
)
|
||||
|
||||
|
||||
async def delete_cache_item(cid: str) -> bool:
|
||||
"""Delete a cache item and all associated data (cascades)."""
|
||||
async with pool.acquire() as conn:
|
||||
@@ -641,6 +705,9 @@ async def save_item_metadata(
|
||||
|
||||
Returns a dict with the item metadata (compatible with old JSON format).
|
||||
"""
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.info(f"save_item_metadata: cid={cid[:16] if cid else None}..., actor_id={actor_id}, item_type={item_type}")
|
||||
# Build metadata JSONB for extra fields
|
||||
metadata = {}
|
||||
if tags:
|
||||
@@ -677,6 +744,7 @@ async def save_item_metadata(
|
||||
)
|
||||
|
||||
item_type_id = row["id"]
|
||||
logger.info(f"save_item_metadata: Created/updated item_type id={item_type_id} for cid={cid[:16]}...")
|
||||
|
||||
# Handle pinning
|
||||
if pinned and pin_reason:
|
||||
@@ -1081,6 +1149,8 @@ async def count_user_items(actor_id: str, item_type: Optional[str] = None) -> in
|
||||
|
||||
async def get_run_cache(run_id: str) -> Optional[dict]:
|
||||
"""Get cached run result by content-addressable run_id."""
|
||||
if pool is None:
|
||||
raise RuntimeError("Database pool not initialized - call init_db() first")
|
||||
async with pool.acquire() as conn:
|
||||
row = await conn.fetchrow(
|
||||
"""
|
||||
@@ -1124,7 +1194,10 @@ async def save_run_cache(
|
||||
output_cid = EXCLUDED.output_cid,
|
||||
ipfs_cid = COALESCE(EXCLUDED.ipfs_cid, run_cache.ipfs_cid),
|
||||
provenance_cid = COALESCE(EXCLUDED.provenance_cid, run_cache.provenance_cid),
|
||||
-               plan_cid = COALESCE(EXCLUDED.plan_cid, run_cache.plan_cid)
+               plan_cid = COALESCE(EXCLUDED.plan_cid, run_cache.plan_cid),
+               actor_id = COALESCE(EXCLUDED.actor_id, run_cache.actor_id),
+               recipe = COALESCE(EXCLUDED.recipe, run_cache.recipe),
+               inputs = COALESCE(EXCLUDED.inputs, run_cache.inputs)
|
||||
RETURNING run_id, output_cid, ipfs_cid, provenance_cid, plan_cid, recipe, inputs, actor_id, created_at
|
||||
""",
|
||||
run_id, output_cid, ipfs_cid, provenance_cid, plan_cid, recipe, _json.dumps(inputs), actor_id
|
||||
@@ -1444,6 +1517,8 @@ async def create_pending_run(
|
||||
output_name: Optional[str] = None,
|
||||
) -> dict:
|
||||
"""Create a pending run record for durability."""
|
||||
if pool is None:
|
||||
raise RuntimeError("Database pool not initialized - call init_db() first")
|
||||
async with pool.acquire() as conn:
|
||||
row = await conn.fetchrow(
|
||||
"""
|
||||
@@ -1473,10 +1548,14 @@ async def create_pending_run(
|
||||
|
||||
async def get_pending_run(run_id: str) -> Optional[dict]:
|
||||
"""Get a pending run by ID."""
|
||||
if pool is None:
|
||||
raise RuntimeError("Database pool not initialized - call init_db() first")
|
||||
async with pool.acquire() as conn:
|
||||
row = await conn.fetchrow(
|
||||
"""
|
||||
-           SELECT run_id, celery_task_id, status, recipe, inputs, dag_json, output_name, actor_id, error, created_at, updated_at
+           SELECT run_id, celery_task_id, status, recipe, inputs, dag_json, plan_cid, output_name, actor_id, error,
+                  ipfs_playlist_cid, quality_playlists, checkpoint_frame, checkpoint_t, checkpoint_scans,
+                  total_frames, resumable, created_at, updated_at
|
||||
FROM pending_runs WHERE run_id = $1
|
||||
""",
|
||||
run_id
|
||||
@@ -1486,6 +1565,14 @@ async def get_pending_run(run_id: str) -> Optional[dict]:
|
||||
inputs = row["inputs"]
|
||||
if isinstance(inputs, str):
|
||||
inputs = _json.loads(inputs)
|
||||
# Parse quality_playlists if it's a string
|
||||
quality_playlists = row.get("quality_playlists")
|
||||
if isinstance(quality_playlists, str):
|
||||
quality_playlists = _json.loads(quality_playlists)
|
||||
# Parse checkpoint_scans if it's a string
|
||||
checkpoint_scans = row.get("checkpoint_scans")
|
||||
if isinstance(checkpoint_scans, str):
|
||||
checkpoint_scans = _json.loads(checkpoint_scans)
|
||||
return {
|
||||
"run_id": row["run_id"],
|
||||
"celery_task_id": row["celery_task_id"],
|
||||
@@ -1493,9 +1580,17 @@ async def get_pending_run(run_id: str) -> Optional[dict]:
|
||||
"recipe": row["recipe"],
|
||||
"inputs": inputs,
|
||||
"dag_json": row["dag_json"],
|
||||
"plan_cid": row["plan_cid"],
|
||||
"output_name": row["output_name"],
|
||||
"actor_id": row["actor_id"],
|
||||
"error": row["error"],
|
||||
"ipfs_playlist_cid": row["ipfs_playlist_cid"],
|
||||
"quality_playlists": quality_playlists,
|
||||
"checkpoint_frame": row.get("checkpoint_frame"),
|
||||
"checkpoint_t": row.get("checkpoint_t"),
|
||||
"checkpoint_scans": checkpoint_scans,
|
||||
"total_frames": row.get("total_frames"),
|
||||
"resumable": row.get("resumable", True),
|
||||
"created_at": row["created_at"].isoformat() if row["created_at"] else None,
|
||||
"updated_at": row["updated_at"].isoformat() if row["updated_at"] else None,
|
||||
}
|
||||
@@ -1553,6 +1648,8 @@ async def list_pending_runs(actor_id: Optional[str] = None, status: Optional[str
|
||||
|
||||
async def update_pending_run_status(run_id: str, status: str, error: Optional[str] = None) -> bool:
|
||||
"""Update the status of a pending run."""
|
||||
if pool is None:
|
||||
raise RuntimeError("Database pool not initialized - call init_db() first")
|
||||
async with pool.acquire() as conn:
|
||||
if error:
|
||||
result = await conn.execute(
|
||||
@@ -1567,6 +1664,145 @@ async def update_pending_run_status(run_id: str, status: str, error: Optional[st
|
||||
return "UPDATE 1" in result
|
||||
|
||||
|
||||
async def update_pending_run_plan(run_id: str, plan_cid: str) -> bool:
|
||||
"""Update the plan_cid of a pending run (called when plan is generated)."""
|
||||
if pool is None:
|
||||
raise RuntimeError("Database pool not initialized - call init_db() first")
|
||||
async with pool.acquire() as conn:
|
||||
result = await conn.execute(
|
||||
"UPDATE pending_runs SET plan_cid = $2, updated_at = NOW() WHERE run_id = $1",
|
||||
run_id, plan_cid
|
||||
)
|
||||
return "UPDATE 1" in result
|
||||
|
||||
|
||||
async def update_pending_run_playlist(run_id: str, ipfs_playlist_cid: str, quality_playlists: Optional[dict] = None) -> bool:
|
||||
"""Update the IPFS playlist CID of a streaming run.
|
||||
|
||||
Args:
|
||||
run_id: The run ID
|
||||
ipfs_playlist_cid: Master playlist CID
|
||||
quality_playlists: Dict of quality name -> {cid, width, height, bitrate}
|
||||
"""
|
||||
if pool is None:
|
||||
raise RuntimeError("Database pool not initialized - call init_db() first")
|
||||
async with pool.acquire() as conn:
|
||||
if quality_playlists:
|
||||
result = await conn.execute(
|
||||
"UPDATE pending_runs SET ipfs_playlist_cid = $2, quality_playlists = $3, updated_at = NOW() WHERE run_id = $1",
|
||||
run_id, ipfs_playlist_cid, _json.dumps(quality_playlists)
|
||||
)
|
||||
else:
|
||||
result = await conn.execute(
|
||||
"UPDATE pending_runs SET ipfs_playlist_cid = $2, updated_at = NOW() WHERE run_id = $1",
|
||||
run_id, ipfs_playlist_cid
|
||||
)
|
||||
return "UPDATE 1" in result
|
||||
|
||||
|
||||
async def update_pending_run_checkpoint(
|
||||
run_id: str,
|
||||
checkpoint_frame: int,
|
||||
checkpoint_t: float,
|
||||
checkpoint_scans: Optional[dict] = None,
|
||||
total_frames: Optional[int] = None,
|
||||
) -> bool:
|
||||
"""Update checkpoint state for a streaming run.
|
||||
|
||||
Called at segment boundaries to enable resume after failures.
|
||||
|
||||
Args:
|
||||
run_id: The run ID
|
||||
checkpoint_frame: Last completed frame at segment boundary
|
||||
checkpoint_t: Time value for checkpoint frame
|
||||
checkpoint_scans: Accumulated scan state {scan_name: state_dict}
|
||||
total_frames: Total expected frames (for progress %)
|
||||
"""
|
||||
if pool is None:
|
||||
raise RuntimeError("Database pool not initialized - call init_db() first")
|
||||
async with pool.acquire() as conn:
|
||||
result = await conn.execute(
|
||||
"""
|
||||
UPDATE pending_runs SET
|
||||
checkpoint_frame = $2,
|
||||
checkpoint_t = $3,
|
||||
checkpoint_scans = $4,
|
||||
total_frames = COALESCE($5, total_frames),
|
||||
updated_at = NOW()
|
||||
WHERE run_id = $1
|
||||
""",
|
||||
run_id,
|
||||
checkpoint_frame,
|
||||
checkpoint_t,
|
||||
_json.dumps(checkpoint_scans) if checkpoint_scans else None,
|
||||
total_frames,
|
||||
)
|
||||
return "UPDATE 1" in result
|
||||
|
||||
|
||||
async def get_run_checkpoint(run_id: str) -> Optional[dict]:
|
||||
"""Get checkpoint data for resuming a run.
|
||||
|
||||
Returns:
|
||||
Dict with checkpoint_frame, checkpoint_t, checkpoint_scans, quality_playlists, etc.
|
||||
or None if no checkpoint exists
|
||||
"""
|
||||
if pool is None:
|
||||
raise RuntimeError("Database pool not initialized - call init_db() first")
|
||||
async with pool.acquire() as conn:
|
||||
row = await conn.fetchrow(
|
||||
"""
|
||||
SELECT checkpoint_frame, checkpoint_t, checkpoint_scans, total_frames,
|
||||
quality_playlists, ipfs_playlist_cid, resumable
|
||||
FROM pending_runs WHERE run_id = $1
|
||||
""",
|
||||
run_id
|
||||
)
|
||||
if row and row.get("checkpoint_frame") is not None:
|
||||
# Parse JSONB fields
|
||||
checkpoint_scans = row.get("checkpoint_scans")
|
||||
if isinstance(checkpoint_scans, str):
|
||||
checkpoint_scans = _json.loads(checkpoint_scans)
|
||||
quality_playlists = row.get("quality_playlists")
|
||||
if isinstance(quality_playlists, str):
|
||||
quality_playlists = _json.loads(quality_playlists)
|
||||
return {
|
||||
"frame_num": row["checkpoint_frame"],
|
||||
"t": row["checkpoint_t"],
|
||||
"scans": checkpoint_scans or {},
|
||||
"total_frames": row.get("total_frames"),
|
||||
"quality_playlists": quality_playlists,
|
||||
"ipfs_playlist_cid": row.get("ipfs_playlist_cid"),
|
||||
"resumable": row.get("resumable", True),
|
||||
}
|
||||
return None
|
||||
|
||||
|
||||
async def clear_run_checkpoint(run_id: str) -> bool:
|
||||
"""Clear checkpoint data for a run (used on restart).
|
||||
|
||||
Args:
|
||||
run_id: The run ID
|
||||
"""
|
||||
if pool is None:
|
||||
raise RuntimeError("Database pool not initialized - call init_db() first")
|
||||
async with pool.acquire() as conn:
|
||||
result = await conn.execute(
|
||||
"""
|
||||
UPDATE pending_runs SET
|
||||
checkpoint_frame = NULL,
|
||||
checkpoint_t = NULL,
|
||||
checkpoint_scans = NULL,
|
||||
quality_playlists = NULL,
|
||||
ipfs_playlist_cid = NULL,
|
||||
updated_at = NOW()
|
||||
WHERE run_id = $1
|
||||
""",
|
||||
run_id,
|
||||
)
|
||||
return "UPDATE 1" in result
|
||||
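Together, update_pending_run_checkpoint, get_run_checkpoint, and clear_run_checkpoint give streaming runs crash recovery at segment granularity. A hedged sketch of the intended loop, where render_frames and SEGMENT_FRAMES are hypothetical stand-ins for the worker's actual render code:

SEGMENT_FRAMES = 90  # assumed segment length for illustration (3 s at 30 fps)

async def run_with_resume(run_id: str, render_frames):
    # Resume from the last segment boundary, or start fresh.
    ckpt = await get_run_checkpoint(run_id)
    start = ckpt["frame_num"] + 1 if ckpt else 0
    scans = ckpt["scans"] if ckpt else {}

    # render_frames is a hypothetical async generator yielding (frame_num, t, scans).
    async for frame_num, t, scans in render_frames(start, scans):
        if frame_num and frame_num % SEGMENT_FRAMES == 0:
            # Persist at segment boundaries: a crash loses at most one segment.
            await update_pending_run_checkpoint(run_id, frame_num, t, scans)

    await complete_pending_run(run_id)  # drops the pending row once cached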
|
||||
|
||||
async def complete_pending_run(run_id: str) -> bool:
|
||||
"""Remove a pending run after it completes (moves to run_cache)."""
|
||||
async with pool.acquire() as conn:
|
||||
@@ -1792,3 +2028,117 @@ async def delete_friendly_name(actor_id: str, cid: str) -> bool:
|
||||
actor_id, cid
|
||||
)
|
||||
return "DELETE 1" in result
|
||||
|
||||
|
||||
async def update_friendly_name_cid(actor_id: str, old_cid: str, new_cid: str) -> bool:
|
||||
"""
|
||||
Update a friendly name's CID (used when IPFS upload completes).
|
||||
|
||||
This updates the CID from a local SHA256 hash to an IPFS CID,
|
||||
ensuring assets can be fetched by remote workers via IPFS.
|
||||
"""
|
||||
async with pool.acquire() as conn:
|
||||
result = await conn.execute(
|
||||
"UPDATE friendly_names SET cid = $3 WHERE actor_id = $1 AND cid = $2",
|
||||
actor_id, old_cid, new_cid
|
||||
)
|
||||
return "UPDATE 1" in result
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# SYNCHRONOUS DATABASE FUNCTIONS (for use from non-async contexts like video streaming)
|
||||
# =============================================================================
|
||||
|
||||
def resolve_friendly_name_sync(
|
||||
actor_id: str,
|
||||
name: str,
|
||||
item_type: Optional[str] = None,
|
||||
) -> Optional[str]:
|
||||
"""
|
||||
Synchronous version of resolve_friendly_name using psycopg2.
|
||||
|
||||
Useful when calling from synchronous code (e.g., video streaming callbacks)
|
||||
where async/await is not possible.
|
||||
|
||||
Returns CID or None if not found.
|
||||
"""
|
||||
import psycopg2
|
||||
|
||||
parts = name.strip().split(' ')
|
||||
base_name = parts[0]
|
||||
version_id = parts[1] if len(parts) > 1 else None
|
||||
|
||||
try:
|
||||
conn = psycopg2.connect(DATABASE_URL)
|
||||
cursor = conn.cursor()
|
||||
|
||||
if version_id:
|
||||
# Exact version lookup
|
||||
if item_type:
|
||||
query = """
|
||||
SELECT cid FROM friendly_names
|
||||
WHERE actor_id = %s AND base_name = %s AND version_id = %s AND item_type = %s
|
||||
"""
|
||||
cursor.execute(query, (actor_id, base_name, version_id, item_type))
|
||||
else:
|
||||
query = """
|
||||
SELECT cid FROM friendly_names
|
||||
WHERE actor_id = %s AND base_name = %s AND version_id = %s
|
||||
"""
|
||||
cursor.execute(query, (actor_id, base_name, version_id))
|
||||
else:
|
||||
# Latest version lookup
|
||||
if item_type:
|
||||
query = """
|
||||
SELECT cid FROM friendly_names
|
||||
WHERE actor_id = %s AND base_name = %s AND item_type = %s
|
||||
ORDER BY created_at DESC LIMIT 1
|
||||
"""
|
||||
cursor.execute(query, (actor_id, base_name, item_type))
|
||||
else:
|
||||
query = """
|
||||
SELECT cid FROM friendly_names
|
||||
WHERE actor_id = %s AND base_name = %s
|
||||
ORDER BY created_at DESC LIMIT 1
|
||||
"""
|
||||
cursor.execute(query, (actor_id, base_name))
|
||||
|
||||
result = cursor.fetchone()
|
||||
cursor.close()
|
||||
conn.close()
|
||||
|
||||
return result[0] if result else None
|
||||
|
||||
except Exception as e:
|
||||
import sys
|
||||
print(f"resolve_friendly_name_sync ERROR: {e}", file=sys.stderr)
|
||||
return None
|
||||
|
||||
|
||||
def get_ipfs_cid_sync(cid: str) -> Optional[str]:
|
||||
"""
|
||||
Synchronous version of get_ipfs_cid using psycopg2.
|
||||
|
||||
Returns the IPFS CID for a given internal CID, or None if not found.
|
||||
"""
|
||||
import psycopg2
|
||||
|
||||
try:
|
||||
conn = psycopg2.connect(DATABASE_URL)
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute(
|
||||
"SELECT ipfs_cid FROM cache_items WHERE cid = %s",
|
||||
(cid,)
|
||||
)
|
||||
|
||||
result = cursor.fetchone()
|
||||
cursor.close()
|
||||
conn.close()
|
||||
|
||||
return result[0] if result else None
|
||||
|
||||
except Exception as e:
|
||||
import sys
|
||||
print(f"get_ipfs_cid_sync ERROR: {e}", file=sys.stderr)
|
||||
return None
|
||||
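These two helpers are meant to be chained from synchronous streaming code. A hedged example, where the actor id and friendly name are placeholders:

# Resolve "name [version]" to an internal CID, then map it to an IPFS CID.
cid = resolve_friendly_name_sync("@someone@example.com", "intro v2", item_type="video")
if cid:
    ipfs_cid = get_ipfs_cid_sync(cid)  # None until the IPFS upload has landed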
|
||||
@@ -7,13 +7,13 @@ echo "=== Pulling latest code ==="
|
||||
git pull
|
||||
|
||||
echo "=== Building Docker image ==="
|
||||
docker build --build-arg CACHEBUST=$(date +%s) -t git.rose-ash.com/art-dag/l1-server:latest .
|
||||
docker build --build-arg CACHEBUST=$(date +%s) -t registry.rose-ash.com:5000/celery-l1-server:latest .
|
||||
|
||||
echo "=== Pushing to registry ==="
|
||||
docker push registry.rose-ash.com:5000/celery-l1-server:latest
|
||||
|
||||
echo "=== Redeploying celery stack ==="
|
||||
docker stack deploy -c docker-compose.yml celery
|
||||
|
||||
echo "=== Restarting proxy nginx ==="
|
||||
docker service update --force proxy_nginx
|
||||
|
||||
echo "=== Done ==="
|
||||
docker stack services celery
|
||||
|
||||
249
diagnose_gpu.py
Executable file
@@ -0,0 +1,249 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
GPU Rendering Diagnostic Script
|
||||
|
||||
Checks for common issues that cause GPU rendering slowdowns in art-dag.
|
||||
Run this script to identify potential performance bottlenecks.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
def print_section(title):
|
||||
print(f"\n{'='*60}")
|
||||
print(f" {title}")
|
||||
print(f"{'='*60}")
|
||||
|
||||
def check_pass(msg):
|
||||
print(f" [PASS] {msg}")
|
||||
|
||||
def check_fail(msg):
|
||||
print(f" [FAIL] {msg}")
|
||||
|
||||
def check_warn(msg):
|
||||
print(f" [WARN] {msg}")
|
||||
|
||||
def check_info(msg):
|
||||
print(f" [INFO] {msg}")
|
||||
|
||||
# ============================================================
|
||||
# 1. Check GPU Availability
|
||||
# ============================================================
|
||||
print_section("1. GPU AVAILABILITY")
|
||||
|
||||
# Check nvidia-smi
|
||||
try:
|
||||
result = subprocess.run(["nvidia-smi", "--query-gpu=name,memory.total,memory.free,utilization.gpu",
|
||||
"--format=csv,noheader"], capture_output=True, text=True, timeout=5)
|
||||
if result.returncode == 0:
|
||||
for line in result.stdout.strip().split('\n'):
|
||||
check_pass(f"GPU found: {line}")
|
||||
else:
|
||||
check_fail("nvidia-smi failed - no GPU detected")
|
||||
except FileNotFoundError:
|
||||
check_fail("nvidia-smi not found - NVIDIA drivers not installed")
|
||||
except Exception as e:
|
||||
check_fail(f"nvidia-smi error: {e}")
|
||||
|
||||
# ============================================================
|
||||
# 2. Check CuPy
|
||||
# ============================================================
|
||||
print_section("2. CUPY (GPU ARRAY LIBRARY)")
|
||||
|
||||
try:
|
||||
import cupy as cp
|
||||
check_pass(f"CuPy available, version {cp.__version__}")
|
||||
|
||||
# Test basic GPU operation
|
||||
try:
|
||||
a = cp.zeros((100, 100), dtype=cp.uint8)
|
||||
cp.cuda.Stream.null.synchronize()
|
||||
check_pass("CuPy GPU operations working")
|
||||
|
||||
# Check memory
|
||||
mempool = cp.get_default_memory_pool()
|
||||
check_info(f"GPU memory pool: {mempool.used_bytes() / 1024**2:.1f} MB used, "
|
||||
f"{mempool.total_bytes() / 1024**2:.1f} MB total")
|
||||
except Exception as e:
|
||||
check_fail(f"CuPy GPU test failed: {e}")
|
||||
except ImportError:
|
||||
check_fail("CuPy not installed - GPU rendering disabled")
|
||||
|
||||
# ============================================================
|
||||
# 3. Check PyNvVideoCodec (GPU Encoding)
|
||||
# ============================================================
|
||||
print_section("3. PYNVVIDEOCODEC (GPU ENCODING)")
|
||||
|
||||
try:
|
||||
import PyNvVideoCodec as nvc
|
||||
check_pass("PyNvVideoCodec available - zero-copy GPU encoding enabled")
|
||||
except ImportError:
|
||||
check_warn("PyNvVideoCodec not available - using FFmpeg NVENC (slower)")
|
||||
|
||||
# ============================================================
|
||||
# 4. Check Decord GPU (Hardware Decode)
|
||||
# ============================================================
|
||||
print_section("4. DECORD GPU (HARDWARE DECODE)")
|
||||
|
||||
try:
|
||||
import decord
|
||||
from decord import gpu
|
||||
ctx = gpu(0)
|
||||
check_pass(f"Decord GPU (NVDEC) available - hardware video decode enabled")
|
||||
except ImportError:
|
||||
check_warn("Decord not installed - using FFmpeg decode")
|
||||
except Exception as e:
|
||||
check_warn(f"Decord GPU not available ({e}) - using FFmpeg decode")
|
||||
|
||||
# ============================================================
|
||||
# 5. Check DLPack Support
|
||||
# ============================================================
|
||||
print_section("5. DLPACK (ZERO-COPY TRANSFER)")
|
||||
|
||||
try:
|
||||
import decord
|
||||
from decord import VideoReader, gpu
|
||||
import cupy as cp
|
||||
|
||||
# Need a test video file
|
||||
test_video = None
|
||||
for path in ["/data/cache", "/tmp"]:
|
||||
if os.path.exists(path):
|
||||
for f in os.listdir(path):
|
||||
if f.endswith(('.mp4', '.webm', '.mkv')):
|
||||
test_video = os.path.join(path, f)
|
||||
break
|
||||
if test_video:
|
||||
break
|
||||
|
||||
if test_video:
|
||||
try:
|
||||
vr = VideoReader(test_video, ctx=gpu(0))
|
||||
frame = vr[0]
|
||||
dlpack = frame.to_dlpack()
|
||||
gpu_frame = cp.from_dlpack(dlpack)
|
||||
check_pass(f"DLPack zero-copy working (tested with {os.path.basename(test_video)})")
|
||||
except Exception as e:
|
||||
check_fail(f"DLPack FAILED: {e}")
|
||||
check_info("This means every frame does GPU->CPU->GPU copy (SLOW)")
|
||||
else:
|
||||
check_warn("No test video found - cannot verify DLPack")
|
||||
except ImportError:
|
||||
check_warn("Cannot test DLPack - decord or cupy not available")
|
||||
|
||||
# ============================================================
|
||||
# 6. Check Fast CUDA Kernels
|
||||
# ============================================================
|
||||
print_section("6. FAST CUDA KERNELS (JIT COMPILED)")
|
||||
|
||||
try:
|
||||
sys.path.insert(0, '/root/art-dag/celery')
|
||||
from streaming.jit_compiler import (
|
||||
fast_rotate, fast_zoom, fast_blend, fast_hue_shift,
|
||||
fast_invert, fast_ripple, get_fast_ops
|
||||
)
|
||||
check_pass("Fast CUDA kernels loaded successfully")
|
||||
|
||||
# Test one kernel
|
||||
try:
|
||||
import cupy as cp
|
||||
test_img = cp.zeros((720, 1280, 3), dtype=cp.uint8)
|
||||
result = fast_rotate(test_img, 45.0)
|
||||
cp.cuda.Stream.null.synchronize()
|
||||
check_pass("Fast rotate kernel working")
|
||||
except Exception as e:
|
||||
check_fail(f"Fast kernel execution failed: {e}")
|
||||
except ImportError as e:
|
||||
check_warn(f"Fast CUDA kernels not available: {e}")
|
||||
check_info("Fallback to slower CuPy operations")
|
||||
|
||||
# ============================================================
|
||||
# 7. Check Fused Pipeline Compiler
|
||||
# ============================================================
|
||||
print_section("7. FUSED PIPELINE COMPILER")
|
||||
|
||||
try:
|
||||
sys.path.insert(0, '/root/art-dag/celery')
|
||||
from streaming.sexp_to_cuda import compile_frame_pipeline, compile_autonomous_pipeline
|
||||
check_pass("Fused CUDA pipeline compiler available")
|
||||
except ImportError as e:
|
||||
check_warn(f"Fused pipeline compiler not available: {e}")
|
||||
check_info("Using per-operation fallback (slower for multi-effect pipelines)")
|
||||
|
||||
# ============================================================
|
||||
# 8. Check FFmpeg NVENC
|
||||
# ============================================================
|
||||
print_section("8. FFMPEG NVENC (HARDWARE ENCODE)")
|
||||
|
||||
try:
|
||||
result = subprocess.run(["ffmpeg", "-encoders"], capture_output=True, text=True, timeout=5)
|
||||
if "h264_nvenc" in result.stdout:
|
||||
check_pass("FFmpeg h264_nvenc encoder available")
|
||||
else:
|
||||
check_warn("FFmpeg h264_nvenc not available - using libx264 (CPU)")
|
||||
|
||||
if "hevc_nvenc" in result.stdout:
|
||||
check_pass("FFmpeg hevc_nvenc encoder available")
|
||||
except Exception as e:
|
||||
check_fail(f"FFmpeg check failed: {e}")
|
||||
|
||||
# ============================================================
|
||||
# 9. Check FFmpeg NVDEC
|
||||
# ============================================================
|
||||
print_section("9. FFMPEG NVDEC (HARDWARE DECODE)")
|
||||
|
||||
try:
|
||||
result = subprocess.run(["ffmpeg", "-hwaccels"], capture_output=True, text=True, timeout=5)
|
||||
if "cuda" in result.stdout:
|
||||
check_pass("FFmpeg CUDA hwaccel available")
|
||||
else:
|
||||
check_warn("FFmpeg CUDA hwaccel not available - using CPU decode")
|
||||
except Exception as e:
|
||||
check_fail(f"FFmpeg hwaccel check failed: {e}")
|
||||
|
||||
# ============================================================
|
||||
# 10. Check Pipeline Cache Status
|
||||
# ============================================================
|
||||
print_section("10. PIPELINE CACHE STATUS")
|
||||
|
||||
try:
|
||||
sys.path.insert(0, '/root/art-dag/celery')
|
||||
from sexp_effects.primitive_libs.streaming_gpu import (
|
||||
_FUSED_PIPELINE_CACHE, _AUTONOMOUS_PIPELINE_CACHE
|
||||
)
|
||||
fused_count = len(_FUSED_PIPELINE_CACHE)
|
||||
auto_count = len(_AUTONOMOUS_PIPELINE_CACHE)
|
||||
|
||||
if fused_count > 0 or auto_count > 0:
|
||||
check_info(f"Fused pipeline cache: {fused_count} entries")
|
||||
check_info(f"Autonomous pipeline cache: {auto_count} entries")
|
||||
if fused_count > 100 or auto_count > 100:
|
||||
check_warn("Large pipeline cache - may cause memory pressure")
|
||||
else:
|
||||
check_info("Pipeline caches empty (no rendering done yet)")
|
||||
except Exception as e:
|
||||
check_info(f"Could not check pipeline cache: {e}")
|
||||
|
||||
# ============================================================
|
||||
# Summary
|
||||
# ============================================================
|
||||
print_section("SUMMARY")
|
||||
print("""
|
||||
Optimal GPU rendering requires:
|
||||
1. [CRITICAL] CuPy with working GPU operations
|
||||
2. [CRITICAL] DLPack zero-copy transfer (decord -> CuPy)
|
||||
3. [HIGH] Fast CUDA kernels from jit_compiler
|
||||
4. [MEDIUM] Fused pipeline compiler for multi-effect recipes
|
||||
5. [MEDIUM] PyNvVideoCodec for zero-copy encoding
|
||||
6. [LOW] FFmpeg NVENC/NVDEC as fallback
|
||||
|
||||
If DLPack is failing, check:
|
||||
- decord version (needs 0.6.0+ with DLPack support)
|
||||
- CuPy version compatibility
|
||||
- CUDA toolkit version match
|
||||
|
||||
If fast kernels are not loading:
|
||||
- Check if streaming/jit_compiler.py exists
|
||||
- Verify CUDA compiler (nvcc) is available
|
||||
""")
|
||||
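Check 5 is the one that most often explains a slow pipeline. A minimal sketch of the zero-copy path it exercises, assuming decord built with CUDA support and a CuPy matching the same toolkit ("clip.mp4" is a placeholder path):

import cupy as cp
from decord import VideoReader, gpu

vr = VideoReader("clip.mp4", ctx=gpu(0))        # NVDEC decode; frames stay on the GPU
frame = vr[0]
gpu_frame = cp.from_dlpack(frame.to_dlpack())   # hand off to CuPy with no host copy
print(gpu_frame.shape, gpu_frame.dtype)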
36
docker-compose.gpu-dev.yml
Normal file
@@ -0,0 +1,36 @@
|
||||
# GPU Worker Development Override
|
||||
#
|
||||
# Usage: docker stack deploy -c docker-compose.yml -c docker-compose.gpu-dev.yml celery
|
||||
# Or for quick testing: docker-compose -f docker-compose.yml -f docker-compose.gpu-dev.yml up l1-gpu-worker
|
||||
#
|
||||
# Features:
|
||||
# - Mounts source code for instant changes (no rebuild needed)
|
||||
# - Uses watchmedo for auto-reload on file changes
|
||||
# - Shows config on startup
|
||||
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
l1-gpu-worker:
|
||||
# Override command to use watchmedo for auto-reload
|
||||
command: >
|
||||
sh -c "
|
||||
pip install -q watchdog[watchmedo] 2>/dev/null || true;
|
||||
echo '=== GPU WORKER DEV MODE ===';
|
||||
echo 'Source mounted - changes take effect on restart';
|
||||
echo 'Auto-reload enabled via watchmedo';
|
||||
env | grep -E 'STREAMING_GPU|IPFS_GATEWAY|REDIS|DATABASE' | sort;
|
||||
echo '===========================';
|
||||
watchmedo auto-restart --directory=/app --pattern='*.py' --recursive --
|
||||
celery -A celery_app worker --loglevel=info -E -Q gpu,celery
|
||||
"
|
||||
environment:
|
||||
# Development defaults (can override with .env)
|
||||
- STREAMING_GPU_PERSIST=0
|
||||
- IPFS_GATEWAY_URL=https://celery-artdag.rose-ash.com/ipfs
|
||||
- SHOW_CONFIG=1
|
||||
volumes:
|
||||
# Mount source code for hot reload
|
||||
- ./:/app:ro
|
||||
# Keep cache local
|
||||
- gpu_cache:/data/cache
|
||||
@@ -3,6 +3,10 @@ version: "3.8"
|
||||
services:
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
ports:
|
||||
- target: 6379
|
||||
published: 16379
|
||||
mode: host # Bypass swarm routing mesh
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
networks:
|
||||
@@ -11,13 +15,21 @@ services:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- POSTGRES_USER=artdag
|
||||
- POSTGRES_PASSWORD=artdag
|
||||
- POSTGRES_DB=artdag
|
||||
ports:
|
||||
- target: 5432
|
||||
published: 15432
|
||||
mode: host # Expose for GPU worker on different VPC
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
networks:
|
||||
@@ -26,12 +38,18 @@ services:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
ipfs:
|
||||
image: ipfs/kubo:latest
|
||||
ports:
|
||||
- "4001:4001" # Swarm TCP
|
||||
- "4001:4001/udp" # Swarm UDP
|
||||
- target: 5001
|
||||
published: 15001
|
||||
mode: host # API port for GPU worker on different VPC
|
||||
volumes:
|
||||
- ipfs_data:/data/ipfs
|
||||
- l1_cache:/data/cache:ro # Read-only access to cache for adding files
|
||||
@@ -42,23 +60,27 @@ services:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
l1-server:
|
||||
image: git.rose-ash.com/art-dag/l1-server:latest
|
||||
image: registry.rose-ash.com:5000/celery-l1-server:latest
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- REDIS_URL=redis://redis:6379/5
|
||||
- DATABASE_URL=postgresql://artdag:artdag@postgres:5432/artdag
|
||||
# IPFS_API multiaddr - used for all IPFS operations (add, cat, pin)
|
||||
- IPFS_API=/dns/ipfs/tcp/5001
|
||||
- CACHE_DIR=/data/cache
|
||||
# Set IPFS_PRIMARY=true to use IPFS-primary mode (everything on IPFS, no local cache)
|
||||
# - IPFS_PRIMARY=true
|
||||
# Cluster key for trust domains - systems with same key can share work via IPFS
|
||||
# Generate with: openssl rand -hex 32
|
||||
- ARTDAG_CLUSTER_KEY=${ARTDAG_CLUSTER_KEY:-}
|
||||
# DATABASE_URL, ADMIN_TOKEN, ARTDAG_CLUSTER_KEY,
|
||||
# L2_SERVER, L2_DOMAIN, IPFS_GATEWAY_URL from .env file
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8100/')"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
start_period: 15s
|
||||
volumes:
|
||||
- l1_cache:/data/cache
|
||||
depends_on:
|
||||
@@ -70,21 +92,26 @@ services:
|
||||
- externalnet
|
||||
deploy:
|
||||
replicas: 1
|
||||
update_config:
|
||||
order: start-first
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
l1-worker:
|
||||
image: git.rose-ash.com/art-dag/l1-server:latest
|
||||
command: celery -A celery_app worker --loglevel=info -E
|
||||
image: registry.rose-ash.com:5000/celery-l1-server:latest
|
||||
command: sh -c "find /app -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null; celery -A celery_app worker --loglevel=info -E"
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- REDIS_URL=redis://redis:6379/5
|
||||
- DATABASE_URL=postgresql://artdag:artdag@postgres:5432/artdag
|
||||
# IPFS_API multiaddr - used for all IPFS operations (add, cat, pin)
|
||||
- IPFS_API=/dns/ipfs/tcp/5001
|
||||
- CACHE_DIR=/data/cache
|
||||
- C_FORCE_ROOT=true
|
||||
# Must match l1-server for consistent cache_ids
|
||||
- ARTDAG_CLUSTER_KEY=${ARTDAG_CLUSTER_KEY:-}
|
||||
# DATABASE_URL, ARTDAG_CLUSTER_KEY from .env file
|
||||
volumes:
|
||||
- l1_cache:/data/cache
|
||||
depends_on:
|
||||
@@ -97,6 +124,9 @@ services:
|
||||
replicas: 2
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
flower:
|
||||
image: mher/flower:2.0
|
||||
@@ -113,12 +143,42 @@ services:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
# GPU worker for streaming/rendering tasks
|
||||
# Build: docker build -f Dockerfile.gpu -t registry.rose-ash.com:5000/celery-l1-gpu-server:latest .
|
||||
# Requires: docker node update --label-add gpu=true <gpu-node-name>
|
||||
l1-gpu-worker:
|
||||
image: registry.rose-ash.com:5000/celery-l1-gpu-server:latest
|
||||
command: sh -c "cd /app && celery -A celery_app worker --loglevel=info -E -Q gpu,celery"
|
||||
env_file:
|
||||
- .env.gpu
|
||||
volumes:
|
||||
# Local cache - ephemeral, just for working files
|
||||
- gpu_cache:/data/cache
|
||||
# Note: No source mount - GPU worker uses code from image
|
||||
depends_on:
|
||||
- redis
|
||||
- postgres
|
||||
- ipfs
|
||||
networks:
|
||||
- celery
|
||||
deploy:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu == true
|
||||
|
||||
volumes:
|
||||
redis_data:
|
||||
postgres_data:
|
||||
ipfs_data:
|
||||
l1_cache:
|
||||
gpu_cache: # Ephemeral cache for GPU workers
|
||||
|
||||
networks:
|
||||
celery:
|
||||
|
||||
150
effects/quick_test_explicit.sexp
Normal file
@@ -0,0 +1,150 @@
|
||||
;; Quick Test - Fully Explicit Streaming Version
|
||||
;;
|
||||
;; The interpreter is completely generic - knows nothing about video/audio.
|
||||
;; All domain logic is explicit via primitives.
|
||||
;;
|
||||
;; Run with built-in sources/audio:
|
||||
;; python3 -m streaming.stream_sexp_generic effects/quick_test_explicit.sexp --fps 30
|
||||
;;
|
||||
;; Run with external config files:
|
||||
;; python3 -m streaming.stream_sexp_generic effects/quick_test_explicit.sexp \
|
||||
;; --sources configs/sources-default.sexp \
|
||||
;; --audio configs/audio-dizzy.sexp \
|
||||
;; --fps 30
|
||||
|
||||
(stream "quick_test_explicit"
|
||||
:fps 30
|
||||
:width 1920
|
||||
:height 1080
|
||||
:seed 42
|
||||
|
||||
;; Load standard primitives and effects
|
||||
(include :path "../templates/standard-primitives.sexp")
|
||||
(include :path "../templates/standard-effects.sexp")
|
||||
|
||||
;; Load reusable templates
|
||||
(include :path "../templates/stream-process-pair.sexp")
|
||||
(include :path "../templates/crossfade-zoom.sexp")
|
||||
|
||||
;; === SOURCES AS ARRAY ===
|
||||
(def sources [
|
||||
(streaming:make-video-source "monday.webm" 30)
|
||||
(streaming:make-video-source "escher.webm" 30)
|
||||
(streaming:make-video-source "2.webm" 30)
|
||||
(streaming:make-video-source "disruptors.webm" 30)
|
||||
(streaming:make-video-source "4.mp4" 30)
|
||||
(streaming:make-video-source "ecstacy.mp4" 30)
|
||||
(streaming:make-video-source "dopple.webm" 30)
|
||||
(streaming:make-video-source "5.mp4" 30)
|
||||
])
|
||||
|
||||
;; Per-pair config: [rot-dir, rot-a-max, rot-b-max, zoom-a-max, zoom-b-max]
|
||||
;; Pairs 3,6: reversed (negative rot-a, positive rot-b, shrink zoom-a, grow zoom-b)
|
||||
;; Pair 5: smaller ranges
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 0: monday
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 1: escher
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 2: vid2
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5} ;; 3: disruptors (reversed)
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 4: vid4
|
||||
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7} ;; 5: ecstacy (smaller)
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5} ;; 6: dopple (reversed)
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 7: vid5
|
||||
])
|
||||
|
||||
;; Audio analyzer
|
||||
(def music (streaming:make-audio-analyzer "dizzy.mp3"))
|
||||
|
||||
;; Audio playback
|
||||
(audio-playback "../dizzy.mp3")
|
||||
|
||||
;; === GLOBAL SCANS ===
|
||||
|
||||
;; Cycle state: which source is active (recipe-specific)
|
||||
;; clen = beats per source (8-24 beats = ~4-12 seconds)
|
||||
(scan cycle (streaming:audio-beat music t)
|
||||
:init {:active 0 :beat 0 :clen 16}
|
||||
:step (if (< (+ beat 1) clen)
|
||||
(dict :active active :beat (+ beat 1) :clen clen)
|
||||
(dict :active (mod (+ active 1) (len sources)) :beat 0
|
||||
:clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))
|
||||
|
||||
;; Reusable scans from templates (require 'music' to be defined)
|
||||
(include :path "../templates/scan-oscillating-spin.sexp")
|
||||
(include :path "../templates/scan-ripple-drops.sexp")
|
||||
|
||||
;; === PER-PAIR STATE (dynamically sized based on sources) ===
|
||||
;; Each pair has: inv-a, inv-b, hue-a, hue-b, mix, rot-angle
|
||||
(scan pairs (streaming:audio-beat music t)
|
||||
:init {:states (map (core:range (len sources)) (lambda (_)
|
||||
{:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
|
||||
:step (dict :states (map states (lambda (p)
|
||||
(let [;; Invert toggles (10% chance, lasts 1-4 beats)
|
||||
new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
|
||||
new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
|
||||
;; Hue shifts (10% chance, lasts 1-4 beats) - use countdown like invert
|
||||
old-hue-a (get p :hue-a)
|
||||
old-hue-b (get p :hue-b)
|
||||
new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
|
||||
new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
|
||||
;; Pick random hue value when triggering (stored separately)
|
||||
new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
|
||||
new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
|
||||
;; Mix (holds for 1-10 beats, then picks 0, 0.5, or 1)
|
||||
mix-rem (get p :mix-rem)
|
||||
old-mix (get p :mix)
|
||||
new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
|
||||
new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
|
||||
;; Rotation (accumulates, reverses direction when cycle completes)
|
||||
rot-beat (get p :rot-beat)
|
||||
rot-clen (get p :rot-clen)
|
||||
old-angle (get p :angle)
|
||||
;; Note: dir comes from pair-configs, but we store rotation state here
|
||||
new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
|
||||
new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
|
||||
new-angle (+ old-angle (/ 360 rot-clen))]
|
||||
(dict :inv-a new-inv-a :inv-b new-inv-b
|
||||
:hue-a new-hue-a :hue-b new-hue-b
|
||||
:hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
|
||||
:mix new-mix :mix-rem new-mix-rem
|
||||
:angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))
|
||||
|
||||
;; === FRAME PIPELINE ===
|
||||
(frame
|
||||
(let [now t
|
||||
e (streaming:audio-energy music now)
|
||||
|
||||
;; Get cycle state
|
||||
active (bind cycle :active)
|
||||
beat-pos (bind cycle :beat)
|
||||
clen (bind cycle :clen)
|
||||
|
||||
;; Transition logic: last third of cycle crossfades to next
|
||||
phase3 (* beat-pos 3)
|
||||
fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
|
||||
fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
|
||||
next-idx (mod (+ active 1) (len sources))
|
||||
|
||||
;; Get pair states array (required by process-pair macro)
|
||||
pair-states (bind pairs :states)
|
||||
|
||||
;; Process active pair using macro from template
|
||||
active-frame (process-pair active)
|
||||
|
||||
;; Crossfade with zoom during transition (using macro)
|
||||
result (if fading
|
||||
(crossfade-zoom active-frame (process-pair next-idx) fade-amt)
|
||||
active-frame)
|
||||
|
||||
;; Final: global spin + ripple
|
||||
spun (rotate result :angle (bind spin :angle))
|
||||
rip-gate (bind ripple-state :gate)
|
||||
rip-amp (* rip-gate (core:map-range e 0 1 5 50))]
|
||||
|
||||
(ripple spun
|
||||
:amplitude rip-amp
|
||||
:center_x (bind ripple-state :cx)
|
||||
:center_y (bind ripple-state :cy)
|
||||
:frequency 8
|
||||
:decay 2
|
||||
:speed 5))))
|
||||
294
hybrid_state.py
@@ -1,294 +0,0 @@
|
||||
"""
|
||||
Hybrid State Manager: Local Redis + IPNS Sync.
|
||||
|
||||
Provides fast local operations with eventual consistency across L1 nodes.
|
||||
|
||||
- Local Redis: Fast reads/writes (microseconds)
|
||||
- IPNS Sync: Background sync with other nodes (every N seconds)
|
||||
- Duplicate work: Accepted, idempotent (same inputs → same CID)
|
||||
|
||||
Usage:
|
||||
from hybrid_state import get_state_manager
|
||||
|
||||
state = get_state_manager()
|
||||
|
||||
# Fast local lookup
|
||||
cid = state.get_cached_cid(cache_id)
|
||||
|
||||
# Fast local write (synced in background)
|
||||
state.set_cached_cid(cache_id, output_cid)
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from typing import Dict, Optional
|
||||
|
||||
import redis
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Configuration
|
||||
REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/5")
|
||||
CLUSTER_KEY = os.environ.get("ARTDAG_CLUSTER_KEY", "default")
|
||||
IPNS_SYNC_INTERVAL = int(os.environ.get("ARTDAG_IPNS_SYNC_INTERVAL", "30"))
|
||||
IPNS_ENABLED = os.environ.get("ARTDAG_IPNS_SYNC", "").lower() in ("true", "1", "yes")
|
||||
|
||||
# Redis keys
|
||||
CACHE_KEY = "artdag:cid_cache" # hash: cache_id → output CID
|
||||
ANALYSIS_KEY = "artdag:analysis_cache" # hash: input_hash:features → analysis CID
|
||||
PLAN_KEY = "artdag:plan_cache" # hash: plan_id → plan CID
|
||||
RUN_KEY = "artdag:run_cache" # hash: run_id → output CID
|
||||
CLAIM_KEY_PREFIX = "artdag:claim:" # string: cache_id → worker (with TTL)
|
||||
|
||||
# IPNS names (relative to cluster key)
|
||||
IPNS_CACHE_NAME = "cache"
|
||||
IPNS_ANALYSIS_NAME = "analysis"
|
||||
IPNS_PLAN_NAME = "plans"
|
||||
|
||||
|
||||
class HybridStateManager:
|
||||
"""
|
||||
Local Redis + async IPNS sync for distributed L1 coordination.
|
||||
|
||||
Fast path (local Redis):
|
||||
- get_cached_cid / set_cached_cid
|
||||
- try_claim / release_claim
|
||||
|
||||
Slow path (background IPNS sync):
|
||||
- Periodically syncs local state with global IPNS state
|
||||
- Merges remote state into local (pulls new entries)
|
||||
- Publishes local state to IPNS (pushes updates)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
redis_url: str = REDIS_URL,
|
||||
cluster_key: str = CLUSTER_KEY,
|
||||
sync_interval: int = IPNS_SYNC_INTERVAL,
|
||||
ipns_enabled: bool = IPNS_ENABLED,
|
||||
):
|
||||
self.cluster_key = cluster_key
|
||||
self.sync_interval = sync_interval
|
||||
self.ipns_enabled = ipns_enabled
|
||||
|
||||
# Connect to Redis
|
||||
self._redis = redis.from_url(redis_url, decode_responses=True)
|
||||
|
||||
# IPNS client (lazy import)
|
||||
self._ipfs = None
|
||||
|
||||
# Sync thread
|
||||
self._sync_thread = None
|
||||
self._stop_sync = threading.Event()
|
||||
|
||||
# Start background sync if enabled
|
||||
if self.ipns_enabled:
|
||||
self._start_background_sync()
|
||||
|
||||
@property
|
||||
def ipfs(self):
|
||||
"""Lazy import of IPFS client."""
|
||||
if self._ipfs is None:
|
||||
try:
|
||||
import ipfs_client
|
||||
self._ipfs = ipfs_client
|
||||
except ImportError:
|
||||
logger.warning("ipfs_client not available, IPNS sync disabled")
|
||||
self._ipfs = False
|
||||
return self._ipfs if self._ipfs else None
|
||||
|
||||
# ========== CID Cache ==========
|
||||
|
||||
def get_cached_cid(self, cache_id: str) -> Optional[str]:
|
||||
"""Get output CID for a cache_id. Fast local lookup."""
|
||||
return self._redis.hget(CACHE_KEY, cache_id)
|
||||
|
||||
def set_cached_cid(self, cache_id: str, cid: str) -> None:
|
||||
"""Set output CID for a cache_id. Fast local write."""
|
||||
self._redis.hset(CACHE_KEY, cache_id, cid)
|
||||
|
||||
def get_all_cached_cids(self) -> Dict[str, str]:
|
||||
"""Get all cached CIDs."""
|
||||
return self._redis.hgetall(CACHE_KEY)
|
||||
|
||||
# ========== Analysis Cache ==========
|
||||
|
||||
def get_analysis_cid(self, input_hash: str, features: list) -> Optional[str]:
|
||||
"""Get analysis CID for input + features."""
|
||||
key = f"{input_hash}:{','.join(sorted(features))}"
|
||||
return self._redis.hget(ANALYSIS_KEY, key)
|
||||
|
||||
def set_analysis_cid(self, input_hash: str, features: list, cid: str) -> None:
|
||||
"""Set analysis CID for input + features."""
|
||||
key = f"{input_hash}:{','.join(sorted(features))}"
|
||||
self._redis.hset(ANALYSIS_KEY, key, cid)
|
||||
|
||||
def get_all_analysis_cids(self) -> Dict[str, str]:
|
||||
"""Get all analysis CIDs."""
|
||||
return self._redis.hgetall(ANALYSIS_KEY)
|
||||
|
||||
# ========== Plan Cache ==========
|
||||
|
||||
def get_plan_cid(self, plan_id: str) -> Optional[str]:
|
||||
"""Get plan CID for a plan_id."""
|
||||
return self._redis.hget(PLAN_KEY, plan_id)
|
||||
|
||||
def set_plan_cid(self, plan_id: str, cid: str) -> None:
|
||||
"""Set plan CID for a plan_id."""
|
||||
self._redis.hset(PLAN_KEY, plan_id, cid)
|
||||
|
||||
def get_all_plan_cids(self) -> Dict[str, str]:
|
||||
"""Get all plan CIDs."""
|
||||
return self._redis.hgetall(PLAN_KEY)
|
||||
|
||||
# ========== Run Cache ==========
|
||||
|
||||
def get_run_cid(self, run_id: str) -> Optional[str]:
|
||||
"""Get output CID for a run_id."""
|
||||
return self._redis.hget(RUN_KEY, run_id)
|
||||
|
||||
def set_run_cid(self, run_id: str, cid: str) -> None:
|
||||
"""Set output CID for a run_id."""
|
||||
self._redis.hset(RUN_KEY, run_id, cid)
|
||||
|
||||
# ========== Claiming ==========
|
||||
|
||||
def try_claim(self, cache_id: str, worker_id: str, ttl: int = 300) -> bool:
|
||||
"""
|
||||
Try to claim a cache_id for execution.
|
||||
|
||||
Returns True if claimed, False if already claimed by another worker.
|
||||
Uses Redis SETNX for atomic claim.
|
||||
"""
|
||||
key = f"{CLAIM_KEY_PREFIX}{cache_id}"
|
||||
return bool(self._redis.set(key, worker_id, nx=True, ex=ttl))
|
||||
|
||||
def release_claim(self, cache_id: str) -> None:
|
||||
"""Release a claim."""
|
||||
key = f"{CLAIM_KEY_PREFIX}{cache_id}"
|
||||
self._redis.delete(key)
|
||||
|
||||
def get_claim(self, cache_id: str) -> Optional[str]:
|
||||
"""Get current claim holder for a cache_id."""
|
||||
key = f"{CLAIM_KEY_PREFIX}{cache_id}"
|
||||
return self._redis.get(key)
|
||||
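The claim API composes with the CID cache into a claim-render-publish pattern. A hedged sketch, where do_render and the cache id are hypothetical; the state-manager API is the one defined above:

state = get_state_manager()
cache_id = "example-cache-id"  # hypothetical

if state.try_claim(cache_id, worker_id="worker-1", ttl=300):
    try:
        out_cid = do_render(cache_id)            # hypothetical render step
        state.set_cached_cid(cache_id, out_cid)
    finally:
        state.release_claim(cache_id)
else:
    # Another worker holds the claim; duplicate work is tolerated by design,
    # but re-checking the cache usually avoids it.
    out_cid = state.get_cached_cid(cache_id)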
|
||||
# ========== IPNS Sync ==========
|
||||
|
||||
def _start_background_sync(self):
|
||||
"""Start background IPNS sync thread."""
|
||||
if self._sync_thread is not None:
|
||||
return
|
||||
|
||||
def sync_loop():
|
||||
logger.info(f"IPNS sync started (interval={self.sync_interval}s)")
|
||||
while not self._stop_sync.wait(timeout=self.sync_interval):
|
||||
try:
|
||||
self._sync_with_ipns()
|
||||
except Exception as e:
|
||||
logger.warning(f"IPNS sync failed: {e}")
|
||||
|
||||
self._sync_thread = threading.Thread(target=sync_loop, daemon=True)
|
||||
self._sync_thread.start()
|
||||
|
||||
def stop_sync(self):
|
||||
"""Stop background sync thread."""
|
||||
self._stop_sync.set()
|
||||
if self._sync_thread:
|
||||
self._sync_thread.join(timeout=5)
|
||||
|
||||
def _sync_with_ipns(self):
|
||||
"""Sync local state with IPNS global state."""
|
||||
if not self.ipfs:
|
||||
return
|
||||
|
||||
logger.debug("Starting IPNS sync...")
|
||||
|
||||
# Sync each cache type
|
||||
self._sync_hash(CACHE_KEY, IPNS_CACHE_NAME)
|
||||
self._sync_hash(ANALYSIS_KEY, IPNS_ANALYSIS_NAME)
|
||||
self._sync_hash(PLAN_KEY, IPNS_PLAN_NAME)
|
||||
|
||||
logger.debug("IPNS sync complete")
|
||||
|
||||
def _sync_hash(self, redis_key: str, ipns_name: str):
|
||||
"""Sync a Redis hash with IPNS."""
|
||||
ipns_full_name = f"{self.cluster_key}/{ipns_name}"
|
||||
|
||||
# Pull: resolve IPNS → get global state
|
||||
global_state = {}
|
||||
try:
|
||||
global_cid = self.ipfs.name_resolve(ipns_full_name)
|
||||
if global_cid:
|
||||
global_bytes = self.ipfs.get_bytes(global_cid)
|
||||
if global_bytes:
|
||||
global_state = json.loads(global_bytes.decode('utf-8'))
|
||||
logger.debug(f"Pulled {len(global_state)} entries from {ipns_name}")
|
||||
except Exception as e:
|
||||
logger.debug(f"Could not resolve {ipns_full_name}: {e}")
|
||||
|
||||
# Merge global into local (add entries we don't have)
|
||||
if global_state:
|
||||
pipe = self._redis.pipeline()
|
||||
for key, value in global_state.items():
|
||||
pipe.hsetnx(redis_key, key, value)
|
||||
results = pipe.execute()
|
||||
added = sum(1 for r in results if r)
|
||||
if added:
|
||||
logger.info(f"Merged {added} new entries from IPNS/{ipns_name}")
|
||||
|
||||
# Push: get local state, merge with global, publish
|
||||
local_state = self._redis.hgetall(redis_key)
|
||||
if local_state:
|
||||
merged = {**global_state, **local_state}
|
||||
|
||||
# Only publish if we have new entries
|
||||
if len(merged) > len(global_state):
|
||||
try:
|
||||
new_cid = self.ipfs.add_json(merged)
|
||||
if new_cid:
|
||||
# Note: name_publish can be slow
|
||||
self.ipfs.name_publish(ipns_full_name, new_cid)
|
||||
logger.info(f"Published {len(merged)} entries to IPNS/{ipns_name}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to publish to {ipns_full_name}: {e}")
|
||||
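The dict-union merge above is safe precisely because entries are content-addressed: the same cache_id can only ever map to the same CID, so local and global values never conflict. A worked example of the union:

global_state = {"id1": "cidA"}
local_state = {"id1": "cidA", "id2": "cidB"}     # identical inputs -> identical CID
merged = {**global_state, **local_state}
assert merged == {"id1": "cidA", "id2": "cidB"}  # plain union; no conflicts possible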
|
||||
def force_sync(self):
|
||||
"""Force an immediate IPNS sync (blocking)."""
|
||||
self._sync_with_ipns()
|
||||
|
||||
# ========== Stats ==========
|
||||
|
||||
def get_stats(self) -> Dict:
|
||||
"""Get cache statistics."""
|
||||
return {
|
||||
"cached_cids": self._redis.hlen(CACHE_KEY),
|
||||
"analysis_cids": self._redis.hlen(ANALYSIS_KEY),
|
||||
"plan_cids": self._redis.hlen(PLAN_KEY),
|
||||
"run_cids": self._redis.hlen(RUN_KEY),
|
||||
"ipns_enabled": self.ipns_enabled,
|
||||
"cluster_key": self.cluster_key[:16] + "..." if len(self.cluster_key) > 16 else self.cluster_key,
|
||||
}
|
||||
|
||||
|
||||
# Singleton instance
|
||||
_state_manager: Optional[HybridStateManager] = None
|
||||
|
||||
|
||||
def get_state_manager() -> HybridStateManager:
|
||||
"""Get the singleton state manager instance."""
|
||||
global _state_manager
|
||||
if _state_manager is None:
|
||||
_state_manager = HybridStateManager()
|
||||
return _state_manager
|
||||
|
||||
|
||||
def reset_state_manager():
|
||||
"""Reset the singleton (for testing)."""
|
||||
global _state_manager
|
||||
if _state_manager:
|
||||
_state_manager.stop_sync()
|
||||
_state_manager = None
|
||||
@@ -10,7 +10,7 @@ import logging
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from typing import Optional, Union
|
||||
|
||||
import requests
|
||||
|
||||
@@ -19,8 +19,18 @@ logger = logging.getLogger(__name__)
|
||||
# IPFS API multiaddr - default to local, docker uses /dns/ipfs/tcp/5001
|
||||
IPFS_API = os.getenv("IPFS_API", "/ip4/127.0.0.1/tcp/5001")
|
||||
|
||||
# Connection timeout in seconds
|
||||
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "30"))
|
||||
# Connection timeout in seconds (increased for large files)
|
||||
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "120"))
|
||||
|
||||
# IPFS gateway URLs for fallback when local node doesn't have content
|
||||
# Comma-separated list of gateway URLs (without /ipfs/ suffix)
|
||||
IPFS_GATEWAYS = [g.strip() for g in os.getenv(
|
||||
"IPFS_GATEWAYS",
|
||||
"https://ipfs.io,https://cloudflare-ipfs.com,https://dweb.link"
|
||||
).split(",") if g.strip()]
|
||||
|
||||
# Gateway timeout (shorter than API timeout for faster fallback)
|
||||
GATEWAY_TIMEOUT = int(os.getenv("GATEWAY_TIMEOUT", "30"))
|
||||
|
||||
|
||||
def _multiaddr_to_url(multiaddr: str) -> str:
|
||||
@@ -45,18 +55,22 @@ def _multiaddr_to_url(multiaddr: str) -> str:
|
||||
IPFS_BASE_URL = _multiaddr_to_url(IPFS_API)
|
||||
|
||||
|
||||
def add_file(file_path: Path, pin: bool = True) -> Optional[str]:
|
||||
def add_file(file_path: Union[Path, str], pin: bool = True) -> Optional[str]:
|
||||
"""
|
||||
Add a file to IPFS and optionally pin it.
|
||||
|
||||
Args:
|
||||
file_path: Path to the file to add
|
||||
file_path: Path to the file to add (Path object or string)
|
||||
pin: Whether to pin the file (default: True)
|
||||
|
||||
Returns:
|
||||
IPFS CID (content identifier) or None on failure
|
||||
"""
|
||||
try:
|
||||
# Ensure file_path is a Path object
|
||||
if isinstance(file_path, str):
|
||||
file_path = Path(file_path)
|
||||
|
||||
url = f"{IPFS_BASE_URL}/api/v0/add"
|
||||
params = {"pin": str(pin).lower()}
|
||||
|
||||
@@ -118,13 +132,27 @@ def add_json(data: dict, pin: bool = True) -> Optional[str]:
|
||||
return add_bytes(json_bytes, pin=pin)
|
||||
|
||||
|
||||
def get_file(cid: str, dest_path: Path) -> bool:
|
||||
def add_string(content: str, pin: bool = True) -> Optional[str]:
|
||||
"""
|
||||
Add a string to IPFS and optionally pin it.
|
||||
|
||||
Args:
|
||||
content: String content to add (e.g., S-expression)
|
||||
pin: Whether to pin the data (default: True)
|
||||
|
||||
Returns:
|
||||
IPFS CID or None on failure
|
||||
"""
|
||||
return add_bytes(content.encode('utf-8'), pin=pin)
|
||||
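A typical use of add_string is pinning a plan S-expression so its CID can go into provenance. A one-line sketch (the sexp text is purely illustrative):

plan_cid = add_string('(stream "demo" :fps 30)')  # returns the pinned CID, or None on failure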
|
||||
|
||||
def get_file(cid: str, dest_path: Union[Path, str]) -> bool:
|
||||
"""
|
||||
Retrieve a file from IPFS and save to destination.
|
||||
|
||||
Args:
|
||||
cid: IPFS CID to retrieve
|
||||
dest_path: Path to save the file
|
||||
dest_path: Path to save the file (Path object or string)
|
||||
|
||||
Returns:
|
||||
True on success, False on failure
|
||||
@@ -134,6 +162,10 @@ def get_file(cid: str, dest_path: Path) -> bool:
|
||||
if data is None:
|
||||
return False
|
||||
|
||||
# Ensure dest_path is a Path object
|
||||
if isinstance(dest_path, str):
|
||||
dest_path = Path(dest_path)
|
||||
|
||||
dest_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
dest_path.write_bytes(data)
|
||||
logger.info(f"Retrieved from IPFS: {cid} -> {dest_path}")
|
||||
@@ -143,16 +175,50 @@ def get_file(cid: str, dest_path: Path) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def get_bytes(cid: str) -> Optional[bytes]:
|
||||
def get_bytes_from_gateway(cid: str) -> Optional[bytes]:
|
||||
"""
|
||||
Retrieve bytes data from IPFS.
|
||||
Retrieve bytes from IPFS via public gateways (fallback).
|
||||
|
||||
Tries each configured gateway in order until one succeeds.
|
||||
|
||||
Args:
|
||||
cid: IPFS CID to retrieve
|
||||
|
||||
Returns:
|
||||
File content as bytes or None if all gateways fail
|
||||
"""
|
||||
for gateway in IPFS_GATEWAYS:
|
||||
try:
|
||||
url = f"{gateway}/ipfs/{cid}"
|
||||
logger.info(f"Trying gateway: {url}")
|
||||
response = requests.get(url, timeout=GATEWAY_TIMEOUT)
|
||||
response.raise_for_status()
|
||||
data = response.content
|
||||
logger.info(f"Retrieved from gateway {gateway}: {cid} ({len(data)} bytes)")
|
||||
return data
|
||||
except Exception as e:
|
||||
logger.warning(f"Gateway {gateway} failed for {cid}: {e}")
|
||||
continue
|
||||
|
||||
logger.error(f"All gateways failed for {cid}")
|
||||
return None
|
||||
|
||||
|
||||
def get_bytes(cid: str, use_gateway_fallback: bool = True) -> Optional[bytes]:
|
||||
"""
|
||||
Retrieve bytes data from IPFS.
|
||||
|
||||
Tries local IPFS node first, then falls back to public gateways
|
||||
if configured and use_gateway_fallback is True.
|
||||
|
||||
Args:
|
||||
cid: IPFS CID to retrieve
|
||||
use_gateway_fallback: If True, try public gateways on local failure
|
||||
|
||||
Returns:
|
||||
File content as bytes or None on failure
|
||||
"""
|
||||
# Try local IPFS node first
|
||||
try:
|
||||
url = f"{IPFS_BASE_URL}/api/v0/cat"
|
||||
params = {"arg": cid}
|
||||
@@ -164,6 +230,13 @@ def get_bytes(cid: str) -> Optional[bytes]:
|
||||
logger.info(f"Retrieved from IPFS: {cid} ({len(data)} bytes)")
|
||||
return data
|
||||
except Exception as e:
|
||||
logger.warning(f"Local IPFS failed for {cid}: {e}")
|
||||
|
||||
# Try gateway fallback
|
||||
if use_gateway_fallback and IPFS_GATEWAYS:
|
||||
logger.info(f"Trying gateway fallback for {cid}")
|
||||
return get_bytes_from_gateway(cid)
|
||||
|
||||
logger.error(f"Failed to get bytes from IPFS: {e}")
|
||||
return None
|
||||
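A hedged usage sketch of the fallback order implemented above: local node first, then each gateway in IPFS_GATEWAYS (the CID below is a placeholder):

cid = "bafybeiexampleexampleexampleexample"               # placeholder CID
data = get_bytes(cid)                                     # local node, then gateways
local_only = get_bytes(cid, use_gateway_fallback=False)   # local node only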
|
||||
|
||||
569
legacy_tasks.py
@@ -1,569 +0,0 @@
|
||||
"""
|
||||
Art DAG Celery Tasks
|
||||
|
||||
Distributed rendering tasks for the Art DAG system.
|
||||
Supports both single-effect runs and multi-step DAG execution.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from celery import Task
|
||||
from celery_app import app
|
||||
|
||||
# Import artdag components
|
||||
from artdag import DAG, Node, NodeType
|
||||
from artdag.engine import Engine
|
||||
from artdag.executor import register_executor, Executor, get_executor
|
||||
from artdag.nodes.effect import register_effect
|
||||
import artdag.nodes # Register all built-in executors (SOURCE, EFFECT, etc.)
|
||||
|
||||
# Add effects to path (use env var in Docker, fallback to home dir locally)
|
||||
EFFECTS_PATH = Path(os.environ.get("EFFECTS_PATH", str(Path.home() / "artdag-effects")))
|
||||
ARTDAG_PATH = Path(os.environ.get("ARTDAG_PATH", str(Path.home() / "art" / "artdag")))
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_effects_commit() -> str:
|
||||
"""Get current git commit hash of effects repo."""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["git", "rev-parse", "HEAD"],
|
||||
cwd=EFFECTS_PATH,
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
if result.returncode == 0:
|
||||
return result.stdout.strip()
|
||||
except Exception:
|
||||
pass
|
||||
return "unknown"
|
||||
|
||||
|
||||
def get_artdag_commit() -> str:
|
||||
"""Get current git commit hash of artdag repo."""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["git", "rev-parse", "HEAD"],
|
||||
cwd=ARTDAG_PATH,
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
if result.returncode == 0:
|
||||
return result.stdout.strip()
|
||||
except Exception:
|
||||
pass
|
||||
return "unknown"
|
||||
|
||||
|
||||
sys.path.insert(0, str(EFFECTS_PATH / "dog"))
|
||||
|
||||
# Register the dog effect with the EFFECT executor
|
||||
# New format uses process() instead of effect_dog()
|
||||
from effect import process as dog_process
|
||||
|
||||
@register_effect("dog")
|
||||
def _dog_effect(input_path: Path, output_path: Path, config: dict) -> Path:
|
||||
"""Dog effect wrapper - registered for DAG EFFECT nodes."""
|
||||
# Wrap for new whole-video API
|
||||
return dog_process([input_path], output_path, config, None)
|
||||
|
||||
|
||||
def file_hash(path: Path) -> str:
|
||||
"""Compute SHA3-256 hash of a file."""
|
||||
hasher = hashlib.sha3_256()
|
||||
actual_path = path.resolve() if path.is_symlink() else path
|
||||
with open(actual_path, "rb") as f:
|
||||
for chunk in iter(lambda: f.read(65536), b""):
|
||||
hasher.update(chunk)
|
||||
return hasher.hexdigest()
|
||||
|
||||
|
||||
# Cache directory (shared between server and worker)
|
||||
CACHE_DIR = Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
|
||||
|
||||
|
||||
# ============ Executors for Effects ============
|
||||
|
||||
@register_executor("effect:dog")
|
||||
class DogExecutor(Executor):
|
||||
"""Executor for the dog effect."""
|
||||
|
||||
def execute(self, config: Dict, inputs: List[Path], output_path: Path) -> Path:
|
||||
from effect import effect_dog
|
||||
if len(inputs) != 1:
|
||||
raise ValueError(f"Dog effect expects 1 input, got {len(inputs)}")
|
||||
return effect_dog(inputs[0], output_path, config)
|
||||
|
||||
|
||||
@register_executor("effect:identity")
|
||||
class IdentityExecutor(Executor):
|
||||
"""Executor for the identity effect (passthrough)."""
|
||||
|
||||
def execute(self, config: Dict, inputs: List[Path], output_path: Path) -> Path:
|
||||
from artdag.nodes.effect import effect_identity
|
||||
if len(inputs) != 1:
|
||||
raise ValueError(f"Identity effect expects 1 input, got {len(inputs)}")
|
||||
return effect_identity(inputs[0], output_path, config)
|
||||
|
||||
|
||||
@register_executor(NodeType.SOURCE)
|
||||
class SourceExecutor(Executor):
|
||||
"""Executor for SOURCE nodes - loads content from cache by hash."""
|
||||
|
||||
def execute(self, config: Dict, inputs: List[Path], output_path: Path) -> Path:
|
||||
# Source nodes load from cache by cid
|
||||
cid = config.get("cid")
|
||||
if not cid:
|
||||
raise ValueError("SOURCE node requires cid in config")
|
||||
|
||||
# Look up in cache
|
||||
source_path = CACHE_DIR / cid
|
||||
if not source_path.exists():
|
||||
# Try nodes directory
|
||||
from cache_manager import get_cache_manager
|
||||
cache_manager = get_cache_manager()
|
||||
source_path = cache_manager.get_by_cid(cid)
|
||||
|
||||
if not source_path or not source_path.exists():
|
||||
raise ValueError(f"Source content not in cache: {cid}")
|
||||
|
||||
# For source nodes, we just return the path (no transformation)
|
||||
# The engine will use this as input to subsequent nodes
|
||||
return source_path
|
||||
|
||||
|
||||
class RenderTask(Task):
|
||||
"""Base task with provenance tracking."""
|
||||
|
||||
def on_success(self, retval, task_id, args, kwargs):
|
||||
"""Record successful render."""
|
||||
print(f"Task {task_id} completed: {retval}")
|
||||
|
||||
def on_failure(self, exc, task_id, args, kwargs, einfo):
|
||||
"""Record failed render."""
|
||||
print(f"Task {task_id} failed: {exc}")
|
||||
|
||||
|
||||
@app.task(base=RenderTask, bind=True)
|
||||
def render_effect(self, input_hash: str, effect_name: str, output_name: str) -> dict:
|
||||
"""
|
||||
Render an effect on an input asset.
|
||||
|
||||
Args:
|
||||
input_hash: SHA3-256 hash of input asset
|
||||
effect_name: Name of effect (e.g., "dog", "identity")
|
||||
output_name: Name for output asset
|
||||
|
||||
Returns:
|
||||
Provenance record with output hash
|
||||
"""
|
||||
from cache_manager import get_cache_manager
|
||||
|
||||
# Registry hashes (for effects/infra metadata only)
|
||||
REGISTRY = {
|
||||
"effect:dog": {
|
||||
"hash": "d048fe313433eb4e38f0e24194ffae91b896ca3e6eed3e50b2cc37b7be495555"
|
||||
},
|
||||
"effect:identity": {
|
||||
"hash": "640ea11ee881ebf4101af0a955439105ab11e763682b209e88ea08fc66e1cc03"
|
||||
},
|
||||
"infra:artdag": {
|
||||
"hash": "96a5972de216aee12ec794dcad5f9360da2e676171eabf24a46dfe1ee5fee4b0"
|
||||
},
|
||||
"infra:giles-hp": {
|
||||
"hash": "964bf6e69dc4e2493f42375013caffe26404ec3cf8eb5d9bc170cd42a361523b"
|
||||
}
|
||||
}
|
||||
|
||||
# Input comes from cache by hash (supports both legacy and new cache locations)
|
||||
cache_manager = get_cache_manager()
|
||||
input_path = cache_manager.get_by_cid(input_hash)
|
||||
if not input_path or not input_path.exists():
|
||||
raise ValueError(f"Input not in cache: {input_hash}")
|
||||
|
||||
output_dir = CACHE_DIR
|
||||
|
||||
# Verify input
|
||||
actual_hash = file_hash(input_path)
|
||||
if actual_hash != input_hash:
|
||||
raise ValueError(f"Input hash mismatch: expected {input_hash}, got {actual_hash}")
|
||||
|
||||
self.update_state(state='RENDERING', meta={'effect': effect_name, 'input': input_hash[:16]})
|
||||
|
||||
# Load and apply effect
|
||||
if effect_name == "dog":
|
||||
from effect import effect_dog, DOG_HASH
|
||||
output_path = output_dir / f"{output_name}.mkv"
|
||||
result = effect_dog(input_path, output_path, {})
|
||||
expected_hash = DOG_HASH
|
||||
elif effect_name == "identity":
|
||||
from artdag.nodes.effect import effect_identity
|
||||
output_path = output_dir / f"{output_name}{input_path.suffix}"
|
||||
result = effect_identity(input_path, output_path, {})
|
||||
expected_hash = input_hash
|
||||
else:
|
||||
raise ValueError(f"Unknown effect: {effect_name}")
|
||||
|
||||
# Verify output
|
||||
output_cid = file_hash(result)
|
||||
if output_cid != expected_hash:
|
||||
raise ValueError(f"Output hash mismatch: expected {expected_hash}, got {output_cid}")
|
||||
|
||||
# Build effect info based on source
|
||||
if effect_name == "identity":
|
||||
# Identity is from artdag package on GitHub
|
||||
artdag_commit = get_artdag_commit()
|
||||
effect_info = {
|
||||
"name": f"effect:{effect_name}",
|
||||
"cid": REGISTRY[f"effect:{effect_name}"]["hash"],
|
||||
"repo": "github",
|
||||
"repo_commit": artdag_commit,
|
||||
"repo_url": f"https://github.com/gilesbradshaw/art-dag/blob/{artdag_commit}/artdag/nodes/effect.py"
|
||||
}
|
||||
else:
|
||||
# Other effects from rose-ash effects repo
|
||||
effects_commit = get_effects_commit()
|
||||
effect_info = {
|
||||
"name": f"effect:{effect_name}",
|
||||
"cid": REGISTRY[f"effect:{effect_name}"]["hash"],
|
||||
"repo": "rose-ash",
|
||||
"repo_commit": effects_commit,
|
||||
"repo_url": f"https://git.rose-ash.com/art-dag/effects/src/commit/{effects_commit}/{effect_name}"
|
||||
}
|
||||
|
||||
# Build provenance
|
||||
provenance = {
|
||||
"task_id": self.request.id,
|
||||
"rendered_at": datetime.now(timezone.utc).isoformat(),
|
||||
"rendered_by": "@giles@artdag.rose-ash.com",
|
||||
"output": {
|
||||
"name": output_name,
|
||||
"cid": output_cid,
|
||||
},
|
||||
"inputs": [
|
||||
{"cid": input_hash}
|
||||
],
|
||||
"effects": [effect_info],
|
||||
"infrastructure": {
|
||||
"software": {"name": "infra:artdag", "cid": REGISTRY["infra:artdag"]["hash"]},
|
||||
"hardware": {"name": "infra:giles-hp", "cid": REGISTRY["infra:giles-hp"]["hash"]}
|
||||
}
|
||||
}
|
||||
|
||||
# Store provenance on IPFS
|
||||
import ipfs_client
|
||||
provenance_cid = ipfs_client.add_json(provenance)
|
||||
if provenance_cid:
|
||||
provenance["provenance_cid"] = provenance_cid
|
||||
logger.info(f"Stored provenance on IPFS: {provenance_cid}")
|
||||
else:
|
||||
logger.warning("Failed to store provenance on IPFS")
|
||||
|
||||
return provenance
|
||||
|
||||
|
||||
@app.task
|
||||
def render_dog_from_cat() -> dict:
|
||||
"""Convenience task: render cat through dog effect."""
|
||||
CAT_HASH = "33268b6e167deaf018cc538de12dbe562612b33e89a749391cef855b320a269b"
|
||||
return render_effect.delay(CAT_HASH, "dog", "dog-from-cat-celery").get()
|
||||
|
||||
|
||||
@app.task(base=RenderTask, bind=True)
|
||||
def execute_dag(self, dag_json: str, run_id: str = None) -> dict:
|
||||
"""
|
||||
Execute a multi-step DAG.
|
||||
|
||||
Args:
|
||||
dag_json: Serialized DAG as JSON string
|
||||
run_id: Optional run ID for tracking
|
||||
|
||||
Returns:
|
||||
Execution result with output hash and node results
|
||||
"""
|
||||
from cache_manager import get_cache_manager
|
||||
|
||||
# Parse DAG
|
||||
try:
|
||||
dag = DAG.from_json(dag_json)
|
||||
except Exception as e:
|
||||
raise ValueError(f"Invalid DAG JSON: {e}")
|
||||
|
||||
# Validate DAG
|
||||
errors = dag.validate()
|
||||
if errors:
|
||||
raise ValueError(f"Invalid DAG: {errors}")
|
||||
|
||||
# Create engine with cache directory
|
||||
engine = Engine(CACHE_DIR / "nodes")
|
||||
|
||||
# Set up progress callback
|
||||
def progress_callback(progress):
|
||||
self.update_state(
|
||||
state='EXECUTING',
|
||||
meta={
|
||||
'node_id': progress.node_id,
|
||||
'node_type': progress.node_type,
|
||||
'status': progress.status,
|
||||
'progress': progress.progress,
|
||||
'message': progress.message,
|
||||
}
|
||||
)
|
||||
logger.info(f"DAG progress: {progress.node_id} - {progress.status} - {progress.message}")
|
||||
|
||||
engine.set_progress_callback(progress_callback)
|
||||
|
||||
# Execute DAG
|
||||
self.update_state(state='EXECUTING', meta={'status': 'starting', 'nodes': len(dag.nodes)})
|
||||
result = engine.execute(dag)
|
||||
|
||||
if not result.success:
|
||||
raise RuntimeError(f"DAG execution failed: {result.error}")
|
||||
|
||||
# Index all node outputs by cid and upload to IPFS
|
||||
cache_manager = get_cache_manager()
|
||||
output_cid = None
|
||||
node_hashes = {} # node_id -> cid mapping
|
||||
node_ipfs_cids = {} # node_id -> ipfs_cid mapping
|
||||
|
||||
# Process all node results (intermediates + output)
|
||||
for node_id, node_path in result.node_results.items():
|
||||
if node_path and Path(node_path).exists():
|
||||
node = dag.nodes.get(node_id)
|
||||
# Skip SOURCE nodes - they're already in cache
|
||||
if node and (node.node_type == NodeType.SOURCE or str(node.node_type) == "SOURCE"):
|
||||
cid = node.config.get("cid")
|
||||
if cid:
|
||||
node_hashes[node_id] = cid
|
||||
continue
|
||||
|
||||
# Determine node type for cache metadata
|
||||
node_type_str = str(node.node_type) if node else "intermediate"
|
||||
if "effect" in node_type_str.lower():
|
||||
cache_node_type = "effect_output"
|
||||
else:
|
||||
cache_node_type = "dag_intermediate"
|
||||
|
||||
# Store in cache_manager (indexes by cid, uploads to IPFS)
|
||||
# put() returns (CachedFile, cid) where cid is IPFS CID if available, else local hash
|
||||
cached, content_cid = cache_manager.put(
|
||||
Path(node_path),
|
||||
node_type=cache_node_type,
|
||||
node_id=node_id,
|
||||
)
|
||||
# content_cid is the primary identifier (IPFS CID or local hash)
|
||||
node_hashes[node_id] = content_cid
|
||||
# Track IPFS CIDs separately (they start with Qm or bafy)
|
||||
if content_cid and (content_cid.startswith("Qm") or content_cid.startswith("bafy")):
|
||||
node_ipfs_cids[node_id] = content_cid
|
||||
logger.info(f"Cached node {node_id}: IPFS CID {content_cid}")
|
||||
else:
|
||||
logger.info(f"Cached node {node_id}: local hash {content_cid[:16] if content_cid else 'none'}...")
|
||||
|
||||
# Get output hash from the output node
|
||||
# Use the same identifier that's in the cache index (IPFS CID if available)
|
||||
if result.output_path and result.output_path.exists():
|
||||
local_hash = file_hash(result.output_path)
|
||||
output_ipfs_cid = node_ipfs_cids.get(dag.output_id)
|
||||
# Use IPFS CID as primary identifier if available, otherwise local hash
|
||||
# This must match what's in the content_index from cache_manager.put()
|
||||
output_cid = node_hashes.get(dag.output_id, local_hash)
|
||||
|
||||
# Store output in database (for L2 to query IPFS CID)
|
||||
import asyncio
|
||||
import database
|
||||
|
||||
# Store plan (DAG) to IPFS
|
||||
plan_cid = None
|
||||
try:
|
||||
import ipfs_client
|
||||
dag_dict = json.loads(dag_json)
|
||||
plan_cid = ipfs_client.add_json(dag_dict)
|
||||
if plan_cid:
|
||||
logger.info(f"Stored plan to IPFS: {plan_cid}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to store plan to IPFS: {e}")
|
||||
|
||||
async def save_to_db():
|
||||
if database.pool is None:
|
||||
await database.init_db()
|
||||
await database.create_cache_item(output_cid, output_ipfs_cid)
|
||||
# Also save the run result
|
||||
if run_id:
|
||||
input_hashes_for_db = [
|
||||
node.config.get("cid")
|
||||
for node in dag.nodes.values()
|
||||
if (node.node_type == NodeType.SOURCE or str(node.node_type) == "SOURCE")
|
||||
and node.config.get("cid")
|
||||
]
|
||||
# Get actor_id and recipe from pending_runs (saved when run started)
|
||||
actor_id = None
|
||||
recipe_name = "dag"
|
||||
pending = await database.get_pending_run(run_id)
|
||||
if pending:
|
||||
actor_id = pending.get("actor_id")
|
||||
recipe_name = pending.get("recipe") or "dag"
|
||||
|
||||
await database.save_run_cache(
|
||||
run_id=run_id,
|
||||
output_cid=output_cid,
|
||||
recipe=recipe_name,
|
||||
inputs=input_hashes_for_db,
|
||||
ipfs_cid=output_ipfs_cid,
|
||||
actor_id=actor_id,
|
||||
plan_cid=plan_cid,
|
||||
)
|
||||
|
||||
# Save output as media for the user
|
||||
if actor_id:
|
||||
await database.save_item_metadata(
|
||||
cid=output_cid,
|
||||
actor_id=actor_id,
|
||||
item_type="media",
|
||||
description=f"Output from recipe: {recipe_name}",
|
||||
source_type="recipe",
|
||||
source_note=f"run_id: {run_id}",
|
||||
)
|
||||
|
||||
# Clean up pending run
|
||||
if pending:
|
||||
await database.complete_pending_run(run_id)
|
||||
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
if loop.is_running():
|
||||
asyncio.ensure_future(save_to_db())
|
||||
else:
|
||||
loop.run_until_complete(save_to_db())
|
||||
except RuntimeError:
|
||||
asyncio.run(save_to_db())
|
||||
|
||||
# Record activity for deletion tracking
|
||||
input_hashes = []
|
||||
intermediate_hashes = []
|
||||
for node_id, node in dag.nodes.items():
|
||||
if node.node_type == NodeType.SOURCE or str(node.node_type) == "SOURCE":
|
||||
cid = node.config.get("cid")
|
||||
if cid:
|
||||
input_hashes.append(cid)
|
||||
elif node_id != dag.output_id and node_id in node_hashes:
|
||||
intermediate_hashes.append(node_hashes[node_id])
|
||||
|
||||
if input_hashes:
|
||||
from artdag.activities import Activity
|
||||
from datetime import datetime, timezone
|
||||
activity = Activity(
|
||||
activity_id=run_id or f"dag-{output_cid[:16]}",
|
||||
input_ids=sorted(input_hashes),
|
||||
output_id=output_cid,
|
||||
intermediate_ids=intermediate_hashes,
|
||||
created_at=datetime.now(timezone.utc).timestamp(),
|
||||
status="completed",
|
||||
)
|
||||
cache_manager.activity_store.add(activity)
|
||||
|
||||
# Build provenance
|
||||
input_hashes_for_provenance = []
|
||||
for node_id, node in dag.nodes.items():
|
||||
if node.node_type == NodeType.SOURCE or str(node.node_type) == "SOURCE":
|
||||
cid = node.config.get("cid")
|
||||
if cid:
|
||||
input_hashes_for_provenance.append({"cid": cid})
|
||||
|
||||
provenance = {
|
||||
"task_id": self.request.id,
|
||||
"run_id": run_id,
|
||||
"rendered_at": datetime.now(timezone.utc).isoformat(),
|
||||
"output": {
|
||||
"cid": output_cid,
|
||||
"ipfs_cid": node_ipfs_cids.get(dag.output_id) if dag.output_id else None,
|
||||
},
|
||||
"inputs": input_hashes_for_provenance,
|
||||
"dag": dag_json, # Full DAG definition
|
||||
"nodes": {
|
||||
node_id: {
|
||||
"cid": node_hashes.get(node_id),
|
||||
"ipfs_cid": node_ipfs_cids.get(node_id),
|
||||
}
|
||||
for node_id in dag.nodes.keys()
|
||||
if node_id in node_hashes
|
||||
},
|
||||
"execution": {
|
||||
"execution_time": result.execution_time,
|
||||
"nodes_executed": result.nodes_executed,
|
||||
"nodes_cached": result.nodes_cached,
|
||||
}
|
||||
}
|
||||
|
||||
# Store provenance on IPFS
|
||||
import ipfs_client
|
||||
provenance_cid = ipfs_client.add_json(provenance)
|
||||
if provenance_cid:
|
||||
provenance["provenance_cid"] = provenance_cid
|
||||
logger.info(f"Stored DAG provenance on IPFS: {provenance_cid}")
|
||||
else:
|
||||
logger.warning("Failed to store DAG provenance on IPFS")
|
||||
|
||||
# Build result
|
||||
return {
|
||||
"success": True,
|
||||
"run_id": run_id,
|
||||
"output_cid": output_cid,
|
||||
"output_ipfs_cid": node_ipfs_cids.get(dag.output_id) if dag.output_id else None,
|
||||
"output_path": str(result.output_path) if result.output_path else None,
|
||||
"execution_time": result.execution_time,
|
||||
"nodes_executed": result.nodes_executed,
|
||||
"nodes_cached": result.nodes_cached,
|
||||
"node_results": {
|
||||
node_id: str(path) for node_id, path in result.node_results.items()
|
||||
},
|
||||
"node_hashes": node_hashes, # node_id -> cid
|
||||
"node_ipfs_cids": node_ipfs_cids, # node_id -> ipfs_cid
|
||||
"provenance_cid": provenance_cid,
|
||||
}
|
||||
|
||||
|
||||
def build_effect_dag(input_hashes: List[str], effect_name: str) -> DAG:
|
||||
"""
|
||||
Build a simple DAG for applying an effect to inputs.
|
||||
|
||||
Args:
|
||||
input_hashes: List of input content hashes
|
||||
effect_name: Name of effect to apply (e.g., "dog", "identity")
|
||||
|
||||
Returns:
|
||||
DAG ready for execution
|
||||
"""
|
||||
dag = DAG()
|
||||
|
||||
# Add source nodes for each input
|
||||
source_ids = []
|
||||
for i, cid in enumerate(input_hashes):
|
||||
source_node = Node(
|
||||
node_type=NodeType.SOURCE,
|
||||
config={"cid": cid},
|
||||
name=f"source_{i}",
|
||||
)
|
||||
dag.add_node(source_node)
|
||||
source_ids.append(source_node.node_id)
|
||||
|
||||
# Add effect node
|
||||
effect_node = Node(
|
||||
node_type=f"effect:{effect_name}",
|
||||
config={},
|
||||
inputs=source_ids,
|
||||
name=f"effect_{effect_name}",
|
||||
)
|
||||
dag.add_node(effect_node)
|
||||
dag.set_output(effect_node.node_id)
|
||||
|
||||
return dag
|
||||
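A minimal sketch of driving these tasks from client code, reusing the cat hash from render_dog_from_cat; the importable module name `tasks` and a `DAG.to_json()` serializer (the counterpart of `DAG.from_json()` used above) are assumptions:

# Sketch only: module name `tasks` and DAG.to_json() are assumptions.
from tasks import build_effect_dag, execute_dag

CAT_HASH = "33268b6e167deaf018cc538de12dbe562612b33e89a749391cef855b320a269b"

# Build a one-effect DAG (SOURCE -> effect:identity) and submit it to Celery.
dag = build_effect_dag([CAT_HASH], "identity")
async_result = execute_dag.delay(dag.to_json(), run_id="demo-run")

# Block until the worker returns the result dict described above.
outcome = async_result.get(timeout=600)
print(outcome["output_cid"], outcome.get("provenance_cid"))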
477 path_registry.py Normal file
@@ -0,0 +1,477 @@
"""
Path Registry - Maps human-friendly paths to content-addressed IDs.

This module provides a bidirectional mapping between:
- Human-friendly paths (e.g., "effects/ascii_fx_zone.sexp")
- Content-addressed IDs (IPFS CIDs or SHA3-256 hashes)

The registry is useful for:
- Looking up effects by their friendly path name
- Resolving cids back to the original path for debugging
- Maintaining a stable naming scheme across cache updates

Storage:
- Uses the existing item_types table in the database (path column)
- Caches in Redis for fast lookups across distributed workers

The registry uses a system actor (@system@local) for global path mappings,
allowing effects to be resolved by path without requiring user context.
"""

import logging
import os
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from dataclasses import dataclass

logger = logging.getLogger(__name__)

# System actor for global path mappings (effects, recipes, analyzers)
SYSTEM_ACTOR = "@system@local"


@dataclass
class PathEntry:
    """A registered path with its content-addressed ID."""
    path: str  # Human-friendly path (relative or normalized)
    cid: str  # Content-addressed ID (IPFS CID or hash)
    content_type: str  # Type: "effect", "recipe", "analyzer", etc.
    actor_id: str = SYSTEM_ACTOR  # Owner (system for global)
    description: Optional[str] = None
    created_at: float = 0.0


class PathRegistry:
    """
    Registry for mapping paths to content-addressed IDs.

    Uses the existing item_types table for persistence and Redis
    for fast lookups in distributed Celery workers.
    """

    def __init__(self, redis_client=None):
        self._redis = redis_client
        self._redis_path_to_cid_key = "artdag:path_to_cid"
        self._redis_cid_to_path_key = "artdag:cid_to_path"

    def _run_async(self, coro):
        """Run async coroutine from sync context."""
        import asyncio

        try:
            loop = asyncio.get_running_loop()
            import threading
            result = [None]
            error = [None]

            def run_in_thread():
                try:
                    new_loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(new_loop)
                    try:
                        result[0] = new_loop.run_until_complete(coro)
                    finally:
                        new_loop.close()
                except Exception as e:
                    error[0] = e

            thread = threading.Thread(target=run_in_thread)
            thread.start()
            thread.join(timeout=30)
            if error[0]:
                raise error[0]
            return result[0]
        except RuntimeError:
            try:
                loop = asyncio.get_event_loop()
            except RuntimeError:
                loop = asyncio.new_event_loop()
                asyncio.set_event_loop(loop)
            return loop.run_until_complete(coro)

    def _normalize_path(self, path: str) -> str:
        """Normalize a path for consistent storage."""
        # Remove leading ./ or /
        path = path.lstrip('./')
        # Normalize separators
        path = path.replace('\\', '/')
        # Remove duplicate slashes
        while '//' in path:
            path = path.replace('//', '/')
        return path

    def register(
        self,
        path: str,
        cid: str,
        content_type: str = "effect",
        actor_id: str = SYSTEM_ACTOR,
        description: Optional[str] = None,
    ) -> PathEntry:
        """
        Register a path -> cid mapping.

        Args:
            path: Human-friendly path (e.g., "effects/ascii_fx_zone.sexp")
            cid: Content-addressed ID (IPFS CID or hash)
            content_type: Type of content ("effect", "recipe", "analyzer")
            actor_id: Owner (default: system for global mappings)
            description: Optional description

        Returns:
            The created PathEntry
        """
        norm_path = self._normalize_path(path)
        now = datetime.now(timezone.utc).timestamp()

        entry = PathEntry(
            path=norm_path,
            cid=cid,
            content_type=content_type,
            actor_id=actor_id,
            description=description,
            created_at=now,
        )

        # Store in database (item_types table)
        self._save_to_db(entry)

        # Update Redis cache
        self._update_redis_cache(norm_path, cid)

        logger.info(f"Registered path '{norm_path}' -> {cid[:16]}...")
        return entry

    def _save_to_db(self, entry: PathEntry):
        """Save entry to database using item_types table."""
        import database

        async def save():
            import asyncpg
            conn = await asyncpg.connect(database.DATABASE_URL)
            try:
                # Ensure cache_item exists
                await conn.execute(
                    "INSERT INTO cache_items (cid) VALUES ($1) ON CONFLICT DO NOTHING",
                    entry.cid
                )
                # Insert or update item_type with path
                await conn.execute(
                    """
                    INSERT INTO item_types (cid, actor_id, type, path, description)
                    VALUES ($1, $2, $3, $4, $5)
                    ON CONFLICT (cid, actor_id, type, path) DO UPDATE SET
                        description = COALESCE(EXCLUDED.description, item_types.description)
                    """,
                    entry.cid, entry.actor_id, entry.content_type, entry.path, entry.description
                )
            finally:
                await conn.close()

        try:
            self._run_async(save())
        except Exception as e:
            logger.warning(f"Failed to save path registry to DB: {e}")

    def _update_redis_cache(self, path: str, cid: str):
        """Update Redis cache with mapping."""
        if self._redis:
            try:
                self._redis.hset(self._redis_path_to_cid_key, path, cid)
                self._redis.hset(self._redis_cid_to_path_key, cid, path)
            except Exception as e:
                logger.warning(f"Failed to update Redis cache: {e}")

    def get_cid(self, path: str, content_type: str = None) -> Optional[str]:
        """
        Get the cid for a path.

        Args:
            path: Human-friendly path
            content_type: Optional type filter

        Returns:
            The cid, or None if not found
        """
        norm_path = self._normalize_path(path)

        # Try Redis first (fast path)
        if self._redis:
            try:
                val = self._redis.hget(self._redis_path_to_cid_key, norm_path)
                if val:
                    return val.decode() if isinstance(val, bytes) else val
            except Exception as e:
                logger.warning(f"Redis lookup failed: {e}")

        # Fall back to database
        return self._get_cid_from_db(norm_path, content_type)

    def _get_cid_from_db(self, path: str, content_type: str = None) -> Optional[str]:
        """Get cid from database using item_types table."""
        import database

        async def get():
            import asyncpg
            conn = await asyncpg.connect(database.DATABASE_URL)
            try:
                if content_type:
                    row = await conn.fetchrow(
                        "SELECT cid FROM item_types WHERE path = $1 AND type = $2",
                        path, content_type
                    )
                else:
                    row = await conn.fetchrow(
                        "SELECT cid FROM item_types WHERE path = $1",
                        path
                    )
                return row["cid"] if row else None
            finally:
                await conn.close()

        try:
            result = self._run_async(get())
            # Update Redis cache if found
            if result and self._redis:
                self._update_redis_cache(path, result)
            return result
        except Exception as e:
            logger.warning(f"Failed to get from DB: {e}")
            return None

    def get_path(self, cid: str) -> Optional[str]:
        """
        Get the path for a cid.

        Args:
            cid: Content-addressed ID

        Returns:
            The path, or None if not found
        """
        # Try Redis first
        if self._redis:
            try:
                val = self._redis.hget(self._redis_cid_to_path_key, cid)
                if val:
                    return val.decode() if isinstance(val, bytes) else val
            except Exception as e:
                logger.warning(f"Redis lookup failed: {e}")

        # Fall back to database
        return self._get_path_from_db(cid)

    def _get_path_from_db(self, cid: str) -> Optional[str]:
        """Get path from database using item_types table."""
        import database

        async def get():
            import asyncpg
            conn = await asyncpg.connect(database.DATABASE_URL)
            try:
                row = await conn.fetchrow(
                    "SELECT path FROM item_types WHERE cid = $1 AND path IS NOT NULL ORDER BY created_at LIMIT 1",
                    cid
                )
                return row["path"] if row else None
            finally:
                await conn.close()

        try:
            result = self._run_async(get())
            # Update Redis cache if found
            if result and self._redis:
                self._update_redis_cache(result, cid)
            return result
        except Exception as e:
            logger.warning(f"Failed to get from DB: {e}")
            return None

    def list_by_type(self, content_type: str, actor_id: str = None) -> List[PathEntry]:
        """
        List all entries of a given type.

        Args:
            content_type: Type to filter by ("effect", "recipe", etc.)
            actor_id: Optional actor filter (None = all, SYSTEM_ACTOR = global)

        Returns:
            List of PathEntry objects
        """
        import database

        async def list_entries():
            import asyncpg
            conn = await asyncpg.connect(database.DATABASE_URL)
            try:
                if actor_id:
                    rows = await conn.fetch(
                        """
                        SELECT cid, path, type, actor_id, description,
                               EXTRACT(EPOCH FROM created_at) as created_at
                        FROM item_types
                        WHERE type = $1 AND actor_id = $2 AND path IS NOT NULL
                        ORDER BY path
                        """,
                        content_type, actor_id
                    )
                else:
                    rows = await conn.fetch(
                        """
                        SELECT cid, path, type, actor_id, description,
                               EXTRACT(EPOCH FROM created_at) as created_at
                        FROM item_types
                        WHERE type = $1 AND path IS NOT NULL
                        ORDER BY path
                        """,
                        content_type
                    )
                return [
                    PathEntry(
                        path=row["path"],
                        cid=row["cid"],
                        content_type=row["type"],
                        actor_id=row["actor_id"],
                        description=row["description"],
                        created_at=row["created_at"] or 0,
                    )
                    for row in rows
                ]
            finally:
                await conn.close()

        try:
            return self._run_async(list_entries())
        except Exception as e:
            logger.warning(f"Failed to list from DB: {e}")
            return []

    def delete(self, path: str, content_type: str = None) -> bool:
        """
        Delete a path registration.

        Args:
            path: The path to delete
            content_type: Optional type filter

        Returns:
            True if deleted, False if not found
        """
        norm_path = self._normalize_path(path)

        # Get cid for Redis cleanup
        cid = self.get_cid(norm_path, content_type)

        # Delete from database
        deleted = self._delete_from_db(norm_path, content_type)

        # Clean up Redis
        if deleted and cid and self._redis:
            try:
                self._redis.hdel(self._redis_path_to_cid_key, norm_path)
                self._redis.hdel(self._redis_cid_to_path_key, cid)
            except Exception as e:
                logger.warning(f"Failed to clean up Redis: {e}")

        return deleted

    def _delete_from_db(self, path: str, content_type: str = None) -> bool:
        """Delete from database."""
        import database

        async def delete():
            import asyncpg
            conn = await asyncpg.connect(database.DATABASE_URL)
            try:
                if content_type:
                    result = await conn.execute(
                        "DELETE FROM item_types WHERE path = $1 AND type = $2",
                        path, content_type
                    )
                else:
                    result = await conn.execute(
                        "DELETE FROM item_types WHERE path = $1",
                        path
                    )
                return "DELETE" in result
            finally:
                await conn.close()

        try:
            return self._run_async(delete())
        except Exception as e:
            logger.warning(f"Failed to delete from DB: {e}")
            return False

    def register_effect(
        self,
        path: str,
        cid: str,
        description: Optional[str] = None,
    ) -> PathEntry:
        """
        Convenience method to register an effect.

        Args:
            path: Effect path (e.g., "effects/ascii_fx_zone.sexp")
            cid: IPFS CID of the effect file
            description: Optional description

        Returns:
            The created PathEntry
        """
        return self.register(
            path=path,
            cid=cid,
            content_type="effect",
            actor_id=SYSTEM_ACTOR,
            description=description,
        )

    def get_effect_cid(self, path: str) -> Optional[str]:
        """
        Get CID for an effect by path.

        Args:
            path: Effect path

        Returns:
            IPFS CID or None
        """
        return self.get_cid(path, content_type="effect")

    def list_effects(self) -> List[PathEntry]:
        """List all registered effects."""
        return self.list_by_type("effect", actor_id=SYSTEM_ACTOR)


# Singleton instance
_registry: Optional[PathRegistry] = None


def get_path_registry() -> PathRegistry:
    """Get the singleton path registry instance."""
    global _registry
    if _registry is None:
        import redis
        from urllib.parse import urlparse

        redis_url = os.environ.get('REDIS_URL', 'redis://localhost:6379/5')
        parsed = urlparse(redis_url)
        redis_client = redis.Redis(
            host=parsed.hostname or 'localhost',
            port=parsed.port or 6379,
            db=int(parsed.path.lstrip('/') or 0),
            socket_timeout=5,
            socket_connect_timeout=5
        )

        _registry = PathRegistry(redis_client=redis_client)
    return _registry


def reset_path_registry():
    """Reset the singleton (for testing)."""
    global _registry
    _registry = None
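A minimal usage sketch for the registry above, assuming Redis is reachable via REDIS_URL and Postgres via database.DATABASE_URL; the path and CID values are placeholders:

# Sketch only: the CID string below is a placeholder, not a real object.
from path_registry import get_path_registry

registry = get_path_registry()

# Register an effect globally (system actor) under a friendly path.
registry.register_effect(
    "effects/ascii_fx_zone.sexp",
    "bafyexamplecid0000000000000000000000000000000000",  # placeholder
    description="ASCII zone effect",
)

# Lookups try Redis first, then fall back to the item_types table.
cid = registry.get_effect_cid("effects/ascii_fx_zone.sexp")
path = registry.get_path(cid)  # reverse mapping, useful for debugging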
223 recipes/woods-lowres.sexp Normal file
@@ -0,0 +1,223 @@
;; Woods Recipe - OPTIMIZED VERSION
;;
;; Uses fused-pipeline for GPU acceleration when available,
;; falls back to individual primitives on CPU.
;;
;; Key optimizations:
;; 1. Uses streaming_gpu primitives with fast CUDA kernels
;; 2. Uses fused-pipeline to batch effects into single kernel passes
;; 3. GPU persistence - frames stay on GPU throughout pipeline

(stream "woods-lowres"
  :fps 30
  :width 640
  :height 360
  :seed 42

  ;; Load standard primitives (includes proper asset resolution)
  ;; Auto-selects GPU versions when available, falls back to CPU
  (include :name "tpl-standard-primitives")

  ;; === SOURCES (using streaming: which has proper asset resolution) ===
  (def sources [
    (streaming:make-video-source "woods-1" 30)
    (streaming:make-video-source "woods-2" 30)
    (streaming:make-video-source "woods-3" 30)
    (streaming:make-video-source "woods-4" 30)
    (streaming:make-video-source "woods-5" 30)
    (streaming:make-video-source "woods-6" 30)
    (streaming:make-video-source "woods-7" 30)
    (streaming:make-video-source "woods-8" 30)
  ])

  ;; Per-pair config
  (def pair-configs [
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
  ])

  ;; Audio
  (def music (streaming:make-audio-analyzer "woods-audio"))
  (audio-playback "woods-audio")

  ;; === SCANS ===

  ;; Cycle state
  (scan cycle (streaming:audio-beat music t)
    :init {:active 0 :beat 0 :clen 16}
    :step (if (< (+ beat 1) clen)
            (dict :active active :beat (+ beat 1) :clen clen)
            (dict :active (mod (+ active 1) (len sources)) :beat 0
                  :clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))

  ;; Spin scan
  (scan spin (streaming:audio-beat music t)
    :init {:angle 0 :dir 1 :speed 2}
    :step (let [new-dir (if (< (core:rand) 0.05) (* dir -1) dir)
                new-speed (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) speed)]
            (dict :angle (+ angle (* new-dir new-speed))
                  :dir new-dir
                  :speed new-speed)))

  ;; Ripple scan - raindrop style, all params randomized
  ;; Higher freq = bigger gaps between waves (formula is dist/freq)
  (scan ripple-state (streaming:audio-beat music t)
    :init {:gate 0 :cx 320 :cy 180 :freq 20 :decay 6 :amp-mult 1.0}
    :step (let [new-gate (if (< (core:rand) 0.2) (+ 2 (core:rand-int 0 4)) (core:max 0 (- gate 1)))
                triggered (> new-gate gate)
                new-cx (if triggered (core:rand-int 50 590) cx)
                new-cy (if triggered (core:rand-int 50 310) cy)
                new-freq (if triggered (+ 15 (core:rand-int 0 20)) freq)
                new-decay (if triggered (+ 5 (core:rand-int 0 4)) decay)
                new-amp-mult (if triggered (+ 0.8 (* (core:rand) 1.2)) amp-mult)]
            (dict :gate new-gate :cx new-cx :cy new-cy :freq new-freq :decay new-decay :amp-mult new-amp-mult)))

  ;; Pair states
  (scan pairs (streaming:audio-beat music t)
    :init {:states (map (core:range (len sources)) (lambda (_)
            {:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
    :step (dict :states (map states (lambda (p)
            (let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
                  new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
                  old-hue-a (get p :hue-a)
                  old-hue-b (get p :hue-b)
                  new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
                  new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
                  new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
                  new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
                  mix-rem (get p :mix-rem)
                  old-mix (get p :mix)
                  new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
                  new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
                  rot-beat (get p :rot-beat)
                  rot-clen (get p :rot-clen)
                  old-angle (get p :angle)
                  new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
                  new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
                  new-angle (+ old-angle (/ 360 rot-clen))]
              (dict :inv-a new-inv-a :inv-b new-inv-b
                    :hue-a new-hue-a :hue-b new-hue-b
                    :hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
                    :mix new-mix :mix-rem new-mix-rem
                    :angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))

  ;; === OPTIMIZED PROCESS-PAIR MACRO ===
  ;; Uses fused-pipeline to batch rotate+hue+invert into single kernel
  (defmacro process-pair-fast (idx)
    (let [;; Get sources for this pair (with safe modulo indexing)
          num-sources (len sources)
          src-a (nth sources (mod (* idx 2) num-sources))
          src-b (nth sources (mod (+ (* idx 2) 1) num-sources))
          cfg (nth pair-configs idx)
          pstate (nth (bind pairs :states) idx)

          ;; Read frames (GPU decode, stays on GPU)
          frame-a (streaming:source-read src-a t)
          frame-b (streaming:source-read src-b t)

          ;; Get state values
          dir (get cfg :dir)
          rot-max-a (get cfg :rot-a)
          rot-max-b (get cfg :rot-b)
          zoom-max-a (get cfg :zoom-a)
          zoom-max-b (get cfg :zoom-b)
          pair-angle (get pstate :angle)
          inv-a-on (> (get pstate :inv-a) 0)
          inv-b-on (> (get pstate :inv-b) 0)
          hue-a-on (> (get pstate :hue-a) 0)
          hue-b-on (> (get pstate :hue-b) 0)
          hue-a-val (get pstate :hue-a-val)
          hue-b-val (get pstate :hue-b-val)
          mix-ratio (get pstate :mix)

          ;; Calculate rotation angles
          angle-a (* dir pair-angle rot-max-a 0.01)
          angle-b (* dir pair-angle rot-max-b 0.01)

          ;; Energy-driven zoom (maps audio energy 0-1 to 1-max)
          zoom-a (core:map-range e 0 1 1 zoom-max-a)
          zoom-b (core:map-range e 0 1 1 zoom-max-b)

          ;; Define effect pipelines for each source
          ;; These get compiled to single CUDA kernels!
          ;; First resize to target resolution, then apply effects
          effects-a [{:op "resize" :width 640 :height 360}
                     {:op "zoom" :amount zoom-a}
                     {:op "rotate" :angle angle-a}
                     {:op "hue_shift" :degrees (if hue-a-on hue-a-val 0)}
                     {:op "invert" :amount (if inv-a-on 1 0)}]
          effects-b [{:op "resize" :width 640 :height 360}
                     {:op "zoom" :amount zoom-b}
                     {:op "rotate" :angle angle-b}
                     {:op "hue_shift" :degrees (if hue-b-on hue-b-val 0)}
                     {:op "invert" :amount (if inv-b-on 1 0)}]

          ;; Apply fused pipelines (single kernel per source!)
          processed-a (streaming:fused-pipeline frame-a effects-a)
          processed-b (streaming:fused-pipeline frame-b effects-b)]

      ;; Blend the two processed frames
      (blending:blend-images processed-a processed-b mix-ratio)))

  ;; === FRAME PIPELINE ===
  (frame
    (let [now t
          e (streaming:audio-energy music now)

          ;; Get cycle state
          active (bind cycle :active)
          beat-pos (bind cycle :beat)
          clen (bind cycle :clen)

          ;; Transition logic
          phase3 (* beat-pos 3)
          fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
          fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
          next-idx (mod (+ active 1) (len sources))

          ;; Process active pair with fused pipeline
          active-frame (process-pair-fast active)

          ;; Crossfade with zoom during transition
          ;; Old pair: zooms out (1.0 -> 2.0) and fades out
          ;; New pair: starts small (0.1), zooms in (-> 1.0) and fades in
          result (if fading
                   (let [next-frame (process-pair-fast next-idx)
                         ;; Active zooms out as it fades
                         active-zoom (+ 1.0 fade-amt)
                         active-zoomed (streaming:fused-pipeline active-frame
                                         [{:op "zoom" :amount active-zoom}])
                         ;; Next starts small and zooms in
                         next-zoom (+ 0.1 (* fade-amt 0.9))
                         next-zoomed (streaming:fused-pipeline next-frame
                                       [{:op "zoom" :amount next-zoom}])]
                     (blending:blend-images active-zoomed next-zoomed fade-amt))
                   active-frame)

          ;; Final effects pipeline (fused!)
          spin-angle (bind spin :angle)
          ;; Ripple params - all randomized per ripple trigger
          rip-gate (bind ripple-state :gate)
          rip-amp-mult (bind ripple-state :amp-mult)
          rip-amp (* rip-gate rip-amp-mult (core:map-range e 0 1 50 200))
          rip-cx (bind ripple-state :cx)
          rip-cy (bind ripple-state :cy)
          rip-freq (bind ripple-state :freq)
          rip-decay (bind ripple-state :decay)

          ;; Fused final effects
          final-effects [{:op "rotate" :angle spin-angle}
                         {:op "ripple" :amplitude rip-amp :frequency rip-freq :decay rip-decay
                          :phase (* now 5) :center_x rip-cx :center_y rip-cy}]]

      ;; Apply final fused pipeline
      (streaming:fused-pipeline result final-effects
        :rotate_angle spin-angle
        :ripple_phase (* now 5)
        :ripple_amplitude rip-amp))))
211 recipes/woods-recipe-optimized.sexp Normal file
@@ -0,0 +1,211 @@
;; Woods Recipe - OPTIMIZED VERSION
;;
;; Uses fused-pipeline for GPU acceleration when available,
;; falls back to individual primitives on CPU.
;;
;; Key optimizations:
;; 1. Uses streaming_gpu primitives with fast CUDA kernels
;; 2. Uses fused-pipeline to batch effects into single kernel passes
;; 3. GPU persistence - frames stay on GPU throughout pipeline

(stream "woods-recipe-optimized"
  :fps 30
  :width 1920
  :height 1080
  :seed 42

  ;; Load standard primitives (includes proper asset resolution)
  ;; Auto-selects GPU versions when available, falls back to CPU
  (include :name "tpl-standard-primitives")

  ;; === SOURCES (using streaming: which has proper asset resolution) ===
  (def sources [
    (streaming:make-video-source "woods-1" 30)
    (streaming:make-video-source "woods-2" 30)
    (streaming:make-video-source "woods-3" 30)
    (streaming:make-video-source "woods-4" 30)
    (streaming:make-video-source "woods-5" 30)
    (streaming:make-video-source "woods-6" 30)
    (streaming:make-video-source "woods-7" 30)
    (streaming:make-video-source "woods-8" 30)
  ])

  ;; Per-pair config
  (def pair-configs [
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
  ])

  ;; Audio
  (def music (streaming:make-audio-analyzer "woods-audio"))
  (audio-playback "woods-audio")

  ;; === SCANS ===

  ;; Cycle state
  (scan cycle (streaming:audio-beat music t)
    :init {:active 0 :beat 0 :clen 16}
    :step (if (< (+ beat 1) clen)
            (dict :active active :beat (+ beat 1) :clen clen)
            (dict :active (mod (+ active 1) (len sources)) :beat 0
                  :clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))

  ;; Spin scan
  (scan spin (streaming:audio-beat music t)
    :init {:angle 0 :dir 1 :speed 2}
    :step (let [new-dir (if (< (core:rand) 0.05) (* dir -1) dir)
                new-speed (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) speed)]
            (dict :angle (+ angle (* new-dir new-speed))
                  :dir new-dir
                  :speed new-speed)))

  ;; Ripple scan
  (scan ripple-state (streaming:audio-beat music t)
    :init {:gate 0 :cx 960 :cy 540}
    :step (let [new-gate (if (< (core:rand) 0.15) (+ 3 (core:rand-int 0 5)) (core:max 0 (- gate 1)))
                new-cx (if (> new-gate gate) (+ 200 (core:rand-int 0 1520)) cx)
                new-cy (if (> new-gate gate) (+ 200 (core:rand-int 0 680)) cy)]
            (dict :gate new-gate :cx new-cx :cy new-cy)))

  ;; Pair states
  (scan pairs (streaming:audio-beat music t)
    :init {:states (map (core:range (len sources)) (lambda (_)
            {:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
    :step (dict :states (map states (lambda (p)
            (let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
                  new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
                  old-hue-a (get p :hue-a)
                  old-hue-b (get p :hue-b)
                  new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
                  new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
                  new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
                  new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
                  mix-rem (get p :mix-rem)
                  old-mix (get p :mix)
                  new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
                  new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
                  rot-beat (get p :rot-beat)
                  rot-clen (get p :rot-clen)
                  old-angle (get p :angle)
                  new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
                  new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
                  new-angle (+ old-angle (/ 360 rot-clen))]
              (dict :inv-a new-inv-a :inv-b new-inv-b
                    :hue-a new-hue-a :hue-b new-hue-b
                    :hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
                    :mix new-mix :mix-rem new-mix-rem
                    :angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))

  ;; === OPTIMIZED PROCESS-PAIR MACRO ===
  ;; Uses fused-pipeline to batch rotate+hue+invert into single kernel
  (defmacro process-pair-fast (idx)
    (let [;; Get sources for this pair (with safe modulo indexing)
          num-sources (len sources)
          src-a (nth sources (mod (* idx 2) num-sources))
          src-b (nth sources (mod (+ (* idx 2) 1) num-sources))
          cfg (nth pair-configs idx)
          pstate (nth (bind pairs :states) idx)

          ;; Read frames (GPU decode, stays on GPU)
          frame-a (streaming:source-read src-a t)
          frame-b (streaming:source-read src-b t)

          ;; Get state values
          dir (get cfg :dir)
          rot-max-a (get cfg :rot-a)
          rot-max-b (get cfg :rot-b)
          zoom-max-a (get cfg :zoom-a)
          zoom-max-b (get cfg :zoom-b)
          pair-angle (get pstate :angle)
          inv-a-on (> (get pstate :inv-a) 0)
          inv-b-on (> (get pstate :inv-b) 0)
          hue-a-on (> (get pstate :hue-a) 0)
          hue-b-on (> (get pstate :hue-b) 0)
          hue-a-val (get pstate :hue-a-val)
          hue-b-val (get pstate :hue-b-val)
          mix-ratio (get pstate :mix)

          ;; Calculate rotation angles
          angle-a (* dir pair-angle rot-max-a 0.01)
          angle-b (* dir pair-angle rot-max-b 0.01)

          ;; Energy-driven zoom (maps audio energy 0-1 to 1-max)
          zoom-a (core:map-range e 0 1 1 zoom-max-a)
          zoom-b (core:map-range e 0 1 1 zoom-max-b)

          ;; Define effect pipelines for each source
          ;; These get compiled to single CUDA kernels!
          effects-a [{:op "zoom" :amount zoom-a}
                     {:op "rotate" :angle angle-a}
                     {:op "hue_shift" :degrees (if hue-a-on hue-a-val 0)}
                     {:op "invert" :amount (if inv-a-on 1 0)}]
          effects-b [{:op "zoom" :amount zoom-b}
                     {:op "rotate" :angle angle-b}
                     {:op "hue_shift" :degrees (if hue-b-on hue-b-val 0)}
                     {:op "invert" :amount (if inv-b-on 1 0)}]

          ;; Apply fused pipelines (single kernel per source!)
          processed-a (streaming:fused-pipeline frame-a effects-a)
          processed-b (streaming:fused-pipeline frame-b effects-b)]

      ;; Blend the two processed frames
      (blending:blend-images processed-a processed-b mix-ratio)))

  ;; === FRAME PIPELINE ===
  (frame
    (let [now t
          e (streaming:audio-energy music now)

          ;; Get cycle state
          active (bind cycle :active)
          beat-pos (bind cycle :beat)
          clen (bind cycle :clen)

          ;; Transition logic
          phase3 (* beat-pos 3)
          fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
          fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
          next-idx (mod (+ active 1) (len sources))

          ;; Process active pair with fused pipeline
          active-frame (process-pair-fast active)

          ;; Crossfade with zoom during transition
          ;; Old pair: zooms out (1.0 -> 2.0) and fades out
          ;; New pair: starts small (0.1), zooms in (-> 1.0) and fades in
          result (if fading
                   (let [next-frame (process-pair-fast next-idx)
                         ;; Active zooms out as it fades
                         active-zoom (+ 1.0 fade-amt)
                         active-zoomed (streaming:fused-pipeline active-frame
                                         [{:op "zoom" :amount active-zoom}])
                         ;; Next starts small and zooms in
                         next-zoom (+ 0.1 (* fade-amt 0.9))
                         next-zoomed (streaming:fused-pipeline next-frame
                                       [{:op "zoom" :amount next-zoom}])]
                     (blending:blend-images active-zoomed next-zoomed fade-amt))
                   active-frame)

          ;; Final effects pipeline (fused!)
          spin-angle (bind spin :angle)
          rip-gate (bind ripple-state :gate)
          rip-amp (* rip-gate (core:map-range e 0 1 5 50))
          rip-cx (bind ripple-state :cx)
          rip-cy (bind ripple-state :cy)

          ;; Fused final effects
          final-effects [{:op "rotate" :angle spin-angle}
                         {:op "ripple" :amplitude rip-amp :frequency 8 :decay 2
                          :phase (* now 5) :center_x rip-cx :center_y rip-cy}]]

      ;; Apply final fused pipeline
      (streaming:fused-pipeline result final-effects
        :rotate_angle spin-angle
        :ripple_phase (* now 5)
        :ripple_amplitude rip-amp))))
134 recipes/woods-recipe.sexp Normal file
@@ -0,0 +1,134 @@
;; Woods Recipe - Using friendly names for all assets
;;
;; Requires uploaded:
;; - Media: woods-1 through woods-8 (videos), woods-audio (audio)
;; - Effects: fx-rotate, fx-zoom, fx-blend, fx-ripple, fx-invert, fx-hue-shift
;; - Templates: tpl-standard-primitives, tpl-standard-effects, tpl-process-pair,
;;   tpl-crossfade-zoom, tpl-scan-spin, tpl-scan-ripple

(stream "woods-recipe"
  :fps 30
  :width 1920
  :height 1080
  :seed 42

  ;; Load standard primitives and effects via friendly names
  (include :name "tpl-standard-primitives")
  (include :name "tpl-standard-effects")

  ;; Load reusable templates
  (include :name "tpl-process-pair")
  (include :name "tpl-crossfade-zoom")

  ;; === SOURCES AS ARRAY (using friendly names) ===
  (def sources [
    (streaming:make-video-source "woods-1" 30)
    (streaming:make-video-source "woods-2" 30)
    (streaming:make-video-source "woods-3" 30)
    (streaming:make-video-source "woods-4" 30)
    (streaming:make-video-source "woods-5" 30)
    (streaming:make-video-source "woods-6" 30)
    (streaming:make-video-source "woods-7" 30)
    (streaming:make-video-source "woods-8" 30)
  ])

  ;; Per-pair config: [rot-dir, rot-a-max, rot-b-max, zoom-a-max, zoom-b-max]
  (def pair-configs [
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
    {:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
    {:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
    {:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
    {:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
  ])

  ;; Audio analyzer (using friendly name)
  (def music (streaming:make-audio-analyzer "woods-audio"))

  ;; Audio playback (friendly name resolved by streaming primitives)
  (audio-playback "woods-audio")

  ;; === GLOBAL SCANS ===

  ;; Cycle state: which source is active
  (scan cycle (streaming:audio-beat music t)
    :init {:active 0 :beat 0 :clen 16}
    :step (if (< (+ beat 1) clen)
            (dict :active active :beat (+ beat 1) :clen clen)
            (dict :active (mod (+ active 1) (len sources)) :beat 0
                  :clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))

  ;; Reusable scans from templates
  (include :name "tpl-scan-spin")
  (include :name "tpl-scan-ripple")

  ;; === PER-PAIR STATE ===
  (scan pairs (streaming:audio-beat music t)
    :init {:states (map (core:range (len sources)) (lambda (_)
            {:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
    :step (dict :states (map states (lambda (p)
            (let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
                  new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
                  old-hue-a (get p :hue-a)
                  old-hue-b (get p :hue-b)
                  new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
                  new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
                  new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
                  new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
                  mix-rem (get p :mix-rem)
                  old-mix (get p :mix)
                  new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
                  new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
                  rot-beat (get p :rot-beat)
                  rot-clen (get p :rot-clen)
                  old-angle (get p :angle)
                  new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
                  new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
                  new-angle (+ old-angle (/ 360 rot-clen))]
              (dict :inv-a new-inv-a :inv-b new-inv-b
                    :hue-a new-hue-a :hue-b new-hue-b
                    :hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
                    :mix new-mix :mix-rem new-mix-rem
                    :angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))

  ;; === FRAME PIPELINE ===
  (frame
    (let [now t
          e (streaming:audio-energy music now)

          ;; Get cycle state
          active (bind cycle :active)
          beat-pos (bind cycle :beat)
          clen (bind cycle :clen)

          ;; Transition logic
          phase3 (* beat-pos 3)
          fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
          fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
          next-idx (mod (+ active 1) (len sources))

          ;; Get pair states array
          pair-states (bind pairs :states)

          ;; Process active pair using macro from template
          active-frame (process-pair active)

          ;; Crossfade with zoom during transition
          result (if fading
                   (crossfade-zoom active-frame (process-pair next-idx) fade-amt)
                   active-frame)

          ;; Final: global spin + ripple
          spun (rotate result :angle (bind spin :angle))
          rip-gate (bind ripple-state :gate)
          rip-amp (* rip-gate (core:map-range e 0 1 5 50))]

      (ripple spun
        :amplitude rip-amp
        :center_x (bind ripple-state :cx)
        :center_y (bind ripple-state :cy)
        :frequency 8
        :decay 2
        :speed 5))))
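On the Python side, the fused-pipeline op lists these recipes build are plain dicts; a minimal sketch of the equivalent structure (the helper name and signature are illustrative, not a confirmed API):

# Sketch only: helper name and signature are illustrative.
from typing import Any, Dict, List

def pair_effect_ops(zoom: float, angle: float, hue_on: bool, hue_degrees: float,
                    invert_on: bool) -> List[Dict[str, Any]]:
    """Mirrors the effects-a / effects-b op lists in the recipes above."""
    return [
        {"op": "zoom", "amount": zoom},
        {"op": "rotate", "angle": angle},
        {"op": "hue_shift", "degrees": hue_degrees if hue_on else 0},
        {"op": "invert", "amount": 1 if invert_on else 0},
    ]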
65 render.py
@@ -1,65 +0,0 @@
#!/usr/bin/env python3
"""
CLI to submit render tasks to Art DAG Celery.

Usage:
    python render.py dog cat       # Render cat through dog effect
    python render.py identity cat  # Render cat through identity effect
    python render.py <effect> <input>  # General form
"""

import argparse
import json
import sys

from legacy_tasks import render_effect

# Known asset hashes
ASSETS = {
    "cat": "33268b6e167deaf018cc538de12dbe562612b33e89a749391cef855b320a269b",
}


def main():
    parser = argparse.ArgumentParser(description="Submit render task to Art DAG Celery")
    parser.add_argument("effect", help="Effect to apply (e.g., dog, identity)")
    parser.add_argument("input", help="Input asset name or hash")
    parser.add_argument("--output", "-o", help="Output name (default: <effect>-from-<input>)")
    parser.add_argument("--sync", "-s", action="store_true", help="Wait for result")

    args = parser.parse_args()

    # Resolve input to hash
    input_hash = ASSETS.get(args.input, args.input)
    if len(input_hash) != 64:
        print(f"Error: Unknown asset '{args.input}' and not a valid hash")
        sys.exit(1)

    # Generate output name
    output_name = args.output or f"{args.effect}-from-{args.input}-celery"

    print(f"Submitting render task:")
    print(f"  Effect: {args.effect}")
    print(f"  Input:  {args.input} ({input_hash[:16]}...)")
    print(f"  Output: {output_name}")

    # Submit task
    task = render_effect.delay(input_hash, args.effect, output_name)
    print(f"  Task ID: {task.id}")

    if args.sync:
        print("\nWaiting for result...")
        try:
            result = task.get(timeout=300)
            print("\nRender complete!")
            print(json.dumps(result, indent=2))
        except Exception as e:
            print(f"\nRender failed: {e}")
            sys.exit(1)
    else:
        print("\nTask submitted. Check status with:")
        print(f"  celery -A celery_app inspect query_task {task.id}")


if __name__ == "__main__":
    main()
@@ -15,3 +15,5 @@ opencv-python-headless>=4.8.0
git+https://github.com/gilesbradshaw/art-dag.git@main
# Shared components (tracks master branch)
git+https://git.rose-ash.com/art-dag/common.git@master
psycopg2-binary
nest_asyncio
77 scripts/cloud-init-gpu.sh Normal file
@@ -0,0 +1,77 @@
#!/bin/bash
# Cloud-init startup script for GPU droplet (RTX 6000 Ada, etc.)
# Paste this into DigitalOcean "User data" field when creating droplet

set -e
export DEBIAN_FRONTEND=noninteractive
exec > /var/log/artdag-setup.log 2>&1

echo "=== ArtDAG GPU Setup Started $(date) ==="

# Update system (non-interactive, keep existing configs)
apt-get update
apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" upgrade

# Install essentials
apt-get install -y \
    python3 python3-venv python3-pip \
    git curl wget \
    ffmpeg \
    vulkan-tools \
    build-essential

# Create venv
VENV_DIR="/opt/artdag-gpu"
python3 -m venv "$VENV_DIR"
source "$VENV_DIR/bin/activate"

# Install Python packages
pip install --upgrade pip
pip install \
    numpy \
    opencv-python-headless \
    wgpu \
    httpx \
    pyyaml \
    celery[redis] \
    fastapi \
    uvicorn \
    asyncpg

# Create code directory
mkdir -p "$VENV_DIR/celery/sexp_effects/effects"
mkdir -p "$VENV_DIR/celery/sexp_effects/primitive_libs"
mkdir -p "$VENV_DIR/celery/streaming"

# Add SSH key for easier access (optional - add your key here)
# echo "ssh-ed25519 AAAA... your-key" >> /root/.ssh/authorized_keys

# Test GPU
echo "=== GPU Info ==="
nvidia-smi || echo "nvidia-smi not available yet"

echo "=== NVENC Check ==="
ffmpeg -encoders 2>/dev/null | grep -E "nvenc|cuda" || echo "NVENC not detected"

echo "=== wgpu Check ==="
"$VENV_DIR/bin/python3" -c "
import wgpu
try:
    adapter = wgpu.gpu.request_adapter_sync(power_preference='high-performance')
    print(f'GPU: {adapter.info}')
except Exception as e:
    print(f'wgpu error: {e}')
" || echo "wgpu test failed"

# Add environment setup
cat >> /etc/profile.d/artdag-gpu.sh << 'ENVEOF'
export WGPU_BACKEND_TYPE=Vulkan
export PATH="/opt/artdag-gpu/bin:$PATH"
ENVEOF

# Mark setup complete
touch /opt/artdag-gpu/.setup-complete
echo "=== Setup Complete $(date) ==="
echo "Venv: /opt/artdag-gpu"
echo "Activate: source /opt/artdag-gpu/bin/activate"
echo "Vulkan: export WGPU_BACKEND_TYPE=Vulkan"
51 scripts/deploy-to-gpu.sh Executable file
@@ -0,0 +1,51 @@
#!/bin/bash
# Deploy art-dag GPU code to a remote droplet
# Usage: ./deploy-to-gpu.sh <droplet-ip>

set -e

if [ -z "$1" ]; then
    echo "Usage: $0 <droplet-ip>"
    echo "Example: $0 159.223.7.100"
    exit 1
fi

DROPLET_IP="$1"
REMOTE_DIR="/opt/artdag-gpu/celery"
LOCAL_DIR="$(dirname "$0")/.."

echo "=== Deploying to $DROPLET_IP ==="

# Create remote directory
echo "[1/4] Creating remote directory..."
ssh "root@$DROPLET_IP" "mkdir -p $REMOTE_DIR/sexp_effects $REMOTE_DIR/streaming $REMOTE_DIR/scripts"

# Copy core files
echo "[2/4] Copying core files..."
scp "$LOCAL_DIR/sexp_effects/wgsl_compiler.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
scp "$LOCAL_DIR/sexp_effects/parser.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
scp "$LOCAL_DIR/sexp_effects/interpreter.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
scp "$LOCAL_DIR/sexp_effects/__init__.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
scp "$LOCAL_DIR/streaming/backends.py" "root@$DROPLET_IP:$REMOTE_DIR/streaming/"

# Copy effects
echo "[3/4] Copying effects..."
ssh "root@$DROPLET_IP" "mkdir -p $REMOTE_DIR/sexp_effects/effects $REMOTE_DIR/sexp_effects/primitive_libs"
scp -r "$LOCAL_DIR/sexp_effects/effects/"*.sexp "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/effects/" 2>/dev/null || true
scp -r "$LOCAL_DIR/sexp_effects/primitive_libs/"*.py "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/primitive_libs/" 2>/dev/null || true

# Test
echo "[4/4] Testing deployment..."
ssh "root@$DROPLET_IP" "cd $REMOTE_DIR && /opt/artdag-gpu/bin/python3 -c '
import sys
sys.path.insert(0, \".\")
from sexp_effects.wgsl_compiler import compile_effect_file
result = compile_effect_file(\"sexp_effects/effects/invert.sexp\")
print(f\"Compiled effect: {result.name}\")
print(\"Deployment OK\")
'" || echo "Test failed - may need to run setup script first"

echo ""
echo "=== Deployment complete ==="
echo "SSH: ssh root@$DROPLET_IP"
echo "Test: ssh root@$DROPLET_IP 'cd $REMOTE_DIR && /opt/artdag-gpu/bin/python3 -c \"from streaming.backends import get_backend; b=get_backend(\\\"wgpu\\\"); print(b)\"'"

34 scripts/gpu-dev-deploy.sh Executable file
@@ -0,0 +1,34 @@
#!/bin/bash
# Quick deploy to GPU node with hot reload
# Usage: ./scripts/gpu-dev-deploy.sh

set -e

GPU_HOST="${GPU_HOST:-root@138.197.163.123}"
REMOTE_DIR="/root/art-dag/celery"

echo "=== GPU Dev Deploy ==="
echo "Syncing code to $GPU_HOST..."

# Sync code (excluding cache, git, __pycache__)
rsync -avz --delete \
    --exclude '.git' \
    --exclude '__pycache__' \
    --exclude '*.pyc' \
    --exclude '.pytest_cache' \
    --exclude 'node_modules' \
    --exclude '.env' \
    ./ "$GPU_HOST:$REMOTE_DIR/"

echo "Restarting GPU worker..."
ssh "$GPU_HOST" "docker kill \$(docker ps -q -f name=l1-gpu-worker) 2>/dev/null || true"

echo "Waiting for new container..."
sleep 10

# Show new container logs
ssh "$GPU_HOST" "docker logs --tail 30 \$(docker ps -q -f name=l1-gpu-worker)"

echo ""
echo "=== Deploy Complete ==="
echo "Use 'ssh $GPU_HOST docker logs -f \$(docker ps -q -f name=l1-gpu-worker)' to follow logs"

108 scripts/setup-gpu-droplet.sh Executable file
@@ -0,0 +1,108 @@
#!/bin/bash
# Setup script for GPU droplet with NVENC support
# Run as root on a fresh Ubuntu droplet with NVIDIA GPU

set -e

echo "=== ArtDAG GPU Droplet Setup ==="

# 1. System updates
echo "[1/7] Updating system..."
apt-get update
apt-get upgrade -y

# 2. Install NVIDIA drivers (if not already installed)
echo "[2/7] Checking NVIDIA drivers..."
if ! command -v nvidia-smi &> /dev/null; then
    echo "Installing NVIDIA drivers..."
    apt-get install -y nvidia-driver-535 nvidia-utils-535
    echo "NVIDIA drivers installed. Reboot required."
    echo "After reboot, run this script again."
    exit 0
fi

nvidia-smi
echo "NVIDIA drivers OK"

# 3. Install FFmpeg with NVENC support
echo "[3/7] Installing FFmpeg with NVENC..."
apt-get install -y ffmpeg

# Verify NVENC
if ffmpeg -encoders 2>/dev/null | grep -q nvenc; then
    echo "NVENC available:"
    ffmpeg -encoders 2>/dev/null | grep nvenc
else
    echo "WARNING: NVENC not available. GPU may not support hardware encoding."
fi

# 4. Install Python and create venv
echo "[4/7] Setting up Python environment..."
apt-get install -y python3 python3-venv python3-pip git

VENV_DIR="/opt/artdag-gpu"
python3 -m venv "$VENV_DIR"
source "$VENV_DIR/bin/activate"

# 5. Install Python dependencies
echo "[5/7] Installing Python packages..."
pip install --upgrade pip
pip install \
    numpy \
    opencv-python-headless \
    wgpu \
    httpx \
    pyyaml \
    celery[redis] \
    fastapi \
    uvicorn

# 6. Clone/update art-dag code
echo "[6/7] Setting up art-dag code..."
ARTDAG_DIR="$VENV_DIR/celery"
if [ -d "$ARTDAG_DIR" ]; then
    echo "Updating existing code..."
    cd "$ARTDAG_DIR"
    git pull || true
else
    echo "Cloning art-dag..."
    git clone https://git.rose-ash.com/art-dag/celery.git "$ARTDAG_DIR" || {
        echo "Git clone failed. You may need to copy code manually."
    }
fi

# 7. Test GPU compute
echo "[7/7] Testing GPU compute..."
"$VENV_DIR/bin/python3" << 'PYTEST'
import sys
try:
    import wgpu
    adapter = wgpu.gpu.request_adapter_sync(power_preference="high-performance")
    print(f"GPU Adapter: {adapter.info.get('device', 'unknown')}")
    device = adapter.request_device_sync()
    print("wgpu device created successfully")

    # Check for NVENC via FFmpeg
    import subprocess
    result = subprocess.run(['ffmpeg', '-encoders'], capture_output=True, text=True)
    if 'h264_nvenc' in result.stdout:
        print("NVENC H.264 encoder: AVAILABLE")
    else:
        print("NVENC H.264 encoder: NOT AVAILABLE")
    if 'hevc_nvenc' in result.stdout:
        print("NVENC HEVC encoder: AVAILABLE")
    else:
        print("NVENC HEVC encoder: NOT AVAILABLE")

except Exception as e:
    print(f"Error: {e}")
    sys.exit(1)
PYTEST

echo ""
echo "=== Setup Complete ==="
echo "Venv: $VENV_DIR"
echo "Code: $ARTDAG_DIR"
echo ""
echo "To activate: source $VENV_DIR/bin/activate"
echo "To test: cd $ARTDAG_DIR && python -c 'from streaming.backends import get_backend; print(get_backend(\"wgpu\"))'"

32 sexp_effects/__init__.py Normal file
@@ -0,0 +1,32 @@
"""
S-Expression Effects System

Safe, shareable effects defined in S-expressions.
"""

from .parser import parse, parse_file, Symbol, Keyword
from .interpreter import (
    Interpreter,
    get_interpreter,
    load_effect,
    load_effects_dir,
    run_effect,
    list_effects,
    make_process_frame,
)
from .primitives import PRIMITIVES

__all__ = [
    'parse',
    'parse_file',
    'Symbol',
    'Keyword',
    'Interpreter',
    'get_interpreter',
    'load_effect',
    'load_effects_dir',
    'run_effect',
    'list_effects',
    'make_process_frame',
    'PRIMITIVES',
]

206 sexp_effects/derived.sexp Normal file
@@ -0,0 +1,206 @@
;; Derived Operations
;;
;; These are built from true primitives using S-expressions.
;; Load with: (require "derived")

;; =============================================================================
;; Math Helpers (derivable from where + basic ops)
;; =============================================================================

;; Absolute value
(define (abs x) (where (< x 0) (- x) x))

;; Minimum of two values
(define (min2 a b) (where (< a b) a b))

;; Maximum of two values
(define (max2 a b) (where (> a b) a b))

;; Clamp x to range [lo, hi]
(define (clamp x lo hi) (max2 lo (min2 hi x)))

;; Square of x
(define (sq x) (* x x))

;; Linear interpolation: a*(1-t) + b*t
(define (lerp a b t) (+ (* a (- 1 t)) (* b t)))

;; Smooth interpolation between edges
(define (smoothstep edge0 edge1 x)
  (let ((t (clamp (/ (- x edge0) (- edge1 edge0)) 0 1)))
    (* t (* t (- 3 (* 2 t))))))

;; =============================================================================
;; Channel Shortcuts (derivable from channel primitive)
;; =============================================================================

;; Extract red channel as xector
(define (red frame) (channel frame 0))

;; Extract green channel as xector
(define (green frame) (channel frame 1))

;; Extract blue channel as xector
(define (blue frame) (channel frame 2))

;; Convert to grayscale xector (ITU-R BT.601)
(define (gray frame)
  (+ (* (red frame) 0.299)
     (* (green frame) 0.587)
     (* (blue frame) 0.114)))

;; Alias for gray
(define (luminance frame) (gray frame))

;; =============================================================================
;; Coordinate Generators (derivable from iota + repeat/tile)
;; =============================================================================

;; X coordinate for each pixel [0, width)
(define (x-coords frame) (tile (iota (width frame)) (height frame)))

;; Y coordinate for each pixel [0, height)
(define (y-coords frame) (repeat (iota (height frame)) (width frame)))

;; Normalized X coordinate [0, 1]
(define (x-norm frame) (/ (x-coords frame) (max2 1 (- (width frame) 1))))

;; Normalized Y coordinate [0, 1]
(define (y-norm frame) (/ (y-coords frame) (max2 1 (- (height frame) 1))))

;; Distance from frame center for each pixel
(define (dist-from-center frame)
  (let* ((cx (/ (width frame) 2))
         (cy (/ (height frame) 2))
         (dx (- (x-coords frame) cx))
         (dy (- (y-coords frame) cy)))
    (sqrt (+ (sq dx) (sq dy)))))

;; Normalized distance from center [0, ~1]
(define (dist-norm frame)
  (let ((d (dist-from-center frame)))
    (/ d (max2 1 (βmax d)))))

;; =============================================================================
;; Cell/Grid Operations (derivable from floor + basic math)
;; =============================================================================

;; Cell row index for each pixel
(define (cell-row frame cell-size) (floor (/ (y-coords frame) cell-size)))

;; Cell column index for each pixel
(define (cell-col frame cell-size) (floor (/ (x-coords frame) cell-size)))

;; Number of cell rows
(define (num-rows frame cell-size) (floor (/ (height frame) cell-size)))

;; Number of cell columns
(define (num-cols frame cell-size) (floor (/ (width frame) cell-size)))

;; Flat cell index for each pixel
(define (cell-indices frame cell-size)
  (+ (* (cell-row frame cell-size) (num-cols frame cell-size))
     (cell-col frame cell-size)))

;; Total number of cells
(define (num-cells frame cell-size)
  (* (num-rows frame cell-size) (num-cols frame cell-size)))

;; X position within cell [0, cell-size)
(define (local-x frame cell-size) (mod (x-coords frame) cell-size))

;; Y position within cell [0, cell-size)
(define (local-y frame cell-size) (mod (y-coords frame) cell-size))

;; Normalized X within cell [0, 1]
(define (local-x-norm frame cell-size)
  (/ (local-x frame cell-size) (max2 1 (- cell-size 1))))

;; Normalized Y within cell [0, 1]
(define (local-y-norm frame cell-size)
  (/ (local-y frame cell-size) (max2 1 (- cell-size 1))))

;; =============================================================================
;; Fill Operations (derivable from iota)
;; =============================================================================

;; Xector of n zeros
(define (zeros n) (* (iota n) 0))

;; Xector of n ones
(define (ones n) (+ (zeros n) 1))

;; Xector of n copies of val
(define (fill val n) (+ (zeros n) val))

;; Xector of zeros matching x's length
(define (zeros-like x) (* x 0))

;; Xector of ones matching x's length
(define (ones-like x) (+ (zeros-like x) 1))

;; =============================================================================
;; Pooling (derivable from group-reduce)
;; =============================================================================

;; Pool a channel by cell index
(define (pool-channel chan cell-idx num-cells)
  (group-reduce chan cell-idx num-cells "mean"))

;; Pool red channel to cells
(define (pool-red frame cell-size)
  (pool-channel (red frame)
                (cell-indices frame cell-size)
                (num-cells frame cell-size)))

;; Pool green channel to cells
(define (pool-green frame cell-size)
  (pool-channel (green frame)
                (cell-indices frame cell-size)
                (num-cells frame cell-size)))

;; Pool blue channel to cells
(define (pool-blue frame cell-size)
  (pool-channel (blue frame)
                (cell-indices frame cell-size)
                (num-cells frame cell-size)))

;; Pool grayscale to cells
(define (pool-gray frame cell-size)
  (pool-channel (gray frame)
                (cell-indices frame cell-size)
                (num-cells frame cell-size)))

;; =============================================================================
;; Blending (derivable from math)
;; =============================================================================

;; Additive blend
(define (blend-add a b) (clamp (+ a b) 0 255))

;; Multiply blend (normalized)
(define (blend-multiply a b) (* (/ a 255) b))

;; Screen blend
(define (blend-screen a b) (- 255 (* (/ (- 255 a) 255) (- 255 b))))

;; Overlay blend
(define (blend-overlay a b)
  (where (< a 128)
         (* 2 (/ (* a b) 255))
         (- 255 (* 2 (/ (* (- 255 a) (- 255 b)) 255)))))

;; =============================================================================
;; Simple Effects (derivable from primitives)
;; =============================================================================

;; Invert a channel (255 - c)
(define (invert-channel c) (- 255 c))

;; Binary threshold
(define (threshold-channel c thresh) (where (> c thresh) 255 0))

;; Reduce to n levels
(define (posterize-channel c levels)
  (let ((step (/ 255 (- levels 1))))
    (* (round (/ c step)) step)))

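The coordinate generators above rely on the tile/repeat duality over a flattened frame: x varies fastest, y slowest. A minimal NumPy sketch of the same identities (x-coords, y-coords, cell-indices), assuming row-major pixel order as the xector layout:

import numpy as np

# Row-major equivalents of the derived coordinate helpers (assumed layout).
def x_coords(w, h):            # (tile (iota w) h)
    return np.tile(np.arange(w), h)

def y_coords(w, h):            # (repeat (iota h) w)
    return np.repeat(np.arange(h), w)

def cell_indices(w, h, cell):  # row * num-cols + col, per pixel
    row = y_coords(w, h) // cell
    col = x_coords(w, h) // cell
    return row * (w // cell) + col

# Sanity check: a 4x2 frame with 2px cells has two cells side by side
assert list(cell_indices(4, 2, 2)) == [0, 0, 1, 1, 0, 0, 1, 1]
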
17 sexp_effects/effects/ascii_art.sexp Normal file
@@ -0,0 +1,17 @@
;; ASCII Art effect - converts image to ASCII characters
(require-primitives "ascii")

(define-effect ascii_art
  :params (
    (char_size :type int :default 8 :range [4 32])
    (alphabet :type string :default "standard")
    (color_mode :type string :default "color" :desc "color, mono, invert, or any color name/hex")
    (background_color :type string :default "black" :desc "background color name/hex")
    (invert_colors :type int :default 0 :desc "swap foreground and background colors")
    (contrast :type float :default 1.5 :range [1 3])
  )
  (let* ((sample (cell-sample frame char_size))
         (colors (nth sample 0))
         (luminances (nth sample 1))
         (chars (luminance-to-chars luminances alphabet contrast)))
    (render-char-grid frame chars colors char_size color_mode background_color invert_colors)))

52 sexp_effects/effects/ascii_art_fx.sexp Normal file
@@ -0,0 +1,52 @@
;; ASCII Art FX - converts image to ASCII characters with per-character effects
(require-primitives "ascii")

(define-effect ascii_art_fx
  :params (
    ;; Basic parameters
    (char_size :type int :default 8 :range [4 32]
               :desc "Size of each character cell in pixels")
    (alphabet :type string :default "standard"
              :desc "Character set to use")
    (color_mode :type string :default "color"
                :choices [color mono invert]
                :desc "Color mode: color, mono, invert, or any color name/hex")
    (background_color :type string :default "black"
                      :desc "Background color name or hex value")
    (invert_colors :type int :default 0 :range [0 1]
                   :desc "Swap foreground and background colors (0/1)")
    (contrast :type float :default 1.5 :range [1 3]
              :desc "Character selection contrast")

    ;; Per-character effects
    (char_jitter :type float :default 0 :range [0 20]
                 :desc "Position jitter amount in pixels")
    (char_scale :type float :default 1.0 :range [0.5 2.0]
                :desc "Character scale factor")
    (char_rotation :type float :default 0 :range [0 180]
                   :desc "Rotation amount in degrees")
    (char_hue_shift :type float :default 0 :range [0 360]
                    :desc "Hue shift in degrees")

    ;; Modulation sources
    (jitter_source :type string :default "none"
                   :choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
                   :desc "What drives jitter modulation")
    (scale_source :type string :default "none"
                  :choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
                  :desc "What drives scale modulation")
    (rotation_source :type string :default "none"
                     :choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
                     :desc "What drives rotation modulation")
    (hue_source :type string :default "none"
                :choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
                :desc "What drives hue shift modulation")
  )
  (let* ((sample (cell-sample frame char_size))
         (colors (nth sample 0))
         (luminances (nth sample 1))
         (chars (luminance-to-chars luminances alphabet contrast)))
    (render-char-grid-fx frame chars colors luminances char_size
                         color_mode background_color invert_colors
                         char_jitter char_scale char_rotation char_hue_shift
                         jitter_source scale_source rotation_source hue_source)))

102 sexp_effects/effects/ascii_fx_zone.sexp Normal file
@@ -0,0 +1,102 @@
;; Composable ASCII Art with Per-Zone Expression-Driven Effects
;; Requires ascii primitive library for the ascii-fx-zone primitive

(require-primitives "ascii")

;; Two modes of operation:
;;
;; 1. EXPRESSION MODE: Use zone-* variables in expression parameters
;;    Zone variables available:
;;      zone-row, zone-col: Grid position (integers)
;;      zone-row-norm, zone-col-norm: Normalized position (0-1)
;;      zone-lum: Cell luminance (0-1)
;;      zone-sat: Cell saturation (0-1)
;;      zone-hue: Cell hue (0-360)
;;      zone-r, zone-g, zone-b: RGB components (0-1)
;;
;;    Example:
;;      (ascii-fx-zone frame
;;        :cols 80
;;        :char_hue (* zone-lum 180)
;;        :char_rotation (* zone-col-norm 30))
;;
;; 2. CELL EFFECT MODE: Pass a lambda to apply arbitrary effects per-cell
;;    The lambda receives (cell-image zone-dict) and returns the modified cell.
;;    Zone dict contains: row, col, row-norm, col-norm, lum, sat, hue, r, g, b,
;;    char, color, cell_size, plus any bound analysis values.
;;
;;    Any loaded sexp effect can be called on cells - each cell is just a small frame:
;;      (blur cell radius)                 - Gaussian blur
;;      (rotate cell angle)                - Rotate by angle degrees
;;      (brightness cell factor)           - Adjust brightness
;;      (contrast cell factor)             - Adjust contrast
;;      (saturation cell factor)           - Adjust saturation
;;      (hue_shift cell degrees)           - Shift hue
;;      (rgb_split cell offset_x offset_y) - RGB channel split
;;      (invert cell)                      - Invert colors
;;      (pixelate cell block_size)         - Pixelate
;;      (wave cell amplitude freq)         - Wave distortion
;;      ... and any other loaded effect
;;
;;    Example:
;;      (ascii-fx-zone frame
;;        :cols 60
;;        :cell_effect (lambda [cell zone]
;;          (blur (rotate cell (* (get zone "energy") 45))
;;                (if (> (get zone "lum") 0.5) 3 0))))

(define-effect ascii_fx_zone
  :params (
    (cols :type int :default 80 :range [20 200]
          :desc "Number of character columns")
    (char_size :type int :default nil :range [4 32]
               :desc "Character cell size in pixels (overrides cols if set)")
    (alphabet :type string :default "standard"
              :desc "Character set: standard, blocks, simple, digits, or custom string")
    (color_mode :type string :default "color"
                :desc "Color mode: color, mono, invert, or any color name/hex")
    (background :type string :default "black"
                :desc "Background color name or hex value")
    (contrast :type float :default 1.5 :range [0.5 3.0]
              :desc "Contrast for character selection")
    (char_hue :type any :default nil
              :desc "Hue shift expression (evaluated per-zone with zone-* vars)")
    (char_saturation :type any :default nil
                     :desc "Saturation multiplier expression (1.0 = unchanged)")
    (char_brightness :type any :default nil
                     :desc "Brightness multiplier expression (1.0 = unchanged)")
    (char_scale :type any :default nil
                :desc "Character scale expression (1.0 = normal size)")
    (char_rotation :type any :default nil
                   :desc "Character rotation expression (degrees)")
    (char_jitter :type any :default nil
                 :desc "Position jitter expression (pixels)")
    (cell_effect :type any :default nil
                 :desc "Lambda (cell zone) -> cell for arbitrary per-cell effects")
    ;; Convenience params for staged recipes (avoids compile-time expression issues)
    (energy :type float :default nil
            :desc "Energy multiplier (0-1) from audio analysis bind")
    (rotation_scale :type float :default 0
                    :desc "Max rotation at top-right when energy=1 (degrees)")
  )
  ;; The ascii-fx-zone special form handles expression params.
  ;; If energy + rotation_scale are provided, it builds: energy * scale * position_factor
  ;; where position_factor = 0 at bottom-left, 3 at top-right.
  ;; If cell_effect is provided, each character is rendered to a cell image,
  ;; passed to the lambda, and the result composited back.
  (ascii-fx-zone frame
    :cols cols
    :char_size char_size
    :alphabet alphabet
    :color_mode color_mode
    :background background
    :contrast contrast
    :char_hue char_hue
    :char_saturation char_saturation
    :char_brightness char_brightness
    :char_scale char_scale
    :char_rotation char_rotation
    :char_jitter char_jitter
    :cell_effect cell_effect
    :energy energy
    :rotation_scale rotation_scale))

30 sexp_effects/effects/ascii_zones.sexp Normal file
@@ -0,0 +1,30 @@
;; ASCII Zones effect - different character sets for different brightness zones
;; Dark areas use simple chars, mid uses standard, bright uses blocks
(require-primitives "ascii")

(define-effect ascii_zones
  :params (
    (char_size :type int :default 8 :range [4 32])
    (dark_threshold :type int :default 80 :range [0 128])
    (bright_threshold :type int :default 180 :range [128 255])
    (color_mode :type string :default "color")
  )
  (let* ((sample (cell-sample frame char_size))
         (colors (nth sample 0))
         (luminances (nth sample 1))
         ;; Start with simple chars as base
         (base-chars (luminance-to-chars luminances "simple" 1.2))
         ;; Map each cell to the appropriate alphabet based on brightness zone
         (zoned-chars (map-char-grid base-chars luminances
           (lambda (r c ch lum)
             (cond
               ;; Bright zones: use block characters
               ((> lum bright_threshold)
                (alphabet-char "blocks" (floor (/ (- lum bright_threshold) 15))))
               ;; Dark zones: use simple sparse chars
               ((< lum dark_threshold)
                (alphabet-char " .-" (floor (/ lum 30))))
               ;; Mid zones: use standard ASCII
               (else
                (alphabet-char "standard" (floor (/ lum 4)))))))))
    (render-char-grid frame zoned-chars colors char_size color_mode (list 0 0 0))))

31 sexp_effects/effects/blend.sexp Normal file
@@ -0,0 +1,31 @@
;; Blend effect - combines two video frames
;; Streaming-compatible: frame is background, overlay is second frame
;; Usage: (blend background overlay :opacity 0.5 :mode "alpha")
;;
;; Params:
;;   mode    - blend mode (add, multiply, screen, overlay, difference, lighten, darken, alpha)
;;   opacity - blend amount (0-1)

(require-primitives "image" "blending" "core")

(define-effect blend
  :params (
    (overlay :type frame :default nil)
    (mode :type string :default "alpha")
    (opacity :type float :default 0.5)
  )
  (if (core:is-nil overlay)
      frame
      (let [a frame
            b overlay
            a-h (image:height a)
            a-w (image:width a)
            b-h (image:height b)
            b-w (image:width b)
            ;; Resize b to match a if needed
            b-sized (if (and (= a-w b-w) (= a-h b-h))
                        b
                        (image:resize b a-w a-h "linear"))]
        (if (= mode "alpha")
            (blending:blend-images a b-sized opacity)
            (blending:blend-images a (blending:blend-mode a b-sized mode) opacity)))))

58 sexp_effects/effects/blend_multi.sexp Normal file
@@ -0,0 +1,58 @@
;; N-way weighted blend effect
;; Streaming-compatible: pass inputs as a list of frames
;; Usage: (blend_multi :inputs [(read a) (read b) (read c)] :weights [0.3 0.4 0.3])
;;
;; Parameters:
;;   inputs      - list of N frames to blend
;;   weights     - list of N floats, one per input (resolved per-frame)
;;   mode        - blend mode applied when folding each frame in:
;;                   "alpha"       — pure weighted average (default)
;;                   "multiply"    — darken by multiplication
;;                   "screen"      — lighten (inverse multiply)
;;                   "overlay"     — contrast-boosting midtone blend
;;                   "soft-light"  — gentle dodge/burn
;;                   "hard-light"  — strong dodge/burn
;;                   "color-dodge" — brightens towards white
;;                   "color-burn"  — darkens towards black
;;                   "difference"  — absolute pixel difference
;;                   "exclusion"   — softer difference
;;                   "add"         — additive (clamped)
;;                   "subtract"    — subtractive (clamped)
;;                   "darken"      — per-pixel minimum
;;                   "lighten"     — per-pixel maximum
;;   resize_mode - how to match frame dimensions (fit, crop, stretch)
;;
;; Uses a left-fold over inputs[1..N-1]. At each step the running
;; opacity is: w[i] / (w[0] + w[1] + ... + w[i])
;; which produces the correct normalised weighted result.

(require-primitives "image" "blending")

(define-effect blend_multi
  :params (
    (inputs :type list :default [])
    (weights :type list :default [])
    (mode :type string :default "alpha")
    (resize_mode :type string :default "fit")
  )
  (let [n (len inputs)
        ;; Target dimensions from first frame
        target-w (image:width (nth inputs 0))
        target-h (image:height (nth inputs 0))
        ;; Fold over indices 1..n-1
        ;; Accumulator is (list blended-frame running-weight-sum)
        seed (list (nth inputs 0) (nth weights 0))
        result (reduce (range 1 n) seed
                 (lambda (pair i)
                   (let [acc (nth pair 0)
                         running (nth pair 1)
                         w (nth weights i)
                         new-running (+ running w)
                         opacity (/ w (max new-running 0.001))
                         f (image:resize (nth inputs i) target-w target-h "linear")
                         ;; Apply blend mode then mix with opacity
                         blended (if (= mode "alpha")
                                     (blending:blend-images acc f opacity)
                                     (blending:blend-images acc (blending:blend-mode acc f mode) opacity))]
                     (list blended new-running))))]
    (nth result 0)))

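The running-opacity formula is worth a sanity check: folding with opacity w[i] / (w[0] + ... + w[i]) at each step should equal the direct normalised weighted average. A small NumPy sketch verifying that claim for the "alpha" path:

import numpy as np

rng = np.random.default_rng(0)
frames = [rng.random((4, 4)) for _ in range(3)]
weights = [0.3, 0.4, 0.3]

# Left fold with running opacity, as blend_multi does
acc, running = frames[0], weights[0]
for f, w in zip(frames[1:], weights[1:]):
    running += w
    opacity = w / running
    acc = acc * (1 - opacity) + f * opacity   # "alpha" blend at this opacity

# Direct normalised weighted average
direct = sum(w * f for w, f in zip(weights, frames)) / sum(weights)
assert np.allclose(acc, direct)
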
16 sexp_effects/effects/bloom.sexp Normal file
@@ -0,0 +1,16 @@
;; Bloom effect - glow on bright areas
(require-primitives "image" "blending")

(define-effect bloom
  :params (
    (intensity :type float :default 0.5 :range [0 2])
    (threshold :type int :default 200 :range [0 255])
    (radius :type int :default 15 :range [1 50])
  )
  (let* ((bright (map-pixels frame
                   (lambda (x y c)
                     (if (> (luminance c) threshold)
                         c
                         (rgb 0 0 0)))))
         (blurred (image:blur bright radius)))
    (blending:blend-mode frame blurred "add")))

8 sexp_effects/effects/blur.sexp Normal file
@@ -0,0 +1,8 @@
;; Blur effect - gaussian blur
(require-primitives "image")

(define-effect blur
  :params (
    (radius :type int :default 5 :range [1 50])
  )
  (image:blur frame (max 1 radius)))

9 sexp_effects/effects/brightness.sexp Normal file
@@ -0,0 +1,9 @@
;; Brightness effect - adjusts overall brightness
;; Uses vectorized adjust primitive for fast processing
(require-primitives "color_ops")

(define-effect brightness
  :params (
    (amount :type int :default 0 :range [-255 255])
  )
  (color_ops:adjust-brightness frame amount))

65 sexp_effects/effects/cell_pattern.sexp Normal file
@@ -0,0 +1,65 @@
;; Cell Pattern effect - custom patterns within cells
;;
;; Demonstrates building arbitrary per-cell visuals from primitives.
;; Uses local coordinates within cells to draw patterns scaled by luminance.

(require-primitives "xector")

(define-effect cell_pattern
  :params (
    (cell-size :type int :default 16 :range [8 48] :desc "Cell size")
    (pattern :type string :default "diagonal" :desc "Pattern: diagonal, cross, ring")
  )
  (let* (
      ;; Pool to get cell colors
      (pooled (pool-frame frame cell-size))
      (cell-r (nth pooled 0))
      (cell-g (nth pooled 1))
      (cell-b (nth pooled 2))
      (cell-lum (α/ (nth pooled 3) 255))

      ;; Cell indices for each pixel
      (cell-idx (cell-indices frame cell-size))

      ;; Look up cell values for each pixel
      (pix-r (gather cell-r cell-idx))
      (pix-g (gather cell-g cell-idx))
      (pix-b (gather cell-b cell-idx))
      (pix-lum (gather cell-lum cell-idx))

      ;; Local position within cell [0, 1]
      (lx (local-x-norm frame cell-size))
      (ly (local-y-norm frame cell-size))

      ;; Pattern mask based on pattern type
      (mask
        (cond
          ;; Diagonal lines - thickness based on luminance
          ((= pattern "diagonal")
           (let* ((diag (αmod (α+ lx ly) 0.25))
                  (thickness (α* pix-lum 0.125)))
             (α< diag thickness)))

          ;; Cross pattern
          ((= pattern "cross")
           (let* ((cx (αabs (α- lx 0.5)))
                  (cy (αabs (α- ly 0.5)))
                  (thickness (α* pix-lum 0.25)))
             (αor (α< cx thickness) (α< cy thickness))))

          ;; Ring pattern
          ((= pattern "ring")
           (let* ((dx (α- lx 0.5))
                  (dy (α- ly 0.5))
                  (dist (αsqrt (α+ (α² dx) (α² dy))))
                  (target (α* pix-lum 0.4))
                  (thickness 0.05))
             (α< (αabs (α- dist target)) thickness)))

          ;; Default: solid
          (else (α> pix-lum 0)))))

    ;; Apply mask: show cell color where mask is true, black elsewhere
    (rgb (where mask pix-r 0)
         (where mask pix-g 0)
         (where mask pix-b 0))))

13 sexp_effects/effects/color-adjust.sexp Normal file
@@ -0,0 +1,13 @@
;; Color adjustment effect - replaces TRANSFORM node
(require-primitives "color_ops")

(define-effect color-adjust
  :params (
    (brightness :type int :default 0 :range [-255 255] :desc "Brightness adjustment")
    (contrast :type float :default 1 :range [0 3] :desc "Contrast multiplier")
    (saturation :type float :default 1 :range [0 2] :desc "Saturation multiplier")
  )
  (-> frame
      (color_ops:adjust-brightness brightness)
      (color_ops:adjust-contrast contrast)
      (color_ops:adjust-saturation saturation)))

13 sexp_effects/effects/color_cycle.sexp Normal file
@@ -0,0 +1,13 @@
;; Color Cycle effect - animated hue rotation
(require-primitives "color_ops")

(define-effect color_cycle
  :params (
    (speed :type int :default 1 :range [0 10])
  )
  (let ((shift (* t speed 360)))
    (map-pixels frame
      (lambda (x y c)
        (let* ((hsv (rgb->hsv c))
               (new-h (mod (+ (first hsv) shift) 360)))
          (hsv->rgb (list new-h (nth hsv 1) (nth hsv 2))))))))

9 sexp_effects/effects/contrast.sexp Normal file
@@ -0,0 +1,9 @@
;; Contrast effect - adjusts image contrast
;; Uses vectorized adjust primitive for fast processing
(require-primitives "color_ops")

(define-effect contrast
  :params (
    (amount :type float :default 1 :range [0.5 3])
  )
  (color_ops:adjust-contrast frame amount))

30 sexp_effects/effects/crt.sexp Normal file
@@ -0,0 +1,30 @@
;; CRT effect - old monitor simulation
(require-primitives "image")

(define-effect crt
  :params (
    (line_spacing :type int :default 2 :range [1 10])
    (line_opacity :type float :default 0.3 :range [0 1])
    (vignette_amount :type float :default 0.2)
  )
  (let* ((w (image:width frame))
         (h (image:height frame))
         (cx (/ w 2))
         (cy (/ h 2))
         (max-dist (sqrt (+ (* cx cx) (* cy cy)))))
    (map-pixels frame
      (lambda (x y c)
        (let* (;; Scanline darkening
               (scanline-factor (if (= 0 (mod y line_spacing))
                                    (- 1 line_opacity)
                                    1))
               ;; Vignette
               (dx (- x cx))
               (dy (- y cy))
               (dist (sqrt (+ (* dx dx) (* dy dy))))
               (vignette-factor (- 1 (* (/ dist max-dist) vignette_amount)))
               ;; Combined
               (factor (* scanline-factor vignette-factor)))
          (rgb (* (red c) factor)
               (* (green c) factor)
               (* (blue c) factor)))))))

14 sexp_effects/effects/datamosh.sexp Normal file
@@ -0,0 +1,14 @@
;; Datamosh effect - glitch block corruption

(define-effect datamosh
  :params (
    (block_size :type int :default 32 :range [8 128])
    (corruption :type float :default 0.3 :range [0 1])
    (max_offset :type int :default 50 :range [0 200])
    (color_corrupt :type bool :default true)
  )
  ;; Get previous frame from state, or use current frame if none
  (let ((prev (state-get "prev_frame" frame)))
    (begin
      (state-set "prev_frame" (copy frame))
      (datamosh frame prev block_size corruption max_offset color_corrupt))))

19 sexp_effects/effects/echo.sexp Normal file
@@ -0,0 +1,19 @@
;; Echo effect - motion trails using frame buffer
(require-primitives "blending")

(define-effect echo
  :params (
    (num_echoes :type int :default 4 :range [1 20])
    (decay :type float :default 0.5 :range [0 1])
  )
  (let* ((buffer (state-get "buffer" (list)))
         (new-buffer (take (cons frame buffer) (+ num_echoes 1))))
    (begin
      (state-set "buffer" new-buffer)
      ;; Blend frames with decay
      (if (< (length new-buffer) 2)
          frame
          ;; Simple blend of first two frames for now;
          ;; the full version would fold over all frames (see the sketch below)
          (blending:blend-images frame (nth new-buffer 1) (* decay 0.5))))))

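For reference, the full fold the comment alludes to could look like the following NumPy sketch. The geometric decay schedule is an assumption (the effect only specifies a single decay factor), and frames are assumed to be float arrays in newest-first order:

import numpy as np

def echo_fold(buffer, decay):
    """Blend a newest-first frame buffer with geometric decay.

    buffer[0] is the current frame; each older frame i is mixed in
    with opacity decay**i, giving a simple exponential trail.
    """
    out = buffer[0].astype(float)
    for i, older in enumerate(buffer[1:], start=1):
        a = decay ** i
        out = out * (1 - a) + older.astype(float) * a
    return out
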
9 sexp_effects/effects/edge_detect.sexp Normal file
@@ -0,0 +1,9 @@
;; Edge detection effect - highlights edges
(require-primitives "image")

(define-effect edge_detect
  :params (
    (low :type int :default 50 :range [10 100])
    (high :type int :default 150 :range [50 300])
  )
  (image:edge-detect frame low high))

13 sexp_effects/effects/emboss.sexp Normal file
@@ -0,0 +1,13 @@
;; Emboss effect - creates raised/3D appearance
(require-primitives "blending")

(define-effect emboss
  :params (
    (strength :type float :default 1 :range [0.5 3])
    (blend :type float :default 0.3 :range [0 1])
  )
  (let* ((kernel (list (list (- strength) (- strength) 0)
                       (list (- strength) 1 strength)
                       (list 0 strength strength)))
         (embossed (convolve frame kernel)))
    (blending:blend-images embossed frame blend)))

19 sexp_effects/effects/film_grain.sexp Normal file
@@ -0,0 +1,19 @@
;; Film Grain effect - adds film grain texture
(require-primitives "core")

(define-effect film_grain
  :params (
    (intensity :type float :default 0.2 :range [0 1])
    (colored :type bool :default false)
  )
  (let ((grain-amount (* intensity 50)))
    (map-pixels frame
      (lambda (x y c)
        (if colored
            (rgb (clamp (+ (red c) (gaussian 0 grain-amount)) 0 255)
                 (clamp (+ (green c) (gaussian 0 grain-amount)) 0 255)
                 (clamp (+ (blue c) (gaussian 0 grain-amount)) 0 255))
            (let ((n (gaussian 0 grain-amount)))
              (rgb (clamp (+ (red c) n) 0 255)
                   (clamp (+ (green c) n) 0 255)
                   (clamp (+ (blue c) n) 0 255))))))))

16 sexp_effects/effects/fisheye.sexp Normal file
@@ -0,0 +1,16 @@
;; Fisheye effect - barrel/pincushion lens distortion
(require-primitives "geometry" "image")

(define-effect fisheye
  :params (
    (strength :type float :default 0.3 :range [-1 1])
    (center_x :type float :default 0.5 :range [0 1])
    (center_y :type float :default 0.5 :range [0 1])
    (zoom_correct :type bool :default true)
  )
  (let* ((w (image:width frame))
         (h (image:height frame))
         (cx (* w center_x))
         (cy (* h center_y))
         (coords (geometry:fisheye-coords w h strength cx cy zoom_correct)))
    (geometry:remap frame (geometry:coords-x coords) (geometry:coords-y coords))))

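fisheye and the other remap-based effects in this set (kaleidoscope, ripple, swirl) share one pattern: build per-pixel source coordinates, then resample the frame at them. If geometry:remap is backed by OpenCV (an assumption; the actual backend is not shown here), the equivalent call is cv2.remap with float32 coordinate maps:

import cv2
import numpy as np

def remap(frame, map_x, map_y):
    """Resample frame at float source coordinates (the geometry:remap pattern)."""
    return cv2.remap(frame,
                     map_x.astype(np.float32),
                     map_y.astype(np.float32),
                     interpolation=cv2.INTER_LINEAR,
                     borderMode=cv2.BORDER_REFLECT)

# Identity maps leave the frame unchanged; distortion effects perturb them.
h, w = 480, 640
map_x, map_y = np.meshgrid(np.arange(w, dtype=np.float32),
                           np.arange(h, dtype=np.float32))
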
16 sexp_effects/effects/flip.sexp Normal file
@@ -0,0 +1,16 @@
;; Flip effect - flips image horizontally or vertically
(require-primitives "geometry")

(define-effect flip
  :params (
    (horizontal :type bool :default true)
    (vertical :type bool :default false)
  )
  (let ((result frame))
    (if horizontal
        (set! result (geometry:flip-img result "horizontal"))
        nil)
    (if vertical
        (set! result (geometry:flip-img result "vertical"))
        nil)
    result))

7 sexp_effects/effects/grayscale.sexp Normal file
@@ -0,0 +1,7 @@
;; Grayscale effect - converts to grayscale
;; Uses vectorized mix-gray primitive for fast processing
(require-primitives "image")

(define-effect grayscale
  :params ()
  (image:grayscale frame))

49 sexp_effects/effects/halftone.sexp Normal file
@@ -0,0 +1,49 @@
;; Halftone/dot effect - built from primitive xector operations
;;
;; Uses:
;;   pool-frame     - downsample to cell luminances
;;   cell-indices   - which cell each pixel belongs to
;;   gather         - look up cell value for each pixel
;;   local-x/y-norm - position within cell [0,1]
;;   where          - conditional per-pixel

(require-primitives "xector")

(define-effect halftone
  :params (
    (cell-size :type int :default 12 :range [4 32] :desc "Size of halftone cells")
    (dot-scale :type float :default 0.9 :range [0.1 1.0] :desc "Max dot radius")
    (invert :type bool :default false :desc "Invert (white dots on black)")
  )
  (let* (
      ;; Pool frame to get luminance per cell
      (pooled (pool-frame frame cell-size))
      (cell-lum (nth pooled 3)) ; luminance is 4th element

      ;; For each output pixel, get its cell index
      (cell-idx (cell-indices frame cell-size))

      ;; Get cell luminance for each pixel
      (pixel-lum (α/ (gather cell-lum cell-idx) 255))

      ;; Position within cell, normalized to [-0.5, 0.5]
      (lx (α- (local-x-norm frame cell-size) 0.5))
      (ly (α- (local-y-norm frame cell-size) 0.5))

      ;; Distance from cell center (0 at center, ~0.7 at corners)
      (dist (αsqrt (α+ (α² lx) (α² ly))))

      ;; Radius based on luminance (brighter = bigger dot)
      (radius (α* (if invert (α- 1 pixel-lum) pixel-lum)
                  (α* dot-scale 0.5)))

      ;; Is this pixel inside the dot?
      (inside (α< dist radius))

      ;; Output color
      (fg (if invert 255 0))
      (bg (if invert 0 255))
      (out (where inside fg bg)))

    ;; Grayscale output
    (rgb out out out)))

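The dot test itself is pure per-pixel arithmetic: distance from the cell centre compared against a luminance-scaled radius. A NumPy sketch of just that mask, assuming luminance has already been pooled per cell and gathered back per pixel:

import numpy as np

def halftone_mask(pixel_lum, lx_norm, ly_norm, dot_scale=0.9):
    """True where a pixel falls inside its cell's dot.

    pixel_lum: per-pixel cell luminance in [0, 1]
    lx_norm, ly_norm: position within the cell in [0, 1]
    """
    lx = lx_norm - 0.5
    ly = ly_norm - 0.5
    dist = np.sqrt(lx * lx + ly * ly)       # 0 at centre, ~0.707 at corners
    radius = pixel_lum * dot_scale * 0.5    # brighter cell -> bigger dot
    return dist < radius
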
12 sexp_effects/effects/hue_shift.sexp Normal file
@@ -0,0 +1,12 @@
;; Hue shift effect - rotates hue values
;; Uses vectorized shift-hsv primitive for fast processing

(require-primitives "color_ops")

(define-effect hue_shift
  :params (
    (degrees :type int :default 0 :range [0 360])
    (speed :type int :default 0 :desc "rotation per second")
  )
  (let ((shift (+ degrees (* speed t))))
    (color_ops:shift-hsv frame shift 1 1)))

9 sexp_effects/effects/invert.sexp Normal file
@@ -0,0 +1,9 @@
;; Invert effect - inverts all colors
;; Uses vectorized invert-img primitive for fast processing
;; amount param: 0 = no invert, 1 = full invert (threshold at 0.5)

(require-primitives "color_ops")

(define-effect invert
  :params ((amount :type float :default 1 :range [0 1]))
  (if (> amount 0.5) (color_ops:invert-img frame) frame))

20 sexp_effects/effects/kaleidoscope.sexp Normal file
@@ -0,0 +1,20 @@
;; Kaleidoscope effect - mandala-like symmetry patterns
(require-primitives "geometry" "image")

(define-effect kaleidoscope
  :params (
    (segments :type int :default 6 :range [3 16])
    (rotation :type int :default 0 :range [0 360])
    (rotation_speed :type int :default 0 :range [-180 180])
    (center_x :type float :default 0.5 :range [0 1])
    (center_y :type float :default 0.5 :range [0 1])
    (zoom :type float :default 1 :range [0.5 3])
  )
  (let* ((w (image:width frame))
         (h (image:height frame))
         (cx (* w center_x))
         (cy (* h center_y))
         ;; Total rotation including time-based animation
         (total_rot (+ rotation (* rotation_speed (or _time 0))))
         (coords (geometry:kaleidoscope-coords w h segments total_rot cx cy zoom)))
    (geometry:remap frame (geometry:coords-x coords) (geometry:coords-y coords))))

36 sexp_effects/effects/layer.sexp Normal file
@@ -0,0 +1,36 @@
;; Layer effect - composite overlay over background at position
;; Streaming-compatible: frame is background, overlay is foreground
;; Usage: (layer background overlay :x 10 :y 20 :opacity 0.8)
;;
;; Params:
;;   overlay - frame to composite on top
;;   x, y    - position to place overlay
;;   opacity - blend amount (0-1)
;;   mode    - blend mode (alpha, multiply, screen, etc.)

(require-primitives "image" "blending" "core")

(define-effect layer
  :params (
    (overlay :type frame :default nil)
    (x :type int :default 0)
    (y :type int :default 0)
    (opacity :type float :default 1.0)
    (mode :type string :default "alpha")
  )
  (if (core:is-nil overlay)
      frame
      (let [bg (copy frame)
            fg overlay
            fg-w (image:width fg)
            fg-h (image:height fg)]
        (if (= opacity 1.0)
            ;; Simple paste
            (paste bg fg x y)
            ;; Blend with opacity
            (let [blended (if (= mode "alpha")
                              (blending:blend-images (image:crop bg x y fg-w fg-h) fg opacity)
                              (blending:blend-images (image:crop bg x y fg-w fg-h)
                                                     (blending:blend-mode (image:crop bg x y fg-w fg-h) fg mode)
                                                     opacity))]
              (paste bg blended x y))))))

33 sexp_effects/effects/mirror.sexp Normal file
@@ -0,0 +1,33 @@
;; Mirror effect - mirrors half of image
(require-primitives "geometry" "image")

(define-effect mirror
  :params (
    (mode :type string :default "left_right")
  )
  (let* ((w (image:width frame))
         (h (image:height frame))
         (hw (floor (/ w 2)))
         (hh (floor (/ h 2))))
    (cond
      ((= mode "left_right")
       (let ((left (image:crop frame 0 0 hw h))
             (result (copy frame)))
         (paste result (geometry:flip-img left "horizontal") hw 0)))

      ((= mode "right_left")
       (let ((right (image:crop frame hw 0 hw h))
             (result (copy frame)))
         (paste result (geometry:flip-img right "horizontal") 0 0)))

      ((= mode "top_bottom")
       (let ((top (image:crop frame 0 0 w hh))
             (result (copy frame)))
         (paste result (geometry:flip-img top "vertical") 0 hh)))

      ((= mode "bottom_top")
       (let ((bottom (image:crop frame 0 hh w hh))
             (result (copy frame)))
         (paste result (geometry:flip-img bottom "vertical") 0 0)))

      (else frame))))

30 sexp_effects/effects/mosaic.sexp Normal file
@@ -0,0 +1,30 @@
;; Mosaic effect - built from primitive xector operations
;;
;; Uses:
;;   pool-frame   - downsample to cell averages
;;   cell-indices - which cell each pixel belongs to
;;   gather       - look up cell value for each pixel

(require-primitives "xector")

(define-effect mosaic
  :params (
    (cell-size :type int :default 16 :range [4 64] :desc "Size of mosaic cells")
  )
  (let* (
      ;; Pool frame to get average color per cell (returns r,g,b,lum xectors)
      (pooled (pool-frame frame cell-size))
      (cell-r (nth pooled 0))
      (cell-g (nth pooled 1))
      (cell-b (nth pooled 2))

      ;; For each output pixel, get its cell index
      (cell-idx (cell-indices frame cell-size))

      ;; Gather: look up cell color for each pixel
      (out-r (gather cell-r cell-idx))
      (out-g (gather cell-g cell-idx))
      (out-b (gather cell-b cell-idx)))

    ;; Reconstruct frame
    (rgb out-r out-g out-b)))

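The same pool-then-gather shape is easy to see in NumPy: average each cell, then index the averages back out per pixel. A sketch under the row-major layout assumed earlier:

import numpy as np

def mosaic(img, cell):
    """Blocky mosaic via per-cell mean (pool) and per-pixel lookup (gather)."""
    h, w, c = img.shape
    rows, cols = h // cell, w // cell
    # Pool: mean colour per cell (crop to whole cells for simplicity)
    pooled = img[:rows * cell, :cols * cell].reshape(
        rows, cell, cols, cell, c).mean(axis=(1, 3))
    # Gather: each pixel reads its cell's colour
    r = np.arange(rows * cell) // cell
    k = np.arange(cols * cell) // cell
    return pooled[r[:, None], k[None, :]]
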
23 sexp_effects/effects/neon_glow.sexp Normal file
@@ -0,0 +1,23 @@
;; Neon Glow effect - glowing edge effect
(require-primitives "image" "blending")

(define-effect neon_glow
  :params (
    (edge_low :type int :default 50 :range [10 200])
    (edge_high :type int :default 150 :range [50 300])
    (glow_radius :type int :default 15 :range [1 50])
    (glow_intensity :type float :default 2 :range [0.5 5])
    (background :type float :default 0.3 :range [0 1])
  )
  (let* ((edge-img (image:edge-detect frame edge_low edge_high))
         (glow (image:blur edge-img glow_radius))
         ;; Intensify the glow
         (bright-glow (map-pixels glow
                        (lambda (x y c)
                          (rgb (clamp (* (red c) glow_intensity) 0 255)
                               (clamp (* (green c) glow_intensity) 0 255)
                               (clamp (* (blue c) glow_intensity) 0 255))))))
    (blending:blend-mode (blending:blend-images frame
                                                (make-image (image:width frame) (image:height frame) (list 0 0 0))
                                                (- 1 background))
                         bright-glow
                         "screen")))

8 sexp_effects/effects/noise.sexp Normal file
@@ -0,0 +1,8 @@
;; Noise effect - adds random noise
;; Uses vectorized add-noise primitive for fast processing

(define-effect noise
  :params (
    (amount :type int :default 20 :range [0 100])
  )
  (add-noise frame amount))

24 sexp_effects/effects/outline.sexp Normal file
@@ -0,0 +1,24 @@
;; Outline effect - shows only edges
(require-primitives "image")

(define-effect outline
  :params (
    (thickness :type int :default 2 :range [1 10])
    (threshold :type int :default 100 :range [20 300])
    (color :type list :default (list 0 0 0))
    (fill_mode :type string :default "original")
  )
  (let* ((edge-img (image:edge-detect frame (/ threshold 2) threshold))
         (dilated (if (> thickness 1)
                      (dilate edge-img thickness)
                      edge-img))
         (base (cond
                 ((= fill_mode "original") (copy frame))
                 ((= fill_mode "white") (make-image (image:width frame) (image:height frame) (list 255 255 255)))
                 (else (make-image (image:width frame) (image:height frame) (list 0 0 0))))))
    (map-pixels base
      (lambda (x y c)
        (let ((edge-val (luminance (pixel dilated x y))))
          (if (> edge-val 128)
              color
              c))))))

13 sexp_effects/effects/pixelate.sexp Normal file
@@ -0,0 +1,13 @@
;; Pixelate effect - creates blocky pixels
(require-primitives "image")

(define-effect pixelate
  :params (
    (block_size :type int :default 8 :range [2 64])
  )
  (let* ((w (image:width frame))
         (h (image:height frame))
         (small-w (max 1 (floor (/ w block_size))))
         (small-h (max 1 (floor (/ h block_size))))
         (small (image:resize frame small-w small-h "area")))
    (image:resize small w h "nearest")))

11 sexp_effects/effects/pixelsort.sexp Normal file
@@ -0,0 +1,11 @@
;; Pixelsort effect - glitch art pixel sorting

(define-effect pixelsort
  :params (
    (sort_by :type string :default "lightness")
    (threshold_low :type int :default 50 :range [0 255])
    (threshold_high :type int :default 200 :range [0 255])
    (angle :type int :default 0 :range [0 180])
    (reverse :type bool :default false)
  )
  (pixelsort frame sort_by threshold_low threshold_high angle reverse))

8 sexp_effects/effects/posterize.sexp Normal file
@@ -0,0 +1,8 @@
;; Posterize effect - reduces color levels
(require-primitives "color_ops")

(define-effect posterize
  :params (
    (levels :type int :default 8 :range [2 32])
  )
  (color_ops:posterize frame levels))

11 sexp_effects/effects/resize-frame.sexp Normal file
@@ -0,0 +1,11 @@
;; Resize effect - replaces RESIZE node
;; Note: uses target-w/target-h to avoid conflict with width/height primitives
(require-primitives "image")

(define-effect resize-frame
  :params (
    (target-w :type int :default 640 :desc "Target width in pixels")
    (target-h :type int :default 480 :desc "Target height in pixels")
    (mode :type string :default "linear" :choices [linear nearest area] :desc "Interpolation mode")
  )
  (image:resize frame target-w target-h mode))

13 sexp_effects/effects/rgb_split.sexp Normal file
@@ -0,0 +1,13 @@
;; RGB Split effect - chromatic aberration

(define-effect rgb_split
  :params (
    (offset_x :type int :default 10 :range [-50 50])
    (offset_y :type int :default 0 :range [-50 50])
  )
  (let* ((r (channel frame 0))
         (g (channel frame 1))
         (b (channel frame 2))
         (r-shifted (translate (merge-channels r r r) offset_x offset_y))
         (b-shifted (translate (merge-channels b b b) (- offset_x) (- offset_y))))
    (merge-channels (channel r-shifted 0) g (channel b-shifted 0))))

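The chromatic-aberration trick is just opposite shifts of the red and blue planes while green stays put. A NumPy sketch using np.roll in place of the translate primitive (note np.roll wraps around while translate presumably pads, so edge behaviour differs):

import numpy as np

def rgb_split(img, dx=10, dy=0):
    """Shift red by (+dx, +dy) and blue by (-dx, -dy); keep green in place."""
    r = np.roll(img[..., 0], (dy, dx), axis=(0, 1))
    g = img[..., 1]
    b = np.roll(img[..., 2], (-dy, -dx), axis=(0, 1))
    return np.stack([r, g, b], axis=-1)
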
19 sexp_effects/effects/ripple.sexp Normal file
@@ -0,0 +1,19 @@
|
||||
;; Ripple effect - radial wave distortion from center
|
||||
(require-primitives "geometry" "image" "math")
|
||||
|
||||
(define-effect ripple
|
||||
:params (
|
||||
(frequency :type int :default 5 :range [1 20])
|
||||
(amplitude :type int :default 10 :range [0 50])
|
||||
(center_x :type float :default 0.5 :range [0 1])
|
||||
(center_y :type float :default 0.5 :range [0 1])
|
||||
(decay :type int :default 1 :range [0 5])
|
||||
(speed :type int :default 1 :range [0 10])
|
||||
)
|
||||
(let* ((w (image:width frame))
|
||||
(h (image:height frame))
|
||||
(cx (* w center_x))
|
||||
(cy (* h center_y))
|
||||
(phase (* (or t 0) speed 2 pi))
|
||||
(coords (geometry:ripple-displace w h frequency amplitude cx cy decay phase)))
|
||||
(geometry:remap frame (geometry:coords-x coords) (geometry:coords-y coords))))
|
||||
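`geometry:ripple-displace` is not in this diff; one plausible shape for such a displacement map (hypothetical) pushes each pixel along its radial direction from (cx, cy) by a sine of its distance, decaying outward:

import numpy as np

def ripple_coords(w, h, frequency, amplitude, cx, cy, decay, phase):
    ys, xs = np.mgrid[0:h, 0:w].astype(np.float32)
    dx, dy = xs - cx, ys - cy
    r = np.hypot(dx, dy) + 1e-6  # avoid division by zero at the centre
    wave = amplitude * np.sin(2 * np.pi * frequency * r / max(w, h) + phase)
    wave *= np.exp(-decay * r / max(w, h))  # fade the wave with distance
    return xs + wave * dx / r, ys + wave * dy / r

The two returned coordinate planes would then be sampled against the frame, e.g. with cv2.remap, which is presumably what geometry:remap wraps.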
11
sexp_effects/effects/rotate.sexp
Normal file
@@ -0,0 +1,11 @@
;; Rotate effect - rotates image

(require-primitives "geometry")

(define-effect rotate
  :params (
    (angle :type int :default 0 :range [-360 360])
    (speed :type int :default 0 :desc "Additional rotation per second, in the same units as angle")
  )
  (let ((total-angle (+ angle (* speed t))))
    (geometry:rotate-img frame total-angle)))
9
sexp_effects/effects/saturation.sexp
Normal file
@@ -0,0 +1,9 @@
;; Saturation effect - adjusts color saturation
;; Uses vectorized shift-hsv primitive for fast processing
(require-primitives "color_ops")

(define-effect saturation
  :params (
    (amount :type int :default 1 :range [0 3])
  )
  (color_ops:adjust-saturation frame amount))
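`color_ops:adjust-saturation` is not shown here; the likely shape of such a primitive (a sketch assuming OpenCV-style HSV) scales the S channel, so amount=0 gives grayscale, 1 is identity, and values above 1 oversaturate:

import cv2
import numpy as np

def adjust_saturation(frame, amount=1.0):
    # Scale saturation in HSV space, clip to 8-bit range, convert back.
    hsv = cv2.cvtColor(frame, cv2.COLOR_RGB2HSV).astype(np.float32)
    hsv[:, :, 1] = np.clip(hsv[:, :, 1] * amount, 0, 255)
    return cv2.cvtColor(hsv.astype(np.uint8), cv2.COLOR_HSV2RGB)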
15
sexp_effects/effects/scanlines.sexp
Normal file
@@ -0,0 +1,15 @@
;; Scanlines effect - VHS-style horizontal line shifting
(require-primitives "core")

(define-effect scanlines
  :params (
    (amplitude :type int :default 10 :range [0 100])
    (frequency :type int :default 10 :range [1 100])
    (randomness :type float :default 0.5 :range [0 1])
  )
  (map-rows frame
    (lambda (y row)
      (let* ((sine-shift (* amplitude (sin (/ (* y 6.28) (max 1 frequency)))))
             (rand-shift (core:rand-range (- amplitude) amplitude))
             (shift (floor (lerp sine-shift rand-shift randomness))))
        (roll row shift 0)))))
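The per-row logic above restated in Python (a sketch): each row is shifted horizontally by a blend of a sine of its y position and a random offset, weighted by `randomness`:

import numpy as np

def scanlines(frame, amplitude=10, frequency=10, randomness=0.5):
    out = np.empty_like(frame)
    for y in range(frame.shape[0]):
        sine = amplitude * np.sin(2 * np.pi * y / max(1, frequency))
        rand = np.random.uniform(-amplitude, amplitude)
        # lerp between the deterministic and random shift
        shift = int((1 - randomness) * sine + randomness * rand)
        out[y] = np.roll(frame[y], shift, axis=0)
    return out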
7
sexp_effects/effects/sepia.sexp
Normal file
@@ -0,0 +1,7 @@
;; Sepia effect - applies sepia tone
;; Classic warm vintage look
(require-primitives "color_ops")

(define-effect sepia
  :params ()
  (color_ops:sepia frame))
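`color_ops:sepia` is not in this diff; the classic transform it most likely wraps (a sketch) mixes RGB through the standard sepia matrix and clips to 8 bits:

import numpy as np

# Widely used sepia mixing coefficients (rows produce output R, G, B).
SEPIA = np.array([[0.393, 0.769, 0.189],
                  [0.349, 0.686, 0.168],
                  [0.272, 0.534, 0.131]])

def sepia(frame):
    return np.clip(frame.astype(np.float32) @ SEPIA.T, 0, 255).astype(np.uint8)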
8
sexp_effects/effects/sharpen.sexp
Normal file
@@ -0,0 +1,8 @@
;; Sharpen effect - sharpens edges
(require-primitives "image")

(define-effect sharpen
  :params (
    (amount :type int :default 1 :range [0 5])
  )
  (image:sharpen frame amount))
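A common way to implement such a sharpen primitive is an unsharp mask (a sketch; the repo's image:sharpen is not shown in this diff):

import cv2

def sharpen(frame, amount=1):
    # Add `amount` times the (frame - blurred) detail back onto the frame.
    blurred = cv2.GaussianBlur(frame, (0, 0), sigmaX=3)
    return cv2.addWeighted(frame, 1 + amount, blurred, -amount, 0)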
16
sexp_effects/effects/strobe.sexp
Normal file
@@ -0,0 +1,16 @@
;; Strobe effect - holds frames for choppy look
(require-primitives "core")

(define-effect strobe
  :params (
    (frame_rate :type int :default 12 :range [1 60])
  )
  (let* ((held (state-get "held" nil))
         (held-until (state-get "held-until" 0))
         (frame-duration (/ 1 frame_rate)))
    (if (or (core:is-nil held) (>= t held-until))
        (begin
          (state-set "held" (copy frame))
          (state-set "held-until" (+ t frame-duration))
          frame)
        held)))
17
sexp_effects/effects/swirl.sexp
Normal file
@@ -0,0 +1,17 @@
;; Swirl effect - spiral vortex distortion
(require-primitives "geometry" "image")

(define-effect swirl
  :params (
    (strength :type int :default 1 :range [-10 10])
    (radius :type float :default 0.5 :range [0.1 2])
    (center_x :type float :default 0.5 :range [0 1])
    (center_y :type float :default 0.5 :range [0 1])
    (falloff :type string :default "quadratic")
  )
  (let* ((w (image:width frame))
         (h (image:height frame))
         (cx (* w center_x))
         (cy (* h center_y))
         (coords (geometry:swirl-coords w h strength radius cx cy falloff)))
    (geometry:remap frame (geometry:coords-x coords) (geometry:coords-y coords))))
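`geometry:swirl-coords` is not shown in this diff; one plausible coordinate map for the quadratic falloff case (hypothetical) rotates each pixel about (cx, cy) by an angle that falls off toward the edge of the radius:

import numpy as np

def swirl_coords(w, h, strength, radius, cx, cy):
    ys, xs = np.mgrid[0:h, 0:w].astype(np.float32)
    dx, dy = xs - cx, ys - cy
    r = np.hypot(dx, dy)
    # Full twist at the centre, fading quadratically to zero at the radius.
    falloff = np.clip(1 - r / (radius * min(w, h)), 0, 1) ** 2
    theta = strength * falloff
    return (cx + dx * np.cos(theta) - dy * np.sin(theta),
            cy + dx * np.sin(theta) + dy * np.cos(theta))

As with ripple, the resulting coordinate planes would be fed to a remap operation such as cv2.remap.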
9
sexp_effects/effects/threshold.sexp
Normal file
@@ -0,0 +1,9 @@
;; Threshold effect - converts to black and white
(require-primitives "color_ops")

(define-effect threshold
  :params (
    (level :type int :default 128 :range [0 255])
    (invert :type bool :default false)
  )
  (color_ops:threshold frame level invert))
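`color_ops:threshold` is not in this diff; the arithmetic such a primitive boils down to (a sketch assuming 8-bit RGB input):

import numpy as np

def threshold(frame, level=128, invert=False):
    # Per-pixel black/white decision from grayscale intensity.
    mask = frame.mean(axis=2) >= level
    if invert:
        mask = ~mask
    bw = np.where(mask, 255, 0).astype(np.uint8)
    return np.dstack([bw, bw, bw])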
Some files were not shown because too many files have changed in this diff.