Compare commits
300 Commits
17b92c77ef
...
oauth-sso
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
476e9fa5b0 | ||
|
|
97309d3aad | ||
|
|
670aa582df | ||
|
|
be9fa8e920 | ||
|
|
82823e393a | ||
|
|
28a5cc37d0 | ||
|
|
956da6df2e | ||
|
|
a3437f0069 | ||
|
|
fc93e27b30 | ||
|
|
7ec5609aac | ||
|
|
80b423034d | ||
|
|
eaefdd326b | ||
|
|
e1f13abc7f | ||
|
|
b294fd0695 | ||
|
|
ab3b6b672d | ||
|
|
c7466a2fe8 | ||
|
|
be263b1398 | ||
|
|
49097eef53 | ||
|
|
ca4e86d07e | ||
|
|
589ea77b98 | ||
|
|
f5ef9cddd2 | ||
|
|
6e8e8f8de9 | ||
|
|
a29841f3c5 | ||
|
|
b322e003be | ||
|
|
146db1c60f | ||
|
|
fc9597456f | ||
|
|
dbc4ece2cc | ||
|
|
88ded8c927 | ||
|
|
48018d09b7 | ||
|
|
7411aa74c4 | ||
|
|
0534081e44 | ||
|
|
2f56ffc472 | ||
|
|
4647dd52c8 | ||
|
|
9a73dffaa6 | ||
|
|
5835344e30 | ||
|
|
3dda5f5f50 | ||
|
|
cdbc962b12 | ||
|
|
76e4a002a0 | ||
|
|
e2761798a8 | ||
|
|
81dc40534c | ||
|
|
43d73c7bf7 | ||
|
|
d900df5aa0 | ||
|
|
13415fb420 | ||
|
|
d6c575760b | ||
|
|
9a8a701492 | ||
|
|
b15e381f81 | ||
|
|
baf79f453f | ||
|
|
cd95e62899 | ||
|
|
514ee89cca | ||
|
|
0a6dc0099b | ||
|
|
180d6a874e | ||
|
|
949d716d9a | ||
|
|
d5f30035da | ||
|
|
4b0f1b0bcd | ||
|
|
9583ecb81a | ||
|
|
6ee8d72d24 | ||
|
|
ed617fcdd6 | ||
|
|
ef3638d3cf | ||
|
|
dd169635ca | ||
|
|
92f8e8a98c | ||
|
|
48eed99a28 | ||
|
|
9ce64ea797 | ||
|
|
70530e5c92 | ||
|
|
76bf19b8ab | ||
|
|
1bd171b892 | ||
|
|
e4349ba501 | ||
|
|
6e20d19a23 | ||
|
|
e64ca9fe3a | ||
|
|
def62de578 | ||
|
|
f858e25246 | ||
|
|
a162171025 | ||
|
|
234fbdbee2 | ||
|
|
1442216a15 | ||
|
|
b773689814 | ||
|
|
2d20a6f452 | ||
|
|
8b9309a90b | ||
|
|
3b964ba18d | ||
|
|
4d95ec5a32 | ||
|
|
ad1d7893f8 | ||
|
|
75f9d8fb11 | ||
|
|
b96e8ca4d2 | ||
|
|
8051ef9ba9 | ||
|
|
3adf927ca1 | ||
|
|
9bdad268a5 | ||
|
|
1cb9c3ac8a | ||
|
|
36c4afeb84 | ||
|
|
b6292268fa | ||
|
|
3a02fca7fd | ||
|
|
c4004b3f5d | ||
|
|
41adf058bd | ||
|
|
b7e3827fa2 | ||
|
|
771fb8cebc | ||
|
|
ef4bc24eda | ||
|
|
0bd8ee71c7 | ||
|
|
9151d2c2a8 | ||
|
|
ed5ef2bf39 | ||
|
|
bbcb79cc1e | ||
|
|
11bcafee55 | ||
|
|
b49d109a51 | ||
|
|
9096824444 | ||
|
|
fe6730ce72 | ||
|
|
6ea39d633b | ||
|
|
0847b733a9 | ||
|
|
7009840712 | ||
|
|
92eeb58c71 | ||
|
|
2c1728c6ce | ||
|
|
6e0ee65e40 | ||
|
|
3116a70c3e | ||
|
|
09d5359725 | ||
|
|
4930eb99ad | ||
|
|
86830019ad | ||
|
|
5bc655f8c8 | ||
|
|
8f4d6d12bc | ||
|
|
82599eff1e | ||
|
|
a57be27907 | ||
|
|
9302283e86 | ||
|
|
487acdd606 | ||
|
|
6b2991bf24 | ||
|
|
3ec045c533 | ||
|
|
3bff130e57 | ||
|
|
414cbddd66 | ||
|
|
89b2fd3d2e | ||
|
|
3d5a08a7dc | ||
|
|
d8360e5945 | ||
|
|
44066e9bdd | ||
|
|
b2be9ff976 | ||
|
|
32474380fa | ||
|
|
5bd055f031 | ||
|
|
cbdae70b66 | ||
|
|
2d520cf256 | ||
|
|
e6dd6e851c | ||
|
|
2081092ce8 | ||
|
|
d20eef76ad | ||
|
|
581da68b3b | ||
|
|
bb458aa924 | ||
|
|
270eeb3fcf | ||
|
|
f290c9f01e | ||
|
|
396bacc89b | ||
|
|
b599b59d44 | ||
|
|
cf83952a19 | ||
|
|
ca2d4a17a4 | ||
|
|
4f3eccd4d3 | ||
|
|
d685518c4c | ||
|
|
529c173722 | ||
|
|
d7d7cd28c2 | ||
|
|
c46fcd2308 | ||
|
|
2c3f943e5a | ||
|
|
7813eb081a | ||
|
|
0f4817e3a8 | ||
|
|
3ee4dc1efb | ||
|
|
c5d70f61c8 | ||
|
|
c3d6427883 | ||
|
|
ad15ef1ce7 | ||
|
|
164f1291ac | ||
|
|
59c72500ac | ||
|
|
84d465b264 | ||
|
|
f28edf7ab6 | ||
|
|
be4d0da84f | ||
|
|
bfe96a431c | ||
|
|
e1c0ebc0a2 | ||
|
|
6c4b850487 | ||
|
|
d08fbfc0bd | ||
|
|
bf188f4671 | ||
|
|
d603485d40 | ||
|
|
2c27eacb12 | ||
|
|
1ad8fe9890 | ||
|
|
3e3df6ff2a | ||
|
|
faf794ef35 | ||
|
|
a4bf0eae24 | ||
|
|
9a1ed2adea | ||
|
|
f67aacdceb | ||
|
|
da4e2e9d3d | ||
|
|
f7fa683bcf | ||
|
|
6c973203fc | ||
|
|
7e38b4a0c8 | ||
|
|
8bf6f87c2a | ||
|
|
abe89c9177 | ||
|
|
427de25e13 | ||
|
|
a5a718e387 | ||
|
|
c5c7e5e162 | ||
|
|
b36aab33bb | ||
|
|
2e3d3a5c6d | ||
|
|
5b05dbd31e | ||
|
|
a0b113923e | ||
|
|
cc29311d1c | ||
|
|
87ff3d5d14 | ||
|
|
6adef63fad | ||
|
|
f1b90fe65d | ||
|
|
4b5066c525 | ||
|
|
280dddebd0 | ||
|
|
79a74df2bb | ||
|
|
7d24ba4dd7 | ||
|
|
ee8719ac0b | ||
|
|
19634a4ac5 | ||
|
|
98ca2a6c81 | ||
|
|
9bb1c4278e | ||
|
|
73cc3e7c2f | ||
|
|
991db29e27 | ||
|
|
2cc7d88d2e | ||
|
|
9f8aa54e2b | ||
|
|
977d9a9258 | ||
|
|
585c75e846 | ||
|
|
19e2277155 | ||
|
|
1e542a1e0b | ||
|
|
1e928ae771 | ||
|
|
4ba981ee8d | ||
|
|
e465496313 | ||
|
|
56009c391d | ||
|
|
0ba1d6e82d | ||
|
|
19e74a097d | ||
|
|
60344b34f4 | ||
|
|
8e7228fc38 | ||
|
|
f333eeb1e6 | ||
|
|
b35fe3d7a7 | ||
|
|
124cf65138 | ||
|
|
00b4934bf6 | ||
|
|
faa54b2e85 | ||
|
|
92d26b2b72 | ||
|
|
494a2a8650 | ||
|
|
c7c7c90909 | ||
|
|
db3faa6a2c | ||
|
|
8e1c08abdc | ||
|
|
8e0b473925 | ||
|
|
3599f3779b | ||
|
|
b686ce75f8 | ||
|
|
3dbbb52d23 | ||
|
|
10fe05a049 | ||
|
|
65a8170192 | ||
|
|
f554122b07 | ||
|
|
82d94f6e0e | ||
|
|
5c3558e1ba | ||
|
|
7a4cd3d413 | ||
|
|
e91803f71d | ||
|
|
5e6ed38ca4 | ||
|
|
48faf7ee43 | ||
|
|
0a82158622 | ||
|
|
fe8e65881d | ||
|
|
8e70a9b9f2 | ||
|
|
d73592fbe2 | ||
|
|
cdd29e07f5 | ||
|
|
24b6b4af28 | ||
|
|
0ead728fde | ||
|
|
f7dbb952ab | ||
|
|
e59a50c000 | ||
|
|
9df78f771d | ||
|
|
9a8e26e79c | ||
|
|
145c69f21b | ||
|
|
ccf28bd351 | ||
|
|
50ea0f1491 | ||
|
|
c6fadea090 | ||
|
|
95ffe9fa69 | ||
|
|
aacf1ceb77 | ||
|
|
eb1de433b1 | ||
|
|
59de1cf6b5 | ||
|
|
cca8f74d3c | ||
|
|
d92e493689 | ||
|
|
8ab0f05a7d | ||
|
|
a6dd470623 | ||
|
|
209d416442 | ||
|
|
0f18e5de8d | ||
|
|
ab2f65e14d | ||
|
|
0ddeb5ba94 | ||
|
|
156014a1f3 | ||
|
|
088af1611b | ||
|
|
3acaacce1f | ||
|
|
e806337503 | ||
|
|
6c73a06539 | ||
|
|
970faa3fa0 | ||
|
|
b57745098e | ||
|
|
2e9ba46f19 | ||
|
|
9e3c4c9d78 | ||
|
|
3373be285e | ||
|
|
a6fe88c277 | ||
|
|
697fa7c64d | ||
|
|
f6a7e0852c | ||
|
|
921d81421a | ||
|
|
27cbb0a85c | ||
|
|
9c148f535d | ||
|
|
c0fe22313f | ||
|
|
e8501de466 | ||
|
|
4b22fb6588 | ||
|
|
854396680f | ||
|
|
8591faf0fc | ||
|
|
4f011a66ff | ||
|
|
b47417704e | ||
|
|
3f77c24699 | ||
|
|
ada51c0880 | ||
|
|
e7e95b7857 | ||
|
|
828a958a7b | ||
|
|
7c6b8d7170 | ||
|
|
f0db4f4ea6 | ||
|
|
a34fff3aaa | ||
|
|
b372d02df2 | ||
|
|
d19d6d6e42 | ||
|
|
eed4596af8 | ||
|
|
83cce09b1a | ||
|
|
0ec1de3cb7 | ||
|
|
cc6be54a92 | ||
|
|
29da91e01a | ||
|
|
73b7f173c5 |
22
.dockerignore
Normal file
22
.dockerignore
Normal file
@@ -0,0 +1,22 @@
|
||||
# Don't copy local clones - Dockerfile will clone fresh
|
||||
artdag-effects/
|
||||
|
||||
# Python cache
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*.egg-info/
|
||||
.pytest_cache/
|
||||
|
||||
# Virtual environments
|
||||
.venv/
|
||||
venv/
|
||||
|
||||
# Local env
|
||||
.env
|
||||
|
||||
# Git
|
||||
.git/
|
||||
|
||||
# IDE
|
||||
.vscode/
|
||||
.idea/
|
||||
10
.env.example
10
.env.example
@@ -1,5 +1,15 @@
|
||||
# L1 Server Configuration
|
||||
|
||||
# PostgreSQL password (REQUIRED - no default)
|
||||
POSTGRES_PASSWORD=changeme-generate-with-openssl-rand-hex-16
|
||||
|
||||
# Admin token for purge operations (REQUIRED - no default)
|
||||
# Generate with: openssl rand -hex 32
|
||||
ADMIN_TOKEN=changeme-generate-with-openssl-rand-hex-32
|
||||
|
||||
# L1 host IP/hostname for GPU worker cross-VPC access
|
||||
L1_HOST=your-l1-server-ip
|
||||
|
||||
# This L1 server's public URL (sent to L2 when publishing)
|
||||
L1_PUBLIC_URL=https://l1.artdag.rose-ash.com
|
||||
|
||||
|
||||
11
.env.gpu
Normal file
11
.env.gpu
Normal file
@@ -0,0 +1,11 @@
|
||||
# GPU worker env - connects to L1 host via public IP (cross-VPC)
|
||||
REDIS_URL=redis://138.68.142.139:16379/5
|
||||
DATABASE_URL=postgresql://artdag:f960bcc61d8b2155a1d57f7dd72c1c58@138.68.142.139:15432/artdag
|
||||
IPFS_API=/ip4/138.68.142.139/tcp/15001
|
||||
IPFS_GATEWAYS=https://ipfs.io,https://cloudflare-ipfs.com,https://dweb.link
|
||||
IPFS_GATEWAY_URL=https://celery-artdag.rose-ash.com/ipfs
|
||||
CACHE_DIR=/data/cache
|
||||
C_FORCE_ROOT=true
|
||||
ARTDAG_CLUSTER_KEY=
|
||||
NVIDIA_VISIBLE_DEVICES=all
|
||||
STREAMING_GPU_PERSIST=0
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -5,3 +5,4 @@ __pycache__/
|
||||
.venv/
|
||||
venv/
|
||||
.env
|
||||
artdag-effects/
|
||||
|
||||
@@ -16,10 +16,14 @@ COPY . .
|
||||
# Clone effects repo
|
||||
RUN git clone https://git.rose-ash.com/art-dag/effects.git /app/artdag-effects
|
||||
|
||||
# Build client tarball for download
|
||||
RUN ./build-client.sh
|
||||
|
||||
# Create cache directory
|
||||
RUN mkdir -p /data/cache
|
||||
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV EFFECTS_PATH=/app/artdag-effects
|
||||
ENV PYTHONPATH=/app
|
||||
|
||||
|
||||
98
Dockerfile.gpu
Normal file
98
Dockerfile.gpu
Normal file
@@ -0,0 +1,98 @@
|
||||
# GPU-enabled worker image
|
||||
# Multi-stage build: use devel image for compiling, runtime for final image
|
||||
|
||||
# Stage 1: Build decord with CUDA
|
||||
FROM nvidia/cuda:12.1.1-cudnn8-devel-ubuntu22.04 AS builder
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python3.11 \
|
||||
python3.11-venv \
|
||||
python3.11-dev \
|
||||
python3-pip \
|
||||
git \
|
||||
cmake \
|
||||
build-essential \
|
||||
pkg-config \
|
||||
libavcodec-dev \
|
||||
libavformat-dev \
|
||||
libavutil-dev \
|
||||
libavdevice-dev \
|
||||
libavfilter-dev \
|
||||
libswresample-dev \
|
||||
libswscale-dev \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& ln -sf /usr/bin/python3.11 /usr/bin/python3 \
|
||||
&& ln -sf /usr/bin/python3 /usr/bin/python
|
||||
|
||||
# Download Video Codec SDK headers for NVDEC/NVCUVID
|
||||
RUN git clone https://github.com/FFmpeg/nv-codec-headers.git /tmp/nv-codec-headers && \
|
||||
cd /tmp/nv-codec-headers && make install && rm -rf /tmp/nv-codec-headers
|
||||
|
||||
# Create stub for libnvcuvid (real library comes from driver at runtime)
|
||||
RUN echo 'void* __nvcuvid_stub__;' | gcc -shared -x c - -o /usr/local/cuda/lib64/libnvcuvid.so
|
||||
|
||||
# Build decord with CUDA support
|
||||
RUN git clone --recursive https://github.com/dmlc/decord /tmp/decord && \
|
||||
cd /tmp/decord && \
|
||||
mkdir build && cd build && \
|
||||
cmake .. -DUSE_CUDA=ON -DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_CUDA_ARCHITECTURES="70;75;80;86;89;90" && \
|
||||
make -j$(nproc) && \
|
||||
cd ../python && pip install --target=/decord-install .
|
||||
|
||||
# Stage 2: Runtime image
|
||||
FROM nvidia/cuda:12.1.1-cudnn8-runtime-ubuntu22.04
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install Python 3.11 and system dependencies
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python3.11 \
|
||||
python3.11-venv \
|
||||
python3-pip \
|
||||
git \
|
||||
ffmpeg \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& ln -sf /usr/bin/python3.11 /usr/bin/python3 \
|
||||
&& ln -sf /usr/bin/python3 /usr/bin/python
|
||||
|
||||
# Upgrade pip
|
||||
RUN python3 -m pip install --upgrade pip
|
||||
|
||||
# Install CPU dependencies first
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Install GPU-specific dependencies (CuPy for CUDA 12.x)
|
||||
RUN pip install --no-cache-dir cupy-cuda12x
|
||||
|
||||
# Install PyNvVideoCodec for zero-copy GPU encoding
|
||||
RUN pip install --no-cache-dir PyNvVideoCodec
|
||||
|
||||
# Copy decord from builder stage
|
||||
COPY --from=builder /decord-install /usr/local/lib/python3.11/dist-packages/
|
||||
COPY --from=builder /tmp/decord/build/libdecord.so /usr/local/lib/
|
||||
RUN ldconfig
|
||||
|
||||
# Clone effects repo (before COPY so it gets cached)
|
||||
RUN git clone https://git.rose-ash.com/art-dag/effects.git /app/artdag-effects
|
||||
|
||||
# Copy application (this invalidates cache for any code change)
|
||||
COPY . .
|
||||
|
||||
# Create cache directory
|
||||
RUN mkdir -p /data/cache
|
||||
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV EFFECTS_PATH=/app/artdag-effects
|
||||
ENV PYTHONPATH=/app
|
||||
# GPU persistence enabled - frames stay on GPU throughout pipeline
|
||||
ENV STREAMING_GPU_PERSIST=1
|
||||
# Preload libnvcuvid for decord NVDEC GPU decode
|
||||
ENV LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libnvcuvid.so
|
||||
# Use cluster's public IPFS gateway for HLS segment URLs
|
||||
ENV IPFS_GATEWAY_URL=https://celery-artdag.rose-ash.com/ipfs
|
||||
|
||||
# Default command runs celery worker
|
||||
CMD ["celery", "-A", "celery_app", "worker", "--loglevel=info", "-E", "-Q", "gpu,celery"]
|
||||
28
README.md
28
README.md
@@ -5,7 +5,7 @@ L1 rendering server for the Art DAG system. Manages distributed rendering jobs v
|
||||
## Features
|
||||
|
||||
- **3-Phase Execution**: Analyze → Plan → Execute pipeline for recipe-based rendering
|
||||
- **Content-Addressable Caching**: SHA3-256 hashed content with deduplication
|
||||
- **Content-Addressable Caching**: IPFS CIDs with deduplication
|
||||
- **IPFS Integration**: Optional IPFS-primary mode for distributed storage
|
||||
- **Storage Providers**: S3, IPFS, and local storage backends
|
||||
- **DAG Visualization**: Interactive graph visualization of execution plans
|
||||
@@ -60,7 +60,7 @@ The stack includes:
|
||||
| `HOST` | `0.0.0.0` | Server bind address |
|
||||
| `PORT` | `8000` | Server port |
|
||||
| `REDIS_URL` | `redis://localhost:6379/5` | Redis connection |
|
||||
| `DATABASE_URL` | `postgresql://artdag:artdag@localhost:5432/artdag` | PostgreSQL connection |
|
||||
| `DATABASE_URL` | **(required)** | PostgreSQL connection |
|
||||
| `CACHE_DIR` | `~/.artdag/cache` | Local cache directory |
|
||||
| `IPFS_API` | `/dns/localhost/tcp/5001` | IPFS API multiaddr |
|
||||
| `IPFS_GATEWAY_URL` | `https://ipfs.io/ipfs` | Public IPFS gateway |
|
||||
@@ -130,13 +130,13 @@ Interactive docs: http://localhost:8100/docs
|
||||
|
||||
| Method | Path | Description |
|
||||
|--------|------|-------------|
|
||||
| GET | `/cache/{hash}` | Get cached content (with preview) |
|
||||
| GET | `/cache/{hash}/raw` | Download raw content |
|
||||
| GET | `/cache/{hash}/mp4` | Get MP4 video |
|
||||
| GET | `/cache/{hash}/meta` | Get content metadata |
|
||||
| PATCH | `/cache/{hash}/meta` | Update metadata |
|
||||
| POST | `/cache/{hash}/publish` | Publish to L2 |
|
||||
| DELETE | `/cache/{hash}` | Delete from cache |
|
||||
| GET | `/cache/{cid}` | Get cached content (with preview) |
|
||||
| GET | `/cache/{cid}/raw` | Download raw content |
|
||||
| GET | `/cache/{cid}/mp4` | Get MP4 video |
|
||||
| GET | `/cache/{cid}/meta` | Get content metadata |
|
||||
| PATCH | `/cache/{cid}/meta` | Update metadata |
|
||||
| POST | `/cache/{cid}/publish` | Publish to L2 |
|
||||
| DELETE | `/cache/{cid}` | Delete from cache |
|
||||
| POST | `/cache/import?path=` | Import local file |
|
||||
| POST | `/cache/upload` | Upload file |
|
||||
| GET | `/media` | Browse media gallery |
|
||||
@@ -185,7 +185,7 @@ Recipes are executed in three phases:
|
||||
### Phase 1: Analyze
|
||||
Extract features from input files:
|
||||
- **Audio/Video**: Tempo, beat times, energy levels
|
||||
- Results cached by content hash
|
||||
- Results cached by CID
|
||||
|
||||
### Phase 2: Plan
|
||||
Generate an execution plan:
|
||||
@@ -237,7 +237,7 @@ output: sync_video
|
||||
|
||||
### Local Cache
|
||||
- Location: `~/.artdag/cache/` (or `CACHE_DIR`)
|
||||
- Content-addressed by SHA3-256 hash
|
||||
- Content-addressed by IPFS CID
|
||||
- Subdirectories: `plans/`, `analysis/`
|
||||
|
||||
### Redis
|
||||
@@ -318,12 +318,12 @@ Every render produces a provenance record:
|
||||
"task_id": "celery-task-uuid",
|
||||
"rendered_at": "2026-01-07T...",
|
||||
"rendered_by": "@giles@artdag.rose-ash.com",
|
||||
"output": {"name": "...", "content_hash": "..."},
|
||||
"output": {"name": "...", "cid": "Qm..."},
|
||||
"inputs": [...],
|
||||
"effects": [...],
|
||||
"infrastructure": {
|
||||
"software": {"name": "infra:artdag", "content_hash": "..."},
|
||||
"hardware": {"name": "infra:giles-hp", "content_hash": "..."}
|
||||
"software": {"name": "infra:artdag", "cid": "Qm..."},
|
||||
"hardware": {"name": "infra:giles-hp", "cid": "Qm..."}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
212
app/__init__.py
212
app/__init__.py
@@ -4,14 +4,37 @@ Art-DAG L1 Server Application Factory.
|
||||
Creates and configures the FastAPI application with all routers and middleware.
|
||||
"""
|
||||
|
||||
import secrets
|
||||
import time
|
||||
from pathlib import Path
|
||||
from fastapi import FastAPI
|
||||
from urllib.parse import quote
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse, RedirectResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from artdag_common import create_jinja_env
|
||||
from artdag_common.middleware.auth import get_user_from_cookie
|
||||
|
||||
from .config import settings
|
||||
|
||||
# Paths that should never trigger a silent auth check
|
||||
_SKIP_PREFIXES = ("/auth/", "/static/", "/api/", "/ipfs/", "/download/", "/inbox", "/health", "/internal/", "/oembed")
|
||||
_SILENT_CHECK_COOLDOWN = 300 # 5 minutes
|
||||
_DEVICE_COOKIE = "artdag_did"
|
||||
_DEVICE_COOKIE_MAX_AGE = 30 * 24 * 3600 # 30 days
|
||||
|
||||
# Derive external base URL from oauth_redirect_uri (e.g. https://celery-artdag.rose-ash.com)
|
||||
_EXTERNAL_BASE = settings.oauth_redirect_uri.rsplit("/auth/callback", 1)[0]
|
||||
|
||||
|
||||
def _external_url(request: Request) -> str:
|
||||
"""Build external URL from request path + query, using configured base domain."""
|
||||
url = f"{_EXTERNAL_BASE}{request.url.path}"
|
||||
if request.url.query:
|
||||
url += f"?{request.url.query}"
|
||||
return url
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
|
||||
"""
|
||||
@@ -26,51 +49,186 @@ def create_app() -> FastAPI:
|
||||
version="1.0.0",
|
||||
)
|
||||
|
||||
# Database lifecycle events
|
||||
from database import init_db, close_db
|
||||
|
||||
@app.on_event("startup")
|
||||
async def startup():
|
||||
await init_db()
|
||||
|
||||
@app.on_event("shutdown")
|
||||
async def shutdown():
|
||||
await close_db()
|
||||
|
||||
# Silent auth check — auto-login via prompt=none OAuth
|
||||
# NOTE: registered BEFORE device_id so device_id is outermost (runs first)
|
||||
@app.middleware("http")
|
||||
async def silent_auth_check(request: Request, call_next):
|
||||
path = request.url.path
|
||||
if (
|
||||
request.method != "GET"
|
||||
or any(path.startswith(p) for p in _SKIP_PREFIXES)
|
||||
or request.headers.get("hx-request") # skip HTMX
|
||||
):
|
||||
return await call_next(request)
|
||||
|
||||
# Already logged in — but verify account hasn't logged out
|
||||
if get_user_from_cookie(request):
|
||||
device_id = getattr(request.state, "device_id", None)
|
||||
if device_id:
|
||||
try:
|
||||
from .dependencies import get_redis_client
|
||||
r = get_redis_client()
|
||||
if not r.get(f"did_auth:{device_id}"):
|
||||
# Account logged out — clear our cookie
|
||||
response = await call_next(request)
|
||||
response.delete_cookie("artdag_session")
|
||||
response.delete_cookie("pnone_at")
|
||||
return response
|
||||
except Exception:
|
||||
pass
|
||||
return await call_next(request)
|
||||
|
||||
# Check cooldown — don't re-check within 5 minutes
|
||||
pnone_at = request.cookies.get("pnone_at")
|
||||
if pnone_at:
|
||||
try:
|
||||
pnone_ts = float(pnone_at)
|
||||
if (time.time() - pnone_ts) < _SILENT_CHECK_COOLDOWN:
|
||||
# But first check if account signalled a login via inbox delivery
|
||||
device_id = getattr(request.state, "device_id", None)
|
||||
if device_id:
|
||||
try:
|
||||
from .dependencies import get_redis_client
|
||||
r = get_redis_client()
|
||||
auth_ts = r.get(f"did_auth:{device_id}")
|
||||
if auth_ts and float(auth_ts) > pnone_ts:
|
||||
# Login happened since our last check — retry
|
||||
current_url = _external_url(request)
|
||||
return RedirectResponse(
|
||||
url=f"/auth/login?prompt=none&next={quote(current_url, safe='')}",
|
||||
status_code=302,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
return await call_next(request)
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
# Redirect to silent OAuth check
|
||||
current_url = _external_url(request)
|
||||
return RedirectResponse(
|
||||
url=f"/auth/login?prompt=none&next={quote(current_url, safe='')}",
|
||||
status_code=302,
|
||||
)
|
||||
|
||||
# Device ID middleware — track browser identity across domains
|
||||
# Registered AFTER silent_auth_check so it's outermost (always runs)
|
||||
@app.middleware("http")
|
||||
async def device_id_middleware(request: Request, call_next):
|
||||
did = request.cookies.get(_DEVICE_COOKIE)
|
||||
if did:
|
||||
request.state.device_id = did
|
||||
request.state._new_device_id = False
|
||||
else:
|
||||
request.state.device_id = secrets.token_urlsafe(32)
|
||||
request.state._new_device_id = True
|
||||
|
||||
response = await call_next(request)
|
||||
|
||||
if getattr(request.state, "_new_device_id", False):
|
||||
response.set_cookie(
|
||||
key=_DEVICE_COOKIE,
|
||||
value=request.state.device_id,
|
||||
max_age=_DEVICE_COOKIE_MAX_AGE,
|
||||
httponly=True,
|
||||
samesite="lax",
|
||||
secure=True,
|
||||
)
|
||||
return response
|
||||
|
||||
# Coop fragment pre-fetch — inject nav-tree, auth-menu, cart-mini into
|
||||
# request.state for full-page HTML renders. Skips HTMX, API, and
|
||||
# internal paths. Failures are silent (fragments default to "").
|
||||
_FRAG_SKIP = ("/auth/", "/api/", "/internal/", "/health", "/oembed",
|
||||
"/ipfs/", "/download/", "/inbox", "/static/")
|
||||
|
||||
@app.middleware("http")
|
||||
async def coop_fragments_middleware(request: Request, call_next):
|
||||
path = request.url.path
|
||||
if (
|
||||
request.method != "GET"
|
||||
or any(path.startswith(p) for p in _FRAG_SKIP)
|
||||
or request.headers.get("hx-request")
|
||||
or request.headers.get(fragments.FRAGMENT_HEADER)
|
||||
):
|
||||
request.state.nav_tree_html = ""
|
||||
request.state.auth_menu_html = ""
|
||||
request.state.cart_mini_html = ""
|
||||
return await call_next(request)
|
||||
|
||||
from artdag_common.fragments import fetch_fragments as _fetch_frags
|
||||
|
||||
user = get_user_from_cookie(request)
|
||||
auth_params = {"email": user.email} if user and user.email else {}
|
||||
nav_params = {"app_name": "artdag", "path": path}
|
||||
|
||||
try:
|
||||
nav_tree_html, auth_menu_html, cart_mini_html = await _fetch_frags([
|
||||
("blog", "nav-tree", nav_params),
|
||||
("account", "auth-menu", auth_params or None),
|
||||
("cart", "cart-mini", None),
|
||||
])
|
||||
except Exception:
|
||||
nav_tree_html = auth_menu_html = cart_mini_html = ""
|
||||
|
||||
request.state.nav_tree_html = nav_tree_html
|
||||
request.state.auth_menu_html = auth_menu_html
|
||||
request.state.cart_mini_html = cart_mini_html
|
||||
|
||||
return await call_next(request)
|
||||
|
||||
# Initialize Jinja2 templates
|
||||
template_dir = Path(__file__).parent / "templates"
|
||||
app.state.templates = create_jinja_env(template_dir)
|
||||
|
||||
# Custom 404 handler
|
||||
@app.exception_handler(404)
|
||||
async def not_found_handler(request: Request, exc):
|
||||
from artdag_common.middleware import wants_html
|
||||
if wants_html(request):
|
||||
from artdag_common import render
|
||||
return render(app.state.templates, "404.html", request,
|
||||
user=None,
|
||||
status_code=404,
|
||||
)
|
||||
return JSONResponse({"detail": "Not found"}, status_code=404)
|
||||
|
||||
# Include routers
|
||||
from .routers import auth, storage, api, recipes, cache, runs, home
|
||||
from .routers import auth, storage, api, recipes, cache, runs, home, effects, inbox, fragments, oembed
|
||||
|
||||
# Home and auth routers (root level)
|
||||
app.include_router(home.router, tags=["home"])
|
||||
app.include_router(auth.router, prefix="/auth", tags=["auth"])
|
||||
app.include_router(inbox.router, tags=["inbox"])
|
||||
app.include_router(fragments.router, tags=["fragments"])
|
||||
app.include_router(oembed.router, tags=["oembed"])
|
||||
|
||||
# Feature routers
|
||||
app.include_router(storage.router, prefix="/storage", tags=["storage"])
|
||||
app.include_router(api.router, prefix="/api", tags=["api"])
|
||||
|
||||
# Runs router - handles both /runs and /run/{id} patterns
|
||||
# Runs and recipes routers
|
||||
app.include_router(runs.router, prefix="/runs", tags=["runs"])
|
||||
# Also mount at /run for single-run detail URLs
|
||||
from fastapi import APIRouter
|
||||
run_detail_router = APIRouter()
|
||||
@run_detail_router.get("/{run_id}")
|
||||
async def run_detail_redirect(run_id: str, request):
|
||||
from .routers.runs import run_detail
|
||||
return await run_detail(run_id, request)
|
||||
app.include_router(run_detail_router, prefix="/run", tags=["runs"])
|
||||
|
||||
# Recipes router - handles both /recipes and /recipe/{id} patterns
|
||||
app.include_router(recipes.router, prefix="/recipes", tags=["recipes"])
|
||||
recipe_detail_router = APIRouter()
|
||||
@recipe_detail_router.get("/{recipe_id}")
|
||||
async def recipe_detail_redirect(recipe_id: str, request):
|
||||
from .routers.recipes import get_recipe
|
||||
return await get_recipe(recipe_id, request)
|
||||
app.include_router(recipe_detail_router, prefix="/recipe", tags=["recipes"])
|
||||
|
||||
# Cache router - handles /cache and /media
|
||||
app.include_router(cache.router, prefix="/cache", tags=["cache"])
|
||||
# Also mount media list at /media for convenience
|
||||
from fastapi import APIRouter as MediaRouter
|
||||
media_router = MediaRouter()
|
||||
@media_router.get("")
|
||||
async def media_list_redirect(request, offset: int = 0, limit: int = 24):
|
||||
from .routers.cache import list_media
|
||||
return await list_media(request, offset, limit)
|
||||
app.include_router(media_router, prefix="/media", tags=["media"])
|
||||
# Also mount cache router at /media for convenience
|
||||
app.include_router(cache.router, prefix="/media", tags=["media"])
|
||||
|
||||
# Effects router
|
||||
app.include_router(effects.router, prefix="/effects", tags=["effects"])
|
||||
|
||||
return app
|
||||
|
||||
|
||||
@@ -2,9 +2,11 @@
|
||||
L1 Server Configuration.
|
||||
|
||||
Environment-based configuration with sensible defaults.
|
||||
All config should go through this module - no direct os.environ calls elsewhere.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional
|
||||
@@ -31,9 +33,7 @@ class Settings:
|
||||
|
||||
# Database
|
||||
database_url: str = field(
|
||||
default_factory=lambda: os.environ.get(
|
||||
"DATABASE_URL", "postgresql://artdag:artdag@localhost:5432/artdag"
|
||||
)
|
||||
default_factory=lambda: os.environ.get("DATABASE_URL", "")
|
||||
)
|
||||
|
||||
# IPFS
|
||||
@@ -44,12 +44,34 @@ class Settings:
|
||||
default_factory=lambda: os.environ.get("IPFS_GATEWAY_URL", "https://ipfs.io/ipfs")
|
||||
)
|
||||
|
||||
# L2 Server
|
||||
l2_server: Optional[str] = field(
|
||||
default_factory=lambda: os.environ.get("L2_SERVER")
|
||||
# OAuth SSO (replaces L2 auth)
|
||||
oauth_authorize_url: str = field(
|
||||
default_factory=lambda: os.environ.get("OAUTH_AUTHORIZE_URL", "https://account.rose-ash.com/auth/oauth/authorize")
|
||||
)
|
||||
l2_domain: Optional[str] = field(
|
||||
default_factory=lambda: os.environ.get("L2_DOMAIN")
|
||||
oauth_token_url: str = field(
|
||||
default_factory=lambda: os.environ.get("OAUTH_TOKEN_URL", "https://account.rose-ash.com/auth/oauth/token")
|
||||
)
|
||||
oauth_client_id: str = field(
|
||||
default_factory=lambda: os.environ.get("OAUTH_CLIENT_ID", "artdag")
|
||||
)
|
||||
oauth_redirect_uri: str = field(
|
||||
default_factory=lambda: os.environ.get("OAUTH_REDIRECT_URI", "https://celery-artdag.rose-ash.com/auth/callback")
|
||||
)
|
||||
oauth_logout_url: str = field(
|
||||
default_factory=lambda: os.environ.get("OAUTH_LOGOUT_URL", "https://account.rose-ash.com/auth/sso-logout/")
|
||||
)
|
||||
secret_key: str = field(
|
||||
default_factory=lambda: os.environ.get("SECRET_KEY", "change-me-in-production")
|
||||
)
|
||||
|
||||
# GPU/Streaming settings
|
||||
streaming_gpu_persist: bool = field(
|
||||
default_factory=lambda: os.environ.get("STREAMING_GPU_PERSIST", "0") == "1"
|
||||
)
|
||||
ipfs_gateways: str = field(
|
||||
default_factory=lambda: os.environ.get(
|
||||
"IPFS_GATEWAYS", "https://ipfs.io,https://cloudflare-ipfs.com,https://dweb.link"
|
||||
)
|
||||
)
|
||||
|
||||
# Derived paths
|
||||
@@ -68,5 +90,27 @@ class Settings:
|
||||
self.analysis_cache_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def log_config(self, logger=None) -> None:
|
||||
"""Log all configuration values for debugging."""
|
||||
output = logger.info if logger else lambda x: print(x, file=sys.stderr)
|
||||
output("=" * 60)
|
||||
output("CONFIGURATION")
|
||||
output("=" * 60)
|
||||
output(f" cache_dir: {self.cache_dir}")
|
||||
output(f" redis_url: {self.redis_url}")
|
||||
output(f" database_url: {self.database_url[:50]}...")
|
||||
output(f" ipfs_api: {self.ipfs_api}")
|
||||
output(f" ipfs_gateway_url: {self.ipfs_gateway_url}")
|
||||
output(f" ipfs_gateways: {self.ipfs_gateways[:50]}...")
|
||||
output(f" streaming_gpu_persist: {self.streaming_gpu_persist}")
|
||||
output(f" oauth_client_id: {self.oauth_client_id}")
|
||||
output(f" oauth_authorize_url: {self.oauth_authorize_url}")
|
||||
output("=" * 60)
|
||||
|
||||
|
||||
# Singleton settings instance
|
||||
settings = Settings()
|
||||
|
||||
# Log config on import if DEBUG or SHOW_CONFIG is set
|
||||
if os.environ.get("DEBUG") or os.environ.get("SHOW_CONFIG"):
|
||||
settings.log_config()
|
||||
|
||||
@@ -85,7 +85,7 @@ async def require_auth(request: Request) -> UserContext:
|
||||
if "text/html" in accept:
|
||||
raise HTTPException(
|
||||
status_code=302,
|
||||
headers={"Location": "/login"}
|
||||
headers={"Location": "/auth/login"}
|
||||
)
|
||||
raise HTTPException(status_code=401, detail="Authentication required")
|
||||
return ctx
|
||||
@@ -112,6 +112,7 @@ def get_run_service():
|
||||
"""Get the run service."""
|
||||
from .services.run_service import RunService
|
||||
return RunService(
|
||||
database=get_database(),
|
||||
redis=get_redis_client(),
|
||||
cache=get_cache_manager(),
|
||||
)
|
||||
@@ -121,7 +122,7 @@ def get_recipe_service():
|
||||
"""Get the recipe service."""
|
||||
from .services.recipe_service import RecipeService
|
||||
return RecipeService(
|
||||
redis=get_redis_client(),
|
||||
redis=get_redis_client(), # Kept for API compatibility, not used
|
||||
cache=get_cache_manager(),
|
||||
)
|
||||
|
||||
@@ -133,3 +134,53 @@ def get_cache_service():
|
||||
cache_manager=get_cache_manager(),
|
||||
database=get_database(),
|
||||
)
|
||||
|
||||
|
||||
async def get_nav_counts(actor_id: Optional[str] = None) -> dict:
|
||||
"""
|
||||
Get counts for navigation bar display.
|
||||
|
||||
Returns dict with: runs, recipes, effects, media, storage
|
||||
"""
|
||||
counts = {}
|
||||
|
||||
try:
|
||||
import database
|
||||
counts["media"] = await database.count_user_items(actor_id) if actor_id else 0
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
recipe_service = get_recipe_service()
|
||||
recipes = await recipe_service.list_recipes(actor_id)
|
||||
counts["recipes"] = len(recipes)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
run_service = get_run_service()
|
||||
runs = await run_service.list_runs(actor_id)
|
||||
counts["runs"] = len(runs)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
# Effects are stored in _effects/ directory, not in cache
|
||||
from pathlib import Path
|
||||
cache_mgr = get_cache_manager()
|
||||
effects_dir = Path(cache_mgr.cache_dir) / "_effects"
|
||||
if effects_dir.exists():
|
||||
counts["effects"] = len([d for d in effects_dir.iterdir() if d.is_dir()])
|
||||
else:
|
||||
counts["effects"] = 0
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
import database
|
||||
storage_providers = await database.get_user_storage_providers(actor_id) if actor_id else []
|
||||
counts["storage"] = len(storage_providers) if storage_providers else 0
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return counts
|
||||
|
||||
@@ -11,12 +11,11 @@ import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import yaml
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from artdag_common.middleware.auth import UserContext
|
||||
from ..dependencies import require_auth, get_redis_client, get_cache_manager
|
||||
from ..services.auth_service import UserContext
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -26,9 +25,8 @@ RUNS_KEY_PREFIX = "artdag:run:"
|
||||
|
||||
|
||||
class PlanRequest(BaseModel):
|
||||
recipe_yaml: str
|
||||
recipe_sexp: str
|
||||
input_hashes: Dict[str, str]
|
||||
features: List[str] = ["beats", "energy"]
|
||||
|
||||
|
||||
class ExecutePlanRequest(BaseModel):
|
||||
@@ -37,9 +35,8 @@ class ExecutePlanRequest(BaseModel):
|
||||
|
||||
|
||||
class RecipeRunRequest(BaseModel):
|
||||
recipe_yaml: str
|
||||
recipe_sexp: str
|
||||
input_hashes: Dict[str, str]
|
||||
features: List[str] = ["beats", "energy"]
|
||||
|
||||
|
||||
def compute_run_id(input_hashes: List[str], recipe: str, recipe_hash: str = None) -> str:
|
||||
@@ -68,9 +65,8 @@ async def generate_plan_endpoint(
|
||||
|
||||
try:
|
||||
task = generate_plan.delay(
|
||||
recipe_yaml=request.recipe_yaml,
|
||||
recipe_sexp=request.recipe_sexp,
|
||||
input_hashes=request.input_hashes,
|
||||
features=request.features,
|
||||
)
|
||||
|
||||
# Wait for result (plan generation is usually fast)
|
||||
@@ -136,15 +132,16 @@ async def run_recipe_endpoint(
|
||||
Returns immediately with run_id. Poll /api/run/{run_id} for status.
|
||||
"""
|
||||
from tasks.orchestrate import run_recipe
|
||||
from artdag.sexp import compile_string
|
||||
import database
|
||||
|
||||
redis = get_redis_client()
|
||||
cache = get_cache_manager()
|
||||
|
||||
# Parse recipe name
|
||||
# Parse recipe name from S-expression
|
||||
try:
|
||||
recipe_data = yaml.safe_load(request.recipe_yaml)
|
||||
recipe_name = recipe_data.get("name", "unknown")
|
||||
compiled = compile_string(request.recipe_sexp)
|
||||
recipe_name = compiled.name or "unknown"
|
||||
except Exception:
|
||||
recipe_name = "unknown"
|
||||
|
||||
@@ -152,28 +149,27 @@ async def run_recipe_endpoint(
|
||||
run_id = compute_run_id(
|
||||
list(request.input_hashes.values()),
|
||||
recipe_name,
|
||||
hashlib.sha3_256(request.recipe_yaml.encode()).hexdigest()
|
||||
hashlib.sha3_256(request.recipe_sexp.encode()).hexdigest()
|
||||
)
|
||||
|
||||
# Check if already completed
|
||||
cached = await database.get_run_cache(run_id)
|
||||
if cached:
|
||||
output_hash = cached.get("output_hash")
|
||||
if cache.has_content(output_hash):
|
||||
output_cid = cached.get("output_cid")
|
||||
if cache.has_content(output_cid):
|
||||
return {
|
||||
"status": "completed",
|
||||
"run_id": run_id,
|
||||
"output_hash": output_hash,
|
||||
"output_ipfs_cid": cache.get_ipfs_cid(output_hash),
|
||||
"output_cid": output_cid,
|
||||
"output_ipfs_cid": cache.get_ipfs_cid(output_cid),
|
||||
"cached": True,
|
||||
}
|
||||
|
||||
# Submit to Celery
|
||||
try:
|
||||
task = run_recipe.delay(
|
||||
recipe_yaml=request.recipe_yaml,
|
||||
recipe_sexp=request.recipe_sexp,
|
||||
input_hashes=request.input_hashes,
|
||||
features=request.features,
|
||||
run_id=run_id,
|
||||
)
|
||||
|
||||
@@ -228,7 +224,7 @@ async def get_run_status(
|
||||
if result.successful():
|
||||
task_result = result.get()
|
||||
data["status"] = task_result.get("status", "completed")
|
||||
data["output_hash"] = task_result.get("output_cache_id")
|
||||
data["output_cid"] = task_result.get("output_cache_id")
|
||||
data["output_ipfs_cid"] = task_result.get("output_ipfs_cid")
|
||||
data["total_steps"] = task_result.get("total_steps")
|
||||
data["cached"] = task_result.get("cached")
|
||||
@@ -254,7 +250,7 @@ async def get_run_status(
|
||||
return {
|
||||
"run_id": run_id,
|
||||
"status": "completed",
|
||||
"output_hash": cached.get("output_hash"),
|
||||
"output_cid": cached.get("output_cid"),
|
||||
"cached": True,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,122 +1,165 @@
|
||||
"""
|
||||
Authentication routes for L1 server.
|
||||
Authentication routes — OAuth2 authorization code flow via account.rose-ash.com.
|
||||
|
||||
L1 doesn't handle login directly - users log in at their L2 server.
|
||||
Token is passed via URL from L2 redirect, then L1 sets its own cookie.
|
||||
GET /auth/login — redirect to account OAuth authorize
|
||||
GET /auth/callback — exchange code for user info, set session cookie
|
||||
GET /auth/logout — clear cookie, redirect through account SSO logout
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||
import secrets
|
||||
import time
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import RedirectResponse
|
||||
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||
from pydantic import BaseModel
|
||||
from itsdangerous import URLSafeSerializer
|
||||
|
||||
from ..dependencies import get_redis_client
|
||||
from ..services.auth_service import AuthService
|
||||
from artdag_common.middleware.auth import UserContext, set_auth_cookie, clear_auth_cookie
|
||||
|
||||
from ..config import settings
|
||||
|
||||
router = APIRouter()
|
||||
security = HTTPBearer(auto_error=False)
|
||||
|
||||
_signer = None
|
||||
|
||||
|
||||
def get_auth_service():
|
||||
"""Get auth service instance."""
|
||||
return AuthService(get_redis_client())
|
||||
def _get_signer() -> URLSafeSerializer:
|
||||
global _signer
|
||||
if _signer is None:
|
||||
_signer = URLSafeSerializer(settings.secret_key, salt="oauth-state")
|
||||
return _signer
|
||||
|
||||
|
||||
class RevokeUserRequest(BaseModel):
|
||||
"""Request to revoke all tokens for a user."""
|
||||
username: str
|
||||
l2_server: str
|
||||
@router.get("/login")
|
||||
async def login(request: Request):
|
||||
"""Store state + next in signed cookie, redirect to account OAuth authorize."""
|
||||
next_url = request.query_params.get("next", "/")
|
||||
prompt = request.query_params.get("prompt", "")
|
||||
state = secrets.token_urlsafe(32)
|
||||
|
||||
signer = _get_signer()
|
||||
state_payload = signer.dumps({"state": state, "next": next_url, "prompt": prompt})
|
||||
|
||||
@router.get("")
|
||||
async def auth_callback(
|
||||
request: Request,
|
||||
auth_token: str = None,
|
||||
auth_service: AuthService = Depends(get_auth_service),
|
||||
):
|
||||
"""
|
||||
Receive auth token from L2 redirect and set local cookie.
|
||||
|
||||
This enables cross-subdomain auth on iOS Safari which blocks shared cookies.
|
||||
L2 redirects here with ?auth_token=... after user logs in.
|
||||
"""
|
||||
if not auth_token:
|
||||
return RedirectResponse(url="/", status_code=302)
|
||||
|
||||
# Verify the token is valid
|
||||
ctx = await auth_service.verify_token_with_l2(auth_token)
|
||||
if not ctx:
|
||||
return RedirectResponse(url="/", status_code=302)
|
||||
|
||||
# Register token for this user (for revocation by username later)
|
||||
auth_service.register_user_token(ctx.username, auth_token)
|
||||
|
||||
# Set local first-party cookie and redirect to runs
|
||||
response = RedirectResponse(url="/runs", status_code=302)
|
||||
response.set_cookie(
|
||||
key="auth_token",
|
||||
value=auth_token,
|
||||
httponly=True,
|
||||
max_age=60 * 60 * 24 * 30, # 30 days
|
||||
samesite="lax",
|
||||
secure=True
|
||||
device_id = getattr(request.state, "device_id", "")
|
||||
authorize_url = (
|
||||
f"{settings.oauth_authorize_url}"
|
||||
f"?client_id={settings.oauth_client_id}"
|
||||
f"&redirect_uri={settings.oauth_redirect_uri}"
|
||||
f"&device_id={device_id}"
|
||||
f"&state={state}"
|
||||
)
|
||||
if prompt:
|
||||
authorize_url += f"&prompt={prompt}"
|
||||
|
||||
response = RedirectResponse(url=authorize_url, status_code=302)
|
||||
response.set_cookie(
|
||||
key="oauth_state",
|
||||
value=state_payload,
|
||||
max_age=600, # 10 minutes
|
||||
httponly=True,
|
||||
samesite="lax",
|
||||
secure=True,
|
||||
)
|
||||
return response
|
||||
|
||||
|
||||
@router.get("/callback")
|
||||
async def callback(request: Request):
|
||||
"""Validate state, exchange code via token endpoint, set session cookie."""
|
||||
code = request.query_params.get("code", "")
|
||||
state = request.query_params.get("state", "")
|
||||
error = request.query_params.get("error", "")
|
||||
account_did = request.query_params.get("account_did", "")
|
||||
|
||||
# Adopt account's device ID as our own (one identity across all apps)
|
||||
if account_did:
|
||||
request.state.device_id = account_did
|
||||
request.state._new_device_id = True # device_id middleware will set cookie
|
||||
|
||||
# Recover state from signed cookie
|
||||
state_cookie = request.cookies.get("oauth_state", "")
|
||||
signer = _get_signer()
|
||||
try:
|
||||
payload = signer.loads(state_cookie) if state_cookie else {}
|
||||
except Exception:
|
||||
payload = {}
|
||||
|
||||
next_url = payload.get("next", "/")
|
||||
|
||||
# Handle prompt=none rejection (user not logged in on account)
|
||||
if error == "login_required":
|
||||
response = RedirectResponse(url=next_url, status_code=302)
|
||||
response.delete_cookie("oauth_state")
|
||||
# Set cooldown cookie — don't re-check for 5 minutes
|
||||
response.set_cookie(
|
||||
key="pnone_at",
|
||||
value=str(time.time()),
|
||||
max_age=300,
|
||||
httponly=True,
|
||||
samesite="lax",
|
||||
secure=True,
|
||||
)
|
||||
# Set device cookie if adopted
|
||||
if account_did:
|
||||
response.set_cookie(
|
||||
key="artdag_did",
|
||||
value=account_did,
|
||||
max_age=30 * 24 * 3600,
|
||||
httponly=True,
|
||||
samesite="lax",
|
||||
secure=True,
|
||||
)
|
||||
return response
|
||||
|
||||
# Normal callback — validate state + code
|
||||
if not state_cookie or not code or not state:
|
||||
return RedirectResponse(url="/", status_code=302)
|
||||
|
||||
if payload.get("state") != state:
|
||||
return RedirectResponse(url="/", status_code=302)
|
||||
|
||||
# Exchange code for user info via account's token endpoint
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
try:
|
||||
resp = await client.post(
|
||||
settings.oauth_token_url,
|
||||
json={
|
||||
"code": code,
|
||||
"client_id": settings.oauth_client_id,
|
||||
"redirect_uri": settings.oauth_redirect_uri,
|
||||
},
|
||||
)
|
||||
except httpx.HTTPError:
|
||||
return RedirectResponse(url="/", status_code=302)
|
||||
|
||||
if resp.status_code != 200:
|
||||
return RedirectResponse(url="/", status_code=302)
|
||||
|
||||
data = resp.json()
|
||||
if "error" in data:
|
||||
return RedirectResponse(url="/", status_code=302)
|
||||
|
||||
# Map OAuth response to artdag UserContext
|
||||
# Note: account token endpoint returns user.email as "username"
|
||||
display_name = data.get("display_name", "")
|
||||
username = data.get("username", "")
|
||||
email = username # OAuth response "username" is the user's email
|
||||
actor_id = f"@{username}"
|
||||
|
||||
user = UserContext(username=username, actor_id=actor_id, email=email)
|
||||
|
||||
response = RedirectResponse(url=next_url, status_code=302)
|
||||
set_auth_cookie(response, user)
|
||||
response.delete_cookie("oauth_state")
|
||||
response.delete_cookie("pnone_at")
|
||||
return response
|
||||
|
||||
|
||||
@router.get("/logout")
|
||||
async def logout():
|
||||
"""
|
||||
Logout - clear local cookie and redirect to home.
|
||||
|
||||
Note: This only logs out of L1. User should also logout from L2.
|
||||
"""
|
||||
response = RedirectResponse(url="/", status_code=302)
|
||||
response.delete_cookie("auth_token")
|
||||
"""Clear session cookie, redirect through account SSO logout."""
|
||||
response = RedirectResponse(url=settings.oauth_logout_url, status_code=302)
|
||||
clear_auth_cookie(response)
|
||||
response.delete_cookie("oauth_state")
|
||||
response.delete_cookie("pnone_at")
|
||||
return response
|
||||
|
||||
|
||||
@router.post("/revoke")
|
||||
async def revoke_token(
|
||||
credentials: HTTPAuthorizationCredentials = Depends(security),
|
||||
auth_service: AuthService = Depends(get_auth_service),
|
||||
):
|
||||
"""
|
||||
Revoke a token. Called by L2 when user logs out.
|
||||
|
||||
The token to revoke is passed in the Authorization header.
|
||||
"""
|
||||
if not credentials:
|
||||
raise HTTPException(401, "No token provided")
|
||||
|
||||
token = credentials.credentials
|
||||
|
||||
# Verify token is valid before revoking (ensures caller has the token)
|
||||
ctx = auth_service.get_user_context_from_token(token)
|
||||
if not ctx:
|
||||
raise HTTPException(401, "Invalid token")
|
||||
|
||||
# Revoke the token
|
||||
newly_revoked = auth_service.revoke_token(token)
|
||||
|
||||
return {"revoked": True, "newly_revoked": newly_revoked}
|
||||
|
||||
|
||||
@router.post("/revoke-user")
|
||||
async def revoke_user_tokens(
|
||||
request: RevokeUserRequest,
|
||||
auth_service: AuthService = Depends(get_auth_service),
|
||||
):
|
||||
"""
|
||||
Revoke all tokens for a user. Called by L2 when user logs out.
|
||||
|
||||
This handles the case where L2 issued scoped tokens that differ from L2's own token.
|
||||
"""
|
||||
# Revoke all tokens registered for this user
|
||||
count = auth_service.revoke_all_user_tokens(request.username)
|
||||
|
||||
return {
|
||||
"revoked": True,
|
||||
"tokens_revoked": count,
|
||||
"username": request.username
|
||||
}
|
||||
|
||||
@@ -8,18 +8,19 @@ import logging
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
|
||||
from fastapi.responses import HTMLResponse, FileResponse
|
||||
from pydantic import BaseModel
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html, wants_json
|
||||
from artdag_common.middleware.auth import UserContext
|
||||
|
||||
from ..dependencies import (
|
||||
require_auth, get_templates, get_redis_client,
|
||||
get_cache_manager, get_current_user
|
||||
)
|
||||
from ..services.auth_service import UserContext, AuthService
|
||||
from ..services.auth_service import AuthService
|
||||
from ..services.cache_service import CacheService
|
||||
|
||||
router = APIRouter()
|
||||
@@ -39,26 +40,27 @@ def get_cache_service():
|
||||
return CacheService(database, get_cache_manager())
|
||||
|
||||
|
||||
@router.get("/{content_hash}")
|
||||
@router.get("/{cid}")
|
||||
async def get_cached(
|
||||
content_hash: str,
|
||||
cid: str,
|
||||
request: Request,
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Get cached content by hash. Content negotiation: HTML for browsers, JSON for APIs."""
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
ctx = await get_current_user(request)
|
||||
|
||||
cache_item = await cache_service.get_cache_item(content_hash)
|
||||
# Pass actor_id to get friendly name and user-specific metadata
|
||||
actor_id = ctx.actor_id if ctx else None
|
||||
cache_item = await cache_service.get_cache_item(cid, actor_id=actor_id)
|
||||
if not cache_item:
|
||||
if wants_html(request):
|
||||
templates = get_templates(request)
|
||||
return render(templates, "cache/not_found.html", request,
|
||||
content_hash=content_hash,
|
||||
cid=cid,
|
||||
user=ctx,
|
||||
active_tab="media",
|
||||
)
|
||||
raise HTTPException(404, f"Content {content_hash} not in cache")
|
||||
raise HTTPException(404, f"Content {cid} not in cache")
|
||||
|
||||
# JSON response
|
||||
if wants_json(request):
|
||||
@@ -70,39 +72,43 @@ async def get_cached(
|
||||
return RedirectResponse(url="/auth", status_code=302)
|
||||
|
||||
# Check access
|
||||
has_access = await cache_service.check_access(content_hash, ctx.actor_id, ctx.username)
|
||||
has_access = await cache_service.check_access(cid, ctx.actor_id, ctx.username)
|
||||
if not has_access:
|
||||
raise HTTPException(403, "Access denied")
|
||||
|
||||
from ..dependencies import get_nav_counts
|
||||
nav_counts = await get_nav_counts(ctx.actor_id)
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "cache/detail.html", request,
|
||||
cache=cache_item,
|
||||
user=ctx,
|
||||
nav_counts=nav_counts,
|
||||
active_tab="media",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{content_hash}/raw")
|
||||
@router.get("/{cid}/raw")
|
||||
async def get_cached_raw(
|
||||
content_hash: str,
|
||||
cid: str,
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Get raw cached content (file download)."""
|
||||
file_path, media_type, filename = await cache_service.get_raw_file(content_hash)
|
||||
file_path, media_type, filename = await cache_service.get_raw_file(cid)
|
||||
|
||||
if not file_path:
|
||||
raise HTTPException(404, f"Content {content_hash} not in cache")
|
||||
raise HTTPException(404, f"Content {cid} not in cache")
|
||||
|
||||
return FileResponse(file_path, media_type=media_type, filename=filename)
|
||||
|
||||
|
||||
@router.get("/{content_hash}/mp4")
|
||||
@router.get("/{cid}/mp4")
|
||||
async def get_cached_mp4(
|
||||
content_hash: str,
|
||||
cid: str,
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Get cached content as MP4 (transcodes MKV on first request)."""
|
||||
mp4_path, error = await cache_service.get_as_mp4(content_hash)
|
||||
mp4_path, error = await cache_service.get_as_mp4(cid)
|
||||
|
||||
if error:
|
||||
raise HTTPException(400 if "not a video" in error else 404, error)
|
||||
@@ -110,29 +116,29 @@ async def get_cached_mp4(
|
||||
return FileResponse(mp4_path, media_type="video/mp4")
|
||||
|
||||
|
||||
@router.get("/{content_hash}/meta")
|
||||
@router.get("/{cid}/meta")
|
||||
async def get_metadata(
|
||||
content_hash: str,
|
||||
cid: str,
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Get content metadata."""
|
||||
meta = await cache_service.get_metadata(content_hash, ctx.actor_id)
|
||||
meta = await cache_service.get_metadata(cid, ctx.actor_id)
|
||||
if meta is None:
|
||||
raise HTTPException(404, "Content not found")
|
||||
return meta
|
||||
|
||||
|
||||
@router.patch("/{content_hash}/meta")
|
||||
@router.patch("/{cid}/meta")
|
||||
async def update_metadata(
|
||||
content_hash: str,
|
||||
cid: str,
|
||||
req: UpdateMetadataRequest,
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Update content metadata."""
|
||||
success, error = await cache_service.update_metadata(
|
||||
content_hash=content_hash,
|
||||
cid=cid,
|
||||
actor_id=ctx.actor_id,
|
||||
title=req.title,
|
||||
description=req.description,
|
||||
@@ -146,16 +152,16 @@ async def update_metadata(
|
||||
return {"updated": True}
|
||||
|
||||
|
||||
@router.post("/{content_hash}/publish")
|
||||
@router.post("/{cid}/publish")
|
||||
async def publish_content(
|
||||
content_hash: str,
|
||||
cid: str,
|
||||
request: Request,
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Publish content to L2 and IPFS."""
|
||||
ipfs_cid, error = await cache_service.publish_to_l2(
|
||||
content_hash=content_hash,
|
||||
cid=cid,
|
||||
actor_id=ctx.actor_id,
|
||||
l2_server=ctx.l2_server,
|
||||
auth_token=request.cookies.get("auth_token"),
|
||||
@@ -172,14 +178,14 @@ async def publish_content(
|
||||
return {"ipfs_cid": ipfs_cid, "published": True}
|
||||
|
||||
|
||||
@router.delete("/{content_hash}")
|
||||
@router.delete("/{cid}")
|
||||
async def delete_content(
|
||||
content_hash: str,
|
||||
cid: str,
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Delete content from cache."""
|
||||
success, error = await cache_service.delete_content(content_hash, ctx.actor_id)
|
||||
success, error = await cache_service.delete_content(cid, ctx.actor_id)
|
||||
|
||||
if error:
|
||||
raise HTTPException(400 if "Cannot" in error or "pinned" in error else 404, error)
|
||||
@@ -194,23 +200,105 @@ async def import_from_ipfs(
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Import content from IPFS."""
|
||||
content_hash, error = await cache_service.import_from_ipfs(ipfs_cid, ctx.actor_id)
|
||||
cid, error = await cache_service.import_from_ipfs(ipfs_cid, ctx.actor_id)
|
||||
|
||||
if error:
|
||||
raise HTTPException(400, error)
|
||||
|
||||
return {"content_hash": content_hash, "imported": True}
|
||||
return {"cid": cid, "imported": True}
|
||||
|
||||
|
||||
@router.post("/upload/chunk")
|
||||
async def upload_chunk(
|
||||
request: Request,
|
||||
chunk: UploadFile = File(...),
|
||||
upload_id: str = Form(...),
|
||||
chunk_index: int = Form(...),
|
||||
total_chunks: int = Form(...),
|
||||
filename: str = Form(...),
|
||||
display_name: Optional[str] = Form(None),
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Upload a file chunk. Assembles file when all chunks received."""
|
||||
import tempfile
|
||||
import os
|
||||
|
||||
# Create temp dir for this upload
|
||||
chunk_dir = Path(tempfile.gettempdir()) / "uploads" / upload_id
|
||||
chunk_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Save this chunk
|
||||
chunk_path = chunk_dir / f"chunk_{chunk_index:05d}"
|
||||
chunk_data = await chunk.read()
|
||||
chunk_path.write_bytes(chunk_data)
|
||||
|
||||
# Check if all chunks received
|
||||
received = len(list(chunk_dir.glob("chunk_*")))
|
||||
|
||||
if received < total_chunks:
|
||||
return {"status": "partial", "received": received, "total": total_chunks}
|
||||
|
||||
# All chunks received - assemble file
|
||||
final_path = chunk_dir / filename
|
||||
with open(final_path, 'wb') as f:
|
||||
for i in range(total_chunks):
|
||||
cp = chunk_dir / f"chunk_{i:05d}"
|
||||
f.write(cp.read_bytes())
|
||||
cp.unlink() # Clean up chunk
|
||||
|
||||
# Read assembled file
|
||||
content = final_path.read_bytes()
|
||||
final_path.unlink()
|
||||
chunk_dir.rmdir()
|
||||
|
||||
# Now do the normal upload flow
|
||||
cid, ipfs_cid, error = await cache_service.upload_content(
|
||||
content=content,
|
||||
filename=filename,
|
||||
actor_id=ctx.actor_id,
|
||||
)
|
||||
|
||||
if error:
|
||||
raise HTTPException(400, error)
|
||||
|
||||
# Assign friendly name
|
||||
final_cid = ipfs_cid or cid
|
||||
from ..services.naming_service import get_naming_service
|
||||
naming = get_naming_service()
|
||||
friendly_entry = await naming.assign_name(
|
||||
cid=final_cid,
|
||||
actor_id=ctx.actor_id,
|
||||
item_type="media",
|
||||
display_name=display_name,
|
||||
filename=filename,
|
||||
)
|
||||
|
||||
return {
|
||||
"status": "complete",
|
||||
"cid": final_cid,
|
||||
"friendly_name": friendly_entry["friendly_name"],
|
||||
"filename": filename,
|
||||
"size": len(content),
|
||||
"uploaded": True,
|
||||
}
|
||||
|
||||
|
||||
@router.post("/upload")
|
||||
async def upload_content(
|
||||
file: UploadFile = File(...),
|
||||
display_name: Optional[str] = Form(None),
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Upload content to cache."""
|
||||
"""Upload content to cache and IPFS.
|
||||
|
||||
Args:
|
||||
file: The file to upload
|
||||
display_name: Optional custom name for the media (used as friendly name)
|
||||
"""
|
||||
content = await file.read()
|
||||
content_hash, error = await cache_service.upload_content(
|
||||
cid, ipfs_cid, error = await cache_service.upload_content(
|
||||
content=content,
|
||||
filename=file.filename,
|
||||
actor_id=ctx.actor_id,
|
||||
@@ -219,7 +307,26 @@ async def upload_content(
|
||||
if error:
|
||||
raise HTTPException(400, error)
|
||||
|
||||
return {"content_hash": content_hash, "uploaded": True}
|
||||
# Assign friendly name (use IPFS CID if available, otherwise local hash)
|
||||
final_cid = ipfs_cid or cid
|
||||
from ..services.naming_service import get_naming_service
|
||||
naming = get_naming_service()
|
||||
friendly_entry = await naming.assign_name(
|
||||
cid=final_cid,
|
||||
actor_id=ctx.actor_id,
|
||||
item_type="media",
|
||||
display_name=display_name, # Use custom name if provided
|
||||
filename=file.filename,
|
||||
)
|
||||
|
||||
return {
|
||||
"cid": final_cid,
|
||||
"content_hash": cid, # Legacy, for backwards compatibility
|
||||
"friendly_name": friendly_entry["friendly_name"],
|
||||
"filename": file.filename,
|
||||
"size": len(content),
|
||||
"uploaded": True,
|
||||
}
|
||||
|
||||
|
||||
# Media listing endpoint
|
||||
@@ -230,17 +337,9 @@ async def list_media(
|
||||
limit: int = 24,
|
||||
media_type: Optional[str] = None,
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
):
|
||||
"""List all media in cache."""
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
if not ctx:
|
||||
if wants_json(request):
|
||||
raise HTTPException(401, "Authentication required")
|
||||
from fastapi.responses import RedirectResponse
|
||||
return RedirectResponse(url="/auth", status_code=302)
|
||||
|
||||
items = await cache_service.list_media(
|
||||
actor_id=ctx.actor_id,
|
||||
username=ctx.username,
|
||||
@@ -253,10 +352,14 @@ async def list_media(
|
||||
if wants_json(request):
|
||||
return {"items": items, "offset": offset, "limit": limit, "has_more": has_more}
|
||||
|
||||
from ..dependencies import get_nav_counts
|
||||
nav_counts = await get_nav_counts(ctx.actor_id)
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "cache/media_list.html", request,
|
||||
items=items,
|
||||
user=ctx,
|
||||
nav_counts=nav_counts,
|
||||
offset=offset,
|
||||
limit=limit,
|
||||
has_more=has_more,
|
||||
@@ -265,24 +368,22 @@ async def list_media(
|
||||
|
||||
|
||||
# HTMX metadata form
|
||||
@router.get("/{content_hash}/meta-form", response_class=HTMLResponse)
|
||||
@router.get("/{cid}/meta-form", response_class=HTMLResponse)
|
||||
async def get_metadata_form(
|
||||
content_hash: str,
|
||||
cid: str,
|
||||
request: Request,
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Get metadata editing form (HTMX)."""
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
ctx = await get_current_user(request)
|
||||
if not ctx:
|
||||
return HTMLResponse('<div class="text-red-400">Login required</div>')
|
||||
|
||||
meta = await cache_service.get_metadata(content_hash, ctx.actor_id)
|
||||
meta = await cache_service.get_metadata(cid, ctx.actor_id)
|
||||
|
||||
return HTMLResponse(f'''
|
||||
<h2 class="text-lg font-semibold mb-4">Metadata</h2>
|
||||
<form hx-patch="/cache/{content_hash}/meta"
|
||||
<form hx-patch="/cache/{cid}/meta"
|
||||
hx-target="#metadata-section"
|
||||
hx-swap="innerHTML"
|
||||
class="space-y-4">
|
||||
@@ -305,23 +406,21 @@ async def get_metadata_form(
|
||||
''')
|
||||
|
||||
|
||||
@router.patch("/{content_hash}/meta", response_class=HTMLResponse)
|
||||
@router.patch("/{cid}/meta", response_class=HTMLResponse)
|
||||
async def update_metadata_htmx(
|
||||
content_hash: str,
|
||||
cid: str,
|
||||
request: Request,
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Update metadata (HTMX form handler)."""
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
ctx = await get_current_user(request)
|
||||
if not ctx:
|
||||
return HTMLResponse('<div class="text-red-400">Login required</div>')
|
||||
|
||||
form_data = await request.form()
|
||||
|
||||
success, error = await cache_service.update_metadata(
|
||||
content_hash=content_hash,
|
||||
cid=cid,
|
||||
actor_id=ctx.actor_id,
|
||||
title=form_data.get("title"),
|
||||
description=form_data.get("description"),
|
||||
@@ -334,3 +433,83 @@ async def update_metadata_htmx(
|
||||
<div class="text-green-400 mb-4">Metadata saved!</div>
|
||||
<script>setTimeout(() => location.reload(), 1000);</script>
|
||||
''')
|
||||
|
||||
|
||||
# Friendly name editing
|
||||
@router.get("/{cid}/name-form", response_class=HTMLResponse)
|
||||
async def get_name_form(
|
||||
cid: str,
|
||||
request: Request,
|
||||
cache_service: CacheService = Depends(get_cache_service),
|
||||
):
|
||||
"""Get friendly name editing form (HTMX)."""
|
||||
ctx = await get_current_user(request)
|
||||
if not ctx:
|
||||
return HTMLResponse('<div class="text-red-400">Login required</div>')
|
||||
|
||||
# Get current friendly name
|
||||
from ..services.naming_service import get_naming_service
|
||||
naming = get_naming_service()
|
||||
entry = await naming.get_by_cid(ctx.actor_id, cid)
|
||||
current_name = entry.get("base_name", "") if entry else ""
|
||||
|
||||
return HTMLResponse(f'''
|
||||
<form hx-post="/cache/{cid}/name"
|
||||
hx-target="#friendly-name-section"
|
||||
hx-swap="innerHTML"
|
||||
class="space-y-3">
|
||||
<div>
|
||||
<label class="block text-gray-400 text-sm mb-1">Friendly Name</label>
|
||||
<input type="text" name="display_name" value="{current_name}"
|
||||
placeholder="e.g., my-background-video"
|
||||
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
|
||||
<p class="text-gray-500 text-xs mt-1">A name to reference this media in recipes</p>
|
||||
</div>
|
||||
<div class="flex space-x-2">
|
||||
<button type="submit"
|
||||
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
|
||||
Save
|
||||
</button>
|
||||
<button type="button"
|
||||
onclick="location.reload()"
|
||||
class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
|
||||
Cancel
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
''')
|
||||
|
||||
|
||||
@router.post("/{cid}/name", response_class=HTMLResponse)
|
||||
async def update_friendly_name(
|
||||
cid: str,
|
||||
request: Request,
|
||||
):
|
||||
"""Update friendly name (HTMX form handler)."""
|
||||
ctx = await get_current_user(request)
|
||||
if not ctx:
|
||||
return HTMLResponse('<div class="text-red-400">Login required</div>')
|
||||
|
||||
form_data = await request.form()
|
||||
display_name = form_data.get("display_name", "").strip()
|
||||
|
||||
if not display_name:
|
||||
return HTMLResponse('<div class="text-red-400">Name cannot be empty</div>')
|
||||
|
||||
from ..services.naming_service import get_naming_service
|
||||
naming = get_naming_service()
|
||||
|
||||
try:
|
||||
entry = await naming.assign_name(
|
||||
cid=cid,
|
||||
actor_id=ctx.actor_id,
|
||||
item_type="media",
|
||||
display_name=display_name,
|
||||
)
|
||||
|
||||
return HTMLResponse(f'''
|
||||
<div class="text-green-400 mb-2">Name updated!</div>
|
||||
<script>setTimeout(() => location.reload(), 1000);</script>
|
||||
''')
|
||||
except Exception as e:
|
||||
return HTMLResponse(f'<div class="text-red-400">Error: {e}</div>')
|
||||
|
||||
415
app/routers/effects.py
Normal file
415
app/routers/effects.py
Normal file
@@ -0,0 +1,415 @@
|
||||
"""
|
||||
Effects routes for L1 server.
|
||||
|
||||
Handles effect upload, listing, and metadata.
|
||||
Effects are S-expression files stored in IPFS like all other content-addressed data.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File, Form
|
||||
from fastapi.responses import HTMLResponse, PlainTextResponse
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html, wants_json
|
||||
from artdag_common.middleware.auth import UserContext
|
||||
|
||||
from ..dependencies import (
|
||||
require_auth, get_templates, get_redis_client,
|
||||
get_cache_manager,
|
||||
)
|
||||
from ..services.auth_service import AuthService
|
||||
import ipfs_client
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_effects_dir() -> Path:
    """Return the local effects cache directory, creating it on first use."""
    base = Path(get_cache_manager().cache_dir) / "_effects"
    base.mkdir(parents=True, exist_ok=True)
    return base
|
||||
|
||||
|
||||
def parse_effect_metadata(source: str) -> dict:
    """
    Parse effect metadata from S-expression source code.

    Metadata lives in leading comment lines of the form ``;; @key value``;
    scanning stops at the first line that opens an S-expression form.
    When no name was declared in comments, falls back to extracting it
    from ``(defeffect "name" ...)`` / ``(effect "name" ...)``, then from
    the first ``(define name ...)`` form.
    """
    meta = {
        "name": "",
        "version": "1.0.0",
        "author": "",
        "temporal": False,
        "description": "",
        "params": [],
    }

    for raw_line in source.split("\n"):
        text = raw_line.strip()
        # The first S-expression form ends the metadata header.
        if text.startswith("("):
            break
        # Skip blank lines and anything that is not a comment.
        if not text.startswith(";"):
            continue

        body = text.lstrip(";").strip()

        if body.startswith("@effect "):
            meta["name"] = body[8:].strip()
        elif body.startswith("@name "):
            meta["name"] = body[6:].strip()
        elif body.startswith("@version "):
            meta["version"] = body[9:].strip()
        elif body.startswith("@author "):
            meta["author"] = body[8:].strip()
        elif body.startswith("@temporal"):
            # Bare "@temporal" means true; otherwise parse the flag text.
            flag = body[9:].strip().lower() if len(body) > 9 else "true"
            meta["temporal"] = flag in ("true", "yes", "1", "")
        elif body.startswith("@description "):
            meta["description"] = body[13:].strip()
        elif body.startswith("@param "):
            # Format: @param name type [description]
            pieces = body[7:].split(None, 2)
            if len(pieces) >= 2:
                entry = {"name": pieces[0], "type": pieces[1]}
                if len(pieces) > 2:
                    entry["description"] = pieces[2]
                meta["params"].append(entry)

    # Fallback 1: name declared in a (defeffect "name" ...) / (effect "name" ...) form.
    if not meta["name"]:
        form_match = re.search(r'\((defeffect|effect)\s+"([^"]+)"', source)
        if form_match:
            meta["name"] = form_match.group(2)

    # Fallback 2: identifier of the first (define ...) form.
    if not meta["name"]:
        define_match = re.search(r'\(define\s+(\w+)', source)
        if define_match:
            meta["name"] = define_match.group(1)

    return meta
|
||||
|
||||
|
||||
@router.post("/upload")
async def upload_effect(
    file: UploadFile = File(...),
    display_name: Optional[str] = Form(None),
    ctx: UserContext = Depends(require_auth),
):
    """
    Upload an S-expression effect to IPFS.

    Parses metadata from comment headers.
    Returns IPFS CID for use in recipes.

    Args:
        file: The .sexp effect file
        display_name: Optional custom friendly name for the effect

    Raises:
        HTTPException: 400 when the file is not UTF-8 text; 500 when
            the IPFS store fails.
    """
    content = await file.read()

    # Effects are source text — reject anything that is not valid UTF-8.
    try:
        source = content.decode("utf-8")
    except UnicodeDecodeError:
        raise HTTPException(400, "Effect must be valid UTF-8 text")

    # Parse metadata from sexp source; a malformed header does not block
    # the upload — fall back to a minimal dict based on the filename.
    try:
        meta = parse_effect_metadata(source)
    except Exception as e:
        logger.warning(f"Failed to parse effect metadata: {e}")
        meta = {"name": file.filename or "unknown"}

    # If no name was declared in the source, use the filename stem.
    if not meta.get("name"):
        meta["name"] = Path(file.filename).stem if file.filename else "unknown"

    # Store effect source in IPFS (content-addressed; CID identifies the effect).
    cid = ipfs_client.add_bytes(content)
    if not cid:
        raise HTTPException(500, "Failed to store effect in IPFS")

    # Also keep local cache for fast worker access
    effects_dir = get_effects_dir()
    effect_dir = effects_dir / cid
    effect_dir.mkdir(parents=True, exist_ok=True)
    (effect_dir / "effect.sexp").write_text(source, encoding="utf-8")

    # Store metadata (locally and in IPFS)
    full_meta = {
        "cid": cid,
        "meta": meta,
        "uploader": ctx.actor_id,
        "uploaded_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),  # UTC ISO-8601
        "filename": file.filename,
    }
    (effect_dir / "metadata.json").write_text(json.dumps(full_meta, indent=2))

    # Also store metadata in IPFS for discoverability
    meta_cid = ipfs_client.add_json(full_meta)

    # Track ownership in item_types
    import database
    await database.save_item_metadata(
        cid=cid,
        actor_id=ctx.actor_id,
        item_type="effect",
        filename=file.filename,
    )

    # Assign friendly name (use custom display_name if provided, else from metadata)
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly_entry = await naming.assign_name(
        cid=cid,
        actor_id=ctx.actor_id,
        item_type="effect",
        display_name=display_name or meta.get("name"),
        filename=file.filename,
    )

    logger.info(f"Uploaded effect '{meta.get('name')}' cid={cid} friendly_name='{friendly_entry['friendly_name']}' by {ctx.actor_id}")

    # Summary for the client: both CIDs plus the parsed metadata fields.
    return {
        "cid": cid,
        "metadata_cid": meta_cid,
        "name": meta.get("name"),
        "friendly_name": friendly_entry["friendly_name"],
        "version": meta.get("version"),
        "temporal": meta.get("temporal", False),
        "params": meta.get("params", []),
        "uploaded": True,
    }
|
||||
|
||||
|
||||
@router.get("/{cid}")
async def get_effect(
    cid: str,
    request: Request,
    ctx: UserContext = Depends(require_auth),
):
    """Get effect metadata by CID.

    Serves cached metadata when present; otherwise fetches the source
    from IPFS, re-parses its metadata, and populates the local cache.
    Returns JSON or a rendered HTML detail page depending on the
    request's Accept preferences.

    Raises:
        HTTPException: 404 when the CID is not cached and not in IPFS.
    """
    effects_dir = get_effects_dir()
    effect_dir = effects_dir / cid
    metadata_path = effect_dir / "metadata.json"

    # Try local cache first
    if metadata_path.exists():
        meta = json.loads(metadata_path.read_text())
    else:
        # Fetch from IPFS
        source_bytes = ipfs_client.get_bytes(cid)
        if not source_bytes:
            raise HTTPException(404, f"Effect {cid[:16]}... not found")

        # Cache locally
        effect_dir.mkdir(parents=True, exist_ok=True)
        source = source_bytes.decode("utf-8")
        (effect_dir / "effect.sexp").write_text(source)

        # Parse metadata from source
        # NOTE(review): this cache-fill path has no uploader/uploaded_at,
        # unlike metadata written at upload time.
        parsed_meta = parse_effect_metadata(source)
        meta = {"cid": cid, "meta": parsed_meta}
        (effect_dir / "metadata.json").write_text(json.dumps(meta, indent=2))

    # Add friendly name if available
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()
    friendly = await naming.get_by_cid(ctx.actor_id, cid)
    if friendly:
        meta["friendly_name"] = friendly["friendly_name"]
        meta["base_name"] = friendly["base_name"]
        meta["version_id"] = friendly["version_id"]

    if wants_json(request):
        return meta

    # HTML response
    from ..dependencies import get_nav_counts
    nav_counts = await get_nav_counts(ctx.actor_id)

    templates = get_templates(request)
    return render(templates, "effects/detail.html", request,
        effect=meta,
        user=ctx,
        nav_counts=nav_counts,
        active_tab="effects",
    )
|
||||
|
||||
|
||||
@router.get("/{cid}/source")
async def get_effect_source(
    cid: str,
    ctx: UserContext = Depends(require_auth),
):
    """Get effect source code."""
    base = get_effects_dir() / cid

    # Serve from the local cache when possible: current .sexp first,
    # then the legacy .py location.
    for candidate in (base / "effect.sexp", base / "effect.py"):
        if candidate.exists():
            return PlainTextResponse(candidate.read_text())

    # Cache miss — pull the source from IPFS.
    raw = ipfs_client.get_bytes(cid)
    if not raw:
        raise HTTPException(404, f"Effect {cid[:16]}... not found")

    # Persist a local copy so subsequent requests hit the cache.
    sexp_path = base / "effect.sexp"
    sexp_path.parent.mkdir(parents=True, exist_ok=True)
    text = raw.decode("utf-8")
    sexp_path.write_text(text)

    return PlainTextResponse(text)
|
||||
|
||||
|
||||
@router.get("")
async def list_effects(
    request: Request,
    offset: int = 0,
    limit: int = 20,
    ctx: UserContext = Depends(require_auth),
):
    """List user's effects with pagination.

    Ownership comes from the item_types table; metadata is read from the
    local effects cache (entries whose metadata.json is missing or
    unparsable are silently skipped). Returns JSON or a rendered HTML
    list depending on the request's Accept preferences.
    """
    import database
    effects_dir = get_effects_dir()
    effects = []

    # Get user's effect CIDs from item_types
    user_items = await database.get_user_items(ctx.actor_id, item_type="effect", limit=1000)
    effect_cids = [item["cid"] for item in user_items]

    # Get naming service for friendly name lookup
    from ..services.naming_service import get_naming_service
    naming = get_naming_service()

    for cid in effect_cids:
        effect_dir = effects_dir / cid
        metadata_path = effect_dir / "metadata.json"
        if metadata_path.exists():
            try:
                meta = json.loads(metadata_path.read_text())
                # Add friendly name if available
                friendly = await naming.get_by_cid(ctx.actor_id, cid)
                if friendly:
                    meta["friendly_name"] = friendly["friendly_name"]
                    meta["base_name"] = friendly["base_name"]
                effects.append(meta)
            except json.JSONDecodeError:
                # Corrupt cache entry — omit it from the listing.
                pass

    # Sort by upload time (newest first)
    effects.sort(key=lambda e: e.get("uploaded_at", ""), reverse=True)

    # Apply pagination
    total = len(effects)
    paginated_effects = effects[offset:offset + limit]
    has_more = offset + limit < total

    if wants_json(request):
        return {"effects": paginated_effects, "offset": offset, "limit": limit, "has_more": has_more}

    from ..dependencies import get_nav_counts
    nav_counts = await get_nav_counts(ctx.actor_id)

    templates = get_templates(request)
    return render(templates, "effects/list.html", request,
        effects=paginated_effects,
        user=ctx,
        nav_counts=nav_counts,
        active_tab="effects",
        offset=offset,
        limit=limit,
        has_more=has_more,
    )
|
||||
|
||||
|
||||
@router.post("/{cid}/publish")
async def publish_effect(
    cid: str,
    request: Request,
    ctx: UserContext = Depends(require_auth),
):
    """Publish effect to L2 ActivityPub server.

    Delegates the actual publish to CacheService.publish_to_l2 and
    reports the result as an inline HTML fragment (HTMX) or JSON.

    Raises:
        HTTPException: 404 when the effect is not cached locally,
            400 when the publish step reports an error (JSON clients only;
            HTML clients get an inline error span instead).
    """
    from ..services.cache_service import CacheService
    import database

    # Verify effect exists
    effects_dir = get_effects_dir()
    effect_dir = effects_dir / cid
    if not effect_dir.exists():
        error = "Effect not found"
        if wants_html(request):
            return HTMLResponse(f'<span class="text-red-400">{error}</span>')
        raise HTTPException(404, error)

    # Use cache service to publish; forwards the caller's auth cookie to L2.
    cache_service = CacheService(database, get_cache_manager())
    ipfs_cid, error = await cache_service.publish_to_l2(
        cid=cid,
        actor_id=ctx.actor_id,
        l2_server=ctx.l2_server,
        auth_token=request.cookies.get("auth_token"),
    )

    if error:
        if wants_html(request):
            return HTMLResponse(f'<span class="text-red-400">{error}</span>')
        raise HTTPException(400, error)

    logger.info(f"Published effect {cid[:16]}... to L2 by {ctx.actor_id}")

    if wants_html(request):
        return HTMLResponse(f'<span class="text-green-400">Shared: {ipfs_cid[:16]}...</span>')

    return {"ipfs_cid": ipfs_cid, "cid": cid, "published": True}
|
||||
|
||||
|
||||
@router.delete("/{cid}")
async def delete_effect(
    cid: str,
    ctx: UserContext = Depends(require_auth),
):
    """Remove user's ownership link to an effect.

    Ownership is per-user: the local files and IPFS pin are only
    garbage-collected when no owners remain after this removal.
    """
    import database

    # Remove user's ownership link from item_types
    await database.delete_item_type(cid, ctx.actor_id, "effect")

    # Remove friendly name
    await database.delete_friendly_name(ctx.actor_id, cid)

    # Check if anyone still owns this effect
    remaining_owners = await database.get_item_types(cid)

    # Only delete local files if no one owns it anymore
    if not remaining_owners:
        effects_dir = get_effects_dir()
        effect_dir = effects_dir / cid
        if effect_dir.exists():
            import shutil
            shutil.rmtree(effect_dir)

        # Unpin from IPFS
        ipfs_client.unpin(cid)
        logger.info(f"Garbage collected effect {cid[:16]}... (no remaining owners)")

    logger.info(f"Removed effect {cid[:16]}... ownership for {ctx.actor_id}")
    return {"deleted": True}
|
||||
143
app/routers/fragments.py
Normal file
143
app/routers/fragments.py
Normal file
@@ -0,0 +1,143 @@
|
||||
"""
|
||||
Art-DAG fragment endpoints.
|
||||
|
||||
Exposes HTML fragments at ``/internal/fragments/{type}`` for consumption
|
||||
by coop apps via the fragment client.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from fastapi import APIRouter, Request, Response
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# Registry of fragment handlers: type -> async callable(request) returning HTML str
|
||||
_handlers: dict[str, object] = {}
|
||||
|
||||
FRAGMENT_HEADER = "X-Fragment-Request"
|
||||
|
||||
|
||||
@router.get("/internal/fragments/{fragment_type}")
async def get_fragment(fragment_type: str, request: Request):
    """Serve a registered HTML fragment to coop apps.

    Requests without the fragment header are rejected (403); unknown
    fragment types yield an empty — but successful — HTML response.
    """
    if not request.headers.get(FRAGMENT_HEADER):
        return Response(content="", status_code=403)

    handler = _handlers.get(fragment_type)
    body = "" if handler is None else await handler(request)
    return Response(content=body, media_type="text/html", status_code=200)
|
||||
|
||||
|
||||
# --- nav-item fragment ---
|
||||
|
||||
async def _nav_item_handler(request: Request) -> str:
    """Render the nav-item fragment pointing at the Art-DAG app URL."""
    from artdag_common import render_fragment

    base_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
    return render_fragment(
        request.app.state.templates,
        "fragments/nav_item.html",
        artdag_url=base_url,
    )


_handlers["nav-item"] = _nav_item_handler
|
||||
|
||||
|
||||
# --- link-card fragment ---
|
||||
|
||||
async def _link_card_handler(request: Request) -> str:
    """Render link-card fragment(s) for the requested content.

    Query params:
        cid / slug: key of a single item to render.
        type: content type hint (default "media").
        keys: comma-separated keys for batch mode; each card is preceded
            by a ``<!-- fragment:{key} -->`` marker so the client can
            split the response.

    Returns "" when no key is provided.
    """
    # (Removed unused local imports of render_fragment/database — the
    # rendering and DB work happens in _render_single_link_card.)
    templates = request.app.state.templates
    cid = request.query_params.get("cid", "")
    content_type = request.query_params.get("type", "media")
    slug = request.query_params.get("slug", "")
    keys_raw = request.query_params.get("keys", "")

    # Batch mode: return multiple cards separated by markers
    if keys_raw:
        keys = [k.strip() for k in keys_raw.split(",") if k.strip()]
        parts = []
        for key in keys:
            parts.append(f"<!-- fragment:{key} -->")
            card_html = await _render_single_link_card(
                templates, key, content_type,
            )
            parts.append(card_html)
        return "\n".join(parts)

    # Single mode: use cid or slug
    lookup_cid = cid or slug
    if not lookup_cid:
        return ""
    return await _render_single_link_card(templates, lookup_cid, content_type)
|
||||
|
||||
|
||||
async def _render_single_link_card(templates, cid: str, content_type: str) -> str:
    """Render one link-card fragment for a CID (or slug-like key).

    Title resolution order: friendly name -> item metadata
    (filename/description) -> run placeholder -> truncated CID.
    Returns "" when cid is empty.
    """
    import database
    from artdag_common import render_fragment

    if not cid:
        return ""

    artdag_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")

    # Try item_types first (has metadata)
    item = await database.get_item_types(cid)
    # get_item_types returns a list; pick best match for content_type
    meta = None
    if item:
        for it in item:
            if it.get("type") == content_type:
                meta = it
                break
        if not meta:
            # No exact type match — fall back to the first owner's entry.
            meta = item[0]

    # Try friendly name for display
    friendly = None
    if meta and meta.get("actor_id"):
        friendly = await database.get_friendly_name_by_cid(meta["actor_id"], cid)

    # Try run cache if type is "run"
    run = None
    if content_type == "run":
        run = await database.get_run_cache(cid)

    title = ""
    description = ""
    link = ""

    if friendly:
        title = friendly.get("display_name") or friendly.get("base_name", cid[:12])
    elif meta:
        title = meta.get("filename") or meta.get("description", cid[:12])
    elif run:
        title = f"Run {cid[:12]}"
    else:
        title = cid[:16]

    if meta:
        description = meta.get("description", "")

    # Deep-link to the matching detail page on the Art-DAG app.
    if content_type == "run":
        link = f"{artdag_url}/runs/{cid}"
    elif content_type == "recipe":
        link = f"{artdag_url}/recipes/{cid}"
    elif content_type == "effect":
        link = f"{artdag_url}/effects/{cid}"
    else:
        link = f"{artdag_url}/cache/{cid}"

    return render_fragment(
        templates, "fragments/link_card.html",
        title=title,
        description=description,
        link=link,
        cid=cid,
        content_type=content_type,
        artdag_url=artdag_url,
    )


_handlers["link-card"] = _link_card_handler
|
||||
@@ -2,8 +2,11 @@
|
||||
Home and root routes for L1 server.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Request, Depends
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||
from pathlib import Path
|
||||
|
||||
import markdown
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse, FileResponse
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html
|
||||
@@ -13,37 +16,238 @@ from ..dependencies import get_templates, get_current_user
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/health")
async def health():
    """Liveness probe — unconditionally reports an OK status (HTTP 200)."""
    return {"status": "ok"}
|
||||
|
||||
|
||||
async def get_user_stats(actor_id: str) -> dict:
    """Get stats for a user.

    Returns a dict with counts for "media", "recipes", "runs",
    "storage", and "effects". Each counter independently falls back to
    0 on any lookup failure, so a partial backend outage degrades the
    numbers rather than raising.
    """
    import database
    from ..services.run_service import RunService
    from ..dependencies import get_redis_client, get_cache_manager

    stats = {}

    try:
        # Count only actual media types (video, image, audio), not effects/recipes
        media_count = 0
        for media_type in ["video", "image", "audio", "unknown"]:
            media_count += await database.count_user_items(actor_id, item_type=media_type)
        stats["media"] = media_count
    except Exception:
        stats["media"] = 0

    try:
        # Count user's recipes from database (ownership-based)
        stats["recipes"] = await database.count_user_items(actor_id, item_type="recipe")
    except Exception:
        stats["recipes"] = 0

    try:
        # NOTE(review): loads the full run list just to count it; a
        # dedicated count query would be cheaper — confirm RunService
        # offers one before changing.
        run_service = RunService(database, get_redis_client(), get_cache_manager())
        runs = await run_service.list_runs(actor_id)
        stats["runs"] = len(runs)
    except Exception:
        stats["runs"] = 0

    try:
        storage_providers = await database.get_user_storage_providers(actor_id)
        stats["storage"] = len(storage_providers) if storage_providers else 0
    except Exception:
        stats["storage"] = 0

    try:
        # Count user's effects from database (ownership-based)
        stats["effects"] = await database.count_user_items(actor_id, item_type="effect")
    except Exception:
        stats["effects"] = 0

    return stats
|
||||
|
||||
|
||||
@router.get("/api/stats")
async def api_stats(request: Request):
    """Get user stats as JSON for CLI and API clients."""
    user = await get_current_user(request)
    if user is None:
        raise HTTPException(401, "Authentication required")
    return await get_user_stats(user.actor_id)
|
||||
|
||||
|
||||
@router.delete("/api/clear-data")
async def clear_user_data(request: Request):
    """
    Clear all user L1 data except storage configuration.

    Deletes: runs, recipes, effects, media/cache items.
    Preserves: storage provider configurations.

    Each category is cleared independently; failures are collected in
    ``errors`` (first 10 returned) rather than aborting the whole sweep.

    Raises:
        HTTPException: 401 when the request is not authenticated.
    """
    import logging
    logger = logging.getLogger(__name__)

    user = await get_current_user(request)
    if not user:
        raise HTTPException(401, "Authentication required")

    import database
    from ..services.recipe_service import RecipeService
    from ..services.run_service import RunService
    from ..dependencies import get_redis_client, get_cache_manager

    actor_id = user.actor_id
    username = user.username
    # Per-category counters for the response summary.
    deleted = {
        "runs": 0,
        "recipes": 0,
        "effects": 0,
        "media": 0,
    }
    errors = []

    # Delete all runs
    try:
        run_service = RunService(database, get_redis_client(), get_cache_manager())
        runs = await run_service.list_runs(actor_id, offset=0, limit=10000)
        for run in runs:
            try:
                await run_service.discard_run(run["run_id"], actor_id, username)
                deleted["runs"] += 1
            except Exception as e:
                errors.append(f"Run {run['run_id']}: {e}")
    except Exception as e:
        errors.append(f"Failed to list runs: {e}")

    # Delete all recipes
    try:
        recipe_service = RecipeService(get_redis_client(), get_cache_manager())
        recipes = await recipe_service.list_recipes(actor_id, offset=0, limit=10000)
        for recipe in recipes:
            try:
                success, error = await recipe_service.delete_recipe(recipe["recipe_id"], actor_id)
                if success:
                    deleted["recipes"] += 1
                else:
                    errors.append(f"Recipe {recipe['recipe_id']}: {error}")
            except Exception as e:
                errors.append(f"Recipe {recipe['recipe_id']}: {e}")
    except Exception as e:
        errors.append(f"Failed to list recipes: {e}")

    # Delete all effects (uses ownership model)
    cache_manager = get_cache_manager()
    try:
        # Get user's effects from item_types
        effect_items = await database.get_user_items(actor_id, item_type="effect", limit=10000)
        for item in effect_items:
            cid = item.get("cid")
            if cid:
                try:
                    # Remove ownership link
                    await database.delete_item_type(cid, actor_id, "effect")
                    await database.delete_friendly_name(actor_id, cid)

                    # Check if orphaned
                    remaining = await database.get_item_types(cid)
                    if not remaining:
                        # Garbage collect the local cache and IPFS pin.
                        effects_dir = Path(cache_manager.cache_dir) / "_effects" / cid
                        if effects_dir.exists():
                            import shutil
                            shutil.rmtree(effects_dir)
                        import ipfs_client
                        ipfs_client.unpin(cid)
                    deleted["effects"] += 1
                except Exception as e:
                    errors.append(f"Effect {cid[:16]}...: {e}")
    except Exception as e:
        errors.append(f"Failed to delete effects: {e}")

    # Delete all media/cache items for user (uses ownership model)
    try:
        from ..services.cache_service import CacheService
        cache_service = CacheService(database, cache_manager)

        # Get user's media items (video, image, audio)
        for media_type in ["video", "image", "audio", "unknown"]:
            items = await database.get_user_items(actor_id, item_type=media_type, limit=10000)
            for item in items:
                cid = item.get("cid")
                if cid:
                    try:
                        success, error = await cache_service.delete_content(cid, actor_id)
                        if success:
                            deleted["media"] += 1
                        elif error:
                            errors.append(f"Media {cid[:16]}...: {error}")
                    except Exception as e:
                        errors.append(f"Media {cid[:16]}...: {e}")
    except Exception as e:
        errors.append(f"Failed to delete media: {e}")

    logger.info(f"Cleared data for {actor_id}: {deleted}")
    if errors:
        logger.warning(f"Errors during clear: {errors[:10]}")  # Log first 10 errors

    return {
        "message": "User data cleared",
        "deleted": deleted,
        "errors": errors[:10] if errors else [],  # Return first 10 errors
        "storage_preserved": True,
    }
|
||||
|
||||
|
||||
@router.get("/")
async def home(request: Request):
    """
    Home page - show README and stats.

    Renders the project README (as HTML) plus the current user's stats.
    Anonymous visitors get the same page with empty stats.
    """
    # NOTE: this block previously contained merged diff residue — an
    # unconditional redirect to /runs that made the README/stats
    # rendering below unreachable. The redirect has been removed.
    user = await get_current_user(request)

    # Load README; rendering it is best-effort and must not break the page.
    readme_html = ""
    try:
        readme_path = Path(__file__).parent.parent.parent / "README.md"
        if readme_path.exists():
            readme_html = markdown.markdown(readme_path.read_text(), extensions=['tables', 'fenced_code'])
    except Exception:
        pass

    # Get stats for current user
    stats = {}
    if user:
        stats = await get_user_stats(user.actor_id)

    templates = get_templates(request)
    return render(templates, "home.html", request,
        user=user,
        readme_html=readme_html,
        stats=stats,
        nav_counts=stats,  # Reuse stats for nav counts
        active_tab="home",
    )
|
||||
|
||||
|
||||
@router.get("/login")
async def login_redirect(request: Request):
    """Redirect to OAuth login flow."""
    # NOTE: this block previously contained merged diff residue — the
    # legacy L2-server redirect logic (with an unterminated HTMLResponse
    # call) sat unreachable after this return. Only the OAuth redirect
    # remains.
    return RedirectResponse(url="/auth/login", status_code=302)
|
||||
# Client tarball path
CLIENT_TARBALL = Path(__file__).parent.parent.parent / "artdag-client.tar.gz"


@router.get("/download/client")
async def download_client():
    """Download the Art DAG CLI client."""
    if CLIENT_TARBALL.exists():
        # Stream the prebuilt tarball directly from disk.
        return FileResponse(
            CLIENT_TARBALL,
            media_type="application/gzip",
            filename="artdag-client.tar.gz",
        )
    raise HTTPException(404, "Client package not found. Run build-client.sh to create it.")
|
||||
|
||||
125
app/routers/inbox.py
Normal file
125
app/routers/inbox.py
Normal file
@@ -0,0 +1,125 @@
|
||||
"""AP-style inbox endpoint for receiving signed activities from the coop.
|
||||
|
||||
POST /inbox — verify HTTP Signature, dispatch by activity type.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from ..dependencies import get_redis_client
|
||||
from ..utils.http_signatures import verify_request_signature, parse_key_id
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
# Cache fetched public keys in Redis for 24 hours
|
||||
_KEY_CACHE_TTL = 86400
|
||||
|
||||
|
||||
async def _fetch_actor_public_key(actor_url: str) -> str | None:
    """Fetch an actor's public key, with Redis caching.

    Returns the actor's ``publicKey.publicKeyPem`` value, or None when
    the actor document cannot be fetched or carries no public key.
    Successful lookups are cached for _KEY_CACHE_TTL seconds.
    """
    redis = get_redis_client()
    cache_key = f"actor_pubkey:{actor_url}"

    # Check cache
    # NOTE(review): assumes the Redis client returns str (decode_responses);
    # if it returns bytes, the cached value type differs from the fetched
    # one — confirm client configuration.
    cached = redis.get(cache_key)
    if cached:
        return cached

    # Fetch actor JSON
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(
                actor_url,
                headers={"Accept": "application/activity+json, application/ld+json"},
            )
            if resp.status_code != 200:
                log.warning("Failed to fetch actor %s: %d", actor_url, resp.status_code)
                return None
            data = resp.json()
    except Exception:
        log.warning("Error fetching actor %s", actor_url, exc_info=True)
        return None

    pub_key_pem = (data.get("publicKey") or {}).get("publicKeyPem")
    if not pub_key_pem:
        log.warning("No publicKey in actor %s", actor_url)
        return None

    # Cache it
    redis.set(cache_key, pub_key_pem, ex=_KEY_CACHE_TTL)
    return pub_key_pem
|
||||
|
||||
|
||||
@router.post("/inbox")
async def inbox(request: Request):
    """Receive signed AP activities from the coop platform.

    Verifies the HTTP Signature against the sending actor's fetched
    public key, then dispatches known activity types. Per AP
    convention, accepted deliveries always return 202.
    """
    sig_header = request.headers.get("signature", "")
    if not sig_header:
        return JSONResponse({"error": "missing signature"}, status_code=401)

    # Read body
    # NOTE(review): body is read but never passed to the verifier below —
    # if the signature covers a Digest header, the digest is not being
    # recomputed/validated here. Confirm verify_request_signature's scope.
    body = await request.body()

    # Verify HTTP Signature — keyId identifies the signing actor.
    actor_url = parse_key_id(sig_header)
    if not actor_url:
        return JSONResponse({"error": "invalid keyId"}, status_code=401)

    pub_key = await _fetch_actor_public_key(actor_url)
    if not pub_key:
        return JSONResponse({"error": "could not fetch public key"}, status_code=401)

    req_headers = dict(request.headers)
    path = request.url.path
    valid = verify_request_signature(
        public_key_pem=pub_key,
        signature_header=sig_header,
        method="POST",
        path=path,
        headers=req_headers,
    )
    if not valid:
        log.warning("Invalid signature from %s", actor_url)
        return JSONResponse({"error": "invalid signature"}, status_code=401)

    # Parse and dispatch
    try:
        activity = await request.json()
    except Exception:
        return JSONResponse({"error": "invalid json"}, status_code=400)

    activity_type = activity.get("type", "")
    log.info("Inbox received: %s from %s", activity_type, actor_url)

    # Unknown activity types are acknowledged but ignored.
    if activity_type == "rose:DeviceAuth":
        _handle_device_auth(activity)

    # Always 202 — AP convention
    return JSONResponse({"status": "accepted"}, status_code=202)
|
||||
|
||||
|
||||
def _handle_device_auth(activity: dict) -> None:
    """Set or delete did_auth:{device_id} in local Redis."""
    payload = activity.get("object", {})
    device_id = payload.get("device_id", "")
    action = payload.get("action", "")

    # Without a device id there is nothing to key the auth record on.
    if not device_id:
        log.warning("rose:DeviceAuth missing device_id")
        return

    client = get_redis_client()
    auth_key = f"did_auth:{device_id}"

    if action == "login":
        # Record the login timestamp with a 30-day TTL.
        client.set(auth_key, str(time.time()), ex=30 * 24 * 3600)
        log.info("did_auth set for device %s...", device_id[:16])
    elif action == "logout":
        client.delete(auth_key)
        log.info("did_auth cleared for device %s...", device_id[:16])
    else:
        log.warning("rose:DeviceAuth unknown action: %s", action)
|
||||
74
app/routers/oembed.py
Normal file
74
app/routers/oembed.py
Normal file
@@ -0,0 +1,74 @@
|
||||
"""Art-DAG oEmbed endpoint.
|
||||
|
||||
Returns oEmbed JSON responses for Art-DAG content (media, recipes, effects, runs).
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/oembed")
|
||||
async def oembed(request: Request):
|
||||
url = request.query_params.get("url", "")
|
||||
if not url:
|
||||
return JSONResponse({"error": "url parameter required"}, status_code=400)
|
||||
|
||||
# Parse URL to extract content type and CID
|
||||
# URL patterns: /cache/{cid}, /recipes/{cid}, /effects/{cid}, /runs/{cid}
|
||||
from urllib.parse import urlparse
|
||||
|
||||
parsed = urlparse(url)
|
||||
parts = [p for p in parsed.path.strip("/").split("/") if p]
|
||||
|
||||
if len(parts) < 2:
|
||||
return JSONResponse({"error": "could not parse content URL"}, status_code=404)
|
||||
|
||||
content_type = parts[0].rstrip("s") # recipes -> recipe, runs -> run
|
||||
cid = parts[1]
|
||||
|
||||
import database
|
||||
|
||||
title = cid[:16]
|
||||
thumbnail_url = None
|
||||
|
||||
# Look up metadata
|
||||
items = await database.get_item_types(cid)
|
||||
if items:
|
||||
meta = items[0]
|
||||
title = meta.get("filename") or meta.get("description") or title
|
||||
|
||||
# Try friendly name
|
||||
actor_id = meta.get("actor_id")
|
||||
if actor_id:
|
||||
friendly = await database.get_friendly_name_by_cid(actor_id, cid)
|
||||
if friendly:
|
||||
title = friendly.get("display_name") or friendly.get("base_name", title)
|
||||
|
||||
# Media items get a thumbnail
|
||||
if meta.get("type") == "media":
|
||||
artdag_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
|
||||
thumbnail_url = f"{artdag_url}/cache/{cid}/raw"
|
||||
|
||||
elif content_type == "run":
|
||||
run = await database.get_run_cache(cid)
|
||||
if run:
|
||||
title = f"Run {cid[:12]}"
|
||||
|
||||
artdag_url = os.getenv("APP_URL_ARTDAG", "https://celery-artdag.rose-ash.com")
|
||||
|
||||
resp = {
|
||||
"version": "1.0",
|
||||
"type": "link",
|
||||
"title": title,
|
||||
"provider_name": "art-dag",
|
||||
"provider_url": artdag_url,
|
||||
"url": url,
|
||||
}
|
||||
if thumbnail_url:
|
||||
resp["thumbnail_url"] = thumbnail_url
|
||||
|
||||
return JSONResponse(resp)
|
||||
@@ -4,8 +4,9 @@ Recipe management routes for L1 server.
|
||||
Handles recipe upload, listing, viewing, and execution.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException, UploadFile, File
|
||||
from fastapi.responses import HTMLResponse
|
||||
@@ -13,44 +14,349 @@ from pydantic import BaseModel
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html, wants_json
|
||||
from artdag_common.middleware.auth import UserContext
|
||||
|
||||
from ..dependencies import require_auth, get_templates, get_redis_client
|
||||
from ..services.auth_service import UserContext, AuthService
|
||||
from ..dependencies import require_auth, get_current_user, get_templates, get_redis_client, get_cache_manager
|
||||
from ..services.auth_service import AuthService
|
||||
from ..services.recipe_service import RecipeService
|
||||
from ..types import (
|
||||
CompiledNode, TransformedNode, Registry, Recipe,
|
||||
is_variable_input, get_effect_cid,
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RecipeUploadRequest(BaseModel):
|
||||
yaml_content: str
|
||||
content: str # S-expression or YAML
|
||||
name: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
|
||||
|
||||
def get_recipe_service():
|
||||
class RecipeRunRequest(BaseModel):
    """Request to run a recipe with variable inputs.

    ``inputs`` maps a variable-input name (node id, config name, or a
    snake/kebab-case variant of it) to the CID that should be bound to it.
    """
    # NOTE(review): the shared {} default relies on pydantic copying field
    # defaults per instance — confirm this model is never used outside pydantic.
    inputs: Dict[str, str] = {}  # Map input names to CIDs
|
||||
|
||||
|
||||
def get_recipe_service() -> RecipeService:
|
||||
"""Get recipe service instance."""
|
||||
return RecipeService(get_redis_client())
|
||||
return RecipeService(get_redis_client(), get_cache_manager())
|
||||
|
||||
|
||||
def transform_node(
    node: CompiledNode,
    assets: Dict[str, Dict[str, Any]],
    effects: Dict[str, Dict[str, Any]],
) -> TransformedNode:
    """Convert a compiled node into the artdag execution format.

    SOURCE nodes have their ``asset`` reference resolved to a CID, EFFECT
    nodes their ``effect`` reference; ``id``/``type`` become
    ``node_id``/``node_type``. The caller's node is never mutated.
    """
    # Work on a copy so the original config dict stays untouched.
    config = dict(node.get("config", {}))
    kind = node.get("type")

    # SOURCE nodes: look up the named asset's CID in the registry.
    if kind == "SOURCE" and "asset" in config:
        asset_ref = config["asset"]
        if asset_ref in assets:
            config["cid"] = assets[asset_ref].get("cid")

    # EFFECT nodes: look up the named effect's CID in the registry.
    if kind == "EFFECT" and "effect" in config:
        effect_ref = config["effect"]
        if effect_ref in effects:
            config["cid"] = effects[effect_ref].get("cid")

    return {
        "node_id": node.get("id", ""),
        "node_type": node.get("type", "EFFECT"),
        "config": config,
        "inputs": node.get("inputs", []),
        "name": node.get("name"),
    }
|
||||
|
||||
|
||||
def build_input_name_mapping(
    nodes: Dict[str, TransformedNode],
) -> Dict[str, str]:
    """Map every way a variable input can be referenced to its node id.

    A variable SOURCE node becomes reachable via its node id, its
    config name (plus snake_case and kebab-case variants), and its
    def-binding name (plus an underscore variant).
    """
    mapping: Dict[str, str] = {}

    for node_id, node in nodes.items():
        # Only variable SOURCE nodes accept user-supplied CIDs.
        if node.get("node_type") != "SOURCE":
            continue

        config = node.get("config", {})
        if not is_variable_input(config):
            continue

        # Collect every alias for this node, then register them all.
        aliases = [node_id]

        display = config.get("name")
        if display:
            lowered = display.lower()
            aliases += [display, lowered.replace(" ", "_"), lowered.replace(" ", "-")]

        binding = node.get("name")
        if binding:
            aliases += [binding, binding.replace("-", "_")]

        for alias in aliases:
            mapping[alias] = node_id

    return mapping
|
||||
|
||||
|
||||
def bind_inputs(
    nodes: Dict[str, TransformedNode],
    input_name_to_node: Dict[str, str],
    user_inputs: Dict[str, str],
) -> List[str]:
    """Attach user-provided CIDs to the matching SOURCE nodes.

    Each key of ``user_inputs`` is tried first as a direct node id, then
    through the alias mapping. Returns a warning for every key that
    matched neither.
    """
    unbound: List[str] = []

    for input_name, cid in user_inputs.items():
        # Direct node-id match wins, but only for SOURCE nodes.
        if input_name in nodes:
            candidate = nodes[input_name]
            if candidate.get("node_type") == "SOURCE":
                candidate["config"]["cid"] = cid
                logger.info(f"Bound input {input_name} directly to node, cid={cid[:16]}...")
                continue

        # Otherwise fall back to the precomputed alias mapping.
        if input_name in input_name_to_node:
            target_id = input_name_to_node[input_name]
            nodes[target_id]["config"]["cid"] = cid
            logger.info(f"Bound input {input_name} via lookup to node {target_id}, cid={cid[:16]}...")
            continue

        # Nothing matched — record a warning for the caller.
        unbound.append(f"Input '{input_name}' not found in recipe")
        logger.warning(f"Input {input_name} not found in nodes or input_name_to_node")

    return unbound
|
||||
|
||||
|
||||
async def resolve_friendly_names_in_registry(
    registry: dict,
    actor_id: str,
) -> dict:
    """
    Resolve friendly names to CIDs in the registry.

    Friendly names are identified by not matching a known CID format
    (IPFS Qm.../bafy... or a 64-char SHA256 hex digest); anything else in a
    "cid" slot is treated as a friendly name and resolved via the naming
    service for `actor_id`.

    Args:
        registry: Mapping with optional "assets" and "effects" sections,
            each a dict of name -> info dict carrying a "cid" entry.
        actor_id: Owner whose friendly-name namespace is consulted.

    Returns:
        A new registry dict ({"assets": ..., "effects": ...}) in which every
        resolvable friendly name has been replaced by its CID; resolved
        entries are copies carrying "_resolved_from" with the original name.
    """
    from ..services.naming_service import get_naming_service
    import re

    naming = get_naming_service()

    # CID patterns: IPFS CID (Qm..., bafy...) or SHA256 hash (64 hex chars)
    cid_pattern = re.compile(r'^(Qm[a-zA-Z0-9]{44}|bafy[a-zA-Z0-9]+|[a-f0-9]{64})$')

    async def _resolve_section(entries: dict, item_type: str) -> dict:
        # Shared logic for assets and effects: copy-and-rewrite any entry
        # whose "cid" looks like a friendly name; pass the rest through.
        resolved: dict = {}
        for name, info in entries.items():
            cid = info.get("cid", "")
            if cid and not cid_pattern.match(cid):
                # Looks like a friendly name, resolve it
                resolved_cid = await naming.resolve(actor_id, cid, item_type=item_type)
                if resolved_cid:
                    info = dict(info)
                    info["cid"] = resolved_cid
                    info["_resolved_from"] = cid
            resolved[name] = info
        return resolved

    return {
        "assets": await _resolve_section(registry.get("assets", {}), "media"),
        "effects": await _resolve_section(registry.get("effects", {}), "effect"),
    }
|
||||
|
||||
|
||||
async def prepare_dag_for_execution(
    recipe: Recipe,
    user_inputs: Dict[str, str],
    actor_id: Optional[str] = None,
) -> Tuple[str, List[str]]:
    """
    Prepare a recipe DAG for execution by transforming nodes and binding inputs.

    Resolves friendly names to CIDs if actor_id is provided.

    Args:
        recipe: Recipe mapping; must contain a dict under "dag".
        user_inputs: Map of variable-input names to CIDs to bind.
        actor_id: Owner used for friendly-name resolution; skipped if None.

    Returns:
        (dag_json, warnings) — the serialized, executable DAG and a list of
        warnings for inputs that could not be bound.

    Raises:
        ValueError: If the recipe has no dict-valued "dag" entry.
    """
    recipe_dag = recipe.get("dag")
    if not recipe_dag or not isinstance(recipe_dag, dict):
        raise ValueError("Recipe has no DAG definition")

    # Deep copy via a JSON round-trip so the stored recipe is never mutated.
    dag_copy = json.loads(json.dumps(recipe_dag))
    nodes = dag_copy.get("nodes", {})

    # Registry maps asset/effect names to CIDs for reference resolution.
    registry = recipe.get("registry", {})

    # Replace friendly names with CIDs before node transformation uses them.
    if actor_id and registry:
        registry = await resolve_friendly_names_in_registry(registry, actor_id)

    assets = registry.get("assets", {}) if registry else {}
    effects = registry.get("effects", {}) if registry else {}

    # Normalize list-shaped node definitions into a dict keyed by node id.
    if isinstance(nodes, list):
        nodes_dict: Dict[str, TransformedNode] = {}
        for node in nodes:
            node_id = node.get("id")
            if node_id:
                nodes_dict[node_id] = transform_node(node, assets, effects)
        nodes = nodes_dict
        dag_copy["nodes"] = nodes

    # Build input name mapping and bind user inputs.
    input_name_to_node = build_input_name_mapping(nodes)
    logger.info(f"Input name to node mapping: {input_name_to_node}")
    logger.info(f"User-provided inputs: {user_inputs}")

    warnings = bind_inputs(nodes, input_name_to_node, user_inputs)

    # Log final SOURCE node configs for debugging
    for nid, n in nodes.items():
        if n.get("node_type") == "SOURCE":
            logger.info(f"Final SOURCE node {nid}: config={n.get('config')}")

    # The executor expects "output_id" rather than "output".
    if "output" in dag_copy:
        dag_copy["output_id"] = dag_copy.pop("output")

    # The executor requires a metadata object to be present.
    dag_copy.setdefault("metadata", {})

    return json.dumps(dag_copy), warnings
|
||||
|
||||
|
||||
@router.post("/upload")
|
||||
async def upload_recipe(
|
||||
req: RecipeUploadRequest,
|
||||
file: UploadFile = File(...),
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
recipe_service: RecipeService = Depends(get_recipe_service),
|
||||
):
|
||||
"""Upload a new recipe from YAML."""
|
||||
"""Upload a new recipe from S-expression or YAML file."""
|
||||
import yaml
|
||||
|
||||
# Read content from the uploaded file
|
||||
content = (await file.read()).decode("utf-8")
|
||||
|
||||
# Detect format (skip comments starting with ;)
|
||||
def is_sexp_format(text):
|
||||
for line in text.split('\n'):
|
||||
stripped = line.strip()
|
||||
if not stripped or stripped.startswith(';'):
|
||||
continue
|
||||
return stripped.startswith('(')
|
||||
return False
|
||||
|
||||
is_sexp = is_sexp_format(content)
|
||||
|
||||
try:
|
||||
from artdag.sexp import compile_string, ParseError, CompileError
|
||||
SEXP_AVAILABLE = True
|
||||
except ImportError:
|
||||
SEXP_AVAILABLE = False
|
||||
|
||||
recipe_name = None
|
||||
recipe_version = "1.0"
|
||||
recipe_description = None
|
||||
variable_inputs = []
|
||||
fixed_inputs = []
|
||||
|
||||
if is_sexp:
|
||||
if not SEXP_AVAILABLE:
|
||||
raise HTTPException(500, "S-expression recipes require artdag.sexp module (not installed on server)")
|
||||
# Parse S-expression
|
||||
try:
|
||||
compiled = compile_string(content)
|
||||
recipe_name = compiled.name
|
||||
recipe_version = compiled.version
|
||||
recipe_description = compiled.description
|
||||
|
||||
for node in compiled.nodes:
|
||||
if node.get("type") == "SOURCE":
|
||||
config = node.get("config", {})
|
||||
if config.get("input"):
|
||||
variable_inputs.append(config.get("name", node.get("id")))
|
||||
elif config.get("asset"):
|
||||
fixed_inputs.append(config.get("asset"))
|
||||
except Exception as e:
|
||||
raise HTTPException(400, f"Parse error: {e}")
|
||||
else:
|
||||
# Parse YAML
|
||||
try:
|
||||
recipe_data = yaml.safe_load(content)
|
||||
recipe_name = recipe_data.get("name")
|
||||
recipe_version = recipe_data.get("version", "1.0")
|
||||
recipe_description = recipe_data.get("description")
|
||||
|
||||
inputs = recipe_data.get("inputs", {})
|
||||
for input_name, input_def in inputs.items():
|
||||
if isinstance(input_def, dict) and input_def.get("fixed"):
|
||||
fixed_inputs.append(input_name)
|
||||
else:
|
||||
variable_inputs.append(input_name)
|
||||
except yaml.YAMLError as e:
|
||||
raise HTTPException(400, f"Invalid YAML: {e}")
|
||||
|
||||
# Use filename as recipe name if not specified
|
||||
if not recipe_name and file.filename:
|
||||
recipe_name = file.filename.rsplit(".", 1)[0]
|
||||
|
||||
recipe_id, error = await recipe_service.upload_recipe(
|
||||
yaml_content=req.yaml_content,
|
||||
content=content,
|
||||
uploader=ctx.actor_id,
|
||||
name=req.name,
|
||||
description=req.description,
|
||||
name=recipe_name,
|
||||
description=recipe_description,
|
||||
)
|
||||
|
||||
if error:
|
||||
raise HTTPException(400, error)
|
||||
|
||||
return {"recipe_id": recipe_id, "message": "Recipe uploaded successfully"}
|
||||
return {
|
||||
"recipe_id": recipe_id,
|
||||
"name": recipe_name or "unnamed",
|
||||
"version": recipe_version,
|
||||
"variable_inputs": variable_inputs,
|
||||
"fixed_inputs": fixed_inputs,
|
||||
"message": "Recipe uploaded successfully",
|
||||
}
|
||||
|
||||
|
||||
@router.get("")
|
||||
@@ -59,27 +365,27 @@ async def list_recipes(
|
||||
offset: int = 0,
|
||||
limit: int = 20,
|
||||
recipe_service: RecipeService = Depends(get_recipe_service),
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
):
|
||||
"""List available recipes."""
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
if not ctx:
|
||||
if wants_json(request):
|
||||
raise HTTPException(401, "Authentication required")
|
||||
from fastapi.responses import RedirectResponse
|
||||
return RedirectResponse(url="/auth", status_code=302)
|
||||
|
||||
recipes = await recipe_service.list_recipes(ctx.actor_id, offset=offset, limit=limit)
|
||||
has_more = len(recipes) >= limit
|
||||
|
||||
if wants_json(request):
|
||||
return {"recipes": recipes, "offset": offset, "limit": limit}
|
||||
return {"recipes": recipes, "offset": offset, "limit": limit, "has_more": has_more}
|
||||
|
||||
from ..dependencies import get_nav_counts
|
||||
nav_counts = await get_nav_counts(ctx.actor_id)
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "recipes/list.html", request,
|
||||
recipes=recipes,
|
||||
user=ctx,
|
||||
nav_counts=nav_counts,
|
||||
active_tab="recipes",
|
||||
offset=offset,
|
||||
limit=limit,
|
||||
has_more=has_more,
|
||||
)
|
||||
|
||||
|
||||
@@ -88,53 +394,129 @@ async def get_recipe(
|
||||
recipe_id: str,
|
||||
request: Request,
|
||||
recipe_service: RecipeService = Depends(get_recipe_service),
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
):
|
||||
"""Get recipe details."""
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
if not ctx:
|
||||
if wants_json(request):
|
||||
raise HTTPException(401, "Authentication required")
|
||||
from fastapi.responses import RedirectResponse
|
||||
return RedirectResponse(url="/auth", status_code=302)
|
||||
|
||||
recipe = await recipe_service.get_recipe(recipe_id)
|
||||
if not recipe:
|
||||
raise HTTPException(404, "Recipe not found")
|
||||
|
||||
# Add friendly name if available
|
||||
from ..services.naming_service import get_naming_service
|
||||
naming = get_naming_service()
|
||||
friendly = await naming.get_by_cid(ctx.actor_id, recipe_id)
|
||||
if friendly:
|
||||
recipe["friendly_name"] = friendly["friendly_name"]
|
||||
recipe["base_name"] = friendly["base_name"]
|
||||
recipe["version_id"] = friendly["version_id"]
|
||||
|
||||
if wants_json(request):
|
||||
return recipe
|
||||
|
||||
# Build DAG elements for visualization
|
||||
# Build DAG elements for visualization and convert nodes to steps format
|
||||
dag_elements = []
|
||||
steps = []
|
||||
node_colors = {
|
||||
"input": "#3b82f6",
|
||||
"effect": "#8b5cf6",
|
||||
"analyze": "#ec4899",
|
||||
"SOURCE": "#3b82f6",
|
||||
"EFFECT": "#8b5cf6",
|
||||
"SEQUENCE": "#ec4899",
|
||||
"transform": "#10b981",
|
||||
"output": "#f59e0b",
|
||||
}
|
||||
|
||||
for i, step in enumerate(recipe.get("steps", [])):
|
||||
step_id = step.get("id", f"step-{i}")
|
||||
dag_elements.append({
|
||||
"data": {
|
||||
"id": step_id,
|
||||
"label": step.get("name", f"Step {i+1}"),
|
||||
"color": node_colors.get(step.get("type", "effect"), "#6b7280"),
|
||||
}
|
||||
})
|
||||
for inp in step.get("inputs", []):
|
||||
dag_elements.append({
|
||||
"data": {"source": inp, "target": step_id}
|
||||
# Debug: log recipe structure
|
||||
logger.info(f"Recipe keys: {list(recipe.keys())}")
|
||||
|
||||
# Get nodes from dag - can be list or dict, can be under "dag" or directly on recipe
|
||||
dag = recipe.get("dag", {})
|
||||
logger.info(f"DAG type: {type(dag)}, keys: {list(dag.keys()) if isinstance(dag, dict) else 'not dict'}")
|
||||
nodes = dag.get("nodes", []) if isinstance(dag, dict) else []
|
||||
logger.info(f"Nodes from dag.nodes: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
|
||||
|
||||
# Also check for nodes directly on recipe (alternative formats)
|
||||
if not nodes:
|
||||
nodes = recipe.get("nodes", [])
|
||||
logger.info(f"Nodes from recipe.nodes: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
|
||||
if not nodes:
|
||||
nodes = recipe.get("pipeline", [])
|
||||
logger.info(f"Nodes from recipe.pipeline: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
|
||||
if not nodes:
|
||||
nodes = recipe.get("steps", [])
|
||||
logger.info(f"Nodes from recipe.steps: {type(nodes)}, len: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
|
||||
|
||||
logger.info(f"Final nodes count: {len(nodes) if hasattr(nodes, '__len__') else 'N/A'}")
|
||||
|
||||
# Convert list of nodes to steps format
|
||||
if isinstance(nodes, list):
|
||||
for node in nodes:
|
||||
node_id = node.get("id", "")
|
||||
node_type = node.get("type", "EFFECT")
|
||||
inputs = node.get("inputs", [])
|
||||
config = node.get("config", {})
|
||||
|
||||
steps.append({
|
||||
"id": node_id,
|
||||
"name": node_id,
|
||||
"type": node_type,
|
||||
"inputs": inputs,
|
||||
"params": config,
|
||||
})
|
||||
|
||||
dag_elements.append({
|
||||
"data": {
|
||||
"id": node_id,
|
||||
"label": node_id,
|
||||
"color": node_colors.get(node_type, "#6b7280"),
|
||||
}
|
||||
})
|
||||
for inp in inputs:
|
||||
if isinstance(inp, str):
|
||||
dag_elements.append({
|
||||
"data": {"source": inp, "target": node_id}
|
||||
})
|
||||
elif isinstance(nodes, dict):
|
||||
for node_id, node in nodes.items():
|
||||
node_type = node.get("type", "EFFECT")
|
||||
inputs = node.get("inputs", [])
|
||||
config = node.get("config", {})
|
||||
|
||||
steps.append({
|
||||
"id": node_id,
|
||||
"name": node_id,
|
||||
"type": node_type,
|
||||
"inputs": inputs,
|
||||
"params": config,
|
||||
})
|
||||
|
||||
dag_elements.append({
|
||||
"data": {
|
||||
"id": node_id,
|
||||
"label": node_id,
|
||||
"color": node_colors.get(node_type, "#6b7280"),
|
||||
}
|
||||
})
|
||||
for inp in inputs:
|
||||
if isinstance(inp, str):
|
||||
dag_elements.append({
|
||||
"data": {"source": inp, "target": node_id}
|
||||
})
|
||||
|
||||
# Add steps to recipe for template
|
||||
recipe["steps"] = steps
|
||||
|
||||
# Use S-expression source if available
|
||||
if "sexp" not in recipe:
|
||||
recipe["sexp"] = "; No S-expression source available"
|
||||
|
||||
from ..dependencies import get_nav_counts
|
||||
nav_counts = await get_nav_counts(ctx.actor_id)
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "recipes/detail.html", request,
|
||||
recipe=recipe,
|
||||
dag_elements=dag_elements,
|
||||
user=ctx,
|
||||
nav_counts=nav_counts,
|
||||
active_tab="recipes",
|
||||
)
|
||||
|
||||
@@ -155,7 +537,7 @@ async def delete_recipe(
|
||||
@router.post("/{recipe_id}/run")
|
||||
async def run_recipe(
|
||||
recipe_id: str,
|
||||
inputs: List[str],
|
||||
req: RecipeRunRequest,
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
recipe_service: RecipeService = Depends(get_recipe_service),
|
||||
):
|
||||
@@ -168,24 +550,44 @@ async def run_recipe(
|
||||
if not recipe:
|
||||
raise HTTPException(404, "Recipe not found")
|
||||
|
||||
# Create run using run service
|
||||
run_service = RunService(database, get_redis_client(), get_cache_manager())
|
||||
run, error = await run_service.create_run(
|
||||
recipe=recipe.get("name", recipe_id),
|
||||
inputs=inputs,
|
||||
use_dag=True,
|
||||
actor_id=ctx.actor_id,
|
||||
l2_server=ctx.l2_server,
|
||||
)
|
||||
try:
|
||||
# Create run using run service
|
||||
run_service = RunService(database, get_redis_client(), get_cache_manager())
|
||||
|
||||
if error:
|
||||
raise HTTPException(400, error)
|
||||
# Prepare DAG for execution (transform nodes, bind inputs, resolve friendly names)
|
||||
dag_json = None
|
||||
if recipe.get("dag"):
|
||||
dag_json, warnings = await prepare_dag_for_execution(recipe, req.inputs, actor_id=ctx.actor_id)
|
||||
for warning in warnings:
|
||||
logger.warning(warning)
|
||||
|
||||
return {
|
||||
"run_id": run.run_id,
|
||||
"status": run.status,
|
||||
"message": "Recipe execution started",
|
||||
}
|
||||
run, error = await run_service.create_run(
|
||||
recipe=recipe_id, # Use recipe hash as primary identifier
|
||||
inputs=req.inputs,
|
||||
use_dag=True,
|
||||
dag_json=dag_json,
|
||||
actor_id=ctx.actor_id,
|
||||
l2_server=ctx.l2_server,
|
||||
recipe_name=recipe.get("name"), # Store name for display
|
||||
recipe_sexp=recipe.get("sexp"), # S-expression for code-addressed execution
|
||||
)
|
||||
|
||||
if error:
|
||||
raise HTTPException(400, error)
|
||||
|
||||
if not run:
|
||||
raise HTTPException(500, "Run creation returned no result")
|
||||
|
||||
return {
|
||||
"run_id": run["run_id"] if isinstance(run, dict) else run.run_id,
|
||||
"status": run.get("status", "pending") if isinstance(run, dict) else run.status,
|
||||
"message": "Recipe execution started",
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.exception(f"Error running recipe {recipe_id}")
|
||||
raise HTTPException(500, f"Run failed: {e}")
|
||||
|
||||
|
||||
@router.get("/{recipe_id}/dag")
|
||||
@@ -232,9 +634,7 @@ async def ui_discard_recipe(
|
||||
recipe_service: RecipeService = Depends(get_recipe_service),
|
||||
):
|
||||
"""HTMX handler: discard a recipe."""
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
ctx = await get_current_user(request)
|
||||
if not ctx:
|
||||
return HTMLResponse('<div class="text-red-400">Login required</div>', status_code=401)
|
||||
|
||||
@@ -247,3 +647,40 @@ async def ui_discard_recipe(
|
||||
'<div class="text-green-400">Recipe deleted</div>'
|
||||
'<script>setTimeout(() => window.location.href = "/recipes", 1500);</script>'
|
||||
)
|
||||
|
||||
|
||||
@router.post("/{recipe_id}/publish")
|
||||
async def publish_recipe(
|
||||
recipe_id: str,
|
||||
request: Request,
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
recipe_service: RecipeService = Depends(get_recipe_service),
|
||||
):
|
||||
"""Publish recipe to L2 and IPFS."""
|
||||
from ..services.cache_service import CacheService
|
||||
from ..dependencies import get_cache_manager
|
||||
import database
|
||||
|
||||
# Verify recipe exists
|
||||
recipe = await recipe_service.get_recipe(recipe_id)
|
||||
if not recipe:
|
||||
raise HTTPException(404, "Recipe not found")
|
||||
|
||||
# Use cache service to publish (recipes are stored in cache)
|
||||
cache_service = CacheService(database, get_cache_manager())
|
||||
ipfs_cid, error = await cache_service.publish_to_l2(
|
||||
cid=recipe_id,
|
||||
actor_id=ctx.actor_id,
|
||||
l2_server=ctx.l2_server,
|
||||
auth_token=request.cookies.get("auth_token"),
|
||||
)
|
||||
|
||||
if error:
|
||||
if wants_html(request):
|
||||
return HTMLResponse(f'<span class="text-red-400">{error}</span>')
|
||||
raise HTTPException(400, error)
|
||||
|
||||
if wants_html(request):
|
||||
return HTMLResponse(f'<span class="text-green-400">Shared: {ipfs_cid[:16]}...</span>')
|
||||
|
||||
return {"ipfs_cid": ipfs_cid, "published": True}
|
||||
|
||||
1482
app/routers/runs.py
1482
app/routers/runs.py
File diff suppressed because it is too large
Load Diff
@@ -12,9 +12,9 @@ from pydantic import BaseModel
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html, wants_json
|
||||
from artdag_common.middleware.auth import UserContext
|
||||
|
||||
from ..dependencies import get_database, get_current_user, require_auth, get_templates
|
||||
from ..services.auth_service import UserContext
|
||||
from ..services.storage_service import StorageService, STORAGE_PROVIDERS_INFO, VALID_PROVIDER_TYPES
|
||||
|
||||
router = APIRouter()
|
||||
@@ -47,30 +47,25 @@ class UpdateStorageRequest(BaseModel):
|
||||
async def list_storage(
|
||||
request: Request,
|
||||
storage_service: StorageService = Depends(get_storage_service),
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
):
|
||||
"""List user's storage providers. HTML for browsers, JSON for API."""
|
||||
from ..services.auth_service import AuthService
|
||||
from ..dependencies import get_redis_client
|
||||
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
if not ctx:
|
||||
if wants_json(request):
|
||||
raise HTTPException(401, "Authentication required")
|
||||
return RedirectResponse(url="/auth", status_code=302)
|
||||
|
||||
storages = await storage_service.list_storages(ctx.actor_id)
|
||||
|
||||
if wants_json(request):
|
||||
return {"storages": storages}
|
||||
|
||||
# Render HTML template
|
||||
from ..dependencies import get_nav_counts
|
||||
nav_counts = await get_nav_counts(ctx.actor_id)
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "storage/list.html", request,
|
||||
storages=storages,
|
||||
user=ctx,
|
||||
nav_counts=nav_counts,
|
||||
providers_info=STORAGE_PROVIDERS_INFO,
|
||||
active_tab="storage",
|
||||
)
|
||||
|
||||
|
||||
@@ -115,12 +110,7 @@ async def add_storage_form(
|
||||
storage_service: StorageService = Depends(get_storage_service),
|
||||
):
|
||||
"""Add a storage provider via HTML form."""
|
||||
from ..services.auth_service import AuthService
|
||||
from ..dependencies import get_redis_client
|
||||
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
ctx = await get_current_user(request)
|
||||
if not ctx:
|
||||
return HTMLResponse('<div class="text-red-400">Not authenticated</div>', status_code=401)
|
||||
|
||||
@@ -203,17 +193,9 @@ async def delete_storage(
|
||||
storage_id: int,
|
||||
request: Request,
|
||||
storage_service: StorageService = Depends(get_storage_service),
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
):
|
||||
"""Remove a storage provider."""
|
||||
from ..services.auth_service import AuthService
|
||||
from ..dependencies import get_redis_client
|
||||
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
if not ctx:
|
||||
raise HTTPException(401, "Not authenticated")
|
||||
|
||||
success, error = await storage_service.delete_storage(storage_id, ctx.actor_id)
|
||||
|
||||
if error:
|
||||
@@ -232,12 +214,7 @@ async def test_storage(
|
||||
storage_service: StorageService = Depends(get_storage_service),
|
||||
):
|
||||
"""Test storage provider connectivity."""
|
||||
from ..services.auth_service import AuthService
|
||||
from ..dependencies import get_redis_client
|
||||
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
ctx = await get_current_user(request)
|
||||
if not ctx:
|
||||
if wants_html(request):
|
||||
return HTMLResponse('<span class="text-red-400">Not authenticated</span>', status_code=401)
|
||||
@@ -257,27 +234,31 @@ async def storage_type_page(
|
||||
provider_type: str,
|
||||
request: Request,
|
||||
storage_service: StorageService = Depends(get_storage_service),
|
||||
ctx: UserContext = Depends(require_auth),
|
||||
):
|
||||
"""Page for managing storage configs of a specific type."""
|
||||
from ..services.auth_service import AuthService
|
||||
from ..dependencies import get_redis_client
|
||||
|
||||
auth_service = AuthService(get_redis_client())
|
||||
ctx = auth_service.get_user_from_cookie(request)
|
||||
|
||||
if not ctx:
|
||||
return RedirectResponse(url="/auth", status_code=302)
|
||||
|
||||
if provider_type not in STORAGE_PROVIDERS_INFO:
|
||||
raise HTTPException(404, "Invalid provider type")
|
||||
|
||||
storages = await storage_service.list_by_type(ctx.actor_id, provider_type)
|
||||
provider_info = STORAGE_PROVIDERS_INFO[provider_type]
|
||||
|
||||
if wants_json(request):
|
||||
return {
|
||||
"provider_type": provider_type,
|
||||
"provider_info": provider_info,
|
||||
"storages": storages,
|
||||
}
|
||||
|
||||
from ..dependencies import get_nav_counts
|
||||
nav_counts = await get_nav_counts(ctx.actor_id)
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "storage/type.html", request,
|
||||
provider_type=provider_type,
|
||||
provider_info=provider_info,
|
||||
storages=storages,
|
||||
user=ctx,
|
||||
nav_counts=nav_counts,
|
||||
active_tab="storage",
|
||||
)
|
||||
|
||||
@@ -5,13 +5,17 @@ Auth Service - token management and user verification.
|
||||
import hashlib
|
||||
import base64
|
||||
import json
|
||||
from typing import Optional
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Dict, Any, TYPE_CHECKING
|
||||
|
||||
import httpx
|
||||
|
||||
from artdag_common.middleware.auth import UserContext
|
||||
from ..config import settings
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import redis
|
||||
from starlette.requests import Request
|
||||
|
||||
|
||||
# Token expiry (30 days to match token lifetime)
|
||||
TOKEN_EXPIRY_SECONDS = 60 * 60 * 24 * 30
|
||||
@@ -21,18 +25,10 @@ REVOKED_KEY_PREFIX = "artdag:revoked:"
|
||||
USER_TOKENS_PREFIX = "artdag:user_tokens:"
|
||||
|
||||
|
||||
@dataclass
|
||||
class UserContext:
|
||||
"""User context from token."""
|
||||
username: str
|
||||
actor_id: str
|
||||
token: Optional[str] = None
|
||||
|
||||
|
||||
class AuthService:
|
||||
"""Service for authentication and token management."""
|
||||
|
||||
def __init__(self, redis_client):
|
||||
def __init__(self, redis_client: "redis.Redis[bytes]") -> None:
|
||||
self.redis = redis_client
|
||||
|
||||
def register_user_token(self, username: str, token: str) -> None:
|
||||
@@ -74,7 +70,7 @@ class AuthService:
|
||||
key = f"{REVOKED_KEY_PREFIX}{token_hash}"
|
||||
return self.redis.exists(key) > 0
|
||||
|
||||
def decode_token_claims(self, token: str) -> Optional[dict]:
|
||||
def decode_token_claims(self, token: str) -> Optional[Dict[str, Any]]:
|
||||
"""Decode JWT claims without verification."""
|
||||
try:
|
||||
parts = token.split(".")
|
||||
@@ -108,6 +104,7 @@ class AuthService:
|
||||
username=username,
|
||||
actor_id=actor_id or f"@{username}",
|
||||
token=token,
|
||||
l2_server=settings.l2_server,
|
||||
)
|
||||
|
||||
async def verify_token_with_l2(self, token: str) -> Optional[UserContext]:
|
||||
@@ -133,7 +130,7 @@ class AuthService:
|
||||
|
||||
return ctx
|
||||
|
||||
def get_user_from_cookie(self, request) -> Optional[UserContext]:
|
||||
def get_user_from_cookie(self, request: "Request") -> Optional[UserContext]:
|
||||
"""Extract user context from auth cookie."""
|
||||
token = request.cookies.get("auth_token")
|
||||
if not token:
|
||||
|
||||
@@ -2,10 +2,88 @@
|
||||
Cache Service - business logic for cache and media management.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Optional, List, Dict, Any
|
||||
from typing import Optional, List, Dict, Any, Tuple, TYPE_CHECKING
|
||||
|
||||
from artdag_common.utils.media import detect_media_type, get_mime_type
|
||||
import httpx
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from database import Database
|
||||
from cache_manager import L1CacheManager
|
||||
|
||||
|
||||
def detect_media_type(cache_path: Path) -> str:
|
||||
"""Detect if file is image, video, or audio based on magic bytes."""
|
||||
try:
|
||||
with open(cache_path, "rb") as f:
|
||||
header = f.read(32)
|
||||
except Exception:
|
||||
return "unknown"
|
||||
|
||||
# Video signatures
|
||||
if header[:4] == b'\x1a\x45\xdf\xa3': # WebM/MKV
|
||||
return "video"
|
||||
if len(header) > 8 and header[4:8] == b'ftyp': # MP4/MOV
|
||||
return "video"
|
||||
if header[:4] == b'RIFF' and len(header) > 12 and header[8:12] == b'AVI ': # AVI
|
||||
return "video"
|
||||
|
||||
# Image signatures
|
||||
if header[:8] == b'\x89PNG\r\n\x1a\n': # PNG
|
||||
return "image"
|
||||
if header[:2] == b'\xff\xd8': # JPEG
|
||||
return "image"
|
||||
if header[:6] in (b'GIF87a', b'GIF89a'): # GIF
|
||||
return "image"
|
||||
if header[:4] == b'RIFF' and len(header) > 12 and header[8:12] == b'WEBP': # WebP
|
||||
return "image"
|
||||
|
||||
# Audio signatures
|
||||
if header[:4] == b'RIFF' and len(header) > 12 and header[8:12] == b'WAVE': # WAV
|
||||
return "audio"
|
||||
if header[:3] == b'ID3' or header[:2] == b'\xff\xfb': # MP3
|
||||
return "audio"
|
||||
if header[:4] == b'fLaC': # FLAC
|
||||
return "audio"
|
||||
|
||||
return "unknown"
|
||||
|
||||
|
||||
def get_mime_type(path: Path) -> str:
|
||||
"""Get MIME type based on file magic bytes."""
|
||||
media_type = detect_media_type(path)
|
||||
if media_type == "video":
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
header = f.read(12)
|
||||
if header[:4] == b'\x1a\x45\xdf\xa3':
|
||||
return "video/x-matroska"
|
||||
return "video/mp4"
|
||||
except Exception:
|
||||
return "video/mp4"
|
||||
elif media_type == "image":
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
header = f.read(8)
|
||||
if header[:8] == b'\x89PNG\r\n\x1a\n':
|
||||
return "image/png"
|
||||
if header[:2] == b'\xff\xd8':
|
||||
return "image/jpeg"
|
||||
if header[:6] in (b'GIF87a', b'GIF89a'):
|
||||
return "image/gif"
|
||||
return "image/jpeg"
|
||||
except Exception:
|
||||
return "image/jpeg"
|
||||
elif media_type == "audio":
|
||||
return "audio/mpeg"
|
||||
return "application/octet-stream"
|
||||
|
||||
|
||||
class CacheService:
|
||||
@@ -15,96 +93,526 @@ class CacheService:
|
||||
Handles content retrieval, metadata, and media type detection.
|
||||
"""
|
||||
|
||||
def __init__(self, cache_manager, database):
|
||||
self.cache = cache_manager
|
||||
def __init__(self, database: "Database", cache_manager: "L1CacheManager") -> None:
|
||||
self.db = database
|
||||
self.cache = cache_manager
|
||||
self.cache_dir = Path(os.environ.get("CACHE_DIR", "/tmp/artdag-cache"))
|
||||
|
||||
async def get_item(self, content_hash: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get cached item by content hash."""
|
||||
path = self.cache.get_by_content_hash(content_hash)
|
||||
if not path or not path.exists():
|
||||
return None
|
||||
async def get_cache_item(self, cid: str, actor_id: str = None) -> Optional[Dict[str, Any]]:
|
||||
"""Get cached item with full metadata for display."""
|
||||
# Get metadata from database first
|
||||
meta = await self.db.load_item_metadata(cid, actor_id)
|
||||
cache_item = await self.db.get_cache_item(cid)
|
||||
|
||||
# Get metadata from database
|
||||
meta = await self.db.get_cache_item(content_hash)
|
||||
# Check if content exists locally
|
||||
path = self.cache.get_by_cid(cid) if self.cache.has_content(cid) else None
|
||||
|
||||
media_type = detect_media_type(path)
|
||||
mime_type = get_mime_type(path)
|
||||
size = path.stat().st_size
|
||||
if path and path.exists():
|
||||
# Local file exists - detect type from file
|
||||
media_type = detect_media_type(path)
|
||||
mime_type = get_mime_type(path)
|
||||
size = path.stat().st_size
|
||||
else:
|
||||
# File not local - check database for type info
|
||||
# Try to get type from item_types table
|
||||
media_type = "unknown"
|
||||
mime_type = "application/octet-stream"
|
||||
size = 0
|
||||
|
||||
return {
|
||||
"content_hash": content_hash,
|
||||
"path": str(path),
|
||||
if actor_id:
|
||||
try:
|
||||
item_types = await self.db.get_item_types(cid, actor_id)
|
||||
if item_types:
|
||||
media_type = item_types[0].get("type", "unknown")
|
||||
if media_type == "video":
|
||||
mime_type = "video/mp4"
|
||||
elif media_type == "image":
|
||||
mime_type = "image/png"
|
||||
elif media_type == "audio":
|
||||
mime_type = "audio/mpeg"
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# If no local path but we have IPFS CID, content is available remotely
|
||||
if not cache_item:
|
||||
return None
|
||||
|
||||
result = {
|
||||
"cid": cid,
|
||||
"path": str(path) if path else None,
|
||||
"media_type": media_type,
|
||||
"mime_type": mime_type,
|
||||
"size": size,
|
||||
"name": meta.get("name") if meta else None,
|
||||
"description": meta.get("description") if meta else None,
|
||||
"tags": meta.get("tags", []) if meta else [],
|
||||
"ipfs_cid": meta.get("ipfs_cid") if meta else None,
|
||||
"ipfs_cid": cache_item.get("ipfs_cid") if cache_item else None,
|
||||
"meta": meta,
|
||||
"remote_only": path is None or not path.exists(),
|
||||
}
|
||||
|
||||
async def get_path(self, content_hash: str) -> Optional[Path]:
|
||||
"""Get the file path for cached content."""
|
||||
return self.cache.get_by_content_hash(content_hash)
|
||||
# Unpack meta fields to top level for template convenience
|
||||
if meta:
|
||||
result["title"] = meta.get("title")
|
||||
result["description"] = meta.get("description")
|
||||
result["tags"] = meta.get("tags", [])
|
||||
result["source_type"] = meta.get("source_type")
|
||||
result["source_note"] = meta.get("source_note")
|
||||
result["created_at"] = meta.get("created_at")
|
||||
result["filename"] = meta.get("filename")
|
||||
|
||||
async def list_items(
|
||||
self,
|
||||
actor_id: str = None,
|
||||
media_type: str = None,
|
||||
page: int = 1,
|
||||
limit: int = 20,
|
||||
) -> Dict[str, Any]:
|
||||
"""List cached items with filters and pagination."""
|
||||
# Get items from database
|
||||
items = await self.db.list_cache_items(
|
||||
actor_id=actor_id,
|
||||
media_type=media_type,
|
||||
offset=(page - 1) * limit,
|
||||
limit=limit,
|
||||
)
|
||||
# Get friendly name if actor_id provided
|
||||
if actor_id:
|
||||
from .naming_service import get_naming_service
|
||||
naming = get_naming_service()
|
||||
friendly = await naming.get_by_cid(actor_id, cid)
|
||||
if friendly:
|
||||
result["friendly_name"] = friendly["friendly_name"]
|
||||
result["base_name"] = friendly["base_name"]
|
||||
result["version_id"] = friendly["version_id"]
|
||||
|
||||
total = await self.db.count_cache_items(actor_id=actor_id, media_type=media_type)
|
||||
return result
|
||||
|
||||
return {
|
||||
"items": items,
|
||||
"pagination": {
|
||||
"page": page,
|
||||
"limit": limit,
|
||||
"total": total,
|
||||
"has_more": page * limit < total,
|
||||
}
|
||||
}
|
||||
async def check_access(self, cid: str, actor_id: str, username: str) -> bool:
|
||||
"""Check if user has access to content."""
|
||||
user_hashes = await self._get_user_cache_hashes(username, actor_id)
|
||||
return cid in user_hashes
|
||||
|
||||
async def _get_user_cache_hashes(self, username: str, actor_id: Optional[str] = None) -> set:
|
||||
"""Get all cache hashes owned by or associated with a user."""
|
||||
match_values = [username]
|
||||
if actor_id:
|
||||
match_values.append(actor_id)
|
||||
|
||||
hashes = set()
|
||||
|
||||
# Query database for items owned by user
|
||||
if actor_id:
|
||||
try:
|
||||
db_items = await self.db.get_user_items(actor_id)
|
||||
for item in db_items:
|
||||
hashes.add(item["cid"])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Legacy: Files uploaded by user (JSON metadata)
|
||||
if self.cache_dir.exists():
|
||||
for f in self.cache_dir.iterdir():
|
||||
if f.name.endswith('.meta.json'):
|
||||
try:
|
||||
with open(f, 'r') as mf:
|
||||
meta = json.load(mf)
|
||||
if meta.get("uploader") in match_values:
|
||||
hashes.add(f.name.replace('.meta.json', ''))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Files from user's runs (inputs and outputs)
|
||||
runs = await self._list_user_runs(username, actor_id)
|
||||
for run in runs:
|
||||
inputs = run.get("inputs", [])
|
||||
if isinstance(inputs, dict):
|
||||
inputs = list(inputs.values())
|
||||
hashes.update(inputs)
|
||||
if run.get("output_cid"):
|
||||
hashes.add(run["output_cid"])
|
||||
|
||||
return hashes
|
||||
|
||||
async def _list_user_runs(self, username: str, actor_id: Optional[str]) -> List[Dict]:
|
||||
"""List runs for a user (helper for access check)."""
|
||||
from ..dependencies import get_redis_client
|
||||
import json
|
||||
|
||||
redis = get_redis_client()
|
||||
runs = []
|
||||
cursor = 0
|
||||
prefix = "artdag:run:"
|
||||
|
||||
while True:
|
||||
cursor, keys = redis.scan(cursor=cursor, match=f"{prefix}*", count=100)
|
||||
for key in keys:
|
||||
data = redis.get(key)
|
||||
if data:
|
||||
run = json.loads(data)
|
||||
if run.get("actor_id") in (username, actor_id) or run.get("username") in (username, actor_id):
|
||||
runs.append(run)
|
||||
if cursor == 0:
|
||||
break
|
||||
|
||||
return runs
|
||||
|
||||
async def get_raw_file(self, cid: str) -> Tuple[Optional[Path], Optional[str], Optional[str]]:
|
||||
"""Get raw file path, media type, and filename for download."""
|
||||
if not self.cache.has_content(cid):
|
||||
return None, None, None
|
||||
|
||||
path = self.cache.get_by_cid(cid)
|
||||
if not path or not path.exists():
|
||||
return None, None, None
|
||||
|
||||
media_type = detect_media_type(path)
|
||||
mime = get_mime_type(path)
|
||||
|
||||
# Determine extension
|
||||
ext = "bin"
|
||||
if media_type == "video":
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
header = f.read(12)
|
||||
if header[:4] == b'\x1a\x45\xdf\xa3':
|
||||
ext = "mkv"
|
||||
else:
|
||||
ext = "mp4"
|
||||
except Exception:
|
||||
ext = "mp4"
|
||||
elif media_type == "image":
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
header = f.read(8)
|
||||
if header[:8] == b'\x89PNG\r\n\x1a\n':
|
||||
ext = "png"
|
||||
else:
|
||||
ext = "jpg"
|
||||
except Exception:
|
||||
ext = "jpg"
|
||||
|
||||
filename = f"{cid}.{ext}"
|
||||
return path, mime, filename
|
||||
|
||||
async def get_as_mp4(self, cid: str) -> Tuple[Optional[Path], Optional[str]]:
|
||||
"""Get content as MP4, transcoding if necessary. Returns (path, error)."""
|
||||
if not self.cache.has_content(cid):
|
||||
return None, f"Content {cid} not in cache"
|
||||
|
||||
path = self.cache.get_by_cid(cid)
|
||||
if not path or not path.exists():
|
||||
return None, f"Content {cid} not in cache"
|
||||
|
||||
# Check if video
|
||||
media_type = detect_media_type(path)
|
||||
if media_type != "video":
|
||||
return None, "Content is not a video"
|
||||
|
||||
# Check for cached MP4
|
||||
mp4_path = self.cache_dir / f"{cid}.mp4"
|
||||
if mp4_path.exists():
|
||||
return mp4_path, None
|
||||
|
||||
# Check if already MP4 format
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["ffprobe", "-v", "error", "-select_streams", "v:0",
|
||||
"-show_entries", "format=format_name", "-of", "csv=p=0", str(path)],
|
||||
capture_output=True, text=True, timeout=10
|
||||
)
|
||||
if "mp4" in result.stdout.lower() or "mov" in result.stdout.lower():
|
||||
return path, None
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Transcode to MP4
|
||||
transcode_path = self.cache_dir / f"{cid}.transcoding.mp4"
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["ffmpeg", "-y", "-i", str(path),
|
||||
"-c:v", "libx264", "-preset", "fast", "-crf", "23",
|
||||
"-c:a", "aac", "-b:a", "128k",
|
||||
"-movflags", "+faststart",
|
||||
str(transcode_path)],
|
||||
capture_output=True, text=True, timeout=600
|
||||
)
|
||||
if result.returncode != 0:
|
||||
return None, f"Transcoding failed: {result.stderr[:200]}"
|
||||
|
||||
transcode_path.rename(mp4_path)
|
||||
return mp4_path, None
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
if transcode_path.exists():
|
||||
transcode_path.unlink()
|
||||
return None, "Transcoding timed out"
|
||||
except Exception as e:
|
||||
if transcode_path.exists():
|
||||
transcode_path.unlink()
|
||||
return None, f"Transcoding failed: {e}"
|
||||
|
||||
async def get_metadata(self, cid: str, actor_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get content metadata."""
|
||||
if not self.cache.has_content(cid):
|
||||
return None
|
||||
return await self.db.load_item_metadata(cid, actor_id)
|
||||
|
||||
async def update_metadata(
|
||||
self,
|
||||
content_hash: str,
|
||||
name: str = None,
|
||||
description: str = None,
|
||||
tags: List[str] = None,
|
||||
) -> bool:
|
||||
"""Update item metadata."""
|
||||
return await self.db.update_cache_metadata(
|
||||
content_hash=content_hash,
|
||||
name=name,
|
||||
description=description,
|
||||
tags=tags,
|
||||
cid: str,
|
||||
actor_id: str,
|
||||
title: Optional[str] = None,
|
||||
description: Optional[str] = None,
|
||||
tags: Optional[List[str]] = None,
|
||||
custom: Optional[Dict[str, Any]] = None,
|
||||
) -> Tuple[bool, Optional[str]]:
|
||||
"""Update content metadata. Returns (success, error)."""
|
||||
if not self.cache.has_content(cid):
|
||||
return False, "Content not found"
|
||||
|
||||
# Build update dict
|
||||
updates = {}
|
||||
if title is not None:
|
||||
updates["title"] = title
|
||||
if description is not None:
|
||||
updates["description"] = description
|
||||
if tags is not None:
|
||||
updates["tags"] = tags
|
||||
if custom is not None:
|
||||
updates["custom"] = custom
|
||||
|
||||
try:
|
||||
await self.db.update_item_metadata(cid, actor_id, **updates)
|
||||
return True, None
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
|
||||
async def publish_to_l2(
|
||||
self,
|
||||
cid: str,
|
||||
actor_id: str,
|
||||
l2_server: str,
|
||||
auth_token: str,
|
||||
) -> Tuple[Optional[str], Optional[str]]:
|
||||
"""Publish content to L2 and IPFS. Returns (ipfs_cid, error)."""
|
||||
if not self.cache.has_content(cid):
|
||||
return None, "Content not found"
|
||||
|
||||
# Get IPFS CID
|
||||
cache_item = await self.db.get_cache_item(cid)
|
||||
ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None
|
||||
|
||||
# Get metadata for origin info
|
||||
meta = await self.db.load_item_metadata(cid, actor_id)
|
||||
origin = meta.get("origin") if meta else None
|
||||
|
||||
if not origin or "type" not in origin:
|
||||
return None, "Origin must be set before publishing"
|
||||
|
||||
if not auth_token:
|
||||
return None, "Authentication token required"
|
||||
|
||||
# Call L2 publish-cache endpoint
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=30) as client:
|
||||
resp = await client.post(
|
||||
f"{l2_server}/assets/publish-cache",
|
||||
headers={"Authorization": f"Bearer {auth_token}"},
|
||||
json={
|
||||
"cid": cid,
|
||||
"ipfs_cid": ipfs_cid,
|
||||
"asset_name": meta.get("title") or cid[:16],
|
||||
"asset_type": detect_media_type(self.cache.get_by_cid(cid)),
|
||||
"origin": origin,
|
||||
"description": meta.get("description"),
|
||||
"tags": meta.get("tags", []),
|
||||
}
|
||||
)
|
||||
resp.raise_for_status()
|
||||
l2_result = resp.json()
|
||||
except httpx.HTTPStatusError as e:
|
||||
error_detail = str(e)
|
||||
try:
|
||||
error_detail = e.response.json().get("detail", str(e))
|
||||
except Exception:
|
||||
pass
|
||||
return None, f"L2 publish failed: {error_detail}"
|
||||
except Exception as e:
|
||||
return None, f"L2 publish failed: {e}"
|
||||
|
||||
# Update local metadata with publish status
|
||||
await self.db.save_l2_share(
|
||||
cid=cid,
|
||||
actor_id=actor_id,
|
||||
l2_server=l2_server,
|
||||
asset_name=meta.get("title") or cid[:16],
|
||||
content_type=detect_media_type(self.cache.get_by_cid(cid))
|
||||
)
|
||||
await self.db.update_item_metadata(
|
||||
cid=cid,
|
||||
actor_id=actor_id,
|
||||
pinned=True,
|
||||
pin_reason="published"
|
||||
)
|
||||
|
||||
async def delete_item(self, content_hash: str) -> bool:
|
||||
"""Delete a cached item."""
|
||||
path = self.cache.get_by_content_hash(content_hash)
|
||||
if path and path.exists():
|
||||
path.unlink()
|
||||
return l2_result.get("ipfs_cid") or ipfs_cid, None
|
||||
|
||||
# Remove from database
|
||||
await self.db.delete_cache_item(content_hash)
|
||||
return True
|
||||
async def delete_content(self, cid: str, actor_id: str) -> Tuple[bool, Optional[str]]:
|
||||
"""
|
||||
Remove user's ownership link to cached content.
|
||||
|
||||
def has_content(self, content_hash: str) -> bool:
|
||||
This removes the item_types entry linking the user to the content.
|
||||
The cached file is only deleted if no other users own it.
|
||||
Returns (success, error).
|
||||
"""
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Check if pinned for this user
|
||||
meta = await self.db.load_item_metadata(cid, actor_id)
|
||||
if meta and meta.get("pinned"):
|
||||
pin_reason = meta.get("pin_reason", "unknown")
|
||||
return False, f"Cannot discard pinned item (reason: {pin_reason})"
|
||||
|
||||
# Get the item type to delete the right ownership entry
|
||||
item_types = await self.db.get_item_types(cid, actor_id)
|
||||
if not item_types:
|
||||
return False, "You don't own this content"
|
||||
|
||||
# Remove user's ownership links (all types for this user)
|
||||
for item in item_types:
|
||||
item_type = item.get("type", "media")
|
||||
await self.db.delete_item_type(cid, actor_id, item_type)
|
||||
|
||||
# Remove friendly name
|
||||
await self.db.delete_friendly_name(actor_id, cid)
|
||||
|
||||
# Check if anyone else still owns this content
|
||||
remaining_owners = await self.db.get_item_types(cid)
|
||||
|
||||
# Only delete the actual file if no one owns it anymore
|
||||
if not remaining_owners:
|
||||
# Check deletion rules via cache_manager
|
||||
can_delete, reason = self.cache.can_delete(cid)
|
||||
if can_delete:
|
||||
# Delete via cache_manager
|
||||
self.cache.delete_by_cid(cid)
|
||||
|
||||
# Clean up legacy metadata files
|
||||
meta_path = self.cache_dir / f"{cid}.meta.json"
|
||||
if meta_path.exists():
|
||||
meta_path.unlink()
|
||||
mp4_path = self.cache_dir / f"{cid}.mp4"
|
||||
if mp4_path.exists():
|
||||
mp4_path.unlink()
|
||||
|
||||
# Delete from database
|
||||
await self.db.delete_cache_item(cid)
|
||||
|
||||
logger.info(f"Garbage collected content {cid[:16]}... (no remaining owners)")
|
||||
else:
|
||||
logger.info(f"Content {cid[:16]}... orphaned but cannot delete: {reason}")
|
||||
|
||||
logger.info(f"Removed content {cid[:16]}... ownership for {actor_id}")
|
||||
return True, None
|
||||
|
||||
async def import_from_ipfs(self, ipfs_cid: str, actor_id: str) -> Tuple[Optional[str], Optional[str]]:
|
||||
"""Import content from IPFS. Returns (cid, error)."""
|
||||
try:
|
||||
import ipfs_client
|
||||
|
||||
# Download from IPFS
|
||||
legacy_dir = self.cache_dir / "legacy"
|
||||
legacy_dir.mkdir(parents=True, exist_ok=True)
|
||||
tmp_path = legacy_dir / f"import-{ipfs_cid[:16]}"
|
||||
|
||||
if not ipfs_client.get_file(ipfs_cid, str(tmp_path)):
|
||||
return None, f"Could not fetch CID {ipfs_cid} from IPFS"
|
||||
|
||||
# Detect media type before storing
|
||||
media_type = detect_media_type(tmp_path)
|
||||
|
||||
# Store in cache
|
||||
cached, new_ipfs_cid = self.cache.put(tmp_path, node_type="import", move=True)
|
||||
cid = new_ipfs_cid or cached.cid # Prefer IPFS CID
|
||||
|
||||
# Save to database with detected media type
|
||||
await self.db.create_cache_item(cid, new_ipfs_cid)
|
||||
await self.db.save_item_metadata(
|
||||
cid=cid,
|
||||
actor_id=actor_id,
|
||||
item_type=media_type, # Use detected type for filtering
|
||||
filename=f"ipfs-{ipfs_cid[:16]}"
|
||||
)
|
||||
|
||||
return cid, None
|
||||
except Exception as e:
|
||||
return None, f"Import failed: {e}"
|
||||
|
||||
async def upload_content(
|
||||
self,
|
||||
content: bytes,
|
||||
filename: str,
|
||||
actor_id: str,
|
||||
) -> Tuple[Optional[str], Optional[str], Optional[str]]:
|
||||
"""Upload content to cache. Returns (cid, ipfs_cid, error).
|
||||
|
||||
Files are stored locally first for fast response, then uploaded
|
||||
to IPFS in the background.
|
||||
"""
|
||||
import tempfile
|
||||
|
||||
try:
|
||||
# Write to temp file
|
||||
with tempfile.NamedTemporaryFile(delete=False) as tmp:
|
||||
tmp.write(content)
|
||||
tmp_path = Path(tmp.name)
|
||||
|
||||
# Detect media type (video/image/audio) before moving file
|
||||
media_type = detect_media_type(tmp_path)
|
||||
|
||||
# Store locally AND upload to IPFS synchronously
|
||||
# This ensures the IPFS CID is available immediately for distributed access
|
||||
cached, ipfs_cid = self.cache.put(tmp_path, node_type="upload", move=True, skip_ipfs=False)
|
||||
cid = ipfs_cid or cached.cid # Prefer IPFS CID, fall back to local hash
|
||||
|
||||
# Save to database with media category type
|
||||
await self.db.create_cache_item(cached.cid, ipfs_cid)
|
||||
await self.db.save_item_metadata(
|
||||
cid=cid,
|
||||
actor_id=actor_id,
|
||||
item_type=media_type,
|
||||
filename=filename
|
||||
)
|
||||
|
||||
if ipfs_cid:
|
||||
logger.info(f"Uploaded to IPFS: {ipfs_cid[:16]}...")
|
||||
else:
|
||||
logger.warning(f"IPFS upload failed, using local hash: {cid[:16]}...")
|
||||
|
||||
return cid, ipfs_cid, None
|
||||
except Exception as e:
|
||||
return None, None, f"Upload failed: {e}"
|
||||
|
||||
async def list_media(
|
||||
self,
|
||||
actor_id: Optional[str] = None,
|
||||
username: Optional[str] = None,
|
||||
offset: int = 0,
|
||||
limit: int = 24,
|
||||
media_type: Optional[str] = None,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""List media items in cache."""
|
||||
# Get items from database (uses item_types table)
|
||||
items = await self.db.get_user_items(
|
||||
actor_id=actor_id or username,
|
||||
item_type=media_type, # "video", "image", "audio", or None for all
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
|
||||
# Add friendly names to items
|
||||
if actor_id:
|
||||
from .naming_service import get_naming_service
|
||||
naming = get_naming_service()
|
||||
for item in items:
|
||||
cid = item.get("cid")
|
||||
if cid:
|
||||
friendly = await naming.get_by_cid(actor_id, cid)
|
||||
if friendly:
|
||||
item["friendly_name"] = friendly["friendly_name"]
|
||||
item["base_name"] = friendly["base_name"]
|
||||
|
||||
return items
|
||||
|
||||
# Legacy compatibility methods
|
||||
def has_content(self, cid: str) -> bool:
|
||||
"""Check if content exists in cache."""
|
||||
return self.cache.has_content(content_hash)
|
||||
return self.cache.has_content(cid)
|
||||
|
||||
def get_ipfs_cid(self, content_hash: str) -> Optional[str]:
|
||||
def get_ipfs_cid(self, cid: str) -> Optional[str]:
|
||||
"""Get IPFS CID for cached content."""
|
||||
return self.cache.get_ipfs_cid(content_hash)
|
||||
return self.cache.get_ipfs_cid(cid)
|
||||
|
||||
234
app/services/naming_service.py
Normal file
234
app/services/naming_service.py
Normal file
@@ -0,0 +1,234 @@
|
||||
"""
|
||||
Naming service for friendly names.
|
||||
|
||||
Handles:
|
||||
- Name normalization (My Cool Effect -> my-cool-effect)
|
||||
- Version ID generation (server-signed timestamps)
|
||||
- Friendly name assignment and resolution
|
||||
"""
|
||||
|
||||
import hmac
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
from typing import Optional, Tuple
|
||||
|
||||
import database
|
||||
|
||||
|
||||
# Base32 Crockford alphabet (excludes I, L, O, U to avoid confusion)
|
||||
CROCKFORD_ALPHABET = "0123456789abcdefghjkmnpqrstvwxyz"
|
||||
|
||||
|
||||
def _get_server_secret() -> bytes:
|
||||
"""Get server secret for signing version IDs."""
|
||||
secret = os.environ.get("SERVER_SECRET", "")
|
||||
if not secret:
|
||||
# Fall back to a derived secret from other env vars
|
||||
# In production, SERVER_SECRET should be set explicitly
|
||||
secret = os.environ.get("SECRET_KEY", "default-dev-secret")
|
||||
return secret.encode("utf-8")
|
||||
|
||||
|
||||
def _base32_crockford_encode(data: bytes) -> str:
|
||||
"""Encode bytes as base32-crockford (lowercase)."""
|
||||
# Convert bytes to integer
|
||||
num = int.from_bytes(data, "big")
|
||||
if num == 0:
|
||||
return CROCKFORD_ALPHABET[0]
|
||||
|
||||
result = []
|
||||
while num > 0:
|
||||
result.append(CROCKFORD_ALPHABET[num % 32])
|
||||
num //= 32
|
||||
|
||||
return "".join(reversed(result))
|
||||
|
||||
|
||||
def generate_version_id() -> str:
|
||||
"""
|
||||
Generate a version ID that is:
|
||||
- Always increasing (timestamp-based prefix)
|
||||
- Verifiable as originating from this server (HMAC suffix)
|
||||
- Short and URL-safe (13 chars)
|
||||
|
||||
Format: 6 bytes timestamp (ms) + 2 bytes HMAC = 8 bytes = 13 base32 chars
|
||||
"""
|
||||
timestamp_ms = int(time.time() * 1000)
|
||||
timestamp_bytes = timestamp_ms.to_bytes(6, "big")
|
||||
|
||||
# HMAC the timestamp with server secret
|
||||
secret = _get_server_secret()
|
||||
sig = hmac.new(secret, timestamp_bytes, "sha256").digest()
|
||||
|
||||
# Combine: 6 bytes timestamp + 2 bytes HMAC signature
|
||||
combined = timestamp_bytes + sig[:2]
|
||||
|
||||
# Encode as base32-crockford
|
||||
return _base32_crockford_encode(combined)
|
||||
|
||||
|
||||
def normalize_name(name: str) -> str:
|
||||
"""
|
||||
Normalize a display name to a base name.
|
||||
|
||||
- Lowercase
|
||||
- Replace spaces and underscores with dashes
|
||||
- Remove special characters (keep alphanumeric and dashes)
|
||||
- Collapse multiple dashes
|
||||
- Strip leading/trailing dashes
|
||||
|
||||
Examples:
|
||||
"My Cool Effect" -> "my-cool-effect"
|
||||
"Brightness_V2" -> "brightness-v2"
|
||||
"Test!!!Effect" -> "test-effect"
|
||||
"""
|
||||
# Lowercase
|
||||
name = name.lower()
|
||||
|
||||
# Replace spaces and underscores with dashes
|
||||
name = re.sub(r"[\s_]+", "-", name)
|
||||
|
||||
# Remove anything that's not alphanumeric or dash
|
||||
name = re.sub(r"[^a-z0-9-]", "", name)
|
||||
|
||||
# Collapse multiple dashes
|
||||
name = re.sub(r"-+", "-", name)
|
||||
|
||||
# Strip leading/trailing dashes
|
||||
name = name.strip("-")
|
||||
|
||||
return name or "unnamed"
|
||||
|
||||
|
||||
def parse_friendly_name(friendly_name: str) -> Tuple[str, Optional[str]]:
|
||||
"""
|
||||
Parse a friendly name into base name and optional version.
|
||||
|
||||
Args:
|
||||
friendly_name: Name like "my-effect" or "my-effect 01hw3x9k"
|
||||
|
||||
Returns:
|
||||
Tuple of (base_name, version_id or None)
|
||||
"""
|
||||
parts = friendly_name.strip().split(" ", 1)
|
||||
base_name = parts[0]
|
||||
version_id = parts[1] if len(parts) > 1 else None
|
||||
return base_name, version_id
|
||||
|
||||
|
||||
def format_friendly_name(base_name: str, version_id: str) -> str:
|
||||
"""Format a base name and version into a full friendly name."""
|
||||
return f"{base_name} {version_id}"
|
||||
|
||||
|
||||
def format_l2_name(actor_id: str, base_name: str, version_id: str) -> str:
|
||||
"""
|
||||
Format a friendly name for L2 sharing.
|
||||
|
||||
Format: @user@domain base-name version-id
|
||||
"""
|
||||
return f"{actor_id} {base_name} {version_id}"
|
||||
|
||||
|
||||
class NamingService:
    """Service layer for friendly names; persistence is delegated to ``database``."""

    async def assign_name(
        self,
        cid: str,
        actor_id: str,
        item_type: str,
        display_name: Optional[str] = None,
        filename: Optional[str] = None,
    ) -> dict:
        """Create and persist a friendly-name entry for a piece of content.

        Args:
            cid: Content ID being named.
            actor_id: Owning user ID.
            item_type: Kind of item (recipe, effect, media).
            display_name: Human-readable name (optional).
            filename: Original filename; its stem is used as a fallback
                display name when ``display_name`` is empty.

        Returns:
            The friendly-name entry dict created by the database layer.
        """
        if not display_name:
            # Prefer the filename stem; otherwise fall back to a typed placeholder.
            if filename:
                display_name = os.path.splitext(filename)[0]
            else:
                display_name = f"unnamed-{item_type}"

        # Canonical base name plus a fresh version identifier.
        base_name = normalize_name(display_name)
        version_id = generate_version_id()

        return await database.create_friendly_name(
            actor_id=actor_id,
            base_name=base_name,
            version_id=version_id,
            cid=cid,
            item_type=item_type,
            display_name=display_name,
        )

    async def get_by_cid(self, actor_id: str, cid: str) -> Optional[dict]:
        """Fetch the friendly-name entry for ``cid``, or None when absent."""
        return await database.get_friendly_name_by_cid(actor_id, cid)

    async def resolve(
        self,
        actor_id: str,
        name: str,
        item_type: Optional[str] = None,
    ) -> Optional[str]:
        """Resolve a friendly name to a CID.

        Args:
            actor_id: User ID.
            name: Friendly name, either "base-name" or "base-name version".
            item_type: Optional type filter.

        Returns:
            The matching CID, or None when nothing matches.
        """
        return await database.resolve_friendly_name(actor_id, name, item_type)

    async def list_names(
        self,
        actor_id: str,
        item_type: Optional[str] = None,
        latest_only: bool = False,
    ) -> list:
        """Return the user's friendly-name entries, optionally filtered by type."""
        return await database.list_friendly_names(
            actor_id=actor_id,
            item_type=item_type,
            latest_only=latest_only,
        )

    async def delete(self, actor_id: str, cid: str) -> bool:
        """Remove the friendly-name entry for ``cid``; True on success."""
        return await database.delete_friendly_name(actor_id, cid)
|
||||
|
||||
|
||||
# Module-level instance (lazily created by get_naming_service below)
_naming_service: Optional[NamingService] = None


def get_naming_service() -> NamingService:
    """Return the process-wide NamingService singleton, creating it on first use."""
    global _naming_service
    if _naming_service is None:
        _naming_service = NamingService()
    return _naming_service
|
||||
@@ -1,128 +1,337 @@
|
||||
"""
|
||||
Recipe Service - business logic for recipe management.
|
||||
|
||||
Recipes are S-expressions stored in the content-addressed cache (and IPFS).
|
||||
The recipe ID is the content hash of the file.
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any
|
||||
import json
|
||||
import yaml
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Optional, List, Dict, Any, Tuple, TYPE_CHECKING
|
||||
|
||||
from artdag.sexp import compile_string, parse, serialize, CompileError, ParseError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import redis
|
||||
from cache_manager import L1CacheManager
|
||||
|
||||
from ..types import Recipe, CompiledDAG, VisualizationDAG, VisNode, VisEdge
|
||||
|
||||
|
||||
class RecipeService:
|
||||
"""
|
||||
Service for managing recipes.
|
||||
|
||||
Handles recipe parsing, validation, and DAG building.
|
||||
Recipes are S-expressions stored in the content-addressed cache.
|
||||
"""
|
||||
|
||||
def __init__(self, redis, cache):
|
||||
def __init__(self, redis: "redis.Redis", cache: "L1CacheManager") -> None:
|
||||
# Redis kept for compatibility but not used for recipe storage
|
||||
self.redis = redis
|
||||
self.cache = cache
|
||||
self.recipe_prefix = "recipe:"
|
||||
|
||||
async def get_recipe(self, recipe_id: str) -> Optional[Dict[str, Any]]:
|
||||
async def get_recipe(self, recipe_id: str) -> Optional[Recipe]:
|
||||
"""Get a recipe by ID (content hash)."""
|
||||
# First check Redis
|
||||
data = self.redis.get(f"{self.recipe_prefix}{recipe_id}")
|
||||
if data:
|
||||
return json.loads(data)
|
||||
import yaml
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Fall back to cache
|
||||
path = self.cache.get_by_content_hash(recipe_id)
|
||||
if path and path.exists():
|
||||
with open(path) as f:
|
||||
return yaml.safe_load(f)
|
||||
# Get from cache (content-addressed storage)
|
||||
logger.info(f"get_recipe: Looking up recipe_id={recipe_id[:16]}...")
|
||||
path = self.cache.get_by_cid(recipe_id)
|
||||
logger.info(f"get_recipe: cache.get_by_cid returned path={path}")
|
||||
if not path or not path.exists():
|
||||
logger.warning(f"get_recipe: Recipe {recipe_id[:16]}... not found in cache")
|
||||
return None
|
||||
|
||||
return None
|
||||
with open(path) as f:
|
||||
content = f.read()
|
||||
|
||||
# Detect format - check if it starts with ( after skipping comments
|
||||
def is_sexp_format(text):
|
||||
for line in text.split('\n'):
|
||||
stripped = line.strip()
|
||||
if not stripped or stripped.startswith(';'):
|
||||
continue
|
||||
return stripped.startswith('(')
|
||||
return False
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if is_sexp_format(content):
|
||||
# Detect if this is a streaming recipe (starts with (stream ...))
|
||||
def is_streaming_recipe(text):
|
||||
for line in text.split('\n'):
|
||||
stripped = line.strip()
|
||||
if not stripped or stripped.startswith(';'):
|
||||
continue
|
||||
return stripped.startswith('(stream')
|
||||
return False
|
||||
|
||||
if is_streaming_recipe(content):
|
||||
# Streaming recipes have different format - parse manually
|
||||
import re
|
||||
name_match = re.search(r'\(stream\s+"([^"]+)"', content)
|
||||
recipe_name = name_match.group(1) if name_match else "streaming"
|
||||
|
||||
recipe_data = {
|
||||
"name": recipe_name,
|
||||
"sexp": content,
|
||||
"format": "sexp",
|
||||
"type": "streaming",
|
||||
"dag": {"nodes": []}, # Streaming recipes don't have traditional DAG
|
||||
}
|
||||
logger.info(f"Parsed streaming recipe {recipe_id[:16]}..., name: {recipe_name}")
|
||||
else:
|
||||
# Parse traditional (recipe ...) S-expression
|
||||
try:
|
||||
compiled = compile_string(content)
|
||||
recipe_data = compiled.to_dict()
|
||||
recipe_data["sexp"] = content
|
||||
recipe_data["format"] = "sexp"
|
||||
logger.info(f"Parsed sexp recipe {recipe_id[:16]}..., keys: {list(recipe_data.keys())}")
|
||||
except (ParseError, CompileError) as e:
|
||||
logger.warning(f"Failed to parse sexp recipe {recipe_id[:16]}...: {e}")
|
||||
return {"error": str(e), "recipe_id": recipe_id}
|
||||
else:
|
||||
# Parse YAML
|
||||
try:
|
||||
recipe_data = yaml.safe_load(content)
|
||||
if not isinstance(recipe_data, dict):
|
||||
return {"error": "Invalid YAML: expected dictionary", "recipe_id": recipe_id}
|
||||
recipe_data["yaml"] = content
|
||||
recipe_data["format"] = "yaml"
|
||||
except yaml.YAMLError as e:
|
||||
return {"error": f"YAML parse error: {e}", "recipe_id": recipe_id}
|
||||
|
||||
# Add the recipe_id to the data for convenience
|
||||
recipe_data["recipe_id"] = recipe_id
|
||||
|
||||
# Get IPFS CID if available
|
||||
ipfs_cid = self.cache.get_ipfs_cid(recipe_id)
|
||||
if ipfs_cid:
|
||||
recipe_data["ipfs_cid"] = ipfs_cid
|
||||
|
||||
# Compute step_count from nodes (handle both formats)
|
||||
if recipe_data.get("format") == "sexp":
|
||||
nodes = recipe_data.get("dag", {}).get("nodes", [])
|
||||
else:
|
||||
# YAML format: nodes might be at top level or under dag
|
||||
nodes = recipe_data.get("nodes", recipe_data.get("dag", {}).get("nodes", []))
|
||||
recipe_data["step_count"] = len(nodes) if isinstance(nodes, (list, dict)) else 0
|
||||
|
||||
return recipe_data
|
||||
|
||||
async def list_recipes(self, actor_id: Optional[str] = None, offset: int = 0, limit: int = 20) -> List[Recipe]:
|
||||
"""
|
||||
List recipes owned by a user.
|
||||
|
||||
Queries item_types table for user's recipe links.
|
||||
"""
|
||||
import logging
|
||||
import database
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
async def list_recipes(self, actor_id: str = None, page: int = 1, limit: int = 20) -> Dict[str, Any]:
|
||||
"""List available recipes with pagination."""
|
||||
recipes = []
|
||||
cursor = 0
|
||||
|
||||
while True:
|
||||
cursor, keys = self.redis.scan(
|
||||
cursor=cursor,
|
||||
match=f"{self.recipe_prefix}*",
|
||||
count=100
|
||||
)
|
||||
for key in keys:
|
||||
data = self.redis.get(key)
|
||||
if data:
|
||||
recipe = json.loads(data)
|
||||
# Filter by actor if specified
|
||||
if actor_id is None or recipe.get("actor_id") == actor_id:
|
||||
recipes.append(recipe)
|
||||
if cursor == 0:
|
||||
break
|
||||
if not actor_id:
|
||||
logger.warning("list_recipes called without actor_id")
|
||||
return []
|
||||
|
||||
# Get user's recipe CIDs from item_types
|
||||
user_items = await database.get_user_items(actor_id, item_type="recipe", limit=1000)
|
||||
recipe_cids = [item["cid"] for item in user_items]
|
||||
logger.info(f"Found {len(recipe_cids)} recipe CIDs for user {actor_id}")
|
||||
|
||||
for cid in recipe_cids:
|
||||
recipe = await self.get_recipe(cid)
|
||||
if recipe and not recipe.get("error"):
|
||||
recipes.append(recipe)
|
||||
elif recipe and recipe.get("error"):
|
||||
logger.warning(f"Recipe {cid[:16]}... has error: {recipe.get('error')}")
|
||||
|
||||
# Add friendly names
|
||||
from .naming_service import get_naming_service
|
||||
naming = get_naming_service()
|
||||
for recipe in recipes:
|
||||
recipe_id = recipe.get("recipe_id")
|
||||
if recipe_id:
|
||||
friendly = await naming.get_by_cid(actor_id, recipe_id)
|
||||
if friendly:
|
||||
recipe["friendly_name"] = friendly["friendly_name"]
|
||||
recipe["base_name"] = friendly["base_name"]
|
||||
|
||||
# Sort by name
|
||||
recipes.sort(key=lambda r: r.get("name", ""))
|
||||
|
||||
# Paginate
|
||||
total = len(recipes)
|
||||
start = (page - 1) * limit
|
||||
end = start + limit
|
||||
page_recipes = recipes[start:end]
|
||||
return recipes[offset:offset + limit]
|
||||
|
||||
return {
|
||||
"recipes": page_recipes,
|
||||
"pagination": {
|
||||
"page": page,
|
||||
"limit": limit,
|
||||
"total": total,
|
||||
"has_more": end < total,
|
||||
}
|
||||
}
|
||||
async def upload_recipe(
|
||||
self,
|
||||
content: str,
|
||||
uploader: str,
|
||||
name: str = None,
|
||||
description: str = None,
|
||||
) -> Tuple[Optional[str], Optional[str]]:
|
||||
"""
|
||||
Upload a recipe from S-expression content.
|
||||
|
||||
async def save_recipe(self, recipe_id: str, recipe_data: Dict[str, Any]) -> None:
|
||||
"""Save a recipe to Redis."""
|
||||
self.redis.set(f"{self.recipe_prefix}{recipe_id}", json.dumps(recipe_data))
|
||||
The recipe is stored in the cache and pinned to IPFS.
|
||||
Returns (recipe_id, error_message).
|
||||
"""
|
||||
# Validate S-expression
|
||||
try:
|
||||
compiled = compile_string(content)
|
||||
except ParseError as e:
|
||||
return None, f"Parse error: {e}"
|
||||
except CompileError as e:
|
||||
return None, f"Compile error: {e}"
|
||||
|
||||
async def delete_recipe(self, recipe_id: str) -> bool:
|
||||
"""Delete a recipe."""
|
||||
return self.redis.delete(f"{self.recipe_prefix}{recipe_id}") > 0
|
||||
# Write to temp file for caching
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
try:
|
||||
with tempfile.NamedTemporaryFile(delete=False, suffix=".sexp", mode="w") as tmp:
|
||||
tmp.write(content)
|
||||
tmp_path = Path(tmp.name)
|
||||
|
||||
def parse_yaml(self, yaml_content: str) -> Dict[str, Any]:
|
||||
"""Parse recipe YAML content."""
|
||||
return yaml.safe_load(yaml_content)
|
||||
# Store in cache (content-addressed, auto-pins to IPFS)
|
||||
logger.info(f"upload_recipe: Storing recipe in cache from {tmp_path}")
|
||||
cached, ipfs_cid = self.cache.put(tmp_path, node_type="recipe", move=True)
|
||||
recipe_id = ipfs_cid or cached.cid # Prefer IPFS CID
|
||||
logger.info(f"upload_recipe: Stored recipe, cached.cid={cached.cid[:16]}..., ipfs_cid={ipfs_cid[:16] if ipfs_cid else None}, recipe_id={recipe_id[:16]}...")
|
||||
|
||||
def build_dag(self, recipe: Dict[str, Any]) -> Dict[str, Any]:
|
||||
# Track ownership in item_types and assign friendly name
|
||||
if uploader:
|
||||
import database
|
||||
display_name = name or compiled.name or "unnamed-recipe"
|
||||
|
||||
# Create item_types entry (ownership link)
|
||||
await database.save_item_metadata(
|
||||
cid=recipe_id,
|
||||
actor_id=uploader,
|
||||
item_type="recipe",
|
||||
description=description,
|
||||
filename=f"{display_name}.sexp",
|
||||
)
|
||||
|
||||
# Assign friendly name
|
||||
from .naming_service import get_naming_service
|
||||
naming = get_naming_service()
|
||||
await naming.assign_name(
|
||||
cid=recipe_id,
|
||||
actor_id=uploader,
|
||||
item_type="recipe",
|
||||
display_name=display_name,
|
||||
)
|
||||
|
||||
return recipe_id, None
|
||||
|
||||
except Exception as e:
|
||||
return None, f"Failed to cache recipe: {e}"
|
||||
|
||||
async def delete_recipe(self, recipe_id: str, actor_id: str = None) -> Tuple[bool, Optional[str]]:
|
||||
"""
|
||||
Remove user's ownership link to a recipe.
|
||||
|
||||
This removes the item_types entry linking the user to the recipe.
|
||||
The cached file is only deleted if no other users own it.
|
||||
Returns (success, error_message).
|
||||
"""
|
||||
import database
|
||||
|
||||
if not actor_id:
|
||||
return False, "actor_id required"
|
||||
|
||||
# Remove user's ownership link
|
||||
try:
|
||||
await database.delete_item_type(recipe_id, actor_id, "recipe")
|
||||
|
||||
# Also remove friendly name
|
||||
await database.delete_friendly_name(actor_id, recipe_id)
|
||||
|
||||
# Try to garbage collect if no one owns it anymore
|
||||
# (delete_cache_item only deletes if no item_types remain)
|
||||
await database.delete_cache_item(recipe_id)
|
||||
|
||||
return True, None
|
||||
except Exception as e:
|
||||
return False, f"Failed to delete: {e}"
|
||||
|
||||
def parse_recipe(self, content: str) -> CompiledDAG:
|
||||
"""Parse recipe S-expression content."""
|
||||
compiled = compile_string(content)
|
||||
return compiled.to_dict()
|
||||
|
||||
def build_dag(self, recipe: Recipe) -> VisualizationDAG:
|
||||
"""
|
||||
Build DAG visualization data from recipe.
|
||||
|
||||
Returns nodes and edges for Cytoscape.js.
|
||||
"""
|
||||
nodes = []
|
||||
edges = []
|
||||
vis_nodes: List[VisNode] = []
|
||||
edges: List[VisEdge] = []
|
||||
|
||||
dag = recipe.get("dag", {})
|
||||
dag_nodes = dag.get("nodes", {})
|
||||
dag_nodes = dag.get("nodes", [])
|
||||
output_node = dag.get("output")
|
||||
|
||||
for node_id, node_def in dag_nodes.items():
|
||||
node_type = node_def.get("type", "EFFECT")
|
||||
nodes.append({
|
||||
"data": {
|
||||
"id": node_id,
|
||||
"label": node_id,
|
||||
"nodeType": node_type,
|
||||
"isOutput": node_id == output_node,
|
||||
}
|
||||
})
|
||||
# Handle list format (compiled S-expression)
|
||||
if isinstance(dag_nodes, list):
|
||||
for node_def in dag_nodes:
|
||||
node_id = node_def.get("id")
|
||||
node_type = node_def.get("type", "EFFECT")
|
||||
|
||||
# Build edges from inputs
|
||||
for input_ref in node_def.get("inputs", []):
|
||||
if isinstance(input_ref, dict):
|
||||
source = input_ref.get("node") or input_ref.get("input")
|
||||
else:
|
||||
source = input_ref
|
||||
vis_nodes.append({
|
||||
"data": {
|
||||
"id": node_id,
|
||||
"label": node_id,
|
||||
"nodeType": node_type,
|
||||
"isOutput": node_id == output_node,
|
||||
}
|
||||
})
|
||||
|
||||
if source:
|
||||
edges.append({
|
||||
"data": {
|
||||
"source": source,
|
||||
"target": node_id,
|
||||
}
|
||||
})
|
||||
for input_ref in node_def.get("inputs", []):
|
||||
if isinstance(input_ref, dict):
|
||||
source = input_ref.get("node") or input_ref.get("input")
|
||||
else:
|
||||
source = input_ref
|
||||
|
||||
return {"nodes": nodes, "edges": edges}
|
||||
if source:
|
||||
edges.append({
|
||||
"data": {
|
||||
"source": source,
|
||||
"target": node_id,
|
||||
}
|
||||
})
|
||||
|
||||
# Handle dict format
|
||||
elif isinstance(dag_nodes, dict):
|
||||
for node_id, node_def in dag_nodes.items():
|
||||
node_type = node_def.get("type", "EFFECT")
|
||||
|
||||
vis_nodes.append({
|
||||
"data": {
|
||||
"id": node_id,
|
||||
"label": node_id,
|
||||
"nodeType": node_type,
|
||||
"isOutput": node_id == output_node,
|
||||
}
|
||||
})
|
||||
|
||||
for input_ref in node_def.get("inputs", []):
|
||||
if isinstance(input_ref, dict):
|
||||
source = input_ref.get("node") or input_ref.get("input")
|
||||
else:
|
||||
source = input_ref
|
||||
|
||||
if source:
|
||||
edges.append({
|
||||
"data": {
|
||||
"source": source,
|
||||
"target": node_id,
|
||||
}
|
||||
})
|
||||
|
||||
return {"nodes": vis_nodes, "edges": edges}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -3,7 +3,11 @@ Storage Service - business logic for storage provider management.
|
||||
"""
|
||||
|
||||
import json
|
||||
from typing import Optional, List, Dict, Any
|
||||
from typing import Optional, List, Dict, Any, Tuple, TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from database import Database
|
||||
from storage_providers import StorageProvidersModule
|
||||
|
||||
|
||||
STORAGE_PROVIDERS_INFO = {
|
||||
@@ -22,7 +26,7 @@ VALID_PROVIDER_TYPES = list(STORAGE_PROVIDERS_INFO.keys())
|
||||
class StorageService:
|
||||
"""Service for managing user storage providers."""
|
||||
|
||||
def __init__(self, database, storage_providers_module):
|
||||
def __init__(self, database: "Database", storage_providers_module: "StorageProvidersModule") -> None:
|
||||
self.db = database
|
||||
self.providers = storage_providers_module
|
||||
|
||||
@@ -72,7 +76,7 @@ class StorageService:
|
||||
capacity_gb: int = 5,
|
||||
provider_name: Optional[str] = None,
|
||||
description: Optional[str] = None,
|
||||
) -> tuple[Optional[int], Optional[str]]:
|
||||
) -> Tuple[Optional[int], Optional[str]]:
|
||||
"""Add a new storage provider. Returns (storage_id, error_message)."""
|
||||
if provider_type not in VALID_PROVIDER_TYPES:
|
||||
return None, f"Invalid provider type: {provider_type}"
|
||||
@@ -115,7 +119,7 @@ class StorageService:
|
||||
config: Optional[Dict[str, Any]] = None,
|
||||
capacity_gb: Optional[int] = None,
|
||||
is_active: Optional[bool] = None,
|
||||
) -> tuple[bool, Optional[str]]:
|
||||
) -> Tuple[bool, Optional[str]]:
|
||||
"""Update a storage provider. Returns (success, error_message)."""
|
||||
storage = await self.db.get_storage_by_id(storage_id)
|
||||
if not storage:
|
||||
@@ -145,7 +149,7 @@ class StorageService:
|
||||
|
||||
return success, None if success else "Failed to update storage provider"
|
||||
|
||||
async def delete_storage(self, storage_id: int, actor_id: str) -> tuple[bool, Optional[str]]:
|
||||
async def delete_storage(self, storage_id: int, actor_id: str) -> Tuple[bool, Optional[str]]:
|
||||
"""Delete a storage provider. Returns (success, error_message)."""
|
||||
storage = await self.db.get_storage_by_id(storage_id)
|
||||
if not storage:
|
||||
@@ -156,7 +160,7 @@ class StorageService:
|
||||
success = await self.db.remove_user_storage(storage_id)
|
||||
return success, None if success else "Failed to remove storage provider"
|
||||
|
||||
async def test_storage(self, storage_id: int, actor_id: str) -> tuple[bool, str]:
|
||||
async def test_storage(self, storage_id: int, actor_id: str) -> Tuple[bool, str]:
|
||||
"""Test storage provider connectivity. Returns (success, message)."""
|
||||
storage = await self.db.get_storage_by_id(storage_id)
|
||||
if not storage:
|
||||
@@ -179,7 +183,7 @@ class StorageService:
|
||||
"""List storage providers of a specific type."""
|
||||
return await self.db.get_user_storage_by_type(actor_id, provider_type)
|
||||
|
||||
def build_config_from_form(self, provider_type: str, form_data: Dict[str, Any]) -> tuple[Optional[Dict], Optional[str]]:
|
||||
def build_config_from_form(self, provider_type: str, form_data: Dict[str, Any]) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
|
||||
"""Build provider config from form data. Returns (config, error)."""
|
||||
api_key = form_data.get("api_key")
|
||||
secret_key = form_data.get("secret_key")
|
||||
|
||||
14
app/templates/404.html
Normal file
14
app/templates/404.html
Normal file
@@ -0,0 +1,14 @@
|
||||
{% extends "base.html" %}
{# 404 error page: centered "not found" message with a link back home. #}

{% block title %}Not Found - Art-DAG L1{% endblock %}

{% block content %}
<div class="max-w-2xl mx-auto text-center py-16">
    <h1 class="text-6xl font-bold text-gray-400 mb-4">404</h1>
    <h2 class="text-2xl font-semibold mb-4">Page Not Found</h2>
    <p class="text-gray-400 mb-8">The page you're looking for doesn't exist or has been moved.</p>
    <a href="/" class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded-lg font-medium">
        Go Home
    </a>
</div>
{% endblock %}
|
||||
@@ -1,23 +1,46 @@
|
||||
{% extends "base.html" %}
|
||||
{% extends "_base.html" %}
|
||||
|
||||
{% block brand %}Art-DAG L1{% endblock %}
|
||||
|
||||
{% block nav_items %}
|
||||
<nav class="flex items-center space-x-6">
|
||||
<a href="/runs" class="text-gray-300 hover:text-white {% if active_tab == 'runs' %}text-white font-medium{% endif %}">Runs</a>
|
||||
<a href="/recipes" class="text-gray-300 hover:text-white {% if active_tab == 'recipes' %}text-white font-medium{% endif %}">Recipes</a>
|
||||
<a href="/media" class="text-gray-300 hover:text-white {% if active_tab == 'media' %}text-white font-medium{% endif %}">Media</a>
|
||||
<a href="/storage" class="text-gray-300 hover:text-white {% if active_tab == 'storage' %}text-white font-medium{% endif %}">Storage</a>
|
||||
</nav>
|
||||
{% block brand %}
|
||||
<a href="https://blog.rose-ash.com/" class="no-underline text-stone-900">Rose Ash</a>
|
||||
<span class="text-stone-400 mx-1">|</span>
|
||||
<a href="/" class="no-underline text-stone-900">Art-DAG</a>
|
||||
{% endblock %}
|
||||
|
||||
{% block nav_right %}
|
||||
{% if user %}
|
||||
<div class="flex items-center space-x-4">
|
||||
<span class="text-gray-400">{{ user.username }}</span>
|
||||
<a href="/auth/logout" class="text-gray-300 hover:text-white">Logout</a>
|
||||
</div>
|
||||
{% else %}
|
||||
<a href="/login" class="text-gray-300 hover:text-white">Login</a>
|
||||
{% block cart_mini %}
|
||||
{% if request and request.state.cart_mini_html %}
|
||||
{{ request.state.cart_mini_html | safe }}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block nav_tree %}
|
||||
{% if request and request.state.nav_tree_html %}
|
||||
{{ request.state.nav_tree_html | safe }}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block auth_menu %}
|
||||
{% if request and request.state.auth_menu_html %}
|
||||
{{ request.state.auth_menu_html | safe }}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block auth_menu_mobile %}
|
||||
{% if request and request.state.auth_menu_html %}
|
||||
{{ request.state.auth_menu_html | safe }}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block sub_nav %}
|
||||
<div class="bg-stone-200 border-b border-stone-300">
|
||||
<div class="max-w-screen-2xl mx-auto px-4">
|
||||
<nav class="flex items-center gap-4 py-2 text-sm overflow-x-auto no-scrollbar">
|
||||
<a href="/runs" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'runs' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Runs{% if nav_counts and nav_counts.runs %} ({{ nav_counts.runs }}){% endif %}</a>
|
||||
<a href="/recipes" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'recipes' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Recipes{% if nav_counts and nav_counts.recipes %} ({{ nav_counts.recipes }}){% endif %}</a>
|
||||
<a href="/effects" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'effects' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Effects{% if nav_counts and nav_counts.effects %} ({{ nav_counts.effects }}){% endif %}</a>
|
||||
<a href="/media" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'media' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Media{% if nav_counts and nav_counts.media %} ({{ nav_counts.media }}){% endif %}</a>
|
||||
<a href="/storage" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'storage' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Storage{% if nav_counts and nav_counts.storage %} ({{ nav_counts.storage }}){% endif %}</a>
|
||||
<a href="/download/client" class="whitespace-nowrap px-3 py-1.5 rounded text-stone-700 hover:bg-stone-300" title="Download CLI client">Client</a>
|
||||
</nav>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
122
app/templates/cache/detail.html
vendored
122
app/templates/cache/detail.html
vendored
@@ -1,57 +1,131 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{ cache.hash[:16] }} - Cache - Art-DAG L1{% endblock %}
|
||||
{% block title %}{{ cache.cid[:16] }} - Cache - Art-DAG L1{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="max-w-4xl mx-auto">
|
||||
<!-- Header -->
|
||||
<div class="flex items-center space-x-4 mb-6">
|
||||
<a href="/media" class="text-gray-400 hover:text-white">← Media</a>
|
||||
<h1 class="text-xl font-bold font-mono">{{ cache.hash[:24] }}...</h1>
|
||||
<h1 class="text-xl font-bold font-mono">{{ cache.cid[:24] }}...</h1>
|
||||
</div>
|
||||
|
||||
<!-- Preview -->
|
||||
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6 overflow-hidden">
|
||||
{% if cache.media_type and cache.media_type.startswith('image/') %}
|
||||
<img src="/cache/{{ cache.hash }}/raw" alt=""
|
||||
{% if cache.mime_type and cache.mime_type.startswith('image/') %}
|
||||
{% if cache.remote_only and cache.ipfs_cid %}
|
||||
<img src="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}" alt=""
|
||||
class="w-full max-h-96 object-contain bg-gray-900">
|
||||
{% else %}
|
||||
<img src="/cache/{{ cache.cid }}/raw" alt=""
|
||||
class="w-full max-h-96 object-contain bg-gray-900">
|
||||
{% endif %}
|
||||
|
||||
{% elif cache.media_type and cache.media_type.startswith('video/') %}
|
||||
<video src="/cache/{{ cache.hash }}/raw" controls
|
||||
{% elif cache.mime_type and cache.mime_type.startswith('video/') %}
|
||||
{% if cache.remote_only and cache.ipfs_cid %}
|
||||
<video src="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}" controls
|
||||
class="w-full max-h-96 bg-gray-900">
|
||||
</video>
|
||||
{% else %}
|
||||
<video src="/cache/{{ cache.cid }}/raw" controls
|
||||
class="w-full max-h-96 bg-gray-900">
|
||||
</video>
|
||||
{% endif %}
|
||||
|
||||
{% elif cache.media_type and cache.media_type.startswith('audio/') %}
|
||||
{% elif cache.mime_type and cache.mime_type.startswith('audio/') %}
|
||||
<div class="p-8 bg-gray-900">
|
||||
<audio src="/cache/{{ cache.hash }}/raw" controls class="w-full"></audio>
|
||||
{% if cache.remote_only and cache.ipfs_cid %}
|
||||
<audio src="https://ipfs.io/ipfs/{{ cache.ipfs_cid }}" controls class="w-full"></audio>
|
||||
{% else %}
|
||||
<audio src="/cache/{{ cache.cid }}/raw" controls class="w-full"></audio>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{% elif cache.media_type == 'application/json' %}
|
||||
{% elif cache.mime_type == 'application/json' %}
|
||||
<div class="p-4 bg-gray-900 max-h-96 overflow-auto">
|
||||
<pre class="text-sm text-gray-300">{{ cache.content_preview }}</pre>
|
||||
</div>
|
||||
|
||||
{% else %}
|
||||
<div class="p-8 bg-gray-900 text-center text-gray-500">
|
||||
<div class="text-4xl mb-2">{{ cache.media_type or 'Unknown type' }}</div>
|
||||
<div>{{ cache.size_bytes | filesizeformat if cache.size_bytes else 'Unknown size' }}</div>
|
||||
<div class="text-4xl mb-2">{{ cache.mime_type or 'Unknown type' }}</div>
|
||||
<div>{{ cache.size | filesizeformat if cache.size else 'Unknown size' }}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<!-- Metadata -->
|
||||
<!-- Friendly Name -->
|
||||
<div id="friendly-name-section" class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">
|
||||
<div class="flex items-center justify-between mb-2">
|
||||
<span class="text-gray-500 text-sm">Friendly Name</span>
|
||||
<button hx-get="/cache/{{ cache.cid }}/name-form"
|
||||
hx-target="#friendly-name-section"
|
||||
hx-swap="innerHTML"
|
||||
class="text-blue-400 hover:text-blue-300 text-sm">
|
||||
Edit
|
||||
</button>
|
||||
</div>
|
||||
{% if cache.friendly_name %}
|
||||
<p class="text-blue-400 font-medium text-lg">{{ cache.friendly_name }}</p>
|
||||
<p class="text-gray-500 text-xs mt-1">Use in recipes: <code class="bg-gray-900 px-2 py-0.5 rounded">{{ cache.base_name }}</code></p>
|
||||
{% else %}
|
||||
<p class="text-gray-500 text-sm">No friendly name assigned. Click Edit to add one.</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<!-- User Metadata (editable) -->
|
||||
<div id="metadata-section" class="bg-gray-800 rounded-lg border border-gray-700 p-4 mb-6">
|
||||
<div class="flex items-center justify-between mb-3">
|
||||
<h3 class="text-lg font-semibold">Details</h3>
|
||||
<button hx-get="/cache/{{ cache.cid }}/meta-form"
|
||||
hx-target="#metadata-section"
|
||||
hx-swap="innerHTML"
|
||||
class="text-blue-400 hover:text-blue-300 text-sm">
|
||||
Edit
|
||||
</button>
|
||||
</div>
|
||||
{% if cache.title or cache.description or cache.filename %}
|
||||
<div class="space-y-2 mb-4">
|
||||
{% if cache.title %}
|
||||
<h4 class="text-white font-medium">{{ cache.title }}</h4>
|
||||
{% elif cache.filename %}
|
||||
<h4 class="text-white font-medium">{{ cache.filename }}</h4>
|
||||
{% endif %}
|
||||
{% if cache.description %}
|
||||
<p class="text-gray-400">{{ cache.description }}</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-gray-500 text-sm mb-4">No title or description set. Click Edit to add metadata.</p>
|
||||
{% endif %}
|
||||
{% if cache.tags %}
|
||||
<div class="flex flex-wrap gap-2 mb-4">
|
||||
{% for tag in cache.tags %}
|
||||
<span class="bg-gray-700 text-gray-300 px-2 py-1 rounded text-sm">{{ tag }}</span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if cache.source_type or cache.source_note %}
|
||||
<div class="text-sm text-gray-500">
|
||||
{% if cache.source_type %}Source: {{ cache.source_type }}{% endif %}
|
||||
{% if cache.source_note %} - {{ cache.source_note }}{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<!-- Technical Metadata -->
|
||||
<div class="grid grid-cols-2 gap-4 mb-6">
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<div class="text-gray-500 text-sm">Hash</div>
|
||||
<div class="font-mono text-sm text-white break-all">{{ cache.hash }}</div>
|
||||
<div class="text-gray-500 text-sm">CID</div>
|
||||
<div class="font-mono text-sm text-white break-all">{{ cache.cid }}</div>
|
||||
</div>
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<div class="text-gray-500 text-sm">Content Type</div>
|
||||
<div class="text-white">{{ cache.media_type or 'Unknown' }}</div>
|
||||
<div class="text-white">{{ cache.mime_type or 'Unknown' }}</div>
|
||||
</div>
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<div class="text-gray-500 text-sm">Size</div>
|
||||
<div class="text-white">{{ cache.size_bytes | filesizeformat if cache.size_bytes else 'Unknown' }}</div>
|
||||
<div class="text-white">{{ cache.size | filesizeformat if cache.size else 'Unknown' }}</div>
|
||||
</div>
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<div class="text-gray-500 text-sm">Created</div>
|
||||
@@ -79,7 +153,7 @@
|
||||
<h2 class="text-lg font-semibold mb-4">Related Runs</h2>
|
||||
<div class="space-y-2">
|
||||
{% for run in cache.runs %}
|
||||
<a href="/run/{{ run.run_id }}"
|
||||
<a href="/runs/{{ run.run_id }}"
|
||||
class="block bg-gray-800 rounded p-3 hover:bg-gray-750 transition-colors">
|
||||
<div class="flex items-center justify-between">
|
||||
<span class="font-mono text-sm">{{ run.run_id[:16] }}...</span>
|
||||
@@ -92,19 +166,17 @@
|
||||
|
||||
<!-- Actions -->
|
||||
<div class="flex items-center space-x-4 mt-8">
|
||||
<a href="/cache/{{ cache.hash }}/raw"
|
||||
<a href="/cache/{{ cache.cid }}/raw"
|
||||
download
|
||||
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
|
||||
Download
|
||||
</a>
|
||||
{% if not cache.ipfs_cid %}
|
||||
<button hx-post="/cache/{{ cache.hash }}/publish"
|
||||
hx-target="#publish-result"
|
||||
class="bg-gray-700 hover:bg-gray-600 px-4 py-2 rounded font-medium">
|
||||
Publish to IPFS
|
||||
<button hx-post="/cache/{{ cache.cid }}/publish"
|
||||
hx-target="#share-result"
|
||||
class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded font-medium">
|
||||
Share to L2
|
||||
</button>
|
||||
<span id="publish-result"></span>
|
||||
{% endif %}
|
||||
<span id="share-result"></span>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
239
app/templates/cache/media_list.html
vendored
239
app/templates/cache/media_list.html
vendored
@@ -7,6 +7,10 @@
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<h1 class="text-3xl font-bold">Media</h1>
|
||||
<div class="flex items-center space-x-4">
|
||||
<button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
|
||||
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
|
||||
Upload Media
|
||||
</button>
|
||||
<select id="type-filter" onchange="filterMedia()"
|
||||
class="bg-gray-800 border border-gray-600 rounded px-3 py-2 text-white">
|
||||
<option value="">All Types</option>
|
||||
@@ -17,22 +21,79 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Upload Modal -->
|
||||
<div id="upload-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
|
||||
<div class="bg-gray-800 rounded-lg p-6 w-full max-w-md border border-gray-700">
|
||||
<div class="flex justify-between items-center mb-4">
|
||||
<h2 class="text-xl font-semibold">Upload Media</h2>
|
||||
<button onclick="document.getElementById('upload-modal').classList.add('hidden')"
|
||||
class="text-gray-400 hover:text-white">
|
||||
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<form id="upload-form" enctype="multipart/form-data" class="space-y-4">
|
||||
<div>
|
||||
<label class="block text-gray-400 text-sm mb-1">Files</label>
|
||||
<input type="file" name="files" id="upload-file" required multiple
|
||||
accept="image/*,video/*,audio/*"
|
||||
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white file:mr-4 file:py-2 file:px-4 file:rounded file:border-0 file:bg-blue-600 file:text-white hover:file:bg-blue-700">
|
||||
<p class="text-gray-500 text-xs mt-1">Select one or more files to upload</p>
|
||||
</div>
|
||||
|
||||
<div id="single-name-field">
|
||||
<label class="block text-gray-400 text-sm mb-1">Name (optional, for single file)</label>
|
||||
<input type="text" name="display_name" id="upload-name" placeholder="e.g., my-background-video"
|
||||
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
|
||||
<p class="text-gray-500 text-xs mt-1">A friendly name to reference this media in recipes</p>
|
||||
</div>
|
||||
|
||||
<div id="upload-progress" class="hidden">
|
||||
<div class="bg-gray-700 rounded-full h-2">
|
||||
<div id="progress-bar" class="bg-blue-600 h-2 rounded-full transition-all" style="width: 0%"></div>
|
||||
</div>
|
||||
<p id="progress-text" class="text-gray-400 text-sm mt-1">Uploading...</p>
|
||||
</div>
|
||||
|
||||
<div id="upload-result" class="hidden max-h-48 overflow-y-auto"></div>
|
||||
|
||||
<div class="flex justify-end space-x-3">
|
||||
<button type="button" onclick="document.getElementById('upload-modal').classList.add('hidden')"
|
||||
class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
|
||||
Cancel
|
||||
</button>
|
||||
<button type="submit" id="upload-btn"
|
||||
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
|
||||
Upload
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if items %}
|
||||
<div class="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4" id="media-grid">
|
||||
{% for item in items %}
|
||||
<a href="/cache/{{ item.hash }}"
|
||||
class="media-item bg-gray-800 rounded-lg overflow-hidden hover:ring-2 hover:ring-blue-500 transition-all"
|
||||
data-type="{{ item.media_type.split('/')[0] if item.media_type else 'other' }}">
|
||||
{# Determine media category from type or filename #}
|
||||
{% set is_image = item.type in ('image', 'image/jpeg', 'image/png', 'image/gif', 'image/webp') or (item.filename and item.filename.lower().endswith(('.jpg', '.jpeg', '.png', '.gif', '.webp'))) %}
|
||||
{% set is_video = item.type in ('video', 'video/mp4', 'video/webm', 'video/x-matroska') or (item.filename and item.filename.lower().endswith(('.mp4', '.mkv', '.webm', '.mov'))) %}
|
||||
{% set is_audio = item.type in ('audio', 'audio/mpeg', 'audio/wav', 'audio/flac') or (item.filename and item.filename.lower().endswith(('.mp3', '.wav', '.flac', '.ogg'))) %}
|
||||
|
||||
{% if item.media_type and item.media_type.startswith('image/') %}
|
||||
<img src="/cache/{{ item.hash }}/raw"
|
||||
<a href="/cache/{{ item.cid }}"
|
||||
class="media-item bg-gray-800 rounded-lg overflow-hidden hover:ring-2 hover:ring-blue-500 transition-all"
|
||||
data-type="{% if is_image %}image{% elif is_video %}video{% elif is_audio %}audio{% else %}other{% endif %}">
|
||||
|
||||
{% if is_image %}
|
||||
<img src="/cache/{{ item.cid }}/raw"
|
||||
alt=""
|
||||
loading="lazy"
|
||||
class="w-full h-40 object-cover">
|
||||
|
||||
{% elif item.media_type and item.media_type.startswith('video/') %}
|
||||
{% elif is_video %}
|
||||
<div class="relative">
|
||||
<video src="/cache/{{ item.hash }}/raw"
|
||||
<video src="/cache/{{ item.cid }}/raw"
|
||||
class="w-full h-40 object-cover"
|
||||
muted
|
||||
onmouseover="this.play()"
|
||||
@@ -47,7 +108,7 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% elif item.media_type and item.media_type.startswith('audio/') %}
|
||||
{% elif is_audio %}
|
||||
<div class="w-full h-40 bg-gray-900 flex flex-col items-center justify-center">
|
||||
<svg class="w-12 h-12 text-gray-600 mb-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"
|
||||
@@ -58,14 +119,18 @@
|
||||
|
||||
{% else %}
|
||||
<div class="w-full h-40 bg-gray-900 flex items-center justify-center">
|
||||
<span class="text-gray-600 text-sm">{{ item.media_type or 'Unknown' }}</span>
|
||||
<span class="text-gray-600 text-sm">{{ item.type or 'Media' }}</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="p-3">
|
||||
<div class="font-mono text-xs text-gray-500 truncate">{{ item.hash[:16] }}...</div>
|
||||
{% if item.size_bytes %}
|
||||
<div class="text-xs text-gray-600">{{ item.size_bytes | filesizeformat }}</div>
|
||||
{% if item.friendly_name %}
|
||||
<div class="text-xs text-blue-400 font-medium truncate">{{ item.friendly_name }}</div>
|
||||
{% else %}
|
||||
<div class="font-mono text-xs text-gray-500 truncate">{{ item.cid[:16] }}...</div>
|
||||
{% endif %}
|
||||
{% if item.filename %}
|
||||
<div class="text-xs text-gray-600 truncate">{{ item.filename }}</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</a>
|
||||
@@ -106,5 +171,155 @@ function filterMedia() {
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Show/hide name field based on file count
|
||||
document.getElementById('upload-file').addEventListener('change', function(e) {
|
||||
const nameField = document.getElementById('single-name-field');
|
||||
if (e.target.files.length > 1) {
|
||||
nameField.style.display = 'none';
|
||||
} else {
|
||||
nameField.style.display = 'block';
|
||||
}
|
||||
});
|
||||
|
||||
// Handle upload form
|
||||
document.getElementById('upload-form').addEventListener('submit', async function(e) {
|
||||
e.preventDefault();
|
||||
|
||||
const form = e.target;
|
||||
const fileInput = document.getElementById('upload-file');
|
||||
const files = fileInput.files;
|
||||
const displayName = document.getElementById('upload-name').value;
|
||||
const progressDiv = document.getElementById('upload-progress');
|
||||
const progressBar = document.getElementById('progress-bar');
|
||||
const progressText = document.getElementById('progress-text');
|
||||
const resultDiv = document.getElementById('upload-result');
|
||||
const uploadBtn = document.getElementById('upload-btn');
|
||||
|
||||
// Show progress
|
||||
progressDiv.classList.remove('hidden');
|
||||
resultDiv.classList.add('hidden');
|
||||
uploadBtn.disabled = true;
|
||||
|
||||
const results = [];
|
||||
const errors = [];
|
||||
|
||||
const CHUNK_SIZE = 1024 * 1024; // 1MB chunks
|
||||
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
const file = files[i];
|
||||
const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
|
||||
const uploadId = crypto.randomUUID();
|
||||
const useChunked = file.size > CHUNK_SIZE * 2; // Use chunked for files > 2MB
|
||||
|
||||
progressText.textContent = `Uploading ${i + 1} of ${files.length}: ${file.name}`;
|
||||
|
||||
try {
|
||||
let data;
|
||||
|
||||
if (useChunked && totalChunks > 1) {
|
||||
// Chunked upload for large files
|
||||
for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
|
||||
const start = chunkIndex * CHUNK_SIZE;
|
||||
const end = Math.min(start + CHUNK_SIZE, file.size);
|
||||
const chunk = file.slice(start, end);
|
||||
|
||||
const chunkForm = new FormData();
|
||||
chunkForm.append('chunk', chunk);
|
||||
chunkForm.append('upload_id', uploadId);
|
||||
chunkForm.append('chunk_index', chunkIndex);
|
||||
chunkForm.append('total_chunks', totalChunks);
|
||||
chunkForm.append('filename', file.name);
|
||||
if (files.length === 1 && displayName) {
|
||||
chunkForm.append('display_name', displayName);
|
||||
}
|
||||
|
||||
const chunkProgress = ((i + (chunkIndex + 1) / totalChunks) / files.length) * 100;
|
||||
progressBar.style.width = `${chunkProgress}%`;
|
||||
progressText.textContent = `Uploading ${i + 1} of ${files.length}: ${file.name} (${chunkIndex + 1}/${totalChunks} chunks)`;
|
||||
|
||||
const response = await fetch('/media/upload/chunk', {
|
||||
method: 'POST',
|
||||
body: chunkForm,
|
||||
});
|
||||
|
||||
const contentType = response.headers.get('content-type') || '';
|
||||
if (!contentType.includes('application/json')) {
|
||||
const text = await response.text();
|
||||
throw new Error(`Server error (${response.status}): ${text.substring(0, 100)}`);
|
||||
}
|
||||
|
||||
data = await response.json();
|
||||
if (!response.ok) {
|
||||
throw new Error(data.detail || 'Chunk upload failed');
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Regular upload for small files
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
if (files.length === 1 && displayName) {
|
||||
formData.append('display_name', displayName);
|
||||
}
|
||||
|
||||
progressBar.style.width = `${((i + 0.5) / files.length) * 100}%`;
|
||||
|
||||
const response = await fetch('/media/upload', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
});
|
||||
|
||||
const contentType = response.headers.get('content-type') || '';
|
||||
if (!contentType.includes('application/json')) {
|
||||
const text = await response.text();
|
||||
throw new Error(`Server error (${response.status}): ${text.substring(0, 100)}`);
|
||||
}
|
||||
|
||||
data = await response.json();
|
||||
if (!response.ok) {
|
||||
throw new Error(data.detail || 'Upload failed');
|
||||
}
|
||||
}
|
||||
|
||||
results.push({ filename: file.name, friendly_name: data.friendly_name, cid: data.cid });
|
||||
} catch (err) {
|
||||
errors.push({ filename: file.name, error: err.message });
|
||||
}
|
||||
|
||||
progressBar.style.width = `${((i + 1) / files.length) * 100}%`;
|
||||
}
|
||||
|
||||
progressText.textContent = 'Upload complete!';
|
||||
|
||||
// Show results
|
||||
let html = '';
|
||||
if (results.length > 0) {
|
||||
html += '<div class="bg-green-900 border border-green-700 rounded p-3 text-green-300 mb-2">';
|
||||
html += `<p class="font-medium">${results.length} file(s) uploaded successfully!</p>`;
|
||||
for (const r of results) {
|
||||
html += `<p class="text-sm mt-1">${r.filename} → <span class="font-mono">${r.friendly_name}</span></p>`;
|
||||
}
|
||||
html += '</div>';
|
||||
}
|
||||
if (errors.length > 0) {
|
||||
html += '<div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">';
|
||||
html += `<p class="font-medium">${errors.length} file(s) failed:</p>`;
|
||||
for (const e of errors) {
|
||||
html += `<p class="text-sm mt-1">${e.filename}: ${e.error}</p>`;
|
||||
}
|
||||
html += '</div>';
|
||||
}
|
||||
|
||||
resultDiv.innerHTML = html;
|
||||
resultDiv.classList.remove('hidden');
|
||||
|
||||
if (results.length > 0) {
|
||||
// Reload page after 2 seconds
|
||||
setTimeout(() => location.reload(), 2000);
|
||||
} else {
|
||||
uploadBtn.disabled = false;
|
||||
uploadBtn.textContent = 'Upload';
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
||||
21
app/templates/cache/not_found.html
vendored
Normal file
21
app/templates/cache/not_found.html
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Content Not Found - Art-DAG L1{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="max-w-2xl mx-auto text-center py-16">
|
||||
<h1 class="text-6xl font-bold text-gray-400 mb-4">404</h1>
|
||||
<h2 class="text-2xl font-semibold mb-4">Content Not Found</h2>
|
||||
<p class="text-gray-400 mb-8">
|
||||
The content with hash <code class="bg-gray-800 px-2 py-1 rounded">{{ cid[:24] if cid else 'unknown' }}...</code> was not found in the cache.
|
||||
</p>
|
||||
<div class="flex justify-center gap-4">
|
||||
<a href="/cache/" class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded-lg font-medium">
|
||||
Browse Media
|
||||
</a>
|
||||
<a href="/" class="bg-gray-700 hover:bg-gray-600 px-6 py-3 rounded-lg font-medium">
|
||||
Go Home
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
203
app/templates/effects/detail.html
Normal file
203
app/templates/effects/detail.html
Normal file
@@ -0,0 +1,203 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% set meta = effect.meta or effect %}
|
||||
|
||||
{% block title %}{{ meta.name or 'Effect' }} - Effects - Art-DAG L1{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
{{ super() }}
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github-dark.min.css">
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/lisp.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/languages/scheme.min.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="max-w-6xl mx-auto">
|
||||
<!-- Header -->
|
||||
<div class="flex items-center space-x-4 mb-6">
|
||||
<a href="/effects" class="text-gray-400 hover:text-white">← Effects</a>
|
||||
<h1 class="text-2xl font-bold">{{ meta.name or 'Unnamed Effect' }}</h1>
|
||||
<span class="text-gray-500">v{{ meta.version or '1.0.0' }}</span>
|
||||
{% if meta.temporal %}
|
||||
<span class="bg-purple-900 text-purple-300 px-2 py-1 rounded text-sm">temporal</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{% if meta.author %}
|
||||
<p class="text-gray-500 mb-2">by {{ meta.author }}</p>
|
||||
{% endif %}
|
||||
|
||||
{% if meta.description %}
|
||||
<p class="text-gray-400 mb-6">{{ meta.description }}</p>
|
||||
{% endif %}
|
||||
|
||||
<!-- Friendly Name & CID Info -->
|
||||
<div class="bg-gray-800 rounded-lg p-4 border border-gray-700 mb-6">
|
||||
{% if effect.friendly_name %}
|
||||
<div class="mb-4 pb-4 border-b border-gray-700">
|
||||
<span class="text-gray-500 text-sm">Friendly Name</span>
|
||||
<p class="text-blue-400 font-medium text-lg mt-1">{{ effect.friendly_name }}</p>
|
||||
<p class="text-gray-500 text-xs mt-1">Use in recipes: <code class="bg-gray-900 px-2 py-0.5 rounded">(effect {{ effect.base_name }})</code></p>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<span class="text-gray-500 text-sm">Content ID (CID)</span>
|
||||
<p class="font-mono text-sm text-gray-300 mt-1" id="effect-cid">{{ effect.cid }}</p>
|
||||
</div>
|
||||
<button onclick="copyToClipboard('{{ effect.cid }}')"
|
||||
class="bg-gray-700 hover:bg-gray-600 px-3 py-1 rounded text-sm">
|
||||
Copy
|
||||
</button>
|
||||
</div>
|
||||
{% if effect.uploaded_at %}
|
||||
<div class="mt-3 text-gray-500 text-sm">
|
||||
Uploaded: {{ effect.uploaded_at }}
|
||||
{% if effect.uploader %}
|
||||
by {{ effect.uploader }}
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="grid grid-cols-1 lg:grid-cols-3 gap-6">
|
||||
<!-- Left Column: Parameters & Dependencies -->
|
||||
<div class="lg:col-span-1 space-y-6">
|
||||
<!-- Parameters -->
|
||||
{% if meta.params %}
|
||||
<div class="bg-gray-800 rounded-lg border border-gray-700">
|
||||
<div class="border-b border-gray-700 px-4 py-2">
|
||||
<span class="text-gray-400 text-sm font-medium">Parameters</span>
|
||||
</div>
|
||||
<div class="p-4 space-y-4">
|
||||
{% for param in meta.params %}
|
||||
<div>
|
||||
<div class="flex items-center space-x-2 mb-1">
|
||||
<span class="font-medium text-white">{{ param.name }}</span>
|
||||
<span class="bg-blue-900 text-blue-300 px-2 py-0.5 rounded text-xs">{{ param.type }}</span>
|
||||
</div>
|
||||
{% if param.description %}
|
||||
<p class="text-gray-400 text-sm">{{ param.description }}</p>
|
||||
{% endif %}
|
||||
<div class="flex flex-wrap gap-2 mt-1 text-xs">
|
||||
{% if param.range %}
|
||||
<span class="text-gray-500">range: {{ param.range[0] }} - {{ param.range[1] }}</span>
|
||||
{% endif %}
|
||||
{% if param.default is defined %}
|
||||
<span class="text-gray-500">default: {{ param.default }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- Usage in Recipe -->
|
||||
<div class="bg-gray-800 rounded-lg border border-gray-700">
|
||||
<div class="border-b border-gray-700 px-4 py-2">
|
||||
<span class="text-gray-400 text-sm font-medium">Usage in Recipe</span>
|
||||
</div>
|
||||
<div class="p-4">
|
||||
{% if effect.base_name %}
|
||||
<pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">({{ effect.base_name }} ...)</code></pre>
|
||||
<p class="text-gray-500 text-xs mt-2">
|
||||
Use the friendly name to reference this effect.
|
||||
</p>
|
||||
{% else %}
|
||||
<pre class="text-sm text-gray-300 bg-gray-900 rounded p-3 overflow-x-auto"><code class="language-lisp">(effect :cid "{{ effect.cid }}")</code></pre>
|
||||
<p class="text-gray-500 text-xs mt-2">
|
||||
Reference this effect by CID in your recipe.
|
||||
</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Right Column: Source Code -->
|
||||
<div class="lg:col-span-2">
|
||||
<div class="bg-gray-800 rounded-lg border border-gray-700">
|
||||
<div class="border-b border-gray-700 px-4 py-2 flex items-center justify-between">
|
||||
<span class="text-gray-400 text-sm font-medium">Source Code (S-expression)</span>
|
||||
<div class="flex items-center space-x-2">
|
||||
<a href="/effects/{{ effect.cid }}/source"
|
||||
class="text-gray-400 hover:text-white text-sm"
|
||||
download="{{ meta.name or 'effect' }}.sexp">
|
||||
Download
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
<div class="p-4">
|
||||
<pre class="text-sm overflow-x-auto rounded bg-gray-900"><code class="language-lisp" id="source-code">Loading...</code></pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Actions -->
|
||||
<div class="flex items-center space-x-4 mt-8">
|
||||
{% if effect.cid.startswith('Qm') or effect.cid.startswith('bafy') %}
|
||||
<a href="https://ipfs.io/ipfs/{{ effect.cid }}"
|
||||
target="_blank"
|
||||
class="bg-cyan-600 hover:bg-cyan-700 px-4 py-2 rounded font-medium">
|
||||
View on IPFS
|
||||
</a>
|
||||
{% endif %}
|
||||
<button hx-post="/effects/{{ effect.cid }}/publish"
|
||||
hx-target="#action-result"
|
||||
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
|
||||
Share to L2
|
||||
</button>
|
||||
<button onclick="deleteEffect('{{ effect.cid }}')"
|
||||
class="bg-red-600 hover:bg-red-700 px-4 py-2 rounded font-medium">
|
||||
Delete
|
||||
</button>
|
||||
<span id="action-result"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
// Load source code
|
||||
fetch('/effects/{{ effect.cid }}/source')
|
||||
.then(response => response.text())
|
||||
.then(source => {
|
||||
const codeEl = document.getElementById('source-code');
|
||||
codeEl.textContent = source;
|
||||
hljs.highlightElement(codeEl);
|
||||
})
|
||||
.catch(error => {
|
||||
document.getElementById('source-code').textContent = 'Failed to load source code';
|
||||
});
|
||||
});
|
||||
|
||||
function copyToClipboard(text) {
|
||||
navigator.clipboard.writeText(text).then(() => {
|
||||
const btn = event.target;
|
||||
const originalText = btn.textContent;
|
||||
btn.textContent = 'Copied!';
|
||||
setTimeout(() => { btn.textContent = originalText; }, 1500);
|
||||
});
|
||||
}
|
||||
|
||||
function deleteEffect(cid) {
|
||||
if (!confirm('Delete this effect from local cache? IPFS copies will persist.')) return;
|
||||
|
||||
fetch('/effects/' + cid, { method: 'DELETE' })
|
||||
.then(response => {
|
||||
if (!response.ok) throw new Error('Delete failed');
|
||||
return response.json();
|
||||
})
|
||||
.then(data => {
|
||||
document.getElementById('action-result').innerHTML =
|
||||
'<span class="text-green-400">Deleted. Redirecting...</span>';
|
||||
setTimeout(() => { window.location.href = '/effects'; }, 1000);
|
||||
})
|
||||
.catch(error => {
|
||||
document.getElementById('action-result').innerHTML =
|
||||
'<span class="text-red-400">' + error.message + '</span>';
|
||||
});
|
||||
}
|
||||
</script>
|
||||
{% endblock %}
|
||||
200
app/templates/effects/list.html
Normal file
200
app/templates/effects/list.html
Normal file
@@ -0,0 +1,200 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Effects - Art-DAG L1{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="max-w-6xl mx-auto">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<h1 class="text-3xl font-bold">Effects</h1>
|
||||
<button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
|
||||
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
|
||||
Upload Effect
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- Upload Modal -->
|
||||
<div id="upload-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
|
||||
<div class="bg-gray-800 rounded-lg p-6 w-full max-w-md border border-gray-700">
|
||||
<div class="flex justify-between items-center mb-4">
|
||||
<h2 class="text-xl font-semibold">Upload Effect</h2>
|
||||
<button onclick="document.getElementById('upload-modal').classList.add('hidden')"
|
||||
class="text-gray-400 hover:text-white">
|
||||
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<form id="upload-form" enctype="multipart/form-data" class="space-y-4">
|
||||
<div>
|
||||
<label class="block text-gray-400 text-sm mb-1">Effect File (.sexp)</label>
|
||||
<input type="file" name="file" id="upload-file" required
|
||||
accept=".sexp,.lisp"
|
||||
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white file:mr-4 file:py-2 file:px-4 file:rounded file:border-0 file:bg-blue-600 file:text-white hover:file:bg-blue-700">
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label class="block text-gray-400 text-sm mb-1">Friendly Name (optional)</label>
|
||||
<input type="text" name="display_name" id="upload-name" placeholder="e.g., color-shift"
|
||||
class="w-full bg-gray-900 border border-gray-600 rounded px-3 py-2 text-white">
|
||||
<p class="text-gray-500 text-xs mt-1">A name to reference this effect in recipes</p>
|
||||
</div>
|
||||
|
||||
<div id="upload-result" class="hidden"></div>
|
||||
|
||||
<div class="flex justify-end space-x-3">
|
||||
<button type="button" onclick="document.getElementById('upload-modal').classList.add('hidden')"
|
||||
class="px-4 py-2 rounded border border-gray-600 hover:bg-gray-700">
|
||||
Cancel
|
||||
</button>
|
||||
<button type="submit" id="upload-btn"
|
||||
class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium">
|
||||
Upload
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p class="text-gray-400 mb-8">
|
||||
Effects are S-expression files that define video processing operations.
|
||||
Each effect is stored in IPFS and can be referenced by name in recipes.
|
||||
</p>
|
||||
|
||||
{% if effects %}
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4" id="effects-list">
|
||||
{% for effect in effects %}
|
||||
{% set meta = effect.meta or effect %}
|
||||
<a href="/effects/{{ effect.cid }}"
|
||||
class="effect-card bg-gray-800 border border-gray-700 rounded-lg p-4 hover:border-gray-600 transition-colors">
|
||||
<div class="flex items-center justify-between mb-2">
|
||||
<span class="font-medium text-white">{{ meta.name or 'Unnamed' }}</span>
|
||||
<span class="text-gray-500 text-sm">v{{ meta.version or '1.0.0' }}</span>
|
||||
</div>
|
||||
|
||||
{% if meta.description %}
|
||||
<p class="text-gray-400 text-sm mb-3 line-clamp-2">{{ meta.description }}</p>
|
||||
{% endif %}
|
||||
|
||||
<div class="flex items-center justify-between text-sm mb-2">
|
||||
{% if meta.author %}
|
||||
<span class="text-gray-500">by {{ meta.author }}</span>
|
||||
{% else %}
|
||||
<span></span>
|
||||
{% endif %}
|
||||
{% if meta.temporal %}
|
||||
<span class="bg-purple-900 text-purple-300 px-2 py-0.5 rounded text-xs">temporal</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{% if meta.params %}
|
||||
<div class="text-gray-500 text-sm">
|
||||
{{ meta.params | length }} parameter{{ 's' if meta.params | length != 1 else '' }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="mt-3 text-xs">
|
||||
{% if effect.friendly_name %}
|
||||
<span class="text-blue-400 font-medium">{{ effect.friendly_name }}</span>
|
||||
{% else %}
|
||||
<span class="text-gray-600 font-mono truncate">{{ effect.cid[:24] }}...</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
{% if has_more %}
|
||||
<div hx-get="/effects?offset={{ offset + limit }}&limit={{ limit }}"
|
||||
hx-trigger="revealed"
|
||||
hx-swap="afterend"
|
||||
hx-select="#effects-list > *"
|
||||
class="h-20 flex items-center justify-center text-gray-500">
|
||||
Loading more...
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% else %}
|
||||
<div class="bg-gray-800 border border-gray-700 rounded-lg p-12 text-center">
|
||||
<svg class="w-16 h-16 mx-auto mb-4 text-gray-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"
|
||||
d="M10 20l4-16m4 4l4 4-4 4M6 16l-4-4 4-4"/>
|
||||
</svg>
|
||||
<p class="text-gray-500 mb-4">No effects uploaded yet.</p>
|
||||
<p class="text-gray-600 text-sm mb-6">
|
||||
Effects are S-expression files with metadata in comment headers.
|
||||
</p>
|
||||
<button onclick="document.getElementById('upload-modal').classList.remove('hidden')"
|
||||
class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded font-medium">
|
||||
Upload Your First Effect
|
||||
</button>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Handle upload form
|
||||
document.getElementById('upload-form').addEventListener('submit', async function(e) {
|
||||
e.preventDefault();
|
||||
|
||||
const form = e.target;
|
||||
const fileInput = document.getElementById('upload-file');
|
||||
const displayName = document.getElementById('upload-name').value;
|
||||
const resultDiv = document.getElementById('upload-result');
|
||||
const uploadBtn = document.getElementById('upload-btn');
|
||||
|
||||
const file = fileInput.files[0];
|
||||
if (!file) return;
|
||||
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
if (displayName) {
|
||||
formData.append('display_name', displayName);
|
||||
}
|
||||
|
||||
uploadBtn.disabled = true;
|
||||
uploadBtn.textContent = 'Uploading...';
|
||||
resultDiv.classList.add('hidden');
|
||||
|
||||
try {
|
||||
const response = await fetch('/effects/upload', {
|
||||
method: 'POST',
|
||||
body: formData
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
resultDiv.innerHTML = `
|
||||
<div class="bg-green-900 border border-green-700 rounded p-3 text-green-300">
|
||||
<p class="font-medium">Effect uploaded!</p>
|
||||
<p class="text-sm mt-1">${data.name} <span class="font-mono">${data.friendly_name}</span></p>
|
||||
</div>
|
||||
`;
|
||||
resultDiv.classList.remove('hidden');
|
||||
setTimeout(() => location.reload(), 1500);
|
||||
} else {
|
||||
resultDiv.innerHTML = `
|
||||
<div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">
|
||||
<p class="font-medium">Upload failed</p>
|
||||
<p class="text-sm mt-1">${data.detail || 'Unknown error'}</p>
|
||||
</div>
|
||||
`;
|
||||
resultDiv.classList.remove('hidden');
|
||||
uploadBtn.disabled = false;
|
||||
uploadBtn.textContent = 'Upload';
|
||||
}
|
||||
} catch (error) {
|
||||
resultDiv.innerHTML = `
|
||||
<div class="bg-red-900 border border-red-700 rounded p-3 text-red-300">
|
||||
<p class="font-medium">Upload failed</p>
|
||||
<p class="text-sm mt-1">${error.message}</p>
|
||||
</div>
|
||||
`;
|
||||
resultDiv.classList.remove('hidden');
|
||||
uploadBtn.disabled = false;
|
||||
uploadBtn.textContent = 'Upload';
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
22
app/templates/fragments/link_card.html
Normal file
22
app/templates/fragments/link_card.html
Normal file
@@ -0,0 +1,22 @@
|
||||
<a href="{{ link }}" class="block rounded border border-stone-200 bg-white hover:bg-stone-50 transition-colors no-underline" data-fragment="link-card" data-app="artdag" data-hx-disable>
|
||||
<div class="flex flex-row items-center gap-3 p-3">
|
||||
<div class="flex-shrink-0 w-10 h-10 rounded bg-stone-100 flex items-center justify-center text-stone-500">
|
||||
{% if content_type == "recipe" %}
|
||||
<i class="fas fa-scroll text-sm"></i>
|
||||
{% elif content_type == "effect" %}
|
||||
<i class="fas fa-magic text-sm"></i>
|
||||
{% elif content_type == "run" %}
|
||||
<i class="fas fa-play-circle text-sm"></i>
|
||||
{% else %}
|
||||
<i class="fas fa-cube text-sm"></i>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="font-medium text-stone-900 text-sm truncate">{{ title }}</div>
|
||||
{% if description %}
|
||||
<div class="text-xs text-stone-500 clamp-2">{{ description }}</div>
|
||||
{% endif %}
|
||||
<div class="text-xs text-stone-400 mt-0.5">{{ content_type }} · {{ cid[:12] }}…</div>
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
7
app/templates/fragments/nav_item.html
Normal file
7
app/templates/fragments/nav_item.html
Normal file
@@ -0,0 +1,7 @@
|
||||
{# Navigation entry linking out to the art-dag app.  data-hx-disable turns off
   htmx processing inside this subtree so the browser performs a normal full
   navigation to {{ artdag_url }}. #}
<div class="relative nav-group">
    <a href="{{ artdag_url }}"
       class="justify-center cursor-pointer flex flex-row items-center gap-2 rounded bg-stone-200 text-black p-3"
       data-hx-disable>
        <i class="fas fa-project-diagram text-sm"></i> art-dag
    </a>
</div>
|
||||
@@ -7,7 +7,7 @@
|
||||
<h1 class="text-4xl font-bold mb-4">Art-DAG L1</h1>
|
||||
<p class="text-xl text-gray-400 mb-8">Content-Addressable Media Processing</p>
|
||||
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 gap-6 max-w-2xl mx-auto mb-12">
|
||||
<div class="grid grid-cols-1 md:grid-cols-3 gap-6 max-w-3xl mx-auto mb-12">
|
||||
<a href="/runs"
|
||||
class="bg-gray-800 border border-gray-700 rounded-lg p-6 hover:border-blue-500 transition-colors">
|
||||
<div class="text-blue-400 text-3xl font-bold mb-2">{{ stats.runs or 0 }}</div>
|
||||
@@ -18,6 +18,11 @@
|
||||
<div class="text-green-400 text-3xl font-bold mb-2">{{ stats.recipes or 0 }}</div>
|
||||
<div class="text-gray-400">Recipes</div>
|
||||
</a>
|
||||
<a href="/effects"
|
||||
class="bg-gray-800 border border-gray-700 rounded-lg p-6 hover:border-cyan-500 transition-colors">
|
||||
<div class="text-cyan-400 text-3xl font-bold mb-2">{{ stats.effects or 0 }}</div>
|
||||
<div class="text-gray-400">Effects</div>
|
||||
</a>
|
||||
<a href="/media"
|
||||
class="bg-gray-800 border border-gray-700 rounded-lg p-6 hover:border-purple-500 transition-colors">
|
||||
<div class="text-purple-400 text-3xl font-bold mb-2">{{ stats.media or 0 }}</div>
|
||||
@@ -31,10 +36,16 @@
|
||||
</div>
|
||||
|
||||
{% if not user %}
|
||||
<div class="bg-gray-800 border border-gray-700 rounded-lg p-8 max-w-md mx-auto">
|
||||
<div class="bg-gray-800 border border-gray-700 rounded-lg p-8 max-w-md mx-auto mb-12">
|
||||
<p class="text-gray-400 mb-4">Sign in through your L2 server to access all features.</p>
|
||||
<a href="/auth" class="text-blue-400 hover:text-blue-300">Sign In →</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if readme_html %}
|
||||
<div class="text-left bg-gray-800 border border-gray-700 rounded-lg p-8 prose prose-invert max-w-none">
|
||||
{{ readme_html | safe }}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
@@ -5,6 +5,8 @@
|
||||
{% block head %}
|
||||
{{ super() }}
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/cytoscape/3.23.0/cytoscape.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/dagre/0.8.5/dagre.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/cytoscape-dagre@2.5.0/cytoscape-dagre.min.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
@@ -12,16 +14,53 @@
|
||||
<!-- Header -->
|
||||
<div class="flex items-center space-x-4 mb-6">
|
||||
<a href="/recipes" class="text-gray-400 hover:text-white">← Recipes</a>
|
||||
<h1 class="text-2xl font-bold">{{ recipe.name }}</h1>
|
||||
<h1 class="text-2xl font-bold">{{ recipe.name or 'Unnamed Recipe' }}</h1>
|
||||
{% if recipe.version %}
|
||||
<span class="text-gray-500">v{{ recipe.version }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{% if recipe.description %}
|
||||
<p class="text-gray-400 mb-6">{{ recipe.description }}</p>
|
||||
<p class="text-gray-400 mb-4">{{ recipe.description }}</p>
|
||||
{% endif %}
|
||||
|
||||
<!-- Metadata -->
|
||||
<div class="bg-gray-800 rounded-lg p-4 border border-gray-700 mb-6">
|
||||
<div class="grid grid-cols-2 md:grid-cols-4 gap-4 text-sm">
|
||||
<div>
|
||||
<span class="text-gray-500">Recipe ID</span>
|
||||
<p class="text-gray-300 font-mono text-xs truncate" title="{{ recipe.recipe_id }}">{{ recipe.recipe_id[:16] }}...</p>
|
||||
</div>
|
||||
{% if recipe.ipfs_cid %}
|
||||
<div>
|
||||
<span class="text-gray-500">IPFS CID</span>
|
||||
<p class="text-gray-300 font-mono text-xs truncate" title="{{ recipe.ipfs_cid }}">{{ recipe.ipfs_cid[:16] }}...</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div>
|
||||
<span class="text-gray-500">Steps</span>
|
||||
<p class="text-gray-300">{{ recipe.step_count or recipe.steps|length }}</p>
|
||||
</div>
|
||||
{% if recipe.author %}
|
||||
<div>
|
||||
<span class="text-gray-500">Author</span>
|
||||
<p class="text-gray-300">{{ recipe.author }}</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if recipe.type == 'streaming' %}
|
||||
<!-- Streaming Recipe Info -->
|
||||
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6 p-4">
|
||||
<div class="flex items-center space-x-2 mb-2">
|
||||
<span class="bg-purple-900 text-purple-300 px-2 py-1 rounded text-sm">Streaming Recipe</span>
|
||||
</div>
|
||||
<p class="text-gray-400 text-sm">
|
||||
This recipe uses frame-by-frame streaming rendering. The pipeline is defined as an S-expression that generates frames dynamically.
|
||||
</p>
|
||||
</div>
|
||||
{% else %}
|
||||
<!-- DAG Visualization -->
|
||||
<div class="bg-gray-800 rounded-lg border border-gray-700 mb-6">
|
||||
<div class="border-b border-gray-700 px-4 py-2 flex items-center justify-between">
|
||||
@@ -71,11 +110,125 @@
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- YAML Source -->
|
||||
<h2 class="text-lg font-semibold mb-4">Source</h2>
|
||||
<!-- Source Code -->
|
||||
<h2 class="text-lg font-semibold mb-4">Recipe (S-expression)</h2>
|
||||
<div class="bg-gray-900 rounded-lg p-4 border border-gray-700">
|
||||
<pre class="text-sm text-gray-300 overflow-x-auto whitespace-pre-wrap">{{ recipe.yaml }}</pre>
|
||||
{% if recipe.sexp %}
|
||||
<pre class="text-sm font-mono text-gray-300 overflow-x-auto whitespace-pre-wrap sexp-code">{{ recipe.sexp }}</pre>
|
||||
{% else %}
|
||||
<p class="text-gray-500">No source available</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Single-pass S-expression syntax highlighter (avoids regex corruption).
//
// Takes raw recipe source text and returns an HTML string in which tokens are
// wrapped in <span> elements carrying Tailwind colour classes.  All literal
// text passes through esc() before insertion, so the result is safe to assign
// to innerHTML even for untrusted recipe source.
function highlightSexp(text) {
    // Special forms get the strongest highlight; pipeline primitives a weaker one.
    const SPECIAL = new Set(['plan','recipe','def','->','stream','let','lambda','if','cond','define']);
    const PRIMS = new Set(['source','effect','sequence','segment','resize','transform','layer','blend','mux','analyze','fused-pipeline']);
    // HTML-escape helper.  '&' must be replaced first so the entities produced
    // for '<' and '>' are not themselves re-escaped.  (The page this was
    // scraped from had un-escaped the entities, leaving identity replacements.)
    function esc(s) { return s.replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;'); }
    function span(cls, s) { return '<span class="' + cls + '">' + esc(s) + '</span>'; }

    let out = '', i = 0, len = text.length;
    while (i < len) {
        // ';;' line comment: colour through to end of line.
        if (text[i] === ';' && i + 1 < len && text[i+1] === ';') {
            let end = text.indexOf('\n', i);
            if (end === -1) end = len;
            out += span('text-gray-500', text.slice(i, end));
            i = end;
        }
        // Double-quoted string; a backslash escapes the following character.
        else if (text[i] === '"') {
            let j = i + 1;
            while (j < len && text[j] !== '"') { if (text[j] === '\\') j++; j++; }
            if (j < len) j++;                       // include the closing quote
            out += span('text-green-400', text.slice(i, j));
            i = j;
        }
        // :keyword argument.
        else if (text[i] === ':' && i + 1 < len && /[a-zA-Z_-]/.test(text[i+1])) {
            let j = i + 1;
            while (j < len && /[a-zA-Z0-9_-]/.test(text[j])) j++;
            out += span('text-purple-400', text.slice(i, j));
            i = j;
        }
        // Opening paren; the word immediately after it is treated as the operator.
        else if (text[i] === '(') {
            out += span('text-yellow-500', '(');
            i++;
            let ws = '';
            while (i < len && (text[i] === ' ' || text[i] === '\t')) { ws += text[i]; i++; }
            out += esc(ws);
            if (i < len && /[a-zA-Z_>-]/.test(text[i])) {
                let j = i;
                while (j < len && /[a-zA-Z0-9_>-]/.test(text[j])) j++;
                let word = text.slice(i, j);
                if (SPECIAL.has(word)) out += span('text-pink-400 font-semibold', word);
                else if (PRIMS.has(word)) out += span('text-blue-400', word);
                else out += esc(word);
                i = j;
            }
        }
        else if (text[i] === ')') {
            out += span('text-yellow-500', ')');
            i++;
        }
        // Number literal: only when at start of text or preceded by whitespace/'('.
        else if (/[0-9]/.test(text[i]) && (i === 0 || /[\s(]/.test(text[i-1]))) {
            let j = i;
            while (j < len && /[0-9.]/.test(text[j])) j++;
            out += span('text-orange-300', text.slice(i, j));
            i = j;
        }
        // Plain text: copy (escaped) up to the next token boundary.
        else {
            let j = i;
            while (j < len && !'(;":)'.includes(text[j])) {
                if (text[j] === ':' && j + 1 < len && /[a-zA-Z_-]/.test(text[j+1])) break;
                if (/[0-9]/.test(text[j]) && (j === 0 || /[\s(]/.test(text[j-1]))) break;
                j++;
            }
            if (j === i) { out += esc(text[i]); i++; }
            else { out += esc(text.slice(i, j)); i = j; }
        }
    }
    return out;
}
|
||||
|
||||
document.querySelectorAll('.sexp-code').forEach(el => {
|
||||
el.innerHTML = highlightSexp(el.textContent);
|
||||
});
|
||||
</script>
|
||||
|
||||
<!-- Actions -->
|
||||
<div class="flex items-center space-x-4 mt-8">
|
||||
<button hx-post="/runs/rerun/{{ recipe.recipe_id }}"
|
||||
hx-target="#action-result"
|
||||
hx-swap="innerHTML"
|
||||
class="bg-green-600 hover:bg-green-700 px-4 py-2 rounded font-medium">
|
||||
Run Recipe
|
||||
</button>
|
||||
{% if recipe.ipfs_cid %}
|
||||
<a href="https://ipfs.io/ipfs/{{ recipe.ipfs_cid }}"
|
||||
target="_blank"
|
||||
class="bg-cyan-600 hover:bg-cyan-700 px-4 py-2 rounded font-medium">
|
||||
View on IPFS
|
||||
</a>
|
||||
{% elif recipe.recipe_id.startswith('Qm') or recipe.recipe_id.startswith('bafy') %}
|
||||
<a href="https://ipfs.io/ipfs/{{ recipe.recipe_id }}"
|
||||
target="_blank"
|
||||
class="bg-cyan-600 hover:bg-cyan-700 px-4 py-2 rounded font-medium">
|
||||
View on IPFS
|
||||
</a>
|
||||
{% endif %}
|
||||
<button hx-post="/recipes/{{ recipe.recipe_id }}/publish"
|
||||
hx-target="#action-result"
|
||||
class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded font-medium">
|
||||
Share to L2
|
||||
</button>
|
||||
<button hx-delete="/recipes/{{ recipe.recipe_id }}/ui"
|
||||
hx-target="#action-result"
|
||||
hx-confirm="Delete this recipe? This cannot be undone."
|
||||
class="bg-red-600 hover:bg-red-700 px-4 py-2 rounded font-medium">
|
||||
Delete
|
||||
</button>
|
||||
<span id="action-result"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -6,6 +6,10 @@
|
||||
<div class="max-w-6xl mx-auto">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<h1 class="text-3xl font-bold">Recipes</h1>
|
||||
<label class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded font-medium cursor-pointer">
|
||||
Upload Recipe
|
||||
<input type="file" accept=".sexp,.yaml,.yml" class="hidden" id="recipe-upload" />
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<p class="text-gray-400 mb-8">
|
||||
@@ -13,10 +17,10 @@
|
||||
</p>
|
||||
|
||||
{% if recipes %}
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4" id="recipes-list">
|
||||
{% for recipe in recipes %}
|
||||
<a href="/recipe/{{ recipe.id }}"
|
||||
class="bg-gray-800 border border-gray-700 rounded-lg p-4 hover:border-gray-600 transition-colors">
|
||||
<a href="/recipes/{{ recipe.recipe_id }}"
|
||||
class="recipe-card bg-gray-800 border border-gray-700 rounded-lg p-4 hover:border-gray-600 transition-colors">
|
||||
<div class="flex items-center justify-between mb-2">
|
||||
<span class="font-medium text-white">{{ recipe.name }}</span>
|
||||
{% if recipe.version %}
|
||||
@@ -42,14 +46,91 @@
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="mt-3 text-xs">
|
||||
{% if recipe.friendly_name %}
|
||||
<span class="text-blue-400 font-medium">{{ recipe.friendly_name }}</span>
|
||||
{% else %}
|
||||
<span class="text-gray-600 font-mono truncate">{{ recipe.recipe_id[:24] }}...</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
{% if has_more %}
|
||||
<div hx-get="/recipes?offset={{ offset + limit }}&limit={{ limit }}"
|
||||
hx-trigger="revealed"
|
||||
hx-swap="afterend"
|
||||
hx-select="#recipes-list > *"
|
||||
class="h-20 flex items-center justify-center text-gray-500">
|
||||
Loading more...
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% else %}
|
||||
<div class="bg-gray-800 border border-gray-700 rounded-lg p-12 text-center">
|
||||
<p class="text-gray-500 mb-4">No recipes available.</p>
|
||||
<p class="text-gray-600 text-sm">Recipes are defined in YAML format and submitted via API.</p>
|
||||
<p class="text-gray-600 text-sm mb-6">
|
||||
Recipes are S-expression files (.sexp) that define processing pipelines.
|
||||
</p>
|
||||
<label class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded font-medium cursor-pointer inline-block">
|
||||
Upload Your First Recipe
|
||||
<input type="file" accept=".sexp,.yaml,.yml" class="hidden" id="recipe-upload-empty" />
|
||||
</label>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div id="upload-result" class="fixed bottom-4 right-4 max-w-sm"></div>
|
||||
|
||||
<script>
|
||||
// Upload the recipe file currently selected in `input` to /recipes/upload,
// render a success/failure toast into #upload-result, and reload the page
// shortly after a successful upload.  The input's value is cleared at the end
// so selecting the same file again re-fires its `change` event.
function handleRecipeUpload(input) {
    const chosen = input.files[0];
    if (!chosen) return;                          // dialog cancelled — nothing to send

    const payload = new FormData();
    payload.append('file', chosen);

    const toast = (html) => {
        document.getElementById('upload-result').innerHTML = html;
    };

    fetch('/recipes/upload', { method: 'POST', body: payload })
        .then((response) => {
            if (!response.ok) throw new Error('Upload failed');
            return response.json();
        })
        .then((data) => {
            toast(`
        <div class="bg-green-900 border border-green-700 rounded-lg p-4">
          <p class="text-green-300 font-medium">Recipe uploaded!</p>
          <p class="text-green-400 text-sm mt-1">${data.name} v${data.version}</p>
          <p class="text-gray-400 text-xs mt-2 font-mono">${data.recipe_id}</p>
        </div>
      `);
            setTimeout(() => window.location.reload(), 1500);
        })
        .catch((error) => {
            toast(`
        <div class="bg-red-900 border border-red-700 rounded-lg p-4">
          <p class="text-red-300 font-medium">Upload failed</p>
          <p class="text-red-400 text-sm mt-1">${error.message}</p>
        </div>
      `);
        });

    input.value = '';
}
|
||||
|
||||
document.getElementById('recipe-upload')?.addEventListener('change', function() {
|
||||
handleRecipeUpload(this);
|
||||
});
|
||||
document.getElementById('recipe-upload-empty')?.addEventListener('change', function() {
|
||||
handleRecipeUpload(this);
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
} %}
|
||||
{% set color = status_colors.get(run.status, 'gray') %}
|
||||
|
||||
<a href="/run/{{ run.run_id }}"
|
||||
<a href="/runs/{{ run.run_id }}"
|
||||
class="block bg-gray-800 border border-gray-700 rounded-lg p-4 hover:border-gray-600 transition-colors">
|
||||
<div class="flex items-center justify-between mb-2">
|
||||
<div class="flex items-center space-x-3">
|
||||
@@ -23,10 +23,10 @@
|
||||
<span class="text-gray-500 text-sm">{{ run.created_at }}</span>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center justify-between mb-3">
|
||||
<div class="flex items-center space-x-4 text-sm">
|
||||
<span class="text-gray-400">
|
||||
Recipe: <span class="text-white">{{ run.recipe or 'Unknown' }}</span>
|
||||
Recipe: <span class="text-white">{{ run.recipe_name or (run.recipe[:12] ~ '...' if run.recipe and run.recipe|length > 12 else run.recipe) or 'Unknown' }}</span>
|
||||
</span>
|
||||
{% if run.total_steps %}
|
||||
<span class="text-gray-400">
|
||||
@@ -34,15 +34,56 @@
|
||||
</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if run.output_hash %}
|
||||
<span class="font-mono text-xs text-gray-500">{{ run.output_hash[:16] }}...</span>
|
||||
{# Media previews row #}
|
||||
<div class="flex items-center space-x-4">
|
||||
{# Input previews #}
|
||||
{% if run.input_previews %}
|
||||
<div class="flex items-center space-x-1">
|
||||
<span class="text-xs text-gray-500 mr-1">In:</span>
|
||||
{% for inp in run.input_previews %}
|
||||
{% if inp.media_type and inp.media_type.startswith('image/') %}
|
||||
<img src="/cache/{{ inp.cid }}/raw" alt="" class="w-10 h-10 object-cover rounded">
|
||||
{% elif inp.media_type and inp.media_type.startswith('video/') %}
|
||||
<video src="/cache/{{ inp.cid }}/raw" class="w-10 h-10 object-cover rounded" muted></video>
|
||||
{% else %}
|
||||
<div class="w-10 h-10 bg-gray-700 rounded flex items-center justify-center text-gray-500 text-xs">?</div>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% if run.inputs and run.inputs|length > 3 %}
|
||||
<span class="text-xs text-gray-500">+{{ run.inputs|length - 3 }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% elif run.inputs %}
|
||||
<div class="text-xs text-gray-500">
|
||||
{{ run.inputs|length }} input(s)
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{# Arrow #}
|
||||
<span class="text-gray-600">-></span>
|
||||
|
||||
{# Output preview - prefer IPFS URLs when available #}
|
||||
{% if run.output_cid %}
|
||||
<div class="flex items-center space-x-1">
|
||||
<span class="text-xs text-gray-500 mr-1">Out:</span>
|
||||
{% if run.output_media_type and run.output_media_type.startswith('image/') %}
|
||||
<img src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" alt="" class="w-10 h-10 object-cover rounded">
|
||||
{% elif run.output_media_type and run.output_media_type.startswith('video/') %}
|
||||
<video src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" class="w-10 h-10 object-cover rounded" muted></video>
|
||||
{% else %}
|
||||
<div class="w-10 h-10 bg-gray-700 rounded flex items-center justify-center text-gray-500 text-xs">?</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
<span class="text-xs text-gray-500">No output yet</span>
|
||||
{% endif %}
|
||||
|
||||
<div class="flex-grow"></div>
|
||||
|
||||
{% if run.output_cid %}
|
||||
<span class="font-mono text-xs text-gray-600">{{ run.output_cid[:12] }}...</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{% if run.inputs %}
|
||||
<div class="mt-2 text-xs text-gray-500">
|
||||
Inputs: {{ run.inputs | length }} file(s)
|
||||
</div>
|
||||
{% endif %}
|
||||
</a>
|
||||
|
||||
62
app/templates/runs/artifacts.html
Normal file
62
app/templates/runs/artifacts.html
Normal file
@@ -0,0 +1,62 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Run Artifacts{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="mb-6">
|
||||
<a href="/runs/{{ run_id }}/detail" class="inline-flex items-center text-blue-400 hover:text-blue-300">
|
||||
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
|
||||
</svg>
|
||||
Back to Run
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<h1 class="text-2xl font-bold text-white mb-6">Run Artifacts</h1>
|
||||
|
||||
{% if artifacts %}
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
|
||||
{% for artifact in artifacts %}
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<div class="flex items-center justify-between mb-3">
|
||||
<span class="px-2 py-1 text-xs rounded
|
||||
{% if artifact.role == 'input' %}bg-blue-600
|
||||
{% elif artifact.role == 'output' %}bg-green-600
|
||||
{% else %}bg-purple-600{% endif %}">
|
||||
{{ artifact.role }}
|
||||
</span>
|
||||
<span class="text-sm text-gray-400">{{ artifact.step_name }}</span>
|
||||
</div>
|
||||
|
||||
<div class="mb-3">
|
||||
<p class="text-xs text-gray-500 mb-1">Content Hash</p>
|
||||
<p class="font-mono text-xs text-gray-300 truncate">{{ artifact.hash }}</p>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-between text-sm">
|
||||
<span class="text-gray-400">
|
||||
{% if artifact.media_type == 'video' %}Video
|
||||
{% elif artifact.media_type == 'image' %}Image
|
||||
{% elif artifact.media_type == 'audio' %}Audio
|
||||
{% else %}File{% endif %}
|
||||
</span>
|
||||
<span class="text-gray-500">{{ (artifact.size_bytes / 1024)|round(1) }} KB</span>
|
||||
</div>
|
||||
|
||||
<div class="mt-3 flex gap-2">
|
||||
<a href="/cache/{{ artifact.hash }}" class="flex-1 px-3 py-1 bg-gray-700 hover:bg-gray-600 text-center text-sm rounded transition-colors">
|
||||
View
|
||||
</a>
|
||||
<a href="/cache/{{ artifact.hash }}/raw" class="flex-1 px-3 py-1 bg-blue-600 hover:bg-blue-700 text-center text-sm rounded transition-colors">
|
||||
Download
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="bg-gray-800 rounded-lg p-6 text-center">
|
||||
<p class="text-gray-400">No artifacts found for this run.</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
@@ -5,10 +5,13 @@
|
||||
{% block head %}
|
||||
{{ super() }}
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/cytoscape/3.23.0/cytoscape.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/dagre/0.8.5/dagre.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/cytoscape-dagre@2.5.0/cytoscape-dagre.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/hls.js@1.4.12/dist/hls.min.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
{% set status_colors = {'completed': 'green', 'running': 'blue', 'pending': 'yellow', 'failed': 'red'} %}
|
||||
{% set status_colors = {'completed': 'green', 'running': 'blue', 'pending': 'yellow', 'failed': 'red', 'paused': 'yellow'} %}
|
||||
{% set color = status_colors.get(run.status, 'gray') %}
|
||||
|
||||
<div class="max-w-6xl mx-auto">
|
||||
@@ -22,18 +25,89 @@
|
||||
{% if run.cached %}
|
||||
<span class="bg-purple-900 text-purple-300 px-3 py-1 rounded text-sm">Cached</span>
|
||||
{% endif %}
|
||||
{% if run.error %}
|
||||
<span class="text-red-400 text-sm ml-2">{{ run.error }}</span>
|
||||
{% endif %}
|
||||
{% if run.checkpoint_frame %}
|
||||
<span class="text-gray-400 text-sm ml-2">
|
||||
Checkpoint: {{ run.checkpoint_frame }}{% if run.total_frames %} / {{ run.total_frames }}{% endif %} frames
|
||||
</span>
|
||||
{% endif %}
|
||||
<div class="flex-grow"></div>
|
||||
|
||||
<!-- Pause button for running renders -->
|
||||
{% if run.status == 'running' %}
|
||||
<button hx-post="/runs/{{ run.run_id }}/pause"
|
||||
hx-target="#action-result"
|
||||
hx-swap="innerHTML"
|
||||
class="bg-yellow-600 hover:bg-yellow-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Pause
|
||||
</button>
|
||||
{% endif %}
|
||||
|
||||
<!-- Resume/Restart buttons for failed/paused renders -->
|
||||
{% if run.status in ['failed', 'paused'] %}
|
||||
{% if run.checkpoint_frame %}
|
||||
<button hx-post="/runs/{{ run.run_id }}/resume"
|
||||
hx-target="#action-result"
|
||||
hx-swap="innerHTML"
|
||||
class="bg-green-600 hover:bg-green-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Resume{% if run.total_frames %} ({{ ((run.checkpoint_frame / run.total_frames) * 100)|round|int }}%){% endif %}
|
||||
</button>
|
||||
{% endif %}
|
||||
<button hx-post="/runs/{{ run.run_id }}/restart"
|
||||
hx-target="#action-result"
|
||||
hx-swap="innerHTML"
|
||||
hx-confirm="Discard progress and start over?"
|
||||
class="bg-yellow-600 hover:bg-yellow-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Restart
|
||||
</button>
|
||||
{% endif %}
|
||||
|
||||
{% if run.recipe %}
|
||||
<button hx-post="/runs/rerun/{{ run.recipe }}"
|
||||
hx-target="#action-result"
|
||||
hx-swap="innerHTML"
|
||||
class="bg-blue-600 hover:bg-blue-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Run Again
|
||||
</button>
|
||||
{% endif %}
|
||||
<button hx-post="/runs/{{ run.run_id }}/publish"
|
||||
hx-target="#action-result"
|
||||
class="bg-purple-600 hover:bg-purple-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Share to L2
|
||||
</button>
|
||||
<button hx-delete="/runs/{{ run.run_id }}/ui"
|
||||
hx-target="#action-result"
|
||||
hx-confirm="Delete this run and all its artifacts? This cannot be undone."
|
||||
class="bg-red-600 hover:bg-red-700 px-3 py-1 rounded text-sm font-medium">
|
||||
Delete
|
||||
</button>
|
||||
<span id="action-result"></span>
|
||||
</div>
|
||||
|
||||
<!-- Info Grid -->
|
||||
<div class="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6">
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<div class="text-gray-500 text-sm">Recipe</div>
|
||||
<div class="text-white font-medium">{{ run.recipe or 'Unknown' }}</div>
|
||||
<div class="text-white font-medium">
|
||||
{% if run.recipe %}
|
||||
<a href="/recipes/{{ run.recipe }}" class="hover:text-blue-400">
|
||||
{{ run.recipe_name or (run.recipe[:16] ~ '...') }}
|
||||
</a>
|
||||
{% else %}
|
||||
Unknown
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-gray-800 rounded-lg p-4">
|
||||
<div class="text-gray-500 text-sm">Steps</div>
|
||||
<div class="text-white font-medium">
|
||||
{{ run.executed or 0 }} / {{ run.total_steps or '?' }}
|
||||
{% if run.recipe == 'streaming' %}
|
||||
{% if run.status == 'completed' %}1 / 1{% else %}0 / 1{% endif %}
|
||||
{% else %}
|
||||
{{ run.executed or 0 }} / {{ run.total_steps or (plan.steps|length if plan and plan.steps else '?') }}
|
||||
{% endif %}
|
||||
{% if run.cached_steps %}
|
||||
<span class="text-purple-400 text-sm">({{ run.cached_steps }} cached)</span>
|
||||
{% endif %}
|
||||
@@ -49,6 +123,309 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Unified HLS Player (shown during rendering, for paused/failed runs with checkpoint, OR for completed HLS streams) -->
|
||||
{% if run.status == 'rendering' or run.ipfs_playlist_cid or (run.status in ['paused', 'failed'] and run.checkpoint_frame) %}
|
||||
<div id="hls-player-container" class="mb-6 bg-gray-800 rounded-lg p-4">
|
||||
<div class="flex items-center justify-between mb-4">
|
||||
<h3 class="text-lg font-semibold flex items-center">
|
||||
{% if run.status == 'rendering' %}
|
||||
<span id="live-indicator" class="w-3 h-3 bg-red-500 rounded-full mr-2 animate-pulse"></span>
|
||||
<span id="player-title">Live Preview</span>
|
||||
{% elif run.status == 'paused' %}
|
||||
<span id="live-indicator" class="w-3 h-3 bg-yellow-500 rounded-full mr-2"></span>
|
||||
<span id="player-title">Partial Output (Paused)</span>
|
||||
{% elif run.status == 'failed' and run.checkpoint_frame %}
|
||||
<span id="live-indicator" class="w-3 h-3 bg-red-500 rounded-full mr-2"></span>
|
||||
<span id="player-title">Partial Output (Failed)</span>
|
||||
{% else %}
|
||||
<span id="live-indicator" class="w-3 h-3 bg-green-500 rounded-full mr-2 hidden"></span>
|
||||
<span id="player-title">Video</span>
|
||||
{% endif %}
|
||||
</h3>
|
||||
<div class="flex items-center space-x-4">
|
||||
<!-- Mode toggle -->
|
||||
<div class="flex items-center space-x-2 text-sm">
|
||||
<button id="mode-replay" onclick="setPlayerMode('replay')"
|
||||
class="px-2 py-1 rounded {% if run.status != 'rendering' %}bg-blue-600 text-white{% else %}bg-gray-700 text-gray-400 hover:bg-gray-600{% endif %}">
|
||||
From Start
|
||||
</button>
|
||||
<button id="mode-live" onclick="setPlayerMode('live')"
|
||||
class="px-2 py-1 rounded {% if run.status == 'rendering' %}bg-blue-600 text-white{% else %}bg-gray-700 text-gray-400 hover:bg-gray-600{% endif %}">
|
||||
Live Edge
|
||||
</button>
|
||||
</div>
|
||||
<div id="stream-status" class="text-sm text-gray-400">Connecting...</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="relative bg-black rounded-lg overflow-hidden" style="aspect-ratio: 16/9;">
|
||||
<video id="hls-video" class="w-full h-full" controls autoplay muted playsinline></video>
|
||||
<div id="stream-loading" class="absolute inset-0 flex items-center justify-center bg-gray-900/80">
|
||||
<div class="text-center">
|
||||
<div class="animate-spin w-8 h-8 border-2 border-blue-500 border-t-transparent rounded-full mx-auto mb-2"></div>
|
||||
<div class="text-gray-400">Waiting for stream...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="mt-2 flex items-center justify-between text-xs text-gray-500">
|
||||
<span>Stream: <code class="bg-gray-900 px-1 rounded">/runs/{{ run.run_id }}/playlist.m3u8</code></span>
|
||||
<span id="stream-info"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
(function() {
|
||||
const video = document.getElementById('hls-video');
|
||||
const statusEl = document.getElementById('stream-status');
|
||||
const loadingEl = document.getElementById('stream-loading');
|
||||
const streamInfoEl = document.getElementById('stream-info');
|
||||
const liveIndicator = document.getElementById('live-indicator');
|
||||
const playerTitle = document.getElementById('player-title');
|
||||
const modeReplayBtn = document.getElementById('mode-replay');
|
||||
const modeLiveBtn = document.getElementById('mode-live');
|
||||
|
||||
const baseUrl = '/runs/{{ run.run_id }}/playlist.m3u8';
|
||||
const isRendering = {{ 'true' if run.status == 'rendering' else 'false' }};
|
||||
const isPausedOrFailed = {{ 'true' if run.status in ['paused', 'failed'] else 'false' }};
|
||||
|
||||
let hls = null;
|
||||
let retryCount = 0;
|
||||
const maxRetries = 120;
|
||||
let segmentsLoaded = 0;
|
||||
// Start in replay mode for paused/failed (shows partial output from start)
|
||||
// Start in live mode for rendering (follows the render progress)
|
||||
let currentMode = isRendering ? 'live' : 'replay';
|
||||
|
||||
function getHlsUrl() {
|
||||
return baseUrl + '?_t=' + Date.now();
|
||||
}
|
||||
|
||||
// Custom playlist loader that adds cache-busting to every request
|
||||
class CacheBustingPlaylistLoader extends Hls.DefaultConfig.loader {
|
||||
load(context, config, callbacks) {
|
||||
if (context.type === 'manifest' || context.type === 'level') {
|
||||
const url = new URL(context.url, window.location.origin);
|
||||
url.searchParams.set('_t', Date.now());
|
||||
context.url = url.toString();
|
||||
}
|
||||
super.load(context, config, callbacks);
|
||||
}
|
||||
}
|
||||
|
||||
function getHlsConfig(mode) {
|
||||
const baseConfig = {
|
||||
maxBufferLength: 120,
|
||||
maxMaxBufferLength: 180,
|
||||
maxBufferSize: 100 * 1024 * 1024,
|
||||
maxBufferHole: 0.5,
|
||||
backBufferLength: 60,
|
||||
manifestLoadingTimeOut: 10000,
|
||||
manifestLoadingMaxRetry: 4,
|
||||
levelLoadingTimeOut: 10000,
|
||||
levelLoadingMaxRetry: 4,
|
||||
fragLoadingTimeOut: 20000,
|
||||
fragLoadingMaxRetry: 6,
|
||||
startLevel: 0,
|
||||
abrEwmaDefaultEstimate: 500000,
|
||||
};
|
||||
|
||||
if (mode === 'live') {
|
||||
// Live mode: follow the edge, cache-bust playlists
|
||||
return {
|
||||
...baseConfig,
|
||||
pLoader: CacheBustingPlaylistLoader,
|
||||
liveSyncDurationCount: 10,
|
||||
liveMaxLatencyDurationCount: 20,
|
||||
liveDurationInfinity: true,
|
||||
};
|
||||
} else {
|
||||
// Replay mode: start from beginning, no live sync
|
||||
return {
|
||||
...baseConfig,
|
||||
pLoader: CacheBustingPlaylistLoader, // Still bust cache for fresh playlist
|
||||
startPosition: 0,
|
||||
liveDurationInfinity: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function updateModeUI(mode) {
|
||||
currentMode = mode;
|
||||
if (mode === 'live') {
|
||||
modeLiveBtn.classList.add('bg-blue-600', 'text-white');
|
||||
modeLiveBtn.classList.remove('bg-gray-700', 'text-gray-400');
|
||||
modeReplayBtn.classList.remove('bg-blue-600', 'text-white');
|
||||
modeReplayBtn.classList.add('bg-gray-700', 'text-gray-400');
|
||||
liveIndicator.classList.remove('hidden', 'bg-green-500');
|
||||
liveIndicator.classList.add('bg-red-500', 'animate-pulse');
|
||||
playerTitle.textContent = isRendering ? 'Live Preview' : 'Live Edge';
|
||||
} else {
|
||||
modeReplayBtn.classList.add('bg-blue-600', 'text-white');
|
||||
modeReplayBtn.classList.remove('bg-gray-700', 'text-gray-400');
|
||||
modeLiveBtn.classList.remove('bg-blue-600', 'text-white');
|
||||
modeLiveBtn.classList.add('bg-gray-700', 'text-gray-400');
|
||||
liveIndicator.classList.add('hidden');
|
||||
liveIndicator.classList.remove('animate-pulse');
|
||||
playerTitle.textContent = 'Replay';
|
||||
}
|
||||
}
|
||||
|
||||
window.setPlayerMode = function(mode) {
|
||||
if (mode === currentMode) return;
|
||||
|
||||
const currentTime = video.currentTime;
|
||||
const wasPlaying = !video.paused;
|
||||
|
||||
// Destroy current HLS instance
|
||||
if (hls) {
|
||||
hls.destroy();
|
||||
hls = null;
|
||||
}
|
||||
|
||||
updateModeUI(mode);
|
||||
segmentsLoaded = 0;
|
||||
retryCount = 0;
|
||||
|
||||
// Reinitialize with new config
|
||||
initHls(mode, mode === 'replay' ? 0 : null); // Start from 0 in replay, live edge in live
|
||||
};
|
||||
|
||||
function initHls(mode, startPosition) {
|
||||
mode = mode || currentMode;
|
||||
|
||||
if (Hls.isSupported()) {
|
||||
const config = getHlsConfig(mode);
|
||||
if (startPosition !== null && startPosition !== undefined) {
|
||||
config.startPosition = startPosition;
|
||||
}
|
||||
hls = new Hls(config);
|
||||
|
||||
hls.on(Hls.Events.MANIFEST_PARSED, function(event, data) {
|
||||
loadingEl.classList.add('hidden');
|
||||
statusEl.textContent = 'Buffering...';
|
||||
statusEl.classList.remove('text-gray-400');
|
||||
statusEl.classList.add('text-yellow-400');
|
||||
streamInfoEl.textContent = `${data.levels.length} quality level(s)`;
|
||||
video.play().catch(() => {});
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.FRAG_LOADED, function(event, data) {
|
||||
retryCount = 0;
|
||||
segmentsLoaded++;
|
||||
const modeLabel = currentMode === 'live' ? 'Live' : 'Replay';
|
||||
statusEl.textContent = `${modeLabel} (${segmentsLoaded} segments)`;
|
||||
statusEl.classList.remove('text-yellow-400', 'text-gray-400');
|
||||
statusEl.classList.add('text-green-400');
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.BUFFER_APPENDED, function() {
|
||||
loadingEl.classList.add('hidden');
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.ERROR, function(event, data) {
|
||||
console.log('HLS error:', data.type, data.details, data.fatal);
|
||||
|
||||
if (data.fatal) {
|
||||
switch (data.type) {
|
||||
case Hls.ErrorTypes.NETWORK_ERROR:
|
||||
if (retryCount < maxRetries) {
|
||||
retryCount++;
|
||||
statusEl.textContent = `Waiting for stream... (${retryCount})`;
|
||||
statusEl.classList.remove('text-green-400');
|
||||
statusEl.classList.add('text-yellow-400');
|
||||
const delay = Math.min(1000 * Math.pow(1.5, Math.min(retryCount, 6)), 10000);
|
||||
setTimeout(() => {
|
||||
hls.loadSource(getHlsUrl());
|
||||
}, delay + Math.random() * 1000);
|
||||
} else {
|
||||
statusEl.textContent = 'Stream unavailable';
|
||||
statusEl.classList.add('text-red-400');
|
||||
}
|
||||
break;
|
||||
case Hls.ErrorTypes.MEDIA_ERROR:
|
||||
console.log('Media error, attempting recovery');
|
||||
hls.recoverMediaError();
|
||||
break;
|
||||
default:
|
||||
statusEl.textContent = 'Stream error';
|
||||
statusEl.classList.add('text-red-400');
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
if (data.details === 'bufferStalledError') {
|
||||
statusEl.textContent = 'Buffering...';
|
||||
statusEl.classList.remove('text-green-400');
|
||||
statusEl.classList.add('text-yellow-400');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
video.addEventListener('waiting', function() {
|
||||
if (currentMode === 'live' && hls && hls.liveSyncPosition) {
|
||||
const liveEdge = hls.liveSyncPosition;
|
||||
const behindLive = liveEdge - video.currentTime;
|
||||
if (behindLive < 8) {
|
||||
statusEl.textContent = 'Waiting for rendering...';
|
||||
} else {
|
||||
statusEl.textContent = 'Buffering...';
|
||||
}
|
||||
} else {
|
||||
statusEl.textContent = 'Buffering...';
|
||||
}
|
||||
statusEl.classList.remove('text-green-400');
|
||||
statusEl.classList.add('text-yellow-400');
|
||||
});
|
||||
|
||||
video.addEventListener('playing', function() {
|
||||
const modeLabel = currentMode === 'live' ? 'Live' : 'Replay';
|
||||
statusEl.textContent = `${modeLabel} (${segmentsLoaded} segments)`;
|
||||
statusEl.classList.remove('text-yellow-400');
|
||||
statusEl.classList.add('text-green-400');
|
||||
});
|
||||
|
||||
// Live mode: periodic check for catching up to live edge
|
||||
if (currentMode === 'live') {
|
||||
setInterval(function() {
|
||||
if (hls && !video.paused && hls.levels && hls.levels.length > 0) {
|
||||
const buffered = video.buffered;
|
||||
if (buffered.length > 0) {
|
||||
const bufferEnd = buffered.end(buffered.length - 1);
|
||||
const bufferAhead = bufferEnd - video.currentTime;
|
||||
if (bufferAhead < 4) {
|
||||
statusEl.textContent = 'Waiting for rendering...';
|
||||
statusEl.classList.remove('text-green-400');
|
||||
statusEl.classList.add('text-yellow-400');
|
||||
}
|
||||
}
|
||||
}
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
hls.loadSource(getHlsUrl());
|
||||
hls.attachMedia(video);
|
||||
} else if (video.canPlayType('application/vnd.apple.mpegurl')) {
|
||||
video.src = getHlsUrl();
|
||||
video.addEventListener('loadedmetadata', function() {
|
||||
loadingEl.classList.add('hidden');
|
||||
statusEl.textContent = 'Playing';
|
||||
video.play().catch(() => {});
|
||||
});
|
||||
} else {
|
||||
statusEl.textContent = 'HLS not supported';
|
||||
statusEl.classList.add('text-red-400');
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize with appropriate mode
|
||||
updateModeUI(currentMode);
|
||||
initHls(currentMode);
|
||||
|
||||
window.addEventListener('beforeunload', function() {
|
||||
if (hls) hls.destroy();
|
||||
});
|
||||
})();
|
||||
</script>
|
||||
{% endif %}
|
||||
|
||||
<!-- Tabs -->
|
||||
<div class="border-b border-gray-700 mb-6">
|
||||
<nav class="flex space-x-8">
|
||||
@@ -68,12 +445,57 @@
|
||||
<!-- Plan Tab -->
|
||||
<div id="tab-plan" class="tab-content">
|
||||
{% if plan %}
|
||||
<div id="dag-container" class="bg-gray-900 rounded-lg border border-gray-700 h-96 mb-4"></div>
|
||||
<div class="grid grid-cols-1 lg:grid-cols-3 gap-4 mb-4">
|
||||
<!-- DAG Visualization -->
|
||||
<div class="lg:col-span-2">
|
||||
<div id="dag-container" class="bg-gray-900 rounded-lg border border-gray-700 h-96"></div>
|
||||
</div>
|
||||
<!-- Node Detail Panel -->
|
||||
<div id="node-detail" class="bg-gray-800 rounded-lg border border-gray-700 p-4 h-96 overflow-y-auto">
|
||||
<div id="node-detail-empty" class="h-full flex items-center justify-center text-gray-500">
|
||||
Click a node to view details
|
||||
</div>
|
||||
<div id="node-detail-content" class="hidden">
|
||||
<div class="flex items-center justify-between mb-4">
|
||||
<h3 id="node-name" class="text-lg font-semibold"></h3>
|
||||
<span id="node-type" class="text-sm px-2 py-1 rounded"></span>
|
||||
</div>
|
||||
<div id="node-status" class="mb-4"></div>
|
||||
|
||||
<!-- Inputs -->
|
||||
<div id="node-inputs-section" class="mb-4">
|
||||
<h4 class="text-gray-400 text-sm mb-2">Inputs</h4>
|
||||
<div id="node-inputs" class="space-y-2"></div>
|
||||
</div>
|
||||
|
||||
<!-- Output -->
|
||||
<div id="node-output-section">
|
||||
<h4 class="text-gray-400 text-sm mb-2">Output</h4>
|
||||
<div id="node-output"></div>
|
||||
</div>
|
||||
|
||||
<!-- Config -->
|
||||
<div id="node-config-section" class="mt-4 hidden">
|
||||
<h4 class="text-gray-400 text-sm mb-2">Config</h4>
|
||||
<pre id="node-config" class="text-xs bg-gray-900 rounded p-2 overflow-x-auto"></pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Step List -->
|
||||
<div class="space-y-2">
|
||||
{% for step in plan.steps %}
|
||||
{% set step_color = 'green' if step.status == 'completed' or step.cache_id else ('purple' if step.cached else ('blue' if step.status == 'running' else 'gray')) %}
|
||||
<div class="bg-gray-800 rounded p-3">
|
||||
<div class="step-item bg-gray-800 rounded p-3 cursor-pointer hover:bg-gray-750 transition-colors"
|
||||
data-step-id="{{ step.id }}"
|
||||
data-step-name="{{ step.name }}"
|
||||
data-step-type="{{ step.type }}"
|
||||
data-step-status="{{ step.status or 'pending' }}"
|
||||
data-step-inputs="{{ step.inputs | tojson }}"
|
||||
data-step-cache-id="{{ step.cache_id or '' }}"
|
||||
data-step-config="{{ (step.config or {}) | tojson }}"
|
||||
onclick="selectStep('{{ step.id }}')">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center space-x-3">
|
||||
<span class="w-6 h-6 rounded-full bg-{{ step_color }}-600 flex items-center justify-center text-xs">
|
||||
@@ -93,36 +515,122 @@
|
||||
{% if step.cache_id %}
|
||||
<div class="mt-2 ml-9 flex items-center space-x-2">
|
||||
<span class="text-gray-500 text-xs">Output:</span>
|
||||
<a href="/cache/{{ step.cache_id }}" class="font-mono text-xs text-blue-400 hover:text-blue-300">
|
||||
{{ step.cache_id }}
|
||||
<a href="/cache/{{ step.cache_id }}" class="font-mono text-xs text-blue-400 hover:text-blue-300" onclick="event.stopPropagation()">
|
||||
{{ step.cache_id[:24] }}...
|
||||
</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if step.outputs and step.outputs | length > 1 %}
|
||||
<div class="mt-1 ml-9">
|
||||
<span class="text-gray-500 text-xs">Additional outputs:</span>
|
||||
{% for output in step.outputs %}
|
||||
{% if output != step.cache_id %}
|
||||
<a href="/cache/{{ output }}" class="block font-mono text-xs text-gray-400 hover:text-white ml-2">
|
||||
{{ output[:32] }}...
|
||||
</a>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
<!-- Plan JSON -->
|
||||
<details class="mt-6">
|
||||
<summary class="cursor-pointer text-gray-400 hover:text-white text-sm mb-2">
|
||||
Show Plan JSON
|
||||
<!-- Recipe/Plan S-expression -->
|
||||
{% if plan_sexp %}
|
||||
<details class="mt-6" open>
|
||||
<summary class="cursor-pointer text-gray-400 hover:text-white text-sm mb-2 flex items-center justify-between">
|
||||
<span>Recipe (S-expression)</span>
|
||||
{% if recipe_ipfs_cid %}
|
||||
<a href="https://ipfs.io/ipfs/{{ recipe_ipfs_cid }}"
|
||||
target="_blank"
|
||||
onclick="event.stopPropagation()"
|
||||
class="text-blue-400 hover:text-blue-300 text-xs font-mono ml-4">
|
||||
ipfs://{{ recipe_ipfs_cid[:16] }}...
|
||||
</a>
|
||||
{% endif %}
|
||||
</summary>
|
||||
<div class="bg-gray-900 rounded-lg border border-gray-700 p-4 overflow-x-auto">
|
||||
<pre class="text-sm text-gray-300 whitespace-pre-wrap">{{ plan | tojson(indent=2) }}</pre>
|
||||
<pre class="text-sm font-mono sexp-code">{{ plan_sexp }}</pre>
|
||||
</div>
|
||||
</details>
|
||||
{% endif %}
|
||||
|
||||
<style>
|
||||
.sexp-code {
|
||||
line-height: 1.6;
|
||||
}
|
||||
</style>
|
||||
<script>
|
||||
// Single-pass S-expression syntax highlighter (avoids regex corruption)
|
||||
function highlightSexp(text) {
|
||||
const SPECIAL = new Set(['plan','recipe','def','->','stream','let','lambda','if','cond','define']);
|
||||
const PRIMS = new Set(['source','effect','sequence','segment','resize','transform','layer','blend','mux','analyze','fused-pipeline']);
|
||||
function esc(s) { return s.replace(/&/g,'&').replace(/</g,'<').replace(/>/g,'>'); }
|
||||
function span(cls, s) { return '<span class="' + cls + '">' + esc(s) + '</span>'; }
|
||||
|
||||
let out = '', i = 0, len = text.length;
|
||||
while (i < len) {
|
||||
// Comments
|
||||
if (text[i] === ';' && i + 1 < len && text[i+1] === ';') {
|
||||
let end = text.indexOf('\n', i);
|
||||
if (end === -1) end = len;
|
||||
out += span('text-gray-500', text.slice(i, end));
|
||||
i = end;
|
||||
}
|
||||
// Strings
|
||||
else if (text[i] === '"') {
|
||||
let j = i + 1;
|
||||
while (j < len && text[j] !== '"') { if (text[j] === '\\') j++; j++; }
|
||||
if (j < len) j++; // closing quote
|
||||
out += span('text-green-400', text.slice(i, j));
|
||||
i = j;
|
||||
}
|
||||
// Keywords (:keyword)
|
||||
else if (text[i] === ':' && i + 1 < len && /[a-zA-Z_-]/.test(text[i+1])) {
|
||||
let j = i + 1;
|
||||
while (j < len && /[a-zA-Z0-9_-]/.test(text[j])) j++;
|
||||
out += span('text-purple-400', text.slice(i, j));
|
||||
i = j;
|
||||
}
|
||||
// Open paren - check for primitive/special after it
|
||||
else if (text[i] === '(') {
|
||||
out += span('text-yellow-500', '(');
|
||||
i++;
|
||||
// Skip whitespace after paren
|
||||
let ws = '';
|
||||
while (i < len && (text[i] === ' ' || text[i] === '\t')) { ws += text[i]; i++; }
|
||||
out += esc(ws);
|
||||
// Check if next word is a special form or primitive
|
||||
if (i < len && /[a-zA-Z_>-]/.test(text[i])) {
|
||||
let j = i;
|
||||
while (j < len && /[a-zA-Z0-9_>-]/.test(text[j])) j++;
|
||||
let word = text.slice(i, j);
|
||||
if (SPECIAL.has(word)) out += span('text-pink-400 font-semibold', word);
|
||||
else if (PRIMS.has(word)) out += span('text-blue-400', word);
|
||||
else out += esc(word);
|
||||
i = j;
|
||||
}
|
||||
}
|
||||
// Close paren
|
||||
else if (text[i] === ')') {
|
||||
out += span('text-yellow-500', ')');
|
||||
i++;
|
||||
}
|
||||
// Numbers
|
||||
else if (/[0-9]/.test(text[i]) && (i === 0 || /[\s(]/.test(text[i-1]))) {
|
||||
let j = i;
|
||||
while (j < len && /[0-9.]/.test(text[j])) j++;
|
||||
out += span('text-orange-300', text.slice(i, j));
|
||||
i = j;
|
||||
}
|
||||
// Regular text
|
||||
else {
|
||||
let j = i;
|
||||
while (j < len && !'(;":)'.includes(text[j])) {
|
||||
if (text[j] === ':' && j + 1 < len && /[a-zA-Z_-]/.test(text[j+1])) break;
|
||||
if (/[0-9]/.test(text[j]) && (j === 0 || /[\s(]/.test(text[j-1]))) break;
|
||||
j++;
|
||||
}
|
||||
if (j === i) { out += esc(text[i]); i++; } // safety: advance at least 1 char
|
||||
else { out += esc(text.slice(i, j)); i = j; }
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
document.querySelectorAll('.sexp-code').forEach(el => {
|
||||
el.innerHTML = highlightSexp(el.textContent);
|
||||
});
|
||||
</script>
|
||||
{% else %}
|
||||
<p class="text-gray-500">No plan available for this run.</p>
|
||||
{% endif %}
|
||||
@@ -133,20 +641,20 @@
|
||||
{% if artifacts %}
|
||||
<div class="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4">
|
||||
{% for artifact in artifacts %}
|
||||
<a href="/cache/{{ artifact.hash }}"
|
||||
<a href="/cache/{{ artifact.cid }}"
|
||||
class="bg-gray-800 rounded-lg p-4 hover:bg-gray-750 transition-colors">
|
||||
{% if artifact.media_type and artifact.media_type.startswith('image/') %}
|
||||
<img src="/cache/{{ artifact.hash }}/raw" alt=""
|
||||
<img src="/cache/{{ artifact.cid }}/raw" alt=""
|
||||
class="w-full h-32 object-cover rounded mb-2">
|
||||
{% elif artifact.media_type and artifact.media_type.startswith('video/') %}
|
||||
<video src="/cache/{{ artifact.hash }}/raw"
|
||||
<video src="/cache/{{ artifact.cid }}/raw"
|
||||
class="w-full h-32 object-cover rounded mb-2" muted></video>
|
||||
{% else %}
|
||||
<div class="w-full h-32 bg-gray-900 rounded mb-2 flex items-center justify-center text-gray-600">
|
||||
{{ artifact.media_type or 'Unknown' }}
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="font-mono text-xs text-gray-500 truncate">{{ artifact.hash[:16] }}...</div>
|
||||
<div class="font-mono text-xs text-gray-500 truncate">{{ artifact.cid[:16] }}...</div>
|
||||
<div class="text-sm text-gray-400">{{ artifact.step_name }}</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
@@ -164,8 +672,8 @@
|
||||
<div class="bg-gray-800 rounded-lg p-6">
|
||||
<div class="flex items-center justify-between mb-4">
|
||||
<h3 class="text-lg font-semibold">{{ item.input_name }}</h3>
|
||||
<a href="/cache/{{ item.input_hash }}" class="font-mono text-xs text-blue-400 hover:text-blue-300">
|
||||
{{ item.input_hash[:16] }}...
|
||||
<a href="/cache/{{ item.input_cid }}" class="font-mono text-xs text-blue-400 hover:text-blue-300">
|
||||
{{ item.input_cid[:16] }}...
|
||||
</a>
|
||||
</div>
|
||||
|
||||
@@ -246,7 +754,52 @@
|
||||
|
||||
<!-- Inputs Tab -->
|
||||
<div id="tab-inputs" class="tab-content hidden">
|
||||
{% if run.inputs %}
|
||||
{% if run_inputs %}
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
|
||||
{% for input in run_inputs %}
|
||||
<div class="bg-gray-800 rounded-lg overflow-hidden">
|
||||
<!-- Media Preview -->
|
||||
{% if input.media_type and input.media_type.startswith('image/') %}
|
||||
<a href="/cache/{{ input.cid }}" class="block">
|
||||
<img src="/cache/{{ input.cid }}/raw" alt="{{ input.name or 'Input' }}"
|
||||
class="w-full h-48 object-cover">
|
||||
</a>
|
||||
{% elif input.media_type and input.media_type.startswith('video/') %}
|
||||
<a href="/cache/{{ input.cid }}" class="block">
|
||||
<video src="/cache/{{ input.cid }}/raw"
|
||||
class="w-full h-48 object-cover" muted controls></video>
|
||||
</a>
|
||||
{% elif input.media_type and input.media_type.startswith('audio/') %}
|
||||
<div class="p-4 bg-gray-900">
|
||||
<audio src="/cache/{{ input.cid }}/raw" controls class="w-full"></audio>
|
||||
</div>
|
||||
{% else %}
|
||||
<a href="/cache/{{ input.cid }}" class="block">
|
||||
<div class="w-full h-48 bg-gray-900 flex items-center justify-center text-gray-600">
|
||||
<div class="text-center">
|
||||
<div class="text-4xl mb-2">📄</div>
|
||||
<div>{{ input.media_type or 'Unknown type' }}</div>
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
{% endif %}
|
||||
<!-- Info -->
|
||||
<div class="p-3">
|
||||
{% if input.name %}
|
||||
<div class="font-medium text-white mb-1">{{ input.name }}</div>
|
||||
{% endif %}
|
||||
<a href="/cache/{{ input.cid }}" class="font-mono text-xs text-blue-400 hover:text-blue-300 block truncate">
|
||||
{{ input.cid }}
|
||||
</a>
|
||||
{% if input.media_type %}
|
||||
<div class="text-xs text-gray-500 mt-1">{{ input.media_type }}</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% elif run.inputs %}
|
||||
<!-- Fallback to simple list if run_inputs not available -->
|
||||
<div class="space-y-2">
|
||||
{% for input_hash in run.inputs %}
|
||||
<a href="/cache/{{ input_hash }}"
|
||||
@@ -261,20 +814,58 @@
|
||||
</div>
|
||||
|
||||
<!-- Output -->
|
||||
{% if run.output_hash %}
|
||||
{% if run.output_cid %}
|
||||
<div class="mt-8 bg-gray-800 rounded-lg p-6">
|
||||
<h3 class="text-lg font-semibold mb-4">Output</h3>
|
||||
<div class="flex items-center justify-between">
|
||||
<a href="/cache/{{ run.output_hash }}" class="font-mono text-blue-400 hover:text-blue-300">
|
||||
{{ run.output_hash }}
|
||||
</a>
|
||||
{% if run.output_ipfs_cid %}
|
||||
<a href="https://ipfs.io/ipfs/{{ run.output_ipfs_cid }}"
|
||||
target="_blank"
|
||||
class="text-gray-400 hover:text-white text-sm">
|
||||
IPFS: {{ run.output_ipfs_cid[:16] }}...
|
||||
|
||||
{# Inline media preview - prefer IPFS URLs when available #}
|
||||
<div class="mb-4">
|
||||
{% if output_media_type and output_media_type.startswith('image/') %}
|
||||
<a href="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}{% endif %}" class="block">
|
||||
<img src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" alt="Output"
|
||||
class="max-w-full max-h-96 rounded-lg mx-auto">
|
||||
</a>
|
||||
{% elif output_media_type and output_media_type.startswith('video/') %}
|
||||
{# HLS streams use the unified player above; show direct video for non-HLS #}
|
||||
{% if run.ipfs_playlist_cid %}
|
||||
<div class="text-gray-400 text-sm py-4">
|
||||
HLS stream available in player above. Use "From Start" to watch from beginning or "Live Edge" to follow rendering progress.
|
||||
</div>
|
||||
{% else %}
|
||||
{# Direct video file #}
|
||||
<video src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" controls
|
||||
class="max-w-full max-h-96 rounded-lg mx-auto"></video>
|
||||
{% endif %}
|
||||
{% elif output_media_type and output_media_type.startswith('audio/') %}
|
||||
<audio src="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}/raw{% endif %}" controls class="w-full"></audio>
|
||||
{% else %}
|
||||
<div class="bg-gray-900 rounded-lg p-8 text-center text-gray-500">
|
||||
<div class="text-4xl mb-2">?</div>
|
||||
<div>{{ output_media_type or 'Unknown media type' }}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-between">
|
||||
<a href="{% if run.ipfs_cid %}/ipfs/{{ run.ipfs_cid }}{% else %}/cache/{{ run.output_cid }}{% endif %}"
|
||||
class="font-mono text-sm text-blue-400 hover:text-blue-300">
|
||||
{% if run.ipfs_cid %}{{ run.ipfs_cid }}{% else %}{{ run.output_cid }}{% endif %}
|
||||
</a>
|
||||
<div class="flex items-center space-x-4">
|
||||
{% if run.ipfs_playlist_cid %}
|
||||
<a href="/ipfs/{{ run.ipfs_playlist_cid }}"
|
||||
class="text-gray-400 hover:text-white text-sm">
|
||||
HLS Playlist
|
||||
</a>
|
||||
{% endif %}
|
||||
{% if run.ipfs_cid %}
|
||||
<a href="https://ipfs.io/ipfs/{{ run.ipfs_cid }}"
|
||||
target="_blank"
|
||||
class="text-gray-400 hover:text-white text-sm">
|
||||
View on IPFS Gateway
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
@@ -293,9 +884,140 @@ function showTab(name) {
|
||||
}
|
||||
|
||||
{% if plan %}
|
||||
// Store step data for quick lookup
|
||||
const stepData = {};
|
||||
document.querySelectorAll('.step-item').forEach(el => {
|
||||
const id = el.dataset.stepId;
|
||||
stepData[id] = {
|
||||
id: id,
|
||||
name: el.dataset.stepName,
|
||||
type: el.dataset.stepType,
|
||||
status: el.dataset.stepStatus,
|
||||
inputs: JSON.parse(el.dataset.stepInputs || '[]'),
|
||||
cacheId: el.dataset.stepCacheId,
|
||||
config: JSON.parse(el.dataset.stepConfig || '{}')
|
||||
};
|
||||
});
|
||||
console.log('stepData loaded:', Object.keys(stepData).length, 'steps');
|
||||
console.log('dag_elements:', {{ dag_elements | tojson }});
|
||||
|
||||
let cy = null;
|
||||
let selectedNode = null;
|
||||
|
||||
function selectStep(stepId) {
|
||||
// Update URL hash
|
||||
history.pushState(null, '', '#node-' + stepId);
|
||||
showNodeDetail(stepId);
|
||||
}
|
||||
|
||||
function showNodeDetail(stepId) {
|
||||
const step = stepData[stepId];
|
||||
if (!step) return;
|
||||
|
||||
selectedNode = stepId;
|
||||
|
||||
// Update step list selection
|
||||
document.querySelectorAll('.step-item').forEach(el => {
|
||||
el.classList.remove('ring-2', 'ring-blue-500');
|
||||
if (el.dataset.stepId === stepId) {
|
||||
el.classList.add('ring-2', 'ring-blue-500');
|
||||
}
|
||||
});
|
||||
|
||||
// Update cytoscape selection
|
||||
if (cy) {
|
||||
cy.nodes().removeClass('selected');
|
||||
cy.nodes().style('border-width', 0);
|
||||
const node = cy.$('#' + stepId);
|
||||
if (node.length) {
|
||||
node.style('border-width', 3);
|
||||
node.style('border-color', '#3b82f6');
|
||||
}
|
||||
}
|
||||
|
||||
// Show detail panel
|
||||
document.getElementById('node-detail-empty').classList.add('hidden');
|
||||
document.getElementById('node-detail-content').classList.remove('hidden');
|
||||
|
||||
// Populate node info
|
||||
document.getElementById('node-name').textContent = step.name;
|
||||
|
||||
const typeEl = document.getElementById('node-type');
|
||||
typeEl.textContent = step.type;
|
||||
const typeColors = {
|
||||
'SOURCE': 'bg-blue-600',
|
||||
'EFFECT': 'bg-purple-600',
|
||||
'SEQUENCE': 'bg-pink-600',
|
||||
'transform': 'bg-green-600',
|
||||
'output': 'bg-yellow-600'
|
||||
};
|
||||
typeEl.className = 'text-sm px-2 py-1 rounded ' + (typeColors[step.type] || 'bg-gray-600');
|
||||
|
||||
// Status
|
||||
const statusEl = document.getElementById('node-status');
|
||||
const statusColors = {
|
||||
'completed': 'text-green-400',
|
||||
'running': 'text-blue-400',
|
||||
'pending': 'text-gray-400',
|
||||
'cached': 'text-purple-400'
|
||||
};
|
||||
statusEl.innerHTML = `<span class="${statusColors[step.status] || 'text-gray-400'}">${step.status}</span>`;
|
||||
|
||||
// Inputs
|
||||
const inputsEl = document.getElementById('node-inputs');
|
||||
if (step.inputs.length > 0) {
|
||||
document.getElementById('node-inputs-section').classList.remove('hidden');
|
||||
inputsEl.innerHTML = step.inputs.map(inp => {
|
||||
const inputStep = stepData[inp];
|
||||
const inputCacheId = inputStep ? inputStep.cacheId : '';
|
||||
if (inputCacheId) {
|
||||
return `<div class="bg-gray-900 rounded p-2">
|
||||
<div class="text-sm text-gray-300 cursor-pointer hover:text-blue-400" onclick="selectStep('${inp}')">${inp}</div>
|
||||
<a href="/cache/${inputCacheId}" class="font-mono text-xs text-blue-400 hover:text-blue-300 block mt-1" onclick="event.stopPropagation()">
|
||||
${inputCacheId.substring(0, 24)}...
|
||||
</a>
|
||||
</div>`;
|
||||
} else {
|
||||
return `<div class="bg-gray-900 rounded p-2">
|
||||
<div class="text-sm text-gray-300 cursor-pointer hover:text-blue-400" onclick="selectStep('${inp}')">${inp}</div>
|
||||
<span class="text-xs text-gray-500">No cached output</span>
|
||||
</div>`;
|
||||
}
|
||||
}).join('');
|
||||
} else {
|
||||
document.getElementById('node-inputs-section').classList.add('hidden');
|
||||
}
|
||||
|
||||
// Output
|
||||
const outputEl = document.getElementById('node-output');
|
||||
if (step.cacheId) {
|
||||
outputEl.innerHTML = `
|
||||
<div class="bg-gray-900 rounded p-2">
|
||||
<a href="/cache/${step.cacheId}" class="font-mono text-xs text-blue-400 hover:text-blue-300 block">
|
||||
${step.cacheId}
|
||||
</a>
|
||||
<a href="/cache/${step.cacheId}/raw" target="_blank" class="text-xs text-gray-500 hover:text-gray-300 mt-1 inline-block">
|
||||
View raw
|
||||
</a>
|
||||
</div>`;
|
||||
} else {
|
||||
outputEl.innerHTML = '<span class="text-gray-500 text-sm">No output yet</span>';
|
||||
}
|
||||
|
||||
// Config
|
||||
const configSection = document.getElementById('node-config-section');
|
||||
const configEl = document.getElementById('node-config');
|
||||
if (step.config && Object.keys(step.config).length > 0) {
|
||||
configSection.classList.remove('hidden');
|
||||
configEl.textContent = JSON.stringify(step.config, null, 2);
|
||||
} else {
|
||||
configSection.classList.add('hidden');
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize DAG
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
const cy = cytoscape({
|
||||
cy = cytoscape({
|
||||
container: document.getElementById('dag-container'),
|
||||
style: [
|
||||
{ selector: 'node', style: {
|
||||
@@ -305,7 +1027,8 @@ document.addEventListener('DOMContentLoaded', function() {
|
||||
'text-valign': 'center',
|
||||
'font-size': '10px',
|
||||
'width': 40,
|
||||
'height': 40
|
||||
'height': 40,
|
||||
'border-width': 0
|
||||
}},
|
||||
{ selector: 'edge', style: {
|
||||
'width': 2,
|
||||
@@ -318,6 +1041,32 @@ document.addEventListener('DOMContentLoaded', function() {
|
||||
elements: {{ dag_elements | tojson }},
|
||||
layout: { name: 'dagre', rankDir: 'LR', padding: 30 }
|
||||
});
|
||||
|
||||
// Node click handler
|
||||
cy.on('tap', 'node', function(evt) {
|
||||
const nodeId = evt.target.id();
|
||||
selectStep(nodeId);
|
||||
});
|
||||
|
||||
// Handle initial hash
|
||||
const hash = window.location.hash;
|
||||
if (hash && hash.startsWith('#node-')) {
|
||||
const nodeId = hash.substring(6);
|
||||
if (stepData[nodeId]) {
|
||||
showNodeDetail(nodeId);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Handle hash changes
|
||||
window.addEventListener('hashchange', function() {
|
||||
const hash = window.location.hash;
|
||||
if (hash && hash.startsWith('#node-')) {
|
||||
const nodeId = hash.substring(6);
|
||||
if (stepData[nodeId]) {
|
||||
showNodeDetail(nodeId);
|
||||
}
|
||||
}
|
||||
});
|
||||
{% endif %}
|
||||
</script>
|
||||
|
||||
99
app/templates/runs/plan.html
Normal file
99
app/templates/runs/plan.html
Normal file
@@ -0,0 +1,99 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Run Plan - {{ run_id[:16] }}{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<script src="https://unpkg.com/cytoscape@3.25.0/dist/cytoscape.min.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
{# Back link to the parent run's detail page #}
<div class="mb-6">
<a href="/runs/{{ run_id }}/detail" class="inline-flex items-center text-blue-400 hover:text-blue-300">
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 19l-7-7 7-7"/>
</svg>
Back to Run
</a>
</div>

<h1 class="text-2xl font-bold text-white mb-6">Execution Plan</h1>

{% if plan %}
<div class="grid grid-cols-1 lg:grid-cols-2 gap-6">
<!-- DAG Visualization -->
<div class="bg-gray-800 rounded-lg p-4">
<h2 class="text-lg font-semibold text-white mb-4">DAG Visualization</h2>
{# Cytoscape renders into this container; see the script block below #}
<div id="dag-container" class="w-full h-96 bg-gray-900 rounded"></div>
</div>

<!-- Steps List -->
<div class="bg-gray-800 rounded-lg p-4">
<h2 class="text-lg font-semibold text-white mb-4">Steps ({{ plan.steps|length if plan.steps else 0 }})</h2>
<div class="space-y-3 max-h-96 overflow-y-auto">
{# Jinja for/else: the else branch renders only when the steps list is empty #}
{% for step in plan.get('steps', []) %}
<div class="bg-gray-900 rounded-lg p-3">
<div class="flex items-center justify-between mb-2">
<span class="font-medium text-white">{{ step.name or step.id or 'Step ' ~ loop.index }}</span>
{# Status badge: green = completed, blue = served from cache, grey = pending #}
<span class="px-2 py-0.5 text-xs rounded {% if step.status == 'completed' %}bg-green-600{% elif step.cached %}bg-blue-600{% else %}bg-gray-600{% endif %}">
{{ step.status or ('cached' if step.cached else 'pending') }}
</span>
</div>
{% if step.cache_id %}
<div class="text-xs text-gray-400 font-mono truncate">
{{ step.cache_id[:24] }}...
</div>
{% endif %}
</div>
{% else %}
<p class="text-gray-500">No steps defined</p>
{% endfor %}
</div>
</div>
</div>

<script>
// Render the execution DAG with Cytoscape once the DOM is ready.
// NOTE(review): assumes the `cytoscape` global is loaded by the base template — confirm.
document.addEventListener('DOMContentLoaded', function() {
const elements = {{ dag_elements | tojson | safe }};

if (elements.length > 0) {
cytoscape({
container: document.getElementById('dag-container'),
elements: elements,
style: [
{
selector: 'node',
style: {
'background-color': 'data(color)',
'label': 'data(label)',
'color': '#fff',
'text-valign': 'bottom',
'text-margin-y': 5,
'font-size': '10px'
}
},
{
selector: 'edge',
style: {
'width': 2,
'line-color': '#6b7280',
'target-arrow-color': '#6b7280',
'target-arrow-shape': 'triangle',
'curve-style': 'bezier'
}
}
],
layout: {
name: 'breadthfirst',
directed: true,
padding: 20
}
});
}
});
</script>
{% else %}
<div class="bg-gray-800 rounded-lg p-6 text-center">
<p class="text-gray-400">No execution plan available for this run.</p>
</div>
{% endif %}
{% endblock %}
|
||||
99
app/templates/runs/plan_node.html
Normal file
99
app/templates/runs/plan_node.html
Normal file
@@ -0,0 +1,99 @@
|
||||
{# Plan node detail panel - loaded via HTMX #}
{# Context variables: step, status, node_color, cache_id, ipfs_cid,
   ipfs_gateway, output_preview, output_media_type, has_cached, inputs,
   config. NOTE(review): inferred from usage below — confirm against the
   route handler that renders this partial. #}
{% set status_color = 'green' if status in ('cached', 'completed') else 'yellow' %}

<div class="flex justify-between items-start mb-4">
<div>
<h4 class="text-lg font-semibold text-white">{{ step.name or step.step_id[:20] }}</h4>
<div class="flex items-center gap-2 mt-1">
<span class="px-2 py-0.5 rounded text-xs text-white" style="background-color: {{ node_color }}">
{{ step.node_type or 'EFFECT' }}
</span>
<span class="text-{{ status_color }}-400 text-xs">{{ status }}</span>
<span class="text-gray-500 text-xs">Level {{ step.level or 0 }}</span>
</div>
</div>
{# closeNodeDetail() is provided by the page that loads this panel #}
<button onclick="closeNodeDetail()" class="text-gray-400 hover:text-white p-1">
<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
</svg>
</button>
</div>

{# Output preview #}
{% if output_preview %}
<div class="mb-4">
<h5 class="text-sm font-medium text-gray-400 mb-2">Output</h5>
{% if output_media_type == 'video' %}
<video src="/cache/{{ cache_id }}/raw" controls muted class="w-full max-h-48 rounded-lg"></video>
{% elif output_media_type == 'image' %}
<img src="/cache/{{ cache_id }}/raw" class="w-full max-h-48 rounded-lg object-contain">
{% elif output_media_type == 'audio' %}
<audio src="/cache/{{ cache_id }}/raw" controls class="w-full"></audio>
{% endif %}
</div>
{% elif ipfs_cid %}
{# No local preview: fall back to streaming the output from the IPFS gateway #}
<div class="mb-4">
<h5 class="text-sm font-medium text-gray-400 mb-2">Output (IPFS)</h5>
<video src="{{ ipfs_gateway }}/{{ ipfs_cid }}" controls muted class="w-full max-h-48 rounded-lg"></video>
</div>
{% endif %}

{# Output link: prefer the IPFS link, else the local cache entry #}
{% if ipfs_cid %}
<a href="/ipfs/{{ ipfs_cid }}" class="flex items-center justify-between bg-gray-800 rounded p-2 hover:bg-gray-700 transition-colors text-xs mb-4">
<span class="font-mono text-gray-300 truncate">{{ ipfs_cid[:24] }}...</span>
<span class="px-2 py-1 bg-blue-600 text-white rounded ml-2">View</span>
</a>
{% elif has_cached and cache_id %}
<a href="/cache/{{ cache_id }}" class="flex items-center justify-between bg-gray-800 rounded p-2 hover:bg-gray-700 transition-colors text-xs mb-4">
<span class="font-mono text-gray-300 truncate">{{ cache_id[:24] }}...</span>
<span class="px-2 py-1 bg-blue-600 text-white rounded ml-2">View</span>
</a>
{% endif %}

{# Input media previews #}
{% if inputs %}
<div class="mt-4">
<h5 class="text-sm font-medium text-gray-400 mb-2">Inputs ({{ inputs|length }})</h5>
<div class="grid grid-cols-2 gap-2">
{% for inp in inputs %}
<a href="/cache/{{ inp.cache_id }}" class="block bg-gray-800 rounded-lg overflow-hidden hover:bg-gray-700 transition-colors">
{% if inp.media_type == 'video' %}
<video src="/cache/{{ inp.cache_id }}/raw" class="w-full h-20 object-cover rounded-t" muted></video>
{% elif inp.media_type == 'image' %}
<img src="/cache/{{ inp.cache_id }}/raw" class="w-full h-20 object-cover rounded-t">
{% else %}
<div class="w-full h-20 bg-gray-700 rounded-t flex items-center justify-center text-xs text-gray-400">
{{ inp.media_type or 'File' }}
</div>
{% endif %}
<div class="p-2">
<div class="text-xs text-white truncate">{{ inp.name }}</div>
<div class="text-xs text-gray-500 font-mono truncate">{{ inp.cache_id[:12] }}...</div>
</div>
</a>
{% endfor %}
</div>
</div>
{% endif %}

{# Parameters/Config #}
{% if config %}
<div class="mt-4">
<h5 class="text-sm font-medium text-gray-400 mb-2">Parameters</h5>
<div class="bg-gray-800 rounded p-3 text-xs space-y-1">
{% for key, value in config.items() %}
<div class="flex justify-between">
<span class="text-gray-400">{{ key }}:</span>
<span class="text-white">{{ value if value is string else value|tojson }}</span>
</div>
{% endfor %}
</div>
</div>
{% endif %}

{# Metadata #}
{# NOTE(review): step.step_id / cache_id are sliced unconditionally here;
   a missing cache_id would raise at render time — confirm the handler
   always supplies both. #}
<div class="mt-4 text-xs text-gray-500 space-y-1">
<div><span class="text-gray-400">Step ID:</span> <span class="font-mono">{{ step.step_id[:32] }}...</span></div>
<div><span class="text-gray-400">Cache ID:</span> <span class="font-mono">{{ cache_id[:32] }}...</span></div>
</div>
|
||||
197
app/types.py
Normal file
197
app/types.py
Normal file
@@ -0,0 +1,197 @@
|
||||
"""
|
||||
Type definitions for Art DAG L1 server.
|
||||
|
||||
Uses TypedDict for configuration structures to enable mypy checking.
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List, Optional, TypedDict, Union
|
||||
from typing_extensions import NotRequired
|
||||
|
||||
|
||||
# === Node Config Types ===
|
||||
|
||||
class SourceConfig(TypedDict, total=False):
    """Config for SOURCE nodes (every key optional, total=False)."""
    cid: str  # Content ID (IPFS CID or SHA3-256 hash)
    asset: str  # Asset name from registry
    input: bool  # True if this is a variable input
    name: str  # Human-readable name for variable inputs
    description: str  # Description for variable inputs


class EffectConfig(TypedDict, total=False):
    """Config for EFFECT nodes (every key optional, total=False)."""
    effect: str  # Effect name
    cid: str  # Effect CID (for cached/IPFS effects)
    # Effect parameters are additional keys; intensity/level are just the
    # two modelled here — real configs may carry arbitrary extras.
    intensity: float
    level: float


class SequenceConfig(TypedDict, total=False):
    """Config for SEQUENCE nodes."""
    transition: Dict[str, Any]  # Transition config between sequenced clips


class SegmentConfig(TypedDict, total=False):
    """Config for SEGMENT nodes (trim window, values in seconds presumably).

    NOTE(review): units of start/end/duration are not stated anywhere in
    this file — confirm they are seconds before relying on that.
    """
    start: float
    end: float
    duration: float


# Union of all config types; the trailing Dict[str, Any] keeps arbitrary
# configs valid since effect parameters are open-ended.
NodeConfig = Union[SourceConfig, EffectConfig, SequenceConfig, SegmentConfig, Dict[str, Any]]
|
||||
|
||||
|
||||
# === Node Types ===
|
||||
|
||||
class CompiledNode(TypedDict):
    """Node as produced by the S-expression compiler."""
    id: str  # Unique node identifier within the DAG
    type: str  # "SOURCE", "EFFECT", "SEQUENCE", etc.
    config: Dict[str, Any]  # Node-type-specific configuration
    inputs: List[str]  # ids of upstream nodes this node consumes
    name: NotRequired[str]  # Optional human-readable name


class TransformedNode(TypedDict):
    """Node after transformation for artdag execution.

    Same shape as CompiledNode but with the keys renamed to
    node_id/node_type.
    """
    node_id: str  # Unique node identifier within the DAG
    node_type: str  # "SOURCE", "EFFECT", etc. (see is_source_node/is_effect_node)
    config: Dict[str, Any]  # Node-type-specific configuration
    inputs: List[str]  # node_ids of upstream nodes
    name: NotRequired[str]  # Optional human-readable name
|
||||
|
||||
|
||||
# === DAG Types ===
|
||||
|
||||
class CompiledDAG(TypedDict):
    """DAG as produced by the S-expression compiler."""
    nodes: List[CompiledNode]  # Flat list; edges are implied by each node's inputs
    output: str  # id of the node whose result is the DAG's final output


class TransformedDAG(TypedDict):
    """DAG after transformation for artdag execution."""
    nodes: Dict[str, TransformedNode]  # Keyed by node_id for direct lookup
    output_id: str  # node_id of the final output node
    metadata: NotRequired[Dict[str, Any]]  # Optional free-form extras
|
||||
|
||||
|
||||
# === Registry Types ===
|
||||
|
||||
class AssetEntry(TypedDict, total=False):
    """Asset in the recipe registry (keys optional)."""
    cid: str  # Content ID of the asset
    url: str  # Fetch URL for the asset


class EffectEntry(TypedDict, total=False):
    """Effect in the recipe registry (keys optional)."""
    cid: str  # Content ID of the effect
    url: str  # Fetch URL for the effect
    temporal: bool  # NOTE(review): presumably marks time-dependent effects — confirm


class Registry(TypedDict):
    """Recipe registry containing assets and effects, keyed by name
    (SourceConfig.asset / EffectConfig.effect reference these keys)."""
    assets: Dict[str, AssetEntry]
    effects: Dict[str, EffectEntry]
|
||||
|
||||
|
||||
# === Visualization Types ===
|
||||
|
||||
class VisNodeData(TypedDict, total=False):
    """Data for a visualization node (Cytoscape.js format)."""
    id: str  # Unique element id
    label: str  # Text rendered for the node
    nodeType: str  # DAG node type (camelCase for the JS consumer)
    isOutput: bool  # True for the DAG's final output node


class VisNode(TypedDict):
    """Visualization node wrapper ({"data": {...}} as Cytoscape expects)."""
    data: VisNodeData


class VisEdgeData(TypedDict):
    """Data for a visualization edge."""
    source: str  # id of the upstream node
    target: str  # id of the downstream node


class VisEdge(TypedDict):
    """Visualization edge wrapper ({"data": {...}} as Cytoscape expects)."""
    data: VisEdgeData


class VisualizationDAG(TypedDict):
    """DAG structure for Cytoscape.js visualization."""
    nodes: List[VisNode]
    edges: List[VisEdge]
|
||||
|
||||
|
||||
# === Recipe Types ===
|
||||
|
||||
class Recipe(TypedDict, total=False):
    """Compiled recipe structure (every key optional, total=False)."""
    name: str
    version: str
    description: str
    owner: str
    registry: Registry  # Assets/effects referenced by the DAG
    dag: CompiledDAG  # Compiled execution DAG
    recipe_id: str
    ipfs_cid: str  # CID of the published recipe on IPFS
    sexp: str  # Original S-expression source
    step_count: int
    error: str  # NOTE(review): presumably set on compilation failure — confirm
|
||||
|
||||
|
||||
# === API Request/Response Types ===
|
||||
|
||||
class RecipeRunInputs(Dict[str, str]):
    """Mapping of input names to CIDs for recipe execution.

    The original definition was an *empty* ``TypedDict`` (just ``pass``),
    which statically admits no keys at all — while its own comment said it
    is "actually just Dict[str, str]".  Subclassing ``Dict[str, str]``
    keeps the runtime behaviour identical (a plain ``dict``; the class is
    still callable like the TypedDict constructor was) and makes the
    static type honest: string keys (input names) to string values (CIDs).
    """
|
||||
|
||||
|
||||
class RunResult(TypedDict, total=False):
    """Result of a recipe run (every key optional, total=False)."""
    run_id: str
    status: str  # "pending", "running", "completed", "failed"
    recipe: str
    recipe_name: str
    inputs: List[str]
    output_cid: str
    ipfs_cid: str  # CID of the published output on IPFS
    provenance_cid: str
    error: str  # NOTE(review): presumably set when status == "failed" — confirm
    created_at: str  # Timestamp string; format not stated here — confirm ISO-8601
    completed_at: str
    actor_id: str
    celery_task_id: str  # Celery task executing this run
    output_name: str
||||
|
||||
|
||||
# === Helper functions for type narrowing ===
|
||||
|
||||
def is_source_node(node: TransformedNode) -> bool:
    """Return True when *node*'s node_type marks it as a SOURCE node."""
    node_type = node.get("node_type")
    return node_type == "SOURCE"
|
||||
|
||||
|
||||
def is_effect_node(node: TransformedNode) -> bool:
    """Return True when *node*'s node_type marks it as an EFFECT node."""
    node_type = node.get("node_type")
    return node_type == "EFFECT"
|
||||
|
||||
|
||||
def is_variable_input(config: Dict[str, Any]) -> bool:
    """Return True when a SOURCE node config represents a variable input.

    A config is a variable input iff its ``input`` key is truthy.
    """
    flag = config.get("input")
    return bool(flag)
|
||||
|
||||
|
||||
def get_effect_cid(config: Dict[str, Any]) -> Optional[str]:
    """Get the effect CID from *config*.

    The ``cid`` key wins when it holds a truthy value; otherwise whatever
    the legacy ``hash`` key holds is returned (possibly None).
    """
    primary = config.get("cid")
    if primary:
        return primary
    return config.get("hash")
|
||||
0
app/utils/__init__.py
Normal file
0
app/utils/__init__.py
Normal file
84
app/utils/http_signatures.py
Normal file
84
app/utils/http_signatures.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""HTTP Signature verification for incoming AP-style inbox requests.
|
||||
|
||||
Implements the same RSA-SHA256 / PKCS1v15 scheme used by the coop's
|
||||
shared/utils/http_signatures.py, but only the verification side.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import re
|
||||
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import padding
|
||||
|
||||
|
||||
def verify_request_signature(
    public_key_pem: str,
    signature_header: str,
    method: str,
    path: str,
    headers: dict[str, str],
) -> bool:
    """Verify an incoming HTTP Signature (RSA-SHA256 / PKCS1v15).

    Args:
        public_key_pem: PEM-encoded public key of the sender.
        signature_header: Value of the ``Signature`` header.
        method: HTTP method (GET, POST, etc.).
        path: Request path (e.g. ``/inbox``).
        headers: All request headers (matched case-insensitively).

    Returns:
        True if the signature is valid, False otherwise.  Verification
        failures — including a malformed key PEM, missing signature, or
        bad base64 — return False instead of raising.  (Previously a
        malformed PEM escaped as an exception because the key was loaded
        outside the try block, violating the bool contract.)
    """
    parts = _parse_signature_header(signature_header)
    signed_headers = parts.get("headers", "date").split()
    signature_b64 = parts.get("signature", "")
    if not signature_b64:
        return False  # no signature component present — nothing to verify

    # Reconstruct the signed string exactly as the sender built it:
    # one "name: value" line per signed header, with the pseudo-header
    # (request-target) expanded to "method path".
    lc_headers = {k.lower(): v for k, v in headers.items()}
    lines: list[str] = []
    for h in signed_headers:
        if h == "(request-target)":
            lines.append(f"(request-target): {method.lower()} {path}")
        else:
            lines.append(f"{h}: {lc_headers.get(h, '')}")

    signed_string = "\n".join(lines)

    try:
        # Key loading is inside the try so a bad PEM yields False, not a raise.
        public_key = serialization.load_pem_public_key(public_key_pem.encode())
        public_key.verify(
            base64.b64decode(signature_b64),
            signed_string.encode(),
            padding.PKCS1v15(),
            hashes.SHA256(),
        )
        return True
    except Exception:
        return False
|
||||
|
||||
|
||||
def parse_key_id(signature_header: str) -> str:
    """Return the actor URL taken from a Signature header's keyId.

    keyId is typically ``https://domain/users/username#main-key``;
    the fragment (``#main-key``) is stripped before returning.  Returns
    an empty string when no keyId component is present.
    """
    fields = _parse_signature_header(signature_header)
    return re.sub(r"#.*$", "", fields.get("keyId", ""))
|
||||
|
||||
|
||||
def _parse_signature_header(header: str) -> dict[str, str]:
|
||||
"""Parse a Signature header into its component parts."""
|
||||
parts: dict[str, str] = {}
|
||||
for part in header.split(","):
|
||||
part = part.strip()
|
||||
eq = part.find("=")
|
||||
if eq < 0:
|
||||
continue
|
||||
key = part[:eq]
|
||||
val = part[eq + 1:].strip('"')
|
||||
parts[key] = val
|
||||
return parts
|
||||
37
build-client.sh
Executable file
37
build-client.sh
Executable file
@@ -0,0 +1,37 @@
|
||||
#!/bin/bash
# Build the artdag-client tarball.
# Run during deployment to create the downloadable client package
# (artdag-client.tar.gz placed next to this script).

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CLIENT_REPO="https://git.rose-ash.com/art-dag/client.git"
TEMP_DIR=$(mktemp -d)
OUTPUT_FILE="$SCRIPT_DIR/artdag-client.tar.gz"

# Always remove the temp dir, even when a later step fails under `set -e`.
# (Previously a failing `cd`/`tar` leaked the mktemp directory.)
trap 'rm -rf "$TEMP_DIR"' EXIT

echo "Building artdag-client.tar.gz..."

# Shallow-clone the client repo; fall back to the GitHub mirror.
if ! git clone --depth 1 "$CLIENT_REPO" "$TEMP_DIR/artdag-client" 2>/dev/null; then
    echo "Failed to clone client repo, trying alternative..."
    if ! git clone --depth 1 "https://github.com/gilesbradshaw/art-client.git" "$TEMP_DIR/artdag-client" 2>/dev/null; then
        echo "Error: Could not clone client repository"
        exit 1
    fi
fi

# Strip VCS metadata and Python caches from the package.
rm -rf "$TEMP_DIR/artdag-client/.git"
rm -rf "$TEMP_DIR/artdag-client/__pycache__"

# Create the tarball; cd first so the archive root is 'artdag-client/'.
cd "$TEMP_DIR"
tar -czf "$OUTPUT_FILE" artdag-client

echo "Created: $OUTPUT_FILE"
ls -lh "$OUTPUT_FILE"
|
||||
614
cache_manager.py
614
cache_manager.py
@@ -3,7 +3,7 @@
|
||||
Cache management for Art DAG L1 server.
|
||||
|
||||
Integrates artdag's Cache, ActivityStore, and ActivityManager to provide:
|
||||
- Content-addressed caching with both node_id and content_hash
|
||||
- Content-addressed caching with both node_id and cid
|
||||
- Activity tracking for runs (input/output/intermediate relationships)
|
||||
- Deletion rules enforcement (shared items protected)
|
||||
- L2 ActivityPub integration for "shared" status checks
|
||||
@@ -35,7 +35,7 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def file_hash(path: Path, algorithm: str = "sha3_256") -> str:
|
||||
"""Compute SHA3-256 hash of a file."""
|
||||
"""Compute local content hash (fallback when IPFS unavailable)."""
|
||||
hasher = hashlib.new(algorithm)
|
||||
actual_path = path.resolve() if path.is_symlink() else path
|
||||
with open(actual_path, "rb") as f:
|
||||
@@ -51,10 +51,10 @@ class CachedFile:
|
||||
|
||||
Provides a unified view combining:
|
||||
- node_id: computation identity (for DAG caching)
|
||||
- content_hash: file content identity (for external references)
|
||||
- cid: file content identity (for external references)
|
||||
"""
|
||||
node_id: str
|
||||
content_hash: str
|
||||
cid: str
|
||||
path: Path
|
||||
size_bytes: int
|
||||
node_type: str
|
||||
@@ -64,7 +64,7 @@ class CachedFile:
|
||||
def from_cache_entry(cls, entry: CacheEntry) -> "CachedFile":
|
||||
return cls(
|
||||
node_id=entry.node_id,
|
||||
content_hash=entry.content_hash,
|
||||
cid=entry.cid,
|
||||
path=entry.output_path,
|
||||
size_bytes=entry.size_bytes,
|
||||
node_type=entry.node_type,
|
||||
@@ -84,41 +84,41 @@ class L2SharedChecker:
|
||||
self.cache_ttl = cache_ttl
|
||||
self._cache: Dict[str, tuple[bool, float]] = {}
|
||||
|
||||
def is_shared(self, content_hash: str) -> bool:
|
||||
"""Check if content_hash has been published to L2."""
|
||||
def is_shared(self, cid: str) -> bool:
|
||||
"""Check if cid has been published to L2."""
|
||||
import time
|
||||
now = time.time()
|
||||
|
||||
# Check cache
|
||||
if content_hash in self._cache:
|
||||
is_shared, cached_at = self._cache[content_hash]
|
||||
if cid in self._cache:
|
||||
is_shared, cached_at = self._cache[cid]
|
||||
if now - cached_at < self.cache_ttl:
|
||||
logger.debug(f"L2 check (cached): {content_hash[:16]}... = {is_shared}")
|
||||
logger.debug(f"L2 check (cached): {cid[:16]}... = {is_shared}")
|
||||
return is_shared
|
||||
|
||||
# Query L2
|
||||
try:
|
||||
url = f"{self.l2_server}/assets/by-hash/{content_hash}"
|
||||
url = f"{self.l2_server}/assets/by-hash/{cid}"
|
||||
logger.info(f"L2 check: GET {url}")
|
||||
resp = requests.get(url, timeout=5)
|
||||
logger.info(f"L2 check response: {resp.status_code}")
|
||||
is_shared = resp.status_code == 200
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to check L2 for {content_hash}: {e}")
|
||||
logger.warning(f"Failed to check L2 for {cid}: {e}")
|
||||
# On error, assume IS shared (safer - prevents accidental deletion)
|
||||
is_shared = True
|
||||
|
||||
self._cache[content_hash] = (is_shared, now)
|
||||
self._cache[cid] = (is_shared, now)
|
||||
return is_shared
|
||||
|
||||
def invalidate(self, content_hash: str):
|
||||
"""Invalidate cache for a content_hash (call after publishing)."""
|
||||
self._cache.pop(content_hash, None)
|
||||
def invalidate(self, cid: str):
|
||||
"""Invalidate cache for a cid (call after publishing)."""
|
||||
self._cache.pop(cid, None)
|
||||
|
||||
def mark_shared(self, content_hash: str):
|
||||
def mark_shared(self, cid: str):
|
||||
"""Mark as shared without querying (call after successful publish)."""
|
||||
import time
|
||||
self._cache[content_hash] = (True, time.time())
|
||||
self._cache[cid] = (True, time.time())
|
||||
|
||||
|
||||
class L1CacheManager:
|
||||
@@ -131,7 +131,7 @@ class L1CacheManager:
|
||||
- ActivityManager for deletion rules
|
||||
- L2 integration for shared status
|
||||
|
||||
Provides both node_id and content_hash based access.
|
||||
Provides both node_id and cid based access.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@@ -162,202 +162,176 @@ class L1CacheManager:
|
||||
is_shared_fn=self._is_shared_by_node_id,
|
||||
)
|
||||
|
||||
# Content hash index: content_hash -> node_id
|
||||
# Uses Redis if available, falls back to in-memory dict
|
||||
self._content_index: Dict[str, str] = {}
|
||||
self._load_content_index()
|
||||
|
||||
# IPFS CID index: content_hash -> ipfs_cid
|
||||
self._ipfs_cids: Dict[str, str] = {}
|
||||
self._load_ipfs_index()
|
||||
|
||||
# Legacy files directory (for files uploaded directly by content_hash)
|
||||
# Legacy files directory (for files uploaded directly by cid)
|
||||
self.legacy_dir = self.cache_dir / "legacy"
|
||||
self.legacy_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _index_path(self) -> Path:
|
||||
return self.cache_dir / "content_index.json"
|
||||
# ============ Redis Index (no JSON files) ============
|
||||
#
|
||||
# Content index maps: CID (content hash or IPFS CID) -> node_id (code hash)
|
||||
# IPFS index maps: node_id -> IPFS CID
|
||||
#
|
||||
# Database is the ONLY source of truth for cache_id -> ipfs_cid mapping.
|
||||
# No fallbacks - failures raise exceptions.
|
||||
|
||||
def _load_content_index(self):
|
||||
"""Load content_hash -> node_id index from Redis or JSON file."""
|
||||
# If Redis available and has data, use it
|
||||
def _run_async(self, coro):
|
||||
"""Run async coroutine from sync context.
|
||||
|
||||
Always creates a fresh event loop to avoid issues with Celery's
|
||||
prefork workers where loops may be closed by previous tasks.
|
||||
"""
|
||||
import asyncio
|
||||
|
||||
# Check if we're already in an async context
|
||||
try:
|
||||
asyncio.get_running_loop()
|
||||
# We're in an async context - use a thread with its own loop
|
||||
import threading
|
||||
result = [None]
|
||||
error = [None]
|
||||
|
||||
def run_in_thread():
|
||||
try:
|
||||
new_loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(new_loop)
|
||||
try:
|
||||
result[0] = new_loop.run_until_complete(coro)
|
||||
finally:
|
||||
new_loop.close()
|
||||
except Exception as e:
|
||||
error[0] = e
|
||||
|
||||
thread = threading.Thread(target=run_in_thread)
|
||||
thread.start()
|
||||
thread.join(timeout=30)
|
||||
if error[0]:
|
||||
raise error[0]
|
||||
return result[0]
|
||||
except RuntimeError:
|
||||
# No running loop - create a fresh one (don't reuse potentially closed loops)
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
return loop.run_until_complete(coro)
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
def _set_content_index(self, cache_id: str, ipfs_cid: str):
|
||||
"""Set content index entry in database (cache_id -> ipfs_cid)."""
|
||||
import database
|
||||
|
||||
async def save_to_db():
|
||||
import asyncpg
|
||||
conn = await asyncpg.connect(database.DATABASE_URL)
|
||||
try:
|
||||
await conn.execute(
|
||||
"""
|
||||
INSERT INTO cache_items (cid, ipfs_cid)
|
||||
VALUES ($1, $2)
|
||||
ON CONFLICT (cid) DO UPDATE SET ipfs_cid = $2
|
||||
""",
|
||||
cache_id, ipfs_cid
|
||||
)
|
||||
finally:
|
||||
await conn.close()
|
||||
|
||||
self._run_async(save_to_db())
|
||||
logger.info(f"Indexed in database: {cache_id[:16]}... -> {ipfs_cid}")
|
||||
|
||||
def _get_content_index(self, cache_id: str) -> Optional[str]:
|
||||
"""Get content index entry (cache_id -> ipfs_cid) from database."""
|
||||
import database
|
||||
|
||||
async def get_from_db():
|
||||
import asyncpg
|
||||
conn = await asyncpg.connect(database.DATABASE_URL)
|
||||
try:
|
||||
row = await conn.fetchrow(
|
||||
"SELECT ipfs_cid FROM cache_items WHERE cid = $1",
|
||||
cache_id
|
||||
)
|
||||
return {"ipfs_cid": row["ipfs_cid"]} if row else None
|
||||
finally:
|
||||
await conn.close()
|
||||
|
||||
result = self._run_async(get_from_db())
|
||||
if result and result.get("ipfs_cid"):
|
||||
return result["ipfs_cid"]
|
||||
return None
|
||||
|
||||
def _del_content_index(self, cache_id: str):
|
||||
"""Delete content index entry from database."""
|
||||
import database
|
||||
|
||||
async def delete_from_db():
|
||||
import asyncpg
|
||||
conn = await asyncpg.connect(database.DATABASE_URL)
|
||||
try:
|
||||
await conn.execute("DELETE FROM cache_items WHERE cid = $1", cache_id)
|
||||
finally:
|
||||
await conn.close()
|
||||
|
||||
self._run_async(delete_from_db())
|
||||
|
||||
def _set_ipfs_index(self, cid: str, ipfs_cid: str):
|
||||
"""Set IPFS index entry in Redis."""
|
||||
if self._redis:
|
||||
try:
|
||||
redis_data = self._redis.hgetall(self._redis_content_key)
|
||||
if redis_data:
|
||||
self._content_index = {
|
||||
k.decode() if isinstance(k, bytes) else k:
|
||||
v.decode() if isinstance(v, bytes) else v
|
||||
for k, v in redis_data.items()
|
||||
}
|
||||
logger.info(f"Loaded {len(self._content_index)} content index entries from Redis")
|
||||
return
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to load content index from Redis: {e}")
|
||||
|
||||
# Fall back to JSON file
|
||||
if self._index_path().exists():
|
||||
try:
|
||||
with open(self._index_path()) as f:
|
||||
self._content_index = json.load(f)
|
||||
except (json.JSONDecodeError, IOError) as e:
|
||||
logger.warning(f"Failed to load content index: {e}")
|
||||
self._content_index = {}
|
||||
|
||||
# Also index from existing cache entries
|
||||
for entry in self.cache.list_entries():
|
||||
if entry.content_hash:
|
||||
self._content_index[entry.content_hash] = entry.node_id
|
||||
|
||||
# Migrate to Redis if available
|
||||
if self._redis and self._content_index:
|
||||
try:
|
||||
self._redis.hset(self._redis_content_key, mapping=self._content_index)
|
||||
logger.info(f"Migrated {len(self._content_index)} content index entries to Redis")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to migrate content index to Redis: {e}")
|
||||
|
||||
def _save_content_index(self):
|
||||
"""Save content_hash -> node_id index to Redis and JSON file."""
|
||||
# Always save to JSON as backup
|
||||
with open(self._index_path(), "w") as f:
|
||||
json.dump(self._content_index, f, indent=2)
|
||||
|
||||
def _set_content_index(self, content_hash: str, node_id: str):
|
||||
"""Set a single content index entry (Redis + in-memory)."""
|
||||
self._content_index[content_hash] = node_id
|
||||
if self._redis:
|
||||
try:
|
||||
self._redis.hset(self._redis_content_key, content_hash, node_id)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to set content index in Redis: {e}")
|
||||
self._save_content_index()
|
||||
|
||||
def _get_content_index(self, content_hash: str) -> Optional[str]:
|
||||
"""Get a content index entry (Redis-first, then in-memory)."""
|
||||
if self._redis:
|
||||
try:
|
||||
val = self._redis.hget(self._redis_content_key, content_hash)
|
||||
if val:
|
||||
return val.decode() if isinstance(val, bytes) else val
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to get content index from Redis: {e}")
|
||||
return self._content_index.get(content_hash)
|
||||
|
||||
def _del_content_index(self, content_hash: str):
|
||||
"""Delete a content index entry."""
|
||||
if content_hash in self._content_index:
|
||||
del self._content_index[content_hash]
|
||||
if self._redis:
|
||||
try:
|
||||
self._redis.hdel(self._redis_content_key, content_hash)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to delete content index from Redis: {e}")
|
||||
self._save_content_index()
|
||||
|
||||
def _ipfs_index_path(self) -> Path:
|
||||
return self.cache_dir / "ipfs_index.json"
|
||||
|
||||
def _load_ipfs_index(self):
|
||||
"""Load content_hash -> ipfs_cid index from Redis or JSON file."""
|
||||
# If Redis available and has data, use it
|
||||
if self._redis:
|
||||
try:
|
||||
redis_data = self._redis.hgetall(self._redis_ipfs_key)
|
||||
if redis_data:
|
||||
self._ipfs_cids = {
|
||||
k.decode() if isinstance(k, bytes) else k:
|
||||
v.decode() if isinstance(v, bytes) else v
|
||||
for k, v in redis_data.items()
|
||||
}
|
||||
logger.info(f"Loaded {len(self._ipfs_cids)} IPFS index entries from Redis")
|
||||
return
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to load IPFS index from Redis: {e}")
|
||||
|
||||
# Fall back to JSON file
|
||||
if self._ipfs_index_path().exists():
|
||||
try:
|
||||
with open(self._ipfs_index_path()) as f:
|
||||
self._ipfs_cids = json.load(f)
|
||||
except (json.JSONDecodeError, IOError) as e:
|
||||
logger.warning(f"Failed to load IPFS index: {e}")
|
||||
self._ipfs_cids = {}
|
||||
|
||||
# Migrate to Redis if available
|
||||
if self._redis and self._ipfs_cids:
|
||||
try:
|
||||
self._redis.hset(self._redis_ipfs_key, mapping=self._ipfs_cids)
|
||||
logger.info(f"Migrated {len(self._ipfs_cids)} IPFS index entries to Redis")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to migrate IPFS index to Redis: {e}")
|
||||
|
||||
def _save_ipfs_index(self):
|
||||
"""Save content_hash -> ipfs_cid index to JSON file (backup)."""
|
||||
with open(self._ipfs_index_path(), "w") as f:
|
||||
json.dump(self._ipfs_cids, f, indent=2)
|
||||
|
||||
def _set_ipfs_index(self, content_hash: str, ipfs_cid: str):
|
||||
"""Set a single IPFS index entry (Redis + in-memory)."""
|
||||
self._ipfs_cids[content_hash] = ipfs_cid
|
||||
if self._redis:
|
||||
try:
|
||||
self._redis.hset(self._redis_ipfs_key, content_hash, ipfs_cid)
|
||||
self._redis.hset(self._redis_ipfs_key, cid, ipfs_cid)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to set IPFS index in Redis: {e}")
|
||||
self._save_ipfs_index()
|
||||
|
||||
def _get_ipfs_cid_from_index(self, content_hash: str) -> Optional[str]:
|
||||
"""Get IPFS CID from index (Redis-first, then in-memory)."""
|
||||
def _get_ipfs_cid_from_index(self, cid: str) -> Optional[str]:
|
||||
"""Get IPFS CID from Redis."""
|
||||
if self._redis:
|
||||
try:
|
||||
val = self._redis.hget(self._redis_ipfs_key, content_hash)
|
||||
val = self._redis.hget(self._redis_ipfs_key, cid)
|
||||
if val:
|
||||
return val.decode() if isinstance(val, bytes) else val
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to get IPFS CID from Redis: {e}")
|
||||
return self._ipfs_cids.get(content_hash)
|
||||
return None
|
||||
|
||||
def get_ipfs_cid(self, content_hash: str) -> Optional[str]:
|
||||
def get_ipfs_cid(self, cid: str) -> Optional[str]:
|
||||
"""Get IPFS CID for a content hash."""
|
||||
return self._get_ipfs_cid_from_index(content_hash)
|
||||
return self._get_ipfs_cid_from_index(cid)
|
||||
|
||||
def _is_shared_by_node_id(self, content_hash: str) -> bool:
|
||||
"""Check if a content_hash is shared via L2."""
|
||||
return self.l2_checker.is_shared(content_hash)
|
||||
def _is_shared_by_node_id(self, cid: str) -> bool:
|
||||
"""Check if a cid is shared via L2."""
|
||||
return self.l2_checker.is_shared(cid)
|
||||
|
||||
def _load_meta(self, content_hash: str) -> dict:
|
||||
def _load_meta(self, cid: str) -> dict:
|
||||
"""Load metadata for a cached file."""
|
||||
meta_path = self.cache_dir / f"{content_hash}.meta.json"
|
||||
meta_path = self.cache_dir / f"{cid}.meta.json"
|
||||
if meta_path.exists():
|
||||
with open(meta_path) as f:
|
||||
return json.load(f)
|
||||
return {}
|
||||
|
||||
def is_pinned(self, content_hash: str) -> tuple[bool, str]:
|
||||
def is_pinned(self, cid: str) -> tuple[bool, str]:
|
||||
"""
|
||||
Check if a content_hash is pinned (non-deletable).
|
||||
Check if a cid is pinned (non-deletable).
|
||||
|
||||
Returns:
|
||||
(is_pinned, reason) tuple
|
||||
"""
|
||||
meta = self._load_meta(content_hash)
|
||||
meta = self._load_meta(cid)
|
||||
if meta.get("pinned"):
|
||||
return True, meta.get("pin_reason", "published")
|
||||
return False, ""
|
||||
|
||||
def _save_meta(self, content_hash: str, **updates) -> dict:
|
||||
def _save_meta(self, cid: str, **updates) -> dict:
|
||||
"""Save/update metadata for a cached file."""
|
||||
meta = self._load_meta(content_hash)
|
||||
meta = self._load_meta(cid)
|
||||
meta.update(updates)
|
||||
meta_path = self.cache_dir / f"{content_hash}.meta.json"
|
||||
meta_path = self.cache_dir / f"{cid}.meta.json"
|
||||
with open(meta_path, "w") as f:
|
||||
json.dump(meta, f, indent=2)
|
||||
return meta
|
||||
|
||||
def pin(self, content_hash: str, reason: str = "published") -> None:
|
||||
def pin(self, cid: str, reason: str = "published") -> None:
|
||||
"""Mark an item as pinned (non-deletable)."""
|
||||
self._save_meta(content_hash, pinned=True, pin_reason=reason)
|
||||
self._save_meta(cid, pinned=True, pin_reason=reason)
|
||||
|
||||
# ============ File Storage ============
|
||||
|
||||
@@ -366,42 +340,58 @@ class L1CacheManager:
|
||||
source_path: Path,
|
||||
node_type: str = "upload",
|
||||
node_id: str = None,
|
||||
cache_id: str = None,
|
||||
execution_time: float = 0.0,
|
||||
move: bool = False,
|
||||
skip_ipfs: bool = False,
|
||||
) -> tuple[CachedFile, Optional[str]]:
|
||||
"""
|
||||
Store a file in the cache and upload to IPFS.
|
||||
Store a file in the cache and optionally upload to IPFS.
|
||||
|
||||
Files are stored by IPFS CID when skip_ipfs=False (default), or by
|
||||
local content hash when skip_ipfs=True. The cache_id parameter creates
|
||||
an index from cache_id -> CID for code-addressed lookups.
|
||||
|
||||
Args:
|
||||
source_path: Path to file to cache
|
||||
node_type: Type of node (e.g., "upload", "source", "effect")
|
||||
node_id: Optional node_id; if not provided, uses content_hash
|
||||
node_id: DEPRECATED - ignored, always uses CID
|
||||
cache_id: Optional code-addressed cache ID to index
|
||||
execution_time: How long the operation took
|
||||
move: If True, move instead of copy
|
||||
skip_ipfs: If True, skip IPFS upload and use local hash (faster for large files)
|
||||
|
||||
Returns:
|
||||
Tuple of (CachedFile with both node_id and content_hash, IPFS CID or None)
|
||||
Tuple of (CachedFile with both node_id and cid, CID or None if skip_ipfs)
|
||||
"""
|
||||
# Compute content hash first
|
||||
content_hash = file_hash(source_path)
|
||||
if skip_ipfs:
|
||||
# Use local content hash instead of IPFS CID (much faster)
|
||||
cid = file_hash(source_path)
|
||||
ipfs_cid = None
|
||||
logger.info(f"put: Using local hash (skip_ipfs=True): {cid[:16]}...")
|
||||
else:
|
||||
# Upload to IPFS first to get the CID (primary identifier)
|
||||
cid = ipfs_client.add_file(source_path)
|
||||
if not cid:
|
||||
raise RuntimeError(f"IPFS upload failed for {source_path}. IPFS is required.")
|
||||
ipfs_cid = cid
|
||||
|
||||
# Use content_hash as node_id if not provided
|
||||
# This is for legacy/uploaded files that don't have a DAG node
|
||||
if node_id is None:
|
||||
node_id = content_hash
|
||||
# Always store by IPFS CID (node_id parameter is deprecated)
|
||||
node_id = cid
|
||||
|
||||
# Check if already cached (by node_id)
|
||||
existing = self.cache.get_entry(node_id)
|
||||
if existing and existing.output_path.exists():
|
||||
# Already cached - still try to get IPFS CID if we don't have it
|
||||
ipfs_cid = self._get_ipfs_cid_from_index(content_hash)
|
||||
if not ipfs_cid:
|
||||
ipfs_cid = ipfs_client.add_file(existing.output_path)
|
||||
if ipfs_cid:
|
||||
self._set_ipfs_index(content_hash, ipfs_cid)
|
||||
return CachedFile.from_cache_entry(existing), ipfs_cid
|
||||
|
||||
# Compute local hash BEFORE moving the file (for dual-indexing)
|
||||
# Only needed if we uploaded to IPFS (to map local hash -> IPFS CID)
|
||||
local_hash = None
|
||||
if not skip_ipfs and self._is_ipfs_cid(cid):
|
||||
local_hash = file_hash(source_path)
|
||||
|
||||
# Store in local cache
|
||||
logger.info(f"put: Storing in cache with node_id={node_id[:16]}...")
|
||||
self.cache.put(
|
||||
node_id=node_id,
|
||||
source_path=source_path,
|
||||
@@ -411,74 +401,125 @@ class L1CacheManager:
|
||||
)
|
||||
|
||||
entry = self.cache.get_entry(node_id)
|
||||
logger.info(f"put: After cache.put, get_entry(node_id={node_id[:16]}...) returned entry={entry is not None}, path={entry.output_path if entry else None}")
|
||||
|
||||
# Update content index (Redis + local)
|
||||
self._set_content_index(entry.content_hash, node_id)
|
||||
# Verify we can retrieve it
|
||||
verify_path = self.cache.get(node_id)
|
||||
logger.info(f"put: Verify cache.get(node_id={node_id[:16]}...) = {verify_path}")
|
||||
|
||||
# Upload to IPFS (async in background would be better, but sync for now)
|
||||
ipfs_cid = ipfs_client.add_file(entry.output_path)
|
||||
if ipfs_cid:
|
||||
self._set_ipfs_index(entry.content_hash, ipfs_cid)
|
||||
logger.info(f"Uploaded to IPFS: {entry.content_hash[:16]}... -> {ipfs_cid}")
|
||||
# Index by cache_id if provided (code-addressed cache lookup)
|
||||
# This allows get_by_cid(cache_id) to find files stored by IPFS CID
|
||||
if cache_id and cache_id != cid:
|
||||
self._set_content_index(cache_id, cid)
|
||||
logger.info(f"put: Indexed cache_id {cache_id[:16]}... -> IPFS {cid}")
|
||||
|
||||
return CachedFile.from_cache_entry(entry), ipfs_cid
|
||||
# Also index by local hash for content-based lookup
|
||||
if local_hash and local_hash != cid:
|
||||
self._set_content_index(local_hash, cid)
|
||||
logger.debug(f"Indexed local hash {local_hash[:16]}... -> IPFS {cid}")
|
||||
|
||||
logger.info(f"Cached: {cid[:16]}..." + (" (local only)" if skip_ipfs else " (IPFS)"))
|
||||
|
||||
return CachedFile.from_cache_entry(entry), ipfs_cid if not skip_ipfs else None
|
||||
|
||||
def get_by_node_id(self, node_id: str) -> Optional[Path]:
|
||||
"""Get cached file path by node_id."""
|
||||
return self.cache.get(node_id)
|
||||
|
||||
def get_by_content_hash(self, content_hash: str) -> Optional[Path]:
|
||||
"""Get cached file path by content_hash. Falls back to IPFS if not in local cache."""
|
||||
def _is_ipfs_cid(self, identifier: str) -> bool:
|
||||
"""Check if identifier looks like an IPFS CID."""
|
||||
# CIDv0 starts with "Qm", CIDv1 starts with "bafy" or other multibase prefixes
|
||||
return identifier.startswith("Qm") or identifier.startswith("bafy") or identifier.startswith("baf")
|
||||
|
||||
def get_by_cid(self, cid: str) -> Optional[Path]:
|
||||
"""Get cached file path by cid or IPFS CID. Falls back to IPFS if not in local cache."""
|
||||
logger.info(f"get_by_cid: Looking for cid={cid[:16]}...")
|
||||
|
||||
# Check index first (Redis then local)
|
||||
node_id = self._get_content_index(content_hash)
|
||||
node_id = self._get_content_index(cid)
|
||||
logger.info(f"get_by_cid: Index lookup returned node_id={node_id[:16] if node_id else None}...")
|
||||
if node_id:
|
||||
path = self.cache.get(node_id)
|
||||
logger.info(f"get_by_cid: cache.get(node_id={node_id[:16]}...) returned path={path}")
|
||||
if path and path.exists():
|
||||
logger.debug(f" Found via index: {path}")
|
||||
logger.info(f"get_by_cid: Found via index: {path}")
|
||||
return path
|
||||
|
||||
# For uploads, node_id == content_hash, so try direct lookup
|
||||
# artdag Cache doesn't know about entry - check filesystem directly
|
||||
# Files are stored at {cache_dir}/nodes/{node_id}/output.*
|
||||
nodes_dir = self.cache_dir / "nodes" / node_id
|
||||
if nodes_dir.exists():
|
||||
for f in nodes_dir.iterdir():
|
||||
if f.name.startswith("output."):
|
||||
logger.info(f"get_by_cid: Found on filesystem: {f}")
|
||||
return f
|
||||
|
||||
# For uploads, node_id == cid, so try direct lookup
|
||||
# This works even if cache index hasn't been reloaded
|
||||
path = self.cache.get(content_hash)
|
||||
logger.debug(f" cache.get({content_hash[:16]}...) returned: {path}")
|
||||
path = self.cache.get(cid)
|
||||
logger.info(f"get_by_cid: Direct cache.get({cid[:16]}...) returned: {path}")
|
||||
if path and path.exists():
|
||||
self._set_content_index(content_hash, content_hash)
|
||||
self._set_content_index(cid, cid)
|
||||
return path
|
||||
|
||||
# Check filesystem directly for cid as node_id
|
||||
nodes_dir = self.cache_dir / "nodes" / cid
|
||||
if nodes_dir.exists():
|
||||
for f in nodes_dir.iterdir():
|
||||
if f.name.startswith("output."):
|
||||
logger.info(f"get_by_cid: Found on filesystem (direct): {f}")
|
||||
self._set_content_index(cid, cid)
|
||||
return f
|
||||
|
||||
# Scan cache entries (fallback for new structure)
|
||||
entry = self.cache.find_by_content_hash(content_hash)
|
||||
entry = self.cache.find_by_cid(cid)
|
||||
logger.info(f"get_by_cid: find_by_cid({cid[:16]}...) returned entry={entry}")
|
||||
if entry and entry.output_path.exists():
|
||||
logger.debug(f" Found via scan: {entry.output_path}")
|
||||
self._set_content_index(content_hash, entry.node_id)
|
||||
logger.info(f"get_by_cid: Found via scan: {entry.output_path}")
|
||||
self._set_content_index(cid, entry.node_id)
|
||||
return entry.output_path
|
||||
|
||||
# Check legacy location (files stored directly as CACHE_DIR/{content_hash})
|
||||
legacy_path = self.cache_dir / content_hash
|
||||
# Check legacy location (files stored directly as CACHE_DIR/{cid})
|
||||
legacy_path = self.cache_dir / cid
|
||||
logger.info(f"get_by_cid: Checking legacy path: {legacy_path} exists={legacy_path.exists()}")
|
||||
if legacy_path.exists() and legacy_path.is_file():
|
||||
logger.info(f"get_by_cid: Found at legacy path: {legacy_path}")
|
||||
return legacy_path
|
||||
|
||||
# Try to recover from IPFS if we have a CID
|
||||
ipfs_cid = self._get_ipfs_cid_from_index(content_hash)
|
||||
if ipfs_cid:
|
||||
logger.info(f"Recovering from IPFS: {content_hash[:16]}... ({ipfs_cid})")
|
||||
recovery_path = self.legacy_dir / content_hash
|
||||
if ipfs_client.get_file(ipfs_cid, recovery_path):
|
||||
logger.info(f"Recovered from IPFS: {recovery_path}")
|
||||
# Fetch from IPFS - this is the source of truth for all content
|
||||
if self._is_ipfs_cid(cid):
|
||||
logger.info(f"get_by_cid: Fetching from IPFS: {cid[:16]}...")
|
||||
recovery_path = self.legacy_dir / cid
|
||||
recovery_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
if ipfs_client.get_file(cid, str(recovery_path)):
|
||||
logger.info(f"get_by_cid: Fetched from IPFS: {recovery_path}")
|
||||
self._set_content_index(cid, cid)
|
||||
return recovery_path
|
||||
else:
|
||||
logger.warning(f"get_by_cid: IPFS fetch failed for {cid[:16]}...")
|
||||
|
||||
# Also try with a mapped IPFS CID if different from cid
|
||||
ipfs_cid = self._get_ipfs_cid_from_index(cid)
|
||||
if ipfs_cid and ipfs_cid != cid:
|
||||
logger.info(f"get_by_cid: Fetching from IPFS via mapping: {ipfs_cid[:16]}...")
|
||||
recovery_path = self.legacy_dir / cid
|
||||
recovery_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
if ipfs_client.get_file(ipfs_cid, str(recovery_path)):
|
||||
logger.info(f"get_by_cid: Fetched from IPFS: {recovery_path}")
|
||||
return recovery_path
|
||||
|
||||
return None
|
||||
|
||||
def has_content(self, content_hash: str) -> bool:
|
||||
def has_content(self, cid: str) -> bool:
|
||||
"""Check if content exists in cache."""
|
||||
return self.get_by_content_hash(content_hash) is not None
|
||||
return self.get_by_cid(cid) is not None
|
||||
|
||||
def get_entry_by_content_hash(self, content_hash: str) -> Optional[CacheEntry]:
|
||||
"""Get cache entry by content_hash."""
|
||||
node_id = self._get_content_index(content_hash)
|
||||
def get_entry_by_cid(self, cid: str) -> Optional[CacheEntry]:
|
||||
"""Get cache entry by cid."""
|
||||
node_id = self._get_content_index(cid)
|
||||
if node_id:
|
||||
return self.cache.get_entry(node_id)
|
||||
return self.cache.find_by_content_hash(content_hash)
|
||||
return self.cache.find_by_cid(cid)
|
||||
|
||||
def list_all(self) -> List[CachedFile]:
|
||||
"""List all cached files."""
|
||||
@@ -488,11 +529,11 @@ class L1CacheManager:
|
||||
# New cache structure entries
|
||||
for entry in self.cache.list_entries():
|
||||
files.append(CachedFile.from_cache_entry(entry))
|
||||
if entry.content_hash:
|
||||
seen_hashes.add(entry.content_hash)
|
||||
if entry.cid:
|
||||
seen_hashes.add(entry.cid)
|
||||
|
||||
# Legacy files stored directly in cache_dir (old structure)
|
||||
# These are files named by content_hash directly in CACHE_DIR
|
||||
# These are files named by cid directly in CACHE_DIR
|
||||
for f in self.cache_dir.iterdir():
|
||||
# Skip directories and special files
|
||||
if not f.is_file():
|
||||
@@ -509,7 +550,7 @@ class L1CacheManager:
|
||||
|
||||
files.append(CachedFile(
|
||||
node_id=f.name,
|
||||
content_hash=f.name,
|
||||
cid=f.name,
|
||||
path=f,
|
||||
size_bytes=f.stat().st_size,
|
||||
node_type="legacy",
|
||||
@@ -519,6 +560,23 @@ class L1CacheManager:
|
||||
|
||||
return files
|
||||
|
||||
def list_by_type(self, node_type: str) -> List[str]:
|
||||
"""
|
||||
List CIDs of all cached files of a specific type.
|
||||
|
||||
Args:
|
||||
node_type: Type to filter by (e.g., "recipe", "upload", "effect")
|
||||
|
||||
Returns:
|
||||
List of CIDs (IPFS CID if available, otherwise node_id)
|
||||
"""
|
||||
cids = []
|
||||
for entry in self.cache.list_entries():
|
||||
if entry.node_type == node_type:
|
||||
# Return node_id which is the IPFS CID for uploaded content
|
||||
cids.append(entry.node_id)
|
||||
return cids
|
||||
|
||||
# ============ Activity Tracking ============
|
||||
|
||||
def record_activity(self, dag: DAG, run_id: str = None) -> Activity:
|
||||
@@ -539,19 +597,19 @@ class L1CacheManager:
|
||||
def record_simple_activity(
|
||||
self,
|
||||
input_hashes: List[str],
|
||||
output_hash: str,
|
||||
output_cid: str,
|
||||
run_id: str = None,
|
||||
) -> Activity:
|
||||
"""
|
||||
Record a simple (non-DAG) execution as an activity.
|
||||
|
||||
For legacy single-effect runs that don't use full DAG execution.
|
||||
Uses content_hash as node_id.
|
||||
Uses cid as node_id.
|
||||
"""
|
||||
activity = Activity(
|
||||
activity_id=run_id or str(hash((tuple(input_hashes), output_hash))),
|
||||
activity_id=run_id or str(hash((tuple(input_hashes), output_cid))),
|
||||
input_ids=sorted(input_hashes),
|
||||
output_id=output_hash,
|
||||
output_id=output_cid,
|
||||
intermediate_ids=[],
|
||||
created_at=datetime.now(timezone.utc).timestamp(),
|
||||
status="completed",
|
||||
@@ -573,7 +631,7 @@ class L1CacheManager:
|
||||
|
||||
# ============ Deletion Rules ============
|
||||
|
||||
def can_delete(self, content_hash: str) -> tuple[bool, str]:
|
||||
def can_delete(self, cid: str) -> tuple[bool, str]:
|
||||
"""
|
||||
Check if a cached item can be deleted.
|
||||
|
||||
@@ -581,12 +639,12 @@ class L1CacheManager:
|
||||
(can_delete, reason) tuple
|
||||
"""
|
||||
# Check if pinned (published or input to published)
|
||||
pinned, reason = self.is_pinned(content_hash)
|
||||
pinned, reason = self.is_pinned(cid)
|
||||
if pinned:
|
||||
return False, f"Item is pinned ({reason})"
|
||||
|
||||
# Find node_id for this content
|
||||
node_id = self._get_content_index(content_hash) or content_hash
|
||||
node_id = self._get_content_index(cid) or cid
|
||||
|
||||
# Check if it's an input or output of any activity
|
||||
for activity in self.activity_store.list():
|
||||
@@ -612,34 +670,34 @@ class L1CacheManager:
|
||||
for node_id in activity.all_node_ids:
|
||||
entry = self.cache.get_entry(node_id)
|
||||
if entry:
|
||||
pinned, reason = self.is_pinned(entry.content_hash)
|
||||
pinned, reason = self.is_pinned(entry.cid)
|
||||
if pinned:
|
||||
return False, f"Item {node_id} is pinned ({reason})"
|
||||
|
||||
return True, "OK"
|
||||
|
||||
def delete_by_content_hash(self, content_hash: str) -> tuple[bool, str]:
|
||||
def delete_by_cid(self, cid: str) -> tuple[bool, str]:
|
||||
"""
|
||||
Delete a cached item by content_hash.
|
||||
Delete a cached item by cid.
|
||||
|
||||
Enforces deletion rules.
|
||||
|
||||
Returns:
|
||||
(success, message) tuple
|
||||
"""
|
||||
can_delete, reason = self.can_delete(content_hash)
|
||||
can_delete, reason = self.can_delete(cid)
|
||||
if not can_delete:
|
||||
return False, reason
|
||||
|
||||
# Find and delete
|
||||
node_id = self._get_content_index(content_hash)
|
||||
node_id = self._get_content_index(cid)
|
||||
if node_id:
|
||||
self.cache.remove(node_id)
|
||||
self._del_content_index(content_hash)
|
||||
self._del_content_index(cid)
|
||||
return True, "Deleted"
|
||||
|
||||
# Try legacy
|
||||
legacy_path = self.legacy_dir / content_hash
|
||||
legacy_path = self.legacy_dir / cid
|
||||
if legacy_path.exists():
|
||||
legacy_path.unlink()
|
||||
return True, "Deleted (legacy)"
|
||||
@@ -664,11 +722,26 @@ class L1CacheManager:
|
||||
return True, "Activity discarded"
|
||||
return False, "Failed to discard"
|
||||
|
||||
def _is_used_by_other_activities(self, node_id: str, exclude_activity_id: str) -> bool:
|
||||
"""Check if a node is used by any activity other than the excluded one."""
|
||||
for other_activity in self.activity_store.list():
|
||||
if other_activity.activity_id == exclude_activity_id:
|
||||
continue
|
||||
# Check if used as input, output, or intermediate
|
||||
if node_id in other_activity.input_ids:
|
||||
return True
|
||||
if node_id == other_activity.output_id:
|
||||
return True
|
||||
if node_id in other_activity.intermediate_ids:
|
||||
return True
|
||||
return False
|
||||
|
||||
def discard_activity_outputs_only(self, activity_id: str) -> tuple[bool, str]:
|
||||
"""
|
||||
Discard an activity, deleting only outputs and intermediates.
|
||||
|
||||
Inputs (cache items, configs) are preserved.
|
||||
Outputs/intermediates used by other activities are preserved.
|
||||
|
||||
Returns:
|
||||
(success, message) tuple
|
||||
@@ -681,37 +754,52 @@ class L1CacheManager:
|
||||
if activity.output_id:
|
||||
entry = self.cache.get_entry(activity.output_id)
|
||||
if entry:
|
||||
pinned, reason = self.is_pinned(entry.content_hash)
|
||||
pinned, reason = self.is_pinned(entry.cid)
|
||||
if pinned:
|
||||
return False, f"Output is pinned ({reason})"
|
||||
|
||||
# Delete output
|
||||
if activity.output_id:
|
||||
entry = self.cache.get_entry(activity.output_id)
|
||||
if entry:
|
||||
# Remove from cache
|
||||
self.cache.remove(activity.output_id)
|
||||
# Remove from content index (Redis + local)
|
||||
self._del_content_index(entry.content_hash)
|
||||
# Delete from legacy dir if exists
|
||||
legacy_path = self.legacy_dir / entry.content_hash
|
||||
if legacy_path.exists():
|
||||
legacy_path.unlink()
|
||||
deleted_outputs = 0
|
||||
preserved_shared = 0
|
||||
|
||||
# Delete intermediates
|
||||
# Delete output (only if not used by other activities)
|
||||
if activity.output_id:
|
||||
if self._is_used_by_other_activities(activity.output_id, activity_id):
|
||||
preserved_shared += 1
|
||||
else:
|
||||
entry = self.cache.get_entry(activity.output_id)
|
||||
if entry:
|
||||
# Remove from cache
|
||||
self.cache.remove(activity.output_id)
|
||||
# Remove from content index (Redis + local)
|
||||
self._del_content_index(entry.cid)
|
||||
# Delete from legacy dir if exists
|
||||
legacy_path = self.legacy_dir / entry.cid
|
||||
if legacy_path.exists():
|
||||
legacy_path.unlink()
|
||||
deleted_outputs += 1
|
||||
|
||||
# Delete intermediates (only if not used by other activities)
|
||||
for node_id in activity.intermediate_ids:
|
||||
if self._is_used_by_other_activities(node_id, activity_id):
|
||||
preserved_shared += 1
|
||||
continue
|
||||
entry = self.cache.get_entry(node_id)
|
||||
if entry:
|
||||
self.cache.remove(node_id)
|
||||
self._del_content_index(entry.content_hash)
|
||||
legacy_path = self.legacy_dir / entry.content_hash
|
||||
self._del_content_index(entry.cid)
|
||||
legacy_path = self.legacy_dir / entry.cid
|
||||
if legacy_path.exists():
|
||||
legacy_path.unlink()
|
||||
deleted_outputs += 1
|
||||
|
||||
# Remove activity record (inputs remain in cache)
|
||||
self.activity_store.remove(activity_id)
|
||||
|
||||
return True, "Activity discarded (outputs only)"
|
||||
msg = f"Activity discarded (deleted {deleted_outputs} outputs"
|
||||
if preserved_shared > 0:
|
||||
msg += f", preserved {preserved_shared} shared items"
|
||||
msg += ")"
|
||||
return True, msg
|
||||
|
||||
def cleanup_intermediates(self) -> int:
|
||||
"""Delete all intermediate cache entries (reconstructible)."""
|
||||
@@ -726,13 +814,13 @@ class L1CacheManager:
|
||||
|
||||
# ============ L2 Integration ============
|
||||
|
||||
def mark_published(self, content_hash: str):
|
||||
"""Mark a content_hash as published to L2."""
|
||||
self.l2_checker.mark_shared(content_hash)
|
||||
def mark_published(self, cid: str):
|
||||
"""Mark a cid as published to L2."""
|
||||
self.l2_checker.mark_shared(cid)
|
||||
|
||||
def invalidate_shared_cache(self, content_hash: str):
|
||||
def invalidate_shared_cache(self, cid: str):
|
||||
"""Invalidate shared status cache (call if item might be unpublished)."""
|
||||
self.l2_checker.invalidate(content_hash)
|
||||
self.l2_checker.invalidate(cid)
|
||||
|
||||
# ============ Stats ============
|
||||
|
||||
|
||||
@@ -1,24 +1,39 @@
|
||||
"""
|
||||
Art DAG Celery Application
|
||||
|
||||
Distributed rendering for the Art DAG system.
|
||||
Uses the foundational artdag language from GitHub.
|
||||
Streaming video rendering for the Art DAG system.
|
||||
Uses S-expression recipes with frame-by-frame processing.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from celery import Celery
|
||||
from celery.signals import worker_ready
|
||||
|
||||
REDIS_URL = os.environ.get('REDIS_URL', 'redis://localhost:6379/5')
|
||||
# Use central config
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
from app.config import settings
|
||||
|
||||
app = Celery(
|
||||
'art_celery',
|
||||
broker=REDIS_URL,
|
||||
backend=REDIS_URL,
|
||||
include=['legacy_tasks', 'tasks', 'tasks.analyze', 'tasks.execute', 'tasks.orchestrate']
|
||||
broker=settings.redis_url,
|
||||
backend=settings.redis_url,
|
||||
include=['tasks', 'tasks.streaming', 'tasks.ipfs_upload']
|
||||
)
|
||||
|
||||
|
||||
@worker_ready.connect
|
||||
def log_config_on_startup(sender, **kwargs):
|
||||
"""Log configuration when worker starts."""
|
||||
print("=" * 60, file=sys.stderr)
|
||||
print("WORKER STARTED - CONFIGURATION", file=sys.stderr)
|
||||
print("=" * 60, file=sys.stderr)
|
||||
settings.log_config()
|
||||
print(f"Worker: {sender}", file=sys.stderr)
|
||||
print("=" * 60, file=sys.stderr)
|
||||
|
||||
app.conf.update(
|
||||
result_expires=3600,
|
||||
result_expires=86400 * 7, # 7 days - allow time for recovery after restarts
|
||||
task_serializer='json',
|
||||
accept_content=['json', 'pickle'], # pickle needed for internal Celery messages
|
||||
result_serializer='json',
|
||||
@@ -26,8 +41,10 @@ app.conf.update(
|
||||
timezone='UTC',
|
||||
enable_utc=True,
|
||||
task_track_started=True,
|
||||
task_acks_late=True,
|
||||
task_acks_late=True, # Don't ack until task completes - survives worker restart
|
||||
worker_prefetch_multiplier=1,
|
||||
task_reject_on_worker_lost=True, # Re-queue if worker dies
|
||||
task_acks_on_failure_or_timeout=True, # Ack failed tasks so they don't retry forever
|
||||
)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
17
configs/audio-dizzy.sexp
Normal file
17
configs/audio-dizzy.sexp
Normal file
@@ -0,0 +1,17 @@
|
||||
;; Audio Configuration - dizzy.mp3
|
||||
;;
|
||||
;; Defines audio analyzer and playback for a recipe.
|
||||
;; Pass to recipe with: --audio configs/audio-dizzy.sexp
|
||||
;;
|
||||
;; Provides:
|
||||
;; - music: audio analyzer for beat/energy detection
|
||||
;; - audio-playback: path for synchronized playback
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
;; Audio analyzer (provides beat detection and energy levels)
|
||||
;; Paths relative to working directory (project root)
|
||||
(def music (streaming:make-audio-analyzer "dizzy.mp3"))
|
||||
|
||||
;; Audio playback path (for sync with video output)
|
||||
(audio-playback "dizzy.mp3")
|
||||
17
configs/audio-halleluwah.sexp
Normal file
17
configs/audio-halleluwah.sexp
Normal file
@@ -0,0 +1,17 @@
|
||||
;; Audio Configuration - dizzy.mp3
|
||||
;;
|
||||
;; Defines audio analyzer and playback for a recipe.
|
||||
;; Pass to recipe with: --audio configs/audio-dizzy.sexp
|
||||
;;
|
||||
;; Provides:
|
||||
;; - music: audio analyzer for beat/energy detection
|
||||
;; - audio-playback: path for synchronized playback
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
;; Audio analyzer (provides beat detection and energy levels)
|
||||
;; Using friendly name for asset resolution
|
||||
(def music (streaming:make-audio-analyzer "woods-audio"))
|
||||
|
||||
;; Audio playback path (for sync with video output)
|
||||
(audio-playback "woods-audio")
|
||||
38
configs/sources-default.sexp
Normal file
38
configs/sources-default.sexp
Normal file
@@ -0,0 +1,38 @@
|
||||
;; Default Sources Configuration
|
||||
;;
|
||||
;; Defines video sources and per-pair effect configurations.
|
||||
;; Pass to recipe with: --sources configs/sources-default.sexp
|
||||
;;
|
||||
;; Required by recipes using process-pair macro:
|
||||
;; - sources: array of video sources
|
||||
;; - pair-configs: array of effect configurations per source
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
;; Video sources array
|
||||
;; Paths relative to working directory (project root)
|
||||
(def sources [
|
||||
(streaming:make-video-source "monday.webm" 30)
|
||||
(streaming:make-video-source "escher.webm" 30)
|
||||
(streaming:make-video-source "2.webm" 30)
|
||||
(streaming:make-video-source "disruptors.webm" 30)
|
||||
(streaming:make-video-source "4.mp4" 30)
|
||||
(streaming:make-video-source "ecstacy.mp4" 30)
|
||||
(streaming:make-video-source "dopple.webm" 30)
|
||||
(streaming:make-video-source "5.mp4" 30)
|
||||
])
|
||||
|
||||
;; Per-pair effect config: rotation direction, rotation ranges, zoom ranges
|
||||
;; :dir = rotation direction (1 or -1)
|
||||
;; :rot-a, :rot-b = max rotation angles for clip A and B
|
||||
;; :zoom-a, :zoom-b = max zoom amounts for clip A and B
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 0: monday
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 1: escher
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 2: vid2
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5} ;; 3: disruptors (reversed)
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 4: vid4
|
||||
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7} ;; 5: ecstacy (smaller)
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5} ;; 6: dopple (reversed)
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 7: vid5
|
||||
])
|
||||
19
configs/sources-woods-half.sexp
Normal file
19
configs/sources-woods-half.sexp
Normal file
@@ -0,0 +1,19 @@
|
||||
;; Half-resolution Woods Sources (960x540)
|
||||
;;
|
||||
;; Pass to recipe with: --sources configs/sources-woods-half.sexp
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
(def sources [
|
||||
(streaming:make-video-source "woods_half/1.webm" 30)
|
||||
(streaming:make-video-source "woods_half/2.webm" 30)
|
||||
(streaming:make-video-source "woods_half/3.webm" 30)
|
||||
(streaming:make-video-source "woods_half/4.webm" 30)
|
||||
])
|
||||
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
|
||||
])
|
||||
39
configs/sources-woods.sexp
Normal file
39
configs/sources-woods.sexp
Normal file
@@ -0,0 +1,39 @@
|
||||
;; Default Sources Configuration
|
||||
;;
|
||||
;; Defines video sources and per-pair effect configurations.
|
||||
;; Pass to recipe with: --sources configs/sources-default.sexp
|
||||
;;
|
||||
;; Required by recipes using process-pair macro:
|
||||
;; - sources: array of video sources
|
||||
;; - pair-configs: array of effect configurations per source
|
||||
|
||||
(require-primitives "streaming")
|
||||
|
||||
;; Video sources array
|
||||
;; Using friendly names for asset resolution
|
||||
(def sources [
|
||||
(streaming:make-video-source "woods-1" 10)
|
||||
(streaming:make-video-source "woods-2" 10)
|
||||
(streaming:make-video-source "woods-3" 10)
|
||||
(streaming:make-video-source "woods-4" 10)
|
||||
(streaming:make-video-source "woods-5" 10)
|
||||
(streaming:make-video-source "woods-6" 10)
|
||||
(streaming:make-video-source "woods-7" 10)
|
||||
(streaming:make-video-source "woods-8" 10)
|
||||
])
|
||||
|
||||
;; Per-pair effect config: rotation direction, rotation ranges, zoom ranges
|
||||
;; :dir = rotation direction (1 or -1)
|
||||
;; :rot-a, :rot-b = max rotation angles for clip A and B
|
||||
;; :zoom-a, :zoom-b = max zoom amounts for clip A and B
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 0: monday
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 1: escher
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 2: vid2
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5} ;; 3: disruptors (reversed)
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 0: monday
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 1: escher
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 0: monday
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 1: escher
|
||||
|
||||
])
|
||||
1161
database.py
1161
database.py
File diff suppressed because it is too large
Load Diff
@@ -7,13 +7,13 @@ echo "=== Pulling latest code ==="
|
||||
git pull
|
||||
|
||||
echo "=== Building Docker image ==="
|
||||
docker build --build-arg CACHEBUST=$(date +%s) -t git.rose-ash.com/art-dag/l1-server:latest .
|
||||
docker build --build-arg CACHEBUST=$(date +%s) -t registry.rose-ash.com:5000/celery-l1-server:latest .
|
||||
|
||||
echo "=== Pushing to registry ==="
|
||||
docker push registry.rose-ash.com:5000/celery-l1-server:latest
|
||||
|
||||
echo "=== Redeploying celery stack ==="
|
||||
docker stack deploy -c docker-compose.yml celery
|
||||
|
||||
echo "=== Restarting proxy nginx ==="
|
||||
docker service update --force proxy_nginx
|
||||
|
||||
echo "=== Done ==="
|
||||
docker stack services celery
|
||||
|
||||
249
diagnose_gpu.py
Executable file
249
diagnose_gpu.py
Executable file
@@ -0,0 +1,249 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
GPU Rendering Diagnostic Script
|
||||
|
||||
Checks for common issues that cause GPU rendering slowdowns in art-dag.
|
||||
Run this script to identify potential performance bottlenecks.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
def print_section(title):
|
||||
print(f"\n{'='*60}")
|
||||
print(f" {title}")
|
||||
print(f"{'='*60}")
|
||||
|
||||
def check_pass(msg):
|
||||
print(f" [PASS] {msg}")
|
||||
|
||||
def check_fail(msg):
|
||||
print(f" [FAIL] {msg}")
|
||||
|
||||
def check_warn(msg):
|
||||
print(f" [WARN] {msg}")
|
||||
|
||||
def check_info(msg):
|
||||
print(f" [INFO] {msg}")
|
||||
|
||||
# ============================================================
|
||||
# 1. Check GPU Availability
|
||||
# ============================================================
|
||||
print_section("1. GPU AVAILABILITY")
|
||||
|
||||
# Check nvidia-smi
|
||||
try:
|
||||
result = subprocess.run(["nvidia-smi", "--query-gpu=name,memory.total,memory.free,utilization.gpu",
|
||||
"--format=csv,noheader"], capture_output=True, text=True, timeout=5)
|
||||
if result.returncode == 0:
|
||||
for line in result.stdout.strip().split('\n'):
|
||||
check_pass(f"GPU found: {line}")
|
||||
else:
|
||||
check_fail("nvidia-smi failed - no GPU detected")
|
||||
except FileNotFoundError:
|
||||
check_fail("nvidia-smi not found - NVIDIA drivers not installed")
|
||||
except Exception as e:
|
||||
check_fail(f"nvidia-smi error: {e}")
|
||||
|
||||
# ============================================================
|
||||
# 2. Check CuPy
|
||||
# ============================================================
|
||||
print_section("2. CUPY (GPU ARRAY LIBRARY)")
|
||||
|
||||
try:
|
||||
import cupy as cp
|
||||
check_pass(f"CuPy available, version {cp.__version__}")
|
||||
|
||||
# Test basic GPU operation
|
||||
try:
|
||||
a = cp.zeros((100, 100), dtype=cp.uint8)
|
||||
cp.cuda.Stream.null.synchronize()
|
||||
check_pass("CuPy GPU operations working")
|
||||
|
||||
# Check memory
|
||||
mempool = cp.get_default_memory_pool()
|
||||
check_info(f"GPU memory pool: {mempool.used_bytes() / 1024**2:.1f} MB used, "
|
||||
f"{mempool.total_bytes() / 1024**2:.1f} MB total")
|
||||
except Exception as e:
|
||||
check_fail(f"CuPy GPU test failed: {e}")
|
||||
except ImportError:
|
||||
check_fail("CuPy not installed - GPU rendering disabled")
|
||||
|
||||
# ============================================================
|
||||
# 3. Check PyNvVideoCodec (GPU Encoding)
|
||||
# ============================================================
|
||||
print_section("3. PYNVVIDEOCODEC (GPU ENCODING)")
|
||||
|
||||
try:
|
||||
import PyNvVideoCodec as nvc
|
||||
check_pass("PyNvVideoCodec available - zero-copy GPU encoding enabled")
|
||||
except ImportError:
|
||||
check_warn("PyNvVideoCodec not available - using FFmpeg NVENC (slower)")
|
||||
|
||||
# ============================================================
|
||||
# 4. Check Decord GPU (Hardware Decode)
|
||||
# ============================================================
|
||||
print_section("4. DECORD GPU (HARDWARE DECODE)")
|
||||
|
||||
try:
|
||||
import decord
|
||||
from decord import gpu
|
||||
ctx = gpu(0)
|
||||
check_pass(f"Decord GPU (NVDEC) available - hardware video decode enabled")
|
||||
except ImportError:
|
||||
check_warn("Decord not installed - using FFmpeg decode")
|
||||
except Exception as e:
|
||||
check_warn(f"Decord GPU not available ({e}) - using FFmpeg decode")
|
||||
|
||||
# ============================================================
|
||||
# 5. Check DLPack Support
|
||||
# ============================================================
|
||||
print_section("5. DLPACK (ZERO-COPY TRANSFER)")
|
||||
|
||||
try:
|
||||
import decord
|
||||
from decord import VideoReader, gpu
|
||||
import cupy as cp
|
||||
|
||||
# Need a test video file
|
||||
test_video = None
|
||||
for path in ["/data/cache", "/tmp"]:
|
||||
if os.path.exists(path):
|
||||
for f in os.listdir(path):
|
||||
if f.endswith(('.mp4', '.webm', '.mkv')):
|
||||
test_video = os.path.join(path, f)
|
||||
break
|
||||
if test_video:
|
||||
break
|
||||
|
||||
if test_video:
|
||||
try:
|
||||
vr = VideoReader(test_video, ctx=gpu(0))
|
||||
frame = vr[0]
|
||||
dlpack = frame.to_dlpack()
|
||||
gpu_frame = cp.from_dlpack(dlpack)
|
||||
check_pass(f"DLPack zero-copy working (tested with {os.path.basename(test_video)})")
|
||||
except Exception as e:
|
||||
check_fail(f"DLPack FAILED: {e}")
|
||||
check_info("This means every frame does GPU->CPU->GPU copy (SLOW)")
|
||||
else:
|
||||
check_warn("No test video found - cannot verify DLPack")
|
||||
except ImportError:
|
||||
check_warn("Cannot test DLPack - decord or cupy not available")
|
||||
|
||||
# ============================================================
|
||||
# 6. Check Fast CUDA Kernels
|
||||
# ============================================================
|
||||
print_section("6. FAST CUDA KERNELS (JIT COMPILED)")
|
||||
|
||||
try:
|
||||
sys.path.insert(0, '/root/art-dag/celery')
|
||||
from streaming.jit_compiler import (
|
||||
fast_rotate, fast_zoom, fast_blend, fast_hue_shift,
|
||||
fast_invert, fast_ripple, get_fast_ops
|
||||
)
|
||||
check_pass("Fast CUDA kernels loaded successfully")
|
||||
|
||||
# Test one kernel
|
||||
try:
|
||||
import cupy as cp
|
||||
test_img = cp.zeros((720, 1280, 3), dtype=cp.uint8)
|
||||
result = fast_rotate(test_img, 45.0)
|
||||
cp.cuda.Stream.null.synchronize()
|
||||
check_pass("Fast rotate kernel working")
|
||||
except Exception as e:
|
||||
check_fail(f"Fast kernel execution failed: {e}")
|
||||
except ImportError as e:
|
||||
check_warn(f"Fast CUDA kernels not available: {e}")
|
||||
check_info("Fallback to slower CuPy operations")
|
||||
|
||||
# ============================================================
|
||||
# 7. Check Fused Pipeline Compiler
|
||||
# ============================================================
|
||||
print_section("7. FUSED PIPELINE COMPILER")
|
||||
|
||||
try:
|
||||
sys.path.insert(0, '/root/art-dag/celery')
|
||||
from streaming.sexp_to_cuda import compile_frame_pipeline, compile_autonomous_pipeline
|
||||
check_pass("Fused CUDA pipeline compiler available")
|
||||
except ImportError as e:
|
||||
check_warn(f"Fused pipeline compiler not available: {e}")
|
||||
check_info("Using per-operation fallback (slower for multi-effect pipelines)")
|
||||
|
||||
# ============================================================
|
||||
# 8. Check FFmpeg NVENC
|
||||
# ============================================================
|
||||
print_section("8. FFMPEG NVENC (HARDWARE ENCODE)")
|
||||
|
||||
try:
|
||||
result = subprocess.run(["ffmpeg", "-encoders"], capture_output=True, text=True, timeout=5)
|
||||
if "h264_nvenc" in result.stdout:
|
||||
check_pass("FFmpeg h264_nvenc encoder available")
|
||||
else:
|
||||
check_warn("FFmpeg h264_nvenc not available - using libx264 (CPU)")
|
||||
|
||||
if "hevc_nvenc" in result.stdout:
|
||||
check_pass("FFmpeg hevc_nvenc encoder available")
|
||||
except Exception as e:
|
||||
check_fail(f"FFmpeg check failed: {e}")
|
||||
|
||||
# ============================================================
|
||||
# 9. Check FFmpeg NVDEC
|
||||
# ============================================================
|
||||
print_section("9. FFMPEG NVDEC (HARDWARE DECODE)")
|
||||
|
||||
try:
|
||||
result = subprocess.run(["ffmpeg", "-hwaccels"], capture_output=True, text=True, timeout=5)
|
||||
if "cuda" in result.stdout:
|
||||
check_pass("FFmpeg CUDA hwaccel available")
|
||||
else:
|
||||
check_warn("FFmpeg CUDA hwaccel not available - using CPU decode")
|
||||
except Exception as e:
|
||||
check_fail(f"FFmpeg hwaccel check failed: {e}")
|
||||
|
||||
# ============================================================
|
||||
# 10. Check Pipeline Cache Status
|
||||
# ============================================================
|
||||
print_section("10. PIPELINE CACHE STATUS")
|
||||
|
||||
try:
|
||||
sys.path.insert(0, '/root/art-dag/celery')
|
||||
from sexp_effects.primitive_libs.streaming_gpu import (
|
||||
_FUSED_PIPELINE_CACHE, _AUTONOMOUS_PIPELINE_CACHE
|
||||
)
|
||||
fused_count = len(_FUSED_PIPELINE_CACHE)
|
||||
auto_count = len(_AUTONOMOUS_PIPELINE_CACHE)
|
||||
|
||||
if fused_count > 0 or auto_count > 0:
|
||||
check_info(f"Fused pipeline cache: {fused_count} entries")
|
||||
check_info(f"Autonomous pipeline cache: {auto_count} entries")
|
||||
if fused_count > 100 or auto_count > 100:
|
||||
check_warn("Large pipeline cache - may cause memory pressure")
|
||||
else:
|
||||
check_info("Pipeline caches empty (no rendering done yet)")
|
||||
except Exception as e:
|
||||
check_info(f"Could not check pipeline cache: {e}")
|
||||
|
||||
# ============================================================
|
||||
# Summary
|
||||
# ============================================================
|
||||
print_section("SUMMARY")
|
||||
print("""
|
||||
Optimal GPU rendering requires:
|
||||
1. [CRITICAL] CuPy with working GPU operations
|
||||
2. [CRITICAL] DLPack zero-copy transfer (decord -> CuPy)
|
||||
3. [HIGH] Fast CUDA kernels from jit_compiler
|
||||
4. [MEDIUM] Fused pipeline compiler for multi-effect recipes
|
||||
5. [MEDIUM] PyNvVideoCodec for zero-copy encoding
|
||||
6. [LOW] FFmpeg NVENC/NVDEC as fallback
|
||||
|
||||
If DLPack is failing, check:
|
||||
- decord version (needs 0.6.0+ with DLPack support)
|
||||
- CuPy version compatibility
|
||||
- CUDA toolkit version match
|
||||
|
||||
If fast kernels are not loading:
|
||||
- Check if streaming/jit_compiler.py exists
|
||||
- Verify CUDA compiler (nvcc) is available
|
||||
""")
|
||||
36
docker-compose.gpu-dev.yml
Normal file
36
docker-compose.gpu-dev.yml
Normal file
@@ -0,0 +1,36 @@
|
||||
# GPU Worker Development Override
|
||||
#
|
||||
# Usage: docker stack deploy -c docker-compose.yml -c docker-compose.gpu-dev.yml celery
|
||||
# Or for quick testing: docker-compose -f docker-compose.yml -f docker-compose.gpu-dev.yml up l1-gpu-worker
|
||||
#
|
||||
# Features:
|
||||
# - Mounts source code for instant changes (no rebuild needed)
|
||||
# - Uses watchmedo for auto-reload on file changes
|
||||
# - Shows config on startup
|
||||
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
l1-gpu-worker:
|
||||
# Override command to use watchmedo for auto-reload
|
||||
command: >
|
||||
sh -c "
|
||||
pip install -q watchdog[watchmedo] 2>/dev/null || true;
|
||||
echo '=== GPU WORKER DEV MODE ===';
|
||||
echo 'Source mounted - changes take effect on restart';
|
||||
echo 'Auto-reload enabled via watchmedo';
|
||||
env | grep -E 'STREAMING_GPU|IPFS_GATEWAY|REDIS|DATABASE' | sort;
|
||||
echo '===========================';
|
||||
watchmedo auto-restart --directory=/app --pattern='*.py' --recursive -- \
|
||||
celery -A celery_app worker --loglevel=info -E -Q gpu,celery
|
||||
"
|
||||
environment:
|
||||
# Development defaults (can override with .env)
|
||||
- STREAMING_GPU_PERSIST=0
|
||||
- IPFS_GATEWAY_URL=https://celery-artdag.rose-ash.com/ipfs
|
||||
- SHOW_CONFIG=1
|
||||
volumes:
|
||||
# Mount source code for hot reload
|
||||
- ./:/app:ro
|
||||
# Keep cache local
|
||||
- gpu_cache:/data/cache
|
||||
@@ -3,6 +3,10 @@ version: "3.8"
|
||||
services:
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
ports:
|
||||
- target: 6379
|
||||
published: 16379
|
||||
mode: host # Bypass swarm routing mesh
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
networks:
|
||||
@@ -11,13 +15,21 @@ services:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- POSTGRES_USER=artdag
|
||||
- POSTGRES_PASSWORD=artdag
|
||||
- POSTGRES_DB=artdag
|
||||
ports:
|
||||
- target: 5432
|
||||
published: 15432
|
||||
mode: host # Expose for GPU worker on different VPC
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
networks:
|
||||
@@ -26,12 +38,18 @@ services:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
ipfs:
|
||||
image: ipfs/kubo:latest
|
||||
ports:
|
||||
- "4001:4001" # Swarm TCP
|
||||
- "4001:4001/udp" # Swarm UDP
|
||||
- target: 5001
|
||||
published: 15001
|
||||
mode: host # API port for GPU worker on different VPC
|
||||
volumes:
|
||||
- ipfs_data:/data/ipfs
|
||||
- l1_cache:/data/cache:ro # Read-only access to cache for adding files
|
||||
@@ -42,22 +60,31 @@ services:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
l1-server:
|
||||
image: git.rose-ash.com/art-dag/l1-server:latest
|
||||
image: registry.rose-ash.com:5000/celery-l1-server:latest
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- REDIS_URL=redis://redis:6379/5
|
||||
- DATABASE_URL=postgresql://artdag:artdag@postgres:5432/artdag
|
||||
# IPFS_API multiaddr - used for all IPFS operations (add, cat, pin)
|
||||
- IPFS_API=/dns/ipfs/tcp/5001
|
||||
- CACHE_DIR=/data/cache
|
||||
# Set IPFS_PRIMARY=true to use IPFS-primary mode (everything on IPFS, no local cache)
|
||||
# - IPFS_PRIMARY=true
|
||||
# Cluster key for trust domains - systems with same key can share work via IPFS
|
||||
# Generate with: openssl rand -hex 32
|
||||
- ARTDAG_CLUSTER_KEY=${ARTDAG_CLUSTER_KEY:-}
|
||||
# Coop app internal URLs for fragment composition
|
||||
- INTERNAL_URL_BLOG=http://blog:8000
|
||||
- INTERNAL_URL_CART=http://cart:8000
|
||||
- INTERNAL_URL_ACCOUNT=http://account:8000
|
||||
# DATABASE_URL, ADMIN_TOKEN, ARTDAG_CLUSTER_KEY,
|
||||
# L2_SERVER, L2_DOMAIN, IPFS_GATEWAY_URL from .env file
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8100/health')"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
start_period: 15s
|
||||
volumes:
|
||||
- l1_cache:/data/cache
|
||||
depends_on:
|
||||
@@ -69,20 +96,26 @@ services:
|
||||
- externalnet
|
||||
deploy:
|
||||
replicas: 1
|
||||
update_config:
|
||||
order: start-first
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
l1-worker:
|
||||
image: git.rose-ash.com/art-dag/l1-server:latest
|
||||
command: celery -A celery_app worker --loglevel=info -E
|
||||
image: registry.rose-ash.com:5000/celery-l1-server:latest
|
||||
command: sh -c "find /app -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null; celery -A celery_app worker --loglevel=info -E -Q celery,gpu"
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- REDIS_URL=redis://redis:6379/5
|
||||
- DATABASE_URL=postgresql://artdag:artdag@postgres:5432/artdag
|
||||
# IPFS_API multiaddr - used for all IPFS operations (add, cat, pin)
|
||||
- IPFS_API=/dns/ipfs/tcp/5001
|
||||
- CACHE_DIR=/data/cache
|
||||
- C_FORCE_ROOT=true
|
||||
# Must match l1-server for consistent cache_ids
|
||||
- ARTDAG_CLUSTER_KEY=${ARTDAG_CLUSTER_KEY:-}
|
||||
# DATABASE_URL, ARTDAG_CLUSTER_KEY from .env file
|
||||
volumes:
|
||||
- l1_cache:/data/cache
|
||||
depends_on:
|
||||
@@ -95,6 +128,9 @@ services:
|
||||
replicas: 2
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
flower:
|
||||
image: mher/flower:2.0
|
||||
@@ -111,12 +147,42 @@ services:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
# GPU worker for streaming/rendering tasks
|
||||
# Build: docker build -f Dockerfile.gpu -t registry.rose-ash.com:5000/celery-l1-gpu-server:latest .
|
||||
# Requires: docker node update --label-add gpu=true <gpu-node-name>
|
||||
l1-gpu-worker:
|
||||
image: registry.rose-ash.com:5000/celery-l1-gpu-server:latest
|
||||
command: sh -c "cd /app && celery -A celery_app worker --loglevel=info -E -Q gpu,celery"
|
||||
env_file:
|
||||
- .env.gpu
|
||||
volumes:
|
||||
# Local cache - ephemeral, just for working files
|
||||
- gpu_cache:/data/cache
|
||||
# Note: No source mount - GPU worker uses code from image
|
||||
depends_on:
|
||||
- redis
|
||||
- postgres
|
||||
- ipfs
|
||||
networks:
|
||||
- celery
|
||||
deploy:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu == true
|
||||
|
||||
volumes:
|
||||
redis_data:
|
||||
postgres_data:
|
||||
ipfs_data:
|
||||
l1_cache:
|
||||
gpu_cache: # Ephemeral cache for GPU workers
|
||||
|
||||
networks:
|
||||
celery:
|
||||
|
||||
150
effects/quick_test_explicit.sexp
Normal file
150
effects/quick_test_explicit.sexp
Normal file
@@ -0,0 +1,150 @@
|
||||
;; Quick Test - Fully Explicit Streaming Version
|
||||
;;
|
||||
;; The interpreter is completely generic - knows nothing about video/audio.
|
||||
;; All domain logic is explicit via primitives.
|
||||
;;
|
||||
;; Run with built-in sources/audio:
|
||||
;; python3 -m streaming.stream_sexp_generic effects/quick_test_explicit.sexp --fps 30
|
||||
;;
|
||||
;; Run with external config files:
|
||||
;; python3 -m streaming.stream_sexp_generic effects/quick_test_explicit.sexp \
|
||||
;; --sources configs/sources-default.sexp \
|
||||
;; --audio configs/audio-dizzy.sexp \
|
||||
;; --fps 30
|
||||
|
||||
(stream "quick_test_explicit"
|
||||
:fps 30
|
||||
:width 1920
|
||||
:height 1080
|
||||
:seed 42
|
||||
|
||||
;; Load standard primitives and effects
|
||||
(include :path "../templates/standard-primitives.sexp")
|
||||
(include :path "../templates/standard-effects.sexp")
|
||||
|
||||
;; Load reusable templates
|
||||
(include :path "../templates/stream-process-pair.sexp")
|
||||
(include :path "../templates/crossfade-zoom.sexp")
|
||||
|
||||
;; === SOURCES AS ARRAY ===
|
||||
(def sources [
|
||||
(streaming:make-video-source "monday.webm" 30)
|
||||
(streaming:make-video-source "escher.webm" 30)
|
||||
(streaming:make-video-source "2.webm" 30)
|
||||
(streaming:make-video-source "disruptors.webm" 30)
|
||||
(streaming:make-video-source "4.mp4" 30)
|
||||
(streaming:make-video-source "ecstacy.mp4" 30)
|
||||
(streaming:make-video-source "dopple.webm" 30)
|
||||
(streaming:make-video-source "5.mp4" 30)
|
||||
])
|
||||
|
||||
;; Per-pair config: [rot-dir, rot-a-max, rot-b-max, zoom-a-max, zoom-b-max]
|
||||
;; Pairs 3,6: reversed (negative rot-a, positive rot-b, shrink zoom-a, grow zoom-b)
|
||||
;; Pair 5: smaller ranges
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 0: monday
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 1: escher
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 2: vid2
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5} ;; 3: disruptors (reversed)
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 4: vid4
|
||||
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7} ;; 5: ecstacy (smaller)
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5} ;; 6: dopple (reversed)
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5} ;; 7: vid5
|
||||
])
|
||||
|
||||
;; Audio analyzer
|
||||
(def music (streaming:make-audio-analyzer "dizzy.mp3"))
|
||||
|
||||
;; Audio playback
|
||||
(audio-playback "../dizzy.mp3")
|
||||
|
||||
;; === GLOBAL SCANS ===
|
||||
|
||||
;; Cycle state: which source is active (recipe-specific)
|
||||
;; clen = beats per source (8-24 beats = ~4-12 seconds)
|
||||
(scan cycle (streaming:audio-beat music t)
|
||||
:init {:active 0 :beat 0 :clen 16}
|
||||
:step (if (< (+ beat 1) clen)
|
||||
(dict :active active :beat (+ beat 1) :clen clen)
|
||||
(dict :active (mod (+ active 1) (len sources)) :beat 0
|
||||
:clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))
|
||||
|
||||
;; Reusable scans from templates (require 'music' to be defined)
|
||||
(include :path "../templates/scan-oscillating-spin.sexp")
|
||||
(include :path "../templates/scan-ripple-drops.sexp")
|
||||
|
||||
;; === PER-PAIR STATE (dynamically sized based on sources) ===
|
||||
;; Each pair has: inv-a, inv-b, hue-a, hue-b, mix, rot-angle
|
||||
(scan pairs (streaming:audio-beat music t)
|
||||
:init {:states (map (core:range (len sources)) (lambda (_)
|
||||
{:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
|
||||
:step (dict :states (map states (lambda (p)
|
||||
(let [;; Invert toggles (10% chance, lasts 1-4 beats)
|
||||
new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
|
||||
new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
|
||||
;; Hue shifts (10% chance, lasts 1-4 beats) - use countdown like invert
|
||||
old-hue-a (get p :hue-a)
|
||||
old-hue-b (get p :hue-b)
|
||||
new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
|
||||
new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
|
||||
;; Pick random hue value when triggering (stored separately)
|
||||
new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
|
||||
new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
|
||||
;; Mix (holds for 1-10 beats, then picks 0, 0.5, or 1)
|
||||
mix-rem (get p :mix-rem)
|
||||
old-mix (get p :mix)
|
||||
new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
|
||||
new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
|
||||
;; Rotation (accumulates, reverses direction when cycle completes)
|
||||
rot-beat (get p :rot-beat)
|
||||
rot-clen (get p :rot-clen)
|
||||
old-angle (get p :angle)
|
||||
;; Note: dir comes from pair-configs, but we store rotation state here
|
||||
new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
|
||||
new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
|
||||
new-angle (+ old-angle (/ 360 rot-clen))]
|
||||
(dict :inv-a new-inv-a :inv-b new-inv-b
|
||||
:hue-a new-hue-a :hue-b new-hue-b
|
||||
:hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
|
||||
:mix new-mix :mix-rem new-mix-rem
|
||||
:angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))
|
||||
|
||||
;; === FRAME PIPELINE ===
|
||||
(frame
|
||||
(let [now t
|
||||
e (streaming:audio-energy music now)
|
||||
|
||||
;; Get cycle state
|
||||
active (bind cycle :active)
|
||||
beat-pos (bind cycle :beat)
|
||||
clen (bind cycle :clen)
|
||||
|
||||
;; Transition logic: last third of cycle crossfades to next
|
||||
phase3 (* beat-pos 3)
|
||||
fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
|
||||
fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
|
||||
next-idx (mod (+ active 1) (len sources))
|
||||
|
||||
;; Get pair states array (required by process-pair macro)
|
||||
pair-states (bind pairs :states)
|
||||
|
||||
;; Process active pair using macro from template
|
||||
active-frame (process-pair active)
|
||||
|
||||
;; Crossfade with zoom during transition (using macro)
|
||||
result (if fading
|
||||
(crossfade-zoom active-frame (process-pair next-idx) fade-amt)
|
||||
active-frame)
|
||||
|
||||
;; Final: global spin + ripple
|
||||
spun (rotate result :angle (bind spin :angle))
|
||||
rip-gate (bind ripple-state :gate)
|
||||
rip-amp (* rip-gate (core:map-range e 0 1 5 50))]
|
||||
|
||||
(ripple spun
|
||||
:amplitude rip-amp
|
||||
:center_x (bind ripple-state :cx)
|
||||
:center_y (bind ripple-state :cy)
|
||||
:frequency 8
|
||||
:decay 2
|
||||
:speed 5))))
|
||||
294
hybrid_state.py
294
hybrid_state.py
@@ -1,294 +0,0 @@
|
||||
"""
|
||||
Hybrid State Manager: Local Redis + IPNS Sync.
|
||||
|
||||
Provides fast local operations with eventual consistency across L1 nodes.
|
||||
|
||||
- Local Redis: Fast reads/writes (microseconds)
|
||||
- IPNS Sync: Background sync with other nodes (every N seconds)
|
||||
- Duplicate work: Accepted, idempotent (same inputs → same CID)
|
||||
|
||||
Usage:
|
||||
from hybrid_state import get_state_manager
|
||||
|
||||
state = get_state_manager()
|
||||
|
||||
# Fast local lookup
|
||||
cid = state.get_cached_cid(cache_id)
|
||||
|
||||
# Fast local write (synced in background)
|
||||
state.set_cached_cid(cache_id, output_cid)
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from typing import Dict, Optional
|
||||
|
||||
import redis
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Configuration
|
||||
REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/5")
|
||||
CLUSTER_KEY = os.environ.get("ARTDAG_CLUSTER_KEY", "default")
|
||||
IPNS_SYNC_INTERVAL = int(os.environ.get("ARTDAG_IPNS_SYNC_INTERVAL", "30"))
|
||||
IPNS_ENABLED = os.environ.get("ARTDAG_IPNS_SYNC", "").lower() in ("true", "1", "yes")
|
||||
|
||||
# Redis keys
|
||||
CACHE_KEY = "artdag:cid_cache" # hash: cache_id → output CID
|
||||
ANALYSIS_KEY = "artdag:analysis_cache" # hash: input_hash:features → analysis CID
|
||||
PLAN_KEY = "artdag:plan_cache" # hash: plan_id → plan CID
|
||||
RUN_KEY = "artdag:run_cache" # hash: run_id → output CID
|
||||
CLAIM_KEY_PREFIX = "artdag:claim:" # string: cache_id → worker (with TTL)
|
||||
|
||||
# IPNS names (relative to cluster key)
|
||||
IPNS_CACHE_NAME = "cache"
|
||||
IPNS_ANALYSIS_NAME = "analysis"
|
||||
IPNS_PLAN_NAME = "plans"
|
||||
|
||||
|
||||
class HybridStateManager:
|
||||
"""
|
||||
Local Redis + async IPNS sync for distributed L1 coordination.
|
||||
|
||||
Fast path (local Redis):
|
||||
- get_cached_cid / set_cached_cid
|
||||
- try_claim / release_claim
|
||||
|
||||
Slow path (background IPNS sync):
|
||||
- Periodically syncs local state with global IPNS state
|
||||
- Merges remote state into local (pulls new entries)
|
||||
- Publishes local state to IPNS (pushes updates)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
redis_url: str = REDIS_URL,
|
||||
cluster_key: str = CLUSTER_KEY,
|
||||
sync_interval: int = IPNS_SYNC_INTERVAL,
|
||||
ipns_enabled: bool = IPNS_ENABLED,
|
||||
):
|
||||
self.cluster_key = cluster_key
|
||||
self.sync_interval = sync_interval
|
||||
self.ipns_enabled = ipns_enabled
|
||||
|
||||
# Connect to Redis
|
||||
self._redis = redis.from_url(redis_url, decode_responses=True)
|
||||
|
||||
# IPNS client (lazy import)
|
||||
self._ipfs = None
|
||||
|
||||
# Sync thread
|
||||
self._sync_thread = None
|
||||
self._stop_sync = threading.Event()
|
||||
|
||||
# Start background sync if enabled
|
||||
if self.ipns_enabled:
|
||||
self._start_background_sync()
|
||||
|
||||
@property
|
||||
def ipfs(self):
|
||||
"""Lazy import of IPFS client."""
|
||||
if self._ipfs is None:
|
||||
try:
|
||||
import ipfs_client
|
||||
self._ipfs = ipfs_client
|
||||
except ImportError:
|
||||
logger.warning("ipfs_client not available, IPNS sync disabled")
|
||||
self._ipfs = False
|
||||
return self._ipfs if self._ipfs else None
|
||||
|
||||
# ========== CID Cache ==========
|
||||
|
||||
def get_cached_cid(self, cache_id: str) -> Optional[str]:
|
||||
"""Get output CID for a cache_id. Fast local lookup."""
|
||||
return self._redis.hget(CACHE_KEY, cache_id)
|
||||
|
||||
def set_cached_cid(self, cache_id: str, cid: str) -> None:
|
||||
"""Set output CID for a cache_id. Fast local write."""
|
||||
self._redis.hset(CACHE_KEY, cache_id, cid)
|
||||
|
||||
def get_all_cached_cids(self) -> Dict[str, str]:
|
||||
"""Get all cached CIDs."""
|
||||
return self._redis.hgetall(CACHE_KEY)
|
||||
|
||||
# ========== Analysis Cache ==========
|
||||
|
||||
def get_analysis_cid(self, input_hash: str, features: list) -> Optional[str]:
|
||||
"""Get analysis CID for input + features."""
|
||||
key = f"{input_hash}:{','.join(sorted(features))}"
|
||||
return self._redis.hget(ANALYSIS_KEY, key)
|
||||
|
||||
def set_analysis_cid(self, input_hash: str, features: list, cid: str) -> None:
|
||||
"""Set analysis CID for input + features."""
|
||||
key = f"{input_hash}:{','.join(sorted(features))}"
|
||||
self._redis.hset(ANALYSIS_KEY, key, cid)
|
||||
|
||||
def get_all_analysis_cids(self) -> Dict[str, str]:
|
||||
"""Get all analysis CIDs."""
|
||||
return self._redis.hgetall(ANALYSIS_KEY)
|
||||
|
||||
# ========== Plan Cache ==========
|
||||
|
||||
def get_plan_cid(self, plan_id: str) -> Optional[str]:
|
||||
"""Get plan CID for a plan_id."""
|
||||
return self._redis.hget(PLAN_KEY, plan_id)
|
||||
|
||||
def set_plan_cid(self, plan_id: str, cid: str) -> None:
|
||||
"""Set plan CID for a plan_id."""
|
||||
self._redis.hset(PLAN_KEY, plan_id, cid)
|
||||
|
||||
def get_all_plan_cids(self) -> Dict[str, str]:
|
||||
"""Get all plan CIDs."""
|
||||
return self._redis.hgetall(PLAN_KEY)
|
||||
|
||||
# ========== Run Cache ==========
|
||||
|
||||
def get_run_cid(self, run_id: str) -> Optional[str]:
|
||||
"""Get output CID for a run_id."""
|
||||
return self._redis.hget(RUN_KEY, run_id)
|
||||
|
||||
def set_run_cid(self, run_id: str, cid: str) -> None:
|
||||
"""Set output CID for a run_id."""
|
||||
self._redis.hset(RUN_KEY, run_id, cid)
|
||||
|
||||
# ========== Claiming ==========
|
||||
|
||||
def try_claim(self, cache_id: str, worker_id: str, ttl: int = 300) -> bool:
|
||||
"""
|
||||
Try to claim a cache_id for execution.
|
||||
|
||||
Returns True if claimed, False if already claimed by another worker.
|
||||
Uses Redis SETNX for atomic claim.
|
||||
"""
|
||||
key = f"{CLAIM_KEY_PREFIX}{cache_id}"
|
||||
return self._redis.set(key, worker_id, nx=True, ex=ttl)
|
||||
|
||||
def release_claim(self, cache_id: str) -> None:
|
||||
"""Release a claim."""
|
||||
key = f"{CLAIM_KEY_PREFIX}{cache_id}"
|
||||
self._redis.delete(key)
|
||||
|
||||
def get_claim(self, cache_id: str) -> Optional[str]:
|
||||
"""Get current claim holder for a cache_id."""
|
||||
key = f"{CLAIM_KEY_PREFIX}{cache_id}"
|
||||
return self._redis.get(key)
|
||||
|
||||
# ========== IPNS Sync ==========
|
||||
|
||||
def _start_background_sync(self):
|
||||
"""Start background IPNS sync thread."""
|
||||
if self._sync_thread is not None:
|
||||
return
|
||||
|
||||
def sync_loop():
|
||||
logger.info(f"IPNS sync started (interval={self.sync_interval}s)")
|
||||
while not self._stop_sync.wait(timeout=self.sync_interval):
|
||||
try:
|
||||
self._sync_with_ipns()
|
||||
except Exception as e:
|
||||
logger.warning(f"IPNS sync failed: {e}")
|
||||
|
||||
self._sync_thread = threading.Thread(target=sync_loop, daemon=True)
|
||||
self._sync_thread.start()
|
||||
|
||||
def stop_sync(self):
|
||||
"""Stop background sync thread."""
|
||||
self._stop_sync.set()
|
||||
if self._sync_thread:
|
||||
self._sync_thread.join(timeout=5)
|
||||
|
||||
def _sync_with_ipns(self):
|
||||
"""Sync local state with IPNS global state."""
|
||||
if not self.ipfs:
|
||||
return
|
||||
|
||||
logger.debug("Starting IPNS sync...")
|
||||
|
||||
# Sync each cache type
|
||||
self._sync_hash(CACHE_KEY, IPNS_CACHE_NAME)
|
||||
self._sync_hash(ANALYSIS_KEY, IPNS_ANALYSIS_NAME)
|
||||
self._sync_hash(PLAN_KEY, IPNS_PLAN_NAME)
|
||||
|
||||
logger.debug("IPNS sync complete")
|
||||
|
||||
def _sync_hash(self, redis_key: str, ipns_name: str):
|
||||
"""Sync a Redis hash with IPNS."""
|
||||
ipns_full_name = f"{self.cluster_key}/{ipns_name}"
|
||||
|
||||
# Pull: resolve IPNS → get global state
|
||||
global_state = {}
|
||||
try:
|
||||
global_cid = self.ipfs.name_resolve(ipns_full_name)
|
||||
if global_cid:
|
||||
global_bytes = self.ipfs.get_bytes(global_cid)
|
||||
if global_bytes:
|
||||
global_state = json.loads(global_bytes.decode('utf-8'))
|
||||
logger.debug(f"Pulled {len(global_state)} entries from {ipns_name}")
|
||||
except Exception as e:
|
||||
logger.debug(f"Could not resolve {ipns_full_name}: {e}")
|
||||
|
||||
# Merge global into local (add entries we don't have)
|
||||
if global_state:
|
||||
pipe = self._redis.pipeline()
|
||||
for key, value in global_state.items():
|
||||
pipe.hsetnx(redis_key, key, value)
|
||||
results = pipe.execute()
|
||||
added = sum(1 for r in results if r)
|
||||
if added:
|
||||
logger.info(f"Merged {added} new entries from IPNS/{ipns_name}")
|
||||
|
||||
# Push: get local state, merge with global, publish
|
||||
local_state = self._redis.hgetall(redis_key)
|
||||
if local_state:
|
||||
merged = {**global_state, **local_state}
|
||||
|
||||
# Only publish if we have new entries
|
||||
if len(merged) > len(global_state):
|
||||
try:
|
||||
new_cid = self.ipfs.add_json(merged)
|
||||
if new_cid:
|
||||
# Note: name_publish can be slow
|
||||
self.ipfs.name_publish(ipns_full_name, new_cid)
|
||||
logger.info(f"Published {len(merged)} entries to IPNS/{ipns_name}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to publish to {ipns_full_name}: {e}")
|
||||
|
||||
def force_sync(self):
|
||||
"""Force an immediate IPNS sync (blocking)."""
|
||||
self._sync_with_ipns()
|
||||
|
||||
# ========== Stats ==========
|
||||
|
||||
def get_stats(self) -> Dict:
|
||||
"""Get cache statistics."""
|
||||
return {
|
||||
"cached_cids": self._redis.hlen(CACHE_KEY),
|
||||
"analysis_cids": self._redis.hlen(ANALYSIS_KEY),
|
||||
"plan_cids": self._redis.hlen(PLAN_KEY),
|
||||
"run_cids": self._redis.hlen(RUN_KEY),
|
||||
"ipns_enabled": self.ipns_enabled,
|
||||
"cluster_key": self.cluster_key[:16] + "..." if len(self.cluster_key) > 16 else self.cluster_key,
|
||||
}
|
||||
|
||||
|
||||
# Singleton instance
|
||||
_state_manager: Optional[HybridStateManager] = None
|
||||
|
||||
|
||||
def get_state_manager() -> HybridStateManager:
    """Return the process-wide HybridStateManager, creating it on first use.

    Note: no locking is performed here; concurrent first calls from
    multiple threads could race to create the instance.
    """
    global _state_manager
    if _state_manager is not None:
        return _state_manager
    _state_manager = HybridStateManager()
    return _state_manager
|
||||
|
||||
|
||||
def reset_state_manager():
    """Tear down the singleton so tests start from a clean slate."""
    global _state_manager
    manager = _state_manager
    if manager:
        # Stop the background IPNS sync loop before dropping the instance.
        manager.stop_sync()
    _state_manager = None
|
||||
@@ -10,7 +10,7 @@ import logging
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from typing import Optional, Union
|
||||
|
||||
import requests
|
||||
|
||||
@@ -19,8 +19,18 @@ logger = logging.getLogger(__name__)
|
||||
# IPFS API multiaddr - default to local, docker uses /dns/ipfs/tcp/5001
|
||||
IPFS_API = os.getenv("IPFS_API", "/ip4/127.0.0.1/tcp/5001")
|
||||
|
||||
# Connection timeout in seconds
|
||||
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "30"))
|
||||
# Connection timeout in seconds (increased for large files)
|
||||
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "120"))
|
||||
|
||||
# IPFS gateway URLs for fallback when local node doesn't have content
|
||||
# Comma-separated list of gateway URLs (without /ipfs/ suffix)
|
||||
IPFS_GATEWAYS = [g.strip() for g in os.getenv(
|
||||
"IPFS_GATEWAYS",
|
||||
"https://ipfs.io,https://cloudflare-ipfs.com,https://dweb.link"
|
||||
).split(",") if g.strip()]
|
||||
|
||||
# Gateway timeout (shorter than API timeout for faster fallback)
|
||||
GATEWAY_TIMEOUT = int(os.getenv("GATEWAY_TIMEOUT", "30"))
|
||||
|
||||
|
||||
def _multiaddr_to_url(multiaddr: str) -> str:
|
||||
@@ -45,18 +55,22 @@ def _multiaddr_to_url(multiaddr: str) -> str:
|
||||
IPFS_BASE_URL = _multiaddr_to_url(IPFS_API)
|
||||
|
||||
|
||||
def add_file(file_path: Path, pin: bool = True) -> Optional[str]:
|
||||
def add_file(file_path: Union[Path, str], pin: bool = True) -> Optional[str]:
|
||||
"""
|
||||
Add a file to IPFS and optionally pin it.
|
||||
|
||||
Args:
|
||||
file_path: Path to the file to add
|
||||
file_path: Path to the file to add (Path object or string)
|
||||
pin: Whether to pin the file (default: True)
|
||||
|
||||
Returns:
|
||||
IPFS CID (content identifier) or None on failure
|
||||
"""
|
||||
try:
|
||||
# Ensure file_path is a Path object
|
||||
if isinstance(file_path, str):
|
||||
file_path = Path(file_path)
|
||||
|
||||
url = f"{IPFS_BASE_URL}/api/v0/add"
|
||||
params = {"pin": str(pin).lower()}
|
||||
|
||||
@@ -118,13 +132,27 @@ def add_json(data: dict, pin: bool = True) -> Optional[str]:
|
||||
return add_bytes(json_bytes, pin=pin)
|
||||
|
||||
|
||||
def get_file(cid: str, dest_path: Path) -> bool:
|
||||
def add_string(content: str, pin: bool = True) -> Optional[str]:
|
||||
"""
|
||||
Add a string to IPFS and optionally pin it.
|
||||
|
||||
Args:
|
||||
content: String content to add (e.g., S-expression)
|
||||
pin: Whether to pin the data (default: True)
|
||||
|
||||
Returns:
|
||||
IPFS CID or None on failure
|
||||
"""
|
||||
return add_bytes(content.encode('utf-8'), pin=pin)
|
||||
|
||||
|
||||
def get_file(cid: str, dest_path: Union[Path, str]) -> bool:
|
||||
"""
|
||||
Retrieve a file from IPFS and save to destination.
|
||||
|
||||
Args:
|
||||
cid: IPFS CID to retrieve
|
||||
dest_path: Path to save the file
|
||||
dest_path: Path to save the file (Path object or string)
|
||||
|
||||
Returns:
|
||||
True on success, False on failure
|
||||
@@ -134,6 +162,10 @@ def get_file(cid: str, dest_path: Path) -> bool:
|
||||
if data is None:
|
||||
return False
|
||||
|
||||
# Ensure dest_path is a Path object
|
||||
if isinstance(dest_path, str):
|
||||
dest_path = Path(dest_path)
|
||||
|
||||
dest_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
dest_path.write_bytes(data)
|
||||
logger.info(f"Retrieved from IPFS: {cid} -> {dest_path}")
|
||||
@@ -143,16 +175,50 @@ def get_file(cid: str, dest_path: Path) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def get_bytes_from_gateway(cid: str) -> Optional[bytes]:
    """
    Retrieve bytes from IPFS via public gateways (fallback).

    Gateways are tried in the configured order; the first successful
    response wins.

    Args:
        cid: IPFS CID to retrieve

    Returns:
        File content as bytes or None if all gateways fail
    """
    for gateway in IPFS_GATEWAYS:
        url = f"{gateway}/ipfs/{cid}"
        logger.info(f"Trying gateway: {url}")
        try:
            response = requests.get(url, timeout=GATEWAY_TIMEOUT)
            response.raise_for_status()
            data = response.content
        except Exception as e:
            logger.warning(f"Gateway {gateway} failed for {cid}: {e}")
            continue
        logger.info(f"Retrieved from gateway {gateway}: {cid} ({len(data)} bytes)")
        return data

    logger.error(f"All gateways failed for {cid}")
    return None
|
||||
|
||||
|
||||
def get_bytes(cid: str, use_gateway_fallback: bool = True) -> Optional[bytes]:
|
||||
"""
|
||||
Retrieve bytes data from IPFS.
|
||||
|
||||
Tries local IPFS node first, then falls back to public gateways
|
||||
if configured and use_gateway_fallback is True.
|
||||
|
||||
Args:
|
||||
cid: IPFS CID to retrieve
|
||||
use_gateway_fallback: If True, try public gateways on local failure
|
||||
|
||||
Returns:
|
||||
File content as bytes or None on failure
|
||||
"""
|
||||
# Try local IPFS node first
|
||||
try:
|
||||
url = f"{IPFS_BASE_URL}/api/v0/cat"
|
||||
params = {"arg": cid}
|
||||
@@ -164,6 +230,13 @@ def get_bytes(cid: str) -> Optional[bytes]:
|
||||
logger.info(f"Retrieved from IPFS: {cid} ({len(data)} bytes)")
|
||||
return data
|
||||
except Exception as e:
|
||||
logger.warning(f"Local IPFS failed for {cid}: {e}")
|
||||
|
||||
# Try gateway fallback
|
||||
if use_gateway_fallback and IPFS_GATEWAYS:
|
||||
logger.info(f"Trying gateway fallback for {cid}")
|
||||
return get_bytes_from_gateway(cid)
|
||||
|
||||
logger.error(f"Failed to get bytes from IPFS: {e}")
|
||||
return None
|
||||
|
||||
|
||||
455
legacy_tasks.py
455
legacy_tasks.py
@@ -1,455 +0,0 @@
|
||||
"""
|
||||
Art DAG Celery Tasks
|
||||
|
||||
Distributed rendering tasks for the Art DAG system.
|
||||
Supports both single-effect runs and multi-step DAG execution.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from celery import Task
|
||||
from celery_app import app
|
||||
|
||||
# Import artdag components
|
||||
from artdag import DAG, Node, NodeType
|
||||
from artdag.engine import Engine
|
||||
from artdag.executor import register_executor, Executor, get_executor
|
||||
from artdag.nodes.effect import register_effect
|
||||
import artdag.nodes # Register all built-in executors (SOURCE, EFFECT, etc.)
|
||||
|
||||
# Add effects to path (use env var in Docker, fallback to home dir locally)
|
||||
EFFECTS_PATH = Path(os.environ.get("EFFECTS_PATH", str(Path.home() / "artdag-effects")))
|
||||
ARTDAG_PATH = Path(os.environ.get("ARTDAG_PATH", str(Path.home() / "art" / "artdag")))
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_effects_commit() -> str:
    """Return the current git commit hash of the effects repo.

    Returns:
        The commit hash from ``git rev-parse HEAD``, or "unknown" if the
        repo is missing, git is unavailable, or the command fails or
        times out.
    """
    try:
        result = subprocess.run(
            ["git", "rev-parse", "HEAD"],
            cwd=EFFECTS_PATH,
            capture_output=True,
            text=True,
            timeout=10,  # don't let a wedged git invocation hang the worker
        )
        if result.returncode == 0:
            return result.stdout.strip()
    except Exception:
        # Best-effort: any failure degrades to the "unknown" sentinel.
        pass
    return "unknown"
|
||||
|
||||
|
||||
def get_artdag_commit() -> str:
    """Return the current git commit hash of the artdag repo.

    Returns:
        The commit hash from ``git rev-parse HEAD``, or "unknown" if the
        repo is missing, git is unavailable, or the command fails or
        times out.
    """
    try:
        result = subprocess.run(
            ["git", "rev-parse", "HEAD"],
            cwd=ARTDAG_PATH,
            capture_output=True,
            text=True,
            timeout=10,  # don't let a wedged git invocation hang the worker
        )
        if result.returncode == 0:
            return result.stdout.strip()
    except Exception:
        # Best-effort: any failure degrades to the "unknown" sentinel.
        pass
    return "unknown"
|
||||
|
||||
|
||||
sys.path.insert(0, str(EFFECTS_PATH / "dog"))
|
||||
|
||||
# Register the dog effect with the EFFECT executor
|
||||
from effect import effect_dog
|
||||
|
||||
@register_effect("dog")
def _dog_effect(input_path: Path, output_path: Path, config: dict) -> Path:
    """Dog effect wrapper - registered for DAG EFFECT nodes.

    Thin adapter so the externally loaded ``effect_dog`` (imported from
    the effects repo added to sys.path above) is callable through the
    effect registry.

    Args:
        input_path: Source asset to transform.
        output_path: Where the effect should write its result.
        config: Effect-specific options, passed through unchanged.

    Returns:
        The path returned by ``effect_dog``.
    """
    return effect_dog(input_path, output_path, config)
|
||||
|
||||
|
||||
def file_hash(path: Path) -> str:
    """Compute the SHA3-256 hex digest of a file's contents.

    Symlinks are resolved so the digest reflects the target file.
    Reads in 64 KiB chunks to keep memory flat for large files.
    """
    digest = hashlib.sha3_256()
    target = path.resolve() if path.is_symlink() else path
    with open(target, "rb") as handle:
        while True:
            chunk = handle.read(65536)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
|
||||
|
||||
|
||||
# Cache directory (shared between server and worker)
|
||||
CACHE_DIR = Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
|
||||
|
||||
|
||||
# ============ Executors for Effects ============
|
||||
|
||||
@register_executor("effect:dog")
class DogExecutor(Executor):
    """Runs the externally loaded "dog" effect for DAG effect nodes."""

    def execute(self, config: Dict, inputs: List[Path], output_path: Path) -> Path:
        # Imported lazily: the effects repo is added to sys.path at module load.
        from effect import effect_dog

        if len(inputs) == 1:
            return effect_dog(inputs[0], output_path, config)
        raise ValueError(f"Dog effect expects 1 input, got {len(inputs)}")
|
||||
|
||||
|
||||
@register_executor("effect:identity")
class IdentityExecutor(Executor):
    """Runs the built-in identity (passthrough) effect for DAG nodes."""

    def execute(self, config: Dict, inputs: List[Path], output_path: Path) -> Path:
        # Imported lazily to keep module import light.
        from artdag.nodes.effect import effect_identity

        if len(inputs) == 1:
            return effect_identity(inputs[0], output_path, config)
        raise ValueError(f"Identity effect expects 1 input, got {len(inputs)}")
|
||||
|
||||
|
||||
@register_executor(NodeType.SOURCE)
class SourceExecutor(Executor):
    """Resolves SOURCE nodes to a cached file path by content hash.

    SOURCE nodes perform no transformation; the resolved path is used
    by the engine as input to subsequent nodes.
    """

    def execute(self, config: Dict, inputs: List[Path], output_path: Path) -> Path:
        content_hash = config.get("content_hash")
        if not content_hash:
            raise ValueError("SOURCE node requires content_hash in config")

        # Fast path: the flat cache directory keyed directly by hash.
        candidate = CACHE_DIR / content_hash
        if candidate.exists():
            return candidate

        # Fall back to the cache manager's node store.
        from cache_manager import get_cache_manager
        candidate = get_cache_manager().get_by_content_hash(content_hash)
        if candidate and candidate.exists():
            return candidate

        raise ValueError(f"Source content not in cache: {content_hash}")
|
||||
|
||||
|
||||
class RenderTask(Task):
    """Base task with provenance tracking.

    Celery base class whose success/failure hooks log render outcomes.
    """

    def on_success(self, retval, task_id, args, kwargs):
        """Record successful render."""
        # NOTE(review): uses print (not logger) — output goes to the
        # worker's stdout; confirm that is intentional.
        print(f"Task {task_id} completed: {retval}")

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        """Record failed render."""
        print(f"Task {task_id} failed: {exc}")
|
||||
|
||||
|
||||
@app.task(base=RenderTask, bind=True)
def render_effect(self, input_hash: str, effect_name: str, output_name: str) -> dict:
    """
    Render an effect on an input asset.

    Pipeline: resolve the input from cache -> verify its hash -> apply
    the effect -> verify the output hash against the expected value ->
    build a provenance record and store it on IPFS (best-effort).

    Args:
        input_hash: SHA3-256 hash of input asset
        effect_name: Name of effect (e.g., "dog", "identity")
        output_name: Name for output asset

    Returns:
        Provenance record with output hash

    Raises:
        ValueError: unknown effect, input missing from cache, or a
            hash mismatch on either the input or the output.
    """
    from cache_manager import get_cache_manager

    # Registry hashes (for effects/infra metadata only)
    # NOTE(review): hard-coded pins; these must be updated whenever the
    # effect/infra definitions change.
    REGISTRY = {
        "effect:dog": {
            "hash": "d048fe313433eb4e38f0e24194ffae91b896ca3e6eed3e50b2cc37b7be495555"
        },
        "effect:identity": {
            "hash": "640ea11ee881ebf4101af0a955439105ab11e763682b209e88ea08fc66e1cc03"
        },
        "infra:artdag": {
            "hash": "96a5972de216aee12ec794dcad5f9360da2e676171eabf24a46dfe1ee5fee4b0"
        },
        "infra:giles-hp": {
            "hash": "964bf6e69dc4e2493f42375013caffe26404ec3cf8eb5d9bc170cd42a361523b"
        }
    }

    # Input comes from cache by hash (supports both legacy and new cache locations)
    cache_manager = get_cache_manager()
    input_path = cache_manager.get_by_content_hash(input_hash)
    if not input_path or not input_path.exists():
        raise ValueError(f"Input not in cache: {input_hash}")

    output_dir = CACHE_DIR

    # Verify input: the cached bytes must actually match the requested hash.
    actual_hash = file_hash(input_path)
    if actual_hash != input_hash:
        raise ValueError(f"Input hash mismatch: expected {input_hash}, got {actual_hash}")

    # Surface progress to Celery result-backend consumers.
    self.update_state(state='RENDERING', meta={'effect': effect_name, 'input': input_hash[:16]})

    # Load and apply effect
    if effect_name == "dog":
        from effect import effect_dog, DOG_HASH
        output_path = output_dir / f"{output_name}.mkv"
        result = effect_dog(input_path, output_path, {})
        # Dog output is pinned to DOG_HASH (deterministic render expected).
        expected_hash = DOG_HASH
    elif effect_name == "identity":
        from artdag.nodes.effect import effect_identity
        output_path = output_dir / f"{output_name}{input_path.suffix}"
        result = effect_identity(input_path, output_path, {})
        # Identity must reproduce the input byte-for-byte.
        expected_hash = input_hash
    else:
        raise ValueError(f"Unknown effect: {effect_name}")

    # Verify output against the expected hash (determinism check).
    output_hash = file_hash(result)
    if output_hash != expected_hash:
        raise ValueError(f"Output hash mismatch: expected {expected_hash}, got {output_hash}")

    # Build effect info based on source
    if effect_name == "identity":
        # Identity is from artdag package on GitHub
        artdag_commit = get_artdag_commit()
        effect_info = {
            "name": f"effect:{effect_name}",
            "content_hash": REGISTRY[f"effect:{effect_name}"]["hash"],
            "repo": "github",
            "repo_commit": artdag_commit,
            "repo_url": f"https://github.com/gilesbradshaw/art-dag/blob/{artdag_commit}/artdag/nodes/effect.py"
        }
    else:
        # Other effects from rose-ash effects repo
        effects_commit = get_effects_commit()
        effect_info = {
            "name": f"effect:{effect_name}",
            "content_hash": REGISTRY[f"effect:{effect_name}"]["hash"],
            "repo": "rose-ash",
            "repo_commit": effects_commit,
            "repo_url": f"https://git.rose-ash.com/art-dag/effects/src/commit/{effects_commit}/{effect_name}"
        }

    # Build provenance
    provenance = {
        "task_id": self.request.id,
        "rendered_at": datetime.now(timezone.utc).isoformat(),
        "rendered_by": "@giles@artdag.rose-ash.com",
        "output": {
            "name": output_name,
            "content_hash": output_hash,
        },
        "inputs": [
            {"content_hash": input_hash}
        ],
        "effects": [effect_info],
        "infrastructure": {
            "software": {"name": "infra:artdag", "content_hash": REGISTRY["infra:artdag"]["hash"]},
            "hardware": {"name": "infra:giles-hp", "content_hash": REGISTRY["infra:giles-hp"]["hash"]}
        }
    }

    # Store provenance on IPFS (best-effort: failure is logged, not fatal).
    import ipfs_client
    provenance_cid = ipfs_client.add_json(provenance)
    if provenance_cid:
        provenance["provenance_cid"] = provenance_cid
        logger.info(f"Stored provenance on IPFS: {provenance_cid}")
    else:
        logger.warning("Failed to store provenance on IPFS")

    return provenance
|
||||
|
||||
|
||||
@app.task
def render_dog_from_cat() -> dict:
    """Convenience task: render cat through dog effect.

    NOTE(review): calling ``.delay(...).get()`` from inside a task blocks
    this worker on another task — a documented Celery deadlock risk;
    confirm this convenience path is only used with spare worker capacity.
    """
    CAT_HASH = "33268b6e167deaf018cc538de12dbe562612b33e89a749391cef855b320a269b"
    return render_effect.delay(CAT_HASH, "dog", "dog-from-cat-celery").get()
|
||||
|
||||
|
||||
@app.task(base=RenderTask, bind=True)
def execute_dag(self, dag_json: str, run_id: str = None) -> dict:
    """
    Execute a multi-step DAG.

    Parses and validates the DAG, runs it through the engine with
    per-node progress mirrored into Celery task state, stores the
    output in the cache/database, records activity, and publishes a
    provenance record to IPFS (best-effort).

    Args:
        dag_json: Serialized DAG as JSON string
        run_id: Optional run ID for tracking

    Returns:
        Execution result with output hash and node results

    Raises:
        ValueError: if the DAG JSON cannot be parsed or fails validation.
        RuntimeError: if the engine reports a failed execution.
    """
    from cache_manager import get_cache_manager

    # Parse DAG
    try:
        dag = DAG.from_json(dag_json)
    except Exception as e:
        raise ValueError(f"Invalid DAG JSON: {e}")

    # Validate DAG structure before spending any render time.
    errors = dag.validate()
    if errors:
        raise ValueError(f"Invalid DAG: {errors}")

    # Create engine with cache directory
    engine = Engine(CACHE_DIR / "nodes")

    # Set up progress callback: mirror per-node progress into Celery state.
    def progress_callback(progress):
        self.update_state(
            state='EXECUTING',
            meta={
                'node_id': progress.node_id,
                'node_type': progress.node_type,
                'status': progress.status,
                'progress': progress.progress,
                'message': progress.message,
            }
        )
        logger.info(f"DAG progress: {progress.node_id} - {progress.status} - {progress.message}")

    engine.set_progress_callback(progress_callback)

    # Execute DAG
    self.update_state(state='EXECUTING', meta={'status': 'starting', 'nodes': len(dag.nodes)})
    result = engine.execute(dag)

    if not result.success:
        raise RuntimeError(f"DAG execution failed: {result.error}")

    # Get output hash (None when the engine produced no output file).
    cache_manager = get_cache_manager()
    output_hash = None
    if result.output_path and result.output_path.exists():
        output_hash = file_hash(result.output_path)

        # Store in cache_manager for proper tracking (returns tuple)
        cached, ipfs_cid = cache_manager.put(result.output_path, node_type="dag_output")

        # Store in database (for L2 to query IPFS CID)
        import asyncio
        import database

        async def save_to_db():
            if database.pool is None:
                await database.init_db()
            await database.create_cache_item(output_hash, ipfs_cid)

        # NOTE(review): when a loop is already running, ensure_future does
        # not wait for completion — the DB write may still be in flight when
        # this task returns. Confirm this fire-and-forget is acceptable.
        try:
            loop = asyncio.get_event_loop()
            if loop.is_running():
                asyncio.ensure_future(save_to_db())
            else:
                loop.run_until_complete(save_to_db())
        except RuntimeError:
            asyncio.run(save_to_db())

    # Record activity for deletion tracking
    input_hashes = []
    for node_id, node in dag.nodes.items():
        # Accept both enum and string forms of the node type.
        if node.node_type == NodeType.SOURCE or str(node.node_type) == "SOURCE":
            content_hash = node.config.get("content_hash")
            if content_hash:
                input_hashes.append(content_hash)

    if input_hashes:
        cache_manager.record_simple_activity(
            input_hashes=input_hashes,
            output_hash=output_hash,
            run_id=run_id,
        )

    # Build provenance
    input_hashes_for_provenance = []
    for node_id, node in dag.nodes.items():
        if node.node_type == NodeType.SOURCE or str(node.node_type) == "SOURCE":
            content_hash = node.config.get("content_hash")
            if content_hash:
                input_hashes_for_provenance.append({"content_hash": content_hash})

    provenance = {
        "task_id": self.request.id,
        "run_id": run_id,
        "rendered_at": datetime.now(timezone.utc).isoformat(),
        "output": {
            "content_hash": output_hash,
        },
        "inputs": input_hashes_for_provenance,
        "dag": dag_json,  # Full DAG definition
        "execution": {
            "execution_time": result.execution_time,
            "nodes_executed": result.nodes_executed,
            "nodes_cached": result.nodes_cached,
        }
    }

    # Store provenance on IPFS (best-effort: failure is logged, not fatal).
    import ipfs_client
    provenance_cid = ipfs_client.add_json(provenance)
    if provenance_cid:
        provenance["provenance_cid"] = provenance_cid
        logger.info(f"Stored DAG provenance on IPFS: {provenance_cid}")
    else:
        logger.warning("Failed to store DAG provenance on IPFS")

    # Build result
    return {
        "success": True,
        "run_id": run_id,
        "output_hash": output_hash,
        "output_path": str(result.output_path) if result.output_path else None,
        "execution_time": result.execution_time,
        "nodes_executed": result.nodes_executed,
        "nodes_cached": result.nodes_cached,
        "node_results": {
            node_id: str(path) for node_id, path in result.node_results.items()
        },
        "provenance_cid": provenance_cid,
    }
|
||||
|
||||
|
||||
def build_effect_dag(input_hashes: List[str], effect_name: str) -> DAG:
    """
    Build a simple DAG for applying an effect to inputs.

    Produces a two-layer graph: one SOURCE node per input hash, all
    feeding a single effect node, which is set as the DAG output.

    Args:
        input_hashes: List of input content hashes
        effect_name: Name of effect to apply (e.g., "dog", "identity")

    Returns:
        DAG ready for execution
    """
    dag = DAG()

    # One SOURCE node per input, named source_0, source_1, ...
    sources = [
        Node(
            node_type=NodeType.SOURCE,
            config={"content_hash": content_hash},
            name=f"source_{i}",
        )
        for i, content_hash in enumerate(input_hashes)
    ]
    for source in sources:
        dag.add_node(source)

    # Single effect node consuming every source, and the DAG's output.
    effect = Node(
        node_type=f"effect:{effect_name}",
        config={},
        inputs=[source.node_id for source in sources],
        name=f"effect_{effect_name}",
    )
    dag.add_node(effect)
    dag.set_output(effect.node_id)

    return dag
|
||||
477
path_registry.py
Normal file
477
path_registry.py
Normal file
@@ -0,0 +1,477 @@
|
||||
"""
|
||||
Path Registry - Maps human-friendly paths to content-addressed IDs.
|
||||
|
||||
This module provides a bidirectional mapping between:
|
||||
- Human-friendly paths (e.g., "effects/ascii_fx_zone.sexp")
|
||||
- Content-addressed IDs (IPFS CIDs or SHA3-256 hashes)
|
||||
|
||||
The registry is useful for:
|
||||
- Looking up effects by their friendly path name
|
||||
- Resolving cids back to the original path for debugging
|
||||
- Maintaining a stable naming scheme across cache updates
|
||||
|
||||
Storage:
|
||||
- Uses the existing item_types table in the database (path column)
|
||||
- Caches in Redis for fast lookups across distributed workers
|
||||
|
||||
The registry uses a system actor (@system@local) for global path mappings,
|
||||
allowing effects to be resolved by path without requiring user context.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
from dataclasses import dataclass
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# System actor for global path mappings (effects, recipes, analyzers)
|
||||
SYSTEM_ACTOR = "@system@local"
|
||||
|
||||
|
||||
@dataclass
class PathEntry:
    """A registered path with its content-addressed ID."""
    path: str          # Human-friendly path (relative or normalized)
    cid: str           # Content-addressed ID (IPFS CID or hash)
    content_type: str  # Type: "effect", "recipe", "analyzer", etc.
    actor_id: str = SYSTEM_ACTOR  # Owner (system actor for global mappings)
    description: Optional[str] = None  # Optional human-readable summary
    created_at: float = 0.0  # Unix timestamp (UTC) when registered; 0.0 = unknown
|
||||
|
||||
|
||||
class PathRegistry:
|
||||
"""
|
||||
Registry for mapping paths to content-addressed IDs.
|
||||
|
||||
Uses the existing item_types table for persistence and Redis
|
||||
for fast lookups in distributed Celery workers.
|
||||
"""
|
||||
|
||||
def __init__(self, redis_client=None):
    """Create a registry.

    Args:
        redis_client: Optional synchronous Redis client used as a
            write-through cache; when None, lookups go to the database.
    """
    self._redis = redis_client
    # Redis hash names for the two directions of the mapping.
    self._redis_path_to_cid_key = "artdag:path_to_cid"
    self._redis_cid_to_path_key = "artdag:cid_to_path"
|
||||
|
||||
def _run_async(self, coro):
    """Run async coroutine from sync context.

    If a loop is already running in this thread (e.g. inside an async
    web handler), the coroutine is executed on a fresh loop in a helper
    thread; otherwise it runs on this thread's (possibly new) loop.

    NOTE(review): the thread join uses timeout=30 — on timeout both
    result[0] and error[0] are None and None is returned silently.
    NOTE(review): ``raise error[0]`` sits inside the outer try; if the
    coroutine raised a RuntimeError it is caught by the outer
    ``except RuntimeError`` and the (already-consumed) coroutine is
    re-run — confirm this path is intended.
    """
    import asyncio

    try:
        # Probe for a running loop; raises RuntimeError when there is none.
        loop = asyncio.get_running_loop()
        import threading
        result = [None]
        error = [None]

        def run_in_thread():
            # Give the helper thread its own event loop.
            try:
                new_loop = asyncio.new_event_loop()
                asyncio.set_event_loop(new_loop)
                try:
                    result[0] = new_loop.run_until_complete(coro)
                finally:
                    new_loop.close()
            except Exception as e:
                error[0] = e

        thread = threading.Thread(target=run_in_thread)
        thread.start()
        thread.join(timeout=30)
        if error[0]:
            raise error[0]
        return result[0]
    except RuntimeError:
        # No running loop: reuse (or create) this thread's loop.
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
        return loop.run_until_complete(coro)
|
||||
|
||||
def _normalize_path(self, path: str) -> str:
|
||||
"""Normalize a path for consistent storage."""
|
||||
# Remove leading ./ or /
|
||||
path = path.lstrip('./')
|
||||
# Normalize separators
|
||||
path = path.replace('\\', '/')
|
||||
# Remove duplicate slashes
|
||||
while '//' in path:
|
||||
path = path.replace('//', '/')
|
||||
return path
|
||||
|
||||
def register(
    self,
    path: str,
    cid: str,
    content_type: str = "effect",
    actor_id: str = SYSTEM_ACTOR,
    description: Optional[str] = None,
) -> PathEntry:
    """
    Register a path -> cid mapping.

    Persists to the database (item_types table) and updates the Redis
    cache; both writes are best-effort and log on failure.

    Args:
        path: Human-friendly path (e.g., "effects/ascii_fx_zone.sexp")
        cid: Content-addressed ID (IPFS CID or hash)
        content_type: Type of content ("effect", "recipe", "analyzer")
        actor_id: Owner (default: system for global mappings)
        description: Optional description

    Returns:
        The created PathEntry
    """
    norm_path = self._normalize_path(path)
    # Registration timestamp (UTC epoch seconds).
    now = datetime.now(timezone.utc).timestamp()

    entry = PathEntry(
        path=norm_path,
        cid=cid,
        content_type=content_type,
        actor_id=actor_id,
        description=description,
        created_at=now,
    )

    # Store in database (item_types table)
    self._save_to_db(entry)

    # Update Redis cache
    self._update_redis_cache(norm_path, cid)

    logger.info(f"Registered path '{norm_path}' -> {cid[:16]}...")
    return entry
|
||||
|
||||
def _save_to_db(self, entry: PathEntry):
    """Save entry to database using item_types table.

    Best-effort: a database failure is logged as a warning, not raised,
    so registration still updates the Redis cache.
    """
    import database

    async def save():
        import asyncpg
        # A fresh connection per call (no pooling) — keeps this usable
        # from short-lived Celery worker contexts.
        conn = await asyncpg.connect(database.DATABASE_URL)
        try:
            # Ensure cache_item exists (FK target for item_types).
            await conn.execute(
                "INSERT INTO cache_items (cid) VALUES ($1) ON CONFLICT DO NOTHING",
                entry.cid
            )
            # Insert or update item_type with path; keeps the existing
            # description when the new one is NULL.
            await conn.execute(
                """
                INSERT INTO item_types (cid, actor_id, type, path, description)
                VALUES ($1, $2, $3, $4, $5)
                ON CONFLICT (cid, actor_id, type, path) DO UPDATE SET
                    description = COALESCE(EXCLUDED.description, item_types.description)
                """,
                entry.cid, entry.actor_id, entry.content_type, entry.path, entry.description
            )
        finally:
            await conn.close()

    try:
        self._run_async(save())
    except Exception as e:
        logger.warning(f"Failed to save path registry to DB: {e}")
|
||||
|
||||
def _update_redis_cache(self, path: str, cid: str):
    """Write the path<->cid mapping into both Redis hashes (best-effort)."""
    if not self._redis:
        return
    try:
        # Forward and reverse directions live in separate hashes.
        self._redis.hset(self._redis_path_to_cid_key, path, cid)
        self._redis.hset(self._redis_cid_to_path_key, cid, path)
    except Exception as e:
        logger.warning(f"Failed to update Redis cache: {e}")
|
||||
|
||||
def get_cid(self, path: str, content_type: str = None) -> Optional[str]:
    """
    Resolve a human-friendly path to its cid.

    Checks the Redis cache first, then falls back to the database.

    Args:
        path: Human-friendly path
        content_type: Optional type filter (applied on the DB fallback)

    Returns:
        The cid, or None if not found
    """
    norm_path = self._normalize_path(path)

    # Fast path: Redis cache. Failures fall through to the database.
    if self._redis:
        try:
            cached = self._redis.hget(self._redis_path_to_cid_key, norm_path)
            if cached:
                return cached.decode() if isinstance(cached, bytes) else cached
        except Exception as e:
            logger.warning(f"Redis lookup failed: {e}")

    # Slow path: authoritative database lookup.
    return self._get_cid_from_db(norm_path, content_type)
|
||||
|
||||
def _get_cid_from_db(self, path: str, content_type: str = None) -> Optional[str]:
    """Get cid from database using item_types table.

    On a hit, back-fills the Redis cache so subsequent lookups take the
    fast path. Failures are logged and reported as a miss (None).
    """
    import database

    async def get():
        import asyncpg
        conn = await asyncpg.connect(database.DATABASE_URL)
        try:
            if content_type:
                row = await conn.fetchrow(
                    "SELECT cid FROM item_types WHERE path = $1 AND type = $2",
                    path, content_type
                )
            else:
                row = await conn.fetchrow(
                    "SELECT cid FROM item_types WHERE path = $1",
                    path
                )
            return row["cid"] if row else None
        finally:
            await conn.close()

    try:
        result = self._run_async(get())
        # Update Redis cache if found
        if result and self._redis:
            self._update_redis_cache(path, result)
        return result
    except Exception as e:
        logger.warning(f"Failed to get from DB: {e}")
        return None
|
||||
|
||||
def get_path(self, cid: str) -> Optional[str]:
    """
    Reverse-resolve a cid to its registered human-friendly path.

    Checks the Redis cache first, then falls back to the database.

    Args:
        cid: Content-addressed ID

    Returns:
        The path, or None if not found
    """
    # Fast path: Redis cache. Failures fall through to the database.
    if self._redis:
        try:
            cached = self._redis.hget(self._redis_cid_to_path_key, cid)
            if cached:
                return cached.decode() if isinstance(cached, bytes) else cached
        except Exception as e:
            logger.warning(f"Redis lookup failed: {e}")

    # Slow path: authoritative database lookup.
    return self._get_path_from_db(cid)
|
||||
|
||||
def _get_path_from_db(self, cid: str) -> Optional[str]:
    """Get path from database using item_types table.

    When a cid is registered under several paths, the oldest
    registration wins (ORDER BY created_at LIMIT 1). On a hit, the
    Redis cache is back-filled. Failures are logged and reported as a
    miss (None).
    """
    import database

    async def get():
        import asyncpg
        conn = await asyncpg.connect(database.DATABASE_URL)
        try:
            row = await conn.fetchrow(
                "SELECT path FROM item_types WHERE cid = $1 AND path IS NOT NULL ORDER BY created_at LIMIT 1",
                cid
            )
            return row["path"] if row else None
        finally:
            await conn.close()

    try:
        result = self._run_async(get())
        # Update Redis cache if found
        if result and self._redis:
            self._update_redis_cache(result, cid)
        return result
    except Exception as e:
        logger.warning(f"Failed to get from DB: {e}")
        return None
|
||||
|
||||
def list_by_type(self, content_type: str, actor_id: str = None) -> List[PathEntry]:
    """
    List all entries of a given type.

    Only rows with a non-NULL path are returned, ordered by path.
    Database failures are logged and reported as an empty list.

    Args:
        content_type: Type to filter by ("effect", "recipe", etc.)
        actor_id: Optional actor filter (None = all, SYSTEM_ACTOR = global)

    Returns:
        List of PathEntry objects
    """
    import database

    async def list_entries():
        import asyncpg
        conn = await asyncpg.connect(database.DATABASE_URL)
        try:
            # Two query variants: with and without the actor filter.
            if actor_id:
                rows = await conn.fetch(
                    """
                    SELECT cid, path, type, actor_id, description,
                           EXTRACT(EPOCH FROM created_at) as created_at
                    FROM item_types
                    WHERE type = $1 AND actor_id = $2 AND path IS NOT NULL
                    ORDER BY path
                    """,
                    content_type, actor_id
                )
            else:
                rows = await conn.fetch(
                    """
                    SELECT cid, path, type, actor_id, description,
                           EXTRACT(EPOCH FROM created_at) as created_at
                    FROM item_types
                    WHERE type = $1 AND path IS NOT NULL
                    ORDER BY path
                    """,
                    content_type
                )
            return [
                PathEntry(
                    path=row["path"],
                    cid=row["cid"],
                    content_type=row["type"],
                    actor_id=row["actor_id"],
                    description=row["description"],
                    # Missing timestamps normalize to 0 ("unknown").
                    created_at=row["created_at"] or 0,
                )
                for row in rows
            ]
        finally:
            await conn.close()

    try:
        return self._run_async(list_entries())
    except Exception as e:
        logger.warning(f"Failed to list from DB: {e}")
        return []
|
||||
|
||||
def delete(self, path: str, content_type: str = None) -> bool:
    """
    Delete a path registration.

    Args:
        path: The path to delete
        content_type: Optional type filter

    Returns:
        True if deleted, False if not found
    """
    norm_path = self._normalize_path(path)

    # Resolve the cid up front so the Redis mappings can be removed too.
    cid = self.get_cid(norm_path, content_type)

    removed = self._delete_from_db(norm_path, content_type)

    # Best-effort removal of both forward and reverse cache entries.
    if removed and cid and self._redis:
        try:
            for key, field in (
                (self._redis_path_to_cid_key, norm_path),
                (self._redis_cid_to_path_key, cid),
            ):
                self._redis.hdel(key, field)
        except Exception as e:
            logger.warning(f"Failed to clean up Redis: {e}")

    return removed
|
||||
|
||||
def _delete_from_db(self, path: str, content_type: str = None) -> bool:
    """Delete rows for a path from the item_types table.

    Args:
        path: Normalized path to delete.
        content_type: Optional type filter; when given, only rows of
            that type are removed.

    Returns:
        True if at least one row was deleted, False otherwise
        (including on DB errors, which are logged and swallowed).
    """
    import database

    async def delete():
        import asyncpg
        conn = await asyncpg.connect(database.DATABASE_URL)
        try:
            if content_type:
                result = await conn.execute(
                    "DELETE FROM item_types WHERE path = $1 AND type = $2",
                    path, content_type
                )
            else:
                result = await conn.execute(
                    "DELETE FROM item_types WHERE path = $1",
                    path
                )
            # asyncpg returns a status tag like "DELETE <n>".  The old
            # check ('"DELETE" in result') was True even for "DELETE 0",
            # so delete() reported success for paths that were never
            # registered.  Parse the row count instead.
            try:
                return int(result.rsplit(" ", 1)[-1]) > 0
            except ValueError:
                return False
        finally:
            await conn.close()

    try:
        return self._run_async(delete())
    except Exception as e:
        logger.warning(f"Failed to delete from DB: {e}")
        return False
|
||||
|
||||
def register_effect(
    self,
    path: str,
    cid: str,
    description: Optional[str] = None,
) -> PathEntry:
    """
    Register an effect under the system actor.

    Thin wrapper around register() that fixes content_type to "effect"
    and actor_id to SYSTEM_ACTOR.

    Args:
        path: Effect path (e.g., "effects/ascii_fx_zone.sexp")
        cid: IPFS CID of the effect file
        description: Optional description

    Returns:
        The created PathEntry
    """
    entry_kwargs = {
        "path": path,
        "cid": cid,
        "content_type": "effect",
        "actor_id": SYSTEM_ACTOR,
        "description": description,
    }
    return self.register(**entry_kwargs)
|
||||
|
||||
def get_effect_cid(self, path: str) -> Optional[str]:
    """
    Look up the IPFS CID registered for an effect path.

    Args:
        path: Effect path

    Returns:
        IPFS CID or None
    """
    cid = self.get_cid(path, content_type="effect")
    return cid
|
||||
|
||||
def list_effects(self) -> List[PathEntry]:
    """Return every effect entry registered under the system actor."""
    return self.list_by_type("effect", actor_id=SYSTEM_ACTOR)
|
||||
|
||||
|
||||
# Singleton instance: lazily created by get_path_registry() and
# cleared by reset_path_registry() (test helper).
_registry: Optional[PathRegistry] = None
|
||||
|
||||
|
||||
def get_path_registry() -> PathRegistry:
    """Get the singleton path registry instance.

    Lazily constructs the registry on first call, connecting to Redis
    via the REDIS_URL environment variable (default
    redis://localhost:6379/5).

    Returns:
        The shared PathRegistry instance.
    """
    global _registry
    if _registry is None:
        import redis

        redis_url = os.environ.get('REDIS_URL', 'redis://localhost:6379/5')
        # from_url honors every URL component (host, port, db, username,
        # password, and TLS via rediss://).  The previous hand-rolled
        # urlparse-based construction silently dropped credentials from
        # REDIS_URL, breaking authenticated deployments.
        redis_client = redis.Redis.from_url(
            redis_url,
            socket_timeout=5,
            socket_connect_timeout=5,
        )

        _registry = PathRegistry(redis_client=redis_client)
    return _registry
|
||||
|
||||
|
||||
def reset_path_registry():
    """Drop the cached singleton so the next get_path_registry() call
    builds a fresh instance (intended for tests)."""
    global _registry
    _registry = None
|
||||
51
pyproject.toml
Normal file
51
pyproject.toml
Normal file
@@ -0,0 +1,51 @@
|
||||
[project]
|
||||
name = "art-celery"
|
||||
version = "0.1.0"
|
||||
description = "Art DAG L1 Server and Celery Workers"
|
||||
requires-python = ">=3.11"
|
||||
|
||||
[tool.mypy]
|
||||
python_version = "3.11"
|
||||
warn_return_any = true
|
||||
warn_unused_ignores = true
|
||||
disallow_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
check_untyped_defs = true
|
||||
strict_optional = true
|
||||
no_implicit_optional = true
|
||||
|
||||
# Start strict on new code, gradually enable for existing
|
||||
files = [
|
||||
"app/types.py",
|
||||
"app/routers/recipes.py",
|
||||
"tests/",
|
||||
]
|
||||
|
||||
# Ignore missing imports for third-party packages without stubs
|
||||
[[tool.mypy.overrides]]
|
||||
module = [
|
||||
"celery.*",
|
||||
"redis.*",
|
||||
"artdag.*",
|
||||
"artdag_common.*",
|
||||
"ipfs_client.*",
|
||||
]
|
||||
ignore_missing_imports = true
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
python_files = ["test_*.py"]
|
||||
python_functions = ["test_*"]
|
||||
asyncio_mode = "auto"
|
||||
addopts = "-v --tb=short"
|
||||
filterwarnings = [
|
||||
"ignore::DeprecationWarning",
|
||||
]
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 100
|
||||
target-version = "py311"
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = ["E", "F", "I", "UP"]
|
||||
ignore = ["E501"] # Line length handled separately
|
||||
223
recipes/woods-lowres.sexp
Normal file
223
recipes/woods-lowres.sexp
Normal file
@@ -0,0 +1,223 @@
|
||||
;; Woods Recipe - OPTIMIZED VERSION
|
||||
;;
|
||||
;; Uses fused-pipeline for GPU acceleration when available,
|
||||
;; falls back to individual primitives on CPU.
|
||||
;;
|
||||
;; Key optimizations:
|
||||
;; 1. Uses streaming_gpu primitives with fast CUDA kernels
|
||||
;; 2. Uses fused-pipeline to batch effects into single kernel passes
|
||||
;; 3. GPU persistence - frames stay on GPU throughout pipeline
|
||||
|
||||
(stream "woods-lowres"
|
||||
:fps 30
|
||||
:width 640
|
||||
:height 360
|
||||
:seed 42
|
||||
|
||||
;; Load standard primitives (includes proper asset resolution)
|
||||
;; Auto-selects GPU versions when available, falls back to CPU
|
||||
(include :name "tpl-standard-primitives")
|
||||
|
||||
;; === SOURCES (using streaming: which has proper asset resolution) ===
|
||||
(def sources [
|
||||
(streaming:make-video-source "woods-1" 30)
|
||||
(streaming:make-video-source "woods-2" 30)
|
||||
(streaming:make-video-source "woods-3" 30)
|
||||
(streaming:make-video-source "woods-4" 30)
|
||||
(streaming:make-video-source "woods-5" 30)
|
||||
(streaming:make-video-source "woods-6" 30)
|
||||
(streaming:make-video-source "woods-7" 30)
|
||||
(streaming:make-video-source "woods-8" 30)
|
||||
])
|
||||
|
||||
;; Per-pair config
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
])
|
||||
|
||||
;; Audio
|
||||
(def music (streaming:make-audio-analyzer "woods-audio"))
|
||||
(audio-playback "woods-audio")
|
||||
|
||||
;; === SCANS ===
|
||||
|
||||
;; Cycle state
|
||||
(scan cycle (streaming:audio-beat music t)
|
||||
:init {:active 0 :beat 0 :clen 16}
|
||||
:step (if (< (+ beat 1) clen)
|
||||
(dict :active active :beat (+ beat 1) :clen clen)
|
||||
(dict :active (mod (+ active 1) (len sources)) :beat 0
|
||||
:clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))
|
||||
|
||||
;; Spin scan
|
||||
(scan spin (streaming:audio-beat music t)
|
||||
:init {:angle 0 :dir 1 :speed 2}
|
||||
:step (let [new-dir (if (< (core:rand) 0.05) (* dir -1) dir)
|
||||
new-speed (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) speed)]
|
||||
(dict :angle (+ angle (* new-dir new-speed))
|
||||
:dir new-dir
|
||||
:speed new-speed)))
|
||||
|
||||
;; Ripple scan - raindrop style, all params randomized
|
||||
;; Higher freq = bigger gaps between waves (formula is dist/freq)
|
||||
(scan ripple-state (streaming:audio-beat music t)
|
||||
:init {:gate 0 :cx 320 :cy 180 :freq 20 :decay 6 :amp-mult 1.0}
|
||||
:step (let [new-gate (if (< (core:rand) 0.2) (+ 2 (core:rand-int 0 4)) (core:max 0 (- gate 1)))
|
||||
triggered (> new-gate gate)
|
||||
new-cx (if triggered (core:rand-int 50 590) cx)
|
||||
new-cy (if triggered (core:rand-int 50 310) cy)
|
||||
new-freq (if triggered (+ 15 (core:rand-int 0 20)) freq)
|
||||
new-decay (if triggered (+ 5 (core:rand-int 0 4)) decay)
|
||||
new-amp-mult (if triggered (+ 0.8 (* (core:rand) 1.2)) amp-mult)]
|
||||
(dict :gate new-gate :cx new-cx :cy new-cy :freq new-freq :decay new-decay :amp-mult new-amp-mult)))
|
||||
|
||||
;; Pair states
|
||||
(scan pairs (streaming:audio-beat music t)
|
||||
:init {:states (map (core:range (len sources)) (lambda (_)
|
||||
{:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
|
||||
:step (dict :states (map states (lambda (p)
|
||||
(let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
|
||||
new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
|
||||
old-hue-a (get p :hue-a)
|
||||
old-hue-b (get p :hue-b)
|
||||
new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
|
||||
new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
|
||||
new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
|
||||
new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
|
||||
mix-rem (get p :mix-rem)
|
||||
old-mix (get p :mix)
|
||||
new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
|
||||
new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
|
||||
rot-beat (get p :rot-beat)
|
||||
rot-clen (get p :rot-clen)
|
||||
old-angle (get p :angle)
|
||||
new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
|
||||
new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
|
||||
new-angle (+ old-angle (/ 360 rot-clen))]
|
||||
(dict :inv-a new-inv-a :inv-b new-inv-b
|
||||
:hue-a new-hue-a :hue-b new-hue-b
|
||||
:hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
|
||||
:mix new-mix :mix-rem new-mix-rem
|
||||
:angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))
|
||||
|
||||
;; === OPTIMIZED PROCESS-PAIR MACRO ===
|
||||
;; Uses fused-pipeline to batch rotate+hue+invert into single kernel
|
||||
(defmacro process-pair-fast (idx)
|
||||
(let [;; Get sources for this pair (with safe modulo indexing)
|
||||
num-sources (len sources)
|
||||
src-a (nth sources (mod (* idx 2) num-sources))
|
||||
src-b (nth sources (mod (+ (* idx 2) 1) num-sources))
|
||||
cfg (nth pair-configs idx)
|
||||
pstate (nth (bind pairs :states) idx)
|
||||
|
||||
;; Read frames (GPU decode, stays on GPU)
|
||||
frame-a (streaming:source-read src-a t)
|
||||
frame-b (streaming:source-read src-b t)
|
||||
|
||||
;; Get state values
|
||||
dir (get cfg :dir)
|
||||
rot-max-a (get cfg :rot-a)
|
||||
rot-max-b (get cfg :rot-b)
|
||||
zoom-max-a (get cfg :zoom-a)
|
||||
zoom-max-b (get cfg :zoom-b)
|
||||
pair-angle (get pstate :angle)
|
||||
inv-a-on (> (get pstate :inv-a) 0)
|
||||
inv-b-on (> (get pstate :inv-b) 0)
|
||||
hue-a-on (> (get pstate :hue-a) 0)
|
||||
hue-b-on (> (get pstate :hue-b) 0)
|
||||
hue-a-val (get pstate :hue-a-val)
|
||||
hue-b-val (get pstate :hue-b-val)
|
||||
mix-ratio (get pstate :mix)
|
||||
|
||||
;; Calculate rotation angles
|
||||
angle-a (* dir pair-angle rot-max-a 0.01)
|
||||
angle-b (* dir pair-angle rot-max-b 0.01)
|
||||
|
||||
;; Energy-driven zoom (maps audio energy 0-1 to 1-max)
|
||||
zoom-a (core:map-range e 0 1 1 zoom-max-a)
|
||||
zoom-b (core:map-range e 0 1 1 zoom-max-b)
|
||||
|
||||
;; Define effect pipelines for each source
|
||||
;; These get compiled to single CUDA kernels!
|
||||
;; First resize to target resolution, then apply effects
|
||||
effects-a [{:op "resize" :width 640 :height 360}
|
||||
{:op "zoom" :amount zoom-a}
|
||||
{:op "rotate" :angle angle-a}
|
||||
{:op "hue_shift" :degrees (if hue-a-on hue-a-val 0)}
|
||||
{:op "invert" :amount (if inv-a-on 1 0)}]
|
||||
effects-b [{:op "resize" :width 640 :height 360}
|
||||
{:op "zoom" :amount zoom-b}
|
||||
{:op "rotate" :angle angle-b}
|
||||
{:op "hue_shift" :degrees (if hue-b-on hue-b-val 0)}
|
||||
{:op "invert" :amount (if inv-b-on 1 0)}]
|
||||
|
||||
;; Apply fused pipelines (single kernel per source!)
|
||||
processed-a (streaming:fused-pipeline frame-a effects-a)
|
||||
processed-b (streaming:fused-pipeline frame-b effects-b)]
|
||||
|
||||
;; Blend the two processed frames
|
||||
(blending:blend-images processed-a processed-b mix-ratio)))
|
||||
|
||||
;; === FRAME PIPELINE ===
|
||||
(frame
|
||||
(let [now t
|
||||
e (streaming:audio-energy music now)
|
||||
|
||||
;; Get cycle state
|
||||
active (bind cycle :active)
|
||||
beat-pos (bind cycle :beat)
|
||||
clen (bind cycle :clen)
|
||||
|
||||
;; Transition logic
|
||||
phase3 (* beat-pos 3)
|
||||
fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
|
||||
fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
|
||||
next-idx (mod (+ active 1) (len sources))
|
||||
|
||||
;; Process active pair with fused pipeline
|
||||
active-frame (process-pair-fast active)
|
||||
|
||||
;; Crossfade with zoom during transition
|
||||
;; Old pair: zooms out (1.0 -> 2.0) and fades out
|
||||
;; New pair: starts small (0.1), zooms in (-> 1.0) and fades in
|
||||
result (if fading
|
||||
(let [next-frame (process-pair-fast next-idx)
|
||||
;; Active zooms out as it fades
|
||||
active-zoom (+ 1.0 fade-amt)
|
||||
active-zoomed (streaming:fused-pipeline active-frame
|
||||
[{:op "zoom" :amount active-zoom}])
|
||||
;; Next starts small and zooms in
|
||||
next-zoom (+ 0.1 (* fade-amt 0.9))
|
||||
next-zoomed (streaming:fused-pipeline next-frame
|
||||
[{:op "zoom" :amount next-zoom}])]
|
||||
(blending:blend-images active-zoomed next-zoomed fade-amt))
|
||||
active-frame)
|
||||
|
||||
;; Final effects pipeline (fused!)
|
||||
spin-angle (bind spin :angle)
|
||||
;; Ripple params - all randomized per ripple trigger
|
||||
rip-gate (bind ripple-state :gate)
|
||||
rip-amp-mult (bind ripple-state :amp-mult)
|
||||
rip-amp (* rip-gate rip-amp-mult (core:map-range e 0 1 50 200))
|
||||
rip-cx (bind ripple-state :cx)
|
||||
rip-cy (bind ripple-state :cy)
|
||||
rip-freq (bind ripple-state :freq)
|
||||
rip-decay (bind ripple-state :decay)
|
||||
|
||||
;; Fused final effects
|
||||
final-effects [{:op "rotate" :angle spin-angle}
|
||||
{:op "ripple" :amplitude rip-amp :frequency rip-freq :decay rip-decay
|
||||
:phase (* now 5) :center_x rip-cx :center_y rip-cy}]]
|
||||
|
||||
;; Apply final fused pipeline
|
||||
(streaming:fused-pipeline result final-effects
|
||||
:rotate_angle spin-angle
|
||||
:ripple_phase (* now 5)
|
||||
:ripple_amplitude rip-amp))))
|
||||
211
recipes/woods-recipe-optimized.sexp
Normal file
211
recipes/woods-recipe-optimized.sexp
Normal file
@@ -0,0 +1,211 @@
|
||||
;; Woods Recipe - OPTIMIZED VERSION
|
||||
;;
|
||||
;; Uses fused-pipeline for GPU acceleration when available,
|
||||
;; falls back to individual primitives on CPU.
|
||||
;;
|
||||
;; Key optimizations:
|
||||
;; 1. Uses streaming_gpu primitives with fast CUDA kernels
|
||||
;; 2. Uses fused-pipeline to batch effects into single kernel passes
|
||||
;; 3. GPU persistence - frames stay on GPU throughout pipeline
|
||||
|
||||
(stream "woods-recipe-optimized"
|
||||
:fps 30
|
||||
:width 1920
|
||||
:height 1080
|
||||
:seed 42
|
||||
|
||||
;; Load standard primitives (includes proper asset resolution)
|
||||
;; Auto-selects GPU versions when available, falls back to CPU
|
||||
(include :name "tpl-standard-primitives")
|
||||
|
||||
;; === SOURCES (using streaming: which has proper asset resolution) ===
|
||||
(def sources [
|
||||
(streaming:make-video-source "woods-1" 30)
|
||||
(streaming:make-video-source "woods-2" 30)
|
||||
(streaming:make-video-source "woods-3" 30)
|
||||
(streaming:make-video-source "woods-4" 30)
|
||||
(streaming:make-video-source "woods-5" 30)
|
||||
(streaming:make-video-source "woods-6" 30)
|
||||
(streaming:make-video-source "woods-7" 30)
|
||||
(streaming:make-video-source "woods-8" 30)
|
||||
])
|
||||
|
||||
;; Per-pair config
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
])
|
||||
|
||||
;; Audio
|
||||
(def music (streaming:make-audio-analyzer "woods-audio"))
|
||||
(audio-playback "woods-audio")
|
||||
|
||||
;; === SCANS ===
|
||||
|
||||
;; Cycle state
|
||||
(scan cycle (streaming:audio-beat music t)
|
||||
:init {:active 0 :beat 0 :clen 16}
|
||||
:step (if (< (+ beat 1) clen)
|
||||
(dict :active active :beat (+ beat 1) :clen clen)
|
||||
(dict :active (mod (+ active 1) (len sources)) :beat 0
|
||||
:clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))
|
||||
|
||||
;; Spin scan
|
||||
(scan spin (streaming:audio-beat music t)
|
||||
:init {:angle 0 :dir 1 :speed 2}
|
||||
:step (let [new-dir (if (< (core:rand) 0.05) (* dir -1) dir)
|
||||
new-speed (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) speed)]
|
||||
(dict :angle (+ angle (* new-dir new-speed))
|
||||
:dir new-dir
|
||||
:speed new-speed)))
|
||||
|
||||
;; Ripple scan
|
||||
(scan ripple-state (streaming:audio-beat music t)
|
||||
:init {:gate 0 :cx 960 :cy 540}
|
||||
:step (let [new-gate (if (< (core:rand) 0.15) (+ 3 (core:rand-int 0 5)) (core:max 0 (- gate 1)))
|
||||
new-cx (if (> new-gate gate) (+ 200 (core:rand-int 0 1520)) cx)
|
||||
new-cy (if (> new-gate gate) (+ 200 (core:rand-int 0 680)) cy)]
|
||||
(dict :gate new-gate :cx new-cx :cy new-cy)))
|
||||
|
||||
;; Pair states
|
||||
(scan pairs (streaming:audio-beat music t)
|
||||
:init {:states (map (core:range (len sources)) (lambda (_)
|
||||
{:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
|
||||
:step (dict :states (map states (lambda (p)
|
||||
(let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
|
||||
new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
|
||||
old-hue-a (get p :hue-a)
|
||||
old-hue-b (get p :hue-b)
|
||||
new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
|
||||
new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
|
||||
new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
|
||||
new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
|
||||
mix-rem (get p :mix-rem)
|
||||
old-mix (get p :mix)
|
||||
new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
|
||||
new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
|
||||
rot-beat (get p :rot-beat)
|
||||
rot-clen (get p :rot-clen)
|
||||
old-angle (get p :angle)
|
||||
new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
|
||||
new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
|
||||
new-angle (+ old-angle (/ 360 rot-clen))]
|
||||
(dict :inv-a new-inv-a :inv-b new-inv-b
|
||||
:hue-a new-hue-a :hue-b new-hue-b
|
||||
:hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
|
||||
:mix new-mix :mix-rem new-mix-rem
|
||||
:angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))
|
||||
|
||||
;; === OPTIMIZED PROCESS-PAIR MACRO ===
|
||||
;; Uses fused-pipeline to batch rotate+hue+invert into single kernel
|
||||
(defmacro process-pair-fast (idx)
|
||||
(let [;; Get sources for this pair (with safe modulo indexing)
|
||||
num-sources (len sources)
|
||||
src-a (nth sources (mod (* idx 2) num-sources))
|
||||
src-b (nth sources (mod (+ (* idx 2) 1) num-sources))
|
||||
cfg (nth pair-configs idx)
|
||||
pstate (nth (bind pairs :states) idx)
|
||||
|
||||
;; Read frames (GPU decode, stays on GPU)
|
||||
frame-a (streaming:source-read src-a t)
|
||||
frame-b (streaming:source-read src-b t)
|
||||
|
||||
;; Get state values
|
||||
dir (get cfg :dir)
|
||||
rot-max-a (get cfg :rot-a)
|
||||
rot-max-b (get cfg :rot-b)
|
||||
zoom-max-a (get cfg :zoom-a)
|
||||
zoom-max-b (get cfg :zoom-b)
|
||||
pair-angle (get pstate :angle)
|
||||
inv-a-on (> (get pstate :inv-a) 0)
|
||||
inv-b-on (> (get pstate :inv-b) 0)
|
||||
hue-a-on (> (get pstate :hue-a) 0)
|
||||
hue-b-on (> (get pstate :hue-b) 0)
|
||||
hue-a-val (get pstate :hue-a-val)
|
||||
hue-b-val (get pstate :hue-b-val)
|
||||
mix-ratio (get pstate :mix)
|
||||
|
||||
;; Calculate rotation angles
|
||||
angle-a (* dir pair-angle rot-max-a 0.01)
|
||||
angle-b (* dir pair-angle rot-max-b 0.01)
|
||||
|
||||
;; Energy-driven zoom (maps audio energy 0-1 to 1-max)
|
||||
zoom-a (core:map-range e 0 1 1 zoom-max-a)
|
||||
zoom-b (core:map-range e 0 1 1 zoom-max-b)
|
||||
|
||||
;; Define effect pipelines for each source
|
||||
;; These get compiled to single CUDA kernels!
|
||||
effects-a [{:op "zoom" :amount zoom-a}
|
||||
{:op "rotate" :angle angle-a}
|
||||
{:op "hue_shift" :degrees (if hue-a-on hue-a-val 0)}
|
||||
{:op "invert" :amount (if inv-a-on 1 0)}]
|
||||
effects-b [{:op "zoom" :amount zoom-b}
|
||||
{:op "rotate" :angle angle-b}
|
||||
{:op "hue_shift" :degrees (if hue-b-on hue-b-val 0)}
|
||||
{:op "invert" :amount (if inv-b-on 1 0)}]
|
||||
|
||||
;; Apply fused pipelines (single kernel per source!)
|
||||
processed-a (streaming:fused-pipeline frame-a effects-a)
|
||||
processed-b (streaming:fused-pipeline frame-b effects-b)]
|
||||
|
||||
;; Blend the two processed frames
|
||||
(blending:blend-images processed-a processed-b mix-ratio)))
|
||||
|
||||
;; === FRAME PIPELINE ===
|
||||
(frame
|
||||
(let [now t
|
||||
e (streaming:audio-energy music now)
|
||||
|
||||
;; Get cycle state
|
||||
active (bind cycle :active)
|
||||
beat-pos (bind cycle :beat)
|
||||
clen (bind cycle :clen)
|
||||
|
||||
;; Transition logic
|
||||
phase3 (* beat-pos 3)
|
||||
fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
|
||||
fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
|
||||
next-idx (mod (+ active 1) (len sources))
|
||||
|
||||
;; Process active pair with fused pipeline
|
||||
active-frame (process-pair-fast active)
|
||||
|
||||
;; Crossfade with zoom during transition
|
||||
;; Old pair: zooms out (1.0 -> 2.0) and fades out
|
||||
;; New pair: starts small (0.1), zooms in (-> 1.0) and fades in
|
||||
result (if fading
|
||||
(let [next-frame (process-pair-fast next-idx)
|
||||
;; Active zooms out as it fades
|
||||
active-zoom (+ 1.0 fade-amt)
|
||||
active-zoomed (streaming:fused-pipeline active-frame
|
||||
[{:op "zoom" :amount active-zoom}])
|
||||
;; Next starts small and zooms in
|
||||
next-zoom (+ 0.1 (* fade-amt 0.9))
|
||||
next-zoomed (streaming:fused-pipeline next-frame
|
||||
[{:op "zoom" :amount next-zoom}])]
|
||||
(blending:blend-images active-zoomed next-zoomed fade-amt))
|
||||
active-frame)
|
||||
|
||||
;; Final effects pipeline (fused!)
|
||||
spin-angle (bind spin :angle)
|
||||
rip-gate (bind ripple-state :gate)
|
||||
rip-amp (* rip-gate (core:map-range e 0 1 5 50))
|
||||
rip-cx (bind ripple-state :cx)
|
||||
rip-cy (bind ripple-state :cy)
|
||||
|
||||
;; Fused final effects
|
||||
final-effects [{:op "rotate" :angle spin-angle}
|
||||
{:op "ripple" :amplitude rip-amp :frequency 8 :decay 2
|
||||
:phase (* now 5) :center_x rip-cx :center_y rip-cy}]]
|
||||
|
||||
;; Apply final fused pipeline
|
||||
(streaming:fused-pipeline result final-effects
|
||||
:rotate_angle spin-angle
|
||||
:ripple_phase (* now 5)
|
||||
:ripple_amplitude rip-amp))))
|
||||
134
recipes/woods-recipe.sexp
Normal file
134
recipes/woods-recipe.sexp
Normal file
@@ -0,0 +1,134 @@
|
||||
;; Woods Recipe - Using friendly names for all assets
|
||||
;;
|
||||
;; Requires uploaded:
|
||||
;; - Media: woods-1 through woods-8 (videos), woods-audio (audio)
|
||||
;; - Effects: fx-rotate, fx-zoom, fx-blend, fx-ripple, fx-invert, fx-hue-shift
|
||||
;; - Templates: tpl-standard-primitives, tpl-standard-effects, tpl-process-pair,
|
||||
;; tpl-crossfade-zoom, tpl-scan-spin, tpl-scan-ripple
|
||||
|
||||
(stream "woods-recipe"
|
||||
:fps 30
|
||||
:width 1920
|
||||
:height 1080
|
||||
:seed 42
|
||||
|
||||
;; Load standard primitives and effects via friendly names
|
||||
(include :name "tpl-standard-primitives")
|
||||
(include :name "tpl-standard-effects")
|
||||
|
||||
;; Load reusable templates
|
||||
(include :name "tpl-process-pair")
|
||||
(include :name "tpl-crossfade-zoom")
|
||||
|
||||
;; === SOURCES AS ARRAY (using friendly names) ===
|
||||
(def sources [
|
||||
(streaming:make-video-source "woods-1" 30)
|
||||
(streaming:make-video-source "woods-2" 30)
|
||||
(streaming:make-video-source "woods-3" 30)
|
||||
(streaming:make-video-source "woods-4" 30)
|
||||
(streaming:make-video-source "woods-5" 30)
|
||||
(streaming:make-video-source "woods-6" 30)
|
||||
(streaming:make-video-source "woods-7" 30)
|
||||
(streaming:make-video-source "woods-8" 30)
|
||||
])
|
||||
|
||||
;; Per-pair config: [rot-dir, rot-a-max, rot-b-max, zoom-a-max, zoom-b-max]
|
||||
(def pair-configs [
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
|
||||
{:dir -1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
{:dir 1 :rot-a 30 :rot-b -30 :zoom-a 1.3 :zoom-b 0.7}
|
||||
{:dir -1 :rot-a -45 :rot-b 45 :zoom-a 0.5 :zoom-b 1.5}
|
||||
{:dir 1 :rot-a 45 :rot-b -45 :zoom-a 1.5 :zoom-b 0.5}
|
||||
])
|
||||
|
||||
;; Audio analyzer (using friendly name)
|
||||
(def music (streaming:make-audio-analyzer "woods-audio"))
|
||||
|
||||
;; Audio playback (friendly name resolved by streaming primitives)
|
||||
(audio-playback "woods-audio")
|
||||
|
||||
;; === GLOBAL SCANS ===
|
||||
|
||||
;; Cycle state: which source is active
|
||||
(scan cycle (streaming:audio-beat music t)
|
||||
:init {:active 0 :beat 0 :clen 16}
|
||||
:step (if (< (+ beat 1) clen)
|
||||
(dict :active active :beat (+ beat 1) :clen clen)
|
||||
(dict :active (mod (+ active 1) (len sources)) :beat 0
|
||||
:clen (+ 8 (mod (* (streaming:audio-beat-count music t) 7) 17)))))
|
||||
|
||||
;; Reusable scans from templates
|
||||
(include :name "tpl-scan-spin")
|
||||
(include :name "tpl-scan-ripple")
|
||||
|
||||
;; === PER-PAIR STATE ===
|
||||
(scan pairs (streaming:audio-beat music t)
|
||||
:init {:states (map (core:range (len sources)) (lambda (_)
|
||||
{:inv-a 0 :inv-b 0 :hue-a 0 :hue-b 0 :hue-a-val 0 :hue-b-val 0 :mix 0.5 :mix-rem 5 :angle 0 :rot-beat 0 :rot-clen 25}))}
|
||||
:step (dict :states (map states (lambda (p)
|
||||
(let [new-inv-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-a) 1)))
|
||||
new-inv-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- (get p :inv-b) 1)))
|
||||
old-hue-a (get p :hue-a)
|
||||
old-hue-b (get p :hue-b)
|
||||
new-hue-a (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-a 1)))
|
||||
new-hue-b (if (< (core:rand) 0.1) (+ 1 (core:rand-int 1 4)) (core:max 0 (- old-hue-b 1)))
|
||||
new-hue-a-val (if (> new-hue-a old-hue-a) (+ 30 (* (core:rand) 300)) (get p :hue-a-val))
|
||||
new-hue-b-val (if (> new-hue-b old-hue-b) (+ 30 (* (core:rand) 300)) (get p :hue-b-val))
|
||||
mix-rem (get p :mix-rem)
|
||||
old-mix (get p :mix)
|
||||
new-mix-rem (if (> mix-rem 0) (- mix-rem 1) (+ 1 (core:rand-int 1 10)))
|
||||
new-mix (if (> mix-rem 0) old-mix (* (core:rand-int 0 2) 0.5))
|
||||
rot-beat (get p :rot-beat)
|
||||
rot-clen (get p :rot-clen)
|
||||
old-angle (get p :angle)
|
||||
new-rot-beat (if (< (+ rot-beat 1) rot-clen) (+ rot-beat 1) 0)
|
||||
new-rot-clen (if (< (+ rot-beat 1) rot-clen) rot-clen (+ 20 (core:rand-int 0 10)))
|
||||
new-angle (+ old-angle (/ 360 rot-clen))]
|
||||
(dict :inv-a new-inv-a :inv-b new-inv-b
|
||||
:hue-a new-hue-a :hue-b new-hue-b
|
||||
:hue-a-val new-hue-a-val :hue-b-val new-hue-b-val
|
||||
:mix new-mix :mix-rem new-mix-rem
|
||||
:angle new-angle :rot-beat new-rot-beat :rot-clen new-rot-clen))))))
|
||||
|
||||
;; === FRAME PIPELINE ===
|
||||
(frame
|
||||
(let [now t
|
||||
e (streaming:audio-energy music now)
|
||||
|
||||
;; Get cycle state
|
||||
active (bind cycle :active)
|
||||
beat-pos (bind cycle :beat)
|
||||
clen (bind cycle :clen)
|
||||
|
||||
;; Transition logic
|
||||
phase3 (* beat-pos 3)
|
||||
fading (and (>= phase3 (* clen 2)) (< phase3 (* clen 3)))
|
||||
fade-amt (if fading (/ (- phase3 (* clen 2)) clen) 0)
|
||||
next-idx (mod (+ active 1) (len sources))
|
||||
|
||||
;; Get pair states array
|
||||
pair-states (bind pairs :states)
|
||||
|
||||
;; Process active pair using macro from template
|
||||
active-frame (process-pair active)
|
||||
|
||||
;; Crossfade with zoom during transition
|
||||
result (if fading
|
||||
(crossfade-zoom active-frame (process-pair next-idx) fade-amt)
|
||||
active-frame)
|
||||
|
||||
;; Final: global spin + ripple
|
||||
spun (rotate result :angle (bind spin :angle))
|
||||
rip-gate (bind ripple-state :gate)
|
||||
rip-amp (* rip-gate (core:map-range e 0 1 5 50))]
|
||||
|
||||
(ripple spun
|
||||
:amplitude rip-amp
|
||||
:center_x (bind ripple-state :cx)
|
||||
:center_y (bind ripple-state :cy)
|
||||
:frequency 8
|
||||
:decay 2
|
||||
:speed 5))))
|
||||
65
render.py
65
render.py
@@ -1,65 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
CLI to submit render tasks to Art DAG Celery.
|
||||
|
||||
Usage:
|
||||
python render.py dog cat # Render cat through dog effect
|
||||
python render.py identity cat # Render cat through identity effect
|
||||
python render.py <effect> <input> # General form
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
|
||||
from legacy_tasks import render_effect
|
||||
|
||||
# Known asset hashes
|
||||
ASSETS = {
|
||||
"cat": "33268b6e167deaf018cc538de12dbe562612b33e89a749391cef855b320a269b",
|
||||
}
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Submit render task to Art DAG Celery")
|
||||
parser.add_argument("effect", help="Effect to apply (e.g., dog, identity)")
|
||||
parser.add_argument("input", help="Input asset name or hash")
|
||||
parser.add_argument("--output", "-o", help="Output name (default: <effect>-from-<input>)")
|
||||
parser.add_argument("--sync", "-s", action="store_true", help="Wait for result")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Resolve input to hash
|
||||
input_hash = ASSETS.get(args.input, args.input)
|
||||
if len(input_hash) != 64:
|
||||
print(f"Error: Unknown asset '{args.input}' and not a valid hash")
|
||||
sys.exit(1)
|
||||
|
||||
# Generate output name
|
||||
output_name = args.output or f"{args.effect}-from-{args.input}-celery"
|
||||
|
||||
print(f"Submitting render task:")
|
||||
print(f" Effect: {args.effect}")
|
||||
print(f" Input: {args.input} ({input_hash[:16]}...)")
|
||||
print(f" Output: {output_name}")
|
||||
|
||||
# Submit task
|
||||
task = render_effect.delay(input_hash, args.effect, output_name)
|
||||
print(f" Task ID: {task.id}")
|
||||
|
||||
if args.sync:
|
||||
print("\nWaiting for result...")
|
||||
try:
|
||||
result = task.get(timeout=300)
|
||||
print("\nRender complete!")
|
||||
print(json.dumps(result, indent=2))
|
||||
except Exception as e:
|
||||
print(f"\nRender failed: {e}")
|
||||
sys.exit(1)
|
||||
else:
|
||||
print("\nTask submitted. Check status with:")
|
||||
print(f" celery -A celery_app inspect query_task {task.id}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
16
requirements-dev.txt
Normal file
16
requirements-dev.txt
Normal file
@@ -0,0 +1,16 @@
|
||||
# Development dependencies
|
||||
-r requirements.txt
|
||||
|
||||
# Type checking
|
||||
mypy>=1.8.0
|
||||
types-requests>=2.31.0
|
||||
types-PyYAML>=6.0.0
|
||||
typing_extensions>=4.9.0
|
||||
|
||||
# Testing
|
||||
pytest>=8.0.0
|
||||
pytest-asyncio>=0.23.0
|
||||
pytest-cov>=4.1.0
|
||||
|
||||
# Linting
|
||||
ruff>=0.2.0
|
||||
@@ -1,10 +1,21 @@
|
||||
celery[redis]>=5.3.0
|
||||
redis>=5.0.0
|
||||
requests>=2.31.0
|
||||
httpx>=0.27.0
|
||||
itsdangerous>=2.0
|
||||
cryptography>=41.0
|
||||
fastapi>=0.109.0
|
||||
uvicorn>=0.27.0
|
||||
python-multipart>=0.0.6
|
||||
PyYAML>=6.0
|
||||
asyncpg>=0.29.0
|
||||
# Core artdag from GitHub
|
||||
git+https://github.com/gilesbradshaw/art-dag.git
|
||||
markdown>=3.5.0
|
||||
# Common effect dependencies (used by uploaded effects)
|
||||
numpy>=1.24.0
|
||||
opencv-python-headless>=4.8.0
|
||||
# Core artdag from GitHub (tracks main branch)
|
||||
git+https://github.com/gilesbradshaw/art-dag.git@main
|
||||
# Shared components (tracks master branch)
|
||||
git+https://git.rose-ash.com/art-dag/common.git@master
|
||||
psycopg2-binary
|
||||
nest_asyncio
|
||||
|
||||
77
scripts/cloud-init-gpu.sh
Normal file
77
scripts/cloud-init-gpu.sh
Normal file
@@ -0,0 +1,77 @@
|
||||
#!/bin/bash
|
||||
# Cloud-init startup script for GPU droplet (RTX 6000 Ada, etc.)
|
||||
# Paste this into DigitalOcean "User data" field when creating droplet
|
||||
|
||||
set -e
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
exec > /var/log/artdag-setup.log 2>&1
|
||||
|
||||
echo "=== ArtDAG GPU Setup Started $(date) ==="
|
||||
|
||||
# Update system (non-interactive, keep existing configs)
|
||||
apt-get update
|
||||
apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" upgrade
|
||||
|
||||
# Install essentials
|
||||
apt-get install -y \
|
||||
python3 python3-venv python3-pip \
|
||||
git curl wget \
|
||||
ffmpeg \
|
||||
vulkan-tools \
|
||||
build-essential
|
||||
|
||||
# Create venv
|
||||
VENV_DIR="/opt/artdag-gpu"
|
||||
python3 -m venv "$VENV_DIR"
|
||||
source "$VENV_DIR/bin/activate"
|
||||
|
||||
# Install Python packages
|
||||
pip install --upgrade pip
|
||||
pip install \
|
||||
numpy \
|
||||
opencv-python-headless \
|
||||
wgpu \
|
||||
httpx \
|
||||
pyyaml \
|
||||
celery[redis] \
|
||||
fastapi \
|
||||
uvicorn \
|
||||
asyncpg
|
||||
|
||||
# Create code directory
|
||||
mkdir -p "$VENV_DIR/celery/sexp_effects/effects"
|
||||
mkdir -p "$VENV_DIR/celery/sexp_effects/primitive_libs"
|
||||
mkdir -p "$VENV_DIR/celery/streaming"
|
||||
|
||||
# Add SSH key for easier access (optional - add your key here)
|
||||
# echo "ssh-ed25519 AAAA... your-key" >> /root/.ssh/authorized_keys
|
||||
|
||||
# Test GPU
|
||||
echo "=== GPU Info ==="
|
||||
nvidia-smi || echo "nvidia-smi not available yet"
|
||||
|
||||
echo "=== NVENC Check ==="
|
||||
ffmpeg -encoders 2>/dev/null | grep -E "nvenc|cuda" || echo "NVENC not detected"
|
||||
|
||||
echo "=== wgpu Check ==="
|
||||
"$VENV_DIR/bin/python3" -c "
|
||||
import wgpu
|
||||
try:
|
||||
adapter = wgpu.gpu.request_adapter_sync(power_preference='high-performance')
|
||||
print(f'GPU: {adapter.info}')
|
||||
except Exception as e:
|
||||
print(f'wgpu error: {e}')
|
||||
" || echo "wgpu test failed"
|
||||
|
||||
# Add environment setup
|
||||
cat >> /etc/profile.d/artdag-gpu.sh << 'ENVEOF'
|
||||
export WGPU_BACKEND_TYPE=Vulkan
|
||||
export PATH="/opt/artdag-gpu/bin:$PATH"
|
||||
ENVEOF
|
||||
|
||||
# Mark setup complete
|
||||
touch /opt/artdag-gpu/.setup-complete
|
||||
echo "=== Setup Complete $(date) ==="
|
||||
echo "Venv: /opt/artdag-gpu"
|
||||
echo "Activate: source /opt/artdag-gpu/bin/activate"
|
||||
echo "Vulkan: export WGPU_BACKEND_TYPE=Vulkan"
|
||||
51
scripts/deploy-to-gpu.sh
Executable file
51
scripts/deploy-to-gpu.sh
Executable file
@@ -0,0 +1,51 @@
|
||||
#!/bin/bash
|
||||
# Deploy art-dag GPU code to a remote droplet
|
||||
# Usage: ./deploy-to-gpu.sh <droplet-ip>
|
||||
|
||||
set -e
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
echo "Usage: $0 <droplet-ip>"
|
||||
echo "Example: $0 159.223.7.100"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DROPLET_IP="$1"
|
||||
REMOTE_DIR="/opt/artdag-gpu/celery"
|
||||
LOCAL_DIR="$(dirname "$0")/.."
|
||||
|
||||
echo "=== Deploying to $DROPLET_IP ==="
|
||||
|
||||
# Create remote directory
|
||||
echo "[1/4] Creating remote directory..."
|
||||
ssh "root@$DROPLET_IP" "mkdir -p $REMOTE_DIR/sexp_effects $REMOTE_DIR/streaming $REMOTE_DIR/scripts"
|
||||
|
||||
# Copy core files
|
||||
echo "[2/4] Copying core files..."
|
||||
scp "$LOCAL_DIR/sexp_effects/wgsl_compiler.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
|
||||
scp "$LOCAL_DIR/sexp_effects/parser.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
|
||||
scp "$LOCAL_DIR/sexp_effects/interpreter.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
|
||||
scp "$LOCAL_DIR/sexp_effects/__init__.py" "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/"
|
||||
scp "$LOCAL_DIR/streaming/backends.py" "root@$DROPLET_IP:$REMOTE_DIR/streaming/"
|
||||
|
||||
# Copy effects
|
||||
echo "[3/4] Copying effects..."
|
||||
ssh "root@$DROPLET_IP" "mkdir -p $REMOTE_DIR/sexp_effects/effects $REMOTE_DIR/sexp_effects/primitive_libs"
|
||||
scp -r "$LOCAL_DIR/sexp_effects/effects/"*.sexp "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/effects/" 2>/dev/null || true
|
||||
scp -r "$LOCAL_DIR/sexp_effects/primitive_libs/"*.py "root@$DROPLET_IP:$REMOTE_DIR/sexp_effects/primitive_libs/" 2>/dev/null || true
|
||||
|
||||
# Test
|
||||
echo "[4/4] Testing deployment..."
|
||||
ssh "root@$DROPLET_IP" "cd $REMOTE_DIR && /opt/artdag-gpu/bin/python3 -c '
|
||||
import sys
|
||||
sys.path.insert(0, \".\")
|
||||
from sexp_effects.wgsl_compiler import compile_effect_file
|
||||
result = compile_effect_file(\"sexp_effects/effects/invert.sexp\")
|
||||
print(f\"Compiled effect: {result.name}\")
|
||||
print(\"Deployment OK\")
|
||||
'" || echo "Test failed - may need to run setup script first"
|
||||
|
||||
echo ""
|
||||
echo "=== Deployment complete ==="
|
||||
echo "SSH: ssh root@$DROPLET_IP"
|
||||
echo "Test: ssh root@$DROPLET_IP 'cd $REMOTE_DIR && /opt/artdag-gpu/bin/python3 -c \"from streaming.backends import get_backend; b=get_backend(\\\"wgpu\\\"); print(b)\"'"
|
||||
34
scripts/gpu-dev-deploy.sh
Executable file
34
scripts/gpu-dev-deploy.sh
Executable file
@@ -0,0 +1,34 @@
|
||||
#!/bin/bash
|
||||
# Quick deploy to GPU node with hot reload
|
||||
# Usage: ./scripts/gpu-dev-deploy.sh
|
||||
|
||||
set -e
|
||||
|
||||
GPU_HOST="${GPU_HOST:-root@138.197.163.123}"
|
||||
REMOTE_DIR="/root/art-dag/celery"
|
||||
|
||||
echo "=== GPU Dev Deploy ==="
|
||||
echo "Syncing code to $GPU_HOST..."
|
||||
|
||||
# Sync code (excluding cache, git, __pycache__)
|
||||
rsync -avz --delete \
|
||||
--exclude '.git' \
|
||||
--exclude '__pycache__' \
|
||||
--exclude '*.pyc' \
|
||||
--exclude '.pytest_cache' \
|
||||
--exclude 'node_modules' \
|
||||
--exclude '.env' \
|
||||
./ "$GPU_HOST:$REMOTE_DIR/"
|
||||
|
||||
echo "Restarting GPU worker..."
|
||||
ssh "$GPU_HOST" "docker kill \$(docker ps -q -f name=l1-gpu-worker) 2>/dev/null || true"
|
||||
|
||||
echo "Waiting for new container..."
|
||||
sleep 10
|
||||
|
||||
# Show new container logs
|
||||
ssh "$GPU_HOST" "docker logs --tail 30 \$(docker ps -q -f name=l1-gpu-worker)"
|
||||
|
||||
echo ""
|
||||
echo "=== Deploy Complete ==="
|
||||
echo "Use 'ssh $GPU_HOST docker logs -f \$(docker ps -q -f name=l1-gpu-worker)' to follow logs"
|
||||
108
scripts/setup-gpu-droplet.sh
Executable file
108
scripts/setup-gpu-droplet.sh
Executable file
@@ -0,0 +1,108 @@
|
||||
#!/bin/bash
|
||||
# Setup script for GPU droplet with NVENC support
|
||||
# Run as root on a fresh Ubuntu droplet with NVIDIA GPU
|
||||
|
||||
set -e
|
||||
|
||||
echo "=== ArtDAG GPU Droplet Setup ==="
|
||||
|
||||
# 1. System updates
|
||||
echo "[1/7] Updating system..."
|
||||
apt-get update
|
||||
apt-get upgrade -y
|
||||
|
||||
# 2. Install NVIDIA drivers (if not already installed)
|
||||
echo "[2/7] Checking NVIDIA drivers..."
|
||||
if ! command -v nvidia-smi &> /dev/null; then
|
||||
echo "Installing NVIDIA drivers..."
|
||||
apt-get install -y nvidia-driver-535 nvidia-utils-535
|
||||
echo "NVIDIA drivers installed. Reboot required."
|
||||
echo "After reboot, run this script again."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
nvidia-smi
|
||||
echo "NVIDIA drivers OK"
|
||||
|
||||
# 3. Install FFmpeg with NVENC support
|
||||
echo "[3/7] Installing FFmpeg with NVENC..."
|
||||
apt-get install -y ffmpeg
|
||||
|
||||
# Verify NVENC
|
||||
if ffmpeg -encoders 2>/dev/null | grep -q nvenc; then
|
||||
echo "NVENC available:"
|
||||
ffmpeg -encoders 2>/dev/null | grep nvenc
|
||||
else
|
||||
echo "WARNING: NVENC not available. GPU may not support hardware encoding."
|
||||
fi
|
||||
|
||||
# 4. Install Python and create venv
|
||||
echo "[4/7] Setting up Python environment..."
|
||||
apt-get install -y python3 python3-venv python3-pip git
|
||||
|
||||
VENV_DIR="/opt/artdag-gpu"
|
||||
python3 -m venv "$VENV_DIR"
|
||||
source "$VENV_DIR/bin/activate"
|
||||
|
||||
# 5. Install Python dependencies
|
||||
echo "[5/7] Installing Python packages..."
|
||||
pip install --upgrade pip
|
||||
pip install \
|
||||
numpy \
|
||||
opencv-python-headless \
|
||||
wgpu \
|
||||
httpx \
|
||||
pyyaml \
|
||||
celery[redis] \
|
||||
fastapi \
|
||||
uvicorn
|
||||
|
||||
# 6. Clone/update art-dag code
|
||||
echo "[6/7] Setting up art-dag code..."
|
||||
ARTDAG_DIR="$VENV_DIR/celery"
|
||||
if [ -d "$ARTDAG_DIR" ]; then
|
||||
echo "Updating existing code..."
|
||||
cd "$ARTDAG_DIR"
|
||||
git pull || true
|
||||
else
|
||||
echo "Cloning art-dag..."
|
||||
git clone https://git.rose-ash.com/art-dag/celery.git "$ARTDAG_DIR" || {
|
||||
echo "Git clone failed. You may need to copy code manually."
|
||||
}
|
||||
fi
|
||||
|
||||
# 7. Test GPU compute
|
||||
echo "[7/7] Testing GPU compute..."
|
||||
"$VENV_DIR/bin/python3" << 'PYTEST'
|
||||
import sys
|
||||
try:
|
||||
import wgpu
|
||||
adapter = wgpu.gpu.request_adapter_sync(power_preference="high-performance")
|
||||
print(f"GPU Adapter: {adapter.info.get('device', 'unknown')}")
|
||||
device = adapter.request_device_sync()
|
||||
print("wgpu device created successfully")
|
||||
|
||||
# Check for NVENC via FFmpeg
|
||||
import subprocess
|
||||
result = subprocess.run(['ffmpeg', '-encoders'], capture_output=True, text=True)
|
||||
if 'h264_nvenc' in result.stdout:
|
||||
print("NVENC H.264 encoder: AVAILABLE")
|
||||
else:
|
||||
print("NVENC H.264 encoder: NOT AVAILABLE")
|
||||
if 'hevc_nvenc' in result.stdout:
|
||||
print("NVENC HEVC encoder: AVAILABLE")
|
||||
else:
|
||||
print("NVENC HEVC encoder: NOT AVAILABLE")
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error: {e}")
|
||||
sys.exit(1)
|
||||
PYTEST
|
||||
|
||||
echo ""
|
||||
echo "=== Setup Complete ==="
|
||||
echo "Venv: $VENV_DIR"
|
||||
echo "Code: $ARTDAG_DIR"
|
||||
echo ""
|
||||
echo "To activate: source $VENV_DIR/bin/activate"
|
||||
echo "To test: cd $ARTDAG_DIR && python -c 'from streaming.backends import get_backend; print(get_backend(\"wgpu\"))'"
|
||||
32
sexp_effects/__init__.py
Normal file
32
sexp_effects/__init__.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""
|
||||
S-Expression Effects System
|
||||
|
||||
Safe, shareable effects defined in S-expressions.
|
||||
"""
|
||||
|
||||
from .parser import parse, parse_file, Symbol, Keyword
|
||||
from .interpreter import (
|
||||
Interpreter,
|
||||
get_interpreter,
|
||||
load_effect,
|
||||
load_effects_dir,
|
||||
run_effect,
|
||||
list_effects,
|
||||
make_process_frame,
|
||||
)
|
||||
from .primitives import PRIMITIVES
|
||||
|
||||
__all__ = [
|
||||
'parse',
|
||||
'parse_file',
|
||||
'Symbol',
|
||||
'Keyword',
|
||||
'Interpreter',
|
||||
'get_interpreter',
|
||||
'load_effect',
|
||||
'load_effects_dir',
|
||||
'run_effect',
|
||||
'list_effects',
|
||||
'make_process_frame',
|
||||
'PRIMITIVES',
|
||||
]
|
||||
206
sexp_effects/derived.sexp
Normal file
206
sexp_effects/derived.sexp
Normal file
@@ -0,0 +1,206 @@
|
||||
;; Derived Operations
|
||||
;;
|
||||
;; These are built from true primitives using S-expressions.
|
||||
;; Load with: (require "derived")
|
||||
|
||||
;; =============================================================================
|
||||
;; Math Helpers (derivable from where + basic ops)
|
||||
;; =============================================================================
|
||||
|
||||
;; Absolute value
|
||||
(define (abs x) (where (< x 0) (- x) x))
|
||||
|
||||
;; Minimum of two values
|
||||
(define (min2 a b) (where (< a b) a b))
|
||||
|
||||
;; Maximum of two values
|
||||
(define (max2 a b) (where (> a b) a b))
|
||||
|
||||
;; Clamp x to range [lo, hi]
|
||||
(define (clamp x lo hi) (max2 lo (min2 hi x)))
|
||||
|
||||
;; Square of x
|
||||
(define (sq x) (* x x))
|
||||
|
||||
;; Linear interpolation: a*(1-t) + b*t
|
||||
(define (lerp a b t) (+ (* a (- 1 t)) (* b t)))
|
||||
|
||||
;; Smooth interpolation between edges
|
||||
(define (smoothstep edge0 edge1 x)
|
||||
(let ((t (clamp (/ (- x edge0) (- edge1 edge0)) 0 1)))
|
||||
(* t (* t (- 3 (* 2 t))))))
|
||||
|
||||
;; =============================================================================
|
||||
;; Channel Shortcuts (derivable from channel primitive)
|
||||
;; =============================================================================
|
||||
|
||||
;; Extract red channel as xector
|
||||
(define (red frame) (channel frame 0))
|
||||
|
||||
;; Extract green channel as xector
|
||||
(define (green frame) (channel frame 1))
|
||||
|
||||
;; Extract blue channel as xector
|
||||
(define (blue frame) (channel frame 2))
|
||||
|
||||
;; Convert to grayscale xector (ITU-R BT.601)
|
||||
(define (gray frame)
|
||||
(+ (* (red frame) 0.299)
|
||||
(* (green frame) 0.587)
|
||||
(* (blue frame) 0.114)))
|
||||
|
||||
;; Alias for gray
|
||||
(define (luminance frame) (gray frame))
|
||||
|
||||
;; =============================================================================
|
||||
;; Coordinate Generators (derivable from iota + repeat/tile)
|
||||
;; =============================================================================
|
||||
|
||||
;; X coordinate for each pixel [0, width)
|
||||
(define (x-coords frame) (tile (iota (width frame)) (height frame)))
|
||||
|
||||
;; Y coordinate for each pixel [0, height)
|
||||
(define (y-coords frame) (repeat (iota (height frame)) (width frame)))
|
||||
|
||||
;; Normalized X coordinate [0, 1]
|
||||
(define (x-norm frame) (/ (x-coords frame) (max2 1 (- (width frame) 1))))
|
||||
|
||||
;; Normalized Y coordinate [0, 1]
|
||||
(define (y-norm frame) (/ (y-coords frame) (max2 1 (- (height frame) 1))))
|
||||
|
||||
;; Distance from frame center for each pixel
|
||||
(define (dist-from-center frame)
|
||||
(let* ((cx (/ (width frame) 2))
|
||||
(cy (/ (height frame) 2))
|
||||
(dx (- (x-coords frame) cx))
|
||||
(dy (- (y-coords frame) cy)))
|
||||
(sqrt (+ (sq dx) (sq dy)))))
|
||||
|
||||
;; Normalized distance from center [0, ~1]
|
||||
(define (dist-norm frame)
|
||||
(let ((d (dist-from-center frame)))
|
||||
(/ d (max2 1 (βmax d)))))
|
||||
|
||||
;; =============================================================================
|
||||
;; Cell/Grid Operations (derivable from floor + basic math)
|
||||
;; =============================================================================
|
||||
|
||||
;; Cell row index for each pixel
|
||||
(define (cell-row frame cell-size) (floor (/ (y-coords frame) cell-size)))
|
||||
|
||||
;; Cell column index for each pixel
|
||||
(define (cell-col frame cell-size) (floor (/ (x-coords frame) cell-size)))
|
||||
|
||||
;; Number of cell rows
|
||||
(define (num-rows frame cell-size) (floor (/ (height frame) cell-size)))
|
||||
|
||||
;; Number of cell columns
|
||||
(define (num-cols frame cell-size) (floor (/ (width frame) cell-size)))
|
||||
|
||||
;; Flat cell index for each pixel
|
||||
(define (cell-indices frame cell-size)
|
||||
(+ (* (cell-row frame cell-size) (num-cols frame cell-size))
|
||||
(cell-col frame cell-size)))
|
||||
|
||||
;; Total number of cells
|
||||
(define (num-cells frame cell-size)
|
||||
(* (num-rows frame cell-size) (num-cols frame cell-size)))
|
||||
|
||||
;; X position within cell [0, cell-size)
|
||||
(define (local-x frame cell-size) (mod (x-coords frame) cell-size))
|
||||
|
||||
;; Y position within cell [0, cell-size)
|
||||
(define (local-y frame cell-size) (mod (y-coords frame) cell-size))
|
||||
|
||||
;; Normalized X within cell [0, 1]
|
||||
(define (local-x-norm frame cell-size)
|
||||
(/ (local-x frame cell-size) (max2 1 (- cell-size 1))))
|
||||
|
||||
;; Normalized Y within cell [0, 1]
|
||||
(define (local-y-norm frame cell-size)
|
||||
(/ (local-y frame cell-size) (max2 1 (- cell-size 1))))
|
||||
|
||||
;; =============================================================================
|
||||
;; Fill Operations (derivable from iota)
|
||||
;; =============================================================================
|
||||
|
||||
;; Xector of n zeros
|
||||
(define (zeros n) (* (iota n) 0))
|
||||
|
||||
;; Xector of n ones
|
||||
(define (ones n) (+ (zeros n) 1))
|
||||
|
||||
;; Xector of n copies of val
|
||||
(define (fill val n) (+ (zeros n) val))
|
||||
|
||||
;; Xector of zeros matching x's length
|
||||
(define (zeros-like x) (* x 0))
|
||||
|
||||
;; Xector of ones matching x's length
|
||||
(define (ones-like x) (+ (zeros-like x) 1))
|
||||
|
||||
;; =============================================================================
|
||||
;; Pooling (derivable from group-reduce)
|
||||
;; =============================================================================
|
||||
|
||||
;; Pool a channel by cell index
|
||||
(define (pool-channel chan cell-idx num-cells)
|
||||
(group-reduce chan cell-idx num-cells "mean"))
|
||||
|
||||
;; Pool red channel to cells
|
||||
(define (pool-red frame cell-size)
|
||||
(pool-channel (red frame)
|
||||
(cell-indices frame cell-size)
|
||||
(num-cells frame cell-size)))
|
||||
|
||||
;; Pool green channel to cells
|
||||
(define (pool-green frame cell-size)
|
||||
(pool-channel (green frame)
|
||||
(cell-indices frame cell-size)
|
||||
(num-cells frame cell-size)))
|
||||
|
||||
;; Pool blue channel to cells
|
||||
(define (pool-blue frame cell-size)
|
||||
(pool-channel (blue frame)
|
||||
(cell-indices frame cell-size)
|
||||
(num-cells frame cell-size)))
|
||||
|
||||
;; Pool grayscale to cells
|
||||
(define (pool-gray frame cell-size)
|
||||
(pool-channel (gray frame)
|
||||
(cell-indices frame cell-size)
|
||||
(num-cells frame cell-size)))
|
||||
|
||||
;; =============================================================================
|
||||
;; Blending (derivable from math)
|
||||
;; =============================================================================
|
||||
|
||||
;; Additive blend
|
||||
(define (blend-add a b) (clamp (+ a b) 0 255))
|
||||
|
||||
;; Multiply blend (normalized)
|
||||
(define (blend-multiply a b) (* (/ a 255) b))
|
||||
|
||||
;; Screen blend
|
||||
(define (blend-screen a b) (- 255 (* (/ (- 255 a) 255) (- 255 b))))
|
||||
|
||||
;; Overlay blend
|
||||
(define (blend-overlay a b)
|
||||
(where (< a 128)
|
||||
(* 2 (/ (* a b) 255))
|
||||
(- 255 (* 2 (/ (* (- 255 a) (- 255 b)) 255)))))
|
||||
|
||||
;; =============================================================================
|
||||
;; Simple Effects (derivable from primitives)
|
||||
;; =============================================================================
|
||||
|
||||
;; Invert a channel (255 - c)
|
||||
(define (invert-channel c) (- 255 c))
|
||||
|
||||
;; Binary threshold
|
||||
(define (threshold-channel c thresh) (where (> c thresh) 255 0))
|
||||
|
||||
;; Reduce to n levels
|
||||
(define (posterize-channel c levels)
|
||||
(let ((step (/ 255 (- levels 1))))
|
||||
(* (round (/ c step)) step)))
|
||||
17
sexp_effects/effects/ascii_art.sexp
Normal file
17
sexp_effects/effects/ascii_art.sexp
Normal file
@@ -0,0 +1,17 @@
|
||||
;; ASCII Art effect - converts image to ASCII characters
|
||||
(require-primitives "ascii")
|
||||
|
||||
(define-effect ascii_art
|
||||
:params (
|
||||
(char_size :type int :default 8 :range [4 32])
|
||||
(alphabet :type string :default "standard")
|
||||
(color_mode :type string :default "color" :desc "color, mono, invert, or any color name/hex")
|
||||
(background_color :type string :default "black" :desc "background color name/hex")
|
||||
(invert_colors :type int :default 0 :desc "swap foreground and background colors")
|
||||
(contrast :type float :default 1.5 :range [1 3])
|
||||
)
|
||||
(let* ((sample (cell-sample frame char_size))
|
||||
(colors (nth sample 0))
|
||||
(luminances (nth sample 1))
|
||||
(chars (luminance-to-chars luminances alphabet contrast)))
|
||||
(render-char-grid frame chars colors char_size color_mode background_color invert_colors)))
|
||||
52
sexp_effects/effects/ascii_art_fx.sexp
Normal file
52
sexp_effects/effects/ascii_art_fx.sexp
Normal file
@@ -0,0 +1,52 @@
|
||||
;; ASCII Art FX - converts image to ASCII characters with per-character effects
|
||||
(require-primitives "ascii")
|
||||
|
||||
(define-effect ascii_art_fx
|
||||
:params (
|
||||
;; Basic parameters
|
||||
(char_size :type int :default 8 :range [4 32]
|
||||
:desc "Size of each character cell in pixels")
|
||||
(alphabet :type string :default "standard"
|
||||
:desc "Character set to use")
|
||||
(color_mode :type string :default "color"
|
||||
:choices [color mono invert]
|
||||
:desc "Color mode: color, mono, invert, or any color name/hex")
|
||||
(background_color :type string :default "black"
|
||||
:desc "Background color name or hex value")
|
||||
(invert_colors :type int :default 0 :range [0 1]
|
||||
:desc "Swap foreground and background colors (0/1)")
|
||||
(contrast :type float :default 1.5 :range [1 3]
|
||||
:desc "Character selection contrast")
|
||||
|
||||
;; Per-character effects
|
||||
(char_jitter :type float :default 0 :range [0 20]
|
||||
:desc "Position jitter amount in pixels")
|
||||
(char_scale :type float :default 1.0 :range [0.5 2.0]
|
||||
:desc "Character scale factor")
|
||||
(char_rotation :type float :default 0 :range [0 180]
|
||||
:desc "Rotation amount in degrees")
|
||||
(char_hue_shift :type float :default 0 :range [0 360]
|
||||
:desc "Hue shift in degrees")
|
||||
|
||||
;; Modulation sources
|
||||
(jitter_source :type string :default "none"
|
||||
:choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
|
||||
:desc "What drives jitter modulation")
|
||||
(scale_source :type string :default "none"
|
||||
:choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
|
||||
:desc "What drives scale modulation")
|
||||
(rotation_source :type string :default "none"
|
||||
:choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
|
||||
:desc "What drives rotation modulation")
|
||||
(hue_source :type string :default "none"
|
||||
:choices [none luminance inv_luminance saturation position_x position_y position_diag random center_dist]
|
||||
:desc "What drives hue shift modulation")
|
||||
)
|
||||
(let* ((sample (cell-sample frame char_size))
|
||||
(colors (nth sample 0))
|
||||
(luminances (nth sample 1))
|
||||
(chars (luminance-to-chars luminances alphabet contrast)))
|
||||
(render-char-grid-fx frame chars colors luminances char_size
|
||||
color_mode background_color invert_colors
|
||||
char_jitter char_scale char_rotation char_hue_shift
|
||||
jitter_source scale_source rotation_source hue_source)))
|
||||
102
sexp_effects/effects/ascii_fx_zone.sexp
Normal file
102
sexp_effects/effects/ascii_fx_zone.sexp
Normal file
@@ -0,0 +1,102 @@
|
||||
;; Composable ASCII Art with Per-Zone Expression-Driven Effects
|
||||
;; Requires ascii primitive library for the ascii-fx-zone primitive
|
||||
|
||||
(require-primitives "ascii")
|
||||
|
||||
;; Two modes of operation:
|
||||
;;
|
||||
;; 1. EXPRESSION MODE: Use zone-* variables in expression parameters
|
||||
;; Zone variables available:
|
||||
;; zone-row, zone-col: Grid position (integers)
|
||||
;; zone-row-norm, zone-col-norm: Normalized position (0-1)
|
||||
;; zone-lum: Cell luminance (0-1)
|
||||
;; zone-sat: Cell saturation (0-1)
|
||||
;; zone-hue: Cell hue (0-360)
|
||||
;; zone-r, zone-g, zone-b: RGB components (0-1)
|
||||
;;
|
||||
;; Example:
|
||||
;; (ascii-fx-zone frame
|
||||
;; :cols 80
|
||||
;; :char_hue (* zone-lum 180)
|
||||
;; :char_rotation (* zone-col-norm 30))
|
||||
;;
|
||||
;; 2. CELL EFFECT MODE: Pass a lambda to apply arbitrary effects per-cell
|
||||
;; The lambda receives (cell-image zone-dict) and returns modified cell.
|
||||
;; Zone dict contains: row, col, row-norm, col-norm, lum, sat, hue, r, g, b,
|
||||
;; char, color, cell_size, plus any bound analysis values.
|
||||
;;
|
||||
;; Any loaded sexp effect can be called on cells - each cell is just a small frame:
|
||||
;; (blur cell radius) - Gaussian blur
|
||||
;; (rotate cell angle) - Rotate by angle degrees
|
||||
;; (brightness cell factor) - Adjust brightness
|
||||
;; (contrast cell factor) - Adjust contrast
|
||||
;; (saturation cell factor) - Adjust saturation
|
||||
;; (hue_shift cell degrees) - Shift hue
|
||||
;; (rgb_split cell offset_x offset_y) - RGB channel split
|
||||
;; (invert cell) - Invert colors
|
||||
;; (pixelate cell block_size) - Pixelate
|
||||
;; (wave cell amplitude freq) - Wave distortion
|
||||
;; ... and any other loaded effect
|
||||
;;
|
||||
;; Example:
|
||||
;; (ascii-fx-zone frame
|
||||
;; :cols 60
|
||||
;; :cell_effect (lambda [cell zone]
|
||||
;; (blur (rotate cell (* (get zone "energy") 45))
|
||||
;; (if (> (get zone "lum") 0.5) 3 0))))
|
||||
|
||||
(define-effect ascii_fx_zone
|
||||
:params (
|
||||
(cols :type int :default 80 :range [20 200]
|
||||
:desc "Number of character columns")
|
||||
(char_size :type int :default nil :range [4 32]
|
||||
:desc "Character cell size in pixels (overrides cols if set)")
|
||||
(alphabet :type string :default "standard"
|
||||
:desc "Character set: standard, blocks, simple, digits, or custom string")
|
||||
(color_mode :type string :default "color"
|
||||
:desc "Color mode: color, mono, invert, or any color name/hex")
|
||||
(background :type string :default "black"
|
||||
:desc "Background color name or hex value")
|
||||
(contrast :type float :default 1.5 :range [0.5 3.0]
|
||||
:desc "Contrast for character selection")
|
||||
(char_hue :type any :default nil
|
||||
:desc "Hue shift expression (evaluated per-zone with zone-* vars)")
|
||||
(char_saturation :type any :default nil
|
||||
:desc "Saturation multiplier expression (1.0 = unchanged)")
|
||||
(char_brightness :type any :default nil
|
||||
:desc "Brightness multiplier expression (1.0 = unchanged)")
|
||||
(char_scale :type any :default nil
|
||||
:desc "Character scale expression (1.0 = normal size)")
|
||||
(char_rotation :type any :default nil
|
||||
:desc "Character rotation expression (degrees)")
|
||||
(char_jitter :type any :default nil
|
||||
:desc "Position jitter expression (pixels)")
|
||||
(cell_effect :type any :default nil
|
||||
:desc "Lambda (cell zone) -> cell for arbitrary per-cell effects")
|
||||
;; Convenience params for staged recipes (avoids compile-time expression issues)
|
||||
(energy :type float :default nil
|
||||
:desc "Energy multiplier (0-1) from audio analysis bind")
|
||||
(rotation_scale :type float :default 0
|
||||
:desc "Max rotation at top-right when energy=1 (degrees)")
|
||||
)
|
||||
;; The ascii-fx-zone special form handles expression params
|
||||
;; If energy + rotation_scale provided, it builds: energy * scale * position_factor
|
||||
;; where position_factor = 0 at bottom-left, 3 at top-right
|
||||
;; If cell_effect provided, each character is rendered to a cell image,
|
||||
;; passed to the lambda, and the result composited back
|
||||
(ascii-fx-zone frame
|
||||
:cols cols
|
||||
:char_size char_size
|
||||
:alphabet alphabet
|
||||
:color_mode color_mode
|
||||
:background background
|
||||
:contrast contrast
|
||||
:char_hue char_hue
|
||||
:char_saturation char_saturation
|
||||
:char_brightness char_brightness
|
||||
:char_scale char_scale
|
||||
:char_rotation char_rotation
|
||||
:char_jitter char_jitter
|
||||
:cell_effect cell_effect
|
||||
:energy energy
|
||||
:rotation_scale rotation_scale))
|
||||
30
sexp_effects/effects/ascii_zones.sexp
Normal file
30
sexp_effects/effects/ascii_zones.sexp
Normal file
@@ -0,0 +1,30 @@
|
||||
;; ASCII Zones effect - different character sets for different brightness zones
|
||||
;; Dark areas use simple chars, mid uses standard, bright uses blocks
|
||||
(require-primitives "ascii")
|
||||
|
||||
(define-effect ascii_zones
|
||||
:params (
|
||||
(char_size :type int :default 8 :range [4 32])
|
||||
(dark_threshold :type int :default 80 :range [0 128])
|
||||
(bright_threshold :type int :default 180 :range [128 255])
|
||||
(color_mode :type string :default "color")
|
||||
)
|
||||
(let* ((sample (cell-sample frame char_size))
|
||||
(colors (nth sample 0))
|
||||
(luminances (nth sample 1))
|
||||
;; Start with simple chars as base
|
||||
(base-chars (luminance-to-chars luminances "simple" 1.2))
|
||||
;; Map each cell to appropriate alphabet based on brightness zone
|
||||
(zoned-chars (map-char-grid base-chars luminances
|
||||
(lambda (r c ch lum)
|
||||
(cond
|
||||
;; Bright zones: use block characters
|
||||
((> lum bright_threshold)
|
||||
(alphabet-char "blocks" (floor (/ (- lum bright_threshold) 15))))
|
||||
;; Dark zones: use simple sparse chars
|
||||
((< lum dark_threshold)
|
||||
(alphabet-char " .-" (floor (/ lum 30))))
|
||||
;; Mid zones: use standard ASCII
|
||||
(else
|
||||
(alphabet-char "standard" (floor (/ lum 4)))))))))
|
||||
(render-char-grid frame zoned-chars colors char_size color_mode (list 0 0 0))))
|
||||
31
sexp_effects/effects/blend.sexp
Normal file
31
sexp_effects/effects/blend.sexp
Normal file
@@ -0,0 +1,31 @@
|
||||
;; Blend effect - combines two video frames
|
||||
;; Streaming-compatible: frame is background, overlay is second frame
|
||||
;; Usage: (blend background overlay :opacity 0.5 :mode "alpha")
|
||||
;;
|
||||
;; Params:
|
||||
;; mode - blend mode (add, multiply, screen, overlay, difference, lighten, darken, alpha)
|
||||
;; opacity - blend amount (0-1)
|
||||
|
||||
(require-primitives "image" "blending" "core")
|
||||
|
||||
(define-effect blend
|
||||
:params (
|
||||
(overlay :type frame :default nil)
|
||||
(mode :type string :default "alpha")
|
||||
(opacity :type float :default 0.5)
|
||||
)
|
||||
(if (core:is-nil overlay)
|
||||
frame
|
||||
(let [a frame
|
||||
b overlay
|
||||
a-h (image:height a)
|
||||
a-w (image:width a)
|
||||
b-h (image:height b)
|
||||
b-w (image:width b)
|
||||
;; Resize b to match a if needed
|
||||
b-sized (if (and (= a-w b-w) (= a-h b-h))
|
||||
b
|
||||
(image:resize b a-w a-h "linear"))]
|
||||
(if (= mode "alpha")
|
||||
(blending:blend-images a b-sized opacity)
|
||||
(blending:blend-images a (blending:blend-mode a b-sized mode) opacity)))))
|
||||
58
sexp_effects/effects/blend_multi.sexp
Normal file
58
sexp_effects/effects/blend_multi.sexp
Normal file
@@ -0,0 +1,58 @@
|
||||
;; N-way weighted blend effect
|
||||
;; Streaming-compatible: pass inputs as a list of frames
|
||||
;; Usage: (blend_multi :inputs [(read a) (read b) (read c)] :weights [0.3 0.4 0.3])
|
||||
;;
|
||||
;; Parameters:
|
||||
;; inputs - list of N frames to blend
|
||||
;; weights - list of N floats, one per input (resolved per-frame)
|
||||
;; mode - blend mode applied when folding each frame in:
|
||||
;; "alpha" — pure weighted average (default)
|
||||
;; "multiply" — darken by multiplication
|
||||
;; "screen" — lighten (inverse multiply)
|
||||
;; "overlay" — contrast-boosting midtone blend
|
||||
;; "soft-light" — gentle dodge/burn
|
||||
;; "hard-light" — strong dodge/burn
|
||||
;; "color-dodge" — brightens towards white
|
||||
;; "color-burn" — darkens towards black
|
||||
;; "difference" — absolute pixel difference
|
||||
;; "exclusion" — softer difference
|
||||
;; "add" — additive (clamped)
|
||||
;; "subtract" — subtractive (clamped)
|
||||
;; "darken" — per-pixel minimum
|
||||
;; "lighten" — per-pixel maximum
|
||||
;; resize_mode - how to match frame dimensions (fit, crop, stretch)
|
||||
;;
|
||||
;; Uses a left-fold over inputs[1..N-1]. At each step the running
|
||||
;; opacity is: w[i] / (w[0] + w[1] + ... + w[i])
|
||||
;; which produces the correct normalised weighted result.
|
||||
|
||||
(require-primitives "image" "blending")
|
||||
|
||||
(define-effect blend_multi
|
||||
:params (
|
||||
(inputs :type list :default [])
|
||||
(weights :type list :default [])
|
||||
(mode :type string :default "alpha")
|
||||
(resize_mode :type string :default "fit")
|
||||
)
|
||||
(let [n (len inputs)
|
||||
;; Target dimensions from first frame
|
||||
target-w (image:width (nth inputs 0))
|
||||
target-h (image:height (nth inputs 0))
|
||||
;; Fold over indices 1..n-1
|
||||
;; Accumulator is (list blended-frame running-weight-sum)
|
||||
seed (list (nth inputs 0) (nth weights 0))
|
||||
result (reduce (range 1 n) seed
|
||||
(lambda (pair i)
|
||||
(let [acc (nth pair 0)
|
||||
running (nth pair 1)
|
||||
w (nth weights i)
|
||||
new-running (+ running w)
|
||||
opacity (/ w (max new-running 0.001))
|
||||
f (image:resize (nth inputs i) target-w target-h "linear")
|
||||
;; Apply blend mode then mix with opacity
|
||||
blended (if (= mode "alpha")
|
||||
(blending:blend-images acc f opacity)
|
||||
(blending:blend-images acc (blending:blend-mode acc f mode) opacity))]
|
||||
(list blended new-running))))]
|
||||
(nth result 0)))
|
||||
16
sexp_effects/effects/bloom.sexp
Normal file
16
sexp_effects/effects/bloom.sexp
Normal file
@@ -0,0 +1,16 @@
|
||||
;; Bloom effect - glow on bright areas
|
||||
(require-primitives "image" "blending")
|
||||
|
||||
(define-effect bloom
|
||||
:params (
|
||||
(intensity :type float :default 0.5 :range [0 2])
|
||||
(threshold :type int :default 200 :range [0 255])
|
||||
(radius :type int :default 15 :range [1 50])
|
||||
)
|
||||
(let* ((bright (map-pixels frame
|
||||
(lambda (x y c)
|
||||
(if (> (luminance c) threshold)
|
||||
c
|
||||
(rgb 0 0 0)))))
|
||||
(blurred (image:blur bright radius)))
|
||||
(blending:blend-mode frame blurred "add")))
|
||||
8
sexp_effects/effects/blur.sexp
Normal file
8
sexp_effects/effects/blur.sexp
Normal file
@@ -0,0 +1,8 @@
|
||||
;; Blur effect - gaussian blur
|
||||
(require-primitives "image")
|
||||
|
||||
(define-effect blur
|
||||
:params (
|
||||
(radius :type int :default 5 :range [1 50])
|
||||
)
|
||||
(image:blur frame (max 1 radius)))
|
||||
9
sexp_effects/effects/brightness.sexp
Normal file
9
sexp_effects/effects/brightness.sexp
Normal file
@@ -0,0 +1,9 @@
|
||||
;; Brightness effect - adjusts overall brightness
|
||||
;; Uses vectorized adjust primitive for fast processing
|
||||
(require-primitives "color_ops")
|
||||
|
||||
(define-effect brightness
|
||||
:params (
|
||||
(amount :type int :default 0 :range [-255 255])
|
||||
)
|
||||
(color_ops:adjust-brightness frame amount))
|
||||
65
sexp_effects/effects/cell_pattern.sexp
Normal file
65
sexp_effects/effects/cell_pattern.sexp
Normal file
@@ -0,0 +1,65 @@
|
||||
;; Cell Pattern effect - custom patterns within cells
|
||||
;;
|
||||
;; Demonstrates building arbitrary per-cell visuals from primitives.
|
||||
;; Uses local coordinates within cells to draw patterns scaled by luminance.
|
||||
|
||||
(require-primitives "xector")
|
||||
|
||||
(define-effect cell_pattern
|
||||
:params (
|
||||
(cell-size :type int :default 16 :range [8 48] :desc "Cell size")
|
||||
(pattern :type string :default "diagonal" :desc "Pattern: diagonal, cross, ring")
|
||||
)
|
||||
(let* (
|
||||
;; Pool to get cell colors
|
||||
(pooled (pool-frame frame cell-size))
|
||||
(cell-r (nth pooled 0))
|
||||
(cell-g (nth pooled 1))
|
||||
(cell-b (nth pooled 2))
|
||||
(cell-lum (α/ (nth pooled 3) 255))
|
||||
|
||||
;; Cell indices for each pixel
|
||||
(cell-idx (cell-indices frame cell-size))
|
||||
|
||||
;; Look up cell values for each pixel
|
||||
(pix-r (gather cell-r cell-idx))
|
||||
(pix-g (gather cell-g cell-idx))
|
||||
(pix-b (gather cell-b cell-idx))
|
||||
(pix-lum (gather cell-lum cell-idx))
|
||||
|
||||
;; Local position within cell [0, 1]
|
||||
(lx (local-x-norm frame cell-size))
|
||||
(ly (local-y-norm frame cell-size))
|
||||
|
||||
;; Pattern mask based on pattern type
|
||||
(mask
|
||||
(cond
|
||||
;; Diagonal lines - thickness based on luminance
|
||||
((= pattern "diagonal")
|
||||
(let* ((diag (αmod (α+ lx ly) 0.25))
|
||||
(thickness (α* pix-lum 0.125)))
|
||||
(α< diag thickness)))
|
||||
|
||||
;; Cross pattern
|
||||
((= pattern "cross")
|
||||
(let* ((cx (αabs (α- lx 0.5)))
|
||||
(cy (αabs (α- ly 0.5)))
|
||||
(thickness (α* pix-lum 0.25)))
|
||||
(αor (α< cx thickness) (α< cy thickness))))
|
||||
|
||||
;; Ring pattern
|
||||
((= pattern "ring")
|
||||
(let* ((dx (α- lx 0.5))
|
||||
(dy (α- ly 0.5))
|
||||
(dist (αsqrt (α+ (α² dx) (α² dy))))
|
||||
(target (α* pix-lum 0.4))
|
||||
(thickness 0.05))
|
||||
(α< (αabs (α- dist target)) thickness)))
|
||||
|
||||
;; Default: solid
|
||||
(else (α> pix-lum 0)))))
|
||||
|
||||
;; Apply mask: show cell color where mask is true, black elsewhere
|
||||
(rgb (where mask pix-r 0)
|
||||
(where mask pix-g 0)
|
||||
(where mask pix-b 0))))
|
||||
13
sexp_effects/effects/color-adjust.sexp
Normal file
13
sexp_effects/effects/color-adjust.sexp
Normal file
@@ -0,0 +1,13 @@
|
||||
;; Color adjustment effect - replaces TRANSFORM node
|
||||
(require-primitives "color_ops")
|
||||
|
||||
(define-effect color-adjust
|
||||
:params (
|
||||
(brightness :type int :default 0 :range [-255 255] :desc "Brightness adjustment")
|
||||
(contrast :type float :default 1 :range [0 3] :desc "Contrast multiplier")
|
||||
(saturation :type float :default 1 :range [0 2] :desc "Saturation multiplier")
|
||||
)
|
||||
(-> frame
|
||||
(color_ops:adjust-brightness brightness)
|
||||
(color_ops:adjust-contrast contrast)
|
||||
(color_ops:adjust-saturation saturation)))
|
||||
13
sexp_effects/effects/color_cycle.sexp
Normal file
13
sexp_effects/effects/color_cycle.sexp
Normal file
@@ -0,0 +1,13 @@
|
||||
;; Color Cycle effect - animated hue rotation
|
||||
(require-primitives "color_ops")
|
||||
|
||||
(define-effect color_cycle
|
||||
:params (
|
||||
(speed :type int :default 1 :range [0 10])
|
||||
)
|
||||
(let ((shift (* t speed 360)))
|
||||
(map-pixels frame
|
||||
(lambda (x y c)
|
||||
(let* ((hsv (rgb->hsv c))
|
||||
(new-h (mod (+ (first hsv) shift) 360)))
|
||||
(hsv->rgb (list new-h (nth hsv 1) (nth hsv 2))))))))
|
||||
9
sexp_effects/effects/contrast.sexp
Normal file
9
sexp_effects/effects/contrast.sexp
Normal file
@@ -0,0 +1,9 @@
|
||||
;; Contrast effect - adjusts image contrast
|
||||
;; Uses vectorized adjust primitive for fast processing
|
||||
(require-primitives "color_ops")
|
||||
|
||||
(define-effect contrast
|
||||
:params (
|
||||
(amount :type int :default 1 :range [0.5 3])
|
||||
)
|
||||
(color_ops:adjust-contrast frame amount))
|
||||
30
sexp_effects/effects/crt.sexp
Normal file
30
sexp_effects/effects/crt.sexp
Normal file
@@ -0,0 +1,30 @@
|
||||
;; CRT effect - old monitor simulation
|
||||
(require-primitives "image")
|
||||
|
||||
(define-effect crt
|
||||
:params (
|
||||
(line_spacing :type int :default 2 :range [1 10])
|
||||
(line_opacity :type float :default 0.3 :range [0 1])
|
||||
(vignette_amount :type float :default 0.2)
|
||||
)
|
||||
(let* ((w (image:width frame))
|
||||
(h (image:height frame))
|
||||
(cx (/ w 2))
|
||||
(cy (/ h 2))
|
||||
(max-dist (sqrt (+ (* cx cx) (* cy cy)))))
|
||||
(map-pixels frame
|
||||
(lambda (x y c)
|
||||
(let* (;; Scanline darkening
|
||||
(scanline-factor (if (= 0 (mod y line_spacing))
|
||||
(- 1 line_opacity)
|
||||
1))
|
||||
;; Vignette
|
||||
(dx (- x cx))
|
||||
(dy (- y cy))
|
||||
(dist (sqrt (+ (* dx dx) (* dy dy))))
|
||||
(vignette-factor (- 1 (* (/ dist max-dist) vignette_amount)))
|
||||
;; Combined
|
||||
(factor (* scanline-factor vignette-factor)))
|
||||
(rgb (* (red c) factor)
|
||||
(* (green c) factor)
|
||||
(* (blue c) factor)))))))
|
||||
14
sexp_effects/effects/datamosh.sexp
Normal file
14
sexp_effects/effects/datamosh.sexp
Normal file
@@ -0,0 +1,14 @@
|
||||
;; Datamosh effect - glitch block corruption
|
||||
|
||||
(define-effect datamosh
|
||||
:params (
|
||||
(block_size :type int :default 32 :range [8 128])
|
||||
(corruption :type float :default 0.3 :range [0 1])
|
||||
(max_offset :type int :default 50 :range [0 200])
|
||||
(color_corrupt :type bool :default true)
|
||||
)
|
||||
;; Get previous frame from state, or use current frame if none
|
||||
(let ((prev (state-get "prev_frame" frame)))
|
||||
(begin
|
||||
(state-set "prev_frame" (copy frame))
|
||||
(datamosh frame prev block_size corruption max_offset color_corrupt))))
|
||||
19
sexp_effects/effects/echo.sexp
Normal file
19
sexp_effects/effects/echo.sexp
Normal file
@@ -0,0 +1,19 @@
|
||||
;; Echo effect - motion trails using frame buffer
|
||||
(require-primitives "blending")
|
||||
|
||||
(define-effect echo
|
||||
:params (
|
||||
(num_echoes :type int :default 4 :range [1 20])
|
||||
(decay :type float :default 0.5 :range [0 1])
|
||||
)
|
||||
(let* ((buffer (state-get "buffer" (list)))
|
||||
(new-buffer (take (cons frame buffer) (+ num_echoes 1))))
|
||||
(begin
|
||||
(state-set "buffer" new-buffer)
|
||||
;; Blend frames with decay
|
||||
(if (< (length new-buffer) 2)
|
||||
frame
|
||||
(let ((result (copy frame)))
|
||||
;; Simple blend of first two frames for now
|
||||
;; Full version would fold over all frames
|
||||
(blending:blend-images frame (nth new-buffer 1) (* decay 0.5)))))))
|
||||
9
sexp_effects/effects/edge_detect.sexp
Normal file
9
sexp_effects/effects/edge_detect.sexp
Normal file
@@ -0,0 +1,9 @@
|
||||
;; Edge detection effect - highlights edges
|
||||
(require-primitives "image")
|
||||
|
||||
(define-effect edge_detect
|
||||
:params (
|
||||
(low :type int :default 50 :range [10 100])
|
||||
(high :type int :default 150 :range [50 300])
|
||||
)
|
||||
(image:edge-detect frame low high))
|
||||
13
sexp_effects/effects/emboss.sexp
Normal file
13
sexp_effects/effects/emboss.sexp
Normal file
@@ -0,0 +1,13 @@
|
||||
;; Emboss effect - creates raised/3D appearance
|
||||
(require-primitives "blending")
|
||||
|
||||
(define-effect emboss
|
||||
:params (
|
||||
(strength :type int :default 1 :range [0.5 3])
|
||||
(blend :type float :default 0.3 :range [0 1])
|
||||
)
|
||||
(let* ((kernel (list (list (- strength) (- strength) 0)
|
||||
(list (- strength) 1 strength)
|
||||
(list 0 strength strength)))
|
||||
(embossed (convolve frame kernel)))
|
||||
(blending:blend-images embossed frame blend)))
|
||||
19
sexp_effects/effects/film_grain.sexp
Normal file
19
sexp_effects/effects/film_grain.sexp
Normal file
@@ -0,0 +1,19 @@
|
||||
;; Film Grain effect - adds film grain texture
|
||||
(require-primitives "core")
|
||||
|
||||
(define-effect film_grain
|
||||
:params (
|
||||
(intensity :type float :default 0.2 :range [0 1])
|
||||
(colored :type bool :default false)
|
||||
)
|
||||
(let ((grain-amount (* intensity 50)))
|
||||
(map-pixels frame
|
||||
(lambda (x y c)
|
||||
(if colored
|
||||
(rgb (clamp (+ (red c) (gaussian 0 grain-amount)) 0 255)
|
||||
(clamp (+ (green c) (gaussian 0 grain-amount)) 0 255)
|
||||
(clamp (+ (blue c) (gaussian 0 grain-amount)) 0 255))
|
||||
(let ((n (gaussian 0 grain-amount)))
|
||||
(rgb (clamp (+ (red c) n) 0 255)
|
||||
(clamp (+ (green c) n) 0 255)
|
||||
(clamp (+ (blue c) n) 0 255))))))))
|
||||
16
sexp_effects/effects/fisheye.sexp
Normal file
16
sexp_effects/effects/fisheye.sexp
Normal file
@@ -0,0 +1,16 @@
|
||||
;; Fisheye effect - barrel/pincushion lens distortion
|
||||
(require-primitives "geometry" "image")
|
||||
|
||||
(define-effect fisheye
|
||||
:params (
|
||||
(strength :type float :default 0.3 :range [-1 1])
|
||||
(center_x :type float :default 0.5 :range [0 1])
|
||||
(center_y :type float :default 0.5 :range [0 1])
|
||||
(zoom_correct :type bool :default true)
|
||||
)
|
||||
(let* ((w (image:width frame))
|
||||
(h (image:height frame))
|
||||
(cx (* w center_x))
|
||||
(cy (* h center_y))
|
||||
(coords (geometry:fisheye-coords w h strength cx cy zoom_correct)))
|
||||
(geometry:remap frame (geometry:coords-x coords) (geometry:coords-y coords))))
|
||||
16
sexp_effects/effects/flip.sexp
Normal file
16
sexp_effects/effects/flip.sexp
Normal file
@@ -0,0 +1,16 @@
|
||||
;; Flip effect - flips image horizontally or vertically
|
||||
(require-primitives "geometry")
|
||||
|
||||
(define-effect flip
|
||||
:params (
|
||||
(horizontal :type bool :default true)
|
||||
(vertical :type bool :default false)
|
||||
)
|
||||
(let ((result frame))
|
||||
(if horizontal
|
||||
(set! result (geometry:flip-img result "horizontal"))
|
||||
nil)
|
||||
(if vertical
|
||||
(set! result (geometry:flip-img result "vertical"))
|
||||
nil)
|
||||
result))
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user