Files
giles bb458aa924 Replace batch DAG system with streaming architecture
- Remove legacy_tasks.py, hybrid_state.py, render.py
- Remove old task modules (analyze, execute, execute_sexp, orchestrate)
- Add streaming interpreter from test repo
- Add sexp_effects with primitives and video effects
- Add streaming Celery task with CID-based asset resolution
- Support both CID and friendly name references for assets
- Add .dockerignore to prevent local clones from conflicting

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-02 19:10:11 +00:00

117 lines
3.3 KiB
Python

"""
Blending Primitives Library
Image blending and compositing operations.
"""
import numpy as np
def prim_blend_images(a, b, alpha):
    """Linearly interpolate between two uint8 images.

    Computes ``a * (1 - alpha) + b * alpha`` with ``alpha`` clamped to
    [0, 1], so the result is always a convex combination of the inputs.
    """
    weight = min(max(alpha, 0.0), 1.0)
    mixed = (1.0 - weight) * a.astype(float) + weight * b.astype(float)
    return mixed.astype(np.uint8)
def prim_blend_mode(a, b, mode):
    """Blend two uint8 images using Photoshop-style blend modes.

    Both inputs are normalized to [0, 1], combined according to ``mode``,
    and the result is rescaled back to uint8. Unrecognized modes fall
    back to "normal", i.e. the result is simply ``b``.
    """
    base = a.astype(float) / 255
    top = b.astype(float) / 255

    def _overlay(x, y):
        # Multiply where the base layer is dark, screen where it is light.
        return np.where(x < 0.5, 2 * x * y, 1 - 2 * (1 - x) * (1 - y))

    def _soft_light(x, y):
        dark = x - (1 - 2 * y) * x * (1 - x)
        light = x + (2 * y - 1) * (np.sqrt(x) - x)
        return np.where(y < 0.5, dark, light)

    # Dispatch table: mode name -> callable(base, top) in [0, 1] space.
    blend_fns = {
        "multiply": lambda x, y: x * y,
        "screen": lambda x, y: 1 - (1 - x) * (1 - y),
        "overlay": _overlay,
        "soft-light": _soft_light,
        # Hard light is overlay with the layers swapped.
        "hard-light": lambda x, y: _overlay(y, x),
        # The +0.001 epsilons guard the dodge/burn divisions against /0.
        "color-dodge": lambda x, y: np.clip(x / (1 - y + 0.001), 0, 1),
        "color-burn": lambda x, y: 1 - np.clip((1 - x) / (y + 0.001), 0, 1),
        "difference": lambda x, y: np.abs(x - y),
        "exclusion": lambda x, y: x + y - 2 * x * y,
        "add": lambda x, y: np.clip(x + y, 0, 1),
        "subtract": lambda x, y: np.clip(x - y, 0, 1),
        "darken": np.minimum,
        "lighten": np.maximum,
    }
    combine = blend_fns.get(mode, lambda x, y: y)
    return (combine(base, top) * 255).astype(np.uint8)
def prim_mask(img, mask_img):
    """Multiply an image by a grayscale mask (white=opaque, black=transparent).

    ``mask_img`` may be 2-D or 3-D; for a 3-D mask only the first channel
    is used. The mask is broadcast over the image's color channels.
    """
    channel = mask_img[:, :, 0] if mask_img.ndim == 3 else mask_img
    weights = (channel.astype(float) / 255)[:, :, np.newaxis]
    return (img.astype(float) * weights).astype(np.uint8)
def prim_alpha_composite(base, overlay, alpha_channel):
    """Composite ``overlay`` onto ``base`` weighted by ``alpha_channel``.

    The alpha channel may be 2-D or 3-D (first channel used); 255 means
    fully ``overlay``, 0 means fully ``base``.
    """
    channel = alpha_channel[:, :, 0] if alpha_channel.ndim == 3 else alpha_channel
    alpha = (channel.astype(float) / 255)[:, :, np.newaxis]
    composed = (1 - alpha) * base.astype(float) + alpha * overlay.astype(float)
    return composed.astype(np.uint8)
def prim_overlay(base, overlay, x, y, alpha=1.0):
    """Paste ``overlay`` onto ``base`` at position (x, y) with optional alpha.

    The overlay may lie partially (or entirely) outside the base image;
    only the intersecting region is written. ``alpha`` is clamped to
    [0, 1] — consistent with prim_blend_images — so the blend stays a
    convex combination; previously an out-of-range alpha produced
    out-of-range intermediates that wrapped on the uint8 cast.

    Returns a new uint8 image; ``base`` is not modified.
    """
    alpha = max(0.0, min(1.0, alpha))  # keep the blend a convex combination
    result = base.copy()
    x, y = int(x), int(y)
    oh, ow = overlay.shape[:2]
    bh, bw = base.shape[:2]
    # Source rectangle within the overlay, clipped to the base's bounds.
    sx1, sy1 = max(0, -x), max(0, -y)
    sx2, sy2 = min(ow, bw - x), min(oh, bh - y)
    # Destination origin within the base.
    dx1, dy1 = max(0, x), max(0, y)
    if sx2 > sx1 and sy2 > sy1:
        src = overlay[sy1:sy2, sx1:sx2]
        dst = result[dy1:dy1 + (sy2 - sy1), dx1:dx1 + (sx2 - sx1)]
        blended = dst.astype(float) * (1 - alpha) + src.astype(float) * alpha
        result[dy1:dy1 + (sy2 - sy1), dx1:dx1 + (sx2 - sx1)] = blended.astype(np.uint8)
    return result
# Registry mapping operator names to their blending primitives.
PRIMITIVES = {
    # Basic blending
    'blend-images': prim_blend_images,
    'blend-mode': prim_blend_mode,
    # Masking
    'mask': prim_mask,
    'alpha-composite': prim_alpha_composite,
    # Overlay
    'overlay': prim_overlay,
}