# (provenance, preserved from repository listing — not Python code)
# CI: All checks were successful — Build and Deploy / build-and-deploy (push), 2m33s.
# Subtree merge note: merges full history from art-dag/mono.git into the monorepo
# under the artdag/ directory. Contains: core (DAG engine), l1 (Celery rendering
# server), l2 (ActivityPub registry), common (shared templates/middleware),
# client (CLI), test (e2e).
# Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
# git-subtree-dir: artdag
# git-subtree-mainline: 1a179de547  git-subtree-split: 4c2e716558
"""
|
|
Streaming video compositor for real-time effect processing.
|
|
|
|
This module provides a frame-by-frame streaming architecture that:
|
|
- Reads from multiple video sources with automatic looping
|
|
- Applies effects inline (no intermediate files)
|
|
- Composites layers with time-varying weights
|
|
- Outputs to display, file, or stream
|
|
|
|
Usage:
|
|
from streaming import StreamingCompositor, VideoSource, AudioAnalyzer
|
|
|
|
compositor = StreamingCompositor(
|
|
sources=["video1.mp4", "video2.mp4"],
|
|
effects_per_source=[...],
|
|
compositor_config={...},
|
|
)
|
|
|
|
# With live audio
|
|
audio = AudioAnalyzer(device=0)
|
|
compositor.run(output="output.mp4", duration=60, audio=audio)
|
|
|
|
# With preview window
|
|
compositor.run(output="preview", duration=60)
|
|
|
|
Backends:
|
|
- numpy: Works everywhere, ~3-5 fps (default)
|
|
- glsl: Requires GPU, 30+ fps real-time (future)
|
|
"""
|
|
|
|
from .sources import VideoSource, ImageSource
|
|
from .compositor import StreamingCompositor
|
|
from .backends import NumpyBackend, get_backend
|
|
from .output import DisplayOutput, FileOutput
|
|
|
|
__all__ = [
|
|
"StreamingCompositor",
|
|
"VideoSource",
|
|
"ImageSource",
|
|
"NumpyBackend",
|
|
"get_backend",
|
|
"DisplayOutput",
|
|
"FileOutput",
|
|
]
|