Use GPUVideoSource for hardware-accelerated video decoding
- CIDVideoSource now uses GPUVideoSource when the GPU is available.
- Enables CUDA hardware decoding for video sources.
- Should significantly improve rendering performance.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -134,9 +134,18 @@ class CIDVideoSource:
|
||||
raise ValueError(f"Could not resolve video source '{self.cid}' for actor_id={self.actor_id}")
|
||||
|
||||
logger.info(f"CIDVideoSource._ensure_source: resolved to path={path}")
|
||||
# Import from primitives where VideoSource is defined
|
||||
from sexp_effects.primitive_libs.streaming import VideoSource
|
||||
self._source = VideoSource(str(path), self.fps)
|
||||
# Use GPU-accelerated video source if available
|
||||
try:
|
||||
from sexp_effects.primitive_libs.streaming_gpu import GPUVideoSource, GPU_AVAILABLE
|
||||
if GPU_AVAILABLE:
|
||||
logger.info(f"CIDVideoSource: using GPUVideoSource for {path}")
|
||||
self._source = GPUVideoSource(str(path), self.fps, prefer_gpu=True)
|
||||
else:
|
||||
raise ImportError("GPU not available")
|
||||
except (ImportError, Exception) as e:
|
||||
logger.info(f"CIDVideoSource: falling back to CPU VideoSource ({e})")
|
||||
from sexp_effects.primitive_libs.streaming import VideoSource
|
||||
self._source = VideoSource(str(path), self.fps)
|
||||
|
||||
def read_at(self, t: float):
|
||||
self._ensure_source()
|
||||
|
||||
Reference in New Issue
Block a user