Fix Celery workers to use Redis for shared cache index

The get_cache_manager() singleton wasn't initializing with Redis,
so workers couldn't see files uploaded via the API server.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
gilesb
2026-01-09 11:25:38 +00:00
parent dd3b0e1c73
commit 43788108c0

View File

def get_cache_manager() -> L1CacheManager:
    """Get the singleton cache manager instance.

    Lazily constructs the process-wide ``L1CacheManager`` on first call,
    wiring in a Redis client so the cache index is shared across processes
    (API server and Celery workers see the same set of cached files).

    Environment variables:
        CACHE_DIR: local cache directory (default: ``~/.artdag/cache``).
        L2_SERVER: L2 cache server URL (default: ``http://localhost:8200``).
        REDIS_URL: Redis connection URL for the shared cache index
            (default: ``redis://localhost:6379/5``).

    Returns:
        The shared L1CacheManager singleton.
    """
    global _manager
    if _manager is None:
        # Lazy import: keep redis a soft dependency until a manager is needed.
        import redis

        cache_dir = Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
        l2_server = os.environ.get("L2_SERVER", "http://localhost:8200")
        # Shared Redis-backed cache index. Use from_url so credentials,
        # TLS scheme (rediss://), and the db path segment in REDIS_URL are
        # all honored — hand-parsing with urlparse dropped any password.
        redis_url = os.environ.get('REDIS_URL', 'redis://localhost:6379/5')
        redis_client = redis.Redis.from_url(
            redis_url,
            socket_timeout=5,
            socket_connect_timeout=5,
        )
        _manager = L1CacheManager(cache_dir=cache_dir, l2_server=l2_server, redis_client=redis_client)
    return _manager