Fix Celery workers to use Redis for shared cache index

The get_cache_manager() singleton wasn't initializing with Redis, so workers couldn't see files uploaded via the API server.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
def get_cache_manager() -> L1CacheManager:
    """Return the process-wide singleton ``L1CacheManager``.

    Lazily constructs the manager on first call; subsequent calls return the
    same instance. Configuration is read from environment variables:

    - ``CACHE_DIR``: local cache directory (default: ``~/.artdag/cache``)
    - ``L2_SERVER``: L2 cache server base URL (default: ``http://localhost:8200``)
    - ``REDIS_URL``: Redis connection URL for the shared cache index
      (default: ``redis://localhost:6379/5``)

    The Redis client backs the shared cache index so that separate processes
    (e.g. the API server and Celery workers) observe the same set of cached
    files.
    """
    global _manager
    if _manager is None:
        # Local import keeps redis an optional dependency for code paths
        # that never touch the cache manager.
        import redis

        cache_dir = Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
        l2_server = os.environ.get("L2_SERVER", "http://localhost:8200")

        # Shared cache index lives in Redis so all worker processes see
        # files uploaded via the API server.
        redis_url = os.environ.get("REDIS_URL", "redis://localhost:6379/5")
        # from_url honors every URL component (host, port, db, username,
        # password, rediss:// TLS). The previous manual urlparse-based
        # construction passed only hostname/port/db and silently dropped
        # credentials and the TLS scheme from REDIS_URL.
        redis_client = redis.Redis.from_url(
            redis_url,
            socket_timeout=5,
            socket_connect_timeout=5,
        )

        _manager = L1CacheManager(cache_dir=cache_dir, l2_server=l2_server, redis_client=redis_client)
    return _manager
|||||||
Reference in New Issue
Block a user