Add IPFS_PRIMARY environment variable for server
When IPFS_PRIMARY=true:
- /api/run-recipe uses run_recipe_cid task
- Recipe registered on IPFS before execution
- Input CIDs fetched from cache manager
- Everything flows through IPFS, no local cache
Usage in docker-compose:
environment:
- IPFS_PRIMARY=true
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
server.py — 51 changed lines
@@ -70,6 +70,10 @@ def compute_run_id(input_hashes: list[str], recipe: str, recipe_hash: str = None
 # IPFS gateway URL for public access to IPFS content
 IPFS_GATEWAY_URL = os.environ.get("IPFS_GATEWAY_URL", "")
 
+# IPFS-primary mode: everything stored on IPFS, no local cache
+# Set to "true" to enable
+IPFS_PRIMARY = os.environ.get("IPFS_PRIMARY", "").lower() in ("true", "1", "yes")
+
 # Cache directory (use /data/cache in Docker, ~/.artdag/cache locally)
 CACHE_DIR = Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
 CACHE_DIR.mkdir(parents=True, exist_ok=True)
@@ -6196,9 +6200,9 @@ async def run_recipe_endpoint(
     3. Execute: Run steps with parallel execution
 
     Returns immediately with run_id. Poll /api/run/{run_id} for status.
-    """
-    from tasks.orchestrate import run_recipe
+
+    Set IPFS_PRIMARY=true to use IPFS-primary mode (everything on IPFS).
+    """
     # Compute run_id from inputs and recipe
     try:
         recipe_data = yaml.safe_load(request.recipe_yaml)
@@ -6227,12 +6231,43 @@ async def run_recipe_endpoint(
 
     # Submit to Celery
     try:
-        task = run_recipe.delay(
-            recipe_yaml=request.recipe_yaml,
-            input_hashes=request.input_hashes,
-            features=request.features,
-            run_id=run_id,
-        )
+        if IPFS_PRIMARY:
+            # IPFS-primary mode: register recipe and get input CIDs
+            from tasks.orchestrate_cid import run_recipe_cid
+            import ipfs_client
+
+            # Register recipe on IPFS
+            recipe_cid = ipfs_client.add_bytes(request.recipe_yaml.encode('utf-8'))
+            if not recipe_cid:
+                raise HTTPException(status_code=500, detail="Failed to register recipe on IPFS")
+
+            # Get input CIDs from cache manager
+            input_cids = {}
+            for name, content_hash in request.input_hashes.items():
+                cid = cache_manager.get_ipfs_cid(content_hash)
+                if cid:
+                    input_cids[name] = cid
+                else:
+                    raise HTTPException(
+                        status_code=400,
+                        detail=f"Input '{name}' not found on IPFS. Upload first."
+                    )
+
+            task = run_recipe_cid.delay(
+                recipe_cid=recipe_cid,
+                input_cids=input_cids,
+                input_hashes=request.input_hashes,
+                features=request.features,
+            )
+        else:
+            # Standard mode: local cache + IPFS backup
+            from tasks.orchestrate import run_recipe
+            task = run_recipe.delay(
+                recipe_yaml=request.recipe_yaml,
+                input_hashes=request.input_hashes,
+                features=request.features,
+                run_id=run_id,
+            )
 
     # Store run status in Redis
     run_data = {
Reference in New Issue
Block a user