diff --git a/docker-compose.yml b/docker-compose.yml
index b81dc97..5ad89e2 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -52,6 +52,8 @@ services:
       - DATABASE_URL=postgresql://artdag:artdag@postgres:5432/artdag
       - IPFS_API=/dns/ipfs/tcp/5001
       - CACHE_DIR=/data/cache
+      # Set IPFS_PRIMARY=true to use IPFS-primary mode (everything on IPFS, no local cache)
+      # - IPFS_PRIMARY=true
       # L2_SERVER, L2_DOMAIN, IPFS_GATEWAY_URL from .env file
     volumes:
       - l1_cache:/data/cache
diff --git a/server.py b/server.py
index e50b841..d94ad45 100644
--- a/server.py
+++ b/server.py
@@ -70,6 +70,10 @@ def compute_run_id(input_hashes: list[str], recipe: str, recipe_hash: str = None
 # IPFS gateway URL for public access to IPFS content
 IPFS_GATEWAY_URL = os.environ.get("IPFS_GATEWAY_URL", "")
 
+# IPFS-primary mode: everything stored on IPFS, no local cache
+# Set to "true" to enable
+IPFS_PRIMARY = os.environ.get("IPFS_PRIMARY", "").lower() in ("true", "1", "yes")
+
 # Cache directory (use /data/cache in Docker, ~/.artdag/cache locally)
 CACHE_DIR = Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
 CACHE_DIR.mkdir(parents=True, exist_ok=True)
@@ -6196,9 +6200,9 @@ async def run_recipe_endpoint(
     3. Execute: Run steps with parallel execution
 
     Returns immediately with run_id. Poll /api/run/{run_id} for status.
-    """
-    from tasks.orchestrate import run_recipe
+    Set IPFS_PRIMARY=true to use IPFS-primary mode (everything on IPFS).
+    """
 
     # Compute run_id from inputs and recipe
     try:
         recipe_data = yaml.safe_load(request.recipe_yaml)
@@ -6227,12 +6231,43 @@ async def run_recipe_endpoint(
 
     # Submit to Celery
     try:
-        task = run_recipe.delay(
-            recipe_yaml=request.recipe_yaml,
-            input_hashes=request.input_hashes,
-            features=request.features,
-            run_id=run_id,
-        )
+        if IPFS_PRIMARY:
+            # IPFS-primary mode: register recipe and get input CIDs
+            from tasks.orchestrate_cid import run_recipe_cid
+            import ipfs_client
+
+            # Register recipe on IPFS
+            recipe_cid = ipfs_client.add_bytes(request.recipe_yaml.encode('utf-8'))
+            if not recipe_cid:
+                raise HTTPException(status_code=500, detail="Failed to register recipe on IPFS")
+
+            # Get input CIDs from cache manager
+            input_cids = {}
+            for name, content_hash in request.input_hashes.items():
+                cid = cache_manager.get_ipfs_cid(content_hash)
+                if cid:
+                    input_cids[name] = cid
+                else:
+                    raise HTTPException(
+                        status_code=400,
+                        detail=f"Input '{name}' not found on IPFS. Upload first."
+                    )
+
+            task = run_recipe_cid.delay(
+                recipe_cid=recipe_cid,
+                input_cids=input_cids,
+                input_hashes=request.input_hashes,
+                features=request.features,
+            )
+        else:
+            # Standard mode: local cache + IPFS backup
+            from tasks.orchestrate import run_recipe
+            task = run_recipe.delay(
+                recipe_yaml=request.recipe_yaml,
+                input_hashes=request.input_hashes,
+                features=request.features,
+                run_id=run_id,
+            )
 
         # Store run status in Redis
         run_data = {