From 92d26b2b72cc3ae43ba6d5eaf48ac845b69c1c11 Mon Sep 17 00:00:00 2001 From: gilesb Date: Mon, 12 Jan 2026 08:02:44 +0000 Subject: [PATCH] Rename content_hash/output_hash to cid throughout Refactor to use IPFS CID as the primary content identifier: - Update database schema: content_hash -> cid, output_hash -> output_cid - Update all services, routers, and tasks to use cid terminology - Update HTML templates to display CID instead of hash - Update cache_manager parameter names - Update README documentation This completes the transition to CID-only content addressing. Co-Authored-By: Claude Opus 4.5 --- README.md | 26 +- app/routers/api.py | 12 +- app/routers/cache.py | 72 ++-- app/routers/recipes.py | 10 +- app/routers/runs.py | 26 +- app/services/cache_service.py | 120 +++--- app/services/recipe_service.py | 14 +- app/services/run_service.py | 40 +- app/templates/cache/detail.html | 18 +- app/templates/cache/media_list.html | 8 +- app/templates/runs/_run_card.html | 14 +- app/templates/runs/detail.html | 42 +- cache_manager.py | 259 ++++++------ database.py | 316 +++++++-------- legacy_tasks.py | 92 ++--- server_legacy.py | 598 ++++++++++++++-------------- storage_providers.py | 146 +++---- tasks/execute.py | 28 +- tasks/execute_sexp.py | 14 +- tasks/orchestrate.py | 38 +- tasks/orchestrate_cid.py | 8 +- tests/test_cache_manager.py | 68 ++-- 22 files changed, 981 insertions(+), 988 deletions(-) diff --git a/README.md b/README.md index 408ad6c..c6ab6f6 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ L1 rendering server for the Art DAG system. 
Manages distributed rendering jobs v ## Features - **3-Phase Execution**: Analyze → Plan → Execute pipeline for recipe-based rendering -- **Content-Addressable Caching**: SHA3-256 hashed content with deduplication +- **Content-Addressable Caching**: IPFS CIDs with deduplication - **IPFS Integration**: Optional IPFS-primary mode for distributed storage - **Storage Providers**: S3, IPFS, and local storage backends - **DAG Visualization**: Interactive graph visualization of execution plans @@ -130,13 +130,13 @@ Interactive docs: http://localhost:8100/docs | Method | Path | Description | |--------|------|-------------| -| GET | `/cache/{hash}` | Get cached content (with preview) | -| GET | `/cache/{hash}/raw` | Download raw content | -| GET | `/cache/{hash}/mp4` | Get MP4 video | -| GET | `/cache/{hash}/meta` | Get content metadata | -| PATCH | `/cache/{hash}/meta` | Update metadata | -| POST | `/cache/{hash}/publish` | Publish to L2 | -| DELETE | `/cache/{hash}` | Delete from cache | +| GET | `/cache/{cid}` | Get cached content (with preview) | +| GET | `/cache/{cid}/raw` | Download raw content | +| GET | `/cache/{cid}/mp4` | Get MP4 video | +| GET | `/cache/{cid}/meta` | Get content metadata | +| PATCH | `/cache/{cid}/meta` | Update metadata | +| POST | `/cache/{cid}/publish` | Publish to L2 | +| DELETE | `/cache/{cid}` | Delete from cache | | POST | `/cache/import?path=` | Import local file | | POST | `/cache/upload` | Upload file | | GET | `/media` | Browse media gallery | @@ -185,7 +185,7 @@ Recipes are executed in three phases: ### Phase 1: Analyze Extract features from input files: - **Audio/Video**: Tempo, beat times, energy levels -- Results cached by content hash +- Results cached by CID ### Phase 2: Plan Generate an execution plan: @@ -237,7 +237,7 @@ output: sync_video ### Local Cache - Location: `~/.artdag/cache/` (or `CACHE_DIR`) -- Content-addressed by SHA3-256 hash +- Content-addressed by IPFS CID - Subdirectories: `plans/`, `analysis/` ### Redis @@ 
-318,12 +318,12 @@ Every render produces a provenance record: "task_id": "celery-task-uuid", "rendered_at": "2026-01-07T...", "rendered_by": "@giles@artdag.rose-ash.com", - "output": {"name": "...", "content_hash": "..."}, + "output": {"name": "...", "cid": "Qm..."}, "inputs": [...], "effects": [...], "infrastructure": { - "software": {"name": "infra:artdag", "content_hash": "..."}, - "hardware": {"name": "infra:giles-hp", "content_hash": "..."} + "software": {"name": "infra:artdag", "cid": "Qm..."}, + "hardware": {"name": "infra:giles-hp", "cid": "Qm..."} } } ``` diff --git a/app/routers/api.py b/app/routers/api.py index fa2b5b6..5288342 100644 --- a/app/routers/api.py +++ b/app/routers/api.py @@ -155,13 +155,13 @@ async def run_recipe_endpoint( # Check if already completed cached = await database.get_run_cache(run_id) if cached: - output_hash = cached.get("output_hash") - if cache.has_content(output_hash): + output_cid = cached.get("output_cid") + if cache.has_content(output_cid): return { "status": "completed", "run_id": run_id, - "output_hash": output_hash, - "output_ipfs_cid": cache.get_ipfs_cid(output_hash), + "output_cid": output_cid, + "output_ipfs_cid": cache.get_ipfs_cid(output_cid), "cached": True, } @@ -224,7 +224,7 @@ async def get_run_status( if result.successful(): task_result = result.get() data["status"] = task_result.get("status", "completed") - data["output_hash"] = task_result.get("output_cache_id") + data["output_cid"] = task_result.get("output_cache_id") data["output_ipfs_cid"] = task_result.get("output_ipfs_cid") data["total_steps"] = task_result.get("total_steps") data["cached"] = task_result.get("cached") @@ -250,7 +250,7 @@ async def get_run_status( return { "run_id": run_id, "status": "completed", - "output_hash": cached.get("output_hash"), + "output_cid": cached.get("output_cid"), "cached": True, } diff --git a/app/routers/cache.py b/app/routers/cache.py index a019a0f..abf940b 100644 --- a/app/routers/cache.py +++ b/app/routers/cache.py 
@@ -40,9 +40,9 @@ def get_cache_service(): return CacheService(database, get_cache_manager()) -@router.get("/{content_hash}") +@router.get("/{cid}") async def get_cached( - content_hash: str, + cid: str, request: Request, cache_service: CacheService = Depends(get_cache_service), ): @@ -50,16 +50,16 @@ async def get_cached( auth_service = AuthService(get_redis_client()) ctx = auth_service.get_user_from_cookie(request) - cache_item = await cache_service.get_cache_item(content_hash) + cache_item = await cache_service.get_cache_item(cid) if not cache_item: if wants_html(request): templates = get_templates(request) return render(templates, "cache/not_found.html", request, - content_hash=content_hash, + cid=cid, user=ctx, active_tab="media", ) - raise HTTPException(404, f"Content {content_hash} not in cache") + raise HTTPException(404, f"Content {cid} not in cache") # JSON response if wants_json(request): @@ -71,7 +71,7 @@ async def get_cached( return RedirectResponse(url="/auth", status_code=302) # Check access - has_access = await cache_service.check_access(content_hash, ctx.actor_id, ctx.username) + has_access = await cache_service.check_access(cid, ctx.actor_id, ctx.username) if not has_access: raise HTTPException(403, "Access denied") @@ -83,27 +83,27 @@ async def get_cached( ) -@router.get("/{content_hash}/raw") +@router.get("/{cid}/raw") async def get_cached_raw( - content_hash: str, + cid: str, cache_service: CacheService = Depends(get_cache_service), ): """Get raw cached content (file download).""" - file_path, media_type, filename = await cache_service.get_raw_file(content_hash) + file_path, media_type, filename = await cache_service.get_raw_file(cid) if not file_path: - raise HTTPException(404, f"Content {content_hash} not in cache") + raise HTTPException(404, f"Content {cid} not in cache") return FileResponse(file_path, media_type=media_type, filename=filename) -@router.get("/{content_hash}/mp4") +@router.get("/{cid}/mp4") async def get_cached_mp4( - 
content_hash: str, + cid: str, cache_service: CacheService = Depends(get_cache_service), ): """Get cached content as MP4 (transcodes MKV on first request).""" - mp4_path, error = await cache_service.get_as_mp4(content_hash) + mp4_path, error = await cache_service.get_as_mp4(cid) if error: raise HTTPException(400 if "not a video" in error else 404, error) @@ -111,29 +111,29 @@ async def get_cached_mp4( return FileResponse(mp4_path, media_type="video/mp4") -@router.get("/{content_hash}/meta") +@router.get("/{cid}/meta") async def get_metadata( - content_hash: str, + cid: str, ctx: UserContext = Depends(require_auth), cache_service: CacheService = Depends(get_cache_service), ): """Get content metadata.""" - meta = await cache_service.get_metadata(content_hash, ctx.actor_id) + meta = await cache_service.get_metadata(cid, ctx.actor_id) if meta is None: raise HTTPException(404, "Content not found") return meta -@router.patch("/{content_hash}/meta") +@router.patch("/{cid}/meta") async def update_metadata( - content_hash: str, + cid: str, req: UpdateMetadataRequest, ctx: UserContext = Depends(require_auth), cache_service: CacheService = Depends(get_cache_service), ): """Update content metadata.""" success, error = await cache_service.update_metadata( - content_hash=content_hash, + cid=cid, actor_id=ctx.actor_id, title=req.title, description=req.description, @@ -147,16 +147,16 @@ async def update_metadata( return {"updated": True} -@router.post("/{content_hash}/publish") +@router.post("/{cid}/publish") async def publish_content( - content_hash: str, + cid: str, request: Request, ctx: UserContext = Depends(require_auth), cache_service: CacheService = Depends(get_cache_service), ): """Publish content to L2 and IPFS.""" ipfs_cid, error = await cache_service.publish_to_l2( - content_hash=content_hash, + cid=cid, actor_id=ctx.actor_id, l2_server=ctx.l2_server, auth_token=request.cookies.get("auth_token"), @@ -173,14 +173,14 @@ async def publish_content( return {"ipfs_cid": 
ipfs_cid, "published": True} -@router.delete("/{content_hash}") +@router.delete("/{cid}") async def delete_content( - content_hash: str, + cid: str, ctx: UserContext = Depends(require_auth), cache_service: CacheService = Depends(get_cache_service), ): """Delete content from cache.""" - success, error = await cache_service.delete_content(content_hash, ctx.actor_id) + success, error = await cache_service.delete_content(cid, ctx.actor_id) if error: raise HTTPException(400 if "Cannot" in error or "pinned" in error else 404, error) @@ -195,12 +195,12 @@ async def import_from_ipfs( cache_service: CacheService = Depends(get_cache_service), ): """Import content from IPFS.""" - content_hash, error = await cache_service.import_from_ipfs(ipfs_cid, ctx.actor_id) + cid, error = await cache_service.import_from_ipfs(ipfs_cid, ctx.actor_id) if error: raise HTTPException(400, error) - return {"content_hash": content_hash, "imported": True} + return {"cid": cid, "imported": True} @router.post("/upload") @@ -211,7 +211,7 @@ async def upload_content( ): """Upload content to cache and IPFS.""" content = await file.read() - content_hash, ipfs_cid, error = await cache_service.upload_content( + cid, ipfs_cid, error = await cache_service.upload_content( content=content, filename=file.filename, actor_id=ctx.actor_id, @@ -222,7 +222,7 @@ return { "cid": ipfs_cid, - "content_hash": content_hash, # Legacy, for backwards compatibility + "content_hash": cid, # Legacy, for backwards compatibility "filename": file.filename, "size": len(content), "uploaded": True, @@ -272,9 +272,9 @@ async def list_media( # HTMX metadata form -@router.get("/{content_hash}/meta-form", response_class=HTMLResponse) +@router.get("/{cid}/meta-form", response_class=HTMLResponse) async def get_metadata_form( - content_hash: str, + cid: str, request: Request, cache_service: CacheService = Depends(get_cache_service), ): @@ -285,11 +285,11 @@ async def get_metadata_form( if not ctx: return HTMLResponse('
Login required
') - meta = await cache_service.get_metadata(content_hash, ctx.actor_id) + meta = await cache_service.get_metadata(cid, ctx.actor_id) return HTMLResponse(f'''

Metadata

-
@@ -312,9 +312,9 @@ async def get_metadata_form( ''') -@router.patch("/{content_hash}/meta", response_class=HTMLResponse) +@router.patch("/{cid}/meta", response_class=HTMLResponse) async def update_metadata_htmx( - content_hash: str, + cid: str, request: Request, cache_service: CacheService = Depends(get_cache_service), ): @@ -328,7 +328,7 @@ async def update_metadata_htmx( form_data = await request.form() success, error = await cache_service.update_metadata( - content_hash=content_hash, + cid=cid, actor_id=ctx.actor_id, title=form_data.get("title"), description=form_data.get("description"), diff --git a/app/routers/recipes.py b/app/routers/recipes.py index c5924d5..3fd4e8b 100644 --- a/app/routers/recipes.py +++ b/app/routers/recipes.py @@ -350,7 +350,7 @@ async def run_recipe( if node.get("type") == "SOURCE" and "asset" in config: asset_name = config["asset"] if asset_name in assets: - config["content_hash"] = assets[asset_name].get("hash") + config["cid"] = assets[asset_name].get("hash") # Resolve effect references for EFFECT nodes if node.get("type") == "EFFECT" and "effect" in config: @@ -392,21 +392,21 @@ async def run_recipe( input_name_to_node[node["name"].replace("-", "_")] = node_id # Map user-provided input names to content hashes (for variable inputs) - for input_name, content_hash in req.inputs.items(): + for input_name, cid in req.inputs.items(): # Try direct node ID match first if input_name in nodes: node = nodes[input_name] if node.get("node_type") == "SOURCE": if "config" not in node: node["config"] = {} - node["config"]["content_hash"] = content_hash + node["config"]["cid"] = cid # Try input name lookup elif input_name in input_name_to_node: node_id = input_name_to_node[input_name] node = nodes[node_id] if "config" not in node: node["config"] = {} - node["config"]["content_hash"] = content_hash + node["config"]["cid"] = cid # Transform output to output_id if "output" in dag_copy: @@ -527,7 +527,7 @@ async def publish_recipe( # Use cache service 
to publish (recipes are stored in cache) cache_service = CacheService(database, get_cache_manager()) ipfs_cid, error = await cache_service.publish_to_l2( - content_hash=recipe_id, + cid=recipe_id, actor_id=ctx.actor_id, l2_server=ctx.l2_server, auth_token=request.cookies.get("auth_token"), diff --git a/app/routers/runs.py b/app/routers/runs.py index bf59ee9..736479d 100644 --- a/app/routers/runs.py +++ b/app/routers/runs.py @@ -99,7 +99,7 @@ class RunStatus(BaseModel): output_name: Optional[str] = None created_at: Optional[str] = None completed_at: Optional[str] = None - output_hash: Optional[str] = None + output_cid: Optional[str] = None username: Optional[str] = None provenance_cid: Optional[str] = None celery_task_id: Optional[str] = None @@ -244,13 +244,13 @@ async def get_run( # Build artifacts list from output and inputs artifacts = [] output_media_type = None - if run.get("output_hash"): + if run.get("output_cid"): # Detect media type using magic bytes - output_hash = run["output_hash"] + output_cid = run["output_cid"] media_type = None try: from ..services.run_service import detect_media_type - cache_path = get_cache_manager().get_by_content_hash(output_hash) + cache_path = get_cache_manager().get_by_cid(output_cid) if cache_path and cache_path.exists(): simple_type = detect_media_type(cache_path) media_type = type_to_mime(simple_type) @@ -258,7 +258,7 @@ async def get_run( except Exception: pass artifacts.append({ - "hash": output_hash, + "hash": output_cid, "step_name": "Output", "media_type": media_type or "application/octet-stream", }) @@ -271,7 +271,7 @@ async def get_run( for i, input_hash in enumerate(run["inputs"]): media_type = None try: - cache_path = cache_manager.get_by_content_hash(input_hash) + cache_path = cache_manager.get_by_cid(input_hash) if cache_path and cache_path.exists(): simple_type = detect_media_type(cache_path) media_type = type_to_mime(simple_type) @@ -393,9 +393,9 @@ async def list_runs( for run in runs: # Add output media info 
- if run.get("output_hash"): + if run.get("output_cid"): try: - cache_path = cache_manager.get_by_content_hash(run["output_hash"]) + cache_path = cache_manager.get_by_cid(run["output_cid"]) if cache_path and cache_path.exists(): simple_type = detect_media_type(cache_path) run["output_media_type"] = type_to_mime(simple_type) @@ -409,7 +409,7 @@ async def list_runs( for input_hash in inputs[:3]: preview = {"hash": input_hash, "media_type": None} try: - cache_path = cache_manager.get_by_content_hash(input_hash) + cache_path = cache_manager.get_by_cid(input_hash) if cache_path and cache_path.exists(): simple_type = detect_media_type(cache_path) preview["media_type"] = type_to_mime(simple_type) @@ -756,8 +756,8 @@ async def publish_run( raise HTTPException(404, "Run not found") # Check if run has output - output_hash = run.get("output_hash") - if not output_hash: + output_cid = run.get("output_cid") + if not output_cid: error = "Run has no output to publish" if wants_html(request): return HTMLResponse(f'{error}') @@ -766,7 +766,7 @@ async def publish_run( # Use cache service to publish the output cache_service = CacheService(database, get_cache_manager()) ipfs_cid, error = await cache_service.publish_to_l2( - content_hash=output_hash, + cid=output_cid, actor_id=ctx.actor_id, l2_server=ctx.l2_server, auth_token=request.cookies.get("auth_token"), @@ -780,4 +780,4 @@ async def publish_run( if wants_html(request): return HTMLResponse(f'Shared: {ipfs_cid[:16]}...') - return {"ipfs_cid": ipfs_cid, "output_hash": output_hash, "published": True} + return {"ipfs_cid": ipfs_cid, "output_cid": output_cid, "published": True} diff --git a/app/services/cache_service.py b/app/services/cache_service.py index cb7acc7..4fa182e 100644 --- a/app/services/cache_service.py +++ b/app/services/cache_service.py @@ -91,26 +91,26 @@ class CacheService: self.cache = cache_manager self.cache_dir = Path(os.environ.get("CACHE_DIR", "/tmp/artdag-cache")) - async def get_cache_item(self, content_hash: 
str) -> Optional[Dict[str, Any]]: + async def get_cache_item(self, cid: str) -> Optional[Dict[str, Any]]: """Get cached item with full metadata for display.""" # Check if content exists - if not self.cache.has_content(content_hash): + if not self.cache.has_content(cid): return None - path = self.cache.get_by_content_hash(content_hash) + path = self.cache.get_by_cid(cid) if not path or not path.exists(): return None # Get metadata from database - meta = await self.db.load_item_metadata(content_hash, None) - cache_item = await self.db.get_cache_item(content_hash) + meta = await self.db.load_item_metadata(cid, None) + cache_item = await self.db.get_cache_item(cid) media_type = detect_media_type(path) mime_type = get_mime_type(path) size = path.stat().st_size return { - "content_hash": content_hash, + "cid": cid, "path": str(path), "media_type": media_type, "mime_type": mime_type, @@ -119,10 +119,10 @@ class CacheService: "meta": meta, } - async def check_access(self, content_hash: str, actor_id: str, username: str) -> bool: + async def check_access(self, cid: str, actor_id: str, username: str) -> bool: """Check if user has access to content.""" user_hashes = await self._get_user_cache_hashes(username, actor_id) - return content_hash in user_hashes + return cid in user_hashes async def _get_user_cache_hashes(self, username: str, actor_id: Optional[str] = None) -> set: """Get all cache hashes owned by or associated with a user.""" @@ -137,7 +137,7 @@ class CacheService: try: db_items = await self.db.get_user_items(actor_id) for item in db_items: - hashes.add(item["content_hash"]) + hashes.add(item["cid"]) except Exception: pass @@ -160,8 +160,8 @@ class CacheService: if isinstance(inputs, dict): inputs = list(inputs.values()) hashes.update(inputs) - if run.get("output_hash"): - hashes.add(run["output_hash"]) + if run.get("output_cid"): + hashes.add(run["output_cid"]) return hashes @@ -188,12 +188,12 @@ class CacheService: return runs - async def get_raw_file(self, 
content_hash: str) -> Tuple[Optional[Path], Optional[str], Optional[str]]: + async def get_raw_file(self, cid: str) -> Tuple[Optional[Path], Optional[str], Optional[str]]: """Get raw file path, media type, and filename for download.""" - if not self.cache.has_content(content_hash): + if not self.cache.has_content(cid): return None, None, None - path = self.cache.get_by_content_hash(content_hash) + path = self.cache.get_by_cid(cid) if not path or not path.exists(): return None, None, None @@ -223,17 +223,17 @@ class CacheService: except Exception: ext = "jpg" - filename = f"{content_hash}.{ext}" + filename = f"{cid}.{ext}" return path, mime, filename - async def get_as_mp4(self, content_hash: str) -> Tuple[Optional[Path], Optional[str]]: + async def get_as_mp4(self, cid: str) -> Tuple[Optional[Path], Optional[str]]: """Get content as MP4, transcoding if necessary. Returns (path, error).""" - if not self.cache.has_content(content_hash): - return None, f"Content {content_hash} not in cache" + if not self.cache.has_content(cid): + return None, f"Content {cid} not in cache" - path = self.cache.get_by_content_hash(content_hash) + path = self.cache.get_by_cid(cid) if not path or not path.exists(): - return None, f"Content {content_hash} not in cache" + return None, f"Content {cid} not in cache" # Check if video media_type = detect_media_type(path) @@ -241,7 +241,7 @@ class CacheService: return None, "Content is not a video" # Check for cached MP4 - mp4_path = self.cache_dir / f"{content_hash}.mp4" + mp4_path = self.cache_dir / f"{cid}.mp4" if mp4_path.exists(): return mp4_path, None @@ -258,7 +258,7 @@ class CacheService: pass # Transcode to MP4 - transcode_path = self.cache_dir / f"{content_hash}.transcoding.mp4" + transcode_path = self.cache_dir / f"{cid}.transcoding.mp4" try: result = subprocess.run( ["ffmpeg", "-y", "-i", str(path), @@ -283,15 +283,15 @@ class CacheService: transcode_path.unlink() return None, f"Transcoding failed: {e}" - async def get_metadata(self, 
content_hash: str, actor_id: str) -> Optional[Dict[str, Any]]: + async def get_metadata(self, cid: str, actor_id: str) -> Optional[Dict[str, Any]]: """Get content metadata.""" - if not self.cache.has_content(content_hash): + if not self.cache.has_content(cid): return None - return await self.db.load_item_metadata(content_hash, actor_id) + return await self.db.load_item_metadata(cid, actor_id) async def update_metadata( self, - content_hash: str, + cid: str, actor_id: str, title: str = None, description: str = None, @@ -299,7 +299,7 @@ class CacheService: custom: Dict[str, Any] = None, ) -> Tuple[bool, Optional[str]]: """Update content metadata. Returns (success, error).""" - if not self.cache.has_content(content_hash): + if not self.cache.has_content(cid): return False, "Content not found" # Build update dict @@ -314,28 +314,28 @@ class CacheService: updates["custom"] = custom try: - await self.db.update_item_metadata(content_hash, actor_id, **updates) + await self.db.update_item_metadata(cid, actor_id, **updates) return True, None except Exception as e: return False, str(e) async def publish_to_l2( self, - content_hash: str, + cid: str, actor_id: str, l2_server: str, auth_token: str, ) -> Tuple[Optional[str], Optional[str]]: """Publish content to L2 and IPFS. 
Returns (ipfs_cid, error).""" - if not self.cache.has_content(content_hash): + if not self.cache.has_content(cid): return None, "Content not found" # Get IPFS CID - cache_item = await self.db.get_cache_item(content_hash) + cache_item = await self.db.get_cache_item(cid) ipfs_cid = cache_item.get("ipfs_cid") if cache_item else None # Get metadata for origin info - meta = await self.db.load_item_metadata(content_hash, actor_id) + meta = await self.db.load_item_metadata(cid, actor_id) origin = meta.get("origin") if meta else None if not origin or "type" not in origin: @@ -351,10 +351,10 @@ class CacheService: f"{l2_server}/assets/publish-cache", headers={"Authorization": f"Bearer {auth_token}"}, json={ - "content_hash": content_hash, + "cid": cid, "ipfs_cid": ipfs_cid, - "asset_name": meta.get("title") or content_hash[:16], - "asset_type": detect_media_type(self.cache.get_by_content_hash(content_hash)), + "asset_name": meta.get("title") or cid[:16], + "asset_type": detect_media_type(self.cache.get_by_cid(cid)), "origin": origin, "description": meta.get("description"), "tags": meta.get("tags", []), @@ -374,14 +374,14 @@ class CacheService: # Update local metadata with publish status await self.db.save_l2_share( - content_hash=content_hash, + cid=cid, actor_id=actor_id, l2_server=l2_server, - asset_name=meta.get("title") or content_hash[:16], - content_type=detect_media_type(self.cache.get_by_content_hash(content_hash)) + asset_name=meta.get("title") or cid[:16], + content_type=detect_media_type(self.cache.get_by_cid(cid)) ) await self.db.update_item_metadata( - content_hash=content_hash, + cid=cid, actor_id=actor_id, pinned=True, pin_reason="published" @@ -389,37 +389,37 @@ class CacheService: return l2_result.get("ipfs_cid") or ipfs_cid, None - async def delete_content(self, content_hash: str, actor_id: str) -> Tuple[bool, Optional[str]]: + async def delete_content(self, cid: str, actor_id: str) -> Tuple[bool, Optional[str]]: """Delete content from cache. 
Returns (success, error).""" - if not self.cache.has_content(content_hash): + if not self.cache.has_content(cid): return False, "Content not found" # Check if pinned - meta = await self.db.load_item_metadata(content_hash, actor_id) + meta = await self.db.load_item_metadata(cid, actor_id) if meta and meta.get("pinned"): pin_reason = meta.get("pin_reason", "unknown") return False, f"Cannot discard pinned item (reason: {pin_reason})" # Check deletion rules via cache_manager - can_delete, reason = self.cache.can_delete(content_hash) + can_delete, reason = self.cache.can_delete(cid) if not can_delete: return False, f"Cannot discard: {reason}" # Delete via cache_manager - success, msg = self.cache.delete_by_content_hash(content_hash) + success, msg = self.cache.delete_by_cid(cid) # Clean up legacy metadata files - meta_path = self.cache_dir / f"{content_hash}.meta.json" + meta_path = self.cache_dir / f"{cid}.meta.json" if meta_path.exists(): meta_path.unlink() - mp4_path = self.cache_dir / f"{content_hash}.mp4" + mp4_path = self.cache_dir / f"{cid}.mp4" if mp4_path.exists(): mp4_path.unlink() return True, None async def import_from_ipfs(self, ipfs_cid: str, actor_id: str) -> Tuple[Optional[str], Optional[str]]: - """Import content from IPFS. Returns (content_hash, error).""" + """Import content from IPFS. 
Returns (cid, error).""" try: import ipfs_client @@ -433,18 +433,18 @@ class CacheService: # Store in cache cached, _ = self.cache.put(tmp_path, node_type="import", move=True) - content_hash = cached.content_hash + cid = cached.cid # Save to database - await self.db.create_cache_item(content_hash, ipfs_cid) + await self.db.create_cache_item(cid, ipfs_cid) await self.db.save_item_metadata( - content_hash=content_hash, + cid=cid, actor_id=actor_id, item_type="media", filename=f"ipfs-{ipfs_cid[:16]}" ) - return content_hash, None + return cid, None except Exception as e: return None, f"Import failed: {e}" @@ -454,7 +454,7 @@ class CacheService: filename: str, actor_id: str, ) -> Tuple[Optional[str], Optional[str], Optional[str]]: - """Upload content to cache. Returns (content_hash, ipfs_cid, error).""" + """Upload content to cache. Returns (cid, ipfs_cid, error).""" import tempfile try: @@ -468,18 +468,18 @@ class CacheService: # Store in cache (also stores in IPFS) cached, ipfs_cid = self.cache.put(tmp_path, node_type="upload", move=True) - content_hash = cached.content_hash + cid = cached.cid # Save to database with detected MIME type - await self.db.create_cache_item(content_hash, ipfs_cid) + await self.db.create_cache_item(cid, ipfs_cid) await self.db.save_item_metadata( - content_hash=content_hash, + cid=cid, actor_id=actor_id, item_type=mime_type, # Store actual MIME type filename=filename ) - return content_hash, ipfs_cid, None + return cid, ipfs_cid, None except Exception as e: return None, None, f"Upload failed: {e}" @@ -502,10 +502,10 @@ class CacheService: return items # Legacy compatibility methods - def has_content(self, content_hash: str) -> bool: + def has_content(self, cid: str) -> bool: """Check if content exists in cache.""" - return self.cache.has_content(content_hash) + return self.cache.has_content(cid) - def get_ipfs_cid(self, content_hash: str) -> Optional[str]: + def get_ipfs_cid(self, cid: str) -> Optional[str]: """Get IPFS CID for cached 
content.""" - return self.cache.get_ipfs_cid(content_hash) + return self.cache.get_ipfs_cid(cid) diff --git a/app/services/recipe_service.py b/app/services/recipe_service.py index e67a2ae..451048f 100644 --- a/app/services/recipe_service.py +++ b/app/services/recipe_service.py @@ -27,7 +27,7 @@ class RecipeService: async def get_recipe(self, recipe_id: str) -> Optional[Dict[str, Any]]: """Get a recipe by ID (content hash).""" # Get from cache (content-addressed storage) - path = self.cache.get_by_content_hash(recipe_id) + path = self.cache.get_by_cid(recipe_id) if not path or not path.exists(): return None @@ -70,8 +70,8 @@ class RecipeService: if hasattr(self.cache, 'list_by_type'): items = self.cache.list_by_type('recipe') logger.info(f"Found {len(items)} recipes in cache") - for content_hash in items: - recipe = await self.get_recipe(content_hash) + for cid in items: + recipe = await self.get_recipe(cid) if recipe and not recipe.get("error"): owner = recipe.get("owner") # Filter by actor - L1 is per-user @@ -114,7 +114,7 @@ class RecipeService: # Store in cache (content-addressed, auto-pins to IPFS) cached, ipfs_cid = self.cache.put(tmp_path, node_type="recipe", move=True) - recipe_id = cached.content_hash + recipe_id = cached.cid return recipe_id, None @@ -140,12 +140,12 @@ class RecipeService: # Delete from cache try: - if hasattr(self.cache, 'delete_by_content_hash'): - success, msg = self.cache.delete_by_content_hash(recipe_id) + if hasattr(self.cache, 'delete_by_cid'): + success, msg = self.cache.delete_by_cid(recipe_id) if not success: return False, msg else: - path = self.cache.get_by_content_hash(recipe_id) + path = self.cache.get_by_cid(recipe_id) if path and path.exists(): path.unlink() diff --git a/app/services/run_service.py b/app/services/run_service.py index bd1638c..94e5457 100644 --- a/app/services/run_service.py +++ b/app/services/run_service.py @@ -122,7 +122,7 @@ class RunService: "status": "completed", "recipe": cached.get("recipe"), 
"inputs": self._ensure_inputs_list(cached.get("inputs")), - "output_hash": cached.get("output_hash"), + "output_cid": cached.get("output_cid"), "ipfs_cid": cached.get("ipfs_cid"), "provenance_cid": cached.get("provenance_cid"), "actor_id": cached.get("actor_id"), @@ -171,7 +171,7 @@ class RunService: run_data["status"] = "completed" task_result = result.result if isinstance(task_result, dict): - run_data["output_hash"] = task_result.get("output_hash") + run_data["output_cid"] = task_result.get("output_cid") else: run_data["status"] = "failed" run_data["error"] = str(result.result) @@ -258,7 +258,7 @@ class RunService: run_data["status"] = "completed" task_result = result.result if isinstance(task_result, dict): - run_data["output_hash"] = task_result.get("output_hash") + run_data["output_cid"] = task_result.get("output_cid") else: run_data["status"] = "failed" run_data["error"] = str(result.result) @@ -332,15 +332,15 @@ class RunService: # Check database cache first (completed runs) cached_run = await self.db.get_run_cache(run_id) if cached_run: - output_hash = cached_run.get("output_hash") - if output_hash and self.cache.has_content(output_hash): + output_cid = cached_run.get("output_cid") + if output_cid and self.cache.has_content(output_cid): return { "run_id": run_id, "status": "completed", "recipe": recipe, "inputs": input_list, "output_name": output_name, - "output_hash": output_hash, + "output_cid": output_cid, "ipfs_cid": cached_run.get("ipfs_cid"), "provenance_cid": cached_run.get("provenance_cid"), "created_at": cached_run.get("created_at"), @@ -355,20 +355,20 @@ class RunService: l2_resp = await client.get(f"{l2_server}/assets/by-run-id/{run_id}") if l2_resp.status_code == 200: l2_data = l2_resp.json() - output_hash = l2_data.get("output_hash") + output_cid = l2_data.get("output_cid") ipfs_cid = l2_data.get("ipfs_cid") - if output_hash and ipfs_cid: + if output_cid and ipfs_cid: # Pull from IPFS to local cache try: import ipfs_client legacy_dir = 
self.cache_dir / "legacy" legacy_dir.mkdir(parents=True, exist_ok=True) - recovery_path = legacy_dir / output_hash + recovery_path = legacy_dir / output_cid if ipfs_client.get_file(ipfs_cid, str(recovery_path)): # Save to database cache await self.db.save_run_cache( run_id=run_id, - output_hash=output_hash, + output_cid=output_cid, recipe=recipe, inputs=input_list, ipfs_cid=ipfs_cid, @@ -380,7 +380,7 @@ class RunService: "status": "completed", "recipe": recipe, "inputs": input_list, - "output_hash": output_hash, + "output_cid": output_cid, "ipfs_cid": ipfs_cid, "provenance_cid": l2_data.get("provenance_cid"), "created_at": datetime.now(timezone.utc).isoformat(), @@ -493,7 +493,7 @@ class RunService: plan_cache_id = run.get("plan_cache_id") if plan_cache_id: # Get plan from cache by content hash - plan_path = self.cache.get_by_content_hash(plan_cache_id) + plan_path = self.cache.get_by_cid(plan_cache_id) if plan_path and plan_path.exists(): with open(plan_path) as f: content = f.read() @@ -535,12 +535,12 @@ class RunService: artifacts = [] - def get_artifact_info(content_hash: str, role: str, name: str) -> Optional[Dict]: - if self.cache.has_content(content_hash): - path = self.cache.get_by_content_hash(content_hash) + def get_artifact_info(cid: str, role: str, name: str) -> Optional[Dict]: + if self.cache.has_content(cid): + path = self.cache.get_by_cid(cid) if path and path.exists(): return { - "hash": content_hash, + "hash": cid, "size_bytes": path.stat().st_size, "media_type": detect_media_type(path), "role": role, @@ -558,8 +558,8 @@ class RunService: artifacts.append(info) # Add output - if run.get("output_hash"): - info = get_artifact_info(run["output_hash"], "output", "Output") + if run.get("output_cid"): + info = get_artifact_info(run["output_cid"], "output", "Output") if info: artifacts.append(info) @@ -669,10 +669,10 @@ class RunService: if result.successful(): # Task completed - move to run_cache task_result = result.result - if isinstance(task_result, 
dict) and task_result.get("output_hash"): + if isinstance(task_result, dict) and task_result.get("output_cid"): await self.db.save_run_cache( run_id=run_id, - output_hash=task_result["output_hash"], + output_cid=task_result["output_cid"], recipe=run.get("recipe", "unknown"), inputs=run.get("inputs", []), ipfs_cid=task_result.get("ipfs_cid"), diff --git a/app/templates/cache/detail.html b/app/templates/cache/detail.html index ea2be15..3420d2c 100644 --- a/app/templates/cache/detail.html +++ b/app/templates/cache/detail.html @@ -1,29 +1,29 @@ {% extends "base.html" %} -{% block title %}{{ cache.content_hash[:16] }} - Cache - Art-DAG L1{% endblock %} +{% block title %}{{ cache.cid[:16] }} - Cache - Art-DAG L1{% endblock %} {% block content %}
← Media -

{{ cache.content_hash[:24] }}...

+

{{ cache.cid[:24] }}...

{% if cache.mime_type and cache.mime_type.startswith('image/') %} - {% elif cache.mime_type and cache.mime_type.startswith('video/') %} - {% elif cache.mime_type and cache.mime_type.startswith('audio/') %}
- +
{% elif cache.mime_type == 'application/json' %} @@ -42,8 +42,8 @@
-
Hash
-
{{ cache.content_hash }}
+
CID
+
{{ cache.cid }}
Content Type
@@ -92,12 +92,12 @@
- Download - @@ -3600,7 +3600,7 @@ async def ui_cache_meta_form(content_hash: str, request: Request): if origin_type: publish_html = f'''
-
@@ -3625,7 +3625,7 @@ async def ui_cache_meta_form(content_hash: str, request: Request):

Metadata

- +
@@ -3690,7 +3690,7 @@ async def ui_cache_meta_form(content_hash: str, request: Request):
{'

Cannot discard pinned items.

' if pinned else f""" -