Save IPFS CID to database when caching files

- Make cache_file() async and save ipfs_cid to cache_items table
- Update all call sites to use await
- Add create_cache_item call in upload endpoint

Fixes IPFS info not showing for uploaded files.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
gilesb
2026-01-08 19:02:52 +00:00
parent 552c4590c2
commit c73e79fe28

View File

@@ -396,13 +396,16 @@ def file_hash(path: Path) -> str:
return hasher.hexdigest() return hasher.hexdigest()
async def cache_file(source: Path, node_type: str = "output") -> str:
    """
    Copy *source* into the cache and return its content hash.

    Uses the module-level L1CacheManager (artdag's Cache internally) for
    proper tracking, then records the resulting IPFS CID in the
    ``cache_items`` database table so IPFS info is available for the file.

    Args:
        source: Path of the file to cache.
        node_type: Cache node classification (default ``"output"``).

    Returns:
        The content hash of the cached file.
    """
    # cache_manager.put returns the cached entry plus its IPFS CID.
    cached, ipfs_cid = cache_manager.put(source, node_type=node_type)
    # Persist the CID so the UI can show IPFS info for this item.
    # NOTE(review): assumes create_cache_item is idempotent for repeated
    # hashes — confirm against the database layer.
    await database.create_cache_item(cached.content_hash, ipfs_cid)
    return cached.content_hash
@@ -609,7 +612,7 @@ async def get_run(run_id: str):
# Cache the output (legacy mode - DAG already caches via cache_manager) # Cache the output (legacy mode - DAG already caches via cache_manager)
if output_path and output_path.exists() and "output_hash" not in result: if output_path and output_path.exists() and "output_hash" not in result:
cache_file(output_path, node_type="effect_output") await cache_file(output_path, node_type="effect_output")
# Record activity for deletion tracking (legacy mode) # Record activity for deletion tracking (legacy mode)
if run.output_hash and run.inputs: if run.output_hash and run.inputs:
@@ -739,7 +742,7 @@ async def run_detail(run_id: str, request: Request):
run.infrastructure = result.get("infrastructure") run.infrastructure = result.get("infrastructure")
output_path = Path(result.get("output", {}).get("local_path", "")) output_path = Path(result.get("output", {}).get("local_path", ""))
if output_path.exists(): if output_path.exists():
cache_file(output_path) await cache_file(output_path)
else: else:
run.status = "failed" run.status = "failed"
run.error = str(task.result) run.error = str(task.result)
@@ -2561,7 +2564,7 @@ async def import_to_cache(path: str):
if not source.exists(): if not source.exists():
raise HTTPException(404, f"File not found: {path}") raise HTTPException(404, f"File not found: {path}")
content_hash = cache_file(source) content_hash = await cache_file(source)
return {"content_hash": content_hash, "cached": True} return {"content_hash": content_hash, "cached": True}
@@ -2686,6 +2689,9 @@ async def upload_to_cache(file: UploadFile = File(...), ctx: UserContext = Depen
cached, ipfs_cid = cache_manager.put(tmp_path, node_type="upload", move=True) cached, ipfs_cid = cache_manager.put(tmp_path, node_type="upload", move=True)
content_hash = cached.content_hash content_hash = cached.content_hash
# Save to cache_items table (with IPFS CID)
await database.create_cache_item(content_hash, ipfs_cid)
# Save uploader metadata to database # Save uploader metadata to database
await database.save_item_metadata( await database.save_item_metadata(
content_hash=content_hash, content_hash=content_hash,
@@ -3656,7 +3662,7 @@ async def ui_run_partial(run_id: str, request: Request):
run.infrastructure = result.get("infrastructure") run.infrastructure = result.get("infrastructure")
output_path = Path(result.get("output", {}).get("local_path", "")) output_path = Path(result.get("output", {}).get("local_path", ""))
if output_path.exists(): if output_path.exists():
cache_file(output_path) await cache_file(output_path)
else: else:
run.status = "failed" run.status = "failed"
run.error = str(task.result) run.error = str(task.result)