Save IPFS CID to database when caching files
- Make cache_file() async and save ipfs_cid to the cache_items table.
- Update all call sites to use await.
- Add a create_cache_item call in the upload endpoint.

Fixes IPFS info not showing for uploaded files.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
16
server.py
16
server.py
@@ -396,13 +396,16 @@ def file_hash(path: Path) -> str:
|
||||
return hasher.hexdigest()
|
||||
|
||||
|
||||
async def cache_file(source: Path, node_type: str = "output") -> str:
    """Copy *source* into the cache and return its content hash.

    Delegates the actual copy to cache_manager (artdag's Cache) so the
    file is tracked properly, then records the resulting IPFS CID in
    the cache_items table.
    """
    entry, cid = cache_manager.put(source, node_type=node_type)
    # Persist the content-hash -> IPFS CID mapping so IPFS info is
    # available for this item later.
    await database.create_cache_item(entry.content_hash, cid)
    return entry.content_hash
|
||||
|
||||
|
||||
@@ -609,7 +612,7 @@ async def get_run(run_id: str):
|
||||
|
||||
# Cache the output (legacy mode - DAG already caches via cache_manager)
|
||||
if output_path and output_path.exists() and "output_hash" not in result:
|
||||
cache_file(output_path, node_type="effect_output")
|
||||
await cache_file(output_path, node_type="effect_output")
|
||||
|
||||
# Record activity for deletion tracking (legacy mode)
|
||||
if run.output_hash and run.inputs:
|
||||
@@ -739,7 +742,7 @@ async def run_detail(run_id: str, request: Request):
|
||||
run.infrastructure = result.get("infrastructure")
|
||||
output_path = Path(result.get("output", {}).get("local_path", ""))
|
||||
if output_path.exists():
|
||||
cache_file(output_path)
|
||||
await cache_file(output_path)
|
||||
else:
|
||||
run.status = "failed"
|
||||
run.error = str(task.result)
|
||||
@@ -2561,7 +2564,7 @@ async def import_to_cache(path: str):
|
||||
if not source.exists():
|
||||
raise HTTPException(404, f"File not found: {path}")
|
||||
|
||||
content_hash = cache_file(source)
|
||||
content_hash = await cache_file(source)
|
||||
return {"content_hash": content_hash, "cached": True}
|
||||
|
||||
|
||||
@@ -2686,6 +2689,9 @@ async def upload_to_cache(file: UploadFile = File(...), ctx: UserContext = Depen
|
||||
cached, ipfs_cid = cache_manager.put(tmp_path, node_type="upload", move=True)
|
||||
content_hash = cached.content_hash
|
||||
|
||||
# Save to cache_items table (with IPFS CID)
|
||||
await database.create_cache_item(content_hash, ipfs_cid)
|
||||
|
||||
# Save uploader metadata to database
|
||||
await database.save_item_metadata(
|
||||
content_hash=content_hash,
|
||||
@@ -3656,7 +3662,7 @@ async def ui_run_partial(run_id: str, request: Request):
|
||||
run.infrastructure = result.get("infrastructure")
|
||||
output_path = Path(result.get("output", {}).get("local_path", ""))
|
||||
if output_path.exists():
|
||||
cache_file(output_path)
|
||||
await cache_file(output_path)
|
||||
else:
|
||||
run.status = "failed"
|
||||
run.error = str(task.result)
|
||||
|
||||
Reference in New Issue
Block a user