feat: add cache metadata, folders, and collections API
- Update save_cache_meta() to support updates (not just create)
- Add GET/PATCH /cache/{hash}/meta endpoints for metadata management
- Add user data storage for folders/collections (per-user .json files)
- Add folder CRUD endpoints (/user/folders)
- Add collection CRUD endpoints (/user/collections)
- Add cache list filtering by folder, collection, and tags
- Support origin tracking (self vs external URL)
- Support tags, description, and organization metadata
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
server.py (279 lines changed)
@@ -495,18 +495,30 @@ async def import_to_cache(path: str):
|
||||
return {"content_hash": content_hash, "cached": True}
|
||||
|
||||
|
||||
def save_cache_meta(content_hash: str, uploader: str = None, filename: str = None, **updates):
    """Save or update metadata for a cached file.

    On the first call for a hash, records the provenance fields
    (uploader, uploaded_at, filename). On later calls, applies
    ``**updates`` to the existing metadata while refusing to alter
    uploader/uploaded_at so the original provenance is preserved.

    Args:
        content_hash: Hash identifying the cached file.
        uploader: Actor who uploaded the file (used only on creation).
        filename: Original filename (used only on creation).
        **updates: Arbitrary metadata keys to set (e.g. tags, folder,
            description, collections). "uploader" and "uploaded_at" are
            silently ignored.

    Returns:
        The full metadata dict as persisted.
    """
    meta_path = CACHE_DIR / f"{content_hash}.meta.json"

    # Load existing metadata, or seed a new record with provenance fields.
    if meta_path.exists():
        with open(meta_path) as f:
            meta = json.load(f)
    else:
        meta = {
            "uploader": uploader,
            "uploaded_at": datetime.now(timezone.utc).isoformat(),
            "filename": filename
        }

    # Apply updates, but never let callers rewrite provenance.
    for key, value in updates.items():
        if key not in ("uploader", "uploaded_at"):
            meta[key] = value

    with open(meta_path, "w") as f:
        json.dump(meta, f, indent=2)

    return meta
|
||||
|
||||
|
||||
def load_cache_meta(content_hash: str) -> dict:
|
||||
@@ -518,6 +530,31 @@ def load_cache_meta(content_hash: str) -> dict:
|
||||
return {}
|
||||
|
||||
|
||||
# User data storage (folders, collections): one JSON file per user,
# kept in a hidden subdirectory alongside the cache.
USER_DATA_DIR = CACHE_DIR / ".user-data"
|
||||
|
||||
|
||||
def load_user_data(username: str) -> dict:
    """Load user's folders and collections, with defaults for new users."""
    USER_DATA_DIR.mkdir(parents=True, exist_ok=True)
    # Normalize username (remove @ prefix if present) so it is filesystem-safe.
    safe_name = username.replace("@", "").replace("/", "_")
    user_file = USER_DATA_DIR / f"{safe_name}.json"
    if not user_file.exists():
        # First access: root folder only, no collections yet.
        return {"folders": ["/"], "collections": []}
    with open(user_file) as f:
        return json.load(f)
|
||||
|
||||
|
||||
def save_user_data(username: str, data: dict):
    """Persist the user's folders and collections to their JSON file."""
    USER_DATA_DIR.mkdir(parents=True, exist_ok=True)
    # Same normalization as load_user_data so reads and writes agree.
    safe_name = username.replace("@", "").replace("/", "_")
    target = USER_DATA_DIR / f"{safe_name}.json"
    with open(target, "w") as f:
        json.dump(data, f, indent=2)
|
||||
|
||||
|
||||
def get_user_cache_hashes(username: str) -> set:
|
||||
"""Get all cache hashes owned by or associated with a user."""
|
||||
actor_id = f"@{username}@{L2_DOMAIN}"
|
||||
@@ -568,6 +605,184 @@ async def upload_to_cache(file: UploadFile = File(...), username: str = Depends(
|
||||
return {"content_hash": content_hash, "filename": file.filename, "size": len(content)}
|
||||
|
||||
|
||||
class CacheMetaUpdate(BaseModel):
    """Request to update cache metadata.

    All fields are optional; only fields explicitly set by the client
    are applied to the stored metadata.
    """
    # Origin of the content: {"type": "self"|"external", "url": "...", "note": "..."}
    origin: Optional[dict] = None
    description: Optional[str] = None
    tags: Optional[list[str]] = None
    # Target folder path; must already exist in the user's folder list.
    folder: Optional[str] = None
    # Collection names; each must already exist for the user.
    collections: Optional[list[str]] = None
|
||||
|
||||
|
||||
@app.get("/cache/{content_hash}/meta")
async def get_cache_meta(content_hash: str, username: str = Depends(get_required_user)):
    """Return metadata for a cached file the caller owns."""
    # The raw blob must exist on disk before metadata makes sense.
    if not (CACHE_DIR / content_hash).exists():
        raise HTTPException(404, "Content not found")

    # Only a user associated with this hash may read its metadata.
    if content_hash not in get_user_cache_hashes(username):
        raise HTTPException(403, "Access denied")

    return load_cache_meta(content_hash)
|
||||
|
||||
|
||||
@app.patch("/cache/{content_hash}/meta")
async def update_cache_meta(content_hash: str, update: CacheMetaUpdate, username: str = Depends(get_required_user)):
    """Update metadata for a cached file.

    Only fields explicitly set in the request are applied. Folder and
    collection values are validated against the user's own data before
    being written; provenance fields (uploader, uploaded_at) are never
    touched (enforced by save_cache_meta).

    Raises:
        HTTPException: 404 if the content is not cached, 403 if the
            caller is not associated with it, 400 if a referenced
            folder or collection does not exist.
    """
    # Check the file exists.
    cache_path = CACHE_DIR / content_hash
    if not cache_path.exists():
        raise HTTPException(404, "Content not found")

    # Check ownership.
    user_hashes = get_user_cache_hashes(username)
    if content_hash not in user_hashes:
        raise HTTPException(403, "Access denied")

    # Build update dict from non-None fields.
    updates = {}
    if update.origin is not None:
        updates["origin"] = update.origin
    if update.description is not None:
        updates["description"] = update.description
    if update.tags is not None:
        updates["tags"] = update.tags

    # Load the user's data at most once, and only when validation needs it
    # (the original loaded it twice when both folder and collections were set).
    user_data = None
    if update.folder is not None or update.collections is not None:
        user_data = load_user_data(username)

    if update.folder is not None:
        # Ensure the folder exists in the user's folder list.
        if update.folder not in user_data["folders"]:
            raise HTTPException(400, f"Folder does not exist: {update.folder}")
        updates["folder"] = update.folder

    if update.collections is not None:
        # Validate that every referenced collection exists.
        existing = {c["name"] for c in user_data["collections"]}
        for col in update.collections:
            if col not in existing:
                raise HTTPException(400, f"Collection does not exist: {col}")
        updates["collections"] = update.collections

    return save_cache_meta(content_hash, **updates)
|
||||
|
||||
|
||||
# ============ Folder & Collection Management ============
|
||||
|
||||
@app.get("/user/folders")
async def list_folders(username: str = Depends(get_required_user)):
    """List user's folders."""
    return {"folders": load_user_data(username)["folders"]}
|
||||
|
||||
|
||||
@app.post("/user/folders")
async def create_folder(folder_path: str, username: str = Depends(get_required_user)):
    """Create a new folder.

    The path must be absolute ("/a/b"), contain no empty segments, and
    have an existing parent; duplicates are rejected.

    Raises:
        HTTPException: 400 on any validation failure.
    """
    user_data = load_user_data(username)

    # Validate path format.
    if not folder_path.startswith("/"):
        raise HTTPException(400, "Folder path must start with /")
    # Reject empty segments ("/a//b", trailing "/a/"): they would create
    # paths that render the same but compare unequal, confusing the
    # prefix-based subfolder and emptiness checks used when deleting.
    if folder_path != "/" and "" in folder_path[1:].split("/"):
        raise HTTPException(400, "Folder path must not contain empty segments")

    # The parent folder must already exist (root always exists).
    parent = "/".join(folder_path.rsplit("/", 1)[:-1]) or "/"
    if parent != "/" and parent not in user_data["folders"]:
        raise HTTPException(400, f"Parent folder does not exist: {parent}")

    # No duplicates.
    if folder_path in user_data["folders"]:
        raise HTTPException(400, f"Folder already exists: {folder_path}")

    user_data["folders"].append(folder_path)
    user_data["folders"].sort()
    save_user_data(username, user_data)

    return {"folder": folder_path, "created": True}
|
||||
|
||||
|
||||
@app.delete("/user/folders")
async def delete_folder(folder_path: str, username: str = Depends(get_required_user)):
    """Delete a folder (must be empty and have no subfolders)."""
    if folder_path == "/":
        raise HTTPException(400, "Cannot delete root folder")

    user_data = load_user_data(username)
    if folder_path not in user_data["folders"]:
        raise HTTPException(404, "Folder not found")

    # Refuse while any descendant folder exists.
    prefix = folder_path + "/"
    for existing in user_data["folders"]:
        if existing.startswith(prefix):
            raise HTTPException(400, f"Folder has subfolders: {existing}")

    # Refuse while any of the user's cached items still lives here.
    for content_hash in get_user_cache_hashes(username):
        if load_cache_meta(content_hash).get("folder") == folder_path:
            raise HTTPException(400, "Folder is not empty")

    user_data["folders"].remove(folder_path)
    save_user_data(username, user_data)

    return {"folder": folder_path, "deleted": True}
|
||||
|
||||
|
||||
@app.get("/user/collections")
async def list_collections(username: str = Depends(get_required_user)):
    """List user's collections."""
    return {"collections": load_user_data(username)["collections"]}
|
||||
|
||||
|
||||
@app.post("/user/collections")
async def create_collection(name: str, username: str = Depends(get_required_user)):
    """Create a new collection."""
    user_data = load_user_data(username)

    # Reject duplicate names.
    if any(entry["name"] == name for entry in user_data["collections"]):
        raise HTTPException(400, f"Collection already exists: {name}")

    new_entry = {
        "name": name,
        "created_at": datetime.now(timezone.utc).isoformat()
    }
    user_data["collections"].append(new_entry)
    save_user_data(username, user_data)

    return {"collection": name, "created": True}
|
||||
|
||||
|
||||
@app.delete("/user/collections")
async def delete_collection(name: str, username: str = Depends(get_required_user)):
    """Delete a collection and detach it from every cached item."""
    user_data = load_user_data(username)

    # Locate the collection entry by name; 404 when absent.
    index = next(
        (i for i, entry in enumerate(user_data["collections"]) if entry["name"] == name),
        None,
    )
    if index is None:
        raise HTTPException(404, "Collection not found")

    user_data["collections"].pop(index)
    save_user_data(username, user_data)

    # Strip the collection from every cache item that referenced it.
    for content_hash in get_user_cache_hashes(username):
        meta = load_cache_meta(content_hash)
        if name in meta.get("collections", []):
            meta["collections"].remove(name)
            save_cache_meta(content_hash, **{k: v for k, v in meta.items() if k not in ("uploader", "uploaded_at")})

    return {"collection": name, "deleted": True}
|
||||
|
||||
|
||||
def detect_media_type(cache_path: Path) -> str:
|
||||
"""Detect if file is image or video based on magic bytes."""
|
||||
with open(cache_path, "rb") as f:
|
||||
@@ -987,8 +1202,13 @@ async def ui_runs(request: Request):
|
||||
|
||||
|
||||
@app.get("/ui/cache-list", response_class=HTMLResponse)
|
||||
async def ui_cache_list(request: Request):
|
||||
"""HTMX partial: list of cached items."""
|
||||
async def ui_cache_list(
|
||||
request: Request,
|
||||
folder: Optional[str] = None,
|
||||
collection: Optional[str] = None,
|
||||
tag: Optional[str] = None
|
||||
):
|
||||
"""HTMX partial: list of cached items with optional filtering."""
|
||||
current_user = get_user_from_cookie(request)
|
||||
|
||||
# Require login to see cache
|
||||
@@ -1004,18 +1224,47 @@ async def ui_cache_list(request: Request):
|
||||
for f in CACHE_DIR.iterdir():
|
||||
if f.is_file() and not f.name.endswith('.provenance.json') and not f.name.endswith('.meta.json'):
|
||||
if f.name in user_hashes:
|
||||
# Load metadata for filtering
|
||||
meta = load_cache_meta(f.name)
|
||||
|
||||
# Apply folder filter
|
||||
if folder:
|
||||
item_folder = meta.get("folder", "/")
|
||||
if folder != "/" and not item_folder.startswith(folder):
|
||||
continue
|
||||
if folder == "/" and item_folder != "/":
|
||||
continue
|
||||
|
||||
# Apply collection filter
|
||||
if collection:
|
||||
if collection not in meta.get("collections", []):
|
||||
continue
|
||||
|
||||
# Apply tag filter
|
||||
if tag:
|
||||
if tag not in meta.get("tags", []):
|
||||
continue
|
||||
|
||||
stat = f.stat()
|
||||
cache_items.append({
|
||||
"hash": f.name,
|
||||
"size": stat.st_size,
|
||||
"mtime": stat.st_mtime
|
||||
"mtime": stat.st_mtime,
|
||||
"meta": meta
|
||||
})
|
||||
|
||||
# Sort by modification time (newest first)
|
||||
cache_items.sort(key=lambda x: x["mtime"], reverse=True)
|
||||
|
||||
if not cache_items:
|
||||
return '<p class="no-runs">No cached files. Upload files or run effects to see them here.</p>'
|
||||
filter_msg = ""
|
||||
if folder:
|
||||
filter_msg = f" in folder {folder}"
|
||||
elif collection:
|
||||
filter_msg = f" in collection '{collection}'"
|
||||
elif tag:
|
||||
filter_msg = f" with tag '{tag}'"
|
||||
return f'<p class="no-runs">No cached files{filter_msg}. Upload files or run effects to see them here.</p>'
|
||||
|
||||
html_parts = ['<div class="runs">']
|
||||
|
||||
|
||||
Reference in New Issue
Block a user