#!/usr/bin/env python3 """ Art DAG L2 Server - ActivityPub Manages ownership registry, activities, and federation. - Registry of owned assets - ActivityPub actor endpoints - Sign and publish Create activities - Federation with other servers """ import hashlib import json import logging import os import uuid from contextlib import asynccontextmanager from datetime import datetime, timezone from pathlib import Path from typing import Optional from urllib.parse import urlparse # Configure logging logging.basicConfig( level=logging.INFO, format='%(asctime)s %(levelname)s %(name)s: %(message)s' ) logger = logging.getLogger(__name__) from fastapi import FastAPI, HTTPException, Request, Response, Depends, Cookie, Form from fastapi.responses import JSONResponse, HTMLResponse, RedirectResponse, FileResponse from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials from pydantic import BaseModel import requests import markdown import db from auth import ( UserCreate, UserLogin, Token, User, create_user, authenticate_user, create_access_token, verify_token, get_token_claims, get_current_user ) # Configuration DOMAIN = os.environ.get("ARTDAG_DOMAIN", "artdag.rose-ash.com") DATA_DIR = Path(os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2"))) L1_PUBLIC_URL = os.environ.get("L1_PUBLIC_URL", "https://celery-artdag.rose-ash.com") EFFECTS_REPO_URL = os.environ.get("EFFECTS_REPO_URL", "https://git.rose-ash.com/art-dag/effects") IPFS_GATEWAY_URL = os.environ.get("IPFS_GATEWAY_URL", "") # Known L1 renderers (comma-separated URLs) L1_SERVERS_STR = os.environ.get("L1_SERVERS", "https://celery-artdag.rose-ash.com") L1_SERVERS = [s.strip() for s in L1_SERVERS_STR.split(",") if s.strip()] # Cookie domain for sharing auth across subdomains (e.g., ".rose-ash.com") # If not set, derives from DOMAIN (strips first subdomain, adds leading dot) def _get_cookie_domain(): env_val = os.environ.get("COOKIE_DOMAIN") if env_val: return env_val # Derive from DOMAIN: 
artdag.rose-ash.com -> .rose-ash.com parts = DOMAIN.split(".") if len(parts) >= 2: return "." + ".".join(parts[-2:]) return None COOKIE_DOMAIN = _get_cookie_domain() # Ensure data directory exists DATA_DIR.mkdir(parents=True, exist_ok=True) (DATA_DIR / "assets").mkdir(exist_ok=True) def compute_run_id(input_hashes: list[str], recipe: str, recipe_hash: str = None) -> str: """ Compute a deterministic run_id from inputs and recipe. The run_id is a SHA3-256 hash of: - Sorted input content hashes - Recipe identifier (recipe_hash if provided, else "effect:{recipe}") This makes runs content-addressable: same inputs + recipe = same run_id. Must match the L1 implementation exactly. """ data = { "inputs": sorted(input_hashes), "recipe": recipe_hash or f"effect:{recipe}", "version": "1", # For future schema changes } json_str = json.dumps(data, sort_keys=True, separators=(",", ":")) return hashlib.sha3_256(json_str.encode()).hexdigest() # Load README README_PATH = Path(__file__).parent / "README.md" README_CONTENT = "" if README_PATH.exists(): README_CONTENT = README_PATH.read_text() @asynccontextmanager async def lifespan(app: FastAPI): """Manage database connection pool lifecycle.""" await db.init_pool() yield await db.close_pool() app = FastAPI( title="Art DAG L2 Server", description="ActivityPub server for Art DAG ownership and federation", version="0.1.0", lifespan=lifespan ) @app.exception_handler(404) async def not_found_handler(request: Request, exc): """Custom 404 page.""" accept = request.headers.get("accept", "") if "text/html" in accept and "application/json" not in accept: content = '''
''' username = get_user_from_cookie(request) return HTMLResponse(base_html("Not Found", content, username), status_code=404) return JSONResponse({"detail": "Not found"}, status_code=404) # ============ Data Models ============ class Asset(BaseModel): """An owned asset.""" name: str content_hash: str ipfs_cid: Optional[str] = None # IPFS content identifier asset_type: str # image, video, effect, recipe, infrastructure tags: list[str] = [] metadata: dict = {} url: Optional[str] = None provenance: Optional[dict] = None created_at: str = "" class Activity(BaseModel): """An ActivityPub activity.""" activity_id: str activity_type: str # Create, Update, Delete, Announce actor_id: str object_data: dict published: str signature: Optional[dict] = None class RegisterRequest(BaseModel): """Request to register an asset.""" name: str content_hash: str ipfs_cid: Optional[str] = None # IPFS content identifier asset_type: str tags: list[str] = [] metadata: dict = {} url: Optional[str] = None provenance: Optional[dict] = None class RecordRunRequest(BaseModel): """Request to record an L1 run.""" run_id: str l1_server: str # URL of the L1 server that has this run output_name: Optional[str] = None # Deprecated - assets now named by content_hash class PublishCacheRequest(BaseModel): """Request to publish a cache item from L1.""" content_hash: str ipfs_cid: Optional[str] = None # IPFS content identifier asset_name: str asset_type: str = "image" origin: dict # {type: "self"|"external", url?: str, note?: str} description: Optional[str] = None tags: list[str] = [] metadata: dict = {} class UpdateAssetRequest(BaseModel): """Request to update an existing asset.""" description: Optional[str] = None tags: Optional[list[str]] = None metadata: Optional[dict] = None origin: Optional[dict] = None ipfs_cid: Optional[str] = None # IPFS content identifier class AddStorageRequest(BaseModel): """Request to add a storage provider.""" provider_type: str # 'pinata', 'web3storage', 'local' provider_name: 
Optional[str] = None # User-friendly name config: dict # Provider-specific config (api_key, path, etc.) capacity_gb: int # Storage capacity in GB class UpdateStorageRequest(BaseModel): """Request to update a storage provider.""" config: Optional[dict] = None capacity_gb: Optional[int] = None is_active: Optional[bool] = None class SetAssetSourceRequest(BaseModel): """Request to set source URL for an asset.""" source_url: str source_type: str # 'youtube', 'local', 'url' # ============ Storage (Database) ============ async def load_registry() -> dict: """Load registry from database.""" assets = await db.get_all_assets() return {"version": "1.0", "assets": assets} async def load_activities() -> list: """Load activities from database.""" return await db.get_all_activities() def load_actor(username: str) -> dict: """Load actor data for a specific user with public key if available.""" actor = { "id": f"https://{DOMAIN}/users/{username}", "type": "Person", "preferredUsername": username, "name": username, "inbox": f"https://{DOMAIN}/users/{username}/inbox", "outbox": f"https://{DOMAIN}/users/{username}/outbox", "followers": f"https://{DOMAIN}/users/{username}/followers", "following": f"https://{DOMAIN}/users/{username}/following", } # Add public key if available from keys import has_keys, load_public_key_pem if has_keys(DATA_DIR, username): actor["publicKey"] = { "id": f"https://{DOMAIN}/users/{username}#main-key", "owner": f"https://{DOMAIN}/users/{username}", "publicKeyPem": load_public_key_pem(DATA_DIR, username) } return actor async def user_exists(username: str) -> bool: """Check if a user exists.""" return await db.user_exists(username) async def load_followers() -> list: """Load followers list from database.""" return await db.get_all_followers() # ============ Signing ============ from keys import has_keys, load_public_key_pem, create_signature def sign_activity(activity: dict, username: str) -> dict: """Sign an activity with the user's RSA private key.""" if not 
has_keys(DATA_DIR, username): # No keys - use placeholder (for testing) activity["signature"] = { "type": "RsaSignature2017", "creator": f"https://{DOMAIN}/users/{username}#main-key", "created": datetime.now(timezone.utc).isoformat(), "signatureValue": "NO_KEYS_CONFIGURED" } else: activity["signature"] = create_signature(DATA_DIR, username, DOMAIN, activity) return activity # ============ HTML Templates ============ # Tailwind CSS config for L2 - dark theme to match L1 TAILWIND_CONFIG = ''' ''' def base_html(title: str, content: str, username: str = None) -> str: """Base HTML template with Tailwind CSS dark theme.""" user_section = f'''Don't have an account? Register
''' return HTMLResponse(base_html("Login", content)) @app.post("/login", response_class=HTMLResponse) async def ui_login_submit(request: Request): """Handle login form submission.""" form = await request.form() username = form.get("username", "").strip() password = form.get("password", "") return_to = form.get("return_to", "").strip() if not username or not password: return HTMLResponse('Already have an account? Login
''' return HTMLResponse(base_html("Register", content)) @app.post("/register", response_class=HTMLResponse) async def ui_register_submit(request: Request): """Handle register form submission.""" form = await request.form() username = form.get("username", "").strip() email = form.get("email", "").strip() or None password = form.get("password", "") password2 = form.get("password2", "") if not username or not password: return HTMLResponse('This activity does not exist.
''' return HTMLResponse(base_html("Activity Not Found", content, username)) activity = activities[activity_index] return await _render_activity_detail(activity, request) async def ui_activity_detail_by_data(activity: dict, request: Request): """Activity detail page taking activity data directly.""" return await _render_activity_detail(activity, request) async def _render_activity_detail(activity: dict, request: Request): """Core activity detail rendering logic.""" username = get_user_from_cookie(request) activity_type = activity.get("activity_type", "") activity_id = activity.get("activity_id", "") actor_id = activity.get("actor_id", "") actor_name = actor_id.split("/")[-1] if actor_id else "unknown" published = format_date(activity.get("published")) obj = activity.get("object_data", {}) # Object details obj_name = obj.get("name", "Untitled") obj_type = obj.get("type", "") content_hash_obj = obj.get("contentHash", {}) content_hash = content_hash_obj.get("value", "") if isinstance(content_hash_obj, dict) else "" media_type = obj.get("mediaType", "") description = obj.get("summary", "") or obj.get("content", "") # Provenance from object - or fallback to registry asset provenance = obj.get("provenance", {}) origin = obj.get("origin", {}) # Fallback: if activity doesn't have provenance, look up the asset from registry if not provenance or not origin: registry = await load_registry() assets = registry.get("assets", {}) # Find asset by content_hash or name for asset_name, asset_data in assets.items(): if asset_data.get("content_hash") == content_hash or asset_data.get("name") == obj_name: if not provenance: provenance = asset_data.get("provenance", {}) if not origin: origin = asset_data.get("origin", {}) break # Type colors type_color = "bg-green-600" if activity_type == "Create" else "bg-yellow-600" if activity_type == "Update" else "bg-gray-600" obj_type_color = "bg-blue-600" if "Image" in obj_type else "bg-purple-600" if "Video" in obj_type else "bg-gray-600" # 
Determine L1 server and asset type l1_server = provenance.get("l1_server", L1_PUBLIC_URL).rstrip("/") if provenance else L1_PUBLIC_URL.rstrip("/") is_video = "Video" in obj_type or "video" in media_type # Content display if is_video: content_html = f''' ''' elif "Image" in obj_type or "image" in media_type: content_html = f''' ''' else: content_html = f'''Content type: {media_type or obj_type}
Download{origin_note}
' # Provenance section provenance_html = "" if provenance and provenance.get("recipe"): recipe = provenance.get("recipe", "") inputs = provenance.get("inputs", []) l1_run_id = provenance.get("l1_run_id", "") rendered_at = format_date(provenance.get("rendered_at")) effects_commit = provenance.get("effects_commit", "") effect_url = provenance.get("effect_url") infrastructure = provenance.get("infrastructure", {}) if not effect_url: if effects_commit and effects_commit != "unknown": effect_url = f"{EFFECTS_REPO_URL}/src/commit/{effects_commit}/{recipe}" else: effect_url = f"{EFFECTS_REPO_URL}/src/branch/main/{recipe}" # Build inputs display - show actual content as thumbnails inputs_html = "" for inp in inputs: inp_hash = inp.get("content_hash", "") if isinstance(inp, dict) else inp if inp_hash: inputs_html += f'''{inp_hash[:16]}...
view
{" | ".join(infra_parts)}
' provenance_html = f'''This content was created by applying an effect to input content.
{description if description else 'No description'}
{content_hash}
{activity_id}
Object URL: https://{DOMAIN}/objects/{content_hash}
Actor: {actor_id}
No asset named "{name}" exists.
''' return HTMLResponse(base_html("Asset Not Found", content, username)) asset = assets[name] owner = asset.get("owner", "unknown") content_hash = asset.get("content_hash", "") ipfs_cid = asset.get("ipfs_cid", "") asset_type = asset.get("asset_type", "") tags = asset.get("tags", []) description = asset.get("description", "") origin = asset.get("origin") or {} provenance = asset.get("provenance") or {} metadata = asset.get("metadata") or {} created_at = format_date(asset.get("created_at")) type_color = "bg-blue-600" if asset_type == "image" else "bg-purple-600" if asset_type == "video" else "bg-gray-600" # Determine L1 server URL for content l1_server = provenance.get("l1_server", L1_PUBLIC_URL).rstrip("/") # Content display - image or video from L1 if asset_type == "video": # Use iOS-compatible MP4 endpoint content_html = f''' ''' elif asset_type == "image": content_html = f''' ''' elif asset_type == "recipe": # Fetch recipe source from L1 or IPFS recipe_source = "" try: resp = requests.get(f"{l1_server}/cache/{content_hash}", timeout=10, headers={"Accept": "text/plain"}) if resp.status_code == 200: recipe_source = resp.text except Exception: pass if not recipe_source and ipfs_cid: # Try IPFS try: import ipfs_client recipe_bytes = ipfs_client.get_bytes(ipfs_cid) if recipe_bytes: recipe_source = recipe_bytes.decode('utf-8') except Exception: pass import html as html_module recipe_source_escaped = html_module.escape(recipe_source) if recipe_source else "(Could not load recipe source)" content_html = f''' ''' else: content_html = f'''Content type: {asset_type}
Download{origin_note}
' # Tags display tags_html = 'No tags' if tags: tags_html = " ".join([f'{t}' for t in tags]) # IPFS display if ipfs_cid: local_gateway = f'Local' if IPFS_GATEWAY_URL else '' ipfs_html = f'''{ipfs_cid}
'''
else:
# No IPFS CID recorded for this asset — show a placeholder instead of gateway links.
ipfs_html = 'Not on IPFS'
# Provenance section - for rendered outputs (assets produced by an L1 effect run)
provenance_html = ""
if provenance:
# NOTE(review): indentation was lost in this extract; the assignments below
# belong inside the `if provenance:` branch.
recipe = provenance.get("recipe", "")
inputs = provenance.get("inputs", [])
l1_run_id = provenance.get("l1_run_id", "")
rendered_at = format_date(provenance.get("rendered_at"))
effects_commit = provenance.get("effects_commit", "")
infrastructure = provenance.get("infrastructure", {})
# Use stored effect_url or build fallback
effect_url = provenance.get("effect_url")
if not effect_url:
# Fallback for older records: link the exact commit when known, else the main branch.
if effects_commit and effects_commit != "unknown":
effect_url = f"{EFFECTS_REPO_URL}/src/commit/{effects_commit}/{recipe}"
else:
effect_url = f"{EFFECTS_REPO_URL}/src/branch/main/{recipe}"
# Build inputs display - show actual content as thumbnails
inputs_html = ""
for inp in inputs:
# Inputs may be dicts with a content_hash key or bare hash strings — handle both.
inp_hash = inp.get("content_hash", "") if isinstance(inp, dict) else inp
if inp_hash:
inputs_html += f'''
{inp_hash[:16]}...
view
This asset was created by applying an effect to input content.
{content_hash}
Object URL: https://{DOMAIN}/objects/{content_hash}
Owner Actor: https://{DOMAIN}/users/{owner}
No user named "{username}" exists.
''' return HTMLResponse(base_html("User Not Found", content, current_user)) # Get user's assets registry = await load_registry() all_assets = registry.get("assets", {}) user_assets = {name: asset for name, asset in all_assets.items() if asset.get("owner") == username} # Get user's activities all_activities = await load_activities() actor_id = f"https://{DOMAIN}/users/{username}" user_activities = [a for a in all_activities if a.get("actor_id") == actor_id] webfinger = f"@{username}@{DOMAIN}" # Assets table if user_assets: rows = "" for name, asset in sorted(user_assets.items(), key=lambda x: x[1].get("created_at", ""), reverse=True): hash_short = asset.get("content_hash", "")[:16] + "..." asset_type = asset.get("asset_type", "") type_color = "bg-blue-600" if asset_type == "image" else "bg-purple-600" if asset_type == "video" else "bg-gray-600" rows += f'''{hash_short}| Name | Type | Content Hash | Tags |
|---|
No published assets yet.
' content = f'''{webfinger}
Actor URL: https://{DOMAIN}/users/{username}
No users registered yet.
''' else: return HTMLResponse("") # Empty for infinite scroll else: rows = "" for uname, user_data in users_page: webfinger = f"@{uname}@{DOMAIN}" created_at = format_date(user_data.get("created_at")) rows += f'''{webfinger}| Username | WebFinger | Created |
|---|
No user named "{username}" exists.
''' return HTMLResponse(base_html("User Not Found", content, get_user_from_cookie(request))) raise HTTPException(404, f"Unknown user: {username}") if wants_html(request): # Render user detail page return await ui_user_detail(username, request) actor = load_actor(username) # Add ActivityPub context actor["@context"] = [ "https://www.w3.org/ns/activitystreams", "https://w3id.org/security/v1" ] return JSONResponse( content=actor, media_type="application/activity+json" ) @app.get("/users/{username}/outbox") async def get_outbox(username: str, page: bool = False): """Get actor's outbox (activities they created).""" if not await user_exists(username): raise HTTPException(404, f"Unknown user: {username}") # Filter activities by this user's actor_id all_activities = await load_activities() actor_id = f"https://{DOMAIN}/users/{username}" user_activities = [a for a in all_activities if a.get("actor_id") == actor_id] if not page: return JSONResponse( content={ "@context": "https://www.w3.org/ns/activitystreams", "id": f"https://{DOMAIN}/users/{username}/outbox", "type": "OrderedCollection", "totalItems": len(user_activities), "first": f"https://{DOMAIN}/users/{username}/outbox?page=true" }, media_type="application/activity+json" ) # Return activities page return JSONResponse( content={ "@context": "https://www.w3.org/ns/activitystreams", "id": f"https://{DOMAIN}/users/{username}/outbox?page=true", "type": "OrderedCollectionPage", "partOf": f"https://{DOMAIN}/users/{username}/outbox", "orderedItems": user_activities }, media_type="application/activity+json" ) @app.post("/users/{username}/inbox") async def post_inbox(username: str, request: Request): """Receive activities from other servers.""" if not await user_exists(username): raise HTTPException(404, f"Unknown user: {username}") body = await request.json() activity_type = body.get("type") # Handle Follow requests if activity_type == "Follow": follower_url = body.get("actor") # Add follower to database await 
db.add_follower(username, follower_url, follower_url) # Send Accept (in production, do this async) # For now just acknowledge return {"status": "accepted"} # Handle other activity types return {"status": "received"} @app.get("/users/{username}/followers") async def get_followers(username: str): """Get actor's followers.""" if not await user_exists(username): raise HTTPException(404, f"Unknown user: {username}") # TODO: Per-user followers - for now use global followers followers = await load_followers() return JSONResponse( content={ "@context": "https://www.w3.org/ns/activitystreams", "id": f"https://{DOMAIN}/users/{username}/followers", "type": "OrderedCollection", "totalItems": len(followers), "orderedItems": followers }, media_type="application/activity+json" ) # ============ Assets Endpoints ============ @app.get("/assets") async def get_registry(request: Request, page: int = 1, limit: int = 20): """Get registry. HTML for browsers (with infinite scroll), JSON for APIs (with pagination).""" registry = await load_registry() all_assets = list(registry.get("assets", {}).items()) total = len(all_assets) # Sort by created_at descending all_assets.sort(key=lambda x: x[1].get("created_at", ""), reverse=True) # Pagination start = (page - 1) * limit end = start + limit assets_page = all_assets[start:end] has_more = end < total if wants_html(request): username = get_user_from_cookie(request) if not assets_page: if page == 1: content = '''No assets registered yet.
''' else: return HTMLResponse("") # Empty for infinite scroll else: rows = "" for name, asset in assets_page: asset_type = asset.get("asset_type", "") type_color = "bg-blue-600" if asset_type == "image" else "bg-purple-600" if asset_type == "video" else "bg-gray-600" owner = asset.get("owner", "unknown") content_hash = asset.get("content_hash", "")[:16] + "..." rows += f'''{content_hash}| Name | Type | Owner | Hash |
|---|
No asset named "{name}" exists.
''' return HTMLResponse(base_html("Asset Not Found", content, get_user_from_cookie(request))) if wants_json: return registry["assets"][name] # Default to HTML for browsers return await ui_asset_detail(name, request) @app.get("/assets/by-run-id/{run_id}") async def get_asset_by_run_id(run_id: str): """ Get asset by content-addressable run_id. Returns the asset info including output_hash and ipfs_cid for L1 recovery. The run_id is stored in the asset's provenance when the run is recorded. """ asset = await db.get_asset_by_run_id(run_id) if not asset: raise HTTPException(404, f"No asset found for run_id: {run_id}") return { "run_id": run_id, "asset_name": asset.get("name"), "output_hash": asset.get("content_hash"), "ipfs_cid": asset.get("ipfs_cid"), "provenance_cid": asset.get("provenance", {}).get("provenance_cid") if asset.get("provenance") else None, } @app.patch("/assets/{name}") async def update_asset(name: str, req: UpdateAssetRequest, user: User = Depends(get_required_user)): """Update an existing asset's metadata. 
Creates an Update activity.""" asset = await db.get_asset(name) if not asset: raise HTTPException(404, f"Asset not found: {name}") # Check ownership if asset.get("owner") != user.username: raise HTTPException(403, f"Not authorized to update asset owned by {asset.get('owner')}") # Build updates dict updates = {} if req.description is not None: updates["description"] = req.description if req.tags is not None: updates["tags"] = req.tags if req.metadata is not None: updates["metadata"] = {**asset.get("metadata", {}), **req.metadata} if req.origin is not None: updates["origin"] = req.origin if req.ipfs_cid is not None: updates["ipfs_cid"] = req.ipfs_cid # Pin on IPFS (fire-and-forget, don't block) import threading threading.Thread(target=_pin_ipfs_async, args=(req.ipfs_cid,), daemon=True).start() # Update asset in database updated_asset = await db.update_asset(name, updates) # Create Update activity activity = { "activity_id": str(uuid.uuid4()), "activity_type": "Update", "actor_id": f"https://{DOMAIN}/users/{user.username}", "object_data": { "type": updated_asset.get("asset_type", "Object").capitalize(), "name": name, "id": f"https://{DOMAIN}/objects/{updated_asset['content_hash']}", "contentHash": { "algorithm": "sha3-256", "value": updated_asset["content_hash"] }, "attributedTo": f"https://{DOMAIN}/users/{user.username}", "summary": req.description, "tag": req.tags or updated_asset.get("tags", []) }, "published": updated_asset.get("updated_at", datetime.now(timezone.utc).isoformat()) } # Sign activity with the user's keys activity = sign_activity(activity, user.username) # Save activity to database await db.create_activity(activity) return {"asset": updated_asset, "activity": activity} def _pin_ipfs_async(cid: str): """Pin IPFS content in background thread.""" try: import ipfs_client if ipfs_client.is_available(): ipfs_client.pin(cid) logger.info(f"Pinned IPFS content: {cid}") except Exception as e: logger.warning(f"Failed to pin IPFS content {cid}: {e}") async def 
_register_asset_impl(req: RegisterRequest, owner: str): """ Internal implementation for registering an asset atomically. Requires IPFS CID - content must be on IPFS before registering. Uses a transaction for all DB operations. """ import ipfs_client from ipfs_client import IPFSError logger.info(f"register_asset: Starting for {req.name} (hash={req.content_hash[:16]}...)") # ===== PHASE 1: VALIDATION ===== # IPFS CID is required if not req.ipfs_cid: raise HTTPException(400, "IPFS CID is required for registration") # Check if name exists - return existing asset if so existing = await db.get_asset(req.name) if existing: logger.info(f"register_asset: Asset {req.name} already exists, returning existing") return {"asset": existing, "activity": None, "existing": True} # ===== PHASE 2: IPFS OPERATIONS (non-blocking) ===== import asyncio logger.info(f"register_asset: Pinning CID {req.ipfs_cid[:16]}... on IPFS") try: await asyncio.to_thread(ipfs_client.pin_or_raise, req.ipfs_cid) logger.info("register_asset: CID pinned successfully") except IPFSError as e: logger.error(f"register_asset: IPFS pin failed: {e}") raise HTTPException(500, f"IPFS operation failed: {e}") # ===== PHASE 3: DB TRANSACTION ===== now = datetime.now(timezone.utc).isoformat() try: async with db.transaction() as conn: # Check name again inside transaction (race condition protection) if await db.asset_exists_by_name_tx(conn, req.name): # Race condition - another request created it first, return existing existing = await db.get_asset(req.name) logger.info(f"register_asset: Asset {req.name} created by concurrent request") return {"asset": existing, "activity": None, "existing": True} # Create asset asset = { "name": req.name, "content_hash": req.content_hash, "ipfs_cid": req.ipfs_cid, "asset_type": req.asset_type, "tags": req.tags, "metadata": req.metadata, "url": req.url, "provenance": req.provenance, "owner": owner, "created_at": now } created_asset = await db.create_asset_tx(conn, asset) # Create ownership 
activity object_data = { "type": req.asset_type.capitalize(), "name": req.name, "id": f"https://{DOMAIN}/objects/{req.content_hash}", "contentHash": { "algorithm": "sha3-256", "value": req.content_hash }, "attributedTo": f"https://{DOMAIN}/users/{owner}" } # Include provenance in activity object_data if present if req.provenance: object_data["provenance"] = req.provenance activity = { "activity_id": req.content_hash, # Content-addressable by content hash "activity_type": "Create", "actor_id": f"https://{DOMAIN}/users/{owner}", "object_data": object_data, "published": now } activity = sign_activity(activity, owner) created_activity = await db.create_activity_tx(conn, activity) # Transaction commits here on successful exit except HTTPException: raise except Exception as e: logger.error(f"register_asset: Database transaction failed: {e}") raise HTTPException(500, f"Failed to register asset: {e}") logger.info(f"register_asset: Successfully registered {req.name}") return {"asset": created_asset, "activity": created_activity} @app.post("/assets") async def register_asset(req: RegisterRequest, user: User = Depends(get_required_user)): """Register a new asset and create ownership activity. Requires authentication.""" return await _register_asset_impl(req, user.username) @app.post("/assets/record-run") @app.post("/registry/record-run") # Legacy route async def record_run(req: RecordRunRequest, user: User = Depends(get_required_user)): """ Record an L1 run and register the output atomically. Ensures all operations succeed or none do: 1. All input assets registered (if not already on L2) + pinned on IPFS 2. Output asset registered + pinned on IPFS 3. 
Recipe serialized to JSON, stored on IPFS, CID saved in provenance """ import ipfs_client from ipfs_client import IPFSError # ===== PHASE 1: PREPARATION (read-only, non-blocking) ===== import asyncio l1_url = req.l1_server.rstrip('/') logger.info(f"record_run: Starting for run_id={req.run_id} from {l1_url}") # Helper to fetch from L1 without blocking event loop def fetch_l1_run(): import time as _time url = f"{l1_url}/runs/{req.run_id}" logger.info(f"record_run: Fetching run from L1: {url}") t0 = _time.time() resp = requests.get(url, timeout=30) logger.info(f"record_run: L1 request took {_time.time()-t0:.3f}s, status={resp.status_code}") if resp.status_code == 404: raise ValueError(f"Run not found on L1: {req.run_id}") resp.raise_for_status() try: return resp.json() except Exception: body_preview = resp.text[:200] if resp.text else "(empty)" logger.error(f"L1 returned non-JSON for {url}: status={resp.status_code}, body={body_preview}") raise ValueError(f"L1 returned invalid response: {body_preview[:100]}") def fetch_l1_cache(content_hash): logger.debug(f"record_run: Fetching cache {content_hash[:16]}... 
from L1") url = f"{l1_url}/cache/{content_hash}" resp = requests.get(url, headers={"Accept": "application/json"}, timeout=10) if resp.status_code == 404: raise ValueError(f"Cache item not found on L1: {content_hash[:16]}...") resp.raise_for_status() try: return resp.json() except Exception as e: # Log what we actually got back body_preview = resp.text[:200] if resp.text else "(empty)" logger.error(f"L1 returned non-JSON for {url}: status={resp.status_code}, body={body_preview}") raise ValueError(f"L1 returned invalid response (status={resp.status_code}): {body_preview[:100]}") # Fetch run from L1 try: run = await asyncio.to_thread(fetch_l1_run) logger.info(f"record_run: Fetched run, status={run.get('status')}, inputs={len(run.get('inputs', []))}") except Exception as e: logger.error(f"record_run: Failed to fetch run from L1: {e}") raise HTTPException(400, f"Failed to fetch run from L1 ({l1_url}): {e}") if run.get("status") != "completed": raise HTTPException(400, f"Run not completed: {run.get('status')}") output_hash = run.get("output_hash") if not output_hash: raise HTTPException(400, "Run has no output hash") # Fetch output cache info from L1 (must exist - it's new) logger.info(f"record_run: Fetching output cache {output_hash[:16]}... 
from L1") try: cache_info = await asyncio.to_thread(fetch_l1_cache, output_hash) output_media_type = cache_info.get("media_type", "image") output_ipfs_cid = cache_info.get("ipfs_cid") logger.info(f"record_run: Output has IPFS CID: {output_ipfs_cid[:16] if output_ipfs_cid else 'None'}...") except Exception as e: logger.error(f"record_run: Failed to fetch output cache info: {e}") raise HTTPException(400, f"Failed to fetch output cache info: {e}") if not output_ipfs_cid: logger.error("record_run: Output has no IPFS CID") raise HTTPException(400, "Output has no IPFS CID - cannot publish") # Gather input info: check L2 first, then fall back to L1 input_hashes = run.get("inputs", []) input_infos = [] # List of {content_hash, ipfs_cid, media_type, existing_asset} logger.info(f"record_run: Gathering info for {len(input_hashes)} inputs") for input_hash in input_hashes: # Check if already on L2 existing = await db.get_asset_by_hash(input_hash) if existing and existing.get("ipfs_cid"): logger.info(f"record_run: Input {input_hash[:16]}... found on L2") input_infos.append({ "content_hash": input_hash, "ipfs_cid": existing["ipfs_cid"], "media_type": existing.get("asset_type", "image"), "existing_asset": existing }) else: # Not on L2, try L1 logger.info(f"record_run: Input {input_hash[:16]}... not on L2, fetching from L1") try: inp_info = await asyncio.to_thread(fetch_l1_cache, input_hash) ipfs_cid = inp_info.get("ipfs_cid") if not ipfs_cid: logger.error(f"record_run: Input {input_hash[:16]}... has no IPFS CID") raise HTTPException(400, f"Input {input_hash[:16]}... has no IPFS CID (not on L2 or L1)") input_infos.append({ "content_hash": input_hash, "ipfs_cid": ipfs_cid, "media_type": inp_info.get("media_type", "image"), "existing_asset": None }) except HTTPException: raise except Exception as e: logger.error(f"record_run: Failed to fetch input {input_hash[:16]}... from L1: {e}") raise HTTPException(400, f"Input {input_hash[:16]}... 
not on L2 and failed to fetch from L1: {e}") # Prepare recipe data recipe_data = run.get("recipe") if not recipe_data: recipe_data = { "name": run.get("recipe_name", "unknown"), "effect_url": run.get("effect_url"), "effects_commit": run.get("effects_commit"), } # Build registered_inputs list - all referenced by content_hash registered_inputs = [] for inp in input_infos: registered_inputs.append({ "content_hash": inp["content_hash"], "ipfs_cid": inp["ipfs_cid"] }) # ===== PHASE 2: IPFS OPERATIONS (non-blocking for event loop) ===== def do_ipfs_operations(): """Run IPFS operations in thread pool to not block event loop.""" from concurrent.futures import ThreadPoolExecutor, as_completed # Collect all CIDs to pin (inputs + output) cids_to_pin = [inp["ipfs_cid"] for inp in input_infos] + [output_ipfs_cid] logger.info(f"record_run: Pinning {len(cids_to_pin)} CIDs on IPFS") # Pin all in parallel with ThreadPoolExecutor(max_workers=5) as executor: futures = {executor.submit(ipfs_client.pin_or_raise, cid): cid for cid in cids_to_pin} for future in as_completed(futures): future.result() # Raises IPFSError if failed logger.info("record_run: All CIDs pinned successfully") # Store recipe on IPFS logger.info("record_run: Storing recipe on IPFS") recipe_cid = ipfs_client.add_json(recipe_data) # Build and store full provenance on IPFS # Compute content-addressable run_id from inputs + recipe recipe_name = recipe_data.get("name", "unknown") if isinstance(recipe_data, dict) else str(recipe_data) run_id = compute_run_id(input_hashes, recipe_name) provenance = { "run_id": run_id, # Content-addressable run identifier "inputs": registered_inputs, "output": { "content_hash": output_hash, "ipfs_cid": output_ipfs_cid }, "recipe": recipe_data, "recipe_cid": recipe_cid, "effect_url": run.get("effect_url"), "effects_commit": run.get("effects_commit"), "l1_server": l1_url, "l1_run_id": req.run_id, "rendered_at": run.get("completed_at"), "infrastructure": run.get("infrastructure") } 
logger.info("record_run: Storing provenance on IPFS") provenance_cid = ipfs_client.add_json(provenance) return recipe_cid, provenance_cid, provenance try: import asyncio recipe_cid, provenance_cid, provenance = await asyncio.to_thread(do_ipfs_operations) logger.info(f"record_run: Recipe CID: {recipe_cid[:16]}..., Provenance CID: {provenance_cid[:16]}...") except IPFSError as e: logger.error(f"record_run: IPFS operation failed: {e}") raise HTTPException(500, f"IPFS operation failed: {e}") # ===== PHASE 3: DB TRANSACTION (all-or-nothing) ===== logger.info("record_run: Starting DB transaction") now = datetime.now(timezone.utc).isoformat() # Add provenance_cid to provenance for storage in DB provenance["provenance_cid"] = provenance_cid try: async with db.transaction() as conn: # Register input assets (if not already on L2) - named by content_hash for inp in input_infos: if not inp["existing_asset"]: media_type = inp["media_type"] tags = ["auto-registered", "input"] if media_type == "recipe": tags.append("recipe") input_asset = { "name": inp["content_hash"], # Use content_hash as name "content_hash": inp["content_hash"], "ipfs_cid": inp["ipfs_cid"], "asset_type": media_type, "tags": tags, "metadata": {"auto_registered_from_run": req.run_id}, "owner": user.username, "created_at": now } await db.create_asset_tx(conn, input_asset) # Check if output already exists (by content_hash) - return existing if so existing = await db.get_asset_by_name_tx(conn, output_hash) if existing: logger.info(f"record_run: Output {output_hash[:16]}... already exists") # Check if activity already exists for this run existing_activity = await db.get_activity(provenance["run_id"]) if existing_activity: logger.info(f"record_run: Activity {provenance['run_id'][:16]}... 
also exists") return {"asset": existing, "activity": existing_activity, "existing": True} # Asset exists but no activity - create one logger.info(f"record_run: Creating activity for existing asset") object_data = { "type": existing.get("asset_type", "image").capitalize(), "name": output_hash, "id": f"https://{DOMAIN}/objects/{output_hash}", "contentHash": { "algorithm": "sha3-256", "value": output_hash }, "attributedTo": f"https://{DOMAIN}/users/{user.username}", "provenance": provenance } activity = { "activity_id": provenance["run_id"], "activity_type": "Create", "actor_id": f"https://{DOMAIN}/users/{user.username}", "object_data": object_data, "published": now } activity = sign_activity(activity, user.username) created_activity = await db.create_activity_tx(conn, activity) return {"asset": existing, "activity": created_activity, "existing": True} # Create output asset with provenance - named by content_hash output_asset = { "name": output_hash, # Use content_hash as name "content_hash": output_hash, "ipfs_cid": output_ipfs_cid, "asset_type": output_media_type, "tags": ["rendered", "l1"], "metadata": {"l1_server": l1_url, "l1_run_id": req.run_id}, "provenance": provenance, "owner": user.username, "created_at": now } created_asset = await db.create_asset_tx(conn, output_asset) # Create activity - all referenced by content_hash object_data = { "type": output_media_type.capitalize(), "name": output_hash, # Use content_hash as name "id": f"https://{DOMAIN}/objects/{output_hash}", "contentHash": { "algorithm": "sha3-256", "value": output_hash }, "attributedTo": f"https://{DOMAIN}/users/{user.username}", "provenance": provenance } activity = { "activity_id": provenance["run_id"], # Content-addressable run_id "activity_type": "Create", "actor_id": f"https://{DOMAIN}/users/{user.username}", "object_data": object_data, "published": now } activity = sign_activity(activity, user.username) created_activity = await db.create_activity_tx(conn, activity) # Transaction commits 
            # transaction commits here on successful exit
    except HTTPException:
        # Preserve deliberate 4xx responses raised inside the transaction.
        raise
    except Exception as e:
        logger.error(f"record_run: Database transaction failed: {e}")
        raise HTTPException(500, f"Failed to record run: {e}")

    logger.info(f"record_run: Successfully published {output_hash[:16]}... with {len(registered_inputs)} inputs")
    return {"asset": created_asset, "activity": created_activity}


@app.post("/assets/publish-cache")
async def publish_cache(req: PublishCacheRequest, user: User = Depends(get_required_user)):
    """
    Publish a cache item from L1 with metadata atomically.

    Requires origin to be set (self or external URL).
    Requires IPFS CID - content must be on IPFS before publishing.
    Creates a new asset and Create activity in a single transaction.

    Args:
        req: PublishCacheRequest carrying asset name, content hash, IPFS CID,
            asset type, tags, description, origin, and metadata.
        user: Authenticated user (dependency-injected); becomes the asset owner.

    Returns:
        dict with the created "asset" and signed "activity" records.

    Raises:
        HTTPException(400): missing/invalid origin, missing IPFS CID, or
            duplicate asset name.
        HTTPException(500): IPFS pin failure or DB transaction failure.
    """
    import ipfs_client
    from ipfs_client import IPFSError

    logger.info(f"publish_cache: Starting for {req.asset_name} (hash={req.content_hash[:16]}...)")

    # ===== PHASE 1: VALIDATION =====
    # Validate origin: must declare whether the content is the author's own
    # ("self") or sourced from elsewhere ("external", which needs a URL).
    if not req.origin or "type" not in req.origin:
        raise HTTPException(400, "Origin is required for publishing (type: 'self' or 'external')")
    origin_type = req.origin.get("type")
    if origin_type not in ("self", "external"):
        raise HTTPException(400, "Origin type must be 'self' or 'external'")
    if origin_type == "external" and not req.origin.get("url"):
        raise HTTPException(400, "External origin requires a URL")

    # IPFS CID is now required
    if not req.ipfs_cid:
        raise HTTPException(400, "IPFS CID is required for publishing")

    # Check if asset name already exists (cheap pre-check; re-checked inside
    # the transaction below to close the race window).
    if await db.asset_exists(req.asset_name):
        raise HTTPException(400, f"Asset name already exists: {req.asset_name}")

    # ===== PHASE 2: IPFS OPERATIONS (non-blocking) =====
    # Pin before touching the DB so a pin failure leaves no partial state.
    import asyncio
    logger.info(f"publish_cache: Pinning CID {req.ipfs_cid[:16]}... on IPFS")
    try:
        # pin_or_raise is blocking; run in a worker thread to keep the event loop free.
        await asyncio.to_thread(ipfs_client.pin_or_raise, req.ipfs_cid)
        logger.info("publish_cache: CID pinned successfully")
    except IPFSError as e:
        logger.error(f"publish_cache: IPFS pin failed: {e}")
        raise HTTPException(500, f"IPFS operation failed: {e}")

    # ===== PHASE 3: DB TRANSACTION =====
    # Asset row + Create activity are written in a single transaction so the
    # registry and the activity log cannot diverge.
    logger.info("publish_cache: Starting DB transaction")
    now = datetime.now(timezone.utc).isoformat()
    try:
        async with db.transaction() as conn:
            # Check name again inside transaction (race condition protection)
            if await db.asset_exists_by_name_tx(conn, req.asset_name):
                raise HTTPException(400, f"Asset name already exists: {req.asset_name}")

            # Create asset
            asset = {
                "name": req.asset_name,
                "content_hash": req.content_hash,
                "ipfs_cid": req.ipfs_cid,
                "asset_type": req.asset_type,
                "tags": req.tags,
                "description": req.description,
                "origin": req.origin,
                "metadata": req.metadata,
                "owner": user.username,
                "created_at": now
            }
            created_asset = await db.create_asset_tx(conn, asset)

            # Create ownership activity with origin info
            # (ActivityStreams-shaped object; id is content-addressed).
            object_data = {
                "type": req.asset_type.capitalize(),
                "name": req.asset_name,
                "id": f"https://{DOMAIN}/objects/{req.content_hash}",
                "contentHash": {
                    "algorithm": "sha3-256",
                    "value": req.content_hash
                },
                "attributedTo": f"https://{DOMAIN}/users/{user.username}",
                "tag": req.tags
            }
            if req.description:
                object_data["summary"] = req.description

            # Include origin in ActivityPub object: "generator" marks original
            # work; "source" links out to the external origin.
            if origin_type == "self":
                object_data["generator"] = {
                    "type": "Application",
                    "name": "Art DAG",
                    "note": "Original content created by the author"
                }
            else:
                object_data["source"] = {
                    "type": "Link",
                    "href": req.origin.get("url"),
                    "name": req.origin.get("note", "External source")
                }

            activity = {
                "activity_id": req.content_hash,  # Content-addressable by content hash
                "activity_type": "Create",
                "actor_id": f"https://{DOMAIN}/users/{user.username}",
                "object_data": object_data,
                "published": now
            }
            # NOTE(review): sign_activity is defined elsewhere in this file;
            # presumably attaches the user's signature for federation — confirm.
            activity = sign_activity(activity, user.username)
            created_activity = await db.create_activity_tx(conn,
activity) # Transaction commits here on successful exit except HTTPException: raise except Exception as e: logger.error(f"publish_cache: Database transaction failed: {e}") raise HTTPException(500, f"Failed to publish cache item: {e}") logger.info(f"publish_cache: Successfully published {req.asset_name}") return {"asset": created_asset, "activity": created_activity} # ============ Activities Endpoints ============ @app.get("/activities") async def get_activities(request: Request, page: int = 1, limit: int = 20): """Get activities. HTML for browsers (with infinite scroll), JSON for APIs (with pagination).""" all_activities = await load_activities() total = len(all_activities) # Reverse for newest first all_activities = list(reversed(all_activities)) # Pagination start = (page - 1) * limit end = start + limit activities_page = all_activities[start:end] has_more = end < total if wants_html(request): username = get_user_from_cookie(request) if not activities_page: if page == 1: content = '''No activities yet.
''' else: return HTMLResponse("") # Empty for infinite scroll else: rows = "" for i, activity in enumerate(activities_page): activity_index = total - 1 - (start + i) # Original index obj = activity.get("object_data", {}) activity_type = activity.get("activity_type", "") type_color = "bg-green-600" if activity_type == "Create" else "bg-yellow-600" if activity_type == "Update" else "bg-gray-600" actor_id = activity.get("actor_id", "") actor_name = actor_id.split("/")[-1] if actor_id else "unknown" rows += f'''| Type | Object | Actor | Published |
|---|
This activity does not exist.
'''
        return HTMLResponse(base_html("Activity Not Found", content, get_user_from_cookie(request)))

    if wants_json:
        return activity

    # Default to HTML for browsers
    if activity_index is not None:
        return await ui_activity_detail(activity_index, request)
    else:
        # Render activity directly if no index found
        return await ui_activity_detail_by_data(activity, request)


@app.get("/activity/{activity_index}")
async def get_activity_legacy(activity_index: int):
    """Legacy route - redirect to /activities/{activity_index}.

    Permanent (301) redirect so clients and crawlers update their links.
    """
    return RedirectResponse(url=f"/activities/{activity_index}", status_code=301)


@app.get("/objects/{content_hash}")
async def get_object(content_hash: str, request: Request):
    """Get object by content hash. Content negotiation: HTML for browsers, JSON for APIs.

    Returns an ActivityStreams object (application/activity+json) when the
    client explicitly asks for JSON; otherwise redirects (303) to the asset's
    HTML detail page. 404 if no registered asset has this content hash.
    """
    registry = await load_registry()

    # Find asset by hash.
    # NOTE(review): linear scan over the whole registry per request — fine for
    # small registries; consider a hash index if the asset count grows.
    for name, asset in registry.get("assets", {}).items():
        if asset.get("content_hash") == content_hash:
            # Check Accept header - only return JSON if explicitly requested
            accept = request.headers.get("accept", "")
            wants_json = ("application/json" in accept or "application/activity+json" in accept) and "text/html" not in accept
            if not wants_json:
                # Default: redirect to detail page for browsers
                return RedirectResponse(url=f"/assets/{name}", status_code=303)

            owner = asset.get("owner", "unknown")
            # ActivityStreams 2.0 object, content-addressed by sha3-256 hash.
            return JSONResponse(
                content={
                    "@context": "https://www.w3.org/ns/activitystreams",
                    "id": f"https://{DOMAIN}/objects/{content_hash}",
                    "type": asset.get("asset_type", "Object").capitalize(),
                    "name": name,
                    "contentHash": {
                        "algorithm": "sha3-256",
                        "value": content_hash
                    },
                    "attributedTo": f"https://{DOMAIN}/users/{owner}",
                    "published": asset.get("created_at")
                },
                media_type="application/activity+json"
            )

    raise HTTPException(404, f"Object not found: {content_hash}")


# ============ Anchoring (Bitcoin timestamps) ============

@app.post("/anchors/create")
async def create_anchor_endpoint(request: Request):
    """
    Create a new anchor for all unanchored activities.
Builds a merkle tree, stores it on IPFS, and submits to OpenTimestamps for Bitcoin anchoring. The anchor proof is backed up to persistent storage. """ import anchoring import ipfs_client # Check auth (cookie or header) username = get_user_from_cookie(request) if not username: if wants_html(request): return HTMLResponse('''Create a test anchor for unanchored activities, or test the OTS connection.
| Merkle Root | Activities | Status | Created | Actions |
|---|
Log in to manage your renderer connections.
''' return HTMLResponse(base_html("Renderers", content)) # Get user's attached renderers attached = await db.get_user_renderers(username) from urllib.parse import quote # Build renderer list rows = [] for l1_url in L1_SERVERS: is_attached = l1_url in attached # Extract display name from URL display_name = l1_url.replace("https://", "").replace("http://", "") if is_attached: status = 'Attached' action = f''' Open ''' else: status = 'Not attached' # Attach via endpoint that creates scoped token (not raw token in URL) attach_url = f"/renderers/attach?l1_url={quote(l1_url, safe='')}" action = f''' Attach ''' row_id = l1_url.replace("://", "-").replace("/", "-").replace(".", "-") rows.append(f'''Connect to L1 rendering servers. After attaching, you can run effects and manage media on that renderer.
No renderers configured.
'}Attach your own storage to help power the network. 50% of your capacity is donated to store shared content, making popular assets more resilient.
No {info["name"]} configs yet. Add one below.
' # Build form fields based on provider type form_fields = "" if provider_type == "pinata": form_fields = '''