commit f54b0fb5dae7670168abddc577443fca22fa30d1 Author: giles Date: Tue Feb 24 23:07:31 2026 +0000 Squashed 'l2/' content from commit 79caa24 git-subtree-dir: l2 git-subtree-split: 79caa24e2129bf6e2cee819327d5622425306b67 diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..d0bb2cf --- /dev/null +++ b/.env.example @@ -0,0 +1,20 @@ +# L2 Server Configuration + +# PostgreSQL password (REQUIRED - no default) +POSTGRES_PASSWORD=changeme-generate-with-openssl-rand-hex-16 + +# Domain for this ActivityPub server +ARTDAG_DOMAIN=artdag.rose-ash.com + +# JWT secret for token signing (generate with: openssl rand -hex 32) +JWT_SECRET=your-secret-here-generate-with-openssl-rand-hex-32 + +# L1 server URL for fetching content (images/videos) +L1_PUBLIC_URL=https://celery-artdag.rose-ash.com + +# Effects repository URL for linking to effect source code +EFFECTS_REPO_URL=https://git.rose-ash.com/art-dag/effects + +# Notes: +# - ARTDAG_USER removed - now multi-actor, each registered user is their own actor +# - L1 URL can also come from provenance data per-asset diff --git a/.gitea/workflows/ci.yml b/.gitea/workflows/ci.yml new file mode 100644 index 0000000..30d34ea --- /dev/null +++ b/.gitea/workflows/ci.yml @@ -0,0 +1,62 @@ +name: Build and Deploy + +on: + push: + branches: [main] + +env: + REGISTRY: registry.rose-ash.com:5000 + IMAGE: l2-server + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install tools + run: | + apt-get update && apt-get install -y --no-install-recommends openssh-client + + - name: Set up SSH + env: + SSH_KEY: ${{ secrets.DEPLOY_SSH_KEY }} + DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }} + run: | + mkdir -p ~/.ssh + echo "$SSH_KEY" > ~/.ssh/id_rsa + chmod 600 ~/.ssh/id_rsa + ssh-keyscan -H "$DEPLOY_HOST" >> ~/.ssh/known_hosts 2>/dev/null || true + + - name: Pull latest code on server + env: + DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }} + run: | + ssh "root@$DEPLOY_HOST" " + cd 
/root/art-dag/activity-pub + git fetch origin main + git reset --hard origin/main + " + + - name: Build and push image + env: + DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }} + run: | + ssh "root@$DEPLOY_HOST" " + cd /root/art-dag/activity-pub + docker build --build-arg CACHEBUST=\$(date +%s) -t ${{ env.REGISTRY }}/${{ env.IMAGE }}:latest -t ${{ env.REGISTRY }}/${{ env.IMAGE }}:${{ github.sha }} . + docker push ${{ env.REGISTRY }}/${{ env.IMAGE }}:latest + docker push ${{ env.REGISTRY }}/${{ env.IMAGE }}:${{ github.sha }} + " + + - name: Deploy stack + env: + DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }} + run: | + ssh "root@$DEPLOY_HOST" " + cd /root/art-dag/activity-pub + docker stack deploy -c docker-compose.yml activitypub + echo 'Waiting for services to update...' + sleep 10 + docker stack services activitypub + " diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..705d35d --- /dev/null +++ b/.gitignore @@ -0,0 +1,11 @@ +__pycache__/ +*.py[cod] +.venv/ +venv/ + +# Private keys - NEVER commit these +*.pem +keys/ + +# Secrets +.env diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..409aadf --- /dev/null +++ b/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Install git for pip to clone dependencies +RUN apt-get update && apt-get install -y --no-install-recommends git && rm -rf /var/lib/apt/lists/* + +# Install dependencies +COPY requirements.txt . +ARG CACHEBUST=1 +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application +COPY . . + +# Create data directory +RUN mkdir -p /data/l2 + +ENV PYTHONUNBUFFERED=1 +ENV ARTDAG_DATA=/data/l2 + +# Default command runs the server +CMD ["python", "server.py"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..31f8c36 --- /dev/null +++ b/README.md @@ -0,0 +1,389 @@ +# Art DAG L2 Server - ActivityPub + +Ownership registry and ActivityPub federation for Art DAG. Manages asset provenance, cryptographic anchoring, and distributed identity. 
+ +## Features + +- **Asset Registry**: Content-addressed assets with provenance tracking +- **ActivityPub Federation**: Standard protocol for distributed social networking +- **OpenTimestamps Anchoring**: Cryptographic proof of existence on Bitcoin blockchain +- **L1 Integration**: Record and verify L1 rendering runs +- **Storage Providers**: S3, IPFS, and local storage backends +- **Scoped Authentication**: Secure token-based auth for federated L1 servers + +## Dependencies + +- **PostgreSQL**: Primary data storage +- **artdag-common**: Shared templates and middleware +- **cryptography**: RSA key generation and signing +- **httpx**: Async HTTP client for federation + +## Quick Start + +```bash +# Install dependencies +pip install -r requirements.txt + +# Configure +export ARTDAG_DOMAIN=artdag.example.com +export ARTDAG_USER=giles +export DATABASE_URL=postgresql://artdag:$POSTGRES_PASSWORD@localhost:5432/artdag +export L1_SERVERS=https://celery-artdag.example.com + +# Generate signing keys (required for federation) +python setup_keys.py + +# Start server +python server.py +``` + +## Docker Deployment + +```bash +docker stack deploy -c docker-compose.yml artdag-l2 +``` + +## Configuration + +### Environment Variables + +| Variable | Default | Description | +|----------|---------|-------------| +| `ARTDAG_DOMAIN` | `artdag.rose-ash.com` | Domain for ActivityPub actors | +| `ARTDAG_USER` | `giles` | Default username | +| `ARTDAG_DATA` | `~/.artdag/l2` | Data directory | +| `DATABASE_URL` | **(required)** | PostgreSQL connection | +| `L1_SERVERS` | - | Comma-separated list of L1 server URLs | +| `JWT_SECRET` | (generated) | JWT signing secret | +| `HOST` | `0.0.0.0` | Server bind address | +| `PORT` | `8200` | Server port | + +### JWT Secret + +The JWT secret signs authentication tokens. Without a persistent secret, tokens are invalidated on restart. 
+ +```bash +# Generate a secret +openssl rand -hex 32 + +# Set in environment +export JWT_SECRET="your-generated-secret" + +# Or use Docker secrets (recommended for production) +echo "your-secret" | docker secret create jwt_secret - +``` + +### RSA Keys + +ActivityPub requires RSA keys for signing activities: + +```bash +# Generate keys +python setup_keys.py + +# Or with custom paths +python setup_keys.py --data-dir /data/l2 --user giles +``` + +Keys stored in `$ARTDAG_DATA/keys/`: +- `{username}.pem` - Private key (chmod 600) +- `{username}.pub` - Public key (in actor profile) + +## Web UI + +| Path | Description | +|------|-------------| +| `/` | Home page with stats | +| `/login` | Login form | +| `/register` | Registration form | +| `/logout` | Log out | +| `/assets` | Browse registered assets | +| `/asset/{name}` | Asset detail page | +| `/activities` | Published activities | +| `/activity/{id}` | Activity detail | +| `/users` | Registered users | +| `/renderers` | L1 renderer connections | +| `/anchors/ui` | OpenTimestamps management | +| `/storage` | Storage provider config | +| `/download/client` | Download CLI client | + +## API Reference + +Interactive docs: http://localhost:8200/docs + +### Authentication + +| Method | Path | Description | +|--------|------|-------------| +| POST | `/auth/register` | Register new user | +| POST | `/auth/login` | Login, get JWT token | +| GET | `/auth/me` | Get current user info | +| POST | `/auth/verify` | Verify token (for L1 servers) | + +### Assets + +| Method | Path | Description | +|--------|------|-------------| +| GET | `/assets` | List all assets | +| GET | `/assets/{name}` | Get asset by name | +| POST | `/assets` | Register new asset | +| PATCH | `/assets/{name}` | Update asset metadata | +| POST | `/assets/record-run` | Record L1 run as asset | +| POST | `/assets/publish-cache` | Publish L1 cache item | +| GET | `/assets/by-run-id/{run_id}` | Find asset by L1 run ID | + +### ActivityPub + +| Method | Path | 
Description | +|--------|------|-------------| +| GET | `/.well-known/webfinger` | Actor discovery | +| GET | `/users/{username}` | Actor profile | +| GET | `/users/{username}/outbox` | Published activities | +| POST | `/users/{username}/inbox` | Receive activities | +| GET | `/users/{username}/followers` | Followers list | +| GET | `/objects/{hash}` | Get object by content hash | +| GET | `/activities` | List activities (paginated) | +| GET | `/activities/{ref}` | Get activity by reference | +| GET | `/activity/{index}` | Get activity by index | + +### OpenTimestamps Anchoring + +| Method | Path | Description | +|--------|------|-------------| +| POST | `/anchors/create` | Create timestamp anchor | +| GET | `/anchors` | List all anchors | +| GET | `/anchors/{merkle_root}` | Get anchor details | +| GET | `/anchors/{merkle_root}/tree` | Get merkle tree | +| GET | `/anchors/verify/{activity_id}` | Verify activity timestamp | +| POST | `/anchors/{merkle_root}/upgrade` | Upgrade pending timestamp | +| GET | `/anchors/ui` | Anchor management UI | +| POST | `/anchors/test-ots` | Test OTS functionality | + +### Renderers (L1 Connections) + +| Method | Path | Description | +|--------|------|-------------| +| GET | `/renderers` | List attached L1 servers | +| GET | `/renderers/attach` | Initiate L1 attachment | +| POST | `/renderers/detach` | Detach from L1 server | + +### Storage Providers + +| Method | Path | Description | +|--------|------|-------------| +| GET | `/storage` | List storage providers | +| POST | `/storage` | Add provider (form) | +| POST | `/storage/add` | Add provider (JSON) | +| GET | `/storage/{id}` | Get provider details | +| PATCH | `/storage/{id}` | Update provider | +| DELETE | `/storage/{id}` | Delete provider | +| POST | `/storage/{id}/test` | Test connection | +| GET | `/storage/type/{type}` | Get form for provider type | + +## L1 Renderer Integration + +L2 coordinates with L1 rendering servers for distributed processing. 
+ +### Configuration + +```bash +# Single L1 server +export L1_SERVERS=https://celery-artdag.rose-ash.com + +# Multiple L1 servers +export L1_SERVERS=https://server1.example.com,https://server2.example.com +``` + +### Attachment Flow + +1. User visits `/renderers` and clicks "Attach" +2. L2 creates a **scoped token** bound to the specific L1 +3. User redirected to L1's `/auth?auth_token=...` +4. L1 calls L2's `/auth/verify` to validate +5. L2 checks token scope matches requesting L1 +6. L1 sets local cookie, attachment recorded in `user_renderers` + +### Security + +- **Scoped tokens**: Tokens bound to specific L1; can't be used elsewhere +- **No shared secrets**: L1 verifies via L2's `/auth/verify` endpoint +- **Federated logout**: L2 revokes tokens on all attached L1s + +## OpenTimestamps Anchoring + +Cryptographic proof of existence using Bitcoin blockchain. + +### How It Works + +1. Activities are collected into merkle trees +2. Merkle root submitted to Bitcoin via OpenTimestamps +3. Pending proofs upgraded when Bitcoin confirms +4. Final proof verifiable without trusted third parties + +### Verification + +```bash +# Verify an activity's timestamp +curl https://artdag.example.com/anchors/verify/123 + +# Returns: +{ + "activity_id": 123, + "merkle_root": "abc123...", + "status": "confirmed", + "bitcoin_block": 800000, + "verified_at": "2026-01-01T..." 
+} +``` + +## Data Model + +### PostgreSQL Tables + +| Table | Description | +|-------|-------------| +| `users` | Registered users with hashed passwords | +| `assets` | Asset registry with content hashes | +| `activities` | Signed ActivityPub activities | +| `followers` | Follower relationships | +| `anchors` | OpenTimestamps anchor records | +| `anchor_activities` | Activity-to-anchor mappings | +| `user_renderers` | L1 attachment records | +| `revoked_tokens` | Token revocation list | +| `storage_providers` | Storage configurations | + +### Asset Structure + +```json +{ + "name": "my-video", + "content_hash": "sha3-256:abc123...", + "asset_type": "video", + "owner": "@giles@artdag.rose-ash.com", + "created_at": "2026-01-01T...", + "provenance": { + "inputs": [...], + "recipe": "beat-sync", + "l1_server": "https://celery-artdag.rose-ash.com", + "run_id": "..." + }, + "tags": ["art", "generated"] +} +``` + +### Activity Structure + +```json +{ + "@context": "https://www.w3.org/ns/activitystreams", + "type": "Create", + "actor": "https://artdag.rose-ash.com/users/giles", + "object": { + "type": "Document", + "name": "my-video", + "content": "sha3-256:abc123...", + "attributedTo": "https://artdag.rose-ash.com/users/giles" + }, + "published": "2026-01-01T..." 
+} +``` + +## CLI Commands + +### Register Asset + +```bash +curl -X POST https://artdag.example.com/assets \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{ + "name": "my-video", + "content_hash": "abc123...", + "asset_type": "video", + "tags": ["art", "generated"] + }' +``` + +### Record L1 Run + +```bash +curl -X POST https://artdag.example.com/assets/record-run \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{ + "run_id": "uuid-from-l1", + "l1_server": "https://celery-artdag.rose-ash.com", + "output_name": "my-rendered-video" + }' +``` + +### Publish L1 Cache Item + +```bash +curl -X POST https://artdag.example.com/assets/publish-cache \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{ + "content_hash": "abc123...", + "l1_server": "https://celery-artdag.rose-ash.com", + "name": "my-asset", + "asset_type": "video" + }' +``` + +## Architecture + +``` +L2 Server (FastAPI) + │ + ├── Web UI (Jinja2 + HTMX + Tailwind) + │ + ├── /assets → Asset Registry + │ │ + │ └── PostgreSQL (assets table) + │ + ├── /users/{user}/outbox → ActivityPub + │ │ + │ ├── Sign activities (RSA) + │ └── PostgreSQL (activities table) + │ + ├── /anchors → OpenTimestamps + │ │ + │ ├── Merkle tree construction + │ └── Bitcoin anchoring + │ + ├── /auth/verify → L1 Token Verification + │ │ + │ └── Scoped token validation + │ + └── /storage → Storage Providers + │ + ├── S3 (boto3) + ├── IPFS (ipfs_client) + └── Local filesystem +``` + +## Federation + +L2 implements ActivityPub for federated asset sharing. 
def build_merkle_tree(items: List[str]) -> Optional[dict]:
    """
    Build a merkle tree from a list of strings (activity IDs).

    Items are sorted first so the same set of IDs always produces the
    same root. When a level has an odd number of nodes, the last node
    is paired with a copy of itself to form its parent.

    Args:
        items: List of activity IDs to include

    Returns:
        Dict with root, tree structure, and metadata, or None if empty
    """
    if not items:
        return None

    # Sort for deterministic ordering
    items = sorted(items)

    # Hash each item to create leaves
    leaves = [hashlib.sha256(item.encode()).hexdigest() for item in items]

    # Build tree bottom-up
    tree_levels = [leaves]
    current_level = leaves

    while len(current_level) > 1:
        next_level = []
        for i in range(0, len(current_level), 2):
            left = current_level[i]
            # If odd number, duplicate last node
            right = current_level[i + 1] if i + 1 < len(current_level) else left
            # Hash pair together
            combined = hashlib.sha256((left + right).encode()).hexdigest()
            next_level.append(combined)
        tree_levels.append(next_level)
        current_level = next_level

    root = current_level[0]

    return {
        "root": root,
        "tree": tree_levels,
        "items": items,
        "item_count": len(items),
        "created_at": datetime.now(timezone.utc).isoformat()
    }


def get_merkle_proof(tree: dict, item: str) -> Optional[List[dict]]:
    """
    Get merkle proof for a specific item.

    Args:
        tree: Merkle tree dict from build_merkle_tree
        item: The item to prove membership for

    Returns:
        List of proof steps, or None if item not in tree
    """
    items = tree["items"]
    if item not in items:
        return None

    # Find leaf index. tree["items"] is already sorted by build_merkle_tree;
    # re-sorting is a cheap defensive step so the index matches leaf order.
    sorted_items = sorted(items)
    leaf_index = sorted_items.index(item)

    proof = []
    tree_levels = tree["tree"]
    current_index = leaf_index

    for level in tree_levels[:-1]:  # Skip root level
        sibling_index = current_index ^ 1  # XOR to get sibling
        if sibling_index < len(level):
            sibling_hash = level[sibling_index]
        else:
            # BUG FIX: on an odd-length level build_merkle_tree pairs the
            # last node with a copy of itself, so the proof step must use
            # the node's own hash as the right sibling. The previous code
            # skipped this level entirely, producing proofs for the last
            # leaf of any odd level that failed verify_merkle_proof.
            sibling_hash = level[current_index]
        proof.append({
            "hash": sibling_hash,
            "position": "right" if current_index % 2 == 0 else "left"
        })
        current_index //= 2

    return proof


def verify_merkle_proof(item: str, proof: List[dict], root: str) -> bool:
    """
    Verify a merkle proof.

    Args:
        item: The item to verify
        proof: Proof steps from get_merkle_proof
        root: Expected merkle root

    Returns:
        True if proof is valid
    """
    current_hash = hashlib.sha256(item.encode()).hexdigest()

    for step in proof:
        sibling = step["hash"]
        if step["position"] == "right":
            combined = current_hash + sibling
        else:
            combined = sibling + current_hash
        current_hash = hashlib.sha256(combined.encode()).hexdigest()

    return current_hash == root
+ """ + hash_bytes = bytes.fromhex(hash_hex) + + for server in OTS_SERVERS: + try: + resp = requests.post( + f"{server}/digest", + data=hash_bytes, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + timeout=10 + ) + if resp.status_code == 200: + logger.info(f"Submitted to OpenTimestamps via {server}") + return resp.content + except Exception as e: + logger.warning(f"OTS server {server} failed: {e}") + continue + + logger.error("All OpenTimestamps servers failed") + return None + + +def upgrade_ots_proof(ots_proof: bytes) -> Optional[bytes]: + """ + Upgrade an incomplete OTS proof to a complete Bitcoin-anchored proof. + + Args: + ots_proof: Incomplete .ots proof bytes + + Returns: + Complete .ots proof bytes, or None if not yet confirmed + + Note: + This should be called periodically (e.g., hourly) until + the proof is complete. Bitcoin confirmation takes ~1-2 hours. + """ + for server in OTS_SERVERS: + try: + resp = requests.post( + f"{server}/upgrade", + data=ots_proof, + headers={"Content-Type": "application/octet-stream"}, + timeout=10 + ) + if resp.status_code == 200 and len(resp.content) > len(ots_proof): + logger.info(f"OTS proof upgraded via {server}") + return resp.content + except Exception as e: + logger.warning(f"OTS upgrade via {server} failed: {e}") + continue + + return None + + +def append_to_backup(anchor_record: dict): + """ + Append anchor record to persistent JSONL backup file. + + Args: + anchor_record: Dict with anchor metadata + """ + _ensure_backup_dir() + + with open(ANCHOR_BACKUP_FILE, "a") as f: + f.write(json.dumps(anchor_record, sort_keys=True) + "\n") + + logger.info(f"Anchor backed up to {ANCHOR_BACKUP_FILE}") + + +def load_backup_anchors() -> List[dict]: + """ + Load all anchors from backup file. 
def load_backup_anchors() -> List[dict]:
    """
    Load all anchors from backup file.

    Returns:
        List of anchor records (empty if the backup file does not exist)
    """
    if not ANCHOR_BACKUP_FILE.exists():
        return []

    anchors = []
    with open(ANCHOR_BACKUP_FILE, "r") as f:
        for line in f:
            line = line.strip()
            if line:
                try:
                    anchors.append(json.loads(line))
                except json.JSONDecodeError:
                    # Skip corrupt lines rather than losing the whole backup.
                    logger.warning(f"Invalid JSON in backup: {line[:50]}...")

    return anchors


def get_latest_anchor_from_backup() -> Optional[dict]:
    """Get the most recent anchor from backup."""
    anchors = load_backup_anchors()
    return anchors[-1] if anchors else None


async def create_anchor(
    activity_ids: List[str],
    db_module,
    ipfs_module
) -> Optional[dict]:
    """
    Create a new anchor for a batch of activities.

    Args:
        activity_ids: List of activity UUIDs to anchor
        db_module: Database module with anchor functions (may be None)
        ipfs_module: IPFS client module (may be None)

    Returns:
        Anchor record dict, or None on failure
    """
    if not activity_ids:
        logger.info("No activities to anchor")
        return None

    # Build merkle tree
    tree = build_merkle_tree(activity_ids)
    if not tree:
        return None

    root = tree["root"]
    logger.info(f"Built merkle tree: {len(activity_ids)} activities, root={root[:16]}...")

    # Store tree on IPFS. Guard against a missing client explicitly instead
    # of relying on the AttributeError from None.add_json being swallowed —
    # the OTS branch below already checks ipfs_module; now both are consistent.
    tree_cid = None
    if ipfs_module:
        try:
            tree_cid = ipfs_module.add_json(tree)
            logger.info(f"Merkle tree stored on IPFS: {tree_cid}")
        except Exception as e:
            logger.error(f"Failed to store tree on IPFS: {e}")

    # Submit to OpenTimestamps (returns None if every calendar server fails)
    ots_proof = submit_to_opentimestamps(root)

    # Store OTS proof on IPFS too
    ots_cid = None
    if ots_proof and ipfs_module:
        try:
            ots_cid = ipfs_module.add_bytes(ots_proof)
            logger.info(f"OTS proof stored on IPFS: {ots_cid}")
        except Exception as e:
            logger.warning(f"Failed to store OTS proof on IPFS: {e}")

    # Create anchor record (activity_ids is non-empty at this point)
    anchor_record = {
        "merkle_root": root,
        "tree_ipfs_cid": tree_cid,
        "ots_proof_cid": ots_cid,
        "activity_count": len(activity_ids),
        "first_activity_id": activity_ids[0],
        "last_activity_id": activity_ids[-1],
        "created_at": datetime.now(timezone.utc).isoformat(),
        "confirmed_at": None,
        "bitcoin_txid": None
    }

    # Save to database (best-effort: failures are logged, not raised, so the
    # file backup below still records the anchor)
    if db_module:
        try:
            await db_module.create_anchor(anchor_record)
            await db_module.mark_activities_anchored(activity_ids, root)
        except Exception as e:
            logger.error(f"Failed to save anchor to database: {e}")

    # Append to backup file (persistent)
    append_to_backup(anchor_record)

    return anchor_record
def create_app() -> FastAPI:
    """
    Create and configure the L2 FastAPI application.

    Wires up the fragment pre-fetch middleware, Jinja2 templates, the
    custom 404 handler, and all feature routers.

    Returns:
        Configured FastAPI instance
    """
    app = FastAPI(
        title="Art-DAG L2 Server",
        description="ActivityPub server for Art-DAG ownership and federation",
        version="1.0.0",
        lifespan=lifespan,
    )

    # Coop fragment pre-fetch — inject nav-tree, auth-menu, cart-mini
    skip_prefixes = ("/auth/", "/.well-known/", "/health",
                     "/internal/", "/static/", "/inbox")

    @app.middleware("http")
    async def coop_fragments_middleware(request: Request, call_next):
        path = request.url.path
        # Non-page traffic and HTMX partials skip the fragment fetch but
        # still get empty state attributes so templates can render safely.
        skip = (
            request.method != "GET"
            or any(path.startswith(prefix) for prefix in skip_prefixes)
            or request.headers.get("hx-request")
        )
        if skip:
            request.state.nav_tree_html = ""
            request.state.auth_menu_html = ""
            request.state.cart_mini_html = ""
            return await call_next(request)

        from artdag_common.fragments import fetch_fragments as _fetch_frags

        user = get_user_from_cookie(request)
        auth_params = {"email": user.email} if user and user.email else {}
        nav_params = {"app_name": "artdag", "path": path}

        # Fragment fetch is best-effort; any failure degrades to empty HTML.
        try:
            fragments = await _fetch_frags([
                ("blog", "nav-tree", nav_params),
                ("account", "auth-menu", auth_params or None),
                ("cart", "cart-mini", None),
            ])
            nav_html, menu_html, cart_html = fragments
        except Exception:
            nav_html = menu_html = cart_html = ""

        request.state.nav_tree_html = nav_html
        request.state.auth_menu_html = menu_html
        request.state.cart_mini_html = cart_html

        return await call_next(request)

    # Initialize Jinja2 templates
    app.state.templates = create_jinja_env(Path(__file__).parent / "templates")

    # Custom 404 handler: HTML page for browsers, JSON for API clients
    @app.exception_handler(404)
    async def not_found_handler(request: Request, exc):
        from artdag_common.middleware import wants_html
        if wants_html(request):
            from artdag_common import render
            return render(app.state.templates, "404.html", request,
                          user=None,
                          )
        return JSONResponse({"detail": "Not found"}, status_code=404)

    # Include routers
    from .routers import auth, assets, activities, anchors, storage, users, renderers

    # Root routes
    app.include_router(auth.router, prefix="/auth", tags=["auth"])
    app.include_router(users.router, tags=["users"])

    # Feature routers
    app.include_router(assets.router, prefix="/assets", tags=["assets"])
    app.include_router(activities.router, prefix="/activities", tags=["activities"])
    app.include_router(anchors.router, prefix="/anchors", tags=["anchors"])
    app.include_router(storage.router, prefix="/storage", tags=["storage"])
    app.include_router(renderers.router, prefix="/renderers", tags=["renderers"])

    # WebFinger and ActivityPub discovery
    from .routers import federation
    app.include_router(federation.router, tags=["federation"])

    return app


# Create the default app instance
app = create_app()
+""" + +import os +from dataclasses import dataclass +from pathlib import Path + + +@dataclass +class Settings: + """L2 Server configuration.""" + + # Domain and URLs + domain: str = os.environ.get("ARTDAG_DOMAIN", "artdag.rose-ash.com") + l1_public_url: str = os.environ.get("L1_PUBLIC_URL", "https://celery-artdag.rose-ash.com") + effects_repo_url: str = os.environ.get("EFFECTS_REPO_URL", "https://git.rose-ash.com/art-dag/effects") + ipfs_gateway_url: str = os.environ.get("IPFS_GATEWAY_URL", "") + + # L1 servers + l1_servers: list = None + + # Cookie domain for cross-subdomain auth + cookie_domain: str = None + + # Data directory + data_dir: Path = None + + # JWT settings + jwt_secret: str = os.environ.get("JWT_SECRET", "") + jwt_algorithm: str = "HS256" + access_token_expire_minutes: int = 60 * 24 * 30 # 30 days + + def __post_init__(self): + # Parse L1 servers + l1_str = os.environ.get("L1_SERVERS", "https://celery-artdag.rose-ash.com") + self.l1_servers = [s.strip() for s in l1_str.split(",") if s.strip()] + + # Cookie domain + env_cookie = os.environ.get("COOKIE_DOMAIN") + if env_cookie: + self.cookie_domain = env_cookie + else: + parts = self.domain.split(".") + if len(parts) >= 2: + self.cookie_domain = "." + ".".join(parts[-2:]) + + # Data directory + self.data_dir = Path(os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2"))) + self.data_dir.mkdir(parents=True, exist_ok=True) + (self.data_dir / "assets").mkdir(exist_ok=True) + + +settings = Settings() diff --git a/app/dependencies.py b/app/dependencies.py new file mode 100644 index 0000000..d10d063 --- /dev/null +++ b/app/dependencies.py @@ -0,0 +1,80 @@ +""" +L2 Server Dependency Injection. + +Provides common dependencies for routes. 
+""" + +from typing import Optional + +from fastapi import Request, HTTPException, Depends +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials + +from .config import settings + +security = HTTPBearer(auto_error=False) + + +def get_templates(request: Request): + """Get Jinja2 templates from app state.""" + return request.app.state.templates + + +async def get_current_user(request: Request) -> Optional[dict]: + """ + Get current user from cookie or header. + + Returns user dict or None if not authenticated. + """ + from auth import verify_token, get_token_claims + + # Try cookie first + token = request.cookies.get("auth_token") + + # Try Authorization header + if not token: + auth_header = request.headers.get("Authorization", "") + if auth_header.startswith("Bearer "): + token = auth_header[7:] + + if not token: + return None + + # Verify token + username = verify_token(token) + if not username: + return None + + # Get full claims + claims = get_token_claims(token) + if not claims: + return None + + return { + "username": username, + "actor_id": f"https://{settings.domain}/users/{username}", + "token": token, + **claims, + } + + +async def require_auth(request: Request) -> dict: + """ + Require authentication. + + Raises HTTPException 401 if not authenticated. + """ + user = await get_current_user(request) + if not user: + raise HTTPException(401, "Authentication required") + return user + + +def get_user_from_cookie(request: Request) -> Optional[str]: + """Get username from cookie (for HTML pages).""" + from auth import verify_token + + token = request.cookies.get("auth_token") + if not token: + return None + + return verify_token(token) diff --git a/app/routers/__init__.py b/app/routers/__init__.py new file mode 100644 index 0000000..8365296 --- /dev/null +++ b/app/routers/__init__.py @@ -0,0 +1,25 @@ +""" +L2 Server Routers. + +Each router handles a specific domain of functionality. +""" + +from . import auth +from . import assets +from . 
import activities +from . import anchors +from . import storage +from . import users +from . import renderers +from . import federation + +__all__ = [ + "auth", + "assets", + "activities", + "anchors", + "storage", + "users", + "renderers", + "federation", +] diff --git a/app/routers/activities.py b/app/routers/activities.py new file mode 100644 index 0000000..10740c8 --- /dev/null +++ b/app/routers/activities.py @@ -0,0 +1,99 @@ +""" +Activity routes for L2 server. + +Handles ActivityPub activities and outbox. +""" + +import logging +from typing import Optional + +from fastapi import APIRouter, Request, Depends, HTTPException +from fastapi.responses import JSONResponse + +from artdag_common import render +from artdag_common.middleware import wants_html, wants_json + +from ..config import settings +from ..dependencies import get_templates, require_auth, get_user_from_cookie + +router = APIRouter() +logger = logging.getLogger(__name__) + + +@router.get("") +async def list_activities( + request: Request, + offset: int = 0, + limit: int = 20, +): + """List recent activities.""" + import db + + username = get_user_from_cookie(request) + + activities, total = await db.get_activities_paginated(limit=limit, offset=offset) + has_more = offset + len(activities) < total + + if wants_json(request): + return {"activities": activities, "offset": offset, "limit": limit} + + templates = get_templates(request) + return render(templates, "activities/list.html", request, + activities=activities, + user={"username": username} if username else None, + offset=offset, + limit=limit, + has_more=has_more, + active_tab="activities", + ) + + +@router.get("/{activity_id}") +async def get_activity( + activity_id: str, + request: Request, +): + """Get activity details.""" + import db + + activity = await db.get_activity(activity_id) + if not activity: + raise HTTPException(404, "Activity not found") + + # ActivityPub response + if "application/activity+json" in request.headers.get("accept", 
""): + return JSONResponse( + content=activity.get("activity_json", activity), + media_type="application/activity+json", + ) + + if wants_json(request): + return activity + + username = get_user_from_cookie(request) + templates = get_templates(request) + return render(templates, "activities/detail.html", request, + activity=activity, + user={"username": username} if username else None, + active_tab="activities", + ) + + +@router.post("") +async def create_activity( + request: Request, + user: dict = Depends(require_auth), +): + """Create a new activity (internal use).""" + import db + import json + + body = await request.json() + + activity_id = await db.create_activity( + actor=user["actor_id"], + activity_type=body.get("type", "Create"), + object_data=body.get("object"), + ) + + return {"activity_id": activity_id, "created": True} diff --git a/app/routers/anchors.py b/app/routers/anchors.py new file mode 100644 index 0000000..6bfb6a5 --- /dev/null +++ b/app/routers/anchors.py @@ -0,0 +1,203 @@ +""" +Anchor routes for L2 server. + +Handles OpenTimestamps anchoring and verification. 
+""" + +import logging +from typing import Optional + +from fastapi import APIRouter, Request, Depends, HTTPException +from fastapi.responses import HTMLResponse, FileResponse + +from artdag_common import render +from artdag_common.middleware import wants_html, wants_json + +from ..config import settings +from ..dependencies import get_templates, require_auth, get_user_from_cookie + +router = APIRouter() +logger = logging.getLogger(__name__) + + +@router.get("") +async def list_anchors( + request: Request, + offset: int = 0, + limit: int = 20, +): + """List user's anchors.""" + import db + + username = get_user_from_cookie(request) + if not username: + if wants_json(request): + raise HTTPException(401, "Authentication required") + from fastapi.responses import RedirectResponse + return RedirectResponse(url="/login", status_code=302) + + anchors = await db.get_anchors_paginated(offset=offset, limit=limit) + has_more = len(anchors) >= limit + + if wants_json(request): + return {"anchors": anchors, "offset": offset, "limit": limit} + + templates = get_templates(request) + return render(templates, "anchors/list.html", request, + anchors=anchors, + user={"username": username}, + offset=offset, + limit=limit, + has_more=has_more, + active_tab="anchors", + ) + + +@router.post("") +async def create_anchor( + request: Request, + user: dict = Depends(require_auth), +): + """Create a new timestamp anchor.""" + import db + import anchoring + + body = await request.json() + content_hash = body.get("content_hash") + + if not content_hash: + raise HTTPException(400, "content_hash required") + + # Create OTS timestamp + try: + ots_data = await anchoring.create_timestamp(content_hash) + except Exception as e: + logger.error(f"Failed to create timestamp: {e}") + raise HTTPException(500, f"Timestamping failed: {e}") + + # Save anchor + anchor_id = await db.create_anchor( + username=user["username"], + content_hash=content_hash, + ots_data=ots_data, + ) + + return { + "anchor_id": 
anchor_id, + "content_hash": content_hash, + "status": "pending", + "message": "Anchor created, pending Bitcoin confirmation", + } + + +@router.get("/{anchor_id}") +async def get_anchor( + anchor_id: str, + request: Request, +): + """Get anchor details.""" + import db + + anchor = await db.get_anchor(anchor_id) + if not anchor: + raise HTTPException(404, "Anchor not found") + + if wants_json(request): + return anchor + + username = get_user_from_cookie(request) + templates = get_templates(request) + return render(templates, "anchors/detail.html", request, + anchor=anchor, + user={"username": username} if username else None, + active_tab="anchors", + ) + + +@router.get("/{anchor_id}/ots") +async def download_ots(anchor_id: str): + """Download OTS proof file.""" + import db + + anchor = await db.get_anchor(anchor_id) + if not anchor: + raise HTTPException(404, "Anchor not found") + + ots_data = anchor.get("ots_data") + if not ots_data: + raise HTTPException(404, "OTS data not available") + + # Return as file download + from fastapi.responses import Response + return Response( + content=ots_data, + media_type="application/octet-stream", + headers={ + "Content-Disposition": f"attachment; filename={anchor['content_hash']}.ots" + }, + ) + + +@router.post("/{anchor_id}/verify") +async def verify_anchor( + anchor_id: str, + request: Request, + user: dict = Depends(require_auth), +): + """Verify anchor status (check Bitcoin confirmation).""" + import db + import anchoring + + anchor = await db.get_anchor(anchor_id) + if not anchor: + raise HTTPException(404, "Anchor not found") + + try: + result = await anchoring.verify_timestamp( + anchor["content_hash"], + anchor["ots_data"], + ) + + # Update anchor status + if result.get("confirmed"): + await db.update_anchor( + anchor_id, + status="confirmed", + bitcoin_block=result.get("block_height"), + confirmed_at=result.get("confirmed_at"), + ) + + if wants_html(request): + if result.get("confirmed"): + return HTMLResponse( + 
f'Confirmed in block {result["block_height"]}' + ) + return HTMLResponse('Pending confirmation') + + return result + + except Exception as e: + logger.error(f"Verification failed: {e}") + raise HTTPException(500, f"Verification failed: {e}") + + +@router.delete("/{anchor_id}") +async def delete_anchor( + anchor_id: str, + user: dict = Depends(require_auth), +): + """Delete an anchor.""" + import db + + anchor = await db.get_anchor(anchor_id) + if not anchor: + raise HTTPException(404, "Anchor not found") + + if anchor.get("username") != user["username"]: + raise HTTPException(403, "Not authorized") + + success = await db.delete_anchor(anchor_id) + if not success: + raise HTTPException(400, "Failed to delete anchor") + + return {"deleted": True} diff --git a/app/routers/assets.py b/app/routers/assets.py new file mode 100644 index 0000000..cd8f5fd --- /dev/null +++ b/app/routers/assets.py @@ -0,0 +1,244 @@ +""" +Asset management routes for L2 server. + +Handles asset registration, listing, and publishing. 
+""" + +import logging +from typing import Optional, List + +from fastapi import APIRouter, Request, Depends, HTTPException, Form +from fastapi.responses import HTMLResponse +from pydantic import BaseModel + +from artdag_common import render +from artdag_common.middleware import wants_html, wants_json + +from ..config import settings +from ..dependencies import get_templates, require_auth, get_user_from_cookie + +router = APIRouter() +logger = logging.getLogger(__name__) + + +class AssetCreate(BaseModel): + name: str + content_hash: str + ipfs_cid: Optional[str] = None + asset_type: str # image, video, effect, recipe + tags: List[str] = [] + metadata: dict = {} + provenance: Optional[dict] = None + + +class RecordRunRequest(BaseModel): + run_id: str + recipe: str + inputs: List[str] + output_hash: str + ipfs_cid: Optional[str] = None + provenance: Optional[dict] = None + + +@router.get("") +async def list_assets( + request: Request, + offset: int = 0, + limit: int = 20, + asset_type: Optional[str] = None, +): + """List user's assets.""" + import db + + username = get_user_from_cookie(request) + if not username: + if wants_json(request): + raise HTTPException(401, "Authentication required") + from fastapi.responses import RedirectResponse + return RedirectResponse(url="/login", status_code=302) + + assets = await db.get_user_assets(username, offset=offset, limit=limit, asset_type=asset_type) + has_more = len(assets) >= limit + + if wants_json(request): + return {"assets": assets, "offset": offset, "limit": limit, "has_more": has_more} + + templates = get_templates(request) + return render(templates, "assets/list.html", request, + assets=assets, + user={"username": username}, + offset=offset, + limit=limit, + has_more=has_more, + active_tab="assets", + ) + + +@router.post("") +async def create_asset( + req: AssetCreate, + user: dict = Depends(require_auth), +): + """Register a new asset.""" + import db + + asset = await db.create_asset({ + "owner": user["username"], 
+ "name": req.name, + "content_hash": req.content_hash, + "ipfs_cid": req.ipfs_cid, + "asset_type": req.asset_type, + "tags": req.tags or [], + "metadata": req.metadata or {}, + "provenance": req.provenance, + }) + + if not asset: + raise HTTPException(400, "Failed to create asset") + + return {"asset_id": asset.get("name"), "message": "Asset registered"} + + +@router.get("/{asset_id}") +async def get_asset( + asset_id: str, + request: Request, +): + """Get asset details.""" + import db + + username = get_user_from_cookie(request) + + asset = await db.get_asset(asset_id) + if not asset: + raise HTTPException(404, "Asset not found") + + if wants_json(request): + return asset + + templates = get_templates(request) + return render(templates, "assets/detail.html", request, + asset=asset, + user={"username": username} if username else None, + active_tab="assets", + ) + + +@router.delete("/{asset_id}") +async def delete_asset( + asset_id: str, + user: dict = Depends(require_auth), +): + """Delete an asset.""" + import db + + asset = await db.get_asset(asset_id) + if not asset: + raise HTTPException(404, "Asset not found") + + if asset.get("owner") != user["username"]: + raise HTTPException(403, "Not authorized") + + success = await db.delete_asset(asset_id) + if not success: + raise HTTPException(400, "Failed to delete asset") + + return {"deleted": True} + + +@router.post("/record-run") +async def record_run( + req: RecordRunRequest, + user: dict = Depends(require_auth), +): + """Record a run completion and register output as asset.""" + import db + + # Create asset for output + asset = await db.create_asset({ + "owner": user["username"], + "name": f"{req.recipe}-{req.run_id[:8]}", + "content_hash": req.output_hash, + "ipfs_cid": req.ipfs_cid, + "asset_type": "render", + "metadata": { + "run_id": req.run_id, + "recipe": req.recipe, + "inputs": req.inputs, + }, + "provenance": req.provenance, + }) + asset_id = asset.get("name") if asset else None + + # Record run + await 
db.record_run( + run_id=req.run_id, + username=user["username"], + recipe=req.recipe, + inputs=req.inputs or [], + output_hash=req.output_hash, + ipfs_cid=req.ipfs_cid, + asset_id=asset_id, + ) + + return { + "run_id": req.run_id, + "asset_id": asset_id, + "recorded": True, + } + + +@router.get("/by-run-id/{run_id}") +async def get_asset_by_run_id(run_id: str): + """Get asset by run ID (for L1 cache lookup).""" + import db + + run = await db.get_run(run_id) + if not run: + raise HTTPException(404, "Run not found") + + return { + "run_id": run_id, + "output_hash": run.get("output_hash"), + "ipfs_cid": run.get("ipfs_cid"), + "provenance_cid": run.get("provenance_cid"), + } + + +@router.post("/{asset_id}/publish") +async def publish_asset( + asset_id: str, + request: Request, + user: dict = Depends(require_auth), +): + """Publish asset to IPFS.""" + import db + import ipfs_client + + asset = await db.get_asset(asset_id) + if not asset: + raise HTTPException(404, "Asset not found") + + if asset.get("owner") != user["username"]: + raise HTTPException(403, "Not authorized") + + # Already published? + if asset.get("ipfs_cid"): + return {"ipfs_cid": asset["ipfs_cid"], "already_published": True} + + # Get content from L1 + content_hash = asset.get("content_hash") + for l1_url in settings.l1_servers: + try: + import requests + resp = requests.get(f"{l1_url}/cache/{content_hash}/raw", timeout=30) + if resp.status_code == 200: + # Pin to IPFS + cid = await ipfs_client.add_bytes(resp.content) + if cid: + await db.update_asset(asset_id, {"ipfs_cid": cid}) + return {"ipfs_cid": cid, "published": True} + except Exception as e: + logger.warning(f"Failed to fetch from {l1_url}: {e}") + + raise HTTPException(400, "Failed to publish - content not found on any L1") diff --git a/app/routers/auth.py b/app/routers/auth.py new file mode 100644 index 0000000..4691caf --- /dev/null +++ b/app/routers/auth.py @@ -0,0 +1,223 @@ +""" +Authentication routes for L2 server. 
+ +Handles login, registration, logout, and token verification. +""" + +import hashlib +from datetime import datetime, timezone + +from fastapi import APIRouter, Request, Form, HTTPException, Depends +from fastapi.responses import HTMLResponse, RedirectResponse +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials + +from artdag_common import render +from artdag_common.middleware import wants_html + +from ..config import settings +from ..dependencies import get_templates, get_user_from_cookie + +router = APIRouter() +security = HTTPBearer(auto_error=False) + + +@router.get("/login", response_class=HTMLResponse) +async def login_page(request: Request, return_to: str = None): + """Login page.""" + username = get_user_from_cookie(request) + + if username: + templates = get_templates(request) + return render(templates, "auth/already_logged_in.html", request, + user={"username": username}, + ) + + templates = get_templates(request) + return render(templates, "auth/login.html", request, + return_to=return_to, + ) + + +@router.post("/login", response_class=HTMLResponse) +async def login_submit( + request: Request, + username: str = Form(...), + password: str = Form(...), + return_to: str = Form(None), +): + """Handle login form submission.""" + from auth import authenticate_user, create_access_token + + if not username or not password: + return HTMLResponse( + '
Username and password are required
' + ) + + user = await authenticate_user(settings.data_dir, username.strip(), password) + if not user: + return HTMLResponse( + '
Invalid username or password
' + ) + + token = create_access_token(user.username, l2_server=f"https://{settings.domain}") + + # Handle return_to redirect + if return_to and return_to.startswith("http"): + separator = "&" if "?" in return_to else "?" + redirect_url = f"{return_to}{separator}auth_token={token.access_token}" + response = HTMLResponse(f''' +
Login successful! Redirecting...
+ + ''') + else: + response = HTMLResponse(''' +
Login successful! Redirecting...
+ + ''') + + response.set_cookie( + key="auth_token", + value=token.access_token, + httponly=True, + max_age=60 * 60 * 24 * 30, + samesite="lax", + secure=True, + ) + return response + + +@router.get("/register", response_class=HTMLResponse) +async def register_page(request: Request): + """Registration page.""" + username = get_user_from_cookie(request) + + if username: + templates = get_templates(request) + return render(templates, "auth/already_logged_in.html", request, + user={"username": username}, + ) + + templates = get_templates(request) + return render(templates, "auth/register.html", request) + + +@router.post("/register", response_class=HTMLResponse) +async def register_submit( + request: Request, + username: str = Form(...), + password: str = Form(...), + password2: str = Form(...), + email: str = Form(None), +): + """Handle registration form submission.""" + from auth import create_user, create_access_token + + if not username or not password: + return HTMLResponse('
Username and password are required
') + + if password != password2: + return HTMLResponse('
Passwords do not match
') + + if len(password) < 6: + return HTMLResponse('
Password must be at least 6 characters
') + + try: + user = await create_user(settings.data_dir, username.strip(), password, email) + except ValueError as e: + return HTMLResponse(f'
{str(e)}
') + + token = create_access_token(user.username, l2_server=f"https://{settings.domain}") + + response = HTMLResponse(''' +
Registration successful! Redirecting...
+ + ''') + response.set_cookie( + key="auth_token", + value=token.access_token, + httponly=True, + max_age=60 * 60 * 24 * 30, + samesite="lax", + secure=True, + ) + return response + + +@router.get("/logout") +async def logout(request: Request): + """Handle logout.""" + import db + import requests + from auth import get_token_claims + + token = request.cookies.get("auth_token") + claims = get_token_claims(token) if token else None + username = claims.get("sub") if claims else None + + if username and token and claims: + # Revoke token in database + token_hash = hashlib.sha256(token.encode()).hexdigest() + expires_at = datetime.fromtimestamp(claims.get("exp", 0), tz=timezone.utc) + await db.revoke_token(token_hash, username, expires_at) + + # Revoke on attached L1 servers + attached = await db.get_user_renderers(username) + for l1_url in attached: + try: + requests.post( + f"{l1_url}/auth/revoke-user", + json={"username": username, "l2_server": f"https://{settings.domain}"}, + timeout=5, + ) + except Exception: + pass + + response = RedirectResponse(url="/", status_code=302) + response.delete_cookie("auth_token") + return response + + +@router.get("/verify") +async def verify_token( + request: Request, + credentials: HTTPAuthorizationCredentials = Depends(security), +): + """ + Verify a token is valid. + + Called by L1 servers to verify tokens during auth callback. + Returns user info if valid, 401 if not. 
+ """ + import db + from auth import verify_token as verify_jwt, get_token_claims + + # Get token from Authorization header or query param + token = None + if credentials: + token = credentials.credentials + else: + # Try Authorization header manually (for clients that don't use Bearer format) + auth_header = request.headers.get("Authorization", "") + if auth_header.startswith("Bearer "): + token = auth_header[7:] + + if not token: + raise HTTPException(401, "No token provided") + + # Verify JWT signature and expiry + username = verify_jwt(token) + if not username: + raise HTTPException(401, "Invalid or expired token") + + # Check if token is revoked + claims = get_token_claims(token) + if claims: + token_hash = hashlib.sha256(token.encode()).hexdigest() + if await db.is_token_revoked(token_hash): + raise HTTPException(401, "Token has been revoked") + + return { + "valid": True, + "username": username, + "claims": claims, + } diff --git a/app/routers/federation.py b/app/routers/federation.py new file mode 100644 index 0000000..ab3fb4f --- /dev/null +++ b/app/routers/federation.py @@ -0,0 +1,115 @@ +""" +Federation routes for L2 server. + +Handles WebFinger, nodeinfo, and ActivityPub discovery. 
+""" + +import logging + +from fastapi import APIRouter, Request, HTTPException +from fastapi.responses import JSONResponse + +from ..config import settings + +router = APIRouter() +logger = logging.getLogger(__name__) + + +@router.get("/.well-known/webfinger") +async def webfinger(resource: str): + """WebFinger endpoint for actor discovery.""" + import db + + # Parse resource (acct:username@domain) + if not resource.startswith("acct:"): + raise HTTPException(400, "Invalid resource format") + + parts = resource[5:].split("@") + if len(parts) != 2: + raise HTTPException(400, "Invalid resource format") + + username, domain = parts + + if domain != settings.domain: + raise HTTPException(404, "User not on this server") + + user = await db.get_user(username) + if not user: + raise HTTPException(404, "User not found") + + return JSONResponse( + content={ + "subject": resource, + "aliases": [f"https://{settings.domain}/users/{username}"], + "links": [ + { + "rel": "self", + "type": "application/activity+json", + "href": f"https://{settings.domain}/users/{username}", + }, + { + "rel": "http://webfinger.net/rel/profile-page", + "type": "text/html", + "href": f"https://{settings.domain}/users/{username}", + }, + ], + }, + media_type="application/jrd+json", + ) + + +@router.get("/.well-known/nodeinfo") +async def nodeinfo_index(): + """NodeInfo index.""" + return JSONResponse( + content={ + "links": [ + { + "rel": "http://nodeinfo.diaspora.software/ns/schema/2.0", + "href": f"https://{settings.domain}/nodeinfo/2.0", + } + ] + }, + media_type="application/json", + ) + + +@router.get("/nodeinfo/2.0") +async def nodeinfo(): + """NodeInfo 2.0 endpoint.""" + import db + + user_count = await db.count_users() + activity_count = await db.count_activities() + + return JSONResponse( + content={ + "version": "2.0", + "software": { + "name": "artdag", + "version": "1.0.0", + }, + "protocols": ["activitypub"], + "usage": { + "users": {"total": user_count, "activeMonth": user_count}, + 
"localPosts": activity_count, + }, + "openRegistrations": True, + "metadata": { + "nodeName": "Art-DAG", + "nodeDescription": "Content-addressable media processing with ActivityPub federation", + }, + }, + media_type="application/json", + ) + + +@router.get("/.well-known/host-meta") +async def host_meta(): + """Host-meta endpoint.""" + xml = f''' + + +''' + from fastapi.responses import Response + return Response(content=xml, media_type="application/xrd+xml") diff --git a/app/routers/renderers.py b/app/routers/renderers.py new file mode 100644 index 0000000..4b9edf6 --- /dev/null +++ b/app/routers/renderers.py @@ -0,0 +1,93 @@ +""" +Renderer (L1) management routes for L2 server. + +L1 servers are configured via environment variable L1_SERVERS. +Users connect to renderers to create and run recipes. +""" + +import logging +from typing import Optional + +import requests +from fastapi import APIRouter, Request, Depends, HTTPException +from fastapi.responses import HTMLResponse, RedirectResponse + +from artdag_common import render +from artdag_common.middleware import wants_html, wants_json + +from ..config import settings +from ..dependencies import get_templates, require_auth, get_user_from_cookie + +router = APIRouter() +logger = logging.getLogger(__name__) + + +def check_renderer_health(url: str, timeout: float = 5.0) -> bool: + """Check if a renderer is healthy.""" + try: + resp = requests.get(f"{url}/", timeout=timeout) + return resp.status_code == 200 + except Exception: + return False + + +@router.get("") +async def list_renderers(request: Request): + """List configured L1 renderers.""" + # Get user if logged in + username = get_user_from_cookie(request) + user = None + if username: + # Get token for connection links + token = request.cookies.get("auth_token", "") + user = {"username": username, "token": token} + + # Build server list with health status + servers = [] + for url in settings.l1_servers: + servers.append({ + "url": url, + "healthy": 
check_renderer_health(url), + }) + + if wants_json(request): + return {"servers": servers} + + templates = get_templates(request) + return render(templates, "renderers/list.html", request, + servers=servers, + user=user, + active_tab="renderers", + ) + + +@router.get("/{path:path}") +async def renderer_catchall(path: str, request: Request): + """Catch-all for invalid renderer URLs - redirect to list.""" + if wants_json(request): + raise HTTPException(404, "Not found") + return RedirectResponse(url="/renderers", status_code=302) + + +@router.post("") +@router.post("/{path:path}") +async def renderer_post_catchall(request: Request, path: str = ""): + """ + Catch-all for POST requests. + + The old API expected JSON POST to attach renderers. + Now renderers are env-configured, so redirect to the list. + """ + if wants_json(request): + return { + "error": "Renderers are now configured via environment. See /renderers for available servers.", + "servers": settings.l1_servers, + } + + templates = get_templates(request) + return render(templates, "renderers/list.html", request, + servers=[{"url": url, "healthy": check_renderer_health(url)} for url in settings.l1_servers], + user=get_user_from_cookie(request), + error="Renderers are configured by the system administrator. Use the Connect button to access a renderer.", + active_tab="renderers", + ) diff --git a/app/routers/storage.py b/app/routers/storage.py new file mode 100644 index 0000000..f9cdcaf --- /dev/null +++ b/app/routers/storage.py @@ -0,0 +1,254 @@ +""" +Storage provider routes for L2 server. + +Manages user storage backends. 
+""" + +import logging +from typing import Optional, Dict, Any + +from fastapi import APIRouter, Request, Depends, HTTPException, Form +from fastapi.responses import HTMLResponse +from pydantic import BaseModel + +from artdag_common import render +from artdag_common.middleware import wants_html, wants_json + +from ..config import settings +from ..dependencies import get_templates, require_auth, get_user_from_cookie + +router = APIRouter() +logger = logging.getLogger(__name__) + + +STORAGE_PROVIDERS_INFO = { + "pinata": {"name": "Pinata", "desc": "1GB free, IPFS pinning", "color": "blue"}, + "web3storage": {"name": "web3.storage", "desc": "IPFS + Filecoin", "color": "green"}, + "nftstorage": {"name": "NFT.Storage", "desc": "Free for NFTs", "color": "pink"}, + "infura": {"name": "Infura IPFS", "desc": "5GB free", "color": "orange"}, + "filebase": {"name": "Filebase", "desc": "5GB free, S3+IPFS", "color": "cyan"}, + "storj": {"name": "Storj", "desc": "25GB free", "color": "indigo"}, + "local": {"name": "Local Storage", "desc": "Your own disk", "color": "purple"}, +} + + +class AddStorageRequest(BaseModel): + provider_type: str + config: Dict[str, Any] + capacity_gb: int = 5 + provider_name: Optional[str] = None + + +@router.get("") +async def list_storage(request: Request): + """List user's storage providers.""" + import db + + username = get_user_from_cookie(request) + if not username: + if wants_json(request): + raise HTTPException(401, "Authentication required") + from fastapi.responses import RedirectResponse + return RedirectResponse(url="/login", status_code=302) + + storages = await db.get_user_storage(username) + + if wants_json(request): + return {"storages": storages} + + templates = get_templates(request) + return render(templates, "storage/list.html", request, + storages=storages, + user={"username": username}, + providers_info=STORAGE_PROVIDERS_INFO, + active_tab="storage", + ) + + +@router.post("") +async def add_storage( + req: AddStorageRequest, + 
user: dict = Depends(require_auth), +): + """Add a storage provider.""" + import db + import storage_providers + + if req.provider_type not in STORAGE_PROVIDERS_INFO: + raise HTTPException(400, f"Invalid provider type: {req.provider_type}") + + # Test connection + provider = storage_providers.create_provider(req.provider_type, { + **req.config, + "capacity_gb": req.capacity_gb, + }) + if not provider: + raise HTTPException(400, "Failed to create provider") + + success, message = await provider.test_connection() + if not success: + raise HTTPException(400, f"Connection failed: {message}") + + # Save + storage_id = await db.add_user_storage( + username=user["username"], + provider_type=req.provider_type, + provider_name=req.provider_name, + config=req.config, + capacity_gb=req.capacity_gb, + ) + + return {"id": storage_id, "message": "Storage provider added"} + + +@router.post("/add", response_class=HTMLResponse) +async def add_storage_form( + request: Request, + provider_type: str = Form(...), + provider_name: Optional[str] = Form(None), + capacity_gb: int = Form(5), + api_key: Optional[str] = Form(None), + secret_key: Optional[str] = Form(None), + api_token: Optional[str] = Form(None), + project_id: Optional[str] = Form(None), + project_secret: Optional[str] = Form(None), + access_key: Optional[str] = Form(None), + bucket: Optional[str] = Form(None), + path: Optional[str] = Form(None), +): + """Add storage via HTML form.""" + import db + import storage_providers + + username = get_user_from_cookie(request) + if not username: + return HTMLResponse('
Not authenticated
', status_code=401) + + # Build config + config = {} + if provider_type == "pinata": + if not api_key or not secret_key: + return HTMLResponse('
Pinata requires API Key and Secret Key
') + config = {"api_key": api_key, "secret_key": secret_key} + elif provider_type in ["web3storage", "nftstorage"]: + if not api_token: + return HTMLResponse(f'
{provider_type} requires API Token
') + config = {"api_token": api_token} + elif provider_type == "infura": + if not project_id or not project_secret: + return HTMLResponse('
Infura requires Project ID and Secret
') + config = {"project_id": project_id, "project_secret": project_secret} + elif provider_type in ["filebase", "storj"]: + if not access_key or not secret_key or not bucket: + return HTMLResponse('
Requires Access Key, Secret Key, and Bucket
') + config = {"access_key": access_key, "secret_key": secret_key, "bucket": bucket} + elif provider_type == "local": + if not path: + return HTMLResponse('
Local storage requires a path
') + config = {"path": path} + else: + return HTMLResponse(f'
Unknown provider: {provider_type}
') + + # Test + provider = storage_providers.create_provider(provider_type, {**config, "capacity_gb": capacity_gb}) + if provider: + success, message = await provider.test_connection() + if not success: + return HTMLResponse(f'
Connection failed: {message}
') + + # Save + storage_id = await db.add_user_storage( + username=username, + provider_type=provider_type, + provider_name=provider_name, + config=config, + capacity_gb=capacity_gb, + ) + + return HTMLResponse(f''' +
Storage provider added!
+ + ''') + + +@router.get("/{storage_id}") +async def get_storage( + storage_id: int, + user: dict = Depends(require_auth), +): + """Get storage details.""" + import db + + storage = await db.get_storage_by_id(storage_id) + if not storage: + raise HTTPException(404, "Storage not found") + + if storage.get("username") != user["username"]: + raise HTTPException(403, "Not authorized") + + return storage + + +@router.delete("/{storage_id}") +async def delete_storage( + storage_id: int, + request: Request, + user: dict = Depends(require_auth), +): + """Delete a storage provider.""" + import db + + storage = await db.get_storage_by_id(storage_id) + if not storage: + raise HTTPException(404, "Storage not found") + + if storage.get("username") != user["username"]: + raise HTTPException(403, "Not authorized") + + success = await db.remove_user_storage(storage_id) + + if wants_html(request): + return HTMLResponse("") + + return {"deleted": True} + + +@router.post("/{storage_id}/test") +async def test_storage( + storage_id: int, + request: Request, + user: dict = Depends(require_auth), +): + """Test storage connectivity.""" + import db + import storage_providers + import json + + storage = await db.get_storage_by_id(storage_id) + if not storage: + raise HTTPException(404, "Storage not found") + + if storage.get("username") != user["username"]: + raise HTTPException(403, "Not authorized") + + config = storage["config"] + if isinstance(config, str): + config = json.loads(config) + + provider = storage_providers.create_provider(storage["provider_type"], { + **config, + "capacity_gb": storage.get("capacity_gb", 5), + }) + + if not provider: + if wants_html(request): + return HTMLResponse('Failed to create provider') + return {"success": False, "message": "Failed to create provider"} + + success, message = await provider.test_connection() + + if wants_html(request): + color = "green" if success else "red" + return HTMLResponse(f'{message}') + + return {"success": success, 
"message": message} diff --git a/app/routers/users.py b/app/routers/users.py new file mode 100644 index 0000000..1715418 --- /dev/null +++ b/app/routers/users.py @@ -0,0 +1,161 @@ +""" +User profile routes for L2 server. + +Handles ActivityPub actor profiles. +""" + +import logging + +from fastapi import APIRouter, Request, HTTPException +from fastapi.responses import JSONResponse + +from artdag_common import render +from artdag_common.middleware import wants_html + +from ..config import settings +from ..dependencies import get_templates, get_user_from_cookie + +router = APIRouter() +logger = logging.getLogger(__name__) + + +@router.get("/users/{username}") +async def get_user_profile( + username: str, + request: Request, +): + """Get user profile (ActivityPub actor).""" + import db + + user = await db.get_user(username) + if not user: + raise HTTPException(404, "User not found") + + # ActivityPub response + accept = request.headers.get("accept", "") + if "application/activity+json" in accept or "application/ld+json" in accept: + actor = { + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + ], + "type": "Person", + "id": f"https://{settings.domain}/users/{username}", + "name": user.get("display_name", username), + "preferredUsername": username, + "inbox": f"https://{settings.domain}/users/{username}/inbox", + "outbox": f"https://{settings.domain}/users/{username}/outbox", + "publicKey": { + "id": f"https://{settings.domain}/users/{username}#main-key", + "owner": f"https://{settings.domain}/users/{username}", + "publicKeyPem": user.get("public_key", ""), + }, + } + return JSONResponse(content=actor, media_type="application/activity+json") + + # HTML profile page + current_user = get_user_from_cookie(request) + assets = await db.get_user_assets(username, limit=12) + + templates = get_templates(request) + return render(templates, "users/profile.html", request, + profile=user, + assets=assets, + user={"username": current_user} 
if current_user else None, + ) + + +@router.get("/users/{username}/outbox") +async def get_outbox( + username: str, + request: Request, + page: bool = False, +): + """Get user's outbox (ActivityPub).""" + import db + + user = await db.get_user(username) + if not user: + raise HTTPException(404, "User not found") + + actor_id = f"https://{settings.domain}/users/{username}" + + if not page: + # Return collection summary + total = await db.count_user_activities(username) + return JSONResponse( + content={ + "@context": "https://www.w3.org/ns/activitystreams", + "type": "OrderedCollection", + "id": f"{actor_id}/outbox", + "totalItems": total, + "first": f"{actor_id}/outbox?page=true", + }, + media_type="application/activity+json", + ) + + # Return paginated activities + activities = await db.get_user_activities(username, limit=20) + items = [a.get("activity_json", a) for a in activities] + + return JSONResponse( + content={ + "@context": "https://www.w3.org/ns/activitystreams", + "type": "OrderedCollectionPage", + "id": f"{actor_id}/outbox?page=true", + "partOf": f"{actor_id}/outbox", + "orderedItems": items, + }, + media_type="application/activity+json", + ) + + +@router.post("/users/{username}/inbox") +async def receive_inbox( + username: str, + request: Request, +): + """Receive ActivityPub inbox message.""" + import db + + user = await db.get_user(username) + if not user: + raise HTTPException(404, "User not found") + + # TODO: Verify HTTP signature + # TODO: Process activity (Follow, Like, Announce, etc.) 
+ + body = await request.json() + logger.info(f"Received inbox activity for {username}: {body.get('type')}") + + # For now, just acknowledge + return {"status": "accepted"} + + +@router.get("/") +async def home(request: Request): + """Home page.""" + import db + import markdown + + username = get_user_from_cookie(request) + + # Get recent activities + activities, _ = await db.get_activities_paginated(limit=10) + + # Get README if exists + readme_html = "" + try: + from pathlib import Path + readme_path = Path(__file__).parent.parent.parent / "README.md" + if readme_path.exists(): + readme_html = markdown.markdown(readme_path.read_text(), extensions=['tables', 'fenced_code']) + except Exception: + pass + + templates = get_templates(request) + return render(templates, "home.html", request, + user={"username": username} if username else None, + activities=activities, + readme_html=readme_html, + ) diff --git a/app/templates/404.html b/app/templates/404.html new file mode 100644 index 0000000..f6dbdcb --- /dev/null +++ b/app/templates/404.html @@ -0,0 +1,11 @@ +{% extends "base.html" %} + +{% block title %}Not Found - Art-DAG{% endblock %} + +{% block content %} +
+

404

+

Page not found

+ Go to home page +
+{% endblock %} diff --git a/app/templates/activities/list.html b/app/templates/activities/list.html new file mode 100644 index 0000000..d8a63cf --- /dev/null +++ b/app/templates/activities/list.html @@ -0,0 +1,39 @@ +{% extends "base.html" %} + +{% block title %}Activities - Art-DAG{% endblock %} + +{% block content %} +
+
+

Activities

+
+ + {% if activities %} + + + {% if has_more %} +
+ Load More +
+ {% endif %} + {% else %} +
+

No activities yet.

+
+ {% endif %} +
+{% endblock %} diff --git a/app/templates/anchors/list.html b/app/templates/anchors/list.html new file mode 100644 index 0000000..3626852 --- /dev/null +++ b/app/templates/anchors/list.html @@ -0,0 +1,47 @@ +{% extends "base.html" %} + +{% block title %}Anchors - Art-DAG{% endblock %} + +{% block content %} +
+
+

Bitcoin Anchors

+
+ + {% if anchors %} +
+ {% for anchor in anchors %} +
+
+ {{ anchor.merkle_root[:16] }}... + {% if anchor.confirmed_at %} + Confirmed + {% else %} + Pending + {% endif %} +
+
+ {{ anchor.activity_count or 0 }} activities | Created: {{ anchor.created_at }} +
+ {% if anchor.bitcoin_txid %} +
+ TX: {{ anchor.bitcoin_txid }} +
+ {% endif %} +
+ {% endfor %} +
+ + {% if has_more %} +
+ Load More +
+ {% endif %} + {% else %} +
+

No anchors yet.

+
+ {% endif %} +
+{% endblock %} diff --git a/app/templates/assets/list.html b/app/templates/assets/list.html new file mode 100644 index 0000000..b82f3b1 --- /dev/null +++ b/app/templates/assets/list.html @@ -0,0 +1,58 @@ +{% extends "base.html" %} + +{% block title %}Assets - Art-DAG{% endblock %} + +{% block content %} +
+

Your Assets

+ + {% if assets %} + + + {% if has_more %} +
+ Loading more... +
+ {% endif %} + + {% else %} +
+

No assets yet

+

Create content on an L1 renderer and publish it here.

+
+ {% endif %} +
+{% endblock %} diff --git a/app/templates/auth/already_logged_in.html b/app/templates/auth/already_logged_in.html new file mode 100644 index 0000000..aa94799 --- /dev/null +++ b/app/templates/auth/already_logged_in.html @@ -0,0 +1,12 @@ +{% extends "base.html" %} + +{% block title %}Already Logged In - Art-DAG{% endblock %} + +{% block content %} +
+
+ You are already logged in as {{ user.username }} +
+

Go to home page

+
+{% endblock %} diff --git a/app/templates/auth/login.html b/app/templates/auth/login.html new file mode 100644 index 0000000..0ba4e66 --- /dev/null +++ b/app/templates/auth/login.html @@ -0,0 +1,37 @@ +{% extends "base.html" %} + +{% block title %}Login - Art-DAG{% endblock %} + +{% block content %} +
+

Login

+ +
+ +
+ {% if return_to %} + + {% endif %} + +
+ + +
+ +
+ + +
+ + +
+ +

+ Don't have an account? Register +

+
+{% endblock %} diff --git a/app/templates/auth/register.html b/app/templates/auth/register.html new file mode 100644 index 0000000..8a1837e --- /dev/null +++ b/app/templates/auth/register.html @@ -0,0 +1,45 @@ +{% extends "base.html" %} + +{% block title %}Register - Art-DAG{% endblock %} + +{% block content %} +
+

Register

+ +
+ +
+
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ + +
+ +

+ Already have an account? Login +

+
+{% endblock %} diff --git a/app/templates/base.html b/app/templates/base.html new file mode 100644 index 0000000..380ef13 --- /dev/null +++ b/app/templates/base.html @@ -0,0 +1,47 @@ +{% extends "_base.html" %} + +{% block brand %} +Rose Ash +| +Art-DAG +/ +L2 +{% endblock %} + +{% block cart_mini %} +{% if request and request.state.cart_mini_html %} + {{ request.state.cart_mini_html | safe }} +{% endif %} +{% endblock %} + +{% block nav_tree %} +{% if request and request.state.nav_tree_html %} + {{ request.state.nav_tree_html | safe }} +{% endif %} +{% endblock %} + +{% block auth_menu %} +{% if request and request.state.auth_menu_html %} + {{ request.state.auth_menu_html | safe }} +{% endif %} +{% endblock %} + +{% block auth_menu_mobile %} +{% if request and request.state.auth_menu_html %} + {{ request.state.auth_menu_html | safe }} +{% endif %} +{% endblock %} + +{% block sub_nav %} + +{% endblock %} diff --git a/app/templates/home.html b/app/templates/home.html new file mode 100644 index 0000000..1898981 --- /dev/null +++ b/app/templates/home.html @@ -0,0 +1,42 @@ +{% extends "base.html" %} + +{% block title %}Art-DAG{% endblock %} + +{% block content %} +
+ {% if readme_html %} +
+ {{ readme_html | safe }} +
+ {% else %} +
+

Art-DAG

+

Content-Addressable Media with ActivityPub Federation

+ + {% if not user %} +
+ Login + Register +
+ {% endif %} +
+ {% endif %} + + {% if activities %} +

Recent Activity

+
+ {% for activity in activities %} +
+
+ {{ activity.actor }} + {{ activity.created_at }} +
+
+ {{ activity.type }}: {{ activity.summary or activity.object_type }} +
+
+ {% endfor %} +
+ {% endif %} +
+{% endblock %} diff --git a/app/templates/renderers/list.html b/app/templates/renderers/list.html new file mode 100644 index 0000000..66f93b8 --- /dev/null +++ b/app/templates/renderers/list.html @@ -0,0 +1,52 @@ +{% extends "base.html" %} + +{% block content %} +
+

Renderers

+ +

+ Renderers are L1 servers that process your media. Connect to a renderer to create and run recipes. +

+ + {% if error %} +
+ {{ error }} +
+ {% endif %} + + {% if success %} +
+ {{ success }} +
+ {% endif %} + +
+ {% for server in servers %} +
+
+ + {{ server.url }} + + {% if server.healthy %} + Online + {% else %} + Offline + {% endif %} +
+ +
+ {% else %} +

No renderers configured.

+ {% endfor %} +
+ +
+

Renderers are configured by the system administrator.

+
+
+{% endblock %} diff --git a/app/templates/storage/list.html b/app/templates/storage/list.html new file mode 100644 index 0000000..a3aebf5 --- /dev/null +++ b/app/templates/storage/list.html @@ -0,0 +1,41 @@ +{% extends "base.html" %} + +{% block title %}Storage - Art-DAG{% endblock %} + +{% block content %} +
+
+

Storage Providers

+ + Add Storage + +
+ + {% if storages %} +
+ {% for storage in storages %} +
+
+ {{ storage.name or storage.provider_type }} + + {{ storage.provider_type }} + +
+
+ {% if storage.endpoint %} + {{ storage.endpoint }} + {% elif storage.bucket %} + Bucket: {{ storage.bucket }} + {% endif %} +
+
+ {% endfor %} +
+ {% else %} +
+

No storage providers configured.

+ Add one now +
+ {% endif %} +
+{% endblock %} diff --git a/artdag-client.tar.gz b/artdag-client.tar.gz new file mode 100644 index 0000000..a4ec7f4 Binary files /dev/null and b/artdag-client.tar.gz differ diff --git a/auth.py b/auth.py new file mode 100644 index 0000000..a56e13c --- /dev/null +++ b/auth.py @@ -0,0 +1,213 @@ +""" +Authentication for Art DAG L2 Server. + +User registration, login, and JWT tokens. +""" + +import os +import secrets +from datetime import datetime, timezone, timedelta +from pathlib import Path +from typing import Optional + +import bcrypt +from jose import JWTError, jwt +from pydantic import BaseModel + +import db + +# JWT settings +ALGORITHM = "HS256" +ACCESS_TOKEN_EXPIRE_DAYS = 30 + + +def load_jwt_secret() -> str: + """Load JWT secret from Docker secret, env var, or generate.""" + # Try Docker secret first + secret_path = Path("/run/secrets/jwt_secret") + if secret_path.exists(): + return secret_path.read_text().strip() + + # Try environment variable + if os.environ.get("JWT_SECRET"): + return os.environ["JWT_SECRET"] + + # Generate one (tokens won't persist across restarts!) + print("WARNING: No JWT_SECRET configured. 
Tokens will be invalidated on restart.") + return secrets.token_hex(32) + + +SECRET_KEY = load_jwt_secret() + + +class User(BaseModel): + """A registered user.""" + username: str + password_hash: str + created_at: str + email: Optional[str] = None + + +class UserCreate(BaseModel): + """Request to register a user.""" + username: str + password: str + email: Optional[str] = None + + +class UserLogin(BaseModel): + """Request to login.""" + username: str + password: str + + +class Token(BaseModel): + """JWT token response.""" + access_token: str + token_type: str = "bearer" + username: str + expires_at: str + + +# Keep DATA_DIR for keys (RSA keys still stored as files) +DATA_DIR = Path(os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2"))) + + +def hash_password(password: str) -> str: + """Hash a password (truncate to 72 bytes for bcrypt).""" + # Truncate to 72 bytes (bcrypt limit) + pw_bytes = password.encode('utf-8')[:72] + return bcrypt.hashpw(pw_bytes, bcrypt.gensalt()).decode('utf-8') + + +def verify_password(plain_password: str, hashed_password: str) -> bool: + """Verify a password against its hash.""" + pw_bytes = plain_password.encode('utf-8')[:72] + return bcrypt.checkpw(pw_bytes, hashed_password.encode('utf-8')) + + +async def create_user(data_dir: Path, username: str, password: str, email: Optional[str] = None) -> User: + """Create a new user with ActivityPub keys.""" + from keys import generate_keypair + + if await db.user_exists(username): + raise ValueError(f"Username already exists: {username}") + + password_hash = hash_password(password) + user_data = await db.create_user(username, password_hash, email) + + # Generate ActivityPub keys for this user + generate_keypair(data_dir, username) + + # Convert datetime to ISO string if needed + created_at = user_data.get("created_at") + if hasattr(created_at, 'isoformat'): + created_at = created_at.isoformat() + + return User( + username=username, + password_hash=password_hash, + 
created_at=created_at, + email=email + ) + + +async def authenticate_user(data_dir: Path, username: str, password: str) -> Optional[User]: + """Authenticate a user by username and password.""" + user_data = await db.get_user(username) + + if not user_data: + return None + + if not verify_password(password, user_data["password_hash"]): + return None + + # Convert datetime to ISO string if needed + created_at = user_data.get("created_at") + if hasattr(created_at, 'isoformat'): + created_at = created_at.isoformat() + + return User( + username=user_data["username"], + password_hash=user_data["password_hash"], + created_at=created_at, + email=user_data.get("email") + ) + + +def create_access_token(username: str, l2_server: str = None, l1_server: str = None) -> Token: + """Create a JWT access token. + + Args: + username: The username + l2_server: The L2 server URL (e.g., https://artdag.rose-ash.com) + Required for L1 to verify tokens with the correct L2. + l1_server: Optional L1 server URL to scope the token to. + If set, token only works for this specific L1. 
+ """ + expires = datetime.now(timezone.utc) + timedelta(days=ACCESS_TOKEN_EXPIRE_DAYS) + + payload = { + "sub": username, + "username": username, # Also include as username for compatibility + "exp": expires, + "iat": datetime.now(timezone.utc) + } + + # Include l2_server so L1 knows which L2 to verify with + if l2_server: + payload["l2_server"] = l2_server + + # Include l1_server to scope token to specific L1 + if l1_server: + payload["l1_server"] = l1_server + + token = jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM) + + return Token( + access_token=token, + username=username, + expires_at=expires.isoformat() + ) + + +def verify_token(token: str) -> Optional[str]: + """Verify a JWT token, return username if valid.""" + try: + payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + username = payload.get("sub") + return username + except JWTError: + return None + + +def get_token_claims(token: str) -> Optional[dict]: + """Decode token and return all claims. Returns None if invalid.""" + try: + payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + return payload + except JWTError: + return None + + +async def get_current_user(data_dir: Path, token: str) -> Optional[User]: + """Get current user from token.""" + username = verify_token(token) + if not username: + return None + + user_data = await db.get_user(username) + if not user_data: + return None + + # Convert datetime to ISO string if needed + created_at = user_data.get("created_at") + if hasattr(created_at, 'isoformat'): + created_at = created_at.isoformat() + + return User( + username=user_data["username"], + password_hash=user_data["password_hash"], + created_at=created_at, + email=user_data.get("email") + ) diff --git a/db.py b/db.py new file mode 100644 index 0000000..205271d --- /dev/null +++ b/db.py @@ -0,0 +1,1215 @@ +""" +Database module for Art DAG L2 Server. + +Uses asyncpg for async PostgreSQL access with connection pooling. 
+""" + +import json +import os +from datetime import datetime, timezone +from typing import Optional +from contextlib import asynccontextmanager + +import asyncpg + +# Connection pool (initialized on startup) + + +def _parse_timestamp(ts) -> datetime: + """Parse a timestamp string or datetime to datetime object.""" + if ts is None: + return datetime.now(timezone.utc) + if isinstance(ts, datetime): + return ts + # Parse ISO format string + if isinstance(ts, str): + if ts.endswith('Z'): + ts = ts[:-1] + '+00:00' + return datetime.fromisoformat(ts) + return datetime.now(timezone.utc) + + +_pool: Optional[asyncpg.Pool] = None + +# Configuration from environment +DATABASE_URL = os.environ.get("DATABASE_URL") +if not DATABASE_URL: + raise RuntimeError("DATABASE_URL environment variable is required") + +# Schema for database initialization +SCHEMA = """ +-- Users table +CREATE TABLE IF NOT EXISTS users ( + username VARCHAR(255) PRIMARY KEY, + password_hash VARCHAR(255) NOT NULL, + email VARCHAR(255), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Assets table +CREATE TABLE IF NOT EXISTS assets ( + name VARCHAR(255) PRIMARY KEY, + content_hash VARCHAR(128) NOT NULL, + ipfs_cid VARCHAR(128), + asset_type VARCHAR(50) NOT NULL, + tags JSONB DEFAULT '[]'::jsonb, + metadata JSONB DEFAULT '{}'::jsonb, + url TEXT, + provenance JSONB, + description TEXT, + origin JSONB, + owner VARCHAR(255) NOT NULL REFERENCES users(username), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ +); + +-- Activities table (activity_id is content-addressable run_id hash) +CREATE TABLE IF NOT EXISTS activities ( + activity_id VARCHAR(64) PRIMARY KEY, + activity_type VARCHAR(50) NOT NULL, + actor_id TEXT NOT NULL, + object_data JSONB NOT NULL, + published TIMESTAMPTZ NOT NULL, + signature JSONB, + anchor_root VARCHAR(64) -- Merkle root this activity is anchored to +); + +-- Anchors table (Bitcoin timestamps via OpenTimestamps) +CREATE TABLE IF NOT EXISTS anchors ( + id 
SERIAL PRIMARY KEY, + merkle_root VARCHAR(64) NOT NULL UNIQUE, + tree_ipfs_cid VARCHAR(128), + ots_proof_cid VARCHAR(128), + activity_count INTEGER NOT NULL, + first_activity_id VARCHAR(64), + last_activity_id VARCHAR(64), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + confirmed_at TIMESTAMPTZ, + bitcoin_txid VARCHAR(64) +); + +-- Followers table +CREATE TABLE IF NOT EXISTS followers ( + id SERIAL PRIMARY KEY, + username VARCHAR(255) NOT NULL REFERENCES users(username), + acct VARCHAR(255) NOT NULL, + url TEXT NOT NULL, + public_key TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE(username, acct) +); + +-- User's attached L1 renderers +CREATE TABLE IF NOT EXISTS user_renderers ( + id SERIAL PRIMARY KEY, + username VARCHAR(255) NOT NULL REFERENCES users(username), + l1_url TEXT NOT NULL, + attached_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE(username, l1_url) +); + +-- Revoked tokens (for federated logout) +CREATE TABLE IF NOT EXISTS revoked_tokens ( + token_hash VARCHAR(64) PRIMARY KEY, + username VARCHAR(255) NOT NULL, + revoked_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + expires_at TIMESTAMPTZ NOT NULL +); + +-- User storage providers (IPFS pinning services, local storage, etc.) 
+-- Users can have multiple configs of the same provider type +CREATE TABLE IF NOT EXISTS user_storage ( + id SERIAL PRIMARY KEY, + username VARCHAR(255) NOT NULL REFERENCES users(username), + provider_type VARCHAR(50) NOT NULL, -- 'pinata', 'web3storage', 'nftstorage', 'infura', 'filebase', 'storj', 'local' + provider_name VARCHAR(255), -- User-friendly name + description TEXT, -- User description to distinguish configs + config JSONB NOT NULL DEFAULT '{}', -- API keys, endpoints, paths + capacity_gb INTEGER NOT NULL, -- Total capacity user is contributing + is_active BOOLEAN DEFAULT true, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() +); + +-- Track what's stored where +CREATE TABLE IF NOT EXISTS storage_pins ( + id SERIAL PRIMARY KEY, + content_hash VARCHAR(64) NOT NULL, + storage_id INTEGER NOT NULL REFERENCES user_storage(id) ON DELETE CASCADE, + ipfs_cid VARCHAR(128), + pin_type VARCHAR(20) NOT NULL, -- 'user_content', 'donated', 'system' + size_bytes BIGINT, + pinned_at TIMESTAMPTZ DEFAULT NOW(), + UNIQUE(content_hash, storage_id) +); + +-- Indexes +CREATE INDEX IF NOT EXISTS idx_users_created_at ON users(created_at); +CREATE INDEX IF NOT EXISTS idx_assets_content_hash ON assets(content_hash); +CREATE INDEX IF NOT EXISTS idx_assets_owner ON assets(owner); +CREATE INDEX IF NOT EXISTS idx_assets_created_at ON assets(created_at DESC); +CREATE INDEX IF NOT EXISTS idx_assets_tags ON assets USING GIN(tags); +CREATE INDEX IF NOT EXISTS idx_activities_actor_id ON activities(actor_id); +CREATE INDEX IF NOT EXISTS idx_activities_published ON activities(published DESC); +CREATE INDEX IF NOT EXISTS idx_activities_anchor ON activities(anchor_root); +CREATE INDEX IF NOT EXISTS idx_anchors_created ON anchors(created_at DESC); +CREATE INDEX IF NOT EXISTS idx_followers_username ON followers(username); +CREATE INDEX IF NOT EXISTS idx_revoked_tokens_expires ON revoked_tokens(expires_at); +CREATE INDEX IF NOT EXISTS idx_user_storage_username ON 
user_storage(username); +CREATE INDEX IF NOT EXISTS idx_storage_pins_hash ON storage_pins(content_hash); +CREATE INDEX IF NOT EXISTS idx_storage_pins_storage ON storage_pins(storage_id); + +-- Add source URL columns to assets if they don't exist +DO $$ BEGIN + ALTER TABLE assets ADD COLUMN source_url TEXT; +EXCEPTION WHEN duplicate_column THEN NULL; +END $$; + +DO $$ BEGIN + ALTER TABLE assets ADD COLUMN source_type VARCHAR(50); +EXCEPTION WHEN duplicate_column THEN NULL; +END $$; + +-- Add description column to user_storage if it doesn't exist +DO $$ BEGIN + ALTER TABLE user_storage ADD COLUMN description TEXT; +EXCEPTION WHEN duplicate_column THEN NULL; +END $$; +""" + + +async def init_pool(): + """Initialize the connection pool and create tables. Call on app startup.""" + global _pool + _pool = await asyncpg.create_pool( + DATABASE_URL, + min_size=2, + max_size=10, + command_timeout=60 + ) + # Create tables if they don't exist + async with _pool.acquire() as conn: + await conn.execute(SCHEMA) + + +async def close_pool(): + """Close the connection pool. Call on app shutdown.""" + global _pool + if _pool: + await _pool.close() + _pool = None + + +def get_pool() -> asyncpg.Pool: + """Get the connection pool.""" + if _pool is None: + raise RuntimeError("Database pool not initialized") + return _pool + + +@asynccontextmanager +async def get_connection(): + """Get a connection from the pool.""" + async with get_pool().acquire() as conn: + yield conn + + +@asynccontextmanager +async def transaction(): + """ + Get a connection with an active transaction. 
+ + Usage: + async with db.transaction() as conn: + await create_asset_tx(conn, asset1) + await create_asset_tx(conn, asset2) + await create_activity_tx(conn, activity) + # Commits on exit, rolls back on exception + """ + async with get_pool().acquire() as conn: + async with conn.transaction(): + yield conn + + +# ============ Users ============ + +async def get_user(username: str) -> Optional[dict]: + """Get user by username.""" + async with get_connection() as conn: + row = await conn.fetchrow( + "SELECT username, password_hash, email, created_at FROM users WHERE username = $1", + username + ) + if row: + return dict(row) + return None + + +async def get_all_users() -> dict[str, dict]: + """Get all users as a dict indexed by username.""" + async with get_connection() as conn: + rows = await conn.fetch( + "SELECT username, password_hash, email, created_at FROM users ORDER BY username" + ) + return {row["username"]: dict(row) for row in rows} + + +async def create_user(username: str, password_hash: str, email: Optional[str] = None) -> dict: + """Create a new user.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """INSERT INTO users (username, password_hash, email) + VALUES ($1, $2, $3) + RETURNING username, password_hash, email, created_at""", + username, password_hash, email + ) + return dict(row) + + +async def user_exists(username: str) -> bool: + """Check if user exists.""" + async with get_connection() as conn: + result = await conn.fetchval( + "SELECT EXISTS(SELECT 1 FROM users WHERE username = $1)", + username + ) + return result + + +# ============ Assets ============ + +async def get_asset(name: str) -> Optional[dict]: + """Get asset by name.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """SELECT name, content_hash, ipfs_cid, asset_type, tags, metadata, url, + provenance, description, origin, owner, created_at, updated_at + FROM assets WHERE name = $1""", + name + ) + if row: + return 
_parse_asset_row(row) + return None + + +async def get_asset_by_hash(content_hash: str) -> Optional[dict]: + """Get asset by content hash.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """SELECT name, content_hash, ipfs_cid, asset_type, tags, metadata, url, + provenance, description, origin, owner, created_at, updated_at + FROM assets WHERE content_hash = $1""", + content_hash + ) + if row: + return _parse_asset_row(row) + return None + + +async def get_asset_by_run_id(run_id: str) -> Optional[dict]: + """Get asset by run_id stored in provenance.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """SELECT name, content_hash, ipfs_cid, asset_type, tags, metadata, url, + provenance, description, origin, owner, created_at, updated_at + FROM assets WHERE provenance->>'run_id' = $1""", + run_id + ) + if row: + return _parse_asset_row(row) + return None + + +async def get_all_assets() -> dict[str, dict]: + """Get all assets as a dict indexed by name.""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT name, content_hash, ipfs_cid, asset_type, tags, metadata, url, + provenance, description, origin, owner, created_at, updated_at + FROM assets ORDER BY created_at DESC""" + ) + return {row["name"]: _parse_asset_row(row) for row in rows} + + +async def get_assets_paginated(limit: int = 100, offset: int = 0) -> tuple[list[tuple[str, dict]], int]: + """Get paginated assets, returns (list of (name, asset) tuples, total_count).""" + async with get_connection() as conn: + total = await conn.fetchval("SELECT COUNT(*) FROM assets") + rows = await conn.fetch( + """SELECT name, content_hash, ipfs_cid, asset_type, tags, metadata, url, + provenance, description, origin, owner, created_at, updated_at + FROM assets ORDER BY created_at DESC LIMIT $1 OFFSET $2""", + limit, offset + ) + return [(row["name"], _parse_asset_row(row)) for row in rows], total + + +async def get_assets_by_owner(owner: str) -> dict[str, 
dict]: + """Get all assets owned by a user.""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT name, content_hash, ipfs_cid, asset_type, tags, metadata, url, + provenance, description, origin, owner, created_at, updated_at + FROM assets WHERE owner = $1 ORDER BY created_at DESC""", + owner + ) + return {row["name"]: _parse_asset_row(row) for row in rows} + + +async def create_asset(asset: dict) -> dict: + """Create a new asset.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """INSERT INTO assets (name, content_hash, ipfs_cid, asset_type, tags, metadata, + url, provenance, description, origin, owner, created_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) + RETURNING *""", + asset["name"], + asset["content_hash"], + asset.get("ipfs_cid"), + asset["asset_type"], + json.dumps(asset.get("tags", [])), + json.dumps(asset.get("metadata", {})), + asset.get("url"), + json.dumps(asset.get("provenance")) if asset.get("provenance") else None, + asset.get("description"), + json.dumps(asset.get("origin")) if asset.get("origin") else None, + asset["owner"], + _parse_timestamp(asset.get("created_at")) + ) + return _parse_asset_row(row) + + +async def update_asset(name: str, updates: dict) -> Optional[dict]: + """Update an existing asset.""" + # Build dynamic UPDATE query + set_clauses = [] + values = [] + idx = 1 + + for key, value in updates.items(): + if key in ("tags", "metadata", "provenance", "origin"): + set_clauses.append(f"{key} = ${idx}") + values.append(json.dumps(value) if value is not None else None) + else: + set_clauses.append(f"{key} = ${idx}") + values.append(value) + idx += 1 + + set_clauses.append(f"updated_at = ${idx}") + values.append(datetime.now(timezone.utc)) + idx += 1 + + values.append(name) # WHERE clause + + async with get_connection() as conn: + row = await conn.fetchrow( + f"""UPDATE assets SET {', '.join(set_clauses)} + WHERE name = ${idx} RETURNING *""", + *values + ) + if row: 
+ return _parse_asset_row(row) + return None + + +async def asset_exists(name: str) -> bool: + """Check if asset exists.""" + async with get_connection() as conn: + return await conn.fetchval( + "SELECT EXISTS(SELECT 1 FROM assets WHERE name = $1)", + name + ) + + +def _parse_asset_row(row) -> dict: + """Parse a database row into an asset dict, handling JSONB fields.""" + asset = dict(row) + # Convert datetime to ISO string + if asset.get("created_at"): + asset["created_at"] = asset["created_at"].isoformat() + if asset.get("updated_at"): + asset["updated_at"] = asset["updated_at"].isoformat() + # Ensure JSONB fields are dicts (handle string case) + for field in ("tags", "metadata", "provenance", "origin"): + if isinstance(asset.get(field), str): + try: + asset[field] = json.loads(asset[field]) + except (json.JSONDecodeError, TypeError): + pass + return asset + + +# ============ Assets (Transaction variants) ============ + +async def get_asset_by_hash_tx(conn, content_hash: str) -> Optional[dict]: + """Get asset by content hash within a transaction.""" + row = await conn.fetchrow( + """SELECT name, content_hash, ipfs_cid, asset_type, tags, metadata, url, + provenance, description, origin, owner, created_at, updated_at + FROM assets WHERE content_hash = $1""", + content_hash + ) + if row: + return _parse_asset_row(row) + return None + + +async def asset_exists_by_name_tx(conn, name: str) -> bool: + """Check if asset name exists within a transaction.""" + return await conn.fetchval( + "SELECT EXISTS(SELECT 1 FROM assets WHERE name = $1)", + name + ) + + +async def get_asset_by_name_tx(conn, name: str) -> Optional[dict]: + """Get asset by name within a transaction.""" + row = await conn.fetchrow( + """SELECT name, content_hash, ipfs_cid, asset_type, tags, metadata, url, + provenance, description, origin, owner, created_at, updated_at + FROM assets WHERE name = $1""", + name + ) + if row: + return _parse_asset_row(row) + return None + + +async def create_asset_tx(conn, 
asset: dict) -> dict: + """Create a new asset within a transaction.""" + row = await conn.fetchrow( + """INSERT INTO assets (name, content_hash, ipfs_cid, asset_type, tags, metadata, + url, provenance, description, origin, owner, created_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) + RETURNING *""", + asset["name"], + asset["content_hash"], + asset.get("ipfs_cid"), + asset["asset_type"], + json.dumps(asset.get("tags", [])), + json.dumps(asset.get("metadata", {})), + asset.get("url"), + json.dumps(asset.get("provenance")) if asset.get("provenance") else None, + asset.get("description"), + json.dumps(asset.get("origin")) if asset.get("origin") else None, + asset["owner"], + _parse_timestamp(asset.get("created_at")) + ) + return _parse_asset_row(row) + + +# ============ Activities ============ + +async def get_activity(activity_id: str) -> Optional[dict]: + """Get activity by ID.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """SELECT activity_id, activity_type, actor_id, object_data, published, signature + FROM activities WHERE activity_id = $1""", + activity_id + ) + if row: + return _parse_activity_row(row) + return None + + +async def get_activity_by_index(index: int) -> Optional[dict]: + """Get activity by index (for backward compatibility with URL scheme).""" + async with get_connection() as conn: + row = await conn.fetchrow( + """SELECT activity_id, activity_type, actor_id, object_data, published, signature + FROM activities ORDER BY published ASC LIMIT 1 OFFSET $1""", + index + ) + if row: + return _parse_activity_row(row) + return None + + +async def get_all_activities() -> list[dict]: + """Get all activities ordered by published date (oldest first for index compatibility).""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT activity_id, activity_type, actor_id, object_data, published, signature + FROM activities ORDER BY published ASC""" + ) + return [_parse_activity_row(row) for row 
in rows] + + +async def get_activities_paginated(limit: int = 100, offset: int = 0) -> tuple[list[dict], int]: + """Get paginated activities (newest first), returns (activities, total_count).""" + async with get_connection() as conn: + total = await conn.fetchval("SELECT COUNT(*) FROM activities") + rows = await conn.fetch( + """SELECT activity_id, activity_type, actor_id, object_data, published, signature + FROM activities ORDER BY published DESC LIMIT $1 OFFSET $2""", + limit, offset + ) + return [_parse_activity_row(row) for row in rows], total + + +async def get_activities_by_actor(actor_id: str) -> list[dict]: + """Get all activities by an actor.""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT activity_id, activity_type, actor_id, object_data, published, signature + FROM activities WHERE actor_id = $1 ORDER BY published DESC""", + actor_id + ) + return [_parse_activity_row(row) for row in rows] + + +async def create_activity(activity: dict) -> dict: + """Create a new activity.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """INSERT INTO activities (activity_id, activity_type, actor_id, object_data, published, signature) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING *""", + activity["activity_id"], + activity["activity_type"], + activity["actor_id"], + json.dumps(activity["object_data"]), + _parse_timestamp(activity["published"]), + json.dumps(activity.get("signature")) if activity.get("signature") else None + ) + return _parse_activity_row(row) + + +async def count_activities() -> int: + """Get total activity count.""" + async with get_connection() as conn: + return await conn.fetchval("SELECT COUNT(*) FROM activities") + + +def _parse_activity_row(row) -> dict: + """Parse a database row into an activity dict, handling JSONB fields.""" + activity = dict(row) + # Convert datetime to ISO string + if activity.get("published"): + activity["published"] = activity["published"].isoformat() + # Ensure JSONB 
fields are dicts (handle string case) + for field in ("object_data", "signature"): + if isinstance(activity.get(field), str): + try: + activity[field] = json.loads(activity[field]) + except (json.JSONDecodeError, TypeError): + pass + return activity + + +# ============ Activities (Transaction variants) ============ + +async def create_activity_tx(conn, activity: dict) -> dict: + """Create a new activity within a transaction.""" + row = await conn.fetchrow( + """INSERT INTO activities (activity_id, activity_type, actor_id, object_data, published, signature) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING *""", + activity["activity_id"], + activity["activity_type"], + activity["actor_id"], + json.dumps(activity["object_data"]), + _parse_timestamp(activity["published"]), + json.dumps(activity.get("signature")) if activity.get("signature") else None + ) + return _parse_activity_row(row) + + +# ============ Followers ============ + +async def get_followers(username: str) -> list[dict]: + """Get followers for a user.""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT id, username, acct, url, public_key, created_at + FROM followers WHERE username = $1""", + username + ) + return [dict(row) for row in rows] + + +async def get_all_followers() -> list: + """Get all followers (for backward compatibility with old global list).""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT DISTINCT url FROM followers""" + ) + return [row["url"] for row in rows] + + +async def add_follower(username: str, acct: str, url: str, public_key: Optional[str] = None) -> dict: + """Add a follower.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """INSERT INTO followers (username, acct, url, public_key) + VALUES ($1, $2, $3, $4) + ON CONFLICT (username, acct) DO UPDATE SET url = $3, public_key = $4 + RETURNING *""", + username, acct, url, public_key + ) + return dict(row) + + +async def remove_follower(username: str, 
acct: str) -> bool: + """Remove a follower.""" + async with get_connection() as conn: + result = await conn.execute( + "DELETE FROM followers WHERE username = $1 AND acct = $2", + username, acct + ) + return result == "DELETE 1" + + +# ============ Stats ============ + +async def get_stats() -> dict: + """Get counts for dashboard.""" + async with get_connection() as conn: + assets = await conn.fetchval("SELECT COUNT(*) FROM assets") + activities = await conn.fetchval("SELECT COUNT(*) FROM activities") + users = await conn.fetchval("SELECT COUNT(*) FROM users") + return {"assets": assets, "activities": activities, "users": users} + + +# ============ Anchors (Bitcoin timestamps) ============ + +async def get_unanchored_activities() -> list[dict]: + """Get all activities not yet anchored to Bitcoin.""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT activity_id, activity_type, actor_id, object_data, published, signature + FROM activities WHERE anchor_root IS NULL ORDER BY published ASC""" + ) + return [_parse_activity_row(row) for row in rows] + + +async def create_anchor(anchor: dict) -> dict: + """Create an anchor record.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """INSERT INTO anchors (merkle_root, tree_ipfs_cid, ots_proof_cid, + activity_count, first_activity_id, last_activity_id) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING *""", + anchor["merkle_root"], + anchor.get("tree_ipfs_cid"), + anchor.get("ots_proof_cid"), + anchor["activity_count"], + anchor.get("first_activity_id"), + anchor.get("last_activity_id") + ) + return dict(row) + + +async def mark_activities_anchored(activity_ids: list[str], merkle_root: str) -> int: + """Mark activities as anchored with the given merkle root.""" + async with get_connection() as conn: + result = await conn.execute( + """UPDATE activities SET anchor_root = $1 + WHERE activity_id = ANY($2::text[])""", + merkle_root, + activity_ids + ) + # Returns "UPDATE N" + return 
int(result.split()[1]) if result else 0 + + +async def get_anchor(merkle_root: str) -> Optional[dict]: + """Get anchor by merkle root.""" + async with get_connection() as conn: + row = await conn.fetchrow( + "SELECT * FROM anchors WHERE merkle_root = $1", + merkle_root + ) + if row: + result = dict(row) + if result.get("first_activity_id"): + result["first_activity_id"] = str(result["first_activity_id"]) + if result.get("last_activity_id"): + result["last_activity_id"] = str(result["last_activity_id"]) + if result.get("created_at"): + result["created_at"] = result["created_at"].isoformat() + if result.get("confirmed_at"): + result["confirmed_at"] = result["confirmed_at"].isoformat() + return result + return None + + +async def get_all_anchors() -> list[dict]: + """Get all anchors, newest first.""" + async with get_connection() as conn: + rows = await conn.fetch( + "SELECT * FROM anchors ORDER BY created_at DESC" + ) + results = [] + for row in rows: + result = dict(row) + if result.get("first_activity_id"): + result["first_activity_id"] = str(result["first_activity_id"]) + if result.get("last_activity_id"): + result["last_activity_id"] = str(result["last_activity_id"]) + if result.get("created_at"): + result["created_at"] = result["created_at"].isoformat() + if result.get("confirmed_at"): + result["confirmed_at"] = result["confirmed_at"].isoformat() + results.append(result) + return results + + +async def get_anchors_paginated(offset: int = 0, limit: int = 20) -> list[dict]: + """Get anchors with pagination, newest first.""" + async with get_connection() as conn: + rows = await conn.fetch( + "SELECT * FROM anchors ORDER BY created_at DESC LIMIT $1 OFFSET $2", + limit, offset + ) + results = [] + for row in rows: + result = dict(row) + if result.get("first_activity_id"): + result["first_activity_id"] = str(result["first_activity_id"]) + if result.get("last_activity_id"): + result["last_activity_id"] = str(result["last_activity_id"]) + if result.get("created_at"): + 
result["created_at"] = result["created_at"].isoformat() + if result.get("confirmed_at"): + result["confirmed_at"] = result["confirmed_at"].isoformat() + results.append(result) + return results + + +async def update_anchor_confirmed(merkle_root: str, bitcoin_txid: str) -> bool: + """Mark anchor as confirmed with Bitcoin txid.""" + async with get_connection() as conn: + result = await conn.execute( + """UPDATE anchors SET confirmed_at = NOW(), bitcoin_txid = $1 + WHERE merkle_root = $2""", + bitcoin_txid, merkle_root + ) + return result == "UPDATE 1" + + +async def get_anchor_stats() -> dict: + """Get anchoring statistics.""" + async with get_connection() as conn: + total_anchors = await conn.fetchval("SELECT COUNT(*) FROM anchors") + confirmed_anchors = await conn.fetchval( + "SELECT COUNT(*) FROM anchors WHERE confirmed_at IS NOT NULL" + ) + pending_anchors = await conn.fetchval( + "SELECT COUNT(*) FROM anchors WHERE confirmed_at IS NULL" + ) + anchored_activities = await conn.fetchval( + "SELECT COUNT(*) FROM activities WHERE anchor_root IS NOT NULL" + ) + unanchored_activities = await conn.fetchval( + "SELECT COUNT(*) FROM activities WHERE anchor_root IS NULL" + ) + return { + "total_anchors": total_anchors, + "confirmed_anchors": confirmed_anchors, + "pending_anchors": pending_anchors, + "anchored_activities": anchored_activities, + "unanchored_activities": unanchored_activities + } + + +# ============ User Renderers (L1 attachments) ============ + +async def get_user_renderers(username: str) -> list[str]: + """Get L1 renderer URLs attached by a user.""" + async with get_connection() as conn: + rows = await conn.fetch( + "SELECT l1_url FROM user_renderers WHERE username = $1 ORDER BY attached_at", + username + ) + return [row["l1_url"] for row in rows] + + +async def attach_renderer(username: str, l1_url: str) -> bool: + """Attach a user to an L1 renderer. 
Returns True if newly attached.""" + async with get_connection() as conn: + try: + await conn.execute( + """INSERT INTO user_renderers (username, l1_url) + VALUES ($1, $2) + ON CONFLICT (username, l1_url) DO NOTHING""", + username, l1_url + ) + return True + except Exception: + return False + + +async def detach_renderer(username: str, l1_url: str) -> bool: + """Detach a user from an L1 renderer. Returns True if was attached.""" + async with get_connection() as conn: + result = await conn.execute( + "DELETE FROM user_renderers WHERE username = $1 AND l1_url = $2", + username, l1_url + ) + return "DELETE 1" in result + + +# ============ User Storage ============ + +async def get_user_storage(username: str) -> list[dict]: + """Get all storage providers for a user.""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT id, username, provider_type, provider_name, description, config, + capacity_gb, is_active, created_at, updated_at + FROM user_storage WHERE username = $1 + ORDER BY provider_type, created_at""", + username + ) + return [dict(row) for row in rows] + + +async def get_user_storage_by_type(username: str, provider_type: str) -> list[dict]: + """Get storage providers of a specific type for a user.""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT id, username, provider_type, provider_name, description, config, + capacity_gb, is_active, created_at, updated_at + FROM user_storage WHERE username = $1 AND provider_type = $2 + ORDER BY created_at""", + username, provider_type + ) + return [dict(row) for row in rows] + + +async def get_storage_by_id(storage_id: int) -> Optional[dict]: + """Get a storage provider by ID.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """SELECT id, username, provider_type, provider_name, description, config, + capacity_gb, is_active, created_at, updated_at + FROM user_storage WHERE id = $1""", + storage_id + ) + return dict(row) if row else None + + 
+async def add_user_storage( + username: str, + provider_type: str, + provider_name: str, + config: dict, + capacity_gb: int, + description: Optional[str] = None +) -> Optional[int]: + """Add a storage provider for a user. Returns storage ID.""" + async with get_connection() as conn: + try: + row = await conn.fetchrow( + """INSERT INTO user_storage (username, provider_type, provider_name, description, config, capacity_gb) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING id""", + username, provider_type, provider_name, description, json.dumps(config), capacity_gb + ) + return row["id"] if row else None + except Exception: + return None + + +async def update_user_storage( + storage_id: int, + provider_name: Optional[str] = None, + description: Optional[str] = None, + config: Optional[dict] = None, + capacity_gb: Optional[int] = None, + is_active: Optional[bool] = None +) -> bool: + """Update a storage provider.""" + updates = [] + params = [] + param_num = 1 + + if provider_name is not None: + updates.append(f"provider_name = ${param_num}") + params.append(provider_name) + param_num += 1 + if description is not None: + updates.append(f"description = ${param_num}") + params.append(description) + param_num += 1 + if config is not None: + updates.append(f"config = ${param_num}") + params.append(json.dumps(config)) + param_num += 1 + if capacity_gb is not None: + updates.append(f"capacity_gb = ${param_num}") + params.append(capacity_gb) + param_num += 1 + if is_active is not None: + updates.append(f"is_active = ${param_num}") + params.append(is_active) + param_num += 1 + + if not updates: + return False + + updates.append("updated_at = NOW()") + params.append(storage_id) + + async with get_connection() as conn: + result = await conn.execute( + f"UPDATE user_storage SET {', '.join(updates)} WHERE id = ${param_num}", + *params + ) + return "UPDATE 1" in result + + +async def remove_user_storage(storage_id: int) -> bool: + """Remove a storage provider. 
Cascades to storage_pins.""" + async with get_connection() as conn: + result = await conn.execute( + "DELETE FROM user_storage WHERE id = $1", + storage_id + ) + return "DELETE 1" in result + + +async def get_storage_usage(storage_id: int) -> dict: + """Get storage usage stats for a provider.""" + async with get_connection() as conn: + row = await conn.fetchrow( + """SELECT + COUNT(*) as pin_count, + COALESCE(SUM(size_bytes), 0) as used_bytes + FROM storage_pins WHERE storage_id = $1""", + storage_id + ) + return {"pin_count": row["pin_count"], "used_bytes": row["used_bytes"]} + + +async def add_storage_pin( + content_hash: str, + storage_id: int, + ipfs_cid: Optional[str], + pin_type: str, + size_bytes: int +) -> Optional[int]: + """Add a pin record. Returns pin ID.""" + async with get_connection() as conn: + try: + row = await conn.fetchrow( + """INSERT INTO storage_pins (content_hash, storage_id, ipfs_cid, pin_type, size_bytes) + VALUES ($1, $2, $3, $4, $5) + ON CONFLICT (content_hash, storage_id) DO UPDATE SET + ipfs_cid = EXCLUDED.ipfs_cid, + pin_type = EXCLUDED.pin_type, + size_bytes = EXCLUDED.size_bytes, + pinned_at = NOW() + RETURNING id""", + content_hash, storage_id, ipfs_cid, pin_type, size_bytes + ) + return row["id"] if row else None + except Exception: + return None + + +async def remove_storage_pin(content_hash: str, storage_id: int) -> bool: + """Remove a pin record.""" + async with get_connection() as conn: + result = await conn.execute( + "DELETE FROM storage_pins WHERE content_hash = $1 AND storage_id = $2", + content_hash, storage_id + ) + return "DELETE 1" in result + + +async def get_pins_for_content(content_hash: str) -> list[dict]: + """Get all storage locations where content is pinned.""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT sp.*, us.provider_type, us.provider_name, us.username + FROM storage_pins sp + JOIN user_storage us ON sp.storage_id = us.id + WHERE sp.content_hash = $1""", + content_hash + ) 
+ return [dict(row) for row in rows] + + +async def get_all_active_storage() -> list[dict]: + """Get all active storage providers (for distributed pinning).""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT us.id, us.username, us.provider_type, us.provider_name, us.description, + us.config, us.capacity_gb, us.is_active, us.created_at, us.updated_at, + COALESCE(SUM(sp.size_bytes), 0) as used_bytes, + COUNT(sp.id) as pin_count + FROM user_storage us + LEFT JOIN storage_pins sp ON us.id = sp.storage_id + WHERE us.is_active = true + GROUP BY us.id + ORDER BY us.provider_type, us.created_at""" + ) + return [dict(row) for row in rows] + + +# ============ Token Revocation ============ + +async def revoke_token(token_hash: str, username: str, expires_at) -> bool: + """Revoke a token. Returns True if newly revoked.""" + async with get_connection() as conn: + try: + await conn.execute( + """INSERT INTO revoked_tokens (token_hash, username, expires_at) + VALUES ($1, $2, $3) + ON CONFLICT (token_hash) DO NOTHING""", + token_hash, username, expires_at + ) + return True + except Exception: + return False + + +async def is_token_revoked(token_hash: str) -> bool: + """Check if a token has been revoked.""" + async with get_connection() as conn: + row = await conn.fetchrow( + "SELECT 1 FROM revoked_tokens WHERE token_hash = $1 AND expires_at > NOW()", + token_hash + ) + return row is not None + + +async def cleanup_expired_revocations() -> int: + """Remove expired revocation entries. 
Returns count removed.""" + async with get_connection() as conn: + result = await conn.execute( + "DELETE FROM revoked_tokens WHERE expires_at < NOW()" + ) + # Extract count from "DELETE N" + try: + return int(result.split()[-1]) + except (ValueError, IndexError): + return 0 + + +# ============ Additional helper functions ============ + +async def get_user_assets(username: str, offset: int = 0, limit: int = 20, asset_type: str = None) -> list[dict]: + """Get assets owned by a user with pagination.""" + async with get_connection() as conn: + if asset_type: + rows = await conn.fetch( + """SELECT * FROM assets WHERE owner = $1 AND asset_type = $2 + ORDER BY created_at DESC LIMIT $3 OFFSET $4""", + username, asset_type, limit, offset + ) + else: + rows = await conn.fetch( + """SELECT * FROM assets WHERE owner = $1 + ORDER BY created_at DESC LIMIT $2 OFFSET $3""", + username, limit, offset + ) + return [dict(row) for row in rows] + + +async def delete_asset(asset_id: str) -> bool: + """Delete an asset by name/id.""" + async with get_connection() as conn: + result = await conn.execute("DELETE FROM assets WHERE name = $1", asset_id) + return "DELETE 1" in result + + +async def count_users() -> int: + """Count total users.""" + async with get_connection() as conn: + return await conn.fetchval("SELECT COUNT(*) FROM users") + + +async def count_user_activities(username: str) -> int: + """Count activities by a user.""" + async with get_connection() as conn: + return await conn.fetchval( + "SELECT COUNT(*) FROM activities WHERE actor_id LIKE $1", + f"%{username}%" + ) + + +async def get_user_activities(username: str, limit: int = 20, offset: int = 0) -> list[dict]: + """Get activities by a user.""" + async with get_connection() as conn: + rows = await conn.fetch( + """SELECT activity_id, activity_type, actor_id, object_data, published, signature + FROM activities WHERE actor_id LIKE $1 + ORDER BY published DESC LIMIT $2 OFFSET $3""", + f"%{username}%", limit, offset + ) + 
return [_parse_activity_row(row) for row in rows] + + +async def get_renderer(renderer_id: str) -> Optional[dict]: + """Get a renderer by ID/URL.""" + async with get_connection() as conn: + row = await conn.fetchrow( + "SELECT * FROM user_renderers WHERE l1_url = $1", + renderer_id + ) + return dict(row) if row else None + + +async def update_anchor(anchor_id: str, **updates) -> bool: + """Update an anchor.""" + async with get_connection() as conn: + if "bitcoin_txid" in updates: + result = await conn.execute( + """UPDATE anchors SET bitcoin_txid = $1, confirmed_at = NOW() + WHERE merkle_root = $2""", + updates["bitcoin_txid"], anchor_id + ) + return "UPDATE 1" in result + return False + + +async def delete_anchor(anchor_id: str) -> bool: + """Delete an anchor.""" + async with get_connection() as conn: + result = await conn.execute( + "DELETE FROM anchors WHERE merkle_root = $1", anchor_id + ) + return "DELETE 1" in result + + +async def record_run(run_id: str, username: str, recipe: str, inputs: list, + output_hash: str, ipfs_cid: str = None, asset_id: str = None) -> dict: + """Record a completed run.""" + async with get_connection() as conn: + # Check if runs table exists, if not just return the data + try: + row = await conn.fetchrow( + """INSERT INTO runs (run_id, username, recipe, inputs, output_hash, ipfs_cid, asset_id, created_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, NOW()) + ON CONFLICT (run_id) DO UPDATE SET + output_hash = EXCLUDED.output_hash, + ipfs_cid = EXCLUDED.ipfs_cid, + asset_id = EXCLUDED.asset_id + RETURNING *""", + run_id, username, recipe, json.dumps(inputs), output_hash, ipfs_cid, asset_id + ) + return dict(row) if row else None + except Exception: + # Table might not exist + return {"run_id": run_id, "username": username, "recipe": recipe} + + +async def get_run(run_id: str) -> Optional[dict]: + """Get a run by ID.""" + async with get_connection() as conn: + try: + row = await conn.fetchrow("SELECT * FROM runs WHERE run_id = $1", run_id) + 
if row: + result = dict(row) + if result.get("inputs") and isinstance(result["inputs"], str): + result["inputs"] = json.loads(result["inputs"]) + return result + except Exception: + pass + return None diff --git a/deploy.sh b/deploy.sh new file mode 100755 index 0000000..aac2460 --- /dev/null +++ b/deploy.sh @@ -0,0 +1,19 @@ +#!/bin/bash +set -e + +cd "$(dirname "$0")" + +echo "=== Pulling latest code ===" +git pull + +echo "=== Building Docker image ===" +docker build --build-arg CACHEBUST=$(date +%s) -t git.rose-ash.com/art-dag/l2-server:latest . + +echo "=== Redeploying activitypub stack ===" +docker stack deploy -c docker-compose.yml activitypub + +echo "=== Restarting proxy nginx ===" +docker service update --force proxy_nginx + +echo "=== Done ===" +docker stack services activitypub diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..0f67e81 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,90 @@ +version: "3.8" + +services: + postgres: + image: postgres:16-alpine + env_file: + - .env + environment: + POSTGRES_USER: artdag + POSTGRES_DB: artdag + volumes: + - postgres_data:/var/lib/postgresql/data + networks: + - internal + healthcheck: + test: ["CMD-SHELL", "pg_isready -U artdag"] + interval: 5s + timeout: 5s + retries: 5 + deploy: + placement: + constraints: + - node.labels.gpu != true + + ipfs: + image: ipfs/kubo:latest + ports: + - "4002:4001" # Swarm TCP (4002 external, L1 uses 4001) + - "4002:4001/udp" # Swarm UDP + volumes: + - ipfs_data:/data/ipfs + networks: + - internal + - externalnet # For gateway access + deploy: + replicas: 1 + restart_policy: + condition: on-failure + placement: + constraints: + - node.labels.gpu != true + + l2-server: + image: registry.rose-ash.com:5000/l2-server:latest + env_file: + - .env + environment: + - ARTDAG_DATA=/data/l2 + - IPFS_API=/dns/ipfs/tcp/5001 + - ANCHOR_BACKUP_DIR=/data/anchors + # Coop app internal URLs for fragment composition + - INTERNAL_URL_BLOG=http://blog:8000 + - 
INTERNAL_URL_CART=http://cart:8000 + - INTERNAL_URL_ACCOUNT=http://account:8000 + # DATABASE_URL, ARTDAG_DOMAIN, ARTDAG_USER, JWT_SECRET from .env file + healthcheck: + test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8200/')"] + interval: 10s + timeout: 5s + retries: 3 + start_period: 15s + volumes: + - l2_data:/data/l2 # Still needed for RSA keys + - anchor_backup:/data/anchors # Persistent anchor proofs (survives DB wipes) + networks: + - internal + - externalnet + depends_on: + - postgres + - ipfs + deploy: + replicas: 1 + update_config: + order: start-first + restart_policy: + condition: on-failure + placement: + constraints: + - node.labels.gpu != true + +volumes: + l2_data: + postgres_data: + ipfs_data: + anchor_backup: # Persistent - don't delete when resetting DB + +networks: + internal: + externalnet: + external: true diff --git a/docker-stack.yml b/docker-stack.yml new file mode 100644 index 0000000..3411aeb --- /dev/null +++ b/docker-stack.yml @@ -0,0 +1,91 @@ +version: "3.8" + +# Full Art DAG stack for Docker Swarm deployment +# Deploy with: docker stack deploy -c docker-stack.yml artdag + +services: + # Redis for L1 + redis: + image: redis:7-alpine + volumes: + - redis_data:/data + networks: + - artdag + deploy: + replicas: 1 + placement: + constraints: + - node.role == manager + restart_policy: + condition: on-failure + + # L1 Server (API) + l1-server: + image: git.rose-ash.com/art-dag/l1-server:latest + ports: + - "8100:8100" + env_file: + - .env + environment: + - REDIS_URL=redis://redis:6379/5 + - CACHE_DIR=/data/cache + # L1_PUBLIC_URL, L2_SERVER, L2_DOMAIN from .env file + volumes: + - l1_cache:/data/cache + depends_on: + - redis + networks: + - artdag + deploy: + replicas: 1 + restart_policy: + condition: on-failure + + # L1 Worker (Celery) + l1-worker: + image: git.rose-ash.com/art-dag/l1-server:latest + command: celery -A celery_app worker --loglevel=info + environment: + - 
REDIS_URL=redis://redis:6379/5 + - CACHE_DIR=/data/cache + - C_FORCE_ROOT=true + volumes: + - l1_cache:/data/cache + depends_on: + - redis + networks: + - artdag + deploy: + replicas: 2 + restart_policy: + condition: on-failure + + # L2 Server (ActivityPub) + l2-server: + image: git.rose-ash.com/art-dag/l2-server:latest + ports: + - "8200:8200" + env_file: + - .env + environment: + - ARTDAG_DATA=/data/l2 + # ARTDAG_DOMAIN, JWT_SECRET from .env file (multi-actor, no ARTDAG_USER) + volumes: + - l2_data:/data/l2 + depends_on: + - l1-server + networks: + - artdag + deploy: + replicas: 1 + restart_policy: + condition: on-failure + +volumes: + redis_data: + l1_cache: + l2_data: + +networks: + artdag: + driver: overlay diff --git a/ipfs_client.py b/ipfs_client.py new file mode 100644 index 0000000..108327b --- /dev/null +++ b/ipfs_client.py @@ -0,0 +1,226 @@ +# art-activity-pub/ipfs_client.py +""" +IPFS client for Art DAG L2 server. + +Provides functions to fetch, pin, and add content to IPFS. +Uses direct HTTP API calls for compatibility with all Kubo versions. 
+""" + +import json +import logging +import os +import re +from typing import Optional + +import requests + + +class IPFSError(Exception): + """Raised when an IPFS operation fails.""" + pass + +logger = logging.getLogger(__name__) + +# IPFS API multiaddr - default to local, docker uses /dns/ipfs/tcp/5001 +IPFS_API = os.getenv("IPFS_API", "/ip4/127.0.0.1/tcp/5001") + +# Connection timeout in seconds +IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "60")) + + +def _multiaddr_to_url(multiaddr: str) -> str: + """Convert IPFS multiaddr to HTTP URL.""" + # Handle /dns/hostname/tcp/port format + dns_match = re.match(r"/dns[46]?/([^/]+)/tcp/(\d+)", multiaddr) + if dns_match: + return f"http://{dns_match.group(1)}:{dns_match.group(2)}" + + # Handle /ip4/address/tcp/port format + ip4_match = re.match(r"/ip4/([^/]+)/tcp/(\d+)", multiaddr) + if ip4_match: + return f"http://{ip4_match.group(1)}:{ip4_match.group(2)}" + + # Fallback: assume it's already a URL or use default + if multiaddr.startswith("http"): + return multiaddr + return "http://127.0.0.1:5001" + + +# Base URL for IPFS API +IPFS_BASE_URL = _multiaddr_to_url(IPFS_API) + + +def get_bytes(cid: str) -> Optional[bytes]: + """ + Retrieve content from IPFS by CID. + + Args: + cid: IPFS CID to retrieve + + Returns: + Content as bytes or None on failure + """ + try: + url = f"{IPFS_BASE_URL}/api/v0/cat" + params = {"arg": cid} + + response = requests.post(url, params=params, timeout=IPFS_TIMEOUT) + response.raise_for_status() + data = response.content + + logger.info(f"Retrieved from IPFS: {cid} ({len(data)} bytes)") + return data + except Exception as e: + logger.error(f"Failed to get from IPFS: {e}") + return None + + +def pin(cid: str) -> bool: + """ + Pin a CID on this node. 
+ + Args: + cid: IPFS CID to pin + + Returns: + True on success, False on failure + """ + try: + url = f"{IPFS_BASE_URL}/api/v0/pin/add" + params = {"arg": cid} + + response = requests.post(url, params=params, timeout=IPFS_TIMEOUT) + response.raise_for_status() + + logger.info(f"Pinned on IPFS: {cid}") + return True + except Exception as e: + logger.error(f"Failed to pin on IPFS: {e}") + return False + + +def unpin(cid: str) -> bool: + """ + Unpin a CID from this node. + + Args: + cid: IPFS CID to unpin + + Returns: + True on success, False on failure + """ + try: + url = f"{IPFS_BASE_URL}/api/v0/pin/rm" + params = {"arg": cid} + + response = requests.post(url, params=params, timeout=IPFS_TIMEOUT) + response.raise_for_status() + + logger.info(f"Unpinned from IPFS: {cid}") + return True + except Exception as e: + logger.error(f"Failed to unpin from IPFS: {e}") + return False + + +def is_available() -> bool: + """ + Check if IPFS daemon is available. + + Returns: + True if IPFS is available, False otherwise + """ + try: + url = f"{IPFS_BASE_URL}/api/v0/id" + response = requests.post(url, timeout=5) + return response.status_code == 200 + except Exception: + return False + + +def get_node_id() -> Optional[str]: + """ + Get this IPFS node's peer ID. + + Returns: + Peer ID string or None on failure + """ + try: + url = f"{IPFS_BASE_URL}/api/v0/id" + response = requests.post(url, timeout=IPFS_TIMEOUT) + response.raise_for_status() + return response.json().get("ID") + except Exception as e: + logger.error(f"Failed to get node ID: {e}") + return None + + +def add_bytes(data: bytes, pin: bool = True) -> str: + """ + Add bytes data to IPFS and optionally pin it. 
+ + Args: + data: Bytes to add + pin: Whether to pin the data (default: True) + + Returns: + IPFS CID + + Raises: + IPFSError: If adding fails + """ + try: + url = f"{IPFS_BASE_URL}/api/v0/add" + params = {"pin": str(pin).lower()} + files = {"file": ("data", data)} + + response = requests.post(url, params=params, files=files, timeout=IPFS_TIMEOUT) + response.raise_for_status() + result = response.json() + cid = result["Hash"] + + logger.info(f"Added to IPFS: {len(data)} bytes -> {cid}") + return cid + except Exception as e: + logger.error(f"Failed to add bytes to IPFS: {e}") + raise IPFSError(f"Failed to add bytes to IPFS: {e}") from e + + +def add_json(data: dict) -> str: + """ + Serialize dict to JSON and add to IPFS. + + Args: + data: Dictionary to serialize and store + + Returns: + IPFS CID + + Raises: + IPFSError: If adding fails + """ + json_bytes = json.dumps(data, indent=2, sort_keys=True).encode('utf-8') + return add_bytes(json_bytes, pin=True) + + +def pin_or_raise(cid: str) -> None: + """ + Pin a CID on IPFS. Raises exception on failure. + + Args: + cid: IPFS CID to pin + + Raises: + IPFSError: If pinning fails + """ + try: + url = f"{IPFS_BASE_URL}/api/v0/pin/add" + params = {"arg": cid} + + response = requests.post(url, params=params, timeout=IPFS_TIMEOUT) + response.raise_for_status() + + logger.info(f"Pinned on IPFS: {cid}") + except Exception as e: + logger.error(f"Failed to pin on IPFS: {e}") + raise IPFSError(f"Failed to pin {cid}: {e}") from e diff --git a/keys.py b/keys.py new file mode 100644 index 0000000..247a558 --- /dev/null +++ b/keys.py @@ -0,0 +1,119 @@ +""" +Key management for ActivityPub signing. 
+
+Keys are stored in DATA_DIR/keys/:
+- {username}.pem - Private key (chmod 600)
+- {username}.pub - Public key
+"""
+
+import base64
+import hashlib  # NOTE(review): not used in this module as shown -- confirm before removing
+import json
+from datetime import datetime, timezone
+from pathlib import Path
+
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import rsa, padding
+
+
+def get_keys_dir(data_dir: Path) -> Path:
+    """Return DATA_DIR/keys, creating the directory (and parents) if needed."""
+    keys_dir = data_dir / "keys"
+    keys_dir.mkdir(parents=True, exist_ok=True)
+    return keys_dir
+
+
+def generate_keypair(data_dir: Path, username: str) -> tuple[str, str]:
+    """Generate a 2048-bit RSA keypair for a user and persist it to disk.
+
+    The private key is written to {username}.pem (unencrypted PKCS#8 PEM,
+    chmod 600) and the public key to {username}.pub (SubjectPublicKeyInfo
+    PEM) under the keys directory.
+
+    Returns (private_pem, public_pem)
+    """
+    keys_dir = get_keys_dir(data_dir)
+    private_path = keys_dir / f"{username}.pem"
+    public_path = keys_dir / f"{username}.pub"
+
+    # Generate key (65537 is the conventional RSA public exponent)
+    private_key = rsa.generate_private_key(
+        public_exponent=65537,
+        key_size=2048,
+    )
+
+    # Serialize private key (unencrypted PKCS#8 PEM)
+    private_pem = private_key.private_bytes(
+        encoding=serialization.Encoding.PEM,
+        format=serialization.PrivateFormat.PKCS8,
+        encryption_algorithm=serialization.NoEncryption()
+    ).decode()
+
+    # Serialize public key
+    public_pem = private_key.public_key().public_bytes(
+        encoding=serialization.Encoding.PEM,
+        format=serialization.PublicFormat.SubjectPublicKeyInfo
+    ).decode()
+
+    # Save keys; restrict the private key to owner read/write only
+    private_path.write_text(private_pem)
+    private_path.chmod(0o600)
+    public_path.write_text(public_pem)
+
+    return private_pem, public_pem
+
+
+def load_private_key(data_dir: Path, username: str):
+    """Load and deserialize the user's private key for signing.
+
+    Raises:
+        FileNotFoundError: If no private key file exists for the user.
+    """
+    keys_dir = get_keys_dir(data_dir)
+    private_path = keys_dir / f"{username}.pem"
+
+    if not private_path.exists():
+        raise FileNotFoundError(f"Private key not found: {private_path}")
+
+    private_pem = private_path.read_text()
+    return serialization.load_pem_private_key(
+        private_pem.encode(),
+        password=None
+    )
+
+
+def load_public_key_pem(data_dir: Path, username: str) -> str:
+    """Load the user's public key PEM (as stored) for the actor profile.
+
+    Raises:
+        FileNotFoundError: If no public key file exists for the user.
+    """
+    keys_dir = get_keys_dir(data_dir)
+    public_path = keys_dir / f"{username}.pub"
+
+    if not public_path.exists():
+        raise FileNotFoundError(f"Public key not found: {public_path}")
+
+    return public_path.read_text()
+
+
+def has_keys(data_dir: Path, username: str) -> bool:
+    """Return True if a private key file exists for the user."""
+    keys_dir = get_keys_dir(data_dir)
+    return (keys_dir / f"{username}.pem").exists()
+
+
+def sign_data(private_key, data: str) -> str:
+    """Sign UTF-8-encoded data with PKCS#1 v1.5 / SHA-256; return base64 signature."""
+    signature = private_key.sign(
+        data.encode(),
+        padding.PKCS1v15(),
+        hashes.SHA256()
+    )
+    return base64.b64encode(signature).decode()
+
+
+def create_signature(data_dir: Path, username: str, domain: str, activity: dict) -> dict:
+    """Create an RsaSignature2017-style signature block for an activity.
+
+    NOTE(review): the activity is canonicalized as compact sorted-key JSON,
+    not via RDF (URDNA2015) normalization as Linked Data Signatures
+    prescribe -- remote servers verifying strictly may reject it. Confirm
+    this matches the verification side.
+    """
+    private_key = load_private_key(data_dir, username)
+
+    # Create canonical JSON for signing (deterministic: sorted keys, no spaces)
+    canonical = json.dumps(activity, sort_keys=True, separators=(',', ':'))
+
+    # Sign
+    signature_value = sign_data(private_key, canonical)
+
+    return {
+        "type": "RsaSignature2017",
+        "creator": f"https://{domain}/users/{username}#main-key",
+        "created": datetime.now(timezone.utc).isoformat(),
+        "signatureValue": signature_value
+    }
diff --git a/migrate.py b/migrate.py
new file mode 100755
index 0000000..146c487
--- /dev/null
+++ b/migrate.py
@@ -0,0 +1,245 @@
+#!/usr/bin/env python3
+"""
+Migration script: JSON files to PostgreSQL.
+ +Usage: + python migrate.py [--dry-run] + +Migrates: +- users.json -> users table +- registry.json -> assets table +- activities.json -> activities table +- followers.json -> followers table + +Does NOT migrate: +- keys/ directory (stays as files) +""" + +import asyncio +import json +import os +import sys +from pathlib import Path +from datetime import datetime, timezone +from uuid import UUID + +import asyncpg + +# Configuration +DATA_DIR = Path(os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2"))) +DATABASE_URL = os.environ.get("DATABASE_URL") +if not DATABASE_URL: + raise RuntimeError("DATABASE_URL environment variable is required") + +SCHEMA = """ +-- Drop existing tables (careful in production!) +DROP TABLE IF EXISTS followers CASCADE; +DROP TABLE IF EXISTS activities CASCADE; +DROP TABLE IF EXISTS assets CASCADE; +DROP TABLE IF EXISTS users CASCADE; + +-- Users table +CREATE TABLE users ( + username VARCHAR(255) PRIMARY KEY, + password_hash VARCHAR(255) NOT NULL, + email VARCHAR(255), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Assets table +CREATE TABLE assets ( + name VARCHAR(255) PRIMARY KEY, + content_hash VARCHAR(128) NOT NULL, + asset_type VARCHAR(50) NOT NULL, + tags JSONB DEFAULT '[]'::jsonb, + metadata JSONB DEFAULT '{}'::jsonb, + url TEXT, + provenance JSONB, + description TEXT, + origin JSONB, + owner VARCHAR(255) NOT NULL REFERENCES users(username), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ +); + +-- Activities table +CREATE TABLE activities ( + activity_id UUID PRIMARY KEY, + activity_type VARCHAR(50) NOT NULL, + actor_id TEXT NOT NULL, + object_data JSONB NOT NULL, + published TIMESTAMPTZ NOT NULL, + signature JSONB +); + +-- Followers table +CREATE TABLE followers ( + id SERIAL PRIMARY KEY, + username VARCHAR(255) NOT NULL REFERENCES users(username), + acct VARCHAR(255) NOT NULL, + url TEXT NOT NULL, + public_key TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + 
UNIQUE(username, acct) +); + +-- Indexes +CREATE INDEX idx_users_created_at ON users(created_at); +CREATE INDEX idx_assets_content_hash ON assets(content_hash); +CREATE INDEX idx_assets_owner ON assets(owner); +CREATE INDEX idx_assets_created_at ON assets(created_at DESC); +CREATE INDEX idx_assets_tags ON assets USING GIN(tags); +CREATE INDEX idx_activities_actor_id ON activities(actor_id); +CREATE INDEX idx_activities_published ON activities(published DESC); +CREATE INDEX idx_followers_username ON followers(username); +""" + + +async def migrate(dry_run: bool = False): + """Run the migration.""" + print(f"Migrating from {DATA_DIR} to PostgreSQL") + print(f"Database: {DATABASE_URL}") + print(f"Dry run: {dry_run}") + print() + + # Load JSON files + users = load_json(DATA_DIR / "users.json") or {} + registry = load_json(DATA_DIR / "registry.json") or {"assets": {}} + activities_data = load_json(DATA_DIR / "activities.json") or {"activities": []} + followers = load_json(DATA_DIR / "followers.json") or [] + + assets = registry.get("assets", {}) + activities = activities_data.get("activities", []) + + print(f"Found {len(users)} users") + print(f"Found {len(assets)} assets") + print(f"Found {len(activities)} activities") + print(f"Found {len(followers)} followers") + print() + + if dry_run: + print("DRY RUN - no changes made") + return + + # Connect and migrate + conn = await asyncpg.connect(DATABASE_URL) + try: + # Create schema + print("Creating schema...") + await conn.execute(SCHEMA) + + # Migrate users + print("Migrating users...") + for username, user_data in users.items(): + await conn.execute( + """INSERT INTO users (username, password_hash, email, created_at) + VALUES ($1, $2, $3, $4)""", + username, + user_data["password_hash"], + user_data.get("email"), + parse_timestamp(user_data.get("created_at")) + ) + print(f" Migrated {len(users)} users") + + # Migrate assets + print("Migrating assets...") + for name, asset in assets.items(): + await conn.execute( + 
"""INSERT INTO assets (name, content_hash, asset_type, tags, metadata, + url, provenance, description, origin, owner, + created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)""", + name, + asset["content_hash"], + asset["asset_type"], + json.dumps(asset.get("tags", [])), + json.dumps(asset.get("metadata", {})), + asset.get("url"), + json.dumps(asset.get("provenance")) if asset.get("provenance") else None, + asset.get("description"), + json.dumps(asset.get("origin")) if asset.get("origin") else None, + asset["owner"], + parse_timestamp(asset.get("created_at")), + parse_timestamp(asset.get("updated_at")) + ) + print(f" Migrated {len(assets)} assets") + + # Migrate activities + print("Migrating activities...") + for activity in activities: + await conn.execute( + """INSERT INTO activities (activity_id, activity_type, actor_id, + object_data, published, signature) + VALUES ($1, $2, $3, $4, $5, $6)""", + UUID(activity["activity_id"]), + activity["activity_type"], + activity["actor_id"], + json.dumps(activity["object_data"]), + parse_timestamp(activity["published"]), + json.dumps(activity.get("signature")) if activity.get("signature") else None + ) + print(f" Migrated {len(activities)} activities") + + # Migrate followers + print("Migrating followers...") + if followers and users: + first_user = list(users.keys())[0] + migrated = 0 + for follower in followers: + if isinstance(follower, str): + # Old format: just URL string + await conn.execute( + """INSERT INTO followers (username, acct, url) + VALUES ($1, $2, $3) + ON CONFLICT DO NOTHING""", + first_user, + follower, + follower + ) + migrated += 1 + elif isinstance(follower, dict): + await conn.execute( + """INSERT INTO followers (username, acct, url, public_key) + VALUES ($1, $2, $3, $4) + ON CONFLICT DO NOTHING""", + follower.get("username", first_user), + follower.get("acct", follower.get("url", "")), + follower["url"], + follower.get("public_key") + ) + migrated += 1 + print(f" Migrated 
{migrated} followers") + else: + print(" No followers to migrate") + + print() + print("Migration complete!") + + finally: + await conn.close() + + +def load_json(path: Path) -> dict | list | None: + """Load JSON file if it exists.""" + if path.exists(): + with open(path) as f: + return json.load(f) + return None + + +def parse_timestamp(ts: str | None) -> datetime | None: + """Parse ISO timestamp string to datetime.""" + if not ts: + return datetime.now(timezone.utc) + try: + # Handle various ISO formats + if ts.endswith('Z'): + ts = ts[:-1] + '+00:00' + return datetime.fromisoformat(ts) + except Exception: + return datetime.now(timezone.utc) + + +if __name__ == "__main__": + dry_run = "--dry-run" in sys.argv + asyncio.run(migrate(dry_run)) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..94d1e5a --- /dev/null +++ b/requirements.txt @@ -0,0 +1,13 @@ +fastapi>=0.109.0 +uvicorn>=0.27.0 +requests>=2.31.0 +httpx>=0.27.0 +cryptography>=42.0.0 +bcrypt>=4.0.0 +python-jose[cryptography]>=3.3.0 +markdown>=3.5.0 +python-multipart>=0.0.6 +asyncpg>=0.29.0 +boto3>=1.34.0 +# Shared components +git+https://git.rose-ash.com/art-dag/common.git@889ea98 diff --git a/server.py b/server.py new file mode 100644 index 0000000..9c00b57 --- /dev/null +++ b/server.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python3 +""" +Art DAG L2 Server - ActivityPub + +Minimal entry point that uses the modular app factory. +All routes are defined in app/routers/. +All templates are in app/templates/. 
+"""
+
+import logging
+import os
+
+# Configure logging
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(asctime)s %(levelname)s %(name)s: %(message)s'
+)
+
+# Import the app from the factory
+from app import app
+
+if __name__ == "__main__":
+    import uvicorn
+    host = os.environ.get("HOST", "0.0.0.0")
+    port = int(os.environ.get("PORT", "8200"))
+    # NOTE(review): workers=4 runs separate processes; any in-process state
+    # created by the app factory is per-worker, not shared -- confirm intended.
+    uvicorn.run("server:app", host=host, port=port, workers=4)
diff --git a/server_legacy.py b/server_legacy.py
new file mode 100644
index 0000000..7ab9a56
--- /dev/null
+++ b/server_legacy.py
@@ -0,0 +1,3765 @@
+#!/usr/bin/env python3
+"""
+Art DAG L2 Server - ActivityPub
+
+Manages ownership registry, activities, and federation.
+- Registry of owned assets
+- ActivityPub actor endpoints
+- Sign and publish Create activities
+- Federation with other servers
+"""
+
+import hashlib
+import json
+import logging
+import os
+import uuid
+from contextlib import asynccontextmanager
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Optional
+from urllib.parse import urlparse
+
+# Configure logging
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(asctime)s %(levelname)s %(name)s: %(message)s'
+)
+logger = logging.getLogger(__name__)
+
+from fastapi import FastAPI, HTTPException, Request, Response, Depends, Cookie, Form
+from fastapi.responses import JSONResponse, HTMLResponse, RedirectResponse, FileResponse
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
+from pydantic import BaseModel
+import requests
+import markdown
+
+import db
+from auth import (
+    UserCreate, UserLogin, Token, User,
+    create_user, authenticate_user, create_access_token,
+    verify_token, get_token_claims, get_current_user
+)
+
+# Configuration
+DOMAIN = os.environ.get("ARTDAG_DOMAIN", "artdag.rose-ash.com")
+DATA_DIR = Path(os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2")))
+L1_PUBLIC_URL = os.environ.get("L1_PUBLIC_URL", "https://celery-artdag.rose-ash.com")
+
+EFFECTS_REPO_URL = os.environ.get("EFFECTS_REPO_URL", "https://git.rose-ash.com/art-dag/effects")
+IPFS_GATEWAY_URL = os.environ.get("IPFS_GATEWAY_URL", "")
+
+# Known L1 renderers (comma-separated URLs)
+L1_SERVERS_STR = os.environ.get("L1_SERVERS", "https://celery-artdag.rose-ash.com")
+L1_SERVERS = [s.strip() for s in L1_SERVERS_STR.split(",") if s.strip()]
+
+# Cookie domain for sharing auth across subdomains (e.g., ".rose-ash.com")
+# If not set, derives from DOMAIN (strips first subdomain, adds leading dot)
+def _get_cookie_domain():
+    """Return COOKIE_DOMAIN env value, or derive ".<last-two-labels>" from DOMAIN."""
+    env_val = os.environ.get("COOKIE_DOMAIN")
+    if env_val:
+        return env_val
+    # Derive from DOMAIN: artdag.rose-ash.com -> .rose-ash.com
+    # NOTE(review): keeps only the last two labels, so a multi-part public
+    # suffix (e.g. example.co.uk) would derive ".co.uk" -- confirm acceptable.
+    parts = DOMAIN.split(".")
+    if len(parts) >= 2:
+        return "." + ".".join(parts[-2:])
+    return None
+
+COOKIE_DOMAIN = _get_cookie_domain()
+
+# Ensure data directory exists
+DATA_DIR.mkdir(parents=True, exist_ok=True)
+(DATA_DIR / "assets").mkdir(exist_ok=True)
+
+
+def compute_run_id(input_hashes: list[str], recipe: str, recipe_hash: Optional[str] = None) -> str:
+    """
+    Compute a deterministic run_id from inputs and recipe.
+
+    The run_id is a SHA3-256 hash of:
+    - Sorted input content hashes
+    - Recipe identifier (recipe_hash if provided, else "effect:{recipe}")
+
+    This makes runs content-addressable: same inputs + recipe = same run_id.
+    Must match the L1 implementation exactly.
+    """
+    data = {
+        "inputs": sorted(input_hashes),
+        "recipe": recipe_hash or f"effect:{recipe}",
+        "version": "1",  # For future schema changes
+    }
+    # Compact, sorted-key JSON keeps the digest deterministic across runs
+    json_str = json.dumps(data, sort_keys=True, separators=(",", ":"))
+    return hashlib.sha3_256(json_str.encode()).hexdigest()
+
+# Load README (empty string if the file is absent)
+README_PATH = Path(__file__).parent / "README.md"
+README_CONTENT = ""
+if README_PATH.exists():
+    README_CONTENT = README_PATH.read_text()
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """Manage database connection pool lifecycle (open on startup, close on shutdown)."""
+    await db.init_pool()
+    yield
+    await db.close_pool()
+
+
+app = FastAPI(
+    title="Art DAG L2 Server",
+    description="ActivityPub server for Art DAG ownership and federation",
+    version="0.1.0",
+    lifespan=lifespan
+)
+
+
+@app.exception_handler(404)
+async def not_found_handler(request: Request, exc):
+    """Custom 404 page: HTML for browsers, JSON otherwise (by Accept header)."""
+    accept = request.headers.get("accept", "")
+    if "text/html" in accept and "application/json" not in accept:
+        content = '''
+

404

+

Page not found

+ Go to home page +
+ ''' + username = get_user_from_cookie(request) + return HTMLResponse(base_html("Not Found", content, username), status_code=404) + return JSONResponse({"detail": "Not found"}, status_code=404) + + +# ============ Data Models ============ + +class Asset(BaseModel): + """An owned asset.""" + name: str + content_hash: str + ipfs_cid: Optional[str] = None # IPFS content identifier + asset_type: str # image, video, effect, recipe, infrastructure + tags: list[str] = [] + metadata: dict = {} + url: Optional[str] = None + provenance: Optional[dict] = None + created_at: str = "" + + +class Activity(BaseModel): + """An ActivityPub activity.""" + activity_id: str + activity_type: str # Create, Update, Delete, Announce + actor_id: str + object_data: dict + published: str + signature: Optional[dict] = None + + +class RegisterRequest(BaseModel): + """Request to register an asset.""" + name: str + content_hash: str + ipfs_cid: Optional[str] = None # IPFS content identifier + asset_type: str + tags: list[str] = [] + metadata: dict = {} + url: Optional[str] = None + provenance: Optional[dict] = None + + +class RecordRunRequest(BaseModel): + """Request to record an L1 run.""" + run_id: str + l1_server: str # URL of the L1 server that has this run + output_name: Optional[str] = None # Deprecated - assets now named by content_hash + + +class PublishCacheRequest(BaseModel): + """Request to publish a cache item from L1.""" + content_hash: str + ipfs_cid: Optional[str] = None # IPFS content identifier + asset_name: str + asset_type: str = "image" + origin: dict # {type: "self"|"external", url?: str, note?: str} + description: Optional[str] = None + tags: list[str] = [] + metadata: dict = {} + + +class UpdateAssetRequest(BaseModel): + """Request to update an existing asset.""" + description: Optional[str] = None + tags: Optional[list[str]] = None + metadata: Optional[dict] = None + origin: Optional[dict] = None + ipfs_cid: Optional[str] = None # IPFS content identifier + + +class 
AddStorageRequest(BaseModel): + """Request to add a storage provider.""" + provider_type: str # 'pinata', 'web3storage', 'local' + provider_name: Optional[str] = None # User-friendly name + config: dict # Provider-specific config (api_key, path, etc.) + capacity_gb: int # Storage capacity in GB + + +class UpdateStorageRequest(BaseModel): + """Request to update a storage provider.""" + config: Optional[dict] = None + capacity_gb: Optional[int] = None + is_active: Optional[bool] = None + + +class SetAssetSourceRequest(BaseModel): + """Request to set source URL for an asset.""" + source_url: str + source_type: str # 'youtube', 'local', 'url' + + +# ============ Storage (Database) ============ + +async def load_registry() -> dict: + """Load registry from database.""" + assets = await db.get_all_assets() + return {"version": "1.0", "assets": assets} + + +async def load_activities() -> list: + """Load activities from database.""" + return await db.get_all_activities() + + +def load_actor(username: str) -> dict: + """Load actor data for a specific user with public key if available.""" + actor = { + "id": f"https://{DOMAIN}/users/{username}", + "type": "Person", + "preferredUsername": username, + "name": username, + "inbox": f"https://{DOMAIN}/users/{username}/inbox", + "outbox": f"https://{DOMAIN}/users/{username}/outbox", + "followers": f"https://{DOMAIN}/users/{username}/followers", + "following": f"https://{DOMAIN}/users/{username}/following", + } + + # Add public key if available + from keys import has_keys, load_public_key_pem + if has_keys(DATA_DIR, username): + actor["publicKey"] = { + "id": f"https://{DOMAIN}/users/{username}#main-key", + "owner": f"https://{DOMAIN}/users/{username}", + "publicKeyPem": load_public_key_pem(DATA_DIR, username) + } + + return actor + + +async def user_exists(username: str) -> bool: + """Check if a user exists.""" + return await db.user_exists(username) + + +async def load_followers() -> list: + """Load followers list from 
database.""" + return await db.get_all_followers() + + +# ============ Signing ============ + +from keys import has_keys, load_public_key_pem, create_signature + + +def sign_activity(activity: dict, username: str) -> dict: + """Sign an activity with the user's RSA private key.""" + if not has_keys(DATA_DIR, username): + # No keys - use placeholder (for testing) + activity["signature"] = { + "type": "RsaSignature2017", + "creator": f"https://{DOMAIN}/users/{username}#main-key", + "created": datetime.now(timezone.utc).isoformat(), + "signatureValue": "NO_KEYS_CONFIGURED" + } + else: + activity["signature"] = create_signature(DATA_DIR, username, DOMAIN, activity) + return activity + + +# ============ HTML Templates ============ + +# Tailwind CSS config for L2 - dark theme to match L1 +TAILWIND_CONFIG = ''' + + + +''' + + +def base_html(title: str, content: str, username: str = None) -> str: + """Base HTML template with Tailwind CSS dark theme.""" + user_section = f''' +
+ Logged in as {username} + + Logout + +
+ ''' if username else ''' +
+ Login + | + Register +
+ ''' + + return f''' + + + + + {title} - Art DAG L2 + {TAILWIND_CONFIG} + + +
+
+

+ Art DAG L2 +

+ {user_section} +
+ + + +
+ {content} +
+
+ +''' + + +def get_user_from_cookie(request: Request) -> Optional[str]: + """Get username from auth cookie.""" + token = request.cookies.get("auth_token") + if token: + return verify_token(token) + return None + + +def wants_html(request: Request) -> bool: + """Check if request wants HTML (browser) vs JSON (API).""" + accept = request.headers.get("accept", "") + return "text/html" in accept and "application/json" not in accept and "application/activity+json" not in accept + + +def format_date(value, length: int = 10) -> str: + """Format a date value (datetime or string) to a string, sliced to length.""" + if value is None: + return "" + if hasattr(value, 'isoformat'): + return value.isoformat()[:length] + if isinstance(value, str): + return value[:length] + return "" + + +# ============ Auth UI Endpoints ============ + +@app.get("/login", response_class=HTMLResponse) +async def ui_login_page(request: Request, return_to: str = None): + """Login page. Accepts optional return_to URL for redirect after login.""" + username = get_user_from_cookie(request) + if username: + return HTMLResponse(base_html("Already Logged In", f''' +
+ You are already logged in as {username} +
+

Go to home page

+ ''', username)) + + # Hidden field for return_to URL + return_to_field = f'' if return_to else '' + + content = f''' +

Login

+
+
+ {return_to_field} +
+ + +
+
+ + +
+ +
+

Don't have an account? Register

+ ''' + return HTMLResponse(base_html("Login", content)) + + +@app.post("/login", response_class=HTMLResponse) +async def ui_login_submit(request: Request): + """Handle login form submission.""" + form = await request.form() + username = form.get("username", "").strip() + password = form.get("password", "") + return_to = form.get("return_to", "").strip() + + if not username or not password: + return HTMLResponse('
Username and password are required
') + + user = await authenticate_user(DATA_DIR, username, password) + if not user: + return HTMLResponse('
Invalid username or password
') + + token = create_access_token(user.username, l2_server=f"https://{DOMAIN}") + + # If return_to is specified, redirect there with token for the other site to set its own cookie + if return_to and return_to.startswith("http"): + # Append token to return_to URL for the target site to set its own cookie + separator = "&" if "?" in return_to else "?" + redirect_url = f"{return_to}{separator}auth_token={token.access_token}" + response = HTMLResponse(f''' +
Login successful! Redirecting...
+ + ''') + else: + response = HTMLResponse(f''' +
Login successful! Redirecting...
+ + ''') + + # Set cookie for L2 only (L1 servers set their own cookies via /auth endpoint) + response.set_cookie( + key="auth_token", + value=token.access_token, + httponly=True, + max_age=60 * 60 * 24 * 30, # 30 days + samesite="lax", + secure=True + ) + return response + + +@app.get("/register", response_class=HTMLResponse) +async def ui_register_page(request: Request): + """Register page.""" + username = get_user_from_cookie(request) + if username: + return HTMLResponse(base_html("Already Logged In", f''' +
+ You are already logged in as {username} +
+

Go to home page

+ ''', username)) + + content = ''' +

Register

+
+
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+ +
+

Already have an account? Login

+ ''' + return HTMLResponse(base_html("Register", content)) + + +@app.post("/register", response_class=HTMLResponse) +async def ui_register_submit(request: Request): + """Handle register form submission.""" + form = await request.form() + username = form.get("username", "").strip() + email = form.get("email", "").strip() or None + password = form.get("password", "") + password2 = form.get("password2", "") + + if not username or not password: + return HTMLResponse('
Username and password are required
') + + if password != password2: + return HTMLResponse('
Passwords do not match
') + + if len(password) < 6: + return HTMLResponse('
Password must be at least 6 characters
') + + try: + user = await create_user(DATA_DIR, username, password, email) + except ValueError as e: + return HTMLResponse(f'
{str(e)}
') + + token = create_access_token(user.username, l2_server=f"https://{DOMAIN}") + + response = HTMLResponse(f''' +
Registration successful! Redirecting...
+ + ''') + response.set_cookie( + key="auth_token", + value=token.access_token, + httponly=True, + max_age=60 * 60 * 24 * 30, # 30 days + samesite="lax", + secure=True + ) + return response + + +@app.get("/logout") +async def logout(request: Request): + """Handle logout - clear cookie, revoke token on L2 and attached L1s, and redirect to home.""" + token = request.cookies.get("auth_token") + claims = get_token_claims(token) if token else None + username = claims.get("sub") if claims else None + + if username and token and claims: + # Revoke token in L2 database (so even if L1 ignores revoke, token won't verify) + token_hash = hashlib.sha256(token.encode()).hexdigest() + expires_at = datetime.fromtimestamp(claims.get("exp", 0), tz=timezone.utc) + await db.revoke_token(token_hash, username, expires_at) + + # Revoke ALL tokens for this user on attached L1 renderers + # (L1 may have scoped tokens different from L2's token) + attached = await db.get_user_renderers(username) + for l1_url in attached: + try: + requests.post( + f"{l1_url}/auth/revoke-user", + json={"username": username, "l2_server": f"https://{DOMAIN}"}, + timeout=5 + ) + except Exception as e: + logger.warning(f"Failed to revoke user tokens on {l1_url}: {e}") + + # Remove all attachments for this user + for l1_url in attached: + await db.detach_renderer(username, l1_url) + + response = RedirectResponse(url="/", status_code=302) + # Delete both legacy (no domain) and new (shared domain) cookies + response.delete_cookie("auth_token") + if COOKIE_DOMAIN: + response.delete_cookie("auth_token", domain=COOKIE_DOMAIN) + return response + + +# ============ HTML Rendering Helpers ============ + +async def ui_activity_detail(activity_index: int, request: Request): + """Activity detail page with full content display. Helper function for HTML rendering.""" + username = get_user_from_cookie(request) + activities = await load_activities() + + if activity_index < 0 or activity_index >= len(activities): + content = ''' +

Activity Not Found

+

This activity does not exist.

+

← Back to Activities

+ ''' + return HTMLResponse(base_html("Activity Not Found", content, username)) + + activity = activities[activity_index] + return await _render_activity_detail(activity, request) + + +async def ui_activity_detail_by_data(activity: dict, request: Request): + """Activity detail page taking activity data directly.""" + return await _render_activity_detail(activity, request) + + +async def _render_activity_detail(activity: dict, request: Request): + """Core activity detail rendering logic.""" + username = get_user_from_cookie(request) + activity_type = activity.get("activity_type", "") + activity_id = activity.get("activity_id", "") + actor_id = activity.get("actor_id", "") + actor_name = actor_id.split("/")[-1] if actor_id else "unknown" + published = format_date(activity.get("published")) + obj = activity.get("object_data", {}) + + # Object details + obj_name = obj.get("name", "Untitled") + obj_type = obj.get("type", "") + content_hash_obj = obj.get("contentHash", {}) + content_hash = content_hash_obj.get("value", "") if isinstance(content_hash_obj, dict) else "" + media_type = obj.get("mediaType", "") + description = obj.get("summary", "") or obj.get("content", "") + + # Provenance from object - or fallback to registry asset + provenance = obj.get("provenance", {}) + origin = obj.get("origin", {}) + + # Fallback: if activity doesn't have provenance, look up the asset from registry + if not provenance or not origin: + registry = await load_registry() + assets = registry.get("assets", {}) + # Find asset by content_hash or name + for asset_name, asset_data in assets.items(): + if asset_data.get("content_hash") == content_hash or asset_data.get("name") == obj_name: + if not provenance: + provenance = asset_data.get("provenance", {}) + if not origin: + origin = asset_data.get("origin", {}) + break + + # Type colors + type_color = "bg-green-600" if activity_type == "Create" else "bg-yellow-600" if activity_type == "Update" else "bg-gray-600" + obj_type_color = 
"bg-blue-600" if "Image" in obj_type else "bg-purple-600" if "Video" in obj_type else "bg-gray-600" + + # Determine L1 server and asset type + l1_server = provenance.get("l1_server", L1_PUBLIC_URL).rstrip("/") if provenance else L1_PUBLIC_URL.rstrip("/") + is_video = "Video" in obj_type or "video" in media_type + + # Content display + if is_video: + content_html = f''' + + ''' + elif "Image" in obj_type or "image" in media_type: + content_html = f''' +
+ {obj_name} + +
+ ''' + else: + content_html = f''' +
+

Content type: {media_type or obj_type}

+ + Download + +
+ ''' + + # Origin display + origin_html = 'Not specified' + if origin: + origin_type = origin.get("type", "") + if origin_type == "self": + origin_html = 'Original content by author' + elif origin_type == "external": + origin_url = origin.get("url", "") + origin_note = origin.get("note", "") + origin_html = f'{origin_url}' + if origin_note: + origin_html += f'

{origin_note}

' + + # Provenance section + provenance_html = "" + if provenance and provenance.get("recipe"): + recipe = provenance.get("recipe", "") + inputs = provenance.get("inputs", []) + l1_run_id = provenance.get("l1_run_id", "") + rendered_at = format_date(provenance.get("rendered_at")) + effects_commit = provenance.get("effects_commit", "") + effect_url = provenance.get("effect_url") + infrastructure = provenance.get("infrastructure", {}) + + if not effect_url: + if effects_commit and effects_commit != "unknown": + effect_url = f"{EFFECTS_REPO_URL}/src/commit/{effects_commit}/{recipe}" + else: + effect_url = f"{EFFECTS_REPO_URL}/src/branch/main/{recipe}" + + # Build inputs display - show actual content as thumbnails + inputs_html = "" + for inp in inputs: + inp_hash = inp.get("content_hash", "") if isinstance(inp, dict) else inp + if inp_hash: + inputs_html += f''' +
+
+ + Input +
+
+ {inp_hash[:16]}... + view +
+
+ ''' + + # Infrastructure display + infra_html = "" + if infrastructure: + software = infrastructure.get("software", {}) + hardware = infrastructure.get("hardware", {}) + if software or hardware: + infra_parts = [] + if software: + infra_parts.append(f"Software: {software.get('name', 'unknown')}") + if hardware: + infra_parts.append(f"Hardware: {hardware.get('name', 'unknown')}") + infra_html = f'

{" | ".join(infra_parts)}

' + + provenance_html = f''' +
+

Provenance

+

This content was created by applying an effect to input content.

+
+
+

Effect

+ + + + + {recipe} + + {f'
Commit: {effects_commit[:12]}...
' if effects_commit else ''} +
+
+

Input(s)

+ {inputs_html if inputs_html else 'No inputs recorded'} +
+
+

L1 Run

+ {l1_run_id[:20]}... +
+
+

Rendered

+ {rendered_at if rendered_at else 'Unknown'} + {infra_html} +
+
+
+ ''' + + content = f''' +

← Back to Activities

+ +
+ {activity_type} +

{obj_name}

+ {obj_type} +
+ + {content_html} + +
+
+
+

Actor

+ {actor_name} +
+ +
+

Description

+

{description if description else 'No description'}

+
+ +
+

Origin

+ {origin_html} +
+
+ +
+
+

Content Hash

+ {content_hash} +
+ +
+

Published

+ {published} +
+ +
+

Activity ID

+ {activity_id} +
+
+
+ + {provenance_html} + +
+

ActivityPub

+
+

+ Object URL: + https://{DOMAIN}/objects/{content_hash} +

+

+ Actor: + {actor_id} +

+
+
+ ''' + return HTMLResponse(base_html(f"Activity: {obj_name}", content, username)) + + +async def ui_asset_detail(name: str, request: Request): + """Asset detail page with content preview and provenance. Helper function for HTML rendering.""" + username = get_user_from_cookie(request) + registry = await load_registry() + assets = registry.get("assets", {}) + + if name not in assets: + content = f''' +

Asset Not Found

+

No asset named "{name}" exists.

+

← Back to Assets

+ ''' + return HTMLResponse(base_html("Asset Not Found", content, username)) + + asset = assets[name] + owner = asset.get("owner", "unknown") + content_hash = asset.get("content_hash", "") + ipfs_cid = asset.get("ipfs_cid", "") + asset_type = asset.get("asset_type", "") + tags = asset.get("tags", []) + description = asset.get("description", "") + origin = asset.get("origin") or {} + provenance = asset.get("provenance") or {} + metadata = asset.get("metadata") or {} + created_at = format_date(asset.get("created_at")) + + type_color = "bg-blue-600" if asset_type == "image" else "bg-purple-600" if asset_type == "video" else "bg-gray-600" + + # Determine L1 server URL for content + l1_server = provenance.get("l1_server", L1_PUBLIC_URL).rstrip("/") + + # Content display - image or video from L1 + if asset_type == "video": + # Use iOS-compatible MP4 endpoint + content_html = f''' + + ''' + elif asset_type == "image": + content_html = f''' +
+ {name} + +
+ ''' + elif asset_type == "recipe": + # Fetch recipe source from L1 or IPFS + recipe_source = "" + try: + resp = requests.get(f"{l1_server}/cache/{content_hash}", timeout=10, headers={"Accept": "text/plain"}) + if resp.status_code == 200: + recipe_source = resp.text + except Exception: + pass + + if not recipe_source and ipfs_cid: + # Try IPFS + try: + import ipfs_client + recipe_bytes = ipfs_client.get_bytes(ipfs_cid) + if recipe_bytes: + recipe_source = recipe_bytes.decode('utf-8') + except Exception: + pass + + import html as html_module + recipe_source_escaped = html_module.escape(recipe_source) if recipe_source else "(Could not load recipe source)" + + content_html = f''' +
+

Recipe Source

+
{recipe_source_escaped}
+ +
+ ''' + else: + content_html = f''' +
+

Content type: {asset_type}

+ + Download + +
+ ''' + + # Origin display + origin_html = 'Not specified' + if origin: + origin_type = origin.get("type", "unknown") + if origin_type == "self": + origin_html = 'Original content by author' + elif origin_type == "external": + origin_url = origin.get("url", "") + origin_note = origin.get("note", "") + origin_html = f'{origin_url}' + if origin_note: + origin_html += f'

{origin_note}

' + + # Tags display + tags_html = 'No tags' + if tags: + tags_html = " ".join([f'{t}' for t in tags]) + + # IPFS display + if ipfs_cid: + local_gateway = f'Local' if IPFS_GATEWAY_URL else '' + ipfs_html = f'''{ipfs_cid} +
+ {local_gateway} + ipfs.io + dweb.link +
''' + else: + ipfs_html = 'Not on IPFS' + + # Provenance section - for rendered outputs + provenance_html = "" + if provenance: + recipe = provenance.get("recipe", "") + inputs = provenance.get("inputs", []) + l1_run_id = provenance.get("l1_run_id", "") + rendered_at = format_date(provenance.get("rendered_at")) + effects_commit = provenance.get("effects_commit", "") + infrastructure = provenance.get("infrastructure", {}) + + # Use stored effect_url or build fallback + effect_url = provenance.get("effect_url") + if not effect_url: + # Fallback for older records + if effects_commit and effects_commit != "unknown": + effect_url = f"{EFFECTS_REPO_URL}/src/commit/{effects_commit}/{recipe}" + else: + effect_url = f"{EFFECTS_REPO_URL}/src/branch/main/{recipe}" + + # Build inputs display - show actual content as thumbnails + inputs_html = "" + for inp in inputs: + inp_hash = inp.get("content_hash", "") if isinstance(inp, dict) else inp + if inp_hash: + inputs_html += f''' +
+
+ + Input +
+
+ {inp_hash[:16]}... + view +
+
+ ''' + + # Infrastructure display + infra_html = "" + if infrastructure: + software = infrastructure.get("software", {}) + hardware = infrastructure.get("hardware", {}) + if software or hardware: + infra_html = f''' +
+

Infrastructure

+
+ {f"Software: {software.get('name', 'unknown')}" if software else ""} + {f" ({software.get('content_hash', '')[:16]}...)" if software.get('content_hash') else ""} + {" | " if software and hardware else ""} + {f"Hardware: {hardware.get('name', 'unknown')}" if hardware else ""} +
+
+ ''' + + provenance_html = f''' +
+

Provenance

+

This asset was created by applying an effect to input content.

+
+
+

Effect

+ + + + + {recipe} + + {f'
Commit: {effects_commit[:12]}...
' if effects_commit else ''} +
+
+

Input(s)

+ {inputs_html if inputs_html else 'No inputs recorded'} +
+
+

L1 Run

+ {l1_run_id[:16]}... +
+
+

Rendered

+ {rendered_at if rendered_at else 'Unknown'} +
+ {infra_html} +
+
+ ''' + + content = f''' +

← Back to Assets

+ +
+

{name}

+ {asset_type} +
+ + {content_html} + +
+
+
+

Owner

+ {owner} +
+ +
+

Description

+

{description if description else 'No description'}

+
+ +
+

Origin

+ {origin_html} +
+
+ +
+
+

Content Hash

+ {content_hash} +
+ +
+

IPFS

+ {ipfs_html} +
+ +
+

Created

+ {created_at} +
+ +
+

Tags

+
{tags_html}
+
+
+
+ + {provenance_html} + +
+

ActivityPub

+
+

+ Object URL: + https://{DOMAIN}/objects/{content_hash} +

+

+ Owner Actor: + https://{DOMAIN}/users/{owner} +

+
+
+ ''' + return HTMLResponse(base_html(f"Asset: {name}", content, username)) + + +async def ui_user_detail(username: str, request: Request): + """User detail page showing their published assets. Helper function for HTML rendering.""" + current_user = get_user_from_cookie(request) + + if not await user_exists(username): + content = f''' +

User Not Found

+

No user named "{username}" exists.

+

← Back to Users

+ ''' + return HTMLResponse(base_html("User Not Found", content, current_user)) + + # Get user's assets + registry = await load_registry() + all_assets = registry.get("assets", {}) + user_assets = {name: asset for name, asset in all_assets.items() if asset.get("owner") == username} + + # Get user's activities + all_activities = await load_activities() + actor_id = f"https://{DOMAIN}/users/{username}" + user_activities = [a for a in all_activities if a.get("actor_id") == actor_id] + + webfinger = f"@{username}@{DOMAIN}" + + # Assets table + if user_assets: + rows = "" + for name, asset in sorted(user_assets.items(), key=lambda x: x[1].get("created_at", ""), reverse=True): + hash_short = asset.get("content_hash", "")[:16] + "..." + asset_type = asset.get("asset_type", "") + type_color = "bg-blue-600" if asset_type == "image" else "bg-purple-600" if asset_type == "video" else "bg-gray-600" + rows += f''' + + + {name} + + {asset_type} + {hash_short} + {", ".join(asset.get("tags", []))} + + ''' + assets_html = f''' +
+ + + + + + + + + + + {rows} + +
NameTypeContent HashTags
+
+ ''' + else: + assets_html = '

No published assets yet.

' + + content = f''' +

← Back to Users

+ +
+

{username}

+ {webfinger} +
+ +
+
+
{len(user_assets)}
+
Published Assets
+
+
+
{len(user_activities)}
+
Activities
+
+
+ +
+

ActivityPub

+

+ Actor URL: https://{DOMAIN}/users/{username} +

+

+ Outbox: https://{DOMAIN}/users/{username}/outbox +

+
+ +

Published Assets ({len(user_assets)})

+ {assets_html} + ''' + return HTMLResponse(base_html(f"User: {username}", content, current_user)) + + +# ============ API Endpoints ============ + +@app.get("/") +async def root(request: Request): + """Server info. HTML shows home page with counts, JSON returns stats.""" + registry = await load_registry() + activities = await load_activities() + users = await db.get_all_users() + + assets_count = len(registry.get("assets", {})) + activities_count = len(activities) + users_count = len(users) + + if wants_html(request): + username = get_user_from_cookie(request) + readme_html = markdown.markdown(README_CONTENT, extensions=['tables', 'fenced_code']) + content = f''' + +
+ {readme_html} +
+ ''' + return HTMLResponse(base_html("Home", content, username)) + + return { + "name": "Art DAG L2 Server", + "version": "0.1.0", + "domain": DOMAIN, + "assets_count": assets_count, + "activities_count": activities_count, + "users_count": users_count + } + + +# ============ Auth Endpoints ============ + +security = HTTPBearer(auto_error=False) + + +async def get_optional_user( + credentials: HTTPAuthorizationCredentials = Depends(security) +) -> Optional[User]: + """Get current user if authenticated, None otherwise.""" + if not credentials: + return None + return await get_current_user(DATA_DIR, credentials.credentials) + + +async def get_required_user( + credentials: HTTPAuthorizationCredentials = Depends(security) +) -> User: + """Get current user, raise 401 if not authenticated.""" + if not credentials: + raise HTTPException(401, "Not authenticated") + user = await get_current_user(DATA_DIR, credentials.credentials) + if not user: + raise HTTPException(401, "Invalid token") + return user + + +@app.post("/auth/register", response_model=Token) +async def register(req: UserCreate): + """Register a new user.""" + try: + user = await create_user(DATA_DIR, req.username, req.password, req.email) + except ValueError as e: + raise HTTPException(400, str(e)) + + return create_access_token(user.username, l2_server=f"https://{DOMAIN}") + + +@app.post("/auth/login", response_model=Token) +async def login(req: UserLogin): + """Login and get access token.""" + user = await authenticate_user(DATA_DIR, req.username, req.password) + if not user: + raise HTTPException(401, "Invalid username or password") + + return create_access_token(user.username, l2_server=f"https://{DOMAIN}") + + +@app.get("/auth/me") +async def get_me(user: User = Depends(get_required_user)): + """Get current user info.""" + return { + "username": user.username, + "email": user.email, + "created_at": user.created_at + } + + +class VerifyRequest(BaseModel): + l1_server: str # URL of the L1 server requesting 
verification + + +@app.post("/auth/verify") +async def verify_auth( + request: VerifyRequest, + credentials: HTTPAuthorizationCredentials = Depends(security) +): + """Verify a token and return username. Only authorized L1 servers can call this.""" + if not credentials: + raise HTTPException(401, "No token provided") + + token = credentials.credentials + + # Check L1 is authorized + l1_normalized = request.l1_server.rstrip("/") + authorized = any(l1_normalized == s.rstrip("/") for s in L1_SERVERS) + if not authorized: + raise HTTPException(403, f"L1 server not authorized: {request.l1_server}") + + # Check if token is revoked (L2-side revocation) + token_hash = hashlib.sha256(token.encode()).hexdigest() + if await db.is_token_revoked(token_hash): + raise HTTPException(401, "Token has been revoked") + + # Verify token and get claims + claims = get_token_claims(token) + if not claims: + raise HTTPException(401, "Invalid token") + + username = claims.get("sub") + if not username: + raise HTTPException(401, "Invalid token") + + # Check token scope - if token is scoped to an L1, it must match + token_l1_server = claims.get("l1_server") + if token_l1_server: + token_l1_normalized = token_l1_server.rstrip("/") + if token_l1_normalized != l1_normalized: + raise HTTPException(403, f"Token is scoped to {token_l1_server}, not {request.l1_server}") + + # Record the attachment (L1 successfully verified user's token) + await db.attach_renderer(username, l1_normalized) + + return {"username": username, "valid": True, "l1_server": request.l1_server} + + +@app.get("/.well-known/webfinger") +async def webfinger(resource: str): + """WebFinger endpoint for actor discovery.""" + # Parse acct:username@domain + if not resource.startswith("acct:"): + raise HTTPException(400, "Resource must be acct: URI") + + acct = resource[5:] # Remove "acct:" + if "@" not in acct: + raise HTTPException(400, "Invalid acct format") + + username, domain = acct.split("@", 1) + + if domain != DOMAIN: + raise 
HTTPException(404, f"Unknown domain: {domain}") + + if not await user_exists(username): + raise HTTPException(404, f"Unknown user: {username}") + + return JSONResponse( + content={ + "subject": resource, + "links": [ + { + "rel": "self", + "type": "application/activity+json", + "href": f"https://{DOMAIN}/users/{username}" + } + ] + }, + media_type="application/jrd+json" + ) + + +@app.get("/users") +async def get_users_list(request: Request, page: int = 1, limit: int = 20): + """Get all users. HTML for browsers (with infinite scroll), JSON for APIs (with pagination).""" + all_users = list((await db.get_all_users()).items()) + total = len(all_users) + + # Sort by username + all_users.sort(key=lambda x: x[0]) + + # Pagination + start = (page - 1) * limit + end = start + limit + users_page = all_users[start:end] + has_more = end < total + + if wants_html(request): + username = get_user_from_cookie(request) + + if not users_page: + if page == 1: + content = ''' +

Users

+

No users registered yet.

+ ''' + else: + return HTMLResponse("") # Empty for infinite scroll + else: + rows = "" + for uname, user_data in users_page: + webfinger = f"@{uname}@{DOMAIN}" + created_at = format_date(user_data.get("created_at")) + rows += f''' + + + {uname} + + {webfinger} + {created_at} + + ''' + + # For infinite scroll, just return rows if not first page + if page > 1: + if has_more: + rows += f''' + + Loading more... + + ''' + return HTMLResponse(rows) + + # First page - full content + infinite_scroll_trigger = "" + if has_more: + infinite_scroll_trigger = f''' + + Loading more... + + ''' + + content = f''' +

Users ({total} total)

+
+ + + + + + + + + + {rows} + {infinite_scroll_trigger} + +
UsernameWebFingerCreated
+
+ ''' + + return HTMLResponse(base_html("Users", content, username)) + + # JSON response for APIs + return { + "users": [{"username": uname, **data} for uname, data in users_page], + "pagination": { + "page": page, + "limit": limit, + "total": total, + "has_more": has_more + } + } + + +@app.get("/users/{username}") +async def get_actor(username: str, request: Request): + """Get actor profile for any registered user. Content negotiation: HTML for browsers, JSON for APIs.""" + if not await user_exists(username): + if wants_html(request): + content = f''' +

User Not Found

+

No user named "{username}" exists.

+

← Back to Users

+ ''' + return HTMLResponse(base_html("User Not Found", content, get_user_from_cookie(request))) + raise HTTPException(404, f"Unknown user: {username}") + + if wants_html(request): + # Render user detail page + return await ui_user_detail(username, request) + + actor = load_actor(username) + + # Add ActivityPub context + actor["@context"] = [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1" + ] + + return JSONResponse( + content=actor, + media_type="application/activity+json" + ) + + +@app.get("/users/{username}/outbox") +async def get_outbox(username: str, page: bool = False): + """Get actor's outbox (activities they created).""" + if not await user_exists(username): + raise HTTPException(404, f"Unknown user: {username}") + + # Filter activities by this user's actor_id + all_activities = await load_activities() + actor_id = f"https://{DOMAIN}/users/{username}" + user_activities = [a for a in all_activities if a.get("actor_id") == actor_id] + + if not page: + return JSONResponse( + content={ + "@context": "https://www.w3.org/ns/activitystreams", + "id": f"https://{DOMAIN}/users/{username}/outbox", + "type": "OrderedCollection", + "totalItems": len(user_activities), + "first": f"https://{DOMAIN}/users/{username}/outbox?page=true" + }, + media_type="application/activity+json" + ) + + # Return activities page + return JSONResponse( + content={ + "@context": "https://www.w3.org/ns/activitystreams", + "id": f"https://{DOMAIN}/users/{username}/outbox?page=true", + "type": "OrderedCollectionPage", + "partOf": f"https://{DOMAIN}/users/{username}/outbox", + "orderedItems": user_activities + }, + media_type="application/activity+json" + ) + + +@app.post("/users/{username}/inbox") +async def post_inbox(username: str, request: Request): + """Receive activities from other servers.""" + if not await user_exists(username): + raise HTTPException(404, f"Unknown user: {username}") + + body = await request.json() + activity_type = body.get("type") + + # 
Handle Follow requests + if activity_type == "Follow": + follower_url = body.get("actor") + # Add follower to database + await db.add_follower(username, follower_url, follower_url) + + # Send Accept (in production, do this async) + # For now just acknowledge + return {"status": "accepted"} + + # Handle other activity types + return {"status": "received"} + + +@app.get("/users/{username}/followers") +async def get_followers(username: str): + """Get actor's followers.""" + if not await user_exists(username): + raise HTTPException(404, f"Unknown user: {username}") + + # TODO: Per-user followers - for now use global followers + followers = await load_followers() + + return JSONResponse( + content={ + "@context": "https://www.w3.org/ns/activitystreams", + "id": f"https://{DOMAIN}/users/{username}/followers", + "type": "OrderedCollection", + "totalItems": len(followers), + "orderedItems": followers + }, + media_type="application/activity+json" + ) + + +# ============ Assets Endpoints ============ + +@app.get("/assets") +async def get_registry(request: Request, page: int = 1, limit: int = 20): + """Get registry. HTML for browsers (with infinite scroll), JSON for APIs (with pagination).""" + registry = await load_registry() + all_assets = list(registry.get("assets", {}).items()) + total = len(all_assets) + + # Sort by created_at descending + all_assets.sort(key=lambda x: x[1].get("created_at", ""), reverse=True) + + # Pagination + start = (page - 1) * limit + end = start + limit + assets_page = all_assets[start:end] + has_more = end < total + + if wants_html(request): + username = get_user_from_cookie(request) + + if not assets_page: + if page == 1: + content = ''' +

Registry

+

No assets registered yet.

+ ''' + else: + return HTMLResponse("") # Empty for infinite scroll + else: + rows = "" + for name, asset in assets_page: + asset_type = asset.get("asset_type", "") + type_color = "bg-blue-600" if asset_type == "image" else "bg-purple-600" if asset_type == "video" else "bg-gray-600" + owner = asset.get("owner", "unknown") + content_hash = asset.get("content_hash", "")[:16] + "..." + rows += f''' + + {name} + {asset_type} + + {owner} + + {content_hash} + + View + + + ''' + + # For infinite scroll, just return rows if not first page + if page > 1: + if has_more: + rows += f''' + + Loading more... + + ''' + return HTMLResponse(rows) + + # First page - full content + infinite_scroll_trigger = "" + if has_more: + infinite_scroll_trigger = f''' + + Loading more... + + ''' + + content = f''' +

Registry ({total} assets)

+
+ + + + + + + + + + + + {rows} + {infinite_scroll_trigger} + +
NameTypeOwnerHash
+
+ ''' + + return HTMLResponse(base_html("Registry", content, username)) + + # JSON response for APIs + return { + "assets": {name: asset for name, asset in assets_page}, + "pagination": { + "page": page, + "limit": limit, + "total": total, + "has_more": has_more + } + } + + +@app.get("/asset/{name}") +async def get_asset_by_name_legacy(name: str): + """Legacy route - redirect to /assets/{name}.""" + return RedirectResponse(url=f"/assets/{name}", status_code=301) + + +@app.get("/assets/{name}") +async def get_asset(name: str, request: Request): + """Get asset by name. HTML for browsers (default), JSON only if explicitly requested.""" + registry = await load_registry() + + # Check if JSON explicitly requested + accept = request.headers.get("accept", "") + wants_json = "application/json" in accept and "text/html" not in accept + + if name not in registry.get("assets", {}): + if wants_json: + raise HTTPException(404, f"Asset not found: {name}") + content = f''' +

Asset Not Found

+

No asset named "{name}" exists.

+

← Back to Assets

+ ''' + return HTMLResponse(base_html("Asset Not Found", content, get_user_from_cookie(request))) + + if wants_json: + return registry["assets"][name] + + # Default to HTML for browsers + return await ui_asset_detail(name, request) + + +@app.get("/assets/by-run-id/{run_id}") +async def get_asset_by_run_id(run_id: str): + """ + Get asset by content-addressable run_id. + + Returns the asset info including output_hash and ipfs_cid for L1 recovery. + The run_id is stored in the asset's provenance when the run is recorded. + """ + asset = await db.get_asset_by_run_id(run_id) + if not asset: + raise HTTPException(404, f"No asset found for run_id: {run_id}") + + return { + "run_id": run_id, + "asset_name": asset.get("name"), + "output_hash": asset.get("content_hash"), + "ipfs_cid": asset.get("ipfs_cid"), + "provenance_cid": asset.get("provenance", {}).get("provenance_cid") if asset.get("provenance") else None, + } + + +@app.patch("/assets/{name}") +async def update_asset(name: str, req: UpdateAssetRequest, user: User = Depends(get_required_user)): + """Update an existing asset's metadata. 
Creates an Update activity.""" + asset = await db.get_asset(name) + if not asset: + raise HTTPException(404, f"Asset not found: {name}") + + # Check ownership + if asset.get("owner") != user.username: + raise HTTPException(403, f"Not authorized to update asset owned by {asset.get('owner')}") + + # Build updates dict + updates = {} + if req.description is not None: + updates["description"] = req.description + if req.tags is not None: + updates["tags"] = req.tags + if req.metadata is not None: + updates["metadata"] = {**asset.get("metadata", {}), **req.metadata} + if req.origin is not None: + updates["origin"] = req.origin + if req.ipfs_cid is not None: + updates["ipfs_cid"] = req.ipfs_cid + # Pin on IPFS (fire-and-forget, don't block) + import threading + threading.Thread(target=_pin_ipfs_async, args=(req.ipfs_cid,), daemon=True).start() + + # Update asset in database + updated_asset = await db.update_asset(name, updates) + + # Create Update activity + activity = { + "activity_id": str(uuid.uuid4()), + "activity_type": "Update", + "actor_id": f"https://{DOMAIN}/users/{user.username}", + "object_data": { + "type": updated_asset.get("asset_type", "Object").capitalize(), + "name": name, + "id": f"https://{DOMAIN}/objects/{updated_asset['content_hash']}", + "contentHash": { + "algorithm": "sha3-256", + "value": updated_asset["content_hash"] + }, + "attributedTo": f"https://{DOMAIN}/users/{user.username}", + "summary": req.description, + "tag": req.tags or updated_asset.get("tags", []) + }, + "published": updated_asset.get("updated_at", datetime.now(timezone.utc).isoformat()) + } + + # Sign activity with the user's keys + activity = sign_activity(activity, user.username) + + # Save activity to database + await db.create_activity(activity) + + return {"asset": updated_asset, "activity": activity} + + +def _pin_ipfs_async(cid: str): + """Pin IPFS content in background thread.""" + try: + import ipfs_client + if ipfs_client.is_available(): + ipfs_client.pin(cid) + 
logger.info(f"Pinned IPFS content: {cid}") + except Exception as e: + logger.warning(f"Failed to pin IPFS content {cid}: {e}") + + +async def _register_asset_impl(req: RegisterRequest, owner: str): + """ + Internal implementation for registering an asset atomically. + + Requires IPFS CID - content must be on IPFS before registering. + Uses a transaction for all DB operations. + """ + import ipfs_client + from ipfs_client import IPFSError + + logger.info(f"register_asset: Starting for {req.name} (hash={req.content_hash[:16]}...)") + + # ===== PHASE 1: VALIDATION ===== + # IPFS CID is required + if not req.ipfs_cid: + raise HTTPException(400, "IPFS CID is required for registration") + + # Check if name exists - return existing asset if so + existing = await db.get_asset(req.name) + if existing: + logger.info(f"register_asset: Asset {req.name} already exists, returning existing") + return {"asset": existing, "activity": None, "existing": True} + + # ===== PHASE 2: IPFS OPERATIONS (non-blocking) ===== + import asyncio + logger.info(f"register_asset: Pinning CID {req.ipfs_cid[:16]}... 
on IPFS") + try: + await asyncio.to_thread(ipfs_client.pin_or_raise, req.ipfs_cid) + logger.info("register_asset: CID pinned successfully") + except IPFSError as e: + logger.error(f"register_asset: IPFS pin failed: {e}") + raise HTTPException(500, f"IPFS operation failed: {e}") + + # ===== PHASE 3: DB TRANSACTION ===== + now = datetime.now(timezone.utc).isoformat() + + try: + async with db.transaction() as conn: + # Check name again inside transaction (race condition protection) + if await db.asset_exists_by_name_tx(conn, req.name): + # Race condition - another request created it first, return existing + existing = await db.get_asset(req.name) + logger.info(f"register_asset: Asset {req.name} created by concurrent request") + return {"asset": existing, "activity": None, "existing": True} + + # Create asset + asset = { + "name": req.name, + "content_hash": req.content_hash, + "ipfs_cid": req.ipfs_cid, + "asset_type": req.asset_type, + "tags": req.tags, + "metadata": req.metadata, + "url": req.url, + "provenance": req.provenance, + "owner": owner, + "created_at": now + } + created_asset = await db.create_asset_tx(conn, asset) + + # Create ownership activity + object_data = { + "type": req.asset_type.capitalize(), + "name": req.name, + "id": f"https://{DOMAIN}/objects/{req.content_hash}", + "contentHash": { + "algorithm": "sha3-256", + "value": req.content_hash + }, + "attributedTo": f"https://{DOMAIN}/users/{owner}" + } + + # Include provenance in activity object_data if present + if req.provenance: + object_data["provenance"] = req.provenance + + activity = { + "activity_id": req.content_hash, # Content-addressable by content hash + "activity_type": "Create", + "actor_id": f"https://{DOMAIN}/users/{owner}", + "object_data": object_data, + "published": now + } + activity = sign_activity(activity, owner) + created_activity = await db.create_activity_tx(conn, activity) + + # Transaction commits here on successful exit + + except HTTPException: + raise + except Exception 
as e: + logger.error(f"register_asset: Database transaction failed: {e}") + raise HTTPException(500, f"Failed to register asset: {e}") + + logger.info(f"register_asset: Successfully registered {req.name}") + return {"asset": created_asset, "activity": created_activity} + + +@app.post("/assets") +async def register_asset(req: RegisterRequest, user: User = Depends(get_required_user)): + """Register a new asset and create ownership activity. Requires authentication.""" + return await _register_asset_impl(req, user.username) + + +@app.post("/assets/record-run") +@app.post("/registry/record-run") # Legacy route +async def record_run(req: RecordRunRequest, user: User = Depends(get_required_user)): + """ + Record an L1 run and register the output atomically. + + Ensures all operations succeed or none do: + 1. All input assets registered (if not already on L2) + pinned on IPFS + 2. Output asset registered + pinned on IPFS + 3. Recipe serialized to JSON, stored on IPFS, CID saved in provenance + """ + import ipfs_client + from ipfs_client import IPFSError + + # ===== PHASE 1: PREPARATION (read-only, non-blocking) ===== + import asyncio + l1_url = req.l1_server.rstrip('/') + + logger.info(f"record_run: Starting for run_id={req.run_id} from {l1_url}") + + # Helper to fetch from L1 without blocking event loop + def fetch_l1_run(): + import time as _time + url = f"{l1_url}/runs/{req.run_id}" + logger.info(f"record_run: Fetching run from L1: {url}") + t0 = _time.time() + resp = requests.get(url, timeout=30) + logger.info(f"record_run: L1 request took {_time.time()-t0:.3f}s, status={resp.status_code}") + if resp.status_code == 404: + raise ValueError(f"Run not found on L1: {req.run_id}") + resp.raise_for_status() + try: + return resp.json() + except Exception: + body_preview = resp.text[:200] if resp.text else "(empty)" + logger.error(f"L1 returned non-JSON for {url}: status={resp.status_code}, body={body_preview}") + raise ValueError(f"L1 returned invalid response: 
{body_preview[:100]}") + + def fetch_l1_cache(content_hash): + logger.debug(f"record_run: Fetching cache {content_hash[:16]}... from L1") + url = f"{l1_url}/cache/{content_hash}" + resp = requests.get(url, headers={"Accept": "application/json"}, timeout=10) + if resp.status_code == 404: + raise ValueError(f"Cache item not found on L1: {content_hash[:16]}...") + resp.raise_for_status() + try: + return resp.json() + except Exception as e: + # Log what we actually got back + body_preview = resp.text[:200] if resp.text else "(empty)" + logger.error(f"L1 returned non-JSON for {url}: status={resp.status_code}, body={body_preview}") + raise ValueError(f"L1 returned invalid response (status={resp.status_code}): {body_preview[:100]}") + + # Fetch run from L1 + try: + run = await asyncio.to_thread(fetch_l1_run) + logger.info(f"record_run: Fetched run, status={run.get('status')}, inputs={len(run.get('inputs', []))}") + except Exception as e: + logger.error(f"record_run: Failed to fetch run from L1: {e}") + raise HTTPException(400, f"Failed to fetch run from L1 ({l1_url}): {e}") + + if run.get("status") != "completed": + raise HTTPException(400, f"Run not completed: {run.get('status')}") + + output_hash = run.get("output_hash") + if not output_hash: + raise HTTPException(400, "Run has no output hash") + + # Fetch output cache info from L1 (must exist - it's new) + logger.info(f"record_run: Fetching output cache {output_hash[:16]}... 
from L1") + try: + cache_info = await asyncio.to_thread(fetch_l1_cache, output_hash) + output_media_type = cache_info.get("media_type", "image") + output_ipfs_cid = cache_info.get("ipfs_cid") + logger.info(f"record_run: Output has IPFS CID: {output_ipfs_cid[:16] if output_ipfs_cid else 'None'}...") + except Exception as e: + logger.error(f"record_run: Failed to fetch output cache info: {e}") + raise HTTPException(400, f"Failed to fetch output cache info: {e}") + + if not output_ipfs_cid: + logger.error("record_run: Output has no IPFS CID") + raise HTTPException(400, "Output has no IPFS CID - cannot publish") + + # Gather input info: check L2 first, then fall back to L1 + input_hashes = run.get("inputs", []) + input_infos = [] # List of {content_hash, ipfs_cid, media_type, existing_asset} + logger.info(f"record_run: Gathering info for {len(input_hashes)} inputs") + + for input_hash in input_hashes: + # Check if already on L2 + existing = await db.get_asset_by_hash(input_hash) + if existing and existing.get("ipfs_cid"): + logger.info(f"record_run: Input {input_hash[:16]}... found on L2") + input_infos.append({ + "content_hash": input_hash, + "ipfs_cid": existing["ipfs_cid"], + "media_type": existing.get("asset_type", "image"), + "existing_asset": existing + }) + else: + # Not on L2, try L1 + logger.info(f"record_run: Input {input_hash[:16]}... not on L2, fetching from L1") + try: + inp_info = await asyncio.to_thread(fetch_l1_cache, input_hash) + ipfs_cid = inp_info.get("ipfs_cid") + if not ipfs_cid: + logger.error(f"record_run: Input {input_hash[:16]}... has no IPFS CID") + raise HTTPException(400, f"Input {input_hash[:16]}... has no IPFS CID (not on L2 or L1)") + input_infos.append({ + "content_hash": input_hash, + "ipfs_cid": ipfs_cid, + "media_type": inp_info.get("media_type", "image"), + "existing_asset": None + }) + except HTTPException: + raise + except Exception as e: + logger.error(f"record_run: Failed to fetch input {input_hash[:16]}... 
from L1: {e}") + raise HTTPException(400, f"Input {input_hash[:16]}... not on L2 and failed to fetch from L1: {e}") + + # Prepare recipe data + recipe_data = run.get("recipe") + if not recipe_data: + recipe_data = { + "name": run.get("recipe_name", "unknown"), + "effect_url": run.get("effect_url"), + "effects_commit": run.get("effects_commit"), + } + + # Build registered_inputs list - all referenced by content_hash + registered_inputs = [] + for inp in input_infos: + registered_inputs.append({ + "content_hash": inp["content_hash"], + "ipfs_cid": inp["ipfs_cid"] + }) + + # ===== PHASE 2: IPFS OPERATIONS (non-blocking for event loop) ===== + def do_ipfs_operations(): + """Run IPFS operations in thread pool to not block event loop.""" + from concurrent.futures import ThreadPoolExecutor, as_completed + + # Collect all CIDs to pin (inputs + output) + cids_to_pin = [inp["ipfs_cid"] for inp in input_infos] + [output_ipfs_cid] + logger.info(f"record_run: Pinning {len(cids_to_pin)} CIDs on IPFS") + + # Pin all in parallel + with ThreadPoolExecutor(max_workers=5) as executor: + futures = {executor.submit(ipfs_client.pin_or_raise, cid): cid for cid in cids_to_pin} + for future in as_completed(futures): + future.result() # Raises IPFSError if failed + logger.info("record_run: All CIDs pinned successfully") + + # Store recipe on IPFS + logger.info("record_run: Storing recipe on IPFS") + recipe_cid = ipfs_client.add_json(recipe_data) + + # Build and store full provenance on IPFS + # Compute content-addressable run_id from inputs + recipe + recipe_name = recipe_data.get("name", "unknown") if isinstance(recipe_data, dict) else str(recipe_data) + run_id = compute_run_id(input_hashes, recipe_name) + provenance = { + "run_id": run_id, # Content-addressable run identifier + "inputs": registered_inputs, + "output": { + "content_hash": output_hash, + "ipfs_cid": output_ipfs_cid + }, + "recipe": recipe_data, + "recipe_cid": recipe_cid, + "effect_url": run.get("effect_url"), + 
"effects_commit": run.get("effects_commit"), + "l1_server": l1_url, + "l1_run_id": req.run_id, + "rendered_at": run.get("completed_at"), + "infrastructure": run.get("infrastructure") + } + logger.info("record_run: Storing provenance on IPFS") + provenance_cid = ipfs_client.add_json(provenance) + + return recipe_cid, provenance_cid, provenance + + try: + import asyncio + recipe_cid, provenance_cid, provenance = await asyncio.to_thread(do_ipfs_operations) + logger.info(f"record_run: Recipe CID: {recipe_cid[:16]}..., Provenance CID: {provenance_cid[:16]}...") + except IPFSError as e: + logger.error(f"record_run: IPFS operation failed: {e}") + raise HTTPException(500, f"IPFS operation failed: {e}") + + # ===== PHASE 3: DB TRANSACTION (all-or-nothing) ===== + logger.info("record_run: Starting DB transaction") + now = datetime.now(timezone.utc).isoformat() + + # Add provenance_cid to provenance for storage in DB + provenance["provenance_cid"] = provenance_cid + + try: + async with db.transaction() as conn: + # Register input assets (if not already on L2) - named by content_hash + for inp in input_infos: + if not inp["existing_asset"]: + media_type = inp["media_type"] + tags = ["auto-registered", "input"] + if media_type == "recipe": + tags.append("recipe") + input_asset = { + "name": inp["content_hash"], # Use content_hash as name + "content_hash": inp["content_hash"], + "ipfs_cid": inp["ipfs_cid"], + "asset_type": media_type, + "tags": tags, + "metadata": {"auto_registered_from_run": req.run_id}, + "owner": user.username, + "created_at": now + } + await db.create_asset_tx(conn, input_asset) + + # Check if output already exists (by content_hash) - return existing if so + existing = await db.get_asset_by_name_tx(conn, output_hash) + if existing: + logger.info(f"record_run: Output {output_hash[:16]}... 
already exists") + # Check if activity already exists for this run + existing_activity = await db.get_activity(provenance["run_id"]) + if existing_activity: + logger.info(f"record_run: Activity {provenance['run_id'][:16]}... also exists") + return {"asset": existing, "activity": existing_activity, "existing": True} + # Asset exists but no activity - create one + logger.info(f"record_run: Creating activity for existing asset") + object_data = { + "type": existing.get("asset_type", "image").capitalize(), + "name": output_hash, + "id": f"https://{DOMAIN}/objects/{output_hash}", + "contentHash": { + "algorithm": "sha3-256", + "value": output_hash + }, + "attributedTo": f"https://{DOMAIN}/users/{user.username}", + "provenance": provenance + } + activity = { + "activity_id": provenance["run_id"], + "activity_type": "Create", + "actor_id": f"https://{DOMAIN}/users/{user.username}", + "object_data": object_data, + "published": now + } + activity = sign_activity(activity, user.username) + created_activity = await db.create_activity_tx(conn, activity) + return {"asset": existing, "activity": created_activity, "existing": True} + + # Create output asset with provenance - named by content_hash + output_asset = { + "name": output_hash, # Use content_hash as name + "content_hash": output_hash, + "ipfs_cid": output_ipfs_cid, + "asset_type": output_media_type, + "tags": ["rendered", "l1"], + "metadata": {"l1_server": l1_url, "l1_run_id": req.run_id}, + "provenance": provenance, + "owner": user.username, + "created_at": now + } + created_asset = await db.create_asset_tx(conn, output_asset) + + # Create activity - all referenced by content_hash + object_data = { + "type": output_media_type.capitalize(), + "name": output_hash, # Use content_hash as name + "id": f"https://{DOMAIN}/objects/{output_hash}", + "contentHash": { + "algorithm": "sha3-256", + "value": output_hash + }, + "attributedTo": f"https://{DOMAIN}/users/{user.username}", + "provenance": provenance + } + + activity = { 
+ "activity_id": provenance["run_id"], # Content-addressable run_id + "activity_type": "Create", + "actor_id": f"https://{DOMAIN}/users/{user.username}", + "object_data": object_data, + "published": now + } + activity = sign_activity(activity, user.username) + created_activity = await db.create_activity_tx(conn, activity) + + # Transaction commits here on successful exit + + except HTTPException: + raise + except Exception as e: + logger.error(f"record_run: Database transaction failed: {e}") + raise HTTPException(500, f"Failed to record run: {e}") + + logger.info(f"record_run: Successfully published {output_hash[:16]}... with {len(registered_inputs)} inputs") + return {"asset": created_asset, "activity": created_activity} + + +@app.post("/assets/publish-cache") +async def publish_cache(req: PublishCacheRequest, user: User = Depends(get_required_user)): + """ + Publish a cache item from L1 with metadata atomically. + + Requires origin to be set (self or external URL). + Requires IPFS CID - content must be on IPFS before publishing. + Creates a new asset and Create activity in a single transaction. 
+ """ + import ipfs_client + from ipfs_client import IPFSError + + logger.info(f"publish_cache: Starting for {req.asset_name} (hash={req.content_hash[:16]}...)") + + # ===== PHASE 1: VALIDATION ===== + # Validate origin + if not req.origin or "type" not in req.origin: + raise HTTPException(400, "Origin is required for publishing (type: 'self' or 'external')") + + origin_type = req.origin.get("type") + if origin_type not in ("self", "external"): + raise HTTPException(400, "Origin type must be 'self' or 'external'") + + if origin_type == "external" and not req.origin.get("url"): + raise HTTPException(400, "External origin requires a URL") + + # IPFS CID is now required + if not req.ipfs_cid: + raise HTTPException(400, "IPFS CID is required for publishing") + + # Check if asset name already exists + if await db.asset_exists(req.asset_name): + raise HTTPException(400, f"Asset name already exists: {req.asset_name}") + + # ===== PHASE 2: IPFS OPERATIONS (non-blocking) ===== + import asyncio + logger.info(f"publish_cache: Pinning CID {req.ipfs_cid[:16]}... 
on IPFS") + try: + await asyncio.to_thread(ipfs_client.pin_or_raise, req.ipfs_cid) + logger.info("publish_cache: CID pinned successfully") + except IPFSError as e: + logger.error(f"publish_cache: IPFS pin failed: {e}") + raise HTTPException(500, f"IPFS operation failed: {e}") + + # ===== PHASE 3: DB TRANSACTION ===== + logger.info("publish_cache: Starting DB transaction") + now = datetime.now(timezone.utc).isoformat() + + try: + async with db.transaction() as conn: + # Check name again inside transaction (race condition protection) + if await db.asset_exists_by_name_tx(conn, req.asset_name): + raise HTTPException(400, f"Asset name already exists: {req.asset_name}") + + # Create asset + asset = { + "name": req.asset_name, + "content_hash": req.content_hash, + "ipfs_cid": req.ipfs_cid, + "asset_type": req.asset_type, + "tags": req.tags, + "description": req.description, + "origin": req.origin, + "metadata": req.metadata, + "owner": user.username, + "created_at": now + } + created_asset = await db.create_asset_tx(conn, asset) + + # Create ownership activity with origin info + object_data = { + "type": req.asset_type.capitalize(), + "name": req.asset_name, + "id": f"https://{DOMAIN}/objects/{req.content_hash}", + "contentHash": { + "algorithm": "sha3-256", + "value": req.content_hash + }, + "attributedTo": f"https://{DOMAIN}/users/{user.username}", + "tag": req.tags + } + + if req.description: + object_data["summary"] = req.description + + # Include origin in ActivityPub object + if origin_type == "self": + object_data["generator"] = { + "type": "Application", + "name": "Art DAG", + "note": "Original content created by the author" + } + else: + object_data["source"] = { + "type": "Link", + "href": req.origin.get("url"), + "name": req.origin.get("note", "External source") + } + + activity = { + "activity_id": req.content_hash, # Content-addressable by content hash + "activity_type": "Create", + "actor_id": f"https://{DOMAIN}/users/{user.username}", + "object_data": 
object_data, + "published": now + } + activity = sign_activity(activity, user.username) + created_activity = await db.create_activity_tx(conn, activity) + + # Transaction commits here on successful exit + + except HTTPException: + raise + except Exception as e: + logger.error(f"publish_cache: Database transaction failed: {e}") + raise HTTPException(500, f"Failed to publish cache item: {e}") + + logger.info(f"publish_cache: Successfully published {req.asset_name}") + return {"asset": created_asset, "activity": created_activity} + + +# ============ Activities Endpoints ============ + +@app.get("/activities") +async def get_activities(request: Request, page: int = 1, limit: int = 20): + """Get activities. HTML for browsers (with infinite scroll), JSON for APIs (with pagination).""" + all_activities = await load_activities() + total = len(all_activities) + + # Reverse for newest first + all_activities = list(reversed(all_activities)) + + # Pagination + start = (page - 1) * limit + end = start + limit + activities_page = all_activities[start:end] + has_more = end < total + + if wants_html(request): + username = get_user_from_cookie(request) + + if not activities_page: + if page == 1: + content = ''' +

Activities

+

No activities yet.

+ ''' + else: + return HTMLResponse("") # Empty for infinite scroll + else: + rows = "" + for i, activity in enumerate(activities_page): + activity_index = total - 1 - (start + i) # Original index + obj = activity.get("object_data", {}) + activity_type = activity.get("activity_type", "") + type_color = "bg-green-600" if activity_type == "Create" else "bg-yellow-600" if activity_type == "Update" else "bg-gray-600" + actor_id = activity.get("actor_id", "") + actor_name = actor_id.split("/")[-1] if actor_id else "unknown" + rows += f''' + + {activity_type} + {obj.get("name", "Untitled")} + + {actor_name} + + {format_date(activity.get("published"))} + + View + + + ''' + + # For infinite scroll, just return rows if not first page + if page > 1: + if has_more: + rows += f''' + + Loading more... + + ''' + return HTMLResponse(rows) + + # First page - full content with header + infinite_scroll_trigger = "" + if has_more: + infinite_scroll_trigger = f''' + + Loading more... + + ''' + + content = f''' +

Activities ({total} total)

+
+ + + + + + + + + + + + {rows} + {infinite_scroll_trigger} + +
TypeObjectActorPublished
+
+ ''' + + return HTMLResponse(base_html("Activities", content, username)) + + # JSON response for APIs + return { + "activities": activities_page, + "pagination": { + "page": page, + "limit": limit, + "total": total, + "has_more": has_more + } + } + + +@app.get("/activities/{activity_ref}") +async def get_activity_detail(activity_ref: str, request: Request): + """Get single activity by index or activity_id. HTML for browsers (default), JSON only if explicitly requested.""" + + # Check if JSON explicitly requested + accept = request.headers.get("accept", "") + wants_json = ("application/json" in accept or "application/activity+json" in accept) and "text/html" not in accept + + activity = None + activity_index = None + + # Check if it's a numeric index or an activity_id (hash) + if activity_ref.isdigit(): + # Numeric index (legacy) + activity_index = int(activity_ref) + activities = await load_activities() + if 0 <= activity_index < len(activities): + activity = activities[activity_index] + else: + # Activity ID (hash) - look up directly + activity = await db.get_activity(activity_ref) + if activity: + # Find index for UI rendering + activities = await load_activities() + for i, a in enumerate(activities): + if a.get("activity_id") == activity_ref: + activity_index = i + break + + if not activity: + if wants_json: + raise HTTPException(404, "Activity not found") + content = ''' +

Activity Not Found

+

This activity does not exist.

+

← Back to Activities

+ ''' + return HTMLResponse(base_html("Activity Not Found", content, get_user_from_cookie(request))) + + if wants_json: + return activity + + # Default to HTML for browsers + if activity_index is not None: + return await ui_activity_detail(activity_index, request) + else: + # Render activity directly if no index found + return await ui_activity_detail_by_data(activity, request) + + +@app.get("/activity/{activity_index}") +async def get_activity_legacy(activity_index: int): + """Legacy route - redirect to /activities/{activity_index}.""" + return RedirectResponse(url=f"/activities/{activity_index}", status_code=301) + + +@app.get("/objects/{content_hash}") +async def get_object(content_hash: str, request: Request): + """Get object by content hash. Content negotiation: HTML for browsers, JSON for APIs.""" + registry = await load_registry() + + # Find asset by hash + for name, asset in registry.get("assets", {}).items(): + if asset.get("content_hash") == content_hash: + # Check Accept header - only return JSON if explicitly requested + accept = request.headers.get("accept", "") + wants_json = ("application/json" in accept or "application/activity+json" in accept) and "text/html" not in accept + + if not wants_json: + # Default: redirect to detail page for browsers + return RedirectResponse(url=f"/assets/{name}", status_code=303) + + owner = asset.get("owner", "unknown") + return JSONResponse( + content={ + "@context": "https://www.w3.org/ns/activitystreams", + "id": f"https://{DOMAIN}/objects/{content_hash}", + "type": asset.get("asset_type", "Object").capitalize(), + "name": name, + "contentHash": { + "algorithm": "sha3-256", + "value": content_hash + }, + "attributedTo": f"https://{DOMAIN}/users/{owner}", + "published": asset.get("created_at") + }, + media_type="application/activity+json" + ) + + raise HTTPException(404, f"Object not found: {content_hash}") + + +# ============ Anchoring (Bitcoin timestamps) ============ + +@app.post("/anchors/create") +async def 
create_anchor_endpoint(request: Request): + """ + Create a new anchor for all unanchored activities. + + Builds a merkle tree, stores it on IPFS, and submits to OpenTimestamps + for Bitcoin anchoring. The anchor proof is backed up to persistent storage. + """ + import anchoring + import ipfs_client + + # Check auth (cookie or header) + username = get_user_from_cookie(request) + if not username: + if wants_html(request): + return HTMLResponse(''' +
+ Error: Login required +
+ ''') + raise HTTPException(401, "Authentication required") + + # Get unanchored activities + unanchored = await db.get_unanchored_activities() + if not unanchored: + if wants_html(request): + return HTMLResponse(''' +
+ No unanchored activities to anchor. +
+ ''') + return {"message": "No unanchored activities", "anchored": 0} + + activity_ids = [a["activity_id"] for a in unanchored] + + # Create anchor + anchor = await anchoring.create_anchor(activity_ids, db, ipfs_client) + + if anchor: + if wants_html(request): + return HTMLResponse(f''' +
+ Success! Anchored {len(activity_ids)} activities.
+ Merkle root: {anchor["merkle_root"][:32]}...
+ Refresh page to see the new anchor. +
+ ''') + return { + "message": f"Anchored {len(activity_ids)} activities", + "merkle_root": anchor["merkle_root"], + "tree_ipfs_cid": anchor.get("tree_ipfs_cid"), + "activity_count": anchor["activity_count"] + } + else: + if wants_html(request): + return HTMLResponse(''' +
+ Failed! Could not create anchor. +
+ ''') + raise HTTPException(500, "Failed to create anchor") + + +@app.get("/anchors") +async def list_anchors(): + """List all anchors.""" + anchors = await db.get_all_anchors() + stats = await db.get_anchor_stats() + return { + "anchors": anchors, + "stats": stats + } + + +@app.get("/anchors/{merkle_root}") +async def get_anchor_endpoint(merkle_root: str): + """Get anchor details by merkle root.""" + anchor = await db.get_anchor(merkle_root) + if not anchor: + raise HTTPException(404, f"Anchor not found: {merkle_root}") + return anchor + + +@app.get("/anchors/{merkle_root}/tree") +async def get_anchor_tree(merkle_root: str): + """Get the full merkle tree from IPFS.""" + import asyncio + import ipfs_client + + anchor = await db.get_anchor(merkle_root) + if not anchor: + raise HTTPException(404, f"Anchor not found: {merkle_root}") + + tree_cid = anchor.get("tree_ipfs_cid") + if not tree_cid: + raise HTTPException(404, "Anchor has no tree on IPFS") + + try: + tree_bytes = await asyncio.to_thread(ipfs_client.get_bytes, tree_cid) + if tree_bytes: + return json.loads(tree_bytes) + except Exception as e: + raise HTTPException(500, f"Failed to fetch tree from IPFS: {e}") + + +@app.get("/anchors/verify/{activity_id}") +async def verify_activity_anchor(activity_id: str): + """ + Verify an activity's anchor proof. + + Returns the merkle proof showing this activity is included in an anchored batch. 
+ """ + import anchoring + import ipfs_client + + # Get activity + activity = await db.get_activity(activity_id) + if not activity: + raise HTTPException(404, f"Activity not found: {activity_id}") + + anchor_root = activity.get("anchor_root") + if not anchor_root: + return {"verified": False, "reason": "Activity not yet anchored"} + + # Get anchor + anchor = await db.get_anchor(anchor_root) + if not anchor: + return {"verified": False, "reason": "Anchor record not found"} + + # Get tree from IPFS (non-blocking) + import asyncio + tree_cid = anchor.get("tree_ipfs_cid") + if not tree_cid: + return {"verified": False, "reason": "Merkle tree not on IPFS"} + + try: + tree_bytes = await asyncio.to_thread(ipfs_client.get_bytes, tree_cid) + tree = json.loads(tree_bytes) if tree_bytes else None + except Exception: + return {"verified": False, "reason": "Failed to fetch tree from IPFS"} + + if not tree: + return {"verified": False, "reason": "Could not load merkle tree"} + + # Get proof + proof = anchoring.get_merkle_proof(tree, activity_id) + if not proof: + return {"verified": False, "reason": "Activity not in merkle tree"} + + # Verify proof + valid = anchoring.verify_merkle_proof(activity_id, proof, anchor_root) + + return { + "verified": valid, + "activity_id": activity_id, + "merkle_root": anchor_root, + "tree_ipfs_cid": tree_cid, + "proof": proof, + "bitcoin_txid": anchor.get("bitcoin_txid"), + "confirmed_at": anchor.get("confirmed_at") + } + + +@app.post("/anchors/{merkle_root}/upgrade") +async def upgrade_anchor_proof(merkle_root: str): + """ + Try to upgrade an OTS proof from pending to confirmed. + + Bitcoin confirmation typically takes 1-2 hours. Call this periodically + to check if the proof has been included in a Bitcoin block. 
+ """ + import anchoring + import ipfs_client + import asyncio + + anchor = await db.get_anchor(merkle_root) + if not anchor: + raise HTTPException(404, f"Anchor not found: {merkle_root}") + + if anchor.get("confirmed_at"): + return {"status": "already_confirmed", "bitcoin_txid": anchor.get("bitcoin_txid")} + + # Get current OTS proof from IPFS + ots_cid = anchor.get("ots_proof_cid") + if not ots_cid: + return {"status": "no_proof", "message": "No OTS proof stored"} + + try: + ots_proof = await asyncio.to_thread(ipfs_client.get_bytes, ots_cid) + if not ots_proof: + return {"status": "error", "message": "Could not fetch OTS proof from IPFS"} + except Exception as e: + return {"status": "error", "message": f"IPFS error: {e}"} + + # Try to upgrade + upgraded = await asyncio.to_thread(anchoring.upgrade_ots_proof, ots_proof) + + if upgraded and len(upgraded) > len(ots_proof): + # Store upgraded proof on IPFS + try: + new_cid = await asyncio.to_thread(ipfs_client.add_bytes, upgraded) + # TODO: Update anchor record with new CID and confirmed status + return { + "status": "upgraded", + "message": "Proof upgraded - Bitcoin confirmation received", + "new_ots_cid": new_cid, + "proof_size": len(upgraded) + } + except Exception as e: + return {"status": "error", "message": f"Failed to store upgraded proof: {e}"} + else: + return { + "status": "pending", + "message": "Not yet confirmed on Bitcoin. 
Try again in ~1 hour.", + "proof_size": len(ots_proof) if ots_proof else 0 + } + + +@app.get("/anchors/ui", response_class=HTMLResponse) +async def anchors_ui(request: Request): + """Anchors UI page - view and test OpenTimestamps anchoring.""" + username = get_user_from_cookie(request) + + anchors = await db.get_all_anchors() + stats = await db.get_anchor_stats() + + # Build anchors table rows + rows = "" + for anchor in anchors: + status = "confirmed" if anchor.get("confirmed_at") else "pending" + status_class = "text-green-400" if status == "confirmed" else "text-yellow-400" + merkle_root = anchor.get("merkle_root", "")[:16] + "..." + + rows += f''' + + {merkle_root} + {anchor.get("activity_count", 0)} + {status} + {format_date(anchor.get("created_at"), 16)} + + + + + + ''' + + if not rows: + rows = 'No anchors yet' + + content = f''' + + +

Bitcoin Anchoring via OpenTimestamps

+ +
+
+
{stats.get("total_anchors", 0)}
+
Total Anchors
+
+
+
{stats.get("confirmed_anchors", 0)}
+
Confirmed
+
+
+
{stats.get("pending_anchors", 0)}
+
Pending
+
+
+ +
+

Test Anchoring

+

Create a test anchor for unanchored activities, or test the OTS connection.

+
+ + +
+
+
+ +

Anchors

+
+ + + + + + + + + + + + {rows} + +
Merkle RootActivitiesStatusCreatedActions
+
+ +
+

How it works:

+
    +
  1. Activities are batched and hashed into a merkle tree
  2. +
  3. The merkle root is submitted to OpenTimestamps
  4. +
  5. OTS aggregates hashes and anchors to Bitcoin (~1-2 hours)
  6. +
  7. Once confirmed, anyone can verify the timestamp
  8. +
+
+ ''' + + return HTMLResponse(base_html("Anchors", content, username)) + + +@app.post("/anchors/test-ots", response_class=HTMLResponse) +async def test_ots_connection(): + """Test OpenTimestamps connection by submitting a test hash.""" + import anchoring + import hashlib + import asyncio + + # Create a test hash + test_data = f"test-{datetime.now(timezone.utc).isoformat()}" + test_hash = hashlib.sha256(test_data.encode()).hexdigest() + + # Try to submit + try: + ots_proof = await asyncio.to_thread(anchoring.submit_to_opentimestamps, test_hash) + if ots_proof: + return HTMLResponse(f''' +
+ Success! OpenTimestamps is working.
+ Test hash: {test_hash[:32]}...
+ Proof size: {len(ots_proof)} bytes +
+ ''') + else: + return HTMLResponse(''' +
+ Failed! Could not reach OpenTimestamps servers. +
+ ''') + except Exception as e: + return HTMLResponse(f''' +
+ Error: {str(e)} +
+ ''') + + +# ============ Renderers (L1 servers) ============ + +@app.get("/renderers", response_class=HTMLResponse) +async def renderers_page(request: Request): + """Page to manage L1 renderer attachments.""" + username = get_user_from_cookie(request) + + if not username: + content = ''' +

Renderers

+

Log in to manage your renderer connections.

+ ''' + return HTMLResponse(base_html("Renderers", content)) + + # Get user's attached renderers + attached = await db.get_user_renderers(username) + from urllib.parse import quote + + # Build renderer list + rows = [] + for l1_url in L1_SERVERS: + is_attached = l1_url in attached + # Extract display name from URL + display_name = l1_url.replace("https://", "").replace("http://", "") + + if is_attached: + status = 'Attached' + action = f''' + + Open + + + ''' + else: + status = 'Not attached' + # Attach via endpoint that creates scoped token (not raw token in URL) + attach_url = f"/renderers/attach?l1_url={quote(l1_url, safe='')}" + action = f''' + + Attach + + ''' + + row_id = l1_url.replace("://", "-").replace("/", "-").replace(".", "-") + rows.append(f''' +
+
+
{display_name}
+
{l1_url}
+
+
+ {status} + {action} +
+
+ ''') + + content = f''' +

Renderers

+

Connect to L1 rendering servers. After attaching, you can run effects and manage media on that renderer.

+
+ {"".join(rows) if rows else '

No renderers configured.

'} +
+ ''' + return HTMLResponse(base_html("Renderers", content, username)) + + +@app.get("/renderers/attach") +async def attach_renderer_redirect(request: Request, l1_url: str): + """Create a scoped token and redirect to L1 for attachment.""" + username = get_user_from_cookie(request) + if not username: + return RedirectResponse(url="/login", status_code=302) + + # Verify L1 is in our allowed list + l1_normalized = l1_url.rstrip("/") + if not any(l1_normalized == s.rstrip("/") for s in L1_SERVERS): + raise HTTPException(403, f"L1 server not authorized: {l1_url}") + + # Create a scoped token that only works for this specific L1 + scoped_token = create_access_token( + username, + l2_server=f"https://{DOMAIN}", + l1_server=l1_normalized + ) + + # Redirect to L1 with scoped token + redirect_url = f"{l1_normalized}/auth?auth_token={scoped_token.access_token}" + return RedirectResponse(url=redirect_url, status_code=302) + + +@app.post("/renderers/detach", response_class=HTMLResponse) +async def detach_renderer(request: Request): + """Detach from an L1 renderer.""" + username = get_user_from_cookie(request) + if not username: + return HTMLResponse('
Not logged in
') + + form = await request.form() + l1_url = form.get("l1_url", "") + + await db.detach_renderer(username, l1_url) + + # Return updated row with link to attach endpoint (not raw token) + display_name = l1_url.replace("https://", "").replace("http://", "") + from urllib.parse import quote + attach_url = f"/renderers/attach?l1_url={quote(l1_url, safe='')}" + row_id = l1_url.replace("://", "-").replace("/", "-").replace(".", "-") + + return HTMLResponse(f''' +
+
+
{display_name}
+
{l1_url}
+
+
+ Not attached + + Attach + +
+
+ ''') + + +# ============ User Storage ============ + +import storage_providers + + +@app.get("/storage") +async def list_storage(request: Request, user: User = Depends(get_optional_user)): + """List user's storage providers. HTML for browsers (default), JSON only if explicitly requested.""" + # Check if JSON explicitly requested + accept = request.headers.get("accept", "") + wants_json = "application/json" in accept and "text/html" not in accept + + # For browser sessions, also check cookie authentication + username = user.username if user else get_user_from_cookie(request) + + if not username: + if wants_json: + raise HTTPException(401, "Authentication required") + return RedirectResponse(url="/login", status_code=302) + + storages = await db.get_user_storage(username) + + # Add usage stats to each storage + for storage in storages: + usage = await db.get_storage_usage(storage["id"]) + storage["used_bytes"] = usage["used_bytes"] + storage["pin_count"] = usage["pin_count"] + storage["donated_gb"] = storage["capacity_gb"] // 2 + # Mask sensitive config keys for display + if storage.get("config"): + config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"]) + masked = {} + for k, v in config.items(): + if "key" in k.lower() or "token" in k.lower() or "secret" in k.lower(): + masked[k] = v[:4] + "..." 
+ v[-4:] if len(str(v)) > 8 else "****" + else: + masked[k] = v + storage["config_display"] = masked + + if wants_json: + return {"storages": storages} + + # Default to HTML for browsers + return await ui_storage_page(username, storages, request) + + +@app.post("/storage") +async def add_storage(req: AddStorageRequest, user: User = Depends(get_required_user)): + """Add a storage provider.""" + # Validate provider type + valid_types = ["pinata", "web3storage", "nftstorage", "infura", "filebase", "storj", "local"] + if req.provider_type not in valid_types: + raise HTTPException(400, f"Invalid provider type: {req.provider_type}") + + # Test the provider connection before saving + provider = storage_providers.create_provider(req.provider_type, { + **req.config, + "capacity_gb": req.capacity_gb + }) + if not provider: + raise HTTPException(400, "Failed to create provider with given config") + + success, message = await provider.test_connection() + if not success: + raise HTTPException(400, f"Provider connection failed: {message}") + + # Save to database + provider_name = req.provider_name or f"{req.provider_type}-{user.username}" + storage_id = await db.add_user_storage( + username=user.username, + provider_type=req.provider_type, + provider_name=provider_name, + config=req.config, + capacity_gb=req.capacity_gb + ) + + if not storage_id: + raise HTTPException(500, "Failed to save storage provider") + + return {"id": storage_id, "message": f"Storage provider added: {provider_name}"} + + +@app.post("/storage/add") +async def add_storage_form( + request: Request, + provider_type: str = Form(...), + provider_name: Optional[str] = Form(None), + description: Optional[str] = Form(None), + capacity_gb: int = Form(5), + api_key: Optional[str] = Form(None), + secret_key: Optional[str] = Form(None), + api_token: Optional[str] = Form(None), + project_id: Optional[str] = Form(None), + project_secret: Optional[str] = Form(None), + access_key: Optional[str] = Form(None), + bucket: 
Optional[str] = Form(None), + path: Optional[str] = Form(None), +): + """Add a storage provider via HTML form (cookie auth).""" + username = get_user_from_cookie(request) + if not username: + return HTMLResponse('
Not authenticated
', status_code=401) + + # Validate provider type + valid_types = ["pinata", "web3storage", "nftstorage", "infura", "filebase", "storj", "local"] + if provider_type not in valid_types: + return HTMLResponse(f'
Invalid provider type: {provider_type}
') + + # Build config based on provider type + config = {} + if provider_type == "pinata": + if not api_key or not secret_key: + return HTMLResponse('
Pinata requires API Key and Secret Key
') + config = {"api_key": api_key, "secret_key": secret_key} + elif provider_type == "web3storage": + if not api_token: + return HTMLResponse('
web3.storage requires API Token
') + config = {"api_token": api_token} + elif provider_type == "nftstorage": + if not api_token: + return HTMLResponse('
NFT.Storage requires API Token
') + config = {"api_token": api_token} + elif provider_type == "infura": + if not project_id or not project_secret: + return HTMLResponse('
Infura requires Project ID and Project Secret
') + config = {"project_id": project_id, "project_secret": project_secret} + elif provider_type == "filebase": + if not access_key or not secret_key or not bucket: + return HTMLResponse('
Filebase requires Access Key, Secret Key, and Bucket
') + config = {"access_key": access_key, "secret_key": secret_key, "bucket": bucket} + elif provider_type == "storj": + if not access_key or not secret_key or not bucket: + return HTMLResponse('
Storj requires Access Key, Secret Key, and Bucket
') + config = {"access_key": access_key, "secret_key": secret_key, "bucket": bucket} + elif provider_type == "local": + if not path: + return HTMLResponse('
Local storage requires a path
') + config = {"path": path} + + # Test the provider connection before saving + provider = storage_providers.create_provider(provider_type, { + **config, + "capacity_gb": capacity_gb + }) + if not provider: + return HTMLResponse('
Failed to create provider with given config
') + + success, message = await provider.test_connection() + if not success: + return HTMLResponse(f'
Provider connection failed: {message}
') + + # Save to database + name = provider_name or f"{provider_type}-{username}-{len(await db.get_user_storage_by_type(username, provider_type)) + 1}" + storage_id = await db.add_user_storage( + username=username, + provider_type=provider_type, + provider_name=name, + config=config, + capacity_gb=capacity_gb, + description=description + ) + + if not storage_id: + return HTMLResponse('
Failed to save storage provider
') + + return HTMLResponse(f''' +
Storage provider "{name}" added successfully!
+ + ''') + + +@app.get("/storage/{storage_id}") +async def get_storage(storage_id: int, user: User = Depends(get_required_user)): + """Get a specific storage provider.""" + storage = await db.get_storage_by_id(storage_id) + if not storage: + raise HTTPException(404, "Storage provider not found") + if storage["username"] != user.username: + raise HTTPException(403, "Not authorized") + + usage = await db.get_storage_usage(storage_id) + storage["used_bytes"] = usage["used_bytes"] + storage["pin_count"] = usage["pin_count"] + storage["donated_gb"] = storage["capacity_gb"] // 2 + + return storage + + +@app.patch("/storage/{storage_id}") +async def update_storage(storage_id: int, req: UpdateStorageRequest, user: User = Depends(get_required_user)): + """Update a storage provider.""" + storage = await db.get_storage_by_id(storage_id) + if not storage: + raise HTTPException(404, "Storage provider not found") + if storage["username"] != user.username: + raise HTTPException(403, "Not authorized") + + # If updating config, test the new connection + if req.config: + existing_config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"]) + new_config = {**existing_config, **req.config} + provider = storage_providers.create_provider(storage["provider_type"], { + **new_config, + "capacity_gb": req.capacity_gb or storage["capacity_gb"] + }) + if provider: + success, message = await provider.test_connection() + if not success: + raise HTTPException(400, f"Provider connection failed: {message}") + + success = await db.update_user_storage( + storage_id, + config=req.config, + capacity_gb=req.capacity_gb, + is_active=req.is_active + ) + + if not success: + raise HTTPException(500, "Failed to update storage provider") + + return {"message": "Storage provider updated"} + + +@app.delete("/storage/{storage_id}") +async def remove_storage(storage_id: int, request: Request, user: User = Depends(get_optional_user)): + """Remove a storage provider.""" + # 
@app.delete("/storage/{storage_id}")
async def remove_storage(storage_id: int, request: Request, user: User = Depends(get_optional_user)):
    """Delete a storage provider the caller owns.

    Accepts either Bearer-token auth (API clients) or cookie auth
    (browser/HTMX). Returns an empty HTML body for HTMX callers so the
    element is removed from the page, and a JSON message otherwise.
    """
    # Bearer token takes precedence; fall back to the session cookie.
    username = user.username if user else get_user_from_cookie(request)
    if not username:
        raise HTTPException(401, "Not authenticated")

    # Ownership check before touching anything.
    storage = await db.get_storage_by_id(storage_id)
    if not storage:
        raise HTTPException(404, "Storage provider not found")
    if storage["username"] != username:
        raise HTTPException(403, "Not authorized")

    if not await db.remove_user_storage(storage_id):
        raise HTTPException(500, "Failed to remove storage provider")

    # Empty body lets HTMX swap the deleted row away.
    if wants_html(request):
        return HTMLResponse("")
    return {"message": "Storage provider removed"}
@app.post("/storage/{storage_id}/test")
async def test_storage(storage_id: int, request: Request, user: User = Depends(get_optional_user)):
    """Test storage provider connectivity.

    Accepts Bearer-token or cookie auth. Rebuilds the provider from the
    stored config and runs its test_connection(); result is returned as an
    HTML fragment for HTMX callers or JSON otherwise.

    NOTE(review): the success and failure wants_html branches below return
    identical strings — the original markup (styled spans) appears to have
    been stripped during extraction; confirm against version control.
    """
    # Support both Bearer token and cookie auth
    username = user.username if user else get_user_from_cookie(request)
    if not username:
        if wants_html(request):
            return HTMLResponse('Not authenticated', status_code=401)
        raise HTTPException(401, "Not authenticated")

    storage = await db.get_storage_by_id(storage_id)
    if not storage:
        if wants_html(request):
            return HTMLResponse('Storage not found', status_code=404)
        raise HTTPException(404, "Storage provider not found")
    if storage["username"] != username:
        if wants_html(request):
            return HTMLResponse('Not authorized', status_code=403)
        raise HTTPException(403, "Not authorized")

    # Stored config may be a dict (already decoded) or a JSON string.
    config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"])
    provider = storage_providers.create_provider(storage["provider_type"], {
        **config,
        "capacity_gb": storage["capacity_gb"]
    })

    if not provider:
        if wants_html(request):
            return HTMLResponse('Failed to create provider')
        raise HTTPException(500, "Failed to create provider")

    success, message = await provider.test_connection()

    if wants_html(request):
        if success:
            return HTMLResponse(f'{message}')
        return HTMLResponse(f'{message}')

    return {"success": success, "message": message}
+ {info["name"]} + {count_badge} +
+
{info["desc"]}
+
+ ''' + + # Total stats + total_capacity = sum(s["capacity_gb"] for s in storages) + total_used = sum(s.get("used_bytes", 0) for s in storages) + total_pins = sum(s.get("pin_count", 0) for s in storages) + + content = f''' +
+
+

Storage Providers

+
+ +
+

+ Attach your own storage to help power the network. 50% of your capacity is donated to store + shared content, making popular assets more resilient. +

+ +
+
+
{len(storages)}
+
Total Configs
+
+
+
{total_capacity} GB
+
Total Capacity
+
+
+
{total_used / (1024**3):.1f} GB
+
Used
+
+
+
{total_pins}
+
Total Pins
+
+
+ +

Select Provider Type

+
+ {cards} +
+
+
+ ''' + + return HTMLResponse(base_html("Storage", content, username)) + + +@app.get("/storage/type/{provider_type}") +async def storage_type_page(provider_type: str, request: Request, user: User = Depends(get_optional_user)): + """Page for managing storage configs of a specific type.""" + username = user.username if user else get_user_from_cookie(request) + if not username: + return RedirectResponse(url="/login", status_code=302) + + if provider_type not in STORAGE_PROVIDERS_INFO: + raise HTTPException(404, "Invalid provider type") + + storages = await db.get_user_storage_by_type(username, provider_type) + + # Add usage stats + for storage in storages: + usage = await db.get_storage_usage(storage["id"]) + storage["used_bytes"] = usage["used_bytes"] + storage["pin_count"] = usage["pin_count"] + # Mask sensitive config keys + if storage.get("config"): + config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"]) + masked = {} + for k, v in config.items(): + if "key" in k.lower() or "token" in k.lower() or "secret" in k.lower(): + masked[k] = v[:4] + "..." 
@app.get("/storage/type/{provider_type}")
async def storage_type_page(provider_type: str, request: Request, user: User = Depends(get_optional_user)):
    """Page for managing storage configs of a specific provider type.

    Redirects anonymous users to /login; 404s unknown provider types.

    NOTE(review): this route is declared after GET /storage/{storage_id},
    and FastAPI matches routes in declaration order — a request for
    /storage/type/pinata is captured by /storage/{storage_id} with
    storage_id="type" and fails int validation before reaching this
    handler. Confirm and, if so, move this route above the parameterized one.
    """
    username = user.username if user else get_user_from_cookie(request)
    if not username:
        return RedirectResponse(url="/login", status_code=302)

    if provider_type not in STORAGE_PROVIDERS_INFO:
        raise HTTPException(404, "Invalid provider type")

    storages = await db.get_user_storage_by_type(username, provider_type)

    # Attach usage stats and a masked view of each config for display.
    for storage in storages:
        usage = await db.get_storage_usage(storage["id"])
        storage["used_bytes"] = usage["used_bytes"]
        storage["pin_count"] = usage["pin_count"]
        # Mask sensitive config keys before they reach the template.
        if storage.get("config"):
            config = storage["config"] if isinstance(storage["config"], dict) else json.loads(storage["config"])
            masked = {}
            for k, v in config.items():
                if "key" in k.lower() or "token" in k.lower() or "secret" in k.lower():
                    # BUG FIX: slice the stringified value. The original sliced
                    # v directly while the length guard used str(v), so a
                    # non-string secret (e.g. numeric) raised TypeError.
                    sv = str(v)
                    masked[k] = sv[:4] + "..." + sv[-4:] if len(sv) > 8 else "****"
                else:
                    masked[k] = v
            storage["config_display"] = masked

    info = STORAGE_PROVIDERS_INFO[provider_type]
    return await ui_storage_type_page(username, provider_type, info, storages, request)
{desc}
' if desc else "" + + storage_rows += f''' +
+
+
+

{s["provider_name"] or provider_type}

+ {desc_html} +
+
+ {status_text} + + +
+
+
+
+
Capacity
+
{s["capacity_gb"]} GB
+
+
+
Donated
+
{s["capacity_gb"] // 2} GB
+
+
+
Used
+
{format_bytes(s["used_bytes"])}
+
+
+
Pins
+
{s["pin_count"]}
+
+
+
{config_html}
+
+
+ ''' + + if not storages: + storage_rows = f'

No {info["name"]} configs yet. Add one below.

' + + # Build form fields based on provider type + form_fields = "" + if provider_type == "pinata": + form_fields = ''' +
+ + +
+
+ + +
+ ''' + elif provider_type in ("web3storage", "nftstorage"): + form_fields = ''' +
+ + +
+ ''' + elif provider_type == "infura": + form_fields = ''' +
+ + +
+
+ + +
+ ''' + elif provider_type in ("filebase", "storj"): + form_fields = ''' +
+ + +
+
+ + +
+
+ + +
+ ''' + elif provider_type == "local": + form_fields = ''' +
+ + +
+ ''' + + content = f''' +
+
+ ← Back +

{info["name"]} Storage

+
+ +
+

Your {info["name"]} Configs

+
+ {storage_rows} +
+
+ +
+

Add New {info["name"]} Config

+
+ + + {form_fields} + +
+ + +
+
+ + +
+
+ + +
+ +
+ +
+
+
+
+
# ============ Client Download ============

# Tarball is shipped alongside the server module at build time.
CLIENT_TARBALL = Path(__file__).parent / "artdag-client.tar.gz"


@app.get("/download/client")
async def download_client():
    """Serve the bundled Art DAG CLI client tarball, or 404 if absent."""
    tarball = CLIENT_TARBALL
    if not tarball.exists():
        raise HTTPException(404, "Client package not found")
    return FileResponse(
        tarball,
        filename="artdag-client.tar.gz",
        media_type="application/gzip",
    )

{title}

+

View documentation

+
+ ''' + + content = f''' +
+

Documentation

+
+ {doc_links} +
+
+ ''' + return HTMLResponse(base_html("Help", content, username)) + + +@app.get("/help/{doc_name}", response_class=HTMLResponse) +async def help_page(doc_name: str, request: Request): + """Render a README as HTML.""" + if doc_name not in L2_DOCS_MAP: + raise HTTPException(404, f"Documentation '{doc_name}' not found") + + title, doc_path = L2_DOCS_MAP[doc_name] + if not doc_path.exists(): + raise HTTPException(404, f"Documentation file not found") + + username = get_user_from_cookie(request) + + # Read and render markdown + md_content = doc_path.read_text() + html_content = markdown.markdown(md_content, extensions=['tables', 'fenced_code']) + + content = f''' +
+ +
+ {html_content} +
+
def main():
    """Parse CLI args and generate (or report already-existing) RSA keys.

    Defaults come from ARTDAG_DATA / ARTDAG_USER env vars; refuses to
    overwrite an existing keypair unless --force is given, since that
    would invalidate previously published signatures.
    """
    default_data = os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2"))
    default_user = os.environ.get("ARTDAG_USER", "giles")

    parser = argparse.ArgumentParser(description="Generate RSA keypair for L2 server")
    parser.add_argument("--data-dir", default=default_data, help="Data directory")
    parser.add_argument("--user", default=default_user, help="Username")
    parser.add_argument("--force", action="store_true", help="Overwrite existing keys")
    args = parser.parse_args()

    data_dir = Path(args.data_dir)
    username = args.user
    print(f"Data directory: {data_dir}")
    print(f"Username: {username}")

    # Refuse to clobber existing keys unless explicitly forced.
    if has_keys(data_dir, username) and not args.force:
        existing = get_keys_dir(data_dir)
        print(f"\nKeys already exist for {username}!")
        print(f" Private: {existing / f'{username}.pem'}")
        print(f" Public: {existing / f'{username}.pub'}")
        print("\nUse --force to regenerate (will invalidate existing signatures)")
        return

    print("\nGenerating RSA-2048 keypair...")
    _, public_pem = generate_keypair(data_dir, username)

    keys_dir = get_keys_dir(data_dir)
    print(f"\nKeys generated:")
    print(f" Private: {keys_dir / f'{username}.pem'} (chmod 600)")
    print(f" Public: {keys_dir / f'{username}.pub'}")
    print(f"\nPublic key (for verification):")
    print(public_pem)
"""
Storage provider abstraction for user-attachable storage.

Supports:
- Pinata (IPFS pinning service)
- web3.storage / NFT.Storage (IPFS upload services)
- Infura IPFS (pinning API)
- Filebase / Storj (S3-compatible object storage)
- Local filesystem storage
"""

import hashlib
import json
import logging
import os
import shutil
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Optional

import requests

logger = logging.getLogger(__name__)


class StorageProvider(ABC):
    """Abstract base class for storage backends.

    All content is addressed by its SHA3-256 hash; each backend decides
    how to map that hash onto its own storage scheme. The async methods
    are expected to wrap any blocking HTTP/SDK calls (implementations in
    this module use asyncio.to_thread); get_usage() is synchronous.
    """

    # Overridden by each concrete subclass (e.g. "pinata", "local").
    provider_type: str = "unknown"

    @abstractmethod
    async def pin(self, content_hash: str, data: bytes, filename: Optional[str] = None) -> Optional[str]:
        """
        Pin content to storage.

        Args:
            content_hash: SHA3-256 hash of the content
            data: Raw bytes to store
            filename: Optional filename hint

        Returns:
            IPFS CID or provider-specific ID, or None on failure
        """
        pass

    @abstractmethod
    async def unpin(self, content_hash: str) -> bool:
        """
        Unpin content from storage.

        Args:
            content_hash: SHA3-256 hash of the content

        Returns:
            True if unpinned successfully
        """
        pass

    @abstractmethod
    async def get(self, content_hash: str) -> Optional[bytes]:
        """
        Retrieve content from storage.

        Args:
            content_hash: SHA3-256 hash of the content

        Returns:
            Raw bytes or None if not found
        """
        pass

    @abstractmethod
    async def is_pinned(self, content_hash: str) -> bool:
        """Check if content is pinned in this storage."""
        pass

    @abstractmethod
    async def test_connection(self) -> tuple[bool, str]:
        """
        Test connectivity to the storage provider.

        Returns:
            (success, message) tuple
        """
        pass

    @abstractmethod
    def get_usage(self) -> dict:
        """
        Get storage usage statistics.

        Returns:
            {used_bytes, capacity_bytes, pin_count}
        """
        pass
class PinataProvider(StorageProvider):
    """Pinata IPFS pinning service provider.

    Pins are tagged with our SHA3-256 content_hash as Pinata keyvalue
    metadata, so they can be located again later without keeping a local
    hash->CID mapping.
    """

    provider_type = "pinata"

    def __init__(self, api_key: str, secret_key: str, capacity_gb: int = 1):
        self.api_key = api_key
        self.secret_key = secret_key
        self.capacity_bytes = capacity_gb * 1024**3
        self.base_url = "https://api.pinata.cloud"

    def _headers(self) -> dict:
        """Pinata legacy API key-pair auth headers."""
        return {
            "pinata_api_key": self.api_key,
            "pinata_secret_api_key": self.secret_key,
        }

    def _find_pins(self, content_hash: str) -> list:
        """Return pinned rows tagged with this content_hash.

        Synchronous (uses requests) — call from a worker thread. Factored
        out because unpin/get/is_pinned previously duplicated this query
        verbatim.
        """
        response = requests.get(
            f"{self.base_url}/data/pinList",
            params={"metadata[keyvalues][content_hash]": content_hash, "status": "pinned"},
            headers=self._headers(),
            timeout=30
        )
        response.raise_for_status()
        return response.json().get("rows", [])

    async def pin(self, content_hash: str, data: bytes, filename: Optional[str] = None) -> Optional[str]:
        """Pin content to Pinata; returns the IPFS CID or None on failure."""
        try:
            import asyncio

            def do_pin():
                files = {"file": (filename or f"{content_hash[:16]}.bin", data)}
                # content_hash in keyvalues is what _find_pins queries on.
                metadata = {
                    "name": filename or content_hash[:16],
                    "keyvalues": {"content_hash": content_hash}
                }
                response = requests.post(
                    f"{self.base_url}/pinning/pinFileToIPFS",
                    files=files,
                    data={"pinataMetadata": json.dumps(metadata)},
                    headers=self._headers(),
                    timeout=120
                )
                response.raise_for_status()
                return response.json().get("IpfsHash")

            cid = await asyncio.to_thread(do_pin)
            logger.info(f"Pinata: Pinned {content_hash[:16]}... as {cid}")
            return cid
        except Exception as e:
            logger.error(f"Pinata pin failed: {e}")
            return None

    async def unpin(self, content_hash: str) -> bool:
        """Unpin content from Pinata by finding its CID(s) first."""
        try:
            import asyncio

            def do_unpin():
                pins = self._find_pins(content_hash)
                if not pins:
                    return False
                # One content_hash may map to several pins; remove them all.
                for pin in pins:
                    cid = pin.get("ipfs_pin_hash")
                    if cid:
                        resp = requests.delete(
                            f"{self.base_url}/pinning/unpin/{cid}",
                            headers=self._headers(),
                            timeout=30
                        )
                        resp.raise_for_status()
                return True

            result = await asyncio.to_thread(do_unpin)
            logger.info(f"Pinata: Unpinned {content_hash[:16]}...")
            return result
        except Exception as e:
            logger.error(f"Pinata unpin failed: {e}")
            return False

    async def get(self, content_hash: str) -> Optional[bytes]:
        """Fetch content bytes via the public Pinata gateway, or None."""
        try:
            import asyncio

            def do_get():
                pins = self._find_pins(content_hash)
                if not pins:
                    return None
                cid = pins[0].get("ipfs_pin_hash")
                if not cid:
                    return None
                gateway_response = requests.get(
                    f"https://gateway.pinata.cloud/ipfs/{cid}",
                    timeout=120
                )
                gateway_response.raise_for_status()
                return gateway_response.content

            return await asyncio.to_thread(do_get)
        except Exception as e:
            logger.error(f"Pinata get failed: {e}")
            return None

    async def is_pinned(self, content_hash: str) -> bool:
        """True if at least one pinned row carries this content_hash."""
        try:
            import asyncio

            def do_check():
                return len(self._find_pins(content_hash)) > 0

            return await asyncio.to_thread(do_check)
        except Exception:
            return False

    async def test_connection(self) -> tuple[bool, str]:
        """Test Pinata API connectivity / credential validity."""
        try:
            import asyncio

            def do_test():
                response = requests.get(
                    f"{self.base_url}/data/testAuthentication",
                    headers=self._headers(),
                    timeout=10
                )
                response.raise_for_status()
                return True, "Connected to Pinata successfully"

            return await asyncio.to_thread(do_test)
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 401:
                return False, "Invalid API credentials"
            return False, f"HTTP error: {e}"
        except Exception as e:
            return False, f"Connection failed: {e}"

    def get_usage(self) -> dict:
        """Get Pinata usage stats; zeros on any API failure."""
        try:
            response = requests.get(
                f"{self.base_url}/data/userPinnedDataTotal",
                headers=self._headers(),
                timeout=10
            )
            response.raise_for_status()
            data = response.json()
            return {
                "used_bytes": data.get("pin_size_total", 0),
                "capacity_bytes": self.capacity_bytes,
                "pin_count": data.get("pin_count", 0)
            }
        except Exception:
            return {"used_bytes": 0, "capacity_bytes": self.capacity_bytes, "pin_count": 0}
Pinata.""" + try: + import asyncio + + def do_check(): + response = requests.get( + f"{self.base_url}/data/pinList", + params={"metadata[keyvalues][content_hash]": content_hash, "status": "pinned"}, + headers=self._headers(), + timeout=30 + ) + response.raise_for_status() + return len(response.json().get("rows", [])) > 0 + + return await asyncio.to_thread(do_check) + except Exception: + return False + + async def test_connection(self) -> tuple[bool, str]: + """Test Pinata API connectivity.""" + try: + import asyncio + + def do_test(): + response = requests.get( + f"{self.base_url}/data/testAuthentication", + headers=self._headers(), + timeout=10 + ) + response.raise_for_status() + return True, "Connected to Pinata successfully" + + return await asyncio.to_thread(do_test) + except requests.exceptions.HTTPError as e: + if e.response.status_code == 401: + return False, "Invalid API credentials" + return False, f"HTTP error: {e}" + except Exception as e: + return False, f"Connection failed: {e}" + + def get_usage(self) -> dict: + """Get Pinata usage stats.""" + try: + response = requests.get( + f"{self.base_url}/data/userPinnedDataTotal", + headers=self._headers(), + timeout=10 + ) + response.raise_for_status() + data = response.json() + return { + "used_bytes": data.get("pin_size_total", 0), + "capacity_bytes": self.capacity_bytes, + "pin_count": data.get("pin_count", 0) + } + except Exception: + return {"used_bytes": 0, "capacity_bytes": self.capacity_bytes, "pin_count": 0} + + +class Web3StorageProvider(StorageProvider): + """web3.storage pinning service provider.""" + + provider_type = "web3storage" + + def __init__(self, api_token: str, capacity_gb: int = 1): + self.api_token = api_token + self.capacity_bytes = capacity_gb * 1024**3 + self.base_url = "https://api.web3.storage" + + def _headers(self) -> dict: + return {"Authorization": f"Bearer {self.api_token}"} + + async def pin(self, content_hash: str, data: bytes, filename: Optional[str] = None) -> 
Optional[str]: + """Pin content to web3.storage.""" + try: + import asyncio + + def do_pin(): + response = requests.post( + f"{self.base_url}/upload", + data=data, + headers={ + **self._headers(), + "X-Name": filename or content_hash[:16] + }, + timeout=120 + ) + response.raise_for_status() + return response.json().get("cid") + + cid = await asyncio.to_thread(do_pin) + logger.info(f"web3.storage: Pinned {content_hash[:16]}... as {cid}") + return cid + except Exception as e: + logger.error(f"web3.storage pin failed: {e}") + return None + + async def unpin(self, content_hash: str) -> bool: + """web3.storage doesn't support unpinning - data is stored permanently.""" + logger.warning("web3.storage: Unpinning not supported (permanent storage)") + return False + + async def get(self, content_hash: str) -> Optional[bytes]: + """Get content from web3.storage - would need CID mapping.""" + # web3.storage requires knowing the CID to fetch + # For now, return None - we'd need to maintain a mapping + return None + + async def is_pinned(self, content_hash: str) -> bool: + """Check if content is pinned - would need CID mapping.""" + return False + + async def test_connection(self) -> tuple[bool, str]: + """Test web3.storage API connectivity.""" + try: + import asyncio + + def do_test(): + response = requests.get( + f"{self.base_url}/user/uploads", + headers=self._headers(), + params={"size": 1}, + timeout=10 + ) + response.raise_for_status() + return True, "Connected to web3.storage successfully" + + return await asyncio.to_thread(do_test) + except requests.exceptions.HTTPError as e: + if e.response.status_code == 401: + return False, "Invalid API token" + return False, f"HTTP error: {e}" + except Exception as e: + return False, f"Connection failed: {e}" + + def get_usage(self) -> dict: + """Get web3.storage usage stats.""" + try: + response = requests.get( + f"{self.base_url}/user/uploads", + headers=self._headers(), + params={"size": 1000}, + timeout=30 + ) + 
class NFTStorageProvider(StorageProvider):
    """NFT.Storage pinning service provider (free for NFT data).

    Like web3.storage, uploads are permanent and there is no local
    content_hash -> CID mapping, so unpin/get/is_pinned are stubs.
    """

    provider_type = "nftstorage"

    def __init__(self, api_token: str, capacity_gb: int = 5):
        self.api_token = api_token
        self.capacity_bytes = capacity_gb * 1024**3
        self.base_url = "https://api.nft.storage"

    def _headers(self) -> dict:
        """Bearer-token auth header."""
        return {"Authorization": f"Bearer {self.api_token}"}

    async def pin(self, content_hash: str, data: bytes, filename: Optional[str] = None) -> Optional[str]:
        """Upload raw bytes to NFT.Storage; returns the CID or None."""
        try:
            import asyncio

            def upload():
                resp = requests.post(
                    f"{self.base_url}/upload",
                    data=data,
                    headers={**self._headers(), "Content-Type": "application/octet-stream"},
                    timeout=120,
                )
                resp.raise_for_status()
                return resp.json().get("value", {}).get("cid")

            cid = await asyncio.to_thread(upload)
            logger.info(f"NFT.Storage: Pinned {content_hash[:16]}... as {cid}")
            return cid
        except Exception as e:
            logger.error(f"NFT.Storage pin failed: {e}")
            return None

    async def unpin(self, content_hash: str) -> bool:
        """NFT.Storage doesn't support unpinning - data is stored permanently."""
        logger.warning("NFT.Storage: Unpinning not supported (permanent storage)")
        return False

    async def get(self, content_hash: str) -> Optional[bytes]:
        """Not implemented: fetching requires the CID, which we don't track."""
        return None

    async def is_pinned(self, content_hash: str) -> bool:
        """Not implemented: checking requires the CID, which we don't track."""
        return False

    async def test_connection(self) -> tuple[bool, str]:
        """Verify the API token against the service root."""
        try:
            import asyncio

            def probe():
                resp = requests.get(
                    f"{self.base_url}/",
                    headers=self._headers(),
                    timeout=10,
                )
                resp.raise_for_status()
                return True, "Connected to NFT.Storage successfully"

            return await asyncio.to_thread(probe)
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 401:
                return False, "Invalid API token"
            return False, f"HTTP error: {e}"
        except Exception as e:
            return False, f"Connection failed: {e}"

    def get_usage(self) -> dict:
        """No usage API wired up; report only the configured capacity."""
        return {"used_bytes": 0, "capacity_bytes": self.capacity_bytes, "pin_count": 0}
as {cid}") + return cid + except Exception as e: + logger.error(f"NFT.Storage pin failed: {e}") + return None + + async def unpin(self, content_hash: str) -> bool: + """NFT.Storage doesn't support unpinning - data is stored permanently.""" + logger.warning("NFT.Storage: Unpinning not supported (permanent storage)") + return False + + async def get(self, content_hash: str) -> Optional[bytes]: + """Get content from NFT.Storage - would need CID mapping.""" + return None + + async def is_pinned(self, content_hash: str) -> bool: + """Check if content is pinned - would need CID mapping.""" + return False + + async def test_connection(self) -> tuple[bool, str]: + """Test NFT.Storage API connectivity.""" + try: + import asyncio + + def do_test(): + response = requests.get( + f"{self.base_url}/", + headers=self._headers(), + timeout=10 + ) + response.raise_for_status() + return True, "Connected to NFT.Storage successfully" + + return await asyncio.to_thread(do_test) + except requests.exceptions.HTTPError as e: + if e.response.status_code == 401: + return False, "Invalid API token" + return False, f"HTTP error: {e}" + except Exception as e: + return False, f"Connection failed: {e}" + + def get_usage(self) -> dict: + """Get NFT.Storage usage stats.""" + return {"used_bytes": 0, "capacity_bytes": self.capacity_bytes, "pin_count": 0} + + +class InfuraIPFSProvider(StorageProvider): + """Infura IPFS pinning service provider.""" + + provider_type = "infura" + + def __init__(self, project_id: str, project_secret: str, capacity_gb: int = 5): + self.project_id = project_id + self.project_secret = project_secret + self.capacity_bytes = capacity_gb * 1024**3 + self.base_url = "https://ipfs.infura.io:5001/api/v0" + + def _auth(self) -> tuple: + return (self.project_id, self.project_secret) + + async def pin(self, content_hash: str, data: bytes, filename: Optional[str] = None) -> Optional[str]: + """Pin content to Infura IPFS.""" + try: + import asyncio + + def do_pin(): + files = 
{"file": (filename or f"{content_hash[:16]}.bin", data)} + response = requests.post( + f"{self.base_url}/add", + files=files, + auth=self._auth(), + timeout=120 + ) + response.raise_for_status() + return response.json().get("Hash") + + cid = await asyncio.to_thread(do_pin) + logger.info(f"Infura IPFS: Pinned {content_hash[:16]}... as {cid}") + return cid + except Exception as e: + logger.error(f"Infura IPFS pin failed: {e}") + return None + + async def unpin(self, content_hash: str) -> bool: + """Unpin content from Infura IPFS.""" + try: + import asyncio + + def do_unpin(): + response = requests.post( + f"{self.base_url}/pin/rm", + params={"arg": content_hash}, + auth=self._auth(), + timeout=30 + ) + response.raise_for_status() + return True + + return await asyncio.to_thread(do_unpin) + except Exception as e: + logger.error(f"Infura IPFS unpin failed: {e}") + return False + + async def get(self, content_hash: str) -> Optional[bytes]: + """Get content from Infura IPFS gateway.""" + try: + import asyncio + + def do_get(): + response = requests.post( + f"{self.base_url}/cat", + params={"arg": content_hash}, + auth=self._auth(), + timeout=120 + ) + response.raise_for_status() + return response.content + + return await asyncio.to_thread(do_get) + except Exception as e: + logger.error(f"Infura IPFS get failed: {e}") + return None + + async def is_pinned(self, content_hash: str) -> bool: + """Check if content is pinned on Infura IPFS.""" + try: + import asyncio + + def do_check(): + response = requests.post( + f"{self.base_url}/pin/ls", + params={"arg": content_hash}, + auth=self._auth(), + timeout=30 + ) + return response.status_code == 200 + + return await asyncio.to_thread(do_check) + except Exception: + return False + + async def test_connection(self) -> tuple[bool, str]: + """Test Infura IPFS API connectivity.""" + try: + import asyncio + + def do_test(): + response = requests.post( + f"{self.base_url}/id", + auth=self._auth(), + timeout=10 + ) + 
class FilebaseProvider(StorageProvider):
    """Filebase S3-compatible IPFS pinning service.

    Objects are keyed by the full SHA3-256 content_hash so that pin,
    unpin, get and is_pinned all agree on the lookup key.
    """

    provider_type = "filebase"

    def __init__(self, access_key: str, secret_key: str, bucket: str, capacity_gb: int = 5):
        self.access_key = access_key
        self.secret_key = secret_key
        self.bucket = bucket
        self.capacity_bytes = capacity_gb * 1024**3
        self.endpoint = "https://s3.filebase.com"

    def _client(self):
        """Build a boto3 S3 client for the Filebase endpoint.

        Synchronous; call from a worker thread. Factored out because every
        method previously duplicated this construction.
        """
        import boto3
        from botocore.config import Config
        return boto3.client(
            's3',
            endpoint_url=self.endpoint,
            aws_access_key_id=self.access_key,
            aws_secret_access_key=self.secret_key,
            config=Config(signature_version='s3v4')
        )

    async def pin(self, content_hash: str, data: bytes, filename: Optional[str] = None) -> Optional[str]:
        """Store content in the Filebase bucket; returns the IPFS CID.

        BUG FIX: the object is now keyed by content_hash. Previously pin()
        stored under ``filename or f"{content_hash[:16]}.bin"`` while
        unpin/get/is_pinned looked up ``Key=content_hash``, so pinned
        objects could never be found again.
        """
        try:
            import asyncio

            def do_pin():
                s3 = self._client()
                s3.put_object(Bucket=self.bucket, Key=content_hash, Body=data)
                # Filebase exposes the IPFS CID as object metadata on HEAD.
                head = s3.head_object(Bucket=self.bucket, Key=content_hash)
                return head.get('Metadata', {}).get('cid', content_hash)

            cid = await asyncio.to_thread(do_pin)
            logger.info(f"Filebase: Pinned {content_hash[:16]}... as {cid}")
            return cid
        except Exception as e:
            logger.error(f"Filebase pin failed: {e}")
            return None

    async def unpin(self, content_hash: str) -> bool:
        """Remove content from Filebase."""
        try:
            import asyncio

            def do_unpin():
                self._client().delete_object(Bucket=self.bucket, Key=content_hash)
                return True

            return await asyncio.to_thread(do_unpin)
        except Exception as e:
            logger.error(f"Filebase unpin failed: {e}")
            return False

    async def get(self, content_hash: str) -> Optional[bytes]:
        """Get content bytes from Filebase, or None on any failure."""
        try:
            import asyncio

            def do_get():
                response = self._client().get_object(Bucket=self.bucket, Key=content_hash)
                return response['Body'].read()

            return await asyncio.to_thread(do_get)
        except Exception as e:
            logger.error(f"Filebase get failed: {e}")
            return None

    async def is_pinned(self, content_hash: str) -> bool:
        """Check if content exists in Filebase (HEAD raises when absent)."""
        try:
            import asyncio

            def do_check():
                self._client().head_object(Bucket=self.bucket, Key=content_hash)
                return True

            return await asyncio.to_thread(do_check)
        except Exception:
            return False

    async def test_connection(self) -> tuple[bool, str]:
        """Verify credentials and bucket access via HEAD bucket."""
        try:
            import asyncio

            def do_test():
                self._client().head_bucket(Bucket=self.bucket)
                return True, f"Connected to Filebase bucket '{self.bucket}'"

            return await asyncio.to_thread(do_test)
        except Exception as e:
            # botocore surfaces HTTP status codes inside the message string.
            if "404" in str(e):
                return False, f"Bucket '{self.bucket}' not found"
            if "403" in str(e):
                return False, "Invalid credentials or no access to bucket"
            return False, f"Connection failed: {e}"

    def get_usage(self) -> dict:
        """No usage API wired up; report only the configured capacity."""
        return {"used_bytes": 0, "capacity_bytes": self.capacity_bytes, "pin_count": 0}
do_unpin(): + s3 = boto3.client( + 's3', + endpoint_url=self.endpoint, + aws_access_key_id=self.access_key, + aws_secret_access_key=self.secret_key, + config=Config(signature_version='s3v4') + ) + s3.delete_object(Bucket=self.bucket, Key=content_hash) + return True + + return await asyncio.to_thread(do_unpin) + except Exception as e: + logger.error(f"Storj unpin failed: {e}") + return False + + async def get(self, content_hash: str) -> Optional[bytes]: + """Get content from Storj.""" + try: + import asyncio + import boto3 + from botocore.config import Config + + def do_get(): + s3 = boto3.client( + 's3', + endpoint_url=self.endpoint, + aws_access_key_id=self.access_key, + aws_secret_access_key=self.secret_key, + config=Config(signature_version='s3v4') + ) + response = s3.get_object(Bucket=self.bucket, Key=content_hash) + return response['Body'].read() + + return await asyncio.to_thread(do_get) + except Exception as e: + logger.error(f"Storj get failed: {e}") + return None + + async def is_pinned(self, content_hash: str) -> bool: + """Check if content exists on Storj.""" + try: + import asyncio + import boto3 + from botocore.config import Config + + def do_check(): + s3 = boto3.client( + 's3', + endpoint_url=self.endpoint, + aws_access_key_id=self.access_key, + aws_secret_access_key=self.secret_key, + config=Config(signature_version='s3v4') + ) + s3.head_object(Bucket=self.bucket, Key=content_hash) + return True + + return await asyncio.to_thread(do_check) + except Exception: + return False + + async def test_connection(self) -> tuple[bool, str]: + """Test Storj connectivity.""" + try: + import asyncio + import boto3 + from botocore.config import Config + + def do_test(): + s3 = boto3.client( + 's3', + endpoint_url=self.endpoint, + aws_access_key_id=self.access_key, + aws_secret_access_key=self.secret_key, + config=Config(signature_version='s3v4') + ) + s3.head_bucket(Bucket=self.bucket) + return True, f"Connected to Storj bucket '{self.bucket}'" + + return await 
asyncio.to_thread(do_test) + except Exception as e: + if "404" in str(e): + return False, f"Bucket '{self.bucket}' not found" + if "403" in str(e): + return False, "Invalid credentials or no access to bucket" + return False, f"Connection failed: {e}" + + def get_usage(self) -> dict: + """Get Storj usage stats.""" + return {"used_bytes": 0, "capacity_bytes": self.capacity_bytes, "pin_count": 0} + + +class LocalStorageProvider(StorageProvider): + """Local filesystem storage provider.""" + + provider_type = "local" + + def __init__(self, base_path: str, capacity_gb: int = 10): + self.base_path = Path(base_path) + self.capacity_bytes = capacity_gb * 1024**3 + # Create directory if it doesn't exist + self.base_path.mkdir(parents=True, exist_ok=True) + + def _get_file_path(self, content_hash: str) -> Path: + """Get file path for a content hash (using subdirectories).""" + # Use first 2 chars as subdirectory for better filesystem performance + subdir = content_hash[:2] + return self.base_path / subdir / content_hash + + async def pin(self, content_hash: str, data: bytes, filename: Optional[str] = None) -> Optional[str]: + """Store content locally.""" + try: + import asyncio + + def do_store(): + file_path = self._get_file_path(content_hash) + file_path.parent.mkdir(parents=True, exist_ok=True) + file_path.write_bytes(data) + return content_hash # Use content_hash as ID for local storage + + result = await asyncio.to_thread(do_store) + logger.info(f"Local: Stored {content_hash[:16]}...") + return result + except Exception as e: + logger.error(f"Local storage failed: {e}") + return None + + async def unpin(self, content_hash: str) -> bool: + """Remove content from local storage.""" + try: + import asyncio + + def do_remove(): + file_path = self._get_file_path(content_hash) + if file_path.exists(): + file_path.unlink() + return True + return False + + return await asyncio.to_thread(do_remove) + except Exception as e: + logger.error(f"Local unpin failed: {e}") + return False 
+ + async def get(self, content_hash: str) -> Optional[bytes]: + """Get content from local storage.""" + try: + import asyncio + + def do_get(): + file_path = self._get_file_path(content_hash) + if file_path.exists(): + return file_path.read_bytes() + return None + + return await asyncio.to_thread(do_get) + except Exception as e: + logger.error(f"Local get failed: {e}") + return None + + async def is_pinned(self, content_hash: str) -> bool: + """Check if content exists in local storage.""" + return self._get_file_path(content_hash).exists() + + async def test_connection(self) -> tuple[bool, str]: + """Test local storage is writable.""" + try: + test_file = self.base_path / ".write_test" + test_file.write_text("test") + test_file.unlink() + return True, f"Local storage ready at {self.base_path}" + except Exception as e: + return False, f"Cannot write to {self.base_path}: {e}" + + def get_usage(self) -> dict: + """Get local storage usage stats.""" + try: + total_size = 0 + file_count = 0 + for subdir in self.base_path.iterdir(): + if subdir.is_dir() and len(subdir.name) == 2: + for f in subdir.iterdir(): + if f.is_file(): + total_size += f.stat().st_size + file_count += 1 + return { + "used_bytes": total_size, + "capacity_bytes": self.capacity_bytes, + "pin_count": file_count + } + except Exception: + return {"used_bytes": 0, "capacity_bytes": self.capacity_bytes, "pin_count": 0} + + +def create_provider(provider_type: str, config: dict) -> Optional[StorageProvider]: + """ + Factory function to create a storage provider from config. 
+ + Args: + provider_type: One of 'pinata', 'web3storage', 'nftstorage', 'infura', 'filebase', 'storj', 'local' + config: Provider-specific configuration dict + + Returns: + StorageProvider instance or None if invalid + """ + try: + if provider_type == "pinata": + return PinataProvider( + api_key=config["api_key"], + secret_key=config["secret_key"], + capacity_gb=config.get("capacity_gb", 1) + ) + elif provider_type == "web3storage": + return Web3StorageProvider( + api_token=config["api_token"], + capacity_gb=config.get("capacity_gb", 5) + ) + elif provider_type == "nftstorage": + return NFTStorageProvider( + api_token=config["api_token"], + capacity_gb=config.get("capacity_gb", 5) + ) + elif provider_type == "infura": + return InfuraIPFSProvider( + project_id=config["project_id"], + project_secret=config["project_secret"], + capacity_gb=config.get("capacity_gb", 5) + ) + elif provider_type == "filebase": + return FilebaseProvider( + access_key=config["access_key"], + secret_key=config["secret_key"], + bucket=config["bucket"], + capacity_gb=config.get("capacity_gb", 5) + ) + elif provider_type == "storj": + return StorjProvider( + access_key=config["access_key"], + secret_key=config["secret_key"], + bucket=config["bucket"], + capacity_gb=config.get("capacity_gb", 25) + ) + elif provider_type == "local": + return LocalStorageProvider( + base_path=config["path"], + capacity_gb=config.get("capacity_gb", 10) + ) + else: + logger.error(f"Unknown provider type: {provider_type}") + return None + except KeyError as e: + logger.error(f"Missing config key for {provider_type}: {e}") + return None + except Exception as e: + logger.error(f"Failed to create provider {provider_type}: {e}") + return None